input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
<filename>backend/causx_application.py
from flask.helpers import make_response, send_file
import pandas as pd
import os
import sys
import tempfile
import json
import zipfile
import uuid
import datetime
import jwt
from io import BytesIO
from pathlib import Path
from flask import request
from werkzeug.utils import secure_filename
from app_config import app
from t2wml.project import Project, FileNotPresentInProject, InvalidProjectDirectory
from t2wml.wikification.utility_functions import dict_to_kgtk, kgtk_to_dict
from t2wml.api import annotation_suggester, get_Pnode, get_Qnode, t2wml_settings
from t2wml_web import ( get_kg, autocreate_items, set_web_settings,
get_layers, get_annotations, get_table, save_annotations,
get_project_instance, get_qnodes_layer,
suggest_annotations, update_t2wml_settings, get_entities)
import web_exceptions
from causx.causx_utils import AnnotationNodeGenerator, causx_get_variable_dict, causx_get_variable_metadata, causx_set_variable, get_causx_partial_csv, causx_create_canonical_spreadsheet, get_causx_tags, preload
from causx.wikification import wikify_countries
from utils import create_user_wikification
from web_exceptions import WebException, make_frontend_err_dict
from calc_params import CalcParams
from global_settings import global_settings
debug_mode = False
set_web_settings()
os.makedirs(app.config["PROJECTS_DIR"], exist_ok=True)
def encode_auth_token():
    """
    Generates the Auth Token.

    The token carries an issued-at timestamp and a random UUID subject; the
    subject is later used (see decode_auth_token/get_project_folder) as the
    per-session project folder name.

    :return: string (JWT signed with the app's SECRET_KEY, HS256)
    """
    # datetime.utcnow() is deprecated and returns a naive datetime; use an
    # aware UTC timestamp instead. PyJWT converts aware datetimes to a POSIX
    # timestamp for the 'iat' claim, so the encoded token is equivalent.
    payload = {
        'iat': datetime.datetime.now(datetime.timezone.utc),
        'sub': str(uuid.uuid4())
    }
    return jwt.encode(
        payload,
        app.config.get('SECRET_KEY'),
        algorithm='HS256'
    )
def decode_auth_token(auth_token):
    """Decode a JWT produced by encode_auth_token and return its subject.

    :param auth_token: the encoded JWT string from the request header
    :return: the 'sub' claim (the per-session project folder name)
    """
    claims = jwt.decode(
        auth_token, app.config.get('SECRET_KEY'), algorithms=['HS256'])
    return claims['sub']
def get_project_folder():
    """Resolve (and create if needed) the per-session project folder.

    The folder name is the subject decoded from the request's
    "Authentication" header token, rooted under the app's PROJECTS_DIR.
    """
    token = request.headers.get("Authentication")
    folder_name = decode_auth_token(token)
    project_folder = Path(app.config["PROJECTS_DIR"]) / folder_name
    os.makedirs(project_folder, exist_ok=True)
    return project_folder
def get_project():
    """Return the session's project, creating a new "Causx" project on first use."""
    project_folder = get_project_folder()
    try:
        # Probe only: raises FileNotPresentInProject when no project file
        # exists yet; the loaded result itself is not needed.
        Project.load(project_folder)
    except FileNotPresentInProject:
        new_project = Project(project_folder, title="Causx")
        new_project.save()
    return get_project_instance(project_folder)
def get_project_dict(project):
    """Merge the project's attributes with the global settings into one dict.

    Global settings are merged last, so they win on any key collision —
    same precedence as the original sequential dict.update calls.
    """
    return {**project.__dict__, **global_settings.__dict__}
def json_response(func):
    """Decorator for Flask views: turn a (data, code) pair or a raised error
    into a flask Response.

    WebExceptions are reported with their own error dict and status code.
    Any other exception is wrapped via make_frontend_err_dict; low-level
    "Permission denied" errors are first translated into a friendlier
    FileOpenElsewhereError. The status code comes from the exception's
    ``code`` attribute when present, else 500.
    """
    def wrapper(*args, **kwargs):
        try:
            data, code = func(*args, **kwargs)
        except WebException as e:
            data = {"error": e.error_dict}
            code = e.code
        except Exception as e:
            if "Permission denied" in str(e):
                e = web_exceptions.FileOpenElsewhereError(
                    "Check whether a file you are trying to edit is open elsewhere on your computer: "+str(e))
                # Bug fix: previously this error dict was immediately
                # overwritten by the generic make_frontend_err_dict below,
                # discarding the friendlier message structure.
                data = {"error": e.error_dict}
            else:
                data = {"error": make_frontend_err_dict(e)}
            try:
                code = e.code
                data["error"]["errorCode"] = code
            except AttributeError:
                # Exception carries no HTTP code: report a server error.
                code = 500
        finally:
            response = make_response(data)
            response.status_code = code
            return response
    wrapper.__name__ = func.__name__  # This is required to avoid issues with flask
    return wrapper
def get_annotation_name(calc_params):
    """Build the project-relative annotation-file path for the current
    data file and sheet, and ensure the "annotations" directory exists.

    :param calc_params: object exposing data_path, sheet_name, and
        project.directory
    :return: relative path string "annotations/<data>_<sheet>.annotation"
    """
    data_stem = Path(calc_params.data_path).stem
    sheet_stem = Path(calc_params.sheet_name).stem
    annotation_path = f"annotations/{data_stem}_{sheet_stem}.annotation"
    os.makedirs(Path(calc_params.project.directory) / "annotations", exist_ok=True)
    return annotation_path
def get_calc_params(project, data_required=True):
    """Build CalcParams from the request's data_file/sheet_name query args.

    :param project: the active project
    :param data_required: when False, missing query args yield None instead
        of raising InvalidRequestException
    :return: CalcParams with _annotation_path set, or None
    """
    try:
        if 'data_file' not in request.args:
            raise web_exceptions.InvalidRequestException(
                "data file parameter not specified")
        data_file = request.args['data_file']
        if 'sheet_name' not in request.args:
            raise web_exceptions.InvalidRequestException(
                "sheet name parameter not specified")
        sheet_name = request.args['sheet_name']
    except web_exceptions.InvalidRequestException:
        if data_required:
            raise
        return None
    calc_params = CalcParams(project, data_file, sheet_name)
    calc_params._annotation_path = get_annotation_name(calc_params)
    return calc_params
def get_mapping():
    """Compute the full mapping response for the current request's data
    file/sheet: project dict, annotations, yaml content, and result layers.

    :return: (response dict, 200)
    """
    project = get_project()
    calc_params = get_calc_params(project)
    response = dict(project=get_project_dict(project))
    response["annotations"], response["yamlContent"] = get_annotations(
        calc_params)
    try:
        # Best-effort warm-up; failures are deliberately swallowed so the
        # mapping response can still be produced.
        preload(calc_params)
    except Exception as e:
        pass
    get_layers(response, calc_params)
    return response, 200
@app.route('/api/causx/token', methods=['GET'])
def get_token():
    """Issue a fresh auth token that identifies a new session project folder."""
    response = {"token": encode_auth_token()}
    return response, 200
@app.route('/api/causx/project/entities', methods=['GET'])  # V
@json_response
def get_project_entities():
    """Return every entity defined in the current project."""
    current_project = get_project()
    return get_entities(current_project), 200
@app.route('/api/causx/auto_wikinodes', methods=['POST'])
@json_response
def create_auto_nodes():
    """Auto-create wiki nodes (items or properties) for a selected region
    and return the refreshed qnode layers.

    :return: (response dict, 200)
    """
    project = get_project()
    calc_params = get_calc_params(project)
    request_json = request.get_json()
    sel = request_json['selection']
    # Convert 1-based frontend coordinates to 0-based (top-left, bottom-right).
    selection = (sel["x1"] - 1, sel["y1"] - 1), (sel["x2"] - 1, sel["y2"] - 1)
    is_property = request_json['is_property']
    data_type = request_json.get("data_type", None)
    autocreate_items(calc_params, selection, is_property, data_type)
    response = dict(project=get_project_dict(project))
    response["layers"] = get_qnodes_layer(calc_params)
    return response, 200
@app.route('/api/causx/annotation', methods=['POST'])
@json_response
def upload_annotation():
    """Save the posted annotations for the current data file/sheet, then
    return the project dict merged with the recomputed mapping response."""
    project = get_project()
    calc_params = get_calc_params(project)
    annotation = request.get_json()["annotations"]
    save_annotations(project, annotation, calc_params.annotation_path,
                     calc_params.data_path, calc_params.sheet_name)
    response = dict(project=get_project_dict(project))
    # Recompute annotations/yaml/layers using the freshly saved annotation.
    calc_response, code = get_mapping()
    response.update(calc_response)
    return response, code
@app.route('/api/causx/annotation/suggest', methods=['PUT'])
@json_response
def suggest_annotation_block():
    """Suggest roles/types for a selected block. If the suggester raises,
    fall back to a generic role/type list rather than failing the request."""
    project = get_project()
    calc_params = get_calc_params(project)
    block = request.get_json()["selection"]
    annotation = request.get_json()["annotations"]
    response = {  # fallback response
        # drop metadata
        "roles": ["dependentVar", "mainSubject", "property", "qualifier", "unit"],
        # drop monolingual string
        "types": ["string", "quantity", "time", "wikibaseitem"],
        "children": {}
    }
    try:
        response = annotation_suggester(calc_params.sheet, block, annotation)
    except Exception as e:
        pass  # print(e)  -- best-effort: keep the generic fallback above
    return response, 200
@app.route('/api/causx/annotation/guess-blocks', methods=['GET'])  # V
@json_response
def guess_annotation_blocks():
    """Auto-suggest annotation blocks for the current sheet and return the
    full recomputed mapping response."""
    project = get_project()
    suggest_annotations(get_calc_params(project))
    return get_mapping()
@app.route('/api/causx/project/settings', methods=['PUT', 'GET'])  # V
@app.route('/api/project/settings', methods=['PUT', 'GET'])
@json_response
def update_settings():
    """
    This function updates the settings from GUI.

    GET returns the current project dict; PUT applies any of: title,
    description, url, endpoint, warnEmpty, handleCalendar.
    :return: (response dict, 200)
    """
    project = get_project()
    if request.method == 'PUT':
        request_json = request.get_json()
        # Truthy check: an empty title is ignored rather than applied.
        title = request_json.get("title", None)
        if title:
            project.title = title
        # `is not None`: an explicit empty description IS applied.
        description = request_json.get("description", None)
        if description is not None:
            project.description = description
        url = request_json.get("url", None)
        if url is not None:
            project.url = url
        # Truthy check: an empty endpoint is ignored.
        endpoint = request_json.get("endpoint", None)
        if endpoint:
            project.sparql_endpoint = endpoint
        # `is not None`: warnEmpty=False must still be applied.
        warn = request_json.get("warnEmpty", None)
        if warn is not None:
            project.warn_for_empty_cells = warn
        calendar = request_json.get("handleCalendar", None)
        if calendar:
            # Accepts both the GUI display strings and the canonical values.
            calendar_dict = {
                "Replace with Gregorian": "replace",
                "Leave Untouched": "leave",
                "Add Gregorian": "add",
                "replace": "replace",
                "add": "add",
                "leave": "leave"
            }
            try:
                project.handle_calendar = calendar_dict[calendar]
            except KeyError:
                raise web_exceptions.InvalidRequestException(
                    "No such calendar option")
        project.save()
        update_t2wml_settings(project)
    response = dict(project=get_project_dict(project))
    return response, 200
@app.route('/api/causx/delete_wikification', methods=['POST'])
@json_response
def delete_wikification():
    """Remove wikification entries for the selected cell range (optionally
    restricted to a specific cell value) from the project's wikifier file,
    then return the refreshed qnode layers."""
    project = get_project()
    calc_params = get_calc_params(project)
    sheet_name = calc_params.sheet.name
    data_file_name = calc_params.sheet.data_file_name
    selection = request.get_json()['selection']
    if not selection:
        raise web_exceptions.InvalidRequestException('No selection provided')
    value = request.get_json().get('value', None)
    # context = request.get_json().get("context", "")
    top_left, bottom_right = selection
    col1, row1 = top_left
    col2, row2 = bottom_right
    filepath, exists = project.get_wikifier_file(calc_params.data_path)
    if exists:
        df = pd.read_csv(filepath)
        for col in range(col1, col2+1):
            for row in range(row1, row2+1):
                # Drop rows matching this exact cell (and value, when given)
                # for the current file/sheet only.
                if value:
                    df = df.drop(df[(df['column'] == col)
                                    & (df['row'] == row)
                                    & (df['value'] == value)
                                    & (df['file'] == data_file_name)
                                    & (df['sheet'] == sheet_name)].index)
                else:
                    df = df.drop(df[(df['column'] == col)
                                    & (df['row'] == row)
                                    & (df['file'] == data_file_name)
                                    & (df['sheet'] == sheet_name)].index)
        df.to_csv(filepath, index=False, header=True)
    response = dict(project=get_project_dict(project))
    response["layers"] = get_qnodes_layer(calc_params)
    return response, 200
@app.route('/api/causx/create_node', methods=['POST'])
@json_response
def create_qnode():
    """Create a custom item/property node and optionally wikify a selection
    with it.

    Expects JSON with "label" and "is_property" (plus "data_type" for
    properties); optional "id", "description", "P31", "selection", "context".

    :return: (response dict with entity/project/layers, 200)
    :raises InvalidRequestException: on missing fields or a bad data type
    """
    project = get_project()
    request_json = request.get_json()
    try:
        label = request_json.pop("label")
        is_prop = request_json.pop("is_property")
        if is_prop:
            data_type = request_json.pop("data_type")
            if data_type not in ["globecoordinate", "quantity", "time", "string", "monolingualtext", "externalid", "wikibaseitem", "wikibaseproperty", "url"]:
                raise web_exceptions.InvalidRequestException(
                    "Invalid data type")
    except KeyError:
        raise web_exceptions.InvalidRequestException(
            "Missing required fields in entity definition")
    filepath = Path(project.directory)/"user_input_properties.tsv"
    if os.path.isfile(filepath):
        custom_nodes = kgtk_to_dict(filepath)
    else:
        custom_nodes = dict()
    # Bug fix: a caller-supplied id previously left node_id unassigned,
    # crashing with NameError below. Use it directly when provided.
    # (Also renamed away from the `id` builtin.)
    node_id = request.json.get("id", None)
    if not node_id:
        if is_prop:
            node_id = get_Pnode(project, label)
        else:
            node_id = get_Qnode(project, label)
    entity_dict = {
        "id": node_id,
        "label": label,
    }
    if is_prop:
        entity_dict["data_type"] = data_type
    entity_dict["description"] = request_json.get("description", "")
    for key in ["P31"]:  # may add more
        if request_json.get(key, None):
            entity_dict[key] = request_json[key]
    custom_nodes[node_id] = entity_dict
    dict_to_kgtk(custom_nodes, filepath)
    project.add_entity_file(filepath)
    project.save()
    t2wml_settings.wikidata_provider.save_entry(node_id, **entity_dict)
    response = dict(entity=entity_dict, project=get_project_dict(project))
    selection = request_json.get("selection", None)
    if selection:
        calc_params = get_calc_params(project)
        context = request.get_json().get("context", "")
        (col1, row1), (col2, row2) = selection
        value = calc_params.sheet[row1, col1]
        create_user_wikification(calc_params, project, selection, value,
                                 context, node_id)
        response["layers"] = get_qnodes_layer(calc_params)
    else:
        response["layers"] = {}
    return response, 200
def causx_get_file(allowed_extensions):
    """Return the uploaded file from the request after validating that it is
    present, named, and has an allowed extension.

    :param allowed_extensions: list of permitted suffixes, e.g. [".csv"]
    :raises NoFilePartException, BlankFileNameException,
        FileTypeNotSupportedException
    """
    if 'file' not in request.files:
        raise web_exceptions.NoFilePartException(
            "Missing 'file' parameter in the file upload request")
    in_file = request.files['file']
    if in_file.filename == '':
        raise web_exceptions.BlankFileNameException(
            "No file selected for uploading")
    file_extension = Path(in_file.filename).suffix
    if file_extension not in allowed_extensions:
        raise web_exceptions.FileTypeNotSupportedException(
            "File with extension '" + file_extension + "' is not allowed")
    return in_file
@app.route('/api/causx/wikify_region', methods=['POST'])  # V
@json_response
def causx_wikify():
    """Wikify countries in the selected region, merge the results into the
    project's wikifier file, and return the refreshed qnode layers."""
    project = get_project()
    request_json = request.get_json()
    region = request_json["selection"]
    overwrite_existing = request_json.get("overwrite", False)
    calc_params = get_calc_params(project)
    cell_qnode_map, problem_cells = wikify_countries(calc_params, region)
    project.add_df_to_wikifier_file(calc_params.data_path, cell_qnode_map, overwrite_existing)
    # Rebuild calc params so the newly written wikifier entries are picked up.
    calc_params = get_calc_params(project)
    response = dict(project=get_project_dict(project))
    response["layers"] = get_qnodes_layer(calc_params)
    if problem_cells:
        response['wikifierError'] = "Failed to wikify: " + ",".join(problem_cells)
    return response, 200
@app.route('/api/causx/upload/data', methods=['POST'])
@json_response
def causx_upload_data():
    """Upload a csv/xlsx data file into the project, register it, and return
    the first sheet's table plus updated project info."""
    project = get_project()
    in_file = causx_get_file([".csv", ".xlsx"])
    folder = Path(project.directory)
    shorter_name = Path(in_file.filename).name
    filename = secure_filename(shorter_name)
    file_path = folder/filename
    in_file.save(str(file_path))
    response = dict()
    code = 200
    file_path = project.add_data_file(file_path)
    # First sheet of the newly added file becomes the active sheet.
    sheet_name = project.data_files[file_path]["val_arr"][0]
    project.save()
    response["sheetName"] = sheet_name
    calc_params = CalcParams(project, file_path, sheet_name)
    response["table"] = get_table(calc_params)
    # The project title mirrors the uploaded file's stem.
    project.title = Path(file_path).stem
    project.save()
    response.update(dict(project=get_project_dict(project), filepath=file_path))
    return response, code
@app.route('/api/causx/upload/project', methods=['POST'])
@json_response
def causx_upload_project():
    """Upload a .t2wmlz project archive and load it as the active project.

    Extracts the data, annotation, entity, and (optional) wikifier files
    listed in the archive's filemap.json, registers them with a fresh
    Project, and returns the full table/annotation/layer response."""
    project = get_project()
    in_file = causx_get_file([".t2wmlz"])
    proj_dir = Path(project.directory)
    new_project = Project(project.directory)
    with tempfile.TemporaryDirectory() as tmpdirname:
        # Stash the upload in a temp dir; only the extracted members persist.
        file_path = Path(tmpdirname) / secure_filename(Path(in_file.filename).name)
        in_file.save(str(file_path))
        with zipfile.ZipFile(file_path, mode='r', compression=zipfile.ZIP_DEFLATED) as zf:
            # filemap.json names each archive member and its role.
            filemap = json.loads(zf.read("filemap.json"))
            zf.extract(filemap["data"], proj_dir)
            zf.extract(filemap["annotation"], proj_dir)
            for entity_file in filemap["entities"]:
                zf.extract(entity_file, proj_dir)
            if filemap["wikifier_exists"]:
                zf.extract(filemap["wikifier_path"], proj_dir)
    new_project.add_data_file(filemap["data"])
    sheet_name = filemap["sheet"]
    new_project.add_annotation_file(filemap["annotation"], filemap["data"], sheet_name)
    for entity_file in filemap["entities"]:
        new_project.add_entity_file(entity_file)
    new_project.save()
    calc_params = CalcParams(new_project, filemap["data"], sheet_name, annotation_path=filemap["annotation"])
    response = dict()
    response["table"] = get_table(calc_params)
    response["annotations"], response["yamlContent"] = get_annotations(calc_params)
    response["layers"] = get_qnodes_layer(calc_params)
    response["project"] = get_project_dict(new_project)
    response["sheetName"] = sheet_name
    response["filePath"] = filemap["data"]
    return response, 200
@app.route('/api/causx/upload/annotation', methods=['POST'])
@json_response
def causx_upload_annotation():
    """Upload a .t2wmlz archive, extract only its annotation file, and apply
    it to the current project's active data file/sheet."""
    project = get_project()
    in_file = causx_get_file([".t2wmlz"])
    calc_params = get_calc_params(project)
    with tempfile.TemporaryDirectory() as tmpdirname:
        file_path = Path(tmpdirname) / secure_filename(Path(in_file.filename).name)
        in_file.save(str(file_path))
        with zipfile.ZipFile(file_path, mode='r', compression=zipfile.ZIP_DEFLATED) as zf:
            filemap = json.loads(zf.read("filemap.json"))
            # NOTE(review): ZipFile.extract's second argument is the target
            # *directory*; passing calc_params.annotation_path here (which a
            # sibling route builds as a file path) looks suspicious — confirm
            # the intended extraction target.
            zf.extract(filemap["annotation"], calc_params.annotation_path)
    annotation_file = project.add_annotation_file(calc_params.annotation_path, calc_params.data_path, calc_params.sheet_name)
    preload(calc_params)
    response, code = get_mapping()
    response["project"] = get_project_dict(project)
    return response, code
@app.route('/api/causx/download_project', methods=['GET'])
@json_response
def causx_download_project():
#download a .t2wmlz file with the files needed to restore current project state
project = get_project()
calc_params = get_calc_params(project)
data_path = calc_params.data_path
data_path_arcname=(Path(data_path).name)
sheet_name=calc_params.sheet_name
annotation_path=calc_params.annotation_path
annotation_path_arcname = Path(calc_params._annotation_path).as_posix()
wikifier_path, wikifier_exists = project.get_wikifier_file(data_path)
if wikifier_exists:
wikifier_partial_path=Path(wikifier_path).relative_to(project.directory).as_posix()
else:
wikifier_partial_path=""
attachment_filename = project.title + "_" + Path(data_path).stem +"_"+ Path(sheet_name).stem +".t2wmlz"
filestream=BytesIO()
with zipfile.ZipFile(filestream, mode='w', compression=zipfile.ZIP_DEFLATED) as zf:
zf.write(data_path, arcname=data_path_arcname)
| |
running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# neighbor 1.1.1.1 activate
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# address-family ipv6
# neighbor peer2 default-originate always
# redistribute isis level-2
# !
# vrf vrft
# address-family ipv6
# redistribute ospf3 match external
# veos(config-router-bgp)#
- name: Overridden
arista.eos.eos_bgp_address_family:
config:
as_number: "10"
address_family:
- afi: "ipv4"
bgp_params:
additional_paths: "receive"
neighbor:
- peer: "peer2"
default_originate:
always: True
state: overridden
# After State:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# !
# vrf vrft
# address-family ipv6
# redistribute ospf3 match external
# veos(config-router-bgp)#
#
# Module Execution:
#
# "after": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ]
# },
# {
# "afi": "ipv6",
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ],
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# "before": {
# "address_family": [
# {
# "afi": "ipv4",
# "neighbor": [
# {
# "activate": true,
# "peer": "1.1.1.1"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv6",
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "redistribute": [
# {
# "isis_level": "level-2",
# "protocol": "isis"
# }
# ]
# },
# {
# "afi": "ipv6",
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ],
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# "changed": true,
# "commands": [
# "router bgp 10",
# "address-family ipv4",
# "no redistribute ospf3 match external",
# "no network 1.1.1.0/24",
# "no network 1.5.1.0/24 route-map MAP01",
# "neighbor peer2 default-originate always",
# "no neighbor 1.1.1.1 activate",
# "bgp additional-paths receive",
# "exit",
# "no address-family ipv6"
# ],
# using Overridden (overridding af in vrf context):
# Before State:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# no neighbor 1.1.1.1 activate
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# address-family ipv6
# bgp additional-paths receive
# neighbor peer2 default-originate always
# !
# vrf vrft
# address-family ipv6
# route-target export 33:11
# redistribute isis level-2
# redistribute ospf3 match external
# veos(config-router-bgp)#
- name: Overridden
arista.eos.eos_bgp_address_family:
config:
as_number: "10"
address_family:
- afi: "ipv4"
bgp_params:
additional_paths: "receive"
neighbor:
- peer: "peer2"
default_originate:
always: True
vrf: vrft
state: overridden
# After State:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# address-family ipv6
# bgp additional-paths receive
# neighbor peer2 default-originate always
# !
# vrf vrft
# address-family ipv4
# bgp additional-paths receive
# veos(config-router-bgp)#
#
# Module Execution:
#
# "after": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv6",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ]
# },
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# "before": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv6",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ]
# },
# {
# "afi": "ipv6",
# "redistribute": [
# {
# "isis_level": "level-2",
# "protocol": "isis"
# },
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ],
# "route_target": {
# "mode": "export",
# "target": "33:11"
# },
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# "changed": true,
# "commands": [
# "router bgp 10",
# "vrf vrft",
# "address-family ipv4",
# "neighbor peer2 default-originate always",
# "bgp additional-paths receive",
# "exit",
# "exit",
# " vrf vrft",
# "no address-family ipv6"
# ],
# Using Deleted:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# no neighbor 1.1.1.1 activate
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# address-family ipv6
# bgp additional-paths receive
# neighbor peer2 default-originate always
# !
# vrf vrft
# address-family ipv4
# bgp additional-paths receive
# veos(config-router-bgp)#
- name: Delete
arista.eos.eos_bgp_address_family:
config:
as_number: "10"
address_family:
- afi: "ipv6"
vrf: "vrft"
- afi: "ipv6"
state: deleted
# After State:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# no neighbor 1.1.1.1 activate
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# vrf vrft
# address-family ipv4
# bgp additional-paths receive
# veos(config-router-bgp)#
#
# Module Execution:
#
# "after": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# "before": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv6",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ]
# },
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# Using parsed:
# parsed_bgp_address_family.cfg :
# router bgp 10
# neighbor n2 peer-group
# neighbor n2 next-hop-unchanged
# neighbor n2 maximum-routes 12000
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# redistribute ospf3 match external
# !
# address-family ipv6
# no bgp additional-paths receive
# neighbor n2 next-hop-unchanged
# redistribute isis level-2
# !
# vrf bgp_10
# ip access-group acl01
# ucmp fec threshold trigger 33 clear 22 warning-only
# !
# address-family ipv4
# route-target import 20:11
# !
# vrf vrft
# address-family ipv4
# bgp additional-paths receive
# !
# address-family ipv6
# redistribute ospf3 match external
- name: parse configs
arista.eos.eos_bgp_address_family:
running_config: "{{ lookup('file', './parsed_bgp_address_family.cfg') }}"
state: parsed
# Module Execution:
# "parsed": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv6",
# "neighbor": [
# {
# "next_hop_unchanged": true,
# "peer": "n2"
# }
# ],
# "redistribute": [
# {
# "isis_level": "level-2",
# "protocol": "isis"
# }
# ]
# },
# {
# "afi": "ipv4",
# "route_target": {
# "mode": "import",
# "target": "20:11"
# },
# "vrf": "bgp_10"
# },
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "vrf": "vrft"
# },
# {
# "afi": "ipv6",
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ],
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# }
# }
# Using gathered:
# Device config:
# veos(config-router-bgp)#show running-config | section bgp
# router bgp 10
# neighbor peer2 peer-group
# neighbor peer2 maximum-routes 12000
# neighbor 1.1.1.1 maximum-routes 12000
# !
# address-family ipv4
# bgp additional-paths receive
# neighbor peer2 default-originate always
# no neighbor 1.1.1.1 activate
# network 1.1.1.0/24
# network 1.5.1.0/24 route-map MAP01
# redistribute ospf3 match external
# !
# vrf vrft
# address-family ipv4
# bgp additional-paths receive
# veos(config-router-bgp)#
- name: gather configs
arista.eos.eos_bgp_address_family:
state: gathered
# Module Execution:
# "gathered": {
# "address_family": [
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "neighbor": [
# {
# "default_originate": {
# "always": true
# },
# "peer": "peer2"
# }
# ],
# "network": [
# {
# "address": "1.1.1.0/24"
# },
# {
# "address": "1.5.1.0/24",
# "route_map": "MAP01"
# }
# ],
# "redistribute": [
# {
# "ospf_route": "external",
# "protocol": "ospf3"
# }
# ]
# },
# {
# "afi": "ipv4",
# "bgp_params": {
# "additional_paths": "receive"
# },
# "vrf": "vrft"
# }
# ],
# "as_number": "10"
# },
# using rendered:
- name: | |
options of an |AnalogOut| channel node.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
List[DwfAnalogOutFunction]: The available node waveform shape functions.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_func_bitset = typespec_ctypes.c_unsigned_int()
result = self.lib.FDwfAnalogOutNodeFunctionInfo(
self.hdwf,
channel_index,
node.value,
c_func_bitset)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
func_bitset = c_func_bitset.value
func_list = [func for func in DwfAnalogOutFunction if func_bitset & (1 << func.value)]
return func_list
def nodeFunctionSet(self, channel_index: int, node: DwfAnalogOutNode, func: DwfAnalogOutFunction) -> None:
    """Set the waveform shape function for an |AnalogOut| channel node.

    Parameters:
        channel_index (int): The |AnalogOut| channel.
        node (DwfAnalogOutNode): The channel node.
        func (DwfAnalogOutFunction): The waveform shape function to be configured.

    Raises:
        DwfLibraryError: An error occurred while executing the operation.
    """
    rc = self.lib.FDwfAnalogOutNodeFunctionSet(
        self.hdwf, channel_index, node.value, func.value)
    if rc != RESULT_SUCCESS:
        raise self.dwf.exception()
def nodeFunctionGet(self, channel_index: int, node: DwfAnalogOutNode) -> DwfAnalogOutFunction:
    """Get the waveform shape function for an |AnalogOut| channel node.

    Parameters:
        channel_index (int): The |AnalogOut| channel.
        node (DwfAnalogOutNode): The channel node.

    Returns:
        DwfAnalogOutFunction: The currently configured waveform shape function.

    Raises:
        DwfLibraryError: An error occurred while executing the operation.
    """
    c_func = typespec_ctypes.DwfAnalogOutFunction()
    result = self.lib.FDwfAnalogOutNodeFunctionGet(self.hdwf, channel_index, node.value, c_func)
    if result != RESULT_SUCCESS:
        raise self.dwf.exception()
    func = DwfAnalogOutFunction(c_func.value)
    return func
def nodeFrequencyInfo(self, channel_index: int, node: DwfAnalogOutNode) -> Tuple[float, float]:
    """Get the channel node valid frequency range for an |AnalogOut| channel node, in Hz.

    Parameters:
        channel_index (int): The |AnalogOut| channel.
        node (DwfAnalogOutNode): The channel node.

    Returns:
        Tuple[float, float]: The range of valid frequencies, in Hz.

    Raises:
        DwfLibraryError: An error occurred while executing the operation.
    """
    c_freq_min = typespec_ctypes.c_double()
    c_freq_max = typespec_ctypes.c_double()
    rc = self.lib.FDwfAnalogOutNodeFrequencyInfo(
        self.hdwf, channel_index, node.value, c_freq_min, c_freq_max)
    if rc != RESULT_SUCCESS:
        raise self.dwf.exception()
    return (c_freq_min.value, c_freq_max.value)
def nodeFrequencySet(self, channel_index: int, node: DwfAnalogOutNode, frequency: float) -> None:
    """Set the channel node frequency for an |AnalogOut| channel node, in Hz.

    Parameters:
        channel_index (int): The |AnalogOut| channel.
        node (DwfAnalogOutNode): The channel node.
        frequency (float): The frequency, in Hz.

    Raises:
        DwfLibraryError: An error occurred while executing the operation.
    """
    rc = self.lib.FDwfAnalogOutNodeFrequencySet(
        self.hdwf, channel_index, node.value, frequency)
    if rc != RESULT_SUCCESS:
        raise self.dwf.exception()
def nodeFrequencyGet(self, channel_index: int, node: DwfAnalogOutNode) -> float:
    """Get the frequency for an |AnalogOut| channel node, in Hz.

    Parameters:
        channel_index (int): The |AnalogOut| channel.
        node (DwfAnalogOutNode): The channel node.

    Returns:
        float: The currently configured frequency, in Hz.

    Raises:
        DwfLibraryError: An error occurred while executing the operation.
    """
    c_frequency = typespec_ctypes.c_double()
    rc = self.lib.FDwfAnalogOutNodeFrequencyGet(
        self.hdwf, channel_index, node.value, c_frequency)
    if rc != RESULT_SUCCESS:
        raise self.dwf.exception()
    return c_frequency.value
def nodeAmplitudeInfo(self, channel_index: int, node: DwfAnalogOutNode) -> Tuple[float, float]:
"""Get the amplitude range for an |AnalogOut| channel node, in Volts.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
Tuple[float, float]: The range of allowed amplitude values, in Volts.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_amplitude_min = typespec_ctypes.c_double()
c_amplitude_max = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeAmplitudeInfo(
self.hdwf,
channel_index,
node.value,
c_amplitude_min,
c_amplitude_max)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
amplitude_min = c_amplitude_min.value
amplitude_max = c_amplitude_max.value
return (amplitude_min, amplitude_max)
def nodeAmplitudeSet(self, channel_index: int, node: DwfAnalogOutNode, amplitude: float) -> None:
"""Set the amplitude for an |AnalogOut| channel node, in Volts.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
amplitude (float): The amplitude to be configured, in Volts.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
result = self.lib.FDwfAnalogOutNodeAmplitudeSet(
self.hdwf,
channel_index,
node.value,
amplitude)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
def nodeAmplitudeGet(self, channel_index: int, node: DwfAnalogOutNode) -> float:
"""Get the amplitude for an |AnalogOut| channel node, in Volts.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
float: The currently configured amplitude, in Volts.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_amplitude = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeAmplitudeGet(
self.hdwf,
channel_index,
node.value,
c_amplitude)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
amplitude = c_amplitude.value
return amplitude
def nodeOffsetInfo(self, channel_index: int, node: DwfAnalogOutNode) -> Tuple[float, float]:
"""Get the valid offset range for an |AnalogOut| channel node, in Volts.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
Tuple[float, float]: The range of valid node offsets, in Volts.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_offset_min = typespec_ctypes.c_double()
c_offset_max = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeOffsetInfo(self.hdwf, channel_index, node.value, c_offset_min, c_offset_max)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
offset_min = c_offset_min.value
offset_max = c_offset_max.value
return (offset_min, offset_max)
def nodeOffsetSet(self, channel_index: int, node: DwfAnalogOutNode, offset: float) -> None:
"""Set the offset for an |AnalogOut| channel node, in Volts.
Note:
Configuring the offset of the *Carrier* node takes a noticeable amount of time (100s of milliseconds).
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
offset (float): The channel offset to be configured, in Volts.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
result = self.lib.FDwfAnalogOutNodeOffsetSet(self.hdwf, channel_index, node.value, offset)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
def nodeOffsetGet(self, channel_index: int, node: DwfAnalogOutNode) -> float:
"""Get the offset for an |AnalogOut| channel node, in Volts.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
float: The currently configured node offset.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_offset = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeOffsetGet(self.hdwf, channel_index, node.value, c_offset)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
offset = c_offset.value
return offset
def nodeSymmetryInfo(self, channel_index: int, node: DwfAnalogOutNode) -> Tuple[float, float]:
"""Get the *symmetry* range for an |AnalogOut| channel node.
The *symmetry* value alters the waveform shape function of the node.
The *symmetry* value ranges from 0 to 100 for most waveform shape functions, except for the
:py:attr:`~pydwf.core.auxiliary.enum_types.DwfAnalogOutFunction.SinePower` waveform shape function,
where it ranges from -100 to +100.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
Tuple[float, float]: The range of valid symmetry settings.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_symmetry_min = typespec_ctypes.c_double()
c_symmetry_max = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeSymmetryInfo(
self.hdwf,
channel_index,
node.value,
c_symmetry_min,
c_symmetry_max)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
symmetry_min = c_symmetry_min.value
symmetry_max = c_symmetry_max.value
return (symmetry_min, symmetry_max)
def nodeSymmetrySet(self, channel_index: int, node: DwfAnalogOutNode, symmetry: float) -> None:
"""Set the *symmetry* value for an |AnalogOut| channel node.
The *symmetry* value alters the waveform shape function of the node.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
symmetry (float): The symmetry setting.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
result = self.lib.FDwfAnalogOutNodeSymmetrySet(
self.hdwf,
channel_index,
node.value,
symmetry)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
def nodeSymmetryGet(self, channel_index: int, node: DwfAnalogOutNode) -> float:
"""Get the *symmetry* value for an |AnalogOut| channel node.
The *symmetry* value alters the waveform shape function of the node.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
float: The currently configured channel node symmetry value.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_symmetry = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodeSymmetryGet(
self.hdwf,
channel_index,
node.value,
c_symmetry)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
symmetry = c_symmetry.value
return symmetry
def nodePhaseInfo(self, channel_index: int, node: DwfAnalogOutNode) -> Tuple[float, float]:
"""Get the valid phase range for an |AnalogOut| channel node, in degrees.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
Returns:
Tuple[float, float]: The range of valid channel node phase values, in degrees.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
c_phase_min = typespec_ctypes.c_double()
c_phase_max = typespec_ctypes.c_double()
result = self.lib.FDwfAnalogOutNodePhaseInfo(
self.hdwf,
channel_index,
node.value,
c_phase_min,
c_phase_max)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
phase_min = c_phase_min.value
phase_max = c_phase_max.value
return (phase_min, phase_max)
def nodePhaseSet(self, channel_index: int, node: DwfAnalogOutNode, phase: float) -> None:
"""Set the phase for an |AnalogOut| channel node, in degrees.
Parameters:
channel_index (int): The |AnalogOut| channel.
node (DwfAnalogOutNode): The channel node.
phase (float): The phase setting, in degrees.
Raises:
DwfLibraryError: An error occurred while executing the operation.
"""
result = self.lib.FDwfAnalogOutNodePhaseSet(
self.hdwf,
channel_index,
node.value,
phase)
if result != RESULT_SUCCESS:
raise self.dwf.exception()
def nodePhaseGet(self, channel_index: int, node: DwfAnalogOutNode) -> float:
"""Get the phase for an |AnalogOut| channel node, in degrees.
Parameters:
channel_index (int): The | |
(p45 - 1)*(apd + (np.exp(-p44*apd) - 1)/p44)
if flags['sw'][8]:
t[8]=apdf*(p[32] + p[45]*plg[3] + p[34]*plg[10] + \
(p[100]*plg[1] + p[101]*plg[6] + p[102]*plg[15])*cd14*flags['swc'][4] +
(p[121]*plg[2] + p[122]*plg[7] + p[123]*plg[16])*flags['swc'][6]*np.cos(hr*(tloc - p[124])))
if flags['sw'][9] and inputp['g_lon'] > -1000:
# longitudinal
if flags['sw'][10]:
t[10] = (1 + p[80]*dfa*flags['swc'][0])*((p[64]*plg[4] + p[65]*plg[11] + p[66]*plg[22]\
+ p[103]*plg[2] + p[104]*plg[7] + p[105]*plg[16]\
+ flags['swc'][4]*(p[109]*plg[2] + p[110]*plg[7] + p[111]*plg[16])*cd14)*np.cos(np.deg2rad(inputp['g_lon'])) \
+(p[90]*plg[4]+p[91]*plg[11]+p[92]*plg[22] + p[106]*plg[2]+p[107]*plg[7]+p[108]*plg[16]\
+ flags['swc'][4]*(p[112]*plg[2] + p[113]*plg[7] + p[114]*plg[16])*cd14)*np.sin(np.deg2rad(inputp['g_lon'])))
# ut and mixed ut, longitude
if flags['sw'][11]:
t[11]=(1 + p[95]*plg[1])*(1 + p[81]*dfa*flags['swc'][0])*\
(1 + p[119]*plg[1]*flags['swc'][4]*cd14)*\
((p[68]*plg[1] + p[69]*plg[6] + p[70]*plg[15])*np.cos(sr*(inputp['sec'] - p[71])))
t[11] += flags['swc'][10]*(p[76]*plg[8] + p[77]*plg[17] + p[78]*plg[30])*\
np.cos(sr*(inputp['sec'] - p[79]) + 2*np.deg2rad(inputp['g_lon']))*(1 + p[137]*dfa*flags['swc'][0])
# ut, longitude magnetic activity
if flags['sw'][10]:
if flags['sw'][8] == -1:
if p[51]:
t[12] = apt[0]*flags['swc'][10]*(1 + p[132]*plg[1])*\
((p[52]*plg[4] + p[98]*plg[11] + p[67]*plg[22])* np.cos(np.deg2rad(inputp['g_lon'] - p[97])))\
+ apt[0]*flags['swc'][10]*flags['swc'][4]*(p[133]*plg[2] + p[134]*plg[7] + p[135]*plg[16])*\
cd14*np.cos(np.deg2rad(inputp['g_lon'] - p[136])) + apt[0]*flags['swc'][11]* \
(p[55]*plg[1] + p[56]*plg[6] + p[57]*plg[15])*np.cos(sr*(inputp['sec'] - p[58]))
else:
t[12] = apdf*flags['swc'][10]*(1 + p[120]*plg[1])*((p[60]*plg[4] + p[61]*plg[11] + p[62]*plg[22])*\
np.cos(np.deg2rad(inputp['g_lon']-p[63])))+apdf*flags['swc'][10]*flags['swc'][4]* \
(p[115]*plg[2] + p[116]*plg[7] + p[117]*plg[16])* \
cd14*np.cos(np.deg2rad(inputp['g_lon'] - p[118])) \
+ apdf*flags['swc'][11]*(p[83]*plg[1] + p[84]*plg[6] + p[85]*plg[15])* np.cos(sr*(inputp['sec'] - p[75]))
# parms not used: 82, 89, 99, 139-149
tinf = p[30]
for i in range(14):
tinf = tinf + np.abs(flags['sw'][i])*t[i]
return tinf,[dfa,plg,ctloc,stloc,c2tloc,s2tloc,s3tloc,c3tloc,apdf,apt]
def glob7s(p,inputp,flags,varli):
    """Sum the lower-atmosphere variation terms for one coefficient set.

    Called by gtd7/gts7 with rows of the pma/ptl coefficient tables to
    build the mesosphere temperature/gradient profile corrections.

    Parameters:
        p: coefficient array for one species/profile. NOTE: mutated in
           place -- p[99] is stamped with the parameter-set id (2) when zero.
        inputp: input dict; reads 'doy' and (for the longitudinal term) 'g_lon'.
        flags: switch dict produced by tselec(); reads 'sw' and 'swc'.
        varli: list of values precomputed by globe7():
               [dfa, plg, ctloc, stloc, c2tloc, s2tloc, s3tloc, c3tloc, apdf, apt].

    Returns:
        float: the sum of enabled variation terms, or -1 (with a printed
        message) if the coefficient array does not belong to parameter set 2.
    """
    pset = 2
    # t[i]: individual variation terms, summed at the end weighted by |sw[i]|
    t = np.zeros(14)
    # annual angular frequency, approximately 2*pi/365 (radians per day)
    dr = 1.72142E-2
    [dfa,plg,ctloc,stloc,c2tloc,s2tloc,s3tloc,c3tloc,apdf,apt] = varli
    # confirm parameter set
    if p[99] == 0: p[99] = pset
    if p[99] != pset:
        print("Wrong parameter set for glob7s")
        return -1
    for j in range(14):
        t[j] = 0
    # seasonal phase factors relative to reference days stored in p
    cd32 = np.cos(dr*(inputp['doy'] - p[31]))
    cd18 = np.cos(2*dr*(inputp['doy'] - p[17]))
    cd14 = np.cos(dr*(inputp['doy'] - p[13]))
    cd39 = np.cos(2*dr*(inputp['doy'] - p[38]))
    # F10.7
    t[0] = p[21]*dfa
    # time independent
    t[1] = p[1]*plg[3] + p[2]*plg[10] + p[22]*plg[21] + p[26]*plg[1] + p[14]*plg[6] + p[59]*plg[15]
    # symmetrical annual
    t[2] = (p[18] + p[47]*plg[3] + p[29]*plg[10])*cd32
    # symmetrical semiannual
    t[3] = (p[15] + p[16]*plg[3] + p[30]*plg[10])*cd18
    # asymmetrical annual
    t[4] = (p[9]*plg[1] + p[10]*plg[6] + p[20]*plg[15])*cd14
    # asymmetrical semiannual
    t[5] = p[37]*plg[1]*cd39;
    # diurnal
    if flags['sw'][6]:
        t71 = p[11]*plg[4]*cd14*flags['swc'][4]
        t72 = p[12]*plg[4]*cd14*flags['swc'][4]
        t[6] = ((p[3]*plg[2] + p[4]*plg[7] + t71)*ctloc + (p[6]*plg[2] + p[7]*plg[7] + t72)*stloc)
    # semidiurnal
    if flags['sw'][7]:
        t81 = (p[23]*plg[8] + p[35]*plg[17])*cd14*flags['swc'][4]
        t82 = (p[33]*plg[8] + p[36]*plg[17])*cd14*flags['swc'][4]
        t[7] = ((p[5]*plg[5] + p[41]*plg[12] + t81)*c2tloc + (p[8]*plg[5] + p[42]*plg[12] + t82)*s2tloc)
    # terdiurnal
    if flags['sw'][13]:
        t[13] = p[39]*plg[9]*s3tloc + p[40]*plg[9]*c3tloc
    # magnetic activity
    if flags['sw'][8]:
        if flags['sw'][8]==1:
            t[8] = apdf * (p[32] + p[45]*plg[3]*flags['swc'][1])
        if flags['sw'][8]==-1:
            t[8]=(p[50]*apt[0] + p[96]*plg[3]*apt[0]*flags['swc'][1])
    # longitudinal (skipped when either switch is off or g_lon is the
    # <=-1000 "not provided" sentinel)
    if not (flags['sw'][9]==0 or flags['sw'][10]==0 or inputp['g_lon']<=-1000):
        t[10] = (1 + plg[1]*(p[80]*flags['swc'][4]*np.cos(dr*(inputp['doy'] - p[81]))\
                 + p[85]*flags['swc'][5]*np.cos(2*dr*(inputp['doy'] - p[86])))\
                 + p[83]*flags['swc'][2]*np.cos(dr*(inputp['doy'] - p[84]))\
                 + p[87]*flags['swc'][3]*np.cos(2*dr*(inputp['doy'] - p[88])))\
                 *((p[64]*plg[4] + p[65]*plg[11] + p[66]*plg[22]\
                 + p[74]*plg[2] + p[75]*plg[7] + p[76]*plg[16])*np.cos(np.deg2rad(inputp['g_lon']))\
                 + (p[90]*plg[4] + p[91]*plg[11] + p[92]*plg[22]\
                 + p[77]*plg[2] + p[78]*plg[7] + p[79]*plg[16])*np.sin(np.deg2rad(inputp['g_lon'])))
    # total: sum terms whose main switch is enabled (sw values may be -1)
    tt = 0
    for i in range(14):
        tt += np.abs(flags['sw'][i])*t[i]
    return tt
def gtd7(inputp,switches):
    """Neutral-atmosphere model driver: species densities and temperature.

    Runs the thermosphere portion (gts7) and, below the zn2[0] = 72.5
    altitude boundary, extends the profiles downward with the mesosphere
    temperature nodes and the densm profile integrator.

    Parameters:
        inputp: dict of inputs; reads 'alt', 'g_lat' and (via gts7/glob7s)
            'doy', 'g_lon', 'sec', etc.  NOTE: inputp['alt'] is temporarily
            overwritten for the gts7 call and restored afterwards.
        switches: switch settings passed through tselec().

    Returns:
        dict with keys 'd' (number densities for He/O/N2/O2/AR/H/N/'ANM O'
        plus total mass density 'RHO') and 't' ('TINF', 'TG').
        Units follow gts7/densm conventions -- presumably cgs number
        densities with RHO converted by the /1000 factor at the end; TODO
        confirm against the callers.
    """
    tz = 0
    # altitude node grids for the lower (zn3) and middle (zn2) profiles, km
    zn3 = np.array([32.5,20.0,15.0,10.0,0.0])
    zn2 = np.array([72.5,55.0,45.0,32.5])
    zmix= 62.5
    output = {'d':{'He':0,'O':0,'N2':0,'O2':0,'AR':0,'RHO':0,'H':0,'N':0,'ANM O':0},\
              't':{'TINF':0,'TG':0}}
    flags = tselec(switches)
    # Latitude variation of gravity (none for sw[1]=0)
    xlat = inputp['g_lat']
    if flags['sw'][1]==0: xlat = 45
    gsurf,re = glatf(xlat)
    pt,pd,ps,pdl,ptm,pdm,ptl,pma,sam,pavgm = nrlmsis00_data()
    xmm = pdm[2,4]
    # thermosphere/mesosphere (above zn2[0])
    if inputp['alt'] > zn2[0]:
        altt = inputp['alt']
    else:
        altt = zn2[0]
    # clamp inputp['alt'] to the gts7 validity floor for the call, then restore
    tmp = inputp['alt']
    inputp['alt'] = altt
    soutput,dm28,[meso_tn1,meso_tn2,meso_tn3,meso_tgn1,meso_tgn2,meso_tgn3],[dfa,plg,ctloc,stloc,c2tloc,s2tloc,s3tloc,c3tloc,apdf,apt] = gts7(inputp,flags,gsurf,re)
    altt = inputp['alt']
    inputp['alt'] = tmp
    # metric adjustment
    dm28m = dm28*1E6
    output['t']['TINF'] = soutput['t']['TINF']
    output['t']['TG'] = soutput['t']['TG']
    # above the boundary the gts7 result is the final answer
    if inputp['alt'] >= zn2[0]:
        output['d'] = soutput['d']
        return output
    varli = [dfa,plg,ctloc,stloc,c2tloc,s2tloc,s3tloc,c3tloc,apdf,apt]
    # stitch the middle-atmosphere temperature nodes onto the gts7 profile
    meso_tgn2[0] = meso_tgn1[1]
    meso_tn2[0] = meso_tn1[4]
    meso_tn2[1] = pma[0,0]*pavgm[0]/(1-flags['sw'][19]*glob7s(pma[0], inputp, flags,varli))
    meso_tn2[2] = pma[1,0]*pavgm[1]/(1-flags['sw'][19]*glob7s(pma[1], inputp, flags,varli))
    meso_tn2[3] = pma[2,0]*pavgm[2]/(1-flags['sw'][19]*flags['sw'][21]*glob7s(pma[2], inputp, flags,varli))
    meso_tgn2[1] = pavgm[8]*pma[9,0]*(1+flags['sw'][19]*flags['sw'][21]*glob7s(pma[9], inputp, flags,varli))*meso_tn2[3]*meso_tn2[3]/(pma[2,0]*pavgm[2])**2
    meso_tn3[0] = meso_tn2[3]
    # lower-atmosphere nodes only needed at or below zn3[0] = 32.5 km
    if inputp['alt'] <= zn3[0]:
        meso_tgn3[0] = meso_tgn2[1]
        meso_tn3[1] = pma[3,0]*pavgm[3]/(1-flags['sw'][21]*glob7s(pma[3], inputp, flags,varli))
        meso_tn3[2] = pma[4,0]*pavgm[4]/(1-flags['sw'][21]*glob7s(pma[4], inputp, flags,varli))
        meso_tn3[3] = pma[5,0]*pavgm[5]/(1-flags['sw'][21]*glob7s(pma[5], inputp, flags,varli))
        meso_tn3[4] = pma[6,0]*pavgm[6]/(1-flags['sw'][21]*glob7s(pma[6], inputp, flags,varli))
        meso_tgn3[1] = pma[7,0]*pavgm[7]*(1+flags['sw'][21]*glob7s(pma[7], inputp, flags,varli)) *meso_tn3[4]*meso_tn3[4]/(pma[6,0]*pavgm[6])**2
    # linear transition to full mixing below znz[0]
    dmc = 0
    if inputp['alt'] > zmix:
        dmc = 1 - (zn2[0]-inputp['alt'])/(zn2[0] - zmix)
    dz28 = soutput['d']['N2']
    # N2 density
    dmr = soutput['d']['N2'] / dm28m - 1
    output['d']['N2'],tz = densm(inputp['alt'],dm28m,xmm, tz, zn3, meso_tn3, meso_tgn3, zn2, meso_tn2, meso_tgn2,gsurf,re)
    output['d']['N2'] = output['d']['N2'] * (1 + dmr*dmc)
    # HE density
    dmr = soutput['d']['He'] / (dz28 * pdm[0,1]) - 1
    output['d']['He'] = output['d']['N2'] * pdm[0,1] * (1 + dmr*dmc)
    # O density (zero below the mixing boundary)
    output['d']['O'] = 0
    output['d']['ANM O'] = 0
    # O2 density
    dmr = soutput['d']['O2'] / (dz28 * pdm[3,1]) - 1
    output['d']['O2'] = output['d']['N2'] * pdm[3,1] * (1 + dmr*dmc)
    # AR density
    dmr = soutput['d']['AR'] / (dz28 * pdm[4,1]) - 1
    output['d']['AR'] = output['d']['N2'] * pdm[4,1] * (1 + dmr*dmc)
    # Hydrogen density
    output['d']['H'] = 0
    # Atomic nitrogen density
    output['d']['N'] = 0
    # Total mass density: sum of species mass * number density times ~1 amu
    # (1.66E-24 g); anomalous oxygen is NOT included here (see gtd7d)
    output['d']['RHO'] = 1.66E-24 * (4 * output['d']['He'] + 16 * output['d']['O'] + 28 * output['d']['N2']\
        + 32 * output['d']['O2'] + 40 * output['d']['AR'] + output['d']['H'] + 14 * output['d']['N'])
    output['d']['RHO'] = output['d']['RHO']/1000
    # temperature at altitude
    dd,tz = densm(inputp['alt'], 1, 0, tz, zn3, meso_tn3, meso_tgn3, zn2, meso_tn2, meso_tgn2,gsurf,re)
    output['t']['TG'] = tz
    return output
def gtd7d(inputp, flags):
    """Variant of gtd7 whose total mass density also counts anomalous oxygen.

    Runs gtd7 unchanged and then recomputes output['d']['RHO'] with the
    extra 16 * 'ANM O' contribution; all other outputs are identical.
    """
    output = gtd7(inputp, flags)
    d = output['d']
    # species mass (amu) times number density, including anomalous O at mass 16
    mass_sum = (4 * d['He'] + 16 * d['O'] + 28 * d['N2']\
        + 32 * d['O2'] + 40 * d['AR'] + d['H'] + 14 * d['N'] + 16 * d['ANM O'])
    d['RHO'] = 1.66E-24 * mass_sum
    d['RHO'] = d['RHO']/1e3
    return output
def gts7(inputp,flags,gsurf,re):
output = {'d':{'He':0,'O':0,'N2':0,'O2':0,'AR':0,'RHO':0,'H':0,'N':0,'ANM O':0},\
't':{'TINF':0,'TG':0}}
tz = 0
dm28 = 0
meso_tn1,meso_tn3 = [np.zeros(5) for i in range(2)]
meso_tn2 = np.zeros(4)
meso_tgn1,meso_tgn2,meso_tgn3 = [np.zeros(2) for i in range(3)]
zn1 = np.array([120.0, 110.0, 100.0, 90.0, 72.5])
dr = 1.72142E-2
alpha = np.array([-0.38, 0.0, 0.0, 0.0, 0.17, 0.0, -0.38, 0.0, 0.0])
altl = np.array([200.0, 300.0, 160.0, 250.0, 240.0, 450.0, 320.0, 450.0])
pt,pd,ps,pdl,ptm,pdm,ptl,pma,sam,pavgm = nrlmsis00_data()
za = pdl[1,15]
zn1[0] = za
# tinf variations not important below za or zn1[0]
if inputp['alt'] > zn1[0]:
tinf_tmp,varli = globe7(pt,inputp,flags)
tinf = ptm[0]*pt[0] * (1+flags['sw'][15]*tinf_tmp)
else:
tinf = ptm[0]*pt[0]
output['t']['TINF'] = tinf
# gradient variations not important below zn1[4]
if inputp['alt'] > zn1[4]:
tinf_tmp,varli = globe7(ps,inputp,flags)
grad = ptm[3]*ps[0] * (1+flags['sw'][18]*tinf_tmp)
else:
grad = ptm[3]*ps[0]
tinf_tmp,varli = globe7(pd[3],inputp,flags)
tlb = ptm[1] * (1 + flags['sw'][16]*tinf_tmp)*pd[3,0]
s = grad/(tinf - tlb)
# Lower thermosphere temp variations not significant for density above 300 km
if inputp['alt'] < 300:
meso_tn1[1] = ptm[6]*ptl[0,0]/(1.0-flags['sw'][17]*glob7s(ptl[0], inputp, flags,varli))
meso_tn1[2] = ptm[2]*ptl[1,0]/(1.0-flags['sw'][17]*glob7s(ptl[1], inputp, flags,varli))
meso_tn1[3] = ptm[7]*ptl[2,0]/(1.0-flags['sw'][17]*glob7s(ptl[2], inputp, flags,varli))
meso_tn1[4] = ptm[4]*ptl[3,0]/(1.0-flags['sw'][17]*flags['sw'][19]*glob7s(ptl[3], inputp, flags,varli))
meso_tgn1[1] = ptm[8]*pma[8,0]*(1.0+flags['sw'][17]*flags['sw'][19]*glob7s(pma[8], inputp, flags,varli))*meso_tn1[4]*meso_tn1[4]/(ptm[4]*ptl[3,0])**2
else:
meso_tn1[1]=ptm[6]*ptl[0,0]
meso_tn1[2]=ptm[2]*ptl[1,0]
meso_tn1[3]=ptm[7]*ptl[2,0]
meso_tn1[4]=ptm[4]*ptl[3,0]
meso_tgn1[1]=ptm[8]*pma[8,0]*meso_tn1[4]*meso_tn1[4]/(ptm[4]*ptl[3,0])**2
# N2 variation factor at Zlb
tinf_tmp,varli = globe7(pd[2],inputp,flags)
g28 = flags['sw'][20]*tinf_tmp
# variation of turbopause height
zhf = pdl[1,24]*(1+flags['sw'][4]*pdl[0,24]*np.sin(np.deg2rad(inputp['g_lat']))*np.cos(dr*(inputp['doy']-pt[13])))
output['t']['TINF'] = tinf
xmm = pdm[2,4]
z = inputp['alt']
# N2 density
# Diffusive density at Zlb
db28 = pdm[2,0]*np.exp(g28)*pd[2,0]
# Diffusive density at Alt
output['d']['N2'],output['t']['TG'] = densu(z,db28,tinf,tlb,28,alpha[2],output['t']['TG'],ptm[5],s,zn1,meso_tn1,meso_tgn1,gsurf,re)
dd = output['d']['N2']
# Turbopause
zh28 = pdm[2,2]*zhf
zhm28 = pdm[2,3]*pdl[1,5]
xmd = 28 - xmm
# Mixed density at Zlb
b28,tz = densu(zh28,db28,tinf,tlb,xmd,(alpha[2]-1),tz,ptm[5],s, zn1,meso_tn1,meso_tgn1,gsurf,re)
if flags['sw'][14] and z <= altl[2]:
# Mixed density at Alt
dm28,tz = densu(z,b28,tinf,tlb,xmm,alpha[2],tz,ptm[5],s,zn1,meso_tn1,meso_tgn1,gsurf,re)
# Net density at Alt
output['d']['N2'] = dnet(output['d']['N2'],dm28,zhm28,xmm,28)
# HE density
# Density variation factor at Zlb
tinf_tmp,varli = globe7(pd[0],inputp,flags)
g4 = flags['sw'][20]*tinf_tmp
# Diffusive density at Zlb
db04 = pdm[0,0]*np.exp(g4)*pd[0,0]
# Diffusive density at Alt
output['d']['He'],output['t']['TG'] = densu(z,db04,tinf,tlb, 4,alpha[0],output['t']['TG'],ptm[5],s,zn1,meso_tn1,meso_tgn1,gsurf,re)
dd = output['d']['He']
if flags['sw'][14] and z<altl[0]:
# Turbopause
zh04 = pdm[0,2]
# Mixed density at Zlb
b04,output['t']['TG'] = densu(zh04,db04,tinf,tlb,4-xmm,alpha[0]-1,output['t']['TG'],ptm[5],s,zn1,meso_tn1,meso_tgn1,gsurf,re)
# Mixed density at Alt
dm04,output['t']['TG'] = densu(z,b04,tinf,tlb,xmm,0,output['t']['TG'],ptm[5],s,zn1,meso_tn1,meso_tgn1,gsurf,re)
zhm04 = zhm28
# Net density at Alt
output['d']['He'] = dnet(output['d']['He'],dm04,zhm04,xmm,4)
# Correction to specified mixing ratio at ground
rl = np.log(b28*pdm[0,1]/b04)
zc04 = pdm[0,4]*pdl[1,0]
hc04 = pdm[0,5]*pdl[1,1]
# Net density corrected at | |
load the model twice
if not self.options['gauge']=='Feynman' and 'QED' in coupling_type:
logger.info('Switch to Feynman gauge because '+\
'model loop_qcd_qed_sm is restricted only to Feynman gauge.')
self._curr_model = None
mg_interface.MadGraphCmd.do_set(self,'gauge Feynman')
if coupling_type == ['QCD',]:
add_on = ''
elif coupling_type in [['QED'],['QCD','QED']]:
add_on = 'qcd_qed_'
else:
raise MadGraph5Error(
"The pertubation coupling cannot be '%s'"\
%str(coupling_type)+" in SM loop processes")
logger.info("MG5_aMC now loads 'loop_%s%s'."%(add_on,model_name))
#import model with correct treatment of the history
self.history.move_to_last('generate')
last_command = self.history[-1]
self.exec_cmd(" import model loop_%s%s" % (add_on,model_name), precmd=True)
self.history.append(last_command)
elif stop:
raise self.InvalidCmd(
"The model %s cannot handle loop processes"%model_name)
if loop_type and not loop_type.startswith('real') and \
not self.options['gauge']=='Feynman' and \
not self._curr_model['perturbation_couplings'] in [[],['QCD']]:
if 1 in self._curr_model.get('gauge'):
logger.info("Setting gauge to Feynman in order to process all"+\
" possible loop computations available in the model.")
mg_interface.MadGraphCmd.do_set(self,'gauge Feynman')
else:
logger.warning("You will only be able to do tree level and QCD"+\
" corrections with this model because it does not support Feynman gauge.")
class LoopInterface(CheckLoop, CompleteLoop, HelpLoop, CommonLoopInterface):
supported_ML_format = ['standalone', 'standalone_rw', 'matchbox']
def __init__(self, mgme_dir = '', *completekey, **stdin):
""" Special init tasks for the Loop Interface """
mg_interface.MadGraphCmd.__init__(self, mgme_dir = '', *completekey, **stdin)
self.setup()
    def setup(self):
        """ Special tasks when switching to this interface

        Clears state inherited from another interface (history, amplitudes,
        matrix elements, export bookkeeping), validates the current model,
        and locates the bundled CutTools and IREGI reduction libraries.
        """
        # Refresh all the interface stored value as things like generated
        # processes and amplitudes are not to be reused in between different
        # interfaces
        # Clear history, amplitudes and matrix elements when a model is imported
        # Remove previous imports, generations and outputs from history
        self.history.clean(remove_bef_last='import',
                           to_keep=['set','load','import', 'define'])
        # Reset amplitudes and matrix elements
        self._done_export=False
        self._curr_amps = diagram_generation.AmplitudeList()
        self._curr_matrix_elements = helas_objects.HelasMultiProcess()
        self._v4_export_formats = []
        self._export_formats = [ 'matrix', 'standalone' ]
        self._nlo_modes_for_completion = ['virt']
        self.validate_model()
        # Set where to look for CutTools installation.
        # In further versions, it will be set in the same manner as _mgme_dir so that
        # the user can chose its own CutTools distribution.
        self._cuttools_dir=str(os.path.join(self._mgme_dir,'vendor','CutTools'))
        if not os.path.isdir(os.path.join(self._cuttools_dir, 'src','cts')):
            logger.warning(('Warning: Directory %s is not a valid CutTools directory.'+\
                            'Using default CutTools instead.') % \
                            self._cuttools_dir)
            # NOTE(review): this "default" assignment rebuilds the exact path
            # that was just rejected, so it is a no-op -- confirm intent.
            self._cuttools_dir=str(os.path.join(self._mgme_dir,'vendor','CutTools'))
        # Set where to look for IREGI installation
        self._iregi_dir=str(os.path.join(self._mgme_dir,'vendor','IREGI','src'))
        if not os.path.isdir(self._iregi_dir):
            logger.warning(('Warning: Directory %s is not a valid IREGI directory.'+\
                            'Using default IREGI instead.')%\
                            self._iregi_dir)
            # NOTE(review): same no-op fallback as for CutTools above.
            self._iregi_dir=str(os.path.join(self._mgme_dir,'vendor','IREGI','src'))
def do_display(self,line, *argss, **opt):
""" Display born or loop diagrams, otherwise refer to the default display
command """
args = self.split_arg(line)
#check the validity of the arguments
self.check_display(args)
if args[0]=='diagrams':
if len(args)>=2 and args[1] in ['loop','born']:
self.draw(' '.join(args[2:]),args[1])
else:
self.draw(' '.join(args[1:]),'all')
else:
mg_interface.MadGraphCmd.do_display(self,line,*argss,**opt)
    def do_output(self, line):
        """Main commands:Initialize a new Template or reinitialize one

        Parses the output options (-f force, -noclean, -nojpeg, -name),
        optionally cleans an existing export directory (asking the user
        first), builds the appropriate MadLoop exporter and runs the
        export + finalize steps.
        """
        args = self.split_arg(line)
        # Check Argument validity
        self.check_output(args)
        noclean = '-noclean' in args
        force = '-f' in args
        nojpeg = '-nojpeg' in args
        main_file_name = ""
        try:
            main_file_name = args[args.index('-name') + 1]
        except Exception:
            # '-name' absent (or last token): keep the empty default
            pass
        # Whatever the format we always output the quadruple precision routines
        # to allow for curing possible unstable points.
        aloha_original_quad_mode = aloha.mp_precision
        aloha.mp_precision = True
        if self._export_format not in self.supported_ML_format:
            raise self.InvalidCmd('ML5 only support "%s" as export format.' % \
                                  ''.join(self.supported_ML_format))
        if not os.path.isdir(self._export_dir) and self._export_format in ['matrix']:
            raise self.InvalidCmd('Specified export directory %s does not exist.'\
                                  %str(self._export_dir))
        if not force and not noclean and os.path.isdir(self._export_dir)\
           and self._export_format.startswith('standalone'):
            # Don't ask if user already specified force or noclean
            logger.info('INFO: directory %s already exists.' % self._export_dir)
            logger.info('If you continue this directory will be cleaned')
            answer = self.ask('Do you want to continue?', 'y', ['y','n'])
            if answer != 'y':
                raise self.InvalidCmd('Stopped by user request')
            else:
                try:
                    shutil.rmtree(self._export_dir)
                except OSError:
                    raise self.InvalidCmd('Could not remove directory %s.'\
                                          %str(self._export_dir))
        # Choose the exporter flavour from the export format
        if self._export_format.startswith('standalone'):
            output_type = 'madloop'
        elif self._export_format == 'matchbox':
            output_type = 'madloop_matchbox'
        self._curr_exporter = export_v4.ExportV4Factory(self, \
                              noclean, output_type=output_type, group_subprocesses=False)
        if self._export_format in ['standalone', 'matchbox']:
            self._curr_exporter.copy_v4template(modelname=self._curr_model.get('name'))
        # standalone_rw: temporarily masquerade as 'standalone' so the
        # exporter copies the standard standalone template
        if self._export_format == "standalone_rw":
            self._export_format = "standalone"
            self._curr_exporter.copy_v4template(modelname=self._curr_model.get('name'))
            self._export_format = "standalone_rw"
        # Reset _done_export, since we have new directory
        self._done_export = False
        # Perform export and finalize right away
        self.ML5export(nojpeg, main_file_name)
        # Automatically run finalize
        self.ML5finalize(nojpeg)
        # Remember that we have done export
        self._done_export = (self._export_dir, self._export_format)
        # Reset _export_dir, so we don't overwrite by mistake later
        self._export_dir = None
        # Put aloha back in its original mode.
        # NOTE(review): not in a finally block, so an exception above leaves
        # aloha in quadruple-precision mode -- confirm this is acceptable.
        aloha.mp_precision = aloha_original_quad_mode
    # Export a matrix element
    def ML5export(self, nojpeg = False, main_file_name = ""):
        """Export a generated amplitude to file

        Builds the loop HELAS matrix elements (if not already done), writes
        the per-subprocess directories (or bare matrix.f files for the
        'matrix' format) and finally replaces self._curr_amps with the
        amplitudes embedded in the matrix elements so that diagram drawing
        works for decay chains.
        """
        def generate_matrix_elements(self):
            """Helper function to generate the matrix elements before exporting"""
            # Sort amplitudes according to number of diagrams,
            # to get most efficient multichannel output
            # NOTE(review): two-argument cmp-style sort -- Python 2 only;
            # under Python 3 list.sort() takes key= and this would raise.
            self._curr_amps.sort(lambda a1, a2: a2.get_number_of_diagrams() - \
                                 a1.get_number_of_diagrams())
            cpu_time1 = time.time()
            ndiags = 0
            if not self._curr_matrix_elements.get_matrix_elements():
                self._curr_matrix_elements = \
                    loop_helas_objects.LoopHelasProcess(self._curr_amps,
                        optimized_output = self.options['loop_optimized_output'])
                ndiags = sum([len(me.get('diagrams')) for \
                              me in self._curr_matrix_elements.\
                              get_matrix_elements()])
                # assign a unique id number to all process
                uid = 0
                for me in self._curr_matrix_elements.get_matrix_elements():
                    uid += 1 # update the identification number
                    me.get('processes')[0].set('uid', uid)
            cpu_time2 = time.time()
            return ndiags, cpu_time2 - cpu_time1
        # Start of the actual routine
        ndiags, cpu_time = generate_matrix_elements(self)
        calls = 0
        path = self._export_dir
        if self._export_format in self.supported_ML_format:
            path = pjoin(path, 'SubProcesses')
        cpu_time1 = time.time()
        # Pick out the matrix elements in a list
        matrix_elements = \
            self._curr_matrix_elements.get_matrix_elements()
        # Fortran MadGraph5_aMC@NLO Standalone
        if self._export_format in self.supported_ML_format:
            for me in matrix_elements:
                calls = calls + \
                        self._curr_exporter.generate_subprocess_directory_v4(\
                            me, self._curr_fortran_model)
            # If all ME's do not share the same maximum loop vertex rank and the
            # same loop maximum wavefunction size, we need to set the maximum
            # in coef_specs.inc of the HELAS Source and warn the user that this
            # might be a problem
            if self.options['loop_optimized_output'] and len(matrix_elements)>1:
                max_lwfspins = [m.get_max_loop_particle_spin() for m in \
                                matrix_elements]
                try:
                    max_loop_vert_ranks = [me.get_max_loop_vertex_rank() for me in \
                                           matrix_elements]
                except MadGraph5Error:
                    # rank information unavailable; skip the coef_specs fixup
                    pass
                else:
                    if len(set(max_lwfspins))>1 or len(set(max_loop_vert_ranks))>1:
                        self._curr_exporter.fix_coef_specs(max(max_lwfspins),\
                                                           max(max_loop_vert_ranks))
                        logger.warning('ML5 has just output processes which do not'+\
                          ' share the same maximum loop wavefunction size or the '+\
                          ' same maximum loop vertex rank. This is potentially '+\
                          ' dangerous. Please prefer to output them separately.')
        # Just the matrix.f files
        if self._export_format == 'matrix':
            for me in matrix_elements:
                filename = pjoin(path, 'matrix_' + \
                           me.get('processes')[0].shell_string() + ".f")
                if os.path.isfile(filename):
                    logger.warning("Overwriting existing file %s" % filename)
                else:
                    logger.info("Creating new file %s" % filename)
                calls = calls + self._curr_exporter.write_matrix_element_v4(\
                    writers.FortranWriter(filename),\
                    me, self._curr_fortran_model)
        cpu_time2 = time.time() - cpu_time1
        logger.info(("Generated helas calls for %d subprocesses " + \
                     "(%d diagrams) in %0.3f s") % \
                    (len(matrix_elements),
                     ndiags, cpu_time))
        if calls:
            # defensive: only report timing if cpu_time2 was actually set
            if "cpu_time2" in locals():
                logger.info("Wrote files for %d OPP calls in %0.3f s" % \
                            (calls, cpu_time2))
            else:
                logger.info("Wrote files for %d OPP calls" % \
                            (calls))
        # Replace the amplitudes with the actual amplitudes from the
        # matrix elements, which allows proper diagram drawing also of
        # decay chain processes
        self._curr_amps = diagram_generation.AmplitudeList(\
            [me.get('base_amplitude') for me in \
             matrix_elements])
def ML5finalize(self, nojpeg, online = False):
"""Copy necessary sources and output the ps representation of
the diagrams, if needed"""
if self._export_format in self.supported_ML_format:
logger.info('Export UFO model to MG4 format')
# wanted_lorentz are the lorentz structures which are
# actually used in the wavefunctions and amplitudes in
# these processes
wanted_lorentz = self._curr_matrix_elements.get_used_lorentz()
wanted_couplings = self._curr_matrix_elements.get_used_couplings()
# For a unique output of multiple type of exporter model information
# are save in memory
if hasattr(self, 'previous_lorentz'):
wanted_lorentz = list(set(self.previous_lorentz + wanted_lorentz))
wanted_couplings = list(set(self.previous_couplings + wanted_couplings))
del self.previous_lorentz
del self.previous_couplings
self._curr_exporter.convert_model_to_mg4(self._curr_model,
wanted_lorentz,
wanted_couplings)
compiler = {'fortran': self.options['fortran_compiler'],
'f2py': self.options['f2py_compiler']}
if self._export_format in self.supported_ML_format:
self._curr_exporter.finalize_v4_directory( \
self._curr_matrix_elements,
self.history,
not nojpeg,
online,
compiler)
if self._export_format in self.supported_ML_format:
logger.info('Output to directory ' + self._export_dir + ' done.')
def do_launch(self, line, *args,**opt):
"""Main commands: Check that the type of launch is fine before proceeding with the
mother function. """
args = self.split_arg(line)
# check argument validity and normalise argument
(options, args) = mg_interface._launch_parser.parse_args(args)
self.check_launch(args, options)
if not args[0].startswith('standalone'):
raise self.InvalidCmd('ML5 can only launch standalone runs.')
start_cwd = os.getcwd()
options = options.__dict__
# args is now MODE | |
19) not in self.prov_hols[province])
def test_naefelser_fahrt(self):
    """Naefelser Fahrt: celebrated only in GL on the known Thursdays."""
    known_good = [(2018, 4, 5), (2019, 4, 4), (2020, 4, 2),
                  (2021, 4, 8), (2022, 4, 7), (2023, 4, 13),
                  (2024, 4, 4), (2025, 4, 3), (2026, 4, 9),
                  (2027, 4, 1), (2028, 4, 6), (2029, 4, 5),
                  (2030, 4, 4), (2031, 4, 3), (2032, 4, 1),
                  (2033, 4, 7), (2034, 4, 13), (2035, 4, 5)]
    provinces_that_have = {'GL'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn instead of assertTrue(x in y): same pass/fail
    # semantics but far clearer failure messages.
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_karfreitag(self):
    """Good Friday: holiday in every province except VS."""
    known_good = [(2018, 3, 30), (2019, 4, 19), (2020, 4, 10),
                  (2021, 4, 2), (2022, 4, 15), (2023, 4, 7),
                  (2024, 3, 29), (2025, 4, 18), (2026, 4, 3),
                  (2027, 3, 26), (2028, 4, 14), (2029, 3, 30),
                  (2030, 4, 19), (2031, 4, 11), (2032, 3, 26),
                  (2033, 4, 15), (2034, 4, 7), (2035, 3, 23)]
    provinces_that_dont = {'VS'}
    provinces_that_have = set(holidays.CH.PROVINCES) - provinces_that_dont
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_ostern(self):
    """Easter Sunday: holiday in all provinces."""
    known_good = [(2018, 4, 1), (2019, 4, 21), (2020, 4, 12),
                  (2021, 4, 4), (2022, 4, 17), (2023, 4, 9),
                  (2024, 3, 31), (2025, 4, 20), (2026, 4, 5),
                  (2027, 3, 28), (2028, 4, 16), (2029, 4, 1),
                  (2030, 4, 21), (2031, 4, 13), (2032, 3, 28),
                  (2033, 4, 17), (2034, 4, 9), (2035, 3, 25)]
    # assertIn gives clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(holidays.CH.PROVINCES, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
def test_ostermontag(self):
    """Easter Monday: holiday everywhere except VS."""
    known_good = [(2018, 4, 2), (2019, 4, 22), (2020, 4, 13),
                  (2021, 4, 5), (2022, 4, 18), (2023, 4, 10),
                  (2024, 4, 1), (2025, 4, 21), (2026, 4, 6),
                  (2027, 3, 29), (2028, 4, 17), (2029, 4, 2),
                  (2030, 4, 22), (2031, 4, 14), (2032, 3, 29),
                  (2033, 4, 18), (2034, 4, 10), (2035, 3, 26)]
    provinces_that_dont = {'VS'}
    provinces_that_have = set(holidays.CH.PROVINCES) - provinces_that_dont
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_auffahrt(self):
    """Ascension Day: holiday in all provinces."""
    known_good = [(2018, 5, 10), (2019, 5, 30), (2020, 5, 21),
                  (2021, 5, 13), (2022, 5, 26), (2023, 5, 18),
                  (2024, 5, 9), (2025, 5, 29), (2026, 5, 14),
                  (2027, 5, 6), (2028, 5, 25), (2029, 5, 10),
                  (2030, 5, 30), (2031, 5, 22), (2032, 5, 6),
                  (2033, 5, 26), (2034, 5, 18), (2035, 5, 3)]
    # assertIn gives clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(holidays.CH.PROVINCES, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
def test_pfingsten(self):
    """Whit Sunday: holiday in all provinces."""
    known_good = [(2018, 5, 20), (2019, 6, 9), (2020, 5, 31),
                  (2021, 5, 23), (2022, 6, 5), (2023, 5, 28),
                  (2024, 5, 19), (2025, 6, 8), (2026, 5, 24),
                  (2027, 5, 16), (2028, 6, 4), (2029, 5, 20),
                  (2030, 6, 9), (2031, 6, 1), (2032, 5, 16),
                  (2033, 6, 5), (2034, 5, 28), (2035, 5, 13)]
    # assertIn gives clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(holidays.CH.PROVINCES, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
def test_pfingstmontag(self):
    """Whit Monday: holiday in all provinces."""
    known_good = [(2018, 5, 21), (2019, 6, 10), (2020, 6, 1),
                  (2021, 5, 24), (2022, 6, 6), (2023, 5, 29),
                  (2024, 5, 20), (2025, 6, 9), (2026, 5, 25),
                  (2027, 5, 17), (2028, 6, 5), (2029, 5, 21),
                  (2030, 6, 10), (2031, 6, 2), (2032, 5, 17),
                  (2033, 6, 6), (2034, 5, 29), (2035, 5, 14)]
    # assertIn gives clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(holidays.CH.PROVINCES, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
def test_fronleichnam(self):
    """Corpus Christi: only in the listed (mostly Catholic) provinces."""
    known_good = [(2014, 6, 19), (2015, 6, 4), (2016, 5, 26),
                  (2017, 6, 15), (2018, 5, 31), (2019, 6, 20),
                  (2020, 6, 11), (2021, 6, 3), (2022, 6, 16),
                  (2023, 6, 8), (2024, 5, 30)]
    provinces_that_have = {'AI', 'JU', 'LU', 'NW', 'OW', 'SZ', 'TI', 'UR',
                           'VS', 'ZG'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, (y, m, d) in product(provinces_that_have, known_good):
        self.assertIn(date(y, m, d), self.prov_hols[province])
    for province, (y, m, d) in product(provinces_that_dont, known_good):
        self.assertNotIn(date(y, m, d), self.prov_hols[province])
def test_fest_der_unabhaengikeit(self):
    """Jura Independence Day (June 23): JU only."""
    provinces_that_have = {'JU'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 6, 23), self.prov_hols[province])
    # 2011 is "Fronleichnam" on the same date, we don't test this year
    for province, year in product(provinces_that_dont, range(1970, 2010)):
        self.assertNotIn(date(year, 6, 23), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(2012, 2050)):
        self.assertNotIn(date(year, 6, 23), self.prov_hols[province])
def test_peter_und_paul(self):
    """Saints Peter and Paul (June 29): TI only."""
    provinces_that_have = {'TI'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 6, 29), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 6, 29), self.prov_hols[province])
def test_mariae_himmelfahrt(self):
    """Assumption of Mary (Aug 15): only in the listed provinces."""
    provinces_that_have = {'AI', 'JU', 'LU', 'NW', 'OW', 'SZ', 'TI', 'UR',
                           'VS', 'ZG'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 8, 15), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 8, 15), self.prov_hols[province])
def test_bruder_chlaus(self):
    """Saint Nicholas of Flüe (Sept 25): OW only."""
    provinces_that_have = {'OW'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 9, 25), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 9, 25), self.prov_hols[province])
def test_allerheiligen(self):
    """All Saints' Day (Nov 1): only in the listed provinces."""
    provinces_that_have = {'AI', 'GL', 'JU', 'LU', 'NW', 'OW', 'SG', 'SZ',
                           'TI', 'UR', 'VS', 'ZG'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 11, 1), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 11, 1), self.prov_hols[province])
def test_escalade_de_geneve(self):
    """Escalade (Dec 12): GE only."""
    provinces_that_have = {'GE'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 12, 12), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 12, 12), self.prov_hols[province])
def test_stephanstag(self):
    """Saint Stephen's Day (Dec 26): only in the listed provinces."""
    provinces_that_have = {'AG', 'AR', 'AI', 'BL', 'BS', 'BE', 'FR', 'GL',
                           'GR', 'LU', 'NE', 'NW', 'OW', 'SG', 'SH', 'SZ',
                           'SO', 'TG', 'TI', 'UR', 'ZG', 'ZH'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 12, 26), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 12, 26), self.prov_hols[province])
def test_wiedererstellung_der_republik(self):
    """Restoration of the Republic (Dec 31): GE only."""
    provinces_that_have = {'GE'}
    provinces_that_dont = set(holidays.CH.PROVINCES) - provinces_that_have
    # assertIn/assertNotIn give clearer failure output than assertTrue(x in y).
    for province, year in product(provinces_that_have, range(1970, 2050)):
        self.assertIn(date(year, 12, 31), self.prov_hols[province])
    for province, year in product(provinces_that_dont, range(1970, 2050)):
        self.assertNotIn(date(year, 12, 31), self.prov_hols[province])
class TestAR(unittest.TestCase):
def setUp(self):
    # Fresh Argentina calendar per test, with observed (weekend-shifted)
    # holiday dates enabled by default; individual tests toggle .observed.
    self.holidays = holidays.AR(observed=True)
def test_new_years(self):
    """New Year's Day: Jan 1 every year; shifted dates only when observed."""
    self.holidays.observed = False
    self.assertNotIn(date(2010, 12, 31), self.holidays)
    self.assertNotIn(date(2017, 1, 2), self.holidays)
    self.holidays.observed = True
    self.assertIn(date(2017, 1, 1), self.holidays)
    one_day = relativedelta(days=1)
    for year in range(1900, 2100):
        jan_first = date(year, 1, 1)
        self.assertIn(jan_first, self.holidays)
        self.assertNotIn(jan_first - one_day, self.holidays)
        self.assertNotIn(jan_first + one_day, self.holidays)
def test_carnival_day(self):
    """Carnival Monday/Tuesday for 2016-2018 are holidays."""
    expected = [(2018, 2, 12), (2018, 2, 13), (2017, 2, 27),
                (2017, 2, 28), (2016, 2, 8), (2016, 2, 9)]
    for y, m, d in expected:
        self.assertIn(date(y, m, d), self.holidays)
def test_memory_national_day(self):
    """Day of Remembrance (March 24): absent in early years, present recently."""
    self.holidays.observed = False
    for year in (1907, 2002):
        self.assertNotIn(date(year, 3, 24), self.holidays)
    self.holidays.observed = True
    for year in (2018, 2017, 2016):
        self.assertIn(date(year, 3, 24), self.holidays)
def test_holy_week_day(self):
    """Holy Thursday and Good Friday for 2016-2018 are holidays."""
    expected = [(2018, 3, 29), (2018, 3, 30), (2017, 4, 13),
                (2017, 4, 14), (2016, 3, 24), (2016, 3, 25)]
    for y, m, d in expected:
        self.assertIn(date(y, m, d), self.holidays)
def test_malvinas_war_day(self):
    """Malvinas Veterans Day: April 2 of every year."""
    for year in range(1900, 2100):
        self.assertIn(date(year, 4, 2), self.holidays)
def test_labor_day(self):
    """Labor Day: May 1; shifted dates only appear when observed=True."""
    # BUG FIX: the original assigned ``self.holidays.observerd`` (typo),
    # which silently created a new attribute instead of disabling the
    # observed-date logic, so the two assertNotIn checks below never
    # actually exercised observed=False.
    self.holidays.observed = False
    self.assertNotIn(date(2010, 4, 30), self.holidays)
    self.assertNotIn(date(2011, 5, 2), self.holidays)
    self.holidays.observed = True
    self.assertIn(date(1922, 5, 1), self.holidays)
    for year in range(1900, 2100):
        dt = date(year, 5, 1)
        self.assertIn(dt, self.holidays)
        self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
def test_may_revolution_day(self):
    """May Revolution Day: May 25 of every year."""
    self.holidays.observed = False
    self.assertNotIn(date(1930, 5, 25), self.holidays)
    self.assertNotIn(date(2014, 5, 25), self.holidays)
    self.holidays.observed = True
    for year in range(1900, 2100):
        # BUG FIX: the original looped over date(year, 5, 1) — Labor Day —
        # so this test never checked May Revolution Day (May 25) at all.
        dt = date(year, 5, 25)
        self.assertIn(dt, self.holidays)
        self.assertNotIn(dt + relativedelta(days=-1), self.holidays)
        self.assertNotIn(dt + relativedelta(days=+1), self.holidays)
def test_guemes_day(self):
    """Güemes Day: June 17; adjacent days are not holidays."""
    one_day = relativedelta(days=1)
    for year in range(1900, 2100):
        guemes = date(year, 6, 17)
        self.assertIn(guemes, self.holidays)
        self.assertNotIn(guemes - one_day, self.holidays)
        self.assertNotIn(guemes + one_day, self.holidays)
def test_belgrano_day(self):
    """Belgrano Day: June 20; adjacent days are not holidays."""
    one_day = relativedelta(days=1)
    for year in range(1900, 2100):
        belgrano = date(year, 6, 20)
        self.assertIn(belgrano, self.holidays)
        self.assertNotIn(belgrano - one_day, self.holidays)
        self.assertNotIn(belgrano + one_day, self.holidays)
def test_independence_day(self):
self.holidays.observed = False
self.assertNotIn(date(2017, 7, 9), self.holidays)
self.assertNotIn(date(2011, 7, 9), self.holidays)
self.holidays.observed = True
self.assertIn(date(2017, 7, 9), self.holidays)
self.assertIn(date(2011, 7, 9), self.holidays)
for | |
<gh_stars>1-10
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright (c) 2022 by <NAME>, UC3M. +
# All rights reserved. This file is part of the HH-VAEM, and is released under +
# the "MIT License Agreement". Please see the LICENSE file that should have +
# been included as part of this package. +
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from src.models.h_vae import *
from src.models.hmc import *
# ============= HHVAE ============= #
class HHVAE(HVAE):
"""
Implements a Hierarchical Hamiltonian VAE (HH-VAE) as described in https://arxiv.org/abs/2202.04599
"""
def __init__(self,
             dataset: str, dim_x: int, dim_y: int, arch='base', dim_h=256,
             likelihood_x = 'gaussian', likelihood_y = 'gaussian', variance=0.1, imbalanced_y = False,
             categories_y = 1, prediction_metric='rmse',
             batch_size=128, lr=1e-3, samples_MC = 1, data_path='../data/', split_idx=0,
             # NOTE(review): mutable default argument; only read here, but a
             # safer pattern is latent_dims=None with an in-body default.
             latent_dims: list=[10, 5], sr_coef=0.001, balance_kl_steps=15e3, anneal_kl_steps=1e3,
             update_prior = False,
             L=5, T=10, chains=1, chains_sksd=30, sksd=1,
             pre_steps=18e3,
             lr_pre=1e-3, lr_encoder=1e-3, lr_decoder=1e-3, lr_prior = 1e-3, lr_predictor=1e-3, lr_hmc=1e-3, lr_scale = 1e-2,
             update_s_each=10
             ):
    """
    HHVAE initialization

    Args:
        dataset (str): name of the dataset (boston, mnist, ...)
        dim_x (int): input data dimension
        dim_y (int): target data dimension
        arch (str, optional): name of the architecture for encoder/decoder from the 'archs' file. Defaults to 'base'.
        dim_h (int, optional): dimension of the hidden vectors. Defaults to 256.
        likelihood_x (str, optional): input data likelihood type. Defaults to 'gaussian'.
        likelihood_y (str, optional): target data likelihood type. Defaults to 'gaussian'.
        variance (float, optional): fixed variance for Gaussian likelihoods. Defaults to 0.1.
        imbalanced_y (bool, optional): True for compensating imbalanced classification. Defaults to False.
        categories_y (int, optional): number of categories when the target is categorical. Defaults to 1.
        prediction_metric (str, optional): name of the prediction metric for validation ('rmse', 'accuracy'). Defaults to 'rmse'.
        batch_size (int, optional): batch size. Defaults to 128.
        lr (float, optional): learning rate for the parameter optimization. Defaults to 1e-3.
        samples_MC (int, optional): number of MC samples for computing the ELBO. Defaults to 1.
        data_path (str, optional): path to load/save the data. Defaults to '../data/'.
        split_idx (int, optional): idx of the training split. Defaults to 0.
        latent_dims (list): list of ints containing the latent dimension at each layer. First element corresponds to the shallowest layer, connected to the data
        sr_coef (float, optional): coefficient for spectral normalization of parameters (Non-used). Defaults to 0.001.
        balance_kl_steps (float, optional): number of steps for balancing the KL terms of the different layers. Defaults to 15e3.
        anneal_kl_steps (float, optional): number of steps for annealing the KL. Defaults to 1e3.
        update_prior (bool, optional): update the prior variance via ML and VI at the end of each epoch (True). Defaults to False.
        L (int, optional): number of Leapfrog steps. Defaults to 5.
        T (int, optional): length of the HMC chains. Defaults to 10.
        chains (int, optional): number of parallel HMC chains. Defaults to 1.
        chains_sksd (int, optional): number of parallel HMC chains for computing the SKSD. Defaults to 30.
        sksd (int, optional): learn a scale factor for q(eps|zy) using the SKSD regularizer (1) or not (0). Defaults to 1.
        pre_steps (float, optional): number of standard VI training steps (before using HMC). Defaults to 18e3.
        lr_pre (float, optional): learning rate for all the parameters during the VI training stage. Defaults to 1e-3.
        lr_encoder (float, optional): Learning rate for the encoder parameters. Defaults to 1e-3.
        lr_decoder (float, optional): Learning rate for the decoder (p(x|z1)). Defaults to 1e-3.
        lr_prior (float, optional): Learning rate for the hierarchical transformations (f(zl|zl+1)). Defaults to 1e-3.
        lr_predictor (float, optional): Learning rate for the predictor. Defaults to 1e-3.
        lr_hmc (float, optional): Learning rate for the HMC hyperparameters (matrix of step sizes). Defaults to 1e-3.
        lr_scale (float, optional): Learning rate for the scale (inflation) factor Defaults to 1e-2.
        update_s_each (int, optional): Interval of steps for optimizing the scale factor. Defaults to 10.
    """
    super(HHVAE, self).__init__(dataset=dataset, dim_x=dim_x, dim_y=dim_y,
        arch=arch, dim_h=dim_h, likelihood_x = likelihood_x, likelihood_y = likelihood_y,
        variance=variance, imbalanced_y = imbalanced_y,
        categories_y=categories_y,
        prediction_metric=prediction_metric, batch_size=batch_size, lr=lr, samples_MC = samples_MC,
        data_path=data_path, split_idx=split_idx,
        latent_dims=latent_dims, sr_coef=sr_coef,
        balance_kl_steps=balance_kl_steps, anneal_kl_steps=anneal_kl_steps,
        update_prior=update_prior
        )

    # HMC/SKSD training is driven manually in training_step (multiple
    # optimizers with interleaved activate/deactivate phases).
    self.automatic_optimization=False
    self.L = L
    self.T = T
    self.chains = chains
    self.chains_sksd = chains_sksd
    self.sksd = sksd
    self.pre_steps = pre_steps
    self.lr_pre = lr_pre
    self.lr_encoder = lr_encoder
    self.lr_decoder = lr_decoder
    self.lr_prior = lr_prior
    self.lr_predictor = lr_predictor
    self.lr_hmc = lr_hmc
    self.lr_scale = lr_scale
    self.update_s_each = update_s_each
    # HMC sampler over the concatenation of all latent layers.
    self.HMC = HMC(dim=np.sum(latent_dims), L=L, T=T, chains=chains, chains_sksd=chains_sksd, logp=None, scale_per_layer=latent_dims)

    self.save_hyperparameters('L', 'T', 'chains', 'chains_sksd', 'sksd', 'pre_steps',
        'lr_pre', 'lr_encoder', 'lr_decoder', 'lr_prior', 'lr_predictor', 'lr_hmc', 'lr_scale',
        'update_s_each')

    self.step_idx=0 # training step index
# ============= Modified HVAE functions ============= #
def forward(self, batch: tuple, hmc=True, samples=1) -> tuple:
    """
    Forward data through the model. For the pretraining stage, use the ELBO. For the rest, use HMC

    Args:
        batch (tuple): contains (data, observed_data, target, observed_target)
        hmc (bool): sample posterior using HMC (True). Defaults to True
        samples (int): number of MC samples for computing the ELBO

    Returns:
        If hmc=False, returns:
            loss_VI, rec_x, rec_y, kl
        If hmc=True, returns:
            loss_VI, loss_HMC, loss_SKSD, rec_x, rec_y, kl
    """
    if hmc==True:
        # Activate only encoder: while HMC training is on, the ELBO
        # gradient must flow into the encoder (psi) only.
        activate(self.encoder)
        deactivate(self.decoder)
        deactivate(self.prior)
        deactivate(self.predictor)
        self.HMC.log_eps.requires_grad = False
        self.HMC.log_inflation.requires_grad = False

    # Get data
    x, observed_x, y, observed_y = batch
    xn = self.normalize_x(x)
    xt, yt, xy, observed = self.preprocess_batch(batch)
    # xt is the preprocessed input (xt=x if no preprocessing)
    # observed is observed_x OR observed_y (for not using kl if no observed data)

    mus, logvars = self.encoder(xy)
    z = self.sample_z(mus, logvars, samples=samples, hmc=False)
    theta_x = self.decoder(z)
    x_hat = self.build_x_hat(xn, observed_x, theta_x)
    # Predictor input: latent code concatenated with the data reconstruction.
    zx = torch.cat([z,x_hat],dim=-1)

    rec_x = self.decoder.logp(xt, observed_x, z=z, theta=theta_x).sum(-1)
    rec_y = self.predictor.logp(yt, observed_y, z=zx).sum(-1)
    kls = self.encoder.regularizer(mus, logvars, observed)
    elbo = rec_x + rec_y - kls.sum(0).unsqueeze(-1)
    # Average only over non-zero entries (unobserved rows contribute 0).
    elbo = elbo[elbo!=0].mean()
    rec_x = rec_x[rec_x!=0].mean()
    rec_y = rec_y[rec_y!=0].mean()
    kl_mean = torch.zeros(len(kls)).to(self.device)
    for l, kl in enumerate(kls):
        kl_mean[l]= kl[kl!=0].mean()

    loss_3 = -elbo

    if hmc==False: # returns elbo
        return loss_3, rec_x, rec_y, kl_mean
    else: # returns elbo, logp and sksd
        # Activate decoder, predictor and hmc
        activate(self.decoder)
        activate(self.prior)
        activate(self.predictor)
        self.HMC.log_eps.requires_grad = True
        deactivate(self.encoder)
        self.HMC.log_inflation.requires_grad = False
        # Encoder again for not sharing gradients
        mus, logvars = self.encoder(xy)
        zT, E = self.sample_z(mus, logvars, samples=self.chains, return_eps=True)
        loss_1 = -self.HMC.logp(E)
        loss_1 = loss_1[loss_1!=0].mean()
        if self.sksd==1:
            # Deactivate everything except scale
            self.HMC.log_inflation.requires_grad = True
            deactivate(self.encoder)
            deactivate(self.decoder)
            deactivate(self.prior)
            deactivate(self.predictor)
            self.HMC.log_eps.requires_grad = False
            loss_2 = self.HMC.evaluate_sksd(torch.cat(mus, -1), torch.exp(torch.cat(logvars, -1)))
        else:
            # No SKSD regularization requested.
            loss_2 = None
        # NOTE(review): this branch returns the raw per-layer `kls`, while the
        # hmc=False branch returns `kl_mean` — confirm callers expect this.
        return loss_3, loss_1, loss_2, rec_x, rec_y, kls
def training_step(self, batch: tuple, batch_idx: int, logging: bool=True):
    """
    Perform a traning step following https://arxiv.org/abs/2202.04599
        - For the first pre_steps, optimize parameters by maximizing the ELBO
        - For the rest, optimize encoder using ELBO, and the rest using HMC objective and SKSD

    Args:
        batch (tuple): contains (data, observed_data, target, observed_target)
        batch_idx (int): batch index from the training set
        logging (bool): log metrics into Tensorboard (True). Default True
    """
    # Manual optimization: seven optimizers, one per parameter group.
    (opt_vae, opt_decoder, opt_prior, opt_predictor, opt_encoder, opt_hmc, opt_scale) = self.optimizers(use_pl_optimizer=True)
    if self.step_idx < self.pre_steps:
        # Stage 1: standard VI pretraining with a single optimizer.
        self.hmc=False
        loss_3, rec_x, rec_y, kls = self.forward(batch, hmc=False, samples=self.samples_MC)
        #loss_3 = loss_3 + self.sr_coef * self.spectral_norm_parallel()
        opt_vae.zero_grad()
        self.manual_backward(loss_3)#, opt_vae)
        opt_vae.step()
    else:
        # Stage 2: HMC training — three interleaved optimization phases.
        self.hmc=True
        loss_3, loss_1, loss_2, rec_x, rec_y, kls = self.forward(batch)
        #loss_3 = loss_3 + self.sr_coef * self.spectral_norm_parallel()

        ##### Optimization
        # Optimize psi (encoder)
        activate(self.encoder)
        deactivate(self.decoder)
        deactivate(self.prior)
        deactivate(self.predictor)
        self.HMC.log_eps.requires_grad = False
        self.HMC.log_inflation.requires_grad = False
        opt_encoder.zero_grad()
        opt_decoder.zero_grad()
        opt_prior.zero_grad()
        opt_predictor.zero_grad()
        opt_hmc.zero_grad()
        opt_scale.zero_grad()
        self.manual_backward(loss_3)#, opt_encoder)
        opt_encoder.step()

        # Optimize theta_x, theta_y and phi (decoders and HMC)
        activate(self.decoder)
        activate(self.prior)
        activate(self.predictor)
        self.HMC.log_eps.requires_grad = True
        deactivate(self.encoder)
        self.HMC.log_inflation.requires_grad = False
        opt_encoder.zero_grad()
        opt_decoder.zero_grad()
        opt_prior.zero_grad()
        opt_predictor.zero_grad()
        opt_hmc.zero_grad()
        opt_scale.zero_grad()
        self.manual_backward(loss_1)#, [opt_decoder, opt_prior, opt_predictor, opt_hmc])
        opt_decoder.step()
        opt_prior.step()
        opt_predictor.step()
        opt_hmc.step()

        # NOTE(review): `% self.update_s_each == True` is equivalent to
        # `% self.update_s_each == 1` — confirm this is intended rather
        # than `== 0` (update every update_s_each steps).
        if self.sksd and self.step_idx % self.update_s_each == True:
            # Optimize only the scale (inflation) factor via the SKSD loss.
            self.HMC.log_inflation.requires_grad = True
            deactivate(self.encoder)
            deactivate(self.decoder)
            deactivate(self.prior)
            deactivate(self.predictor)
            self.HMC.log_eps.requires_grad = False
            opt_encoder.zero_grad()
            opt_decoder.zero_grad()
            opt_prior.zero_grad()
            opt_predictor.zero_grad()
            opt_hmc.zero_grad()
            opt_scale.zero_grad()
            self.manual_backward(loss_2)#, opt_scale)
            opt_scale.step()
            scale = torch.exp(self.HMC.log_inflation)
            [self.log('scale_{:d}'.format(d), s, on_step=False, on_epoch=True, prog_bar=False, logger=True) for d, s in enumerate(scale.reshape(-1))]
            if logging:
                self.log('SKSD', loss_2, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        self.log('HMC_objective', -loss_1, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        self.log('ELBO', -loss_3, on_step=False, on_epoch=True, prog_bar=True, logger=True)

    if logging:
        self.log('-rec_x', -rec_x, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        self.log('-rec_y', -rec_y, on_step=False, on_epoch=True, prog_bar=False, logger=True)
        for l, kl in enumerate(kls):
            self.log('kl_{:d}'.format(l), kl, on_step=False, on_epoch=True, prog_bar=False, logger=True)

    self.step_idx+=1
    self.encoder.global_step += 1
def sample_z(self, mus: list, logvars: list, samples=1, hmc=True, return_eps=False, all_layers=False):
"""
Draw latent reparameterized | |
<gh_stars>0
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import sys
import time
import urllib
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional, Union, cast
import boto3
import botocore
import pandas as pd
from kubernetes import config as k8_config
from kubernetes import watch as k8_watch
from kubernetes.client import *
from kubespawner.objects import make_pod, make_pvc
from slugify import slugify
from aws_orbit_sdk.common import get_properties, get_stepfunctions_waiter_config
from aws_orbit_sdk.common_pod_specification import TeamConstants
# Configure root logging once at import time for every logger in this module.
logging.basicConfig(
    format="%(asctime)s %(levelname)-8s %(message)s",
    level=logging.INFO,
    datefmt="%Y-%m-%d %H:%M:%S",
)
_logger = logging.getLogger()

# Type aliases describing the JSON manifest documents stored in SSM.
MANIFEST_PLUGIN_TYPE = Dict[str, Union[str, Dict[str, Any]]]
MANIFEST_PROPERTY_MAP_TYPE = Dict[str, Union[str, Dict[str, Any]]]
MANIFEST_FILE_TEAM_TYPE = Dict[str, Union[str, int, None, List[MANIFEST_PROPERTY_MAP_TYPE], List[str]]]
MANIFEST_TEAM_TYPE = Dict[str, Union[str, int, None, List[MANIFEST_PLUGIN_TYPE]]]
# NOTE(review): duplicate re-definition of MANIFEST_PROPERTY_MAP_TYPE
# (identical value to the one above) — safe but redundant.
MANIFEST_PROPERTY_MAP_TYPE = Dict[str, Union[str, Dict[str, Any]]]

# Lazily-populated module caches; Optional because they start as None.
__CURRENT_TEAM_MANIFEST__: Optional[MANIFEST_TEAM_TYPE] = None
__CURRENT_ENV_MANIFEST__: Optional[MANIFEST_TEAM_TYPE] = None
def read_team_manifest_ssm(env_name: str, team_name: str) -> Optional[MANIFEST_TEAM_TYPE]:
    """Fetch and JSON-decode the team manifest SSM parameter.

    Returns None when the parameter does not exist.
    """
    parameter_name: str = f"/orbit/{env_name}/teams/{team_name}/manifest"
    _logger.debug("Trying to read manifest from SSM parameter (%s).", parameter_name)
    ssm_client = boto3.client("ssm")
    try:
        raw_value: str = ssm_client.get_parameter(Name=parameter_name)["Parameter"]["Value"]
    except ssm_client.exceptions.ParameterNotFound:
        _logger.debug("Team %s Manifest SSM parameter not found: %s", team_name, parameter_name)
        return None
    _logger.debug("Team %s Manifest SSM parameter found.", team_name)
    return cast(MANIFEST_TEAM_TYPE, json.loads(raw_value))
def get_parameter(client, name: str) -> Dict[str, Any]:
    """Read an SSM parameter and parse its value as JSON.

    Args:
        client: a boto3 SSM client.
        name: full parameter name/path.

    Returns:
        The parsed JSON value of the parameter.

    Raises:
        client.exceptions.ParameterNotFound: if the parameter does not exist
            (re-raised after logging).
    """
    try:
        json_str: str = client.get_parameter(Name=name)["Parameter"]["Value"]
    except client.exceptions.ParameterNotFound:
        # BUG FIX: the original caught ``botocore.errorfactory.ParameterNotFound``,
        # which does not exist as a static attribute (botocore generates error
        # classes dynamically per-client), so the except clause itself raised
        # AttributeError. The generated class is reachable via client.exceptions.
        _logger.error("failed to read parameter %s", name)
        raise
    return cast(Dict[str, Any], json.loads(json_str))
def load_env_context_from_ssm(env_name: str) -> Optional[MANIFEST_TEAM_TYPE]:
    """Load the environment context document from SSM."""
    ssm = boto3.client("ssm")
    context = get_parameter(ssm, name=f"/orbit/{env_name}/context")
    return cast(MANIFEST_TEAM_TYPE, context)
def load_team_context_from_ssm(env_name: str, team_name: str) -> Optional[MANIFEST_TEAM_TYPE]:
    """Load the team context document from SSM."""
    ssm = boto3.client("ssm")
    context = get_parameter(ssm, name=f"/orbit/{env_name}/teams/{team_name}/context")
    return cast(MANIFEST_TEAM_TYPE, context)
def get_execution_history(notebookDir: str, notebookName: str) -> pd.DataFrame:
    """
    Get Notebook Execution History

    Parameters
    ----------
    notebookDir: str
        Name of notebook directory.
    notebookName: str
        Name of notebook.

    Returns
    -------
    df: pd.DataFrame
        Notebook execution history.

    Example
    --------
    >>> from aws_orbit_sdk import controller
    >>> controller.get_execution_history(notebookDir="notebook-directory", notebookName='mynotebook')
    """
    properties = get_properties()
    return _get_execution_history_from_local(notebookDir, notebookName, properties)
def _get_execution_history_from_local(notebook_basedir: str, src_notebook: str, props: dict) -> pd.DataFrame:
    """
    Get Notebook Execution History from EFS
    """
    notebook_dir = os.path.join(str(Path.home()), notebook_basedir, Path(src_notebook).stem)
    rows = [
        (str(candidate), datetime.fromtimestamp(candidate.stat().st_mtime), notebook_dir)
        for candidate in Path(notebook_dir).glob("*.ipynb")
        if candidate.is_file()
    ]
    if not rows:
        _logger.info(f"No output notebooks founds at: {notebook_dir}")
    return pd.DataFrame(rows, columns=["relativePath", "timestamp", "path"])
def _get_execution_history_from_s3(notebookBaseDir: str, srcNotebook: str, props: str) -> pd.DataFrame:
    """
    Get Notebook Execution History from s3

    Lists the output notebooks under the team's output prefix and returns one
    row per object: (notebook file name, last-modified timestamp, s3 path).
    """
    s3 = boto3.client("s3")
    notebookDir = os.path.join(notebookBaseDir, srcNotebook.split(".")[0])

    path = "{}/output/notebooks/{}/".format(props["AWS_ORBIT_TEAM_SPACE"], notebookDir)
    executions = []
    objects = s3.list_objects_v2(Bucket=props["AWS_ORBIT_S3_BUCKET"], Prefix=path)
    # NOTE(review): list_objects_v2 returns at most 1000 keys per call; a
    # paginator would be needed for larger histories.
    if "Contents" in objects.keys():
        # PERF FIX: reuse the response already fetched above instead of
        # issuing the identical list_objects_v2 request a second time.
        for key in objects["Contents"]:
            if key["Key"][-1] == "/":
                # Skip folder placeholder objects.
                continue
            notebookName = os.path.basename(key["Key"])
            path = key["Key"]
            arg = urllib.parse.quote(path)
            s3path = "s3://{}/{}".format(props["AWS_ORBIT_S3_BUCKET"], path)
            # link = '<a href="{}" target="_blank">{}</a>'.format(site + arg,"open")
            timestamp = key["LastModified"]
            executions.append((notebookName, timestamp, s3path))
    else:
        _logger.info("No output notebooks founds at: s3://{}/{}".format(props["AWS_ORBIT_S3_BUCKET"], path))

    df = pd.DataFrame(executions, columns=["relativePath", "timestamp", "s3path"])
    return df
def _get_invoke_function_name() -> Any:
    """
    Get invoke function Name.

    Returns
    -------
    Function Name.
    """
    props = get_properties()
    return "orbit-{}-{}-container-runner".format(props["AWS_ORBIT_ENV"], props["AWS_ORBIT_TEAM_SPACE"])
def run_python(taskConfiguration: dict) -> Any:
    """
    Runs Python Task

    Parameters
    ----------
    taskConfiguration : dict
        A task definition to execute.

        tasks : lst
            A list of python task definition to run.
        module : str
            The python module to run (without .py ext).
        functionName : str
            The python function to start the execution.
        sourcePaths : lst
            A list of s3 python source paths used for importing packages or modules into the application.
        params : dict
            A list of parameters for this task to override the notebook parameters.
        compute : optional, dict
            A list of runtime parameters to control execution.
        container : dict
            A list of parameters to control container execution.
        p_concurrent : str
            The number of parallel threads inside the container that will execute notebooks.
        env_vars : optional, list
            A list of environment parameters to pass to the container.

    Returns
    -------
    response: Any
        Response for the function or an error object.

    Raises
    ------
    RuntimeError
        If ``compute_type`` is present and is not ``"eks"``.

    Example
    --------
    >>> import aws_orbit_sdk.controller as controller
    >>> response = controller.run_python(
    ...     taskConfiguration = {
    ...         "tasks":  [
    ...             {
    ...                 "module": "pyspark.run_pyspark_local",
    ...                 "functionName": "run_spark_job",
    ...                 "sourcePaths": ["DataScienceRepo/samples/python"],
    ...                 "params": {
    ...                     "bucket": "users-env2",
    ...                     "p2": 'bar'
    ...                 }
    ...             }
    ...         ],
    ...         "compute": {
    ...             "container" : {
    ...                 "p_concurrent": "4"
    ...             },
    ...             "env_vars": [
    ...                 {
    ...                     'name': 'cluster_name',
    ...                     'value': clusterName
    ...                 }
    ...             ]
    ...         })
    """
    taskConfiguration["task_type"] = "python"
    if "compute_type" not in taskConfiguration or taskConfiguration["compute_type"] == "eks":
        return _run_task_eks(taskConfiguration)
    else:
        # BUG FIX: RuntimeError does not %-interpolate extra arguments the way
        # logging calls do — the original raised an exception whose "message"
        # was the tuple ("Unsupported compute_type '%s'", value). Format
        # explicitly instead.
        raise RuntimeError(f"Unsupported compute_type '{taskConfiguration['compute_type']}'")
def run_notebooks(taskConfiguration: dict) -> Any:
    """
    Runs Notebooks Tasks

    Parameters
    ----------
    taskConfiguration : dict
        A task definition to execute.

        notebooks : lst
            A list of notebook task definition to run.
        notebookName : str
            The filename of the notebook.
        sourcePath : str
            The relative path to the notebook file starting at the repository root.
        targetPath : str
            The target S3 directory where the output notebook and all related output will be generated.
        params : dict
            A list of parameters for this task to override the notebook parameters.
        compute : optional, dict
            A list of runtime parameters to control execution.
        container : dict
            A list of parameters to control container execution.
        p_concurrent : str
            The number of parallel processes inside the container that will execute notebooks.
        sns.topic.name : str
            A name of a topic to which messages are sent on task completion or failure.
        env_vars : optional, lst
            A list of environment parameters to pass to the container.

    Returns
    -------
    response: Any
        Response for the function or an error object.

    Raises
    ------
    RuntimeError
        If ``compute_type`` is present and is not ``"eks"``.

    Example
    --------
    >>> import aws_orbit_sdk.controller as controller
    >>> response = controller.run_notebooks(
    ...     taskConfiguration = {
    ...         "notebooks":  [ {
    ...             "notebookName": "Example-2-Extract-Files.ipynb",
    ...             "sourcePath": "samples/notebooks/A-LakeCreator",
    ...             "targetPath": "tests/createLake",
    ...             "params": {
    ...                 "bucketName": bucketName,
    ...                 "zipFileName": file,
    ...                 "targetFolder": extractedFolder
    ...             },
    ...             ...
    ...         },
    ...         "compute": {
    ...             "container" : {
    ...                 "p_concurrent": "4",
    ...             },
    ...             "env_vars": [
    ...                 {
    ...                     'name': 'cluster_name',
    ...                     'value': clusterName
    ...                 }
    ...             ],
    ...             "sns.topic.name": 'TestTopic',
    ...         }
    ... )
    """
    taskConfiguration["task_type"] = "jupyter"
    if "compute_type" not in taskConfiguration or taskConfiguration["compute_type"] == "eks":
        return _run_task_eks(taskConfiguration)
    else:
        # BUG FIX: RuntimeError does not %-interpolate extra arguments the way
        # logging calls do — format the message explicitly.
        raise RuntimeError(f"Unsupported compute_type '{taskConfiguration['compute_type']}'")
def list_team_running_jobs():
    """Return running jobs of other team members (excludes the current user)."""
    return list_running_jobs(team_only=True)
def list_my_running_jobs():
    """Return the current user's running jobs."""
    return list_running_jobs(team_only=False)
def list_running_jobs(team_only: bool = False):
    """List the orbit-runner jobs in the team namespace.

    Parameters
    ----------
    team_only : bool
        When True, select jobs whose ``username`` label differs from the
        current user (i.e. everyone else on the team); when False, select
        only the current user's jobs.

    Returns
    -------
    list
        The Kubernetes job items, or [] when the response has no items.

    Raises
    ------
    ApiException
        Re-raised after logging if the Kubernetes API call fails.
    """
    props = get_properties()
    team_name = props["AWS_ORBIT_TEAM_SPACE"]
    load_kube_config()
    # Derive the short username; fall back to an empty string instead of
    # crashing with AttributeError when neither env var is set.
    raw_user = os.environ.get("JUPYTERHUB_USER") or os.environ.get("USERNAME") or ""
    username = raw_user.split("@")[0]
    api_instance = BatchV1Api()
    # field_selector = "status.successful!=1"
    operand = "!=" if team_only else "="
    label_selector = f"app=orbit-runner,username{operand}{username}"
    _logger.info("using job selector %s", label_selector)
    try:
        api_response = api_instance.list_namespaced_job(
            namespace=team_name,
            _preload_content=False,
            label_selector=label_selector,
            # field_selector=field_selector,
            watch=False,
        )
        res = json.loads(api_response.data)
    except ApiException as e:
        _logger.info("Exception when calling BatchV1Api->list_namespaced_job: %s\n" % e)
        raise e
    if "items" not in res:
        return []
    return res["items"]
def list_current_pods(label_selector: str = None):
    """List pods in the team namespace, optionally filtered by a label selector.

    Parameters
    ----------
    label_selector : str, optional
        Kubernetes label selector (e.g. "app=orbit-runner") restricting the
        returned pods; all pods in the namespace when omitted.

    Returns
    -------
    list
        The pod items, or [] when the response has no items.

    Raises
    ------
    ApiException
        Re-raised after logging if the Kubernetes API call fails.
    """
    props = get_properties()
    team_name = props["AWS_ORBIT_TEAM_SPACE"]
    load_kube_config()
    api_instance = CoreV1Api()
    try:
        params = dict()
        params["namespace"] = team_name
        params["_preload_content"] = False
        if label_selector:
            params["label_selector"] = label_selector
        api_response = api_instance.list_namespaced_pod(**params)
        res = json.loads(api_response.data)
    except ApiException as e:
        # Fixed copy-pasted log message: this calls
        # CoreV1Api->list_namespaced_pod, not BatchV1Api->list_namespaced_job.
        _logger.info("Exception when calling CoreV1Api->list_namespaced_pod: %s\n" % e)
        raise e
    if "items" not in res:
        return []
    return res["items"]
def list_storage_pvc():
    """Return the persistent volume claims in the team namespace ([] if none)."""
    props = get_properties()
    team_name = props["AWS_ORBIT_TEAM_SPACE"]
    load_kube_config()
    api = CoreV1Api()
    _logger.debug(f"Listing {team_name} namespace persistent volume claims")
    kwargs = {"namespace": team_name, "_preload_content": False}
    try:
        raw = api.list_namespaced_persistent_volume_claim(**kwargs)
        body = json.loads(raw.data)
    except ApiException as e:
        _logger.info("Exception when calling CoreV1Api->list persistent volume claims: %s\n" % e)
        raise e
    return body.get("items", [])
def delete_storage_pvc(pvc_name: str):
load_kube_config()
api_instance = CoreV1Api()
props = get_properties()
team_name = props["AWS_ORBIT_TEAM_SPACE"]
_logger.debug(f"Deleting {team_name} namespace persistent volume claim {pvc_name}")
params = dict()
params["name"] = pvc_name
params["namespace"] = team_name
params["_preload_content"] = False
try:
api_response = api_instance.delete_namespaced_persistent_volume_claim(**params)
response = {
"status": str(api_response.status),
"reason": api_response.reason,
"message": f"Successfully deleted persistent volume claim={pvc_name}",
}
except ApiException as e:
_logger.info("Exception when calling CoreV1Api->delete persistent | |
<filename>tests/_core/test_intrusive.py
# _core/test_intrusive.py
"""Tests for rom_operator_inference._core._intrusive.py."""
import pytest
import numpy as np
from scipy import linalg as la
import rom_operator_inference as roi
from . import MODEL_FORMS, MODEL_KEYS, _get_data, _get_operators
# Mixins (private) ============================================================
class TestIntrusiveMixin:
    """Test _core._intrusive._IntrusiveMixin."""

    class Dummy(roi._core._intrusive._IntrusiveMixin,
                roi._core._base._BaseROM):
        """Minimal concrete ROM: stores only the modelform string so the
        mixin's dimension/operator machinery can be exercised in isolation."""
        def __init__(self, modelform):
            self.modelform = modelform

    def test_dimension_properties(self, n=20, m=3, r=7):
        """Test the properties _core._base._BaseROM.(n|r|Vr)."""
        model = self.Dummy("cH")
        assert model.n is None
        assert model.m == 0
        assert model.r is None
        assert model.Vr is None
        # Try setting n explicitly.
        with pytest.raises(AttributeError) as ex:
            model.n = n+1
        assert ex.value.args[0] == "can't set attribute (n = Vr.shape[0])"
        # Try setting r explicitly.
        with pytest.raises(AttributeError) as ex:
            model.r = r+1
        assert ex.value.args[0] == "can't set attribute (r = Vr.shape[1])"
        # Correct assignment: n and r are inferred from the basis Vr.
        Vr = np.random.random((n,r))
        model.Vr = Vr
        assert model.n == n
        assert model.m == 0
        assert model.r == r
        assert model.Vr is Vr
        # Correct cleanup: deleting Vr resets the derived dimensions.
        del model.Vr
        assert model.Vr is None
        assert model.n is None
        assert model.r is None
        # Try setting Vr to None (not allowed for intrusive ROMs).
        model = self.Dummy("AB")
        assert model.n is None
        assert model.m is None
        assert model.r is None
        assert model.Vr is None
        with pytest.raises(AttributeError) as ex:
            model.Vr = None
        assert ex.value.args[0] == "Vr=None not allowed for intrusive ROMs"

    def test_operator_properties(self, n=10, m=4, r=2):
        """Test the properties _core._base._BaseROM.(c_|A_|H_|G_|B_)."""
        # Full-order (n-dim) and reduced-order (r-dim) operator sets.
        c, A, H, G, B = fom_operators = _get_operators(n, m, True)
        c_, A_, H_, G_, B_ = rom_operators = _get_operators(r, m)
        model = self.Dummy(self.Dummy._MODEL_KEYS)
        model.Vr = np.zeros((n,r))
        model.m = m
        # Each FOM key ('c','A',...) pairs with a ROM key ('c_','A_',...);
        # both start as None and accept direct assignment.
        for fom_key, op, op_ in zip("cAHGB", fom_operators, rom_operators):
            rom_key = fom_key + '_'
            assert hasattr(model, fom_key)
            assert hasattr(model, rom_key)
            assert getattr(model, fom_key) is None
            assert getattr(model, rom_key) is None
            setattr(model, fom_key, op)
            setattr(model, rom_key, op_)
            assert getattr(model, fom_key) is op
            assert getattr(model, rom_key) is op_
        # Quadratic/cubic operators are also accepted in compact
        # (n(n+1)/2, n(n+1)(n+2)/6) and expanded (r**2, r**3) column sizes.
        model.H = np.zeros((n,n*(n + 1)//2))
        model.G = np.zeros((n,n*(n + 1)*(n + 2)//6))
        model.H_ = np.zeros((r,r**2))
        model.G_ = np.zeros((r,r**3))

    def test_check_fom_operator_shape(self, n=10, m=3, r=4):
        """Test _core._intrusive._IntrusiveMixin._check_fom_operator_shape().
        """
        c, A, H, G, B = operators = _get_operators(n, m, expanded=True)
        # Try correct match but dimension 'r' is missing.
        model = self.Dummy("A")
        with pytest.raises(AttributeError) as ex:
            model._check_fom_operator_shape(A, 'A')
        assert ex.value.args[0] == "no basis 'Vr' (call fit())"
        # Try correct match but dimension 'm' is missing.
        model = self.Dummy("B")
        model.Vr = np.zeros((n,r))
        with pytest.raises(AttributeError) as ex:
            model._check_fom_operator_shape(B, 'B')
        assert ex.value.args[0] == "no input dimension 'm' (call fit())"
        # Try with dimensions set, but improper shapes: each operator is
        # sliced to break exactly one expected dimension.
        model = self.Dummy(self.Dummy._MODEL_KEYS)
        model.Vr = np.zeros((n,r))
        model.m = m
        with pytest.raises(ValueError) as ex:
            model._check_fom_operator_shape(c[:-1], 'c')
        assert ex.value.args[0] == \
            f"c.shape = {c[:-1].shape}, must be (n,) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._check_fom_operator_shape(A[:-1,1:], 'A')
        assert ex.value.args[0] == \
            f"A.shape = {A[:-1,1:].shape}, must be (n,n) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._check_fom_operator_shape(H[:-1,:-1], 'H')
        assert ex.value.args[0] == \
            f"H.shape = {H[:-1,:-1].shape}, must be (n,n**2) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._check_fom_operator_shape(G[1:], 'G')
        assert ex.value.args[0] == \
            f"G.shape = {G[1:].shape}, must be (n,n**3) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._check_fom_operator_shape(B[1:-1], 'B')
        assert ex.value.args[0] == \
            f"B.shape = {B[1:-1].shape}, must be (n,m) with n = {n}, m = {m}"
        # Correct usage: unmodified operators pass the check.
        for key, op in zip("cAHGB", operators):
            model._check_fom_operator_shape(op, key)

    def test_check_operators_keys(self):
        """Test _core._intrusive._IntrusiveMixin._check_operators_keys()."""
        model = roi._core._intrusive._IntrusiveMixin()
        model.modelform = "cAHB"
        # Only the dict KEYS are validated; the values are irrelevant here.
        v = None
        # Try with missing operator keys.
        with pytest.raises(KeyError) as ex:
            model._check_operators_keys({"A":v, "H":v, "B":v})
        assert ex.value.args[0] == "missing operator key 'c'"
        with pytest.raises(KeyError) as ex:
            model._check_operators_keys({"H":v, "B":v})
        assert ex.value.args[0] == "missing operator keys 'c', 'A'"
        # Try with surplus operator keys.
        with pytest.raises(KeyError) as ex:
            model._check_operators_keys({'CC':v, "c":v, "A":v, "H":v, "B":v})
        assert ex.value.args[0] == "invalid operator key 'CC'"
        with pytest.raises(KeyError) as ex:
            model._check_operators_keys({"c":v, "A":v, "H":v, "B":v,
                                         'CC':v, 'LL':v})
        assert ex.value.args[0] == "invalid operator keys 'CC', 'LL'"
        # Correct usage.
        model._check_operators_keys({"c":v, "A":v, "H":v, "B":v})

    def test_process_fit_arguments(self, n=30, r=10):
        """Test _core._intrusive._IntrusiveMixin._process_fit_arguments()."""
        Vr = np.random.random((n,r))
        model = self.Dummy("c")
        operators = {k:None for k in model.modelform}
        # Correct usage: the basis is stored and dimensions are derived.
        model._process_fit_arguments(Vr, operators)
        assert model.n == n
        assert model.r == r
        assert model.Vr is Vr

    def test_project_operators(self, n=7, m=5, r=3):
        """Test _core._intrusive._IntrusiveMixin._project_operators()."""
        # Get test data.
        Vr = np.random.random((n,r))
        # Expected shapes: FOM operators expanded (n,n**2)/(n,n**3); ROM
        # operators compact (r, r(r+1)/2) / (r, r(r+1)(r+2)/6).
        shapes = {
            "c": (n,),
            "A": (n,n),
            "H": (n,n**2),
            "G": (n,n**3),
            "B": (n,m),
            "c_": (r,),
            "A_": (r,r),
            "H_": (r,r*(r+1)//2),
            "G_": (r,r*(r+1)*(r+2)//6),
            "B_": (r,m),
        }
        # Initialize the test model.
        model = self.Dummy("cAHGB")
        model.Vr = Vr
        # Get test operators.
        c, A, H, G, B = _get_operators(n, m, expanded=True)
        operators = {"c":c, "A":A, "H":H, "G":G, "B":B}
        B1d = B[:,0]
        # Try to fit the model with operators that are misaligned with Vr.
        cbad = c[::2]
        Abad = A[:,:-2]
        Hbad = H[:,1:]
        Gbad = G[:,:-1]
        Bbad = B[1:,:]
        with pytest.raises(ValueError) as ex:
            model._project_operators({"c":cbad, "A":A, "H":H, "G":G, "B":B})
        assert ex.value.args[0] == \
            f"c.shape = {cbad.shape}, must be (n,) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._project_operators({"c":c, "A":Abad, "H":H, "G":G, "B":B})
        assert ex.value.args[0] == \
            f"A.shape = {Abad.shape}, must be (n,n) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._project_operators({"c":c, "A":A, "H":Hbad, "G":G, "B":B})
        assert ex.value.args[0] == \
            f"H.shape = {Hbad.shape}, must be (n,n**2) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._project_operators({"c":c, "A":A, "H":H, "G":Gbad, "B":B})
        assert ex.value.args[0] == \
            f"G.shape = {Gbad.shape}, must be (n,n**3) with n = {n}"
        with pytest.raises(ValueError) as ex:
            model._project_operators({"c":c, "A":A, "H":H, "G":G, "B":Bbad})
        assert ex.value.args[0] == \
            f"B.shape = {Bbad.shape}, must be (n,m) with n = {n}, m = {m}"
        # Test each modelform: only the operators named in the form should
        # be stored/projected; all others stay None.
        for form in MODEL_FORMS:
            model = self.Dummy(form)
            model.Vr = Vr
            ops = {key:val for key,val in operators.items() if key in form}
            model._project_operators(ops)
            for prefix in MODEL_KEYS:
                attr = prefix+'_'
                assert hasattr(model, prefix)
                assert hasattr(model, attr)
                fom_op = getattr(model, prefix)
                rom_op = getattr(model, attr)
                if prefix in form:
                    assert fom_op is operators[prefix]
                    assert fom_op.shape == shapes[prefix]
                    assert isinstance(rom_op, np.ndarray)
                    assert rom_op.shape == shapes[attr]
                else:
                    assert fom_op is None
                    assert rom_op is None
            if "B" in form:
                assert model.m == m
            else:
                assert model.m == 0
        # Fit the model with 1D inputs (1D array for B): m should become 1
        # and B should be promoted to a column matrix.
        model = self.Dummy("cAHB")
        model.Vr = Vr
        model._project_operators({"c":c, "A":A, "H":H, "B":B1d})
        assert model.m == 1
        assert model.B.shape == (n,1)
        assert model.B_.shape == (r,1)

    def _test_fit(self, ModelClass, n=7, m=5, r=3):
        """Test _core._intrusive._IntrusiveMixin.fit().

        Shared driver reused by the public ROM test classes below; exercises
        fit() across every modelform, with and without 1D inputs.
        """
        # Get test data.
        Vr = np.random.random((n,r))
        # Get test operators.
        c, A, H, G, B = _get_operators(n, m, expanded=True)
        operators = {"c":c, "A":A, "H":H, "G":G, "B":B}
        B1d = B[:,0]
        # Test each modelform.
        for form in MODEL_FORMS:
            model = ModelClass(form)
            ops = {key:val for key,val in operators.items() if key in form}
            model.fit(Vr, ops)
            if "B" in form:         # Also test with one-dimensional inputs.
                ops["B"] = B1d
                model.fit(Vr, ops)
# Useable classes (public) ====================================================
class TestIntrusiveDiscreteROM:
    """Test _core._intrusive.IntrusiveDiscreteROM."""

    def test_f(self, n=5, m=2):
        """Test _core._intrusive.IntrusiveDiscreteROM.f()."""
        c, A, H, G, B = _get_operators(n, m, expanded=True)
        Vr = np.zeros((n,n//2))

        # Constant + linear model: f(x) must ignore any input argument.
        model = roi._core._intrusive.IntrusiveDiscreteROM("cA")
        model.Vr = Vr
        model.c, model.A = c, A
        x = np.random.random(n)
        expected = c + A @ x
        assert np.allclose(model.f(x), expected)
        assert np.allclose(model.f(x, -1), expected)

        # Quadratic + cubic + input model.
        model = roi._core._intrusive.IntrusiveDiscreteROM("HGB")
        model.Vr = Vr
        model.m = m
        model.H, model.G, model.B = H, G, B
        u = np.random.random(m)
        x = np.random.random(n)
        x_sq = np.kron(x, x)
        expected = H @ x_sq + G @ np.kron(x, x_sq) + B @ u
        assert np.allclose(model.f(x, u), expected)

    def test_fit(self):
        """Test _core._intrusive.IntrusiveDiscreteROM.fit()."""
        TestIntrusiveMixin()._test_fit(roi.IntrusiveDiscreteROM)
class TestIntrusiveContinuousROM:
"""Test _core._intrusive.IntrusiveContinuousROM."""
def test_f(self, n=5, m=2):
"""Test _core._intrusive.IntrusiveContinuousROM.f()."""
c, A, H, G, B = _get_operators(n, m, expanded=True)
Vr = np.zeros((n, n//2))
# Check that the constructed f takes the right number of arguments.
model = roi._core._intrusive.IntrusiveContinuousROM("cA")
model.Vr = Vr
model.c, model.A = c, A
x = np.random.random(n)
y = c + A @ x
assert np.allclose(model.f(0, x), y)
assert np.allclose(model.f(1, x), y)
assert np.allclose(model.f(1, x, -1), y)
model = roi._core._intrusive.IntrusiveContinuousROM("HGB")
model.Vr = Vr
model.m = m
model.H, model.G, model.B = H, G, B
uu = np.random.random(m)
u = lambda t: uu + t
x = np.random.random(n)
x2 = np.kron(x, x)
y = H @ x2 + G @ np.kron(x, x2) + B @ uu
assert np.allclose(model.f(0, x, u), y)
y = H @ x2 + G @ np.kron(x, x2) + B @ (uu | |
<filename>Extended_Figure_7/scripts/datasets_build.py
import os
import gzip
import sys
sys.path.append('../')
import pickle
import json
import tqdm
import glob
import click
from multiprocessing import Pool
import pandas as pd
import numpy as np
from sklearn.metrics import roc_auc_score
from mutrate import set_mutrate
import conf
import oncotree
tree = oncotree.Oncotree()
# load model selection
model_selection_pickle = os.path.join(conf.output_boostdm, 'model_selection', 'eval_data.pickle.gz')
with gzip.open(model_selection_pickle, 'rb') as f:
model_selection_dict = pickle.load(f)
# cohorts and driver genes
df_drivers = pd.read_csv(conf.drivers_path, sep='\t')
driver_gene_ttypes = set(map(tuple, df_drivers[['SYMBOL', 'CANCER_TYPE']].drop_duplicates().values.tolist()))
df_drivers_small = df_drivers[['COHORT', 'CANCER_TYPE']].drop_duplicates()
cohort_ttype_dict = dict(zip(df_drivers_small['COHORT'], df_drivers_small['CANCER_TYPE']))
valid_cohorts = set(df_drivers['COHORT'].values)
driver_genes = df_drivers['SYMBOL'].drop_duplicates().values.tolist()
ttype_dict = {}
df_stats_cohort = pd.read_csv(conf.cohorts_path, sep='\t')
for tt, cohort in df_stats_cohort[['CANCER_TYPE', 'COHORT']].drop_duplicates().values:
ttype_dict[tt] = ttype_dict.get(tt, []) + [cohort]
# load observed mutations
def load_observed():
    """Read the discovery mutations table from the boostDM output tree."""
    print('Loading observed mutations...')
    path = os.path.join(conf.output_boostdm, 'discovery', 'mutations.tsv')
    return pd.read_csv(path, sep='\t')
observed_mutations = load_observed()
# load saturation prediction
def load_saturation():
    """Read the aggregated in-silico saturation mutagenesis predictions."""
    print('Loading in silico saturation mutagenesis...')
    path = os.path.join(os.environ['PATH_SOURCE_DATA'], 'extended-figure7-source-data', 'saturation_aggregate.tsv.gz')
    return pd.read_csv(path, sep='\t', low_memory=False)
saturation_prediction = load_saturation()
def get_nondriver_gene_ttype():
    """
    Get the collection of all non-driver gene-ttype pairs,
    with gene being a driver in at least another ttype
    """
    # mutrate is defined for a given gene-ttype if defined in some
    # gene-cohort with cohort in ttype
    with_mutrate = set()
    for ttype in tqdm.tqdm(tree.get_ttypes('CANCER')):
        for cohort in ttype_dict.get(ttype, []):
            try:
                pattern = os.path.join(conf.mutrate_folder, f'{cohort}.mutrate_output', 'norm_*.out.json')
                for fn in glob.glob(pattern):
                    gene = os.path.basename(fn).split('_')[1].split('.')[0]
                    with_mutrate.add((gene, ttype))
            except FileNotFoundError as e:
                print(e)
                continue
    # every (gene, ttype) pair with observed mutations, mapped through the
    # cohort -> cancer-type table; pairs without a known cancer type drop out
    gene_cohort = set(zip(observed_mutations['gene'], observed_mutations['COHORT']))
    candidates = {(g, cohort_ttype_dict.get(c)) for g, c in gene_cohort}
    candidates = {p for p in candidates if p[1] is not None}
    # identify all non-driver pairs
    known_drivers = set(map(tuple, df_drivers[['SYMBOL', 'CANCER_TYPE']].drop_duplicates().values.tolist()))
    return list((with_mutrate & candidates) - known_drivers)
def put_chr(chromosome):
    """Return *chromosome* as a string guaranteed to carry the 'chr' prefix."""
    text = str(chromosome)
    return text if text.startswith('chr') else 'chr' + text
def get_observed_mutations(gene, ttype, driver=True):
    """Return the 'chrN.pos.alt' hashes of mutations observed for *gene* in *ttype*.

    With driver=True, keep only mutations with boostDM_score >= 0.5.
    """
    mask = (observed_mutations['ttype'] == ttype) & (observed_mutations['gene'] == gene)
    obs = observed_mutations[mask].copy()
    obs.drop_duplicates(['chr', 'pos', 'mut'], inplace=True)
    # attach the boostDM score by matching genomic coordinates
    merged = obs.merge(saturation_prediction[['gene', 'chr', 'pos', 'alt', 'boostDM_score']],
                       right_on=['gene', 'chr', 'pos', 'alt'],
                       left_on=['gene', 'chr', 'pos', 'mut'])
    if driver:
        merged = merged[merged['boostDM_score'] >= 0.5]
    hashes = merged.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
    return set(hashes.values)
def create_observed_vs_non_observed(driver=False):
    """Split every saturation-prediction file into observed vs non-observed hashes.

    Returns (ttype, gene) -> (specific_model_flag,
    {'positive': observed, 'negative': non-observed}).
    """
    res = {}
    pattern = os.path.join(conf.output_boostdm, 'saturation', 'prediction', '*.*.prediction.tsv.gz')
    for sat_fn in tqdm.tqdm(glob.glob(pattern)):
        gene, ttype = os.path.basename(sat_fn).split('.')[:2]
        print(gene, ttype)
        # all possible mutations for this gene/ttype
        df_sat = pd.read_csv(sat_fn, sep='\t')
        if driver:
            df_sat = df_sat[df_sat['boostDM_score'] >= 0.5]
        hashes = df_sat.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
        all_hashes = set(hashes.values)
        # observed vs the rest
        observed = get_observed_mutations(gene, ttype, driver=driver)
        non_observed = all_hashes.difference(observed)
        # mark whether a gene/ttype-specific model exists
        ttype_model, gene_model = model_selection_dict.get((ttype, gene), (None, None))
        specific = (ttype_model == ttype) and (gene_model == gene)
        res[(ttype, gene)] = (specific, {'positive': observed, 'negative': non_observed})
    return res
def create_driver_vs_passenger(non_observed=False):
    """Split each specific-model gene/ttype into driver vs passenger hashes.

    With non_observed=True, observed mutations are removed from both sides.
    """
    res = {}
    leaves = tree.get_ttypes('CANCER')
    eval_fn = os.path.join(conf.output_boostdm, 'model_selection', 'eval_data.pickle.gz')
    with gzip.open(eval_fn, 'rb') as g:
        model_dict = pickle.load(g)
    # keep only gene/ttype pairs that have their own model on a leaf ttype
    gene_ttypes = {k for k, v in model_dict.items() if (k == v) and (k[0] in leaves)}
    for ttype, gene in tqdm.tqdm(gene_ttypes):
        sat_fn = os.path.join(conf.output_boostdm, 'saturation', 'prediction', f'{gene}.{ttype}.prediction.tsv.gz')
        observed = set()
        if non_observed:
            observed = get_observed_mutations(gene, ttype, driver=False)
        try:
            df_sat = pd.read_csv(sat_fn, sep='\t')
        except FileNotFoundError as e:
            print(e)
            continue
        def _hashes(frame):
            # row -> 'chrN.pos.alt' identifier
            s = frame.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
            return set(s.values)
        driver = _hashes(df_sat[df_sat['boostDM_score'] >= 0.5])
        passenger = _hashes(df_sat[df_sat['boostDM_score'] < 0.5])
        ttype_model, gene_model = model_selection_dict.get((ttype, gene), (None, None))
        specific = (ttype_model == ttype) and (gene_model == gene)
        res[(ttype, gene)] = (specific, {'positive': driver.difference(observed),
                                         'negative': passenger.difference(observed)})
    return res
saturation_annotation = os.path.join(conf.output_boostdm, 'saturation', 'annotation')
def create_non_drivers_func(item):
    """Worker: build (observed, non-observed) hash sets for one (gene, ttype) pair."""
    gene, ttype = item
    matches = glob.glob(os.path.join(saturation_annotation, '*', f'{gene}.annotated.out.gz'))
    if not matches:
        # no annotation file for this gene anywhere under the annotation tree
        return dict()
    df_sat = pd.read_csv(matches[0], sep='\t', low_memory=False)
    hashes = df_sat.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
    all_hashes = set(hashes.values)
    # observed vs the rest
    observed = get_observed_mutations(gene, ttype, driver=False)
    return {(ttype, gene): (False, {'positive': observed,
                                    'negative': all_hashes.difference(observed)})}
def create_non_drivers(cores=10):
    """Build the non-driver dataset in parallel over pairs from all_non_drivers.json."""
    with open('./datasets/all_non_drivers.json', 'rt') as f:
        pairs = json.load(f)
    combined = dict()
    with Pool(cores) as pool:
        for partial in tqdm.tqdm(pool.imap(create_non_drivers_func, pairs), total=len(pairs)):
            combined.update(partial)
    return combined
"""
def create_non_drivers(cores=10):
res = {}
with open('./datasets/all_non_drivers.json', 'rt') as f:
all_non_drivers = json.load(f)
saturation_annotation = os.path.join(conf.output_boostdm, 'saturation', 'annotation')
for gene, ttype in tqdm.tqdm(all_non_drivers):
# we just need to retrieve all genomic coordinates for each gene
try:
sat_fn = next(iter(glob.glob(os.path.join(saturation_annotation, '*', f'{gene}.annotated.out.gz'))))
except:
continue
df_sat = pd.read_csv(sat_fn, sep='\t')
sat_hash = df_sat.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
all_hashes = set(sat_hash.values)
# get observed mutations
observed = get_observed_mutations(gene, ttype, driver=False)
non_observed = all_hashes.difference(observed)
res[(ttype, gene)] = (False, {'positive': observed,
'negative': non_observed})
return res
"""
def create_csqn_type(csqn_type='nonsense'):
    """Split each gene/ttype saturation file by consequence type.

    Parameters
    ----------
    csqn_type : str
        Consequence-type suffix: rows with ``csqn_type_<csqn_type> == 1``
        form the positive set, everything else the negative set.

    Returns
    -------
    dict
        (ttype, gene) -> (specific_model_flag,
                          {'positive': csqn hashes, 'negative': other hashes}).
    """
    res = {}
    # NOTE: dropped a stray f-prefix on the glob pattern (no placeholders).
    for sat_fn in tqdm.tqdm(glob.glob(os.path.join(conf.path_saturation, '*.*.prediction.out.gz'))):
        gene, ttype = tuple(os.path.basename(sat_fn).split('.')[:2])
        # get all possible mutations
        df_sat = pd.read_csv(sat_fn, sep='\t')
        df_csqn = df_sat[df_sat[f'csqn_type_{csqn_type}'] == 1]
        sat_csqn = df_csqn.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
        csqn = set(sat_csqn.values)
        sat_all = df_sat.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
        all_muts = set(sat_all.values)  # renamed from 'all': don't shadow the builtin
        ttype_model, gene_model = model_selection_dict.get((ttype, gene), (None, None))
        specific = False
        if (ttype_model == ttype) and (gene_model == gene):
            specific = True
        other = all_muts.difference(csqn)
        res[(ttype, gene)] = (specific, {'positive': csqn, 'negative': other})
    return res
def create_feature(feature, threshold=0.0):
    """
    Separate mutations by feature value.

    Rows with ``df[feature] >= threshold`` form the positive set; every
    other mutation in the same prediction file forms the negative set.
    (The previous docstring described a 'quantile' argument that does not
    exist — the split is a plain inclusive threshold.)

    Parameters
    ----------
    feature : str
        Column of the prediction table to threshold on.
    threshold : float
        Inclusive lower bound for membership in the positive set.

    Returns
    -------
    dict
        (ttype, gene) -> (specific_model_flag,
                          {'positive': hashes, 'negative': hashes}).
    """
    res = {}
    for sat_fn in tqdm.tqdm(glob.glob(os.path.join(conf.path_saturation, '*.*.prediction.out.gz'))):
        gene, ttype = tuple(os.path.basename(sat_fn).split('.')[:2])
        df = pd.read_csv(sat_fn, sep='\t')
        # positive set
        dg = df[df[feature] >= threshold]
        sat = dg.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
        positive = set(sat.values)
        # all mutations
        sat_all = df.apply(lambda r: put_chr(r['chr']) + '.' + str(r['pos']) + '.' + str(r['alt']), axis=1)
        all_muts = set(sat_all.values)  # renamed from 'all': don't shadow the builtin
        ttype_model, gene_model = model_selection_dict.get((ttype, gene), (None, None))
        specific = False
        if (ttype_model == ttype) and (gene_model == gene):
            specific = True
        negative = all_muts.difference(positive)
        res[(ttype, gene)] = (specific, {'positive': positive, 'negative': negative})
    return res
def create_saturation_vectors(specific_saturation_folder):
    """Build per-gene, per-ttype saturation vectors from prediction files.

    Parameters
    ----------
    specific_saturation_folder : str
        Folder containing ``<gene>.<ttype>.prediction.tsv.gz`` files.

    Returns
    -------
    dict
        gene -> {ttype: (model, boostDM_class vector ordered by protein
        position, matching aachange vector)}; only (gene, ttype) pairs
        listed in driver_gene_ttypes are included.
    """
    res = {}
    for fn in tqdm.tqdm(glob.glob(os.path.join(specific_saturation_folder, '*.*.prediction.tsv.gz'))):
        gene = os.path.basename(fn).split('.')[0]
        ttype = os.path.basename(fn).split('.')[1]
        if (gene, ttype) not in driver_gene_ttypes:
            continue
        df = pd.read_csv(fn, sep='\t')
        df.drop_duplicates(inplace=True)
        # skip empty tables and tables with missing aachange annotations
        if df.shape[0] == 0 or df[pd.isnull(df["aachange"])].shape[0] > 0:
            continue
        # aachange looks like e.g. 'A123T': strip first/last letter -> position
        df["protein_position"] = df.apply(lambda row: int(row["aachange"][1:-1]), axis=1)
        df.sort_values("protein_position", inplace=True)
        saturation_vector = np.array(list(map(int, df['boostDM_class'].values)))
        aa_mutation_vector = np.array(df['aachange'].values)
        # NOTE(review): positional iloc[0, [-5, -4]] assumes a fixed column
        # layout in the prediction file — verify against the file schema.
        model = tuple(df.iloc[0, [-5, -4]])
        # (removed an always-true 'if saturation_vector is not None' guard;
        # np.array(...) never returns None)
        res.setdefault(gene, {})[ttype] = (model, saturation_vector, aa_mutation_vector)
    return res
def create_mutrate_dict_func(item):
    """Worker: average per-sample trinucleotide mutation probabilities for one (gene, ttype).

    Returns {(gene, ttype): (mean 96-channel probability vector, total
    mutation burden, number of samples)} or {} if any cohort file is missing.
    """
    gene, ttype = item
    probs = np.zeros(96)
    burden = []
    for cohort in ttype_dict[ttype]:
        cohort_json = os.path.join(conf.mutrate_folder, cohort + ".mutrate_output", f"norm_{gene}.out.json")
        try:
            with open(cohort_json, 'rt') as g:
                dict_probs = json.load(g)
        except FileNotFoundError:
            # a single missing cohort file drops the pair entirely
            return dict()
        for sample in dict_probs[gene].keys():
            burden.append(np.sum(dict_probs[gene][sample]))
            probs = np.vstack((probs, dict_probs[gene][sample]))
    normalized = np.mean(probs, axis=0)
    return {(gene, ttype): (normalized, np.sum(burden), len(burden))}
def create_mutrate_dict(gene_ttype_set, cores=10):
    """Compute mutation-rate summaries for every (gene, ttype) pair, in parallel."""
    combined = dict()
    with Pool(cores) as pool:
        for partial in tqdm.tqdm(pool.imap(create_mutrate_dict_func, gene_ttype_set), total=len(gene_ttype_set)):
            combined.update(partial)
    return combined
"""
def create_mutrate_dict(gene_ttype_set):
d_results = {}
for gene, ttype in tqdm.tqdm(gene_ttype_set):
probs = np.zeros(96)
burden = []
for cohort in ttype_dict[ttype]:
path_cohort = os.path.join(conf.mutrate_folder,
cohort + ".mutrate_output",
f"norm_{gene}.out.json")
try:
dict_probs = json.load(open(path_cohort, 'r'))
except FileNotFoundError:
continue
for sample in dict_probs[gene].keys():
burden.append(np.sum(dict_probs[gene][sample]))
probs = np.vstack((probs, dict_probs[gene][sample]))
normalized_probs = np.mean(probs, axis=0)
N = len(burden)
burden_total = np.sum(burden)
d_results[(gene, ttype)] = (normalized_probs, burden_total, N)
return d_results
"""
def format_data(raw_data_instance, mutrate_table, specific=True):
    """Attach mutation rates to each positive/negative split and score separability.

    Returns (res, auc): res maps (ttype, gene) -> [positive_mutrates,
    negative_mutrates]; auc maps the same key to the ROC-AUC of mutrate as a
    separator between the two sets. With specific=True, pairs without a
    gene/ttype-specific model are skipped.
    """
    res, auc = {}, {}
    for key, (has_specific, sets) in tqdm.tqdm(raw_data_instance.items()):
        if specific and not has_specific:
            continue
        positive, negative = sets['positive'], sets['negative']
        if not positive or not negative:
            continue
        try:
            ttype, gene = key[0], key[1]
            pos_rates = set_mutrate(gene, ttype, positive, mutrate_table=mutrate_table)
            neg_rates = set_mutrate(gene, ttype, negative, mutrate_table=mutrate_table)
            res[key] = [pos_rates, neg_rates]
            labels = [1] * len(pos_rates) + [0] * len(neg_rates)
            scores = pos_rates + neg_rates
            auc[key] = roc_auc_score(labels, scores)
        except AssertionError:
            # set_mutrate signals missing data via assertions; skip the pair
            continue
    return res, auc
def reformat(raw):
res = raw['mutrate']
auc = | |
Data.RStar.median()
EStar_lower_err = np.percentile(Data.EStar.as_matrix(), 25)
EStar_upper_err = np.percentile(Data.EStar.as_matrix(), 75)
RStar_lower_err = np.percentile(Data.RStar.as_matrix(), 25)
RStar_upper_err = np.percentile(Data.RStar.as_matrix(), 75)
cNorm = colors.Normalize(vmin=MinMChi, vmax=MaxMChi)
plt.cm.ScalarMappable(norm=cNorm, cmap=ColourMap)
# plot the rstar vs estar
sc = ax.scatter(EStarMedian,RStarMedian,c=colour,s=50, edgecolors='k', zorder=100, norm=cNorm)
ax.errorbar(EStarMedian,RStarMedian,xerr=[[EStarMedian-EStar_lower_err],[EStar_upper_err-EStarMedian]], yerr=[[RStarMedian-RStar_lower_err],[RStar_upper_err-RStarMedian]],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
# Finalise the figure
plt.xlabel('$E^*={{-2\:C_{HT}\:L_H}/{S_C}}$')
plt.ylabel('$R^*=S/S_C$')
plt.xlim(0.1,20)
plt.ylim(0.05,1)
# add colour bar
cbar = plt.colorbar(sc,cmap=ColourMap)
colorbarlabel='Basin ID'
cbar.set_label(colorbarlabel, fontsize=10)
# tick_locator = ticker.MaxNLocator(nbins=5)
# cbar.locator = tick_locator
# cbar.update_ticks()
#save output
plt.savefig(PlotDirectory+FilenamePrefix +"_estar_vs_rstar{}.png".format(Sc), dpi=300)
plt.clf()
def PlotEStarRStarSubPlots(DataDirectory, FilenamePrefix, PlotDirectory, Sc = 0.8):
    """
    Make a composite plot of E* R* and R* Ksn
    Args:
        DataDirectory (str): the data directory
        FilenamePrefix (str): the file name prefix
        PlotDirectory (str): The directory into which the plots are saved
        Sc (float): The critical slope to be used in the analysis
    FJC
    """
    # NOTE(review): DataDirectory and Sc are currently unused in this body;
    # the csv is read from PlotDirectory and no value depends on Sc.
    input_csv = PlotDirectory+FilenamePrefix+'_basin_hillslope_data.csv'
    df = pd.read_csv(input_csv)
    # set up the figure: ax[0] = ksn vs R*, ax[1] = E* vs R*
    fig, ax = plt.subplots(nrows=1, ncols=2, figsize=(10,5))
    #choose colormap
    ColourMap = cm.RdYlBu
    # get the basins
    basins = df['basin_keys'].unique()
    NoBasins = len(basins)
    print(basins)
    # scatter the medians, coloured by basin, with asymmetric error bars
    sc = ax[1].scatter(df.Estar_median,df.Rstar_median,c=basins,s=50, edgecolors='k', zorder=100, cmap=ColourMap)
    ax[1].errorbar(df.Estar_median,df.Rstar_median,xerr=[df['Estar_lower_err'], df['Estar_upper_err']], yerr=[df['Rstar_lower_err'], df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    sc = ax[0].scatter(df.mchi_median,df.Rstar_median,c=basins,s=50, edgecolors='k', zorder=100, cmap=ColourMap)
    ax[0].errorbar(df.mchi_median,df.Rstar_median,xerr=[df['mchi_lower_err'], df['mchi_upper_err']], yerr=[df['Rstar_lower_err'], df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    # plot the theoretical relationships for each one
    # Calculate analytical relationship for estar rstar
    EStar_model = np.logspace(-1,3,1000)
    RStar_model = CalculateRStar(EStar_model)
    # Plot with open figure
    ax[1].plot(EStar_model,RStar_model,c='0.5',ls='--')
    # calculate linear fit for Rstar ksn
    slope, intercept, r_value, p_value, std_err = stats.linregress(df.mchi_median, df.Rstar_median)
    print(slope, intercept, r_value, p_value)
    x = df.mchi_median.values
    print(x)
    new_y = slope*x + intercept
    ax[0].plot(x, new_y, c='0.5', ls='--')
    # get the difference between the linear fit and the real R* for each basin and
    # print to csv for plotting
    residual = df.Rstar_median.values - new_y
    print(residual)
    df['rstar_ksn_residual'] = residual
    OutputFilename = PlotDirectory+FilenamePrefix+'_basin_hillslope_data_residuals.csv'
    df.to_csv(OutputFilename, index=False)
    # Finalise the figure
    ax[1].set_xlabel('$E^*={-2\:C_{\mathrm{HT}}\:L_{\mathrm{H}}}/{S_{\mathrm{c}}}$')
    ax[0].set_ylabel('$R^*=S/S_{\mathrm{c}}$')
    ax[1].set_xlim(0.1,25)
    ax[1].set_ylim(0.2,1)
    ax[0].set_xlim(10,90)
    ax[0].set_ylim(0.2,1)
    ax[0].set_xlabel('$k_{\mathrm{sn}}$')
    # add colour bar shared by both panels
    m = cm.ScalarMappable(cmap=ColourMap)
    m.set_array(basins)
    cbar = plt.colorbar(m, ax=ax.ravel().tolist())
    tick_locator = ticker.MaxNLocator(nbins=5)
    cbar.locator = tick_locator
    cbar.update_ticks()
    cbar.ax.invert_yaxis()
    cbar.set_label('Basin ID')
    print("Made the E*R* plots")
    #save output
    plt.savefig(PlotDirectory+FilenamePrefix +"_estar_rstar_subplots.png", dpi=300)
    plt.clf()
def PlotDataAgainstErosionRate(DataDirectory, FilenamePrefix, PlotDirectory):
    """
    Make plots of the hillslope data (R*, E*) against cosmogenic erosion rate.
    This only works if you have used the function PlotHillslopeDataWithBasins
    to generate the correct csv file first. I wrote it for the MTJ analysis.

    Args:
        DataDirectory (str): the data directory (kept for API consistency
            with the other plotting functions; not used directly here)
        FilenamePrefix (str): the file name prefix
        PlotDirectory (str): directory holding the input csv and receiving
            the output png

    FJC 11/10/18
    """
    df = pd.read_csv(PlotDirectory+FilenamePrefix+'_basin_hillslope_data.csv')

    # set up the figure
    fig, ax = plt.subplots(nrows=1, ncols=3, figsize=(12,5))
    # make a big subplot to allow sharing of axis labels
    fig.add_subplot(111, frameon=False)
    # hide ticks and tick labels of the big axes. Use booleans: the old
    # 'off' strings are rejected by matplotlib >= 2.x.
    plt.tick_params(labelcolor='none', top=False, bottom=False, left=False, right=False)

    # choose colormap
    ColourMap = cm.viridis

    # get the basins, and one shared colour normalisation so every panel
    # maps basin id -> colour identically
    basins = df['basin_keys'].unique()
    print(basins)
    norm = colors.Normalize(vmin=basins.min(), vmax=basins.max())

    # panel 0: R* against erosion rate (Al-derived rate and its minimum estimate)
    sc = ax[0].scatter(df.Erosion_rate_Al,df.Rstar_median,c=basins,s=50, edgecolors='k', zorder=100, norm=norm)
    sc = ax[0].scatter(df.Erosion_rate_Al_min,df.Rstar_median,c=basins,s=50, edgecolors='k', zorder=100, norm=norm)
    ax[0].errorbar(df.Erosion_rate_Al,df.Rstar_median,xerr=df['Al_error'], yerr=[df['Rstar_lower_err'], df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    ax[0].errorbar(df.Erosion_rate_Al_min, df.Rstar_median, yerr=[df['Rstar_lower_err'], df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')

    # panel 1: E* against erosion rate
    sc = ax[1].scatter(df.Erosion_rate_Al,df.Estar_median,c=basins,s=50, edgecolors='k', zorder=100, norm=norm)
    sc = ax[1].scatter(df.Erosion_rate_Al_min,df.Estar_median,c=basins,s=50, edgecolors='k', zorder=100, norm=norm)
    ax[1].errorbar(df.Erosion_rate_Al,df.Estar_median,xerr=df['Al_error'], yerr=[df['Estar_lower_err'], df['Estar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    ax[1].errorbar(df.Erosion_rate_Al_min, df.Estar_median, yerr=[df['Estar_lower_err'], df['Estar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')

    # panel 2: E* vs R*, restricted to basins that have an erosion rate.
    # NOTE(review): c=newbasins assumes one csv row per basin so the colour
    # array matches the number of points -- verify against the csv writer.
    new_df = df[df.Erosion_rate_Al.notna()]
    newbasins = new_df.basin_keys.unique()
    print(new_df.Erosion_rate_Al)
    sc = ax[2].scatter(new_df.Estar_median,new_df.Rstar_median,c=newbasins,s=50, edgecolors='k', zorder=100, norm=norm)
    ax[2].errorbar(new_df.Estar_median, new_df.Rstar_median, xerr=[new_df['Estar_lower_err'], new_df['Estar_upper_err']], yerr=[new_df['Rstar_lower_err'], new_df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    new_df = df[df.Erosion_rate_Al_min.notna()]
    newbasins = new_df.basin_keys.unique()
    sc = ax[2].scatter(new_df.Estar_median,new_df.Rstar_median,c=newbasins,s=50, edgecolors='k', zorder=100, norm=norm)
    ax[2].errorbar(new_df.Estar_median, new_df.Rstar_median, xerr=[new_df['Estar_lower_err'], new_df['Estar_upper_err']], yerr=[new_df['Rstar_lower_err'], new_df['Rstar_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')

    # Finalise the figure. Raw strings so the LaTeX '\:' spacing commands are
    # not treated as (invalid) Python escape sequences.
    ax[0].set_xlabel('Erosion rate (mm/yr)')
    ax[0].set_ylabel(r'$R^*=S/S_C$')
    ax[1].set_xlabel('Erosion rate (mm/yr)')
    ax[1].set_ylabel(r'$E^*={{-2\:C_{HT}\:L_H}/{S_C}}$')
    ax[2].set_xlabel(r'$E^*={{-2\:C_{HT}\:L_H}/{S_C}}$')
    ax[2].set_ylabel(r'$R^*=S/S_C$')

    # add colour bar in its own axes on the right-hand side
    m = cm.ScalarMappable(cmap=ColourMap)
    m.set_array(basins)
    cax = fig.add_axes([0.91,0.1,0.02,0.8])
    cbar = fig.colorbar(m, cax=cax)
    tick_locator = ticker.MaxNLocator(nbins=5)
    cbar.locator = tick_locator
    cbar.update_ticks()
    cbar.set_label('Basin ID')
    plt.subplots_adjust(left=0.1, right=0.9, bottom=0.1, top=0.9, wspace=0.3)

    # save output
    plt.savefig(PlotDirectory+FilenamePrefix +"_hs_data_erosion_rate.png", dpi=300)
    plt.clf()
def Make3DHillslopePlot(DataDirectory, FilenamePrefix, PlotDirectory):
    """
    Function to make a 3d plot of E, R* and E* for a series of basins.

    Reads <PlotDirectory>/<FilenamePrefix>_basin_hillslope_data.csv, plots
    erosion rate (x) vs E* (y) vs R* (z) with per-point error bars, projects
    faded copies of the points onto the three axis planes, and saves the
    figure as <FilenamePrefix>_hs_data_3d.png in PlotDirectory.
    DataDirectory is unused here (kept for API consistency).
    """
    df = pd.read_csv(PlotDirectory+FilenamePrefix+'_basin_hillslope_data.csv')
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    #choose colormap
    ColourMap = cm.viridis
    # get the basins
    basins = df['basin_keys'].unique()
    NoBasins = len(basins)
    print(basins)
    # shared colour normalisation so basin id -> colour is consistent
    norm = colors.Normalize(vmin=basins.min(), vmax=basins.max())
    # plot the errorbars. this is annoying in 3d: mplot3d has no errorbar
    # support, so draw each error bar as a short line segment per point.
    # NOTE(review): df.X[i] is label-based access -- this assumes the csv was
    # read with a default RangeIndex; .iloc would be positional-safe.
    for i in range(len(df.Erosion_rate_Al)):
        # x error
        ax.plot([df.Erosion_rate_Al[i] + df.Al_error[i], df.Erosion_rate_Al[i] - df.Al_error[i]], [df.Estar_median[i], df.Estar_median[i]], [df.Rstar_median[i], df.Rstar_median[i]], c='0.4', zorder=-1)
        # y error
        ax.plot([df.Erosion_rate_Al[i], df.Erosion_rate_Al[i]], [df.Estar_median[i]+df.Estar_upper_err[i], df.Estar_median[i]-df.Estar_lower_err[i]], [df.Rstar_median[i], df.Rstar_median[i]], c='0.4', zorder=-1)
        # z error
        ax.plot([df.Erosion_rate_Al[i], df.Erosion_rate_Al[i]], [df.Estar_median[i], df.Estar_median[i]], [df.Rstar_median[i]+df.Rstar_upper_err[i], df.Rstar_median[i]-df.Rstar_lower_err[i]], c='0.4', zorder=-1)
    # same again for the minimum erosion-rate estimate (no x error available)
    for i in range(len(df.Erosion_rate_Al_min)):
        # y error
        ax.plot([df.Erosion_rate_Al_min[i], df.Erosion_rate_Al_min[i]], [df.Estar_median[i]+df.Estar_upper_err[i], df.Estar_median[i]-df.Estar_lower_err[i]], [df.Rstar_median[i], df.Rstar_median[i]], c='0.4', zorder=-1)
        # z error
        ax.plot([df.Erosion_rate_Al_min[i], df.Erosion_rate_Al_min[i]], [df.Estar_median[i], df.Estar_median[i]], [df.Rstar_median[i]+df.Rstar_upper_err[i], df.Rstar_median[i]-df.Rstar_lower_err[i]], c='0.4', zorder=-1)
    # plot the data
    ax.scatter(df.Erosion_rate_Al, df.Estar_median, df.Rstar_median, c=basins, alpha=1, edgecolors='k', s=50, zorder=1, norm=norm)
    ax.scatter(df.Erosion_rate_Al_min, df.Estar_median, df.Rstar_median, c=basins, alpha=1, edgecolors='k', s=50, zorder=1, norm=norm)
    # flattened coordinates used to project shadow copies of the points onto
    # the far y plane, the bottom z plane and the near x plane
    yflat = np.full_like(df.Estar_median, max(ax.get_ylim()))
    zflat = np.full_like(df.Rstar_median, min(ax.get_zlim()))
    new_df = df[np.isnan(df.Erosion_rate_Al) == False]
    newbasins = new_df.basin_keys.unique()
    xflat = np.full_like(new_df.Erosion_rate_Al, min(ax.get_xlim()))
    ax.scatter(xflat, new_df.Estar_median, new_df.Rstar_median,c=newbasins, alpha=0.2, edgecolors='k', s=50, zorder=1, norm=norm)
    new_df = df[np.isnan(df.Erosion_rate_Al_min) == False]
    newbasins = new_df.basin_keys.unique()
    xflat = np.full_like(new_df.Erosion_rate_Al_min, min(ax.get_xlim()))
    ax.scatter(xflat, new_df.Estar_median, new_df.Rstar_median,c=newbasins, alpha=0.2, edgecolors='k', s=50, zorder=-2, norm=norm)
    #ax.scatter(x2flat, df.Estar_median, df.Rstar_median,c=basins, alpha=0.5, edgecolors='k', s=50, zorder=1, norm=norm)
    ax.scatter(df.Erosion_rate_Al, yflat, df.Rstar_median,c=basins, alpha=0.2, edgecolors='k', s=50, zorder=-2, norm=norm)
    ax.scatter(df.Erosion_rate_Al, df.Estar_median, zflat,c=basins, alpha=0.2, edgecolors='k', s=50, zorder=-2, norm=norm)
    ax.scatter(df.Erosion_rate_Al_min, yflat, df.Rstar_median,c=basins, alpha=0.2, edgecolors='k', s=50, zorder=-2, norm=norm)
    ax.scatter(df.Erosion_rate_Al_min, df.Estar_median, zflat,c=basins, alpha=0.2, edgecolors='k', s=50, zorder=-2, norm=norm)
    # ax.plot([fx[i]+xerror[i], fx[i]-xerror[i]], [fy[i], fy[i]], [fz[i], fz[i]], marker="_")
    # ax.plot([fx[i], fx[i]], [fy[i]+yerror[i], fy[i]-yerror[i]], [fz[i], fz[i]], marker="_")
    # ax.plot([fx[i], fx[i]], [fy[i], fy[i]], [fz[i]+zerror[i], fz[i]-zerror[i]], marker="_")
    # ax.errorbar(df.Erosion_rate_Al, df.Rstar_median, df.Estar_median, xerr=df['Al_error'], yerr=[df['Rstar_lower_err'], df['Rstar_upper_err']], zerr=[df['Estar_lower_err'], df['Estar_upper_err']], fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    # add colour bar
    m = cm.ScalarMappable(cmap=ColourMap)
    m.set_array(basins)
    cax = fig.add_axes([0.85,0.15,0.02,0.65])
    cbar = fig.colorbar(m, cax=cax)
    tick_locator = ticker.MaxNLocator(nbins=5)
    cbar.locator = tick_locator
    cbar.update_ticks()
    cbar.set_label('Basin ID')
    ax.set_xlabel('Erosion rate (mm/yr)')
    ax.set_ylabel('$E*$')
    ax.set_zlabel('$R*$')
    # NOTE(review): tick.label was deprecated/removed in newer matplotlib
    # (use tick.label1); fine on older versions this code targets.
    zed = [tick.label.set_fontsize(8) for tick in ax.xaxis.get_major_ticks()]
    zed = [tick.label.set_fontsize(8) for tick in ax.yaxis.get_major_ticks()]
    zed = [tick.label.set_fontsize(8) for tick in ax.zaxis.get_major_ticks()]
    plt.subplots_adjust(left=0.05, right=0.8, bottom=0.1, top=0.9)
    # make the grid lines dashed -- _axinfo is a private mplot3d API and may
    # break across matplotlib versions
    ax.xaxis._axinfo["grid"]['linestyle'] = ":"
    ax.yaxis._axinfo["grid"]['linestyle'] = ":"
    ax.zaxis._axinfo["grid"]['linestyle'] = ":"
    #save output
    plt.savefig(PlotDirectory+FilenamePrefix +"_hs_data_3d.png", dpi=300)
    plt.clf()
def PlotHillslopeLengthDistribution(DataDirectory, FilenamePrefix, PlotDirectory, basin_keys=[], basin_labels=[]):
    """
    Function to plot the full distribution of hillslope lengths for specified basins.

    Args:
        DataDirectory (str): the data directory
        FilenamePrefix (str): the file name prefix
        PlotDirectory (str): The directory into which the plots are saved
        basin_keys (list): the keys of the basins you want to plot
        basin_labels (list): some labels for the basins (must be at least as
            long as basin_keys)

    FJC
    """
    # load the hillslopes data
    HillslopeData = ReadHillslopeData(DataDirectory, FilenamePrefix)
    print(basin_keys)

    basin_dict = MapBasinKeysToJunctions(DataDirectory,FilenamePrefix)
    print(basin_dict)

    fig, axes = plt.subplots(nrows = 1, ncols= len(basin_keys), figsize=(12,5), sharey=False)
    # plt.subplots returns a bare Axes (not an array) when ncols == 1, which
    # would break axes[i] below -- normalise to a 1-d array.
    axes = np.atleast_1d(axes)
    print(basin_keys)

    # get hillslope data for each basin in the list of keys
    for i, key in enumerate(basin_keys):
        jn = basin_dict[int(key)]
        BasinHillslopeData = HillslopeData[HillslopeData.BasinID == jn]
        n = len(BasinHillslopeData.hilltop_id)
        hist = BasinHillslopeData.hist(column='Lh', bins=50, ax=axes[i], grid=False)
        axes[i].set_title(basin_labels[i], fontsize=14)
        axes[i].text(0.72,0.92,'n = '+str(n),transform=axes[i].transAxes, fontsize=12, bbox=dict(facecolor='white', alpha=0.5))
        axes[i].set_xlabel("Hillslope length (m)", fontsize=14)
        axes[i].set_xlim(0,800)
        # mark the median hillslope length and report the 16th/84th percentiles.
        # Use .values: Series.as_matrix() was removed in pandas 1.0.
        median = BasinHillslopeData.Lh.median()
        print("basin key: ", key)
        print("median hillslope length: ", median)
        print("16th per: ", np.percentile(BasinHillslopeData.Lh.values, 16))
        print("84th per: ", np.percentile(BasinHillslopeData.Lh.values, 84))
        axes[i].axvline(x=median, c='k')

    axes[0].set_ylabel("Count",fontsize=14)

    #save output
    plt.savefig(PlotDirectory+FilenamePrefix +"_LH_dist.png", dpi=300)
    plt.clf()
def PlotDdLh(DataDirectory, FilenamePrefix, PlotDirectory):
    """
    Plot drainage density vs hillslope length, coloured by basin, with a
    log-log linear regression (power-law fit) annotated on the figure.

    Args:
        DataDirectory (str): the data directory (unused here; kept for API
            consistency with the other plotting functions)
        FilenamePrefix (str): the file name prefix
        PlotDirectory (str): directory holding the input csv and receiving
            the output png

    FJC
    """
    df = pd.read_csv(PlotDirectory+FilenamePrefix+'_basin_hillslope_data.csv')

    #choose colormap
    ColourMap = cm.RdYlBu

    # get the basins
    basins = df['basin_keys'].unique()

    # set up the figure
    fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(6,5))

    plt.scatter(df['Lh_median'], df['dd_median'], c=basins, cmap=ColourMap, edgecolors='k', zorder=100, s=50)
    plt.errorbar(df.Lh_median,df.dd_median,xerr=[df['Lh_lower_err'], df['Lh_upper_err']], yerr=[df['dd_lower_err'], df['dd_upper_err']],fmt='o', zorder=1, ecolor='0.5',markersize=1,mfc='white',mec='k')
    plt.xlabel('Hillslope length (m)')
    plt.ylabel('Drainage density (m/km$^2$)')

    # power-law fit: regress log10(Dd) on log10(Lh), so the slope is the
    # exponent and 10**intercept the coefficient
    slope, intercept, r_value, p_value, std_err = stats.linregress(np.log10(df['Lh_median']),np.log10(df['dd_median']))
    print(slope, 10**intercept, r_value, p_value)
    # BUG FIX: the old format string '{0:.{0}f}' fed the float coefficient
    # back in as the precision field, which raises ValueError at runtime.
    # Use a fixed 2 d.p. for both coefficient and exponent.
    ax.text(0.5,0.9, '$D_d$ = {0:.2f}$L_H^{{{1:.2f}}}$'.format(10**intercept, slope)+'\n p = '+str(round(p_value, 2)), transform=ax.transAxes)

    # add colour bar
    m = cm.ScalarMappable(cmap=ColourMap)
    m.set_array(basins)
    cbar = plt.colorbar(m)
    cbar.ax.invert_yaxis()
    cbar.set_label('Basin ID')

    #save output
    plt.subplots_adjust(left=0.2)
    plt.savefig(PlotDirectory+FilenamePrefix +"_dd_lh.png", dpi=300)
    plt.clf()
def PlotHillslopeTraces(DataDirectory, FilenamePrefix, PlotDirectory, CustomExtent=[-9999],FigSizeFormat="epsl"):
"""
Function to plot a hillshade image with hilltops, hillslope traces and the channel network superimposed.
Args:
DataDirectory (str): the | |
# Repository: iTecAI/SPEAR
from asyncio.streams import StreamWriter
from socket import *
import asyncio
from cryptography.fernet import Fernet, InvalidToken
import rsa
import hashlib
import random
import time
import functools
import threading
import base64
import json
from concurrent.futures import ThreadPoolExecutor
import pickle
import traceback
import inspect
def check_port(port):
    """Return True if something on localhost is accepting TCP connections on *port*."""
    probe = socket(AF_INET, SOCK_STREAM)
    try:
        # connect_ex returns 0 on success instead of raising
        return probe.connect_ex(('127.0.0.1', port)) == 0
    finally:
        probe.close()
def free_port():
    """Ask the OS for a currently-unused TCP port number and return it."""
    probe = socket(AF_INET, SOCK_STREAM)
    try:
        # binding to port 0 makes the kernel pick a free ephemeral port
        probe.bind(('0.0.0.0', 0))
        probe.listen(5)
        return probe.getsockname()[1]
    finally:
        probe.close()
def ip():
    """Best-guess IPv4 address of this host, via DNS/hosts lookup of the hostname."""
    local_hostname = gethostname()
    return gethostbyname(local_hostname)
class Peer:
    """Client-side handle for sending commands to one or more peers known to
    a Spear node.

    target='*' selects every known peer (local entries win over remote ones
    with the same id); a list selects several; anything else selects a single
    peer by name or id via Spear.find_peer.
    """
    def __init__(self, originator, target='*', thread_count=None, timeout=1e99):
        # originator: the local Spear node used for keys, relays and responses
        # thread_count: max workers for fan-out in command(); None = default
        # timeout: seconds to wait for each peer's reply
        self.originator = originator
        if target == '*':
            self.peers = {}
            for p in originator.peers['local'].keys():
                self.peers[p] = originator.peers['local'][p]
            for p in originator.peers['remote'].keys():
                if not p in self.peers.keys():
                    self.peers[p] = originator.peers['remote'][p]
        elif type(target) == list:
            self.peers = {x['id']: x for x in [
                originator.find_peer(i) for i in target]}
        else:
            self.peers = {}
            peer = originator.find_peer(target)
            self.peers[peer['id']] = peer
        self.thread_count = thread_count
        self.timeout = timeout
    def command_one(self, target, path, args=[], kwargs={}):
        """Send one command to peer *target* and return its decoded response.

        Packet format: urlsafe-b64(RSA(session key)) + '§' +
        urlsafe-b64(Fernet(session key, b64(json payload))), optionally
        wrapped in the shared network Fernet. Local peers are contacted
        directly over TCP; remote peers go through a randomly chosen relay
        and the reply arrives asynchronously in originator.responses.
        Returns None on relay connect timeout, 'error' on a relay-side error.
        """
        try:
            peer = self.peers[target]
            # fresh symmetric key per command; only its RSA-encrypted copy
            # travels on the wire
            temp_key = Fernet.generate_key()
            if peer['type'] == 'local':
                packet_raw = {
                    'originator': self.originator.id,
                    'originator_name': self.originator.name,
                    'originator_key': base64.urlsafe_b64encode(self.originator.public.save_pkcs1()).decode('utf-8'),
                    'originator_type': 'local',
                    'originator_ip': [ip(), self.originator.service_port],
                    'target': target,
                    'path': path,
                    'args': [base64.urlsafe_b64encode(pickle.dumps(arg)).decode('utf-8') for arg in args],
                    'kwargs': {key: base64.urlsafe_b64encode(pickle.dumps(kwargs[key])).decode('utf-8') for key in kwargs.keys()}
                }
                encoded_packet = base64.urlsafe_b64encode(
                    json.dumps(packet_raw).encode('utf-8'))
                encrypted_packet = base64.urlsafe_b64encode(
                    Fernet(temp_key).encrypt(encoded_packet))
                encrypted_key = base64.urlsafe_b64encode(
                    rsa.encrypt(temp_key, peer['public_key']))
                assembled_packet = encrypted_key + \
                    '§'.encode('utf-8') + encrypted_packet
                if self.originator.network_encryption:
                    assembled_packet = base64.urlsafe_b64encode(
                        self.originator.network_encryption.encrypt(assembled_packet))
                temp_socket = create_connection(
                    (peer['address'].split(':')[0], int(peer['address'].split(':')[1])))
                temp_socket.sendall(assembled_packet+b'\n')
                # read until the peer closes the connection or we time out
                response = b''
                start = time.time()
                while True and time.time() < start + self.timeout:
                    data = temp_socket.recv(1024)
                    if len(data) == 0:
                        break
                    response += data.strip(b'\n')
                if time.time() >= start + self.timeout:
                    raise TimeoutError
                # unwrap in reverse order of assembly
                if self.originator.network_encryption:
                    response = self.originator.network_encryption.decrypt(base64.urlsafe_b64decode(response))
                response = base64.urlsafe_b64decode(response)
                response = Fernet(temp_key).decrypt(response)
                response = base64.urlsafe_b64decode(response).decode('utf-8')
                response = json.loads(response)
                return response
            else:
                # remote peer: pick one of its relays at random
                relay = random.choice(peer['relays'])
                if ':' in relay:
                    relay = [relay.split(':')[0], int(relay.split(':')[1])]
                else:
                    relay = [relay, 2201]
                relay_str = relay[0] + ':' + str(relay[1])
                packet_raw = {
                    'type': 'command',
                    'originator': self.originator.id,
                    'originator_name': self.originator.name,
                    'originator_key': base64.urlsafe_b64encode(self.originator.public.save_pkcs1()).decode('utf-8'),
                    'originator_type': 'remote',
                    'originator_relay': relay,
                    'target': target,
                    'path': path,
                    'args': [base64.urlsafe_b64encode(pickle.dumps(arg)).decode('utf-8') for arg in args],
                    'kwargs': {key: base64.urlsafe_b64encode(pickle.dumps(kwargs[key])).decode('utf-8') for key in kwargs.keys()}
                }
                encoded_packet = base64.urlsafe_b64encode(
                    json.dumps(packet_raw).encode('utf-8'))
                encrypted_packet = base64.urlsafe_b64encode(
                    Fernet(temp_key).encrypt(encoded_packet))
                encrypted_key = base64.urlsafe_b64encode(
                    rsa.encrypt(temp_key, peer['public_key']))
                assembled_packet = encrypted_key + \
                    '§'.encode('utf-8') + encrypted_packet
                if self.originator.network_encryption:
                    assembled_packet = base64.urlsafe_b64encode(
                        self.originator.network_encryption.encrypt(assembled_packet))
                # random id used to match the asynchronous reply in
                # originator.responses
                block_id = hashlib.sha256(str(time.time() + random.uniform(-1,1)).encode('utf-8')).hexdigest()
                try:
                    sock = create_connection(relay, timeout=2)
                except TimeoutError:
                    # mark the relay as unverified so it gets re-handshaked
                    self.originator.relays[relay_str]['public_key'] = None
                    return
                packet = json.dumps({
                    'originator': self.originator.id,
                    'target': target,
                    'network': self.originator.network_name,
                    'id': block_id,
                    'data': assembled_packet.decode('utf-8')
                })
                packet = 'CMND:' + packet
                # a second one-shot Fernet key protects the relay envelope
                tfk = Fernet.generate_key()
                tempfernet = Fernet(tfk)
                enc = rsa.encrypt(tfk, self.originator.relays[relay_str]['public_key'])
                to_send = base64.urlsafe_b64encode(
                    enc)+'§'.encode('utf-8')+base64.urlsafe_b64encode(tempfernet.encrypt(packet.encode('utf-8')))+b'\n'
                sock.sendall(to_send)
                packet_response = ''
                while True:
                    dat = sock.recv(1024)
                    if not dat:
                        break
                    packet_response += dat.decode('utf-8').strip()
                sock.close()
                if packet_response == 'error':
                    self.originator.relays[relay_str]['public_key'] = None
                    return 'error'
                decrypted = tempfernet.decrypt(
                    base64.urlsafe_b64decode(packet_response.encode('utf-8')))
                # NOTE(review): `decrypted` is bytes, so comparing to the str
                # 'error' is always False -- this check is dead code.
                if decrypted == 'error':
                    print('Encryption error')
                # busy-wait (no sleep) for the relay listener thread to drop
                # the reply into originator.responses
                start = time.time()
                while True and time.time() < start + self.timeout:
                    if block_id in self.originator.responses.keys():
                        break
                if time.time() >= start + self.timeout:
                    raise TimeoutError
                response = self.originator.responses[block_id]['data'].encode('utf-8')
                if self.originator.network_encryption:
                    response = self.originator.network_encryption.decrypt(base64.urlsafe_b64decode(response))
                response = base64.urlsafe_b64decode(response)
                response = Fernet(temp_key).decrypt(response)
                response = base64.urlsafe_b64decode(response).decode('utf-8')
                response = json.loads(response)
                del self.originator.responses[block_id]
                return response
        except:
            # NOTE(review): bare except swallows every failure (including the
            # TimeoutErrors raised above) and returns None after printing.
            traceback.print_exc()
    def command(self, path, *args, **kwargs):
        """Fan *path*(*args, **kwargs) out to every selected peer in parallel.

        Returns the unpickled result directly when exactly one peer is
        selected, otherwise a {peer_id: result} dict. Non-200 replies are
        mapped to {'result': 'ERROR', 'status': ..., 'reason': ...}.
        """
        with ThreadPoolExecutor(max_workers=self.thread_count) as executor:
            results = {pid: executor.submit(
                self.command_one, pid, path, args=args, kwargs=kwargs) for pid in self.peers.keys()}
            finals = {r: results[r].result() for r in results.keys()}
            for f in finals.keys():
                if finals[f]['result_status'] == 200:
                    finals[f] = pickle.loads(base64.urlsafe_b64decode(finals[f]['result'].encode('utf-8')))
                else:
                    finals[f] = {
                        'result': 'ERROR',
                        'status': finals[f]['result_status'],
                        'reason': finals[f]['result']
                    }
            if len(finals.keys()) == 1:
                return finals[list(finals.keys())[0]]
            else:
                return finals
class PeerNotFoundError(KeyError):
    """Raised by Spear.find_peer when no known peer matches the given name or id."""
    pass
class SpearResponse:
    """Value an endpoint can return to pair an explicit status code with its
    payload; Spear.process_message unwraps it into (status, data)."""

    def __init__(self, status, data):
        # store both halves verbatim for the caller to unpack
        self.status, self.data = status, data
class Spear: # Base Peer class
def __init__(
self,
network_name,
peer_name,
relays=[],
network_encryption=None,
advertising_port=2200,
port_range=(2300, 23000),
use_remote=True,
use_local=True,
advertise=True
):
self.network_name = network_name
self.name = peer_name
for i in range(len(relays)):
if not ':' in relays[i]:
relays[i] = relays[i] + ':2201'
self.relays = {i: {
'last_reply': time.time(),
'public_key': None
} for i in relays}
self.ad_port = advertising_port
self.id = hashlib.sha256(
str(time.time() + random.uniform(-1, 1)).encode('utf-8')).hexdigest()
while True:
p = random.randint(*port_range)
if not check_port(p):
self.service_port = p
break
if network_encryption == None:
self.network_encryption = False
else:
if type(network_encryption) == str:
self.network_encryption = Fernet(
network_encryption.encode('utf-8'))
else:
self.network_encryption = Fernet(network_encryption)
self.endpoints = {}
self.sockets = {}
(self.public, self.private) = rsa.newkeys(512)
self.running = False
self.peers = {
'local': {},
'remote': {}
}
self.threads = {}
self.responses = {}
self.use_local = use_local
self.use_remote = use_remote
self.advertise = advertise
def find_peer(self, peer_name_or_id):
if peer_name_or_id in self.peers['local'].keys():
return self.peers['local'][peer_name_or_id]
elif peer_name_or_id in self.peers['remote'].keys():
return self.peers['remote'][peer_name_or_id]
for p in self.peers['local'].values():
if p['name'] == peer_name_or_id:
return p
for p in self.peers['remote'].values():
if p['name'] == peer_name_or_id:
return p
raise PeerNotFoundError(
f'Peer with name/ID "{peer_name_or_id}" not found.')
def target(self, path): # Function decorator to specify commands
def dec_target(func):
self.endpoints[path] = func
@functools.wraps(func)
def wrapper_target(*args, **kwargs):
return func(*args, **kwargs)
return wrapper_target
return dec_target
    def run_advertiser(self): # Local UDP advertiser thread
        """Broadcast this node's presence over UDP once per second while
        self.running is True.

        Packet is '§'-joined fields: network name, node id, node name,
        host:port, urlsafe-b64 PKCS1 public key, comma-joined relay list;
        prefixed 'e§' (network-encrypted) or 'd§' (plaintext).
        """
        self.sockets['advertiser'] = socket(AF_INET, SOCK_DGRAM)
        self.sockets['advertiser'].setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self.sockets['advertiser'].bind(('', 0))
        self.sockets['advertiser'].setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
        while self.running:
            raw_packet = '§'.join([str(i) for i in [
                self.network_name,
                self.id,
                self.name,
                ip() + ':' + str(self.service_port),
                base64.urlsafe_b64encode(
                    self.public.save_pkcs1()).decode('utf-8'),
                ','.join(self.relays)
            ]])
            if self.network_encryption:
                finished_packet = ('e§'+base64.urlsafe_b64encode(self.network_encryption.encrypt(
                    raw_packet.encode('utf-8'))).decode('utf-8')+'\n').encode('utf-8')
            else:
                finished_packet = ('d§'+raw_packet+'\n').encode('utf-8')
            self.sockets['advertiser'].sendto(
                finished_packet,
                (
                    '<broadcast>',
                    self.ad_port
                )
            )
            time.sleep(1)
        self.sockets['advertiser'].close()
    def discover_local_loop(self): # Local discovery thread
        """Listen for UDP advertisements (see run_advertiser) and maintain
        self.peers['local'] and self.relays while self.running is True.

        Relays advertised by any packet are adopted; peer records are only
        stored for other nodes on the same network.
        """
        s = socket(AF_INET, SOCK_DGRAM)
        s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        s.bind(('', self.ad_port))
        while self.running:
            data, addr = s.recvfrom(1024)
            data = data.decode('utf-8')
            # advertisements are newline-terminated; drop truncated datagrams
            if not data.endswith('\n'):
                continue
            if data.startswith('e§'):
                # NOTE(review): if this node has no network_encryption set,
                # an encrypted packet here raises AttributeError (False has
                # no .decrypt) -- only InvalidToken is handled.
                try:
                    proc_packet = self.network_encryption.decrypt(base64.urlsafe_b64decode(
                        data.split('e§')[1].strip('\n').encode('utf-8'))).decode('utf-8').split('§')
                except InvalidToken:
                    continue
            else:
                proc_packet = data.split('§', maxsplit=1)[1].strip().split('§')
            # adopt any relays the advertiser knows about (field 5)
            for i in proc_packet[5].split(','):
                if not i in self.relays.keys():
                    if ':' in i:
                        r_ip = i
                    else:
                        r_ip = i + ':2201'
                    self.relays[r_ip] = {
                        'last_reply': time.time(),
                        'public_key': None
                    }
            # ignore our own broadcasts and other networks' packets
            if proc_packet[1] == self.id or proc_packet[0] != self.network_name:
                continue
            proc_packet = {
                'id': proc_packet[1],
                'name': proc_packet[2],
                'network': proc_packet[0],
                'address': proc_packet[3],
                'public_key': rsa.PublicKey.load_pkcs1(base64.urlsafe_b64decode(proc_packet[4].encode('utf-8'))),
                'ping_time': time.time(),
                'type': 'local'
            }
            self.peers['local'][proc_packet['id']] = proc_packet.copy()
    def check_peer_timeouts(self): # Peer timeout thread
        """Once per second, drop local peers not heard from for >2s, prune
        remote peers' relays that have gone silent for >2s, and forget remote
        peers left with no relays. Runs while self.running is True.
        """
        while self.running:
            for k in list(self.peers['local'].keys()):
                if self.peers['local'][k]['ping_time'] + 2 < time.time():
                    del self.peers['local'][k]
            for k in list(self.peers['remote'].keys()):
                # iterate a copy so .remove() below is safe
                for r in self.peers['remote'][k]['relays'][:]:
                    if self.relays[r]['last_reply'] + 2 < time.time():
                        self.peers['remote'][k]['relays'].remove(r)
                        # NOTE(review): if the peer is deleted here while the
                        # copied relay list still has timed-out entries left,
                        # the next .remove() raises KeyError and kills this
                        # thread (no exception handler) -- verify intent.
                        if len(self.peers['remote'][k]['relays']) == 0:
                            del self.peers['remote'][k]
            time.sleep(1)
    def process_message(self, message):
        """Decode one inbound command packet, dispatch it to the registered
        endpoint, and return the encrypted response packet (bytes).

        Unwrap order mirrors Peer.command_one: optional network Fernet layer,
        then RSA-decrypt the per-message Fernet key with our private key,
        then Fernet-decrypt and b64/json-decode the payload. Endpoints whose
        signature declares keyword-only 'node'/'originator' (or **kwargs)
        receive those injected automatically. Result statuses: 200 (or the
        SpearResponse status), 500 on handler exception, 404 unknown path.
        """
        if self.network_encryption:
            message = self.network_encryption.decrypt(
                base64.urlsafe_b64decode(message)).decode('utf-8')
        else:
            message = message.decode('utf-8')
        key, data = message.split('§', maxsplit=1)
        tempfernet = Fernet(rsa.decrypt(
            base64.urlsafe_b64decode(key.encode('utf-8')), self.private))
        data = json.loads(base64.urlsafe_b64decode(tempfernet.decrypt(
            base64.urlsafe_b64decode(data.encode('utf-8')))).decode('utf-8'))
        # SECURITY NOTE: pickle.loads on network-supplied data allows
        # arbitrary code execution by anyone able to produce a valid packet
        # (i.e. any peer on the network) -- consider a safe serializer.
        data['args'] = [pickle.loads(base64.urlsafe_b64decode(
            arg.encode('utf-8'))) for arg in data['args']]
        data['kwargs'] = {k: pickle.loads(base64.urlsafe_b64decode(
            data['kwargs'][k].encode('utf-8'))) for k in data['kwargs'].keys()}
        if data['path'] in self.endpoints.keys():
            try:
                # inspect the handler to see whether it wants node/originator
                aspec = inspect.getfullargspec(self.endpoints[data['path']]) # I see you <3
                if 'node' in aspec.kwonlyargs or aspec.varkw:
                    data['kwargs']['node'] = self
                if 'originator' in aspec.kwonlyargs or aspec.varkw:
                    data['kwargs']['originator'] = [data['originator'], data['originator_name']]
                value = self.endpoints[data['path']](*data['args'], **data['kwargs'])
                if type(value) == SpearResponse:
                    status = value.status
                    value = value.data
                else:
                    status = 200
                return_data = {
                    'type': 'response',
                    'originator': self.id,
                    'originator_name': self.name,
                    'originator_key': base64.urlsafe_b64encode(self.public.save_pkcs1()).decode('utf-8'),
                    'target': data['originator'],
                    'result_status': status,
                    'result': base64.urlsafe_b64encode(pickle.dumps(value)).decode('utf-8')
                }
            except:
                # any handler failure becomes a 500 carrying the traceback
                return_data = {
                    'type': 'response',
                    'originator': self.id,
                    'originator_name': self.name,
                    'originator_key': base64.urlsafe_b64encode(self.public.save_pkcs1()).decode('utf-8'),
                    'target': data['originator'],
                    'result_status': 500,
                    'result': f'Remote function encountered an unexpected error: {traceback.format_exc()}'
                }
        else:
            return_data = {
                'type': 'response',
                'originator': self.id,
                'originator_name': self.name,
                'originator_key': base64.urlsafe_b64encode(self.public.save_pkcs1()).decode('utf-8'),
                'target': data['originator'],
                'result_status': 404,
                'result': f'Path "{data["path"]}" not found.'
            }
        # re-wrap the response with the same per-message key (and optional
        # network layer) so only the original sender can read it
        encoded_response = base64.urlsafe_b64encode(json.dumps(return_data).encode('utf-8'))
        encrypted_response = tempfernet.encrypt(encoded_response)
        packed_response = base64.urlsafe_b64encode(encrypted_response)
        if self.network_encryption:
            packed_response = base64.urlsafe_b64encode(self.network_encryption.encrypt(packed_response))
        return packed_response
def check_one_relay(self, relay): # Function to check individual relays
if not relay in self.relays.keys():
return
if self.relays[relay]['last_reply'] + 2 < time.time():
self.relays[relay]['public_key'] = None
if ':' in relay:
host = relay.split(':')[0]
port = int(relay.split(':')[1])
else:
host = relay
port = 2201
if self.relays[relay]['public_key'] == None:
try:
sock = create_connection((host, port), timeout=2)
except TimeoutError:
self.relays[relay]['public_key'] = None
return
sock.sendall(b'RSAREQUEST\n')
while True:
dat = sock.recv(1024)
if not dat:
break
dat = dat.strip()
try:
self.relays[relay]['public_key'] = rsa.PublicKey.load_pkcs1(
base64.urlsafe_b64decode(dat))
break
except:
pass
sock.close()
try:
sock = create_connection((host, port), timeout=2)
except TimeoutError:
self.relays[relay]['public_key'] = None
return
packet = json.dumps({
'peer_id': self.id,
'peer_name': self.name,
'network': self.network_name,
'public_key': base64.urlsafe_b64encode(self.public.save_pkcs1()).decode('utf-8'),
'relays': list(self.relays.keys()),
'advertise': self.advertise
})
packet = 'PING:' + packet
tfk = Fernet.generate_key()
tempfernet = Fernet(tfk)
enc = rsa.encrypt(tfk, self.relays[relay]['public_key'])
to_send = base64.urlsafe_b64encode(
enc)+'§'.encode('utf-8')+base64.urlsafe_b64encode(tempfernet.encrypt(packet.encode('utf-8')))+b'\n'
sock.sendall(to_send)
packet_response = ''
while True:
dat = sock.recv(1024)
if not dat:
break
packet_response += dat.decode('utf-8').strip()
| |
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = TalkException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
self.validate()
def write(self, oprot):
self.validate()
oprot.writeStructBegin('getMid_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(make_hashable(self.success))
value = (value * 31) ^ hash(make_hashable(self.e))
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getUserCountryForBilling_args(object):
    """
    Thrift-generated argument struct for the getUserCountryForBilling RPC.

    Attributes:
     - country: country code string (Thrift field id 2)
     - remoteIp: caller's IP address string (Thrift field id 3)
    """

    def __init__(self, country=None, remoteIp=None):
        self.country = country
        self.remoteIp = remoteIp

    def read(self, iprot):
        # Deserialize from the input protocol; unknown fields are skipped
        # (standard Thrift forward-compatibility behaviour).
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 2:
                if ftype == TType.STRING:
                    self.country = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.remoteIp = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        # Serialize to the output protocol; fields that are None are omitted.
        self.validate()
        oprot.writeStructBegin('getUserCountryForBilling_args')
        if self.country is not None:
            oprot.writeFieldBegin('country', TType.STRING, 2)
            oprot.writeString(self.country)
            oprot.writeFieldEnd()
        if self.remoteIp is not None:
            oprot.writeFieldBegin('remoteIp', TType.STRING, 3)
            oprot.writeString(self.remoteIp)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so nothing to check.
        return

    def __hash__(self):
        # Generator-style 31-based value hash over the fields.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.country))
        value = (value * 31) ^ hash(make_hashable(self.remoteIp))
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getUserCountryForBilling_result(object):
    """
    Thrift-generated result struct for the getUserCountryForBilling RPC.

    Attributes:
     - success: country string returned by the service (field id 0)
     - e: TalkException raised by the service, if any (field id 1)
    """

    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Deserialize from the input protocol; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRING:
                    self.success = iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TalkException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        # Serialize to the output protocol; None fields are omitted.
        self.validate()
        oprot.writeStructBegin('getUserCountryForBilling_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRING, 0)
            oprot.writeString(self.success)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so nothing to check.
        return

    def __hash__(self):
        # Generator-style 31-based value hash over the fields.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getFavoriteGroupIdsForChannel_args(object):
    """
    Thrift-generated argument struct for getFavoriteGroupIdsForChannel.
    The RPC takes no arguments, so this struct has no fields.
    """

    def read(self, iprot):
        # Deserialize: consume (and skip) everything up to the STOP marker.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        # Serialize: an empty struct is just begin/stop/end.
        self.validate()
        oprot.writeStructBegin('getFavoriteGroupIdsForChannel_args')
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No fields, so nothing to check.
        return

    def __hash__(self):
        # Constant hash: all instances are equal.
        value = 17
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getFavoriteGroupIdsForChannel_result(object):
    """
    Thrift-generated result struct for getFavoriteGroupIdsForChannel.

    Attributes:
     - success: list of group id strings (field id 0)
     - e: TalkException raised by the service, if any (field id 1)
    """

    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e

    def read(self, iprot):
        # Deserialize from the input protocol; unknown fields are skipped.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.LIST:
                    # readListBegin returns (element_type, size); only the
                    # size is used here (generator-named temp variables).
                    self.success = []
                    (_, elem999) = iprot.readListBegin()
                    for _ in range(elem999):
                        elem1000 = iprot.readString()
                        self.success.append(elem1000)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.e = TalkException()
                    self.e.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        # Serialize to the output protocol; None fields are omitted.
        self.validate()
        oprot.writeStructBegin('getFavoriteGroupIdsForChannel_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRING, len(self.success))
            for elem1001 in self.success:
                oprot.writeString(elem1001)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so nothing to check.
        return

    def __hash__(self):
        # Generator-style 31-based value hash over the fields.
        value = 17
        value = (value * 31) ^ hash(make_hashable(self.success))
        value = (value * 31) ^ hash(make_hashable(self.e))
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getPaidCallHistory_args(object):
    """Thrift argument struct for getPaidCallHistory.

    Attributes:
     - start: paging offset (wire field 2, i64)
     - size: page size (wire field 3, i32)
     - language: language code (wire field 4, string)
    """

    def __init__(self, start=None, size=None, language=None):
        self.start = start
        self.size = size
        self.language = language

    def read(self, iprot):
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            # Dispatch on (field id, wire type); anything else is skipped.
            if fid == 2 and ftype == TType.I64:
                self.start = iprot.readI64()
            elif fid == 3 and ftype == TType.I32:
                self.size = iprot.readI32()
            elif fid == 4 and ftype == TType.STRING:
                self.language = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        # Only non-None (optional) fields are serialized.
        self.validate()
        oprot.writeStructBegin('getPaidCallHistory_args')
        if self.start is not None:
            oprot.writeFieldBegin('start', TType.I64, 2)
            oprot.writeI64(self.start)
            oprot.writeFieldEnd()
        if self.size is not None:
            oprot.writeFieldBegin('size', TType.I32, 3)
            oprot.writeI32(self.size)
            oprot.writeFieldEnd()
        if self.language is not None:
            oprot.writeFieldBegin('language', TType.STRING, 4)
            oprot.writeString(self.language)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        for field in (self.start, self.size, self.language):
            value = (value * 31) ^ hash(make_hashable(field))
        return value

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getPaidCallHistory_result(object):
    """Thrift result struct for getPaidCallHistory.

    Attributes:
     - success: PaidCallHistoryResult payload (wire field 0, struct)
     - e: TalkException raised by the server (wire field 1, struct)
    """

    def __init__(self, success=None, e=None):
        self.success = success
        self.e = e

    def read(self, iprot):
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.STRUCT:
                self.success = PaidCallHistoryResult()
                self.success.read(iprot)
            elif fid == 1 and ftype == TType.STRUCT:
                self.e = TalkException()
                self.e.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('getPaidCallHistory_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        for field in (self.success, self.e):
            value = (value * 31) ^ hash(make_hashable(field))
        return value

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class sendPinCodeOperation_args(object):
    """Thrift argument struct for sendPinCodeOperation.

    Attributes:
     - verifier: verification token (wire field 1, string)
    """

    def __init__(self, verifier=None):
        self.verifier = verifier

    def read(self, iprot):
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRING:
                self.verifier = iprot.readString()
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('sendPinCodeOperation_args')
        if self.verifier is not None:
            oprot.writeFieldBegin('verifier', TType.STRING, 1)
            oprot.writeString(self.verifier)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        return (17 * 31) ^ hash(make_hashable(self.verifier))

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class sendPinCodeOperation_result(object):
    """Thrift result struct for sendPinCodeOperation (void call, exception only).

    Attributes:
     - e: TalkException raised by the server (wire field 1, struct)
    """

    def __init__(self, e=None):
        self.e = e

    def read(self, iprot):
        iprot.readStructBegin()
        while True:
            fname, ftype, fid = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1 and ftype == TType.STRUCT:
                self.e = TalkException()
                self.e.read(iprot)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
        self.validate()

    def write(self, oprot):
        self.validate()
        oprot.writeStructBegin('sendPinCodeOperation_result')
        if self.e is not None:
            oprot.writeFieldBegin('e', TType.STRUCT, 1)
            self.e.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        return (17 * 31) ^ hash(make_hashable(self.e))

    def __repr__(self):
        attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
        return '%s(%s)' % (self.__class__.__name__, attrs)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class inviteIntoGroupCall_args(object):
"""
Attributes:
- chatMid
- memberMids
- mediaType
"""
def __init__(self, chatMid=None, memberMids=None, mediaType=None):
    # chatMid: target chat id; memberMids: members to invite;
    # mediaType: call media type. All fields are optional on the wire.
    self.chatMid = chatMid
    self.memberMids = memberMids
    self.mediaType = mediaType
def read(self, iprot):
iprot.readStructBegin()
| |
self.health_errors = None
class MasterTargetServer(msrest.serialization.Model):
    """Details of a Master Target Server.

    :param id: The server Id.
    :type id: str
    :param ip_address: The IP address of the server.
    :type ip_address: str
    :param name: The server name.
    :type name: str
    :param os_type: The OS type of the server.
    :type os_type: str
    :param agent_version: The version of the scout component on the server.
    :type agent_version: str
    :param last_heartbeat: The last heartbeat received from the server.
    :type last_heartbeat: ~datetime.datetime
    :param version_status: Version status.
    :type version_status: str
    :param retention_volumes: The retention volumes of Master target Server.
    :type retention_volumes: list[~azure.mgmt.recoveryservicessiterecovery.models.RetentionVolume]
    :param data_stores: The list of data stores in the fabric.
    :type data_stores: list[~azure.mgmt.recoveryservicessiterecovery.models.DataStore]
    :param validation_errors: Validation errors.
    :type validation_errors: list[~azure.mgmt.recoveryservicessiterecovery.models.HealthError]
    :param health_errors: Health errors.
    :type health_errors: list[~azure.mgmt.recoveryservicessiterecovery.models.HealthError]
    :param disk_count: Disk count of the master target.
    :type disk_count: int
    :param os_version: OS Version of the master target.
    :type os_version: str
    :param agent_expiry_date: Agent expiry date.
    :type agent_expiry_date: ~datetime.datetime
    :param mars_agent_version: MARS agent version.
    :type mars_agent_version: str
    :param mars_agent_expiry_date: MARS agent expiry date.
    :type mars_agent_expiry_date: ~datetime.datetime
    :param agent_version_details: Agent version details.
    :type agent_version_details: ~azure.mgmt.recoveryservicessiterecovery.models.VersionDetails
    :param mars_agent_version_details: Mars agent version details.
    :type mars_agent_version_details:
     ~azure.mgmt.recoveryservicessiterecovery.models.VersionDetails
    """

    # Serialization metadata consumed by the msrest Model base class: maps each
    # Python attribute to its REST payload key and serialized type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'ip_address': {'key': 'ipAddress', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'agent_version': {'key': 'agentVersion', 'type': 'str'},
        'last_heartbeat': {'key': 'lastHeartbeat', 'type': 'iso-8601'},
        'version_status': {'key': 'versionStatus', 'type': 'str'},
        'retention_volumes': {'key': 'retentionVolumes', 'type': '[RetentionVolume]'},
        'data_stores': {'key': 'dataStores', 'type': '[DataStore]'},
        'validation_errors': {'key': 'validationErrors', 'type': '[HealthError]'},
        'health_errors': {'key': 'healthErrors', 'type': '[HealthError]'},
        'disk_count': {'key': 'diskCount', 'type': 'int'},
        'os_version': {'key': 'osVersion', 'type': 'str'},
        'agent_expiry_date': {'key': 'agentExpiryDate', 'type': 'iso-8601'},
        'mars_agent_version': {'key': 'marsAgentVersion', 'type': 'str'},
        'mars_agent_expiry_date': {'key': 'marsAgentExpiryDate', 'type': 'iso-8601'},
        'agent_version_details': {'key': 'agentVersionDetails', 'type': 'VersionDetails'},
        'mars_agent_version_details': {'key': 'marsAgentVersionDetails', 'type': 'VersionDetails'},
    }

    def __init__(
        self,
        *,
        id: Optional[str] = None,
        ip_address: Optional[str] = None,
        name: Optional[str] = None,
        os_type: Optional[str] = None,
        agent_version: Optional[str] = None,
        last_heartbeat: Optional[datetime.datetime] = None,
        version_status: Optional[str] = None,
        retention_volumes: Optional[List["RetentionVolume"]] = None,
        data_stores: Optional[List["DataStore"]] = None,
        validation_errors: Optional[List["HealthError"]] = None,
        health_errors: Optional[List["HealthError"]] = None,
        disk_count: Optional[int] = None,
        os_version: Optional[str] = None,
        agent_expiry_date: Optional[datetime.datetime] = None,
        mars_agent_version: Optional[str] = None,
        mars_agent_expiry_date: Optional[datetime.datetime] = None,
        agent_version_details: Optional["VersionDetails"] = None,
        mars_agent_version_details: Optional["VersionDetails"] = None,
        **kwargs
    ):
        # Pure data container: __init__ only stores the supplied values.
        super(MasterTargetServer, self).__init__(**kwargs)
        self.id = id
        self.ip_address = ip_address
        self.name = name
        self.os_type = os_type
        self.agent_version = agent_version
        self.last_heartbeat = last_heartbeat
        self.version_status = version_status
        self.retention_volumes = retention_volumes
        self.data_stores = data_stores
        self.validation_errors = validation_errors
        self.health_errors = health_errors
        self.disk_count = disk_count
        self.os_version = os_version
        self.agent_expiry_date = agent_expiry_date
        self.mars_agent_version = mars_agent_version
        self.mars_agent_expiry_date = mars_agent_expiry_date
        self.agent_version_details = agent_version_details
        self.mars_agent_version_details = mars_agent_version_details
class MigrateInput(msrest.serialization.Model):
    """Request body wrapper for a migrate operation.

    All required parameters must be populated in order to send to Azure.

    :param properties: Required. Migrate input properties.
    :type properties: ~azure.mgmt.recoveryservicessiterecovery.models.MigrateInputProperties
    """

    # 'properties' is mandatory: msrest raises on serialization if missing.
    _validation = {'properties': {'required': True}}

    _attribute_map = {'properties': {'key': 'properties', 'type': 'MigrateInputProperties'}}

    def __init__(self, *, properties: "MigrateInputProperties", **kwargs):
        super(MigrateInput, self).__init__(**kwargs)
        self.properties = properties
class MigrateInputProperties(msrest.serialization.Model):
    """Properties carried by a migrate request.

    All required parameters must be populated in order to send to Azure.

    :param provider_specific_details: Required. The provider specific details.
    :type provider_specific_details:
     ~azure.mgmt.recoveryservicessiterecovery.models.MigrateProviderSpecificInput
    """

    # The provider payload is mandatory for a migrate call.
    _validation = {'provider_specific_details': {'required': True}}

    _attribute_map = {
        'provider_specific_details': {'key': 'providerSpecificDetails', 'type': 'MigrateProviderSpecificInput'},
    }

    def __init__(self, *, provider_specific_details: "MigrateProviderSpecificInput", **kwargs):
        super(MigrateInputProperties, self).__init__(**kwargs)
        self.provider_specific_details = provider_specific_details
class MigrateProviderSpecificInput(msrest.serialization.Model):
    """Base type for migrate provider specific input.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: VMwareCbtMigrateInput.

    All required parameters must be populated in order to send to Azure.

    :param instance_type: Required. The class type.Constant filled by server.
    :type instance_type: str
    """

    _validation = {'instance_type': {'required': True}}

    _attribute_map = {'instance_type': {'key': 'instanceType', 'type': 'str'}}

    # Polymorphic dispatch: the wire value of instanceType picks the subclass.
    _subtype_map = {'instance_type': {'VMwareCbt': 'VMwareCbtMigrateInput'}}

    def __init__(self, **kwargs):
        super(MigrateProviderSpecificInput, self).__init__(**kwargs)
        # Discriminator constant; concrete subclasses overwrite this.
        self.instance_type = None  # type: Optional[str]
class MigrationItem(Resource):
    """Migration item resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :ivar type: Resource Type.
    :vartype type: str
    :param location: Resource Location.
    :type location: str
    :param properties: The migration item properties.
    :type properties: ~azure.mgmt.recoveryservicessiterecovery.models.MigrationItemProperties
    """

    # id/name/type are assigned by the service and rejected on input.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'MigrationItemProperties'},
    }

    def __init__(self, *, location: Optional[str] = None, properties: Optional["MigrationItemProperties"] = None, **kwargs):
        # location is handled by the Resource base; only properties is stored here.
        super(MigrationItem, self).__init__(location=location, **kwargs)
        self.properties = properties
class MigrationItemCollection(msrest.serialization.Model):
    """One page of migration items plus the continuation link.

    :param value: The list of migration items.
    :type value: list[~azure.mgmt.recoveryservicessiterecovery.models.MigrationItem]
    :param next_link: The value of next link.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[MigrationItem]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: Optional[List["MigrationItem"]] = None, next_link: Optional[str] = None, **kwargs):
        super(MigrationItemCollection, self).__init__(**kwargs)
        # next_link is None on the final page.
        self.next_link = next_link
        self.value = value
class MigrationItemProperties(msrest.serialization.Model):
    """Migration item properties.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar machine_name: The on-premise virtual machine name.
    :vartype machine_name: str
    :ivar policy_id: The ARM Id of policy governing this item.
    :vartype policy_id: str
    :ivar policy_friendly_name: The name of policy governing this item.
    :vartype policy_friendly_name: str
    :ivar migration_state: The migration status. Possible values include: "None",
     "EnableMigrationInProgress", "EnableMigrationFailed", "DisableMigrationInProgress",
     "DisableMigrationFailed", "InitialSeedingInProgress", "InitialSeedingFailed", "Replicating",
     "MigrationInProgress", "MigrationSucceeded", "MigrationFailed".
    :vartype migration_state: str or ~azure.mgmt.recoveryservicessiterecovery.models.MigrationState
    :ivar migration_state_description: The migration state description.
    :vartype migration_state_description: str
    :ivar last_test_migration_time: The last test migration time.
    :vartype last_test_migration_time: ~datetime.datetime
    :ivar last_test_migration_status: The status of the last test migration.
    :vartype last_test_migration_status: str
    :ivar test_migrate_state: The test migrate state. Possible values include: "None",
     "TestMigrationInProgress", "TestMigrationSucceeded", "TestMigrationFailed",
     "TestMigrationCleanupInProgress".
    :vartype test_migrate_state: str or
     ~azure.mgmt.recoveryservicessiterecovery.models.TestMigrationState
    :ivar test_migrate_state_description: The test migrate state description.
    :vartype test_migrate_state_description: str
    :ivar health: The consolidated health. Possible values include: "None", "Normal", "Warning",
     "Critical".
    :vartype health: str or ~azure.mgmt.recoveryservicessiterecovery.models.ProtectionHealth
    :ivar health_errors: The list of health errors.
    :vartype health_errors: list[~azure.mgmt.recoveryservicessiterecovery.models.HealthError]
    :ivar allowed_operations: The allowed operations on the migration item based on the current
     migration state of the item.
    :vartype allowed_operations: list[str or
     ~azure.mgmt.recoveryservicessiterecovery.models.MigrationItemOperation]
    :ivar current_job: The current job details.
    :vartype current_job: ~azure.mgmt.recoveryservicessiterecovery.models.CurrentJobDetails
    :ivar event_correlation_id: The correlation Id for events associated with this migration item.
    :vartype event_correlation_id: str
    :param provider_specific_details: The migration provider custom settings.
    :type provider_specific_details:
     ~azure.mgmt.recoveryservicessiterecovery.models.MigrationProviderSpecificSettings
    """

    # Every field except provider_specific_details is server-populated (readonly).
    _validation = {
        'machine_name': {'readonly': True},
        'policy_id': {'readonly': True},
        'policy_friendly_name': {'readonly': True},
        'migration_state': {'readonly': True},
        'migration_state_description': {'readonly': True},
        'last_test_migration_time': {'readonly': True},
        'last_test_migration_status': {'readonly': True},
        'test_migrate_state': {'readonly': True},
        'test_migrate_state_description': {'readonly': True},
        'health': {'readonly': True},
        'health_errors': {'readonly': True},
        'allowed_operations': {'readonly': True},
        'current_job': {'readonly': True},
        'event_correlation_id': {'readonly': True},
    }

    # Serialization metadata: attribute name -> REST payload key and type.
    _attribute_map = {
        'machine_name': {'key': 'machineName', 'type': 'str'},
        'policy_id': {'key': 'policyId', 'type': 'str'},
        'policy_friendly_name': {'key': 'policyFriendlyName', 'type': 'str'},
        'migration_state': {'key': 'migrationState', 'type': 'str'},
        'migration_state_description': {'key': 'migrationStateDescription', 'type': 'str'},
        'last_test_migration_time': {'key': 'lastTestMigrationTime', 'type': 'iso-8601'},
        'last_test_migration_status': {'key': 'lastTestMigrationStatus', 'type': 'str'},
        'test_migrate_state': {'key': 'testMigrateState', 'type': 'str'},
        'test_migrate_state_description': {'key': 'testMigrateStateDescription', 'type': 'str'},
        'health': {'key': 'health', 'type': 'str'},
        'health_errors': {'key': 'healthErrors', 'type': '[HealthError]'},
        'allowed_operations': {'key': 'allowedOperations', 'type': '[str]'},
        'current_job': {'key': 'currentJob', 'type': 'CurrentJobDetails'},
        'event_correlation_id': {'key': 'eventCorrelationId', 'type': 'str'},
        'provider_specific_details': {'key': 'providerSpecificDetails', 'type': 'MigrationProviderSpecificSettings'},
    }

    def __init__(
        self,
        *,
        provider_specific_details: Optional["MigrationProviderSpecificSettings"] = None,
        **kwargs
    ):
        super(MigrationItemProperties, self).__init__(**kwargs)
        # Read-only fields start as None; they are filled from server responses.
        self.machine_name = None
        self.policy_id = None
        self.policy_friendly_name = None
        self.migration_state = None
        self.migration_state_description = None
        self.last_test_migration_time = None
        self.last_test_migration_status = None
        self.test_migrate_state = None
        self.test_migrate_state_description = None
        self.health = None
        self.health_errors = None
        self.allowed_operations = None
        self.current_job = None
        self.event_correlation_id = None
        self.provider_specific_details = provider_specific_details
class MigrationItemsQueryParameter(msrest.serialization.Model):
    """Filter parameters for enumerating migration items.

    :param source_fabric_name: The source fabric name filter.
    :type source_fabric_name: str
    :param source_container_name: The source container name filter.
    :type source_container_name: str
    :param instance_type: The replication provider type.
    :type instance_type: str
    """

    _attribute_map = {
        'source_fabric_name': {'key': 'sourceFabricName', 'type': 'str'},
        'source_container_name': {'key': 'sourceContainerName', 'type': 'str'},
        'instance_type': {'key': 'instanceType', 'type': 'str'},
    }

    def __init__(self, *, source_fabric_name: Optional[str] = None, source_container_name: Optional[str] = None, instance_type: Optional[str] = None, **kwargs):
        super(MigrationItemsQueryParameter, self).__init__(**kwargs)
        # All filters are optional; None means "do not filter on this field".
        self.instance_type = instance_type
        self.source_container_name = source_container_name
        self.source_fabric_name = source_fabric_name
class MigrationProviderSpecificSettings(msrest.serialization.Model):
"""Migration provider specific settings.
| |
True
else:
bool_2 = False
if "Bounty" in getCheckBoxes:
bool_3 = True
else:
bool_3 = False
# No Answers
if bool_1 and bool_2 == False and bool_3 == False:
print("Only First is True, Without Query")
questions = Question.objects.all().annotate(
answers=Count('answer')).filter(
answers=0, is_deleted=False).order_by('-date')
# No Accepted Answer
if bool_2 and bool_1 == False and bool_3 == False:
print("Only Second is True")
questions = Question.objects.all().filter(
answer__accepted=False, is_deleted=False).distinct().order_by('-date')
# Has Bounty
if bool_3 and bool_1 == False and bool_2 == False:
questions = Question.objects.all().exclude(
is_bountied=False, is_deleted=True).order_by('-date')
print("Only Third is True")
# No Answers and No Accepted Answer
if bool_1 and bool_2 and bool_3 == False:
questions = Question.objects.all().exclude(
answer__accepted=True).annotate(
answers=Count('answer')).filter(
answers=0,
is_deleted=True).order_by('-date')
print("First and Second is True")
# No Answers and Bounty
if bool_1 and bool_3 and bool_2 == False:
questions = Question.objects.all().exclude(
is_bountied=False).annotate(
answers=Count('answer')).filter(
answers=0,
is_deleted=True).order_by('-date')
print("First and Third is True")
# No Accepted Answers and Bounty
if bool_2 and bool_3 and bool_1 == False:
questions = Question.objects.all().exclude(
is_bountied=False, answer__accepted=True, is_deleted=True).order_by('-date')
print("Second and Third is True")
# ALl Filters are True
if bool_1 and bool_2 and bool_3:
questions = Question.objects.all().exclude(
is_bountied=False,
answer__accepted=True,
is_deleted=True).annotate(
answers=Count('answer')).filter(
answer=0).order_by('-date')
print("All Are True")
# Clear Filters
if bool_1 == False and bool_2 == False and bool_3 == False:
questions = Question.objects.all().exclude(
is_bountied=True, is_deleted=True).order_by('-date')
elif "MostVotes" in this:
selected = "MostVotes"
if query:
relatedTags = Tag.objects.filter(name__icontains=query)
if "NoAnswers" in getCheckBoxes:
bool_1 = True
else:
bool_1 = False
if "NoAcceptedAnswer" in getCheckBoxes:
bool_2 = True
else:
bool_2 = False
if "Bounty" in getCheckBoxes:
bool_3 = True
else:
bool_3 = False
# No Answers
if bool_1 and bool_2 == False and bool_3 == False:
print("Only First is True")
questions = Question.objects.filter(
tags__name__icontains=query).annotate(
answers=Count('answer')).filter(
answers=0,
is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
# No Accepted Answer
if bool_2 and bool_1 == False and bool_3 == False:
print("Only Second is True")
questions = Question.objects.filter(
tags__name__icontains=query).filter(
answer__accepted=False,
is_deleted=False).distinct().annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
# Has Bounty
if bool_3 and bool_1 == False and bool_2 == False:
questions = Question.objects.filter(
tags__name__icontains=query, is_bountied=True, is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("Only Third is True")
# No Answers and No Accepted Answer
if bool_1 and bool_2 and bool_3 == False:
questions = Question.objects.filter(
tags__name__icontains=query,
answer__accepted=False,
is_deleted=False).annotate(
answers=Count('answer')).filter(
answers=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("First and Second is True")
# No Answers and Bounty
if bool_1 and bool_3 and bool_2 == False:
questions = Question.objects.filter(
tags__name__icontains=query,
is_bountied=True,
is_deleted=False).annotate(
answers=Count('answer')).filter(
answers=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("First and Third is True")
# No Accepted Answers and Bounty
if bool_2 and bool_3 and bool_1 == False:
questions = Question.objects.filter(
tags__name__icontains=query,
is_bountied=True,
answer__accepted=False,
is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("Second and Third is True")
# ALl Filters are True
if bool_1 and bool_2 and bool_3:
questions = Question.objects.filter(
tags__name__icontains=query,
is_bountied=True,
answer__accepted=False,
is_deleted=False).annotate(
answers=Count('answer')).filter(
answer=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("All Are True")
# Clear Filters
if bool_1 == False and bool_2 == False and bool_3 == False:
questions = Question.objects.filter(
tags__name__icontains=query, is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
else:
if "NoAnswers" in getCheckBoxes:
bool_1 = True
else:
bool_1 = False
if "NoAcceptedAnswer" in getCheckBoxes:
bool_2 = True
else:
bool_2 = False
if "Bounty" in getCheckBoxes:
bool_3 = True
else:
bool_3 = False
# No Answers
if bool_1 and bool_2 == False and bool_3 == False:
print("Only First is True, Without Query")
questions = Question.objects.all().annotate(
answers=Count('answer')).filter(
answers=0, is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
# No Accepted Answer
if bool_2 and bool_1 == False and bool_3 == False:
print("Only Second is True")
questions = Question.objects.all().exclude(
answer__accepted=True, is_deleted=True).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes').distinct()
# Has Bounty
if bool_3 and bool_1 == False and bool_2 == False:
questions = Question.objects.all().exclude(
is_bountied=False, is_deleted=True).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("Only Third is True")
# No Answers and No Accepted Answer
if bool_1 and bool_2 and bool_3 == False:
questions = Question.objects.all().exclude(
answer__accepted=True).annotate(
answers=Count('answer')).filter(
answers=0,
is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("First and Second is True")
# No Answers and Bounty
if bool_1 and bool_3 and bool_2 == False:
questions = Question.objects.all().exclude(
is_bountied=False,
is_deleted=True).annotate(
answers=Count('answer')).filter(
answers=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("First and Third is True")
# No Accepted Answers and Bounty
if bool_2 and bool_3 and bool_1 == False:
questions = Question.objects.all().exclude(
is_bountied=False, is_deleted=True, answer__accepted=True).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("Second and Third is True")
# ALl Filters are True
if bool_1 and bool_2 and bool_3:
questions = Question.objects.all().exclude(
is_bountied=False,
answer__accepted=True,
is_deleted=True).annotate(
answers=Count('answer')).filter(
answer=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("All Are True")
# Clear Filters
if bool_1 == False and bool_2 == False and bool_3 == False:
questions = Question.objects.all().exclude(
is_bountied=True, is_deleted=True).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
# elif "MostFrequent" in this:
# selected = "MostFrequent"
else:
selected = "BountyEndingSoon"
if query:
relatedTags = Tag.objects.filter(name__icontains=query)
if "NoAnswers" in getCheckBoxes:
bool_1 = True
else:
bool_1 = False
if "NoAcceptedAnswer" in getCheckBoxes:
bool_2 = True
else:
bool_2 = False
if "Bounty" in getCheckBoxes:
bool_3 = True
else:
bool_3 = False
minutes_10 = timezone.now() - timedelta(hours=23)
# questions = Question.objects.filter(bounty_date_announced__lt=minutes_10, is_bountied=True)
# No Answers
if bool_1 and bool_2 == False and bool_3 == False:
print("Only First is True")
questions = Question.objects.filter(
tags__name__icontains=query).annotate(
answers=Count('answer')).filter(
answers=0).filter(
bounty_date_announced__lt=minutes_10, is_bountied=True, is_deleted=False)
# No Accepted Answer
if bool_2 and bool_1 == False and bool_3 == False:
print("Only Second is True, In Bounty Ending Soon")
questions = Question.objects.filter(
tags__name__icontains=query).filter(
answer__accepted=False).filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False).distinct()
# Has Bounty
if bool_3 and bool_1 == False and bool_2 == False:
questions = Question.objects.filter(
tags__name__icontains=query, is_bountied=True, is_deleted=False).filter(
bounty_date_announced__lt=minutes_10)
print("Only Third is True")
# No Answers and No Accepted Answer
if bool_1 and bool_2 and bool_3 == False:
questions = Question.objects.filter(
tags__name__icontains=query, answer__accepted=False).annotate(
answers=Count('answer')).filter(answers=0).filter(
bounty_date_announced__lt=minutes_10, is_bountied=True, is_deleted=False)
print("First and Second is True")
# No Answers and Bounty
if bool_1 and bool_3 and bool_2 == False:
questions = Question.objects.filter(
tags__name__icontains=query,
is_bountied=True).annotate(
answers=Count('answer')).filter(
answers=0).filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False)
print("First and Third is True")
# No Accepted Answers and Bounty
if bool_2 and bool_3 and bool_1 == False:
questions = Question.objects.filter(
tags__name__icontains=query, is_bountied=True, answer__accepted=False).filter(
bounty_date_announced__lt=minutes_10, is_bountied=True, is_deleted=False)
print("Second and Third is True")
# ALl Filters are True
if bool_1 and bool_2 and bool_3:
questions = Question.objects.filter(
tags__name__icontains=query,
is_bountied=True,
answer__accepted=False).annotate(
answers=Count('answer')).filter(
answer=0,
is_deleted=False).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("All Are True")
# Clear Filters
if bool_1 == False and bool_2 == False and bool_3 == False:
questions = Question.objects.filter(tags__name__icontains=query).filter(
bounty_date_announced__lt=minutes_10, is_deleted=False, is_bountied=True)
else:
if "NoAnswers" in getCheckBoxes:
bool_1 = True
else:
bool_1 = False
if "NoAcceptedAnswer" in getCheckBoxes:
bool_2 = True
else:
bool_2 = False
if "Bounty" in getCheckBoxes:
bool_3 = True
else:
bool_3 = False
minutes_10 = timezone.now() - timedelta(hours=23)
# No Answers
if bool_1 and bool_2 == False and bool_3 == False:
print("Only First is True, Without Query")
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False).annotate(
answers=Count('answer')).filter(
answers=0)
# No Accepted Answer
if bool_2 and bool_1 == False and bool_3 == False:
print("Only Second is True")
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False).exclude(
answer__accepted=True).distinct()
# Has Bounty
if bool_3 and bool_1 == False and bool_2 == False:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10, is_bountied=True, is_deleted=False)
print("Only Third is True")
# No Answers and No Accepted Answer
if bool_1 and bool_2 and bool_3 == False:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True).exclude(
answer__accepted=True).annotate(
answers=Count('answer')).filter(
answers=0,
is_deleted=False)
print("First and Second is True")
# No Answers and Bounty
if bool_1 and bool_3 and bool_2 == False:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False).annotate(
answers=Count('answer')).filter(
answers=0).annotate(
mostVotes=Count('qupvote')).order_by('-mostVotes')
print("First and Third is True")
# No Accepted Answers and Bounty
if bool_2 and bool_3 and bool_1 == False:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True,
is_deleted=False).exclude(
answer__accepted=True)
print("Second and Third is True")
# ALl Filters are True
if bool_1 and bool_2 and bool_3:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10,
is_bountied=True).exclude(
answer__accepted=True).annotate(
answers=Count('answer')).filter(
answer=0,
is_deleted=False)
print("All Are True")
# Clear Filters
if bool_1 == False and bool_2 == False and bool_3 == False:
questions = Question.objects.filter(
bounty_date_announced__lt=minutes_10, is_bountied=True, is_deleted=False)
else:
selected = False
questions = Question.objects.filter(
is_deleted=False, is_bountied=False).order_by('-date')
query = ''
bool_1 = ''
bool_2 = ''
bool_3 = ''
relatedTags = ''
# EXCLUDED QUESTIONS WHICH HAVE BOUNTY
objs = Question.objects.all() # .exclude(is_bountied=True)
if request.user.is_authenticated:
notifics = Notification.objects.filter(
noti_receiver=request.user).order_by('-date_created')
else:
notifics = ''
minutes_10 = timezone.now() - timedelta(hours=23)
# POSTS BEFORE 4 HOURS
bount_older = Question.objects.filter(
bounty_date_announced__lt=minutes_10, is_bountied=True)
bountied = ''
object_list = ''
page = request.GET.get('page', 1)
paginator = Paginator(questions, 9)
try:
questions = paginator.page(page)
except PageNotAnInteger:
questions = paginator.page(1)
except EmptyPage:
questions = | |
### Use Fixed Vertical Shift Check Box
# Off by default; useVertShiftCallback (defined outside this view) reacts to toggles.
useFixedVertShiftChkState = tk.BooleanVar()
useFixedVertShiftChkState.set(False)
useFixedVertShift_checkBox = tk.Checkbutton(measErrorLabelframe, text = 'Use Fixed Vertical Shift', font=('Arial', 12), var=useFixedVertShiftChkState, command=useVertShiftCallback)
useFixedVertShift_checkBox.place(x=10, y=43)
### Vertical Shift Amount Entry Box
lbl = tk.Label(measErrorLabelframe, text='Vertical Shift (m):', font=('Arial', 12), anchor = 'w', justify='left')
lbl.place(x=240, y=45)
verticalShift_textBox = tk.Entry(measErrorLabelframe, width=10)
verticalShift_textBox.place(x=370, y=50)
verticalShift_textBox.insert(0,'0')
# Starts disabled; presumably re-enabled by useVertShiftCallback when the
# fixed-shift checkbox is ticked -- confirm, its definition is outside this view.
verticalShift_textBox.config(state = 'disabled')
# Use measured signal confidence checkbox callback
def useMeasSigConfCallback():
useMeasSigConf = useMeasSigConfChkState.get()
if(useMeasSigConf):
measSigConfIndex_textBox.config(state = 'normal')
truthGroundIndex_textBox.config(state = 'disabled')
useGroundIndexChkState.set(False)
else:
measSigConfIndex_textBox.config(state = 'disabled')
truthGroundIndex_textBox.config(state = 'normal')
useGroundIndexChkState.set(True)
# endIf
# endDef
### Use Measured Signal Confidence Entry Box
# Default filtering mode: by ICESat-2 signal-confidence values.
useMeasSigConfChkState = tk.BooleanVar()
useMeasSigConfChkState.set(True)
useMeasSigConf_checkBox = tk.Checkbutton(measErrorLabelframe, text = 'Use ICESat-2 Signal Confidence Value(s):', font=('Arial', 12), var=useMeasSigConfChkState, command=useMeasSigConfCallback)
useMeasSigConf_checkBox.place(x=10, y=115)
### Measured Signal Confidence Entry Box
# Comma-separated confidence values; default '3, 4' -- presumably the
# medium/high confidence classes; TODO confirm against the ATL03 product docs.
measSigConfIndex_textBox = tk.Entry(measErrorLabelframe, width=8)
measSigConfIndex_textBox.place(x=340, y=120)
measSigConfIndex_textBox.insert(0,'3, 4')
# Callback: reference ground-index checkbox (counterpart of the signal-
# confidence callback above; the two modes are mutually exclusive).
def useGroundIndexConfCallback():
    """Enable ground-index entry and disable signal-confidence entry (or vice versa)."""
    if useGroundIndexChkState.get():
        truthGroundIndex_textBox.config(state='normal')
        measSigConfIndex_textBox.config(state='disabled')
        useMeasSigConfChkState.set(False)
    else:
        truthGroundIndex_textBox.config(state='disabled')
        measSigConfIndex_textBox.config(state='normal')
        useMeasSigConfChkState.set(True)
### Use Truth Ground Index Entry Box
# Alternative filtering mode: by a reference (truth) ground class index.
# Off by default; mutually exclusive with the signal-confidence option.
useGroundIndexChkState = tk.BooleanVar()
useGroundIndexChkState.set(False)
useGroundIndex_checkBox = tk.Checkbutton(measErrorLabelframe, text = 'Use Reference Ground Index:', font=('Arial', 12), var=useGroundIndexChkState, command=useGroundIndexConfCallback)
useGroundIndex_checkBox.place(x=10, y=150)
### Truth Ground Index Entry Box
# Single class index (default '2'); disabled until its checkbox is ticked.
truthGroundIndex_textBox = tk.Entry(measErrorLabelframe, width=5)
truthGroundIndex_textBox.place(x=245, y=155)
truthGroundIndex_textBox.insert(0,'2')
truthGroundIndex_textBox.config(state = 'disabled')
lbl = tk.Label(measErrorLabelframe, text='(Requires ATL08 File)', font=('Arial', 12), anchor = 'w', justify='left')
lbl.place(x=285, y=150)
### Create Measured Corrected File Check Box
createMeasCorrFileChkState = tk.BooleanVar()
createMeasCorrFileChkState.set(True)
createMeasCorrFile_checkBox = tk.Checkbutton(measErrorLabelframe, text = 'Save Shifted ICESat-2 File', font=('Arial', 12), var=createMeasCorrFileChkState)
createMeasCorrFile_checkBox.place(x=10, y=185)
### Make Plots Check Box
makePlotsChkState = tk.BooleanVar()
makePlotsChkState.set(True)
makePlots_checkBox = tk.Checkbutton(measErrorLabelframe, text = 'Make Output Plots', font=('Arial', 12), var=makePlotsChkState)
makePlots_checkBox.place(x=280, y=185)
# Set measurement correction section to disabled (default mode); widgets are
# re-enabled elsewhere when the section's master checkbox is ticked.
for child in measErrorLabelframe.winfo_children():
    child.configure(state='disabled')
# endFor
# Reset every stats-related widget and drop the cached stats frame.
def refreshStats():
    """Clear both stats progress bars, the added-stats box, and cached stats."""
    global statsDF
    # Main stats progress bar and label
    statsStatusBar['value'] = 0
    statuslbl.config(text='')
    # Relative-height stats progress bar and label
    statsStatusBarRH['value'] = 0
    statuslblRH.config(text=' Status:')
    # Empty the Add Stats listbox
    addStatsBox['values'] = ('')
    addStatsBox.delete(0, 'end')
    # Drop any stats from a previous run
    statsDF = []
    # Redraw the GUI
    window.update()
# Function to load ATL03 plot info after a run
def loadAtl03_info():
    """Populate the plot/stats widgets from the most recent ATL03 run.

    Reads the module-level results (atl03Data, gtNumsGood, beamNumsGood,
    beamStrengthGood) -- assigned by the run callback -- and resets all plot
    selection widgets to defaults.
    """
    # Set file name being analyzed
    fileName_textBox.delete(0,'end')
    fileName_textBox.insert(0,atl03Data[0].atl03FileName)
    # Set file name being analyzed (in Stats Tab section)
    fileName_textBoxRH.delete(0,'end')
    fileName_textBoxRH.insert(0,atl03Data[0].atl03FileName)
    # Set Ground Tracks to plot, e.g. "gt1l (Beam #1, strong beam)"
    gtNumsText = [i + ' (Beam #' + j + ', ' + k + ' beam)' for i, j, k in zip(gtNumsGood, beamNumsGood, beamStrengthGood)]
    gtNumsTuple = tuple(gtNumsText)
    gtNumPlotBox['values'] = gtNumsTuple
    gtNumPlotBox.current(0)
    # Set Ground Tracks to plot (in Stats Tab section)
    gtNumPlotBoxRH['values'] = gtNumsTuple
    gtNumPlotBoxRH.current(0)
    # Label horizontal coordinates per projection: EPSG 3413/3976 use polar
    # stereographic axes, anything else is labeled as UTM.
    if(atl03Data[0].zone=='3413' or atl03Data[0].zone=='3976'):
        plotVarsTuple = ('Time (sec)', 'Delta Time (sec)', \
                         'Latitude (deg)', 'Longitude (deg)', \
                         'Polar Stereo X (m)', 'Polar Stereo Y (m)', \
                         'Cross-Track (m)', 'Along-Track (m)', \
                         'Height (m HAE)', 'Height (m MSL)', \
                         'Classification', 'Signal Confidence')
    else:
        plotVarsTuple = ('Time (sec)', 'Delta Time (sec)', \
                         'Latitude (deg)', 'Longitude (deg)', \
                         'UTM Easting (m)', 'UTM Northing (m)', \
                         'Cross-Track (m)', 'Along-Track (m)', \
                         'Height (m HAE)', 'Height (m MSL)', \
                         'Classification', 'Signal Confidence')
    # endIf
    # Set X Vals to plot (default index 0: Time (sec))
    xValsBox['values'] = plotVarsTuple
    xValsBox.current(0)
    # Set Y Vals to plot (default index 8: Height (m HAE))
    yValsBox['values'] = plotVarsTuple
    yValsBox.current(8)
    # Set X Label to match the selected X variable
    xAxisVal = xValsBox.get()
    currentData = xlabel_textBox.get()
    xlabel_textBox.delete(0,len(currentData))
    xlabel_textBox.insert(0,xAxisVal)
    # Set Y label to match the selected Y variable
    yAxisVal = yValsBox.get()
    currentData = ylabel_textBox.get()
    ylabel_textBox.delete(0,len(currentData))
    ylabel_textBox.insert(0,yAxisVal)
    # Set Vals to filter on: Classification is only offered when the ATL03
    # data was mapped (dataIsMapped) to classes.
    if(atl03Data[0].dataIsMapped):
        filterTuple = (' ','Classification', 'Signal Confidence')
    else:
        filterTuple = (' ', 'Signal Confidence')
    # endIf
    filterBox['values'] = filterTuple
    filterBox.current(0)
    # Hide and untick every filter-value checkbox until a filter is chosen
    filter0_checkBox.place_forget()
    filter1_checkBox.place_forget()
    filter2_checkBox.place_forget()
    filter3_checkBox.place_forget()
    filter4_checkBox.place_forget()
    filter0ChkState.set(False)
    filter1ChkState.set(False)
    filter2ChkState.set(False)
    filter3ChkState.set(False)
    filter4ChkState.set(False)
    # Refresh stats
    refreshStats()
    # Update window
    window.update()
# endDef
# Populate ATL08-specific plot/stat controls after a run completes.
def loadAtl08_info():
    """Load ATL08 plot options and reset the stats section to defaults."""
    # ATL08 height products available for the y-axis
    atl08_yOptions = ('Max Canopy (m HAE)', 'Terrain Best Fit (m HAE)', 'Terrain Median (m HAE)',
                      'Max Canopy (m MSL)', 'Terrain Best Fit (m MSL)', 'Terrain Median (m MSL)')
    yValsBox_atl08['values'] = atl08_yOptions
    yValsBox_atl08.current(0)
    # Quantities the stats section can segment by
    segmentOptions = ('Time (sec)', 'Latitude (deg)', 'UTM Northing (m)', 'Along-Track (m)')
    segmentByBox['values'] = segmentOptions
    segmentByBox.current(0)
    # Reset the increment to 1 second
    incrementBox.delete(0, len(incrementBox.get()))
    incrementBox.insert(0, '1')
    incrementText.config(text='sec')
    # Clear stats from any previous run and redraw the GUI
    refreshStats()
    window.update()
# Function to check and match up ATL03/ATL08 file paths
def getMatchingAtlFiles(atl03FilePaths, atl08FilePaths):
    """Pair each ATL03 granule with its corresponding ATL08 granule.

    Two files correspond when they share the same acquisition timestamp and
    track fields (as decoded by getNameParts), compared via a canonical
    'ATL03_<datetime>_<track><unknown>' key.

    Parameters
    ----------
    atl03FilePaths : sequence of str
        Paths to ATL03 files.
    atl08FilePaths : sequence of str
        Paths to candidate ATL08 files.

    Returns
    -------
    tuple of numpy.ndarray
        Two object arrays, same length as atl03FilePaths: the ATL03 paths,
        and for each one the matching ATL08 path or '' when none matches.
    """

    def _matchKey(fileName):
        # Canonical comparison key shared by both products
        parts = getNameParts(fileName)
        return ('ATL03_' + parts.year + parts.month + parts.day + parts.hour +
                parts.minute + parts.second + '_' + parts.trackNum + parts.unknown)

    # Build the ATL08 lookup once (O(m)) instead of re-scanning the ATL08
    # array with np.isin + np.where for every ATL03 file (O(n*m)).
    # setdefault keeps the FIRST occurrence, matching the original
    # np.where(...)[0][0] behavior when duplicate keys exist.
    atl08Lookup = {}
    for atl08FilePath in atl08FilePaths:
        atl08Lookup.setdefault(_matchKey(os.path.basename(atl08FilePath)), atl08FilePath)

    atl03FilePathsAll = np.empty(np.shape(atl03FilePaths), dtype=object)
    atl08FilePathsAll = np.empty(np.shape(atl03FilePaths), dtype=object)
    for i, atl03FilePath in enumerate(atl03FilePaths):
        atl03FilePathsAll[i] = atl03FilePath
        # '' signals "no matching ATL08 file" to downstream code
        atl08FilePathsAll[i] = atl08Lookup.get(_matchKey(os.path.basename(atl03FilePath)), '')

    return atl03FilePathsAll, atl08FilePathsAll
# endDef
# Run Button Callback
def runAtl03():
# Update status bar
statusBar['value'] = 0
window.update()
# Make atlMeasuredData a global variable
global atl03Data, atl08Data, atlTruthDataFiltered, \
truthDataExists, atlCorrections, atl03FileExists, atl08FileExists, \
outFilePath, outPathExists, atl03DF_all, \
gtNumsGood, beamNumsGood, beamStrengthGood
# Check ATL03/ATL08 files
checkATL03()
checkATL08()
atl03FilePathsAll, atl08FilePathsAll = getMatchingAtlFiles(atl03FilePaths,atl08FilePaths)
# Check output file path, make output directory if it doesn't exist
outFilePath = outPath_textBox.get().strip()
if('' == outFilePath or '.' == outFilePath):
outPathExists = False
else:
outPathExists = os.path.isdir(outFilePath)
if(not outPathExists):
os.mkdir(outFilePath)
outPathExists = True
# endIf
# endIf
# Get reference section status
useTruthSection = useTruthSectionChkState.get()
# Check Truth Directory
truthOkToRun = True
if(useTruthSection):
checkTruthDir()
if(not truthDataExists):
truthOkToRun = False
# endIf
# endIf
if(atl03FileExists and outPathExists and truthOkToRun):
# Disable run button
RunButton.config(state=tk.DISABLED)
# Open .log file for writing
logFileName = 'temp.txt'
logFilePath = os.path.normpath(outFilePath + '/' + logFileName)
if(os.path.exists(logFilePath)):
os.remove(logFilePath)
# endIf
try:
logFileID = open(logFilePath, 'w')
except:
logFileID = False
# endTry
# Try code
try:
# Start timer
timeStart = runTime.time()
# Get current date/time
currentDateTime = (datetime.now()).strftime('%m/%d/%Y %H:%M:%S')
writeLog('-------------------------------------', logFileID)
writeLog('Run Initiated On: %s' %currentDateTime, logFileID)
writeLog('-------------------------------------', logFileID)
writeLog('', logFileID)
# Loop through all ATL03/ATL08 files
totalFiles = len(atl03FilePathsAll)
for numFile in range(0,totalFiles):
# Initialize variables
atl03Data = []
atl03DataSingle = []
atl03DF = []
atl03DF_all = []
atl08Data = []
atl08DataSingle = []
atlTruthDataSingle = []
atlTruthDataFiltered = []
atlTruthDataFilteredSingle = []
atlCorrections = []
atlCorrectionsSingle = []
gtNumsGood = []
beamNumsGood = []
beamStrengthGood = []
# Get ATL03/ATL08 file paths
atl03FilePath = atl03FilePathsAll[numFile]
atl08FilePath = atl08FilePathsAll[numFile]
# Print updates
writeLog('File #%d of %d' %(numFile+1,totalFiles),logFileID)
writeLog('File Name: %s' %(os.path.basename(atl03FilePath)),logFileID)
writeLog('', logFileID)
# Get GT Nums
gtNums = getGtNums(atl03FilePath)
# Get Truth Data inputs
if(useTruthSection):
useExistingTruth = useExistingTruthChkState.get()
# truthSwathDir = truthDataDir_textBox.get().strip()
bufferInput = truthBuffer_textBox.get().strip()
if('' == bufferInput):
buffer = 0
else:
buffer = int(bufferInput)
# endIf
createTruthFile = createTruthFileChkState.get()
# endIf
# Get Corrected Measured inputs
useMeasErrorSection = useMeasErrorSectionChkState.get()
if(useMeasErrorSection):
offsetsCrossTrackBounds = eval('[' + crossTrackBounds_textBox.get().strip() + ']')
offsetsAlongTrackBounds = eval('[' + alongTrackBounds_textBox.get().strip() + ']')
offsetsRasterResolutions = eval('[' + multiresStepdown_textBox.get().strip() + ']')
refHeightType = refHeightTypeBox.get()
offsetsUseVerticalShift = useFixedVertShiftChkState.get()
offsetsVerticalShift = float(verticalShift_textBox.get().strip())
offsets = offsetsStruct(offsetsCrossTrackBounds, offsetsAlongTrackBounds, offsetsRasterResolutions, offsetsUseVerticalShift, offsetsVerticalShift)
useMeasSigConf = useMeasSigConfChkState.get()
if(useMeasSigConf):
filterData = eval('[' + measSigConfIndex_textBox.get().strip() + ']')
else:
filterData = int(truthGroundIndex_textBox.get().strip())
# endIf
createMeasCorrFile = createMeasCorrFileChkState.get()
makePlots = makePlotsChkState.get()
showPlots = False
# endIf
# Get trim inputs
if(trimNoneModeChkState.get()):
trimInfo = 'none'
elif(trimAutoModeChkState.get()):
trimInfo = 'auto'
elif(trimManualModeChkState.get()):
trimMode = 'manual'
if(latModeChkState.get()):
trimType = 'lat'
trimMin = latMin_textBox.get()
| |
= stamp
return True
    def on_msg__b(self, msg, stamp):
        """Handle a message on /b (the forbidden behavior topic).

        First expires any trigger older than the 0.1s window; if a trigger is
        still active (state 2) and the predicate holds, the property is
        violated. Returns True when the message was consumed, else False.
        """
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Drop triggers whose 0.1s window has elapsed; with none
                # left, fall back to the idle state (3).
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data < 0):
                    # Violation: matching /b observed within the window.
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b', stamp, msg))
                    self._pool.clear()
                    self._state = -2  # falsified verdict
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False
    def on_msg__a(self, msg, stamp):
        """Handle a message on /a (the trigger topic).

        A matching message (data > 0) records a trigger and, from the idle
        state, opens the timed window (state 3 -> 2). Returns True when the
        message was consumed, else False.
        """
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Expire triggers past the 0.1s window first.
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if (msg.data > 0):
                    # Window already open: refresh the stored trigger
                    # (deque has maxlen 1, so this replaces the oldest).
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    return True
            if self._state == 3:
                if (msg.data > 0):
                    # Idle: record the trigger and open the window.
                    self._pool.append(MsgRecord('/a', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False
    def _reset(self):
        """Restore the mutable monitor state to its pre-launch defaults."""
        self.witness = []
        # deque with maxlen 1: only the most recent trigger is retained
        self._pool = deque((), 1)
        # -1 means "never happened" for all three timestamps
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1

    def _noop(self, *args):
        """Default callback: ignore all arguments."""
        pass
class PropertyMonitor(object):
    """State-machine monitor for the HPL property

        globally: (/a1 {data > 0} or /a2 {data < 0})
                  forbids /b {True} within 0.1s

    A matching trigger on /a1 or /a2 opens a 0.1s window; any /b message in
    that window is a violation. States: 0 = off, 2 = trigger active,
    3 = idle, -2 = falsified; -1 (verified) is checked by `verdict` but is
    never reached by the transitions visible here.
    """
    __slots__ = (
        '_lock',            # concurrency control
        '_state',           # currently active state
        '_pool',            # MsgRecord deque to hold temporary records
        'witness',          # MsgRecord list of observed events
        'on_enter_scope',   # callback upon entering the scope
        'on_exit_scope',    # callback upon exiting the scope
        'on_violation',     # callback upon verdict of False
        'on_success',       # callback upon verdict of True
        'time_launch',      # when was the monitor launched
        'time_shutdown',    # when was the monitor shutdown
        'time_state',       # when did the last state transition occur
        'cb_map',           # mapping of topic names to callback functions
    )

    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: (/a1 { (data > 0) } or /a2 { (data < 0) }) forbids /b { True } within 0.1s'''

    def __init__(self):
        self._lock = Lock()
        self._reset()
        # External observers default to no-ops until assigned by the caller.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0  # off until on_launch()
        self.cb_map = {
            '/b': self.on_msg__b,
            '/a1': self.on_msg__a1,
            '/a2': self.on_msg__a2,
        }

    @property
    def verdict(self):
        """True/False once decided; None while undecided or off."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None

    def on_launch(self, stamp):
        """Turn the monitor on; raises RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3  # start idle
            self.time_state = stamp
            self.on_enter_scope(stamp)
        return True

    def on_shutdown(self, stamp):
        """Turn the monitor off; raises RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
        return True

    def on_timer(self, stamp):
        """Periodic tick: expire triggers older than the 0.1s window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True

    def on_msg__b(self, msg, stamp):
        """Handle /b: any message during an active window is a violation."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Expire stale triggers before deciding.
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                # No predicate: every /b message in the window violates.
                self.witness.append(rec)
                self.witness.append(MsgRecord('/b', stamp, msg))
                self._pool.clear()
                self._state = -2  # falsified verdict
                self.time_state = stamp
                self.on_violation(stamp, self.witness)
                return True
            return False

    def on_msg__a1(self, msg, stamp):
        """Handle /a1: data > 0 records a trigger / opens the window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a1', stamp, msg))
                    return True
            if self._state == 3:
                if (msg.data > 0):
                    self._pool.append(MsgRecord('/a1', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False

    def on_msg__a2(self, msg, stamp):
        """Handle /a2: data < 0 records a trigger / opens the window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                if (msg.data < 0):
                    self._pool.append(MsgRecord('/a2', stamp, msg))
                    return True
            if self._state == 3:
                if (msg.data < 0):
                    self._pool.append(MsgRecord('/a2', stamp, msg))
                    self._state = 2
                    self.time_state = stamp
                    return True
            return False

    def _reset(self):
        """Restore mutable state to pre-launch defaults."""
        self.witness = []
        # maxlen-1 deque: only the most recent trigger is retained
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1

    def _noop(self, *args):
        """Default callback: ignore all arguments."""
        pass
class PropertyMonitor(object):
    """State-machine monitor for the HPL property

        globally: /a {True} forbids (/b1 {data > 0} or /b2 {data < 0})
                  within 0.1s

    Any /a message opens a 0.1s window; a matching /b1 or /b2 message in
    that window is a violation. States: 0 = off, 2 = trigger active,
    3 = idle, -2 = falsified; -1 (verified) is checked by `verdict` but is
    never reached by the transitions visible here.
    """
    __slots__ = (
        '_lock',            # concurrency control
        '_state',           # currently active state
        '_pool',            # MsgRecord deque to hold temporary records
        'witness',          # MsgRecord list of observed events
        'on_enter_scope',   # callback upon entering the scope
        'on_exit_scope',    # callback upon exiting the scope
        'on_violation',     # callback upon verdict of False
        'on_success',       # callback upon verdict of True
        'time_launch',      # when was the monitor launched
        'time_shutdown',    # when was the monitor shutdown
        'time_state',       # when did the last state transition occur
        'cb_map',           # mapping of topic names to callback functions
    )

    PROP_ID = 'None'
    PROP_TITLE = '''None'''
    PROP_DESC = '''None'''
    HPL_PROPERTY = r'''globally: /a { True } forbids (/b1 { (data > 0) } or /b2 { (data < 0) }) within 0.1s'''

    def __init__(self):
        self._lock = Lock()
        self._reset()
        # External observers default to no-ops until assigned by the caller.
        self.on_enter_scope = self._noop
        self.on_exit_scope = self._noop
        self.on_violation = self._noop
        self.on_success = self._noop
        self._state = 0  # off until on_launch()
        self.cb_map = {
            '/b2': self.on_msg__b2,
            '/a': self.on_msg__a,
            '/b1': self.on_msg__b1,
        }

    @property
    def verdict(self):
        """True/False once decided; None while undecided or off."""
        with self._lock:
            if self._state == -1:
                return True
            if self._state == -2:
                return False
            return None

    def on_launch(self, stamp):
        """Turn the monitor on; raises RuntimeError if already on."""
        with self._lock:
            if self._state != 0:
                raise RuntimeError('monitor is already turned on')
            self._reset()
            self.time_launch = stamp
            self._state = 3  # start idle
            self.time_state = stamp
            self.on_enter_scope(stamp)
        return True

    def on_shutdown(self, stamp):
        """Turn the monitor off; raises RuntimeError if already off."""
        with self._lock:
            if self._state == 0:
                raise RuntimeError('monitor is already turned off')
            self.time_shutdown = stamp
            self._state = 0
            self.time_state = stamp
        return True

    def on_timer(self, stamp):
        """Periodic tick: expire triggers older than the 0.1s window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
        return True

    def on_msg__b2(self, msg, stamp):
        """Handle /b2: data < 0 during an active window is a violation."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                # Expire stale triggers before deciding.
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data < 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b2', stamp, msg))
                    self._pool.clear()
                    self._state = -2  # falsified verdict
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False

    def on_msg__a(self, msg, stamp):
        """Handle /a: any message records a trigger / opens the window."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                # No predicate on /a: refresh the stored trigger.
                self._pool.append(MsgRecord('/a', stamp, msg))
                return True
            if self._state == 3:
                self._pool.append(MsgRecord('/a', stamp, msg))
                self._state = 2
                self.time_state = stamp
                return True
            return False

    def on_msg__b1(self, msg, stamp):
        """Handle /b1: data > 0 during an active window is a violation."""
        with self._lock:
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                while self._pool and (stamp - self._pool[0].timestamp) >= 0.1:
                    self._pool.popleft()
                if not self._pool:
                    self._state = 3
                    self.time_state = stamp
            if self._state == 2:
                assert len(self._pool) >= 1, 'missing trigger event'
                rec = self._pool[0]
                if (msg.data > 0):
                    self.witness.append(rec)
                    self.witness.append(MsgRecord('/b1', stamp, msg))
                    self._pool.clear()
                    self._state = -2  # falsified verdict
                    self.time_state = stamp
                    self.on_violation(stamp, self.witness)
                    return True
            return False

    def _reset(self):
        """Restore mutable state to pre-launch defaults."""
        self.witness = []
        # maxlen-1 deque: only the most recent trigger is retained
        self._pool = deque((), 1)
        self.time_launch = -1
        self.time_shutdown = -1
        self.time_state = -1

    def _noop(self, *args):
        """Default callback: ignore all arguments."""
        pass
class PropertyMonitor(object):
__slots__ = (
'_lock', # concurrency control
'_state', # currently active state
'_pool', # MsgRecord deque to hold temporary records
'witness', # MsgRecord list of observed events
'on_enter_scope', # callback upon entering the scope
'on_exit_scope', # callback upon exiting the scope
'on_violation', # callback upon verdict of False
'on_success', # callback upon verdict of True
'time_launch', # when was the monitor launched
'time_shutdown', # when was the monitor shutdown
'time_state', # when did the last state transition occur
'cb_map', # mapping of topic names to callback functions
)
PROP_ID = 'None'
PROP_TITLE = '''None'''
PROP_DESC = '''None'''
HPL_PROPERTY = r'''after /p { True }: /a { True } forbids /b { True } within 0.1s'''
def __init__(self):
self._lock = Lock()
self._reset()
self.on_enter_scope = self._noop
self.on_exit_scope = self._noop
self.on_violation = self._noop
self.on_success = self._noop
self._state = 0
self.cb_map = {
'/b': self.on_msg__b,
'/a': self.on_msg__a,
| |
# Repository: ResearchSoftwareInstitute/MyHPOM
from django.test import TestCase
from django.contrib.auth.models import Group
from django.core.exceptions import PermissionDenied
from hs_access_control.models import UserResourceProvenance, UserResourcePrivilege, \
GroupResourceProvenance, GroupResourcePrivilege, \
UserGroupProvenance, UserGroupPrivilege, \
PrivilegeCodes
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_access_control.tests.utilities import is_equal_to_as_set, global_reset, \
check_provenance_synchronization
__author__ = 'Alva'
class UnitTests(MockIRODSTestCaseMixin, TestCase):
""" test basic behavior of each routine """
    def setUp(self):
        """Create the shared fixture: four users (alva, george, john, admin),
        plus two resources and two groups, all created by george."""
        super(UnitTests, self).setUp()
        global_reset()
        self.group, _ = Group.objects.get_or_create(name='Resource Author')
        self.alva = hydroshare.create_account(
            '<EMAIL>',
            username='alva',
            first_name='alva',
            last_name='couch',
            superuser=False,
            groups=[]
        )
        self.george = hydroshare.create_account(
            '<EMAIL>',
            username='george',
            first_name='george',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.john = hydroshare.create_account(
            '<EMAIL>',
            username='john',
            first_name='john',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        # Only admin is a superuser.
        self.admin = hydroshare.create_account(
            '<EMAIL>',
            username='admin',
            first_name='first_name_admin',
            last_name='last_name_admin',
            superuser=True,
            groups=[]
        )
        # george creates resource 'bikes', so george is the quota holder initially
        self.bikes = hydroshare.create_resource(
            resource_type='GenericResource',
            owner=self.george,
            title='Bikes',
            metadata=[],
        )
        # george creates group 'bikers'
        self.bikers = self.george.uaccess.create_group('Bikers', 'Of the human powered kind')
        # george creates resource 'harps'
        self.harps = hydroshare.create_resource(
            resource_type='GenericResource',
            owner=self.george,
            title='Harps',
            metadata=[],
        )
        # george creates group 'harpers'
        self.harpers = self.george.uaccess.create_group('Harpers', 'Without any ferries')
def test_user_resource_provenance_crosstalk(self):
george = self.george
alva = self.alva
bikes = self.bikes
harps = self.harps
john = self.john
# George grants Alva view privilege
george.uaccess.share(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# George grants Alva privilege
george.uaccess.share(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Alva grants John privilege
alva.uaccess.share(
resource=bikes,
user=john,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=alva),
[john]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=john)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, john)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, alva)
# now George overrides Alva on John's privilege
george.uaccess.share(
resource=bikes,
user=john,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.VIEW)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes, grantor=george), [
alva, john]))
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=alva),
[]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=john)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, john)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Crosstalk test: George grants Alva privilege over harps
george.uaccess.share(
resource=harps,
user=alva,
privilege=PrivilegeCodes.VIEW)
# old privileges didn't change
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes, grantor=george), [
alva, john]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# check new privileges: should be independent.
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=harps,
user=alva),
PrivilegeCodes.VIEW)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=harps,
user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=harps,
grantor=george),
[alva]))
record = UserResourceProvenance.get_current_record(
resource=harps, user=alva)
self.assertEqual(record.resource, harps)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
check_provenance_synchronization(self)
def test_user_group_provenance_crosstalk(self):
george = self.george
alva = self.alva
bikers = self.bikers
harpers = self.harpers
john = self.john
# George grants Alva view privilege
george.uaccess.share(
group=bikers,
user=alva,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers,
grantor=george),
[alva]))
record = UserGroupProvenance.get_current_record(
group=bikers, user=alva)
self.assertEqual(record.group, bikers)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# George grants Alva privilege
george.uaccess.share(
group=bikers,
user=alva,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers,
grantor=george),
[alva]))
record = UserGroupProvenance.get_current_record(
group=bikers, user=alva)
self.assertEqual(record.group, bikers)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Alva grants John privilege
alva.uaccess.share(
group=bikers,
user=john,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=john),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=john),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers,
grantor=george),
[alva]))
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers,
grantor=alva),
[john]))
record = UserGroupProvenance.get_current_record(
group=bikers, user=john)
self.assertEqual(record.group, bikers)
self.assertEqual(record.user, john)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, alva)
# now George overrides Alva on John's privilege
george.uaccess.share(
group=bikers,
user=john,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=john),
PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=john),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers, grantor=george), [
alva, john]))
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers,
grantor=alva),
[]))
record = UserGroupProvenance.get_current_record(
group=bikers, user=john)
self.assertEqual(record.group, bikers)
self.assertEqual(record.user, john)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Crosstalk test: George grants Alva privilege over harpers
george.uaccess.share(
group=harpers,
user=alva,
privilege=PrivilegeCodes.VIEW)
# old privileges didn't change
self.assertEqual(
UserGroupProvenance.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=bikers,
user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=bikers, grantor=george), [
alva, john]))
record = UserGroupProvenance.get_current_record(
group=bikers, user=alva)
self.assertEqual(record.group, bikers)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# check new privileges: should be independent of old privileges
self.assertEqual(
UserGroupProvenance.get_privilege(
group=harpers,
user=alva),
PrivilegeCodes.VIEW)
self.assertEqual(
UserGroupPrivilege.get_privilege(
group=harpers,
user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserGroupPrivilege.get_undo_users(
group=harpers,
grantor=george),
[alva]))
record = UserGroupProvenance.get_current_record(
group=harpers, user=alva)
self.assertEqual(record.group, harpers)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
check_provenance_synchronization(self)
def test_group_resource_provenance_crosstalk(self):
george = self.george
bikes = self.bikes
bikers = self.bikers
harps = self.harps
harpers = self.harpers
alva = self.alva
# George grants Bikers view privilege
george.uaccess.share(
resource=bikes,
group=bikers,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes,
grantor=george),
[bikers]))
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=bikers)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, bikers)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# George grants Harpers change privilege
george.uaccess.share(
resource=bikes,
group=harpers,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.CHANGE)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes, grantor=george), [
bikers, harpers]))
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=harpers)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, harpers)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Alva is a harper
george.uaccess.share(
group=harpers,
user=alva,
privilege=PrivilegeCodes.CHANGE)
# Alva can access bikes
george.uaccess.share(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE)
# Alva downgrades Harpers privilege on bikes
alva.uaccess.share(
resource=bikes,
group=harpers,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.VIEW)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes,
grantor=george),
[bikers]))
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes,
grantor=alva),
[harpers]))
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=harpers)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, harpers)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, alva)
# now George overrides Alva on Harpers privilege
george.uaccess.share(
resource=bikes,
group=harpers,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.CHANGE)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=harpers),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes, grantor=george),
[bikers, harpers]))
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes,
grantor=alva),
[]))
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=harpers)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, harpers)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Crosstalk test: George grants bikers privilege over harps
george.uaccess.share(
resource=harps,
group=bikers,
privilege=PrivilegeCodes.CHANGE)
# old privileges didn't change
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=bikes,
group=bikers),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=bikes, grantor=george),
[bikers, harpers]))
record = GroupResourceProvenance.get_current_record(
resource=bikes, group=bikers)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.group, bikers)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# check new privileges: should be independent.
self.assertEqual(
GroupResourceProvenance.get_privilege(
resource=harps,
group=bikers),
PrivilegeCodes.CHANGE)
self.assertEqual(
GroupResourcePrivilege.get_privilege(
resource=harps,
group=bikers),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
GroupResourcePrivilege.get_undo_groups(
resource=harps,
grantor=george),
[bikers]))
record = GroupResourceProvenance.get_current_record(
resource=harps, group=bikers)
self.assertEqual(record.resource, harps)
self.assertEqual(record.group, bikers)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
check_provenance_synchronization(self)
def test_user_resource_provenance_undo_share(self):
george = self.george
alva = self.alva
bikes = self.bikes
harps = self.harps
john = self.john
# initial state: no undo to do.
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva) # no record
self.assertTrue(record is None)
# George grants Alva view privilege
george.uaccess.share(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.VIEW)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
# update creates a record
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.VIEW)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Roll back alva's privilege
george.uaccess.undo_share(resource=bikes, user=alva)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.NONE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[]))
# there is now a record
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.NONE)
self.assertEqual(record.undone, True)
self.assertEqual(record.grantor, None)
# George grants Alva privilege
george.uaccess.share(
resource=bikes,
user=alva,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=alva),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=alva)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, alva)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, george)
# Alva grants John privilege
alva.uaccess.share(
resource=bikes,
user=john,
privilege=PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourceProvenance.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.CHANGE)
self.assertEqual(
UserResourcePrivilege.get_privilege(
resource=bikes,
user=john),
PrivilegeCodes.CHANGE)
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=george),
[alva]))
self.assertTrue(
is_equal_to_as_set(
UserResourcePrivilege.get_undo_users(
resource=bikes,
grantor=alva),
[john]))
record = UserResourceProvenance.get_current_record(
resource=bikes, user=john)
self.assertEqual(record.resource, bikes)
self.assertEqual(record.user, john)
self.assertEqual(record.privilege, PrivilegeCodes.CHANGE)
self.assertEqual(record.undone, False)
self.assertEqual(record.grantor, alva)
# now George overrides Alva on John's privilege
george.uaccess.share(
resource=bikes,
user=john,
privilege=PrivilegeCodes.VIEW)
self.assertEqual(
UserResourceProvenance.get_privilege(
| |
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.167983,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 2.76118,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0452724,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.238248,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.221211,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.101163,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.163172,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0823639,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.346699,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0817877,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.3859,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0417915,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00424323,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0485797,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0313813,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0903713,
'Execution Unit/Register Files/Runtime Dynamic': 0.0356245,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.113665,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.265192,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.29114,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000401469,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000401469,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000358062,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000143197,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000450795,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00161179,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00354974,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0301677,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.91892,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0783593,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.102463,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 4.23057,
'Instruction Fetch Unit/Runtime Dynamic': 0.216151,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0503637,
'L2/Runtime Dynamic': 0.0149541,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.25674,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.514257,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0329871,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.032987,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.41251,
'Load Store Unit/Runtime Dynamic': 0.709925,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0813405,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.16268,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.028868,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0296095,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.119312,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0128898,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.325011,
'Memory Management Unit/Runtime Dynamic': 0.0424993,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 14.9938,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.109934,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00590207,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0495882,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming | |
<filename>src/MSongsDB/PythonSrc/DatasetCreation/dataset_creator.py
"""
<NAME> (2010) Columbia University
<EMAIL>
This code contains code used when creating the actual MSong dataset,
i.e. functions to create a song HDF5 at the right place, with proper
locks for multithreading.
This is part of the Million Song Dataset project from
LabROSA (Columbia University) and The Echo Nest.
Copyright 2010, <NAME>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import glob
import copy
import time
from Queue import Queue # from 'queue' in python 3.0
import shutil
import urllib2
import multiprocessing
import numpy.random as npr
try:
import hdf5_utils as HDF5
except ImportError:
pass # will be imported in command line
# pyechonest objects
import pyechonest
import pyechonest.config
pyechonest.config.CALL_TIMEOUT=30 # instead of 10 seconds
from pyechonest import artist as artistEN
from pyechonest import song as songEN
from pyechonest import track as trackEN
# Echo Nest catalog used when resolving tracks
CATALOG='7digital'
# Echo Nest API key: accept either spelling of the environment variable
try:
    _api_dev_key = os.environ['ECHO_NEST_API_KEY']
except KeyError:
    _api_dev_key = os.environ['ECHONEST_API_KEY']
# posgresql import and info for musicbrainz dataset
MBUSER='gordon'
# NOTE(review): placeholder credential -- must be supplied out-of-band, never committed
MBPASSWD='<PASSWORD>'
# HOW LONG DO WE WAIT WHEN SOMETHING GOES WRONG
SLEEPTIME=15 # in seconds
# total number of files in the dataset, should be 1M
TOTALNFILES=1000000
# CAL 500 artist and song txt file
CAL500='https://github.com/tb2332/MSongsDB/raw/master/PythonSrc/DatasetCreation/cal500_artist_song.txt'
# lock to access the set of tracks being treated
# getting a track on the lock means having put the EN id
# of that track in the set TRACKSET
# use: get_lock_song
#      release_lock_song
TRACKSET_LOCK = multiprocessing.Lock()
TRACKSET_CLOSED = multiprocessing.Value('b')
# NOTE(review): the assignment below rebinds the name and discards the shared
# Value created on the previous line, so TRACKSET_CLOSED is a plain per-process
# boolean; updates made in one process are not visible to others -- confirm
# whether cross-process visibility was intended.
TRACKSET_CLOSED = False # use to end the process, nothing can get a
                        # track lock if this is turn to True
CREATION_CLOSED = multiprocessing.Value('b')
# NOTE(review): same pattern as TRACKSET_CLOSED -- the shared Value is discarded.
CREATION_CLOSED = False # use to end all threads at a higher level
                        # than trackset closed, it is more a matter
                        # of printing and returning than the risk of
                        # creating a corrupted file
class my_trackset():
    """
    Set look-alike backed by a multiprocessing.Array so it can be shared
    across worker processes. It stores hash() values of the objects, with
    0 meaning 'free slot'; callers serialize access through TRACKSET_LOCK.
    NOTE(review): an object whose hash happens to be 0 would be
    indistinguishable from a free slot -- vanishingly unlikely for
    track id strings, but worth knowing.
    """
    def __init__(self, array_length=10):
        """Allocate shared storage; array_length = max tracks in flight at once."""
        # 'l' (C long) because 'i' is not wide enough for hash values
        self.ar = multiprocessing.Array('l', array_length)
        for k in range(len(self.ar)):
            self.ar[k] = 0
    def remove(self, obj):
        """Free the slot holding obj's hash; print an error if it is absent."""
        objh = hash(obj)
        for k in range(len(self.ar)):
            if self.ar[k] == objh:
                self.ar[k] = 0
                return
        # print() as a function so this file also runs under Python 3
        print('ERROR: my_trackset, tried to remove inexisting element, obj=%s and hash=%s' % (obj, objh))
    def add(self, obj):
        """Store obj's hash in the first free slot; abort the run if full."""
        objh = hash(obj)
        for k in range(len(self.ar)):
            if self.ar[k] == 0:
                self.ar[k] = objh
                return
        print('ERROR: shared memory trackset full!!! fake a keyboardinterrupt to stop')
        raise KeyboardInterrupt
    def __contains__(self, obj):
        return hash(obj) in self.ar
    def __str__(self):
        return str(list(self.ar))
# module-wide trackset instance, guarded by TRACKSET_LOCK in
# get_lock_track / release_lock_track
TRACKSET=my_trackset()
def close_creation():
    """
    Ask every worker to stop in a clean way.

    Only one thread receives the KeyboardInterrupt, so that thread calls
    this to propagate shutdown: first close the trackset, then raise the
    module-wide CREATION_CLOSED flag that tasks check before starting.
    """
    close_trackset()
    global CREATION_CLOSED
    CREATION_CLOSED = True
def close_trackset():
    """
    Flip the module-wide flag that forbids any further additions to
    TRACKSET once shutdown has begun.
    """
    global TRACKSET_CLOSED
    TRACKSET_CLOSED = True
def get_lock_track(trackid):
    """
    Try to claim the creation of one particular track file.

    Returns True if the caller now owns `trackid`, False if the trackset
    is closed for shutdown or another worker already owns the track.
    This is a blocking call (it waits on the shared lock).

    BUGFIX: the lock is now released in a `finally` clause, so an
    exception raised while it is held (e.g. TRACKSET.add raising
    KeyboardInterrupt when the set is full) can no longer leak the lock.
    """
    got_lock = TRACKSET_LOCK.acquire()  # blocking by default
    if not got_lock:
        print('ERROR: could not get TRACKSET_LOCK locked?')
        return False
    # from here on we hold the lock: release it no matter what happens
    try:
        if TRACKSET_CLOSED:
            print('RELEASED LOCK BECAUSE TRACKSET_CLOSED')
            return False
        if trackid in TRACKSET:
            return False  # someone else just got it
        TRACKSET.add(trackid)
        return True
    finally:
        TRACKSET_LOCK.release()
def release_lock_track(trackid):
    """
    Release the lock for the creation of one particular file.
    Should always return True, unless there is a problem.
    Releasing a song that you don't have the lock on is dangerous.

    BUGFIX: TRACKSET_LOCK is now released in a `finally` clause so an
    exception raised while it is held cannot leak the lock; also uses
    `trackid not in` and function-call print for Python 3 compatibility.
    """
    got_lock = TRACKSET_LOCK.acquire()  # blocking by default
    if not got_lock:
        print('ERROR: could not get TRACKSET_LOCK lock?')
        return False
    try:
        if TRACKSET_CLOSED:
            print('RELEASED LOCK BECAUSE TRACKSET_CLOSED, track=%s' % (trackid,))
            return False
        if trackid not in TRACKSET:
            print('WARNING: releasing a song you dont own, trackid=%s' % (trackid,))
            sys.stdout.flush()
            return False
        TRACKSET.remove(trackid)
        return True
    finally:
        TRACKSET_LOCK.release()
def path_from_trackid(trackid):
    """
    Map an Echo Nest track id to its relative path in the dataset.
    Characters 2, 3 and 4 of the id (0-based) name the three nested
    directories, e.g. TRABC1839DQL4H... -> A/B/C/TRABC1839DQL4H....h5
    """
    return os.path.join(trackid[2], trackid[3], trackid[4], trackid + '.h5')
def count_h5_files(basedir):
    """
    Return the number of .h5 (HDF5) files contained in all
    subdirectories of `basedir` (0 if the directory does not exist).

    On filesystem errors, prints a message and returns 0.
    BUGFIX: replaced the Python-2-only `except (IOError,OSError),e`
    and `print` statement with syntax valid on Python 2.6+ and 3.
    """
    cnt = 0
    try:
        for root, dirs, files in os.walk(basedir):
            cnt += len(glob.glob(os.path.join(root, '*.h5')))
        return cnt
    except (IOError, OSError) as e:
        print('ERROR: %s in count_h5_files, return 0' % (e,))
        return 0
def create_track_file(maindir,trackid,track,song,artist,mbconnect=None):
    """
    Main function to create an HDF5 song file.
    You got to have the track, song and artist already.
    If you pass an open connection to the musicbrainz database, we also use it.
    Returns True if song was created, False otherwise.
    False can mean another thread is already doing that song.
    We also check whether the path exists.
    INPUT
       maindir    - main directory of the Million Song Dataset
       trackid    - Echo Nest track id of the track object
       track      - pyechonest track object
       song       - pyechonest song object
       artist     - pyechonest artist object
       mbconnect  - open musicbrainz pg connection
    RETURN
       True if a track file was created, False otherwise

    BUGFIX: the cleanup handlers used `except NameError,ValueError:`,
    which (Python 2 semantics) caught only NameError and bound the
    exception to the name ValueError; both are now caught with a tuple.
    All `except X,e` / `print` statements were converted to syntax that
    is valid on Python 2.6+ and Python 3.
    """
    hdf5_path = os.path.join(maindir, path_from_trackid(trackid))
    if os.path.exists(hdf5_path):
        return False  # file already exists, no stress
    hdf5_path_tmp = hdf5_path + '_tmp'
    # lock the file
    got_lock = get_lock_track(trackid)
    if not got_lock:
        return False  # someone is taking care of that file
    if os.path.exists(hdf5_path):
        release_lock_track(trackid)
        return False  # got the lock too late, file exists
    # count errors (=tries), stop after 100 tries
    try_cnt = 0
    # create file and fill it
    try:
        while True:  # try until we make it work!
            try:
                # we try one more time
                try_cnt += 1
                target_dir = os.path.split(hdf5_path)[0]
                if not os.path.isdir(target_dir):
                    os.makedirs(target_dir)
                # check / delete tmp file if exist
                if os.path.isfile(hdf5_path_tmp):
                    os.remove(hdf5_path_tmp)
                # create tmp file and fill it
                HDF5.create_song_file(hdf5_path_tmp)
                h5 = HDF5.open_h5_file_append(hdf5_path_tmp)
                HDF5.fill_hdf5_from_artist(h5, artist)
                HDF5.fill_hdf5_from_song(h5, song)
                HDF5.fill_hdf5_from_track(h5, track)
                if mbconnect is not None:
                    HDF5.fill_hdf5_from_musicbrainz(h5, mbconnect)
                # TODO billboard? lastfm? ...?
                h5.close()
            except KeyboardInterrupt:
                close_creation()
                raise
            # we dont panic, delete file, wait and retry
            except Exception as e:
                # close hdf5 (h5 may not exist yet, or be already closed)
                try:
                    h5.close()
                except (NameError, ValueError):
                    pass
                # delete tmp path
                try:
                    os.remove(hdf5_path_tmp)
                except IOError:
                    pass
                # print and wait
                print('ERROR creating track: %s on %s (pid=%s)' % (trackid, time.ctime(), os.getpid()))
                print(e)
                if try_cnt < 100:
                    print('(try again in %s seconds)' % (SLEEPTIME,))
                    time.sleep(SLEEPTIME)
                    continue
                # give up
                else:
                    print('we give up after %s tries' % (try_cnt,))
                    release_lock_track(trackid)
                    return False
            # move tmp file to real file
            shutil.move(hdf5_path_tmp, hdf5_path)
            # release lock
            release_lock_track(trackid)
            break
    # KeyboardInterrupt, we delete file, clean things up
    except KeyboardInterrupt:
        # close hdf5 (h5 may not exist yet, or be already closed)
        try:
            h5.close()
        except (NameError, ValueError):
            pass
        # delete both tmp and real paths
        try:
            if os.path.isfile(hdf5_path_tmp):
                os.remove(hdf5_path_tmp)
            if os.path.isfile(hdf5_path):
                os.remove(hdf5_path)
        except IOError:
            pass
        raise
    except (IOError, OSError) as e:
        print('GOT Error %s deep deep in creation process, threading problem?' % (e,))
        raise
    # IF WE GET HERE WE'RE GOOD
    return True
def create_track_file_from_trackid(maindir,trackid,song,artist,mbconnect=None):
"""
Get a track from a track id and calls for its creation.
We assume we already have song and artist.
We can have a connection to musicbrainz as an option.
This function should create only one file!
GOAL: mostly, it checks if we have the track already created before
calling EchoNest API. It saves some calls/time
Also, handles some errors.
INPUT
maindir - MillionSongDataset root directory
trackid - Echo Nest track ID (string: TRABC.....)
song - pyechonest song object for that track
artist - pyechonest artist object for that song/track
mbconnect - open musicbrainz pg connection
RETURN
true if a song file is created, false otherwise
"""
# CLOSED CREATION?
if CREATION_CLOSED:
return False
# do we already have this track in the dataset?
track_path = os.path.join(maindir,path_from_trackid(trackid))
if os.path.exists(track_path):
return False
# get that track!
try_cnt = 0
while True:
try:
try_cnt += 1
track = trackEN.track_from_id(trackid)
break
except KeyboardInterrupt:
close_creation()
raise
except urllib2.HTTPError,e:
print type(e),':',e
| |
import math
from decimal import Decimal
from .. import tag
from . import Widget, ClosedWidget, FieldArg, FieldArgList, FieldArgTable, FieldArgDict
class Arrow1(ClosedWidget):
    """An svg arrow shape, fitting in a 100x100 space
    """

    # This class does not display any error messages
    display_errors = False

    # Polygon vertices of the arrow outline, in a 100x100 coordinate space
    _points = ((49,1), (50,1), (98,30), (98,32), (60,32), (60,98), (39,98), (39,32), (1,32), (1,30))

    arg_descriptions = {'fill':FieldArg("text", "none", jsonset=True),
                        'stroke':FieldArg("text", "black", jsonset=True),
                        'transform':FieldArg("text", "", jsonset=True),
                        'stroke_width':FieldArg("text", "1", jsonset=True)
                       }

    def __init__(self, name=None, brief='', **field_args):
        """
        fill: The fill colour, use none for no fill
        stroke: The outline edge colour
        stroke_width: The outline edge thickness
        transform: The svg transform object, use it to scale and rotate
        """
        ClosedWidget.__init__(self, name=name, tag_name="polygon", brief=brief, **field_args)

    def _build(self, page, ident_list, environ, call_data, lang):
        "Create the arrow: set the polygon points and any user-supplied attributes"
        # Build the SVG points attribute string, e.g. "49, 1 50, 1 ..."
        # (join is the idiomatic form of the previous += concatenation loop)
        attrib_points = "".join("%s, %s " % p for p in self._points)
        self.update_attribs({"points": attrib_points})
        # Set each optional attribute only when the field has a non-empty value;
        # note the field name 'stroke_width' maps to the svg attribute 'stroke-width'
        for field, attrib in (("fill", "fill"),
                              ("stroke", "stroke"),
                              ("transform", "transform"),
                              ("stroke_width", "stroke-width")):
            value = self.get_field_value(field)
            if value:
                self.update_attribs({attrib: value})

    @classmethod
    def description(cls):
        """Returns a text string to illustrate the widget"""
        return """
<polygon /> <!-- arrow shape with widget id, class widget_class and the given attributes -->"""
class Arrow2(ClosedWidget):
    """A slim svg arrow shape, fitting in a 50x200 space
    """

    # This class does not display any error messages
    display_errors = False

    # Polygon vertices of the arrow outline, in a 50x200 coordinate space
    _points = ((24,1), (25,1), (49,50), (49,52), (30,52), (30,198), (19,198), (19,52), (1,52), (1,50))

    arg_descriptions = {'fill':FieldArg("text", "none", jsonset=True),
                        'stroke':FieldArg("text", "black", jsonset=True),
                        'transform':FieldArg("text", "", jsonset=True),
                        'stroke_width':FieldArg("text", "1", jsonset=True)
                       }

    def __init__(self, name=None, brief='', **field_args):
        """
        fill: The fill colour, use none for no fill
        stroke: The outline edge colour
        stroke_width: The outline edge thickness
        transform: The svg transform object, use it to scale and rotate
        """
        ClosedWidget.__init__(self, name=name, tag_name="polygon", brief=brief, **field_args)

    def _build(self, page, ident_list, environ, call_data, lang):
        "Create the arrow: set the polygon points and any user-supplied attributes"
        # Build the SVG points attribute string, e.g. "24, 1 25, 1 ..."
        # (join is the idiomatic form of the previous += concatenation loop)
        attrib_points = "".join("%s, %s " % p for p in self._points)
        self.update_attribs({"points": attrib_points})
        # Set each optional attribute only when the field has a non-empty value;
        # note the field name 'stroke_width' maps to the svg attribute 'stroke-width'
        for field, attrib in (("fill", "fill"),
                              ("stroke", "stroke"),
                              ("transform", "transform"),
                              ("stroke_width", "stroke-width")):
            value = self.get_field_value(field)
            if value:
                self.update_attribs({attrib: value})

    @classmethod
    def description(cls):
        """Returns a text string to illustrate the widget"""
        return """
<polygon /> <!-- arrow shape with widget id, class widget_class and the given attributes -->"""
class Vertical1(Widget):
    """A g element holding a vertical meter: a white backing rectangle, a
       vertical scale with small and large tick marks, and an arrow which is
       translated to the position given by the 'measurement' field. Drawn in
       a 700 high x 250 wide space."""

    # This class does not display any error messages
    display_errors = False

    # Polygon vertices of the indicator arrow, positioned at the top of the
    # scale; _build translates it downwards to show the measurement
    _points = ((110,49), (110,50), (81,98), (79,98), (79,60), (13,60), (13,39), (79,39), (79,1), (81,1))

    arg_descriptions = {
                        'transform':FieldArg("text", "", jsonset=True),
                        'arrow_fill':FieldArg("text", "blue", jsonset=True),
                        'minimum':FieldArg("text", "0"),
                        'maximum':FieldArg("text", "100"),
                        'smallintervals':FieldArg("text", "10"),
                        'largeintervals':FieldArg("text", "20"),
                        'measurement':FieldArg("text", "50", jsonset=True),
                        'font_family':FieldArg("text", "arial")
                       }

    def _make_scale(self, minimum, maximum, smallintervals, largeintervals):
        "Returns two lists of Decimal values"
        # All arithmetic is done in Decimal so the tick values print exactly
        # as entered (no float artefacts in the svg text labels)
        minvalue = Decimal(minimum)
        maxvalue = Decimal(maximum)
        smallint = Decimal(smallintervals)
        largeint = Decimal(largeintervals)
        # start at the bottom of the scale with minvalue
        minscale = [minvalue]
        maxscale = [minvalue]
        mns = minvalue
        mxs = minvalue
        # maxscale: large-interval tick values; the last one may overshoot
        # maxvalue so the scale always covers the full requested range
        while mxs < maxvalue:
            mxs += largeint
            maxscale.append(mxs)
        # minscale: small-interval tick values, stopping at the last large tick
        while True:
            mns += smallint
            if mns > maxscale[-1]:
                break
            minscale.append(mns)
        return minscale, maxscale

    def __init__(self, name=None, brief='', **field_args):
        """A g element which holds a vertical scale and arrow, held in a 700 high x 250 wide space"""
        Widget.__init__(self, name=name, tag_name="g", brief=brief, **field_args)
        # self[0]: the white backing rectangle behind the scale
        self[0] = tag.ClosedPart(tag_name='rect', attribs={"x":"100",
                                                           "y":"1",
                                                           "rx":"2",
                                                           "ry":"2",
                                                           "width":"149",
                                                           "height":"698",
                                                           "fill":"white",
                                                           "stroke":"black",
                                                           "stroke-width":"1"})
        # self[1]: the indicator arrow polygon, built from _points
        arrow_points = ""
        for p in self._points:
            point = "%s, %s " % p
            arrow_points += point
        self[1] = tag.ClosedPart(tag_name='polygon', attribs={
                                                           "fill":"white",
                                                           "stroke":"black",
                                                           "stroke-width":"2",
                                                           "points":arrow_points })
        # self[2]: the vertical scale line, from y=50 (top) to y=650 (bottom)
        self[2] = tag.ClosedPart(tag_name='line', attribs={
                                                            'x1':'120',
                                                            'y1':'50',
                                                            'x2':'120',
                                                            'y2':'650',
                                                            'stroke':"black",
                                                            'stroke-width':"2" })

    def _build(self, page, ident_list, environ, call_data, lang):
        # Apply the optional svg transform to the enclosing g element
        if self.get_field_value("transform"):
            self.update_attribs({"transform":self.get_field_value("transform")})
        font_family = self.get_field_value("font_family")
        if not font_family:
            font_family = "arial"
        if self.get_field_value("arrow_fill"):
            self[1].update_attribs({"fill":self.get_field_value("arrow_fill")})
        # make the scale
        minscale, maxscale = self._make_scale(self.get_field_value("minimum"),
                                              self.get_field_value("maximum"),
                                              self.get_field_value("smallintervals"),
                                              self.get_field_value("largeintervals"))
        # small lines
        # NOTE(review): assumes more than one small tick, otherwise
        # (minitems-1) is zero and this divides by zero — confirm inputs
        minitems = len(minscale)
        scalemins = Decimal('600.0') / (minitems-1)
        # child parts 0,1,2 are already used; tick marks start at index 3
        n = 3
        for index, item in enumerate(minscale):
            # y position: index 0 is the bottom of the scale (y=650)
            vert = Decimal(650) - index*scalemins
            self[n] = tag.ClosedPart(tag_name='line', attribs={
                                                            'x1':'120',
                                                            'y1': str(vert),
                                                            'x2':'150',
                                                            'y2':str(vert),
                                                            'stroke':"black",
                                                            'stroke-width':"1" })
            n += 1
        # large lines
        maxitems = len(maxscale)
        scalemaxs = Decimal('600.0') / (maxitems-1)
        for index, item in enumerate(maxscale):
            vert = Decimal('650') - index*scalemaxs
            self[n] = tag.ClosedPart(tag_name='line', attribs={
                                                            'x1':'119',
                                                            'y1': str(vert),
                                                            'x2':'210',
                                                            'y2':str(vert),
                                                            'stroke':"black",
                                                            'stroke-width':"3" })
            n += 1
            # each large tick also gets a text label showing its value
            self[n] = tag.Part(tag_name='text', text=str(item), attribs={
                                                            'x':'160',
                                                            'y': str(vert-10),
                                                            'font-size': '20',
                                                            'font-family': font_family,
                                                            'stroke':"black",
                                                            'stroke-width':"1" })
            n += 1
        # now place arrow at the measurement point
        measurement = Decimal(self.get_field_value("measurement"))
        # kept on self so _build_js can send the scale limits to the client
        self._minvalue = maxscale[0]
        self._maxvalue = maxscale[-1]
        if measurement >= self._maxvalue:
            # at or above full scale: arrow stays at the top (no translate)
            return
        if measurement <= self._minvalue:
            # at or below minimum: arrow moved to the bottom of the scale
            self[1].update_attribs({"transform" : "translate(0, 600)"})
            return
        # linear interpolation of the measurement onto the 600-unit scale
        m = Decimal('600.0') - (measurement - self._minvalue)*600/(self._maxvalue-self._minvalue)
        self[1].update_attribs({"transform" : "translate(0, %s)" % (m,)})

    def _build_js(self, page, ident_list, environ, call_data, lang):
        """Sends scaling factor for mapping measurement to scale"""
        return self._make_fieldvalues(maxvalue=str(self._maxvalue), minvalue=str(self._minvalue))

    @classmethod
    def description(cls):
        """Returns a text string to illustrate the widget"""
        return """
<g> <!-- with widget id and class widget_class, and transform attribute if given -->
  <rect /> <!-- the scale rectangle -->
  <!-- lines and text dependent on the input scale values -->
  <polygon /> <!-- the arrow, with translate linked to the input value -->
</g>"""
class Traditional1(Widget):
# This class does not display any error messages
display_errors = False
_points = ((24,1), (25,1), (49,50), (49,52), (30,52), (30,198), (19,198), (19,52), (1,52), (1,50))
arg_descriptions = {
'transform':FieldArg("text", "translate(10,10)", jsonset=True),
'minimum':FieldArg("text", "0"),
'maximum':FieldArg("text", "100"),
'smallintervals':FieldArg("text", "10"),
'largeintervals':FieldArg("text", "20"),
'arrow_stroke':FieldArg("text", "grey", jsonset=True),
'measurement':FieldArg("text", "50", jsonset=True),
'font_family':FieldArg("text", "arial")
}
def _make_scale(self, minimum, maximum, smallintervals, largeintervals):
"Returns two lists of Decimal values"
minvalue = Decimal(minimum)
maxvalue = Decimal(maximum)
smallint = Decimal(smallintervals)
largeint = Decimal(largeintervals)
# start at the bottom of the scale with minvalue
minscale = [minvalue]
maxscale = [minvalue]
mns = minvalue
mxs = minvalue
while mxs < maxvalue:
mxs += largeint
maxscale.append(mxs)
while True:
mns += smallint
if mns > maxscale[-1]:
break
minscale.append(mns)
return minscale, maxscale
def __init__(self, name=None, brief='', **field_args):
"""A g element which holds the scale and arrow, held in a 400 high x 700 wide space"""
Widget.__init__(self, name=name, tag_name="g", brief=brief, **field_args)
# A path which holds the curved shape which will contain the meter
# the angle of the white backing is 140 degrees, this makes an
# angle of 20 degrees to the horizonta. So get this in radians
back_horizontal_angle = math.radians(20.0)
# The scale
# the angle of the scale, 120 degrees
scale_angle = 120
self._scale_angle = Decimal(scale_angle)
# the angle to the horizontal, 30 degrees, get it in radians
scale_horizontal_angle = math.radians((180-scale_angle)/2.0)
# radius of outside of white backing shape
r1 = 320
# radius of scale line
r2 = 230
self._scale_r = r2
# radius of inside of white backing shape
r3 = 200
# coordinates of rotation centre of the meter
cx = 350
cy = 350
self._cx = cx
self._cy = cy
# create white backing shape
left_out_x = cx - r1*math.cos(back_horizontal_angle)
left_out_y = cy - r1*math.sin(back_horizontal_angle)
right_out_x = cx + r1*math.cos(back_horizontal_angle)
right_out_y = left_out_y
right_in_x = cx + r3*math.cos(back_horizontal_angle)
right_in_y = cy - r3*math.sin(back_horizontal_angle)
left_in_x = cx - r3*math.cos(back_horizontal_angle)
left_in_y = right_in_y
path_data = """
M %s %s
A %s %s 0 0 1 %s %s
L %s %s
A %s %s 0 0 0 %s %s
Z""" % (left_out_x, left_out_y,
r1, r1, right_out_x, right_out_y,
right_in_x, right_in_y,
r3, r3, left_in_x, left_in_y)
self[0] = tag.ClosedPart(tag_name='path',
attribs={"fill":"white", "stroke":"black", "stroke-width":"1", "d":path_data})
# create the scale curve
# still centred on cx, cy
scale_left_x = cx - r2 * math.cos(scale_horizontal_angle)
scale_left_y = cy - r2 * math.sin(scale_horizontal_angle)
scale_right_x = cx + r2 * math.cos(scale_horizontal_angle)
scale_right_y = scale_left_y
# Draw the scale curve
scale_data = """
M %s %s
A %s %s 0 0 1 %s %s
""" % (scale_left_x, scale_left_y,
r2, r2, scale_right_x, scale_right_y,)
self[1] = tag.ClosedPart(tag_name='path',
attribs={ "fill":"none", "stroke":"black", "stroke-width":"2", "d":scale_data})
# The arrow points
arrow_points = ""
# move all points to the right and down,
# note 24.5 is x distance to arrow point
x_move = cx - 24.5
# moves arrow down to just touch the scale
y_move = cy - r2
for p in self._points:
point = "%s, %s " % (p[0] + x_move, p[1] + y_move)
arrow_points += point
self[2] = tag.ClosedPart(tag_name='polygon', attribs={
"fill":"black",
"stroke":"grey",
"stroke-width":"2",
"points":arrow_points })
# insert a circle at arrow hub, of radius 40
self[3] = | |
<reponame>thiippal/AI2D-RST
# -*- coding: utf-8 -*-
# Import modules
from .draw import *
def process_command(user_input, mode, diagram, current_graph):
"""
A function for handling generic commands coming in from multiple annotation
tasks.
Parameters:
user_input: A string containing the command entered by the user.
mode: A string defining the current annotation task, either 'layout',
'connectivity' or 'rst'.
diagram: A Diagram class object that is currently being annotated.
current_graph: The graph of a Diagram currently being annotated.
Returns:
Performs the requested action.
"""
# Extract command from the user input
command = user_input.split()[0]
# Save a screenshot of all annotations if requested
if command == 'acap':
# Get filename of current image (without extension)
fname = os.path.basename(diagram.image_filename).split('.')[0]
# Join filename to get a string
fname = ''.join(fname)
# Draw segmentation
segmentation = draw_layout(diagram.image_filename,
diagram.annotation,
height=720,
dpi=100)
# Draw grouping graph
try:
grouping = draw_graph(diagram.layout_graph, dpi=100, mode='layout')
except AttributeError:
# Print error message
print("[ERROR] Sorry, you have not annotated the {} graph yet."
.format(mode))
return
# Draw connectivity graph
try:
connectivity = draw_graph(diagram.connectivity_graph, dpi=100,
mode='connectivity')
except AttributeError:
# Print error message
print("[ERROR] Sorry, you have not annotated the {} graph yet."
.format(mode))
return
# Draw RST graph
try:
rst = draw_graph(diagram.rst_graph, dpi=100, mode='rst')
except AttributeError:
# Print error message
print("[ERROR] Sorry, you have not annotated the {} graph yet."
.format(mode))
return
# Stack images of all graphs side by side and on top of each other
seg_group = np.hstack([segmentation, grouping])
rst_group = np.hstack([connectivity, rst])
all_graphs = np.vstack([seg_group, rst_group])
# Write image on disk
cv2.imwrite("all_graphs_{}.png".format(fname), all_graphs)
# Print status message
print("[INFO] Saved screenshots for all graphs on disk for {}.png"
.format(fname))
return
# Save a screenshot if requested
if command == 'cap':
# Get filename of current image (without extension)
fname = os.path.basename(diagram.image_filename).split('.')[0]
# Join filename to get a string
fname = ''.join(fname)
# Render high-resolution versions of graph and segmentation
layout_hires = draw_layout(diagram.image_filename,
diagram.annotation,
height=720,
dpi=200)
diag_hires = draw_graph(current_graph, dpi=200,
mode=mode)
# Write image on disk
cv2.imwrite("segmentation_{}.png".format(fname), layout_hires)
cv2.imwrite("{}_{}.png".format(mode, fname), diag_hires)
# Print status message
print("[INFO] Saved separate screenshots on disk for {}.png".format(
fname
))
return
# Store a comment if requested
if command == 'comment':
# Show a prompt for comment
comment = input(prompts['comment'])
# Return the comment
diagram.comments.append(comment)
return
# If requested, mark the annotation as complete and remove isolates from the
# graph.
if command == 'done':
# Check the annotation task and mark complete as appropriate
if mode == 'layout':
# Set status to complete
diagram.group_complete = True
# Print status message
print("[INFO] Marking grouping as complete.")
if mode == 'connectivity':
# Set status to complete
diagram.connectivity_complete = True
print("[INFO] Marking connectivity as complete.")
if mode == 'rst':
# Set status to complete
diagram.rst_complete = True
print("[INFO] Marking rhetorical structure as complete.")
# Check if the current graph is frozen
if nx.is_frozen(current_graph):
# If the graph is frozen, unfreeze by making a copy
current_graph = current_graph.copy()
# Remove grouping edges from RST and connectivity annotation
if mode == 'rst' or mode == 'connectivity':
# Retrieve a list of edges in the graph
edge_bunch = list(current_graph.edges(data=True))
# Collect grouping edges from the edge list
try:
edge_bunch = [(u, v) for (u, v, d) in edge_bunch
if d['kind'] == 'grouping']
except KeyError:
pass
# Remove grouping edges from current graph
current_graph.remove_edges_from(edge_bunch)
# Find nodes without edges (isolates)
isolates = list(nx.isolates(current_graph))
# Remove isolates
current_graph.remove_nodes_from(isolates)
# Freeze the graph
nx.freeze(current_graph)
# Destroy any remaining windows
cv2.destroyAllWindows()
return
# If requested, exit the annotator immediately
if command == 'exit':
# Destroy any remaining windows
cv2.destroyAllWindows()
return
# Export a graphviz DOT graph if requested
if command == 'export':
# Get filename of current image (without extension)
fname = os.path.basename(diagram.image_filename).split('.')[0]
# Join filename to get a string
fname = ''.join(fname)
# Remove grouping edges from RST and connectivity annotation
if mode == 'rst' or mode == 'connectivity':
# Retrieve a list of edges in the graph
edge_bunch = list(current_graph.edges(data=True))
# Collect grouping edges from the edge list
try:
edge_bunch = [(u, v) for (u, v, d) in edge_bunch
if d['kind'] == 'grouping']
except KeyError:
pass
# Remove grouping edges from current graph
current_graph.remove_edges_from(edge_bunch)
# Find nodes without edges (isolates)
isolates = list(nx.isolates(current_graph))
# Remove isolates
current_graph.remove_nodes_from(isolates)
# Write DOT graph to disk
nx.nx_pydot.write_dot(current_graph,
'{}_{}.dot'.format(fname, mode))
# Print status message
print("[INFO] Saved a DOT graph for {}.png on disk.".format(fname))
return
# If requested, release all connections leading to a node
if command == 'free':
# Prepare input for validation
user_input = prepare_input(user_input, 1)
# Check input against current graph
valid = validate_input(user_input, current_graph)
# If the input is not valid, return
if not valid:
return
# If the input is valid, proceed
if valid:
# Convert user input to uppercase
user_input = [i.upper() for i in user_input]
# Retrieve the list of edges to delete
edge_bunch = list(current_graph.edges(user_input))
# Remove designated edges
current_graph.remove_edges_from(edge_bunch)
# Flag the graph for re-drawing
diagram.update = True
# If requested, print info on current annotation task
if command == 'info':
# Clear screen first
os.system('cls' if os.name == 'nt' else 'clear')
# Print information on layout commands
print(info[mode])
print(info['generic'])
# If requested, remove isolates from the current graph
if command == 'isolate':
# Find nodes without edges (isolates)
isolates = list(nx.isolates(current_graph))
# Remove isolates
current_graph.remove_nodes_from(isolates)
# Print status message
print("[INFO] Removing isolates from the graph as requested.")
# Flag the graph for re-drawing
diagram.update = True
return
# If requested, print macro-groups
if command == 'macrogroups':
# Print header for available macro-groups
print("---\nAvailable macro-groups and their aliases\n---")
# Print the available macro-groups and their aliases
for k, v in macro_groups.items():
print("{} (alias: {})".format(v, k))
# Print closing line
print("---")
# Get current macro-groups from the layout graph
mgroups = dict(nx.get_node_attributes(diagram.layout_graph,
'macro_group'))
# If more than one macro-group has been defined, print groups
if len(mgroups) > 0:
# Print header for current macro-groups
print("\nCurrent macro-groups \n---")
# Print the currently defined macro-groups
for k, v in mgroups.items():
print("{}: {}".format(k, v))
# Print closing line
print("---\n")
return
# If requested, move to the next graph
if command == 'next':
# Destroy any remaining windows
cv2.destroyAllWindows()
return
# If requested, removing grouping nodes
if command == 'ungroup':
# Retrieve a list of edges in the graph
edge_bunch = list(current_graph.edges(data=True))
# Collect grouping edges from the edge list
edge_bunch = [(u, v) for (u, v, d) in edge_bunch
if d['kind'] == 'grouping']
# Remove grouping edges from current graph
current_graph.remove_edges_from(edge_bunch)
# Flag the graph for re-drawing
diagram.update = True
return
# If requested, print available RST relations
if command == 'rels':
# Clear screen first
os.system('cls' if os.name == 'nt' else 'clear')
# Print header for available macro-groups
print("---\nAvailable RST relations and their aliases\n---")
# Loop over RST relations
for k, v in rst_relations.items():
# Print information on each RST relation
print("{} (alias: {}, type: {})".format(
v['name'],
k,
v['kind']))
# Print closing line
print("---")
# Generate a dictionary of RST relations present in the graph
relation_ix = get_node_dict(current_graph, kind='relation')
# Loop through current RST relations and rename for convenience.
relation_ix = {"R{}".format(i): k for i, (k, v) in
enumerate(relation_ix.items(), start=1)}
# If more than one macro-group has been defined, print groups
if len(relation_ix) > 0:
# Print header for current macro-groups
print("\nCurrent RST relations \n---")
# Print relations currently defined in the graph
for k, v in relation_ix.items():
print("{}: {}".format(k,
diagram.rst_graph.nodes[v]['rel_name']))
# Print closing line
print("---\n")
return
# If requested, reset the annotation
if command == 'reset':
# Reset layout graph if requested
if mode == 'layout':
# Unfreeze the reset graph and assign to layout_graph
diagram.layout_graph = create_graph(diagram.annotation,
edges=False,
arrowheads=False,
mode='layout'
)
# Reset connectivity graph if requested
if mode == 'connectivity':
# Create a new connectivity graph for the Diagram object
diagram.connectivity_graph = nx.MultiDiGraph()
# Update grouping information from the grouping graph to the new
# connectivity | |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from paddle.utils import gast
from paddle.fluid import unique_name
from paddle.fluid.dygraph.dygraph_to_static.utils import index_in_list
from paddle.fluid.dygraph.dygraph_to_static.break_continue_transformer import ForToWhileTransformer
from paddle.fluid.dygraph.dygraph_to_static.variable_trans_func import create_fill_constant_node
from paddle.fluid.dygraph.dygraph_to_static.utils import ast_to_source_code
__all__ = [
'RETURN_NO_VALUE_MAGIC_NUM', 'RETURN_NO_VALUE_VAR_NAME', 'ReturnTransformer'
]
# Constant for the name of the variable which stores the boolean state that we
# should return
RETURN_PREFIX = '__return'

# Constant for the name of the variable which stores the final return value
RETURN_VALUE_PREFIX = '__return_value'

# Constant for the name of variables to initialize the __return_value
RETURN_VALUE_INIT_NAME = '__return_value_init'

# Constant magic number representing returning no value. This constant aims to
# support returning various lengths of variables. Static graph must have fixed
# size of fetched output while dygraph can have flexible lengths of output, to
# solve it in dy2stat, we put float64 value with this magic number at Static
# graph as a place holder to indicate the returning placeholder means no value
# should return.
RETURN_NO_VALUE_MAGIC_NUM = 1.77113e+279
RETURN_NO_VALUE_VAR_NAME = "__no_value_return_var"
def get_return_size(return_node):
    """Count how many values a gast.Return statement returns.

    A bare 'return' yields 0, 'return x' yields 1, and 'return a, b, ...'
    yields the number of tuple elements.
    """
    assert isinstance(return_node, gast.Return), "Input is not gast.Return node"
    value = return_node.value
    if value is None:
        return 0
    return len(value.elts) if isinstance(value, gast.Tuple) else 1
class ReplaceReturnNoneTransformer(gast.NodeTransformer):
    """
    Rewrite every 'return None' statement into a bare 'return', because
    'None' cannot be a valid input in control flow. ReturnTransformer later
    appends a no-value placeholder to any bare 'Return'.
    """

    def __init__(self, root_node):
        # AST root whose Return nodes will be normalized in place.
        self.root = root_node

    def transform(self):
        """Walk the tree, normalizing every Return node."""
        self.visit(self.root)

    def visit_Return(self, node):
        """Drop the value from a Return node when it is literally None."""
        value = node.value
        # 'return None' can surface either as a Name node with id 'None'
        # or as a Constant node whose value is None.
        returns_none = (
            (isinstance(value, gast.Name) and value.id == 'None')
            or (isinstance(value, gast.Constant) and value.value is None))
        if returns_none:
            node.value = None
        return node
class ReturnAnalysisVisitor(gast.NodeVisitor):
    """
    Walks a gast tree and records, for every function, how many 'return'
    statements it contains and the largest number of values any single
    return statement yields.
    """

    def __init__(self, root_node):
        self.root = root_node
        # Stack of enclosing gast.FunctionDef nodes; Python allows functions
        # defined inside functions, so the innermost one sits at the top.
        self.function_def = []
        # gast.FunctionDef -> number of return statements inside it.
        self.count_return = {}
        # gast.FunctionDef -> maximum number of variables returned by any
        # single return statement of that function.
        self.max_return_length = {}
        self.visit(self.root)

    def visit_FunctionDef(self, node):
        # Register the function before descending so nested returns are
        # attributed to it, then pop it on the way out.
        self.function_def.append(node)
        self.count_return[node] = 0
        self.max_return_length[node] = 0
        self.generic_visit(node)
        self.function_def.pop()
        return node

    def visit_Return(self, node):
        assert len(
            self.function_def) > 0, "Found 'return' statement out of function."
        cur_func = self.function_def[-1]
        # visit_FunctionDef pre-registers cur_func; .get keeps the same
        # zero fallback the explicit membership tests provided.
        self.count_return[cur_func] = self.count_return.get(cur_func, 0) + 1
        return_length = get_return_size(node)
        self.max_return_length[cur_func] = max(
            self.max_return_length.get(cur_func, 0), return_length)
        self.generic_visit(node)

    def get_func_return_count(self, func_node):
        """Number of return statements found inside func_node."""
        return self.count_return[func_node]

    def get_func_max_return_length(self, func_node):
        """Maximum number of returned variables across func_node's returns."""
        return self.max_return_length[func_node]
class ReturnTransformer(gast.NodeTransformer):
"""
Transforms return statements into equivalent python statements containing
only one return statement at last. The basics idea is using a return value
variable to store the early return statements and boolean states with
if-else to skip the statements after the return.
"""
def __init__(self, wrapper_root):
self.wrapper_root = wrapper_root
self.root = wrapper_root.node
pre_transformer = ReplaceReturnNoneTransformer(self.root)
pre_transformer.transform()
self.ancestor_nodes = []
# The name of the variable which stores the final return value
# Mapping from FunctionDef node to string
self.return_value_name = {}
# The names of the variable which stores the boolean state that skip
# statments. Mapping from FunctionDef node to list
self.return_name = {}
# The names of the variable which is placeholder to handle various-
# length return. Mapping from FunctionDef node to list
self.return_no_value_name = {}
# A list of FunctionDef to store where the current function is.
self.function_def = []
self.pre_analysis = None
def transform(self):
self.visit(self.root)
def generic_visit(self, node):
# Because we change ancestor nodes during visit_Return, not current
# node, original generic_visit of NodeTransformer will visit node
# which may be deleted. To prevent that node being added into
# transformed AST, We self-write a generic_visit and visit
for field, value in gast.iter_fields(node):
if isinstance(value, list):
for item in value:
if isinstance(item, gast.AST):
self.visit(item)
elif isinstance(value, gast.AST):
self.visit(value)
def visit(self, node):
"""
Self-defined visit for appending ancestor
"""
self.ancestor_nodes.append(node)
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
ret = visitor(node)
self.ancestor_nodes.pop()
return ret
def visit_FunctionDef(self, node):
self.function_def.append(node)
self.return_value_name[node] = None
self.return_name[node] = []
self.return_no_value_name[node] = []
self.pre_analysis = ReturnAnalysisVisitor(node)
max_return_length = self.pre_analysis.get_func_max_return_length(node)
while self.pre_analysis.get_func_return_count(node) > 1:
self.generic_visit(node)
self.pre_analysis = ReturnAnalysisVisitor(node)
if max_return_length == 0:
self.function_def.pop()
return node
# Prepend initialization of final return and append final return statement
value_name = self.return_value_name[node]
if value_name is not None:
node.body.append(
gast.Return(value=gast.Name(
id=value_name,
ctx=gast.Load(),
annotation=None,
type_comment=None)))
init_names = [
unique_name.generate(RETURN_VALUE_INIT_NAME)
for i in range(max_return_length)
]
assign_zero_nodes = [
create_fill_constant_node(iname, 0.0) for iname in init_names
]
if len(init_names) == 1:
return_value_nodes = gast.Name(
id=init_names[0],
ctx=gast.Load(),
annotation=None,
type_comment=None)
else:
# We need to initialize return value as a tuple because control
# flow requires some inputs or outputs have same structure
return_value_nodes = gast.Tuple(
elts=[
gast.Name(
id=iname,
ctx=gast.Load(),
annotation=None,
type_comment=None) for iname in init_names
],
ctx=gast.Load())
assign_return_value_node = gast.Assign(
targets=[
gast.Name(
id=value_name,
ctx=gast.Store(),
annotation=None,
type_comment=None)
],
value=return_value_nodes)
node.body.insert(0, assign_return_value_node)
node.body[:0] = assign_zero_nodes
# Prepend no value placeholders
for name in self.return_no_value_name[node]:
assign_no_value_node = create_fill_constant_node(
name, RETURN_NO_VALUE_MAGIC_NUM)
node.body.insert(0, assign_no_value_node)
self.function_def.pop()
return node
def visit_Return(self, node):
cur_func_node = self.function_def[-1]
return_name = unique_name.generate(RETURN_PREFIX)
self.return_name[cur_func_node].append(return_name)
max_return_length = self.pre_analysis.get_func_max_return_length(
cur_func_node)
parent_node_of_return = self.ancestor_nodes[-2]
for ancestor_index in reversed(range(len(self.ancestor_nodes) - 1)):
ancestor = self.ancestor_nodes[ancestor_index]
cur_node = self.ancestor_nodes[ancestor_index + 1]
if hasattr(ancestor,
"body") and index_in_list(ancestor.body, cur_node) != -1:
if cur_node == node:
self._replace_return_in_stmt_list(
ancestor.body, cur_node, return_name, max_return_length,
parent_node_of_return)
self._replace_after_node_to_if_in_stmt_list(
ancestor.body, cur_node, return_name, parent_node_of_return)
elif hasattr(ancestor, "orelse") and index_in_list(ancestor.orelse,
cur_node) != -1:
if cur_node == node:
self._replace_return_in_stmt_list(
ancestor.orelse, cur_node, return_name,
max_return_length, parent_node_of_return)
self._replace_after_node_to_if_in_stmt_list(
ancestor.orelse, cur_node, return_name,
parent_node_of_return)
# If return node in while loop, add `not return_name` in gast.While.test
if isinstance(ancestor, gast.While):
cond_var_node = gast.UnaryOp(
op=gast.Not(),
operand=gast.Name(
id=return_name,
ctx=gast.Load(),
annotation=None,
type_comment=None))
ancestor.test = gast.BoolOp(
op=gast.And(), values=[ancestor.test, cond_var_node])
continue
# If return node in for loop, add `not return_name` in gast.While.test
if isinstance(ancestor, gast.For):
cond_var_node = gast.UnaryOp(
op=gast.Not(),
operand=gast.Name(
id=return_name,
ctx=gast.Load(),
annotation=None,
type_comment=None))
parent_node = self.ancestor_nodes[ancestor_index - 1]
for_to_while = ForToWhileTransformer(parent_node, ancestor,
cond_var_node)
new_stmts = for_to_while.transform()
while_node = new_stmts[-1]
self.ancestor_nodes[ancestor_index] = while_node
if ancestor == cur_func_node:
break
# return_node is replaced so we shouldn't return here
    def _replace_return_in_stmt_list(self, stmt_list, return_node, return_name,
                                     max_return_length, parent_node_of_return):
        """Replace ``return_node`` inside ``stmt_list`` with assignments.

        The return statement and every statement after it in ``stmt_list``
        (unreachable code) are replaced by:
        1. optionally, an assignment setting the boolean flag
           ``return_name`` guarded by the parent ``if`` test, and
        2. an assignment storing the returned value(s) into the
           function-wide return-value variable, padded with "no value"
           placeholder names up to ``max_return_length`` slots.

        Returns True when the replacement happened, False when
        ``return_node`` is not directly contained in ``stmt_list``.
        """
        assert max_return_length >= 0, "Input illegal max_return_length"
        i = index_in_list(stmt_list, return_node)
        if i == -1:
            return False

        assign_nodes = []
        # Here assume that the parent node of return is gast.If
        if isinstance(parent_node_of_return, gast.If):
            # Prepend control flow boolean nodes such as '__return@1 = True'
            node_str = "{} = _jst.create_bool_as_type({}, True)".format(
                return_name,
                ast_to_source_code(parent_node_of_return.test).strip())

            assign_true_node = gast.parse(node_str).body[0]
            assign_nodes.append(assign_true_node)

        cur_func_node = self.function_def[-1]
        return_length = get_return_size(return_node)
        if return_length < max_return_length:
            # In this case we should append RETURN_NO_VALUE placeholder
            #
            # max_return_length must be >= 1 here because return_length will be
            # 0 at least.
            if self.return_value_name[cur_func_node] is None:
                self.return_value_name[cur_func_node] = unique_name.generate(
                    RETURN_VALUE_PREFIX)

            # One generated placeholder name per missing return slot.
            no_value_names = [
                unique_name.generate(RETURN_NO_VALUE_VAR_NAME)
                for j in range(max_return_length - return_length)
            ]
            self.return_no_value_name[cur_func_node].extend(no_value_names)

            # Handle tuple/non-tuple case
            if max_return_length == 1:
                # Single slot: assign the lone placeholder name directly.
                assign_nodes.append(
                    gast.Assign(
                        targets=[
                            gast.Name(
                                id=self.return_value_name[cur_func_node],
                                ctx=gast.Store(),
                                annotation=None,
                                type_comment=None)
                        ],
                        value=gast.Name(
                            id=no_value_names[0],
                            ctx=gast.Load(),
                            annotation=None,
                            type_comment=None)))
            else:
                # max_return_length > 1 which means we should assign tuple
                fill_tuple = [
                    gast.Name(
                        id=n,
                        ctx=gast.Load(),
                        annotation=None,
                        type_comment=None) for n in no_value_names
                ]
                if return_node.value is not None:
                    # Real returned value(s) come first; placeholders pad the tail.
                    if isinstance(return_node.value, gast.Tuple):
                        fill_tuple[:0] = return_node.value.elts
                    else:
                        fill_tuple.insert(0, return_node.value)

                assign_nodes.append(
                    gast.Assign(
                        targets=[
                            gast.Name(
                                id=self.return_value_name[cur_func_node],
                                ctx=gast.Store(),
                                annotation=None,
                                type_comment=None)
                        ],
                        value=gast.Tuple(
                            elts=fill_tuple, ctx=gast.Load())))
        else:
            # In this case we should NOT append RETURN_NO_VALUE placeholder
            if return_node.value is not None:
                cur_func_node = self.function_def[-1]
                if self.return_value_name[cur_func_node] is None:
                    self.return_value_name[
                        cur_func_node] = unique_name.generate(
                            RETURN_VALUE_PREFIX)

                assign_nodes.append(
                    gast.Assign(
                        targets=[
                            gast.Name(
                                id=self.return_value_name[cur_func_node],
                                ctx=gast.Store(),
                                annotation=None,
                                type_comment=None)
                        ],
                        value=return_node.value))

        # Drop the return and all (unreachable) statements after it,
        # substituting the generated assignments.
        stmt_list[i:] = assign_nodes
        return True
def _replace_after_node_to_if_in_stmt_list(
self, stmt_list, node, return_name, parent_node_of_return):
i | |
ntuniprot(RecName_Full='Plasma membrane-associated coenzyme Q6 reductase PGA3'),
'Q12748' : ntuniprot(RecName_Full='Inner kinetochore subunit CTF3 {ECO:0000305}'),
'Q12749' : ntuniprot(RecName_Full='Structural maintenance of chromosomes protein 6'),
'Q12751' : ntuniprot(RecName_Full='RNA polymerase II assembly factor RTP1 {ECO:0000305|PubMed:23438601}'),
'Q12753' : ntuniprot(RecName_Full='Transcriptional activator HAA1'),
'Q12754' : ntuniprot(RecName_Full='Ribosomal RNA-processing protein 12'),
'Q2V2P0' : ntuniprot(RecName_Full='Uncharacterized protein YPR145C-A'),
'Q2V2P1' : ntuniprot(RecName_Full='Coiled-coil domain-containing protein YLR146W-A'),
'Q2V2P2' : ntuniprot(RecName_Full='Uncharacterized protein YKL065W-A'),
'Q2V2P3' : ntuniprot(RecName_Full='Uncharacterized protein YKL023C-A'),
'Q2V2P4' : ntuniprot(RecName_Full='Uncharacterized protein YIL156W-B'),
'Q2V2P5' : ntuniprot(RecName_Full='Uncharacterized protein YIL102C-A'),
'Q2V2P6' : ntuniprot(RecName_Full='Uncharacterized protein YGL194C-A'),
'Q2V2P7' : ntuniprot(RecName_Full='Uncharacterized protein YDR461C-A'),
'Q2V2P8' : ntuniprot(RecName_Full='Inner kinetochore subunit WIP1 {ECO:0000305}'),
'Q2V2P9' : ntuniprot(RecName_Full='Uncharacterized protein YDR119W-A'),
'Q2V2Q0' : ntuniprot(RecName_Full='Putative uncharacterized protein YDL007C-A'),
'Q2V2Q1' : ntuniprot(RecName_Full='Antisense of depressing factor protein 1'),
'Q2V2Q2' : ntuniprot(RecName_Full='Uncharacterized protein YCL048W-A'),
'Q2V2Q3' : ntuniprot(RecName_Full='Putative uncharacterized protein YBR201C-A'),
'Q3E6R4' : ntuniprot(RecName_Full='Uncharacterized protein YDR524C-B'),
'Q3E6R5' : ntuniprot(RecName_Full='Uncharacterized mitochondrial outer membrane protein YDR381C-A'),
'Q3E705' : ntuniprot(RecName_Full='rRNA-processing protein EFG1'),
'Q3E731' : ntuniprot(RecName_Full='Cytochrome c oxidase assembly protein COX19'),
'Q3E732' : ntuniprot(RecName_Full='Putative uncharacterized protein YLR264C-A'),
'Q3E735' : ntuniprot(RecName_Full='Uncharacterized membrane protein YOR034C-A'),
'Q3E736' : ntuniprot(RecName_Full='Uncharacterized protein YOR192C-C'),
'Q3E737' : ntuniprot(RecName_Full='Uncharacterized protein YJL047C-A'),
'Q3E739' : ntuniprot(RecName_Full='Uncharacterized protein YIR021W-A'),
'Q3E740' : ntuniprot(RecName_Full='Uncharacterized protein YGL258W-A'),
'Q3E741' : ntuniprot(RecName_Full='Putative uncharacterized protein YAL037C-A'),
'Q3E742' : ntuniprot(RecName_Full='Uncharacterized protein YLR412C-A'),
'Q3E743' : ntuniprot(RecName_Full='Uncharacterized protein YJR112W-A'),
'Q3E744' : ntuniprot(RecName_Full='Uncharacterized protein YGR161W-C'),
'Q3E746' : ntuniprot(RecName_Full='Uncharacterized protein YHR086W-A'),
'Q3E747' : ntuniprot(RecName_Full='Uncharacterized protein YLR363W-A'),
'Q3E750' : ntuniprot(RecName_Full='Uncharacterized protein YGL041C-B'),
'Q3E751' : ntuniprot(RecName_Full='Uncharacterized protein YPL119C-A'),
'Q3E752' : ntuniprot(RecName_Full='Sporulation protein 24 {ECO:0000303|PubMed:25127041}'),
'Q3E754' : ntuniprot(RecName_Full='40S ribosomal protein S21-B {ECO:0000303|PubMed:9559554}'),
'Q3E755' : ntuniprot(RecName_Full='Uncharacterized protein YBR200W-A'),
'Q3E756' : ntuniprot(RecName_Full='UPF0768 protein YBL029C-A'),
'Q3E757' : ntuniprot(RecName_Full='60S ribosomal protein L11-B {ECO:0000303|PubMed:9559554}'),
'Q3E758' : ntuniprot(RecName_Full='Uncharacterized protein YHL048C-A'),
'Q3E760' : ntuniprot(RecName_Full='Uncharacterized protein YMR030W-A'),
'Q3E762' : ntuniprot(RecName_Full='Putative uncharacterized protein YBR230W-A'),
'Q3E763' : ntuniprot(RecName_Full='Uncharacterized protein YDR246W-A'),
'Q3E764' : ntuniprot(RecName_Full='Translation machinery-associated protein 7'),
'Q3E765' : ntuniprot(RecName_Full='Uncharacterized protein YKL183C-A'),
'Q3E766' : ntuniprot(RecName_Full='Uncharacterized protein YMR175W-A'),
'Q3E767' : ntuniprot(RecName_Full='Uncharacterized protein YNL067W-B'),
'Q3E769' : ntuniprot(RecName_Full='Uncharacterized protein YOL159C-A'),
'Q3E770' : ntuniprot(RecName_Full='Seripauperin-9'),
'Q3E771' : ntuniprot(RecName_Full='Uncharacterized protein YLR285C-A'),
'Q3E772' : ntuniprot(RecName_Full='Protein LSO2 {ECO:0000305}'),
'Q3E774' : ntuniprot(RecName_Full='Uncharacterized protein YDL159W-A'),
'Q3E775' : ntuniprot(RecName_Full='Uncharacterized protein YJR151W-A'),
'Q3E776' : ntuniprot(RecName_Full='Uncharacterized protein YBR255C-A'),
'Q3E778' : ntuniprot(RecName_Full='Uncharacterized protein YBR196C-B'),
'Q3E781' : ntuniprot(RecName_Full='Uncharacterized protein YBR221W-A'),
'Q3E782' : ntuniprot(RecName_Full='Uncharacterized protein YMR247W-A'),
'Q3E784' : ntuniprot(RecName_Full='SCOCO-like protein 1'),
'Q3E785' : ntuniprot(RecName_Full='Succinate dehydrogenase assembly factor 1, mitochondrial {ECO:0000303|PubMed:19465911}'),
'Q3E786' : ntuniprot(RecName_Full='Uncharacterized protein YGR240C-A'),
'Q3E787' : ntuniprot(RecName_Full='Putative uncharacterized protein YCR095W-A'),
'Q3E789' : ntuniprot(RecName_Full='Uncharacterized protein YDR169C-A'),
'Q3E790' : ntuniprot(RecName_Full='Serine palmitoyltransferase-regulating protein TSC3'),
'Q3E791' : ntuniprot(RecName_Full='Uncharacterized protein YAL063C-A'),
'Q3E792' : ntuniprot(RecName_Full='40S ribosomal protein S25-A {ECO:0000303|PubMed:9559554}'),
'Q3E793' : ntuniprot(RecName_Full='BolA-like protein 1 {ECO:0000305}'),
'Q3E794' : ntuniprot(RecName_Full='Uncharacterized protein YBR072C-A'),
'Q3E795' : ntuniprot(RecName_Full='Uncharacterized protein YLR361C-A'),
'Q3E796' : ntuniprot(RecName_Full='Uncharacterized protein YDR182W-A'),
'Q3E798' : ntuniprot(RecName_Full='Uncharacterized protein YLR099W-A'),
'Q3E7A0' : ntuniprot(RecName_Full='Uncharacterized protein YKL106C-A'),
'Q3E7A2' : ntuniprot(RecName_Full='Uncharacterized protein YER078W-A'),
'Q3E7A3' : ntuniprot(RecName_Full='Uncharacterized protein YJL133C-A'),
'Q3E7A4' : ntuniprot(RecName_Full='COX assembly mitochondrial protein 2'),
'Q3E7A5' : ntuniprot(RecName_Full='Uncharacterized protein YOL164W-A'),
'Q3E7A6' : ntuniprot(RecName_Full='Uncharacterized protein YML007C-A, mitochondrial'),
'Q3E7A7' : ntuniprot(RecName_Full='Uncharacterized protein YKL018C-A'),
'Q3E7A8' : ntuniprot(RecName_Full='Uncharacterized protein YNL162W-A'),
'Q3E7A9' : ntuniprot(RecName_Full='Cx9C motif-containing protein 4, mitochondrial'),
'Q3E7B0' : ntuniprot(RecName_Full='Uncharacterized protein YER053C-A'),
'Q3E7B2' : ntuniprot(RecName_Full='Cytochrome c oxidase assembly factor 3, mitochondrial'),
'Q3E7B3' : ntuniprot(RecName_Full='Uncharacterized protein YPR108W-A'),
'Q3E7B4' : ntuniprot(RecName_Full='Uncharacterized protein YPL038W-A'),
'Q3E7B5' : ntuniprot(RecName_Full='Uncharacterized protein YMR230W-A'),
'Q3E7B6' : ntuniprot(RecName_Full='V-type proton ATPase subunit e'),
'Q3E7B7' : ntuniprot(RecName_Full='SERF-like protein YDL085C-A'),
'Q3E7B9' : ntuniprot(RecName_Full='Uncharacterized protein YOR008C-A'),
'Q3E7C1' : ntuniprot(RecName_Full='General transcription and DNA repair factor IIH subunit TFB5'),
'Q3E7X8' : ntuniprot(RecName_Full="Y' element ATP-dependent helicase YEL077C"),
'Q3E7X9' : ntuniprot(RecName_Full='40S ribosomal protein S28-A {ECO:0000303|PubMed:9559554}'),
'Q3E7Y3' : ntuniprot(RecName_Full='40S ribosomal protein S22-B {ECO:0000303|PubMed:9559554}'),
'Q3E7Y4' : ntuniprot(RecName_Full='Putative uncharacterized helicase-like protein YBL112C'),
'Q3E7Y5' : ntuniprot(RecName_Full='Uncharacterized helicase-like protein YBL111C'),
'Q3E7Y6' : ntuniprot(RecName_Full='N-terminal-borealin-like protein'),
'Q3E7Y7' : ntuniprot(RecName_Full='Uncharacterized protein YOR293C-A'),
'Q3E7Y8' : ntuniprot(RecName_Full='Uncharacterized protein YOL155W-A'),
'Q3E7Y9' : ntuniprot(RecName_Full='Uncharacterized protein YOL097W-A'),
'Q3E7Z0' : ntuniprot(RecName_Full='Uncharacterized protein YNL277W-A'),
'Q3E7Z1' : ntuniprot(RecName_Full='Uncharacterized protein YNL146C-A'),
'Q3E7Z2' : ntuniprot(RecName_Full='Uncharacterized protein YNL042W-B'),
'Q3E7Z3' : ntuniprot(RecName_Full='Uncharacterized protein YIR018C-A'),
'Q3E7Z4' : ntuniprot(RecName_Full='Uncharacterized protein YIL046W-A'),
'Q3E7Z5' : ntuniprot(RecName_Full='Uncharacterized protein YIL002W-A'),
'Q3E7Z6' : ntuniprot(RecName_Full='Uncharacterized protein YHL015W-A'),
'Q3E7Z7' : ntuniprot(RecName_Full='Uncharacterized protein YDR003W-A'),
'Q3E7Z8' : ntuniprot(RecName_Full='Uncharacterized protein YCR024C-B'),
'Q3E7Z9' : ntuniprot(RecName_Full='Uncharacterized protein YOL038C-A'),
'Q3E801' : ntuniprot(RecName_Full='Uncharacterized protein YJL136W-A'),
'Q3E802' : ntuniprot(RecName_Full='Uncharacterized protein YGL006W-A'),
'Q3E804' : ntuniprot(RecName_Full='Uncharacterized protein YOR381W-A'),
'Q3E805' : ntuniprot(RecName_Full='Uncharacterized protein YOR376W-A'),
'Q3E806' : ntuniprot(RecName_Full='Uncharacterized protein YOR316C-A'),
'Q3E807' : ntuniprot(RecName_Full='Uncharacterized protein YOR011W-A'),
'Q3E808' : ntuniprot(RecName_Full='2-deoxy-glucose resistant protein 1, mitochondrial'),
'Q3E809' : ntuniprot(RecName_Full='Uncharacterized protein YML054C-A'),
'Q3E810' : ntuniprot(RecName_Full='Uncharacterized protein YLR342W-A'),
'Q3E811' : ntuniprot(RecName_Full='Regulator of rDNA transcription protein 15'),
'Q3E813' : ntuniprot(RecName_Full='Uncharacterized protein YLR154C-G'),
'Q3E814' : ntuniprot(RecName_Full='Uncharacterized protein YLL006W-A'),
'Q3E815' : ntuniprot(RecName_Full='Uncharacterized membrane protein YHR175W-A'),
'Q3E816' : ntuniprot(RecName_Full='Uncharacterized protein YGR121W-A'),
'Q3E817' : ntuniprot(RecName_Full='Uncharacterized protein YFL041W-A'),
'Q3E818' : ntuniprot(RecName_Full='Uncharacterized protein YDR194W-A'),
'Q3E819' : ntuniprot(RecName_Full='Uncharacterized protein YCR108C'),
'Q3E820' : ntuniprot(RecName_Full='Uncharacterized protein YBR196C-A'),
'Q3E821' : ntuniprot(RecName_Full='Uncharacterized protein YBL008W-A'),
'Q3E823' : ntuniprot(RecName_Full='Cytochrome c oxidase assembly factor 2'),
'Q3E824' : ntuniprot(RecName_Full='Uncharacterized protein YOR020W-A'),
'Q3E825' : ntuniprot(RecName_Full='Uncharacterized protein YLR307C-A'),
'Q3E826' : ntuniprot(RecName_Full='Uncharacterized protein YKL068W-A'),
'Q3E827' : ntuniprot(RecName_Full='Protein LSO1 {ECO:0000305}'),
'Q3E828' : ntuniprot(RecName_Full='UPF0618 protein YJL127C-B'),
'Q3E829' : ntuniprot(RecName_Full='Inner kinetochore subunit MHF2 {ECO:0000305}'),
'Q3E830' : ntuniprot(RecName_Full='Uncharacterized protein YCR075W-A'),
'Q3E832' : ntuniprot(RecName_Full='Uncharacterized protein YOR072W-B'),
'Q3E833' : ntuniprot(RecName_Full='EKC/KEOPS complex subunit PCC1'),
'Q3E834' : ntuniprot(RecName_Full='Protein transport protein YOS1'),
'Q3E835' : ntuniprot(RecName_Full='Inner kinetochore subunit MHF1 {ECO:0000305}'),
'Q3E837' : ntuniprot(RecName_Full='Uncharacterized protein YJL052C-A'),
'Q3E838' : ntuniprot(RecName_Full='Uncharacterized protein YFR012W-A'),
'Q3E840' : ntuniprot(RecName_Full='Diphthamide biosynthesis protein 3'),
'Q3E841' : ntuniprot(RecName_Full='Uncharacterized protein YNR034W-A'),
'Q3E842' : ntuniprot(RecName_Full='Uncharacterized endoplasmic reticulum membrane protein YMR122W-A'),
'Q3E843' : ntuniprot(RecName_Full='Uncharacterized protein YMR158C-A'),
'Q3E846' : ntuniprot(RecName_Full='Cytochrome c oxidase assembly factor 6 {ECO:0000303|PubMed:22984289}'),
'Q45U18' : ntuniprot(RecName_Full='Uncharacterized protein YIL134C-A'),
'Q45U48' : ntuniprot(RecName_Full='Uncharacterized protein YGR035W-A'),
'Q6B2U8' : ntuniprot(RecName_Full='Topoisomerase I damage affected protein 8'),
'Q6Q546' : ntuniprot(RecName_Full='Ubiquitin-like modifier HUB1'),
'Q6Q547' : ntuniprot(RecName_Full='H/ACA ribonucleoprotein complex subunit NOP10'),
'Q6Q560' : ntuniprot(RecName_Full='Protein ISD11'),
'Q6Q595' : ntuniprot(RecName_Full='Vesicle-associated membrane protein-associated protein SCS22'),
'Q6Q5H1' : ntuniprot(RecName_Full='Transposon Ty1-PR3 Gag polyprotein'),
'Q6Q5K6' : ntuniprot(RecName_Full='MEC1-mediated checkpoint protein HUG1'),
'Q6Q5P6' : ntuniprot(RecName_Full='Transposon Ty4-H Gag polyprotein'),
'Q6Q5X2' : ntuniprot(RecName_Full='Cysteine-rich and transmembrane domain-containing protein YDR034W-B'),
'Q6WNK7' : ntuniprot(RecName_Full='Transcription and mRNA export factor SUS1 {ECO:0000255|HAMAP-Rule:MF_03046}'),
'Q7LHG5' : ntuniprot(RecName_Full='Transposon Ty3-I Gag-Pol polyprotein'),
'Q7M4S9' : ntuniprot(RecName_Full='Uncharacterized protein YBL113C'),
'Q86ZR7' : ntuniprot(RecName_Full='Uncharacterized hydrolase YKL033W-A'),
'Q8J0M4' : ntuniprot(RecName_Full='UPF0357 protein YCL012C'),
'Q8TGJ0' : ntuniprot(RecName_Full='Uncharacterized protein YOR394C-A'),
'Q8TGJ1' : ntuniprot(RecName_Full='UPF0320 protein YOL166W-A'),
'Q8TGJ2' : ntuniprot(RecName_Full='Putative UPF0377 protein YNR075C-A'),
'Q8TGJ3' : ntuniprot(RecName_Full='Protein kish'),
'Q8TGJ7' : ntuniprot(RecName_Full='Uncharacterized protein YLL066W-B'),
'Q8TGK0' : ntuniprot(RecName_Full='Putative uncharacterized protein YHR214C-E'),
'Q8TGK1' : ntuniprot(RecName_Full='Uncharacterized protein YHR213W-B'),
'Q8TGK4' : ntuniprot(RecName_Full='Uncharacterized protein YBR298C-A'),
'Q8TGK6' : ntuniprot(RecName_Full='Putative UPF0377 protein YAL067W-A'),
'Q8TGM6' : ntuniprot(RecName_Full='Protein TAR1'),
'Q8TGN3' : ntuniprot(RecName_Full='Uncharacterized protein YJL077W-A'),
'Q8TGN5' : ntuniprot(RecName_Full='Uncharacterized protein YIL105W-A'),
'Q8TGN9' : ntuniprot(RecName_Full='Protein NAG1'),
'Q8TGQ7' : ntuniprot(RecName_Full='Uncharacterized protein YPR159C-A'),
'Q8TGR9' : ntuniprot(RecName_Full='Uncharacterized protein YPL152W-A'),
'Q8TGS0' : ntuniprot(RecName_Full='Uncharacterized protein YOR161C-C'),
'Q8TGS1' : ntuniprot(RecName_Full='Uncharacterized protein YOR032W-A'),
'Q8TGS2' : ntuniprot(RecName_Full='Uncharacterized protein YOL019W-A'),
'Q8TGS4' : ntuniprot(RecName_Full='Uncharacterized protein YMR315W-A'),
'Q8TGS5' : ntuniprot(RecName_Full='Uncharacterized protein YMR272W-B'),
'Q8TGS6' : ntuniprot(RecName_Full='Uncharacterized protein YMR242W-A'),
'Q8TGS7' : ntuniprot(RecName_Full='Uncharacterized protein YMR182W-A'),
'Q8TGS8' : ntuniprot(RecName_Full='Uncharacterized protein YMR105W-A'),
'Q8TGS9' : ntuniprot(RecName_Full='Uncharacterized protein YMR001C-A'),
'Q8TGT0' : ntuniprot(RecName_Full='Uncharacterized protein YML100W-A'),
'Q8TGT1' : ntuniprot(RecName_Full='Uncharacterized protein YLR406C-A'),
'Q8TGT2' : ntuniprot(RecName_Full='Uncharacterized protein YKL096C-B'),
'Q8TGT3' : ntuniprot(RecName_Full='Uncharacterized protein YJL077W-B'),
'Q8TGT4' : ntuniprot(RecName_Full='Uncharacterized protein YHR213W-A'),
'Q8TGT6' : ntuniprot(RecName_Full='Uncharacterized protein YHR022C-A'),
'Q8TGT7' : ntuniprot(RecName_Full='Uncharacterized protein YGR204C-A'),
'Q8TGT8' : ntuniprot(RecName_Full='Uncharacterized protein YGR174W-A'),
'Q8TGT9' : ntuniprot(RecName_Full='Uncharacterized protein YGR146C-A'),
'Q8TGU0' : ntuniprot(RecName_Full='Uncharacterized protein YGL007C-A'),
'Q8TGU1' : ntuniprot(RecName_Full='Uncharacterized protein YGL188C-A'),
'Q8TGU2' : ntuniprot(RecName_Full='Uncharacterized protein YFR032C-B'),
'Q8TGU4' : ntuniprot(RecName_Full='Uncharacterized protein YER175W-A'),
'Q8TGU5' : ntuniprot(RecName_Full='Uncharacterized protein YBR296C-A'),
'Q8TGU6' : ntuniprot(RecName_Full='Uncharacterized protein YBR182C-A'),
'Q8TGU7' : ntuniprot(RecName_Full='Uncharacterized protein YBR126W-A'),
'Q8TGU8' : ntuniprot(RecName_Full='Uncharacterized protein YBL071C-B'),
'Q8TGV0' : ntuniprot(RecName_Full='Uncharacterized protein YAR035C-A'),
'Q92316' : ntuniprot(RecName_Full='Dolichyl-diphosphooligosaccharide--protein glycosyltransferase subunit OST5'),
'Q92317' : ntuniprot(RecName_Full='Negative cofactor 2 complex subunit beta'),
'Q92325' : ntuniprot(RecName_Full='Cullin-associated NEDD8-dissociated protein 1 homolog'),
'Q92328' : ntuniprot(RecName_Full='Mitochondrial distribution and morphology protein 12 {ECO:0000255|HAMAP-Rule:MF_03104}'),
'Q92331' : ntuniprot(RecName_Full='Vacuolar protein sorting-associated protein 5'),
'Q92392' : ntuniprot(RecName_Full='Transposon Ty1-OL Gag polyprotein'),
'Q92393' : ntuniprot(RecName_Full='Transposon Ty1-OR Gag-Pol polyprotein'),
'Q96VH2' : ntuniprot(RecName_Full='Putative pelota-like protein YCL001W-B'),
'Q96VH3' : ntuniprot(RecName_Full='Putative uncharacterized protein YCL021W-A'),
'Q96VH4' : ntuniprot(RecName_Full='Putative nitroreductase HBN1'),
'Q96VH5' : ntuniprot(RecName_Full='MICOS complex subunit MIC10'),
'Q99176' : ntuniprot(RecName_Full='Protein SRN2'),
'Q99177' : ntuniprot(RecName_Full='Pre-mRNA-splicing factor BRR1'),
'Q99181' : ntuniprot(RecName_Full='Protein HSH49'),
'Q99186' : ntuniprot(RecName_Full='AP-2 complex subunit mu'),
'Q99188' : ntuniprot(RecName_Full='Regulator of G-protein signaling 2'),
'Q99189' : ntuniprot(RecName_Full='mRNA transport regulator MTR10'),
'Q99190' : ntuniprot(RecName_Full='Very-long-chain enoyl-CoA reductase {ECO:0000305}'),
'Q99207' : ntuniprot(RecName_Full='Nucleolar complex protein 14'),
'Q99208' : ntuniprot(RecName_Full="Y' element ATP-dependent helicase YLL066C"),
'Q99210' : ntuniprot(RecName_Full='dTTP/UTP pyrophosphatase {ECO:0000305}'),
'Q99216' : ntuniprot(RecName_Full='Pre-rRNA-processing protein PNO1'),
'Q99220' : ntuniprot(RecName_Full='Protein OS-9 homolog'),
| |
<reponame>preym17/csit<gh_stars>0
# Copyright (c) 2018 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module defining MultipleLossRatioSearch class."""
import logging
import math
import time
from AbstractSearchAlgorithm import AbstractSearchAlgorithm
from NdrPdrResult import NdrPdrResult
from ReceiveRateInterval import ReceiveRateInterval
class MultipleLossRatioSearch(AbstractSearchAlgorithm):
"""Optimized binary search algorithm for finding NDR and PDR bounds.
Traditional binary search algorithm needs initial interval
(lower and upper bound), and returns final interval after bisecting
(until some exit condition is met).
The exit condition is usually related to the interval width,
(upper bound value minus lower bound value).
The optimized algorithm contains several improvements
aimed to reduce overall search time.
One improvement is searching for two intervals at once.
The intervals are for NDR (No Drop Rate) and PDR (Partial Drop Rate).
Next improvement is that the initial interval does not need to be valid.
Imagine initial interval (10, 11) where 11 is smaller
than the searched value.
The algorithm will try (11, 13) interval next, and if 13 is still smaller,
(13, 17) and so on, doubling width until the upper bound is valid.
The part when interval expands is called external search,
the part when interval is bisected is called internal search.
Next improvement is that trial measurements at small trial duration
can be used to find a reasonable interval for full trial duration search.
This results in more trials performed, but smaller overall duration
in general.
Next improvement is bisecting in logarithmic quantities,
so that exit criteria can be independent of measurement units.
Next improvement is basing the initial interval on receive rates.
Final improvement is exiting early if the minimal value
is not a valid lower bound.
The complete search consist of several phases,
each phase performing several trial measurements.
Initial phase creates initial interval based on receive rates
at maximum rate and at maximum receive rate (MRR).
Final phase and preceding intermediate phases are performing
external and internal search steps,
each resulting interval is the starting point for the next phase.
The resulting interval of final phase is the result of the whole algorithm.
Each non-initial phase uses its own trial duration and width goal.
Any non-initial phase stops searching (for NDR or PDR independently)
when minimum is not a valid lower bound (at current duration),
or all of the following is true:
Both bounds are valid, bound bounds are measured at the current phase
trial duration, interval width is less than the width goal
for current phase.
TODO: Review and update this docstring according to rst docs.
TODO: Support configurable number of Packet Loss Ratios.
"""
class ProgressState(object):
"""Structure containing data to be passed around in recursion."""
def __init__(
self, result, phases, duration, width_goal, packet_loss_ratio,
minimum_transmit_rate, maximum_transmit_rate):
"""Convert and store the argument values.
:param result: Current measured NDR and PDR intervals.
:param phases: How many intermediate phases to perform
before the current one.
:param duration: Trial duration to use in the current phase [s].
:param width_goal: The goal relative width for the curreent phase.
:param packet_loss_ratio: PDR fraction for the current search.
:param minimum_transmit_rate: Minimum target transmit rate
for the current search [pps].
:param maximum_transmit_rate: Maximum target transmit rate
for the current search [pps].
:type result: NdrPdrResult.NdrPdrResult
:type phases: int
:type duration: float
:type width_goal: float
:type packet_loss_ratio: float
:type minimum_transmit_rate: float
:type maximum_transmit_rate: float
"""
self.result = result
self.phases = int(phases)
self.duration = float(duration)
self.width_goal = float(width_goal)
self.packet_loss_ratio = float(packet_loss_ratio)
self.minimum_transmit_rate = float(minimum_transmit_rate)
self.maximum_transmit_rate = float(maximum_transmit_rate)
def __init__(self, measurer, final_relative_width=0.005,
final_trial_duration=30.0, initial_trial_duration=1.0,
number_of_intermediate_phases=2, timeout=600.0, doublings=1):
"""Store the measurer object and additional arguments.
:param measurer: Rate provider to use by this search object.
:param final_relative_width: Final lower bound transmit rate
cannot be more distant that this multiple of upper bound [1].
:param final_trial_duration: Trial duration for the final phase [s].
:param initial_trial_duration: Trial duration for the initial phase
and also for the first intermediate phase [s].
:param number_of_intermediate_phases: Number of intermediate phases
to perform before the final phase [1].
:param timeout: The search will fail itself when not finished
before this overall time [s].
:param doublings: How many doublings to do in external search step.
Default 1 is suitable for fairly stable tests,
less stable tests might get better overal duration with 2 or more.
:type measurer: AbstractMeasurer.AbstractMeasurer
:type final_relative_width: float
:type final_trial_duration: float
:type initial_trial_duration: int
:type number_of_intermediate_phases: int
:type timeout: float
:type doublings: int
"""
super(MultipleLossRatioSearch, self).__init__(measurer)
self.final_trial_duration = float(final_trial_duration)
self.final_relative_width = float(final_relative_width)
self.number_of_intermediate_phases = int(number_of_intermediate_phases)
self.initial_trial_duration = float(initial_trial_duration)
self.timeout = float(timeout)
self.doublings = int(doublings)
@staticmethod
def double_relative_width(relative_width):
"""Return relative width corresponding to double logarithmic width.
:param relative_width: The base relative width to double.
:type relative_width: float
:returns: The relative width of double logarithmic size.
:rtype: float
"""
return 1.999 * relative_width - relative_width * relative_width
# The number should be 2.0, but we want to avoid rounding errors,
# and ensure half of double is not larger than the original value.
@staticmethod
def double_step_down(relative_width, current_bound):
"""Return rate of double logarithmic width below.
:param relative_width: The base relative width to double.
:param current_bound: The current target transmit rate to move [pps].
:type relative_width: float
:type current_bound: float
:returns: Transmit rate smaller by logarithmically double width [pps].
:rtype: float
"""
return current_bound * (
1.0 - MultipleLossRatioSearch.double_relative_width(
relative_width))
@staticmethod
def expand_down(relative_width, doublings, current_bound):
"""Return rate of expanded logarithmic width below.
:param relative_width: The base relative width to double.
:param doublings: How many doublings to do for expansion.
:param current_bound: The current target transmit rate to move [pps].
:type relative_width: float
:type doublings: int
:type current_bound: float
:returns: Transmit rate smaller by logarithmically double width [pps].
:rtype: float
"""
for _ in range(doublings):
relative_width = MultipleLossRatioSearch.double_relative_width(
relative_width)
return current_bound * (1.0 - relative_width)
@staticmethod
def double_step_up(relative_width, current_bound):
"""Return rate of double logarithmic width above.
:param relative_width: The base relative width to double.
:param current_bound: The current target transmit rate to move [pps].
:type relative_width: float
:type current_bound: float
:returns: Transmit rate larger by logarithmically double width [pps].
:rtype: float
"""
return current_bound / (
1.0 - MultipleLossRatioSearch.double_relative_width(
relative_width))
@staticmethod
def expand_up(relative_width, doublings, current_bound):
"""Return rate of expanded logarithmic width above.
:param relative_width: The base relative width to double.
:param doublings: How many doublings to do for expansion.
:param current_bound: The current target transmit rate to move [pps].
:type relative_width: float
:type doublings: int
:type current_bound: float
:returns: Transmit rate smaller by logarithmically double width [pps].
:rtype: float
"""
for _ in range(doublings):
relative_width = MultipleLossRatioSearch.double_relative_width(
relative_width)
return current_bound / (1.0 - relative_width)
@staticmethod
def half_relative_width(relative_width):
"""Return relative width corresponding to half logarithmic width.
:param relative_width: The base relative width to halve.
:type relative_width: float
:returns: The relative width of half logarithmic size.
:rtype: float
"""
return 1.0 - math.sqrt(1.0 - relative_width)
@staticmethod
def half_step_up(relative_width, current_bound):
"""Return rate of half logarithmic width above.
:param relative_width: The base relative width to halve.
:param current_bound: The current target transmit rate to move [pps].
:type relative_width: float
:type current_bound: float
:returns: Transmit rate larger by logarithmically half width [pps].
:rtype: float
"""
return current_bound / (
1.0 - MultipleLossRatioSearch.half_relative_width(relative_width))
def narrow_down_ndr_and_pdr(
self, minimum_transmit_rate, maximum_transmit_rate,
packet_loss_ratio):
"""Perform initial phase, create state object, proceed with next phases.
:param minimum_transmit_rate: Minimal target transmit rate [pps].
:param maximum_transmit_rate: Maximal target transmit rate [pps].
:param packet_loss_ratio: Fraction of packets lost, for PDR [1].
:type minimum_transmit_rate: float
:type maximum_transmit_rate: float
:type packet_loss_ratio: float
:returns: Structure containing narrowed down intervals
and their measurements.
:rtype: NdrPdrResult.NdrPdrResult
:raises RuntimeError: If total duration is | |
import os
import csv
import json
import re
import datetime
import requests
from .. import package_metadata, occurrences
from ..gbifutils import (
is_not_none,
is_none,
stop,
gbif_GET,
gbif_GET_write,
gbif_DELETE,
)
import logging
def _parse_args(x):
if re.match("geometry", x):
geometry = re.search("(POLY|MULTIPOLY).+", x, re.IGNORECASE).group()
return {"type": "within", "geometry": geometry}
tmp = re.split("\s", x)
pred_type = operator_lkup.get(tmp[1])
key = key_lkup.get(tmp[0])
return {"type": pred_type, "key": key, "value": tmp[2]}
def _check_environ(variable, value):
    """Return ``value`` if supplied, else fall back to the environment.

    Looks up ``variable`` in ``os.environ`` when ``value`` is None.
    Aborts via ``stop()`` when neither source provides a value.
    """
    # An explicitly passed value always wins over the environment.
    if is_not_none(value):
        return value
    else:
        value = os.environ.get(variable)
        if is_none(value):
            # NOTE(review): stop() presumably raises/aborts with the composed
            # message (see gbifutils.stop), so nothing is returned here —
            # confirm against gbifutils.
            stop(
                "".join(
                    [
                        variable,
                        """ not supplied and no entry in environmental
                        variables""",
                    ]
                )
            )
        else:
            return value
def download(queries, user=None, pwd=None, email=None, pred_type="and"):
    """
    Spin up a download request for GBIF occurrence data.

    :param queries: One or more query arguments to kick off a download job.
        Each argument has to be passed as a character string
        (e.g., ``country = US``), with a space between key (``country``),
        operator (``=``), and value (``US``); it is parsed internally.
        Acceptable keys are: ``taxonKey``, ``scientificName``, ``country``,
        ``publishingCountry``, ``hasCoordinate``, ``hasGeospatialIssue``,
        ``typeStatus``, ``recordNumber``, ``lastInterpreted``, ``continent``,
        ``geometry``, ``basisOfRecord``, ``datasetKey``, ``eventDate``,
        ``catalogNumber``, ``year``, ``month``, ``decimalLatitude``,
        ``decimalLongitude``, ``elevation``, ``depth``, ``institutionCode``,
        ``collectionCode``, ``issue``, ``mediatype``, ``recordedBy``,
        ``repatriated``
    :type queries: str or list
    :param pred_type: (character) One of ``equals`` (``=``), ``and`` (``&``),
        ``or`` (``|``), ``lessThan`` (``<``), ``lessThanOrEquals`` (``<=``),
        ``greaterThan`` (``>``), ``greaterThanOrEquals`` (``>=``),
        ``in``, ``within``, ``not`` (``!``), ``like``
    :param user: (character) User name within GBIF's website.
        Required. Set in your env vars with the option ``GBIF_USER``
    :param pwd: (character) User password within GBIF's website. Required.
        Set in your env vars with the option ``GBIF_PWD``
    :param email: (character) Email address to receive the download-done
        notice. Required. Set in your env vars with the option ``GBIF_EMAIL``

    See the API docs http://www.gbif.org/developer/occurrence#download
    for more info, and the predicates docs
    http://www.gbif.org/developer/occurrence#predicates

    GBIF has a limit of 12,000 characters for download queries — if your
    download request is really long and complex, consider breaking it up
    into multiple requests by one factor or another.

    :return: A tuple of the download key and the JSON payload that was sent

    Usage::

        from pygbif import occurrences as occ
        occ.download('basisOfRecord = LITERATURE')
        occ.download('taxonKey = 3119195')
        occ.download('decimalLatitude > 50')
        occ.download('elevation >= 9000')
        occ.download('country = US')
        occ.download('institutionCode = TLMF')
        occ.download('catalogNumber = Bird.27847588')
        res = occ.download(['taxonKey = 7264332', 'hasCoordinate = TRUE'])

        # pass output to download_meta for more information
        occ.download_meta(occ.download('decimalLatitude > 75'))

        # Multiple queries combined with "or"
        gg = occ.download(['decimalLatitude >= 65',
            'decimalLatitude <= -65'], pred_type='or')

        # Repatriated data for Costa Rica
        occ.download(['country = CR', 'repatriated = true'])
    """
    # Resolve credentials from the arguments, falling back to env vars.
    user = _check_environ("GBIF_USER", user)
    pwd = _check_environ("GBIF_PWD", pwd)
    email = _check_environ("GBIF_EMAIL", email)

    # Normalise a single query string into a one-element list, then parse.
    if isinstance(queries, str):
        queries = [queries]
    parsed = [_parse_args(query) for query in queries]

    # Use the GbifDownload class to assemble the predicates and POST them.
    req = GbifDownload(user, email)
    req.main_pred_type = pred_type
    for pred in parsed:
        if "geometry" in pred:
            req.add_geometry(pred["geometry"])
        else:
            req.add_predicate(pred["key"], pred["value"], pred["type"])
    return req.post_download(user, pwd), req.payload
class GbifDownload(object):
    def __init__(self, creator, email, polygon=None):
        """class to setup a JSON doc with the query and POST a request

        All predicates (default key-value or iterative based on a list of
        values) are combined with an AND statement. Iterative predicates
        create a subset of equals statements combined with OR.

        :param creator: User name.
        :param email: user email
        :param polygon: Polygon of points to extract data from
        """
        self.predicates = []
        self._main_pred_type = "and"
        self.url = "http://api.gbif.org/v1/occurrence/download/request"
        self.header = {
            "accept": "application/json",
            "content-type": "application/json",
            "user-agent": "".join(
                [
                    "python-requests/",
                    requests.__version__,
                    ",pygbif/",
                    package_metadata.__version__,
                ]
            ),
        }
        # NOTE: payload["predicate"]["predicates"] aliases self.predicates,
        # so add_predicate()/add_geometry() mutate the payload in place.
        self.payload = {
            "creator": creator,
            "notification_address": [email],
            "send_notification": "true",
            # NOTE(review): only the year is stored here — confirm intended.
            "created": datetime.date.today().year,
            "predicate": {"type": self._main_pred_type, "predicates": self.predicates},
        }
        self.request_id = None
        # prepare the geometry polygon constructions
        if polygon:
            self.add_geometry(polygon)

    @property
    def main_pred_type(self):
        """get main predicate combination type"""
        return self._main_pred_type

    @main_pred_type.setter
    def main_pred_type(self, value):
        """set main predicate combination type

        :param value: (character) One of ``equals`` (``=``), ``and`` (``&``), ``or`` (``|``),
            ``lessThan`` (``<``), ``lessThanOrEquals`` (``<=``), ``greaterThan`` (``>``),
            ``greaterThanOrEquals`` (``>=``), ``in``, ``within``, ``not`` (``!``), ``like``
        :raises Exception: if the value is not a recognised operator.
        """
        # Accept either the API operator name or its symbolic alias.
        if value not in operators:
            value = operator_lkup.get(value)
        if value:
            self._main_pred_type = value
            self.payload["predicate"]["type"] = self._main_pred_type
        else:
            raise Exception("main predicate combiner not a valid operator")

    def add_predicate(self, key, value, predicate_type="equals"):
        """
        add key, value, type combination of a predicate

        :param key: query KEY parameter
        :param value: the value used in the predicate
        :param predicate_type: the type of predicate (e.g. ``equals``)
        :raises Exception: if predicate_type is not a recognised operator.
        """
        if predicate_type not in operators:
            predicate_type = operator_lkup.get(predicate_type)
        if predicate_type:
            self.predicates.append({"type": predicate_type, "key": key, "value": value})
        else:
            raise Exception("predicate type not a valid operator")

    @staticmethod
    def _extract_values(values_list):
        """extract values from either file or list

        :param values_list: list or file name (str) with list of values;
            for a file, the first CSV column of each row is used
        :raises Exception: if the input is neither a str nor a list.
        """
        values = []
        # check if file or list of values to iterate
        if isinstance(values_list, str):
            with open(values_list) as ff:
                reading = csv.reader(ff)
                for j in reading:
                    values.append(j[0])
        elif isinstance(values_list, list):
            values = values_list
        else:
            raise Exception("input datatype not supported.")
        return values

    def add_iterative_predicate(self, key, values_list):
        """add an iterative predicate with a key and set of values
        which it can be equal to in and or function.

        The individual predicates are specified with the type ``equals`` and
        combined with a type ``or``.

        The main reason for this addition is the inability of using ``in`` as
        predicate type for multiple taxon_key values
        (cfr. http://dev.gbif.org/issues/browse/POR-2753)

        :param key: API key to use for the query.
        :param values_list: Filename or list containing the taxon keys to be
            searched.
        """
        # FIX: copy the extracted values before pop()-ing so that a caller's
        # list is not silently emptied (_extract_values returns the same
        # object when given a list).
        values = list(self._extract_values(values_list))
        predicate = {"type": "equals", "key": key, "value": None}
        predicates = []
        # pop() walks the values back-to-front; copy() keeps entries distinct.
        while values:
            predicate["value"] = values.pop()
            predicates.append(predicate.copy())
        self.predicates.append({"type": "or", "predicates": predicates})

    def add_geometry(self, polygon, geom_type="within"):
        """add a geometry type of predicate

        :param polygon: In this format ``POLYGON((x1 y1, x2 y2,... xn yn))``
        :param geom_type: type of predicate, e.g. ``within``
        :return:
        """
        self.predicates.append({"type": geom_type, "geometry": polygon})

    def post_download(self, user=None, pwd=None):
        """
        POST the assembled payload to the GBIF download-request endpoint.

        :param user: Username
        :param pwd: Password
        :return: the download request key returned by GBIF
        :raises Exception: when the HTTP status code is above 203.
        """
        user = _check_environ("GBIF_USER", user)
        pwd = _check_environ("GBIF_PWD", pwd)
        r = requests.post(
            self.url,
            auth=requests.auth.HTTPBasicAuth(user, pwd),
            data=json.dumps(self.payload),
            headers=self.header,
        )
        if r.status_code > 203:
            # FIX: separator added before the trailing hint — the message
            # previously ran the status code and "check your..." together.
            raise Exception(
                "error: "
                + r.text
                + ", with error status code "
                + str(r.status_code)
                + "; check your number of active downloads."
            )
        else:
            self.request_id = r.text
            # Lazy %-style args avoid building the string when INFO is off.
            logging.info("Your download key is %s", self.request_id)
            return self.request_id

    def get_status(self):
        """get the current download status"""
        return get_download_status(self.request_id)
def get_download_status(request_key):
    """Return the current status string of a download request."""
    meta = download_meta(request_key)
    return meta.get("status")
def download_meta(key, **kwargs):
    """
    Retrieve the occurrence download metadata for a unique key. Any further
    named arguments are passed on to ``requests.get``.

    :param key: [str] A key generated from a request, like that from ``download``

    Usage::

        from pygbif import occurrences as occ
        occ.download_meta(key = "0003970-140910143529206")
        occ.download_meta(key = "0000099-140929101555934")
    """
    return gbif_GET("http://api.gbif.org/v1/occurrence/download/" + key, {}, **kwargs)
def download_cancel(key, user=None, pwd=None, **kwargs):
"""
Delete a download request by its unique key. Further
named arguments passed on to ``requests.get`` can be included as additional
arguments
:param key: [str] A key generated from a request, like that from ``download``
:param user: [str] A user name, look at env var ``GBIF_USER`` first
:param pwd: [str] Your password, look at env var ``GBIF_PWD`` first
:return: a bool, `True` if cancel request successful, otherwise `False`
Usage::
from pygbif import occurrences as occ
# first, make a download request
x = occ.download('taxonKey = | |
current count value for progress bar
self.__progressBarCount.set(xx+10) ## increment that value
self.__singleSipmIvsVline = []
if(self.__cmjPlotDiag > 4): print(("__multiWindow__unpackSipmgetIvsVforSipm: len(self.__localSipmIvsVresult) = %d, self.__localSipmIvsVresult = %s \n") % (len(self.__localSipmIvsVresult),self.__localSipmIvsVresult))
self.__mcount = 0
self.__ncount = 0
for self.__mline in sorted(self.__localSipmIvsVresult):
xx = self.__progressBarCount.get() ## get current count value for progress bar
self.__progressBarCount.set(xx+10) ## increment that value
self.update() ## update the progress bar...
if(self.__cmjPlotDiag > 0): print(("__multiWindow__unpackSipmgetIvsVforSipm: (self.__singleSipmIvsVline, all) ncount = %d, len(self.__mline) = %d, self.__mline = %s \n") % (self.__ncount,len(self.__mline),self.__mline))
if(len(self.__mline) > 0):
self.__singleSipmIvsVline.append(self.__mline.rsplit(','))
if(self.__cmjPlotDiag > 0): print(("__multiWindow__unpackSipmgetIvsVforSipm: (self.__singleSipmIvsVline, filtered) mcount = %d, self.__mline = %s \n") % (self.__mcount,self.__mline))
self.__sipmIvsVresults.append(self.__mline)
self.__mcount += 1
self.__ncount += 1
if(self.__cmjPlotDiag > 0): print("__multiWindow__unpackSipmgetIvsVforSipm: Exit")
return
##
##
## --------------------------------------------------------------
## This method is specialized to the Sipms... and will have to be
## repeated for other types of tables... The PO-number is associated
## with Sipm numbers in the "sipm" table. Once a PO-number is selected
## a list of all Sipms must be made with that PO-Number. The
## following method makes that list....
def getSipmNumberFromPo(self,tempBatch):
    """Return the list of sipm_id values whose po_number equals tempBatch.

    Queries the 'sipms' table of the hardware database (via DataQuery) for
    all SiPM ids associated with the given purchase-order number, sorted in
    descending order of sipm_id.  In test mode (self.__cmjTest != 0) the
    query is truncated to self.__cmjTestLimit rows.

    :param tempBatch: the PO number to look up (converted to str).
    :return: list of sipm_id strings; also cached in self.__sipmList.
    """
    self.__sipmList = []
    self.__getSipmNumbers = DataQuery(self.__queryUrl)
    # Query parameters: select sipm_id from 'sipms' where po_number == tempBatch.
    self.__table = "sipms"
    self.__fetchThese = "sipm_id"
    self.__fetchCondition = "po_number:eq:"+str(tempBatch)
    if (self.__cmjPlotDiag > 1):
        print(("__multiWindow__getSipmNumberFromPogetSipmNumberFromPo: tempBatch = %s \n") % (tempBatch))
        print(("__multiWindow__getSipmNumberFromPogetSipmNumberFromPo: self.__table = %s \n") % (self.__table))
        print(("__multiWindow__getSipmNumberFromPogetSipmNumberFromPo: self.__fetchThese = %s \n") % (self.__fetchThese))
        print(("__multiWindow__getSipmNumberFromPogetSipmNumberFromPo: self.__fetchCondition = %s \n") % (self.__fetchCondition))
    # The '-' prefix on the order column requests descending sort order.
    if(self.__cmjTest == 0):
        self.__sipmList = self.__getSipmNumbers.query(self.__database,self.__table,self.__fetchThese,self.__fetchCondition,'-'+self.__fetchThese)
    else:
        # Test mode: limit the number of rows returned.
        self.__sipmList = self.__getSipmNumbers.query(self.__database,self.__table,self.__fetchThese,self.__fetchCondition,'-'+self.__fetchThese,limit=self.__cmjTestLimit)
    xx = self.__progressBarCount.get() ## get current count value for progress bar
    self.__progressBarCount.set(xx+10) ## increment that value
    if(self.__cmjPlotDiag > 1):
        for self.__l in self.__sipmList:
            print(("__multiWindow__getSipmNumberFromPogetSipmNumberFromPo: self.__sipmId = %s \n") % (self.__l))
    return self.__sipmList
## --------------------------------------------------------------
## This method calls the method to get the entries to the database
## and plot scatter plots
def getScatterPlots(self):
    """Driver for the scatter plots: delegates to plotScatterPlots().

    Prints entry/exit trace messages when diagnostics are enabled.
    """
    if(self.__cmjPlotDiag != 0):
        print("....getScatterPlots: Enter getScatterPlots \n")
    self.plotScatterPlots()
    if(self.__cmjPlotDiag != 0):
        print("....getScatterPlots: Exit getScatterPlots \n")
    return
## --------------------------------------------------------------
## This method calls the method to get the entries to the database
## and plot Histograms
def getHistograms(self):
    """Book, fill and draw the SiPM histograms, then write ROOT trees.

    Opens a freshly named ROOT output file (timestamped), books and fills
    the histograms, draws the canvas, writes the I-vs-V and combined
    measurement trees, and closes the file.
    """
    if(self.__cmjPlotDiag > 0): print("__multiWindow__getHistograms: Enter \n")
    #self.plotHistograms()
    # Timestamp used to build a unique output file name.
    self.__treeTime = myTime()
    self.__treeTime.getComputerTime()
    self.__saveTreeTime = self.__treeTime.getTimeForSavedFiles()
    self.__outRootTreeFileName = "outputFiles/SipmRootHistograms"+self.__saveTreeTime+".root"
    # 'RECREATE' overwrites any existing file with the same name.
    self.__rootTreeFile = TFile(self.__outRootTreeFileName,'RECREATE')
    print(("XXXXXXXXXXX getHistograms:: Root Tree File name = %s \n") % (self.__outRootTreeFileName))
    self.bookHistograms()
    self.fillHistograms()
    self.drawCanvas()
    #self.defineSipmMeasurementRootTree() ## define and write root trees for sipm measurements!
    self.defineSipmIvsVMeasurementRootTree() ## define the root trees that contain the I vs V measurement.
    ##cmj2022Feb03 self.defineAllLocalSipmMeasurementRootTree()
    ##cmj2022Feb03 self.defineAllVendorSipmMeasurementRootTree()
    self.defineAllMeasurementRootTree()
    # Close the file so the trees/histograms are flushed to disk.
    self.__rootTreeFile.Close()
    if(self.__cmjPlotDiag > 0): print("__multiWindow__getHistograms: Exit \n")
    return
##
##
## --------------------------------------------------------------
## Unpack the database information for each Sipm, each test,
## use save this information to plot in a histogramm
## and to save in a root tree
def unpackSipms(self):
    """Unpack raw SiPM query rows into per-test-date/per-sipm-id dictionaries.

    Each record in self.__sipmResults is a comma-separated row holding one
    measurement of one SiPM on one test date.  The fields are split out and
    stored into the two-level dictionaries keyed by [test date][sipm id]
    (id, number, test type, bias voltage, dark count, gain, temperature,
    breakdown voltage, dark count rate, I-vs-V condition, cross-talk, LED
    response, pack number, data file location/name).  Missing ('None')
    string fields become 'not_given'/'none_given'; missing numeric fields
    become -9999.99.  The progress bar is advanced as rows are processed.
    """
    if(self.__cmjPlotDiag > 1): print("XXXXXXXXXXX __multiWindow__unpackSipms:: Enter \n")
    if(self.__cmjPlotDiag > 2): print(("__multiWindow__unpackSipms: self.__sipmResults = %s \n") % (self.__sipmResults))
    print((".__multiWindow__unpackSipmunpackSipms: self.__sipmResults = %d \n") % len(self.__sipmResults))
    ## self.__sipmResults is a list that has all occurrences of a given sipm_id... this includes
    ## an entry for each test date (and test type like vendor or measured)....
    for self.__record in sorted(self.__sipmResults):
        self.__new_Sipm_Id_Marker = 0
        if(self.__cmjPlotDiag > 2): print(("__multiWindow__unpackSipms: self.__record = %s , len(self.__record) = %d \n") % (self.__record, len(self.__record)))
        for self.__item in sorted(self.__record):
            xx = self.__progressBarCount.get() ## get current count value for progress bar
            self.__progressBarCount.set(xx+10) ## increment that value
            self.update() ## update the progress bar...
            if(self.__cmjPlotDiag > 3): print(("__multiWindow__unpackSipms: self.__record = %s, len(self.__record) = %d \n") %(self.__record, len(self.__record)))
            ## NOTE(review): the loop variable is immediately overwritten —
            ## the whole record is re-split on every inner iteration; the
            ## inner loop appears redundant.  Confirm before refactoring.
            self.__item = str(self.__record).rsplit(',') ##
            if(self.__cmjPlotDiag > 3): print(("__multiWindow__unpackSipms: self.__item = %s, len(self.__item) = %d \n") %(self.__item, len(self.__item)))
            if (len(self.__item) > 2):
                ##cmj2022Fem03 self.__tempSipmId = self.__item[0][2:]
                # Positional unpacking of the comma-separated database row.
                self.__tempSipmId = self.__item[0]
                # The trailing digits of the id (after char 22) are the SiPM number.
                self.__tempNumber = self.__tempSipmId[22:]
                self.__tempSipmTestDate = self.__item[1]
                self.__tempTestType = self.__item[2]
                self.__tempWorkerBarCode = self.__item[3]
                self.__tempWorkStationBarCode = self.__item[4]
                self.__tempBiasVoltage = self.__item[5]
                self.__tempDarkCount = self.__item[6]
                self.__tempGain = self.__item[7]
                #self.__tempString = str(self.__item[8])
                #self.__tempTemperature = float(self.__tempString[0:len(self.__tempString)-1])
                self.__tempTemperature = float(self.__item[8])
                self.__tempBreakdownVolt = self.__item[9]
                self.__tempDarkCountRate = self.__item[10]
                self.__tempCurrentVsVoltageCond = self.__item[11]
                self.__tempXTalk = self.__item[12]
                self.__tempLedResponse = self.__item[13]
                # Pack number comes from a separate lookup; default to 'None'
                # when this sipm id has no entry (bare except kept as-is).
                try:
                    self.__tempPackNumber = self.__sipmPackNumberResults[self.__item[0]]
                except:
                    self.__tempPackNumber = 'None'
                xx = self.__progressBarCount.get() ## get current count value for progress bar
                self.__progressBarCount.set(xx+10) ## increment that value
                self.__tempDataFileLocation = self.__item[14]
                self.__tempDataFileName = self.__item[15]
                if(self.__new_Sipm_Id_Marker == 0): ## let user know what the script is doing
                    print(("__multiWindow__getHistograms:unpackSipms:: unpack sipm_id = %s ") % (self.__tempSipmId))
                    self.__new_Sipm_Id_Marker = 1
                if(self.__cmjPlotDiag > 2): ## diagnostic print statements
                    print("unpackSipms ........ New Counter ............ \n")
                    print(("__multiWindow__unpackSipmself.__item = %s ") % self.__item)
                    print(("__multiWindow__unpackSipmself.__tempSipmId = %s ") % self.__tempSipmId)
                    print(("__multiWindow__unpackSipmself.__tempNumber = %s ") % self.__tempNumber)
                    print(("__multiWindow__unpackSipmself.__tempSipmTestDate = %s ") % self.__tempSipmTestDate)
                    print(("__multiWindow__unpackSipmself.__tempTestType = %s ") % self.__tempTestType)
                    print(("__multiWindow__unpackSipmself.__tempWorkerBarCode = %s ") % self.__tempWorkerBarCode)
                    print(("__multiWindow__unpackSipmself.__tempWorkStationBarCode = %s ") % self.__tempWorkStationBarCode)
                    print(("__multiWindow__unpackSipmself.__tempBiasVoltage = %s ") % self.__tempBiasVoltage)
                    print(("__multiWindow__unpackSipmself.__tempDarkCount = %s ") % self.__tempDarkCount)
                    print(("__multiWindow__unpackSipmself.__tempGain = %s ") % self.__tempGain)
                    print(("__multiWindow__unpackSipmself.__tempTemperature = %s ") % self.__tempTemperature)
                    print(("__multiWindow__unpackSipmself.__tempBreakDownVolt = %s ") % self.__tempBreakdownVolt)
                    print(("__multiWindow__unpackSipmself.__tempDarkCountRate = %s ") % self.__tempDarkCountRate)
                    print(("__multiWindow__unpackSipmself.__tempCurrentVsVoltageCond = %s ") % self.__tempCurrentVsVoltageCond)
                    print(("__multiWindow__unpackSipmself.__tempXTalk = %s ") % self.__tempXTalk)
                    print(("__multiWindow__unpackSipmself.__tempLedResponse = %s ") % self.__tempLedResponse)
                    print(("__multiWindow__unpackSipmself.__tempPackNumber = %s ") % self.__tempPackNumber)
                    print(("__multiWindow__unpackSipmself.__tempDataFileLocation = %s ") % self.__tempDataFileLocation)
                    print(("__multiWindow__unpackSipmself.__tempDataFileName = %s \n") % self.__tempDataFileName)
                # Store the unpacked fields into the [test date][sipm id] dictionaries.
                self.__sipmId[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempSipmId
                self.__sipmNumber[self.__tempSipmTestDate][self.__tempSipmId] = float(self.__tempNumber)
                self.__sipmMeasureTestDate[self.__tempSipmTestDate] = self.__tempSipmTestDate
                self.__saveTestType[self.__tempSipmTestDate] = self.__tempTestType
                self.__testType[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempTestType
                self.__biasVoltage[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempBiasVoltage
                self.__darkCount[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempDarkCount
                self.__gain[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempGain
                self.__temperature[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempTemperature
                ##
                # Optional string fields: substitute placeholders when 'None'.
                if(self.__tempWorkerBarCode != 'None') :
                    self.__workerBarCode[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempWorkerBarCode
                else:
                    self.__workerBarCode[self.__tempSipmTestDate][self.__tempSipmId] = 'not_given'
                if(self.__tempWorkStationBarCode != 'None') :
                    self.__workStationBarCode[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempWorkStationBarCode
                else:
                    self.__workStationBarCode[self.__tempSipmTestDate][self.__tempSipmId] = 'not_given'
                # Optional numeric fields: -9999.99 marks a missing value.
                if(self.__tempBreakdownVolt != 'None') :
                    self.__breakdown_voltage[self.__tempSipmTestDate][self.__tempSipmId] = float(self.__tempBreakdownVolt)
                else:
                    self.__breakdown_voltage[self.__tempSipmTestDate][self.__tempSipmId] = -9999.99
                if(self.__tempDarkCountRate != 'None') :
                    self.__dark_count_rate[self.__tempSipmTestDate][self.__tempSipmId] = float(self.__tempDarkCountRate)
                else:
                    self.__dark_count_rate[self.__tempSipmTestDate][self.__tempSipmId] = -9999.99
                if(self.__tempCurrentVsVoltageCond != 'None') :
                    self.__current_vs_voltage_condition[self.__tempSipmTestDate][self.__tempSipmId] = float(self.__tempCurrentVsVoltageCond)
                else:
                    self.__current_vs_voltage_condition[self.__tempSipmTestDate][self.__tempSipmId] = -9999.99
                if(self.__tempXTalk != 'None') :
                    self.__x_talk[self.__tempSipmTestDate][self.__tempSipmId] = float(self.__tempXTalk)
                else:
                    self.__x_talk[self.__tempSipmTestDate][self.__tempSipmId] = -9999.99
                if(self.__tempLedResponse != 'None') :
                    self.__led_response[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempLedResponse
                else:
                    self.__led_response[self.__tempSipmTestDate][self.__tempSipmId] = -9999.99
                if(self.__tempPackNumber != 'None'):
                    self.__pack_number[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempPackNumber
                else:
                    self.__pack_number[self.__tempSipmTestDate][self.__tempSipmId] = 'none_given'
                if(self.__tempDataFileLocation != 'None') :
                    self.__data_file_location[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempDataFileLocation
                else:
                    self.__data_file_location[self.__tempSipmTestDate][self.__tempSipmId] = 'none_given'
                if(self.__tempDataFileName != 'None') :
                    self.__data_file_name[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempDataFileName
                else:
                    self.__data_file_name[self.__tempSipmTestDate][self.__tempSipmId] = 'none_given'
    # Diagnostic summary of everything that was unpacked.
    if(self.__cmjPlotDiag > 1): ## diagnostic print statements
        print("unpackSipms ........ SipmId ............ \n")
        for self.__n in list(self.__sipmMeasureTestDate.keys()):
            print(("...... self.__sipmMeasureTestDate[%s] = %s ") % (self.__n,self.__sipmMeasureTestDate[self.__n]))
            for self.__m in sorted(self.__sipmId[self.__n].keys()):
                print(("__multiWindow__unpackSipmself.__sipmId[%s][%s] = %s \n") % (self.__n,self.__m,self.__sipmId[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__sipmNumber[%s][%s] = %s ") % (self.__n,self.__m,self.__sipmNumber[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__testType[%s][%s] = %s ") % (self.__n,self.__m,self.__testType[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__workerBarCode[%s][%s]= %s ") % (self.__n,self.__m,self.__workerBarCode[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__workStationBarCode[%s][%s] = %s \n") % (self.__n,self.__m,self.__workStationBarCode[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__biasVoltage[%s][%s] = %s ") % (self.__n,self.__m,self.__biasVoltage[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__darkCount[%s][%s] = %s ") % (self.__n,self.__m,self.__darkCount[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__gain[%s][%s] = %s ") % (self.__n,self.__m,self.__gain[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__sipmId.keys() = %s || self.__temperature[%s] = %s ") % (self.__m,self.__m,self.__temperature[self.__m]))
                print(("__multiWindow__unpackSipmself.__breakdown_voltage[%s[%s] = %s \n") % (self.__n,self.__m,self.__breakdown_voltage[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__dark_count_rate[%s][%s] = %s \n") % (self.__n,self.__m,self.__dark_count_rate[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__current_vs_voltage_condition[%s][%s] = %s ") % (self.__n,self.__m,self.__current_vs_voltage_condition[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__x_talk[%s][%s] = %s \n") % (self.__n,self.__m,self.__x_talk[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__led_response[%s][%s] = %s ") % (self.__n,self.__m,self.__led_response[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__data_file_location[%s][%s] = %s \n") % (self.__n,self.__m,self.__data_file_location[self.__n][self.__m]))
                print(("__multiWindow__unpackSipmself.__data_file_name[%s][%s] = %s ") % (self.__n,self.__m,self.__data_file_name[self.__n][self.__m]))
    #
    if(self.__cmjPlotDiag > 2): print("XXXXXXXXXXX__multiWindow__unpackSipms:: Exit \n")
    return
##
##
## --------------------------------------------------------------
def unpackSipmIvsV(self):
    """Unpack I-vs-V query rows into the per-date/per-id voltage/current maps.

    Each record of self.__sipmIvsVresults is a comma-separated row:
    sipm id, test date, bin index, voltage, current.  Voltage and current
    are stored (as strings, unconverted) into the three-level dictionaries
    keyed by [test date][sipm id][bin].
    """
    if(self.__cmjPlotDiag > 1): print("XXXXXXXXXXX entered unpackSipmIvsV \n")
    if(self.__cmjPlotDiag > 2): print((".... unpackSipmIvsV: self.__sipmResults = %s \n") % (self.__sipmResults))
    print((".... unpackSipmIvsV: self.__sipmResults = %d \n") % len(self.__sipmIvsVresults))
    ## self.__sipmResults is a list that has all occurrences of a given sipm_id... this includes
    ## an entry for each test date (and test type like vendor or measured)....
    for self.__record in sorted(self.__sipmIvsVresults):
        if(self.__cmjPlotDiag > 2): print(("....unpackSipmIvsV: self.__record = %s , len(self.__record) = %d \n") % (self.__record, len(self.__record)))
        for self.__item in sorted(self.__record):
            if(self.__cmjPlotDiag > 3): print(("....unpackSipmIvsV: self.__record = %s, len(self.__record) = %d \n") %(self.__record, len(self.__record)))
            ## NOTE(review): as in unpackSipms, the loop variable is overwritten
            ## by re-splitting the whole record each inner iteration — confirm.
            self.__item = str(self.__record).rsplit(',') ##
            if(self.__cmjPlotDiag > 3): print(("....unpackSipmIvsV: self.__item = %s, len(self.__item) = %d \n") %(self.__item, len(self.__item)))
            if (len(self.__item) > 2):
                # Positional unpacking of one I-vs-V row.
                self.__tempSipmId = self.__item[0]
                self.__tempSipmTestDate = self.__item[1]
                self.__tempSipmIvsV_Bin = self.__item[2]
                self.__tempSipmIvsV_Voltage = self.__item[3]
                self.__tempSipmIvsV_Current = self.__item[4]
                if(self.__cmjPlotDiag > 4):
                    print((".... unpackSipmIvsV: self.__tempSipmId = %s") % (self.__tempSipmId))
                    print((".... unpackSipmIvsV: self.__tempSipmTestDate = %s") % (self.__tempSipmTestDate))
                    print((".... unpackSipmIvsV: self.__tempSipmIvsV_Bin = %s") % (self.__tempSipmIvsV_Bin))
                    print((".... unpackSipmIvsV: self.__tempSipmIvsV_Voltage = %s") % (self.__tempSipmIvsV_Voltage))
                    print((".... unpackSipmIvsV: self.__tempSipmIvsV_Current = %s") % (self.__tempSipmIvsV_Current))
                # Store into the [date][sipm id][bin] dictionaries.
                self.__sipmMeasureTestDate_IvsV[self.__tempSipmTestDate] = self.__tempSipmTestDate
                self.__sipmId_IvsV[self.__tempSipmTestDate][self.__tempSipmId] = self.__tempSipmId
                self.__IvsV_voltage[self.__tempSipmTestDate][self.__tempSipmId][self.__tempSipmIvsV_Bin] = self.__tempSipmIvsV_Voltage
                self.__IvsV_current[self.__tempSipmTestDate][self.__tempSipmId][self.__tempSipmIvsV_Bin] = self.__tempSipmIvsV_Current
    return
##
##
##
################################################################################################
################################################################################################
################################################################################################
##
## Root part of the program
##
## --------------------------------------------------------------------
## Plot scatter plots for sipms
## Use Root!
def plotScatterPlots(self):
print (".... plotScatterPlots:Begin Scatter Plots\n")
self.__windowTitle = "PO-Number "+str(self.__sipmBatchRequest)
self.__c2 = TCanvas('self.__c2',self.__windowTitle,200,10,800,700)
self.__c2.SetFillColor(0)
self.__c2.Divide(2,3)
self.__n = len(self.__plotSipmNumber)
self.__sipmNumberArray = array('f')
self.__biasVoltageArray = array('f')
self.__darkCountArray = array('f')
self.__gainArray = array('f')
self.__temperatureArray = | |
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import socket
from unittest import TestCase
from mock.mock import patch, MagicMock
class TestHDP206StackAdvisor(TestCase):
def setUp(self):
    """Load the stack advisor modules from source and build the advisor.

    Loads the base 'stack_advisor' module first (so the HDP 2.0.6 advisor
    can resolve its parent classes), then the HDP 2.0.6 implementation,
    instantiates HDP206StackAdvisor, and swaps get_system_min_uid for the
    mocked variant defined below.
    """
    import imp
    import os
    testDirectory = os.path.dirname(os.path.abspath(__file__))
    # Paths are relative to this test file within the source tree.
    stackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/stack_advisor.py')
    hdp206StackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/HDP/2.0.6/services/stack_advisor.py')
    hdp206StackAdvisorClassName = 'HDP206StackAdvisor'
    # The base module must be importable as 'stack_advisor' before loading
    # the HDP 2.0.6 module, which extends classes defined in it.
    with open(stackAdvisorPath, 'rb') as fp:
        stack_advisor = imp.load_module( 'stack_advisor', fp, stackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE) )
    with open(hdp206StackAdvisorPath, 'rb') as fp:
        self.stack_advisor_impl = imp.load_module('stack_advisor_impl', fp, hdp206StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
    clazz = getattr(self.stack_advisor_impl, hdp206StackAdvisorClassName)
    self.stackAdvisor = clazz()
    # Show full diffs on assertion failures.
    self.maxDiff = None
    # substitute method in the instance: keep the real implementation and
    # route calls through the patched wrapper defined below.
    self.get_system_min_uid_real = self.stackAdvisor.get_system_min_uid
    self.stackAdvisor.get_system_min_uid = self.get_system_min_uid_magic
@patch('__builtin__.open')
@patch('os.path.exists')
def get_system_min_uid_magic(self, exists_mock, open_mock):
    """Run the real get_system_min_uid with the filesystem mocked out.

    open() is patched to return a fake login.defs whose effective UID_MIN
    line is 500 (the commented line must be ignored by the implementation),
    and os.path.exists is patched to report the file as present.
    """
    class MagicFile(object):
        # Minimal stand-in for a file object: read() plus the
        # context-manager protocol used by 'with open(...)'.
        def read(self):
            return """
      #test line UID_MIN 200
      UID_MIN 500
      """

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass

        def __enter__(self):
            return self

    exists_mock.return_value = True
    open_mock.return_value = MagicFile()
    # Call the saved, unpatched implementation captured in setUp().
    return self.get_system_min_uid_real()
def test_recommendationCardinalityALL(self):
    """A SLAVE component with cardinality ALL is recommended on every host."""
    monitor = {"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE", "is_master": False}
    services = self.prepareServices([{"name": "GANGLIA", "components": [monitor]}])
    hosts = self.prepareHosts(["host1", "host2"])
    layout = self.stackAdvisor.recommendComponentLayout(services, hosts)
    self.assertHostLayout({"GANGLIA_MONITOR": ["host1", "host2"]}, layout)
def test_recommendOnAllHosts(self):
    """ Recommend on all hosts for cardinality ALL even if the component has been installed in the cluster before """
    monitor = {"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE",
               "is_master": False, "hostnames": ["host1"]}
    services = self.prepareServices([{"name": "GANGLIA", "components": [monitor]}])
    hosts = self.prepareHosts(["host1", "host2"])
    layout = self.stackAdvisor.recommendComponentLayout(services, hosts)
    # Both hosts expected, despite the existing single-host installation.
    self.assertHostLayout({"GANGLIA_MONITOR": ["host1", "host2"]}, layout)
def test_recommendationIsNotPreferableOnAmbariServer(self):
    """Master components should avoid the host running the Ambari server."""
    server = {"name": "GANGLIA_SERVER", "cardinality": "ALL", "category": "MASTER", "is_master": True}
    services = self.prepareServices([{"name": "GANGLIA", "components": [server]}])
    # The local FQDN plays the role of the Ambari server host.
    ambari_host = socket.getfqdn()
    hosts = self.prepareHosts([ambari_host, "host2"])
    layout = self.stackAdvisor.recommendComponentLayout(services, hosts)
    self.assertHostLayout({"GANGLIA_SERVER": ["host2"]}, layout)
def test_validationNamenodeAndSecondaryNamenode2Hosts_noMessagesForSameHost(self):
    """Co-located NAMENODE/SECONDARY_NAMENODE only flag the unused host."""
    hdfs_components = [
        {"name": "NAMENODE", "cardinality": "1-2", "category": "MASTER", "is_master": True, "hostnames": ["host1"]},
        {"name": "SECONDARY_NAMENODE", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host1"]},
    ]
    services = self.prepareServices([{"name": "HDFS", "components": hdfs_components}])
    hosts = self.prepareHosts(["host1", "host2"])
    validation = self.stackAdvisor.validateComponentLayout(services, hosts)
    expected = [{"message": "Host is not used", "level": "ERROR", "host": "host2"}]
    self.assertValidationResult(expected, validation)
def test_validationCardinalityALL(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "ALL", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "1-2", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Ganglia Monitor component should be installed on all hosts in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinalityExactAmount(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "2", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "2", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Exactly 2 Ganglia Monitor components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinalityAtLeast(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "1+", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "3+", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "At least 3 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationWarnMessagesIfLessThanDefault(self):
servicesInfo = [
{
"name": "YARN",
"components": []
}
]
services = self.prepareServices(servicesInfo)
services["configurations"] = {"yarn-site":{"properties":{"yarn.nodemanager.resource.memory-mb": "0",
"yarn.scheduler.minimum-allocation-mb": "str"}}}
hosts = self.prepareHosts([])
result = self.stackAdvisor.validateConfigurations(services, hosts)
expectedItems = [
{"message": "Value is less than the recommended default of 512", "level": "WARN"},
{'message': 'Value should be set for yarn.nodemanager.linux-container-executor.group', 'level': 'ERROR'},
{"message": "Value should be integer", "level": "ERROR"},
{"message": "Value should be set", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationYARNServicecheckQueueName(self):
servicesInfo = [
{
"name": "YARN",
"components": []
}
]
services = self.prepareServices(servicesInfo)
services["configurations"] = {"yarn-env":{"properties":{"service_check.queue.name": "default"}},
"capacity-scheduler":{"properties":{"capacity-scheduler": "yarn.scheduler.capacity.root.queues=ndfqueue\n"}}}
hosts = self.prepareHosts([])
result = self.stackAdvisor.validateConfigurations(services, hosts)
expectedItems = [
{'message': 'Queue is not exist, or not corresponds to existing YARN leaf queue', 'level': 'ERROR'}
]
self.assertValidationResult(expectedItems, result)
services["configurations"]["yarn-env"]["properties"]["service_check.queue.name"] = "ndfqueue"
expectedItems = []
result = self.stackAdvisor.validateConfigurations(services, hosts)
self.assertValidationResult(expectedItems, result)
def test_validationMinMax(self):
configurations = {
"mapred-site": {
"properties": {
"mapreduce.task.io.sort.mb": "4096",
"some_float_value": "0.5",
"no_min_or_max_attribute_property": "STRING_VALUE"
}
}
}
recommendedDefaults = {
"mapred-site": {
"properties": {
"mapreduce.task.io.sort.mb": "2047",
"some_float_value": "0.8",
"no_min_or_max_attribute_property": "STRING_VALUE"
},
"property_attributes": {
'mapreduce.task.io.sort.mb': {'maximum': '2047'},
'some_float_value': {'minimum': '0.8'}
}
}
}
items = []
self.stackAdvisor.validateMinMax(items, recommendedDefaults, configurations)
expectedItems = [
{
'message': 'Value is greater than the recommended maximum of 2047 ',
'level': 'WARN',
'config-type': 'mapred-site',
'config-name': 'mapreduce.task.io.sort.mb',
'type': 'configuration'
},
{
'message': 'Value is less than the recommended minimum of 0.8 ',
'level': 'WARN',
'config-type': 'mapred-site',
'config-name': 'some_float_value',
'type': 'configuration'
}
]
self.assertEquals(expectedItems, items)
def test_validationHostIsNotUsedForNonValuableComponent(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE", "is_master": False, "hostnames": ["host1", "host2"]},
{"name": "GANGLIA_SERVER", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host2"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Host is not used", "host": "host1", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinality01TwoHostsAssigned(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "0-1", "category": "MASTER", "is_master": True, "hostnames": ["host1", "host2"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Between 0 and 1 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationHostIsNotUsed(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_SERVER", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Host is not used", "host": "host2", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_getConfigurationClusterSummary_withHBaseAnd6gbRam(self):
servicesList = ["HBASE"]
components = []
hosts = {
"items" : [
{
"Hosts" : {
"cpu_count" : 8,
"total_mem" : 6291456,
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
}
]
}
expected = {
"hBaseInstalled": True,
"components": components,
"cpu": 8,
"disk": 8,
"ram": 6,
"reservedRam": 2,
"hbaseRam": 1,
"minContainerSize": 512,
"totalAvailableRam": 3072,
"containers": 6,
"ramPerContainer": 512,
"mapMemory": 512,
"reduceMemory": 512,
"amMemory": 512,
"referenceHost": hosts["items"][0]["Hosts"]
}
# Test - Cluster data with 1 host
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
self.assertEquals(result, expected)
# Test - Cluster data with 2 hosts - pick minimum memory
servicesList.append("YARN")
services = services = {"services":
[{"StackServices":
{"service_name" : "YARN",
"service_version" : "2.6.0.2.2"
},
"components":[
{
"StackServiceComponents":{
"advertise_version":"true",
"cardinality":"1+",
"component_category":"SLAVE",
"component_name":"NODEMANAGER",
"custom_commands":[
],
"display_name":"NodeManager",
"is_client":"false",
"is_master":"false",
"service_name":"YARN",
"stack_name":"HDP",
"stack_version":"2.2",
"hostnames":[
"host1",
"host2"
]
},
"dependencies":[
]
}
],
}],
"configurations": {}
}
hosts["items"][0]["Hosts"]["host_name"] = "host1"
hosts["items"].append({
"Hosts": {
"cpu_count" : 4,
"total_mem" : 500000,
"host_name" : "host2",
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
})
expected["referenceHost"] = hosts["items"][1]["Hosts"]
expected["referenceNodeManagerHost"] = hosts["items"][1]["Hosts"]
expected["amMemory"] = 170.66666666666666
expected["containers"] = 3.0
expected["cpu"] = 4
expected["totalAvailableRam"] = 512
expected["mapMemory"] = 170
expected["minContainerSize"] = 256
expected["reduceMemory"] = 170.66666666666666
expected["ram"] = 0
expected["ramPerContainer"] = 170.66666666666666
expected["reservedRam"] = 1
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
self.assertEquals(result, expected)
def test_getConfigurationClusterSummary_withHBaseAnd48gbRam(self):
servicesList = ["HBASE"]
components = []
| |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Hybrid Script Parser For TIR"""
# pylint: disable=invalid-name, missing-docstring, inconsistent-return-statements, no-else-return
# pylint: disable=unnecessary-comprehension, unused-argument, import-outside-toplevel
# pylint: disable=unused-import
import json
import operator
from typed_ast import ast3 as ast
import tvm._ffi
from tvm import tir
from tvm._ffi.base import TVMError
from tvm.ir import GlobalVar
from tvm.tir import all as _all
from tvm.tir import expr as _expr
from . import scope_emitter, special_stmt, scope_handler, intrin, ty
from .meta_unparser import MetaUnparser
from .registry import Registry
from . import _ffi_api
class HybridParserError(RuntimeError):
    """Raised for any hybrid-script parse failure.

    Instances are created by HybridParser.report_error(); the message embeds
    the offending source line, a caret marker and the line number (see
    HybridParser.wrap_line_col).
    """
class HybridParser(ast.NodeVisitor):
    """Python AST visitor pass which finally lowers it to TIR
    Notes for extension:
    1. To support new types of AST nodes. Add a function visit_xxx().
    2. To support new functions
    We divide allowed function calls in hybrid script into 3 categories,
    which is intrin, scope_handler and special_stmt.
    1) intrin functions ought to have return value.
    User can also register intrin category function into parser.
    2) scope_handler functions have no return value and accepts parser and AST node
    as its arguments, which is used in for scope and with scope.
    3) special_stmt functions have return value and accepts parser and AST node as its arguments
    When visiting Call node, we check special_stmt registry at first. If no registered function
    is found, we then check intrin.
    When visiting With node, we check with_scope registry.
    When visiting For node, we check for_scope registry.
    """

    # Maps each Python binary / comparison / boolean AST operator class to
    # the TIR constructor (or plain Python operator) used to lower it.
    _binop_maker = {
        ast.Add: tir.Add,
        ast.Sub: tir.Sub,
        ast.Mult: tir.Mul,
        ast.Div: tir.Div,
        ast.FloorDiv: tir.FloorDiv,
        ast.Mod: tir.FloorMod,
        ast.BitOr: operator.or_,
        ast.BitAnd: operator.and_,
        ast.BitXor: operator.xor,
        ast.Gt: tir.GT,
        ast.GtE: tir.GE,
        ast.Lt: tir.LT,
        ast.LtE: tir.LE,
        ast.Eq: tir.EQ,
        ast.NotEq: tir.NE,
        ast.And: tir.And,
        ast.Or: tir.Or,
    }

    # Unary operators: arithmetic negation and bit inversion fold through
    # Python's operator module; logical `not` lowers to tir.Not.
    _unaryop_maker = {ast.USub: operator.neg, ast.Invert: operator.invert, ast.Not: tir.Not}
    def __init__(self, src, base_lienno):
        """Create a parser for one hybrid-script source fragment.

        Parameters
        ----------
        src : str
            The full source text; split into lines for error reporting.
        base_lienno : int
            Line number of the first line of *src* within the enclosing file,
            used to translate AST line numbers to file line numbers.
            NOTE(review): the parameter name is a typo for "base_lineno";
            kept as-is because renaming would break keyword callers.
        """
        # Per-function parsing state; populated by init_function_parsing_env().
        self.params = None
        self.buffer_map = None
        self.dict_attr = None
        self.scope_emitter = None
        self.var_env_dict = None

        self.src = src.split("\n")
        self.base_lineno = base_lienno
        # Position of the node currently being visited (for error messages).
        self.current_lineno = 0
        self.current_col_offset = 0
        self.meta = None  # deserialized __tvm_meta__, if the script has one
        self.functions = {}  # GlobalVar -> parsed PrimFunc
        self.target = None  # assignment-target side channel used by visit_Assign
def init_function_parsing_env(self):
"""Initialize function parsing environment"""
self.params = [] # parameter list
self.buffer_map = {} # buffer map
self.dict_attr = {} # dict attr
self.scope_emitter = scope_emitter.ScopeEmitter(self) # scope emitter
self.var_env_dict = {} # map from var to thread env name
@staticmethod
def is_meta(node):
"""Judge whether an AST node is META"""
return (
isinstance(node, ast.Assign)
and len(node.targets) == 1
and isinstance(node.targets[0], ast.Name)
and node.targets[0].id == "__tvm_meta__"
)
def init_meta(self, meta_dict):
if meta_dict is not None:
self.meta = tvm.ir.load_json(json.dumps(meta_dict))
def visit(self, node):
"""Override method in ast.NodeVisitor"""
old_lineno, old_col_offset = self.current_lineno, self.current_col_offset
if hasattr(node, "lineno"):
self.current_lineno = self.base_lineno + node.lineno - 1
if hasattr(node, "col_offset"):
self.current_col_offset = node.col_offset
method = "visit_" + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
visit_res = visitor(node)
self.current_lineno, self.current_col_offset = old_lineno, old_col_offset
return visit_res
def wrap_line_col(self, message, lineno, col_offset):
"""Wrap the message with line number and column offset"""
src_line = self.src[lineno - self.base_lineno]
leading_space = len(src_line) - len(src_line.lstrip(" "))
col_offset = col_offset - leading_space
src_line = src_line[leading_space:]
return (
"\n "
+ src_line
+ "\n "
+ " " * col_offset
+ "^\n"
+ "ParserError in line "
+ str(lineno)
+ " : "
+ message
)
def report_error(self, message, lineno=None, col_offset=None):
"""Report an error occur in line lineno and column col_offset
Parameters
----------
message : str
Error message
lineno : int
Line number of error line
col_offset : int
Column offset of error line
"""
if lineno is None:
lineno = self.current_lineno
if col_offset is None:
col_offset = self.current_col_offset
raise HybridParserError(self.wrap_line_col(message, lineno, col_offset))
def get_body(self):
body = []
while len(self.scope_emitter.node_stack[-1]) > 0:
res = self.visit(self.scope_emitter.node_stack[-1].pop())
if res is not None:
body.append(res)
return tvm.tir.SeqStmt(body) if len(body) > 1 else body[0]
def get_type(self, type_node):
""" Parse type """
if type_node is None:
self.report_error("missing type annotation")
res_type = self.visit(type_node)
return tvm.ir.TupleType([]) if res_type is None else res_type.evaluate()
def generic_visit(self, node):
"""Override method in ast.NodeVisitor.
To directly filter out invalidate type of stmt.
"""
self.report_error(type(node).__name__ + " AST node is not supported now")
def visit_Module(self, node):
"""Module visitor
AST abstract grammar:
Module(stmt* body, type_ignore* type_ignore)
By now we support two format of hybrid script shown below.
Example
-------
1. Generate a Function(If the code is printed, then it may bring meta)
.. code-block:: python
import tvm
@tvm.hybrid.script
def A(...):
...
# call hybrid parser when call this function, get a Function
func = A
2. Generate an IRModule
.. code-block:: python
import tvm
@tvm.hybrid.script
class MyMod():
def A(...):
...
def B(...):
...
__tvm_meta__ = ...
# call hybrid parser during construction, get an IRModule
mod = MyMod()
"""
if len(node.body) == 1 and isinstance(node.body[0], (ast.ClassDef, ast.FunctionDef)):
# class or single function
return self.visit(node.body[0])
elif len(node.body) == 2:
if isinstance(node.body[0], ast.Assign):
node.body[0], node.body[1] = node.body[1], node.body[0]
if isinstance(node.body[0], ast.FunctionDef) and HybridParser.is_meta(node.body[1]):
# function with meta
self.init_meta(MetaUnparser().visit(node.body[1].value))
return self.visit(node.body[0])
self.report_error(
"Only one-function, one-class or function-with-meta source code is allowed"
)
def visit_ClassDef(self, node):
"""ClassDef visitor
AST abstract grammar:
ClassDef(identifier name, expr* bases, keyword* keywords, stmt* body,
expr* decorator_list)
"""
# parse meta
count = False
for body_element in node.body:
if isinstance(body_element, ast.FunctionDef):
pass
elif HybridParser.is_meta(body_element) and not count:
count = True
self.init_meta(MetaUnparser().visit(body_element.value))
else:
self.report_error("invalid class member")
# parse member functions
for body_element in node.body:
if isinstance(body_element, ast.FunctionDef):
self.visit(body_element)
from .utils import create_module
return create_module(self.functions)
def visit_FunctionDef(self, node):
"""FunctionDef visitor
AST abstract grammar:
FunctionDef(identifier name, arguments args, stmt* body, expr* decorator_list,
expr? returns, string? type_comment)
arguments = (arg* posonlyargs, arg* args, arg? vararg, arg* kwonlyargs,
expr* kw_defaults, arg? kwarg, expr* defaults)
arg = (identifier arg, expr? annotation, string? type_comment)
"""
self.init_function_parsing_env()
# add parameters of function
for arg in node.args.args:
arg_var = tvm.te.var(arg.arg, self.get_type(arg.annotation))
self.scope_emitter.update_symbol(arg.arg, arg_var)
self.params.append(arg_var)
# visit the body of function
self.scope_emitter.node_stack[-1].extend(reversed(node.body))
# fetch the body and return a tir.PrimFunc
func = tvm.tir.PrimFunc(
self.params,
self.get_body(),
ret_type=self.get_type(node.returns),
buffer_map=self.buffer_map,
attrs=tvm.ir.make_node("DictAttrs", **self.dict_attr),
)
self.functions[GlobalVar(node.name)] = func
return func
    def visit_Assign(self, node):
        """Assign visitor
        AST abstract grammar:
            Assign(expr* targets, expr value, string? type_comment)
        By now only 3 types of Assign is supported:
            1. special stmts with return value
                1.1 Buffer = tir.buffer_bind()/tir.buffer_decl()
                1.2 Var = tir.var()
                1.3 Var = tir.env_thread()
            2. (BufferStore) Buffer[PrimExpr, PrimExpr, ..., PrimExpr] = PrimExpr
            3. (Store) Var[PrimExpr] = PrimExpr
            4. with scope handlers with concise scoping and var def
                4.1 var = tir.alloc_with_scope()
        """

        # Multi-target ("a = b = ...") and tuple-unpacking assigns are rejected.
        if not len(node.targets) == 1:
            self.report_error("Only one-valued assignment is supported now")
        target = node.targets[0]
        if isinstance(target, ast.Name):
            # scenario 1&4: a plain name can only be assigned from a call to a
            # registered special_stmt or with-scope handler.
            # NOTE(review): self.target is set BEFORE visiting the value —
            # presumably the registered handler reads it to learn the bound
            # name; confirm against the registry implementations.
            self.target = [target.id]
            if not isinstance(node.value, ast.Call):
                self.report_error("Unsupported assign stmt")
            func = self.visit(node.value.func)
            if Registry.is_with_scope(func):
                # scenario 4: concise with-scope form; the handler produces the stmt.
                return self.visit(node.value)
            else:
                # scenario 1: special stmt with a return value; bind it in the
                # current scope (no TIR statement is emitted here).
                rhs = self.visit(node.value)
                self.scope_emitter.update_symbol(target.id, rhs)
        elif isinstance(target, ast.Subscript):
            # scenario 2&3: subscripted target is a buffer or var store.
            symbol, indexes = self.visit(target)
            rhs = self.visit(node.value)
            if isinstance(symbol, tvm.tir.Buffer):
                # BufferStore: any number of indices is allowed.
                return tvm.tir.BufferStore(symbol, tvm.runtime.convert(rhs), indexes)
            else:
                # Plain Store addresses flat memory: exactly one index.
                if len(indexes) != 1:
                    self.report_error("Invalid Store stmt")
                # Store (predicate fixed to True: unconditional store)
                return tvm.tir.Store(
                    symbol, tvm.runtime.convert(rhs), indexes[0], tvm.runtime.convert(True)
                )
        else:
            self.report_error("Unsupported Assign stmt")
def visit_AnnAssign(self, node):
"""AnnAssign visitor
AST abstract grammar:
AnnAssign(expr target, expr annotation, expr? value, int simple)
Corresponds to concise mode of with tir.let()
"""
if isinstance(node.target, ast.Name):
value = self.visit(node.value)
var = tvm.te.var(node.target.id, self.get_type(node.annotation))
self.scope_emitter.update_symbol(var.name, var)
return tvm.tir.LetStmt(var, value, self.visit(self.scope_emitter.node_stack[-1].pop()))
else:
self.report_error("Unsupported AnnAssign stmt")
def visit_Assert(self, node):
"""Assert visitor
AST abstract grammar:
Assert(expr test, expr? msg)
Corresponds to concise mode of with tir.assert()
"""
condition = self.visit(node.test)
if node.msg is None:
self.report_error("Message of AssertStmt can't be None")
message = self.visit(node.msg)
return tvm.tir.AssertStmt(condition, tvm.runtime.convert(message), self.get_body())
def visit_For(self, node):
"""For visitor
AST abstract grammar:
| |
nothing received for 30 seconds, resubscribe.
erc.subscribe()
count = 0
else:
#If nothing received for 5 seconds, up the count and
# try again.
count = count + 1
# If the display/main thread hasn't done anything in 10
# minutes, let us restart entv.
#TO DO - get_time
if enstore_display.message_queue.get_time <= \
time.time() - TEN_MINUTES:
message = "Display is stuck. Restarting entv. [1]"
Trace.trace(0, message, out_fp=sys.stderr)
restart_entv()
continue
# If the display/main thread hasn't done anything in 10
# minutes, let us restart entv.
if enstore_display.message_queue.len_queue(system_name) > 0 and \
enstore_display.message_queue.last_get_time() <= \
time.time() - TEN_MINUTES:
message = "Display is stuck. Restarting entv. [2]"
Trace.trace(0, message, out_fp=sys.stderr)
restart_entv()
commands = []
#Read any status responses from movers or the inquisitor.
if u.get_tsd().socket in readable:
send_request_dict_copy = get_sent_request(system_name, None)
for tx_id in send_request_dict_copy.keys():
try:
mstatus = u.recv_deferred(tx_id, 0.0)
if mstatus.has_key('time_in_state'):
#We have a mover response. Since the status
# field might be for an error, we need to
# avoid using is_ok() here, so that the error
# gets displayed instead of getting the
# response ignored.
pass
else:
#We have an inquisitor response.
if not e_errors.is_ok(mstatus):
#del send_request_dict[tx_id]
set_sent_request(None, system_name, tx_id)
continue
#commands = commands + handle_status(
# send_request_dict[tx_id]['name'], mstatus)
commands = commands + handle_status(
get_sent_request(system_name, tx_id)['name'],
mstatus)
#del send_request_dict[tx_id]
set_sent_request(None, system_name, tx_id)
if mstatus.get('work', None) == "show":
Trace.trace(1, "Recieved ID %s from inquisitor." \
% (tx_id,))
else:
Trace.trace(1, "Recieved ID %s from mover." \
% (tx_id,))
except (socket.error, select.error,
e_errors.EnstoreError):
pass
except errno.errorcode[errno.ETIMEDOUT]:
pass
else:
#Make sure to read any messages that finally arrived
# after the record of them being sent was purged from
# send_request_dict.
try:
u.recv_deferred([], 0.0)
except (socket.error, select.error,
e_errors.EnstoreError), msg:
if msg.args[0] not in [errno.ETIMEDOUT]:
Trace.log(0,
"Error reading socket: %s" % (str(msg),))
#Remove items that are in the queue without having received a
# response.
else:
drop_stale_status_requests(system_name)
#Read the next message from the event relay.
if erc.sock in readable:
try:
msg = enstore_erc_functions.read_erc(erc)
except SyntaxError:
exc, msg = sys.exc_info()[:2]
import traceback
traceback.print_tb(sys.exc_info()[2])
#Report on the error.
try:
message = "Failed to read erc message: (%s, %s)\n"
sys.stderr.write(message % (str(exc), str(msg)))
sys.stderr.flush()
except IOError:
pass
if msg and not getattr(msg, 'status', None):
#Take the message from event relay.
commands = commands + ["%s %s" % (msg.type,
msg.extra_info)]
##If read_erc is valid it is a EventRelayMessage instance. If
# it gets here it is a dictionary with a status field error.
elif getattr(msg, "status", None):
Trace.trace(1, "Event relay error: %s" % (str(msg),),
out_fp=sys.stderr)
if not commands:
continue
#Those commands that use mover names need to have the system name
# appended to the name.
commands = insert_system_name(commands, system_name, intf)
put_func = enstore_display.message_queue.put_queue #Shortcut.
for command in commands:
if command:
#For normal use put everything into the queue.
put_func(command, system_name)
#If necessary, handle resubscribing.
if not intf.messages_file:
now = time.time()
if now - start > TEN_MINUTES:
# resubscribe
erc.subscribe()
start = now
#End nicely.
if not intf.messages_file:
#Tell the event relay to stop sending us information.
erc.unsubscribe()
#Remove all of the routes that were set up to all of the movers.
for mover_name in movers:
try:
m_addr = csc.get(mover_name, {}).get('hostip', None)
#If we added a route to the mover, we should remove it.
# Most clients would prefer to leave such routes in place,
# but entv is not your normal client. It talks to many
# movers that makes the routing table huge.
host_config.unset_route(m_addr)
pass
except (socket.error, OSError):
pass
except TypeError:
# mov.server_address is equal to None
pass
Trace.trace(1, "Detected stop flag in %s messages thread." %
(system_name,))
return
"""
#########################################################################
#
#########################################################################
#
def setup_networking(system_name, intf):
    """Open the event-relay connection and mover status channels for one system.

    Behavior depends on the run mode:
    * messages-file playback: nothing to set up, return immediately;
    * threaded mode called from the MAIN thread: only reserve the setup so the
      networking thread performs it later;
    * otherwise: start the event relay client heartbeats, set up the movers,
      and (single-threaded mode) register Tk file handlers for the sockets.
    """
    global u
    global timeout_time

    #This is a time hack to get a clean output file.
    if intf.generate_messages_file:
        timeout_time = time.time() + intf.capture_timeout
    else:
        timeout_time = None

    # Get the info from past events recorded in a file.
    if intf.messages_file:
        # Playback mode reads from the file elsewhere; no sockets needed.
        #        messages_file = open(intf.messages_file, "r")
        #
        #        last_timestamp = -1 #Used to space the commands in real time.
        return
    # We will get all of the info from the event relay.
    else:
        #When called from the main thread, after menu selection, set this
        # placeholder for the networking thread to do the setup.
        if intf.threaded and threading.current_thread().getName() == MAIN_NAME:
            reserve_setup_of_erc(system_name)
            return

        Trace.trace(1, "Setting up connections to %s." % (system_name,))

        erc = get_erc(system_name)
        if erc:
            erc.system_name = system_name #Convenience for callbacks.

            #Start the heartbeats.
            retval = erc.start([event_relay_messages.ALL])
            if retval == erc.ERROR:
                # Best effort: log and continue; later reads will simply find
                # no messages.
                Trace.trace(0, "Could not contact event relay.",
                            out_fp=sys.stderr)

        #Determine the list of movers, tell the main thread about them
        # and send the movers status requests.
        setup_movers(system_name, get_display(system_name), intf)

        #Assign callback for this client's socket. The erc objects have
        # fileno() functions that just call their socket's fileno()
        # function.
        if not intf.threaded:
            Tkinter.tkinter.createfilehandler(erc, Tkinter.READABLE,
                                              process_erc)

        #This can be shared between the displays.
        if not intf.threaded:
            Tkinter.tkinter.createfilehandler(u, Tkinter.READABLE, process_udp)

        Trace.trace(1, "Set up connections to %s." % (system_name,))
#
def unsetup_networking(system_name, intf):
    """Tear down the event-relay connection and mover routes for one system.

    Safe to call even when setup never completed: every step tolerates
    missing state. The reverse of setup_networking().
    """
    erc = get_erc(system_name)
    if erc:
        #If the event relay client socket is already closed, we didn't get
        # far enough to call createfilehandler().
        try:
            fd = erc.fileno()
        except socket.error:
            fd = None

        #Release resources. (If not already done.)
        if not intf.threaded and type(fd) == types.IntType:
            Tkinter.tkinter.deletefilehandler(erc)

        #Destroy the socket, only after we have removed the file handler.
        erc.stop()
        del_erc(system_name)

    movers = get_mover_list(system_name, intf, fullnames=1)
    csc = get_csc(system_name)

    #Remove all of the routes that were set up to all of the movers.
    for mover_name in movers:
        try:
            m_addr = csc.get(mover_name, {}).get('hostip', None)
            #If we added a route to the mover, we should remove it.
            # Most clients would prefer to leave such routes in place,
            # but entv is not your normal client.  It talks to many
            # movers that makes the routing table huge.
            host_config.unset_route(m_addr)
            pass
        except (socket.error, OSError):
            # Route was never added or is already gone; best effort only.
            pass
        except TypeError:
            # mov.server_address is equal to None
            pass
#Cleanup all the sockets still open.
def cleanup_networking(intf):
    """Close every socket that is still open and drop the shared Tk file handler."""
    global u

    if not intf.threaded:
        Tkinter.tkinter.deletefilehandler(u)

    for name in keys_ercs():
        unsetup_networking(name, intf)
#########################################################################
# Callback functions for single threaded entv.
#########################################################################
#Callback to resubscribe to the event relay(s).
def subscribe_erc():
global master_windowframe
global subscribe_id
for system_name in keys_ercs():
print "subscribing with %s at %s" % (system_name, time.ctime())
erc = get_erc(system_name)
erc.subscribe()
#Need to setup the next callback call.
subscribe_id = master_windowframe.after(TEN_MINUTES_IN_MILISECONDS, subscribe_erc)
#Callback to handle old/stale messages.
def old_messages():
    """Periodic Tk callback for stale status bookkeeping.

    Fires off status requests to the movers/inquisitor (send only — responses
    arrive via the socket callbacks), prunes queue entries that never got an
    answer, then reschedules itself in 5 seconds.
    """
    global old_messages_id
    global master_windowframe

    send_all_status_requests()
    drop_stale_status_requests()

    old_messages_id = master_windowframe.after(5000, old_messages)
#Callback when the event_relay_client gets a Trace.notify() message.
# This function is also used by handle_messages() in threaded mode.
def process_erc(erc, mask):
    """Read one message from the event relay client and queue the resulting
    display command(s).

    Used both as a Tk file handler callback (single-threaded mode) and from
    handle_messages() in threaded mode. *mask* is required by the Tk callback
    signature but unused.
    """
    global intf_of_entv

    __pychecker__ = "unusednames=mask"

    commands = []

    try:
        msg = enstore_erc_functions.read_erc(erc)
    except SyntaxError:
        # read_erc parses the wire format; a malformed message surfaces as
        # SyntaxError.  Report and fall through with msg unset-safe below.
        exc, msg = sys.exc_info()[:2]
        import traceback
        traceback.print_tb(sys.exc_info()[2])

        #Report on the error.
        try:
            message = "Failed to read erc message: (%s, %s)\n"
            sys.stderr.write(message % (str(exc), str(msg)))
            sys.stderr.flush()
        except IOError:
            pass

    if msg and not getattr(msg, 'status', None):
        #Take the message from event relay.
        commands = commands + ["%s %s" % (msg.type,
                                          msg.extra_info)]
    ##If read_erc is valid it is a EventRelayMessage instance.  If
    # it gets here it is a dictionary with a status field error.
    elif getattr(msg, "status", None):
        Trace.trace(1, "Event relay error: %s" % (str(msg),),
                    out_fp=sys.stderr)

    #Those commands that use mover names need to have the system name
    # appended to the name.
    commands = insert_system_name(commands, erc.system_name, intf_of_entv)

    for command in commands:
        if command:
            #For normal use put everything into the queue.
            enstore_display.message_queue.put_queue(command, erc.system_name)
#Callback when the general purpose udp client gets a reply message.
# This function is also used by handle_messages() in threaded mode.
def process_udp(local_udp_client, mask):
global u #u and local_udp_client are the same
global intf_of_entv
__pychecker__ = "unusednames=local_udp_client,mask"
send_request_dict_copy = get_sent_request(None, None)
for system_name in send_request_dict_copy.keys():
commands = []
for tx_id in send_request_dict_copy[system_name].keys():
try:
| |
<reponame>Amirsorouri00/neolej
'''
.d8888b. 888 888 888 8888888 888
d88P Y88b 888 888 888 888 888
888 888 888 888 888 888 888
888 888 .d88b. 88888b. 8888b. 888 888 88888b.d88b. 88888b. .d88b. 888d888 888888 .d8888b
888 88888 888 d88""88b 888 "88b "88b 888 888 888 "888 "88b 888 "88b d88""88b 888P" 888 88K
888 888 888 888 888 888 888 .d888888 888 888 888 888 888 888 888 888 888 888 888 "Y8888b.
Y88b d88P 888 Y88..88P 888 d88P 888 888 888 888 888 888 888 888 d88P Y88..88P 888 Y88b. X88
"Y8888P88 888 "Y88P" 88888P" "Y888888 888 8888888 888 888 888 88888P" "Y88P" 888 "Y888 88888P'
888
888
888
'''
from accounts.serializers.user_serializer import UserSerializer as US
from django.http import JsonResponse, HttpResponse
from accounts.models import User
'''
88888888888 888
888 888
888 888
888 .d88b. .d8888b 888888 .d8888b
888 d8P Y8b 88K 888 88K
888 88888888 "Y8888b. 888 "Y8888b.
888 Y8b. X88 Y88b. X88
888 "Y8888 88888P' "Y888 88888P'
'''
def test2(request, format=None):
    """Serialize every User and echo the serialized data back as JSON."""
    all_users = User.objects.all()
    users_serializer = US(all_users, many=True)
    print('user serialized result = {0}'.format(users_serializer))
    return JsonResponse({'received data': users_serializer.data}, safe=False, status=200)
def test1(request, format=None):
    """Create a User from POSTed form data.

    Bug fix: the original called ``serializer.save()`` unconditionally, so an
    invalid payload raised (500) instead of returning a clean error; invalid
    payloads now get a 400 response carrying the validation errors.
    """
    serializer = US(data=request.POST)
    if not serializer.is_valid():
        return JsonResponse({'received data': request.POST,
                             'errors': serializer.errors},
                            safe=False, status=400)
    serializer.save()
    return JsonResponse({'received data': request.POST}, safe=False, status=200)
'''
888 .d88888b. .d8888b. 8888888 888b 888 d88P 888 .d88888b. .d8888b. .d88888b. 888 888 88888888888
888 d88P" "Y88b d88P Y88b 888 8888b 888 d88P 888 d88P" "Y88b d88P Y88b d88P" "Y88b 888 888 888
888 888 888 888 888 888 88888b 888 d88P 888 888 888 888 888 888 888 888 888 888
888 888 888 888 888 888Y88b 888 d88P 888 888 888 888 888 888 888 888 888
888 888 888 888 88888 888 888 Y88b888 d88P 888 888 888 888 88888 888 888 888 888 888
888 888 888 888 888 888 888 Y88888 d88P 888 888 888 888 888 888 888 888 888 888
888 Y88b. .d88P Y88b d88P 888 888 Y8888 d88P 888 Y88b. .d88P Y88b d88P Y88b. .d88P Y88b. .d88P 888
88888888 "Y88888P" "Y8888P88 8888888 888 Y888 d88P 88888888 "Y88888P" "Y8888P88 "Y88888P" "Y88888P" 888
'''
from rest_framework.authtoken.views import ObtainAuthToken
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
from rest_framework import status
class CustomAuthToken(ObtainAuthToken):
    """Token login endpoint returning the auth token plus basic user info.

    Accepts credentials via ObtainAuthToken's serializer and returns the
    user's token (created on first login), uuid and email.
    """

    @csrf_exempt
    def post(self, request, *args, **kwargs):
        serializer = self.serializer_class(data=request.data, context={'request': request})
        if serializer.is_valid():
            user = serializer.validated_data['user']
            # get_or_create: repeated logins reuse the same token key.
            token, created = Token.objects.get_or_create(user=user)
            return JsonResponse({
                'token': token.key,
                'user_uuid': user.uuid,
                'email': user.email
            }, safe=False, status=status.HTTP_202_ACCEPTED)
        # Bug fix: the original read ``self.errors``, an attribute that does
        # not exist on this view (AttributeError at runtime); the validation
        # errors live on the serializer.
        return JsonResponse({'received data': request.POST, 'errors': serializer.errors}, safe=False, status=status.HTTP_400_BAD_REQUEST)
'''
888 888 .d8888b. 8888888888 8888888b. d8888 8888888b. 8888888
888 888 d88P Y88b 888 888 Y88b d88888 888 Y88b 888
888 888 Y88b. 888 888 888 d88P888 888 888 888
888 888 "Y888b. 8888888 888 d88P d88P 888 888 d88P 888
888 888 "Y88b. 888 8888888P" d88P 888 8888888P" 888
888 888 "888 888 888 T88b d88P 888 888 888
Y88b. .d88P Y88b d88P 888 888 T88b d8888888888 888 888
"Y88888P" "Y8888P" 8888888888 888 T88b d88P 888 888 8888888
'''
from django.views.decorators.http import require_http_methods
from django.utils.decorators import method_decorator
from rest_framework.views import APIView
from rest_framework.parsers import MultiPartParser, FormParser, JSONParser
from rest_framework.permissions import IsAuthenticated
from django.shortcuts import get_object_or_404
@method_decorator([require_http_methods(["GET", "POST", "PUT", "DELETE"])], name='dispatch')
class UserAPI(APIView):
    """CRUD endpoint for User objects.

    GET looks users up by ``?field=all|uuid|email`` (falling back to ``?id=``),
    POST creates a user (unauthenticated) and sends credentials by SMS,
    PUT partially updates by uuid, DELETE soft-deletes (sets is_active=False).
    """
    parser_classes = (MultiPartParser, FormParser, JSONParser)
    permission_classes = (IsAuthenticated,)
    serializer_class = US
    model = User
    # Class-level default only; every handler reassigns ``self.errors`` so
    # concurrent requests never share the same list object.
    errors = []

    def dispatch(self, request, uuid=None, format=None, *args, **kwargs):
        # User creation must work without authentication.
        if 'POST' == request.method:
            self.permission_classes = ()
        return super().dispatch(request, uuid=uuid, format=None, *args, **kwargs)

    def get(self, request, format=None, *args, **kwargs):
        """Return one or many serialized users depending on the ``field`` query arg."""
        self.errors = []
        print('in get')
        user_serialized = 'user_serialized temp'
        if request.GET.get('field'):
            field = request.GET.get('field')
            if 'all' == field:
                users = User.objects.all()
                user_serialized = self.serializer_class(users, many=True)
            elif 'uuid' == field:
                user_serialized = self.serializer_class(get_object_or_404(self.model, uuid=request.GET.get('uuid')))
            elif 'email' == field:
                user_serialized = self.serializer_class(get_object_or_404(self.model, email=request.GET.get('email')))
            else:
                return JsonResponse({'error': "This url doesn't provide information based on your request information."}, safe=False, status=status.HTTP_406_NOT_ACCEPTABLE)
        else:
            user_serialized = self.serializer_class(get_object_or_404(self.model, id=request.GET.get('id')))
        return JsonResponse({'response': user_serialized.data}, safe=False, status=status.HTTP_200_OK)

    def post(self, request, format=None, *args, **kwargs):
        """Create a user and text the submitted credentials to their phone."""
        self.errors = []
        print(request.data)
        user_serializer = self.serializer_class(data=request.data)
        if user_serializer.is_valid():
            user = user_serializer.save()
            # Local import keeps the SMS dependency off the module import path.
            from commons.services import sms_384
            message = [{
                "adres": "نئولج",
                "username": request.data.get('email'),
                "password": request.data.get('password')
            }]
            to = request.data.get('cell_phone')
            # NOTE(review): the SMS result is ignored — a failed send is
            # silently dropped; confirm this best-effort behavior is intended.
            r = sms_384(message, to)
            return JsonResponse({'received data': request.data, 'errors': user_serializer.errors}, safe=False, status=status.HTTP_201_CREATED)
        else:
            print('user_serializer_errors: {0}'.format(user_serializer.errors))
            self.errors.append({'user_serializer': user_serializer.errors})
            return JsonResponse({'received data': request.data, 'errors': self.errors}, safe=False, status=status.HTTP_400_BAD_REQUEST)

    def put(self, request, uuid, format=None, *args, **kwargs):
        """Partially update the user addressed by ``uuid``."""
        self.errors = []
        print(request.data)
        print(uuid)
        user = get_object_or_404(self.model, pk=uuid)
        user_serializer = self.serializer_class(user, data=request.data, partial=True)
        if user_serializer.is_valid():
            user = user_serializer.save()
            return JsonResponse({'received data': request.data, 'errors': self.errors}, safe=False, status=status.HTTP_201_CREATED)
        else:
            print('user_serializer_errors: {0}'.format(user_serializer.errors))
            self.errors.append({'user_serializer': user_serializer.errors})
            return JsonResponse({'received data': request.data, 'errors': self.errors}, safe=False, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, uuid, format=None, *args, **kwargs):
        """Soft-delete: deactivate the user instead of removing the row."""
        from django.db.models import ProtectedError
        try:
            user = get_object_or_404(self.model, pk=uuid)
            user.is_active = False
            user.save()
            return JsonResponse({'deleted data': uuid}, safe=False, status=status.HTTP_200_OK)
        except ProtectedError:
            error_message = "This object can't be deleted!!"
            return JsonResponse(error_message, status=status.HTTP_401_UNAUTHORIZED)
        except Exception as e:
            # Bug fix: the original iterated the exception object itself
            # ({... for val in e}), which raises TypeError for non-iterable
            # exceptions; report the exception message instead.
            return JsonResponse({'errors': [str(e)]}, safe=False, status=status.HTTP_400_BAD_REQUEST)
'''
.d8888b. 8888888 888b 888 .d8888b. 888 8888888888 888 888 .d8888b. 8888888888 8888888b.
d88P Y88b 888 8888b 888 d88P Y88b 888 888 888 888 d88P Y88b 888 888 Y88b
Y88b. 888 88888b 888 888 888 888 888 888 888 Y88b. 888 888 888
"Y888b. 888 888Y88b 888 888 888 8888888 888 888 "Y888b. 8888888 888 d88P
"Y88b. 888 888 Y88b888 888 88888 888 888 888 888 "Y88b. 888 8888888P"
"888 888 888 Y88888 888 888 888 888 888 888 "888 888 888 T88b
Y88b d88P 888 888 Y8888 Y88b d88P 888 888 Y88b. .d88P Y88b d88P 888 888 T88b
"Y8888P" 8888888 888 Y888 "Y8888P88 88888888 8888888888 "Y88888P" "Y8888P" 8888888888 888 T88b
'''
@method_decorator([require_http_methods(["GET", "POST", "PUT", "DELETE"])], name='dispatch')
class SingleUser(APIView):
    """Lookup / create / update / hard-delete a single User addressed by uuid."""
    serializer_class = US
    model = User

    def get(self, request, *args, **kwargs):
        """Return the serialized user identified by the ``uuid`` query arg."""
        user_serialized = self.serializer_class(get_object_or_404(self.model, uuid=request.GET.get('uuid')))
        return JsonResponse({'response': user_serialized.data}, safe=False, status=200)

    def post(self, request, uuid=None, *args, **kwargs):
        """Create a user, or partially update an existing one when uuid is given."""
        serializer = None
        if uuid:
            user = get_object_or_404(self.model, uuid=uuid)
            serializer = self.serializer_class(user, data=request.POST, partial=True)
        else:
            serializer = self.serializer_class(data=request.POST)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse({'received data': serializer.data}, safe=False, status=200)
        else:
            return JsonResponse({'received data': request.POST, 'errors': serializer.errors}, safe=False, status=500)

    def put(self, request, uuid, *args, **kwargs):
        """Partially update the user addressed by ``uuid``."""
        user = get_object_or_404(self.model, uuid=uuid)
        serializer = self.serializer_class(user, data=request.POST, partial=True)
        if serializer.is_valid():
            # Bug fix: the original validated the update but never called
            # save(), so PUT silently discarded the changes.
            serializer.save()
            return JsonResponse({'received data': serializer.data}, safe=False, status=200)
        else:
            return JsonResponse({'received data': serializer.errors}, safe=False, status=500)

    def delete(self, request, uuid, *args, **kwargs):
        """Hard-delete the user (contrast UserAPI.delete, which soft-deletes)."""
        from django.db.models import ProtectedError
        try:
            get_object_or_404(self.model, uuid=uuid).delete()
            return JsonResponse({'deleted data': uuid}, safe=False, status=200)
        except ProtectedError:
            error_message = "This object can't be deleted!!"
            return JsonResponse(error_message, status=500)
        except Exception as e:
            # Bug fix: iterating the exception object raised TypeError;
            # report the exception message instead.
            return JsonResponse({'errors': [str(e)]}, safe=False, status=500)
'''
888 888 .d8888b. 8888888888 8888888b. 888 8888888 .d8888b. 88888888888
888 888 d88P Y88b 888 888 Y88b 888 888 d88P Y88b 888
888 888 Y88b. 888 888 888 888 888 Y88b. 888
888 888 "Y888b. 8888888 888 d88P 888 888 "Y888b. 888
888 888 "Y88b. 888 8888888P" 888 888 "Y88b. 888
888 888 "888 888 888 T88b 888 888 "888 888
Y88b. .d88P Y88b d88P 888 888 T88b 888 888 Y88b d88P 888
"Y88888P" "Y8888P" 8888888888 888 T88b 88888888 8888888 "Y8888P" 888
'''
from rest_framework import generics
# from rest_framework.permissions import IsAdminUser, IsAuthenticated
class UserListCreate(generics.ListCreateAPIView):
''' Used for read-write endpoints to represent a collection of model instances.
Provides get and post method handlers. '''
queryset = User.objects.all()
serializer_class = US
# permission_classes = (IsAdminUser, IsAuthenticated)
def get_queryset(self):
# user = User.objects.filter(id = 3)
# print(self.request.data.get('all'))
filter_role = {}
# queryset = self.get_queryset()
if self.request.data.get('all'):
return User.objects.all()
elif self.request.data.get('fields'):
#change
data = self.request.data.get('fields').strip('[').rstrip(']')
filter_role['id'] = [{int(val) for val in data.split(',')}]
print(filter_role)
# return | |
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "piyg_9.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
def _colorbrewer_cmap(self, cname, fname, reverse=False):
    """Return the named colorbrewer colormap, registering it on first use.

    The matplotlib registry is checked first so repeated accesses return the
    already-registered instance. Otherwise the RGB table is loaded from
    CMAPSFILE_DIR/colorbrewer/<fname>, optionally reversed, wrapped in a
    Colormap named ``cname``, registered, and returned.

    Refactor: the fifty properties below previously each repeated this
    nine-line body verbatim; their behavior is unchanged.
    """
    if cname in matplotlib.cm._cmap_registry:
        return matplotlib.cm.get_cmap(cname)
    cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", fname)
    table = self._coltbl(cmap_file)
    if reverse:
        table = table[::-1]
    cmap = Colormap(table, name=cname)
    matplotlib.cm.register_cmap(name=cname, cmap=cmap)
    return cmap

@property
def prgn(self):
    return self._colorbrewer_cmap("prgn", "prgn.rgb")

@property
def prgn_r(self):
    return self._colorbrewer_cmap("prgn_r", "prgn.rgb", reverse=True)

@property
def prgn_10(self):
    return self._colorbrewer_cmap("prgn_10", "prgn_10.rgb")

@property
def prgn_10_r(self):
    return self._colorbrewer_cmap("prgn_10_r", "prgn_10.rgb", reverse=True)

@property
def prgn_11(self):
    return self._colorbrewer_cmap("prgn_11", "prgn_11.rgb")

@property
def prgn_11_r(self):
    return self._colorbrewer_cmap("prgn_11_r", "prgn_11.rgb", reverse=True)

@property
def prgn_3(self):
    return self._colorbrewer_cmap("prgn_3", "prgn_3.rgb")

@property
def prgn_3_r(self):
    return self._colorbrewer_cmap("prgn_3_r", "prgn_3.rgb", reverse=True)

@property
def prgn_4(self):
    return self._colorbrewer_cmap("prgn_4", "prgn_4.rgb")

@property
def prgn_4_r(self):
    return self._colorbrewer_cmap("prgn_4_r", "prgn_4.rgb", reverse=True)

@property
def prgn_5(self):
    return self._colorbrewer_cmap("prgn_5", "prgn_5.rgb")

@property
def prgn_5_r(self):
    return self._colorbrewer_cmap("prgn_5_r", "prgn_5.rgb", reverse=True)

@property
def prgn_6(self):
    return self._colorbrewer_cmap("prgn_6", "prgn_6.rgb")

@property
def prgn_6_r(self):
    return self._colorbrewer_cmap("prgn_6_r", "prgn_6.rgb", reverse=True)

@property
def prgn_7(self):
    return self._colorbrewer_cmap("prgn_7", "prgn_7.rgb")

@property
def prgn_7_r(self):
    return self._colorbrewer_cmap("prgn_7_r", "prgn_7.rgb", reverse=True)

@property
def prgn_8(self):
    return self._colorbrewer_cmap("prgn_8", "prgn_8.rgb")

@property
def prgn_8_r(self):
    return self._colorbrewer_cmap("prgn_8_r", "prgn_8.rgb", reverse=True)

@property
def prgn_9(self):
    return self._colorbrewer_cmap("prgn_9", "prgn_9.rgb")

@property
def prgn_9_r(self):
    return self._colorbrewer_cmap("prgn_9_r", "prgn_9.rgb", reverse=True)

@property
def pubu(self):
    return self._colorbrewer_cmap("pubu", "pubu.rgb")

@property
def pubu_r(self):
    return self._colorbrewer_cmap("pubu_r", "pubu.rgb", reverse=True)

@property
def pubu_3(self):
    return self._colorbrewer_cmap("pubu_3", "pubu_3.rgb")

@property
def pubu_3_r(self):
    return self._colorbrewer_cmap("pubu_3_r", "pubu_3.rgb", reverse=True)

@property
def pubu_4(self):
    return self._colorbrewer_cmap("pubu_4", "pubu_4.rgb")

@property
def pubu_4_r(self):
    return self._colorbrewer_cmap("pubu_4_r", "pubu_4.rgb", reverse=True)

@property
def pubu_5(self):
    return self._colorbrewer_cmap("pubu_5", "pubu_5.rgb")

@property
def pubu_5_r(self):
    return self._colorbrewer_cmap("pubu_5_r", "pubu_5.rgb", reverse=True)

@property
def pubu_6(self):
    return self._colorbrewer_cmap("pubu_6", "pubu_6.rgb")

@property
def pubu_6_r(self):
    return self._colorbrewer_cmap("pubu_6_r", "pubu_6.rgb", reverse=True)

@property
def pubu_7(self):
    return self._colorbrewer_cmap("pubu_7", "pubu_7.rgb")

@property
def pubu_7_r(self):
    return self._colorbrewer_cmap("pubu_7_r", "pubu_7.rgb", reverse=True)

@property
def pubu_8(self):
    return self._colorbrewer_cmap("pubu_8", "pubu_8.rgb")

@property
def pubu_8_r(self):
    return self._colorbrewer_cmap("pubu_8_r", "pubu_8.rgb", reverse=True)

@property
def pubu_9(self):
    return self._colorbrewer_cmap("pubu_9", "pubu_9.rgb")

@property
def pubu_9_r(self):
    return self._colorbrewer_cmap("pubu_9_r", "pubu_9.rgb", reverse=True)

@property
def pubugn(self):
    return self._colorbrewer_cmap("pubugn", "pubugn.rgb")

@property
def pubugn_r(self):
    return self._colorbrewer_cmap("pubugn_r", "pubugn.rgb", reverse=True)

@property
def pubugn_3(self):
    return self._colorbrewer_cmap("pubugn_3", "pubugn_3.rgb")

@property
def pubugn_3_r(self):
    return self._colorbrewer_cmap("pubugn_3_r", "pubugn_3.rgb", reverse=True)

@property
def pubugn_4(self):
    return self._colorbrewer_cmap("pubugn_4", "pubugn_4.rgb")

@property
def pubugn_4_r(self):
    return self._colorbrewer_cmap("pubugn_4_r", "pubugn_4.rgb", reverse=True)

@property
def pubugn_5(self):
    return self._colorbrewer_cmap("pubugn_5", "pubugn_5.rgb")

@property
def pubugn_5_r(self):
    return self._colorbrewer_cmap("pubugn_5_r", "pubugn_5.rgb", reverse=True)

@property
def pubugn_6(self):
    return self._colorbrewer_cmap("pubugn_6", "pubugn_6.rgb")

@property
def pubugn_6_r(self):
    return self._colorbrewer_cmap("pubugn_6_r", "pubugn_6.rgb", reverse=True)

@property
def pubugn_7(self):
    return self._colorbrewer_cmap("pubugn_7", "pubugn_7.rgb")

@property
def pubugn_7_r(self):
    return self._colorbrewer_cmap("pubugn_7_r", "pubugn_7.rgb", reverse=True)

@property
def pubugn_8(self):
    return self._colorbrewer_cmap("pubugn_8", "pubugn_8.rgb")

@property
def pubugn_8_r(self):
    return self._colorbrewer_cmap("pubugn_8_r", "pubugn_8.rgb", reverse=True)
@property
def pubugn_9(self):
cname = "pubugn_9"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "pubugn_9.rgb")
| |
"""
Contains reusable elements for the gui, primarily menus,
as well as some functions for generating widely used menus,
such as unit dropdowns
"""
import os
import base64
from dash import dcc
from dash import html
import dash_bootstrap_components as dbc
import dash_cytoscape as cyto
from dash import dash_table
from .css_styles import *
"""
Mapping of all types in the GUI to their representative colors
"""
TYPE_COLORS = {
'Quantum_Repeater': '#4D9DE0',
'Quantum_Router': '#E15554',
'Photon_Source': '#E1BC29',
'Detector': '#3BB273',
'QuantumErrorCorrection': '#7768AE ',
'BSM_node': '#FFC857',
'Quantum': '#8634eb',
'Classical': '#345feb',
'Memory': '#8a34ab',
'Temp': '#084C61',
}
"""
Default node type options for dropdown menus
"""
# Dropdown entries for the node-type selectors; each 'value' corresponds to a
# TYPE_COLORS key. Entries commented out are types not currently exposed.
OPTIONS = [
    {
        'label': 'Quantum Router',
        'value': 'Quantum_Router'
    },
    {
        'label': 'Quantum Memory',
        'value': 'Memory'
    },
    {
        'label': 'Detector',
        'value': 'Detector'
    },
    # {
    #     'label': 'Protocol',
    #     'value': 'Protocol'
    # },
    {
        'label': 'BSM Node',
        'value': 'BSM_node'
    },
    {
        'label': 'Quantum Repeater',
        'value': 'Quantum_Repeater'
    },
    # {
    #     'label': 'Quantum Error Correction',
    #     'value': 'QuantumErrorCorrection'
    # },
    {
        'label': 'Photon Source',
        'value': 'Photon_Source'
    },
    # {
    #     'label': 'Temp',
    #     'value': 'Temp'
    # }
]
DIRECTORY, _ = os.path.split(__file__)
def getInputField(
    value_in: str,
    label: str,
    input_id: str,
    out_type: str,
    style_in=None,
    place=None
):
    """Build a labeled text-input row: label in the left column, input right."""
    label_col = dbc.Col(dbc.Label(label), width=4)
    input_col = dbc.Col(
        dbc.Input(
            id=input_id,
            value=value_in,
            className=out_type,
            placeholder=place
        ),
        width=8
    )
    return dbc.Row([label_col, input_col], style=style_in)
def makeDropdownOptions(possible_inputs):
    """Wrap each value in the {'label', 'value'} dict shape dash dropdowns expect."""
    return [{'label': item, 'value': item} for item in possible_inputs]
def getDropdownField(
    value_in: str,
    all_vals: "list[str]",
    label: str,
    input_id: str,
    out_type: str,
    style_in=None
):
    """Build a labeled dropdown row mirroring getInputField's two-column layout."""
    dropdown = dcc.Dropdown(
        id=input_id,
        options=all_vals,
        value=value_in,
        className=out_type
    )
    return dbc.Row(
        [
            dbc.Col(dbc.Label(label), width=4),
            dbc.Col(dropdown, width=8),
        ],
        style=style_in
    )
def getLogo(filename, width, link=None):
    """Return an <img> for a PNG in the assets directory, optionally linked.

    The image is inlined as a base64 data URI so no static file route is
    needed. Bug fix: the file was previously opened without ever being
    closed (a handle leak); a context manager now guarantees release.
    """
    image_filename = os.path.join(DIRECTORY, 'assets', filename)
    with open(image_filename, 'rb') as image_file:
        encoded_image = base64.b64encode(image_file.read())
    image = html.Img(
        src='data:image/png;base64,{}'.format(encoded_image.decode()),
        width=width,
        style={'border-radius': '20%'})
    if link is None:
        return image
    return html.A(image, href=link)
def getSelectedNodeMenu(values, templates):
    """Build the edit form shown for a selected node (name, type, template)."""
    name_field = getInputField(
        values['name'],
        'Name:',
        'selected_name',
        'name'
    )
    type_field = getDropdownField(
        values['type'],
        OPTIONS,
        'Node Type:',
        'selected_node_type',
        'type'
    )
    template_field = getDropdownField(
        values['template'],
        makeDropdownOptions(templates),
        'Template:',
        'selected_template',
        'template'
    )
    return dbc.Form([name_field, type_field, template_field])
def getSelectedEdgeMenu(values, nodes, link_types):
    """Build the edit form shown for a selected edge (endpoints, type, channel props)."""
    fields = [
        getDropdownField(
            values['source'],
            makeDropdownOptions(nodes),
            'Source:',
            'selected_source',
            'source'
        ),
        getDropdownField(
            values['target'],
            makeDropdownOptions(nodes),
            'Target:',
            'selected_target',
            'target'
        ),
        getDropdownField(
            values['link_type'],
            makeDropdownOptions(link_types),
            'Link Type:',
            'selected_link_type',
            'link_type'
        ),
        getInputField(
            values['attenuation'],
            'Attenuation:',
            'selected_attenuation',
            'attenuation'
        ),
        getInputField(
            values['distance'],
            'Distance:',
            'selected_distance',
            'distance'
        ),
    ]
    return dbc.Form(fields)
def getFreqUnits(id_extra):
    """Dropdown of frequency units; the option value is the multiplier to Hz.

    Bug fix: labels misused SI prefixes — 'mHz' (millihertz) was shown for
    the 1e6 multiplier, which is megahertz, and 'hz' was miscapitalized.
    Option values are unchanged.
    """
    return dcc.Dropdown(
        id='frequency_units_'+id_extra,
        options=[
            {'label': 'MHz', 'value': 1e6},
            {'label': 'kHz', 'value': 1e3},
            {'label': 'Hz', 'value': 1},
        ],
        value=1,
        style={'margin-bottom': '15px'}
    )
def getTimeUnits(id_extra):
    """Dropdown of time units; the option value is the multiplier to picoseconds."""
    unit_options = [
        {'label': 's', 'value': 1e12},
        {'label': 'ms', 'value': 1e9},
        {'label': 'ns', 'value': 1e3},
        {'label': 'ps', 'value': 1},
    ]
    return dcc.Dropdown(
        id='time_units_'+id_extra,
        options=unit_options,
        value=1,
    )
# Property fields shown when adding a classical edge.
classic_edge = [
    getInputField(
        '',
        'Distance:',
        'distance_input',
        ''
    ),
    getInputField(
        '',
        'Attenuation:',
        'attenuation_input',
        ''
    )
]
# Property fields shown when adding a quantum edge.
# NOTE(review): identical to classic_edge and reuses the same component ids
# ('distance_input', 'attenuation_input'); Dash requires unique ids, so
# confirm only one of these forms is ever rendered at a time.
quantum_edge = [
    getInputField(
        '',
        'Distance:',
        'distance_input',
        ''
    ),
    getInputField(
        '',
        'Attenuation:',
        'attenuation_input',
        ''
    )
]
# Template form fields for a Quantum_Router node.
router_template = [
    dbc.Label('Memory Size'),
    dbc.Input(
        id='mem_size',
        className='memo_size',
        placeholder='Memory Array Size'
    ),
    dbc.Label('Memory Type'),
    # Options are populated dynamically (empty until a callback fills them).
    dcc.Dropdown(
        id='mem_type',
        className='mem_type',
        value='',
        options=[]
    ),
]
# Template form fields for a quantum Memory node; time/frequency inputs are
# paired with unit dropdowns whose value is the multiplier to base units.
quantum_memory_template = [
    dbc.Label('Coherence Time'),
    dbc.Row(
        [
            dbc.Col(dbc.Input(
                id='coh_time_in',
                className='coherence_time',
                placeholder='1.3e12'),
                width=10
            ),
            dbc.Col(getTimeUnits('coh'), width=2)
        ],
        className="g-0"
    ),
    dbc.Label('Frequency'),
    dbc.Row(
        [
            dbc.Col(dbc.Input(
                id='mem_freq_in',
                className='frequency',
                placeholder='2000'),
                width=10
            ),
            dbc.Col(getFreqUnits('mem'), width=2)
        ],
        className="g-0"
    ),
    dbc.Label('Efficiency'),
    dbc.Input(id='mem_eff_in', className='efficiency', placeholder='0.75'),
    dbc.Label('Fidelity'),
    # NOTE(review): placeholder '500' looks unusual for a fidelity
    # (typically in [0, 1]) — confirm the intended units.
    dbc.Input(id='fidelity_in', className='fidelity', placeholder='500'),
]
# Template form fields for a Detector node.
detector_template = [
    dbc.Label('Dark Count Rate'),
    dbc.Input(
        id='dark_count_in',
        className='dark_count',
        placeholder='0'
    ),
    dbc.Label('Efficiency'),
    dbc.Input(
        id='detector_efficiency_in',
        className='efficiency',
        placeholder='0.8'
    ),
    dbc.Label('Count Rate'),
    dbc.Input(
        id='count_rate_in',
        className='count_rate',
        placeholder='5.7e3'
    ),
    dbc.Label('Resolution'),
    dbc.Input(
        id='resolution_in',
        className='resolution',
        placeholder='1e2'
    )
]
# Template form fields for a BSM node; detector options filled by a callback.
bsm_template = [
    dbc.Label('Detector Type'),
    dcc.Dropdown(
        id='detec_type',
        className='detector_type',
        value='',
        options=[]
    ),
]
# Protocol nodes currently expose no template fields.
protocol_template = [
]
# New #
# Element ids for the side-panel tabs: "tab-0" through "tab-8".
tab_ids = ["tab-" + str(index) for index in range(9)]
# Tab 0: form for adding a node (name, type, optional template).
add_node_form = html.Div(
    dbc.Form(
        [
            html.H3('Add Node'),
            getInputField(
                '',
                'Name:',
                'node_to_add_name',
                '',
                place='Enter Node ID'
            ),
            getDropdownField(
                'Quantum_Router',
                OPTIONS,
                'Type:',
                'type_menu',
                ''
            ),
            # Template options are populated dynamically by a callback.
            getDropdownField(
                '',
                [],
                'Template:',
                'add_template_menu',
                '',
                style_in={'margin-bottom': '10px'}
            ),
            html.Div(
                [
                    dbc.Button('Add Node', color='primary', id='add_node')
                ],
                className="d-grid"
            ),
            # Callback target for validation error messages.
            html.P(id='make_node_error', style={'color': 'red'})
        ]
    ),
    style=MENU_STYLE,
    id=tab_ids[0]
)
# Tab 1: form for adding an edge between two existing nodes.
add_edge = html.Div(
    [
        html.H3('Add Edge'),
        # Endpoint options are populated dynamically by a callback.
        getDropdownField(
            '',
            [],
            'From:',
            'from_node',
            ''
        ),
        getDropdownField(
            '',
            [],
            'To:',
            'to_node',
            ''
        ),
        # NOTE(review): 'Quantum' here fills getDropdownField's out_type
        # (className) parameter, not the initial value (which is '') —
        # confirm this is intended and not a misplaced default selection.
        getDropdownField(
            '',
            [
                {
                    'label': 'Quantum Connection',
                    'value': 'Quantum'
                },
                {
                    'label': 'Classical Connection',
                    'value': 'Classical'
                },
            ],
            'Link Type:',
            'edge_type_menu',
            'Quantum'
        ),
        # Filled with classic_edge/quantum_edge fields depending on link type.
        dbc.Row(
            id='edge_properties'
        ),
        html.P(id='make_edge_error', style={'color': 'red'}),
        html.Div(
            [
                dbc.Button('Add Edge', color='primary', id='add_edge'),
            ],
            className="d-grid"
        ),
    ],
    style=MENU_STYLE,
    id=tab_ids[1]
)
# Tab 2: delete panel; removes whichever graph element is currently selected.
delete_menu = html.Div(
    dbc.Form(
        [
            html.H3('Delete'),
            dbc.Row(
                [
                    html.P(
                        'Select an element and press the button to remove it'
                    ),
                    html.Div(
                        [
                            dbc.Button(
                                'Delete',
                                color='primary',
                                id='delete_button',
                            ),
                        ],
                        className="d-grid"
                    ),
                ]
            )
        ],
    ),
    style=MENU_STYLE,
    id=tab_ids[2]
)
# Tab 3: form for defining and saving a new node template.
make_new_template = html.Div(
    dbc.Form(
        [
            html.H3('Template'),
            getInputField(
                '',
                'ID:',
                'template_name',
                '',
                place='Enter ID'
            ),
            getDropdownField(
                'Quantum_Router',
                OPTIONS,
                'Type:',
                'template_type_menu',
                ''
            ),
            # Filled with the type-specific *_template fields by a callback.
            dbc.Row(
                id='template_properties'
            ),
            html.Div(
                [
                    dbc.Button(
                        'Save',
                        color='primary',
                        id='save_template',
                    ),
                ],
                className="d-grid"
            ),
            # Callback target for the save confirmation message.
            html.P(id='save_state', style={'color': 'blue'})
        ]
    ),
    style=MENU_STYLE,
    id=tab_ids[3]
)
def getTopoTable(data, columns):
    """Build the 'View' tab: node/edge toggle buttons above an editable,
    filterable table of the current topology."""
    toggle_style = {'padding': '5px 65px'}
    toggles = dbc.ButtonGroup(
        [
            dbc.Button('Nodes', id='toggle_nodes', color='primary',
                       style=dict(toggle_style)),
            dbc.Button('Edges', id='toggle_edges', color='primary',
                       style=dict(toggle_style)),
        ],
        style={'margin-bottom': '10px'},
    )
    table = dash_table.DataTable(
        id='graph_table',
        data=data,
        columns=columns,
        fixed_rows={'headers': True},
        page_size=20,
        style_table={'overflowY': 'auto', 'overflowX': 'auto'},
        style_cell={'minWidth': 150, 'width': 150},
        filter_action='native',
        editable=True,
    )
    return html.Div(
        dbc.Form([html.H3('View'), toggles, dbc.Form([table])]),
        style=MENU_STYLE,
        id=tab_ids[4],
    )
def delay_menu(data, columns):
    """Build the tab holding the editable classical-channel delay table."""
    delay_table = dash_table.DataTable(
        id='delay_table',
        data=data,
        columns=columns,
        fixed_rows={'headers': True},
        page_size=20,
        style_table={'overflowY': 'auto', 'overflowX': 'auto'},
        style_cell={'minWidth': 150, 'width': 150},
        editable=True,
        filter_action='native',
    )
    return html.Div(
        dbc.Form([html.H3('Classical Channel Delays'), delay_table]),
        style=MENU_STYLE,
        id=tab_ids[5],
    )
def tdm_menu(data, columns):
    """Build the tab holding the editable quantum-channel TDM table."""
    tdm_table = dash_table.DataTable(
        id='tdm_table',
        data=data,
        columns=columns,
        fixed_rows={'headers': True},
        page_size=20,
        style_table={'overflowY': 'auto', 'overflowX': 'auto'},
        style_cell={'minWidth': 150, 'width': 150},
        editable=True,
        filter_action='native',
    )
    return html.Div(
        dbc.Form([html.H3('Quantum Channel TDM'), tdm_table]),
        style=MENU_STYLE,
        id=tab_ids[6],
    )
def TDM_menu(tdm_data, tdm_columns):
    """Prepend a 'To' label column to the TDM matrix and build the TDM tab.

    Works on a copy of *tdm_data* (a DataFrame), but — as before — extends
    the caller's *tdm_columns* list in place.
    """
    row_labels = [col['id'] for col in tdm_columns]
    table = tdm_data.copy()
    table.insert(loc=0, column='To', value=row_labels)
    tdm_columns.insert(0, {'id': 'To', 'type': 'text', 'name': 'To'})
    return tdm_menu(table.to_dict('records'), tdm_columns)
def CCD_menu(delay_data, delay_columns):
    """Prepend a 'To' label column to the delay matrix and build the delay tab.

    Works on a copy of *delay_data* (a DataFrame), but — as before — extends
    the caller's *delay_columns* list in place.
    """
    row_labels = [col['id'] for col in delay_columns]
    table = delay_data.copy()
    table.insert(loc=0, column='To', value=row_labels)
    delay_columns.insert(0, {'id': 'To', 'type': 'text', 'name': 'To'})
    return delay_menu(table.to_dict('records'), delay_columns)
def makeLegend(values):
    """Return a legend form with one colored dot + label per node type in
    *values*; a hidden Div when *values* is None."""
    if values is None:
        return html.Div(
            hidden=True,
        )
    rows = []
    for type_name in values:
        dot = html.Span(
            className='dot',
            style={
                'background-color': TYPE_COLORS[type_name],
                'height': '25px',
                'width': '25px',
                'border-radius': '50%',
                'display': 'inline-block',
                'outline-style': 'solid',
                'outline-color': '#fff',
                'border': '2px solid black'
            }
        )
        # Nudge the label so it sits vertically centered next to the dot.
        label = dbc.Label(
            type_name,
            style={
                'position': 'relative',
                "top": '-5px',
                "left": '7px',
            }
        )
        rows.append(dbc.Row(
            [dbc.Col([dot, label])],
            style={'margin': '5px 0px'}
        ))
    return html.Form(
        children=rows,
        style={
            "width": "auto",
            "height": "auto",
        }
    )
# "Edit" tab: shows the currently selected element's editable fields
# (rendered into 'selected_element' by a callback) plus a submit button.
selection_menu = html.Div(
    [
        html.H3('Edit'),
        html.Div(id='selected_element'),
        html.Div(
            [
                dbc.Button(
                    'Submit',
                    id='submit_edit',
                    color='primary',
                    style={
                        'margin-top': '10px'
                    }
                )
            ],
            className="d-grid"
        ),
    ],
    id=tab_ids[7],
    style=MENU_STYLE
)
# Scrollable card displaying raw simulation output ('results_out' is filled
# by the run callback).  Embedded at the bottom of the simulation menu below.
results_menu = dbc.Form(
    [
        html.H5("Results"),
        dbc.Card(
            [
                html.Pre(id='results_out'),
            ], style={
                'minHeight': '50vh',
                'maxHeight': '65vh',
                'overflowY': 'auto',
                'overflowX': 'auto'
            }
        )
    ]
)
# "Run" tab: simulation name, duration (+ time units), logging verbosity,
# the Run button, a polling interval for progress, and the results card.
simulation_menu = html.Div(
    [
        html.H3('Run'),
        # Name under which this simulation run is saved/reported.
        dbc.Row(
            [
                dbc.Col(
                    dbc.Label('Name'),
                    width=2
                ),
                dbc.Col(dbc.Input(
                    id='sim_name',
                    placeholder='Ex: Test_1'
                ), width=10)
            ],
            className="g-0"
        ),
        # Simulation duration; units come from the shared time-unit dropdown.
        dbc.Row(
            [
                dbc.Col(
                    dbc.Label('Time'),
                    width=2
                ),
                dbc.Col(dbc.Input(
                    id='sim_time_in',
                    placeholder='Enter simulation time'
                ), width=8),
                dbc.Col(
                    getTimeUnits('sim'),
                    width=2,
                ),
            ],
            className="g-0"
        ),
        # Log-level radio buttons; values match Python logging level names.
        dbc.Row(
            [
                dbc.Label("Logging Options", width=6),
                dbc.Col(
                    dbc.RadioItems(
                        id='logging_verbosity',
                        options=[
                            {"label": 'Critical', 'value': 'CRITICAL'},
                            {"label": 'Error', 'value': 'ERROR'},
                            {"label": 'Warning', 'value': 'WARNING'},
                            {"label": 'Info', 'value': 'INFO'},
                            {"label": 'Debug', 'value': 'DEBUG'},
                            {'label': 'None', 'value': 'NOTSET'},
                        ],
                        value='NOTSET',
                        inline=True
                    ),
                    width=12,
                ),
            ],
        ),
        html.Div(
            [
                dbc.Button('Run', color='primary', id='run_sim'),
            ],
            className="d-grid"
        ),
        # 1 s tick used to poll a running simulation; enabled by a callback.
        dcc.Interval(
            id='running',
            interval=1000,
            n_intervals=0,
            disabled=True
        ),
        html.Pre(id='runtime'),
        html.Pre(id='simtime'),
        results_menu
    ],
    id=tab_ids[8],
    style=MENU_STYLE
)
# Top navigation bar: logo + brand link on the left; actions (new network,
# export dropdown, help dropdown) on the right.
navbar = dbc.Navbar(
    children=[
        # Brand block links back to the project's GitHub organization.
        html.A(
            dbc.Row(
                [
                    dbc.Col(getLogo('sequence.jpg', '80px')),
                    dbc.Col(dbc.NavbarBrand(
                        "SeQUeNCe",
                        className="ml-2",
                        style={
                            'font-size': '50px'
                        }
                    )),
                ],
                align="center",
                className="g-0"
            ),
            href="https://github.com/sequence-toolbox",
            style={
                'position': 'relative',
                'top': '0px',
                'left': '0px'
            }
        ),
        dbc.Row(
            [
                dbc.NavLink(
                    'New Network',
                    id='new_network',
                    style={
                        'color': 'white'
                    },
                ),
                # Save/Load links are disabled for now (kept for reference).
                # dbc.NavLink(
                #     'Save',
                #     id='save_network',
                #     style={
                #         'color': 'white'
                #     }
                # ),
                # dbc.NavLink(
                #     'Load',
                #     id='load_network',
                #     style={
                #         'color': 'white'
                #     }
                # ),
                # Export selected pieces of the session as files.
                dbc.DropdownMenu(
                    [
                        dbc.DropdownMenuItem('All', id='export_all'),
                        dbc.DropdownMenuItem('Topology', id='export_topo'),
                        dbc.DropdownMenuItem('Templates', id='export_templ'),
                        dbc.DropdownMenuItem('Simulation', id='export_sim')
                    ],
                    label="Export",
                    group=True,
                    size='sm',
                    nav=True,
                    in_navbar=True,
                    toggle_style={
                        'color': 'white'
                    }
                ),
                # External documentation / issue-tracker links.
                dbc.DropdownMenu(
                    children=[
                        dbc.DropdownMenuItem(
                            "Help",
                            href='https://sequence-toolbox.github.io/',
                        ),
                        dbc.DropdownMenuItem(
                            'Report Issue',
                            href='https://github.com/sequence-toolbox/SeQUeNCe/issues', # nopep8
                        ),
                    ],
                    nav=True,
                    group=True,
                    size='sm',
                    in_navbar=True,
                    label="More",
                    right=True,
                    toggle_style={
                        'color': 'white'
                    }
                ),
            ],
            className="ml-auto flex-nowrap mt-3 mt-md-0 g-0",
            align="center",
        )
    ],
    color="dark",
    dark=True,
)
def get_network(elements_in):
return cyto.Cytoscape(
id='graph',
layout={
'name': 'cose',
'animate': True
},
zoomingEnabled=True,
responsive=True,
style={'width': '100%', 'height': '100vh'},
elements=elements_in,
stylesheet=[
{
'selector': 'node',
'style': {
'width': 50,
'height': 50,
| |
LISP_MAX_MAP_NOTIFY_RETRIES ) :
lprint ( "DDT Map-Request retry limit reached for EID {}, nonce 0x{}" . format ( green ( OoOoOooOOOOO0 , False ) , lisp_hex_string ( iIiIi1i1Iiii ) ) )
if 76 - 76: I1ii11iIi11i - ooOoO0o % OoooooooOO / Oo0Ooo % IiII / ooOoO0o
mr . dequeue_map_request ( )
return
if 57 - 57: O0
if 23 - 23: OoO0O00 / II111iiii . I1ii11iIi11i . O0
mr . retry_count += 1
if 13 - 13: I1ii11iIi11i
i1I1iIi1IiI = green ( iii11i1i1II11 , False )
i1i11ii1Ii = green ( OoOoOooOOOOO0 , False )
lprint ( "Retransmit DDT {} from {}ITR {} EIDs: {} -> {}, nonce 0x{}" . format ( bold ( "Map-Request" , False ) , "P" if mr . from_pitr else "" ,
# i1IIi * OOooOOo
red ( mr . itr . print_address ( ) , False ) , i1I1iIi1IiI , i1i11ii1Ii ,
lisp_hex_string ( iIiIi1i1Iiii ) ) )
if 35 - 35: I1Ii111 / Oo0Ooo * OoooooooOO / O0 / iIii1I11I1II1
if 44 - 44: o0oOOo0O0Ooo / iIii1I11I1II1
if 40 - 40: OoO0O00 / O0
if 60 - 60: iIii1I11I1II1 / Oo0Ooo / oO0o + iII111i
lisp_send_ddt_map_request ( mr , False )
if 66 - 66: iIii1I11I1II1 . O0 * IiII . ooOoO0o + i1IIi
if 83 - 83: o0oOOo0O0Ooo / II111iiii + I1IiiI - iII111i + OoO0O00
if 67 - 67: I1Ii111 - OoOoOO00 . i11iIiiIii - I1Ii111 . i11iIiiIii
if 25 - 25: I11i % I1Ii111 + Ii1I
mr . retransmit_timer = threading . Timer ( LISP_DDT_MAP_REQUEST_INTERVAL ,
lisp_retransmit_ddt_map_request , [ mr ] )
mr . retransmit_timer . start ( )
return
if 46 - 46: ooOoO0o + Oo0Ooo + oO0o / II111iiii . iIii1I11I1II1 * I1IiiI
if 87 - 87: I11i + iIii1I11I1II1
if 91 - 91: oO0o
if 58 - 58: i11iIiiIii / Ii1I - OoooooooOO
if 25 - 25: i1IIi * ooOoO0o % OOooOOo / I1IiiI
if 75 - 75: i11iIiiIii
if 38 - 38: iIii1I11I1II1
if 80 - 80: OoO0O00
def lisp_get_referral_node(referral, source_eid, dest_eid):
    """
    Select one referral-node from the referral-set of *referral*.

    Only nodes whose up/down status is up are considered; among those, the
    ones sharing the numerically lowest (best) priority are kept, and the
    source/destination EID pair is hashed to spread load across equal-
    priority nodes.  Returns None when no node is usable.
    """
    best_nodes = []
    for node in referral.referral_set.values():
        if (node.updown == False): continue
        if (len(best_nodes) == 0 or best_nodes[0].priority == node.priority):
            best_nodes.append(node)
        elif (best_nodes[0].priority > node.priority):
            best_nodes = [node]

    if (len(best_nodes) == 0): return(None)

    hash_value = dest_eid.hash_address(source_eid)
    return(best_nodes[hash_value % len(best_nodes)])
if 15 - 15: i11iIiiIii + o0oOOo0O0Ooo . Ii1I . I1IiiI
if 8 - 8: iII111i % II111iiii + IiII
if 5 - 5: i1IIi + II111iiii
if 75 - 75: OOooOOo . IiII . I1IiiI + OoooooooOO
if 35 - 35: I11i % i1IIi - I1ii11iIi11i . Oo0Ooo
if 69 - 69: ooOoO0o * OoO0O00 % o0oOOo0O0Ooo * o0oOOo0O0Ooo
if 35 - 35: I1IiiI . OOooOOo * OoO0O00 . I1ii11iIi11i - I1IiiI
def lisp_send_ddt_map_request(mr, send_to_root):
    """
    Forward queued Map-Request *mr* one step down the DDT hierarchy: look up
    the referral cache, pick a referral node, and ECM-encapsulate the
    Map-Request to it.  Sends a negative Map-Reply (and/or dequeues the
    entry) when no referral or no reachable node exists.
    """
    lisp_sockets = mr.lisp_sockets
    nonce = mr.nonce
    itr = mr.itr
    mr_source = mr.mr_source
    eid_str = mr.print_eid_tuple()

    #
    # Cap the number of transmissions of this queue entry.
    #
    if (mr.send_count == 8):
        lprint("Giving up on map-request-queue entry {}, nonce 0x{}".format(
            green(eid_str, False), lisp_hex_string(nonce)))
        mr.dequeue_map_request()
        return

    #
    # When escalating to the DDT root, look up the default (AFI_NONE)
    # entry instead of the request's own EID/group.
    #
    if (send_to_root):
        eid = lisp_address(LISP_AFI_NONE, "", 0, 0)
        group = lisp_address(LISP_AFI_NONE, "", 0, 0)
        mr.tried_root = True
        lprint("Jumping up to root for EID {}".format(green(eid_str, False)))
    else:
        eid = mr.eid
        group = mr.group

    referral = lisp_referral_cache_lookup(eid, group, False)
    if (referral == None):
        lprint("No referral cache entry found")
        lisp_send_negative_map_reply(lisp_sockets, eid, group, nonce, itr,
            mr.sport, 15, None, False)
        return

    lprint("Found referral cache entry {}, referral-type: {}".format(
        referral.print_eid_tuple(), referral.print_referral_type()))

    referral_node = lisp_get_referral_node(referral, mr_source, mr.eid)
    if (referral_node == None):
        lprint("No reachable referral-nodes found")
        mr.dequeue_map_request()
        lisp_send_negative_map_reply(lisp_sockets, referral.eid,
            referral.group, nonce, itr, mr.sport, 1, None, False)
        return

    lprint("Send DDT Map-Request to {} {} for EID {}, nonce 0x{}".format(
        referral_node.referral_address.print_address(),
        referral.print_referral_type(), green(eid_str, False),
        lisp_hex_string(nonce)))

    #
    # Flag the ECM as Map-Server bound for MS-REFERRAL / MS-ACK entries.
    #
    to_ms = (referral.referral_type == LISP_DDT_ACTION_MS_REFERRAL or
        referral.referral_type == LISP_DDT_ACTION_MS_ACK)
    lisp_send_ecm(lisp_sockets, mr.packet, mr_source, mr.sport, mr.eid,
        referral_node.referral_address, to_ms=to_ms, ddt=True)
if 35 - 35: iII111i / iII111i * OoOoOO00 - i11iIiiIii
if 27 - 27: i1IIi / I11i + I1Ii111 . II111iiii * OoO0O00
if 55 - 55: i1IIi % Ii1I - o0oOOo0O0Ooo | |
# <gh_stars>0
import tkinter as tk
import os
from sys import exit
import xml.etree.ElementTree as ET
import requests
import urllib.request
from tkinter import font as tkfont, filedialog
from io import BytesIO
from tkcalendar import DateEntry
from babel.numbers import *
from zipfile import ZipFile
from tkinter import ttk
from PIL import Image, ImageTk
class NasrParser(tk.Tk):
    def __init__(self, *args, **kwargs):
        """Create the root window, shared state, images, menu bar and frames.

        NOTE: fetches two logo images over HTTP during construction, so the
        app needs network access (and a display) at startup.
        """
        # Set the user inputs to None at the beginning of the program.
        # user_input1: ARTCC id, user_input2: AIRAC cycle, user_input3: date
        # string — presumably; set later by the input screen — verify.
        self.user_input1 = None
        self.user_input2 = None
        self.user_input3 = None
        # This will tell the program to complete all ARTCC's. By default it is True, however it is changed later.
        self.do_all = True
        # TODO: This can't be hardcoded in. I need to find a way to get this data.
        self.all_artccs = ['All Artcc', 'ZAP', 'ZAN', 'ZJX', 'ZME', 'ZTL', 'ZHU', 'ZID', 'ZFW', 'ZKC', 'ZHN', 'ZAB',
                           'ZLA', 'ZDV', 'ZSE', 'ZOA', 'ZUA', 'ZBW', 'ZNY', 'ZDC', 'ZMA', 'ZAU', 'ZMP', 'ZLC', 'ZOB',
                           'ZYZ', 'ZSU', 'ZVR', 'ZEG', 'FIM', 'SBA', 'ZAK', 'ZUL', 'ZWG']
        # Directories of user inputs and current exe location; all default to
        # the current working directory until the user picks them.
        self.exe_directory = os.getcwd()
        self.in_directory = os.getcwd()
        self.out_directory = os.getcwd()
        self.out_folder_name = "Error_Occurred"
        # Setting all the colors for everything (shared by the frame classes).
        self.label_fg_color = "black"
        self.label_bg_color = "dark gray"
        self.button_fg_color = "black"
        self.button_bg_color = "light gray"
        self.entry_fg_color = "black"
        self.entry_bg_color = "light gray"
        self.image_fg_color = "black"
        self.image_bg_color = "dark gray"
        self.frame_bg_color = "dark gray"
        # "YES"/"NO" flags recording whether the data files are on disk.
        self.has_apt_file = None
        self.has_meta_file = None
        # Start the Tk root window and shared fonts.
        tk.Tk.__init__(self, *args, **kwargs)
        self.title_font = tkfont.Font(family='Helvetica', size=12, slant="italic")
        self.helv10 = tkfont.Font(family='Helvetica', size=10)
        self.helv12 = tkfont.Font(family='Helvetica', size=12)
        # ZLC Logo, Get from web.
        zlc_img_url = requests.get("https://i.imgur.com/3SFeAHa.png")
        zlc_img_data = zlc_img_url.content
        self.logo = ImageTk.PhotoImage(Image.open(BytesIO(zlc_img_data)))
        # Work in progress Pic, Get from Web
        wip_img_url = requests.get("https://i.imgur.com/nyaxTUK.png")
        wip_img_data = wip_img_url.content
        self.wip = ImageTk.PhotoImage(Image.open(BytesIO(wip_img_data)))
        # Menu Bar Variables.
        # NOTE(review): the "Exit" item opens the WorkInProgress frame rather
        # than quitting — looks unintended; confirm.
        menu_bar = tk.Menu(self)
        instruction_menu = tk.Menu(menu_bar, tearoff=0)
        instruction_menu.add_command(label="About",
                                     command=lambda: self.show_frame("AboutScreen"))
        instruction_menu.add_command(label="Help",
                                     command=lambda: self.show_frame("WorkInProgress"))
        instruction_menu.add_separator()
        instruction_menu.add_command(label="Exit",
                                     command=lambda: self.show_frame("WorkInProgress"))
        menu_bar.add_cascade(label="About / Help", menu=instruction_menu)
        self.config(menu=menu_bar)
        # the container is where we'll stack a bunch of frames
        # on top of each other, then the one we want visible
        # will be raised above the others
        self.container = tk.Frame(self)
        self.container.pack(side="top", fill="both", expand=True)
        self.container.grid_rowconfigure(0, weight=1)
        self.container.grid_columnconfigure(3, weight=1)
        self.frames = {}
        for F in (StartScreen, UserInputScreen, DirectoryViewScreen, WorkInProgress,
                  SplashScreen, CompletedScreen, DownLoadingScreen, AboutScreen):
            page_name = F.__name__
            frame = F(parent=self.container, controller=self)
            self.frames[page_name] = frame
            # put all of the pages in the same location;
            # the one on the top of the stacking order
            # will be the one that is visible.
            frame.grid(row=0, column=0, sticky="nsew")
        self.show_frame("SplashScreen")
    def get_apt_txt(self):
        """Ensure APT.txt is on disk, then run the extraction pipeline.

        If the user reported not having APT.txt, downloads the APT.zip for
        the cycle encoded in ``self.user_input3`` (``YYYY-MM-DD``) from the
        FAA 28-day-subscription site and unzips it into
        ``self.in_directory``.  Afterwards runs :meth:`get_apt_in_artcc`
        and :meth:`get_procedures` and shows the completion screen.
        """
        if self.has_apt_file == "NO":
            # user_input3 is a dash-separated date; its parts build the
            # 28DaySub URL path segment.
            text = str(self.user_input3)
            new_text = text.split("-")
            print(new_text[0], new_text[1], new_text[2])
            print("APT.TXT FILE NOT FOUND DOWNLOADING IT NOW.")
            url = urllib.request.urlopen(
                f"https://nfdc.faa.gov/webContent/28DaySub/{new_text[0]}-{new_text[1]}-{new_text[2]}/APT.zip")
            # Unzip in memory and write every member's lines to APT.txt.
            with ZipFile(BytesIO(url.read())) as my_zip_file:
                for contained_file in my_zip_file.namelist():
                    with open(f"{self.in_directory}\\APT.txt",
                              "wb") as output:
                        for line in my_zip_file.open(contained_file).readlines():
                            # print(line)
                            output.write(line)
                my_zip_file.close()
            self.has_apt_file = "YES"
            # NOTE(review): `cal` is not defined in this method — presumably
            # a module-level DateEntry widget defined elsewhere; verify it
            # exists, otherwise this line raises NameError.
            cal.config(state='disabled')
            print(f"APT.TXT FILE DOWNLOAD COMPLETE. \nLocation: {self.in_directory}")
        else:
            # They already have the APT.TXT file. No further action needed, unless they lied about the location of it.
            pass
        self.get_apt_in_artcc(self.do_all)
        self.get_procedures(self.do_all)
        self.show_frame("CompletedScreen")
def get_apt_in_artcc(self, doing_all):
if doing_all is True:
x_var = 0
input_file = None
try:
input_file = open(f"{self.in_directory}/APT.txt", "r", errors='replace')
print(f"APT.TXT FILE FOUND: {self.in_directory}")
except FileNotFoundError:
# Basic error message right now. Need to have in case user selects wrong input directory.
# This will in the future be automated to take them back to select a new input directory.
# TODO: If user selects 'invalid' directory, doesnt cause crash and they can select new one.
print("ERROR: FILE_NOT_FOUND - {}/APT.TXT".format(self.in_directory))
exit()
text = input_file.read()
lines = text.split("\n")
try:
os.mkdir(f"{self.out_directory}/{self.user_input1}_{self.user_input2}")
except FileExistsError:
# print("ERROR: FILE_EXISTS_ERROR!")
# IF this part of the code is reached, the Folder already exists.
# We don't need to do anything if this is the case we just Pass.
pass
# Create the Three output files we need. This is okay as long as we close them when we are done with them.
for working_artcc in self.all_artccs:
x_var += 1
if x_var > len(self.all_artccs):
x_var = False
if x_var is not False:
try:
os.mkdir(f"{self.out_directory}/All Artcc_{self.user_input2}/{working_artcc}_{self.user_input2}")
except FileExistsError:
# print("ERROR: FILE_EXISTS_ERROR!")
# IF this part of the code is reached, the Folder already exists.
# We don't need to do anything if this is the case we just Pass.
pass
apt_output_file = open("{}/All Artcc_{}/{}_{}/{}_{}_FAA_APT_CODES.TXT".format(self.out_directory,
self.user_input2,
working_artcc,
self.user_input2,
working_artcc,
self.user_input2), "w")
apt_ICAO_output_file = open("{}/All Artcc_{}/{}_{}/{}_{}_ICAO_APT_CODES.TXT".format(self.out_directory,
self.user_input2,
working_artcc,
self.user_input2,
working_artcc,
self.user_input2), "w")
for line in lines:
line_type = line[0:3]
artcc = line[674:677]
if line_type == "APT":
if artcc == working_artcc:
airport = line[27:31]
f_string = "%s\n" % airport
apt_output_file.write(f_string)
if line[1210:1217].strip() != "":
icao = line[1210:1217].strip()
else:
icao = line[27:31].strip()
f1_string = "%s\n" % icao
apt_ICAO_output_file.write(f1_string)
apt_output_file.close()
apt_ICAO_output_file.close()
else:
pass
else:
input_file = None
try:
input_file = open(f"{self.in_directory}/APT.txt", "r", errors='replace')
print(f"APT.TXT FILE FOUND: {self.in_directory}")
except FileNotFoundError:
# Basic error message right now. Need to have in case user selects wrong input directory.
# This will in the future be automated to take them back to select a new input directory.
# TODO: If user selects 'invalid' directory, doesnt cause crash and they can select new one.
print("ERROR: FILE_NOT_FOUND - {}/APT.TXT".format(self.in_directory))
exit()
text = input_file.read()
lines = text.split("\n")
responsible_artcc = self.user_input1
try:
os.mkdir(f"{self.out_directory}/{self.user_input1}_{self.user_input2}")
except FileExistsError:
# print("ERROR: FILE_EXISTS_ERROR!")
# IF this part of the code is reached, the Folder already exists.
# We don't need to do anything if this is the case we just Pass.
pass
# Create the Three output files we need. This is okay as long as we close them when we are done with them.
apt_output_file = open("{}/{}_{}/{}_{}_FAA_APT_CODES.TXT".format(self.out_directory,
self.user_input1,
self.user_input2,
self.user_input1,
self.user_input2), "w")
apt_ICAO_output_file = open("{}/{}_{}/{}_{}_ICAO_APT_CODES.TXT".format(self.out_directory,
self.user_input1,
self.user_input2,
self.user_input1,
self.user_input2), "w")
for line in lines:
line_type = line[0:3]
artcc = line[674:677]
if line_type == "APT":
if artcc == responsible_artcc:
airport = line[27:31]
f_string = "%s\n" % airport
apt_output_file.write(f_string)
if line[1210:1217].strip() != "":
icao = line[1210:1217].strip()
else:
icao = line[27:31].strip()
f1_string = "%s\n" % icao
apt_ICAO_output_file.write(f1_string)
apt_output_file.close()
apt_ICAO_output_file.close()
input_file.close()
def get_procedures(self, doing_all):
if doing_all is True:
x_var = 0
# Downloads the META file.
try:
meta_file = open(f"{self.in_directory}/PROCEDURE_META_{self.user_input2}.xml", "r")
print(F"FOUND META FILE: {self.in_directory}/PROCEDURE_META_{self.user_input2}.xml")
meta_file.close()
self.has_meta_file = "YES"
except FileNotFoundError:
self.has_meta_file = "NO"
print("META FILE NOT FOUND DOWNLOADING IT NOW.")
url = f"https://aeronav.faa.gov/d-tpp/{self.user_input2}/xml_data/d-tpp_Metafile.xml"
faa_website_resp = requests.get(url)
with open(f"{self.in_directory}/PROCEDURE_META_{self.user_input2}.xml", "wb") as file:
for chunk in faa_website_resp.iter_content(chunk_size=1024):
if chunk:
file.write(chunk)
print(
F"META FILE DOWNLOAD COMPLETE. \nLocation: {self.in_directory}/PROCEDURE_META_{self.user_input2}.xml")
self.has_meta_file = "YES"
for working_artcc in self.all_artccs:
x_var += 1
if x_var > len(self.all_artccs):
x_var = False
if x_var is not False:
procedure_output_file = open(f"{self.out_directory}/All Artcc_{self.user_input2}/{working_artcc}_{self.user_input2}/{working_artcc}_{self.user_input2}_PROCEDURES.TXT", "w")
procedure_changes_output_file = open(f"{self.out_directory}/All Artcc_{self.user_input2}/{working_artcc}_{self.user_input2}/{working_artcc}_{self.user_input2}_PROCEDURE_CHANGES.TXT", "w")
apt_in_artcc = open(f"{self.out_directory}/All Artcc_{self.user_input2}/{working_artcc}_{self.user_input2}/{working_artcc}_{self.user_input2}_FAA_APT_CODES.TXT", "r")
apt_txt = apt_in_artcc.read()
apt_lines = apt_txt.split("\n")
tree = ET.parse(f"{self.in_directory}/PROCEDURE_META_{self.user_input2}.xml")
root = tree.getroot()
for state in root.iter('state_code'):
for city in state.iter('city_name'):
for airport in city.iter('airport_name'):
for line in apt_lines:
wanted_apt = line[:].strip(" ")
if wanted_apt == airport.attrib['apt_ident']:
procedure_output_file.write(f"[{airport.attrib['apt_ident']}]\n")
for record in airport.iter('record'):
procedure_output_file.write(f" {record[2].text} ")
procedure_output_file.write("| https://aeronav.faa.gov/d-tpp/{}/{}\n".format(
self.user_input2,
record[4].text
))
else:
# If it gets here, this means the Airport is not in the APT.TXT for that ARTCC.
pass
procedure_output_file.close()
for state in root.iter('state_code'):
for city in state.iter('city_name'):
for airport in city.iter('airport_name'):
for line in apt_lines:
wanted_apt = line[:].strip(" ")
if wanted_apt == airport.attrib['apt_ident']:
#if airport.attrib['apt_ident']['record'] == "":
# print("NO CHANGES, ADDITIONS, or DELETIONS FOR {}".format(airport.attrib['apt_ident']))
# continue
#else:
procedure_changes_output_file.write(f"[{airport.attrib['apt_ident']}]\n")
for record in airport.iter('record'):
link_text = record[4].text
link_text_striped = link_text[:-4]
# print(link_text_striped)
link_comp = "https://aeronav.faa.gov/d-tpp/{}/compare_pdf/{}_cmp.pdf".format(
self.user_input2,
link_text_striped
)
if record[3].text == "A":
procedure_changes_output_file.write(f" ({record[3].text}) ")
procedure_changes_output_file.write(f"{record[2].text}\n")
elif record[3].text == "C":
procedure_changes_output_file.write(f" ({record[3].text}) ")
procedure_changes_output_file.write(f"{record[2].text} | {link_comp}\n")
elif record[3].text == "D":
procedure_changes_output_file.write(f" ({record[3].text}) ")
procedure_changes_output_file.write(f"{record[2].text}\n")
else:
pass
else:
# If it gets here, this means the Airport is not in the APT.TXT for that ARTCC.
pass
procedure_changes_output_file.close()
apt_in_artcc.close()
else:
pass
else:
procedure_output_file = open("{}/{}_{}/{}_{}_PROCEDURES.TXT".format(self.out_directory,
self.user_input1,
self.user_input2,
self.user_input1,
self.user_input2), "w")
procedure_changes_output_file = open("{}/{}_{}/{}_{}_PROCEDURE_CHANGES.TXT".format(self.out_directory,
self.user_input1,
self.user_input2,
self.user_input1,
self.user_input2), "w")
apt_in_artcc = open("{}/{}_{}/{}_{}_FAA_APT_CODES.TXT".format(self.out_directory,
self.user_input1,
self.user_input2,
self.user_input1,
self.user_input2), "r")
apt_txt = apt_in_artcc.read()
apt_lines = apt_txt.split("\n")
# Downloads the META file.
try:
meta_file = open(f"{self.in_directory}/PROCEDURE_META_{self.user_input2}.xml", "r")
print(F"FOUND META FILE: {self.in_directory}/PROCEDURE_META_{self.user_input2}.xml")
meta_file.close()
self.has_meta_file = "YES"
except FileNotFoundError:
self.has_meta_file = "NO"
print("META FILE NOT FOUND DOWNLOADING | |
# coding: utf-8
"""
Automated Tool for Optimized Modelling (ATOM)
Author: Mavs
Description: Unit tests for basepredictor.py
"""
# Standard packages
import pytest
import numpy as np
import pandas as pd
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
# Own modules
from atom import ATOMClassifier, ATOMRegressor
from atom.branch import Branch
from atom.training import DirectClassifier
from atom.utils import NotFittedError, merge
from .utils import (
X_bin, y_bin, X_class, y_class, X_reg, y_reg, X_idx, y_idx,
bin_train, bin_test, X10, y10, X10_str,
)
# Test magic methods =============================================== >>
def test_getattr_branch():
    """Branches are reachable as attributes on the trainer."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.branch = "b2"
    assert clf.b2 is clf._branches["b2"]


def test_getattr_attr_from_branch():
    """Branch attributes are reachable straight from the trainer."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    assert clf.pipeline is clf.branch.pipeline


def test_getattr_model():
    """Fitted models are reachable as attributes on the trainer."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.run("Tree")
    assert clf.tree is clf._models[0]


def test_getattr_column():
    """Dataset columns are reachable as attributes."""
    clf = ATOMClassifier(X_class, y_class, random_state=1)
    assert isinstance(clf.alcohol, pd.Series)


def test_getattr_dataframe():
    """DataFrame methods are delegated by the trainer."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    assert isinstance(clf.head(), pd.DataFrame)


def test_getattr_invalid():
    """Unknown attribute access raises AttributeError."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(AttributeError, match=r".*object has no attribute.*"):
        _ = clf.invalid


def test_setattr_to_branch():
    """Branch properties can be assigned via the trainer."""
    df = merge(X_bin, y_bin)
    df.iloc[0, 3] = 4  # modify a single cell
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.dataset = df
    assert clf.dataset.iloc[0, 3] == 4  # the change went through


def test_setattr_normal():
    """Ordinary attributes are set on the trainer itself."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.attr = "test"
    assert clf.attr == "test"


def test_delattr_branch():
    """Branches can be removed with del."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.branch = "b2"
    clf.branch = "b3"
    del clf.branch
    assert list(clf._branches) == ["og", "master", "b2"]
    del clf.b2
    assert list(clf._branches) == ["og", "master"]


def test_delattr_models():
    """Models can be removed with del."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.run(["MNB", "LR"])
    del clf.winner
    assert clf.models == "MNB"
    del clf.winner
    assert not clf.models


def test_delattr_normal():
    """Ordinary attributes can be removed with del."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    del clf._models
    assert not hasattr(clf, "_models")


def test_delattr_invalid():
    """del on an unknown attribute raises AttributeError."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(AttributeError, match=r".*object has no attribute.*"):
        del clf.invalid


def test_contains():
    """`in` checks column membership on a trainer."""
    runner = DirectClassifier(models="LR", random_state=1)
    assert "mean radius" not in runner
    runner.run(bin_train, bin_test)
    assert "mean radius" in runner


def test_len():
    """len(trainer) equals the dataset length."""
    runner = DirectClassifier(models="LR", random_state=1)
    runner.run(bin_train, bin_test)
    assert len(runner) == len(X_bin)


def test_getitem():
    """Trainers are subscriptable by model name or column."""
    runner = DirectClassifier(models="LR", random_state=1)
    runner.run(bin_train, bin_test)
    assert runner["LR"] is runner.lr
    assert runner["lr"] is runner.lr
    assert runner["mean radius"] is runner.dataset["mean radius"]
    assert isinstance(runner[["mean radius", "mean texture"]], pd.DataFrame)
    with pytest.raises(ValueError, match=r".*has no model or column.*"):
        print(runner["invalid"])
    with pytest.raises(TypeError, match=r".*subscriptable with types.*"):
        print(runner[2.3])
# Test utility properties ========================================== >>
def test_branch_property():
"""Assert that the branch property returns the current branch."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
assert isinstance(atom.branch, Branch)
def test_models_property():
"""Assert that the models property returns the model names."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run(["LR", "Tree"])
assert atom.models == ["LR", "Tree"]
def test_models_property_no_run():
"""Assert that the models property doesn't crash for unfitted trainers."""
trainer = DirectClassifier(["LR", "Tree"], random_state=1)
assert trainer.models == ["LR", "Tree"]
def test_metric_property():
"""Assert that the metric property returns the metric names."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run("lr", metric="f1")
assert atom.metric == "f1"
def test_metric_property_no_run():
"""Assert that the metric property doesn't crash for unfitted trainers."""
trainer = DirectClassifier("lr", metric="r2", random_state=1)
assert trainer.metric == "r2"
def test_errors_property():
"""Assert that the errors property returns the model's errors."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run(["Tree", "LGB"], n_calls=5, n_initial_points=(2, 6))
assert "LGB" in atom.errors
def test_winner_property():
"""Assert that the winner property returns the best model in the pipeline."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run(["LR", "Tree", "LGB"], n_calls=0)
assert atom.winner is atom.lgb
def test_results_property():
"""Assert that the results property returns an overview of the results."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run("LR")
assert atom.results.shape == (1, 4)
def test_results_property_dropna():
"""Assert that the results property doesn't return columns with NaNs."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.run("LR")
assert "mean_bootstrap" not in atom.results
def test_results_property_successive_halving():
"""Assert that the results works for successive halving runs."""
atom = ATOMClassifier(X_bin, y_bin, random_state=1)
atom.successive_halving(["LR", "Tree"])
assert atom.results.shape == (3, 4)
assert list(atom.results.index.get_level_values(0)) == [0.5, 0.5, 1.0]
def test_results_property_train_sizing():
    """The results property should cover train sizing runs."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.train_sizing("LR")
    assert clf.results.shape == (5, 4)
    assert list(clf.results.index.get_level_values(0)) == [0.2, 0.4, 0.6, 0.8, 1.0]
# Test prediction methods ========================================== >>
def test_predict():
    """predict should raise before fitting and return an array afterwards."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(NotFittedError):  # When not yet fitted
        clf.predict(X_bin)
    clf.run("LR")
    assert isinstance(clf.predict(X_bin), np.ndarray)
def test_predict_proba():
    """predict_proba should raise before fitting and return an array afterwards."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(NotFittedError):
        clf.predict_proba(X_bin)
    clf.run("LR")
    assert isinstance(clf.predict_proba(X_bin), np.ndarray)
def test_predict_log_proba():
    """predict_log_proba should raise before fitting and return an array afterwards."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(NotFittedError):
        clf.predict_log_proba(X_bin)
    clf.run("LR")
    assert isinstance(clf.predict_log_proba(X_bin), np.ndarray)
def test_decision_function():
    """decision_function should raise before fitting and return an array afterwards."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(NotFittedError):
        clf.decision_function(X_bin)
    clf.run("LR")
    assert isinstance(clf.decision_function(X_bin), np.ndarray)
def test_score():
    """score should raise before fitting and return a float afterwards."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(NotFittedError):
        clf.score(X_bin, y_bin)
    clf.run("LR")
    assert isinstance(clf.score(X_bin, y_bin), float)
def test_score_sample_weights():
    """score should accept a sample_weight argument."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.run("LR")
    weights = list(range(len(y_bin)))
    assert isinstance(clf.score(X_bin, y_bin, sample_weight=weights), float)
# Test utility methods ============================================= >>
def test_get_rows_is_None():
    """With index=None, either the test rows or all rows are returned."""
    clf = ATOMClassifier(X_idx, y_idx, index=True, random_state=1)
    assert len(clf._get_rows(index=None, return_test=True)) < len(X_idx)
    assert len(clf._get_rows(index=None, return_test=False)) == len(X_idx)
def test_get_rows_is_slice():
    """A slice index should select the corresponding rows."""
    clf = ATOMClassifier(X_idx, y_idx, index=True, random_state=1)
    assert len(clf._get_rows(index=slice(20, 100, 2))) == 40
def test_get_rows_by_name():
    """Rows should be selectable by their index label."""
    clf = ATOMClassifier(X_idx, y_idx, index=True, random_state=1)
    with pytest.raises(ValueError, match=r".*not found in the dataset.*"):
        clf._get_rows(index="index")
    assert clf._get_rows(index="index_34") == ["index_34"]
def test_get_rows_by_position():
    """Rows should be selectable by their index position."""
    clf = ATOMClassifier(X_idx, y_idx, index=True, random_state=1)
    with pytest.raises(ValueError, match=r".*out of range.*"):
        clf._get_rows(index=1000)
    assert clf._get_rows(index=100) == [clf.X.index[100]]
def test_get_rows_none_selected():
    """Selecting an empty set of rows should raise a ValueError."""
    clf = ATOMClassifier(X_idx, y_idx, index=True, random_state=1)
    with pytest.raises(ValueError, match=r".*has to be selected.*"):
        clf._get_rows(index=slice(1000, 2000))
def test_get_columns_is_None():
    """With columns=None, all (or all matching) columns are returned."""
    clf = ATOMClassifier(X10_str, y10, random_state=1)
    assert len(clf._get_columns(columns=None)) == 4
    assert len(clf._get_columns(columns=None, only_numerical=True)) == 3
    assert len(clf._get_columns(columns=None, include_target=False)) == 3
def test_get_columns_slice():
    """A slice should select the corresponding columns."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    assert len(clf._get_columns(columns=slice(2, 6))) == 4
def test_get_columns_by_index():
    """Columns should be selectable by positional index."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(ValueError, match=r".*length of columns is.*"):
        clf._get_columns(columns=40)
    assert clf._get_columns(columns=0) == ["mean radius"]
def test_get_columns_by_name():
    """Columns should be selectable by name."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(ValueError, match=r".*not found in the dataset.*"):
        clf._get_columns(columns="invalid")
    assert clf._get_columns(columns="mean radius") == ["mean radius"]
def test_get_columns_by_type():
    """Columns should be selectable by dtype specifier."""
    clf = ATOMClassifier(X10_str, y10, random_state=1)
    assert clf._get_columns(columns="!number") == ["Feature 3"]
    assert clf._get_columns(columns="number") == ["Feature 1", "Feature 2", "target"]
def test_get_columns_exclude():
    """A leading `!` should exclude the named column."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(ValueError, match=r".*not found in the dataset.*"):
        clf._get_columns(columns="!invalid")
    assert len(clf._get_columns(columns="!mean radius")) == 30
    assert len(clf._get_columns(columns=["!mean radius", "!mean texture"])) == 29
def test_get_columns_none_selected():
    """Selecting an empty set of columns should raise a ValueError."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(ValueError, match=r".*At least one.*"):
        clf._get_columns(columns="datetime")
def test_get_columns_include_or_exclude():
    """Mixing included and excluded columns should raise a ValueError."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    with pytest.raises(ValueError, match=r".*either include or exclude columns.*"):
        clf._get_columns(columns=["mean radius", "!mean texture"])
def test_get_columns_return_inc_exc():
    """With return_inc_exc=True, a tuple of included/excluded is returned."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    assert isinstance(clf._get_columns(columns="number", return_inc_exc=True), tuple)
def test_get_columns_remove_duplicates():
    """Duplicate column selectors should be ignored."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    assert clf._get_columns(columns=[0, 1, 0]) == ["mean radius", "mean texture"]
def test_get_model_name_winner():
    """The string 'winner' should resolve to the winning model's name."""
    clf = ATOMClassifier(X_bin, y_bin, random_state=1)
    clf.run(["LR", "LGB"])
    assert clf._get_model_name("winner") == ["LGB"]
def test_get_model_name_exact_name():
"""Assert that a single model is returned if the name matches exactly."""
atom | |
# <gh_stars>0
# -*- coding: utf-8 -*-
"""
==================================================================================
"""
import copy
from itertools import takewhile
from rdflib import (
BNode,
Graph,
Literal,
RDF,
URIRef,
Variable,
)
from rdflib.util import first
from rdflib.namespace import split_uri
from FuXi.Horn.PositiveConditions import (
And,
BuildUnitermFromTuple,
Condition,
Or,
QNameManager,
SetOperator,
Uniterm,
)
from FuXi.Rete.BetaNode import project
from FuXi.Rete.Magic import AdornedUniTerm
from FuXi.Rete.Proof import ImmutableDict
from FuXi.Rete.RuleStore import N3Builtin
from FuXi.Rete.SidewaysInformationPassing import (
GetOp,
GetVariables,
iterCondition,
)
from FuXi.Rete.Util import selective_memoize
try:
from functools import reduce
except ImportError:
pass
def normalizeBindingsAndQuery(vars, bindings, conjunct):
    """
    Takes a query in the form of a list of variables to bind to
    an a priori set of bindings and a conjunct of literals and applies the bindings
    returning:
        - The remaining variables that were not substituted
        - The (possibly grounded) conjunct of literals
        - The bindings minus mappings involving substituted variables

    NOTE(review): the literals in *conjunct* are grounded in place via
    lit.ground(bindings), so the caller's literals are mutated.
    """
    _vars = set(vars)
    bindingDomain = set(bindings.keys())
    appliedBindings = False
    if bindings:
        #Apply a priori substitutions
        for lit in conjunct:
            # Variables of this literal that have an a priori binding.
            substitutedVars = bindingDomain.intersection(lit.toRDFTuple())
            lit.ground(bindings)
            if substitutedVars:
                appliedBindings = True
                _vars.difference_update(substitutedVars)
    # Only strip the substituted variables from the bindings if at least
    # one substitution actually happened.
    return list(_vars), conjunct, \
        project(bindings, _vars, inverse=True) if appliedBindings else bindings
def tripleToTriplePattern(graph, term):
    """Render one Horn literal as a SPARQL triple pattern (or a FILTER for builtins)."""
    if isinstance(term, N3Builtin):
        # Builtins are serialized as FILTER expressions using the graph's
        # registered template for the builtin's URI.
        template = graph.templateMap[term.uri]
        return "FILTER(%s)" % (template % (term.argument.n3(), term.result.n3()))
    rendered = [
        renderTerm(graph, component, predTerm=position == 1)
        for position, component in enumerate(term.toRDFTuple())
    ]
    return "%s %s %s" % tuple(rendered)
@selective_memoize([0])
def normalizeUri(rdfTerm, revNsMap):
    """
    Takes an RDF Term and 'normalizes' it into a QName (using the registered
    prefix) or (unlike compute_qname) the Notation 3 form for URIs: <...URI...>

    :param rdfTerm: the term (URIRef, Variable, ...) to normalize
    :param revNsMap: reverse namespace map (namespace URI -> prefix)
    :return: a string rendering of the term
    """
    try:
        namespace, name = split_uri(rdfTerm)
        namespace = URIRef(namespace)
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt /
        # SystemExit.  split_uri fails when the term has no splittable
        # namespace, so fall back to variable / verbatim N3 rendering.
        if isinstance(rdfTerm, Variable):
            return "?%s" % rdfTerm
        else:
            return "<%s>" % rdfTerm
    prefix = revNsMap.get(namespace)
    if prefix is None and isinstance(rdfTerm, Variable):
        return "?%s" % rdfTerm
    elif prefix is None:
        return "<%s>" % rdfTerm
    else:
        qNameParts = compute_qname(rdfTerm, revNsMap)
        return ':'.join([qNameParts[0], qNameParts[-1]])
@selective_memoize([0])
def compute_qname(uri, revNsMap):
    """
    Split *uri* into a (prefix, namespace, localname) triple, minting a
    fresh synthetic '_N' prefix for namespaces not yet in revNsMap.
    """
    namespace, name = split_uri(uri)
    namespace = URIRef(namespace)
    prefix = revNsMap.get(namespace)
    if prefix is None:
        # Unknown namespace: register a generated prefix so subsequent
        # calls reuse it.  NOTE: mutates the shared revNsMap in place.
        prefix = "_%s" % len(revNsMap)
        revNsMap[namespace] = prefix
    return (prefix, namespace, name)
def renderTerm(graph, term, predTerm=False):
    """
    Render an RDF term for inclusion in a SPARQL serialization.

    :param graph: graph providing namespace bindings (and optionally revNsMap)
    :param term: the term to render
    :param predTerm: True when the term sits in predicate position (enables
        the ' a ' shorthand for rdf:type)
    """
    if term == RDF.type and predTerm:
        # rdf:type in predicate position has the dedicated shorthand 'a'.
        return ' a '
    elif isinstance(term, URIRef):
        # Prefer the graph's reverse namespace map; fall back to building
        # one from the graph's declared namespaces.
        if hasattr(graph, 'revNsMap') and graph.revNsMap:
            revNsMap = graph.revNsMap
        else:
            revNsMap = dict([(u, p) for p, u in graph.namespaces()])
        qname = normalizeUri(term, revNsMap)
        # Auto-generated prefixes (leading '_') are not declared in the
        # serialized query, so emit the full URI instead.
        return u"<%s>" % term if qname[0] == '_' else qname
    elif isinstance(term, Literal):
        return term.n3()
    else:
        try:
            return term.n3() if isinstance(term, BNode) else graph.qname(term)
        except Exception:
            # Was a bare `except:`.  graph.qname may fail for terms outside
            # any bound namespace; fall back to plain N3 rendering.
            return term.n3()
def RDFTuplesToSPARQL(conjunct,
                      edb,
                      isGround=False,
                      vars=None,
                      symmAtomicInclusion=False):
    """
    Takes a conjunction of Horn literals and returns the
    corresponding SPARQL query.

    :param conjunct: conjunction of Horn literals
    :param edb: the fact graph (used for QName resolution)
    :param isGround: True renders an ASK query, False a SELECT query
    :param vars: variables to project in a SELECT query
    :param symmAtomicInclusion: render the conjunct as a ?KIND FILTER
        disjunction (symmetric atomic inclusion axioms)
    """
    # Bug fix: the default used to be a shared mutable `vars=[]`, and the
    # pop() below mutated the *caller's* list (e.g. EDBQuery.returnVars).
    # Work on a private copy instead.
    vars = list(vars) if vars else []
    queryType = "ASK" if isGround else "SELECT %s" % (
        ' '.join([v.n3() for v in vars]))
    queryShell = "%s {\n%s\n}" if len(conjunct) > 1 else "%s { %s }"
    if symmAtomicInclusion:
        if vars:
            var = vars.pop()
            prefix = "%s a ?KIND" % var.n3()
        else:
            prefix = "%s a ?KIND" % first(
                [first(iterCondition(lit)).arg[0].n3() for lit in conjunct])
        # Unwrap singleton conjunctions so GetOp sees the bare literal.
        conjunct = (i.formulae[0] if isinstance(i, And) else i for i in conjunct)
        subquery = queryShell % (queryType, "%s\nFILTER(%s)" % (
            prefix,
            ' ||\n'.join(['?KIND = %s' % edb.qname(GetOp(lit))
                          for lit in conjunct])))
    else:
        subquery = queryShell % (
            queryType, ' .\n'.join(
                ['\t' + tripleToTriplePattern(edb, lit)
                 for lit in conjunct]))
    return subquery
#@selective_memoize([0, 1], ['vars', 'symmAtomicInclusion'])
def RunQuery(subQueryJoin,
             bindings,
             factGraph,
             vars=None,
             debug=False,
             symmAtomicInclusion=False):
    """
    Evaluate a conjunction of EDB literals as one SPARQL query over
    factGraph.

    Returns False for an empty conjunct.  Otherwise returns a
    (queryString, result) pair, where result is the boolean ASK answer
    for ground queries, or a generator of solution dicts
    (variable -> term) for SELECT queries.
    """
    # Prefer the graph's registered nsMap; otherwise collect the
    # namespace bindings the graph itself advertises.
    initialNs = hasattr(factGraph, 'nsMap') and factGraph.nsMap or \
        dict([(k, v) for k, v in factGraph.namespaces()])
    if not subQueryJoin:
        return False
    if not vars:
        vars = []
    if bool(bindings):
        # Apply a priori substitutions, shrinking the set of open vars.
        openVars, conjGroundLiterals, bindings = \
            normalizeBindingsAndQuery(set(vars),
                                      bindings,
                                      subQueryJoin)
        vars = list(openVars)
    else:
        conjGroundLiterals = subQueryJoin
    # No remaining open variables means the query is ground -> ASK.
    isGround = not vars
    subquery = RDFTuplesToSPARQL(conjGroundLiterals,
                                 factGraph,
                                 isGround,
                                 [v for v in vars],
                                 symmAtomicInclusion)
    rt = factGraph.query(subquery,
                         initNs=initialNs)
    # DEBUG=debug)
    projectedBindings = vars and project(bindings, vars) or bindings
    if isGround:
        if debug:
            print("%s%s-> %s" % (
                subquery,
                projectedBindings and
                " %s apriori binding(s)" % len(projectedBindings) or '',
                rt.askAnswer[0]))
        return subquery, rt.askAnswer[0]
    else:
        # Wrap each SPARQL solution row in a dict keyed by the open
        # variables (single-variable results come back as bare terms).
        rt = len(vars) > 1 and (dict([(vars[idx], i)
                                      for idx, i in enumerate(v)])
                                for v in rt) \
            or (dict([(vars[0], v)]) for v in rt)
        if debug:
            print("%s%s-> %s" % (
                subquery,
                projectedBindings and
                " %s apriori binding(s)" % len(projectedBindings) or '',
                rt and '[]'))  # .. %s answers .. ]'%len(rt) or '[]')
        return subquery, rt
def EDBQueryFromBodyIterator(factGraph, remainingBodyList, derivedPreds, hybridPredicates=None):
    """
    Return the maximal prefix of remainingBodyList that can be resolved
    directly against the EDB (factGraph) via SPARQL.

    A literal qualifies if it is a non-naf, non-adorned Uniterm whose
    predicate is not derived (or is a hybrid predicate not yet renamed with
    the '_derived' suffix); when the graph has a templateMap, N3Builtins
    with a registered template also qualify.
    """
    hybridPredicates = hybridPredicates if hybridPredicates is not None else []

    def sparqlResolvable(literal):
        # Used when factGraph.templateMap exists: builtins resolve too.
        predTerm = GetOp(literal)
        if not isinstance(literal,
                          AdornedUniTerm) and isinstance(literal,
                                                         Uniterm):
            return not literal.naf and (
                predTerm not in derivedPreds or
                (predTerm in hybridPredicates and
                 not predTerm.find('_derived') + 1))
        else:
            return isinstance(literal, N3Builtin) and \
                literal.uri in factGraph.templateMap

    def sparqlResolvableNoTemplates(literal):
        # Fallback predicate test when no builtin templates are available.
        predTerm = GetOp(literal)
        if isinstance(literal, Uniterm):
            return not literal.naf and (
                predTerm not in derivedPreds or
                (predTerm in hybridPredicates and
                 not predTerm.find('_derived') + 1))
        else:
            return False

    # takewhile stops at the first unresolvable literal, preserving order.
    return list(
        takewhile(
            hasattr(factGraph, 'templateMap') and sparqlResolvable or \
            sparqlResolvableNoTemplates,
            remainingBodyList))
class ConjunctiveQueryMemoize(object):
    """
    Ideas from MemoizeMutable class of Recipe 52201 by <NAME> and
    from memoized decorator of http://wiki.python.org/moin/PythonDecoratorLibrary

    A memoization decorator of a function which takes (as arguments) a
    graph and a conjunctive query and returns a generator over results of
    evaluating the conjunctive query against the graph.
    """

    def __init__(self, cache=None):
        # Maps (factGraph, conjQuery) keys -- or their pickled form when
        # the key is unhashable -- to lists of previously produced answers.
        self._cache = cache if cache is not None else {}

    def produceAnswersAndCache(self, answers, key, cache=None):
        """Yield each answer while appending it to the cache entry for *key*."""
        cache = cache if cache is not None else []
        for item in answers:
            self._cache.setdefault(key, cache).append(item)
            yield item

    def __call__(self, func):
        def innerHandler(queryExecAction, conjQuery):
            key = (conjQuery.factGraph, conjQuery)
            try:
                rt = self._cache.get(key)
                if rt is not None:
                    # Cache hit: replay the stored answers.
                    for item in rt:
                        yield item
                else:
                    for item in self.produceAnswersAndCache(
                            func(queryExecAction, conjQuery),
                            key):
                        yield item
            except TypeError:
                # Key is unhashable; fall back to caching by pickled form.
                import pickle
                try:
                    dump = pickle.dumps(key)
                except pickle.PicklingError:
                    # Unhashable *and* unpicklable: evaluate without
                    # caching.  Bug fix: this branch used to call
                    # func(*args, **kwds) with undefined names, raising
                    # NameError instead of returning answers.
                    for item in func(queryExecAction, conjQuery):
                        yield item
                else:
                    if dump in self._cache:
                        for item in self._cache[dump]:
                            yield item
                    else:
                        for item in self.produceAnswersAndCache(
                                func(queryExecAction, conjQuery),
                                dump):
                            yield item
        return innerHandler
class EDBQuery(QNameManager, SetOperator, Condition):
    """
    A list of frames (comprised of EDB predicates) meant for evaluation
    over a large EDB.

    lst is a conjunct of terms
    factGraph is the RDF graph to evaluate queries over
    returnVars is the return variables (None, the default, will cause the
    list to be built via introspection on lst)
    bindings is a solution mapping to apply to the terms in lst
    varMap is a variable substitution map retained for copy()
    symIncAxMap is unused here; kept for interface compatibility
    symmAtomicInclusion selects the ?KIND-FILTER SPARQL rendering
    """

    def __init__(self,
                 lst,
                 factGraph,
                 returnVars=None,
                 bindings=None,
                 varMap=None,
                 symIncAxMap=None,
                 symmAtomicInclusion=False):
        # Bug fix: bindings/varMap/symIncAxMap used to default to shared
        # mutable {} objects (the classic mutable-default pitfall).
        bindings = {} if bindings is None else bindings
        varMap = {} if varMap is None else varMap
        self.factGraph = factGraph
        self.varMap = varMap
        self.symmAtomicInclusion = symmAtomicInclusion
        self.formulae = lst
        self.naf = False
        # Apply any a priori solutions.
        if bool(bindings):
            # Ground the terms with the given bindings, shrinking the set
            # of open (return) variables accordingly.
            openVars, termList, bindings = \
                normalizeBindingsAndQuery(set(returnVars)
                          if returnVars else [v for v in self.getOpenVars()],
                                          bindings,
                                          lst)
            self.returnVars = list(openVars)
        else:
            if returnVars is None:
                # Return vars not specified, but meant to be determined by
                # the constructor via introspection on the conjunct.
                self.returnVars = self.getOpenVars()
            else:
                # Note: if returnVars is an empty (falsy) value, the query
                # is treated as ground and no SELECT variables are emitted.
                self.returnVars = (returnVars if isinstance(returnVars, list)
                                   else list(returnVars)) if returnVars else []
            termList = lst
        super(EDBQuery, self).__init__(termList)
        self.bindings = bindings.normalize() \
            if isinstance(bindings, ImmutableDict) else bindings

    def copy(self):
        """
        A shallow copy of an EDB query
        """
        return EDBQuery([copy.deepcopy(t) for t in self.formulae],
                        self.factGraph,
                        self.returnVars,
                        self.bindings.copy(),
                        self.varMap.copy(),
                        symmAtomicInclusion=self.symmAtomicInclusion)

    def renameVariables(self, varMap):
        """Rename variables in every term according to varMap."""
        for item in self.formulae:
            item.renameVariables(varMap)

    def ground(self, mapping):
        """
        Ground the query's terms with *mapping*, dropping the grounded
        variables from the bindings and recomputing the open variables.
        Returns the set of variables that were grounded.
        """
        appliedVars = set()
        for item in self.formulae:
            if isinstance(item, Or):
                for _item in item.formulae:
                    # Bug fix: this used to call item.ground(mapping) --
                    # re-grounding the Or itself on every iteration and
                    # never using _item -- instead of each disjunct.
                    appliedVars.update(_item.ground(mapping))
            else:
                appliedVars.update(item.ground(mapping))
        self.bindings = project(self.bindings, appliedVars, True)
        self.returnVars = self.getOpenVars()
        return appliedVars

    def accumulateBindings(self, bindings):
        """Merge into self.bindings the mappings for variables not open here."""
        self.bindings.update(project(bindings, self.getOpenVars(), inverse=True))

    def getOpenVars(self):
        """Return the distinct variables mentioned across the conjunct."""
        return list(
            set(
                reduce(
                    lambda x, y: x + y,
                    [list(GetVariables(arg, secondOrder=True)) for arg in self.formulae])))

    def applyMGU(self, substitutions):
        """Apply a most-general-unifier substitution to terms and bindings."""
        for term in self.formulae:
            term.renameVariables(substitutions)
        self.bindings = dict([(substitutions.get(k, k), v)
                              for k, v in list(self.bindings.items())])

    def evaluate(self, debug=False, symmAtomicInclusion=False):
        """Evaluate the query against factGraph via RunQuery."""
        return RunQuery(self.formulae,
                        self.bindings,
                        self.factGraph,
                        vars=self.returnVars,
                        debug=debug,
                        symmAtomicInclusion=symmAtomicInclusion)

    def asSPARQL(self):
        """Serialize the query as a SPARQL string (ASK when no return vars)."""
        return RDFTuplesToSPARQL(self.formulae,
                                 self.factGraph,
                                 not self.returnVars,
                                 self.returnVars,
                                 self.symmAtomicInclusion)

    def __len__(self):
        return len(self.formulae)

    def __eq__(self, other):
        # Equality is delegated to the structural hash below.
        return hash(self) == hash(other)

    def __hash__(self):
        """
        >>> g = Graph()
        >>> lit1 = (Variable('X'), RDF.type, Variable('Y'))
        >>> q1 = EDBQuery([BuildUnitermFromTuple(lit1)], g)
        >>> q2 = EDBQuery([BuildUnitermFromTuple(lit1)], g)
        >>> q1 == q2
        True
        >>> d = {q1:True}
        >>> q2 in d
        True
        """
        from FuXi.Rete.Network import HashablePatternList
        conj = HashablePatternList(
            [term.toRDFTuple() for term in self.formulae],
            skipBNodes=True)
        return hash(conj)
def | |
# <gh_stars>0
"""Support for real-time plotting of LinearActuator state."""
# Standard imports
import asyncio
import datetime
import logging
# External imports
from bokeh import layouts
from bokeh.models import (
ColumnDataSource, DatetimeTickFormatter, annotations, arrow_heads, widgets
)
from bokeh.plotting import figure
# Local package imports
from lhrhost.dashboard import DocumentLayout, DocumentModel
from lhrhost.dashboard.widgets import Button
from lhrhost.protocol.linear_actuator import Receiver as LinearActuatorReceiver
# External imports
import numpy as np
# Logging
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
class ClearButton(Button):
    """Button that clears every registered plotter, synchronized across documents."""

    def __init__(self, *plotters):
        """Remember which plotters to clear when clicked."""
        super().__init__(label='Clear plot')
        self.plotters = plotters

    # Implement Button

    def on_click(self):
        """Clear each registered plotter on a button click."""
        for target in self.plotters:
            target.clear()
class PositionPlot(DocumentLayout):
    """Plot of received linear actuator position data."""

    def __init__(
        self, title, rollover=750, width=960, height=320, line_width=2
    ):
        """Initialize member variables."""
        super().__init__()
        # Max number of points retained by the streaming data source.
        self.rollover = rollover
        self._init_position_plot(title, width, height, line_width)
        self.plotting = True
        self.last_position_time = None
        self.last_position = None
        self.last_region = None

    def _init_position_plot(self, title, plot_width, plot_height, line_width):
        """Initialize member variables for position plotting."""
        self.position_source = ColumnDataSource({
            'time': [],
            'position': []
        })
        self.position_fig = figure(
            title=title, plot_width=plot_width, plot_height=plot_height
        )
        self.position_fig.xaxis.axis_label = 'Time from Start (s)'
        self.position_fig.xaxis.formatter = DatetimeTickFormatter()
        self.position_fig.yaxis.axis_label = 'Position'
        self.position_line = self.position_fig.line(
            x='time', y='position', source=self.position_source, line_width=line_width
        )

    def add_position(self, position):
        """Add a point to the plot (no-op while plotting is paused)."""
        if not self.plotting:
            return
        self.last_position_time = datetime.datetime.now()
        self.last_position = position
        self.position_source.stream({
            'time': [self.last_position_time],
            'position': [self.last_position]
        }, rollover=self.rollover)

    def clear(self):
        """Clear plot data, re-creating the current limits region (if any)."""
        for arrow in self.position_fig.select(name='arrow'):
            self.position_fig.renderers.remove(arrow)
        for region in self.position_fig.select(name='region'):
            self.position_fig.renderers.remove(region)
        self.position_source.data = {'time': [], 'position': []}
        if self.last_region is not None:
            # Preserve the active limits region across the clear.
            self.start_limits_region(
                self.last_region.bottom, self.last_region.top,
                start_time=self.last_region.left,
                fill_color=self.last_region.fill_color,
                fill_alpha=self.last_region.fill_alpha
            )

    def start_plotting(self):
        """Start plotting data."""
        self.plotting = True

    def stop_plotting(self):
        """Stop plotting data."""
        self.plotting = False

    def add_arrow(self, next_position, slope=1, line_width=2):
        """Add an arrow from the last position point to the next position point."""
        if not self.plotting:
            return
        if self.last_position_time is None or self.last_position is None:
            # Bug fix: logging.Logger.warn is a deprecated alias of warning.
            logger.warning('Could not add position arrow from unknown last position!')
            return
        self.position_fig.add_layout(annotations.Arrow(
            x_start=self.last_position_time,
            y_start=self.last_position,
            # Arrow duration scales with the position delta at the given slope.
            x_end=self.last_position_time + datetime.timedelta(
                milliseconds=abs(next_position - self.last_position) / slope
            ),
            y_end=next_position,
            end=arrow_heads.VeeHead(size=10),
            line_width=line_width,
            name='arrow'
        ))

    def start_limits_region(
        self, low, high, start_time=None, fill_color='gray', fill_alpha=0.25
    ):
        """Start a position limits region."""
        if not self.plotting:
            return
        if start_time is None:
            start_time = datetime.datetime.now()
        self.last_region = annotations.BoxAnnotation(
            left=start_time, bottom=low, top=high,
            fill_color=fill_color, fill_alpha=fill_alpha,
            name='region'
        )
        self.position_fig.add_layout(self.last_region)

    def end_limits_region(self, end_time=None):
        """End the current position limits region."""
        if not self.plotting:
            return
        if self.last_region is None:
            return
        if end_time is None:
            end_time = datetime.datetime.now()
        self.last_region.right = end_time

    # Implement DocumentLayout

    @property
    def layout(self):
        """Return a document layout element."""
        return self.position_fig
class PositionPlotter(DocumentModel, LinearActuatorReceiver):
    """Linear actuator position plotter, synchronized across documents."""

    def __init__(self, *args, **kwargs):
        """Initialize member variables."""
        super().__init__(PositionPlot, *args, **kwargs)
        # Most recently received position sample (None until first sample).
        self.last_position_time = None
        self.last_position = None
        # Feedback-controller position limits (None until first reported).
        self.position_limit_low = None
        self.position_limit_high = None

    def clear(self):
        """Clear plot data."""
        self.update_docs(lambda plot: plot.clear())

    def start_plotting(self):
        """Start plotting data."""
        self.update_docs(lambda plot: plot.start_plotting())

    def stop_plotting(self):
        """Stop plotting data."""
        self.update_docs(lambda plot: plot.stop_plotting())

    def add_arrow(self, *args, **kwargs):
        """Add an arrow from the most recent position point to the next position point."""
        self.update_docs(lambda plot: plot.add_arrow(*args, **kwargs))

    def update_limits_region(self):
        """Update the limits region once both limits are known."""
        if self.position_limit_low is None or self.position_limit_high is None:
            return
        # Close the current region and open a fresh one with the new limits.
        self.update_docs(lambda plot: plot.end_limits_region())
        self.update_docs(lambda plot: plot.start_limits_region(
            self.position_limit_low, self.position_limit_high
        ))

    # Implement LinearActuatorReceiver

    async def on_linear_actuator_position(self, position: int) -> None:
        """Receive and handle a LinearActuator/Position response."""
        self.last_position_time = datetime.datetime.now()
        self.last_position = position
        self.update_docs(lambda plot: plot.add_position(position))

    async def on_linear_actuator_feedback_controller_limits_position_low(
        self, position: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Position/Low response."""
        self.position_limit_low = position
        self.update_limits_region()

    async def on_linear_actuator_feedback_controller_limits_position_high(
        self, position: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Position/High response."""
        self.position_limit_high = position
        self.update_limits_region()
class DutyPlot(DocumentLayout):
    """Plot of received linear actuator motor duty cycle data."""

    def __init__(
        self, title, rollover=750, width=960, height=320, line_width=2
    ):
        """Initialize member variables."""
        super().__init__()
        # Max number of points retained by the streaming data source.
        self.rollover = rollover
        self._init_duty_plot(title, width, height, line_width)
        self.plotting = True
        self.duty_state_region_start_time = None
        # One limits region per motor direction.
        self.last_limits_regions = {
            'forwards': None,
            'backwards': None
        }

    def _init_duty_plot(self, title, plot_width, plot_height, line_width):
        """Initialize member variables for motor duty cycle plotting."""
        self.duty_source = ColumnDataSource({
            'time': [],
            'duty': []
        })
        self.duty_fig = figure(
            title=title, plot_width=plot_width, plot_height=plot_height,
            y_range=[-255, 255]
        )
        self.duty_fig.xaxis.axis_label = 'Time from Start (s)'
        self.duty_fig.xaxis.formatter = DatetimeTickFormatter()
        self.duty_fig.yaxis.axis_label = 'Signed Motor Duty Cycle'
        self.duty_line = self.duty_fig.line(
            x='time', y='duty', source=self.duty_source, line_width=line_width
        )

    def add_duty(self, duty):
        """Add a point to the plot (no-op while plotting is paused)."""
        if not self.plotting:
            return
        self.duty_source.stream({
            'time': [datetime.datetime.now()],
            'duty': [duty]
        }, rollover=self.rollover)

    def clear(self):
        """Clear plot data, re-creating the per-direction limits regions."""
        for region in self.duty_fig.select(name='region'):
            self.duty_fig.renderers.remove(region)
        self.duty_source.data = {'time': [], 'duty': []}
        for (direction, region) in self.last_limits_regions.items():
            if region is not None:
                self.start_limits_region(
                    direction, region.bottom, region.top, start_time=region.left,
                    fill_color=region.fill_color, fill_alpha=region.fill_alpha
                )

    def start_plotting(self):
        """Start plotting data."""
        self.plotting = True

    def stop_plotting(self):
        """Stop plotting data."""
        self.plotting = False

    def add_state_region(self, fill_color, start_time=None, end_time=None, fill_alpha=0.25):
        """Add a shaded region between the two duty cycle times."""
        if not self.plotting:
            return
        if start_time is None:
            start_time = self.duty_state_region_start_time
        if start_time is None:
            # Bug fix: logging.Logger.warn is a deprecated alias of warning.
            logger.warning('Could not add duty region from unspecified start time!')
            return
        if end_time is None:
            end_time = datetime.datetime.now()
        self.duty_fig.add_layout(annotations.BoxAnnotation(
            left=start_time, right=end_time,
            fill_alpha=fill_alpha, fill_color=fill_color,
            name='region'
        ))

    def start_state_region(self):
        """Start a duty cycle region at the current time."""
        if not self.plotting:
            return
        self.duty_state_region_start_time = datetime.datetime.now()

    def start_limits_region(
        self, direction, low, high, start_time=None,
        fill_color='gray', fill_alpha=0.25
    ):
        """Start a duty cycle limits region for one motor direction."""
        if not self.plotting:
            return
        if start_time is None:
            start_time = datetime.datetime.now()
        self.last_limits_regions[direction] = annotations.BoxAnnotation(
            left=start_time, bottom=low, top=high,
            fill_color=fill_color, fill_alpha=fill_alpha,
            name='region'
        )
        self.duty_fig.add_layout(self.last_limits_regions[direction])

    def end_limits_region(self, direction, end_time=None):
        """End the duty cycle limits region for one motor direction."""
        if not self.plotting:
            return
        if self.last_limits_regions[direction] is None:
            return
        if end_time is None:
            end_time = datetime.datetime.now()
        self.last_limits_regions[direction].right = end_time

    # Implement DocumentLayout

    @property
    def layout(self):
        """Return a document layout element."""
        return self.duty_fig
class DutyPlotter(DocumentModel, LinearActuatorReceiver):
    """Linear actuator motor duty cycle plotter, synchronized across documents."""

    def __init__(self, *args, **kwargs):
        """Initialize member variables."""
        super().__init__(DutyPlot, *args, **kwargs)
        # Most recently received duty sample (None until first sample).
        self.last_duty_time = None
        self.last_duty = None
        # Per-direction motor duty limits (None until first reported).
        self.motor_limit_low = {
            'forwards': None,
            'backwards': None
        }
        self.motor_limit_high = {
            'forwards': None,
            'backwards': None
        }

    def clear(self):
        """Clear plot data."""
        self.update_docs(lambda plot: plot.clear())

    def start_plotting(self):
        """Start plotting data."""
        self.update_docs(lambda plot: plot.start_plotting())

    def stop_plotting(self):
        """Stop plotting data."""
        self.update_docs(lambda plot: plot.stop_plotting())

    def add_state_region(self, *args, **kwargs):
        """Add a shaded state region to every synchronized plot."""
        self.update_docs(lambda plot: plot.add_state_region(*args, **kwargs))

    def start_state_region(self):
        """Start a state region on every synchronized plot."""
        self.update_docs(lambda plot: plot.start_state_region())

    def update_limits_region(self, direction):
        """Update the limits region for *direction* once both limits are known."""
        if (
            self.motor_limit_low[direction] is None or
            self.motor_limit_high[direction] is None
        ):
            return
        # Close the current region and open a fresh one with the new limits.
        self.update_docs(lambda plot: plot.end_limits_region(direction))
        self.update_docs(lambda plot: plot.start_limits_region(
            direction,
            self.motor_limit_low[direction],
            self.motor_limit_high[direction]
        ))

    # Implement LinearActuatorReceiver

    async def on_linear_actuator_motor(self, duty: int) -> None:
        """Receive and handle a LinearActuator/Motor response."""
        self.last_duty_time = datetime.datetime.now()
        self.last_duty = duty
        self.update_docs(lambda plot: plot.add_duty(duty))

    async def on_linear_actuator_feedback_controller_limits_motor_forwards_low(
        self, duty: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Motor/Forwards/Low response."""
        self.motor_limit_low['forwards'] = duty
        self.update_limits_region('forwards')

    async def on_linear_actuator_feedback_controller_limits_motor_forwards_high(
        self, duty: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Motor/Forwards/High response."""
        self.motor_limit_high['forwards'] = duty
        self.update_limits_region('forwards')

    async def on_linear_actuator_feedback_controller_limits_motor_backwards_low(
        self, duty: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Motor/Backwards/Low response."""
        self.motor_limit_low['backwards'] = duty
        self.update_limits_region('backwards')

    async def on_linear_actuator_feedback_controller_limits_motor_backwards_high(
        self, duty: int
    ) -> None:
        """Receive and handle a LA/FC/Limits/Motor/Backwards/High response."""
        self.motor_limit_high['backwards'] = duty
        self.update_limits_region('backwards')
class ToggleStatePlottingButton(Button):
    """Linear actuator plotter functionality toggle, synchronized across documents."""

    def __init__(self, linear_actuator_protocol, plotters, plotting_interval=20):
        """Initialize member variables."""
        super().__init__(label='Start Plotting')
        self.protocol = linear_actuator_protocol
        self.plotters = plotters
        self.plotting = False
        # Notification interval requested from the actuator -- units per
        # the protocol; presumably milliseconds (TODO confirm).
        self.interval = plotting_interval

    async def toggle_plotting(self):
        """Toggle plotting."""
        if self.plotting:
            await self.stop_plotting()
        else:
            await self.start_plotting()

    async def start_plotting(self):
        """Start plotting if it hasn't already started."""
        if self.plotting:
            return
        # Configure periodic (not change-only) position/motor notifications.
        await self.protocol.position.notify.change_only.request(0)
        await self.protocol.position.notify.interval.request(self.interval)
        await self.protocol.motor.notify.change_only.request(0)
        await self.protocol.motor.notify.interval.request(self.interval)
        await self.protocol.position.notify.request(2)
        await self.protocol.motor.notify.request(2)
        self.plotting = True
        for plotter in self.plotters:
            plotter.start_plotting()
        self.enable_button('Stop plotting')

    async def stop_plotting(self):
        """Stop plotting if it hasn't already stopped."""
        if not self.plotting:
            return
        await self.protocol.position.notify.request(0)
        await self.protocol.motor.notify.request(0)
        self.plotting = False
        for plotter in self.plotters:
            plotter.stop_plotting()
        self.enable_button('Start plotting')

    # Implement Button

    def on_click(self):
        """Handle a button click event."""
        # Bug fix: the non-plotting branch previously showed 'Stopping
        # plotting' even though clicking then *starts* plotting.
        label = 'Stopping plotting...' if self.plotting else 'Starting plotting...'
        self.disable_button(label)
        asyncio.get_event_loop().create_task(self.toggle_plotting())
class LinearActuatorPlot(DocumentLayout):
"""Plot of received linear actuator state data."""
def __init__(
self, position_plotter, duty_plotter, clear_button, toggle_button,
title, nest_level
):
"""Initialize member variables."""
super().__init__()
self.position_plot = position_plotter.make_document_layout()
self.duty_plot = duty_plotter.make_document_layout()
self.duty_plot.duty_fig.x_range = self.position_plot.position_fig.x_range
self.clear_button = clear_button.make_document_layout()
self.toggle_button = toggle_button.make_document_layout()
heading_level = 1 + nest_level
column = []
if title:
column += [
layouts.widgetbox([
widgets.Div(text='<h{}>{}</h{}>'.format(
heading_level, title, heading_level
))
])
]
column += [
layouts.row([
| |
from platform import python_version
from itertools import zip_longest
import itertools
import math
from typing import final
print('Hello World!')
num = int(input()) # read an integer from stdin (ValueError on non-numeric input)
a, b = 1, 2 # a = 1, b = 2
def numbers():
    """Return five values at once (really a single packed tuple)."""
    return 1, 2, 3, 4, 5
a, b, c, d, e = numbers()
print(3 + 2) # 5 +=
print(3 - 2) # 1 -=
print(3 * 2) # 6 *=
print(3 / 2) # 1.5 /=
print(2 / 2) # 1.0
print(2**3) # 8 **=
print(7 // 2)# 3 //=
print(7 % 2) # 1 %=
print('uma')
print("duas")
print('''uma
duas
tres linhas''')
print(f'interpolado {num}')
"""
docstring
"""
print('spam' + 'eggs') # spameggs
print('2' + '3') # 23
# print('2' + 2) # error
print('2' * 4) # 2222
print(1 == 1) # True
print(1 != 1) # False
print(2 >= 1) # True
print(2 <= 1) # False
print(2 > 1) # True
print(2 < 1) # False
print(1 > 0 and 2 < 1) # False
print(1 > 0 or 2 < 1) # True
print(not (1 > 0 and 2 < 1)) # True
print(not None) # true # implicit values
print(not []) # true # but [] != None
str = [] # NOTE(review): shadows the builtin str for the rest of the module
# print(len(str) == 0) # not python way
print(not str) # empty, None or zero
# also as
print(str is None) # False: str is [] here, and [] is not None
print(str is not None) # True: identity comparison against the None singleton
if num > 10:
    print(f'{num} é maior que 10')
elif num == 10:
    print(f'{num} é igual a 10')
else:
    print(f'{num} é menor que 10')
while num > 0:
    print(num)
    num -= 1;
words = ['hello', 'world', '!']
str = 'Hello World!' # still shadowing the builtin str
m = [
    [1, 2, 3],
    [4, 5, 6]
] # nested lists act as a matrix
print(words[2]) # !
print(str[6]) # W
print(words + [1, 2, 3]) # ['hello', 'world', '!', 1, 2, 3]
print([1, 2, 3] + [4, 5, 6]) # [1, 2, 3, 4, 5, 6]
print([1, 2, 3].extend([4, 5, 6])) # None: extend mutates in place and returns None
ord('a') # unicode value
print('hello' in words) # True
print(4 not in [1, 2, 3]) # True
for w in words:
    print(f'{w}!')
print(list(range(10))) # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
print(list(range(3, 8))) # [3, 4, 5, 6, 7]
print(list(range(2, 10, 3))) # [2, 5, 8]
words = [1, 'a', 2, 'a', 3, 'a', 4, 'a', 5, 'a']
for i in range(words.count('a')): # count first: removing while iterating the list itself would skip items
    words.remove('a')
print(words)
words = [1, 'a', 2, 'a', 3, 'a', 4, 'a', 5, 'a']
squares = [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]
print(squares[2:6]) # [4, 9, 16, 25]
print(squares[3:8]) # [9, 16, 25, 36, 49]
print(squares[0:1]) # [0]
print(squares[:7]) # [0, 1, 4, 9, 16, 25, 36]
print(squares[7:]) # [49, 64, 81]
print(squares[::2]) # [0, 4, 16, 36, 64]
print(squares[2:8:3])#[4, 25]
print(squares[1:-1])# [1, 4, 9, 16, 25, 36, 49, 64]
print(squares[-1]) # 81
print(squares[::-1])# [81, 64, 49, 36, 25, 16, 9, 4, 1, 0]
# same as list.reverse()
# same as reversed(list)
len(squares) # 10
squares.append(100) # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
squares.insert(0,'fon') # ['fon', 0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
squares.index(9) # 4
[3, 1, 2].sort() # sorts in place and returns None (use sorted() for a new list)
max([1, 2, 3, 4, 5]) # 5
min([1, 2, 3, 4, 5]) # 1
words.count('a') # 5
squares.remove('fon') # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81, 100]
[1, 2, 3].reverse() # reverses in place and returns None
nums = [4, 5, 6]
msg = "Numbers: {0} {1} {2}". format(nums[0], nums[1], nums[2])
print(msg) # Numbers: 4 5 6
a = "{x}, {y}".format(x=5, y=12)
print(a) # 5, 12
print(", ".join(["spam", "eggs", "ham"])) # prints "spam, eggs, ham"
print("Hello ME".replace("ME", "world")) # prints "Hello world"
print("This is a sentence.".startswith("This")) # prints "True"
print("This is a sentence.".endswith("sentence.")) # prints "True"
print("This is a sentence.".upper()) # prints "THIS IS A SENTENCE."
print("AN ALL CAPS SENTENCE".lower()) # prints "an all caps sentence"
print("spam, eggs, ham".split(", ")) # prints "['spam', 'eggs', 'ham']"
def main(n):
    """Return n incremented by one."""
    return n + 1
print(main(num))
# file != script
def main():
    """Script entry point (rebinds the name main, replacing the version above)."""
    print('Hello World!')
print('im not a file, im a script!')
if __name__ == '__main__':
    main() # prevents from runing scripts when importing
_ = 'dont use me!'
import time
import traceback
# while True:
#     try:
#         print('Vrummmmmmm')
#         time.sleep(0.1)
#         raise Exception('que')
#     except Exception: # != 'exception', excludes keyboardinterrupt (^C)
#         print('unheeeee')
try:
    raise Exception('unheeee')
except Exception as e: # ValueError => alternative but more precise and correct way
    print(e)
    traceback.print_exc() # keeps track of the trouble
    # Bug fix: was print(traceback.format_exc) without (), which printed
    # the function object instead of the formatted traceback text.
    print(traceback.format_exc())
print('ALWAYS MAKE LISTS AS SETs OR HASH TABLES')
def fun(str, arr=None):
    """Append every element of ``str`` onto ``arr`` and return it.

    ``arr`` defaults to a fresh list on every call: the ``None`` sentinel
    avoids the shared-mutable-default pitfall (no cached values between calls).
    """
    if arr is None:  # idiomatic None test instead of isinstance(arr, type(None))
        arr = []
    for s in str:
        arr.append(s)
    return arr
# better way to assign arrays
# python -i file.py
# import pdb
# pdb.set_trace()
# debug code
# $ pip install virtual env
# $ virtualenv venv
# $ source venv/bin/activate
# # also as
# $ python -m venv my_venv
# $ python file.py // python -m file
fruits = [
    {'name': 'apple', 'price': 20},
    {'name': 'avocado', 'price': 10},
    {'name': 'orange', 'price': 5}
] # Dictionary
print(
    # list comprehension
    [fruit['name'] for fruit in fruits if fruit['name'][0] == 'a']
) # ['apple', 'avocado']
print(
    # dict comprehension: name -> price
    {fruit['name']: fruit['price'] for fruit in fruits}
) # {'apple': 20, 'avocado': 10, 'orange': 5}
add = lambda x, y: x + y # lambda anonymous function (bug fix: 'add:' was a bare annotation, so add was never actually defined)
# more_than_one_nums = filter(lambda x: x > 1, [1, 2, 3]) # inline functions
# print(more_than_one_nums)
condition = True
x = 1 if condition else 0
# ternary (conditional expression)
num1 = 10_000_000_000
num2 = 100_000_000 # grouping numbers w/o messing code
total = num1 + num2
print(f'{total:,}') # 10,100,000,000
# context manager
# with open('file', 'r') as f:
#     content = f.read()
#     print(len(content.split(' ')))
# better way to manage resources
names = ['Otávio', 'Pedro', 'Marina', 'Alex']
apelidos = ['Ota', 'Pepe', 'Nina', 'Alek']
for index, name in enumerate(names, start=1):
    print(index, name)
# enumerate function, same as JavaScript forEach
for name, apelido in zip(names, apelidos):
    print(f'O apelido do {name} é {apelido}')
# packing and unpacking
# normal
items = (1, 2)
print(items) # (1, 2)
# unpacking
# a, b = (1, 2)
a, _ = (1, 2) # _ as variable is ignored by the code
print(a) # 1
# print(b) # 2
# a, b, c = (1, 2) # error, not enough values to unpack
# a, b = (1, 2, 3) # error, too many values to unpack
a, b, *c, d = (1, 2, 3, 4, 5, 6, 7)
print(a) # 1
print(b) # 2
print(c) # [3, 4, 5, 6] # all unassigned values # can be set as _ too
print(d) # 7 # last value
class Person(): # Object
    """Empty attribute bag; attributes are attached dynamically via setattr."""
    pass
person = Person()
# person.first = 'Otávio'
# person.last = 'Pedro'
# first_key = 'first'
# first_val = 'Pedro'
# setattr(person, first_key, first_val)
# first = getattr(person, first_key)
# uses functions to set/get keys and values from a object
for fruit in fruits:
    for key, value in fruit.items():
        setattr(person, key, value) # each fruit overwrites the same attrs; the last one ('orange') wins
print(person.name, person.price)
# for key in person.keys():
#     print(getattr(person, key))
from getpass import getpass
username = input('Username: ')
password = getpass('Password: ') # like input(), but the typed text is hidden
print('Logging In...')
#set a get password function, hiding the input
# help(module_name)
from datetime import datetime
dir(datetime) # list the attributes of the class
datetime.today # the method object itself (not called)
datetime.today() # calling it returns the current local datetime
now = datetime.now()
print(now.day, now.month, now.year, now.hour, now.minute, now.second)
# countdown
now = datetime.now()
end = datetime(2020, 12, 1)
print((end-now).microseconds) # NOTE(review): only the fractional-second part; total_seconds() gives the whole countdown
# elapsed time
start = datetime.now()
for i in range(100_000_000): # to pass time
    pass
end = datetime.now()
print(type(end-start)) # <class 'datetime.timedelta'>
elapsed = end-start
print(elapsed)
print(elapsed.seconds, elapsed.microseconds)
print(f"\033[91mError: This is \033[96mcyan. \033[92mContinue?") # ANSI escape codes (color is never reset here)
# Colored Text
print(round(8.5)) #down to 8 (ties round to the nearest even number)
print(round(9.5)) #up to 10
# import webbrowser
# webbrowser.open('github.com/pedromchd')
msg = 'message ' 'other message' # adjacent string literals are concatenated at compile time
print(msg) # message other message
print('message '
      'other message')
# message other message
print('''message \
other message
another message''')
# message other message
# another message
if 'Git' in 'GitHub': print('GitHub'.index('Git')) # index() raises ValueError when absent, hence the guard
'GitHub'.find('Git') # same as .index(), but returns -1 instead of raising
fruit = {'Orange': 15}
print(id(fruit)) # gets unique id from object
# use as an alias
dict1 = {'a': 1, 'b': 2}
dict2 = dict1 # both names now refer to the same dict object
dict2['c'] = 3
print(id(dict1) == id(dict2)) # true
print(dict1) # {'a': 1, 'b': 2, 'c': 3}
print(dict2) # {'a': 1, 'b': 2, 'c': 3}
# doesnt work in primary values
# actually copy
dict1 = {'a': 1, 'b': 2}
# Bug fix: dicts are not sliceable - dict1[:] raises TypeError.
# Use .copy() (or dict(dict1)) for a shallow copy.
dict2 = dict1.copy()
dict2['c'] = 3
print(id(dict1) == id(dict2)) # false
print(dict1) # {'a': 1, 'b': 2}
print(dict2) # {'a': 1, 'b': 2, 'c': 3}
# actually replaces content
# Bug fix: dict1[:] = dict2 also raises TypeError; clear + update
# replaces the contents in place while keeping the same object.
dict1.clear()
dict1.update(dict2)
print(dict1) # {'a': 1, 'b': 2, 'c': 3}
print(dict2) # {'a': 1, 'b': 2, 'c': 3}
from copy import deepcopy
tech = ['C++', 'Go', 'Python', ['html', 'css', 'pics']]
learning = tech.copy() # shallow copy: the nested list is still shared
print(id(tech) == id(learning)) # false
print(id(tech[-1]) == id(learning[-1])) #true with shallow copy
learning = deepcopy(tech) # deep copy duplicates nested containers too
print(id(tech) == id(learning)) # false
print(id(tech[-1]) == id(learning[-1])) # false
age = 16
print('age' in locals(), 'age' in globals()) # at module level locals() IS globals()
del age
print('age' in locals(), 'age' in globals())
age = None # rebinding to None is not the same as deleting the name
print('age' in locals(), 'age' in globals())
for language in ['C', 'C++', 'Java', 'C#', 'Python', 'Go', 'Rust']:
    print(language, end=" ") # end=' ' changes ending of line output
print("") #to go to next line for the next output
print(fruits, names, tech)
# use else in loops
for name in names:
| |
else:
E.append(1 - AC)
# Now deal with the peripheral faces
for tet in tri.tetrahedra():
for j in range(4):
E.append(0)
return E
# 6. Coboundary
# Suppose that e is a truncated edge. Let LF be the set of truncated
# faces to the left of e and let RF be the set of faces to the right. Then
# \delta e^* = \sum_{f \in LF} f^* - \sum_{f \in RF} f^*.
# Suppose that e is a peripheral edge. So there is a unique truncated
# face f meeting e. Note that f is to the left of e. There are
# also a pair of boundary faces meeting e: say f' _above_ e and f''
# _below_ e. Then
# \delta e^* = f^* + (f')^* - (f'')^*.
def coboundary(tri, angle):
    """
    Given a triangulation "tri" (T), with oriented edges, and a
    transverse taut angle structure "angle", returns the co-boundary
    operator delta^1 \from C^1(T', ZZ) \to C^2(T', ZZ), as a matrix,
    for the truncated triangulation T'. Note that, strictly speaking,
    we don't need to use "angle" for this, but we use it to determine
    orientation on faces for the Euler class, so we might as well use
    it again here.
    """
    # \delta^1 takes row vectors (functions on edges) and spits out
    # row vectors (functions on faces). So, if c is a one-cochain
    # then c \cdot \delta is a two-cochain.
    delta = []
    assert is_transverse_taut(tri, angle)
    tet_vert_coorientations = is_transverse_taut(tri, angle, return_type = "tet_vert_coorientations")
    face_coorientations = is_transverse_taut(tri, angle, return_type = "face_coorientations")
    # Columns are indexed by truncated faces (one per triangle of T)
    # followed by peripheral faces (four per tetrahedron).
    for edge in tri.edges():
        # A row for every truncated edge
        row = []
        for face in tri.triangles():
            # A row entry for every truncated face
            count = 0
            for i in range(3):
                if face.edge(i) == edge:
                    # faceMapping relates the edge's intrinsic orientation
                    # to its orientation as the ith edge of this face.
                    perm = face.faceMapping(1, i)
                    if perm[1] == ((perm[0] + 1) % 3):
                        # the edge and face orientations agree so,
                        count += 1
                    else:
                        count -= 1
            row.append(count * face_coorientations[face.index()])
            # +1 if face is to the left of the edge, -1 if face is to
            # the right of the edge, using Regina's edge orientation
            # when viewed from above (using the transverse taut notion
            # of up)
            #                    ,'|
            #                  ,'  |
            #                ,'    |
            #              ,' CCW  | gets a +1
            #              `.      ^
            #                `.    |
            #                  `.  |
            #                    `.|
        for tet in tri.simplices():
            # Truncated edges never meet peripheral faces.
            for i in range(4):
                row.append(0)
        delta.append(row)
    for face in tri.triangles():
        face_embeddings = []
        for j in range(2):
            face_embeddings.append( face.embedding(j) )
        for i in range(3):  # vertices of the face
            # A row for every peripheral edge
            row = []
            for face2 in tri.triangles():
                # A row entry for every truncated face
                if face2 == face:
                    # The unique truncated face meeting this peripheral
                    # edge is the face it lies in.
                    row.append(1)
                else:
                    row.append(0)
            for tet in tri.simplices():
                for k in range(4):
                    # A row entry for every peripheral face
                    count = 0
                    for j in range(2):
                        if (tet == face_embeddings[j].simplex()) and (face_embeddings[j].vertices()[i] == k):
                            # the tetrahedron is on the jth side of the
                            # face and the ith vertex of face is the kth
                            # vertex of tet
                            face_num_in_tet = face_embeddings[j].vertices()[3]
                            count -= tet_vert_coorientations[tet.index()][face_num_in_tet]
                            # tet_vert_coorientations is +1 if
                            # coorientation on face points out of the
                            # tetrahedron, and we want count += 1 if
                            # the peripheral face is above the
                            # peripheral edge
                    row.append(count)
            delta.append(row)
    return delta
# 7. Linear algebra
# We ask: is there a one-cocycle C \in C^1(\calT', \ZZ) so that
# \delta C = E? If so, then [E] = E(\calT) is zero in H^2, as
# desired.
# This is a linear algebra problem, so can be solved by, say, sage.
def order_of_euler_class(delta, E):
    """
    Given the coboundary operator delta and an Euler two-cocycle E,
    returns k if [E] is k--torsion. By convention, returns zero if
    [E] is non-torsion. Note that the trivial element is 1--torsion.
    """
    delta = Matrix(delta)
    E = vector(E)
    # Note that E is a coboundary if there is a one-cocycle C solving
    #
    # E = C*delta
    #
    # We can find C (if it exists at all) using Smith normal form.
    D, U, V = delta.smith_form()
    assert D == U*delta*V
    # So we are trying to solve
    #
    # C*delta = C*U.inverse()*D*V.inverse() = E
    #
    # for a one-cochain C. Multiply by V to get
    #
    # C*delta*V = C*U.inverse()*D = E*V
    #
    # Now set
    #
    # B = C*U.inverse(), and so B*U = C
    #
    # and rewrite to get
    #
    # B*U*delta*V = B*D = E*V
    #
    # So define E' by:
    Ep = E*V
    # Finally we attempt to solve B * D = Ep. Note that D is
    # diagonal: so if we can solve all of the equations
    # B[i] * D[i][i] == Ep[i]
    # with B[i] integers, then [E] = 0 in cohomology.
    diag = diagonal(D)
    if any( (diag[i] == 0 and Ep[i] != 0) for i in range(len(Ep)) ):
        return 0
    # All zeros are at the end in Smith normal form. Since we've
    # passed the above we can now remove them. Bug fix: when D has
    # full rank there are no zero entries at all and diag.index(0)
    # raised ValueError - only strip when a zero is present.
    try:
        first_zero = diag.index(0)
    except ValueError:
        pass  # no zero diagonal entries: nothing to strip
    else:
        diag = diag[:first_zero]
        Ep = Ep[:first_zero]
    # Since diag[i] is (now) never zero we can divide to get the
    # fractions Ep[i]/diag[i] and then find the scaling that makes
    # them simultaneously integral.
    denoms = [ diag[i] / gcd(Ep[i], diag[i]) for i in range(len(Ep)) ]
    return lcm(denoms)
# 8. Remarks
# a) Here is a nice trick that proves [E] = 0 in some cases. Suppose
# that \gamma is an oriented path in \bdy M. Suppose that \gamma is
# transverse to the one-skeleton of \calT'. We form a one-cocycle
# D_\gamma by adding up the boundary edges that \gamma crosses, with
# sign. The sign is positive if \gamma crosses from below to above,
# and negative otherwise. Note that \delta D_\gamma vanishes on all
# boundary faces.
# b) <NAME> says that we should take the paths that go up
# through the centres of tetrahedra and take the Poincare dual. BUT I
# think this is not what we want... Marc is thinking of the relative
# Euler class as discussed on page 390 of his paper "Taut ideal
# triangulations of three-manifolds". The relative Euler class lives
# in H^2(M, \bdy M), so is Poincare dual to an element of H_1(M),
# represented by a collection of loops.
# c) [2019-03-31] It seems that, for transverse veering triangulations
# in the 16 census, the Euler class is always zero or two-torsion.
# Note that there are manifolds M in the census where H^2(M, \ZZ) has
# positive rank... What about odd torsion?
# Question: If the veering triangulation is edge-orientable, does the
# Euler class vanish?
# Answer: Yes. Here is a version of a discussion with Nathan
# [2020-04-03] - he says the following:
# Suppose that F is a foliation carried by the horizontal branched
# surface. Let UTF be the unit tangent bundle to F. We think of
# e(UTF) as being the obstruction to UTF having a section. Let G be
# the foliation carried by the upper (aka green) branched surface. If
# G is transversely orientable (aka edge-orientability of the veering
# triangulation) then G \cap F gives the desired section, and e(UTF) =
# 0. Note that G \cap F gives, for every point, a pair of points in
# the unit tangent circle. So let PUTF be the projective unit tangent
# bundle to F. This definitely has a section, so e(PUTF) = 0. Now,
# the bundle UTF is a double cover of the bundle PUTF.
# Claim: The euler class is multiplicative with respect to covers (in
# both senses).
# With the claim in hand, we have
# 2 * e(UTF) = e(PUTF) = 0
# We deduce that e(UTF) is either zero or two-torsion.
# 9. Calling code
@liberal
def order_of_euler_class_wrapper(tri, angle):
    """
    Returns the order of the euler class.
    """
    # Convenience wrapper: build the coboundary operator and the Euler
    # cocycle for (tri, angle) and hand both to order_of_euler_class.
    return order_of_euler_class(coboundary(tri, angle), euler_cocycle(tri, angle))
def compute_order_of_euler_classes(file_in, number=None, file_out=None):
data_in = parse_data_file(file_in)
data_in = [line.split(" ") for line in data_in]
if number != None:
data_in = | |
import os
import warnings
from collections import OrderedDict
from typing import Optional, Union
import imageio
import cv2
import numpy as np
import torch
from torch.utils import data
import datautils
__all__ = ["RIO10"]
class RIO10(data.Dataset):
r"""Expecting the RIO10 Dataset to be in this format:
.. code-block::
| ├── RIO10
| │ ├── seq02
| │ │ ├── seq02_01/
| │ │ ├──────seq
| │ │ ├──────── camera.yaml
| │ │ ├──────── frame-000000.color.jpg
| │ │ ├──────── frame-000000.pose.txt
| │ │ ├──────── frame-000000.rendered.depth.png
| │ │ ├──────── frame-000001.color.jpg
| │ │ ├──────── frame-000001.pose.txt
| │ │ ├──────── frame-000001.rendered.depth.png
| │ │ ├──────── .....
| │ │ ├──────instances
| │ │ ├──────── frame-000000.color.jpg
| │ │ ├──────── frame-000001.color.jpg
| │ │ ├── seq02_02/
| │ │ ├── .....
| │ │ └── intrinsics.txt
| │ ├── seq03
| │ ├── ...
|
|
"""
    def __init__(self,
                 basedir: str,
                 scenes: Union[tuple, str, None],
                 start: Optional[int] = 0,
                 end: Optional[int] = -1,
                 height: int = 540,
                 width: int = 960,
                 seg_classes: str = "nyu40",
                 channels_first: bool = False,
                 normalize_color: bool = False,
                 *,
                 return_depth: bool = True,
                 return_intrinsics: bool = True,
                 return_pose: bool = False,
                 return_transform: bool = False,
                 return_names: bool = False,
                 return_labels: bool = False,
                 ):
        """Index the RIO10 directory tree and collect per-sequence file lists.

        ``basedir`` is scanned for scene folders plus one ``intrinsics.txt``
        file with per-sequence camera parameters. ``scenes`` optionally
        restricts which scene folders are used: a tuple of names, a path to
        a split file (one name per line), or None for all.
        """
        super(RIO10, self).__init__()
        #height: 960
        #width: 540
        basedir = (os.path.join(basedir))
        self.height = height
        self.width = width
        # Downsample ratios are computed relative to a 640x480 base resolution.
        self.height_downsample_ratio = float(height) / 480
        self.width_downsample_ratio = float(width) / 640
        self.seg_classes = seg_classes
        self.channels_first = channels_first
        self.normalize_color = normalize_color
        self.return_depth = return_depth
        self.return_intrinsics = return_intrinsics
        self.return_pose = return_pose
        self.return_transform = return_transform
        self.return_names = return_names
        self.return_labels = return_labels
        # NOTE(review): get_color_encoding is not among the visible imports
        # of this module - confirm it is imported elsewhere in the file.
        self.color_encoding = get_color_encoding(self.seg_classes)
        self.intrinsics = dict()
        # Start and end frames. Used to determine sequence length.
        self.start = start
        self.end = end
        full_sequence = self.end == -1
        if start < 0:
            raise ValueError("Start frame cannot be less than 0.")
        if not (end == -1 or end > start):
            raise ValueError(
                "End frame ({}) should be equal to -1 or greater than start ({})".format(
                    end, start
                )
            )
        # NOTE(review): when end == -1 this is negative; __getitem__ later
        # overwrites seqlen with a hard-coded 8 anyway.
        self.seqlen = self.end - self.start
        # scenes should be a tuple
        if isinstance(scenes, str):
            if os.path.isfile(scenes):
                with open(scenes, "r") as f:
                    scenes = tuple(f.read().split("\n"))
            else:
                raise ValueError("incorrect filename: {} doesn't exist".format(scenes))
        elif not (scenes is None or isinstance(scenes, tuple)):
            msg = "scenes should either be path to split.txt or tuple of scenes or None, but was of type %r instead"
            raise TypeError(msg % type(scenes))
        # Get a list of all color, depth, pose, label and intrinsics files.
        colorfiles, depthfiles, posefiles = [], [], []
        labelfiles, intrinsicsfiles, scene_names = [], [], []
        scene_names = os.listdir(basedir)
        print(scene_names)
        for index, tmp in enumerate(scene_names):
            if tmp =="intrinsics.txt":
                print("Found intrinsics")
                self._get_intrinsics(basedir+"/"+tmp)
                # NOTE(review): popping while enumerating shifts later
                # entries down, so the element following this one is
                # skipped by the loop - harmless only if a single
                # intrinsics.txt exists.
                scene_names.pop(index)
        # NOTE(review): 74 is presumably the expected number of sequences
        # in the full RIO10 intrinsics file - TODO confirm.
        if len(self.intrinsics) == 74:
            print("Loaded Intrinsics correctly")
            print(self.intrinsics)
        for seqpath in scene_names:
            if scenes is not None:
                if seqpath not in scenes:
                    continue
            try:
                types = os.listdir(basedir+"/"+seqpath+"/")
            except OSError:
                # NOTE(review): break aborts ALL remaining scenes on one
                # bad directory; continue would skip just this scene.
                print("Invalid directory")
                break
            type_files = "seq"
            type_instance = "instance"
            if type_files not in types:
                print("no files found in this scene")
                continue
            seq_colorfiles, seq_depthfiles, seq_posefiles = [], [], []
            seq_labelfiles, seq_intrinsicsfiles = [], []
            files = []
            try:
                files = os.listdir(os.path.join(basedir+"/"+seqpath+"/"+type_files+"/"))
                files.sort()
            except OSError:
                print("Invalid directory")
                break
            # Bucket files by extension: .jpg -> color, .png -> rendered
            # depth, .txt -> poses.
            for i in range(len(files)):
                prefix, extension = os.path.splitext(files[i])
                extension = extension[1:]
                if extension=="jpg":
                    #colorfiles
                    seq_colorfiles.append(os.path.join(basedir+"/"+seqpath+"/"+type_files+"/"+files[i]))
                if extension=="png":
                    #depthfiles
                    seq_depthfiles.append(os.path.join(basedir+"/"+seqpath+"/"+type_files+"/"+files[i]))
                if extension=="txt":
                    #posefiles
                    seq_posefiles.append(os.path.join(basedir+"/"+seqpath+"/"+type_files+"/"+files[i]))
                #if extension=="yaml":
                    #intrinsicsfiles
                #    seq_intrinsicsfiles.append(files[i])
            colorfiles.append(seq_colorfiles)
            depthfiles.append(seq_depthfiles)
            posefiles.append(seq_posefiles)
            #intrinsicsfiles.append(seq_intrinsicsfiles[0])
            if type_instance not in types:
                print("no Instance files registered in this scene")
                continue
            try:
                files = os.listdir(os.path.join(basedir+"/"+seqpath+"/"+type_instance+"/"))
                files.sort()
            except OSError:
                print("Invalid directory")
                break
            for i in range(len(files)):
                prefix, extension = os.path.splitext(files[i])
                extension = extension[1:]
                if extension=="jpg" or extension=="png":
                    #labelfiles
                    seq_labelfiles.append(os.path.join(basedir+"/"+seqpath+"/"+type_instance+"/"+files[i]))
                else:
                    print("Folder contains files of wrong type")
                    break
            labelfiles.append(seq_labelfiles)
        self.num_sequences = len(colorfiles)
        # Class members to store the list of valid filepaths.
        self.colorfiles = colorfiles
        self.depthfiles = depthfiles
        self.posefiles = posefiles
        self.labelfiles = labelfiles
        # NOTE(review): intrinsicsfiles is never populated (its append is
        # commented out above), so this stays an empty list.
        self.intrinsicsfiles = intrinsicsfiles
        # NOTE(review): seqnames keeps EVERY directory while colorfiles only
        # has entries for scenes that passed the filters - the two can drift
        # out of alignment if any scene is skipped.
        self.seqnames = scene_names
        # Scaling factor for depth images
        self.scaling_factor = 1000.0
    def __len__(self):
        r"""Returns the length of the dataset. """
        # One item per scene sequence, not per frame.
        return self.num_sequences
def __getitem__(self, idx: int):
r"""Returns the data from the sequence at index idx.
Returns:
color_seq (torch.Tensor): Sequence of rgb images of each frame
depth_seq (torch.Tensor): Sequence of depths of each frame
pose_seq (torch.Tensor): Sequence of poses of each frame
transform_seq (torch.Tensor): Sequence of transformations between each frame in the sequence and the
previous frame. Transformations are w.r.t. the first frame in the sequence having identity pose
(relative transformations with first frame's pose as the reference transformation). First
transformation in the sequence will always be `torch.eye(4)`.
label_seq (torch.Tensor): Sequence of semantic segmentation labels
intrinsics (torch.Tensor): Intrinsics for the current sequence
seqname (str): Name of the sequence
Shape:
- color_seq: :math:`(L, H, W, 3)` if `channels_first` is False, else :math:`(L, 3, H, W)`. `L` denotes
sequence length.
- depth_seq: :math:`(L, H, W, 1)` if `channels_first` is False, else :math:`(L, 1, H, W)`. `L` denotes
sequence length.
- pose_seq: :math:`(L, 4, 4)` where `L` denotes sequence length.
- transform_seq: :math:`(L, 4, 4)` where `L` denotes sequence length.
- label_seq: :math:`(L, H, W)` where `L` denotes sequence length.
- intrinsics: :math:`(1, 4, 4)`
"""
# Read in the color, depth, pose, label and intrinstics info.
color_seq_path = self.colorfiles[idx]
depth_seq_path = self.depthfiles[idx]
pose_seq_path = self.posefiles[idx]
if self.return_labels:
label_seq_path = self.labelfiles[idx]
intrinsics_path = self.intrinsicsfiles[idx]
seqname = self.seqnames[idx]
print("Getting scene"+seqname)
color_seq, depth_seq, pose_seq, label_seq = [], [], [], []
poses = []
self.seqlen = 8
output = []
if self.return_intrinsics:
intrinsics = self.intrinsics[seqname]
intrinsics = torch.from_numpy(intrinsics).float()
output.append(intrinsics)
for i in range(self.seqlen):
color = np.asarray(imageio.imread(color_seq_path[i]), dtype=float)
color = self._preprocess_color(color)
color = torch.from_numpy(color)
color_seq.append(color)
if self.return_depth:
depth = np.asarray(imageio.imread(depth_seq_path[i]), dtype=np.int64)
depth = self._preprocess_depth(depth)
depth = torch.from_numpy(depth)
depth_seq.append(depth)
# if self.return_pose or self.return_transform:
# pose = np.loadtxt(pose_seq_path[i]).astype(float)
# poses.append(pose)
# pose = torch.from_numpy(pose)
# pose_seq.append(pose)
# if self.return_labels:
# label = np.asarray(imageio.imread(label_seq_path[i]), dtype=np.uint8)
# label = self._preprocess_label(label)
# label = torch.from_numpy(label)
# label_seq.append(label)
color_seq = torch.stack(color_seq, 0).float()
output.append(color_seq)
if self.return_depth:
depth_seq = torch.stack(depth_seq, 0).float()
output.append(depth_seq)
# if self.return_pose:
# pose_seq = torch.stack(pose_seq, 0).float()
# pose_seq = self._preprocess_poses(pose_seq)
# output.append(pose_seq)
# if self.return_transform:
# transform_seq = datautils.poses_to_transforms(poses)
# transform_seq = [torch.from_numpy(x).float() for x in transform_seq]
# transform_seq = torch.stack(transform_seq, 0).float()
# output.append(transform_seq)
if self.return_names:
output.append(seqname)
# if self.return_labels:
# label_seq = torch.stack(label_seq, 0).float()
# output.append(label_seq)
return tuple(output)
def _get_intrinsics(self, datapath: str):
with open(datapath, "r") as f:
lines = f.readlines()
for line in lines:
line = line.strip().split()
seq_name = line[0]
fx, fy, cx, cy = line[1], line[2], line[3], line[4]
intrinsics = np.zeros((3, 3))
intrinsics[0,0] = fx
intrinsics[1,1] = fy
intrinsics[0,2] = cx
intrinsics[0,2] = cy
intrinsics[2,2] = 1
self.intrinsics[seq_name] = self._preprocess_intrinsics(intrinsics)
def _preprocess_intrinsics(self, intrinsics: Union[torch.Tensor, np.ndarray]):
r"""Preprocesses the intrinsics by scaling `fx`, `fy`, `cx`, `cy` based on new frame size and expanding the
0-th dimension.
Args:
intrinsics (torch.Tensor or np.ndarray): Intrinsics matrix to be preprocessed
Returns:
Output (torch.Tensor or np.ndarray): Preprocessed intrinsics
Shape:
- intrinsics: :math:`(4, 4)`
- Output: :math:`(1, 4, 4)`
"""
scaled_intrinsics = datautils.scale_intrinsics(
intrinsics, self.height_downsample_ratio, self.width_downsample_ratio
)
if torch.is_tensor(scaled_intrinsics):
return scaled_intrinsics.unsqueeze(0)
elif isinstance(scaled_intrinsics, np.ndarray):
return np.expand_dims(scaled_intrinsics, 0)
def _preprocess_color(self, color: np.ndarray):
r"""Preprocesses the color image by resizing to :math:`(H, W, C)`, (optionally) normalizing values to
:math:`[0, 1]`, and (optionally) using channels first :math:`(C, H, W)` representation.
Args:
color (np.ndarray): Raw input rgb image
Retruns:
np.ndarray: Preprocessed rgb image
Shape:
- Input: :math:`(H_\text{old}, W_\text{old}, C)`
- Output: :math:`(H, W, C)` if `self.channels_first == False`, else :math:`(C, H, W)`.
"""
color = cv2.resize(
color, (self.width, self.height), interpolation=cv2.INTER_LINEAR
)
if self.normalize_color:
color = datautils.normalize_image(color)
if self.channels_first:
color = datautils.channels_first(color)
return color
def _preprocess_depth(self, depth: np.ndarray):
r"""Preprocesses the depth image by resizing, adding channel dimension, and scaling values to meters. Optionally
converts depth from channels last :math:`(H, W, 1)` to channels first :math:`(1, H, W)` representation.
Args:
depth (np.ndarray): Raw depth image
Returns:
np.ndarray: Preprocessed depth
Shape:
- depth: :math:`(H_\text{old}, W_\text{old})`
- Output: :math:`(H, W, 1)` if `self.channels_first == False`, else :math:`(1, H, W)`.
"""
depth = cv2.resize(
depth.astype(float),
(self.width, self.height),
interpolation=cv2.INTER_NEAREST,
)
depth = np.expand_dims(depth, -1)
if self.channels_first:
depth = datautils.channels_first(depth)
return depth / self.scaling_factor
def | |
<gh_stars>1-10
#
# gemini_python
#
# primitives_qa.py
# ------------------------------------------------------------------------------
import numpy as np
import math
import operator
from copy import deepcopy
from collections import namedtuple
from astropy.stats import sigma_clip
from scipy.special import j1
from gemini_instruments.gmos.pixel_functions import get_bias_level
from gempy.gemini import gemini_tools as gt
from gempy.gemini import qap_tools as qap
from gempy.utils import logutils
from .lookups import DQ_definitions as DQ
from .lookups import qa_constraints as qa
from geminidr import PrimitivesBASE
from . import parameters_qa
from recipe_system.utils.decorators import parameter_override
# QA evaluation result: presumably the band achieved, the requested band,
# a warning flag and an info string - confirm against usage in measureBG.
QAstatus = namedtuple('QAstatus', 'band req warning info')
# A measured quantity: central value, standard deviation, number of samples.
Measurement = namedtuple('Measurement', 'value std samples')
# ------------------------------------------------------------------------------
@parameter_override
class QA(PrimitivesBASE):
"""
This is the class containing the QA primitives.
"""
tagset = {"GEMINI"}
    def __init__(self, adinputs, **kwargs):
        """Initialize the QA primitive set and register its parameter defaults."""
        super().__init__(adinputs, **kwargs)
        # Load this class's parameter definitions into the parameter system.
        self._param_update(parameters_qa)
    def measureBG(self, adinputs=None, **params):
        """
        This primitive measures the sky background level for an image by
        sampling the non-object unflagged pixels in each extension.
        The count levels are then converted to a flux using the nominal
        (*not* measured) Zeropoint values - the point being you want to measure
        the actual background level, not the flux incident on the top of the
        cloud layer necessary to produce that flux level.

        Results are written to the SKYLEVEL header keyword (per extension,
        and to the PHU when a global value is computed), reported to the
        adcc, and optionally uploaded to fitsstore.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        remove_bias: bool
            remove the bias level (if present) before measuring background?
        separate_ext: bool
            report one value per extension, instead of a global value?

        Returns
        -------
        list
            The input AstroData objects, updated in place.
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]
        suffix = params["suffix"]
        # remove_bias is optional; default to leaving the bias level in.
        remove_bias = params.get("remove_bias", False)
        separate_ext = params["separate_ext"]
        for ad in adinputs:
            bias_level = None
            # First check if the bias level has already been subtracted
            # (any of these PHU keywords means a bias/dark/overscan step ran).
            if remove_bias:
                if not {'BIASIM', 'DARKIM',
                        self.timestamp_keys['subtractOverscan']}.intersection(ad.phu):
                    try:
                        bias_level = get_bias_level(adinput=ad,
                                                    estimate=False)
                    except NotImplementedError:
                        # Instrument does not support bias-level lookup.
                        bias_level = None
                    if bias_level is None:
                        log.warning("Bias level not found for {}; "
                                    "approximate bias will not be removed "
                                    "from the sky level".format(ad.filename))

            # Get the filter name and the corresponding BG band definition
            # and the requested band
            # NOTE(review): 'filter' shadows the builtin; kept as-is here.
            filter = ad.filter_name(pretty=True)
            # Normalize the several K-short spellings to the 'Ks' key used
            # by the qa.bgBands lookup table.
            if filter in ['k(short)', 'kshort', 'K(short)', 'Kshort']:
                filter = 'Ks'
            try:
                bg_band_limits = qa.bgBands[filter]
            except KeyError:
                bg_band_limits = None

            pixscale = ad.pixel_scale()
            exptime = ad.exposure_time()
            # Get background level from all extensions quick'n'dirty
            bg_list = gt.measure_bg_from_image(ad, sampling=100, gaussfit=False)

            info_list = []
            bg_mag_list = []
            in_adu = ad.is_in_adu()
            bunit = 'ADU' if in_adu else 'electron'
            for i, (ext, npz) in enumerate(
                    zip(ad, ad.nominal_photometric_zeropoint())):
                extver = ext.hdr['EXTVER']
                ext_info = {}
                bg_count = Measurement(*bg_list[i])
                if bg_count.value:
                    log.fullinfo("EXTVER {}: Raw BG level = {:.3f}".
                                 format(extver, bg_count.value))
                    # Subtract the per-extension bias estimate if we have one.
                    if bias_level is not None:
                        if bias_level[i] is not None:
                            bg_count = _arith(bg_count, 'sub', bias_level[i])
                            log.fullinfo(" Bias-subtracted BG level "
                                         "= {:.3f}".format(bg_count.value))
                # Put Measurement into the list in place of 3 values
                bg_list[i] = bg_count

                # Write sky background to science header
                ext.hdr.set("SKYLEVEL", bg_count.value, comment="{} [{}]".
                            format(self.keyword_comments["SKYLEVEL"], bunit))

                bg_mag = Measurement(None, None, 0)
                # We need a nominal photometric zeropoint to do anything useful
                if bg_count.value is None:
                    continue

                if npz is not None:
                    if bg_count.value > 0:
                        # convert background to counts/arcsec^2/second, but
                        # want to preserve values of sci_bg and sci_std
                        fak = 1.0 / (exptime * pixscale * pixscale)
                        bg_mag = Measurement(npz - 2.5*math.log10(bg_count.value*fak),
                                             2.5*math.log10(1 + bg_count.std/bg_count.value),
                                             bg_count.samples)
                        # Need to report to FITSstore in electrons
                        bg_e = _arith(bg_count, 'mul', fak * (ext.gain() if
                                                              in_adu else 1))
                        ext_info.update({"mag": bg_mag.value, "mag_std": bg_mag.std,
                                         "electrons": bg_e.value, "electrons_std":
                                             bg_e.std, "nsamples": bg_e.samples})
                        bg_mag_list.append(bg_mag)
                        qastatus = _get_qa_band('bg', ad, bg_mag, bg_band_limits)
                        ext_info.update({"percentile_band": qastatus.band,
                                         "comment": [qastatus.warning]})
                    else:
                        log.warning("Background is less than or equal to 0 "
                                    "for {}:{}".format(ad.filename,extver))
                else:
                    log.stdinfo("No nominal photometric zeropoint available "
                                "for {}:{}, filter {}".format(ad.filename,
                                extver, ad.filter_name(pretty=True)))
                info_list.append(ext_info)
                # NOTE(review): 'qastatus' is only bound inside the
                # npz-and-positive-background branch above; if the first
                # reported extension lacks a zeropoint (or has BG <= 0) this
                # raises NameError / reuses a stale value — confirm intended.
                if separate_ext:
                    comments = _bg_report(ext, bg_count, bunit, bg_mag, qastatus)

            # Collapse extension-by-extension numbers if multiple extensions
            bg_count = _stats(bg_list)
            bg_mag = _stats(bg_mag_list)

            # Write mean background to PHU if averaging all together
            # (or if there's only one science extension)
            if (len(ad)==1 or not separate_ext) and bg_count is not None:
                ad.phu.set("SKYLEVEL", bg_count.value, comment="{} [{}]".
                           format(self.keyword_comments["SKYLEVEL"], bunit))
            qastatus = _get_qa_band('bg', ad, bg_mag, bg_band_limits)

            # Compute overall numbers if requested
            if not separate_ext:
                comments = _bg_report(ad, bg_count, bunit, bg_mag, qastatus)

            # Report measurement to the adcc
            if bg_mag.value:
                try:
                    req_bg = ad.requested_bg()
                except KeyError:
                    req_bg = None
                qad = {"band": qastatus.band,
                       "brightness": float(bg_mag.value),
                       "brightness_error": float(bg_mag.std),
                       "requested": req_bg,
                       "comment": comments}
                qap.adcc_report(ad, "bg", qad)

            # Report measurement to fitsstore
            # (return value kept for symmetry with other primitives; unused)
            if self.upload and "metrics" in self.upload:
                fitsdict = qap.fitsstore_report(ad, "sb", info_list,
                                                self.calurl_dict,
                                                self.mode, upload=True)

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
def measureCC(self, adinputs=None, suffix=None):
"""
This primitive will determine the zeropoint by looking at sources in
the OBJCAT for which a reference catalog magnitude has been determined
It will also compare the measured zeropoint against the nominal
zeropoint for the instrument and the nominal atmospheric extinction
as a function of airmass, to compute the estimated cloud attenuation.
This function is for use with SExtractor-style source-detection.
It relies on having already added a reference catalog and done the
cross match to populate the refmag column of the objcat
The reference magnitudes (refmag) are straight from the reference
catalog. The measured magnitudes (mags) are straight from the object
detection catalog.
We correct for atmospheric extinction at the point where we
calculate the zeropoint, ie we define::
actual_mag = zeropoint + instrumental_mag + extinction_correction
where in this case, actual_mag is the refmag, instrumental_mag is
the mag from the objcat, and we use the nominal extinction value as
we don't have a measured one at this point. ie we're actually
computing zeropoint as::
zeropoint = refmag - mag - nominal_extinction_correction
Then we can treat zeropoint as::
zeropoint = nominal_photometric_zeropoint - cloud_extinction
to estimate the cloud extinction.
Parameters
----------
suffix: str
suffix to be added to output files
"""
log = self.log
log.debug(gt.log_message("primitive", self.myself(), "starting"))
timestamp_key = self.timestamp_keys[self.myself()]
for ad in adinputs:
nom_phot_zpt = ad.nominal_photometric_zeropoint()
if not any(nom_phot_zpt):
log.warning("No nominal photometric zeropoint available "
"for {}, filter {}".format(ad.filename,
ad.filter_name(pretty=True)))
continue
qad = {'zeropoint': {}}
if not any(hasattr(ext, 'OBJCAT') for ext in ad):
log.warning("No OBJCATs found in {}".format(ad.filename))
continue
# We really want to check for the presence of reference mags
# in the objcats at this point, but we can more easily do a
# quick check for the presence of reference catalogs, which are
# a pre-requisite for this and not bother with
# any of this if there are no reference catalogs
if not hasattr(ad, 'REFCAT'):
log.warning("No REFCAT present - not attempting"
" to measure photometric zeropoints")
continue
nom_at_ext = ad.nominal_atmospheric_extinction()
if nom_at_ext is None:
log.warning("Cannot get atmospheric extinction. Assuming zero.")
nom_at_ext = 0.0
exptime = ad.exposure_time()
# If it's a funky nod-and-shuffle imaging acquistion,
# then need to scale exposure time
if "NODANDSHUFFLE" in ad.tags:
log.warning("Imaging Nod-And-Shuffle. Photometry may be dubious")
# AFAIK the number of nod_cycles isn't actually relevant -
# there's always 2 nod positions, thus the exposure
# time for any given star is half the total
exptime /= 2.0
all_zp = []
all_cloud = []
info_list = []
for ext, npz in zip(ad, nom_phot_zpt):
extver = ext.hdr['EXTVER']
ext_info = {}
try:
objcat = ext.OBJCAT
except AttributeError:
log.warning("No OBJCAT in {}:{}".format(ad.filename,extver))
all_zp.append(Measurement(None, None, 0))
continue
# Incrementally cull the catalog: remove sources without mags
good_obj = objcat[~np.logical_or(objcat['MAG_AUTO'] == -999,
objcat['MAG_AUTO'] > 90)]
if len(good_obj) == 0:
log.warning("No magnitudes found in {}[OBJCAT,{}]".format(
ad.filename,extver))
all_zp.append(Measurement(None, None, 0))
continue
# Remove sources without reference mags
good_obj = good_obj[~np.logical_or.reduce(
[good_obj['REF_MAG'] == -999, np.isnan(good_obj['REF_MAG']),
np.isnan(good_obj['REF_MAG_ERR'])])]
if len(good_obj) == 0:
log.warning("No reference magnitudes found in {}[OBJCAT,{}]".
format(ad.filename,extver))
all_zp.append(Measurement(None, None, 0))
continue
# Sources must be free of SExtractor flags and unsaturated, and
# <2% of pixels be otherwise flagged (typically bad/non-linear)
good_obj = good_obj[np.logical_and.reduce([good_obj['FLAGS'] == 0,
good_obj['NIMAFLAGS_ISO'] < 0.02*good_obj['ISOAREA_IMAGE'],
good_obj['IMAFLAGS_ISO'] & DQ.saturated == 0])]
zps = good_obj['REF_MAG'] - nom_at_ext - (good_obj['MAG_AUTO'] +
2.5*math.log10(exptime))
zperrs = np.sqrt(good_obj['REF_MAG_ERR']**2 +
good_obj['MAGERR_AUTO']**2)
# There shouldn't be any NaN left
assert sum(np.logical_or(np.isnan(zps), np.isnan(zperrs))) == 0
# TODO: weight instead?
# Trim out where zeropoint error > err_threshold
if len([z for z in zps if z is not None]) <= 5:
# 5 sources or less. Beggars are not choosers.
ok = | |
<filename>skfem/quadrature.py
"""Tabulated and generated quadrature points for various reference domains."""
import numpy as np
from typing import Tuple
def get_quadrature(refdom: str, norder: int) -> Tuple[np.ndarray, np.ndarray]:
    """Return a nth order accurate quadrature rule for different reference
    domains.

    Parameters
    ----------
    refdom
        The name of the reference domain. Valid reference domains can be found
        in the following table.

        +-------+-----------------+----------------+
        | Name  | Corner points   | Maximum order  |
        +-------+-----------------+----------------+
        | point | N.A.            | infty          |
        +-------+-----------------+----------------+
        | line  | 0, 1            | infty          |
        +-------+-----------------+----------------+
        | tri   | (0,0) (0,1)     | 19             |
        |       | (1,0)           |                |
        +-------+-----------------+----------------+
        | quad  | (-1,-1) (1,-1)  | infty          |
        |       | (1,1) (-1,1)    |                |
        +-------+-----------------+----------------+
        | tet   | (0,0,0) (0,0,1) | 4              |
        |       | (0,1,0) (1,0,0) |                |
        +-------+-----------------+----------------+
        | hex   | (-1,-1,-1),     | infty          |
        |       | (1,1,1), etc.   |                |
        +-------+-----------------+----------------+

    norder
        The polynomial order up to which the requested quadrature rule is
        accurate.

    Returns
    -------
    An array of quadrature points (Ndim x Nqp) and an array of quadrature
    weights (Nqp).

    Raises
    ------
    NotImplementedError
        If `refdom` is not one of the supported reference domains.

    """
    # Bug fix: strings must be compared with '==', not 'is'. Identity
    # comparison of string literals only works by CPython interning accident
    # and emits a SyntaxWarning on Python >= 3.8.
    if refdom == "tri":
        return get_quadrature_tri(norder)
    elif refdom == "tet":
        return get_quadrature_tet(norder)
    elif refdom == "line":  # [0,1]
        return get_quadrature_line(norder)
    elif refdom == "point":
        return get_quadrature_point(norder)
    elif refdom == "quad":  # (-1,-1) (1,-1) (1,1) (-1,1)
        X, W = get_quadrature_line(norder)
        # generate tensor product rule from the 1D rule; the 1D points live
        # on [0,1], so scale/shift the pairs onto [-1,1]^2
        A, B = np.meshgrid(X, X)
        Y = 2.0*np.vstack((A.flatten(order='F'),
                           B.flatten(order='F'))) - 1.0
        # transform weights: each factor picks up the Jacobian 2 of the map
        A, B = np.meshgrid(2*W, 2*W)
        Z = A*B
        W = Z.flatten(order='F')
        return Y, W
    elif refdom == "hex":  # (-1,-1,-1), (1,1,1), etc.
        X, W = get_quadrature_line(norder)
        # generate tensor product rule from the 1D rule, as for "quad"
        A, B, C = np.meshgrid(X, X, X)
        Y = 2.0*np.vstack((A.flatten(order='F'),
                           B.flatten(order='F'),
                           C.flatten(order='F'))) - 1.0
        # transform weights
        A, B, C = np.meshgrid(2*W, 2*W, 2*W)
        Z = A*B*C
        W = Z.flatten(order='F')
        return Y, W
    else:
        raise NotImplementedError("The given mesh type is not supported!")
def get_quadrature_tet(norder: int) -> Tuple[np.ndarray, np.ndarray]:
    """Return a nth order accurate quadrature rule for the reference
    tetrahedron (0,0,0) (0,0,1) (0,1,0) (1,0,0).

    Parameters
    ----------
    norder
        Requested order of accuracy. Orders below 2 are promoted to 2;
        rules are tabulated for orders 2-4 only.

    Returns
    -------
    Quadrature points (3 x Nqp) and weights (Nqp). The weights sum to 1/6,
    the volume of the reference tetrahedron.

    Raises
    ------
    NotImplementedError
        If no rule is tabulated for the requested order.

    """
    if norder <= 1:
        norder = 2
    try:
        return {
            2: (np.array([[0.5854101966249685, 0.1381966011250105, 0.1381966011250105, 0.1381966011250105], \
                          [0.1381966011250105, 0.1381966011250105, 0.1381966011250105, 0.5854101966249685], \
                          [0.1381966011250105, 0.1381966011250105, 0.5854101966249685, 0.1381966011250105]]), \
                np.array([0.2500000000000000, 0.2500000000000000, 0.2500000000000000, 0.2500000000000000]) / 6.),
            3: (np.array([[0.2500000000000000, 0.5000000000000000, 0.1666666666666667, 0.1666666666666667, 0.1666666666666667], \
                          [0.2500000000000000, 0.1666666666666667, 0.1666666666666667, 0.1666666666666667, 0.5000000000000000], \
                          [0.2500000000000000, 0.1666666666666667, 0.1666666666666667, 0.5000000000000000, 0.1666666666666667]]), \
                np.array([-0.8000000000000000, 0.4500000000000000, 0.4500000000000000, 0.4500000000000000,
                          0.4500000000000000]) / 6.),
            4: (np.array([[0.2500000000000000, 0.7857142857142857, 0.0714285714285714, 0.0714285714285714,
                           0.0714285714285714, 0.1005964238332008, 0.3994035761667992, 0.3994035761667992,
                           0.3994035761667992, 0.1005964238332008, 0.1005964238332008], \
                          [0.2500000000000000, 0.0714285714285714, 0.0714285714285714, 0.0714285714285714,
                           0.7857142857142857, 0.3994035761667992, 0.1005964238332008, 0.3994035761667992,
                           0.1005964238332008, 0.3994035761667992, 0.1005964238332008], \
                          [0.2500000000000000, 0.0714285714285714, 0.0714285714285714, 0.7857142857142857,
                           0.0714285714285714, 0.3994035761667992, 0.3994035761667992, 0.1005964238332008,
                           0.1005964238332008, 0.1005964238332008, 0.3994035761667992]]), \
                np.array([-0.0789333333333333, 0.0457333333333333, 0.0457333333333333, 0.0457333333333333,
                          0.0457333333333333, 0.1493333333333333, 0.1493333333333333, 0.1493333333333333,
                          0.1493333333333333, 0.1493333333333333, 0.1493333333333333]) / 6.)
        }[norder]  # last one available from http://www.cfd-online.com/Wiki/Code:_Quadrature_on_Tetrahedra
    # Bug fix: catch only the missing-key case. The original bare 'except'
    # also swallowed KeyboardInterrupt/SystemExit and would mask genuine
    # programming errors inside the table construction.
    except KeyError:
        raise NotImplementedError("The requested order of quadrature " +
                                  "is not tabulated.") from None
def get_quadrature_tri(norder: int) -> Tuple[np.ndarray, np.ndarray]:
"""Return a nth order accurate quadrature rule for the reference triangle
(0,0) (0,1) (1,0)."""
if norder <= 1:
norder = 2
try:
return {
2: (np.array([[1.666666666666666666666e-01, 6.666666666666666666666e-01, 1.666666666666666666666e-01],
[1.666666666666666666666e-01, 1.666666666666666666666e-01, 6.666666666666666666666e-01]]),
np.array([1.666666666666666666666e-01, 1.666666666666666666666e-01, 1.666666666666666666666e-01])),
3: (np.array([[0.333333333333333, 0.2, 0.6, 0.2], [0.333333333333333, 0.6, 0.2, 0.2]]),
np.array([-0.28125, 0.260416666666667, 0.260416666666667, 0.260416666666667])),
4: (np.array([[0.445948490915965, 0.0915762135097699, 0.10810301816807, 0.816847572980459,
0.445948490915965, 0.091576213509771],
[0.10810301816807, 0.816847572980459, 0.445948490915965, 0.091576213509771, 0.445948490915965,
0.0915762135097699]]), np.array(
[0.111690794839006, 0.054975871827661, 0.111690794839006, 0.054975871827661, 0.111690794839006,
0.054975871827661])),
5: (np.array([[0.333333333333333, 0.470142064105115, 0.101286507323457, 0.05971587178977, 0.797426985353087,
0.470142064105115, 0.101286507323456],
[0.333333333333333, 0.05971587178977, 0.797426985353087, 0.470142064105115, 0.101286507323456,
0.470142064105115, 0.101286507323457]]), np.array(
[0.1125, 0.066197076394253, 0.0629695902724135, 0.066197076394253, 0.0629695902724135,
0.066197076394253, 0.0629695902724135])),
6: (np.array([[0.249286745170911, 0.0630890144915021, 0.501426509658179, 0.873821971016996,
0.24928674517091, 0.063089014491502, 0.636502499121399, 0.310352451033784, 0.053145049844817,
0.053145049844817, 0.310352451033784, 0.636502499121399],
[0.501426509658179, 0.873821971016996, 0.24928674517091, 0.063089014491502, 0.249286745170911,
0.0630890144915021, 0.053145049844817, 0.053145049844817, 0.310352451033784,
0.636502499121399, 0.636502499121399, 0.310352451033784]]), np.array(
[0.0583931378631895, 0.0254224531851035, 0.0583931378631895, 0.0254224531851035, 0.0583931378631895,
0.0254224531851035, 0.041425537809187, 0.041425537809187, 0.041425537809187, 0.041425537809187,
0.041425537809187, 0.041425537809187])),
7: (np.array([[0.333333333333333, 0.26034596607904, 0.065130102902216, 0.47930806784192, 0.869739794195568,
0.26034596607904, 0.065130102902216, 0.63844418856981, 0.312865496004874, 0.048690315425316,
0.048690315425316, 0.312865496004874, 0.63844418856981],
[0.333333333333333, 0.47930806784192, 0.869739794195568, 0.26034596607904, 0.065130102902216,
0.26034596607904, 0.065130102902216, 0.048690315425316, 0.048690315425316, 0.312865496004874,
0.63844418856981, 0.63844418856981, 0.312865496004874]]), np.array(
[-0.074785022233841, 0.087807628716604, 0.026673617804419, 0.087807628716604, 0.026673617804419,
0.087807628716604, 0.026673617804419, 0.0385568804451285, 0.0385568804451285, 0.0385568804451285,
0.0385568804451285, 0.0385568804451285, 0.0385568804451285])),
8: (np.array([[0.333333333333333, 0.459292588292723, 0.17056930775176, 0.0505472283170311,
0.081414823414554, 0.65886138449648, 0.898905543365938, 0.459292588292723, 0.17056930775176,
0.050547228317031, 0.728492392955404, 0.263112829634638, 0.008394777409958,
0.008394777409958, 0.263112829634638, 0.728492392955404],
[0.333333333333333, 0.081414823414554, 0.65886138449648, 0.898905543365938, 0.459292588292723,
0.17056930775176, 0.050547228317031, 0.459292588292723, 0.17056930775176, 0.0505472283170311,
0.008394777409958, 0.008394777409958, 0.263112829634638, 0.728492392955404,
0.728492392955404, 0.263112829634638]]), np.array(
[0.0721578038388935, 0.0475458171336425, 0.051608685267359, 0.016229248811599, 0.0475458171336425,
0.051608685267359, 0.016229248811599, 0.0475458171336425, 0.051608685267359, 0.016229248811599,
0.0136151570872175, 0.0136151570872175, 0.0136151570872175, 0.0136151570872175, 0.0136151570872175,
0.0136151570872175])),
9: (np.array([[0.333333333333333, 0.489682519198737, 0.437089591492936, 0.188203535619032,
0.0447295133944519, 0.020634961602525, 0.125820817014127, 0.623592928761935,
0.910540973211095, 0.489682519198738, 0.437089591492937, 0.188203535619033,
0.044729513394453, 0.741198598784498, 0.221962989160766, 0.036838412054736,
0.036838412054736, 0.221962989160766, 0.741198598784498],
[0.333333333333333, 0.020634961602525, 0.125820817014127, 0.623592928761935,
0.910540973211095, 0.489682519198738, 0.437089591492937, 0.188203535619033,
0.044729513394453, 0.489682519198737, 0.437089591492936, 0.188203535619032,
0.0447295133944519, 0.036838412054736, 0.036838412054736, 0.221962989160766,
0.741198598784498, 0.741198598784498, 0.221962989160766]]), np.array(
[0.0485678981413995, 0.0156673501135695, 0.038913770502387, 0.039823869463605, 0.012788837829349,
0.0156673501135695, 0.038913770502387, 0.039823869463605, 0.012788837829349, 0.0156673501135695,
0.038913770502387, 0.039823869463605, 0.012788837829349, 0.0216417696886445, 0.0216417696886445,
0.0216417696886445, 0.0216417696886445, 0.0216417696886445, 0.0216417696886445])),
10: (np.array([[0.333333333333333, 0.485577633383658, 0.109481575485037, 0.028844733232685,
0.781036849029926, 0.485577633383657, 0.109481575485037, 0.550352941820999,
0.728323904597411, 0.923655933587501, 0.307939838764121, 0.246672560639903, 0.0668032510122,
0.14170721941488, 0.025003534762686, 0.009540815400299, 0.14170721941488, 0.025003534762686,
0.009540815400299, 0.307939838764121, 0.246672560639903, 0.0668032510122, 0.550352941820999,
0.728323904597411, 0.923655933587501],
[0.333333333333333, 0.028844733232685, 0.781036849029926, 0.485577633383657,
0.109481575485037, 0.485577633383658, 0.109481575485037, 0.14170721941488,
0.025003534762686, 0.009540815400299, 0.14170721941488, 0.025003534762686,
0.009540815400299, 0.307939838764121, 0.246672560639903, 0.0668032510122, 0.550352941820999,
0.728323904597411, 0.923655933587501, 0.550352941820999, 0.728323904597411,
0.923655933587501, 0.307939838764121, 0.246672560639903, 0.0668032510122]]), np.array(
[0.045408995191377, 0.0183629788782335, 0.022660529717764, 0.0183629788782335, 0.022660529717764,
0.0183629788782335, 0.022660529717764, 0.03637895842271, 0.0141636212655285, 0.0047108334818665,
0.03637895842271, 0.0141636212655285, 0.0047108334818665, 0.03637895842271, 0.0141636212655285,
0.0047108334818665, 0.03637895842271, 0.0141636212655285, 0.0047108334818665, 0.03637895842271,
0.0141636212655285, 0.0047108334818665, 0.03637895842271, 0.0141636212655285, 0.0047108334818665])),
11: (np.array([[0.488217389773805, 0.439724392294461, 0.271210385012116, 0.127576145541586,
0.021317350453211, 0.02356522045239, 0.120551215411079, 0.457579229975768,
0.744847708916828, 0.957365299093579, 0.488217389773805, 0.43972439229446,
0.271210385012116, 0.127576145541586, 0.02131735045321, 0.608943235779788,
0.695836086787803, 0.858014033544073, 0.275713269685514, 0.28132558098994,
0.116251915907597, 0.115343494534698, 0.022838332222257, 0.02573405054833,
0.115343494534698, 0.022838332222257, 0.02573405054833, 0.275713269685514, 0.28132558098994,
0.116251915907597, 0.608943235779788, 0.695836086787803, 0.858014033544073],
[0.02356522045239, 0.120551215411079, 0.457579229975768, 0.744847708916828,
0.957365299093579, 0.488217389773805, 0.43972439229446, 0.271210385012116,
0.127576145541586, 0.02131735045321, 0.488217389773805, 0.439724392294461,
0.271210385012116, 0.127576145541586, 0.021317350453211, 0.115343494534698,
0.022838332222257, 0.02573405054833, 0.115343494534698, 0.022838332222257, 0.02573405054833,
0.275713269685514, 0.28132558098994, 0.116251915907597, 0.608943235779788,
0.695836086787803, 0.858014033544073, 0.608943235779788, 0.695836086787803,
0.858014033544073, 0.275713269685514, 0.28132558098994, 0.116251915907597]]), np.array(
[0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0201857788831905, 0.0111783866011515, 0.0086581155543295, 0.0201857788831905, 0.0111783866011515,
0.0086581155543295, 0.0201857788831905, 0.0111783866011515, 0.0086581155543295, 0.0201857788831905,
0.0111783866011515, 0.0086581155543295, 0.0201857788831905, 0.0111783866011515, 0.0086581155543295,
0.0201857788831905, 0.0111783866011515, 0.0086581155543295])),
12: (np.array([[0.488217389773805, 0.439724392294461, 0.271210385012116, 0.127576145541586,
0.021317350453211, 0.02356522045239, 0.120551215411079, 0.457579229975768,
0.744847708916828, 0.957365299093579, 0.488217389773805, 0.43972439229446,
0.271210385012116, 0.127576145541586, 0.02131735045321, 0.608943235779788,
0.695836086787803, 0.858014033544073, 0.275713269685514, 0.28132558098994,
0.116251915907597, 0.115343494534698, 0.022838332222257, 0.02573405054833,
0.115343494534698, 0.022838332222257, 0.02573405054833, 0.275713269685514, 0.28132558098994,
0.116251915907597, 0.608943235779788, 0.695836086787803, 0.858014033544073],
[0.02356522045239, 0.120551215411079, 0.457579229975768, 0.744847708916828,
0.957365299093579, 0.488217389773805, 0.43972439229446, 0.271210385012116,
0.127576145541586, 0.02131735045321, 0.488217389773805, 0.439724392294461,
0.271210385012116, 0.127576145541586, 0.021317350453211, 0.115343494534698,
0.022838332222257, 0.02573405054833, 0.115343494534698, 0.022838332222257, 0.02573405054833,
0.275713269685514, 0.28132558098994, 0.116251915907597, 0.608943235779788,
0.695836086787803, 0.858014033544073, 0.608943235779788, 0.695836086787803,
0.858014033544073, 0.275713269685514, 0.28132558098994, 0.116251915907597]]), np.array(
[0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0128655332202275, 0.021846272269019, 0.0314291121089425, 0.0173980564653545, 0.0030831305257795,
0.0201857788831905, 0.0111783866011515, 0.0086581155543295, 0.0201857788831905, 0.0111783866011515,
0.0086581155543295, 0.0201857788831905, 0.0111783866011515, 0.0086581155543295, 0.0201857788831905,
0.0111783866011515, 0.0086581155543295, 0.0201857788831905, 0.0111783866011515, 0.0086581155543295,
0.0201857788831905, 0.0111783866011515, 0.0086581155543295])),
13: (np.array([[0.333333333333333, 0.495048184939704, 0.468716635109574, 0.414521336801276,
0.229399572042832, 0.11442449519633, 0.024811391363459, 0.009903630120591,
0.062566729780852, 0.170957326397447, 0.541200855914337, 0.77115100960734,
0.950377217273082, 0.495048184939705, 0.468716635109574, 0.414521336801277,
0.229399572042831, 0.11442449519633, 0.024811391363459, 0.63635117456166, 0.690169159986905,
0.851409537834241, 0.268794997058761, 0.291730066734288, 0.126357385491669,
0.094853828379579, 0.018100773278807, 0.02223307667409, 0.094853828379579,
0.018100773278807, 0.02223307667409, 0.268794997058761, 0.291730066734288,
0.126357385491669, 0.63635117456166, 0.690169159986905, 0.851409537834241],
[0.333333333333333, 0.009903630120591, 0.062566729780852, 0.170957326397447,
0.541200855914337, 0.77115100960734, 0.950377217273082, 0.495048184939705,
0.468716635109574, 0.414521336801277, 0.229399572042831, 0.11442449519633,
0.024811391363459, 0.495048184939704, 0.468716635109574, 0.414521336801276,
0.229399572042832, 0.11442449519633, 0.024811391363459, 0.094853828379579,
0.018100773278807, 0.02223307667409, 0.094853828379579, 0.018100773278807, 0.02223307667409,
0.268794997058761, 0.291730066734288, 0.126357385491669, 0.63635117456166,
0.690169159986905, 0.851409537834241, 0.63635117456166, 0.690169159986905,
0.851409537834241, 0.268794997058761, 0.291730066734288, 0.126357385491669]]), np.array(
[0.026260461700401, 0.005640072604665, 0.015711759181227, 0.023536251252097, 0.0236817932681775,
0.015583764522897, 0.003987885732537, 0.005640072604665, 0.015711759181227, 0.023536251252097,
0.0236817932681775, 0.015583764522897, 0.003987885732537, 0.005640072604665, 0.015711759181227,
0.023536251252097, 0.0236817932681775, 0.015583764522897, 0.003987885732537, 0.018424201364366,
0.008700731651911, 0.0077608934195225, 0.018424201364366, 0.008700731651911, 0.0077608934195225,
0.018424201364366, 0.008700731651911, 0.0077608934195225, 0.018424201364366, 0.008700731651911,
0.0077608934195225, 0.018424201364366, 0.008700731651911, 0.0077608934195225, 0.018424201364366,
0.008700731651911, 0.0077608934195225])),
14: (np.array([[0.488963910362178, 0.417644719340454, 0.273477528308838, 0.177205532412544,
0.0617998830908719, 0.019390961248701, 0.022072179275643, 0.164710561319092,
0.453044943382323, 0.645588935174913, 0.876400233818255, 0.961218077502598,
0.488963910362179, 0.417644719340454, 0.273477528308839, 0.177205532412543,
0.061799883090873, 0.019390961248701, 0.770608554774996, 0.570222290846683,
0.686980167808088, 0.879757171370171, 0.172266687821356, 0.336861459796345,
0.298372882136258, 0.118974497696957, 0.057124757403648, 0.092916249356972,
0.014646950055654, 0.001268330932872, 0.057124757403648, 0.092916249356972,
0.014646950055654, 0.001268330932872, 0.172266687821356, 0.336861459796345,
0.298372882136258, 0.118974497696957, 0.770608554774996, 0.570222290846683,
0.686980167808088, 0.879757171370171],
[0.022072179275643, 0.164710561319092, 0.453044943382323, 0.645588935174913,
0.876400233818255, 0.961218077502598, 0.488963910362179, 0.417644719340454,
0.273477528308839, 0.177205532412543, 0.061799883090873, 0.019390961248701,
0.488963910362178, 0.417644719340454, 0.273477528308838, 0.177205532412544,
0.0617998830908719, 0.019390961248701, 0.057124757403648, 0.092916249356972,
0.014646950055654, 0.001268330932872, 0.057124757403648, 0.092916249356972,
0.014646950055654, 0.001268330932872, 0.172266687821356, 0.336861459796345,
0.298372882136258, 0.118974497696957, 0.770608554774996, 0.570222290846683,
0.686980167808088, 0.879757171370171, 0.770608554774996, 0.570222290846683,
0.686980167808088, 0.879757171370171, 0.172266687821356, 0.336861459796345,
0.298372882136258, 0.118974497696957]]), np.array(
[0.0109417906847145, 0.0163941767720625, 0.025887052253646, 0.0210812943684965, 0.0072168498348885,
0.0024617018012, 0.0109417906847145, 0.0163941767720625, 0.025887052253646, 0.0210812943684965,
0.0072168498348885, 0.0024617018012, 0.0109417906847145, 0.0163941767720625, 0.025887052253646,
0.0210812943684965, 0.0072168498348885, 0.0024617018012, 0.012332876606282, 0.0192857553935305,
0.007218154056767, 0.0025051144192505, 0.012332876606282, 0.0192857553935305, 0.007218154056767,
0.0025051144192505, 0.012332876606282, 0.0192857553935305, 0.007218154056767, 0.0025051144192505,
0.012332876606282, 0.0192857553935305, 0.007218154056767, 0.0025051144192505, 0.012332876606282,
0.0192857553935305, 0.007218154056767, 0.0025051144192505, 0.012332876606282, 0.0192857553935305,
0.007218154056767, 0.0025051144192505])),
15: (np.array([[0.333333333333333, 0.497170540556774, 0.482176322624624, 0.450239969020781,
0.400266239377397, 0.252141267970952, 0.162047004658462, 0.0758758822607461,
0.015654726967822, 0.005658918886452, 0.035647354750751, 0.099520061958437,
0.199467521245206, 0.495717464058095, 0.675905990683077, 0.848248235478508,
0.968690546064356, 0.497170540556774, 0.482176322624625, 0.450239969020782,
0.400266239377397, | |
threshold:
to_keep = np.where(global_scores[picked] >= threshold)[0]
to_delete = np.where(global_scores[picked] < threshold)[0]
# print(picked.shape)
picked = picked[to_keep]
# print(picked.shape)
topk = picked.shape[0]
for x in to_delete:
sizes[x] = 0
# print(topk)
if not topk:
break
global_scores /= len(self.back_translators)
last_scores /= len(self.back_translators)
# TODO: there may be duplicates because of new_Unk
if only_local_score:
ret = [(self.global_itos[z], last_scores[z]) if z != onmt.IO.UNK else (new_unk, last_scores[z]) for z in picked if self.global_itos[z] != onmt.IO.EOS_WORD]
else:
ret = [(self.global_itos[z], global_scores[z]) if z != onmt.IO.UNK else (new_unk, global_scores[z]) for z in picked if self.global_itos[z] != onmt.IO.EOS_WORD]
return sorted(ret, key=lambda x: x[1], reverse=True)
return global_scores, new_unk
print()
print(list(reversed([self.global_itos[x] for x in np.argsort(global_scores)[-100:]])))
pass
def suggest_in_between(self, words, idxs_middle, topk=10, threshold=None,
original_sentence=None, max_inserts=4, ignore_set=set(),
return_full_texts=False, orig_score=0, verbose=False):
# TODO: This is outdated
run_through = True
to_add = -10000
memoized_stuff = self.last == original_sentence and original_sentence is not None
# print('suggest_next', words[:idx], memoized_stuff)
if not memoized_stuff:
self.last = original_sentence
self.memoized = {}
self.memoized['translation'] = []
sentence = (' '.join(words) if original_sentence is None
else original_sentence)
words_after = words[idxs_middle[-1] + 1:]
words_between = words[idxs_middle[0]:idxs_middle[1] + 1]
words = words[:idxs_middle[0]]
words_before = words
# print(words)
# print(words_between)
# print(words_after)
max_iters = max_inserts + idxs_middle[1] - idxs_middle[0] + 1
out_scores = {}
orig_ids = np.array([self.global_stoi[onmt.IO.BOS_WORD]] + [self.global_stoi[x] if x in self.global_stoi else onmt.IO.UNK for x in words])
after_ids = np.array([self.global_stoi[x] if x in self.global_stoi else onmt.IO.UNK for x in words_after] +
[self.global_stoi[onmt.IO.EOS_WORD]])
mid_ids = np.array([self.global_stoi[x] if x in self.global_stoi else onmt.IO.UNK for x in words_between])
unk_scores = []
if threshold:
orig_threshold = threshold
attns = []
src_examples = []
decoder_states = []
encoder_states = []
contexts = []
mappings = []
prev_scores = 0
feed_original = 0
in_between = 0
mid_score = 0
for k, (to, back, mapper, back_mapper, unks) in enumerate(
zip(self.to_translators, self.back_translators,
self.vocab_mappers, self.back_vocab_mappers,
self.vocab_unks)):
if memoized_stuff:
translation, mapping = self.memoized['translation'][k]
mappings.append(mapping)
else:
translation, mapping = choose_forward_translation(sentence, to, back,
n=5)
mappings.append(mapping)
self.memoized['translation'].append((translation, mapping))
encStates, context, decStates, src_example = (
back.get_init_states(translation))
src_examples.append(src_example)
# print()
# Feed in the original input
tz = time.time()
for i, n in zip(orig_ids, orig_ids[1:]):
idx = int(back_mapper[i])
n = int(back_mapper[n])
out, decStates, attn = back.advance_states(encStates, context,
decStates, [idx], [1])
attenz = attn['std'].data[0].cpu().numpy()
chosen = np.argmax(attenz, axis=1)
for r, ch in enumerate(chosen):
ch = mapping[ch]
if ch in back.vocab().stoi:
# print("YOO")
ind = back.vocab().stoi[ch]
# print("prev", out[r, ind])
out[r, ind] = max(out[r, ind], out[r, onmt.IO.UNK])
# print("aft", out[r, ind])
prev_scores += out[0][n]
mid_score += prev_scores
feed_original += time.time() - tz
decoder_states.append(decStates)
contexts.append(context)
encoder_states.append(encStates)
# print("MID IDS", mid_ids)
onmt_model.transform_dec_states(decStates, [1])
decStates = copy.deepcopy(decStates)
for i, n in zip([orig_ids[-1]] + list(mid_ids), list(mid_ids) + [after_ids[0]]):
# print('mid', i, n)
idx = int(back_mapper[i])
n = int(back_mapper[n])
out, decStates, attn = back.advance_states(encStates, context,
decStates, [idx], [1])
attenz = attn['std'].data[0].cpu().numpy()
chosen = np.argmax(attenz, axis=1)
for r, ch in enumerate(chosen):
ch = mapping[ch]
if ch in back.vocab().stoi:
# print("YOO")
ind = back.vocab().stoi[ch]
# print("prev", out[r, ind])
out[r, ind] = max(out[r, ind], out[r, onmt.IO.UNK])
# print("aft", out[r, ind])
mid_score += out[0][n]
# print("INcreasing mid")
prev = [[]]
prev_scores = [prev_scores / float(len(self.back_translators))]
mid_score = mid_score / float(len(self.back_translators))
if verbose:
print('MID', mid_score)
if threshold:
threshold = mid_score + threshold
# print(prev_scores)
prev_unks = [[]]
new_sizes = [1]
idxs = [orig_ids[-1]]
current_iter = 0
# print(list(reversed([(self.global_itos[x], global_scores[0][x]) for x in np.argsort(global_scores[0])[-10:]])))
going_after = 0
while prev and current_iter < max_iters + 1:
if verbose:
print('iter', current_iter, topk)
current_iter += 1
global_scores = np.zeros((len(prev), (len(self.global_itos))))
all_stuff = zip(
self.back_translators, self.vocab_mappers,
self.back_vocab_mappers, self.vocab_unks, contexts,
decoder_states, encoder_states, src_examples, mappings)
new_decoder_states = []
new_attns = []
unk_scores = []
tz = time.time()
for (b, mapper, back_mapper, unks, context,
decStates, encStates, srcz, mapping) in all_stuff:
idx = [int(back_mapper[i]) for i in idxs]
out, decStates, attn = b.advance_states(
encStates, context, decStates, idx, new_sizes)
new_decoder_states.append(decStates)
new_attns.append(attn)
attenz = attn['std'].data[0].cpu().numpy()
chosen = np.argmax(attenz, axis=1)
for r, ch in enumerate(chosen):
ch = mapping[ch]
if ch in b.vocab().stoi:
ind = b.vocab().stoi[ch]
out[r, ind] = max(out[r, ind], out[r, onmt.IO.UNK])
elif ch in self.global_stoi:
ind = self.global_stoi[ch]
global_scores[r, ind] -= to_add
unk_scores.append(out[:, onmt.IO.UNK])
global_scores[:, mapper] += out
if unks.shape[0]:
global_scores[:, unks] += to_add + out[:, onmt.IO.UNK][:, np.newaxis]
decoder_states = new_decoder_states
global_scores /= float(len(self.back_translators))
unk_scores = [normalize_ll(x) for x in np.array(unk_scores).T]
new_prev = []
new_prev_unks = []
new_prev_scores = []
new_sizes = []
new_origins = []
idxs = []
new_scores = global_scores + np.array(prev_scores)[:, np.newaxis]
# best = new_scores.max()
# if threshold:
# threshold = mid_score + orig_threshold
# threshold = best + orig_threshold
# threshold = orig_score + orig_threshold
# print('best', best)
# print('new thresh', threshold)
# print(threshold == best + orig_threshold)
if threshold:
# print(threshold)
where = np.where(new_scores > threshold)
if topk:
largest = largest_indices(new_scores[where], topk)[0]
where = (where[0][largest], where[1][largest])
else:
where = largest_indices(new_scores, topk)
# print('best', new_scores[where[0][0], where[1][0]], new_scores.max())
tmp = np.argsort(where[0])
where = (where[0][tmp], where[1][tmp])
# print(where)
new_this_round = []
new_origins_this_round = []
to_add = time.time() - tz
in_between += time.time() - tz
if verbose:
print('in', to_add, in_between, threshold)
print(where[0].shape)
for i, j in zip(*where):
if j == after_ids[0]:
words = [self.global_itos[x] if x != onmt.IO.UNK
else prev_unks[i][k]
for k, x in enumerate(prev[i], start=0)]
new_full = ' '.join(words_before + words + words_after)
new = ' '.join(words)
if return_full_texts:
new = new_full
if new_full in ignore_set:
continue
# return
if new not in out_scores or new_scores[i, j] > out_scores[new]:
out_scores[new] = new_scores[i, j]
new_this_round.append(new)
new_origins_this_round.append(i)
# if topk:
# topk -= 1
continue
if j == self.global_stoi[onmt.IO.EOS_WORD]:
continue
new_origins.append(i)
new_unk = '<unk>'
if j == onmt.IO.UNK:
new_unk_scores = collections.defaultdict(lambda: 0)
for x, src, mapping, score_weight in zip(new_attns, src_examples, mappings, unk_scores[i]):
attn = x['std'].data[0][i]
for zidx, (word, score) in enumerate(zip(src, attn)):
word = mapping[zidx]
new_unk_scores[word] += score * score_weight
new_unk = max(new_unk_scores.items(),
key=operator.itemgetter(1))[0]
# print (' '.join(self.global_itos[x] for x in prev[i][1:]))
new_prev.append(prev[i] + [j])
new_prev_unks.append(prev_unks[i] + [new_unk])
new_prev_scores.append(new_scores[i, j])
# print(i, j, new_scores[i,j])
idxs.append(j)
# print('newog', new_origins_this_round)
# print(new_sizes)
# print('idxs')
# print(idxs)
# for i, p in enumerate(prev):
# print(i, end= ' ')
# print([self.global_itos[x] for x in p], end=' ')
# print(list(reversed([(self.global_itos[x], new_scores[i][x]) for x in np.argsort(new_scores[i])[-10:]])))
new_sizes = np.bincount(new_origins, minlength=len(prev))
new_sizes = [int(x) for x in new_sizes]
nsizes_this_round = np.bincount(new_origins_this_round, minlength=len(prev))
nsizes_this_round = [int(x) for x in nsizes_this_round]
# global_scores = np.zeros((len(prev), (len(self.global_itos))))
zaaa = time.time()
ndec_states = copy.deepcopy(decoder_states)
all_stuff = zip(
self.back_translators, self.vocab_mappers,
self.back_vocab_mappers, self.vocab_unks, contexts,
ndec_states, encoder_states, mappings)
if len(new_this_round):
# print(out_scores)
for (b, mapper, back_mapper, unks, context,
decStates, encStates, mapping) in all_stuff:
nsizes = nsizes_this_round
# print('new b')
for i, next_ in zip(after_ids, after_ids[1:]):
# print(self.global_itos[i], self.global_itos[next_])
idx = [int(back_mapper[i]) for _ in new_this_round]
# print(len(nsizes_this_round))
# print(len(idx), sum(nsizes_this_round))
# print(nsizes)
n = int(back_mapper[next_])
# decStates = copy.deepcopy(decStates)
out, decStates, attn = b.advance_states(
encStates, context, decStates, idx, nsizes)
attenz = attn['std'].data[0].cpu().numpy()
chosen = np.argmax(attenz, axis=1)
for r, ch in enumerate(chosen):
ch = mapping[ch]
if ch in b.vocab().stoi:
ind = b.vocab().stoi[ch]
out[r, ind] = max(out[r, ind], out[r, onmt.IO.UNK])
nsizes = [1 for _ in new_this_round]
for r in range(out.shape[0]):
out_scores[new_this_round[r]] += out[r, n] / float(len(self.back_translators))
# print('ae')
# print(nsizes)
going_after += time.time() - zaaa
prev = new_prev
prev_unks = new_prev_unks
# print('prev', prev)
prev_scores = new_prev_scores
# print("HIHFSD", prev_scores[2])
# new_sizes = []
# idxs = []
# return []
if threshold:
threshold = orig_threshold + orig_score
if verbose:
print('first ', feed_original )
print('between ', in_between)
print('going after', going_after)
print('total after', feed_original + in_between + going_after)
# return [x for x in sorted(out_scores.items(), key=lambda x: x[1], reverse=True)]
# threshold = -99999999
return [x for x in sorted(out_scores.items(), key=lambda x: x[1], reverse=True) if x[1] > threshold]
return []
key_order = list(out_scores.keys())
best = -9999999
for dec_idx, (to, back, mapper, back_mapper, encStates, context, mapping) in enumerate(zip(
self.to_translators, self.back_translators, self.vocab_mappers,
self.back_vocab_mappers, encoder_states, contexts, mappings)):
for i, next_ in zip(after_ids[1:], after_ids[2:]):
idx = [int(back_mapper[i])]
n = int(back_mapper[next_])
for key in key_order:
decStates = out_dec_states[key][dec_idx]
new_sizes = out_new_sizes[key]
out, decStates, attn = back.advance_states(encStates, context,
decStates, idx, new_sizes)
attenz = | |
# <gh_stars>0  -- scraper artifact (repo-star marker), not valid Python; kept as a comment
import os as _os
from isopy import core
import csv as csv
import datetime as dt
import numpy as np
import chardet
import openpyxl
import pyperclip
from openpyxl import load_workbook
import itertools
import io
__all__ = ['read_exp', 'read_csv', 'write_csv', 'read_xlsx', 'write_xlsx', 'read_clipboard',
'array_from_csv', 'array_from_xlsx', 'array_from_clipboard']
import isopy.checks
################
### read exp ###
################
class NeptuneData:
    """
    Container for the data returned by ``read_exp``.

    Attributes
    ----------
    info : dict
        Metadata read from the header of the export file.
    cycle : numpy.ndarray
        Cycle number for each measurement.
    time : list
        Timestamp for each measurement.
    measurements : dict
        Mapping of line number to an isopy array of measured values.
    """
    def __init__(self, info, cycle, time, measurements):
        self.info = info
        self.cycle = cycle
        self.time = time
        self.measurements = measurements

    def __repr__(self):
        # Added for debuggability; shows all four attributes.
        return (f"{type(self).__name__}(info={self.info!r}, cycle={self.cycle!r}, "
                f"time={self.time!r}, measurements={self.measurements!r})")
def read_exp(filename, rename = None) -> NeptuneData:
    """
    Load data from a Neptune/Triton export file.

    Parameters
    ----------
    filename : str, bytes, StringIO, BytesIO
        Path for file to be opened (``str``), the raw file contents
        (``bytes``), or a file-like object (``BytesIO``/``StringIO``).
    rename : dict, Callable, Optional
        For renaming keys in the analysed data. Useful for cases when the key is the mass rather
        than the isotope measured. If a dictionary is passed then every key present in the
        dictionary will be replaced by the associated value. A callable can also be passed that
        takes one key in the file and returns the new key.

    Returns
    -------
    neptune_data : NeptuneData
        An object containing the following attributes:
        * info - Dictionary containing the metadata included at the beginning of the file.
        * cycle - A list containing the cycle number for each measurement.
        * time - A list containing datetime objects for each measurement.
        * measurements - A dictionary containing an isopy array with the values for each line measured. Static measurements are always given as line ``1``.
        To extract e.g. only the isotope data from the measurement use ``neptune_data.measurements[line].copy(flavour_eq='isotope')``.

    Raises
    ------
    TypeError
        If *filename* is not one of the accepted types, or *rename* is
        neither a dict nor a callable.
    ValueError
        If the file contains no ``Cycle`` header row.
    """
    information = {}

    # Normalise the input into a single string with the file contents.
    # A plain str is always treated as a file path (to pass string contents
    # use StringIO).
    if isinstance(filename, str):
        with open(filename, 'rb') as fileio:
            file = fileio.read()
    elif isinstance(filename, bytes):
        file = filename
    elif isinstance(filename, io.BytesIO):
        filename.seek(0)
        file = filename.read()
    elif isinstance(filename, io.StringIO):
        filename.seek(0)
        file = filename.read()
    else:
        raise TypeError('filename is of unknown type')

    if isinstance(file, bytes):
        # BUGFIX: this condition used to be ``type(file is bytes)`` which is
        # always truthy, so StringIO input crashed inside chardet.detect().
        encoding = chardet.detect(file).get('encoding')
        file = file.decode(encoding)

    dialect = csv.Sniffer().sniff(file)
    csv_reader = csv.reader(io.StringIO(file), dialect=dialect)

    data = None
    for row in csv_reader:
        if not row:
            # Skip completely blank lines (csv yields an empty list for them).
            continue
        if ':' in row[0]:  # metadata line, e.g. "Analysis date: ..."
            name, value = row[0].split(':', 1)
            information[name] = value
        if row[0] == 'Cycle':
            # Header row of the measurement table; read until the '***' footer.
            data = _read_csv_ckeys(row, csv_reader, float_prefered=False, termination_symbol='***')
    if data is None:
        # BUGFIX: previously ``data`` was referenced unbound (NameError) when
        # the file had no 'Cycle' row; raise a clear error instead.
        raise ValueError("no 'Cycle' header row found in file")

    # Build the key renaming function from the ``rename`` argument.
    if rename is None:
        renamer = lambda name: name
    elif isinstance(rename, dict):
        renamer = lambda name: rename.get(name, name)
    elif callable(rename):
        renamer = rename
    else:
        raise TypeError('rename must be a dict or a callable function')

    data.pop("", None)  # discard the unnamed trailing column, if any
    cycle = np.array(data.pop('Cycle'), dtype='int')
    time = data.pop('Time')
    try:
        time = [dt.datetime.strptime(time[i], '%H:%M:%S:%f') for i in range(len(time))]
    except ValueError:
        try:
            time = [dt.datetime.fromtimestamp(time[i]).strftime('%H:%M:%S:%f') for i in range(len(time))]
        except Exception:
            # Fall back to the epoch timestamp when the time column is unusable.
            time = [dt.datetime.fromtimestamp(0).strftime('%H:%M:%S:%f') for i in range(len(time))]

    # Group columns by measurement line; keys look like "<line>:<isotope>",
    # keys without a line prefix belong to static line 1.
    measurements = {}
    for key in data:
        if ":" in key:
            line, keystring = key.split(":", 1)
            line = int(line)
        else:
            line = 1
            keystring = key
        keystring = renamer(keystring)
        measurements.setdefault(line, {})[isopy.keystring(keystring)] = data[key]
    measurements = {line: isopy.asarray(measurements[line]) for line in measurements}
    return NeptuneData(information, cycle, time, measurements)
######################
### read/write CSV ###
######################
def read_csv(filename, comment_symbol ='#', keys_in = 'c',
             float_preferred = False, encoding = None,
             dialect = None):
    """
    Load data from a csv file.

    Parameters
    ----------
    filename : str, bytes, StringIO, BytesIO
        Path for file to be opened (``str``), the raw file contents
        (``bytes``), or a file-like object (``BytesIO``/``StringIO``).
    comment_symbol : str, Default = '#'
        Rows starting with this string will be ignored.
    keys_in : {'c', 'r', None}, Default = 'c'
        If keys are given as the first value in each column pass ``c``. If keys are given as the
        first value in each row pass ``r``. If there are no keys pass ``None``.
    float_preferred : bool, Default = False
        If `True` all values will be converted to float if possible. If conversion fails the
        value will be left as a string.
    encoding
        Encoding of the file. If None the encoding will be guessed from the file.
    dialect
        Dialect of the csv file. If None the dialect will be guessed from the file.

    Returns
    -------
    data : dict or list
        Returns a dictionary for data with keys otherwise a list.
        Returns ``None`` when the file contains only comments/blank rows.
    """
    # NOTE(review): check_type only admits str/bytes here although the
    # docstring promises file-like objects too — confirm against
    # isopy.checks.check_type before widening.
    filename = isopy.checks.check_type('filename', filename, str, bytes)
    comment_symbol = isopy.checks.check_type('comment_symbol', comment_symbol, str)
    keys_in = isopy.checks.check_type('keys_in', keys_in, str, allow_none=True)
    encoding = isopy.checks.check_type('encoding', encoding, str, allow_none=True)
    dialect = isopy.checks.check_type('dialect', dialect, str, allow_none=True)

    # Normalise the input into a single string with the file contents.
    # A plain str is always treated as a file path.
    if isinstance(filename, str):
        with open(filename, 'rb') as fileio:
            file = fileio.read()
    elif isinstance(filename, bytes):
        file = filename
    elif isinstance(filename, io.BytesIO):
        filename.seek(0)
        file = filename.read()
    elif isinstance(filename, io.StringIO):
        filename.seek(0)
        file = filename.read()
    else:
        raise TypeError('filename is of unknown type')

    if isinstance(file, bytes):
        # BUGFIX: this condition used to be ``type(file is bytes)`` which is
        # always truthy, making StringIO input crash in chardet.detect().
        if encoding is None:
            encoding = chardet.detect(file).get('encoding')
        file = file.decode(encoding)

    # Guess the csv dialect unless one was supplied.
    if dialect is None:
        dialect = csv.Sniffer().sniff(file)
    # Create a reader object by converting the file string to a file-like object
    csv_reader = csv.reader(io.StringIO(file), dialect=dialect)

    # Skip leading blank/comment rows; the first real row starts the table.
    for row in csv_reader:
        row_data = [r.strip() for r in row]
        if row_data.count('') == len(row):
            # All the columns in this row are empty (or the row itself is
            # empty). Ignore it.
            continue
        if comment_symbol is not None and row[0][:len(comment_symbol)] == comment_symbol:
            # This is a comment so ignore it.
            continue
        if keys_in == 'c':
            data = _read_csv_ckeys(row_data, csv_reader, comment_symbol, float_prefered=float_preferred)
            data.pop("", None)
            return data
        elif keys_in == 'r':
            data = _read_csv_rkeys(row_data, csv_reader, comment_symbol, float_prefered=float_preferred)
            data.pop("", None)
            return data
        elif keys_in is None:
            return _read_csv_nokeys(row_data, csv_reader, comment_symbol, float_prefered=float_preferred)
        else:
            raise ValueError(f'Unknown value for "keys_in" {keys_in}')
def read_clipboard(comment_symbol ='#', keys_in = 'c',
                   float_preferred = False, dialect = None):
    """
    Load data from the current clipboard contents.

    The clipboard text is encoded to UTF-8 bytes and parsed exactly like a
    csv file via :func:`read_csv`.

    Parameters
    ----------
    comment_symbol : str, Default = '#'
        Rows starting with this string will be ignored.
    keys_in : {'c', 'r', None}, Default = 'c'
        ``'c'`` when keys head each column, ``'r'`` when keys head each row,
        ``None`` when there are no keys.
    float_preferred : bool, Default = False
        When True values convertible to float are returned as floats;
        everything else stays a string.
    dialect
        Dialect of the clipboard text; guessed when None.

    Returns
    -------
    data : dict or list
        A dictionary when the data has keys, otherwise a list.
    """
    clipboard_text = pyperclip.paste()
    raw_bytes = clipboard_text.encode('UTF-8')
    return read_csv(raw_bytes,
                    encoding='UTF-8',
                    comment_symbol=comment_symbol,
                    keys_in=keys_in,
                    float_preferred=float_preferred,
                    dialect=dialect)
def _read_csv_ckeys(first_row, reader, comment_symbol=None, termination_symbol=None, float_prefered = False):
    """Read a column-keyed csv table.

    Parameters
    ----------
    first_row : list of str
        The already-consumed header row naming each column.
    reader : csv reader
        Iterator over the remaining rows.
    comment_symbol : str, optional
        Rows starting with this string are skipped.
    termination_symbol : str, optional
        Reading stops at a row starting with this string.
    float_prefered : bool
        When True, values convertible to float are stored as floats.

    Returns
    -------
    dict
        Mapping of column key to the list of values in that column.

    Raises
    ------
    ValueError
        If a data row has fewer columns than the header.
    """
    keys = first_row  # the header row names each column
    values = [[] for _ in range(len(keys))]  # one value list per column
    for i, row in enumerate(reader):
        row = [r.strip() for r in row]
        # BUGFIX: test for empty rows first — a zero-length row previously
        # reached ``row[0]`` below and raised IndexError.
        if row.count('') == len(row):
            # Empty row, ignore
            continue
        if termination_symbol is not None and row[0][:len(termination_symbol)] == termination_symbol:
            # Stop reading data when the terminator marker starts a row
            break
        if comment_symbol is not None and row[0][:len(comment_symbol)] == comment_symbol:
            # Row is a comment, ignore
            continue
        if len(row) < len(keys):
            # There are not enough values in this row to give one to each key
            raise ValueError(f'Row {i} does not have enough columns')
        for j in range(len(keys)):
            value = row[j]
            if float_prefered:
                try:
                    value = float(value)
                except ValueError:
                    pass  # keep the original string when not numeric
            values[j].append(value)
    return dict(zip(keys, values))  # creates a dictionary from two lists
def _read_csv_rkeys(first_row, reader, comment_symbol=None, termination_symbol=None,
float_prefered=False):
data = {}
dlen = None
# Loop over the remaining rows
for i, row in enumerate(itertools.chain([first_row], reader)):
row = [r.strip() for r in row]
if termination_symbol is not None and row[0][:len(termination_symbol)] == termination_symbol: #.startswith(termination_symbol):
| |
# <reponame>szyminson/jsos-msg-lookup <gh_stars>10-100  -- scraper artifact, not valid Python; kept as a comment
#!/usr/bin/env python3
# coding=utf-8
import signal
import requests
import os
from dotenv import load_dotenv
import getpass
import schedule
import time
from datetime import datetime
import urllib.parse as urlparse
from urllib.parse import parse_qsl
from bs4 import BeautifulSoup
import imaplib
import smtplib
from email.message import EmailMessage
import base64
from Crypto.Cipher import AES
from Crypto.Hash import SHA256
from Crypto import Random
import pickle
# define Python user-defined exceptions
class Alert(Exception):
    """Common base class for the scheduler's alert exceptions."""
class ErrorAlert(Alert):
    """Raised after an error alert has been sent to the webhook."""
class WorkingAlert(Alert):
    """Raised after a working-again alert has been sent to the webhook."""
class NotSentAlert(Alert):
    """Raised when an alert could not be delivered to the webhook."""
class ErrorCountException(Exception):
    """Raised when the error count is non-zero after a successful msg lookup."""
def encrypt(key, source, encode=True):
    """Encrypt *source* bytes with AES-CBC.

    The AES key is derived by hashing *key* with SHA-256; a fresh random IV
    is generated per call and stored at the front of the ciphertext. PKCS#7
    style padding brings the plaintext to a block-size multiple. Returns a
    latin-1 base64 string when *encode* is True, raw bytes otherwise.
    """
    aes_key = SHA256.new(key).digest()      # proper-sized AES key from SHA-256
    iv = Random.new().read(AES.block_size)  # fresh random IV
    cipher = AES.new(aes_key, AES.MODE_CBC, iv)
    pad_len = AES.block_size - len(source) % AES.block_size
    padded = source + bytes([pad_len]) * pad_len
    blob = iv + cipher.encrypt(padded)      # IV prepended for use by decrypt()
    if encode:
        return base64.b64encode(blob).decode("latin-1")
    return blob
def decrypt(key, source, decode=True):
    """Decrypt data produced by :func:`encrypt`.

    Expects the IV at the start of the ciphertext and PKCS#7 style padding
    at the end. When *decode* is True, *source* is a latin-1 base64 string;
    otherwise raw bytes. Raises ValueError on invalid padding.
    """
    raw = base64.b64decode(source.encode("latin-1")) if decode else source
    aes_key = SHA256.new(key).digest()  # same SHA-256 key derivation as encrypt()
    iv = raw[:AES.block_size]           # IV was stored at the beginning
    cipher = AES.new(aes_key, AES.MODE_CBC, iv)
    plain = cipher.decrypt(raw[AES.block_size:])
    pad_len = plain[-1]                 # padding value sits at the end
    if plain[-pad_len:] != bytes([pad_len]) * pad_len:
        raise ValueError("Invalid padding...")
    return plain[:-pad_len]
def signal_handler(signum, frame):
    """Convert an OS signal (e.g. SIGINT) into SystemExit so the scheduler
    loop can shut down gracefully.

    Parameters renamed from ``signal, frame``: the original first parameter
    shadowed the imported ``signal`` module. Signal handlers are invoked
    positionally by the runtime, so the rename is backward-compatible.
    """
    raise SystemExit
def removeAccents(input_text):
    # Transliterate accented / non-Latin characters to ASCII so the result can
    # be embedded in an email address (see msg_lookup's author-email builder).
    # NOTE(review): 'ascii_replacements' is a redacted placeholder ('<KEY>');
    # str.maketrans requires both strings to be the same length, so as written
    # this raises ValueError at call time — restore the real replacement
    # string (one ASCII char per char of 'strange') before use.
    strange='ŮôῡΒძěἊἦëĐᾇόἶἧзвŅῑἼźἓʼnἐÿἈΌἢὶЁϋυŕŽŎŃğûλВὦėἜŤŨîᾪĝžἙâᾣÚκὔჯᾏᾢĠфĞὝŲŊŁČῐЙῤŌὭŏყἀхῦЧĎὍОуνἱῺèᾒῘᾘὨШūლἚύсÁóĒἍŷöὄЗὤἥბĔõὅῥŋБщἝξĢюᾫაπჟῸდΓÕűřἅгἰშΨńģὌΥÒᾬÏἴქὀῖὣᾙῶŠὟὁἵÖἕΕῨčᾈķЭτἻůᾕἫжΩᾶŇᾁἣჩαἄἹΖеУŹἃἠᾞåᾄГΠКíōĪὮϊὂᾱიżŦИὙἮὖÛĮἳφᾖἋΎΰῩŚἷРῈIJἁéὃσňİΙῠΚĸὛΪᾝᾯψÄᾭêὠÀღЫĩĈμΆᾌἨÑἑïოĵÃŒŸζჭᾼőΣŻçųøΤΑËņĭῙŘАдὗპŰἤცᾓήἯΐÎეὊὼΘЖᾜὢĚἩħĂыῳὧďТΗἺĬὰὡὬὫÇЩᾧñῢĻᾅÆßшδòÂчῌᾃΉᾑΦÍīМƒÜἒĴἿťᾴĶÊΊȘῃΟúχΔὋŴćŔῴῆЦЮΝΛῪŢὯнῬũãáἽĕᾗნᾳἆᾥйᾡὒსᾎĆрĀüСὕÅýფᾺῲšŵкἎἇὑЛვёἂΏθĘэᾋΧĉᾐĤὐὴιăąäὺÈФĺῇἘſგŜæῼῄĊἏØÉПяწДĿᾮἭĜХῂᾦωთĦлðὩზკίᾂᾆἪпἸиᾠώᾀŪāоÙἉἾρаđἌΞļÔβĖÝᾔĨНŀęᾤÓцЕĽŞὈÞუтΈέıàᾍἛśìŶŬȚijῧῊᾟάεŖᾨᾉςΡმᾊᾸįᾚὥηᾛġÐὓłγľмþᾹἲἔбċῗჰხοἬŗŐἡὲῷῚΫŭᾩὸùᾷĹēრЯĄὉὪῒᾲΜᾰÌœĥტ'
    ascii_replacements='<KEY>'
    translator=str.maketrans(strange,ascii_replacements)
    return input_text.translate(translator)
def add_smail_domain(smail_user):
    """Return *smail_user* with the student mail domain appended, unless the
    domain already appears somewhere in the address."""
    domain = '@student.pwr.edu.pl'
    if domain not in smail_user:
        return smail_user + domain
    return smail_user
def check_credentials(settings):
    """Verify SMAIL (IMAP) and JSOS (web OAuth) credentials.

    Returns ``{'srv_ok': {...}, 'creds_valid': {...}}`` where ``srv_ok``
    flags whether each service was reachable and ``creds_valid`` whether
    the supplied credentials logged in successfully.
    """
    # smail starts True and is falsified on failure; jsos starts False and is
    # only set True on a confirmed successful login redirect.
    creds_valid = {'jsos': False, 'smail': True}
    srv_ok = {'jsos': True, 'smail': True}
    # --- SMAIL: attempt an IMAP/SSL login ---
    try:
        check_srv = imaplib.IMAP4_SSL('imap.gmail.com', 993)
    except:
        srv_ok['smail'] = False
        creds_valid['smail'] = False
    if srv_ok['smail']:
        try:
            check_srv.login(settings['smail_user'], settings['smail_pass'])
        except:
            creds_valid['smail'] = False
        check_srv.logout()
    # --- JSOS: replay the OAuth login form and inspect the redirect URL ---
    try:
        s = requests.Session()
        login_url = 'https://jsos.pwr.edu.pl/index.php/site/loginAsStudent'
        r = s.get(login_url)
        redirect_url = r.url
        # OAuth request tokens are carried in the redirect's query string.
        parsed = urlparse.urlparse(redirect_url)
        tokens = dict(parse_qsl(parsed.query))
        post_url = 'https://oauth.pwr.edu.pl/oauth/authenticate?0-1.IFormSubmitListener-authenticateForm&' + urlparse.urlencode(tokens)
        post_static = {'authenticateButton': 'Zaloguj',
                    'oauth_callback_url': 'https://jsos.pwr.edu.pl/index.php/site/loginAsStudent',
                    'oauth_request_url': 'http://oauth.pwr.edu.pl/oauth/authenticate',
                    'oauth_symbol': 'EIS',
                    'id1_hf_0': ''}
        # NOTE(review): '<PASSWORD>' is a redacted placeholder key; presumably
        # this should read settings['jsos_pass'] — confirm and restore.
        post_credentials = {'username': settings['jsos_user'], 'password': settings['<PASSWORD>']}
        form_data = post_static
        form_data.update(tokens)
        form_data.update(post_credentials)
        r = s.post(post_url, form_data)
        # A redirect to the student index page means the login succeeded.
        if(r.url == 'https://jsos.pwr.edu.pl/index.php/student/indeksDane'):
            creds_valid['jsos'] = True
    except:
        srv_ok['jsos'] = False
    return {'srv_ok': srv_ok, 'creds_valid': creds_valid}
def msg_lookup(settings, errors, check_jsos_anyways):
    """Check for new JSOS messages and forward them by email and/or webhook.

    Flow: fire any pending alert, look for unread notification emails on
    SMAIL (unless *check_jsos_anyways* or test mode forces a JSOS check),
    scrape unread messages from the JSOS inbox, forward each one according
    to settings['mode'] ('normal' = email, 'webhook' = webhook, 'test' =
    both, limited to a few messages), then mark notification emails as seen
    and reset the error counter.
    """
    # May raise ErrorAlert / WorkingAlert / NotSentAlert (handled by run_scheduler).
    send_alert(settings['webhook'], errors)
    if (check_jsos_anyways or settings['mode'] == 'test'):
        # Skip the email check and force a JSOS lookup.
        unread = [True]
    else:
        # Look for unread JSOS notification emails on the student mailbox.
        check_srv = imaplib.IMAP4_SSL('imap.gmail.com', 993)
        check_srv.login(settings['smail_user'], settings['smail_pass'])
        check_srv.select('inbox')
        _status, unread = check_srv.search(None, '(SUBJECT "[Edukacja.CL] powiadomienie o otrzymaniu nowego komunikatu" UNSEEN)')
    if unread[0]:
        # Log in to JSOS through the OAuth form (same dance as check_credentials).
        s = requests.Session()
        login_url = 'https://jsos.pwr.edu.pl/index.php/site/loginAsStudent'
        r = s.get(login_url)
        redirect_url = r.url
        parsed = urlparse.urlparse(redirect_url)
        tokens = dict(parse_qsl(parsed.query))
        post_url = 'https://oauth.pwr.edu.pl/oauth/authenticate?0-1.IFormSubmitListener-authenticateForm&' + urlparse.urlencode(tokens)
        post_static = {'authenticateButton': 'Zaloguj',
                    'oauth_callback_url': 'https://jsos.pwr.edu.pl/index.php/site/loginAsStudent',
                    'oauth_request_url': 'http://oauth.pwr.edu.pl/oauth/authenticate',
                    'oauth_symbol': 'EIS',
                    'id1_hf_0': ''}
        # NOTE(review): '<PASSWORD>' is a redacted placeholder key; presumably
        # settings['jsos_pass'] — confirm and restore.
        post_credentials = {'username': settings['jsos_user'], 'password': settings['<PASSWORD>']}
        form_data = post_static
        form_data.update(tokens)
        form_data.update(post_credentials)
        r = s.post(post_url, form_data)
        # Scrape the inbox listing; in test mode take every row, otherwise
        # only rows marked unread.
        inbox_url = 'https://jsos.pwr.edu.pl/index.php/student/wiadomosci'
        r = s.get(inbox_url)
        soup1 = BeautifulSoup(r.text, 'html.parser')
        if settings['mode'] == 'test':
            msgs = soup1.find_all('tr')
        else:
            msgs = soup1.find_all('tr', class_='unread')
        sent_count = 0
        if msgs:
            # One SMTP session is reused for all forwarded messages.
            send_srv = smtplib.SMTP('smtp.gmail.com', 587)
            send_srv.starttls()
            send_srv.login(settings['smail_user'], settings['smail_pass'])
            for msg in msgs:
                msg_url = msg.get('data-url')
                # In test mode cap forwarding at 3 messages.
                if msg_url and not (settings['mode'] == 'test' and sent_count > 2):
                    # Fetch and parse the individual message page.
                    r = s.get('https://jsos.pwr.edu.pl' + msg_url)
                    soup2 = BeautifulSoup(r.text, 'html.parser')
                    msg_content = soup2.find('div', id='podgladWiadomosci')
                    msg_author = msg_content.find('span', class_='text-bold').text.replace('Nadawca - ', '')
                    msg_author_split = msg_author.split()
                    if len(msg_author_split) > 2:
                        # NOTE(review): '<EMAIL>' is a redacted placeholder —
                        # presumably a fallback sender address; restore it.
                        msg_author_email = '<EMAIL>'
                    else:
                        # Guess "name.surname@pwr.edu.pl" from the author's name.
                        msg_author_email = removeAccents(msg_author_split[1] + '.' + msg_author_split[0]).lower() + '@pwr.edu.pl'
                    msg_title = msg_content.find('h4').text.replace('Temat - ', '')
                    msg_body = msg_content.find('div').text.replace('Treść wiadomości', '').replace('\n', '', 5)
                    if settings['mode'] == 'webhook' or (settings['mode'] == 'test' and settings['webhook']):
                        # Forward as a JSON payload to the configured webhook.
                        msg_json = {
                            "type": "message",
                            "author": msg_author,
                            "author_email": msg_author_email,
                            "title": msg_title,
                            "body": msg_body,
                            "msg": msg_author + "\n*" + msg_title + "*\n\n" + msg_body + "\n___"
                        }
                        r = requests.post(settings['webhook'], json=msg_json)
                    if settings['mode'] == 'normal' or settings['mode'] == 'test':
                        # Forward as an email to the student mailbox.
                        email_msg = EmailMessage()
                        email_msg.set_content(msg_body)
                        email_msg['Subject'] = '[JSOS] ' + msg_title
                        # NOTE(review): '<<EMAIL>>' is a redacted placeholder
                        # for the From address — restore it.
                        email_msg['From'] = msg_author + ' <<EMAIL>>'
                        email_msg['Reply-to'] = msg_author_email
                        email_msg['To'] = settings['smail_user']
                        send_srv.send_message(email_msg)
                    sent_count += 1
            send_srv.quit()
        if check_jsos_anyways or settings['mode'] == 'test':
            log_msg = 'Emails: not-checked, JSOS messages: ' + str(sent_count)
        else:
            # Mark the notification emails as seen only after forwarding worked.
            for e_id in unread[0].split():
                check_srv.store(e_id, '+FLAGS', r'\Seen')
            log_msg = 'Emails: ' + str(len(unread[0].split())) + ', JSOS messages: ' + str(sent_count)
            check_srv.logout()
    else:
        log_msg = 'Emails: 0, JSOS messages: not-checked'
    now = datetime.now()
    log_msg = '[' + now.strftime("%d/%m/%Y %H:%M:%S") + '] ' + log_msg
    print(log_msg)
    # May raise ErrorCountException so run_scheduler resets the error state.
    clear_error_count(errors)
def webhook_alert(webhook, error_count):
    """Build the JSON payload for a webhook alert.

    A negative *error_count* signals recovery ("working again"); any other
    value produces an error alert mentioning the number of failed attempts.
    The *webhook* argument is unused here but kept for interface
    compatibility with callers.
    """
    if error_count < 0:
        title = "[jml] Working again."
        body = "It looks like your jml is fine and working again. Enjoy!"
    else:
        title = "[jml] Error alert!"
        body = (f"Your jml was not able to check or deliver new JSOS messages "
                f"for last {error_count} attempts. Perhaps one of pwr's services "
                f"(JSOS or SMAIL) is not working properly or your jml's host lost "
                f"an internet connection for a while. Go to "
                f"https://edukacja.pwr.wroc.pl/ to check messages manually.")
    payload = {
        "type": "alert",
        "author": "jml",
        "author_email": "None",
        "title": title,
        "body": body,
        "errors": error_count,
        "msg": f"*{title}*\n{body}\n___",
    }
    return payload
def send_alert(webhook, errors):
    """Post an error or recovery alert to the webhook when thresholds are met.

    Raises ErrorAlert after an error alert is sent, WorkingAlert after a
    recovery alert, and NotSentAlert when delivery fails. Does nothing when
    no webhook is configured.
    """
    if not webhook:
        return

    def deliver(count):
        # Post the payload; escalate to NotSentAlert on any delivery failure.
        try:
            requests.post(webhook, json=webhook_alert(webhook, count))
        except:
            print('Could not send the alert, check your internet connection.')
            raise NotSentAlert
        print('Alert sent.')

    if errors['count'] >= errors['alert_at']:
        print('Reached ' + str(errors['alert_at']) + ' errors in a row. Sending an alert...')
        deliver(errors['count'])
        raise ErrorAlert
    if errors['count'] < 0:
        print('Working fine again. Sending alert...')
        deliver(errors['count'])
        raise WorkingAlert
def clear_error_count(errors):
    """Raise ErrorCountException when the error state needs resetting after a
    successful message lookup (non-zero error count or an alert was sent)."""
    needs_reset = errors['alert_sent'] or errors['count'] > 0
    if needs_reset:
        raise ErrorCountException
def set_scheduler(settings, errors):
    """Rebuild the job schedule from scratch.

    Every minute: a mail-triggered lookup (check_jsos_anyways=False).
    Every 2 hours: a forced full JSOS check (check_jsos_anyways=True).
    """
    schedule.clear()
    schedule.every().minute.do(msg_lookup, settings, errors, False)
    schedule.every(2).hours.do(msg_lookup, settings, errors, True)
def run_scheduler(settings):
    """Run the scheduler loop forever, driving state changes via exceptions.

    The alert exceptions raised inside msg_lookup/send_alert/clear_error_count
    propagate up through schedule.run_pending() and are handled here; after
    each one the schedule is rebuilt so the jobs capture the updated error
    state.
    """
    # Shared mutable error state passed into every scheduled job.
    errors = {
        'count': 0,        # consecutive failed lookups
        'alert_at': 10,    # threshold at which an error alert is sent
        'alert_sent': False
    }
    print('Setting up scheduler...')
    set_scheduler(settings, errors)
    print('Scheduler up and running.')
    while True:
        try:
            schedule.run_pending()
            time.sleep(1)
        except SystemExit:
            # Raised by signal_handler on e.g. Ctrl+C.
            print("\njml stopping gracefully, bye!")
            raise SystemExit
        except ErrorAlert:
            # Error alert delivered: reset the counter, remember it was sent.
            errors['count'] = 0
            errors['alert_sent'] = True
            set_scheduler(settings, errors)
            continue
        except WorkingAlert:
            # Recovery alert delivered: back to a clean state.
            errors['count'] = 0
            errors['alert_sent'] = False
            set_scheduler(settings, errors)
        except NotSentAlert:
            # Delivery failed; keep the current counters and retry later.
            set_scheduler(settings, errors)
            continue
        except ErrorCountException:
            # Successful lookup after errors: -1 schedules a recovery alert
            # on the next run if an error alert had been sent.
            if(errors['alert_sent']):
                errors['count'] = -1
            else:
                errors['count'] = 0
            set_scheduler(settings, errors)
            continue
        except Exception as e:
            # Any other failure counts toward the alert threshold.
            print('An error has occured: ' + type(e).__name__)
            errors['count'] = errors['count'] + 1
            set_scheduler(settings, errors)
            continue
def main():
# Credentials' filename
creds_file = '.creds'
settings = {}
modes = {
'n': 'normal',
't': 'test',
'w': 'webhook'
}
if os.path.isfile('./.env'):
load_dotenv()
settings['jsos_user'] = os.getenv('JSOSU')
settings['jsos_pass'] = <PASSWORD>('J<PASSWORD>')
settings['smail_user'] = os.getenv('SMAILU')
settings['smail_pass'] = os.getenv('SMAILP')
settings['mode'] = os.getenv('JMLMODE')
if not settings['mode'] == modes['t'] and not settings['mode'] == modes['w']:
settings['mode'] = modes['n']
settings['webhook'] = os.getenv('JMLWEBHOOK')
if not settings['webhook']:
settings['webhook'] = False
# Check if credentials retrieved from ENV variables
if settings['jsos_user'] and settings['jsos_pass'] and settings['smail_user'] and settings['smail_pass']:
# Check credentials
settings['smail_user'] = add_smail_domain(settings['smail_user'])
print('Checking credentials...')
chck_cred = check_credentials(settings)
if chck_cred['srv_ok']['jsos'] and chck_cred['srv_ok']['smail']:
if chck_cred['creds_valid']['jsos'] and chck_cred['creds_valid']['smail']:
print('Credentials OK.')
else:
if not chck_cred['creds_valid']['jsos']:
print('Invalid JSOS credentials!')
if not chck_cred['creds_valid']['smail']:
print('Invalid SMAIL credentials!')
print('Bye.')
raise SystemExit
else:
if not chck_cred['srv_ok']['jsos']:
print('JSOS not accessible!')
if not chck_cred['srv_ok']['smail']:
print('Email server not accessible!')
print('Bye.')
raise SystemExit
else:
# Check if credentials file exists
if os.path.isfile(creds_file):
# Load credentials from the file
print('Found saved credentials.')
cred_key = getpass.getpass('Enter a key to decrypt: ')
cred_key = cred_key.encode()
cred_in = open(creds_file, 'rb')
creds = pickle.load(cred_in)
cred_in.close()
# Decrypt credentials
try:
settings['jsos_user'] = decrypt(cred_key, creds['jsosu']).decode()
settings['jsos_pass'] = decrypt(cred_key, creds['jsosp']).decode()
settings['smail_user'] = decrypt(cred_key, creds['smailu']).decode()
settings['smail_pass'] = decrypt(cred_key, creds['smailp']).decode()
if creds['webhook']:
settings['webhook'] = decrypt(cred_key, creds['webhook']).decode()
print('Credentials loaded!')
except ValueError:
print('Invalid key! Cannot decrypt credentials, bye!')
raise SystemExit
cred_key = None
else:
# Ask for credentials
settings['jsos_user'] = input('JSOS login: ')
settings['jsos_pass'] = getpass.getpass('JSOS password: ')
settings['smail_user'] = add_smail_domain(input('SMAIL login: '))
settings['smail_pass'] = getpass.getpass('SMAIL password: ')
# Check credentials
print('Checking credentials...')
chck_cred = check_credentials(settings)
if chck_cred['srv_ok']['jsos'] and chck_cred['srv_ok']['smail']:
if chck_cred['creds_valid']['jsos'] and chck_cred['creds_valid']['smail']:
print('Credentials OK.')
else:
if not chck_cred['creds_valid']['jsos']:
print('Invalid JSOS credentials!')
if not chck_cred['creds_valid']['smail']:
print('Invalid SMAIL credentials!')
print('Bye.')
raise SystemExit
else:
if not chck_cred['srv_ok']['jsos']:
print('JSOS not accessible!')
if not chck_cred['srv_ok']['smail']:
print('Email server not accessible!')
print('Bye.')
raise | |
<filename>oPB/gui/mainwindow.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This module is part of the opsi PackageBuilder
see: https://forum.opsi.org/viewforum.php?f=22
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
__author__ = '<NAME>'
__copyright__ = "Copyright 2013-2015, <NAME>"
__license__ = "MIT"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"
import os.path
import pathlib
import webbrowser
import platform
import subprocess
import datetime
#from subprocess import Popen, PIPE, STDOUT
from time import sleep
from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import QObject, QEvent, pyqtSignal, pyqtSlot
import oPB
from oPB.gui.helpviewer import Help
from oPB.core.confighandler import ConfigHandler
from oPB.core.tools import Helper, LogMixin
from oPB.gui.splash import Splash
from oPB.gui.utilities import ScriptFileValidator, EventMixin
from oPB.ui.ui import MainWindowBase, MainWindowUI
translate = QtCore.QCoreApplication.translate
class MainWindow(MainWindowBase, MainWindowUI, LogMixin, EventMixin):
showLogRequested = pyqtSignal()
windowMoved = pyqtSignal()
MaxRecentFiles = 5
    def __init__(self, parent):
        """
        Constructor of MainWindow

        :param parent: parent controller; provides the Qt item models
            (model_fields, model_dependencies, model_properties) and the
            slots this window connects to
        :return:
        """
        self._parent = parent
        print("\tgui/MainWindow parent: ", self._parent, " -> self: ", self) if oPB.PRINTHIER else None
        MainWindowBase.__init__(self)
        self.setupUi(self)
        # QAction list backing the "recent files" menu entries
        self.recentFileActions = []
        self.datamapper = None # QDataWidgetMapper object for field mapping
        self.datamapper_dependencies = None  # mapper for the dependencies tab
        self.datamapper_properties = None    # mapper for the properties tab
        self.helpviewer = Help(oPB.HLP_FILE, oPB.HLP_PREFIX, self)
        self.splash = Splash(self, translate("MainWindow", "Please wait..."))
        self.splash.close() # only for linux
        # wire mappers, signals and validators, then show the first record
        self.create_datamapper()
        self.connect_signals()
        self.connect_validators()
        self.reset_datamapper_and_display(0)
        self.fill_cmbDepProdID()
def init_recent(self):
"""Init recent files menu items"""
for i in range(MainWindow.MaxRecentFiles):
self.recentFileActions.append(
QAction(self, visible=False,
triggered=self.open_recent_project))
for i in range(MainWindow.MaxRecentFiles):
self.menuRecent.addAction(self.recentFileActions[i])
self._parent.startup.menuRecent.addAction(self.recentFileActions[i])
self.update_recent_file_actions()
def update_recent_file_actions(self):
"""Update recent file menu actions"""
files = ConfigHandler.cfg.recent
numRecentFiles = min(len(files), MainWindow.MaxRecentFiles)
for i in range(numRecentFiles):
text = "&%d %s" % (i + 1, self.stripped_name(files[i]))
self.recentFileActions[i].setText(text)
self.recentFileActions[i].setData(files[i])
self.recentFileActions[i].setVisible(True)
for j in range(numRecentFiles, MainWindow.MaxRecentFiles):
self.recentFileActions[j].setVisible(False)
def stripped_name(self, fullFileName):
"""
Remove any path component from ``fullFileName``
:param fullFileName: complete path of file or folder
:return: last path part
"""
return QtCore.QFileInfo(fullFileName).fileName()
def set_current_project(self, project):
"""
Insert current project into recent files list
:param project: project name
"""
files = ConfigHandler.cfg.recent
try:
files.remove(project)
except ValueError:
pass
files.insert(0, project)
del files[MainWindow.MaxRecentFiles:]
ConfigHandler.cfg.recent = files
for widget in QApplication.topLevelWidgets():
if isinstance(widget, MainWindow):
widget.update_recent_file_actions()
def create_datamapper(self):
self.logger.debug("Create data widget mapper for fields")
self.datamapper = QDataWidgetMapper(self)
self.datamapper.setModel(self._parent.model_fields)
self.datamapper.addMapping(self.lblPacketFolder, 0, b"text") # "text" property name must be added for QLabel to work with QDataWidgetmapper
self.datamapper.addMapping(self.inpProductId, 1)
self.datamapper.addMapping(self.inpProductName, 2)
self.datamapper.addMapping(self.editDesc, 3)
self.datamapper.addMapping(self.editAdvice, 4)
self.datamapper.addMapping(self.cmbProductType, 5)
self.datamapper.addMapping(self.inpProductVer, 6)
self.datamapper.addMapping(self.inpPackageVer, 7)
self.datamapper.addMapping(self.sldPrio, 8)
self.datamapper.addMapping(self.cmbLicense, 9)
self.datamapper.addMapping(self.inpScrSetup, 10)
self.datamapper.addMapping(self.inpScrUninstall, 11)
self.datamapper.addMapping(self.inpScrUpdate, 12)
self.datamapper.addMapping(self.inpScrAlways, 13)
self.datamapper.addMapping(self.inpScrOnce, 14)
self.datamapper.addMapping(self.inpScrCustom, 15)
self.datamapper.addMapping(self.inpScrUserLogin, 16)
self.datamapper.toFirst()
self.logger.debug("Create data widget mapper for dependencies")
self.datamapper_dependencies = QDataWidgetMapper(self)
self.datamapper_dependencies.setSubmitPolicy(QDataWidgetMapper.ManualSubmit)
self.datamapper_dependencies.setModel(self._parent.model_dependencies)
self.datamapper_dependencies.addMapping(self.cmbDepAction, 0)
self.datamapper_dependencies.addMapping(self.cmbDepProdID, 1)
self.datamapper_dependencies.addMapping(self.cmbDepReqAction, 2)
self.datamapper_dependencies.addMapping(self.cmbDepInstState, 3)
self.datamapper_dependencies.addMapping(self.cmbDepRequirement, 4)
self.datamapper_dependencies.toFirst()
self.logger.debug("Create data widget mapper for properties")
self.datamapper_properties = QDataWidgetMapper(self)
self.datamapper_properties.setSubmitPolicy(QDataWidgetMapper.ManualSubmit)
self.datamapper_properties.setModel(self._parent.model_properties)
self.datamapper_properties.addMapping(self.inpPropName, 0)
self.datamapper_properties.addMapping(self.cmbPropType, 1)
self.datamapper_properties.addMapping(self.cmbPropMulti, 2)
self.datamapper_properties.addMapping(self.cmbPropEdit, 3)
self.datamapper_properties.addMapping(self.inpPropDesc, 4)
self.datamapper_properties.addMapping(self.inpPropVal, 5)
self.datamapper_properties.addMapping(self.inpPropDef, 6)
self.datamapper_properties.addMapping(self.cmbPropDef, 6)
self.datamapper_properties.toFirst()
    def connect_signals(self):
        """Wire every menu action, button, table and controller signal of
        the main window. Online-only actions are redirected to the
        offline() notice when oPB.NETMODE == "offline"."""
        self.logger.debug("Connect signals")
        # --- file / tool menu actions
        self.actionNew.triggered.connect(self.new_project)
        self.actionOpen.triggered.connect(self.open_project)
        self.actionClose.triggered.connect(self._parent.project_close)
        self.actionQuit.triggered.connect(self.close)
        # self.actionSave.triggered.connect(self._parent.project_save)
        self.actionSave.triggered.connect(self.submit_main_and_save)
        self.actionShowLog.triggered.connect(self.showLogRequested.emit)
        self.actionSaveAs.triggered.connect(self.save_as)
        self.actionStartWinst.triggered.connect(self.start_winst)
        self.actionScriptEditor.triggered.connect(self.open_scripteditor)
        self.actionHelp.triggered.connect(lambda: self.helpviewer.showHelp(oPB.HLP_DST_INDEX, False))
        # update check can be disabled via the --noupdate command line flag
        if self._parent.args.noupdate == True:
            self.actionSearchForUpdates.setEnabled(False)
        else:
            self.actionSearchForUpdates.triggered.connect(self._parent.update_check)
        self.actionShowChangeLog.triggered.connect(lambda: self.helpviewer.showHelp(oPB.HLP_DST_CHANGELOG, False))
        self.actionAbout.triggered.connect(self.not_working)
        self.actionRefreshLogo.triggered.connect(self._parent.get_package_logos)
        self.actionMSIProductCode.triggered.connect(self.get_msiproductcode)
        self.actionAbout_Qt.triggered.connect(self.aboutqt)
        if oPB.NETMODE != "offline":
            # connect online menu action signals
            self.actionSetRights.triggered.connect(self._parent.do_setrights)
            self.actionInstall.triggered.connect(self.quickinstall)
            self.actionUpload.triggered.connect(self.upload)
            self.actionScheduler.triggered.connect(self._parent.scheduler_dialog)
            self.actionUninstall.triggered.connect(self._parent.quickuninstall_dialog)
            self.actionLockedProducts.triggered.connect(self._parent.lockedproducts_dialog)
            self.actionDeploy.triggered.connect(self._parent.deployagent_dialog)
            self.actionBundleCreation.triggered.connect(self._parent.bundle_dialog)
            self.actionDepotManager.triggered.connect(self._parent.depotmanager_dialog)
            self.actionImport.triggered.connect(self.package_import)
        else:
            # redirect the online-only menu actions to the offline notice
            # NOTE(review): actionDepotManager gets no offline redirect here,
            # unlike its siblings — looks like an oversight; confirm
            self.actionSetRights.triggered.connect(self.offline)
            self.actionInstall.triggered.connect(self.offline)
            self.actionUpload.triggered.connect(self.offline)
            self.actionScheduler.triggered.connect(self.offline)
            self.actionUninstall.triggered.connect(self.offline)
            self.actionLockedProducts.triggered.connect(self.offline)
            self.actionDeploy.triggered.connect(self.offline)
            self.actionBundleCreation.triggered.connect(self.offline)
            self.actionImport.triggered.connect(self.offline)
        # buttons
        # self.btnSave.clicked.connect(self._parent.project_save)
        self.btnSave.clicked.connect(self.submit_main_and_save)
        self.btnChangelogEdit.clicked.connect(self._parent.show_changelogeditor)
        self.btnShowScrStruct.clicked.connect(self._parent.show_scripttree)
        self.btnHelpPacket.clicked.connect(lambda: self.helpviewer.showHelp(oPB.HLP_DST_TABPACKET, False))
        self.btnHelpDependencies.clicked.connect(lambda: self.helpviewer.showHelp(oPB.HLP_DST_TABDEPEND, False))
        self.btnHelpProperties.clicked.connect(lambda: self.helpviewer.showHelp(oPB.HLP_DST_TABPROP, False))
        # script select / delete buttons: second argument False means "remove"
        self.btnScrSetup.clicked.connect(lambda: self.select_script_dialog("setup"))
        self.btnScrUninstall.clicked.connect(lambda: self.select_script_dialog("uninstall"))
        self.btnScrUpdate.clicked.connect(lambda: self.select_script_dialog("update"))
        self.btnScrAlways.clicked.connect(lambda: self.select_script_dialog("always"))
        self.btnScrOnce.clicked.connect(lambda: self.select_script_dialog("once"))
        self.btnScrCustom.clicked.connect(lambda: self.select_script_dialog("custom"))
        self.btnScrUserLogin.clicked.connect(lambda: self.select_script_dialog("userlogin"))
        self.btnScrSetupDel.clicked.connect(lambda: self.select_script_dialog("setup", False))
        self.btnScrUninstallDel.clicked.connect(lambda: self.select_script_dialog("uninstall", False))
        self.btnScrUpdateDel.clicked.connect(lambda: self.select_script_dialog("update", False))
        self.btnScrAlwaysDel.clicked.connect(lambda: self.select_script_dialog("always", False))
        self.btnScrOnceDel.clicked.connect(lambda: self.select_script_dialog("once", False))
        self.btnScrCustomDel.clicked.connect(lambda: self.select_script_dialog("custom", False))
        self.btnScrUserLoginDel.clicked.connect(lambda: self.select_script_dialog("userlogin", False))
        # the edit buttons share one slot; open_scripteditor inspects sender()
        self.btnScrSetupEdit.clicked.connect(self.open_scripteditor)
        self.btnScrUninstallEdit.clicked.connect(self.open_scripteditor)
        self.btnScrUpdateEdit.clicked.connect(self.open_scripteditor)
        self.btnScrAlwaysEdit.clicked.connect(self.open_scripteditor)
        self.btnScrOnceEdit.clicked.connect(self.open_scripteditor)
        self.btnScrCustomEdit.clicked.connect(self.open_scripteditor)
        self.btnScrUserLoginEdit.clicked.connect(self.open_scripteditor)
        if oPB.NETMODE != "offline":
            self.btnBuild.clicked.connect(self._parent.project_build)
            self.btnInstall.clicked.connect(lambda: self._parent.do_install(depot = self._parent.query_depot(parent = self)))
            self.btnInstSetup.clicked.connect(lambda: self._parent.do_installsetup(depot = self._parent.query_depot(parent = self)))
            self.btnUninstall.clicked.connect(lambda: self._parent.do_uninstall(depot = self._parent.query_depot(parent = self)))
        else:
            self.btnBuild.clicked.connect(self.offline)
            self.btnInstall.clicked.connect(self.offline)
            self.btnInstSetup.clicked.connect(self.offline)
            self.btnUninstall.clicked.connect(self.offline)
        self.btnDevFolder.clicked.connect(self.open_project_folder)
        # dependency / property table buttons
        self.btnDepAdd.clicked.connect(self.add_dependency)
        self.btnDepEdit.clicked.connect(self.edit_dependency)
        self.btnDepModify.clicked.connect(self.submit_dependencies)
        self.btnDepDelete.clicked.connect(lambda a: self._parent.remove_dependency(self.tblDependencies.selectionModel().currentIndex().row()))
        self.btnPropAdd.clicked.connect(self.add_property)
        self.btnPropEdit.clicked.connect(self.edit_property)
        self.btnPropModify.clicked.connect(self.submit_properties)
        self.btnPropDelete.clicked.connect(lambda a: self._parent.remove_property(self.tblProperties.selectionModel().currentIndex().row()))
        self.btnPropRead.clicked.connect(self._parent.get_properties_from_scripts)
        # table models and selection handling
        self.tblProperties.setModel(self._parent.model_properties)
        self.tblDependencies.setModel(self._parent.model_dependencies)
        self.tblDependencies.selectionModel().selectionChanged.connect(self.update_dependency_fields)
        self.tblProperties.selectionModel().selectionChanged.connect(self.update_property_fields)
        # controller -> view signals
        self._parent.modelDataUpdated.connect(self.reset_datamapper_and_display)
        self._parent.msgSend.connect(self.set_statbar_text, type=QtCore.Qt.DirectConnection)
        self._parent.processingEnded.connect(self.set_button_state)
        self._parent.progressChanged.connect(self.splash.incProgress, type=QtCore.Qt.DirectConnection)
        self._parent.processingEnded.connect(self.splash.close)
        self._parent.processingStarted.connect(self.splash.show_)
        self._parent.projectImageLoaded.connect(self.set_project_logo)
        self._parent.projectLoaded.connect(self.set_current_project)
        self._parent.projectLoaded.connect(self.set_button_state)
        # connect event filter to tables
        self.tblFilter = TableKeyEventFilter()
        self.tblDependencies.installEventFilter(self.tblFilter)
        self.tblProperties.installEventFilter(self.tblFilter)
        # F2 opens the row editor, mirroring the Edit buttons
        TableKeyEventFilter.actiondict[(self.tblDependencies, QtCore.Qt.Key_F2)] = self.edit_dependency
        TableKeyEventFilter.actiondict[(self.tblProperties, QtCore.Qt.Key_F2)] = self.edit_property
def connect_validators(self):
self.logger.debug("Connect validators to fields")
# set validators
if ConfigHandler.cfg.age == "True":
self.set_regex_validator(self.inpProductId, oPB.OPB_PRODUCT_ID_REGEX_NEW)
self.set_regex_validator(self.cmbDepProdID, oPB.OPB_PRODUCT_ID_REGEX_NEW)
self.set_regex_validator(self.inpPropName, oPB.OPB_PROPERTY_REGEX_NEW)
else:
self.set_regex_validator(self.inpProductId, oPB.OPB_PRODUCT_ID_REGEX_OLD)
self.set_regex_validator(self.cmbDepProdID, oPB.OPB_PRODUCT_ID_REGEX_OLD)
self.set_regex_validator(self.inpPropName, oPB.OPB_PROPERTY_REGEX_OLD)
# product id
self.inpProductId.textChanged.connect(self.check_state)
self.inpProductId.textChanged.emit(self.inpProductId.text())
self.inpProductId.textChanged.connect(self.set_button_state)
self.cmbDepProdID.editTextChanged.connect(self.check_state)
self.cmbDepProdID.editTextChanged.emit(self.cmbDepProdID.currentText())
# property names
self.inpPropName.textChanged.connect(self.check_state)
self.inpPropName.textChanged.emit(self.inpPropName.text())
# product version
self.set_regex_validator(self.inpProductVer, oPB.OPB_PRODUCT_VER_REGEX)
self.inpProductVer.textChanged.connect(self.check_state)
self.inpProductVer.textChanged.emit(self.inpProductVer.text())
self.inpProductVer.textChanged.connect(self.set_button_state)
# package version
self.set_regex_validator(self.inpPackageVer, oPB.OPB_PACKAGE_VER_REGEX)
self.inpPackageVer.textChanged.connect(self.check_state)
self.inpPackageVer.textChanged.emit(self.inpPackageVer.text())
self.inpPackageVer.textChanged.connect(self.set_button_state)
# script validator
self.set_scriptfile_validator(self.inpScrSetup)
self.inpScrSetup.textChanged.connect(self.check_state)
self.inpScrSetup.textChanged.emit(self.inpScrSetup.text())
self.set_scriptfile_validator(self.inpScrUninstall)
self.inpScrUninstall.textChanged.connect(self.check_state)
self.inpScrUninstall.textChanged.emit(self.inpScrUninstall.text())
self.set_scriptfile_validator(self.inpScrUpdate)
self.inpScrUpdate.textChanged.connect(self.check_state)
self.inpScrUpdate.textChanged.emit(self.inpScrUpdate.text())
self.set_scriptfile_validator(self.inpScrAlways)
self.inpScrAlways.textChanged.connect(self.check_state)
self.inpScrAlways.textChanged.emit(self.inpScrAlways.text())
self.set_scriptfile_validator(self.inpScrOnce)
self.inpScrOnce.textChanged.connect(self.check_state)
self.inpScrOnce.textChanged.emit(self.inpScrOnce.text())
self.set_scriptfile_validator(self.inpScrCustom)
self.inpScrCustom.textChanged.connect(self.check_state)
self.inpScrCustom.textChanged.emit(self.inpScrCustom.text())
self.set_scriptfile_validator(self.inpScrUserLogin)
self.inpScrUserLogin.textChanged.connect(self.check_state)
self.inpScrUserLogin.textChanged.emit(self.inpScrUserLogin.text())
def fill_cmbDepProdID(self):
"""Fill combobox with values from opsi_depot share"""
self.cmbDepProdID.clear()
if oPB.NETMODE != "offline":
try:
self.logger.debug("Retrieve active package list from depot")
subpath = "\\\\" + ConfigHandler.cfg.opsi_server + "\\" + oPB.DEPOTSHARE_BASE
subdirs = Helper.get_subdirlist(subpath)
subdirs.sort()
for elem in subdirs:
self.cmbDepProdID.addItem(elem)
except:
pass
    @pyqtSlot()
    def aboutqt(self):
        """Show Qt's About dialog via the controller's msgbox helper
        (MS_ABOUTQT message type; the text argument is unused)."""
        self._parent.msgbox("", oPB.MsgEnum.MS_ABOUTQT, self)
@pyqtSlot()
def not_working(self):
"""Show a short "Not working" message"""
self._parent.msgbox(translate("MainWindow", "Sorry, this function doesn't work at the moment!"),
oPB.MsgEnum.MS_ALWAYS, self)
@pyqtSlot()
def offline(self):
"""Show offline message"""
self._parent.msgbox(translate("MainWindow", "You are working in offline mode. Functionality not available!"),
oPB.MsgEnum.MS_ALWAYS, self)
@pyqtSlot()
def get_msiproductcode(self):
"""Show MSI product code of individual MSI file"""
self.logger.debug("Show MSI product code " + platform.system())
if platform.system() in ["Windows"]:
ext = "MSI Package (*.msi)"
msi = QFileDialog.getOpenFileName(self, translate("MainWindow", "Choose package file"),
"", ext)
if not msi == ("", ""):
self.logger.debug("Selected package: " + msi[0])
prodcode = Helper.get_msi_property(msi[0])
self._parent.msgbox(translate("MainWindow", "Selected MSI: " + Helper.get_file_from_path(msi[0]) + "\n\n" + "Product Code: " + " " + prodcode), oPB.MsgEnum.MS_ALWAYS, self)
else:
self.logger.debug("Dialog aborted.")
else:
self._parent.msgbox(translate("MainWindow", "Function not available at the moment for system:" + " " + platform.system()), oPB.MsgEnum.MS_ALWAYS, self)
@pyqtSlot()
def start_winst(self):
"""Start opsi winst32"""
self.logger.debug("Start Winst under " + platform.system())
if platform.system() in ["Windows"]:
if os.path.exists(oPB.OPB_WINST_NT):
subprocess.call([oPB.OPB_WINST_NT, self.lblPacketFolder.text().replace("\\","/")])
else:
self._parent.msgbox(translate("MainWindow", "Local opsi-winst installation not found or client-agent not installed!"), oPB.MsgEnum.MS_ERR, self)
else:
self._parent.msgbox(translate("MainWindow", "Function not available at the moment for system:" + " " + platform.system()), oPB.MsgEnum.MS_ALWAYS, self)
@pyqtSlot()
def open_scripteditor(self):
"""
Open configured script editor.
Method reaction depends on calling widget (self.sender())
"""
self.logger.debug("Start scripteditor")
if ConfigHandler.cfg.editor_intern == "True":
self._parent.msgbox(translate("MainWindow", "Internal editor not available at the moment. Use external editor instead!"), oPB.MsgEnum.MS_ALWAYS, self)
self.actionSettings.trigger()
return
if os.path.exists(ConfigHandler.cfg.scripteditor):
path = Helper.concat_path_native(self.lblPacketFolder.text(), "CLIENT_DATA")
if self.sender() == self.btnScrSetupEdit:
if self.inpScrSetup.text().strip() == "":
script = "setup.opsiscript"
else:
script = self.inpScrSetup.text()
elif self.sender() == self.btnScrUninstallEdit:
if self.inpScrUninstall.text().strip() == "":
script = "uninstall.opsiscript"
else:
script = self.inpScrUninstall.text()
elif self.sender() == self.btnScrUpdateEdit:
if self.inpScrUpdate.text().strip() == "":
script = "update.opsiscript"
else:
script = self.inpScrUpdate.text()
elif self.sender() == self.btnScrAlwaysEdit:
if self.inpScrAlways.text().strip() == "":
script = "always.opsiscript"
else:
script = self.inpScrAlways.text()
elif self.sender() == self.btnScrOnceEdit:
if self.inpScrOnce.text().strip() == "":
script = "once.opsiscript"
else:
script = self.inpScrOnce.text()
elif self.sender() == self.btnScrCustomEdit:
if self.inpScrCustom.text().strip() == "":
script = "custom.opsiscript"
else:
script = self.inpScrCustom.text()
elif self.sender() == self.btnScrUserLoginEdit:
if self.inpScrUserLogin.text().strip() == "":
script = "userlogin.opsiscript"
else:
script = self.inpScrUserLogin.text()
elif self.sender() == self.actionScriptEditor:
script = "new.opsiscript"
# script editor from menu
if path != "" and script != "":
path = Helper.concat_path_native(path, script)
self.logger.debug("Opening script: " + path)
# construct calling array
# first add basic scripteditor executable
cmd = [ConfigHandler.cfg.scripteditor]
# if there are options, split and append them
if (ConfigHandler.cfg.editor_options).strip() != "":
for part in (ConfigHandler.cfg.editor_options).split():
cmd.append(part)
# if attach direct is true, combine last option with script file path
if ConfigHandler.cfg.editor_attachdirect == "True":
cmd[-1] = cmd[-1] + path
# or else, append as separate value to list
else:
cmd.append(path)
else:
cmd.append(path)
self.logger.debug("Executing subprocess: " + str(cmd))
proc | |
<gh_stars>1-10
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from conductor.client.http.api_client import ApiClient
class QueueAdminResourceApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def names(self, **kwargs): # noqa: E501
"""Get Queue Names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.names(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, str)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.names_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.names_with_http_info(**kwargs) # noqa: E501
return data
def names_with_http_info(self, **kwargs): # noqa: E501
"""Get Queue Names # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.names_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, str)
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method names" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/queue/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, str)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def size1(self, **kwargs): # noqa: E501
"""Get the queue length # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.size1(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, int)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.size1_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.size1_with_http_info(**kwargs) # noqa: E501
return data
def size1_with_http_info(self, **kwargs): # noqa: E501
"""Get the queue length # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.size1_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: dict(str, int)
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method size1" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/queue/size', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, int)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update1(self, body, workflow_id, task_ref_name, status, **kwargs): # noqa: E501
"""Publish a message in queue to mark a wait task as completed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update1(body, workflow_id, task_ref_name, status, async_req=True)
>>> result = thread.get()
:param async_req bool
:param dict(str, object) body: (required)
:param str workflow_id: (required)
:param str task_ref_name: (required)
:param str status: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update1_with_http_info(body, workflow_id, task_ref_name, status, **kwargs) # noqa: E501
else:
(data) = self.update1_with_http_info(body, workflow_id, task_ref_name, status, **kwargs) # noqa: E501
return data
def update1_with_http_info(self, body, workflow_id, task_ref_name, status, **kwargs):  # noqa: E501
    """Publish a message in queue to mark a wait task as completed.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update1_with_http_info(body, workflow_id, task_ref_name, status, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param dict(str, object) body: (required)
    :param str workflow_id: (required)
    :param str task_ref_name: (required)
    :param str status: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['body', 'workflow_id', 'task_ref_name', 'status',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    # Fold **kwargs into params, rejecting anything the endpoint
    # does not understand.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update1" % key
            )
        params[key] = val
    del params['kwargs']

    # All four positional parameters are mandatory and must not be None.
    for s_required in ('body', 'workflow_id', 'task_ref_name', 'status'):
        if params.get(s_required) is None:
            raise ValueError(
                "Missing the required parameter `%s` when calling `update1`"
                % s_required)  # noqa: E501

    collection_formats = {}

    # Map python argument names onto the URL path placeholders.
    path_params = {}
    for s_py, s_api in (('workflow_id', 'workflowId'),
                        ('task_ref_name', 'taskRefName'),
                        ('status', 'status')):
        if s_py in params:
            path_params[s_api] = params[s_py]  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    body_params = params.get('body')
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/api/queue/update/{workflowId}/{taskRefName}/{status}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_by_task_id(self, body, workflow_id, task_id, status, **kwargs):  # noqa: E501
    """Publish a message in queue to mark a wait task (by taskId) as completed.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_by_task_id(body, workflow_id, task_id, status, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param dict(str, object) body: (required)
    :param str workflow_id: (required)
    :param str task_id: (required)
    :param str status: (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info helper already returns either the response
    # data or the request thread, so both branches collapse into one call.
    return self.update_by_task_id_with_http_info(
        body, workflow_id, task_id, status, **kwargs)  # noqa: E501
def update_by_task_id_with_http_info(self, body, workflow_id, task_id, status, **kwargs): # noqa: E501
"""Publish a message in queue to mark a wait task (by taskId) as completed. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_by_task_id_with_http_info(body, workflow_id, task_id, status, async_req=True)
>>> result = thread.get()
:param async_req bool
:param dict(str, object) body: (required)
:param str workflow_id: (required)
:param str task_id: (required)
:param str status: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'workflow_id', 'task_id', 'status'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_by_task_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `update_by_task_id`") # noqa: E501
# verify the required parameter | |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The GaussianProcess distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import warnings
# Dependency imports
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.distributions import distribution
from tensorflow_probability.python.distributions import kullback_leibler
from tensorflow_probability.python.distributions import mvn_linear_operator
from tensorflow_probability.python.distributions import normal
from tensorflow_probability.python.internal import distribution_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import reparameterization
from tensorflow_probability.python.internal import tensor_util
__all__ = [
'GaussianProcess',
]
def _add_diagonal_shift(matrix, shift):
  """Returns `matrix` with `shift` added to its diagonal, other entries kept."""
  shifted_diag = tf.linalg.diag_part(matrix) + shift
  return tf.linalg.set_diag(matrix, shifted_diag, name='add_diagonal_shift')
class GaussianProcess(distribution.Distribution):
"""Marginal distribution of a Gaussian process at finitely many points.
A Gaussian process (GP) is an indexed collection of random variables, any
finite collection of which are jointly Gaussian. While this definition applies
to finite index sets, it is typically implicit that the index set is infinite;
in applications, it is often some finite dimensional real or complex vector
space. In such cases, the GP may be thought of as a distribution over
(real- or complex-valued) functions defined over the index set.
Just as Gaussian distributions are fully specified by their first and second
moments, a Gaussian process can be completely specified by a mean and
covariance function. Let `S` denote the index set and `K` the space in which
each indexed random variable takes its values (again, often R or C). The mean
function is then a map `m: S -> K`, and the covariance function, or kernel, is
a positive-definite function `k: (S x S) -> K`. The properties of functions
drawn from a GP are entirely dictated (up to translation) by the form of the
kernel function.
This `Distribution` represents the marginal joint distribution over function
values at a given finite collection of points `[x[1], ..., x[N]]` from the
index set `S`. By definition, this marginal distribution is just a
multivariate normal distribution, whose mean is given by the vector
`[ m(x[1]), ..., m(x[N]) ]` and whose covariance matrix is constructed from
pairwise applications of the kernel function to the given inputs:
```none
| k(x[1], x[1]) k(x[1], x[2]) ... k(x[1], x[N]) |
| k(x[2], x[1]) k(x[2], x[2]) ... k(x[2], x[N]) |
| ... ... ... |
| k(x[N], x[1]) k(x[N], x[2]) ... k(x[N], x[N]) |
```
For this to be a valid covariance matrix, it must be symmetric and positive
definite; hence the requirement that `k` be a positive definite function
(which, by definition, says that the above procedure will yield PD matrices).
We also support the inclusion of zero-mean Gaussian noise in the model, via
the `observation_noise_variance` parameter. This augments the generative model
to
```none
f ~ GP(m, k)
(y[i] | f, x[i]) ~ Normal(f(x[i]), s)
```
where
* `m` is the mean function
* `k` is the covariance kernel function
* `f` is the function drawn from the GP
* `x[i]` are the index points at which the function is observed
* `y[i]` are the observed values at the index points
* `s` is the scale of the observation noise.
Note that this class represents an *unconditional* Gaussian process; it does
not implement posterior inference conditional on observed function
evaluations. This class is useful, for example, if one wishes to combine a GP
prior with a non-conjugate likelihood using MCMC to sample from the posterior.
#### Mathematical Details
The probability density function (pdf) is a multivariate normal whose
parameters are derived from the GP's properties:
```none
pdf(x; index_points, mean_fn, kernel) = exp(-0.5 * y) / Z
K = (kernel.matrix(index_points, index_points) +
(observation_noise_variance + jitter) * eye(N))
y = (x - mean_fn(index_points))^T @ K @ (x - mean_fn(index_points))
Z = (2 * pi)**(.5 * N) |det(K)|**(.5)
```
where:
* `index_points` are points in the index set over which the GP is defined,
* `mean_fn` is a callable mapping the index set to the GP's mean values,
* `kernel` is `PositiveSemidefiniteKernel`-like and represents the covariance
function of the GP,
* `observation_noise_variance` represents (optional) observation noise.
* `jitter` is added to the diagonal to ensure positive definiteness up to
machine precision (otherwise Cholesky-decomposition is prone to failure),
* `eye(N)` is an N-by-N identity matrix.
#### Examples
##### Draw joint samples from a GP prior
```python
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
psd_kernels = tfp.positive_semidefinite_kernels
num_points = 100
# Index points should be a collection (100, here) of feature vectors. In this
# example, we're using 1-d vectors, so we just need to reshape the output from
# np.linspace, to give a shape of (100, 1).
index_points = np.expand_dims(np.linspace(-1., 1., num_points), -1)
# Define a kernel with default parameters.
kernel = psd_kernels.ExponentiatedQuadratic()
gp = tfd.GaussianProcess(kernel, index_points)
samples = gp.sample(10)
# ==> 10 independently drawn, joint samples at `index_points`
noisy_gp = tfd.GaussianProcess(
kernel=kernel,
index_points=index_points,
observation_noise_variance=.05)
noisy_samples = noisy_gp.sample(10)
# ==> 10 independently drawn, noisy joint samples at `index_points`
```
##### Optimize kernel parameters via maximum marginal likelihood.
```python
# Suppose we have some data from a known function. Note the index points in
# general have shape `[b1, ..., bB, f1, ..., fF]` (here we assume `F == 1`),
# so we need to explicitly consume the feature dimensions (just the last one
# here).
f = lambda x: np.sin(10*x[..., 0]) * np.exp(-x[..., 0]**2)
observed_index_points = np.expand_dims(np.random.uniform(-1., 1., 50), -1)
# Squeeze to take the shape from [50, 1] to [50].
observed_values = f(observed_index_points)
# Define a kernel with trainable parameters.
kernel = psd_kernels.ExponentiatedQuadratic(
amplitude=tf.get_variable('amplitude', shape=(), dtype=np.float64),
length_scale=tf.get_variable('length_scale', shape=(), dtype=np.float64))
gp = tfd.GaussianProcess(kernel, observed_index_points)
neg_log_likelihood = -gp.log_prob(observed_values)
optimize = tf.train.AdamOptimizer().minimize(neg_log_likelihood)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for i in range(1000):
_, neg_log_likelihood_ = sess.run([optimize, neg_log_likelihood])
if i % 100 == 0:
print("Step {}: NLL = {}".format(i, neg_log_likelihood_))
print("Final NLL = {}".format(neg_log_likelihood_))
```
"""
def __init__(self,
kernel,
index_points=None,
mean_fn=None,
observation_noise_variance=0.,
jitter=1e-6,
validate_args=False,
allow_nan_stats=False,
name='GaussianProcess'):
"""Instantiate a GaussianProcess Distribution.
Args:
kernel: `PositiveSemidefiniteKernel`-like instance representing the
GP's covariance function.
index_points: `float` `Tensor` representing finite (batch of) vector(s) of
points in the index set over which the GP is defined. Shape has the form
`[b1, ..., bB, e, f1, ..., fF]` where `F` is the number of feature
dimensions and must equal `kernel.feature_ndims` and `e` is the number
(size) of index points in each batch. Ultimately this distribution
corresponds to a `e`-dimensional multivariate normal. The batch shape
must be broadcastable with `kernel.batch_shape` and any batch dims
yielded by `mean_fn`.
mean_fn: Python `callable` that acts on `index_points` to produce a (batch
of) vector(s) of mean values at `index_points`. Takes a `Tensor` of
shape `[b1, ..., bB, f1, ..., fF]` and returns a `Tensor` whose shape is
broadcastable with `[b1, ..., bB]`. Default value: `None` implies
constant zero function.
observation_noise_variance: `float` `Tensor` representing (batch of)
scalar variance(s) of the noise in the Normal likelihood
distribution of the model. If batched, the batch shape must be
broadcastable with the shapes of all other batched parameters
(`kernel.batch_shape`, `index_points`, etc.).
Default value: `0.`
jitter: `float` scalar `Tensor` added to the diagonal of the covariance
matrix to ensure positive definiteness of the covariance matrix.
Default value: `1e-6`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
Default value: `False`.
allow_nan_stats: Python `bool`, default `True`. When `True`,
statistics (e.g., mean, mode, variance) use the value "`NaN`" to
indicate the result is undefined. When `False`, an exception is raised
if one or more of the statistic's batch members are undefined.
Default value: `False`.
name: | |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Implement the explicit finite difference method to calculate the price of
various options
@author: ucaiado
Created on 07/03/2016
"""
# import libraries
import matplotlib.pylab as plt
import math
import numpy as np
import pandas as pd
from scipy import stats
import seaborn as sns
import time
from scipy.optimize import minimize
'''
Begin help functions
'''
class STABILITY_ERROR(Exception):
    '''
    Error raised by the Grid constructor when the requested number of time
    steps would break the stability bound of the explicit scheme
    '''
    pass
class UNSUCCESSFUL_ERROR(Exception):
    '''
    Error raised when the static hedging minimization does not succeed
    '''
    pass
def get_d1_and_d2(f_St, f_sigma, f_time, f_r, f_K):
    '''
    Return the (d1, d2) pair used by the Digital and call option pricers

    :param f_St: float. asset price
    :param f_sigma: float. volatility
    :param f_time: float. time in years
    :param f_r: float. risk-free interest rate
    :param f_K: float. strike
    '''
    f_sig_sqrt_t = f_sigma * f_time ** 0.5
    # NOTE(review): the textbook Black-Scholes d2 uses "+ (r - sigma^2/2)*t";
    # this implementation subtracts that term -- presumably a deliberate
    # convention of the backward-time grid, but worth confirming.
    f_d2 = (np.log(f_St / f_K) - (f_r - 0.5 * f_sigma ** 2) * f_time)
    f_d2 /= f_sig_sqrt_t
    f_d1 = f_d2 + f_sig_sqrt_t
    return f_d1, f_d2
def bilinear_interpolation(f_S, f_time, df):
    '''
    Interpolate a value from the simulations matrix at the point
    (f_S, f_time) using bilinear interpolation

    :param f_S: float. asset price (row dimension of df)
    :param f_time: float. time in years (column dimension of df)
    :param df: dataframe. values indexed by asset price and time
    '''
    # bracketing grid lines around the requested point
    f_t_lo = df.columns[df.columns < f_time][-1]
    f_t_hi = df.columns[df.columns >= f_time][0]
    f_s_lo = df.index[df.index < f_S][-1]
    f_s_hi = df.index[df.index >= f_S][0]
    # corner values, each weighted by the area of the opposing rectangle
    na_vals = np.array([df.loc[f_s_lo, f_t_lo], df.loc[f_s_lo, f_t_hi],
                        df.loc[f_s_hi, f_t_hi], df.loc[f_s_hi, f_t_lo]])
    na_wgts = np.array([(f_s_hi - f_S) * (f_t_hi - f_time),
                        (f_s_hi - f_S) * (f_time - f_t_lo),
                        (f_S - f_s_lo) * (f_time - f_t_lo),
                        (f_S - f_s_lo) * (f_t_hi - f_time)])
    return (na_vals * na_wgts).sum() / na_wgts.sum()
'''
End help functions
'''
class GridNode(object):
    '''
    A single node of the finite-difference Grid, identified by its asset
    index (row) and its time index (column)
    '''

    def __init__(self, i, k):
        '''
        Initialize a GridNode object
        :param i: integer. the asset index
        :param k: integer. the time index
        '''
        # control variables: rows are asset steps, columns are time steps
        self.i = i
        self.k = k
        self.node_idx = '{:.0f},{:.0f}'.format(i, k)
        # pricing variables, filled in by the backward induction
        self.f_asset_value = 0
        self.f_option_value = 0
        self.f_delta = 0
        self.f_gamma = 0
        self.f_theta = 0
        # analytical counterparts, used in the comparison charts
        self.f_option_value_anlt = 0
        self.f_delta_anlt = 0
        self.f_gamma_anlt = 0

    def __str__(self):
        '''
        Return node_idx
        '''
        return self.node_idx

    def __repr__(self):
        '''
        Return the node_idx
        '''
        return self.node_idx

    def __eq__(self, other):
        '''
        Return whether the other node has the same node_idx
        :param other: node object. Node to be compared
        '''
        # Returning NotImplemented (instead of raising AttributeError on a
        # missing node_idx) lets comparison against non-GridNode objects
        # fall back to Python's default behavior.
        if not isinstance(other, GridNode):
            return NotImplemented
        return self.node_idx == other.node_idx

    def __ne__(self, other):
        '''
        Return whether the other node has a different node_idx
        :param other: node object. Node to be compared
        '''
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        '''
        Allow the node object to be used as a key in a hash table
        '''
        return self.node_idx.__hash__()
class Grid(object):
    '''
    A general representation of a Grid to be used by Derivative classes in the
    discretization of their domains
    '''

    def __init__(self, f_vol, f_value, f_time, i_nas, i_nts=None):
        '''
        Initialize a Grid object. Save all parameters as attributes
        :param f_vol: float. Volatility of the underlying instrument
        :param f_value: float. The reference value to calculate the grid length
        :param f_time: float. time to be used in the grid
        :param i_nas: integer. Number of asset steps
        :*param i_nts: integer. Number of time steps
        :raise STABILITY_ERROR: if i_nts exceeds the stability bound
        '''
        # use the volatility to guarantee stability of the explicit scheme
        self.f_nas = 1. * i_nas
        # "infinity" boundary: twice the reference value
        self.dS = 2 * f_value / self.f_nas
        # time-step bound as derived by Wilmott for stability
        self.dt = 0.9 / f_vol**2. / self.f_nas**2.
        self.i_nts = int(f_time/self.dt) + 1
        if i_nts:
            if i_nts <= self.i_nts:
                self.i_nts = i_nts-1
            else:
                s_err = 'The maximum of time steps is {}'
                raise STABILITY_ERROR(s_err.format(self.i_nts))
        self.dt = f_time / (self.i_nts * 1.)
        # build the nodes; the time index 0 is actually expiry (the final
        # condition of the backward induction)
        # NOTE: xrange was replaced by range so the grid also runs on Python 3
        self.grid = {}
        for k in range(int(self.i_nts) + 1):
            for i in range(int(self.f_nas) + 1):
                node = GridNode(i, k)
                self.grid[node] = node

    def __call__(self, i, k):
        '''
        Allow direct access to the nodes of the object
        :param i: integer. the asset index
        :param k: integer. the time index
        '''
        node_idx = GridNode(i, k)
        return self.grid[node_idx]

    def __str__(self):
        '''
        Return a string representation of the grid as a DataFrame of nodes
        '''
        df_rtn = pd.DataFrame(np.zeros([int(self.f_nas),
                                        int(self.i_nts)]))
        for k in range(int(self.i_nts) + 1):
            for i in range(int(self.f_nas) + 1):
                valid_node = self(i, k)
                # .loc keeps the label-based (and enlarging) semantics of
                # the removed DataFrame.ix accessor
                df_rtn.loc[i, k] = valid_node
        return str(df_rtn)
class Derivative(object):
'''
A general representation of a Derivative contract.
'''
def __init__(self, f_St, f_sigma, f_time, f_r, i_nas, f_K=None,
             i_nts=None, f_sigmam=None):
    '''
    Initialize a Derivative object. Save all parameters as attributes
    :param f_St: float. The price of the underline asset
    :param f_sigma: float. A non negative underline volatility
    :param f_time: float. The time remain until the expiration
    :param f_r: float. The free intereset rate
    :param i_nas: integer. Number of asset steps
    :*param f_K: float. The strike, if applyable
    :*param i_nts: integer. Number of time steps
    :*param f_sigmam: float. The minimum volatility observed. If it is set,
        f_sigma is treated as the maximum volatility observed
    '''
    self.s_name = "General"
    self.f_St = f_St
    self.f_K = f_K
    self.f_r = f_r
    self.f_sigma = f_sigma
    self.use_UV = False
    if f_sigmam:
        # uncertain-volatility mode: keep the band [f_sigmam, f_sigmaM]
        # and price with the mid volatility
        self.f_sigmaM = f_sigma
        self.f_sigmam = f_sigmam
        self.f_sigma = (f_sigma + f_sigmam) / 2.
        self.use_UV = True
    self.f_time = f_time
    # the grid is sized with the original (maximum) volatility so the
    # stability bound holds in both modes
    self.grid = Grid(f_vol=f_sigma, f_value=f_St, f_time=f_time,
                     i_nas=i_nas, i_nts=i_nts)
def get_information(self, f_S, f_time, s_info):
    '''
    Return the value of the requested surface at the point (f_S, f_time),
    using bilinear interpolation on the matrices cached by _set_all_matrix

    :param f_S: float. asset price
    :param f_time: float. time in years
    :param s_info: string. information desired. delta, gamma, price,
        delta_anlt, gamma_anlt, price_anlt
    :raise ValueError: if s_info is not one of the supported keys
    '''
    # dispatch table instead of an if/elif chain; an unknown key used to
    # fall through and raise a confusing NameError on the unbound `df`,
    # now it fails explicitly. getattr keeps the attribute lookup lazy.
    d_attr = {'price': 'df_opt_prices',
              'price_anlt': 'df_opt_prices_anlt',
              'delta': 'df_delta',
              'delta_anlt': 'df_delta_anlt',
              'gamma': 'df_gamma',
              'gamma_anlt': 'df_gamma_anlt'}
    try:
        df = getattr(self, d_attr[s_info])
    except KeyError:
        raise ValueError("Unknown information '{}' requested".format(s_info))
    # interpolate the desired information
    return bilinear_interpolation(f_S, f_time, df)
def compare_to_analytical_solutions(self, l_S, f_time):
    '''
    Plot charts comparing the price, delta and gamma measured by the finite
    difference scheme and by the analytical solution

    :param l_S: list. asset price list
    :param f_time: float. the time step to measure the outputs
    '''
    d_price = {u'analítico': [], u'diferenças finitas': []}
    d_delta = {u'analítico': [], u'diferenças finitas': []}
    d_gamma = {u'analítico': [], u'diferenças finitas': []}
    l_prices = l_S
    for f_S in l_prices:
        # compute prices (analytical vs finite differences)
        f_aux = self.get_information(f_S, f_time, 'price_anlt')
        d_price[u'analítico'].append(f_aux)
        f_aux = self.get_information(f_S, f_time, 'price')
        d_price[u'diferenças finitas'].append(f_aux)
        # compute delta
        f_aux = self.get_information(f_S, f_time, 'delta_anlt')
        d_delta[u'analítico'].append(f_aux)
        f_aux = self.get_information(f_S, f_time, 'delta')
        d_delta[u'diferenças finitas'].append(f_aux)
        # compute gamma
        f_aux = self.get_information(f_S, f_time, 'gamma_anlt')
        d_gamma[u'analítico'].append(f_aux)
        f_aux = self.get_information(f_S, f_time, 'gamma')
        d_gamma[u'diferenças finitas'].append(f_aux)
    # plot results: one panel per measure, finite differences as a solid
    # line and the analytical curve dashed
    fig, (ax1, ax2, ax3) = plt.subplots(1, 3, sharex=True)
    fig.set_size_inches(12, 4)
    l_title = [u'Preços\n', u'$\Delta$\n', u'$\Gamma$\n']
    for d_aux, ax, s_title in zip([d_price, d_delta, d_gamma],
                                  [ax1, ax2, ax3], l_title):
        s_col = u'diferenças finitas'
        df_plot = pd.DataFrame(d_aux[s_col], index=l_prices)
        df_plot.columns = [s_col]
        df_plot.plot(ax=ax)
        s_col = u'analítico'
        df_plot = pd.DataFrame(d_aux[s_col], index=l_prices)
        df_plot.columns = [s_col]
        df_plot.plot(style='--', ax=ax)
        # df_plot = pd.DataFrame(d_aux, index=l_prices)
        # df_plot.plot(ax=ax)
        ax.set_xlabel(u'Preço do Subjacente')
        ax.set_title(s_title)
    ax1.set_ylabel(u'Valor')
    s_prep = u"Comparação de Resultados para {}\n"
    fig.suptitle(s_prep.format(self.s_name), fontsize=16, y=1.03)
    fig.tight_layout()
def _set_final_condition(self):
    '''
    Set up the final condition in the grid, the payoff at expiry (k = 0)

    NOTE: xrange was replaced by range so the method also runs on Python 3
    '''
    # only the terminal asset values are needed here to price the payoff
    for i in range(int(self.grid.f_nas) + 1):
        f_S = i * 1. * self.grid.dS
        self.grid(i, 0).f_asset_value = f_S
        self.grid(i, 0).f_option_value = self._get_payoff(f_S)
        self.grid(i, 0).f_option_value_anlt = self._get_payoff(f_S)
    # fill the asset value of the top boundary row across every time column.
    # NOTE(review): this reuses the final loop value of `i` (the highest
    # asset index), so only that boundary row is touched -- this looks
    # intentional but is worth confirming
    for j in range(int(self.grid.i_nts) + 1):
        f_S = i * 1. * self.grid.dS
        self.grid(i, j).f_asset_value = f_S
def _set_all_matrix(self):
'''
Create attributes to hold the get_matrix information
'''
d_rtn = self._get_matrix()
self.df_asset_prices = d_rtn['asset']
self.df_opt_prices = d_rtn['opt_prices']
self.df_delta = d_rtn['delta']
self.df_gamma = d_rtn['gamma']
self.df_theta = d_rtn['theta']
self.df_opt_prices_anlt = d_rtn['opt_prices_anlt']
self.df_delta_anlt = d_rtn['delta_anlt']
self.df_gamma_anlt = d_rtn['gamma_anlt']
def _go_backwards(self):
'''
work backwards in time to calculate the option value
'''
# inicia variaveis que serao utilizadas
dS = self.grid.dS
dt = self.grid.dt
f_r = self.f_r
f_vol = self.f_sigma
i_nas = int(self.grid.f_nas)
# seta condicao final
self._set_final_condition()
# comeco o loop depois do primeiro passo de cada dimensao
for k in xrange(1, int(self.grid.i_nts) + 1):
for i in xrange(1, int(self.grid.f_nas)):
# calcula valores auxiliares
f_S = i | |
an image viewer program when you are using 'tempfile' image
# handler. This option is a list of strings where the first element is the
# command itself and the remaining elements are the options for the command.
# You can use {file} and {format} in the string to represent the location of
# the generated image file and the image format.
# c.ZMQTerminalInteractiveShell.tempfile_image_handler = []
# Deprecated, use PromptManager.in2_template
# c.ZMQTerminalInteractiveShell.prompt_in2 = ' .\\D.: '
# Enable magic commands to be called without the leading %.
# c.ZMQTerminalInteractiveShell.automagic = True
#
# c.ZMQTerminalInteractiveShell.separate_out = ''
# Timeout for giving up on a kernel (in seconds).
#
# On first connect and restart, the console tests whether the kernel is running
# and responsive by sending kernel_info_requests. This sets the timeout in
# seconds for how long the kernel can take before being presumed dead.
# c.ZMQTerminalInteractiveShell.kernel_timeout = 60
# Deprecated, use PromptManager.justify
# c.ZMQTerminalInteractiveShell.prompts_pad_left = True
# The shell program to be used for paging.
# c.ZMQTerminalInteractiveShell.pager = 'less'
# The name of the logfile to use.
# c.ZMQTerminalInteractiveShell.logfile = ''
# If True, anything that would be passed to the pager will be displayed as
# regular output instead.
# c.ZMQTerminalInteractiveShell.display_page = False
# auto editing of files with syntax errors.
# c.ZMQTerminalInteractiveShell.autoedit_syntax = False
# Handler for image type output. This is useful, for example, when connecting
# to the kernel in which pylab inline backend is activated. There are four
# handlers defined. 'PIL': Use Python Imaging Library to popup image; 'stream':
# Use an external program to show the image. Image will be fed into the STDIN
# of the program. You will need to configure `stream_image_handler`;
# 'tempfile': Use an external program to show the image. Image will be saved in
# a temporary file and the program is called with the temporary file. You
# will need to configure `tempfile_image_handler`; 'callable': You can set any
# Python callable which is called with the image data. You will need to
# configure `callable_image_handler`.
# c.ZMQTerminalInteractiveShell.image_handler = None
# Show rewritten input, e.g. for autocall.
# c.ZMQTerminalInteractiveShell.show_rewritten_input = True
# The part of the banner to be printed before the profile
# c.ZMQTerminalInteractiveShell.banner1 = 'Python 3.4.0 (default, Apr 11 2014, 13:05:18) \nType "copyright", "credits" or "license" for more information.\n\nIPython 3.0.0-rc1 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.ZMQTerminalInteractiveShell.confirm_exit = True
# Preferred object representation MIME type in order. First matched MIME type
# will be used.
# c.ZMQTerminalInteractiveShell.mime_preference = ['image/png', 'image/jpeg', 'image/svg+xml']
#
# c.ZMQTerminalInteractiveShell.history_length = 10000
#
# c.ZMQTerminalInteractiveShell.object_info_string_level = 0
#
# c.ZMQTerminalInteractiveShell.debug = False
# Start logging to the default log file.
# c.ZMQTerminalInteractiveShell.logstart = False
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.ZMQTerminalInteractiveShell.screen_length = 0
# Prefix to add to outputs coming from clients other than this one.
#
# Only relevant if include_other_output is True.
# c.ZMQTerminalInteractiveShell.other_output_prefix = '[remote] '
#
# c.ZMQTerminalInteractiveShell.quiet = False
#------------------------------------------------------------------------------
# KernelManager configuration
#------------------------------------------------------------------------------
# Manages a single kernel in a subprocess on this host.
#
# This version starts kernels with Popen.
# KernelManager will inherit config from: ConnectionFileMixin
#
# c.KernelManager.transport = 'tcp'
# set the shell (ROUTER) port [default: random]
# c.KernelManager.shell_port = 0
# Set the kernel's IP address [default localhost]. If the IP address is
# something other than localhost, then Consoles on other machines will be able
# to connect to the Kernel, so be careful!
# c.KernelManager.ip = ''
# set the stdin (ROUTER) port [default: random]
# c.KernelManager.stdin_port = 0
# JSON file in which to store connection info [default: kernel-<pid>.json]
#
# This file will contain the IP, ports, and authentication key needed to connect
# clients to this kernel. By default, this file will be created in the security
# dir of the current profile, but can be specified by absolute path.
# c.KernelManager.connection_file = ''
# set the heartbeat port [default: random]
# c.KernelManager.hb_port = 0
# set the iopub (PUB) port [default: random]
# c.KernelManager.iopub_port = 0
# set the control (ROUTER) port [default: random]
# c.KernelManager.control_port = 0
# Should we autorestart the kernel if it dies.
# c.KernelManager.autorestart = False
# DEPRECATED: Use kernel_name instead.
#
# The Popen Command to launch the kernel. Override this if you have a custom
# kernel. If kernel_cmd is specified in a configuration file, IPython does not
# pass any arguments to the kernel, because it cannot make any assumptions about
# the arguments that the kernel understands. In particular, this means that the
# kernel does not receive the option --debug if it is given on the IPython
# command line.
# c.KernelManager.kernel_cmd = []
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = ''
#------------------------------------------------------------------------------
# Session configuration
#------------------------------------------------------------------------------
# Object for handling serialization and sending of messages.
#
# The Session object handles building messages and sending them with ZMQ sockets
# or ZMQStream objects. Objects can communicate with each other over the
# network via Session objects, and only need to work with the dict-based IPython
# message spec. The Session will handle serialization/deserialization, security,
# and metadata.
#
# Sessions support configurable serialization via packer/unpacker traits, and
# signing with HMAC digests via the key/keyfile traits.
#
# Parameters ----------
#
# debug : bool
# whether to trigger extra debugging statements
# packer/unpacker : str : 'json', 'pickle' or import_string
# importstrings for methods to serialize message parts. If just
# 'json' or 'pickle', predefined JSON and pickle packers will be used.
# Otherwise, the entire importstring must be used.
#
# The functions must accept at least valid JSON input, and output *bytes*.
#
# For example, to use msgpack:
# packer = 'msgpack.packb', unpacker='msgpack.unpackb'
# pack/unpack : callables
# You can also set the pack/unpack callables for serialization directly.
# session : bytes
# the ID of this Session object. The default is to generate a new UUID.
# username : unicode
# username added to message headers. The default is to ask the OS.
# key : bytes
# The key used to initialize an HMAC signature. If unset, messages
# will not be signed or checked.
# keyfile : filepath
# The file containing a key. If this is set, `key` will be initialized
# to the contents of the file.
# The UUID identifying this session.
# c.Session.session = ''
# Threshold (in bytes) beyond which an object's buffer should be extracted to
# avoid pickling.
# c.Session.buffer_threshold = 1024
# The maximum number of items for a container to be introspected for custom
# serialization. Containers larger than this are pickled outright.
# c.Session.item_threshold = 64
# The name of the unpacker for unserializing messages. Only used with custom
# functions for `packer`.
# c.Session.unpacker = 'json'
# path to file containing execution key.
# c.Session.keyfile = ''
# Metadata dictionary, which serves as the default top-level metadata dict for
# each message.
# c.Session.metadata = {}
# Debug output in the Session
# c.Session.debug = False
# The digest scheme used to construct the message signatures. Must have the form
# 'hmac-HASH'.
# c.Session.signature_scheme = 'hmac-sha256'
# The name of the packer for serializing messages. Should be one of 'json',
# 'pickle', or an import name for a custom callable serializer.
# c.Session.packer = 'json'
# Username for the Session. Default is your system username.
# c.Session.username = 'vagrant'
# The maximum number of digests to remember.
#
# | |
big_ax = fig.add_subplot(111)
big_ax.set_axis_bgcolor('none')
big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_xlabel(r'$|\mathbf{b}|$ [m]', fontsize=24, weight='medium', labelpad=20)
big_ax.set_ylabel(r'$V_{b\tau}^\mathrm{rms}(\mathbf{b})$ [Jy Hz]', fontsize=24, weight='medium', labelpad=20)
if plot_10 or plot_11 or plot_12 or plot_13 or plot_14:
infile = '/data3/t_nithyanandan/'+project_dir+'/'+telescope_str+'multi_baseline_visibilities_'+ground_plane_str+snapshot_type_str+obs_mode+'_baseline_range_{0:.1f}-{1:.1f}_'.format(ref_bl_length[baseline_bin_indices[0]],ref_bl_length[min(baseline_bin_indices[n_bl_chunks-1]+baseline_chunk_size-1,total_baselines-1)])+'gaussian_FG_model_'+fg_str+sky_sector_str+'sprms_{0:.1f}_'.format(spindex_rms)+spindex_seed_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz'.format(Tsys, freq/1e6, nchan*freq_resolution/1e6)
asm_CLEAN_infile = '/data3/t_nithyanandan/'+project_dir+'/'+telescope_str+'multi_baseline_CLEAN_visibilities_'+ground_plane_str+snapshot_type_str+obs_mode+'_baseline_range_{0:.1f}-{1:.1f}_'.format(ref_bl_length[baseline_bin_indices[0]],ref_bl_length[min(baseline_bin_indices[n_bl_chunks-1]+baseline_chunk_size-1,total_baselines-1)])+'gaussian_FG_model_asm'+sky_sector_str+'sprms_{0:.1f}_'.format(spindex_rms)+spindex_seed_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6, nchan*freq_resolution/1e6)+bpass_shape
dsm_CLEAN_infile = '/data3/t_nithyanandan/'+project_dir+'/'+telescope_str+'multi_baseline_CLEAN_visibilities_'+ground_plane_str+snapshot_type_str+obs_mode+'_baseline_range_{0:.1f}-{1:.1f}_'.format(ref_bl_length[baseline_bin_indices[0]],ref_bl_length[min(baseline_bin_indices[n_bl_chunks-1]+baseline_chunk_size-1,total_baselines-1)])+'gaussian_FG_model_dsm'+sky_sector_str+'sprms_{0:.1f}_'.format(spindex_rms)+spindex_seed_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6, nchan*freq_resolution/1e6)+bpass_shape
csm_CLEAN_infile = '/data3/t_nithyanandan/'+project_dir+'/'+telescope_str+'multi_baseline_CLEAN_visibilities_'+ground_plane_str+snapshot_type_str+obs_mode+'_baseline_range_{0:.1f}-{1:.1f}_'.format(ref_bl_length[baseline_bin_indices[0]],ref_bl_length[min(baseline_bin_indices[n_bl_chunks-1]+baseline_chunk_size-1,total_baselines-1)])+'gaussian_FG_model_csm'+sky_sector_str+'sprms_{0:.1f}_'.format(spindex_rms)+spindex_seed_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6, nchan*freq_resolution/1e6)+bpass_shape
ia = RI.InterferometerArray(None, None, None, init_file=infile+'.fits')
simdata_bl_orientation = NP.angle(ia.baselines[:,0] + 1j * ia.baselines[:,1], deg=True)
simdata_neg_bl_orientation_ind = simdata_bl_orientation > 90.0 + 0.5*180.0/n_bins_baseline_orientation
simdata_bl_orientation[simdata_neg_bl_orientation_ind] -= 180.0
ia.baselines[simdata_neg_bl_orientation_ind,:] = -ia.baselines[simdata_neg_bl_orientation_ind,:]
hdulist = fits.open(infile+'.fits')
latitude = hdulist[0].header['latitude']
pointing_coords = hdulist[0].header['pointing_coords']
pointings_table = hdulist['POINTING AND PHASE CENTER INFO'].data
lst = pointings_table['LST']
n_snaps = lst.size
hdulist.close()
if pointing_coords == 'altaz':
pointings_altaz = NP.hstack((pointings_table['pointing_latitude'].reshape(-1,1), pointings_table['pointing_longitude'].reshape(-1,1)))
pointings_hadec = GEOM.altaz2hadec(pointings_altaz, latitude, units='degrees')
pointings_dircos = GEOM.altaz2dircos(pointings_altaz, units='degrees')
elif pointing_coords == 'radec':
pointings_radec = NP.hstack((pointings_table['pointing_longitude'].reshape(-1,1), pointings_table['pointing_latitude'].reshape(-1,1)))
pointings_hadec = NP.hstack(((lst-pointings_radec[:,0]).reshape(-1,1), pointings_radec[:,1].reshape(-1,1)))
pointings_altaz = GEOM.hadec2altaz(pointings_hadec, latitude, units='degrees')
pointings_dircos = GEOM.altaz2dircos(pointings_altaz, units='degrees')
elif pointing_coords == 'hadec':
pointings_hadec = NP.hstack((pointings_table['pointing_longitude'].reshape(-1,1), pointings_table['pointing_latitude'].reshape(-1,1)))
pointings_radec = NP.hstack(((lst-pointings_hadec[:,0]).reshape(-1,1), pointings_hadec[:,1].reshape(-1,1)))
pointings_altaz = GEOM.hadec2altaz(pointings_hadec, latitude, units='degrees')
pointings_dircos = GEOM.altaz2dircos(pointings_altaz, units='degrees')
hdulist = fits.open(asm_CLEAN_infile+'.fits')
clean_lags = hdulist['SPECTRAL INFO'].data['lag']
clean_lags_orig = NP.copy(clean_lags)
asm_cc_skyvis = hdulist['CLEAN NOISELESS VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES IMAG'].data
asm_cc_skyvis_res = hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG'].data
asm_cc_vis = hdulist['CLEAN NOISY VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES IMAG'].data
asm_cc_vis_res = hdulist['CLEAN NOISY VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES RESIDUALS IMAG'].data
hdulist.close()
hdulist = fits.open(dsm_CLEAN_infile+'.fits')
dsm_cc_skyvis = hdulist['CLEAN NOISELESS VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES IMAG'].data
dsm_cc_skyvis_res = hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG'].data
dsm_cc_vis = hdulist['CLEAN NOISY VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES IMAG'].data
dsm_cc_vis_res = hdulist['CLEAN NOISY VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES RESIDUALS IMAG'].data
hdulist.close()
hdulist = fits.open(csm_CLEAN_infile+'.fits')
csm_cc_skyvis = hdulist['CLEAN NOISELESS VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES IMAG'].data
csm_cc_skyvis_res = hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISELESS VISIBILITIES RESIDUALS IMAG'].data
csm_cc_vis = hdulist['CLEAN NOISY VISIBILITIES REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES IMAG'].data
csm_cc_vis_res = hdulist['CLEAN NOISY VISIBILITIES RESIDUALS REAL'].data + 1j * hdulist['CLEAN NOISY VISIBILITIES RESIDUALS IMAG'].data
hdulist.close()
asm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:] = asm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:].conj()
asm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:] = asm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:].conj()
asm_cc_vis[simdata_neg_bl_orientation_ind,:,:] = asm_cc_vis[simdata_neg_bl_orientation_ind,:,:].conj()
asm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:] = asm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:].conj()
asm_cc_skyvis_lag = NP.fft.fftshift(NP.fft.ifft(asm_cc_skyvis, axis=1),axes=1) * asm_cc_skyvis.shape[1] * freq_resolution
asm_ccres_sky = NP.fft.fftshift(NP.fft.ifft(asm_cc_skyvis_res, axis=1),axes=1) * asm_cc_skyvis.shape[1] * freq_resolution
asm_cc_skyvis_lag = asm_cc_skyvis_lag + asm_ccres_sky
asm_cc_vis_lag = NP.fft.fftshift(NP.fft.ifft(asm_cc_vis, axis=1),axes=1) * asm_cc_vis.shape[1] * freq_resolution
asm_ccres = NP.fft.fftshift(NP.fft.ifft(asm_cc_vis_res, axis=1),axes=1) * asm_cc_vis.shape[1] * freq_resolution
asm_cc_vis_lag = asm_cc_vis_lag + asm_ccres
dsm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:] = dsm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:].conj()
dsm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:] = dsm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:].conj()
dsm_cc_vis[simdata_neg_bl_orientation_ind,:,:] = dsm_cc_vis[simdata_neg_bl_orientation_ind,:,:].conj()
dsm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:] = dsm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:].conj()
dsm_cc_skyvis_lag = NP.fft.fftshift(NP.fft.ifft(dsm_cc_skyvis, axis=1),axes=1) * dsm_cc_skyvis.shape[1] * freq_resolution
dsm_ccres_sky = NP.fft.fftshift(NP.fft.ifft(dsm_cc_skyvis_res, axis=1),axes=1) * dsm_cc_skyvis.shape[1] * freq_resolution
dsm_cc_skyvis_lag = dsm_cc_skyvis_lag + dsm_ccres_sky
dsm_cc_vis_lag = NP.fft.fftshift(NP.fft.ifft(dsm_cc_vis, axis=1),axes=1) * dsm_cc_vis.shape[1] * freq_resolution
dsm_ccres = NP.fft.fftshift(NP.fft.ifft(dsm_cc_vis_res, axis=1),axes=1) * dsm_cc_vis.shape[1] * freq_resolution
dsm_cc_vis_lag = dsm_cc_vis_lag + dsm_ccres
csm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:] = csm_cc_skyvis[simdata_neg_bl_orientation_ind,:,:].conj()
csm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:] = csm_cc_skyvis_res[simdata_neg_bl_orientation_ind,:,:].conj()
csm_cc_vis[simdata_neg_bl_orientation_ind,:,:] = csm_cc_vis[simdata_neg_bl_orientation_ind,:,:].conj()
csm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:] = csm_cc_vis_res[simdata_neg_bl_orientation_ind,:,:].conj()
csm_cc_skyvis_lag = NP.fft.fftshift(NP.fft.ifft(csm_cc_skyvis, axis=1),axes=1) * csm_cc_skyvis.shape[1] * freq_resolution
csm_ccres_sky = NP.fft.fftshift(NP.fft.ifft(csm_cc_skyvis_res, axis=1),axes=1) * csm_cc_skyvis.shape[1] * freq_resolution
csm_cc_skyvis_lag = csm_cc_skyvis_lag + csm_ccres_sky
csm_cc_vis_lag = NP.fft.fftshift(NP.fft.ifft(csm_cc_vis, axis=1),axes=1) * csm_cc_vis.shape[1] * freq_resolution
csm_ccres = NP.fft.fftshift(NP.fft.ifft(csm_cc_vis_res, axis=1),axes=1) * csm_cc_vis.shape[1] * freq_resolution
csm_cc_vis_lag = csm_cc_vis_lag + csm_ccres
asm_cc_skyvis_lag = DSP.downsampler(asm_cc_skyvis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
asm_cc_vis_lag = DSP.downsampler(asm_cc_vis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
dsm_cc_skyvis_lag = DSP.downsampler(dsm_cc_skyvis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
dsm_cc_vis_lag = DSP.downsampler(dsm_cc_vis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
csm_cc_skyvis_lag = DSP.downsampler(csm_cc_skyvis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
csm_cc_vis_lag = DSP.downsampler(csm_cc_vis_lag, 1.0*clean_lags.size/ia.lags.size, axis=1)
clean_lags = DSP.downsampler(clean_lags, 1.0*clean_lags.size/ia.lags.size, axis=-1)
clean_lags = clean_lags.ravel()
vis_noise_lag = NP.copy(ia.vis_noise_lag)
vis_noise_lag = vis_noise_lag[truncated_ref_bl_ind,:,:]
asm_cc_skyvis_lag = asm_cc_skyvis_lag[truncated_ref_bl_ind,:,:]
asm_cc_vis_lag = asm_cc_vis_lag[truncated_ref_bl_ind,:,:]
csm_cc_skyvis_lag = csm_cc_skyvis_lag[truncated_ref_bl_ind,:,:]
csm_cc_vis_lag = csm_cc_vis_lag[truncated_ref_bl_ind,:,:]
dsm_cc_skyvis_lag = dsm_cc_skyvis_lag[truncated_ref_bl_ind,:,:]
dsm_cc_vis_lag = dsm_cc_vis_lag[truncated_ref_bl_ind,:,:]
delaymat = DLY.delay_envelope(ia.baselines[truncated_ref_bl_ind,:], pc, units='mks')
bw = nchan * freq_resolution
min_delay = -delaymat[0,:,1]-delaymat[0,:,0]
max_delay = delaymat[0,:,0]-delaymat[0,:,1]
clags = clean_lags.reshape(1,-1)
min_delay = min_delay.reshape(-1,1)
max_delay = max_delay.reshape(-1,1)
thermal_noise_window = NP.abs(clags) >= max_abs_delay*1e-6
thermal_noise_window = NP.repeat(thermal_noise_window, ia.baselines[truncated_ref_bl_ind,:].shape[0], axis=0)
EoR_window = NP.logical_or(clags > max_delay+3/bw, clags < min_delay-3/bw)
strict_EoR_window = NP.logical_and(EoR_window, NP.abs(clags) < 1/coarse_channel_resolution)
wedge_window = NP.logical_and(clags <= max_delay, clags >= min_delay)
non_wedge_window = NP.logical_not(wedge_window)
vis_rms_lag = OPS.rms(asm_cc_vis_lag, mask=NP.logical_not(NP.repeat(thermal_noise_window[:,:,NP.newaxis], n_snaps, axis=2)), axis=1)
vis_rms_freq = NP.abs(vis_rms_lag) / NP.sqrt(nchan) / freq_resolution
T_rms_freq = vis_rms_freq / (2.0 * FCNST.k) * NP.mean(ia.A_eff[truncated_ref_bl_ind,:]) * NP.mean(ia.eff_Q[truncated_ref_bl_ind,:]) * NP.sqrt(2.0*freq_resolution*NP.asarray(ia.t_acc).reshape(1,1,-1)) * CNST.Jy
vis_rms_lag_theory = OPS.rms(vis_noise_lag, mask=NP.logical_not(NP.repeat(EoR_window[:,:,NP.newaxis], n_snaps, axis=2)), axis=1)
vis_rms_freq_theory = NP.abs(vis_rms_lag_theory) / NP.sqrt(nchan) / freq_resolution
T_rms_freq_theory = vis_rms_freq_theory / (2.0 * FCNST.k) * NP.mean(ia.A_eff[truncated_ref_bl_ind,:]) * NP.mean(ia.eff_Q[truncated_ref_bl_ind,:]) * NP.sqrt(2.0*freq_resolution*NP.asarray(ia.t_acc).reshape(1,1,-1)) * CNST.Jy
if (dspec_min is None) or (dspec_max is None):
dspec_max = max([NP.abs(asm_cc_skyvis_lag).max(), NP.abs(dsm_cc_skyvis_lag).max(), NP.abs(csm_cc_skyvis_lag).max()])
dspec_min = min([NP.abs(asm_cc_skyvis_lag).min(), NP.abs(dsm_cc_skyvis_lag).min(), NP.abs(csm_cc_skyvis_lag).min()])
dspec_max = dspec_max**2 * volfactor1 * volfactor2 * Jy2K**2
dspec_min = dspec_min**2 * volfactor1 * volfactor2 * Jy2K**2
small_delays_EoR_window = EoR_window.T
small_delays_strict_EoR_window = strict_EoR_window.T
small_delays_wedge_window = wedge_window.T
if max_abs_delay is not None:
small_delays_ind = NP.abs(clean_lags) <= max_abs_delay * 1e-6
clean_lags = clean_lags[small_delays_ind]
asm_cc_vis_lag = asm_cc_vis_lag[:,small_delays_ind,:]
asm_cc_skyvis_lag = asm_cc_skyvis_lag[:,small_delays_ind,:]
dsm_cc_vis_lag = dsm_cc_vis_lag[:,small_delays_ind,:]
dsm_cc_skyvis_lag = dsm_cc_skyvis_lag[:,small_delays_ind,:]
csm_cc_vis_lag = csm_cc_vis_lag[:,small_delays_ind,:]
csm_cc_skyvis_lag = csm_cc_skyvis_lag[:,small_delays_ind,:]
small_delays_EoR_window = small_delays_EoR_window[small_delays_ind,:]
small_delays_strict_EoR_window = small_delays_strict_EoR_window[small_delays_ind,:]
small_delays_wedge_window = small_delays_wedge_window[small_delays_ind,:]
if plot_10:
# 10) Plot noiseless delay spectra from simulations for diffuse, compact and all-sky models
descriptor_str = ['off-zenith', 'zenith']
        # All-sky model
fig, axs = PLT.subplots(n_snaps, sharex=True, sharey=True, figsize=(6,6))
for j in xrange(n_snaps):
imdspec = axs[j].pcolorfast(truncated_ref_bl_length, 1e6*clean_lags, NP.abs(asm_cc_skyvis_lag[:-1,:-1,j].T)**2 * volfactor1 * volfactor2 * Jy2K**2, norm=PLTC.LogNorm(vmin=(1e6)**2 * volfactor1 * volfactor2 * Jy2K**2, vmax=dspec_max))
horizonb = axs[j].plot(truncated_ref_bl_length, 1e6*min_delay.ravel(), color='white', ls=':', lw=1.5)
horizont = axs[j].plot(truncated_ref_bl_length, 1e6*max_delay.ravel(), color='white', ls=':', lw=1.5)
axs[j].set_ylim(0.9*NP.amin(clean_lags*1e6), 0.9*NP.amax(clean_lags*1e6))
axs[j].set_aspect('auto')
axs[j].text(0.5, 0.9, descriptor_str[j], transform=axs[j].transAxes, fontsize=14, weight='semibold', ha='center', color='white')
for j in xrange(n_snaps):
axs_kprll = axs[j].twinx()
axs_kprll.set_yticks(kprll(axs[j].get_yticks()*1e-6, redshift))
axs_kprll.set_ylim(kprll(NP.asarray(axs[j].get_ylim())*1e-6, redshift))
yformatter = FuncFormatter(lambda y, pos: '{0:.2f}'.format(y))
axs_kprll.yaxis.set_major_formatter(yformatter)
if j == 0:
axs_kperp = axs[j].twiny()
axs_kperp.set_xticks(kperp(axs[j].get_xticks()*freq/FCNST.c, redshift))
axs_kperp.set_xlim(kperp(NP.asarray(axs[j].get_xlim())*freq/FCNST.c, redshift))
xformatter = FuncFormatter(lambda x, pos: '{0:.3f}'.format(x))
axs_kperp.xaxis.set_major_formatter(xformatter)
fig.subplots_adjust(hspace=0)
big_ax = fig.add_subplot(111)
big_ax.set_axis_bgcolor('none')
big_ax.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_ax.set_xticks([])
big_ax.set_yticks([])
big_ax.set_ylabel(r'$\tau$ [$\mu$s]', fontsize=16, weight='medium', labelpad=30)
big_ax.set_xlabel(r'$|\mathbf{b}|$ [m]', fontsize=16, weight='medium', labelpad=20)
big_axr = big_ax.twinx()
big_axr.set_axis_bgcolor('none')
big_axr.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_axr.set_xticks([])
big_axr.set_yticks([])
big_axr.set_ylabel(r'$k_\parallel$ [$h$ Mpc$^{-1}$]', fontsize=16, weight='medium', labelpad=40)
big_axt = big_ax.twiny()
big_axt.set_axis_bgcolor('none')
big_axt.tick_params(labelcolor='none', top='off', bottom='off', left='off', right='off')
big_axt.set_xticks([])
big_axt.set_yticks([])
big_axt.set_xlabel(r'$k_\perp$ [$h$ Mpc$^{-1}$]', fontsize=16, weight='medium', labelpad=30)
cbax = fig.add_axes([0.9, 0.125, 0.02, 0.74])
cbar = fig.colorbar(imdspec, cax=cbax, orientation='vertical')
cbax.set_xlabel(r'K$^2$(Mpc/h)$^3$', labelpad=10, fontsize=12)
cbax.xaxis.set_label_position('top')
# PLT.tight_layout()
fig.subplots_adjust(right=0.72)
fig.subplots_adjust(top=0.88)
# fig = PLT.figure(figsize=(6,6))
# for j in xrange(n_snaps):
# ax = fig.add_subplot(n_snaps,1,j+1)
# ax.set_ylim(NP.amin(clean_lags*1e6), NP.amax(clean_lags*1e6))
# ax.set_ylabel(r'lag [$\mu$s]', fontsize=18)
# ax.set_xlabel(r'$|\mathbf{b}|$ [m]', fontsize=18)
# imdspec = ax.pcolorfast(truncated_ref_bl_length, 1e6*clean_lags, NP.abs(asm_cc_skyvis_lag[:-1,:-1,j].T), norm=PLTC.LogNorm(vmin=1e6, vmax=dspec_max))
# horizonb = ax.plot(truncated_ref_bl_length, 1e6*min_delay.ravel(), color='white', ls='-', lw=1.5)
# horizont = ax.plot(truncated_ref_bl_length, 1e6*max_delay.ravel(), color='white', ls='-', lw=1.5)
# ax.set_aspect('auto')
# cbax = fig.add_axes([0.86, 0.125, 0.02, 0.84])
# cbar = fig.colorbar(imdspec, cax=cbax, orientation='vertical')
# cbax.set_ylabel('Jy Hz', labelpad=0, fontsize=18)
# PLT.tight_layout()
# fig.subplots_adjust(right=0.83)
# # fig.subplots_adjust(top=0.9)
PLT.savefig('/data3/t_nithyanandan/'+project_dir+'/figures/'+telescope_str+'multi_baseline_CLEAN_noiseless_PS_'+ground_plane_str+snapshot_type_str+obs_mode+'_gaussian_FG_model_asm'+sky_sector_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6,nchan*freq_resolution/1e6)+bpass_shape+'{0:.1f}'.format(oversampling_factor)+'.png', bbox_inches=0)
PLT.savefig('/data3/t_nithyanandan/'+project_dir+'/figures/'+telescope_str+'multi_baseline_CLEAN_noiseless_PS_'+ground_plane_str+snapshot_type_str+obs_mode+'_gaussian_FG_model_asm'+sky_sector_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6,nchan*freq_resolution/1e6)+bpass_shape+'{0:.1f}'.format(oversampling_factor)+'.eps', bbox_inches=0)
# Plot each snapshot separately
for j in xrange(n_snaps):
fig = PLT.figure(figsize=(6,6))
ax = fig.add_subplot(111)
imdspec = ax.pcolorfast(truncated_ref_bl_length, 1e6*clean_lags, NP.abs(asm_cc_skyvis_lag[:-1,:-1,j].T)**2 * volfactor1 * volfactor2 * Jy2K**2, norm=PLTC.LogNorm(vmin=1e0, vmax=1e12))
horizonb = ax.plot(truncated_ref_bl_length, 1e6*min_delay.ravel(), color='white', ls=':', lw=1.5)
horizont = ax.plot(truncated_ref_bl_length, 1e6*max_delay.ravel(), color='white', ls=':', lw=1.5)
ax.set_ylim(0.9*NP.amin(clean_lags*1e6), 0.9*NP.amax(clean_lags*1e6))
ax.set_aspect('auto')
# ax.text(0.5, 0.9, descriptor_str[j], transform=ax.transAxes, fontsize=14, weight='semibold', ha='center', color='white')
ax_kprll = ax.twinx()
ax_kprll.set_yticks(kprll(ax.get_yticks()*1e-6, redshift))
ax_kprll.set_ylim(kprll(NP.asarray(ax.get_ylim())*1e-6, redshift))
yformatter = FuncFormatter(lambda y, pos: '{0:.2f}'.format(y))
ax_kprll.yaxis.set_major_formatter(yformatter)
ax_kperp = ax.twiny()
ax_kperp.set_xticks(kperp(ax.get_xticks()*freq/FCNST.c, redshift))
ax_kperp.set_xlim(kperp(NP.asarray(ax.get_xlim())*freq/FCNST.c, redshift))
xformatter = FuncFormatter(lambda x, pos: '{0:.3f}'.format(x))
ax_kperp.xaxis.set_major_formatter(xformatter)
ax.set_ylabel(r'$\tau$ [$\mu$s]', fontsize=16, weight='medium')
ax.set_xlabel(r'$|\mathbf{b}|$ [m]', fontsize=16, weight='medium')
ax_kprll.set_ylabel(r'$k_\parallel$ [$h$ Mpc$^{-1}$]', fontsize=16, weight='medium')
ax_kperp.set_xlabel(r'$k_\perp$ [$h$ Mpc$^{-1}$]', fontsize=16, weight='medium')
cbax = fig.add_axes([0.9, 0.125, 0.02, 0.74])
cbar = fig.colorbar(imdspec, cax=cbax, orientation='vertical')
cbax.set_xlabel(r'K$^2$(Mpc/h)$^3$', labelpad=10, fontsize=12)
cbax.xaxis.set_label_position('top')
# PLT.tight_layout()
fig.subplots_adjust(right=0.72)
fig.subplots_adjust(top=0.88)
PLT.savefig('/data3/t_nithyanandan/'+project_dir+'/figures/'+telescope_str+'multi_baseline_CLEAN_noiseless_PS_'+ground_plane_str+snapshot_type_str+obs_mode+'_gaussian_FG_model_asm'+sky_sector_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6,nchan*freq_resolution/1e6)+bpass_shape+'{0:.1f}_snapshot_{1:1d}'.format(oversampling_factor, j)+'.png', bbox_inches=0)
PLT.savefig('/data3/t_nithyanandan/'+project_dir+'/figures/'+telescope_str+'multi_baseline_CLEAN_noiseless_PS_'+ground_plane_str+snapshot_type_str+obs_mode+'_gaussian_FG_model_asm'+sky_sector_str+'nside_{0:0d}_'.format(nside)+'Tsys_{0:.1f}K_{1:.1f}_MHz_{2:.1f}_MHz_'.format(Tsys, freq/1e6,nchan*freq_resolution/1e6)+bpass_shape+'{0:.1f}_snapshot_{1:1d}'.format(oversampling_factor, j)+'.eps', bbox_inches=0)
# Diffuse foreground model
fig, axs = PLT.subplots(n_snaps, sharex=True, sharey=True, figsize=(6,6))
for j in xrange(n_snaps):
imdspec = axs[j].pcolorfast(truncated_ref_bl_length, 1e6*clean_lags, NP.abs(dsm_cc_skyvis_lag[:-1,:-1,j].T)**2 * volfactor1 * volfactor2 * Jy2K**2, norm=PLTC.LogNorm(vmin=(1e6)**2 * volfactor1 * volfactor2 * Jy2K**2, vmax=dspec_max))
horizonb = axs[j].plot(truncated_ref_bl_length, 1e6*min_delay.ravel(), color='white', ls=':', lw=1.5)
horizont = axs[j].plot(truncated_ref_bl_length, 1e6*max_delay.ravel(), color='white', ls=':', lw=1.5)
axs[j].set_ylim(0.9*NP.amin(clean_lags*1e6), 0.9*NP.amax(clean_lags*1e6))
axs[j].set_aspect('auto')
axs[j].text(0.5, 0.9, descriptor_str[j], transform=axs[j].transAxes, fontsize=14, weight='semibold', ha='center', color='white')
for j in xrange(n_snaps):
axs_kprll = axs[j].twinx()
axs_kprll.set_yticks(kprll(axs[j].get_yticks()*1e-6, redshift))
axs_kprll.set_ylim(kprll(NP.asarray(axs[j].get_ylim())*1e-6, redshift))
yformatter = FuncFormatter(lambda y, pos: '{0:.2f}'.format(y))
axs_kprll.yaxis.set_major_formatter(yformatter)
if | |
<filename>pylayers/mobility/transit/vec3.py
#####################################################################
# vec3 - 3-dimensional vector
#
# Copyright (C) 2002, <NAME> (<EMAIL>)
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################
import types, math, copy
import pdb
# vec3
class vec3:
"""Three-dimensional vector.
This class can be used to represent points, vectors, normals
or even colors. The usual vector operations are available.
"""
def __init__(self, *args):
"""Constructor.
There are several possibilities how to initialize a vector:
v = vec3() -> v = <0,0,0>
v = vec3(a) -> v = <a,a,a>
v = vec3(x,y) -> v = <x,y,0>
v = vec3(x,y,z) -> v = <x,y,z>
Note that specifying just one value sets all three components to
that value (except when that single value is a another vec3, then
that vector is copied).
Additionally you can wrap those values in a list or a tuple or
specify them as a string:
v = vec3([1,2,3]) -> v = <1,2,3>
v = vec3("4,5") -> v = <4,5,0>
"""
if len(args)==0:
self.x, self.y, self.z = (0.0, 0.0, 0.0)
elif len(args)==1:
T = type(args[0])
# scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x, self.y, self.z = (args[0], args[0], args[0])
# vec3
elif isinstance(args[0], vec3):
self.x, self.y, self.z = args[0]
# Tuple/List
elif T==types.TupleType or T==types.ListType:
if len(args[0])==0:
self.x = self.y = self.z = 0.0
elif len(args[0])==1:
self.x = self.y = self.z = args[0][0]
elif len(args[0])==2:
self.x, self.y = args[0]
self.z = 0.0
elif len(args[0])==3:
self.x, self.y, self.z = args[0]
else:
raise TypeError("vec3() takes at most 3 arguments")
# String
elif T==types.StringType:
s=args[0].replace(","," ").replace(" "," ").strip().split(" ")
if s==[""]:
s=[]
f=map(lambda x: float(x), s)
dummy = vec3(f)
self.x, self.y, self.z = dummy
# error
else:
raise TypeError("vec3() arg can't be converted to vec3")
elif len(args)==2:
self.x, self.y, self.z = (args[0], args[1], 0.0)
elif len(args)==3:
self.x, self.y, self.z = args
else:
raise TypeError("vec3() takes at most 3 arguments")
def __repr__(self):
return 'vec3('+str(self.x)+', '+str(self.y)+', '+str(self.z)+')'
def __str__(self):
fmt="%1.4f"
return '('+fmt%self.x+', '+fmt%self.y+', '+fmt%self.z+')'
def __eq__(self, other):
"""== operator
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> c=vec3(-0.3, 0.75, 0.5)
>>> print(a==b)
0
>>> print(b==c)
1
>>> print(a==None)
0
"""
if isinstance(other, vec3):
return self.x==other.x and self.y==other.y and self.z==other.z
else:
return 0
def __ne__(self, other):
"""!= operator
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> c=vec3(-0.3, 0.75, 0.5)
>>> print(a!=b)
1
>>> print(b!=c)
0
>>> print(a!=None)
1
"""
if isinstance(other, vec3):
return self.x!=other.x or self.y!=other.y or self.z!=other.z
else:
return 1
def __add__(self, other):
"""Vector addition.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> print(a+b)
(0.7000, 1.2500, -1.3000)
"""
if isinstance(other, vec3):
return vec3(self.x+other.x, self.y+other.y, self.z+other.z)
else:
raise TypeError("unsupported operand type for +")
def __sub__(self, other):
"""Vector subtraction.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> print(a-b)
(1.3000, -0.2500, -2.3000)
"""
if isinstance(other, vec3):
return vec3(self.x-other.x, self.y-other.y, self.z-other.z)
else:
raise TypeError("unsupported operand type for -")
def __mul__(self, other):
"""Multiplication with a scalar or dot product.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> print(a*2.0)
(2.0000, 1.0000, -3.6000)
>>> print(2.0*a)
(2.0000, 1.0000, -3.6000)
>>> print(a*b)
-0.825
"""
T = type(other)
# vec3*scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec3(self.x*other, self.y*other, self.z*other)
# vec3*vec3
if isinstance(other, vec3):
return self.x*other.x + self.y*other.y + self.z*other.z
# unsupported
else:
# Try to delegate the operation to the other operand
if getattr(other,"__rmul__",None)!=None:
return other.__rmul__(self)
else:
raise TypeError("unsupported operand type for *")
__rmul__ = __mul__
def __div__(self, other):
"""Division by scalar
>>> a=vec3(1.0, 0.5, -1.8)
>>> print(a/2.0)
(0.5000, 0.2500, -0.9000)
"""
T = type(other)
# vec3/scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec3(self.x/other, self.y/other, self.z/other)
# unsupported
else:
raise TypeError("unsupported operand type for /")
def __mod__(self, other):
"""Modulo (component wise)
>>> a=vec3(3.0, 2.5, -1.8)
>>> print(a%2.0)
(1.0000, 0.5000, 0.2000)
"""
T = type(other)
# vec3%scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
return vec3(self.x%other, self.y%other, self.z%other)
# unsupported
else:
raise TypeError("unsupported operand type for %")
def __iadd__(self, other):
"""Inline vector addition.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> a+=b
>>> print(a)
(0.7000, 1.2500, -1.3000)
"""
if isinstance(other, vec3):
self.x+=other.x
self.y+=other.y
self.z+=other.z
return self
else:
raise TypeError("unsupported operand type for +=")
def __isub__(self, other):
"""Inline vector subtraction.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> a-=b
>>> print(a)
(1.3000, -0.2500, -2.3000)
"""
if isinstance(other, vec3):
self.x-=other.x
self.y-=other.y
self.z-=other.z
return self
else:
raise TypeError("unsupported operand type for -=")
def __imul__(self, other):
"""Inline multiplication (only with scalar)
>>> a=vec3(1.0, 0.5, -1.8)
>>> a*=2.0
>>> print(a)
(2.0000, 1.0000, -3.6000)
"""
T = type(other)
# vec3*=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x*=other
self.y*=other
self.z*=other
return self
else:
raise TypeError("unsupported operand type for *=")
def __idiv__(self, other):
"""Inline division with scalar
>>> a=vec3(1.0, 0.5, -1.8)
>>> a/=2.0
>>> print(a)
(0.5000, 0.2500, -0.9000)
"""
T = type(other)
# vec3/=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x/=other
self.y/=other
self.z/=other
return self
else:
raise TypeError("unsupported operand type for /=")
def __imod__(self, other):
"""Inline modulo
>>> a=vec3(3.0, 2.5, -1.8)
>>> a%=2.0
>>> print(a)
(1.0000, 0.5000, 0.2000)
"""
T = type(other)
# vec3%=scalar
if T==types.FloatType or T==types.IntType or T==types.LongType:
self.x%=other
self.y%=other
self.z%=other
return self
else:
raise TypeError("unsupported operand type for %=")
def __neg__(self):
"""Negation
>>> a=vec3(3.0, 2.5, -1.8)
>>> print(-a)
(-3.0000, -2.5000, 1.8000)
"""
return vec3(-self.x, -self.y, -self.z)
def __pos__(self):
"""
>>> a=vec3(3.0, 2.5, -1.8)
>>> print(+a)
(3.0000, 2.5000, -1.8000)
"""
return vec3(+self.x, +self.y, +self.z)
def __abs__(self):
"""Return the length of the vector.
abs(v) is equivalent to v.length().
>>> a=vec3(1.0, 0.5, -1.8)
>>> print(abs(a))
2.11896201004
"""
return math.sqrt(self*self)
def __len__(self):
"""Length of the sequence (always 3)"""
return 3
def __getitem__(self, key):
"""Return a component by index (0-based)
>>> a=vec3(1.0, 0.5, -1.8)
>>> print(a[0])
1.0
>>> print(a[1])
0.5
>>> print(a[2])
-1.8
"""
T=type(key)
if T!=types.IntType and T!=types.LongType:
raise TypeError("index must be integer")
if key==0: return self.x
elif key==1: return self.y
elif key==2: return self.z
else:
raise IndexError("index out of range")
def __setitem__(self, key, value):
"""Set a component by index (0-based)
>>> a=vec3()
>>> a[0]=1.5; a[1]=0.7; a[2]=-0.3
>>> print(a)
(1.5000, 0.7000, -0.3000)
"""
T=type(key)
if T!=types.IntType and T!=types.LongType:
raise TypeError("index must be integer")
if key==0: self.x = value
elif key==1: self.y = value
elif key==2: self.z = value
else:
raise IndexError("index out of range")
def cross(self, other):
"""Cross product.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> c=a.cross(b)
>>> print(c)
(1.6000, 0.0400, 0.9000)
"""
if isinstance(other, vec3):
return vec3(self.y*other.z-self.z*other.y,
self.z*other.x-self.x*other.z,
self.x*other.y-self.y*other.x)
else:
raise TypeError("unsupported operand type for cross()")
def length(self):
"""Return the length of the vector.
v.length() is equivalent to abs(v).
>>> a=vec3(1.0, 0.5, -1.8)
>>> print(a.length())
2.11896201004
"""
return math.sqrt(self*self)
def normalize(self):
"""Return normalized vector.
>>> a=vec3(1.0, 0.5, -1.8)
>>> print(a.normalize())
(0.4719, 0.2360, -0.8495)
"""
try:
nlen = 1.0/math.sqrt(self*self)
except:
nlen=1.
return vec3(self.x*nlen, self.y*nlen, self.z*nlen)
def angle(self, other):
"""Return angle (in radians) between self and other.
>>> a=vec3(1.0, 0.5, -1.8)
>>> b=vec3(-0.3, 0.75, 0.5)
>>> print(a.angle(b))
1.99306755584
"""
if isinstance(other, vec3):
return math.acos((self*other) / (abs(self)*abs(other)))
else:
raise TypeError("unsupported operand type for angle()")
def reflect(self, N):
"""Return the reflection vector.
N is the surface normal which has to be of unit length.
>>> a=vec3(1.0, 0.5, -1.8)
>>> print( a.reflect(vec3(1,0,1))
(2.6000, 0.5000, -0.2000)
"""
return self - 2.0*(self*N)*N
def refract(self, N, eta):
"""Return the transmitted vector.
N is the surface normal which has to be of unit length.
eta is the relative index of refraction. If the returned
vector is zero then there is no transmitted light because
of total internal reflection.
>>> a=vec3(1.0, -1.5, 0.8)
>>> print(a.refract(vec3(0,1,0), 1.33))
(1.3300, -1.7920, 1.0640)
"""
dot = self*N
k = 1.0 - eta*eta*(1.0 - dot*dot)
if k<0:
return vec3(0.0,0.0,0.0)
else:
return eta*self - (eta*dot + math.sqrt(k))*N
def ortho(self):
"""Returns an orthogonal vector.
Returns a vector that is orthogonal to self (where
self*self.ortho()==0).
>>> a=vec3(1.0, -1.5, 0.8)
>>> print(round(a*a.ortho(),8))
0.0
"""
x=abs(self.x)
y=abs(self.y)
z=abs(self.z)
# Is z the smallest element? Then use x and y
if z<=x and z<=y:
return vec3(-self.y, self.x, 0.0)
# Is y smallest element? Then use x and z
elif | |
# rare case we don't worry too much about alignment.)
mc = self.assembler.mc
current_pos = mc.get_relative_pos()
target_pos = (current_pos + 15) & ~15
target_pos = max(target_pos, self.min_bytes_before_label)
insert_nops = target_pos - current_pos
assert 0 <= insert_nops <= 15
for c in mc.MULTIBYTE_NOPs[insert_nops]:
mc.writechar(c)
    def loc(self, v):
        """Return the current location (register/stack loc) of box *v*.

        Floats and vector boxes are managed by the xmm register manager
        (self.xrm); everything else by the GPR manager (self.rm).  None is
        passed through unchanged.
        """
        if v is None: # xxx kludgy
            return None
        if v.type == FLOAT or v.is_vector():
            return self.xrm.loc(v)
        return self.rm.loc(v)
    def load_condition_into_cc(self, box):
        # If no guard condition is already pending in the CPU flags, emit a
        # test of the box's location and record that a guard should branch
        # on the NZ condition.
        if self.assembler.guard_success_cc == rx86.cond_none:
            self.assembler.test_location(self.loc(box))
            self.assembler.guard_success_cc = rx86.Conditions['NZ']
    def _consider_guard_cc(self, op):
        # Generic handler for guards testing a single boolean box: load the
        # condition into the CPU flags, then emit the guard with no operands.
        arg = op.getarg(0)
        self.load_condition_into_cc(arg)
        self.perform_guard(op, [], None)
    consider_guard_true = _consider_guard_cc
    consider_guard_false = _consider_guard_cc
    consider_guard_nonnull = _consider_guard_cc
    consider_guard_isnull = _consider_guard_cc
    def consider_finish(self, op):
        # the frame is in ebp, but we have to point where in the frame is
        # the potential argument to FINISH
        if op.numargs() == 1:
            loc = self.make_sure_var_in_reg(op.getarg(0))
            locs = [loc]
        else:
            locs = []
        self.perform(op, locs, None)
    def consider_guard_no_exception(self, op):
        # No operands to allocate: the guard only checks the exception flag.
        self.perform_guard(op, [], None)
    def consider_guard_not_invalidated(self, op):
        # This guard must emit no machine code by itself (the assert below
        # checks that the position did not move): invalidation later patches
        # the code at this spot.
        mc = self.assembler.mc
        n = mc.get_relative_pos(break_basic_block=False)
        self.perform_guard(op, [], None)
        assert n == mc.get_relative_pos(break_basic_block=False)
        # ensure that the next label is at least 5 bytes farther than
        # the current position. Otherwise, when invalidating the guard,
        # we would overwrite randomly the next label's position.
        self.ensure_next_label_is_at_least_at_position(n + 5)
    def consider_guard_exception(self, op):
        # Allocate: the expected exception class in a register, a temporary
        # scratch register, and a result register only when the exception
        # value is actually used later.
        loc = self.rm.make_sure_var_in_reg(op.getarg(0))
        box = TempVar()
        args = op.getarglist()
        loc1 = self.rm.force_allocate_reg(box, args)
        if op in self.longevity:
            # this means, is it ever used
            resloc = self.rm.force_allocate_reg(op, args + [box])
        else:
            resloc = None
        self.perform_guard(op, [loc, loc1], resloc)
        self.rm.possibly_free_var(box)
    def consider_save_exception(self, op):
        # The saved exception goes into a freshly allocated result register.
        resloc = self.rm.force_allocate_reg(op)
        self.perform(op, [], resloc)
    consider_save_exc_class = consider_save_exception
    def consider_restore_exception(self, op):
        # Both operands must be in registers; the op produces no result.
        args = op.getarglist()
        loc0 = self.rm.make_sure_var_in_reg(op.getarg(0), args) # exc class
        loc1 = self.rm.make_sure_var_in_reg(op.getarg(1), args) # exc instance
        self.perform_discard(op, [loc0, loc1])
    # These guards also take no operands; they only check VM/CPU state.
    consider_guard_no_overflow = consider_guard_no_exception
    consider_guard_overflow = consider_guard_no_exception
    consider_guard_not_forced = consider_guard_no_exception
    def consider_guard_value(self, op):
        # The guarded box must be in a register; its register index is
        # recorded on the descr for per-value guard counters.
        x = self.make_sure_var_in_reg(op.getarg(0))
        loc = self.assembler.cpu.all_reg_indexes[x.value]
        op.getdescr().make_a_counter_per_value(op, loc)
        y = self.loc(op.getarg(1))
        self.perform_guard(op, [x, y], None)
    def consider_guard_class(self, op):
        # arg0 (a non-constant pointer) must be in a register; arg1, the
        # expected class, may stay wherever it currently is.
        assert not isinstance(op.getarg(0), Const)
        x = self.rm.make_sure_var_in_reg(op.getarg(0))
        y = self.loc(op.getarg(1))
        self.perform_guard(op, [x, y], None)
    consider_guard_nonnull_class = consider_guard_class
    consider_guard_gc_type = consider_guard_class
    def consider_guard_is_object(self, op):
        # Needs one scratch register in addition to the checked pointer.
        x = self.make_sure_var_in_reg(op.getarg(0))
        tmp_box = TempVar()
        y = self.rm.force_allocate_reg(tmp_box, [op.getarg(0)])
        self.rm.possibly_free_var(tmp_box)
        self.perform_guard(op, [x, y], None)
    def consider_guard_subclass(self, op):
        # Like guard_is_object, plus the expected-class operand.
        x = self.make_sure_var_in_reg(op.getarg(0))
        tmp_box = TempVar()
        z = self.rm.force_allocate_reg(tmp_box, [op.getarg(0)])
        y = self.loc(op.getarg(1))
        self.rm.possibly_free_var(tmp_box)
        self.perform_guard(op, [x, y, z], None)
    def _consider_binop_part(self, op, symm=False):
        # Location selection for a two-operand op in x86 two-address form:
        # the result is forced into the same register as the first operand.
        x = op.getarg(0)
        y = op.getarg(1)
        xloc = self.loc(x)
        argloc = self.loc(y)
        # For symmetrical operations, if x is not in a reg, but y is,
        # and if x lives longer than the current operation while y dies, then
        # swap the role of 'x' and 'y'
        if (symm and not isinstance(xloc, RegLoc) and
                isinstance(argloc, RegLoc)):
            if ((x not in self.rm.longevity or
                    self.rm.longevity[x].last_usage > self.rm.position) and
                    self.rm.longevity[y].last_usage == self.rm.position):
                x, y = y, x
                argloc = self.loc(y)
        #
        args = op.getarglist()
        loc = self.rm.force_result_in_reg(op, x, args)
        return loc, argloc
    def _consider_binop(self, op):
        # Non-symmetrical binop: operand order is fixed.
        loc, argloc = self._consider_binop_part(op)
        self.perform(op, [loc, argloc], loc)
    def _consider_binop_symm(self, op):
        # Symmetrical binop: operands may be swapped (see above).
        loc, argloc = self._consider_binop_part(op, symm=True)
        self.perform(op, [loc, argloc], loc)
    def _consider_lea(self, op):
        # Address-computation form: non-destructive, so the result gets its
        # own register instead of reusing the operand's.
        x = op.getarg(0)
        loc = self.make_sure_var_in_reg(x)
        # make it possible to have argloc be == loc if x dies
        # (then LEA will not be used, but that's fine anyway)
        self.possibly_free_var(x)
        argloc = self.loc(op.getarg(1))
        resloc = self.force_allocate_reg(op)
        self.perform(op, [loc, argloc], resloc)
    def consider_int_add(self, op):
        # Adding a constant that fits in 32 bits can go through the LEA
        # path (non-destructive); otherwise use the generic symmetric binop.
        y = op.getarg(1)
        if isinstance(y, ConstInt) and rx86.fits_in_32bits(y.value):
            self._consider_lea(op)
        else:
            self._consider_binop_symm(op)
    consider_nursery_ptr_increment = consider_int_add
    def consider_int_sub(self, op):
        # Subtracting constant y is LEA with -y when that still fits 32 bits.
        y = op.getarg(1)
        if isinstance(y, ConstInt) and rx86.fits_in_32bits(-y.value):
            self._consider_lea(op)
        else:
            self._consider_binop(op)
    # Remaining integer ops map directly onto the generic binop helpers;
    # the _symm variants allow operand swapping (see _consider_binop_part).
    consider_int_mul = _consider_binop_symm
    consider_int_and = _consider_binop_symm
    consider_int_or = _consider_binop_symm
    consider_int_xor = _consider_binop_symm
    consider_int_mul_ovf = _consider_binop_symm
    consider_int_sub_ovf = _consider_binop
    consider_int_add_ovf = _consider_binop_symm
    def consider_uint_mul_high(self, op):
        # Unsigned high-word multiply: one operand is pinned to eax and the
        # result is pinned to edx (x86 wide-multiply convention); eax is
        # clobbered and therefore protected with a temporary.
        arg1, arg2 = op.getarglist()
        # should support all cases, but is optimized for (box, const)
        if isinstance(arg1, Const):
            arg1, arg2 = arg2, arg1
        self.rm.make_sure_var_in_reg(arg2, selected_reg=eax)
        l1 = self.loc(arg1)
        # l1 is a register != eax, or stack_bp; or, just possibly, it
        # can be == eax if arg1 is arg2
        assert not isinstance(l1, ImmedLoc)
        assert l1 is not eax or arg1 is arg2
        #
        # eax will be trash after the operation
        self.rm.possibly_free_var(arg2)
        tmpvar = TempVar()
        self.rm.force_allocate_reg(tmpvar, selected_reg=eax)
        self.rm.possibly_free_var(tmpvar)
        #
        self.rm.force_allocate_reg(op, selected_reg=edx)
        self.perform(op, [l1], edx)
    def consider_int_neg(self, op):
        # Destructive unary op: the result reuses the operand's register.
        res = self.rm.force_result_in_reg(op, op.getarg(0))
        self.perform(op, [res], res)
    consider_int_invert = consider_int_neg
    def consider_int_signext(self, op):
        # Sign extension: both operands stay wherever they are; the result
        # gets a fresh register.
        argloc = self.loc(op.getarg(0))
        numbytesloc = self.loc(op.getarg(1))
        resloc = self.force_allocate_reg(op)
        self.perform(op, [argloc, numbytesloc], resloc)
    def consider_int_lshift(self, op):
        # Variable shift counts must live in ecx (x86 requirement);
        # constant counts become immediates.
        if isinstance(op.getarg(1), Const):
            loc2 = self.rm.convert_to_imm(op.getarg(1))
        else:
            loc2 = self.rm.make_sure_var_in_reg(op.getarg(1), selected_reg=ecx)
        args = op.getarglist()
        loc1 = self.rm.force_result_in_reg(op, op.getarg(0), args)
        self.perform(op, [loc1, loc2], loc1)
    consider_int_rshift = consider_int_lshift
    consider_uint_rshift = consider_int_lshift
    def _consider_compop(self, op):
        # Integer/pointer comparison: the result may live in a register or
        # stay in the condition codes (force_allocate_reg_or_cc decides).
        vx = op.getarg(0)
        vy = op.getarg(1)
        arglocs = [self.loc(vx), self.loc(vy)]
        if (vx in self.rm.reg_bindings or vy in self.rm.reg_bindings or
                isinstance(vx, Const) or isinstance(vy, Const)):
            pass
        else:
            # neither operand is in a register and both are boxes: load one
            # so the comparison has a register form available
            arglocs[0] = self.rm.make_sure_var_in_reg(vx)
        loc = self.force_allocate_reg_or_cc(op)
        self.perform(op, arglocs, loc)
    consider_int_lt = _consider_compop
    consider_int_gt = _consider_compop
    consider_int_ge = _consider_compop
    consider_int_le = _consider_compop
    consider_int_ne = _consider_compop
    consider_int_eq = _consider_compop
    consider_uint_gt = _consider_compop
    consider_uint_lt = _consider_compop
    consider_uint_le = _consider_compop
    consider_uint_ge = _consider_compop
    consider_ptr_eq = consider_instance_ptr_eq = _consider_compop
    consider_ptr_ne = consider_instance_ptr_ne = _consider_compop
    def _consider_float_op(self, op):
        # Two-operand float op: the result shares the first operand's xmm
        # register (destructive SSE form).
        loc1 = self.xrm.loc(op.getarg(1))
        args = op.getarglist()
        loc0 = self.xrm.force_result_in_reg(op, op.getarg(0), args)
        self.perform(op, [loc0, loc1], loc0)
    consider_float_add = _consider_float_op # xxx could be _symm
    consider_float_sub = _consider_float_op
    consider_float_mul = _consider_float_op # xxx could be _symm
    consider_float_truediv = _consider_float_op
    def _consider_float_cmp(self, op):
        # Float comparison: at least one operand must end up in a register.
        vx = op.getarg(0)
        vy = op.getarg(1)
        arglocs = [self.loc(vx), self.loc(vy)]
        if not (isinstance(arglocs[0], RegLoc) or
                isinstance(arglocs[1], RegLoc)):
            if isinstance(vx, Const):
                arglocs[1] = self.xrm.make_sure_var_in_reg(vy)
            else:
                arglocs[0] = self.xrm.make_sure_var_in_reg(vx)
        loc = self.force_allocate_reg_or_cc(op)
        self.perform(op, arglocs, loc)
    consider_float_lt = _consider_float_cmp
    consider_float_le = _consider_float_cmp
    consider_float_eq = _consider_float_cmp
    consider_float_ne = _consider_float_cmp
    consider_float_gt = _consider_float_cmp
    consider_float_ge = _consider_float_cmp
    def _consider_float_unary_op(self, op):
        # Destructive unary float op: result reuses the operand's xmm reg.
        loc0 = self.xrm.force_result_in_reg(op, op.getarg(0))
        self.perform(op, [loc0], loc0)
    consider_float_neg = _consider_float_unary_op
    consider_float_abs = _consider_float_unary_op
    def consider_cast_float_to_int(self, op):
        # xmm source -> general-purpose result register.
        loc0 = self.xrm.make_sure_var_in_reg(op.getarg(0))
        loc1 = self.rm.force_allocate_reg(op)
        self.perform(op, [loc0], loc1)
    def consider_cast_int_to_float(self, op):
        # GPR source -> xmm result register.
        loc0 = self.rm.make_sure_var_in_reg(op.getarg(0))
        loc1 = self.xrm.force_allocate_reg(op)
        self.perform(op, [loc0], loc1)
    def consider_cast_float_to_singlefloat(self, op):
        # Needs a scratch xmm register for the conversion; the packed
        # single-float result lands in a GPR.
        loc0 = self.xrm.make_sure_var_in_reg(op.getarg(0))
        loc1 = self.rm.force_allocate_reg(op)
        tmpxvar = TempVar()
        loctmp = self.xrm.force_allocate_reg(tmpxvar) # may be equal to loc0
        self.xrm.possibly_free_var(tmpxvar)
        self.perform(op, [loc0, loctmp], loc1)
    consider_cast_singlefloat_to_float = consider_cast_int_to_float
    def consider_convert_float_bytes_to_longlong(self, op):
        # Bit-level reinterpretation of a float.  On 64-bit the result fits
        # a GPR; on 32-bit the longlong stays in an xmm register.
        if longlong.is_64_bit:
            loc0 = self.xrm.make_sure_var_in_reg(op.getarg(0))
            loc1 = self.rm.force_allocate_reg(op)
            self.perform(op, [loc0], loc1)
        else:
            arg0 = op.getarg(0)
            loc0 = self.xrm.loc(arg0)
            loc1 = self.xrm.force_allocate_reg(op, forbidden_vars=[arg0])
            self.perform(op, [loc0], loc1)
    def consider_convert_longlong_bytes_to_float(self, op):
        # Inverse reinterpretation: GPR source on 64-bit, xmm on 32-bit.
        if longlong.is_64_bit:
            loc0 = self.rm.make_sure_var_in_reg(op.getarg(0))
            loc1 = self.xrm.force_allocate_reg(op)
            self.perform(op, [loc0], loc1)
        else:
            arg0 = op.getarg(0)
            loc0 = self.xrm.make_sure_var_in_reg(arg0)
            loc1 = self.xrm.force_allocate_reg(op, forbidden_vars=[arg0])
            self.perform(op, [loc0], loc1)
    def _consider_llong_binop_xx(self, op):
        # must force both arguments into xmm registers, because we don't
        # know if they will be suitably aligned. Exception: if the second
        # argument is a constant, we can ask it to be aligned to 16 bytes.
        # xxx some of these operations could be '_symm'.
        args = [op.getarg(1), op.getarg(2)]
        loc1 = self.load_xmm_aligned_16_bytes(args[1])
        loc0 = self.xrm.force_result_in_reg(op, args[0], args)
        self.perform_llong(op, [loc0, loc1], loc0)
    def _consider_llong_eq_ne_xx(self, op):
        # must force both arguments into xmm registers, because we don't
        # know if they will be suitably aligned. Exception: if they are
        # constants, we can ask them to be aligned to 16 bytes.
        args = [op.getarg(1), op.getarg(2)]
        loc1 = self.load_xmm_aligned_16_bytes(args[0])
        loc2 = self.load_xmm_aligned_16_bytes(args[1], args)
        # one scratch xmm register, plus a GPR result that must have a
        # lower byte available for the SETcc-style result.
        tmpxvar = TempVar()
        loc3 = self.xrm.force_allocate_reg(tmpxvar, args)
        self.xrm.possibly_free_var(tmpxvar)
        loc0 = self.rm.force_allocate_reg(op, need_lower_byte=True)
        self.perform_llong(op, [loc1, loc2, loc3], loc0)
    def _maybe_consider_llong_lt(self, op):
        """Try the fast path for 'llong < constant 0'; return True if used."""
        # XXX just a special case for now
        box = op.getarg(2)
        if not isinstance(box, ConstFloat):
            return False
        if box.getfloat() != 0.0: # NaNs are also != 0.0
            return False
        # "x < 0.0" or maybe "x < -0.0" which is the same
        box = op.getarg(1)
        assert box.type == FLOAT
        loc1 = self.xrm.make_sure_var_in_reg(box)
        loc0 = self.rm.force_allocate_reg(op)
        self.perform_llong(op, [loc1], loc0)
        return True
    def _consider_llong_to_int(self, op):
        # accept an argument in a xmm register or in the stack
        loc1 = self.xrm.loc(op.getarg(1))
        loc0 = self.rm.force_allocate_reg(op)
        self.perform_llong(op, [loc1], loc0)
    def _loc_of_const_longlong(self, value64):
        # Wrap the raw 64-bit value as a ConstFloat so the xmm manager can
        # hand back an immediate location for it.
        c = ConstFloat(value64)
        return self.xrm.convert_to_imm(c)
def _consider_llong_from_int(self, op):
assert IS_X86_32
loc0 = self.xrm.force_allocate_reg(op)
box = op.getarg(1)
if isinstance(box, ConstInt):
loc1 = self._loc_of_const_longlong(r_longlong(box.value))
loc2 = None # unused
else:
loc1 = self.rm.make_sure_var_in_reg(box)
tmpxvar = TempVar()
loc2 = | |
"""
Manage the sparse pointlike data.
Duplicates the functionality of the C++ class BinnedPhotonData
Implements the new standard data format
http://gamma-astro-data-formats.readthedocs.io/en/latest/skymaps/healpix/index.html#hpx-bands-table
"""
import os, glob, StringIO, pickle
import healpy
from collections import Counter
import numpy as np
from astropy.io import fits
import pandas as pd
from uw.utilities import keyword_options
def set_dskeys(header,
               circle=None, #(ra=162.387, dec=24.200,radius=5),
               emin=100, emax=177.82, # must be set
               event_type=1, #the bit: 1 or 2 for front or back
               zmax=100, thetamax=66.4, #wired in for pointlike
               ):
    """Set the OGIP data-subspace (DSTYPn/DSUNIn/DSVALn/DSREFn) keys in the
    FITS header for Fermi analysis, and return the header.

    header : FITS header (or mapping with append() and item assignment)
    circle : (lon, lat, radius) tuple | None
        if set, an additional POS(RA,DEC) circular selection is written
    emin, emax : energy range in MeV
    event_type : the bit, 1 or 2, for front or back
    zmax, thetamax : zenith- and theta-angle cuts in degrees
    """
    dsvals = [
        ('BIT_MASK(EVENT_CLASS,128,P8R3)','DIMENSIONLESS','1:1'),
        ('TIME', 's','TABLE', ':GTI'),
        ('BIT_MASK(EVENT_TYPE,{},P8R3)'.format(event_type),'DIMENSIONLESS','1:1'),
        ('ENERGY', 'MeV' ,'{}:{}'.format(emin,emax), ),
        ('ZENITH_ANGLE','deg','0:{} '.format(zmax) ,),
        ('THETA', 'deg','0:{} '.format(thetamax),),
    ]
    if circle is not None:
        dsvals = dsvals +[ ('POS(RA,DEC)', 'deg ','CIRCLE({},{},{})'.format(*circle)),]
    header.append(('NDSKEYS', 0))  # placeholder; overwritten below
    # BUG FIX: the DS keyword convention is 1-based (DSTYP1..DSTYPn, with
    # NDSKEYS == n).  Enumerating from 0 wrote an invalid DSTYP0 card and
    # left NDSKEYS one short of the number of keys actually written.
    for n, x in enumerate(dsvals, 1):
        type, unit, value = x[:3]
        fn = '{:1d}'.format(n)
        header.append( ('DSTYP'+fn, type))
        header.append( ('DSUNI'+fn, unit))
        header.append( ('DSVAL'+fn , value))
        if len(x)>3:
            # optional extension reference, e.g. the GTI table for TIME
            header.append(('DSREF'+fn, x[3]))
    assert n>0
    header['NDSKEYS']=n
    return header
def roi_circle(roi_index, galactic=True, radius=5.0):
    """Return a (lon, lat, radius) tuple for the given nside=12 HEALPix ROI.

    Coordinates are galactic (l, b) by default, equatorial (ra, dec)
    otherwise.
    """
    from skymaps import Band
    center = Band(12).dir(roi_index)
    if galactic:
        return (center.l(), center.b(), radius)
    return (center.ra(), center.dec(), radius)
class GTI(object):
    """Wrapper for the GTI (Good Time Interval) HDU of a binned photon file.

    Holds the START/STOP arrays, can merge in the intervals of another
    file, and can re-emit a GTI BinTableHDU.
    """
    def __init__(self, gti_hdu):
        # gti_hdu: the GTI BinTableHDU; START/STOP columns are in seconds
        self.hdu=gti_hdu
        data = gti_hdu.data
        self.start=data.START
        self.stop =data.STOP
    def add(self, other):
        """Append the intervals of another GTI object to this one."""
        self.start=np.hstack([self.start, other.start])
        self.stop =np.hstack([self.stop, other.stop])
        g = self.hdu
        # NOTE(review): assigning to the Column .array attributes may not
        # propagate into the HDU's data; make_hdu() rebuilds the table from
        # self.start/self.stop, which appears to be the path actually used
        # when writing out -- confirm before relying on self.hdu here.
        g.columns['START'].array=self.start
        g.columns['STOP'].array = self.stop
    def make_hdu(self):
        """Build a fresh GTI BinTableHDU from the (possibly merged) arrays."""
        cols = [
            fits.Column('START', format='D', unit='s',array=self.start),
            fits.Column('STOP', format='D', unit='s', array=self.stop)
        ]
        return fits.BinTableHDU.from_columns(cols,header=self.hdu.header,name='GTI')
    def __repr__(self):
        # e.g. "12 intervals from 239557417 to 239600000, 40,000 s"
        return '{} intervals from {:.0f} to {:.0f}, {:,.0f} s'.format(len(self.start),
            self.start[0], self.stop[-1], sum(self.stop-self.start))
class BandList(object):
    """The list of bands, defined by energy range and event type.

    Wraps the BANDS HDU; energies are keV in the FITS table but MeV in the
    DataFrame view returned by dataframe().
    """
    def __init__(self, bands_hdu):
        # bands_hdu : the BANDS BinTableHDU
        self.hdu = bands_hdu
        self.bands = np.asarray(bands_hdu.data)
        if 'COUNT' in bands_hdu.columns.names:
            # old format: need the per-band pixel counts to parse the pixel
            # data (negative entries mean "no pixels").
            # BUG FIX: a list comprehension replaces map(); under Python 3
            # map() returns an iterator, which np.array() cannot convert.
            self.pixelcnt = np.array(
                [0 if n < 0 else n for n in bands_hdu.data.field('COUNT')], int)
        else:
            self.pixelcnt = None
    def __repr__(self):
        df = self.dataframe()
        return '{} bands from {:.0f} to {:.0f} MeV'.format(
            len(df), df.e_min[0], df.e_max.max())
    def __getitem__(self, index):
        """Return the row (pandas Series) describing band *index*."""
        df = self.dataframe()
        return df.iloc[index]
    def make_hdu(self, photons=None, version=3):
        """Create a BANDS BinTableHDU in the new format (energies in keV)."""
        df = self.dataframe()
        band_cols = [
            fits.Column(name='NSIDE', format='J', array=df.nside),
            fits.Column(name='E_MIN', format='D', array=df.e_min*1e+3, unit='keV'),
            fits.Column(name='E_MAX', format='D', array=df.e_max*1e+3, unit='keV'),
            fits.Column(name='EVENT_TYPE', format='J', array=df.event_type),
        ]
        bands_hdu = fits.BinTableHDU.from_columns(band_cols, name='BANDS')
        bands_hdu.header.update(VERSION=version)
        return bands_hdu
    def dataframe(self):
        """Return a DataFrame with columns e_min, e_max (MeV), event_type, nside."""
        data = self.hdu.data
        if self.pixelcnt is None:
            cdata = [data.E_MIN, data.E_MAX, data.EVENT_TYPE, data.NSIDE]
        else:
            # old format: different column names
            cdata = [self.hdu.data.field(cname)
                     for cname in 'emin emax event_class nside'.split()]
        # (a stray no-op "cdata" expression statement was removed here)
        df = pd.DataFrame(cdata, index='e_min e_max event_type nside'.split()).T
        df.e_min /= 1e3; df.e_max /= 1e3  # convert to MeV from keV
        df['event_type'] = df.event_type.astype(int)
        df.nside = df.nside.astype(int)
        return df
class Pixels(object):
    """The list of pixels.

    Each entry is one sparse pixel: a HEALPix index (pix), the channel
    (band) index it belongs to (chn), and the number of photons in it (cnt).
    """
    def __init__(self, pixel_hdu, pixel_count=None):
        """pixel_hdu : HDU
        pixel_count : array of int | None
            number of pixels per band, from the band list in old format
        """
        self.hdu = pixel_hdu
        pixeldata = pixel_hdu.data
        if pixel_count is not None:
            # old format: make a list of channel numbers from the bands HDU
            # the old list of pixels was sorted by channel
            chn = []
            for i, c in enumerate(pixel_count):
                if c < 0: continue
                chn = chn + [i]*c
            self.pix = pixeldata.field('INDEX')
            self.cnt = pixeldata.field('COUNT')
            self.chn = np.array(chn, int)
        else:
            # read new format
            self.chn = pixeldata.field('CHANNEL') # band index
            self.pix = pixeldata.field('PIX')     # pixel index (depends on channel)
            self.cnt = pixeldata.field('VALUE')   # number of photons in bin
        self.counter = None
        self._sorted = False
    def _make_counter(self):
        # Make a Counter, with keys combined from channel and pixel id:
        # key = (channel << 32) + pixel
        if self.counter is None:
            chn = np.array(self.chn, int) # convert to int for shift
            keys = list(np.left_shift(chn, 32) + self.pix)
            self.counter = Counter(dict(zip(keys, self.cnt)))
        return self.counter
    def add(self, other):
        """combine the current list of pixels with another
        other : Pixels object
        """
        # combine the pixels by updating the Counter dictionary-like objects
        self._make_counter() # create local Counter only if adding another
        self.counter.update(other._make_counter())
    def _decode_counter(self):
        # Update the three arrays following adding another set of data
        assert self.counter is not None, 'logic failure'
        # BUG FIX: list() is required under Python 3, where dict.items()
        # returns a view that np.array() cannot turn into an (n,2) array.
        items = np.array(list(self.counter.items()), int)
        if len(items) == 0:
            # robustness: an empty counter would make items[:,0] raise
            self.chn = self.pix = self.cnt = np.zeros(0, int)
            return
        keys = items[:, 0]
        self.chn = np.right_shift(keys, 32)
        self.pix = np.bitwise_and(keys, 2**32 - 1)
        self.cnt = items[:, 1]
    def dataframe(self):
        """return a DataFrame with number of pixels and photons per channel
        """
        if self.counter is not None:
            self._decode_counter()
        channels = sorted(list(set(self.chn)))
        d = dict()
        for channel in channels:
            c = self.cnt[self.chn == channel]
            d[channel] = {'pixels': len(c), 'photons': sum(c)}
        df = pd.DataFrame(d).T[['pixels', 'photons']]
        # NOTE: 'chanel' [sic] kept for compatibility with existing readers
        df.index.name = 'chanel'
        return df
    def __getitem__(self, channel):
        """return a list of (pixel, count) pairs for the band
        """
        if not self._sorted:
            if self.counter is not None:
                self._decode_counter()
            # sort the list of pixels according to channel number (band)
            # create a lookup dictionary with limits for the pixel and count lists
            csort = self.chn.argsort()
            self.chn = self.chn[csort]
            self.pix = self.pix[csort]
            self.cnt = self.cnt[csort]
            channels = sorted(list(set(self.chn)))
            indexchan = list(np.searchsorted(self.chn, channels)) + [len(self.chn)]
            self.lookup = dict(zip(channels, zip(indexchan[:-1], indexchan[1:])))
            self._sorted = True
        try:
            a, b = self.lookup[channel]
            # list() keeps the Python-2 behaviour (zip() is lazy in Python 3)
            return list(zip(self.pix[a:b], self.cnt[a:b]))
        except KeyError:
            return [] # empty list if no entry
    def make_hdu(self):
        """ create a new HDU in new format
        """
        if self.counter is not None:
            # needed if result of combining
            self._decode_counter()
        skymap_cols = [
            fits.Column(name='PIX', format='J', array=self.pix),
            fits.Column(name='CHANNEL', format='I', array=self.chn),
            fits.Column(name='VALUE', format='J', array=self.cnt),
        ]
        skymap_hdu = fits.BinTableHDU.from_columns(skymap_cols, name='SKYMAP')
        skymap_hdu.header.update(
            PIXTYPE='HEALPIX',
            INDXSCHM='SPARSE',
            ORDERING='RING',
            COORDSYS='GAL',
            BANDSHDU='BANDS',
            AXCOLS='E_MIN,E_MAX',
        )
        return skymap_hdu
    def __repr__(self):
        if self.counter is not None:
            npix, nphot = len(self.counter), sum(self.counter.values())
        else:
            npix, nphot = len(self.cnt), sum(self.cnt)
        return '{}: {:,} pixels, {:,} photons'.format(self.__class__, npix, nphot)
class BinFile(object):
""" A Binned photon data file
Manages Two FITS tables:
* Pixels
* BandList
Implements an indexing interface. Now returns a special Band object
"""
    def __init__(self, filenames, outfile=None, adding=False, quiet=True):
        """
        filenames : a FITS file name, or a list
            if a list, combine them
        outfile : filename | Null [default]
            write the corresponding fits file
        adding : bool
            internal flag set for the recursive calls used when combining
        quiet : bool
            suppress progress printout
        """
        # NOTE(review): under Python 3 a plain string has __iter__, so a
        # single filename would be iterated character by character here;
        # this test is only correct under Python 2.
        if not hasattr(filenames, '__iter__'):
            filenames = [filenames]
        for i,filename in enumerate(filenames):
            if i==0: # first one: will add others, if any to this one
                if not quiet:print ('\n"{}" '.format(filename),)
                self.hdus=fits.open(filename)
                self.gti=GTI(self.hdus['GTI'])
                if 'PIXELS' in self.hdus:
                    # old format
                    self.bands=BandList(self.hdus['BANDS'])
                    self.pixels=Pixels(self.hdus['PIXELS'], self.bands.pixelcnt)
                else:
                    # new format
                    self.bands=BandList(self.hdus['BANDS'])
                    self.pixels=Pixels(self.hdus['SKYMAP'])
                if not quiet: print (self.pixels.__repr__(),)
            else:
                # subsequent files are loaded recursively and merged in
                self.add(BinFile(filename, adding=True))
                if not quiet: print (self.pixels.__repr__(),)
        if not adding:
            if not quiet: print ()
            if outfile is not None:
                self.writeto(outfile)
    def fits_info(self):
        """Return the HDU summary (HDUList.info output) as a string."""
        # NOTE(review): uses the Python-2-only StringIO module imported at
        # the top of the file; under Python 3 this needs io.StringIO.
        output= StringIO.StringIO()
        self.hdus.info(output)
        return output.getvalue()
    def __repr__(self):
        # HDU summary followed by the GTI, band and pixel summaries.
        out = self.fits_info()
        out += 'GTI: '+ self.gti.__repr__()
        out += '\nBands: '+self.bands.__repr__()
        out += '\nPixels: '+self.pixels.__repr__()
        return out
    def __getitem__(self, index):
        """ return a skymaps.Band C++ object corresponding to the band index
        This object implements query_disk for extracting the pixels within a given ROI.
        """
        from skymaps import Band
        b = self.bands[index]
        # Band(nside, event_type, e_min, e_max, ...); the sparse pixels for
        # this band are then added one at a time.
        bb = Band(int(b.nside), int(b.event_type), b.e_min, b.e_max, 0,0)
        # this is an unfortunate loop: need to consider an interface for adding a set of pixels
        for pix, cnt in self.pixels[index]:
            bb.add(int(pix), int(cnt))
        return bb
    # number of bands in the file
    def __len__(self): return len(self.bands.bands)
def add(self, other):
""" other : BinFile object
"""
# combine the pixel and GTI arrays
self.pixels.add(other.pixels)
self.gti.add(other.gti)
def dataframe(self):
"""return a DataFrame with Band info and Pixel summary
"""
dfb = self.bands.dataframe()
dfp = self.pixels.dataframe()
df = pd.concat([dfb,dfp], axis=1)
df.index.name='band'
# this just to replace NaN's for bands missing in pixel list with zeros
pixcnt = df.pixels.copy()
gcnt = df.photons.copy()
missing = pd.isnull(pixcnt)
pixcnt[missing]=0
gcnt[missing]=0
df.pixels=pixcnt.astype(int)
df.photons=gcnt.astype(int)
return df
def writeto(self, filename, overwrite=True):
"""write to a file
"""
gti_hdu = self.gti.make_hdu()
bands_hdu=self.bands.make_hdu()
pixels_hdu = self.pixels.make_hdu()
hdus=[self.hdus[0], pixels_hdu, bands_hdu, gti_hdu]
fits.HDUList(hdus).writeto(filename, overwrite=overwrite)
print ('wrote file {}'.format(filename))
def photonCount(self):
""" method to be consistent with skymaps.BinnedPhotonData
"""
return sum(self.pixels.cnt)
def roi_subset(self, roi_number, channel, radius=5):
"""Return a tuple:
(l,b,radius), nside, DataFrame with data values for the HEALPix pixels within the pointlike ROI
Creates empty pixels if no data in the pixel (input is sparse, output not)
"""
skymap = self.hdus['SKYMAP'].data
nside = self.hdus['BANDS'].data.NSIDE[channel]
# select pixels for the given channel
def select_pixels(channel):
sel = skymap.CHANNEL==channel
return pd.DataFrame( np.array(skymap.VALUE[sel], int), index=np.array(skymap.PIX[sel],int), columns=['value'])
# the set of pixels in an | |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# C++ version Copyright (c) 2006-2011 <NAME> http://www.box2d.org
# Python port by <NAME> / http://pybox2d.googlecode.com
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
from __future__ import absolute_import
# Public API of this module.
__all__ = ('Island', )
# Legacy version-control keyword metadata (expanded by SVN in the original project).
__version__ = "$Revision: 349 $"
__date__ = "$Date: 2011-07-08 19:40:46 -0400 (Fri, 08 Jul 2011) $"
# $Source$
from copy import copy
from .common import (clamp, Vec2, property)
from .contact import (ContactSolver, )
from . import settings
# Module-level aliases of the solver tuning constants defined in `settings`,
# used throughout the Island solver below.
MAX_TRANSLATION = settings.MAX_TRANSLATION
MAX_TRANSLATION_SQR = settings.MAX_TRANSLATION_SQR
MAX_ROTATION = settings.MAX_ROTATION
MAX_ROTATION_SQR = settings.MAX_ROTATION_SQR
BAUMGARTE = settings.BAUMGARTE
TOI_BAUMGARTE = settings.TOI_BAUMGARTE
MAX_FLOAT = settings.MAX_FLOAT
ANGULAR_SLEEP_TOLERANCE_SQR = settings.ANGULAR_SLEEP_TOLERANCE_SQR
LINEAR_SLEEP_TOLERANCE_SQR = settings.LINEAR_SLEEP_TOLERANCE_SQR
TIME_TO_SLEEP = settings.TIME_TO_SLEEP
MAX_SUB_STEPS = settings.MAX_SUB_STEPS
MAX_TOI_CONTACTS = settings.MAX_TOI_CONTACTS
class Island(object):
    """This is an internal class.

    An island is a connected group of bodies, contacts, and joints that the
    world solver integrates and constraint-solves as a single unit.
    """
    # TODO slots just for debugging
    __slots__ = ['_body_capacity', '_contact_capacity', '_joint_capacity', 'post_solve', 'bodies', 'contacts',
                 'joints']

    def __init__(self, body_capacity, contact_capacity, joint_capacity, post_solve):
        """
        body_capacity / contact_capacity / joint_capacity : sizing hints kept
            for parity with the C++ implementation (lists grow dynamically).
        post_solve : callable(contact, impulses) invoked by report(), or a
            falsy value to disable reporting.
        """
        self._body_capacity = body_capacity
        self._contact_capacity = contact_capacity
        self._joint_capacity = joint_capacity
        self.post_solve = post_solve
        self.bodies = []
        self.contacts = []
        self.joints = []

    def clear(self):
        """Empty the body/contact/joint lists in place (the lists are reused)."""
        del self.bodies[:]
        del self.contacts[:]
        del self.joints[:]

    def solve(self, step, gravity, allow_sleep):
        """Advance the island by one full step.

        Integrates velocities (with damping), solves velocity constraints,
        integrates positions, solves position constraints, copies the
        results back to the bodies, and optionally puts resting bodies to
        sleep.
        """
        dt = step.dt
        positions = []
        velocities = []
        # Integrate velocities and apply damping. Initialize the body state.
        for body in self.bodies:
            c, a = body._sweep.c, body._sweep.a
            v, w = body._linear_velocity, body._angular_velocity
            # Store positions for continuous collision
            body._sweep.c0 = Vec2(*c)
            body._sweep.a0 = a
            if body.dynamic:
                # Integrate velocities.
                v += dt * (body._gravity_scale * gravity + body._inv_mass * body._force)
                w += dt * body._invI * body._torque
                # Apply damping.
                # ODE: dv/dt + c * v = 0
                # Solution: v(t) = v0 * exp(-c * t)
                # Time step: v(t + dt) = v0 * exp(-c * (t + dt)) = v * exp(-c * dt)
                # v2 = exp(-c * dt) * v1
                # Taylor expansion:
                # v2 = (1.0 - c * dt) * v1
                v *= clamp(1.0 - dt * body._linear_damping, 0.0, 1.0)
                w *= clamp(1.0 - dt * body._angular_damping, 0.0, 1.0)
            positions.append((c, a))
            velocities.append((v, w))
        # Initialize velocity constraints.
        contact_solver = ContactSolver(step, self.contacts, positions, velocities)
        # The same position and velocity lists are stored in the ContactSolver,
        # so subsequent calls to solve_* do not require them as parameters.
        contact_solver.initialize_velocity_constraints()
        if step.warm_starting:
            contact_solver.warm_start()
        for joint in self.joints:
            joint._init_velocity_constraints(step, positions, velocities)
        # Solve velocity constraints.
        for i in range(step.vel_iters):
            for joint in self.joints:
                joint._solve_velocity_constraints(step, positions, velocities)
            contact_solver.solve_velocity_constraints()
        # Post-solve (store impulses for warm starting).
        contact_solver.store_impulses()
        # Integrate positions.
        for i, ((c, a), (v, w)) in enumerate(zip(positions, velocities)):
            # Check for large velocities and clamp to the allowed maxima.
            translation = dt * v
            if translation.dot(translation) > MAX_TRANSLATION_SQR:
                ratio = MAX_TRANSLATION / translation.length
                v *= ratio
            rotation = dt * w
            if rotation ** 2 > MAX_ROTATION_SQR:
                # print(w, self.bodies[i].user_data) # TODO find what's causing this with the gear joint
                ratio = MAX_ROTATION / abs(rotation)
                w *= ratio
            # Integrate
            c += dt * v
            a += dt * w
            positions[i] = (c, a)
            velocities[i] = (v, w)
        # Solve position constraints
        position_solved = False
        for i in range(step.pos_iters):
            contacts_okay = contact_solver.solve_position_constraints()
            joints_okay = True
            for joint in self.joints:
                joint_okay = joint._solve_position_constraints(step, positions, velocities)
                joints_okay = joints_okay and joint_okay
            if contacts_okay and joints_okay:
                # Exit early if the position errors are small.
                position_solved = True
                break
        # Copy state buffers back to the bodies
        for body, pos, vel in zip(self.bodies, positions, velocities):
            body._sweep.c, body._sweep.a = pos
            body._linear_velocity, body._angular_velocity = vel
            body._synchronize_transform()
        self.report(contact_solver.velocity_constraints)
        if allow_sleep:
            min_sleep_time = MAX_FLOAT
            non_static_bodies = [body for body in self.bodies if not body.static]
            for b in non_static_bodies:
                # BUGFIX: this previously tested the stale variable `body`
                # (left over from the copy-back loop above), so the sleep
                # criteria were evaluated against the wrong body.
                if not b._allow_sleep or \
                        (b._angular_velocity ** 2) > ANGULAR_SLEEP_TOLERANCE_SQR or \
                        (b._linear_velocity.length_squared) > LINEAR_SLEEP_TOLERANCE_SQR:
                    b._sleep_time = 0.0
                    min_sleep_time = 0.0
                else:
                    b._sleep_time += dt
                    min_sleep_time = min(min_sleep_time, b._sleep_time)
            if min_sleep_time >= TIME_TO_SLEEP and position_solved:
                # BUGFIX: previously assigned `b.awake` (the last body from
                # the loop above) on every iteration; every body in the
                # island must be put to sleep.
                for body in self.bodies:
                    body.awake = False

    def solve_toi(self, sub_step, toi_index_a, toi_index_b):
        """Sub-step solver for a time-of-impact event between the bodies at
        indices `toi_index_a` and `toi_index_b`.
        """
        # Initialize the body state
        positions = [(body._sweep.c, body._sweep.a) for body in self.bodies]
        velocities = [(body._linear_velocity, body._angular_velocity) for body in self.bodies]
        contact_solver = ContactSolver(sub_step, self.contacts, positions, velocities)
        # Solve position constraints
        for i in range(sub_step.pos_iters):
            contacts_okay = contact_solver.solve_toi_position_constraints(toi_index_a, toi_index_b)
            if contacts_okay:
                break
        # Leap of faith to new safe state
        body_a, body_b = self.bodies[toi_index_a], self.bodies[toi_index_b]
        body_a._sweep.c0, body_a._sweep.a0 = positions[toi_index_a]
        body_b._sweep.c0, body_b._sweep.a0 = positions[toi_index_b]
        # Copy so later in-place mutation of the position vectors cannot
        # corrupt the stored sweep start points.
        body_a._sweep.c0 = copy(body_a._sweep.c0)
        body_b._sweep.c0 = copy(body_b._sweep.c0)
        # No warm starting is needed for TOI events because warm
        # starting impulses were applied in the discrete solver.
        contact_solver.initialize_velocity_constraints()
        # Solve velocity constraints.
        for i in range(sub_step.vel_iters):
            contact_solver.solve_velocity_constraints()
        # Don't store the TOI contact forces for warm starting because
        # they can be quite large.
        dt = sub_step.dt
        # Integrate positions.
        for i, (body, (c, a), (v, w)) in enumerate(zip(self.bodies, positions, velocities)):
            # Check for large velocities.
            translation = dt * v
            if translation.length_squared > MAX_TRANSLATION_SQR:
                ratio = MAX_TRANSLATION / translation.length
                v *= ratio
            rotation = dt * w
            if rotation ** 2 > MAX_ROTATION_SQR:
                ratio = MAX_ROTATION / abs(rotation)
                w *= ratio
            # Integrate
            c += dt * v
            a += dt * w
            # Sync bodies
            positions[i] = (c, a)
            velocities[i] = (v, w)
            body._sweep.c = Vec2(*c)
            body._sweep.a = a
            body._linear_velocity = Vec2(*v)
            body._angular_velocity = w
            # Compute new transform
            body._synchronize_transform()
        self.report(contact_solver.velocity_constraints)

    def report(self, constraints):
        """Invoke the post_solve callback (if any) with each contact and its
        solved (normal_impulse, tangent_impulse) pairs.
        """
        if not self.post_solve:
            return
        for contact, vc in zip(self.contacts, constraints):
            impulses = [(point.normal_impulse, point.tangent_impulse) for point in vc.points]
            self.post_solve(contact, impulses)
# -- NOTES --
"""
[Notes from the original Box2D source code]
Position Correction Notes
=========================
I tried the several algorithms for position correction of the 2D revolute joint.
I looked at these systems:
- simple pendulum (1m diameter sphere on massless 5m stick) with initial angular velocity of 100 rad/s.
- suspension bridge with 30 1m long planks of length 1m.
- multi-link chain with 30 1m long links.
Here are the algorithms:
Baumgarte - A fraction of the position error is added to the velocity error. There is no
separate position solver.
Pseudo Velocities - After the velocity solver and position integration,
the position error, Jacobian, and effective mass are recomputed. Then
the velocity constraints are solved with pseudo velocities and a fraction
of the position error is added to the pseudo velocity error. The pseudo
velocities are initialized to zero and there is no warm-starting. After
the position solver, the pseudo velocities are added to the positions.
This is also called the First Order World method or the Position LCP method.
Modified Nonlinear Gauss-Seidel (NGS) - Like Pseudo Velocities except the
position error is re-computed for each constraint and the positions are updated
after the constraint is solved. The radius vectors (aka Jacobians) are
re-computed too (otherwise the algorithm has horrible instability). The pseudo
velocity states are not needed because they are effectively zero at the beginning
of each iteration. Since we have the current position error, we allow the
iterations to terminate early if the error becomes smaller than b2_linearSlop.
Full NGS or just NGS - Like Modified NGS except the effective mass are re-computed
each time a constraint is solved.
Here are the results:
Baumgarte - this is the cheapest algorithm but it has some stability problems,
especially with the bridge. The chain links separate easily close to the root
and they jitter as they struggle to pull together. This is one of the most common
methods in the field. The big drawback is that the position correction artificially
affects the momentum, thus leading to instabilities and false bounce. I used a
bias factor of 0.2. A larger bias factor makes | |
<reponame>zanussbaum/Transformers4Rec
#
# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC
from functools import reduce
from typing import Dict, List, Optional, Union
import torch
from merlin_standard_lib import Registry, Schema
from merlin_standard_lib.utils.doc_utils import docstring_parameter
from ..block.base import BlockBase, SequentialBlock, right_shift_block
from ..typing import TabularData, TensorOrTabularData
from ..utils.torch_utils import OutputSizeMixin, calculate_batch_size_from_input_size
# Global registries that let transformations/aggregations be referenced by a
# registered string name as well as by class; see the `parse` classmethods below.
tabular_transformation_registry: Registry = Registry.class_registry("torch.tabular_transformations")
tabular_aggregation_registry: Registry = Registry.class_registry("torch.tabular_aggregations")
class TabularTransformation(OutputSizeMixin, torch.nn.Module, ABC):
    """Base class for transformations mapping `TabularData` to `TabularData`."""

    @classmethod
    def parse(cls, class_or_str):
        """Resolve a registered name or class into a transformation instance."""
        return tabular_transformation_registry.parse(class_or_str)

    def forward(self, inputs: TabularData, **kwargs) -> TabularData:
        """Subclasses must implement the dict-to-dict transformation."""
        raise NotImplementedError()
class TabularAggregation(OutputSizeMixin, torch.nn.Module, ABC):
    """Base class for aggregations collapsing `TabularData` into one `Tensor`."""

    def forward(self, inputs: TabularData) -> torch.Tensor:
        """Subclasses must implement the dict-to-tensor aggregation."""
        raise NotImplementedError()

    def _expand_non_sequential_features(self, inputs: TabularData) -> TabularData:
        """Tile 2-D (context) features along the sequence axis so that every
        feature shares the same (batch, seq_len, ...) leading dimensions."""
        shapes = {name: tensor.shape for name, tensor in inputs.items()}
        seq_shapes, seq_len = self._get_seq_features_shapes(shapes)
        if seq_shapes:
            for name in set(inputs.keys()) - set(seq_shapes.keys()):
                # Insert a sequence dim and repeat it seq_len times.
                inputs[name] = inputs[name].unsqueeze(dim=1).repeat(1, seq_len, 1)
        return inputs

    def _get_seq_features_shapes(self, inputs_sizes: Dict[str, torch.Size]):
        """Return ({name: (batch, seq_len)} for 3-D+ features, seq_len).

        seq_len is 0 when no sequential features are present; raises when
        sequential features disagree on their first two dimensions.
        """
        seq_features_shapes = {
            name: tuple(shape[:2])
            for name, shape in inputs_sizes.items()
            if len(shape) >= 3
        }
        sequence_length = 0
        if seq_features_shapes:
            if len(set(seq_features_shapes.values())) > 1:
                raise ValueError(
                    "All sequential features must share the same shape in the first two dims "
                    "(batch_size, seq_length): {}".format(seq_features_shapes)
                )
            sequence_length = next(iter(seq_features_shapes.values()))[1]
        return seq_features_shapes, sequence_length

    def _check_concat_shapes(self, inputs: TabularData):
        """Raise unless every feature agrees on all dimensions but the last."""
        input_sizes = {k: v.shape for k, v in inputs.items()}
        if len({size[:-1] for size in input_sizes.values()}) > 1:
            raise Exception(
                "All features dimensions except the last one must match: {}".format(input_sizes)
            )

    def _get_agg_output_size(self, input_size, agg_dim):
        """(batch, seq_len, agg_dim) when sequential features exist,
        otherwise (batch, agg_dim)."""
        batch_size = calculate_batch_size_from_input_size(input_size)
        seq_shapes, seq_len = self._get_seq_features_shapes(input_size)
        if seq_shapes:
            return (batch_size, seq_len, agg_dim)
        return (batch_size, agg_dim)

    @classmethod
    def parse(cls, class_or_str):
        """Resolve a registered name or class into an aggregation instance."""
        return tabular_aggregation_registry.parse(class_or_str)
# Public alias types: transformations/aggregations may be supplied as instances
# or as registry names (strings); lists of transformations are also accepted.
TabularTransformationType = Union[str, TabularTransformation]
TabularTransformationsType = Union[TabularTransformationType, List[TabularTransformationType]]
TabularAggregationType = Union[str, TabularAggregation]
class SequentialTabularTransformations(SequentialBlock):
    """A sequential container, modules will be added to it in the order they are passed in.

    Parameters
    ----------
    transformation: TabularTransformationType
        transformations that are passed in here will be called in order.
    """

    def __init__(self, *transformation: TabularTransformationsType):
        # Allow passing a single list of transformations: unwrap it so each
        # element is parsed individually.
        # BUGFIX: the previous check was `isinstance(transformation, list)`,
        # which tested the *args tuple itself and was therefore never true,
        # so a passed list was handed to parse() as one item.
        if len(transformation) == 1 and isinstance(transformation[0], list):
            transformation = transformation[0]
        super().__init__(*[TabularTransformation.parse(t) for t in transformation])

    def append(self, transformation):
        """Parse and append one more transformation to the pipeline."""
        self.transformations.append(TabularTransformation.parse(transformation))
TABULAR_MODULE_PARAMS_DOCSTRING = """
pre: Union[str, TabularTransformation, List[str], List[TabularTransformation]], optional
Transformations to apply on the inputs when the module is called (so **before** `forward`).
post: Union[str, TabularTransformation, List[str], List[TabularTransformation]], optional
Transformations to apply on the inputs after the module is called (so **after** `forward`).
aggregation: Union[str, TabularAggregation], optional
Aggregation to apply after processing the `forward`-method to output a single Tensor.
"""
@docstring_parameter(tabular_module_parameters=TABULAR_MODULE_PARAMS_DOCSTRING)
class TabularModule(torch.nn.Module):
    """PyTorch Module that's specialized for tabular-data by integrating many often used operations.

    Parameters
    ----------
    {tabular_module_parameters}
    """

    def __init__(
        self,
        pre: Optional[TabularTransformationsType] = None,
        post: Optional[TabularTransformationsType] = None,
        aggregation: Optional[TabularAggregationType] = None,
        **kwargs,
    ):
        super().__init__()
        # Set lazily by the output-size machinery elsewhere; None until then.
        self.input_size = None
        # These assignments dispatch to the property setters below, which
        # normalize strings/lists into parsed transformation objects.
        self.pre = pre  # type: ignore
        self.post = post  # type: ignore
        self.aggregation = aggregation  # type: ignore

    @classmethod
    def from_schema(cls, schema: Schema, tags=None, **kwargs) -> Optional["TabularModule"]:
        """Instantiate a TabularModule instance from a DatasetSchema.

        Parameters
        ----------
        schema : Schema
            Dataset schema describing the available columns.
        tags : optional
            When given, only columns carrying these tags are kept.
        kwargs
            Forwarded to ``cls.from_features``.

        Returns
        -------
        Optional[TabularModule]
            None when no columns survive the tag filter.
        """
        schema_copy = schema.copy()
        if tags:
            schema_copy = schema_copy.select_by_tag(tags)
        # No matching columns: signal "nothing to build" rather than create
        # an empty module.
        if not schema_copy.column_schemas:
            return None
        return cls.from_features(schema_copy.column_names, schema=schema_copy, **kwargs)

    @classmethod
    @docstring_parameter(tabular_module_parameters=TABULAR_MODULE_PARAMS_DOCSTRING, extra_padding=4)
    def from_features(
        cls,
        features: List[str],
        pre: Optional[TabularTransformationsType] = None,
        post: Optional[TabularTransformationsType] = None,
        aggregation: Optional[TabularAggregationType] = None,
    ) -> "TabularModule":
        """Initializes a TabularModule instance where the contents of features will be filtered
            out

        Parameters
        ----------
        features: List[str]
            A list of feature-names that will be used as the first pre-processing op to filter out
            all other features not in this list.
        {tabular_module_parameters}

        Returns
        -------
        TabularModule
        """
        # NOTE(review): when `pre` is itself a list this produces a nested
        # list ([FilterFeatures, [t1, t2]]) — presumably
        # SequentialTabularTransformations expects a flat sequence; confirm.
        pre = [FilterFeatures(features), pre] if pre else FilterFeatures(features)  # type: ignore
        return cls(pre=pre, post=post, aggregation=aggregation)

    @property
    def pre(self) -> Optional[SequentialTabularTransformations]:
        """Transformations applied before `forward`.

        Returns
        -------
        SequentialTabularTransformations, optional
        """
        return self._pre

    @pre.setter
    def pre(self, value: Optional[TabularTransformationsType]):
        # Falsy values (None, empty list) disable pre-processing entirely.
        if value:
            self._pre: Optional[
                SequentialTabularTransformations
            ] = SequentialTabularTransformations(value)
        else:
            self._pre = None

    @property
    def post(self) -> Optional[SequentialTabularTransformations]:
        """Transformations applied after `forward`.

        Returns
        -------
        SequentialTabularTransformations, optional
        """
        return self._post

    @post.setter
    def post(self, value: Optional[TabularTransformationsType]):
        # Falsy values (None, empty list) disable post-processing entirely.
        if value:
            self._post: Optional[
                SequentialTabularTransformations
            ] = SequentialTabularTransformations(value)
        else:
            self._post = None

    @property
    def aggregation(self) -> Optional[TabularAggregation]:
        """Aggregation used to collapse the output to a single tensor.

        Returns
        -------
        TabularAggregation, optional
        """
        return self._aggregation

    @aggregation.setter
    def aggregation(self, value: Optional[Union[str, TabularAggregation]]):
        """
        Parameters
        ----------
        value
            Aggregation instance or registered name; falsy disables aggregation.
        """
        if value:
            self._aggregation: Optional[TabularAggregation] = TabularAggregation.parse(value)
        else:
            self._aggregation = None

    def pre_forward(
        self, inputs: TabularData, transformations: Optional[TabularTransformationsType] = None
    ) -> TabularData:
        """Method that's typically called before the forward method for pre-processing.

        Parameters
        ----------
        inputs: TabularData
            input-data, typically the output of the forward method.
        transformations: TabularAggregationType, optional
            Explicit transformations; falls back to ``self.pre`` when omitted.

        Returns
        -------
        TabularData
        """
        return self._maybe_apply_transformations(
            inputs, transformations=transformations or self.pre
        )

    def forward(self, x: TabularData, *args, **kwargs) -> TabularData:
        # Identity by default; subclasses override with the real computation.
        return x

    def post_forward(
        self,
        inputs: TabularData,
        transformations: Optional[TabularTransformationsType] = None,
        merge_with: Optional[Union["TabularModule", List["TabularModule"]]] = None,
        aggregation: Optional[TabularAggregationType] = None,
    ) -> TensorOrTabularData:
        """Method that's typically called after the forward method for post-processing.

        Parameters
        ----------
        inputs: TabularData
            input-data, typically the output of the forward method.
        transformations: TabularTransformationType, optional
            Transformations to apply on the input data.
        merge_with: Union[TabularModule, List[TabularModule]], optional
            Other TabularModule's to call and merge the outputs with.
        aggregation: TabularAggregationType, optional
            Aggregation to aggregate the output to a single Tensor.

        Returns
        -------
        TensorOrTabularData (Tensor when aggregation is set, else TabularData)
        """
        _aggregation: Optional[TabularAggregation]
        # An explicit argument wins over the module-level aggregation.
        if aggregation:
            _aggregation = TabularAggregation.parse(aggregation)
        else:
            _aggregation = getattr(self, "aggregation", None)
        outputs = inputs
        if merge_with:
            if not isinstance(merge_with, list):
                merge_with = [merge_with]
            for layer_or_tensor in merge_with:
                # Merge either a module's output (callable) or a ready dict.
                to_add = layer_or_tensor(inputs) if callable(layer_or_tensor) else layer_or_tensor
                # NOTE: `outputs` aliases `inputs`, so this also mutates the
                # caller-supplied dict in place.
                outputs.update(to_add)
        outputs = self._maybe_apply_transformations(
            outputs, transformations=transformations or self.post
        )
        if _aggregation:
            # Give the aggregation access to this module's schema (if any)
            # before collapsing the dict into a single tensor.
            schema = getattr(self, "schema", None)
            _aggregation.set_schema(schema)
            return _aggregation(outputs)
        return outputs

    def __call__(
        self,
        inputs: TabularData,
        *args,
        pre: Optional[TabularTransformationsType] = None,
        post: Optional[TabularTransformationsType] = None,
        merge_with: Optional[Union["TabularModule", List["TabularModule"]]] = None,
        aggregation: Optional[TabularAggregationType] = None,
        **kwargs,
    ) -> TensorOrTabularData:
        """We overwrite the call method in order to be able to do pre- and post-processing.

        Parameters
        ----------
        inputs: TabularData
            Input TabularData.
        pre: TabularTransformationType, optional
            Transformations to apply before calling the forward method. If pre is None, this method
            will check if `self.pre` is set.
        post: TabularTransformationType, optional
            Transformations to apply after calling the forward method. If post is None, this method
            will check if `self.post` is set.
        merge_with: Union[TabularModule, List[TabularModule]]
            Other TabularModule's to call and merge the outputs with.
        aggregation: TabularAggregationType, optional
            Aggregation to aggregate the output to a single Tensor.

        Returns
        -------
        TensorOrTabularData (Tensor when aggregation is set, else TabularData)
        """
        inputs = self.pre_forward(inputs, transformations=pre)
        # This will call the `forward` method implemented by the super class.
        outputs = super().__call__(inputs, *args, **kwargs)  # noqa
        # Only dict outputs get post-processing; a forward that already
        # returned a tensor is passed through unchanged.
        if isinstance(outputs, dict):
            outputs = self.post_forward(
                outputs, transformations=post, merge_with=merge_with, aggregation=aggregation
            )
        return outputs

    def _maybe_apply_transformations(
        self,
        inputs: TabularData,
        transformations: Optional[
            Union[TabularTransformationsType, SequentialTabularTransformations]
        ] = None,
    ) -> TabularData:
        """Apply transformations to the inputs if these are defined.

        Parameters
        ----------
        inputs : TabularData
            Feature dict to transform.
        transformations : optional
            Falsy values are a no-op and return `inputs` unchanged.

        Returns
        -------
        TabularData
        """
        if transformations:
            _transformations = TabularTransformation.parse(transformations)
            return _transformations(inputs)
        return inputs

    def __rrshift__(self, other):
        # Enables `x >> module` composition via right_shift_block.
        return right_shift_block(self, other)
class FilterFeatures(TabularTransformation):
"""Module that filters out certain features from `TabularData`."
Parameters
----------
to_include: List[str]
List of features to include in the result of calling the module
pop: bool
Boolean indicating whether to pop the features to exclude from the inputs dictionary.
"""
def __init__(self, to_include: List[str], pop: bool = False):
super().__init__()
self.to_include = to_include
self.pop = pop
def forward(self, inputs: TabularData, **kwargs) -> TabularData:
"""
Parameters
----------
inputs: TabularData
Input dictionary containing features to filter.
Returns Filtered TabularData that only contains the feature-names in `self.to_include`.
-------
"""
assert isinstance(inputs, dict), "Inputs needs to be a dict"
outputs = {k: v for k, v in inputs.items() if k in self.to_include}
if self.pop:
for key in outputs.keys():
inputs.pop(key)
return outputs
def forward_output_size(self, input_shape):
"""
Parameters
| |
# Generated from Java9.g4 by ANTLR 4.7.2
from antlr4 import *
if __name__ is not None and "." in __name__:
from .Java9Parser import Java9Parser
else:
from Java9Parser import Java9Parser
# This class defines a complete generic visitor for a parse tree produced by Java9Parser.
class Java9Visitor(ParseTreeVisitor):
# Visit a parse tree produced by Java9Parser#literal.
def visitLiteral(self, ctx:Java9Parser.LiteralContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#primitiveType.
def visitPrimitiveType(self, ctx:Java9Parser.PrimitiveTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#numericType.
def visitNumericType(self, ctx:Java9Parser.NumericTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#integralType.
def visitIntegralType(self, ctx:Java9Parser.IntegralTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#floatingPointType.
def visitFloatingPointType(self, ctx:Java9Parser.FloatingPointTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#referenceType.
def visitReferenceType(self, ctx:Java9Parser.ReferenceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#classOrInterfaceType.
def visitClassOrInterfaceType(self, ctx:Java9Parser.ClassOrInterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#classType.
def visitClassType(self, ctx:Java9Parser.ClassTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#classType_lf_classOrInterfaceType.
def visitClassType_lf_classOrInterfaceType(self, ctx:Java9Parser.ClassType_lf_classOrInterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#classType_lfno_classOrInterfaceType.
def visitClassType_lfno_classOrInterfaceType(self, ctx:Java9Parser.ClassType_lfno_classOrInterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#interfaceType.
def visitInterfaceType(self, ctx:Java9Parser.InterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#interfaceType_lf_classOrInterfaceType.
def visitInterfaceType_lf_classOrInterfaceType(self, ctx:Java9Parser.InterfaceType_lf_classOrInterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#interfaceType_lfno_classOrInterfaceType.
def visitInterfaceType_lfno_classOrInterfaceType(self, ctx:Java9Parser.InterfaceType_lfno_classOrInterfaceTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeVariable.
def visitTypeVariable(self, ctx:Java9Parser.TypeVariableContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#arrayType.
def visitArrayType(self, ctx:Java9Parser.ArrayTypeContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#dims.
def visitDims(self, ctx:Java9Parser.DimsContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeParameter.
def visitTypeParameter(self, ctx:Java9Parser.TypeParameterContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeParameterModifier.
def visitTypeParameterModifier(self, ctx:Java9Parser.TypeParameterModifierContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeBound.
def visitTypeBound(self, ctx:Java9Parser.TypeBoundContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#additionalBound.
def visitAdditionalBound(self, ctx:Java9Parser.AdditionalBoundContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeArguments.
def visitTypeArguments(self, ctx:Java9Parser.TypeArgumentsContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeArgumentList.
def visitTypeArgumentList(self, ctx:Java9Parser.TypeArgumentListContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeArgument.
def visitTypeArgument(self, ctx:Java9Parser.TypeArgumentContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#wildcard.
def visitWildcard(self, ctx:Java9Parser.WildcardContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#wildcardBounds.
def visitWildcardBounds(self, ctx:Java9Parser.WildcardBoundsContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#moduleName.
def visitModuleName(self, ctx:Java9Parser.ModuleNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#packageName.
def visitPackageName(self, ctx:Java9Parser.PackageNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#typeName.
def visitTypeName(self, ctx:Java9Parser.TypeNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#packageOrTypeName.
def visitPackageOrTypeName(self, ctx:Java9Parser.PackageOrTypeNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#expressionName.
def visitExpressionName(self, ctx:Java9Parser.ExpressionNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#methodName.
def visitMethodName(self, ctx:Java9Parser.MethodNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#ambiguousName.
def visitAmbiguousName(self, ctx:Java9Parser.AmbiguousNameContext):
return self.visitChildren(ctx)
# Visit a parse tree produced by Java9Parser#compilationUnit.
# --- Auto-generated ANTLR visitor boilerplate (Java9 grammar) ---
# Each method below is the default implementation emitted by the ANTLR
# Python target for one grammar rule: it simply descends into the node's
# children via visitChildren. Subclasses override individual methods to
# handle the corresponding parse-tree node. Do not edit by hand; regenerate
# from the grammar instead.
def visitCompilationUnit(self, ctx:Java9Parser.CompilationUnitContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#ordinaryCompilation.
def visitOrdinaryCompilation(self, ctx:Java9Parser.OrdinaryCompilationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#modularCompilation.
def visitModularCompilation(self, ctx:Java9Parser.ModularCompilationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#packageDeclaration.
def visitPackageDeclaration(self, ctx:Java9Parser.PackageDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#packageModifier.
def visitPackageModifier(self, ctx:Java9Parser.PackageModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#importDeclaration.
def visitImportDeclaration(self, ctx:Java9Parser.ImportDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#singleTypeImportDeclaration.
def visitSingleTypeImportDeclaration(self, ctx:Java9Parser.SingleTypeImportDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#typeImportOnDemandDeclaration.
def visitTypeImportOnDemandDeclaration(self, ctx:Java9Parser.TypeImportOnDemandDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#singleStaticImportDeclaration.
def visitSingleStaticImportDeclaration(self, ctx:Java9Parser.SingleStaticImportDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#staticImportOnDemandDeclaration.
def visitStaticImportOnDemandDeclaration(self, ctx:Java9Parser.StaticImportOnDemandDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#typeDeclaration.
def visitTypeDeclaration(self, ctx:Java9Parser.TypeDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#moduleDeclaration.
def visitModuleDeclaration(self, ctx:Java9Parser.ModuleDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#moduleDirective.
def visitModuleDirective(self, ctx:Java9Parser.ModuleDirectiveContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#requiresModifier.
def visitRequiresModifier(self, ctx:Java9Parser.RequiresModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#classDeclaration.
def visitClassDeclaration(self, ctx:Java9Parser.ClassDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#normalClassDeclaration.
def visitNormalClassDeclaration(self, ctx:Java9Parser.NormalClassDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#classModifier.
def visitClassModifier(self, ctx:Java9Parser.ClassModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#typeParameters.
def visitTypeParameters(self, ctx:Java9Parser.TypeParametersContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#typeParameterList.
def visitTypeParameterList(self, ctx:Java9Parser.TypeParameterListContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#superclass.
def visitSuperclass(self, ctx:Java9Parser.SuperclassContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#superinterfaces.
def visitSuperinterfaces(self, ctx:Java9Parser.SuperinterfacesContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#interfaceTypeList.
def visitInterfaceTypeList(self, ctx:Java9Parser.InterfaceTypeListContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#classBody.
def visitClassBody(self, ctx:Java9Parser.ClassBodyContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#classBodyDeclaration.
def visitClassBodyDeclaration(self, ctx:Java9Parser.ClassBodyDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#classMemberDeclaration.
def visitClassMemberDeclaration(self, ctx:Java9Parser.ClassMemberDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#fieldDeclaration.
def visitFieldDeclaration(self, ctx:Java9Parser.FieldDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#fieldModifier.
def visitFieldModifier(self, ctx:Java9Parser.FieldModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#variableDeclaratorList.
def visitVariableDeclaratorList(self, ctx:Java9Parser.VariableDeclaratorListContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#variableDeclarator.
def visitVariableDeclarator(self, ctx:Java9Parser.VariableDeclaratorContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#variableDeclaratorId.
def visitVariableDeclaratorId(self, ctx:Java9Parser.VariableDeclaratorIdContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#variableInitializer.
def visitVariableInitializer(self, ctx:Java9Parser.VariableInitializerContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannType.
def visitUnannType(self, ctx:Java9Parser.UnannTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannPrimitiveType.
def visitUnannPrimitiveType(self, ctx:Java9Parser.UnannPrimitiveTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannReferenceType.
def visitUnannReferenceType(self, ctx:Java9Parser.UnannReferenceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannClassOrInterfaceType.
def visitUnannClassOrInterfaceType(self, ctx:Java9Parser.UnannClassOrInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannClassType.
def visitUnannClassType(self, ctx:Java9Parser.UnannClassTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannClassType_lf_unannClassOrInterfaceType.
def visitUnannClassType_lf_unannClassOrInterfaceType(self, ctx:Java9Parser.UnannClassType_lf_unannClassOrInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannClassType_lfno_unannClassOrInterfaceType.
def visitUnannClassType_lfno_unannClassOrInterfaceType(self, ctx:Java9Parser.UnannClassType_lfno_unannClassOrInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannInterfaceType.
def visitUnannInterfaceType(self, ctx:Java9Parser.UnannInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannInterfaceType_lf_unannClassOrInterfaceType.
def visitUnannInterfaceType_lf_unannClassOrInterfaceType(self, ctx:Java9Parser.UnannInterfaceType_lf_unannClassOrInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannInterfaceType_lfno_unannClassOrInterfaceType.
def visitUnannInterfaceType_lfno_unannClassOrInterfaceType(self, ctx:Java9Parser.UnannInterfaceType_lfno_unannClassOrInterfaceTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannTypeVariable.
def visitUnannTypeVariable(self, ctx:Java9Parser.UnannTypeVariableContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#unannArrayType.
def visitUnannArrayType(self, ctx:Java9Parser.UnannArrayTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#methodDeclaration.
def visitMethodDeclaration(self, ctx:Java9Parser.MethodDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#methodModifier.
def visitMethodModifier(self, ctx:Java9Parser.MethodModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#methodHeader.
def visitMethodHeader(self, ctx:Java9Parser.MethodHeaderContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#result.
def visitResult(self, ctx:Java9Parser.ResultContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#methodDeclarator.
def visitMethodDeclarator(self, ctx:Java9Parser.MethodDeclaratorContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#formalParameterList.
def visitFormalParameterList(self, ctx:Java9Parser.FormalParameterListContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#formalParameters.
def visitFormalParameters(self, ctx:Java9Parser.FormalParametersContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#formalParameter.
def visitFormalParameter(self, ctx:Java9Parser.FormalParameterContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#variableModifier.
def visitVariableModifier(self, ctx:Java9Parser.VariableModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#lastFormalParameter.
def visitLastFormalParameter(self, ctx:Java9Parser.LastFormalParameterContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#receiverParameter.
def visitReceiverParameter(self, ctx:Java9Parser.ReceiverParameterContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#throws_.
def visitThrows_(self, ctx:Java9Parser.Throws_Context):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#exceptionTypeList.
def visitExceptionTypeList(self, ctx:Java9Parser.ExceptionTypeListContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#exceptionType.
def visitExceptionType(self, ctx:Java9Parser.ExceptionTypeContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#methodBody.
def visitMethodBody(self, ctx:Java9Parser.MethodBodyContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#instanceInitializer.
def visitInstanceInitializer(self, ctx:Java9Parser.InstanceInitializerContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#staticInitializer.
def visitStaticInitializer(self, ctx:Java9Parser.StaticInitializerContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#constructorDeclaration.
def visitConstructorDeclaration(self, ctx:Java9Parser.ConstructorDeclarationContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#constructorModifier.
def visitConstructorModifier(self, ctx:Java9Parser.ConstructorModifierContext):
    return self.visitChildren(ctx)

# Visit a parse tree produced by Java9Parser#constructorDeclarator.
def visitConstructorDeclarator(self, ctx:Java9Parser.ConstructorDeclaratorContext):
    return self.visitChildren(ctx)
# Visit a parse | |
batch_size
batch_params = []
batch_total_rewards = []
for idx in batch_idxs:
batch_params.append(self.params[idx])
batch_total_rewards.append(self.total_rewards[idx])
return batch_params, batch_total_rewards
def append(self, observation, action, reward, terminal, training=True):
    """Append a reward to the memory

    # Argument
        observation (dict): Observation returned by environment
        action (int): Action taken to obtain this observation
        reward (float): Reward obtained by taking this action
        terminal (boolean): Is the state terminal
        training (boolean): If True, also accumulate the reward for the
            current episode (consumed later by `finalize_episode`)
    """
    super(EpisodeParameterMemory, self).append(observation, action, reward, terminal, training=training)
    if training:
        self.intermediate_rewards.append(reward)
def finalize_episode(self, params):
    """Close out the current episode.

    Stores the episode's total reward alongside the policy parameters that
    produced it, then resets the per-episode reward accumulator.
    (The previous docstring was copy-pasted from `append` and described the
    wrong arguments.)

    # Argument
        params: Policy parameters used during the episode that just ended
    """
    total_reward = sum(self.intermediate_rewards)
    self.total_rewards.append(total_reward)
    self.params.append(params)
    self.intermediate_rewards = []
@property
def nb_entries(self):
    """Number of completed episodes currently stored.

    # Returns
        Count of recorded episode rewards.
    """
    return len(self.total_rewards)
def get_config(self):
    """Return the memory's configuration.

    # Returns
        Dict of config
    """
    # Bug fix: this previously called super(SequentialMemory, self), but this
    # class does not inherit from SequentialMemory (its siblings manage
    # total_rewards/params/intermediate_rewards, i.e. EpisodeParameterMemory
    # state), so that call would raise TypeError at runtime. Name the class
    # itself so the MRO is walked correctly.
    config = super(EpisodeParameterMemory, self).get_config()
    config['limit'] = self.limit
    return config
class PartitionedRingBuffer(object):
    """Ring buffer with a write-protected leading partition.

    Slots [0, permanent_idx) hold preloaded (demonstration) entries that may
    be sampled but are never overwritten -- used for DQfD expert data. New
    entries cycle through the remaining slots only. With no partition loaded
    this behaves like a plain fixed-capacity ring buffer.
    """

    def __init__(self, maxlen):
        self.maxlen = maxlen            # total slot count
        self.length = 0                 # slots filled so far (saturates at maxlen)
        self.data = [None] * maxlen
        self.permanent_idx = 0          # size of the protected prefix
        self.next_idx = 0               # write offset within the mutable suffix

    def __len__(self):
        return self.length

    def __getitem__(self, idx):
        """Read slot `idx`, wrapping around capacity; negatives are rejected."""
        if idx < 0:
            raise KeyError()
        return self.data[idx % self.maxlen]

    def append(self, v):
        """Write `v` at the cursor inside the mutable region."""
        # Grow the logical length until capacity, then hold steady.
        self.length = min(self.length + 1, self.maxlen)
        self.data[self.permanent_idx + self.next_idx] = v
        # Wrap the cursor over the mutable suffix only, so preloaded
        # entries are never clobbered.
        mutable_slots = self.maxlen - self.permanent_idx
        self.next_idx = (self.next_idx + 1) % mutable_slots

    def load(self, load_data):
        """Fill the protected prefix with `load_data`, growing the partition."""
        assert len(load_data) < self.maxlen, "Must leave space to write new data."
        for slot, item in enumerate(load_data):
            self.length += 1
            self.data[slot] = item
            self.permanent_idx += 1
class PrioritizedMemory(Memory):
    """Proportional prioritized experience replay (Schaul et al., 2015).

    Transitions are stored in ring buffers; sampling probability is
    proportional to each transition's priority (typically its TD error),
    tracked in segment trees for O(log n) prefix-sum queries and updates.
    Importance-sampling weights compensate for the biased sampling and beta
    is annealed from start_beta to end_beta over steps_annealed steps.
    """

    def __init__(self, limit, alpha=.4, start_beta=1., end_beta=1., steps_annealed=1, **kwargs):
        """
        # Arguments
            limit (int): capacity of the replay buffer
            alpha (float): how aggressively to sample based on TD error
            start_beta (float): initial importance-sampling exponent
            end_beta (float): final importance-sampling exponent (1.0 fully
                compensates for TD-prioritized sampling)
            steps_annealed (int): steps over which beta is annealed
        """
        super(PrioritizedMemory, self).__init__(**kwargs)

        # The capacity of the replay buffer
        self.limit = limit

        # Transitions are stored in individual RingBuffers, similar to the SequentialMemory.
        self.actions = PartitionedRingBuffer(limit)
        self.rewards = PartitionedRingBuffer(limit)
        self.terminals = PartitionedRingBuffer(limit)
        self.observations = PartitionedRingBuffer(limit)

        assert alpha >= 0
        # how aggressively to sample based on TD error
        self.alpha = alpha
        # how aggressively to compensate for that sampling. This value is typically
        # annealed to stabilize training as the model converges (beta of 1.0 fully
        # compensates for TD-prioritized sampling).
        self.start_beta = start_beta
        self.end_beta = end_beta
        self.steps_annealed = steps_annealed

        # SegmentTrees need a leaf count that is a power of 2
        tree_capacity = 1
        while tree_capacity < self.limit:
            tree_capacity *= 2

        # Create SegmentTrees with this capacity
        self.sum_tree = SumSegmentTree(tree_capacity)
        self.min_tree = MinSegmentTree(tree_capacity)
        self.max_priority = 1.

        # wrapping index for interacting with the trees
        self.next_index = 0

    def append(self, observation, action, reward, terminal, training=True):
        """Store a transition, giving it maximal priority so it is sampled
        at least once. (Bug fix: the def line previously ended in a stray
        backslash that spliced the following comment onto the header.)
        """
        # super() call adds to the deques that hold the most recent info, which is
        # fed to the agent on agent.forward()
        super(PrioritizedMemory, self).append(observation, action, reward, terminal, training=training)
        if training:
            self.observations.append(observation)
            self.actions.append(action)
            self.rewards.append(reward)
            self.terminals.append(terminal)
            # The priority of each new transition is set to the maximum
            self.sum_tree[self.next_index] = self.max_priority ** self.alpha
            self.min_tree[self.next_index] = self.max_priority ** self.alpha
            # shift tree pointer index to keep it in sync with RingBuffers
            self.next_index = (self.next_index + 1) % self.limit

    def _sample_proportional(self, batch_size):
        # outputs a list of idxs to sample, based on their priorities.
        idxs = list()
        for _ in range(batch_size):
            mass = random.random() * self.sum_tree.sum(0, self.limit - 1)
            idx = self.sum_tree.find_prefixsum_idx(mass)
            idxs.append(idx)
        return idxs

    def sample(self, batch_size, beta=1.):
        """Sample a prioritized batch of experiences.

        # Returns
            A tuple whose first batch_size items are Experience namedtuples,
            followed by the list of importance-sampling weights ([-2]) and
            the sampled buffer idxs ([-1], used to update priorities later).
        """
        idxs = self._sample_proportional(batch_size)

        # importance sampling weights are a stability measure
        importance_weights = list()

        # The lowest-priority experience defines the maximum importance sampling weight
        prob_min = self.min_tree.min() / self.sum_tree.sum()
        max_importance_weight = (prob_min * self.nb_entries) ** (-beta)

        experiences = list()
        for idx in idxs:
            # Shift indices that are too small to carry a full history window.
            while idx < self.window_length + 1:
                idx += 1
            terminal0 = self.terminals[idx - 2]
            while terminal0:
                # Skip this transition because the environment was reset here. Select a new, random
                # transition and use this instead. This may cause the batch to contain the same
                # transition twice.
                idx = sample_batch_indexes(self.window_length + 1, self.nb_entries, size=1)[0]
                terminal0 = self.terminals[idx - 2]
            assert self.window_length + 1 <= idx < self.nb_entries

            # probability of sampling transition is the priority of the transition
            # over the sum of all priorities
            prob_sample = self.sum_tree[idx] / self.sum_tree.sum()
            importance_weight = (prob_sample * self.nb_entries) ** (-beta)
            # normalize weights according to the maximum value
            importance_weights.append(importance_weight / max_importance_weight)

            # Code for assembling stacks of observations and dealing with episode
            # boundaries is borrowed from SequentialMemory
            state0 = [self.observations[idx - 1]]
            for offset in range(0, self.window_length - 1):
                current_idx = idx - 2 - offset
                assert current_idx >= 1
                current_terminal = self.terminals[current_idx - 1]
                if current_terminal and not self.ignore_episode_boundaries:
                    # The previously handled observation was terminal, don't add the current one.
                    # Otherwise we would leak into a different episode.
                    break
                state0.insert(0, self.observations[current_idx])
            while len(state0) < self.window_length:
                state0.insert(0, zeroed_observation(state0[0]))
            action = self.actions[idx - 1]
            reward = self.rewards[idx - 1]
            terminal1 = self.terminals[idx - 1]
            state1 = [np.copy(x) for x in state0[1:]]
            state1.append(self.observations[idx])

            assert len(state0) == self.window_length
            assert len(state1) == len(state0)
            experiences.append(Experience(state0=state0, action=action, reward=reward,
                                          state1=state1, terminal1=terminal1))
        assert len(experiences) == batch_size

        # Return a tuple where the first batch_size items are the transitions
        # while -2 is the importance weights of those transitions and -1 is
        # the idxs of the buffer (so that we can update priorities later)
        return tuple(list(experiences) + [importance_weights, idxs])

    def update_priorities(self, idxs, priorities):
        """Adjust stored priorities after new TD errors have been computed."""
        for i, idx in enumerate(idxs):
            assert 0 <= idx < self.limit
            priority = priorities[i] ** self.alpha
            self.sum_tree[idx] = priority
            self.min_tree[idx] = priority
            self.max_priority = max(self.max_priority, priority)

    def calculate_beta(self, current_step):
        """Linearly anneal beta from start_beta toward end_beta."""
        a = float(self.end_beta - self.start_beta) / float(self.steps_annealed)
        b = float(self.start_beta)
        current_beta = min(self.end_beta, a * float(current_step) + b)
        return current_beta

    def get_config(self):
        """Return the memory's configuration.

        # Returns
            Dict of config
        """
        config = super(PrioritizedMemory, self).get_config()
        config['alpha'] = self.alpha
        config['start_beta'] = self.start_beta
        config['end_beta'] = self.end_beta
        config['beta_steps_annealed'] = self.steps_annealed
        # Bug fix: the dict was built but never returned, so callers always
        # received None. Also record the buffer capacity for round-tripping.
        config['limit'] = self.limit
        return config

    @property
    def nb_entries(self):
        """Return number of observations

        # Returns
            Number of observations
        """
        return len(self.observations)
class PartitionedMemory(Memory):
def __init__(self, limit, pre_load_data, alpha=.4, start_beta=1., end_beta=1., steps_annealed=1, **kwargs):
    """Replay memory whose leading partition is pre-loaded with expert
    (demonstration) transitions that are never overwritten (DQfD-style).

    # Arguments
        limit (int): total capacity of the replay buffer
        pre_load_data: iterable of transitions in the order recorded by
            rl.utils.record_demo_data(); each entry is indexed as
            (observation, action, reward, terminal)
        alpha (float): how aggressively to sample based on TD error
        start_beta, end_beta (float): importance-sampling exponent schedule
        steps_annealed (int): steps over which beta is annealed
    """
    super(PartitionedMemory, self).__init__(**kwargs)

    # The capacity of the replay buffer
    self.limit = limit

    # Transitions are stored in individual PartitionedRingBuffers.
    self.actions = PartitionedRingBuffer(limit)
    self.rewards = PartitionedRingBuffer(limit)
    self.terminals = PartitionedRingBuffer(limit)
    self.observations = PartitionedRingBuffer(limit)

    assert alpha >= 0
    # how aggressively to sample based on TD error
    self.alpha = alpha
    # how aggressively to compensate for that sampling.
    self.start_beta = start_beta
    self.end_beta = end_beta
    self.steps_annealed = steps_annealed

    # SegmentTrees need a leaf count that is a power of 2
    tree_capacity = 1
    while tree_capacity < self.limit:
        tree_capacity *= 2

    # Create SegmentTrees with this capacity
    self.sum_tree = SumSegmentTree(tree_capacity)
    self.min_tree = MinSegmentTree(tree_capacity)
    self.max_priority = 1.

    # unpack the expert transitions (assumes order recorded by the rl.utils.record_demo_data() method)
    demo_obs, demo_acts, demo_rews, demo_ts = [], [], [], []
    self.pre_load_data = pre_load_data
    for demo in self.pre_load_data:
        demo_obs.append(demo[0])
        demo_acts.append(demo[1])
        demo_rews.append(demo[2])
        demo_ts.append(demo[3])

    # pre-load the demonstration data into the protected partition of each buffer
    self.observations.load(demo_obs)
    self.actions.load(demo_acts)
    self.rewards.load(demo_rews)
    self.terminals.load(demo_ts)

    # All buffers must agree on the size of the protected partition.
    self.permanent_idx = self.observations.permanent_idx
    assert self.permanent_idx == self.rewards.permanent_idx

    # wrapping index (relative to permanent_idx) for new transitions
    self.next_index = 0

    # Seed every demonstration slot with maximum priority so each expert
    # transition is sampled at least once.
    for idx in range(self.permanent_idx):
        self.sum_tree[idx] = (self.max_priority ** self.alpha)
        self.min_tree[idx] = (self.max_priority ** self.alpha)
def append(self, observation, action, reward, terminal, training=True):
    """Record one transition, giving it maximal priority.

    The super() call updates the short-term deques consumed by
    agent.forward(); the ring buffers and priority trees are only touched
    while training.
    """
    super(PartitionedMemory, self).append(observation, action, reward, terminal, training=training)
    if not training:
        return
    self.observations.append(observation)
    self.actions.append(action)
    self.rewards.append(reward)
    self.terminals.append(terminal)
    # New transitions enter at maximum priority so each is sampled at least once.
    tree_slot = self.next_index + self.permanent_idx
    seed_priority = self.max_priority ** self.alpha
    self.sum_tree[tree_slot] = seed_priority
    self.min_tree[tree_slot] = seed_priority
    # Advance the write cursor over the non-permanent region only, keeping it
    # in sync with the PartitionedRingBuffers.
    self.next_index = (self.next_index + 1) % (self.limit - self.permanent_idx)
def sample_proportional(self, batch_size):
    """
    Outputs a list of idxs to sample, based on their priorities.
    This function is public in this memory (vs. private in Sequential and
    Prioritized), because DQfD needs to be able to sample the same idxs
    twice (single step and n-step).
    """
    # For each requested index: draw a random mass in [0, total priority)
    # and locate the transition whose prefix-sum bucket contains it.
    return [
        self.sum_tree.find_prefixsum_idx(
            random.random() * self.sum_tree.sum(0, self.limit - 1)
        )
        for _ in range(batch_size)
    ]
def sample_by_idxs(self, idxs, batch_size, beta=1., nstep=1, gamma=1):
"""
Gathers transition data from the ring | |
<gh_stars>100-1000
from collections import defaultdict
import numpy as np
import torch
import torch.nn as nn
from metal.utils import move_to_device, recursive_merge_dicts, set_seed
# Default model-level configuration; per-instance overrides are merged in
# via kwargs at construction time.
model_defaults = {
    "seed": None,  # None -> a random seed is drawn at init time
    "device": 0,  # gpu id (int) or -1 for cpu
    "verbose": True,
    "fp16": False,
    "model_weights": None,  # the path to a saved checkpoint to initialize with
}
class MetalModel(nn.Module):
"""A dynamically constructed discriminative classifier
Args:
tasks: a list of Task objects which bring their own (named) modules
We currently support up to N input modules -> middle layers -> up to N heads
TODO: Accept specifications for more exotic structure (e.g., via user-defined graph)
"""
def __init__(self, tasks, **kwargs):
    """Build the multi-task network described by `tasks`.

    Args:
        tasks: list of Task objects, each supplying named input/middle/head
            modules plus loss and output functions
        **kwargs: config overrides merged into `model_defaults`
    """
    # NOTE(review): self.config is assigned *before* super().__init__();
    # this relies on nn.Module.__setattr__ falling through for plain values
    # when the module machinery is not yet initialized -- confirm against
    # the pinned torch version.
    self.config = recursive_merge_dicts(model_defaults, kwargs, misses="insert")

    # Set random seed before initializing module weights
    if self.config["seed"] is None:
        self.config["seed"] = np.random.randint(1e6)
    set_seed(self.config["seed"])

    super().__init__()

    # Build network
    self._build(tasks)
    self.task_map = {task.name: task for task in tasks}

    # Load weights
    if self.config["model_weights"]:
        self.load_weights(self.config["model_weights"])

    # Half precision
    if self.config["fp16"]:
        print("metal_model.py: Using fp16")
        self.half()

    # Move model to device now, then move data to device in forward() or calculate_loss()
    if self.config["device"] >= 0:
        if torch.cuda.is_available():
            if self.config["verbose"]:
                print("Using GPU...")
            self.to(torch.device(f"cuda:{self.config['device']}"))
        else:
            # Requested a GPU but none is available: stay on cpu.
            if self.config["verbose"]:
                print("No cuda device available. Using cpu instead.")

    # Show network
    if self.config["verbose"]:
        print("\nNetwork architecture:")
        print(self)
        print()
        num_params = sum(p.numel() for p in self.parameters() if p.requires_grad)
        print(f"Total number of parameters: {num_params}")
def _build(self, tasks):
"""Iterates over tasks, adding their input_modules and head_modules"""
# TODO: Allow more flexible specification of network structure
self.input_modules = nn.ModuleDict(
{task.name: nn.DataParallel(task.input_module) for task in tasks}
)
self.middle_modules = nn.ModuleDict(
{task.name: nn.DataParallel(task.middle_module) for task in tasks}
)
self.head_modules = nn.ModuleDict(
{task.name: nn.DataParallel(task.head_module) for task in tasks}
)
self.loss_hat_funcs = {task.name: task.loss_hat_func for task in tasks}
self.output_hat_funcs = {task.name: task.output_hat_func for task in tasks}
def forward(self, X, task_names):
    """Run the requested task heads and return their outputs by task name.

    Each task's batch flows input_module -> middle_module -> head_module.
    Module outputs are cached keyed on the module object itself, so a module
    shared between tasks is evaluated only once per call.

    Args:
        X: a [batch_size, ...] batch from a DataLoader
        task_names: iterable of task names to evaluate
    Returns:
        output_dict: {task_name (str): output (Tensor)}
    """
    batch = move_to_device(X, self.config["device"])
    cache = {}

    def _cached(module, inp):
        # Evaluate `module` at most once per forward() call.
        if module not in cache:
            cache[module] = module(inp)
        return cache[module]

    # TODO: Replace this naive caching scheme with a more intelligent and
    # feature-complete approach where arbitrary DAGs of modules are specified
    # and we only cache things that will be reused by another task
    for name in task_names:
        # Extra .module access is to get past the DataParallel wrapper
        trunk_out = _cached(self.input_modules[name].module, batch)
        middle_out = _cached(self.middle_modules[name].module, trunk_out)
        _cached(self.head_modules[name].module, middle_out)

    return {t: cache[self.head_modules[t].module] for t in task_names}
def calculate_loss(self, X, Ys, payload_name, labels_to_tasks):
    """Returns a dict of {task_name: loss (a FloatTensor scalar)}.

    Args:
        X: an appropriate input for forward(), either a Tensor or tuple
        Ys: a dict of {task_name: labels} where labels is [n, ?]
        payload_name: payload identifier, used to build the loss-dict keys
        labels_to_tasks: a dict of {label_name: task_name} indicating which task
            head to use to calculate the loss for each labelset.
    Returns:
        loss_dict: {"task/payload/label/loss": multiplier-scaled loss tensor}
        count_dict: {same key: number of active (labeled) examples in the batch}
    """
    task_names = set(labels_to_tasks.values())
    outputs = self.forward(X, task_names)
    loss_dict = {}  # Stores the loss by task
    count_dict = {}  # Stores the number of active examples by task
    for label_name, task_name in labels_to_tasks.items():
        loss_name = f"{task_name}/{payload_name}/{label_name}/loss"
        Y = Ys[label_name]
        assert isinstance(Y, torch.Tensor)
        out = outputs[task_name]
        # Identify which instances have at least one non-zero target labels
        active = torch.any(Y.detach() != 0, dim=1)
        count_dict[loss_name] = active.sum().item()
        # If there are inactive instances, slice them out to save computation
        # and ignore their contribution to the loss
        if 0 in active:
            Y = Y[active]
            if isinstance(out, torch.Tensor):
                out = out[active]
            # If the output of the head has multiple fields, slice them all
            elif isinstance(out, dict):
                # NOTE(review): move_to_device is called here without an explicit
                # device argument -- confirm its default matches self.config["device"]
                out = move_to_device({k: v[active] for k, v in out.items()})
        # Convert to half precision last thing if applicable
        if self.config["fp16"] and Y.dtype == torch.float32:
            # NOTE(review): this assumes `out` is a dict with a "data" field when
            # fp16 is enabled; a plain-Tensor head output would raise here -- verify
            out["data"] = out["data"].half()
            Y = Y.half()
        # If no examples in this batch have labels for this task, skip loss calc
        # Active has type torch.uint8; avoid overflow with long()
        if active.long().sum():
            label_loss = self.loss_hat_funcs[task_name](
                out, move_to_device(Y, self.config["device"])
            )
            assert isinstance(label_loss.item(), float)
            # Scale by the task's configured loss multiplier before recording.
            loss_dict[loss_name] = (
                label_loss * self.task_map[task_name].loss_multiplier
            )
    return loss_dict, count_dict
@torch.no_grad()
def calculate_probs(self, X, task_names):
    """Returns a dict of {task_name: probs}

    Args:
        X: instances to feed through the network
        task_names: the names of the tasks for which to calculate outputs
    Returns:
        {task_name: probs}: probs is the output of the output_hat for the given
            task_head
    The type of each entry in probs depends on the task type:
        instance-based tasks: each entry in probs is a [k]-len array
        token-based tasks: each entry is a [seq_len, k] array
    """
    # Bug fix: this was `assert self.eval()`. eval() must be *called* for its
    # side effect (switching to inference mode); wrapping it in an assert
    # means the call disappears entirely under `python -O`.
    self.eval()
    return {
        t: [probs.cpu().numpy() for probs in self.output_hat_funcs[t](out)]
        for t, out in self.forward(X, task_names).items()
    }
def update_config(self, update_dict):
    """Updates self.config with the values in a given update dictionary."""
    # NOTE(review): assumes recursive_merge_dicts returns the merged dict
    # (self.config is rebound, not mutated in place) -- confirm in metal.utils.
    self.config = recursive_merge_dicts(self.config, update_dict)
def load_weights(self, model_path):
    """Load model weights from checkpoint."""
    # Map checkpoint tensors onto the configured device (cuda:<id> or cpu).
    if self.config["device"] >= 0:
        device = torch.device(f"cuda:{self.config['device']}")
    else:
        device = torch.device("cpu")
    try:
        # Checkpoints store the state dict under the "model" key.
        self.load_state_dict(torch.load(model_path, map_location=device)["model"])
    except RuntimeError:
        # Key mismatch between checkpoint and current architecture: fall back
        # to a non-strict load so overlapping weights are still applied.
        print("Your destination state dict has different keys for the update key.")
        self.load_state_dict(
            torch.load(model_path, map_location=device)["model"], strict=False
        )
def save_weights(self, model_path):
    """Saves weight in checkpoint directory"""
    # NOTE(review): intentionally unimplemented on this class -- confirm where
    # checkpoint saving is expected to happen before relying on this method.
    raise NotImplementedError
@torch.no_grad()
def score(self, payload, metrics=[], verbose=True, **kwargs):
    """Calculate the requested metrics for the given payload

    Args:
        payload: a Payload to score
        metrics: a list of full metric names, a single full metric name, or []:
            list: a list of full metric names supported by the tasks' Scorers.
                (full metric names are of the form task/payload/labelset/metric)
                Only these metrics will be calculated and returned.
            []: defaults to all supported metrics for the given payload's Tasks
            str: a single full metric name
                A single score will be returned instead of a dictionary

    Returns:
        scores: a dict of the form {metric_name: score} corresponding to the
            requested metrics (optionally a single score if metrics is a string
            instead of a list)
    """
    # NOTE: `metrics=[]` is a mutable default, but it is only ever read here,
    # never mutated, so it is safe.
    self.eval()
    return_unwrapped = isinstance(metrics, str)

    # If no specific metrics were requested, calculate all available metrics
    if metrics:
        metrics_list = metrics if isinstance(metrics, list) else [metrics]
        assert all(len(metric.split("/")) == 4 for metric in metrics_list)
        target_metrics = defaultdict(list)
        target_tasks = []
        target_labels = []
        # Bug fix: iterate the normalized list. Iterating `metrics` directly
        # walked over the *characters* when a single metric string was passed,
        # crashing on the 4-way unpack below.
        for full_metric_name in metrics_list:
            task_name, payload_name, label_name, metric_name = full_metric_name.split(
                "/"
            )
            target_tasks.append(task_name)
            target_labels.append(label_name)
            target_metrics[label_name].append(metric_name)
    else:
        target_tasks = set(payload.labels_to_tasks.values())
        target_labels = set(payload.labels_to_tasks.keys())
        # None -> "all metrics the scorer supports" for every labelset.
        target_metrics = {
            label_name: None for label_name in payload.labels_to_tasks
        }

    Ys, Ys_probs, Ys_preds = self.predict_with_gold(
        payload, target_tasks, target_labels, return_preds=True, **kwargs
    )

    metrics_dict = {}
    for label_name, task_name in payload.labels_to_tasks.items():
        scorer = self.task_map[task_name].scorer
        task_metrics_dict = scorer.score(
            Ys[label_name],
            Ys_probs[task_name],
            Ys_preds[task_name],
            target_metrics=target_metrics[label_name],
        )
        # Expand short metric names into full metric names
        for metric_name, score in task_metrics_dict.items():
            full_metric_name = (
                f"{task_name}/{payload.name}/{label_name}/{metric_name}"
            )
            metrics_dict[full_metric_name] = score

    # If a single metric was given as a string (not list), return a float
    if return_unwrapped:
        metric, score = metrics_dict.popitem()
        return score
    else:
        return metrics_dict
@torch.no_grad()
def predict_with_gold(
self,
payload,
target_tasks=None,
target_labels=None,
return_preds=False,
max_examples=0,
**kwargs,
):
"""Extracts Y and calculates Y_prods, Y_preds for the given payload and tasks
To get just the probabilities or predictions for a single task, consider using
predict() or predict_probs().
Args:
payload: the Payload to make predictions for
target_tasks: if not None, predict probs only for the specified tasks;
otherwise, predict probs for all tasks with corresponding labelsets
in the payload
target_labels: if not None, return labels for only the specified labelsets;
otherwise, return all labelsets
return_preds: if True, also include preds in return values
max_examples: if > 0, predict for a maximum of this many examples
# TODO: consider returning Ys as tensors instead of lists (padded if necessary)
Returns:
Ys: a {label_name: Y} dict where Y is an [n] list of labels (often ints)
Ys_probs: a {task_name: Y_probs} dict where Y_probs is a [n] list | |
import random
from otp.ai.AIBase import *
from direct.distributed.ClockDelta import *
from toontown.battle.BattleBase import *
from toontown.battle.BattleCalculatorAI import *
from toontown.toonbase.ToontownBattleGlobals import *
from toontown.battle.SuitBattleGlobals import *
from pandac.PandaModules import *
from toontown.battle import BattleExperienceAI
from direct.distributed import DistributedObjectAI
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.task import Task
from direct.directnotify import DirectNotifyGlobal
from toontown.ai import DatabaseObject
from toontown.toon import DistributedToonAI
from toontown.toon import InventoryBase
from toontown.toonbase import ToontownGlobals
from toontown.toon import NPCToons
from otp.ai.MagicWordGlobal import *
from toontown.pets import DistributedPetProxyAI
class DistributedBattleBaseAI(DistributedObjectAI.DistributedObjectAI, BattleBase):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleBaseAI')
    def __init__(self, air, zoneId, finishCallback = None, maxSuits = 4, bossBattle = 0, tutorialFlag = 0, interactivePropTrackBonus = -1):
        """AI-side base class for a street/building battle.

        Args:
            air: AI repository this distributed object is created on.
            zoneId: zone in which the battle takes place.
            finishCallback: optional callable invoked when the battle resolves.
            maxSuits: maximum number of suits allowed to join (default 4).
            bossBattle: nonzero when this battle is part of a boss encounter.
            tutorialFlag: nonzero when running inside the tutorial.
            interactivePropTrackBonus: gag-track index granted a bonus by a
                nearby interactive prop, or -1 for none.
        """
        DistributedObjectAI.DistributedObjectAI.__init__(self, air)
        self.serialNum = 0
        self.zoneId = zoneId
        self.maxSuits = maxSuits
        self.setBossBattle(bossBattle)
        self.tutorialFlag = tutorialFlag
        self.interactivePropTrackBonus = interactivePropTrackBonus
        self.finishCallback = finishCallback
        self.avatarExitEvents = []
        # Per-avatar response bookkeeping for join/adjust handshakes.
        self.responses = {}
        self.adjustingResponses = {}
        self.joinResponses = {}
        self.adjustingSuits = []
        self.adjustingToons = []
        self.numSuitsEver = 0
        BattleBase.__init__(self)
        self.streetBattle = 1
        self.pos = Point3(0, 0, 0)
        self.initialSuitPos = Point3(0, 0, 0)
        # Per-toon state captured at join time and settled at reward time.
        self.toonExp = {}
        self.toonOrigQuests = {}
        self.toonItems = {}
        self.toonOrigMerits = {}
        self.toonMerits = {}
        self.toonParts = {}
        self.battleCalc = BattleCalculatorAI(self, tutorialFlag)
        # NOTE(review): if an invasion and the more-XP holiday are both
        # active, the second setSkillCreditMultiplier call wins.
        if self.air.suitInvasionManager.getInvading():
            mult = getInvasionMultiplier()
            self.battleCalc.setSkillCreditMultiplier(mult)
        if self.air.holidayManager.isMoreXpHolidayRunning():
            mult = getMoreXpHolidayMultiplier()
            self.battleCalc.setSkillCreditMultiplier(mult)
        self.fsm = None
        self.clearAttacks()
        self.ignoreFaceOffDone = 0
        self.needAdjust = 0
        self.movieHasBeenMade = 0
        self.movieHasPlayed = 0
        self.rewardHasPlayed = 0
        self.movieRequested = 0
        self.ignoreResponses = 0
        self.ignoreAdjustingResponses = 0
        self.taskNames = []
        self.exitedToons = []
        self.suitsKilled = []
        self.suitsKilledThisBattle = []
        self.suitsKilledPerFloor = []
        self.suitsEncountered = []
        self.newToons = []
        self.newSuits = []
        self.numNPCAttacks = 0
        self.npcAttacks = {}
        self.pets = {}
        self.fireCount = 0
        # Main battle state machine: FaceOff/WaitForJoin -> WaitForInput ->
        # MakeMovie -> PlayMovie -> (Reward|WaitForJoin) -> Resume.
        self.fsm = ClassicFSM.ClassicFSM('DistributedBattleAI', [State.State('FaceOff', self.enterFaceOff, self.exitFaceOff, ['WaitForInput', 'Resume']),
         State.State('WaitForJoin', self.enterWaitForJoin, self.exitWaitForJoin, ['WaitForInput', 'Resume']),
         State.State('WaitForInput', self.enterWaitForInput, self.exitWaitForInput, ['MakeMovie', 'Resume']),
         State.State('MakeMovie', self.enterMakeMovie, self.exitMakeMovie, ['PlayMovie', 'Resume']),
         State.State('PlayMovie', self.enterPlayMovie, self.exitPlayMovie, ['WaitForJoin', 'Reward', 'Resume']),
         State.State('Reward', self.enterReward, self.exitReward, ['Resume']),
         State.State('Resume', self.enterResume, self.exitResume, []),
         State.State('Off', self.enterOff, self.exitOff, ['FaceOff', 'WaitForJoin'])], 'Off', 'Off')
        # Auxiliary two-state machines gating joins, runs, and adjusting.
        self.joinableFsm = ClassicFSM.ClassicFSM('Joinable', [State.State('Joinable', self.enterJoinable, self.exitJoinable, ['Unjoinable']), State.State('Unjoinable', self.enterUnjoinable, self.exitUnjoinable, ['Joinable'])], 'Unjoinable', 'Unjoinable')
        self.joinableFsm.enterInitialState()
        self.runableFsm = ClassicFSM.ClassicFSM('Runable', [State.State('Runable', self.enterRunable, self.exitRunable, ['Unrunable']), State.State('Unrunable', self.enterUnrunable, self.exitUnrunable, ['Runable'])], 'Unrunable', 'Unrunable')
        self.runableFsm.enterInitialState()
        self.adjustFsm = ClassicFSM.ClassicFSM('Adjust', [State.State('Adjusting', self.enterAdjusting, self.exitAdjusting, ['NotAdjusting', 'Adjusting']), State.State('NotAdjusting', self.enterNotAdjusting, self.exitNotAdjusting, ['Adjusting'])], 'NotAdjusting', 'NotAdjusting')
        self.adjustFsm.enterInitialState()
        self.fsm.enterInitialState()
        self.startTime = globalClock.getRealTime()
        self.adjustingTimer = Timer()
    def clearAttacks(self):
        """Reset the per-round attack choices for both toons and suits."""
        self.toonAttacks = {}
        self.suitAttacks = getDefaultSuitAttacks()
    def requestDelete(self):
        """Shut the FSM down and cancel the movie task before deletion."""
        # hasattr guard: requestDelete can arrive before __init__ finishes
        # building the fsm, or after delete() removed it -- TODO confirm.
        if hasattr(self, 'fsm'):
            self.fsm.request('Off')
        self.__removeTaskName(self.uniqueName('make-movie'))
        DistributedObjectAI.DistributedObjectAI.requestDelete(self)
    def delete(self):
        """Tear the battle down: stop FSMs/timers, release per-battle state.

        Order matters: tasks and join responses are cancelled before the
        calculator and suit back-references are released.
        """
        self.notify.debug('deleting battle')
        self.fsm.request('Off')
        self.ignoreAll()
        self.__removeAllTasks()
        del self.fsm
        del self.joinableFsm
        del self.runableFsm
        del self.adjustFsm
        self.__cleanupJoinResponses()
        self.timer.stop()
        del self.timer
        self.adjustingTimer.stop()
        del self.adjustingTimer
        self.battleCalc.cleanup()
        del self.battleCalc
        # Drop the battleTrap attribute addSuit() planted on each suit.
        for suit in self.suits:
            del suit.battleTrap
        del self.finishCallback
        # Release any pet proxies fetched for SOS pet tricks.
        for petProxy in self.pets.values():
            petProxy.requestDelete()
        DistributedObjectAI.DistributedObjectAI.delete(self)
def pause(self):
self.timer.stop()
self.adjustingTimer.stop()
def unpause(self):
self.timer.resume()
self.adjustingTimer.resume()
    def abortBattle(self):
        """Forcibly end the battle: eject every toon and jump to Resume."""
        self.notify.debug('%s.abortBattle() called.' % self.doId)
        # Iterate over a copy: __removeToon mutates self.toons.
        toonsCopy = self.toons[:]
        for toonId in toonsCopy:
            self.__removeToon(toonId)
            if self.fsm.getCurrentState().getName() == 'PlayMovie' or self.fsm.getCurrentState().getName() == 'MakeMovie':
                # Mid-movie aborts must record the toon as exited so the
                # reward phase skips it.
                self.exitedToons.append(toonId)
        self.d_setMembers()
        self.b_setState('Resume')
        self.__removeAllTasks()
        self.timer.stop()
        self.adjustingTimer.stop()
    def __removeSuit(self, suit):
        """Drop a suit from all membership lists and flag the change."""
        self.notify.debug('__removeSuit(%d)' % suit.doId)
        self.suits.remove(suit)
        self.activeSuits.remove(suit)
        if self.luredSuits.count(suit) == 1:
            self.luredSuits.remove(suit)
        self.suitGone = 1
        # Release the trap attribute addSuit() planted on this suit.
        del suit.battleTrap
def findSuit(self, id):
for s in self.suits:
if s.doId == id:
return s
return None
def __removeTaskName(self, name):
if self.taskNames.count(name):
self.taskNames.remove(name)
self.notify.debug('removeTaskName() - %s' % name)
taskMgr.remove(name)
def __removeAllTasks(self):
for n in self.taskNames:
self.notify.debug('removeAllTasks() - %s' % n)
taskMgr.remove(n)
self.taskNames = []
def __removeToonTasks(self, toonId):
name = self.taskName('running-toon-%d' % toonId)
self.__removeTaskName(name)
name = self.taskName('to-pending-av-%d' % toonId)
self.__removeTaskName(name)
    def getLevelDoId(self):
        # Base battles have no owning level; presumably overridden by
        # building/level battle subclasses -- TODO confirm.
        return 0
    def getBattleCellId(self):
        # Base battles have no battle cell; presumably overridden where
        # cell-based positioning applies -- TODO confirm.
        return 0
def getPosition(self):
self.notify.debug('getPosition() - %s' % self.pos)
return [self.pos[0], self.pos[1], self.pos[2]]
def getInitialSuitPos(self):
p = []
p.append(self.initialSuitPos[0])
p.append(self.initialSuitPos[1])
p.append(self.initialSuitPos[2])
return p
    def setBossBattle(self, bossBattle):
        """Record whether this battle is part of a boss encounter."""
        self.bossBattle = bossBattle
    def getBossBattle(self):
        """Return the boss-battle flag set via setBossBattle()."""
        return self.bossBattle
    def b_setState(self, state):
        """Drive the FSM to `state` locally and broadcast it to clients.

        Clients receive a network timestamp slightly in the future
        (SERVER_BUFFER_TIME) so they can begin the state in sync.
        """
        self.notify.debug('network:setState(%s)' % state)
        stime = globalClock.getRealTime() + SERVER_BUFFER_TIME
        self.sendUpdate('setState', [state, globalClockDelta.localToNetworkTime(stime)])
        self.setState(state)
    def setState(self, state):
        """Transition the local battle FSM to `state` (no broadcast)."""
        self.fsm.request(state)
    def getState(self):
        """Return [current FSM state name, current network time]."""
        return [self.fsm.getCurrentState().getName(), globalClockDelta.getRealNetworkTime()]
    def d_setMembers(self):
        """Broadcast the current battle membership to clients."""
        self.notify.debug('network:setMembers()')
        self.sendUpdate('setMembers', self.getMembers())
def getMembers(self):
suits = []
for s in self.suits:
suits.append(s.doId)
joiningSuits = ''
for s in self.joiningSuits:
joiningSuits += str(suits.index(s.doId))
pendingSuits = ''
for s in self.pendingSuits:
pendingSuits += str(suits.index(s.doId))
activeSuits = ''
for s in self.activeSuits:
activeSuits += str(suits.index(s.doId))
luredSuits = ''
for s in self.luredSuits:
luredSuits += str(suits.index(s.doId))
suitTraps = ''
for s in self.suits:
if s.battleTrap == NO_TRAP:
suitTraps += '9'
elif s.battleTrap == BattleCalculatorAI.TRAP_CONFLICT:
suitTraps += '9'
else:
suitTraps += str(s.battleTrap)
toons = []
for t in self.toons:
toons.append(t)
joiningToons = ''
for t in self.joiningToons:
joiningToons += str(toons.index(t))
pendingToons = ''
for t in self.pendingToons:
pendingToons += str(toons.index(t))
activeToons = ''
for t in self.activeToons:
activeToons += str(toons.index(t))
runningToons = ''
for t in self.runningToons:
runningToons += str(toons.index(t))
self.notify.debug('getMembers() - suits: %s joiningSuits: %s pendingSuits: %s activeSuits: %s luredSuits: %s suitTraps: %s toons: %s joiningToons: %s pendingToons: %s activeToons: %s runningToons: %s' % (suits,
joiningSuits,
pendingSuits,
activeSuits,
luredSuits,
suitTraps,
toons,
joiningToons,
pendingToons,
activeToons,
runningToons))
return [suits,
joiningSuits,
pendingSuits,
activeSuits,
luredSuits,
suitTraps,
toons,
joiningToons,
pendingToons,
activeToons,
runningToons,
globalClockDelta.getRealNetworkTime()]
    def d_adjust(self):
        """Tell clients to start the adjust (reposition) animation."""
        self.notify.debug('network:adjust()')
        self.sendUpdate('adjust', [globalClockDelta.getRealNetworkTime()])
    def getInteractivePropTrackBonus(self):
        """Return the prop-boosted gag-track index, or -1 for none."""
        return self.interactivePropTrackBonus
    def getZoneId(self):
        """Return the zone this battle takes place in."""
        return self.zoneId
    def getTaskZoneId(self):
        # Zone used for quest/task credit; same as the battle zone here.
        return self.zoneId
    def d_setMovie(self):
        """Broadcast the round movie and record which cogs were met."""
        self.notify.debug('network:setMovie()')
        self.sendUpdate('setMovie', self.getMovie())
        self.__updateEncounteredCogs()
    def getMovie(self):
        """Build the round-movie payload: toon attacks then suit attacks.

        Layout: [movieHasBeenMade, activeToon ids, activeSuit ids,
        exactly 4 toon-attack records (padded with empty attacks),
        then one record per suit attack].  Mutates the stored attack
        rows in place (HP-bonus seed, taunt index).
        """
        suitIds = []
        for s in self.activeSuits:
            suitIds.append(s.doId)
        p = [self.movieHasBeenMade]
        p.append(self.activeToons)
        p.append(suitIds)
        for t in self.activeToons:
            if t in self.toonAttacks:
                ta = self.toonAttacks[t]
                index = -1
                id = ta[TOON_ID_COL]
                if id != -1:
                    index = self.activeToons.index(id)
                track = ta[TOON_TRACK_COL]
                if (track == NO_ATTACK or attackAffectsGroup(track, ta[TOON_LVL_COL])) and track != NPCSOS and track != PETSOS:
                    # Group attacks (and no-attack) have no single target.
                    target = -1
                    if track == HEAL:
                        if ta[TOON_LVL_COL] == 1:
                            # Group heal: pick the HP-bonus random seed now so
                            # clients and AI play back the same movie.
                            ta[TOON_HPBONUS_COL] = random.randint(0, 10000)
                elif track == SOS or track == NPCSOS or track == PETSOS:
                    target = ta[TOON_TGT_COL]
                elif track == HEAL:
                    # Single-target heal: target is an active-toon index.
                    if self.activeToons.count(ta[TOON_TGT_COL]) != 0:
                        target = self.activeToons.index(ta[TOON_TGT_COL])
                    else:
                        target = -1
                elif suitIds.count(ta[TOON_TGT_COL]) != 0:
                    # Offensive track: target is an active-suit index.
                    target = suitIds.index(ta[TOON_TGT_COL])
                else:
                    target = -1
                p = p + [index,
                 track,
                 ta[TOON_LVL_COL],
                 target]
                p = p + ta[4:]
            else:
                # Toon made no choice this round: emit an empty attack record.
                index = self.activeToons.index(t)
                attack = getToonAttack(index)
                p = p + attack
        # Pad to exactly 4 toon slots.
        for i in range(4 - len(self.activeToons)):
            p = p + getToonAttack(-1)
        for sa in self.suitAttacks:
            index = -1
            id = sa[SUIT_ID_COL]
            if id != -1:
                index = suitIds.index(id)
            if sa[SUIT_ATK_COL] == -1:
                targetIndex = -1
            else:
                targetIndex = sa[SUIT_TGT_COL]
                if targetIndex == -1:
                    self.notify.debug('suit attack: %d must be group' % sa[SUIT_ATK_COL])
                else:
                    # NOTE(review): toonId is computed but never used --
                    # looks like a leftover; kept as-is.
                    toonId = self.activeToons[targetIndex]
            p = p + [index, sa[SUIT_ATK_COL], targetIndex]
            sa[SUIT_TAUNT_COL] = 0
            if sa[SUIT_ATK_COL] != -1:
                suit = self.findSuit(id)
                sa[SUIT_TAUNT_COL] = getAttackTauntIndexFromIndex(suit, sa[SUIT_ATK_COL])
            p = p + sa[3:]
        return p
    def d_setChosenToonAttacks(self):
        """Broadcast the attacks the active toons have chosen so far."""
        self.notify.debug('network:setChosenToonAttacks()')
        self.sendUpdate('setChosenToonAttacks', self.getChosenToonAttacks())
def getChosenToonAttacks(self):
ids = []
tracks = []
levels = []
targets = []
for t in self.activeToons:
if t in self.toonAttacks:
ta = self.toonAttacks[t]
else:
ta = getToonAttack(t)
ids.append(t)
tracks.append(ta[TOON_TRACK_COL])
levels.append(ta[TOON_LVL_COL])
targets.append(ta[TOON_TGT_COL])
return [ids,
tracks,
levels,
targets]
    def d_setBattleExperience(self):
        """Broadcast the end-of-battle experience/reward payload."""
        self.notify.debug('network:setBattleExperience()')
        self.sendUpdate('setBattleExperience', self.getBattleExperience())
def getBattleExperience(self):
returnValue = BattleExperienceAI.getBattleExperience(4, self.activeToons, self.toonExp, self.battleCalc.toonSkillPtsGained, self.toonOrigQuests, self.toonItems, self.toonOrigMerits, self.toonMerits, self.toonParts, self.suitsKilled, self.helpfulToons)
return returnValue
def getToonUberStatus(self):
fieldList = []
uberIndex = LAST_REGULAR_GAG_LEVEL + 1
for toon in self.activeToons:
toonList = []
for trackIndex in range(MAX_TRACK_INDEX):
toonList.append(toon.inventory.numItem(track, uberIndex))
fieldList.append(encodeUber(toonList))
return fieldList
    def addSuit(self, suit):
        """Register a suit with the battle (no trap, counted as new)."""
        self.notify.debug('addSuit(%d)' % suit.doId)
        self.newSuits.append(suit)
        self.suits.append(suit)
        # battleTrap is planted here and deleted again in delete()/__removeSuit.
        suit.battleTrap = NO_TRAP
        self.numSuitsEver += 1
    def __joinSuit(self, suit):
        """Start a suit's join sequence with a server-side timeout.

        If the clients do not acknowledge within MAX_JOIN_T +
        SERVER_BUFFER_TIME, __serverJoinDone force-promotes the suit
        to pending.
        """
        self.joiningSuits.append(suit)
        toPendingTime = MAX_JOIN_T + SERVER_BUFFER_TIME
        taskName = self.taskName('to-pending-av-%d' % suit.doId)
        self.__addJoinResponse(suit.doId, taskName)
        self.taskNames.append(taskName)
        taskMgr.doMethodLater(toPendingTime, self.__serverJoinDone, taskName, extraArgs=(suit.doId, taskName))
    def __serverJoinDone(self, avId, taskName):
        """Join-timeout task body: promote the avatar to pending anyway."""
        self.notify.debug('join for av: %d timed out on server' % avId)
        self.__removeTaskName(taskName)
        self.__makeAvPending(avId)
        return Task.done
    def __makeAvPending(self, avId):
        """Move a joining toon or suit to the pending list and re-adjust.

        Args:
            avId: doId of the joining toon or suit.
        """
        self.notify.debug('__makeAvPending(%d)' % avId)
        self.__removeJoinResponse(avId)
        self.__removeTaskName(self.taskName('to-pending-av-%d' % avId))
        if self.toons.count(avId) > 0:
            self.joiningToons.remove(avId)
            self.pendingToons.append(avId)
        else:
            suit = self.findSuit(avId)
            if suit != None:
                if not suit.isEmpty():
                    if not self.joiningSuits.count(suit) == 1:
                        # Inconsistent membership: dump full state for debugging
                        # before attempting the move anyway.
                        self.notify.warning('__makeAvPending(%d) in zone: %d' % (avId, self.zoneId))
                        self.notify.warning('toons: %s' % self.toons)
                        self.notify.warning('joining toons: %s' % self.joiningToons)
                        self.notify.warning('pending toons: %s' % self.pendingToons)
                        self.notify.warning('suits: %s' % self.suits)
                        self.notify.warning('joining suits: %s' % self.joiningSuits)
                        self.notify.warning('pending suits: %s' % self.pendingSuits)
                    self.joiningSuits.remove(suit)
                    self.pendingSuits.append(suit)
            else:
                self.notify.warning('makeAvPending() %d not in toons or suits' % avId)
                return
        self.d_setMembers()
        self.needAdjust = 1
        self.__requestAdjust()
    def suitRequestJoin(self, suit):
        """Admit a suit into the battle if it is currently joinable.

        Returns:
            1 on success, 0 if the battle is unjoinable/full.
        """
        self.notify.debug('suitRequestJoin(%d)' % suit.getDoId())
        if self.suitCanJoin():
            self.addSuit(suit)
            self.__joinSuit(suit)
            self.d_setMembers()
            suit.prepareToJoinBattle()
            return 1
        else:
            self.notify.warning('suitRequestJoin() - not joinable - joinable state: %s max suits: %d' % (self.joinableFsm.getCurrentState().getName(), self.maxSuits))
            return 0
def addToon(self, avId):
self.notify.debug('addToon(%d)' % avId)
toon = self.getToon(avId)
if toon == None:
return 0
toon.stopToonUp()
event = simbase.air.getAvatarExitEvent(avId)
self.avatarExitEvents.append(event)
self.accept(event, self.__handleUnexpectedExit, extraArgs=[avId])
event = 'inSafezone-%s' % avId
self.avatarExitEvents.append(event)
self.accept(event, self.__handleSuddenExit, extraArgs=[avId, 0])
self.newToons.append(avId)
| |
of the member function whose DLL entry description is to be returned.
invKind: Specifies the kind of member identified by memid.
"""
pass
    def GetDocumentation(self, index, strName, strDocString, dwHelpContext, strHelpFile):
        """
        GetDocumentation(self: UCOMITypeInfo, index: int) -> (str, str, int, str)
        Retrieves the documentation string, the complete Help file name and path, and
        the context ID for the Help topic for a specified type description.
        index: ID of the member whose documentation is to be returned.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetFuncDesc(self, index, ppFuncDesc):
        """
        GetFuncDesc(self: UCOMITypeInfo, index: int) -> IntPtr
        Retrieves the System.Runtime.InteropServices.FUNCDESC structure that contains
        information about a specified function.
        index: Index of the function description to return.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetIDsOfNames(self, rgszNames, cNames, pMemId):
        """
        GetIDsOfNames(self: UCOMITypeInfo, rgszNames: Array[str], cNames: int) -> Array[int]
        Maps between member names and member IDs, and parameter names and parameter IDs.
        rgszNames: On successful return, an array of names to map.
        cNames: Count of names to map.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetImplTypeFlags(self, index, pImplTypeFlags):
        """
        GetImplTypeFlags(self: UCOMITypeInfo, index: int) -> int
        Retrieves the System.Runtime.InteropServices.IMPLTYPEFLAGS value for one
        implemented interface or base interface in a type description.
        index: Index of the implemented interface or base interface.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetMops(self, memid, pBstrMops):
        """
        GetMops(self: UCOMITypeInfo, memid: int) -> str
        Retrieves marshaling information.
        memid: The member ID that indicates which marshaling information is needed.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetNames(self, memid, rgBstrNames, cMaxNames, pcNames):
        """
        GetNames(self: UCOMITypeInfo, memid: int, cMaxNames: int) -> (Array[str], int)
        Retrieves the variable with the specified member ID (or the name of the
        property or method and its parameters) that correspond to the specified
        function ID.
        memid: The ID of the member whose name (or names) is to be returned.
        cMaxNames: Length of the rgBstrNames array.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetRefTypeInfo(self, hRef, ppTI):
        """
        GetRefTypeInfo(self: UCOMITypeInfo, hRef: int) -> UCOMITypeInfo
        If a type description references other type descriptions, it retrieves the
        referenced type descriptions.
        hRef: Handle to the referenced type description to return.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetRefTypeOfImplType(self, index, href):
        """
        GetRefTypeOfImplType(self: UCOMITypeInfo, index: int) -> int
        If a type description describes a COM class, it retrieves the type description
        of the implemented interface types.
        index: Index of the implemented type whose handle is returned.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetTypeAttr(self, ppTypeAttr):
        """
        GetTypeAttr(self: UCOMITypeInfo) -> IntPtr
        Retrieves a System.Runtime.InteropServices.TYPEATTR structure that contains the
        attributes of the type description.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetTypeComp(self, ppTComp):
        """
        GetTypeComp(self: UCOMITypeInfo) -> UCOMITypeComp
        Retrieves the ITypeComp interface for the type description, which enables a
        client compiler to bind to the type description's members.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def GetVarDesc(self, index, ppVarDesc):
        """
        GetVarDesc(self: UCOMITypeInfo, index: int) -> IntPtr
        Retrieves a VARDESC structure that describes the specified variable.
        index: Index of the variable description to return.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def Invoke(self, pvInstance, memid, wFlags, pDispParams, pVarResult, pExcepInfo, puArgErr):
        """
        Invoke(self: UCOMITypeInfo, pvInstance: object, memid: int, wFlags: Int16, pDispParams: DISPPARAMS) -> (DISPPARAMS, object, EXCEPINFO, int)
        Invokes a method, or accesses a property of an object, that implements the
        interface described by the type description.
        pvInstance: Reference to the interface described by this type description.
        memid: Identifies the interface member.
        wFlags: Flags describing the context of the invoke call.
        pDispParams: Reference to a structure that contains an array of arguments, an array of
        DISPIDs for named arguments, and counts of the number of elements in each
        array.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def ReleaseFuncDesc(self, pFuncDesc):
        """
        ReleaseFuncDesc(self: UCOMITypeInfo, pFuncDesc: IntPtr)
        Releases a System.Runtime.InteropServices.FUNCDESC previously returned by
        System.Runtime.InteropServices.UCOMITypeInfo.GetFuncDesc(System.Int32,System.Int
        Ptr@).
        pFuncDesc: Reference to the FUNCDESC to release.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def ReleaseTypeAttr(self, pTypeAttr):
        """
        ReleaseTypeAttr(self: UCOMITypeInfo, pTypeAttr: IntPtr)
        Releases a System.Runtime.InteropServices.TYPEATTR previously returned by
        System.Runtime.InteropServices.UCOMITypeInfo.GetTypeAttr(System.IntPtr@).
        pTypeAttr: Reference to the TYPEATTR to release.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def ReleaseVarDesc(self, pVarDesc):
        """
        ReleaseVarDesc(self: UCOMITypeInfo, pVarDesc: IntPtr)
        Releases a VARDESC previously returned by
        System.Runtime.InteropServices.UCOMITypeInfo.GetVarDesc(System.Int32,System.IntP
        tr@).
        pVarDesc: Reference to the VARDESC to release.
        """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        # Autogenerated .NET interop stub; the CLR supplies the implementation.
        pass
class UCOMITypeLib:
    """ Use System.Runtime.InteropServices.ComTypes.ITypeLib instead. """
    # Autogenerated IronPython stub for the deprecated .NET UCOMITypeLib
    # interface; every method body is supplied by the CLR at runtime.
    def FindName(self, szNameBuf, lHashVal, ppTInfo, rgMemId, pcFound):
        """
        FindName(self: UCOMITypeLib, szNameBuf: str, lHashVal: int, pcFound: Int16) -> (Array[UCOMITypeInfo], Array[int], Int16)
        Finds occurrences of a type description in a type library.
        szNameBuf: The name to search for.
        lHashVal: A hash value to speed up the search, computed by the LHashValOfNameSys
        function. If lHashVal is 0, a value is computed.
        pcFound: On entry, indicates how many instances to look for. For example, pcFound = 1
        can be called to find the first occurrence. The search stops when one instance
        is found.On exit, indicates the number of instances that were found. If the in
        and out values of pcFound are identical, there might be more type descriptions
        that contain the name.
        """
        pass
    def GetDocumentation(self, index, strName, strDocString, dwHelpContext, strHelpFile):
        """
        GetDocumentation(self: UCOMITypeLib, index: int) -> (str, str, int, str)
        Retrieves the library's documentation string, the complete Help file name and
        path, and the context identifier for the library Help topic in the Help file.
        index: Index of the type description whose documentation is to be returned.
        """
        pass
    def GetLibAttr(self, ppTLibAttr):
        """
        GetLibAttr(self: UCOMITypeLib) -> IntPtr
        Retrieves the structure that contains the library's attributes.
        """
        pass
    def GetTypeComp(self, ppTComp):
        """
        GetTypeComp(self: UCOMITypeLib) -> UCOMITypeComp
        Enables a client compiler to bind to a library's types, variables, constants,
        and global functions.
        """
        pass
    def GetTypeInfo(self, index, ppTI):
        """
        GetTypeInfo(self: UCOMITypeLib, index: int) -> UCOMITypeInfo
        Retrieves the specified type description in the library.
        index: Index of the UCOMITypeInfo interface to return.
        """
        pass
    def GetTypeInfoCount(self):
        """
        GetTypeInfoCount(self: UCOMITypeLib) -> int
        Returns the number of type descriptions in the type library.
        Returns: The number of type descriptions in the type library.
        """
        pass
    def GetTypeInfoOfGuid(self, guid, ppTInfo):
        """
        GetTypeInfoOfGuid(self: UCOMITypeLib, guid: Guid) -> (Guid, UCOMITypeInfo)
        Retrieves the type description that corresponds to the specified GUID.
        guid: IID of the interface or CLSID of the class whose type info is requested.
        """
        pass
    def GetTypeInfoType(self, index, pTKind):
        """
        GetTypeInfoType(self: UCOMITypeLib, index: int) -> TYPEKIND
        Retrieves the type of a type description.
        index: The index of the type description within the type library.
        """
        pass
    def IsName(self, szNameBuf, lHashVal):
        """
        IsName(self: UCOMITypeLib, szNameBuf: str, lHashVal: int) -> bool
        Indicates whether a passed-in string contains the name of a type or member
        described in the library.
        szNameBuf: The string to test.
        lHashVal: The hash value of szNameBuf.
        Returns: true if szNameBuf was found in the type library; otherwise false.
        """
        pass
    def ReleaseTLibAttr(self, pTLibAttr):
        """
        ReleaseTLibAttr(self: UCOMITypeLib, pTLibAttr: IntPtr)
        Releases the System.Runtime.InteropServices.TYPELIBATTR originally obtained
        from System.Runtime.InteropServices.UCOMITypeLib.GetLibAttr(System.IntPtr@).
        pTLibAttr: The TLIBATTR to release.
        """
        pass
    def __init__(self, *args): #cannot find CLR method
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
class UnknownWrapper(object):
    """
    Wraps objects the marshaler should marshal as a VT_UNKNOWN.
    UnknownWrapper(obj: object)
    """
    # Autogenerated IronPython stub; construction and marshaling happen in the CLR.
    @staticmethod # known case of __new__
    def __new__(self, obj):
        """ __new__(cls: type, obj: object) """
        pass
    WrappedObject = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Gets the object contained by this wrapper.
    Get: WrappedObject(self: UnknownWrapper) -> object
    """
class UnmanagedFunctionPointerAttribute(Attribute, _Attribute):
"""
Controls the marshaling behavior of a delegate signature passed as an unmanaged function pointer to or from unmanaged code. This class cannot be inherited.
UnmanagedFunctionPointerAttribute(callingConvention: CallingConvention)
"""
def __init__(self, *args): #cannot find CLR method
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ | |
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes representing view hierarchy data of Android emulator screenshot.
The view hierarchy with tree structure is contained in a xml file generated by
'uiautomator dump' adb command from a running Android emulator instance.
It contains attribute data of all UI elements (i.e. UI objects) on a screen.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import json
import re
import attr
from distutils.util import strtobool
from enum import Enum
from lxml import etree
import tensorflow.compat.v1 as tf # tf
# from seq2act.data_generation import config
flags = tf.flags
FLAGS = flags.FLAGS
class UIObjectType(Enum):
  """Types of the different UI objects.

  Member names mirror Android widget class names: _build_object_type matches
  `android.widget.<Name>` case-insensitively against these member names.
  """
  UNKNOWN = 0
  BUTTON = 1
  CHECKBOX = 2
  CHECKEDTEXTVIEW = 3
  EDITTEXT = 4
  IMAGEBUTTON = 5
  IMAGEVIEW = 6
  RADIOBUTTON = 7
  SLIDINGDRAWER = 8
  SPINNER = 9
  SWITCH = 10
  TABWIDGET = 11
  TEXTVIEW = 12
  TOGGLEBUTTON = 13
  VIDEOVIEW = 14
class UIObjectGridLocation(Enum):
  """The on-screen grid location (3x3 grid) of an UI object.

  Numbering is row-major: value = row * 3 + column (see _grid_location).
  """
  TOP_LEFT = 0
  TOP_CENTER = 1
  TOP_RIGHT = 2
  LEFT = 3
  CENTER = 4
  RIGHT = 5
  BOTTOM_LEFT = 6
  BOTTOM_CENTER = 7
  BOTTOM_RIGHT = 8
@attr.s
class BoundingBox(object):
  """The bounding box with horizontal/vertical coordinates of an UI object.

  Coordinates are parsed from the Android `bounds` attribute
  '[x1,y1][x2,y2]' (see _build_bounding_box), as integer pixels.
  """
  x1 = attr.ib()  # first corner, horizontal coordinate
  y1 = attr.ib()  # first corner, vertical coordinate
  x2 = attr.ib()  # opposite corner, horizontal coordinate
  y2 = attr.ib()  # opposite corner, vertical coordinate
@attr.s
class UIObject(object):
  """Represents an UI object from the leaf node in the view hierarchy.

  Attribute values are extracted from one leaf `<node>` element of an
  Android view-hierarchy dump (see the _build_* helpers in this module).
  """
  obj_type = attr.ib()       # UIObjectType enum value
  obj_name = attr.ib()       # `text` if present, else `content_desc`
  word_sequence = attr.ib()  # tokenized name, see _build_word_sequence
  text = attr.ib()
  resource_id = attr.ib()
  android_class = attr.ib()
  android_package = attr.ib()
  content_desc = attr.ib()
  clickable = attr.ib()
  visible = attr.ib()
  enabled = attr.ib()
  focusable = attr.ib()
  focused = attr.ib()
  scrollable = attr.ib()
  long_clickable = attr.ib()
  selected = attr.ib()
  bounding_box = attr.ib()   # BoundingBox in screen pixels
  grid_location = attr.ib()  # UIObjectGridLocation (3x3 grid cell)
  dom_location = attr.ib()   # [depth, preorder-index, postorder-index]
  def __eq__(self, other):
    # BUG FIX: the original read `self.name`, which does not exist on this
    # class (the attribute is `obj_name`) and raised AttributeError.
    # NOTE(review): every attribute is compared against `other` itself, not
    # against other's attributes -- only meaningful if `other` is a plain
    # value (e.g. a string); also @attr.s may auto-generate and override
    # __eq__ depending on the attrs version. Confirm with callers.
    return self.content_desc == other and self.text == other and self.android_class == other and self.obj_name == other and self.android_package == other
def _build_word_sequence(text, content_desc, resource_id):
"""Returns a sequence of word tokens based on certain attributes.
Args:
text: `text` attribute of an element.
content_desc: `content_desc` attribute of an element.
resource_id: `resource_id` attribute of an element.
Returns:
A sequence of word tokens.
"""
if text or content_desc:
return re.findall(r"[\w']+|[?.!/,;:]", text if text else content_desc)
else:
name = resource_id.split('/')[-1]
return filter(None, name.split('_'))
def _build_object_type(android_class):
  """Returns the object type based on `class` attribute.

  Args:
    android_class: `class` attribute of an element (Android class).

  Returns:
    The UIObjectType enum.
  """
  if not android_class.startswith('android.widget'):
    return UIObjectType.UNKNOWN
  widget_name = android_class.split('.')[2].upper()
  matches = (member for member in UIObjectType if member.name == widget_name)
  return next(matches, UIObjectType.UNKNOWN)
def _build_object_name(text, content_desc):
"""Returns the object name based on `text` or `content_desc` attribute.
Args:
text: The `text` attribute.
content_desc: The `content_desc` attribute.
Returns:
The object name string.
"""
return text if text else content_desc
def _build_bounding_box(bounds):
  """Returns the object bounding box based on `bounds` attribute.

  Args:
    bounds: The `bounds` attribute, formatted '[x1,y1][x2,y2]'.

  Returns:
    The BoundingBox object.
  """
  match = re.match(r'\[(\d+),(\d+)\]\[(\d+),(\d+)\]', bounds)
  assert match
  coords = [int(group) for group in match.groups()]
  return BoundingBox(x1=coords[0], y1=coords[1], x2=coords[2], y2=coords[3])
def _build_clickable(element, tree_child_as_clickable=True):
  """Returns whether the element is clickable or one of its ancestors is.

  Args:
    element: The etree.Element object.
    tree_child_as_clickable: treat all tree children as clickable

  Returns:
    A boolean to indicate whether the element is clickable or one of its
    ancestors is.
  """
  clickable = element.get('clickable')
  if clickable == 'false':
    # Inherit clickability from the nearest clickable ancestor.
    for node in element.iterancestors():
      if node.get('clickable') == 'true':
        clickable = 'true'
        break
  # Below code is try to fix that: some target UI have 'clickable==False'
  # but it's clickable by human actually
  # Checkable elemnts should also be treated as clickable
  # Some menu items may have clickable==False but checkable==True
  if element.get('checkable') == 'true':
    clickable = 'true'
  if tree_child_as_clickable:
    p = element.getparent()
    # BUG FIX: compare against None explicitly -- lxml elements are falsy
    # when they have no children, so `while p:` can terminate the ancestor
    # walk early (and truth-testing elements is deprecated in newer lxml).
    while p is not None:
      if p.get('class') == 'android.widget.ListView':
        clickable = 'true'
        break
      p = p.getparent()
  return strtobool(clickable)
def _pixel_distance(a_x1, a_x2, b_x1, b_x2):
"""Calculates the pixel distance between bounding box a and b.
Args:
a_x1: The x1 coordinate of box a.
a_x2: The x2 coordinate of box a.
b_x1: The x1 coordinate of box b.
b_x2: The x2 coordinate of box b.
Returns:
The pixel distance between box a and b on the x axis. The distance
on the y axis can be calculated in the same way. The distance can be
positive number (b is right/bottom to a) and negative number
(b is left or top to a).
"""
# if a and b are close enough, then we set the their distance to be 1
# because there are typically padding spaces inside an object's bounding box
if b_x1 <= a_x2 and a_x2 - b_x1 <= 100: # config.ADJACENT_BOUNDING_BOX_THRESHOLD:
return 1
if a_x1 <= b_x2 and b_x2 - a_x1 <= 100: # config.ADJACENT_BOUNDING_BOX_THRESHOLD:
return -1
# overlap
if (a_x1 <= b_x1 <= a_x2) or (a_x1 <= b_x2 <= a_x2) or (
b_x1 <= a_x1 <= b_x2) or (b_x1 <= a_x2 <= b_x2):
return 0
elif b_x1 > a_x2:
return b_x1 - a_x2
else:
return b_x2 - a_x1
def _grid_coordinate(x, width):
"""Calculates the 3x3 grid coordinate on the x axis.
The grid coordinate on the y axis is calculated in the same way.
Args:
x: The x coordinate: [0, width).
width: The screen width.
Returns:
The grid coordinate: [0, 2].
Note that the screen is divided into 3x3 grid, so the grid coordinate
uses the number from 0, 1, 2.
"""
assert 0 <= x <= width
grid_x_0 = width / 3
grid_x_1 = 2 * grid_x_0
if 0 <= x < grid_x_0:
grid_coordinate_x = 0
elif grid_x_0 <= x < grid_x_1:
grid_coordinate_x = 1
else:
grid_coordinate_x = 2
return grid_coordinate_x
def _grid_location(bbox, screen_width, screen_height):
  """Calculates the grid number of the UI object's bounding box.

  The screen is divided into a 3x3 grid numbered row-major:
    (0, 0) (0, 1) (0, 2)        0 1 2
    (1, 0) (1, 1) (1, 2)  --->  3 4 5
    (2, 0) (2, 1) (2, 2)        6 7 8

  Args:
    bbox: The bounding box of the UI object.
    screen_width: The width of the screen associated with the hierarchy.
    screen_height: The height of the screen associated with the hierarchy.

  Returns:
    The UIObjectGridLocation for the box's center point.
  """
  center_x = (bbox.x1 + bbox.x2) / 2
  center_y = (bbox.y1 + bbox.y2) / 2
  row = _grid_coordinate(center_y, screen_height)
  column = _grid_coordinate(center_x, screen_width)
  return UIObjectGridLocation(row * 3 + column)
def _build_etree_from_json(root, json_dict):
  """Builds the element tree from json_dict.

  Recursively copies the JSON view hierarchy into `root`: one XML attribute
  per UI property, one appended child <node> per child dict.

  Args:
    root: The current etree root node.
    json_dict: The current json_dict corresponding to the etree root node.
  """
  # set node attributes
  if root is None or json_dict is None:
    return
  x1, y1, x2, y2 = json_dict.get('bounds', [0, 0, 0, 0])
  root.set('bounds', '[%d,%d][%d,%d]' % (x1, y1, x2, y2))
  root.set('class', json_dict.get('class', ''))
  # XML element cannot contain NULL bytes.
  root.set('text', json_dict.get('text', '').replace('\x00', ''))
  root.set('resource-id', json_dict.get('resource-id', ''))
  # `content-desc` arrives as a one-element list whose entry may be None.
  content_desc = json_dict.get('content-desc', [None])
  root.set(
      'content-desc',
      '' if content_desc[0] is None else content_desc[0].replace('\x00', ''))
  root.set('package', json_dict.get('package', ''))
  root.set('visible', str(json_dict.get('visible-to-user', True)))
  root.set('enabled', str(json_dict.get('enabled', False)))
  root.set('focusable', str(json_dict.get('focusable', False)))
  root.set('focused', str(json_dict.get('focused', False)))
  # Either scroll direction marks the node scrollable.
  root.set(
      'scrollable',
      str(
          json_dict.get('scrollable-horizontal', False) or
          json_dict.get('scrollable-vertical', False)))
  root.set('clickable', str(json_dict.get('clickable', False)))
  root.set('long-clickable', str(json_dict.get('long-clickable', False)))
  root.set('selected', str(json_dict.get('selected', False)))
  if 'children' not in json_dict:  # leaf node
    return
  for child in json_dict['children']:
    # some json file has 'null' as one of the children.
    if child:
      child_node = etree.Element('node')
      root.append(child_node)
      _build_etree_from_json(child_node, child)
class LeafNode(object):
"""Represents a leaf node in the view hierarchy data from xml."""
def __init__(self,
element,
dom_location=None,
screen_width=1440,
screen_height=2960):
"""Constructor.
Args:
element: The etree.Element object.
dom_location: [depth, preorder-index, postorder-index] of element.
screen_width: The width of the screen associated with the element.
screen_height: The height of the screen associated with the element.
"""
assert not element.findall('.//node')
self.element = element
self._screen_width = screen_width
self._screen_height = | |
# -*- coding: utf-8 -*-
""" contest models: Contest, Contestant, Donor, and Founder
:copyright: Copyright (c) 2014 Bivio Software, Inc. All Rights Reserved.
:license: Apache, see LICENSE for more details.
"""
import datetime
import decimal
import math
import pytz
import random
import re
import flask
import sqlalchemy.orm
from .. import biv
from .. import common
from .. import controller
from ..contest import model as pcm
from ..auth import model as pam
from ..controller import db
class Contest(db.Model, common.ModelWithDates):
    """contest database model.

    Fields:
        biv_id: primary ID
        display_name: name of the contest
        tag_line: sub-name of the contest
        contest_logo: image blob
        logo_type: image type (gif, png, jpeg)
        end_date: last day of the contest, in the contest's time zone
        is_scoring_completed: True if the contestant can view their scores
    """
    biv_id = db.Column(
        db.Numeric(18),
        db.Sequence('contest_s', start=1002, increment=1000),
        primary_key=True
    )
    display_name = db.Column(db.String(100), nullable=False)
    tag_line = db.Column(db.String(500))
    # TODO(pjm): move logo and founder_avatar to separate model BivImage
    contest_logo = db.Column(db.LargeBinary)
    logo_type = db.Column(db.Enum('gif', 'png', 'jpeg', name='logo_type'))
    end_date = db.Column(db.Date, nullable=False)
    is_scoring_completed = db.Column(db.Boolean, nullable=False)

    def contestant_count(self):
        """Returns the number of public contestants for the current contest."""
        return pam.BivAccess.query.select_from(Contestant).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == Contestant.biv_id,
            # not a real ==, Column() overrides __eq__ to generate SQL
            Contestant.is_public == True  # noqa
        ).count()

    def days_remaining(self):
        """Days remaining for this Contest (never negative)."""
        return max(self._time_remaining().days, 0)

    def donor_count(self):
        """Returns the total executed donor count across all the contestants."""
        access_alias = sqlalchemy.orm.aliased(pam.BivAccess)
        return Donor.query.select_from(pam.BivAccess, access_alias).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == access_alias.source_biv_id,
            access_alias.target_biv_id == Donor.biv_id,
            Donor.donor_state == 'executed'
        ).count()

    def donor_executed_amount(self):
        """Returns the total amount raised for all executed donors."""
        access_alias = sqlalchemy.orm.aliased(pam.BivAccess)
        # TODO(pjm): do sum in sql query
        rows = Donor.query.select_from(pam.BivAccess, access_alias).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == access_alias.source_biv_id,
            access_alias.target_biv_id == Donor.biv_id,
            Donor.donor_state == 'executed'
        ).all()
        # Decimal start value keeps the result a Decimal, even for an
        # empty result set.
        return sum((row.amount for row in rows), decimal.Decimal(0))

    def get_admin_contestants(self):
        """Returns a list of contestants with computed scores.

        total_score = judge score plus up to 40 points proportional to
        the amount raised relative to the highest-raising contestant.
        Sorted by contestant display_name.
        """
        rows = []
        max_raised = decimal.Decimal(0)
        for contestant in self.get_public_contestants():
            judge_score = contestant.get_judge_score_and_count()
            row = {
                'display_name': contestant.display_name,
                'amount_raised': contestant.get_amount_raised(),
                'judge_score': judge_score[0],
                'judge_count': judge_score[1],
                'contestant': contestant
            }
            if row['amount_raised'] > max_raised:
                max_raised = row['amount_raised']
            rows.append(row)
        for row in rows:
            row['amount_score'] = (
                40 * row['amount_raised'] / max_raised) if max_raised else 0
            row['total_score'] = row['amount_score'] + row['judge_score']
        return sorted(rows, key=lambda contestant: contestant['display_name'])

    def get_contest(self):
        """Returns self (keeps the API parallel to Contestant.get_contest)."""
        return self

    def get_sponsors(self, randomize=False):
        """Return a list of Sponsor models for this Contest."""
        return pcm.Sponsor.get_sponsors_for_biv_id(self.biv_id, randomize)

    def get_public_contestants(self, randomize=False, userRandomize=False):
        """Return a list of contestants for this Contest. List will be
        randomized if randomize is True. If userRandomize is True, the list
        will be randomized with a seed based on the current user name."""
        contestants = Contestant.query.select_from(pam.BivAccess).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == Contestant.biv_id
        ).filter(Contestant.is_public == True).all()  # noqa
        if randomize:
            random.shuffle(contestants)
        if userRandomize:
            # seeded shuffle: each logged-in user sees a stable ordering
            random.Random(flask.session['user.display_name']).shuffle(
                contestants)
        return contestants

    def get_timezone(self):
        """Returns the timezone used by this contest."""
        # TODO(pjm): either store in config or per contest
        return pytz.timezone('US/Mountain')

    def hours_remaining(self):
        """Hours remaining for this Contest (never negative)."""
        return max(
            math.floor(self._time_remaining().total_seconds() / (60 * 60)), 0)

    def is_admin(self):
        """Shortcut to Admin.is_admin"""
        return pam.Admin.is_admin()

    def is_expired(self):
        """Returns True if the contest has expired."""
        return self._time_remaining().total_seconds() <= 0

    def is_judge(self):
        """Returns True if the current user is a judge for this Contest"""
        if not flask.session.get('user.is_logged_in'):
            return False
        # judging closes when the contest ends
        if self.is_expired():
            return False
        access_alias = sqlalchemy.orm.aliased(pam.BivAccess)
        if Judge.query.select_from(pam.BivAccess, access_alias).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == access_alias.target_biv_id,
            access_alias.source_biv_id == flask.session['user.biv_id']
        ).first():
            return True
        return False

    def minutes_remaining(self):
        """Minutes remaining for this Contest (never negative)."""
        return max(
            math.floor(self._time_remaining().total_seconds() / 60), 0)

    def user_submission_url(self, task='contestant'):
        """Returns the current user's submission url or None."""
        for contestant in self.get_public_contestants():
            if contestant.is_founder():
                return contestant.format_uri(task)
        return None

    def _time_remaining(self):
        """Returns the time remaining using the contest time zone."""
        tz = self.get_timezone()
        # the contest runs through the very end of end_date in contest time
        end_of_day = tz.localize(
            datetime.datetime(
                self.end_date.year, self.end_date.month, self.end_date.day,
                23, 59, 59))
        return end_of_day - datetime.datetime.now(tz)
class Contestant(db.Model, common.ModelWithDates):
    """contestant database model.

    Fields:
        biv_id: primary ID
        display_name: project name
        youtube_code: the VIDEO_ID for the youtube video
        slideshow_code: the SlideShare ID for the slide deck
        contestant_desc: project description
        tax_id: project EIN
        website: project website
        business_phone: contact by phone
        business_address: contact by mail
        is_public: is the project to be shown on the public contestant list?
        is_under_review: enables review of a non-public submission
    """
    biv_id = db.Column(
        db.Numeric(18),
        db.Sequence('contestant_s', start=1003, increment=1000),
        primary_key=True
    )
    display_name = db.Column(db.String(100), nullable=False)
    youtube_code = db.Column(db.String(500))
    slideshow_code = db.Column(db.String(500))
    contestant_desc = db.Column(db.String)
    tax_id = db.Column(db.String(30))
    website = db.Column(db.String(100))
    business_phone = db.Column(db.String(100))
    business_address = db.Column(db.String(500))
    is_public = db.Column(db.Boolean, nullable=False)
    is_under_review = db.Column(db.Boolean, nullable=False)

    def get_amount_raised(self):
        """Returns the executed Donor amount for this Contestant"""
        rows = Donor.query.select_from(pam.BivAccess).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == Donor.biv_id,
            Donor.donor_state == 'executed'
        ).all()
        # Decimal start keeps the type stable for an empty result set
        return sum((row.amount for row in rows), decimal.Decimal(0))

    def get_contest(self):
        """Returns the Contest model which owns this Contestant"""
        return Contest.query.select_from(pam.BivAccess).filter(
            pam.BivAccess.source_biv_id == Contest.biv_id,
            pam.BivAccess.target_biv_id == self.biv_id
        ).one()

    def get_founders(self):
        """Return a list of Founder models for this Contestant"""
        return Founder.query.select_from(pam.BivAccess).filter(
            pam.BivAccess.source_biv_id == self.biv_id,
            pam.BivAccess.target_biv_id == Founder.biv_id
        ).all()

    def get_completed_judge_scores(self):
        """Returns a hash of (judge_id => [JudgeScore ...]) for fully
        scored surveys. Scores are sorted by question number."""
        rows = JudgeScore.query.select_from(pam.BivAccess).filter(
            JudgeScore.contestant_biv_id == self.biv_id,
            JudgeScore.judge_biv_id == pam.BivAccess.source_biv_id,
            pam.BivAccess.target_biv_id == Judge.biv_id,
            JudgeScore.question_number > 0,
            JudgeScore.judge_score > 0
        ).all()
        rows_by_judge = {}
        for row in rows:
            rows_by_judge.setdefault(row.judge_biv_id, []).append(row)
        # only include completed surveys (all questions answered) in results
        res = {}
        question_count = JudgeScore.get_question_count()
        for judge_id, scores in rows_by_judge.items():
            if len(scores) == question_count:
                res[judge_id] = sorted(
                    scores, key=lambda score: score.question_number)
        return res

    def get_judge_score_and_count(self):
        """Returns the average score from judges and the number of judges
        who judged this Contestant."""
        rows_by_judge = self.get_completed_judge_scores()
        grand_count = len(rows_by_judge)
        grand_total = sum(
            (self._score_rows(rows) for rows in rows_by_judge.values()),
            decimal.Decimal(0))
        return [
            (grand_total / grand_count) if grand_count else 0,
            grand_count
        ]

    def get_slideshow_code(self):
        """Returns the slideshare or youtube code for the pitch deck"""
        if self.is_youtube_slideshow():
            # stored as "youtube:<VIDEO_ID>" -- return just the id
            match = re.search(r'^youtube\:(.*)$', self.slideshow_code)
            return match.group(1)
        return self.slideshow_code

    def get_score_for_judge_user(self):
        """Returns this contestant's score for the current logged in judge"""
        return self._score_rows(self._get_score_info_for_judge_user())

    def get_summary(self):
        """Returns an excerpt (roughly the first two sentences) of
        Contestant.contestant_desc."""
        summary = self.contestant_desc
        match = re.search(
            r'^(.*?\s[a-z)]{3,}\.\s.*?\s[a-z)]{3,}\.\s)',
            summary,
            re.DOTALL
        )
        if match:
            return match.group(1)
        return summary

    def get_website(self):
        """Returns the contestant website, prepending http:// if necessary."""
        if self.website and not re.search(r'^http', self.website):
            return 'http://{}'.format(self.website)
        return self.website

    def is_founder(self):
        """Returns True if the current user is a founder for this Contestant"""
        if not flask.session.get('user.is_logged_in'):
            return False
        access_alias = sqlalchemy.orm.aliased(pam.BivAccess)
        if Founder.query.select_from(pam.BivAccess, access_alias).filter(
            Founder.biv_id == pam.BivAccess.target_biv_id,
            pam.BivAccess.source_biv_id == flask.session['user.biv_id'],
            Founder.biv_id == access_alias.target_biv_id,
            access_alias.source_biv_id == self.biv_id
        ).first():
            return True
        return False

    def is_judge(self):
        """Returns True if the current user is a judge for this Contest"""
        return self.get_contest().is_judge()

    def is_partial_scored_by_judge_user(self):
        """Returns True unless the current judge answered every question."""
        # TODO(pjm): need meta data for question count
        return len(self._get_score_info_for_judge_user()) \
            != JudgeScore.get_question_count()

    def is_scored_by_judge_user(self):
        """Returns True if the current judge answered at least one question."""
        return len(self._get_score_info_for_judge_user()) > 0

    def is_youtube_slideshow(self):
        """Returns True if the slideshow is Youtube, not Slideshare."""
        # slideshow_code is nullable: guard against None before matching,
        # and normalize the result to a bool
        return bool(self.slideshow_code and
                    re.search(r'^youtube\:', self.slideshow_code))

    def _get_score_info_for_judge_user(self):
        """Returns the answered JudgeScore rows for the logged in judge."""
        return JudgeScore.query.filter(
            JudgeScore.judge_biv_id == flask.session['user.biv_id'],
            JudgeScore.contestant_biv_id == self.biv_id,
            JudgeScore.question_number > 0,
            JudgeScore.judge_score > 0
        ).all()

    def _score_rows(self, rows):
        """Sums get_points() over JudgeScore rows; returns a Decimal."""
        return sum((row.get_points() for row in rows), decimal.Decimal(0))
class Donor(db.Model, common.ModelWithDates):
    """donor database model.

    Fields:
        biv_id: primary ID
        amount: promised amount
        display_name: donor name, from paypal
        donor_email: donor email, from paypal
        donor_state: (submitted, pending_confirmation, executed, canceled)
        paypal_payment_id: payment id, from paypal post
        paypal_payer_id: payer id, from paypal url callback
    """
    biv_id = db.Column(
        db.Numeric(18),
        db.Sequence('donor_s', start=1007, increment=1000),
        primary_key=True
    )
    amount = db.Column(db.Numeric(15, 2), nullable=False)
    display_name = db.Column(db.String(100))
    donor_email = db.Column(db.String(100))
    donor_state = db.Column(db.Enum(
        'submitted', 'pending_confirmation', 'executed', 'canceled',
        name='donor_state'))
    paypal_payment_id = db.Column(db.String(100))
    paypal_payer_id = db.Column(db.String(100))

    def add_to_session(self):
        """Persists the donor and records its biv_id in the flask session."""
        controller.db.session.add(self)
        # flush so the sequence assigns biv_id before it is stored
        controller.db.session.flush()
        flask.session['donor.biv_id'] = self.biv_id

    def remove_from_session(self):
        """Remove the donor's biv_id from the session, if present."""
        # pop() with a default is the atomic "delete if present" idiom
        flask.session.pop('donor.biv_id', None)

    @staticmethod
    def unsafe_load_from_session():
        """Loads the donor from the session.

        Returns None if session value is missing or donor does not exist.
        """
        biv_id = flask.session.get('donor.biv_id')
        if biv_id:
            return Donor.query.filter_by(biv_id=biv_id).first()
        return None
class Founder(db.Model, common.ModelWithDates):
"""founder database model.
Fields:
biv_id: primary ID
        display_name: founder name
fouder_desc: founder's short bio
founder_avatar: avatar image blob
avatar_type: image type (gif, png, jpeg)
"""
biv_id = db.Column(
db.Numeric(18),
db.Sequence('founder_s', start=1004, increment=1000),
primary_key=True
)
display_name = db.Column(db.String(100), nullable=False)
| |
<reponame>virtUOS/siddata_backend<filename>backend/api_views.py
"""
api_views.py
"""
import base64
import logging
import datetime
import json
import hashlib
import io
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound, JsonResponse
from django.core.files.images import ImageFile
from django.views.decorators.csrf import csrf_exempt
from recommenders import recommender_functions
from recommenders.RM_start import RM_start
from backend import models
import settings
# Debugging behavior: when settings.DEBUG is true, the origin/api_key checks in
# preprocess() are skipped so siddata can run against localhost Stud.IP versions.
def preprocess(func):
    """Decorator for API views: validates the request's origin credentials.

    In settings.DEBUG mode all requests pass through unchecked. Otherwise
    the request must carry a non-empty ``api_key`` that, together with
    ``origin``, matches an existing Origin record; failing requests get a
    401 response instead of reaching the view.
    """
    from functools import wraps

    @wraps(func)
    def function_wrapper(*args, **kwargs):
        # check if origin is allowed or has to be created in debug mode
        if settings.DEBUG:
            return func(*args, **kwargs)
        request_data = args[0].GET
        # .get() avoids a KeyError (and a 500) when api_key is absent;
        # a missing key is treated the same as an empty one
        api_key = request_data.get('api_key', '')
        if api_key == '':
            return HttpResponse("Invalid origin, empty api_key is not allowed", status=401)
        try:
            _ = models.Origin.objects.get(
                api_endpoint=request_data.get('origin'), api_key=api_key)
        except ObjectDoesNotExist:
            logging.info('no origin object found')
            return HttpResponse("Invalid origin, not allowed", status=401)
        return func(*args, **kwargs)
    return function_wrapper
@csrf_exempt
@preprocess
def student(request):
    """Route that returns all data related to a SiddataUser.

    GET: serializes the user (creating it and its initial recommender data
        on first contact).
    DELETE: removes the user entirely.
    PATCH: updates profile attributes and synchronizes the institute and
        course membership permissions with the submitted sets.
    """
    if request.method == 'GET':
        request_data = request.GET
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request_data['origin'],
        )[0]
        # Get_or_create user, check if user is new
        user, created = models.SiddataUser.objects.get_or_create(
            user_origin_id=request.GET['user_origin_id'],
            origin=origin)
        # If user was created (hence this request is the first for this user) the db is initialized
        if created:
            recommender_functions.create_initial_data_for_user(user)
        include_params = request.GET['include'].split(",")
        data_response = user.serialize(include=include_params)
        return JsonResponse(data_response, safe=False)
    if request.method == 'DELETE':
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request.GET['origin']
        )[0]
        try:
            user = models.SiddataUser.objects.get(
                user_origin_id=request.GET['user_origin_id'],
                origin=origin
            )
        except models.SiddataUser.DoesNotExist:
            return HttpResponse("Nutzer existiert nicht.")
        user.delete()
        return HttpResponse("Nutzer wurde gelöscht.")
    if request.method == 'PATCH':
        request_data_json = json.loads(request.body)
        studi_json = request_data_json["data"]
        request_data = request.GET
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request_data['origin'],
        )[0]
        user = models.SiddataUser.objects.get(user_origin_id=studi_json["id"], origin=origin)
        # attribute names in the payload match the model field names 1:1
        editable_attributes = (
            "data_donation", "gender_brain", "gender_social", "data_regulations")
        for attribute, value in studi_json["attributes"].items():
            if attribute in editable_attributes:
                setattr(user, attribute, value)
            else:
                # BUG fix: logging.log() requires a level as first argument;
                # calling it with only a message raises TypeError
                logging.warning(
                    "Unknown attribute in student PATCH route: %s", attribute)
        user.save()
        # grant the submitted institute permissions
        for institute_json in studi_json["relationships"]["institutes_brain"]["data"]:
            institute = models.Institute.objects.get(institute_origin_id=institute_json["id"], origin=origin)
            institutemembership = models.InstituteMembership.objects.get_or_create(
                institute=institute,
                user=user,
            )[0]
            institutemembership.share_brain = True
            institutemembership.save()
        for institute_json in studi_json["relationships"]["institutes_social"]["data"]:
            institute = models.Institute.objects.get(institute_origin_id=institute_json["id"], origin=origin)
            institutemembership = models.InstituteMembership.objects.get_or_create(
                institute=institute,
                user=user,
            )[0]
            institutemembership.share_social = True
            institutemembership.save()
        # respect rejected permissions
        for im in models.InstituteMembership.objects.filter(user=user):
            if im.institute.institute_origin_id not in [
                institute_json.get('id') for institute_json in studi_json["relationships"]["institutes_brain"]["data"]
            ]:
                im.share_brain = False
                im.save()
            if im.institute.institute_origin_id not in [
                institute_json.get('id') for institute_json in studi_json["relationships"]["institutes_social"]["data"]
            ]:
                im.share_social = False
                im.save()
            if not any([im.share_brain, im.share_social]):
                # according to data privacy guidelines, if a membership is not shared at all, it should be deleted
                im.delete()
        # grant the submitted course permissions
        # NOTE(review): brain uses StudipCourse while social uses
        # InheritingCourse -- confirm this asymmetry is intended
        for course_json in studi_json["relationships"]["courses_brain"]["data"]:
            course = models.StudipCourse.objects.get(course_origin_id=course_json["id"], origin=origin)
            coursemembership = models.CourseMembership.objects.get_or_create(
                course=course,
                user=user,
            )[0]
            coursemembership.share_brain = True
            coursemembership.save()
        for course_json in studi_json["relationships"]["courses_social"]["data"]:
            course = models.InheritingCourse.objects.get(course_origin_id=course_json["id"], origin=origin)
            coursemembership = models.CourseMembership.objects.get_or_create(
                course=course,
                user=user,
            )[0]
            coursemembership.share_social = True
            coursemembership.save()
        # respect rejected permissions
        for cm in models.CourseMembership.objects.filter(user=user):
            if cm.course.course_origin_id not in [
                course_json.get('id') for course_json in studi_json["relationships"]["courses_brain"]["data"]
            ]:
                cm.share_brain = False
                cm.save()
            if cm.course.course_origin_id not in [
                course_json.get('id') for course_json in studi_json["relationships"]["courses_social"]["data"]
            ]:
                cm.share_social = False
                cm.save()
            if not any([cm.share_brain, cm.share_social]):
                # according to data privacy guidelines, if a membership is not shared at all, it should be deleted
                cm.delete()
        return HttpResponse("Die Nutzendendaten wurden gespeichert.")
@csrf_exempt
@preprocess
def recommender(request, recommender_id=None):
    """Route that returns all data related to a SiddataUserRecommender.

    GET: serializes one recommender (by id) or all active recommenders
        for the user, ordered by recommender order.
    PATCH: stores the enabled flag per recommender and initializes newly
        enabled recommenders that have never been used.
    """
    if request.method == 'GET':
        request_data = request.GET
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request_data['origin'],
        )[0]
        # Get_or_create user, check if user is new
        user, created = models.SiddataUser.objects.get_or_create(
            user_origin_id=request_data["user_origin_id"],
            origin=origin)
        # If user was created (hence this request is the first for this user) the db is initialized
        if created:
            recommender_functions.create_initial_data_for_user(user)
        data_response = {}
        if recommender_id:
            userrecommender = models.SiddataUserRecommender.objects.get(id=recommender_id)
            data_response = userrecommender.serialize()
        else:
            data_response['data'] = []
            data_response['included'] = []
            recommenders = models.SiddataUserRecommender.objects.filter(
                user=user, recommender__active=True).order_by("recommender__order")
            for rec in recommenders:
                r_ser = rec.serialize()
                data_response['data'] += r_ser['data']
                # de-duplicate the shared 'included' resources
                for i in r_ser['included']:
                    if i not in data_response['included']:
                        data_response['included'].append(i)
        return JsonResponse(data_response, safe=False)
    if request.method == 'PATCH':
        request_data = json.loads(request.body)
        recommenders = request_data["data"]
        start_recommender = RM_start()
        # rec_json: renamed so it no longer shadows this view function
        for rec_json in recommenders:
            if rec_json["type"] != "Recommender":
                return HttpResponseServerError("Recommender Object expected. Instead: {}".format(rec_json))
            SUR = models.SiddataUserRecommender.objects.get(id=rec_json["id"])
            SUR.enabled = rec_json["attributes"]["enabled"]
            SUR.save()
            # if recommender is disabled or has been used before, continue..
            if not SUR.enabled or models.Goal.objects.filter(userrecommender=SUR).exists():
                continue
            recommender_class_object = recommender_functions.create_recommender_by_classname(
                SUR.recommender.classname)
            recommender_class_object.initialize(SUR.user)
            # Set teaser activity as done
            teaser_activity = recommender_class_object.get_teaser_activity(
                start_recommender.get_default_goal(SUR.user))
            teaser_activity.status = "done"
            teaser_activity.save()
        return HttpResponse("Recommender-Einstellungen gespeichert.")
@csrf_exempt
@preprocess
def goal(request, goal_id=None):
    """Route that returns, updates or deletes a Goal.

    GET: serializes one Goal (by id) or all of the user's goals by order.
    PATCH: updates title, description and/or order.
    DELETE: removes the Goal.
    """
    logger = logging.getLogger("api_goal")
    if request.method == 'GET':
        request_data = request.GET
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request_data['origin'],
        )[0]
        user = models.SiddataUser.objects.get(origin=origin, user_origin_id=request.GET["user_origin_id"])
        data_response = {}
        if goal_id:
            goal = models.Goal.objects.get(id=goal_id)
            data_response = goal.serialize()
        else:
            data_response['data'] = []
            data_response['included'] = []
            goals = models.Goal.objects.filter(userrecommender__user=user).order_by("order")
            for g in goals:
                g_ser = g.serialize()
                data_response['data'] += g_ser['data']
                data_response['included'] += g_ser['included']
        return JsonResponse(data_response, safe=False)
    elif request.method == 'PATCH':
        request_data = json.loads(request.body)
        request_goal = request_data["data"]
        if request_goal["type"] != "Goal":
            return HttpResponseServerError("Sent object is not of type Goal!")
        try:
            goal = models.Goal.objects.get(id=goal_id)
        except models.Goal.DoesNotExist:
            return HttpResponseServerError("Goal with this ID not known.")
        except Exception as e:
            logger.error(e)
            return HttpResponseServerError()
        # attribute names match the model field names; apply all submitted
        # values, then persist once instead of saving per attribute
        for key in ("title", "description", "order"):
            if key in request_goal["attributes"]:
                setattr(goal, key, request_goal["attributes"][key])
        goal.save()
        return HttpResponse("Ziel wurde bearbeitet.")
    elif request.method == 'DELETE':
        try:
            goal = models.Goal.objects.get(id=goal_id)
        except models.Goal.DoesNotExist:
            return HttpResponseServerError("Goal with this ID not known.")
        except Exception as e:
            logger.error(e)
            return HttpResponseServerError()
        goal.delete()
        return HttpResponse("Ziel wurde gelöscht.")
@csrf_exempt
@preprocess
def activity(request, activity_id=None):
    """Route that returns, updates or deletes an Activity.

    GET: serializes one Activity (by id) or all of the user's activities.
    PATCH: applies attribute changes; a finalizing change (status/answers)
        is forwarded to the owning recommender for processing.
    DELETE: removes the Activity.
    """
    logger = logging.getLogger("api_activity")

    def _utc(timestamp):
        # timestamps arrive as unix epoch seconds; store them tz-aware (UTC)
        return datetime.datetime.fromtimestamp(timestamp, datetime.timezone.utc)

    if request.method == 'GET':
        request_data = request.GET
        origin_id = request_data['user_origin_id']
        origin = models.Origin.objects.get_or_create(
            api_endpoint=request_data['origin'],
        )[0]
        user = models.SiddataUser.objects.get(origin=origin, user_origin_id=origin_id)
        data_response = {}
        if activity_id:
            act_obj = models.Activity.objects.get(id=activity_id)
            data_response = act_obj.serialize()
        else:
            data_response['data'] = []
            data_response['included'] = []
            act_objs = models.Activity.objects.filter(goal__user=user)
            for act in act_objs:
                a_ser = act.serialize()
                data_response['data'] += a_ser['data']
                data_response['included'] += a_ser['included']
        return JsonResponse(data_response, safe=False)
    elif request.method == 'PATCH':
        request_data = json.loads(request.body)
        request_activity = request_data["data"]
        if request_activity["type"] != "Activity":
            return HttpResponseServerError("Sent object is not of type Activity!")
        try:
            activity = models.Activity.objects.get(id=activity_id)
        except models.Activity.DoesNotExist:
            return HttpResponseServerError("Activity with this ID not known.")
        except Exception as e:
            logger.error(e)
            return HttpResponseServerError()
        # flags for special cases
        feedback = False
        status = False
        answer = False
        for key in request_activity["attributes"]:
            if key == "title":
                # NOTE(review): title/description are assigned but only
                # persisted by a later branch's save() or by the
                # recommender -- confirm this is intended
                activity.title = request_activity["attributes"]["title"]
            elif key == "description":
                activity.description = request_activity["attributes"]["description"]
            elif key == "status":
                # immortal activities never change status
                if activity.status == "immortal":
                    continue
                # If status has changed..
                elif activity.status != request_activity["attributes"]["status"]:
                    # ..set new status..
                    activity.status = request_activity["attributes"]["status"]
                    activity.save()
                    # If the reason for the request was a finalization:
                    if activity.status == "done":
                        status = True
                    elif activity.status == "discarded":
                        return HttpResponse("Activity verworfen.")
                    elif activity.status == "snoozed":
                        return HttpResponse("Activity pausiert.")
                    elif activity.status == "active":
                        return HttpResponse("Activity reaktiviert.")
            elif key == "answers":
                activity.answers = request_activity["attributes"]["answers"]
                # answering finalizes the activity (unless immortal)
                if activity.status != "immortal":
                    activity.status = "done"
                activity.save()
                answer = True
            elif key == "feedback_value":
                activity.feedback_value = request_activity["attributes"]["feedback_value"]
                activity.save()
                feedback = True
            elif key == "feedback_text":
                activity.feedback_text = request_activity["attributes"]["feedback_text"]
                activity.save()
                feedback = True
            elif key == "feedback_chdate":
                activity.feedback_chdate = _utc(request_activity["attributes"]["feedback_chdate"])
                activity.save()
            elif key == "notes":
                activity.notes = request_activity["attributes"]["notes"]
                activity.save()
            elif key == "duedate":
                activity.duedate = _utc(request_activity["attributes"]["duedate"])
                activity.save()
            elif key == "order":
                activity.order = request_activity["attributes"]["order"]
                activity.save()
            elif key == "chdate":
                activity.chdate = _utc(request_activity["attributes"]["chdate"])
                activity.save()
            elif key == "activation_time":
                activity.activation_time = _utc(request_activity["attributes"]["activation_time"])
                activity.save()
            elif key == "deactivation_time":
                activity.deactivation_time = _utc(request_activity["attributes"]["deactivation_time"])
                activity.save()
            elif key == "interactions":
                activity.interactions = request_activity["attributes"]["interactions"]
                activity.save()
            else:
                pass
        # If the changes only were a feedback change which does not finalize the activity...
        if feedback and (not status and not answer):
            # Send response to frontend and thereby end function call.
            return HttpResponse("Feedback wurde gespeichert.")
        # forward the finalized activity to its recommender
        rm_classname = activity.goal.userrecommender.recommender.classname
        rm = recommender_functions.create_recommender_by_classname(rm_classname)
        # NOTE(review): `feedback` is reused here for the recommender's
        # result and may end up holding the response text
        feedback = rm.process_activity(activity=activity)
        recommender_functions.refresh_all_recommenders()
        # identity comparison: None/True are singletons
        if feedback is None or feedback is True:
            feedback = activity.respond()
        return HttpResponse(feedback)
    elif request.method == 'DELETE':
        try:
            activity = models.Activity.objects.get(id=activity_id)
        except models.Activity.DoesNotExist:
            return HttpResponseServerError("Activity with this ID not known.")
        except Exception as e:
            logger.error(e)
            return HttpResponseServerError()
        activity.delete()
        return HttpResponse("Empfehlung wurde gelöscht.")
@csrf_exempt
@preprocess
def coursemembership(request):
    """Acknowledges a course membership update.

    :param request: the incoming HTTP request (payload is not processed here)
    :return: confirmation HttpResponse
    """
    confirmation = "Kursteilnahme wurde gespeichert."
    return HttpResponse(confirmation)
@csrf_exempt
@preprocess
def institutemembership(request):
    """Acknowledges an institute membership update.

    :param request: the incoming HTTP request (payload is not processed here)
    :return: confirmation HttpResponse
    """
    confirmation = "Institutszugehörigkeit wurde gespeichert."
    return HttpResponse(confirmation)
@csrf_exempt
@preprocess
def studycourse(request):
"""
:param request:
:return:
| |
# coding=utf-8
from src.testcase.GN_F1331.LaunchApp import *
from src.utils.GetSerial import *
# Thread subclass that captures the target's return value.
class MyThread(threading.Thread):
    """Thread that stores the result of ``target(*args)`` for retrieval.

    Call start() and join() as usual, then read the return value via
    get_result().
    """

    def __init__(self, target, args=()):
        super(MyThread, self).__init__()
        self.func = target
        self.args = args
        # Initialized up front so get_result() is always safe, even if
        # run() has not executed yet (the original relied on catching
        # the AttributeError of an unset self.result).
        self.result = None

    def run(self):
        self.result = self.func(*self.args)

    def get_result(self):
        """Returns the target's return value, or None if not finished.

        Call join() first to guarantee the thread has completed.
        """
        return self.result
class WidgetOperation(LaunchApp):
    def __init__(self, device_info):
        """Constructor.

        Args:
            device_info: device descriptor forwarded to LaunchApp.
        """
        super(WidgetOperation, self).__init__(device_info)
        # NOTE(review): check_flag's consumer is not visible in this section
        # of the file -- presumably read by test cases; confirm before use.
        self.check_flag = 1
    # Look up the index of the widget whose "name" matches the device.
    def get_index(self, device, element1):
        """Polls the widget dict until an element named *device* appears.

        Args:
            device: expected value of the element's "name" attribute.
            element1: page-object locator passed to wait_widget().

        Returns:
            The matching element's index, or False after ~15s without a match.
        """
        end_time = time.time() + 15
        while True:
            elements = self.wait_widget(element1)  # returns a dict of index -> element
            for index, element in elements.items():
                if element is not None and self.ac.get_attribute(element, "name") == device:
                    return index
            else:
                # for-else: reached when the scan found no match on this pass
                if time.time() > end_time:
                    return False
    # Select the device under test from the app home page.
    def choose_home_device(self, device, device_index=None):
        """Opens the control page for *device* from the home screen.

        Args:
            device: device name, used to resolve the widget index.
            device_index: optional pre-resolved index; skips get_index().
        """
        if device_index is None:
            index = self.get_index(device, self.page["app_home_page"]["device"])
        else:
            index = device_index
        # narrow the locator to the single device entry at *index*
        new_value = copy.copy(self.page["app_home_page"]["device"])
        new_value[0] = new_value[0][index]
        while True:
            try:
                self.widget_click(new_value, self.page["control_device_page"]["title"])
                break
            except TimeoutException:
                # entry not reachable yet -- scroll down one step and retry
                self.ac.swipe(0.6, 0.9, 0.6, 0.6, self.driver)
                time.sleep(1)
    # Select a device from a scrollable device list.
    def choose_device(self, device, element1, element2):
        """Scrolls through list *element1* until *device* is found, then
        clicks the matching entry built from the locator template *element2*.

        Args:
            device: device name; ":" is replaced by "-" to match the UI label.
            element1: locator for the list of candidate elements.
            element2: locator template for the clickable entry.

        Returns:
            0 on success.

        Raises:
            TimeoutException: if no match is found within 60 seconds.
        """
        end_time = time.time() + 60
        while True:
            elements = self.wait_widget(element1)
            new_value = copy.copy(element2)
            for index, element in elements.items():
                if element is not None and self.ac.get_attribute(element, "name") == device.replace(":", "-"):
                    new_value[0] = new_value[0][index]
                    self.widget_click(new_value)
                    return 0
            else:
                # for-else: no match on this pass -- scroll and retry
                self.ac.swipe(0.6, 0.6, 0.6, 0.5, self.driver)
                time.sleep(1)
                if time.time() > end_time:
                    raise TimeoutException()
    # Set the power state of the device outlets.
    def set_power(self, state):
        """Drives the outlet buttons until the page reflects *state*.

        Args:
            state: page-object key such as "main_button_on"; the substring
                before "_on"/"_off" selects which button gets clicked, the
                suffix selects the target on/off state for the sub-switches.
        """
        self.widget_click(self.page["control_device_page"]["main_button"])
        time.sleep(1)
        # choose the button to toggle based on the requested state name
        if "main" in state:
            button = "main_button"
        elif "up" in state:
            button = "up_button"
        elif "mid" in state:
            button = "mid_button"
        else:
            button = "down_button"
        if "_on" in state:
            btn = "on"
        else:
            btn = "off"
        while True:
            try:
                # wait for the widget that represents the requested state
                self.wait_widget(self.page["control_device_page"][state])
                # then force each sub-switch (up/mid/down) into that state;
                # a missing "<x>_button_<on|off>" widget means the switch is
                # not yet in the target state, so click it
                try:
                    self.wait_widget(self.page["control_device_page"]["up_button_%s" % btn])
                except TimeoutException:
                    self.widget_click(self.page["control_device_page"]["up_button"],
                                      self.page["control_device_page"]["up_button_%s" % btn])
                try:
                    self.wait_widget(self.page["control_device_page"]["mid_button_%s" % btn])
                except TimeoutException:
                    self.widget_click(self.page["control_device_page"]["mid_button"],
                                      self.page["control_device_page"]["mid_button_%s" % btn])
                try:
                    self.wait_widget(self.page["control_device_page"]["down_button_%s" % btn])
                except TimeoutException:
                    self.widget_click(self.page["control_device_page"]["down_button"],
                                      self.page["control_device_page"]["down_button_%s" % btn])
                break
            except TimeoutException:
                # page not in the requested state yet -- click the chosen
                # button toward *state* and retry the whole sequence
                self.widget_click(self.page["control_device_page"][button],
                                  self.page["control_device_page"][state])
# 设置滚轮
def set_roll(self, elem):
element = self.wait_widget(elem)
lc, sz = element.location, element.size
lcx, lcy, szw, szh = float(lc["x"]), float(lc["y"]), float(sz["width"]), float(sz["height"])
return lcx, lcy, szw, szh
    # Configure a timer via the hour/minute roller wheels.
    def set_timer_roll(self, elem_h, elem_m, elem_t, now_time, set_timer, cycle=False, delay_s=120):
        """
        :param elem_h: roller "hour" frame widget, used to get the hour column's x coordinate
        :param elem_m: roller "minute" frame widget, used to get the minute column's x coordinate
        :param elem_t: the roller's current time, "HH:MM" string or a widget to read it from
        :param now_time: current wall-clock time ("HH:MM") when the timer is configured
        :param set_timer: the timer target (int minutes or ("point"/"delay", "HH:MM") pair)
        :param cycle: True for aquarium-style back-to-back timers
        :param delay_s: headroom between configuring the timer and it firing
        :return: (timer start time, timer execution time), both as timestamps
        """
        # The timer target comes in two flavours, fixed-point and delayed:
        # - fixed-point: fire N minutes before/after now; value is an int
        #   (positive/negative, keywords "int"/"minus") or a ("point", "09:00")
        #   pair for a fixed wall-clock time (keyword "point");
        # - delayed: fire after a time span (fish-tank / delay modes); value is a
        #   ("delay", "30:00") pair giving the span (keyword "delay").
        # ps: delay_s leaves headroom for the configuration itself (default 2
        # minutes: with current time 8:00 the timer actually starts at 8:02).
        swipe_time = conf["roll_time"]["GN_F1331"]  # swipe settle interval for the JD WeLink app roller
        # Classify set_timer into one of the keywords above.
        if isinstance(set_timer, int):
            if set_timer >= 0:
                time_seg = "int"
            else:
                time_seg = "minus"
        else:
            if set_timer[0] == "point":
                time_seg = "point"
            elif set_timer[0] == "delay":
                time_seg = "delay"
            else:
                time_seg = None
        if not isinstance(now_time, str):
            raise KeyError("now time must be time.strftime('%%H:%%M'), current: %s" % str(now_time))
        # Baseline geometry for the hour/minute roller swipes.
        """"""
        # Hour roller.
        lcx_h, lcy_h, szw_h, szh_h = self.set_roll(elem_h)
        pxx_h, pxy_h = elem_h[3]["px"]
        start_x_h, start_y_h = int(lcx_h + pxx_h * szw_h), int(lcy_h + szh_h / 2)  # swipe anchor of the hour roller
        # Minute roller.
        lcx_m, lcy_m, szw_m, szh_m = self.set_roll(elem_m)
        pxx_m, pxy_m = elem_m[3]["px"]
        start_x_m, start_y_m = int(lcx_m + pxx_m * szw_m), int(lcy_m + szh_m / 2)  # swipe anchor of the minute roller
        """"""
        # Read the roller's current value: either given directly as "HH:MM", or
        # parsed out of the widget's "name" attribute (e.g. "1小时30分钟").
        if isinstance(elem_t, str):
            attr = elem_t
        else:
            attr = self.ac.get_attribute(self.wait_widget(elem_t), "name")
            start_h, start_m = re.findall(u"(\d+)小时", attr), re.findall(u"(\d+)分钟", attr)
            if start_h:
                attr = "%02d:%02d" % (int(start_h[0]), int(start_m[0]))
            else:
                attr = "00:%02d" % int(start_m[0])
        # The roller's current value, taken from the widget.
        time_roll = time.strftime("%Y-%m-%d r:00").replace("r", attr)  # roller's current time
        time_roll = time.mktime(time.strptime(time_roll, "%Y-%m-%d %X"))  # as a timestamp
        # Attach seconds to now_time.
        # If a normal timer and a delay timer fire at the same instant it is hard
        # to tell which one ran; shifting the delay-mode start 30s past the whole
        # minute separates them — delay_s then also grows by 30s (120s → 150s).
        try:
            time_now = time.strptime(time.strftime("%Y-%m-%d r:00").replace("r", now_time), "%Y-%m-%d %X")
        except ValueError:
            time_now = time.strptime(time.strftime("%Y-%m-%d r").replace("r", now_time), "%Y-%m-%d %X")
        time_now = time.mktime(time_now)
        if cycle is True:  # fish-tank mode: 2nd timer starts when the 1st ends, so drop the setup delay
            time_now = time_now - delay_s
        # Work out when the timer should fire.
        if time_seg == "int" or time_seg == "minus":
            time_set = time_now + set_timer * 60 + delay_s
        elif time_seg == "point" or time_seg == "delay":
            time_set = time.strftime("%Y-%m-%d r:00").replace("r", set_timer[1])
            time_set = time.mktime(time.strptime(time_set, "%Y-%m-%d %X"))
        else:
            time_set = "error"
        # Timer start instant and target instant.
        time_start = time_now + delay_s
        time_set = time_set  # NOTE(review): no-op self-assignment, kept as-is
        # Roller manipulation.
        roll_h, roll_m = time.strftime("%H:%M", time.localtime(time_roll)).split(":")
        set_h, set_m = time.strftime("%H:%M", time.localtime(time_set)).split(":")
        # Special case: rolling the hour wheel down to "00" first (the roller
        # auto-advances the minute, hence roll becomes "00:01" afterwards).
        if set_h == "00" and roll_h != "00":
            time_et_h = int(set_h) - int(roll_h)  # hour delta between roller and target
            time_et_h_a = abs(time_et_h) % 24  # number of hour-roller swipes
            try:  # deltas differ
                # time_et / time_et_a is 1/-1: sign decides swipe direction.
                pm_value = time_et_h / time_et_h_a
                if pm_value > 0:  # swipe down
                    aszh_h = int(szh_h * 1.9)  # single-row height derived from visible row count
                else:  # swipe up
                    aszh_h = int(szh_h * 1.4)
                end_y_h = start_y_h - pm_value * aszh_h
            except ZeroDivisionError:  # deltas equal
                end_y_h = start_y_h
            while time_et_h_a > 0:
                self.ac.swipe(start_x_h, start_y_h, start_x_h, end_y_h, self.driver, 0, False)
                print(time_et_h_a)
                time_et_h_a -= 1
                time.sleep(swipe_time)
            roll_h, roll_m = "00", "01"
        time_et_h = int(set_h) - int(roll_h)  # hour delta between roller and target
        time_et_h_a = abs(time_et_h) % 24  # number of hour-roller swipes
        time_et_m = int(set_m) - int(roll_m)  # minute delta between roller and target
        time_et_m_a = abs(time_et_m) % 60  # number of minute-roller swipes
        try:  # deltas differ
            # time_et / time_et_a is 1/-1: sign decides the hour swipe direction.
            pm_value = time_et_h / time_et_h_a
            if pm_value > 0:  # swipe down
                aszh_h = int(szh_h * 1.9)  # single-row height derived from visible row count
            else:  # swipe up
                aszh_h = int(szh_h * 1.4)
            end_y_h = start_y_h - pm_value * aszh_h
        except ZeroDivisionError:  # deltas equal
            end_y_h = start_y_h
        try:
            # Target y coordinate for the minute roller swipe.
            pm_value = time_et_m / time_et_m_a
            if pm_value > 0:  # swipe down
                aszh_m = int(szh_m * 1.9)  # single-row height derived from visible row count
            else:  # swipe up
                aszh_m = int(szh_m * 1.4)
            end_y_m = start_y_m - pm_value * aszh_m
        except ZeroDivisionError:
            end_y_m = start_y_m
        # Minutes first, hours second: at 00:00 the roller auto-increments by one.
        while time_et_m_a > 0:
            self.ac.swipe(start_x_m, start_y_m, start_x_m, end_y_m, self.driver, 0, False)
            print(time_et_m_a)
            time_et_m_a -= 1
            time.sleep(swipe_time)
        while time_et_h_a > 0:
            self.ac.swipe(start_x_h, start_y_h, start_x_h, end_y_h, self.driver, 0, False)
            print(time_et_h_a)
            time_et_h_a -= 1
            time.sleep(swipe_time)
        # Format the timestamp as a string — for logging only.
        start_time = time.strftime("%Y-%m-%d %X", time.localtime(time_start))
        # A delay timer's roller time differs from its actual firing time; convert it.
        if time_seg == "delay":
            delay_time = set_timer[1]
            add_h, add_m = delay_time.split(":")
            time_delay = int(add_h) * 3600 + int(add_m) * 60
            time_set = time_now + time_delay + delay_s
            # set_time = time.strftime("%Y-%m-%d %X", time.localtime(time_set))
        # else:
        #     delay_time = "None"
        #     time_delay = "None"
        #     set_time = time.strftime("%Y-%m-%d %X", time.localtime(time_set))
        # self.debug.info(
        #     "[APP_TIMER]Roll: start_time: %s, set_time: %s, delay_time: %s" % (start_time, set_time, delay_time))
        # self.debug.info(
        #     "[APP_TIMER]Roll: time_start: %s, time_set: %s, time_delay: %s" % (time_start, time_set, time_delay))
        time.sleep(1)
        return int(time_start), int(time_set)
# 设置次数滚轮
def set_count_roll(self, elem, roll_value, set_value):
# 滚轮
lcx, lcy, szw, szh = self.set_roll(elem)
pxx, pxy = elem[3]["px"]
start_x, start_y = int(lcx + pxx * szw), int(lcy + pxy * szh) # 获取滚轮滑动开始坐标值
diff = set_value - roll_value
diff_a = abs(diff)
try:
# 计算滚轮滑动目标坐标值
pm_value = diff / diff_a
if pm_value > 0: # 往下滑
aszh = int(szh * 1.9)
else: # 往上滑
aszh = int(szh * 1.4)
end_y = start_y - pm_value * aszh
except ZeroDivisionError:
end_y = start_y
swipe_time = conf["roll_time"]["GN_F1331"]
while diff_a > 0:
self.ac.swipe(start_x, start_y, start_x, end_y, self.driver, percent=False) # step=25
print(diff_a)
diff_a -= 1
time.sleep(swipe_time)
self.debug.info("roll_value: %s, set_value: %s" % (roll_value, set_value))
time.sleep(1)
    # Create a normal (one-shot) timer.
    def create_normal_timer(self, page, now_time, delay_time, power, delay_s=120, loop=u"执行一次"):
        """
        :param page: page layer on which the timer is configured
        :param now_time: now time
        :param delay_time: delay time
        :param power: power state power on/off
        :param delay_s: how long before the timer fires
        :param loop: everyday/monday etc
        :return: start_time, set_time
        """
        # If delay-timer or cycle-timer modes are currently active, turn them off first.
        try:
            self.wait_widget(self.page[page]["delay_timer_state"])
            self.widget_click(self.page[page]["delay_timer"])
        except TimeoutException:
            pass
        try:
            self.wait_widget(self.page[page]["cycle_timer_state"])
            self.widget_click(self.page[page]["cycle_timer"])
        except TimeoutException:
            pass
        while True:  # avoid the minute rolling over while we are configuring
            if 1 <= int(time.strftime("%S")) <= 50:
                break
            time.sleep(1)
        self.widget_click(self.page[page]["add_normal_timer"],
                          self.page["add_normal_timer_page"]["title"])
        elem_t = time.strftime("%H:%M")
        start_time, start_set_time = self.set_timer_roll(self.page["add_normal_timer_page"]["roll_h"],
                                                         self.page["add_normal_timer_page"]["roll_m"],
                                                         elem_t, now_time, delay_time, delay_s=delay_s)
        now = time.mktime(time.strptime(time.strftime("%Y-%m-%d r:00").replace("r", now_time), "%Y-%m-%d %X"))
        if start_set_time <= now:
            # Target has already passed today — the app schedules it for tomorrow.
            start_set_time = start_set_time + 3600 * 24
        self.widget_click(self.page["add_normal_timer_page"][power])
        self.set_timer_loop("add_normal_timer_page", loop)
        self.widget_click(self.page["add_normal_timer_page"]["saved"])
        start_time = int(time.time())  # NOTE(review): overwrites the value returned by set_timer_roll
        try:
            # A mode/timer conflict popup may appear: confirm it and restamp start_time.
            self.wait_widget(self.page["mode_timer_conflict_popup"]["title"])
            self.widget_click(self.page["mode_timer_conflict_popup"]["confirm"],
                              self.page[page]["title"])
            start_time = int(time.time())
        except TimeoutException:
            self.wait_widget(self.page[page]["title"])
        self.debug.info("[APP_TIMER][%s, %s]Start_time: %s, Start_set_time: %s" % (
            start_time, start_set_time,
            time.strftime("%Y-%m-%d %X", time.localtime(start_time)),
            time.strftime("%Y-%m-%d %X", time.localtime(start_set_time))))
        self.debug.info(u"[APP_TIMER]Start Time: %s[%s]" % (time.strftime("%X"), time.time()))
        return start_time, start_set_time
# 创建循环定时
def create_cycle_timer(self, page, now_time, set_start_time, set_end_time, loop, delay_s=120, cycle=False, loops=1):
"""
:param page: 设置定时层
:param now_time: 当前时间
:param set_start_time: 启动时间时长
:param set_end_time: 关闭时间时长
:param loop: 循环模式
:param delay_s: 定时设定与执行时间差
:param cycle: 是否是类鱼缸模式的连续定时模式
:param loops: 循环为永久循环时需要产出的时间对个数
:return:
"""
try:
self.wait_widget(self.page[page]["delay_timer_state"])
self.widget_click(self.page[page]["delay_timer"])
except TimeoutException:
pass
try:
self.wait_widget(self.page[page]["cycle_timer_state"])
except TimeoutException:
self.widget_click(self.page[page]["cycle_timer"],
self.page[page]["cycle_timer_button"])
self.widget_click(self.page[page]["cycle_timer_time"],
self.page["cycle_timer_page"]["title"])
self.widget_click(self.page["cycle_timer_page"]["open_time"],
self.page["cycle_timer_page"]["roll_h"])
start_time, start_set_time = self.set_timer_roll(self.page["cycle_timer_page"]["roll_h"],
self.page["cycle_timer_page"]["roll_m"],
self.page["cycle_timer_page"]["open_time"],
now_time, set_start_time, cycle, delay_s)
# 判断设置后的滚轮是否与设定一致
attr = self.ac.get_attribute(self.wait_widget(self.page["cycle_timer_page"]["open_time"]), "name")
start_h, start_m = re.findall(u"(\d+)小时", attr), re.findall(u"(\d+)分钟", attr)
if start_h:
attr = "%02d:%02d" % (int(start_h[0]), int(start_m[0]))
else:
attr = "00:%02d" % int(start_m[0])
self.debug.info("attr: %s; set start time: %s" % (attr, set_start_time[1]))
if attr != set_start_time[1]:
raise | |
"1980/81:148"): "metadataonly",
("prop", "1980/81:155"): "metadataonly",
("prop", "1980/81:157"): "metadataonly",
("prop", "1980/81:159"): "metadataonly",
("prop", "1980/81:160"): "metadataonly",
("prop", "1980/81:162"): "metadataonly",
("prop", "1980/81:161"): "metadataonly",
("prop", "1980/81:163"): "metadataonly",
("prop", "1980/81:158"): "metadataonly",
("prop", "1980/81:167"): "metadataonly",
("prop", "1980/81:168"): "metadataonly",
("prop", "1980/81:174"): "metadataonly",
("prop", "1980/81:171"): "metadataonly",
("prop", "1980/81:175"): "metadataonly",
("prop", "1980/81:165"): "metadataonly",
("prop", "1980/81:181"): "metadataonly",
("prop", "1980/81:182"): "metadataonly",
("prop", "1980/81:186"): "metadataonly",
("prop", "1980/81:191"): "metadataonly",
("prop", "1980/81:192"): "metadataonly",
("prop", "1980/81:185"): "metadataonly",
("prop", "1980/81:195"): "metadataonly",
("prop", "1980/81:199"): "metadataonly",
("prop", "1980/81:197"): "metadataonly",
("prop", "1980/81:198"): "metadataonly",
("prop", "1981/82:3"): "metadataonly",
("prop", "1981/82:7"): "metadataonly",
("prop", "1981/82:12"): "metadataonly",
("prop", "1981/82:16"): "metadataonly",
("prop", "1981/82:21"): "metadataonly",
("prop", "1981/82:26"): "metadataonly",
("prop", "1981/82:27"): "metadataonly",
("prop", "1981/82:32"): "metadataonly",
("prop", "1981/82:46"): "metadataonly",
("prop", "1981/82:49"): "metadataonly",
("prop", "1981/82:52"): "metadataonly",
("prop", "1981/82:53"): "metadataonly",
("prop", "1981/82:48"): "metadataonly",
("prop", "1981/82:66"): "metadataonly",
("prop", "1981/82:67"): "metadataonly",
("prop", "1981/82:68"): "metadataonly",
("prop", "1981/82:63"): "metadataonly",
("prop", "1981/82:69"): "metadataonly",
("prop", "1981/82:76"): "metadataonly",
("prop", "1981/82:82"): "metadataonly",
("prop", "1981/82:75"): "metadataonly",
("prop", "1981/82:95"): "metadataonly",
("prop", "1981/82:93"): "metadataonly",
("prop", "1981/82:104"): "metadataonly",
("prop", "1981/82:115"): "metadataonly",
("prop", "1981/82:117"): "metadataonly",
("prop", "1981/82:129"): "metadataonly",
("prop", "1981/82:137"): "metadataonly",
("prop", "1981/82:149"): "metadataonly",
("prop", "1981/82:147"): "metadataonly",
("prop", "1981/82:159"): "metadataonly",
("prop", "1981/82:157"): "metadataonly",
("prop", "1981/82:164"): "metadataonly",
("prop", "1981/82:174"): "metadataonly",
("prop", "1981/82:178"): "metadataonly",
("prop", "1981/82:179"): "metadataonly",
("prop", "1981/82:181"): "metadataonly",
("prop", "1981/82:183"): "metadataonly",
("prop", "1981/82:188"): "metadataonly",
("prop", "1981/82:193"): "metadataonly",
("prop", "1981/82:194"): "metadataonly",
("prop", "1981/82:195"): "metadataonly",
("prop", "1981/82:198"): "metadataonly",
("prop", "1981/82:205"): "metadataonly",
("prop", "1981/82:210"): "metadataonly",
("prop", "1981/82:211"): "metadataonly",
("prop", "1981/82:213"): "metadataonly",
("prop", "1981/82:215"): "metadataonly",
("prop", "1981/82:218"): "metadataonly",
("prop", "1981/82:226"): "metadataonly",
("prop", "1981/82:228"): "metadataonly",
("prop", "1981/82:227"): "metadataonly",
("prop", "1981/82:229"): "metadataonly",
("prop", "1982/83:5"): "metadataonly",
("prop", "1982/83:7"): "metadataonly",
("prop", "1982/83:6"): "metadataonly",
("prop", "1982/83:10"): "metadataonly",
("prop", "1982/83:12"): "metadataonly",
("prop", "1982/83:13"): "metadataonly",
("prop", "1982/83:15"): "metadataonly",
("prop", "1982/83:17"): "metadataonly",
("prop", "1982/83:18"): "metadataonly",
("prop", "1982/83:14"): "metadataonly",
("prop", "1982/83:21"): "metadataonly",
("prop", "1982/83:11"): "metadataonly",
("prop", "1982/83:9"): "metadataonly",
("prop", "1982/83:22"): "metadataonly",
("prop", "1982/83:29"): "metadataonly",
("prop", "1982/83:23"): "metadataonly",
("prop", "1982/83:30"): "metadataonly",
("prop", "1982/83:32"): "metadataonly",
("prop", "1982/83:31"): "metadataonly",
("prop", "1982/83:36"): "metadataonly",
("prop", "1982/83:37"): "metadataonly",
("prop", "1982/83:34"): "metadataonly",
("prop", "1982/83:45"): "metadataonly",
("prop", "1982/83:49"): "metadataonly",
("prop", "1982/83:48"): "metadataonly",
("prop", "1982/83:42"): "metadataonly",
("prop", "1982/83:57"): "metadataonly",
("prop", "1982/83:56"): "metadataonly",
("prop", "1982/83:58"): "metadataonly",
("prop", "1982/83:54"): "metadataonly",
("prop", "1982/83:62"): "metadataonly",
("prop", "1982/83:59"): "metadataonly",
("prop", "1982/83:28"): "metadataonly",
("prop", "1982/83:63"): "metadataonly",
("prop", "1982/83:65"): "metadataonly",
("prop", "1982/83:20"): "metadataonly",
("prop", "1982/83:70"): "metadataonly",
("prop", "1982/83:71"): "metadataonly",
("prop", "1982/83:77"): "metadataonly",
("prop", "1982/83:78"): "metadataonly",
("prop", "1982/83:79"): "metadataonly",
("prop", "1982/83:82"): "metadataonly",
("prop", "1982/83:86"): "metadataonly",
("prop", "1982/83:81"): "metadataonly",
("prop", "1982/83:92"): "metadataonly",
("prop", "1982/83:95"): "metadataonly",
("prop", "1982/83:96"): "metadataonly",
("prop", "1982/83:75"): "metadataonly",
("prop", "1982/83:102"): "metadataonly",
("prop", "1982/83:98"): "metadataonly",
("prop", "1982/83:97"): "metadataonly",
("prop", "1982/83:107"): "metadataonly",
("prop", "1982/83:108"): "metadataonly",
("prop", "1982/83:110"): "metadataonly",
("prop", "1982/83:112"): "metadataonly",
("prop", "1982/83:114"): "metadataonly",
("prop", "1982/83:116"): "metadataonly",
("prop", "1982/83:106"): "metadataonly",
("prop", "1982/83:121"): "metadataonly",
("prop", "1982/83:117"): "metadataonly",
("prop", "1982/83:122"): "metadataonly",
("prop", "1982/83:125"): "metadataonly",
("prop", "1982/83:128"): "metadataonly",
("prop", "1982/83:131"): "metadataonly",
("prop", "1982/83:129"): "metadataonly",
("prop", "1982/83:132"): "metadataonly",
("prop", "1982/83:137"): "metadataonly",
("prop", "1982/83:133"): "metadataonly",
("prop", "1982/83:140"): "metadataonly",
("prop", "1982/83:138"): "metadataonly",
("prop", "1982/83:143"): "metadataonly",
("prop", "1982/83:135"): "metadataonly",
("prop", "1982/83:148"): "metadataonly",
("prop", "1982/83:146"): "metadataonly",
("prop", "1982/83:149"): "metadataonly",
("prop", "1982/83:142"): "metadataonly",
("prop", "1982/83:152"): "metadataonly",
("prop", "1982/83:154"): "metadataonly",
("prop", "1982/83:156"): "metadataonly",
("prop", "1982/83:151"): "metadataonly",
("prop", "1982/83:157"): "metadataonly",
("prop", "1982/83:160"): "metadataonly",
("prop", "1982/83:164"): "metadataonly",
("prop", "1982/83:161"): "metadataonly",
("prop", "1982/83:166"): "metadataonly",
("prop", "1982/83:163"): "metadataonly",
("prop", "1982/83:167"): "metadataonly",
("prop", "1982/83:170"): "metadataonly",
("prop", "1982/83:169"): "metadataonly",
("prop", "1982/83:171"): "metadataonly",
("prop", "1982/83:175"): "metadataonly",
("prop", "1982/83:176"): "metadataonly",
("prop", "1982/83:103"): "metadataonly",
("prop", "1983/84:3"): "metadataonly",
("prop", "1983/84:4"): "metadataonly",
("prop", "1983/84:2"): "metadataonly",
("prop", "1983/84:7"): "metadataonly",
("prop", "1983/84:12"): "metadataonly",
("prop", "1983/84:15"): "metadataonly",
("prop", "1983/84:24"): "metadataonly",
("prop", "1983/84:31"): "metadataonly",
("prop", "1983/84:32"): "metadataonly",
("prop", "1983/84:35"): "metadataonly",
("prop", "1983/84:37"): "metadataonly",
("prop", "1983/84:42"): "metadataonly",
("prop", "1983/84:39"): "metadataonly",
("prop", "1983/84:44"): "metadataonly",
("prop", "1983/84:22"): "metadataonly",
("prop", "1983/84:49"): "metadataonly",
("prop", "1983/84:45"): "metadataonly",
("prop", "1983/84:48"): "metadataonly",
("prop", "1983/84:46"): "metadataonly",
("prop", "1983/84:53"): "metadataonly",
("prop", "1983/84:52"): "metadataonly",
("prop", "1983/84:54"): "metadataonly",
("prop", "1983/84:69"): "metadataonly",
("prop", "1983/84:74"): "metadataonly",
("prop", "1983/84:76"): "metadataonly",
("prop", "1983/84:77"): "metadataonly",
("prop", "1983/84:67"): "metadataonly",
("prop", "1983/84:82"): "metadataonly",
("prop", "1983/84:83"): "metadataonly",
("prop", "1983/84:81"): "metadataonly",
("prop", "1983/84:86"): "metadataonly",
("prop", "1983/84:88"): "metadataonly",
("prop", "1983/84:91"): "metadataonly",
("prop", "1983/84:73"): "metadataonly",
("prop", "1983/84:101"): "metadataonly",
("prop", "1983/84:102"): "metadataonly",
("prop", "1983/84:99"): "metadataonly",
("prop", "1983/84:85"): "metadataonly",
("prop", "1983/84:94"): "metadataonly",
("prop", "1983/84:109"): "metadataonly",
("prop", "1983/84:113"): "metadataonly",
("prop", "1983/84:114"): "metadataonly",
("prop", "1983/84:112"): "metadataonly",
("prop", "1983/84:121"): "metadataonly",
("prop", "1983/84:122"): "metadataonly",
("prop", "1983/84:130"): "metadataonly",
("prop", "1983/84:132"): "metadataonly",
("prop", "1983/84:131"): "metadataonly",
("prop", "1983/84:139"): "metadataonly",
("prop", "1983/84:140"): "metadataonly",
("prop", "1983/84:143"): "metadataonly",
("prop", "1983/84:147"): "metadataonly",
("prop", "1983/84:146"): "metadataonly",
("prop", "1983/84:151"): "metadataonly",
("prop", "1983/84:153"): "metadataonly",
("prop", "1983/84:154"): "metadataonly",
("prop", "1983/84:156"): "metadataonly",
("prop", "1983/84:161"): "metadataonly",
("prop", "1983/84:163"): "metadataonly",
("prop", "1983/84:162"): "default",
("prop", "1983/84:166"): "metadataonly",
("prop", "1983/84:165"): "default",
("prop", "1983/84:164"): "metadataonly",
("prop", "1983/84:171"): "metadataonly",
("prop", "1983/84:173"): "metadataonly",
("prop", "1983/84:175"): "metadataonly",
("prop", "1983/84:177"): "metadataonly",
("prop", "1983/84:172"): "metadataonly",
("prop", "1983/84:181"): "metadataonly",
("prop", "1983/84:168"): "metadataonly",
("prop", "1983/84:180"): "metadataonly",
("prop", "1983/84:182"): "metadataonly",
("prop", "1983/84:186"): "metadataonly",
("prop", "1983/84:185"): "metadataonly",
("prop", "1983/84:188"): "metadataonly",
("prop", "1983/84:193"): "metadataonly",
("prop", "1983/84:195"): "metadataonly",
("prop", "1983/84:198"): "metadataonly",
("prop", "1983/84:191"): "metadataonly",
("prop", "1983/84:200"): "metadataonly",
("prop", "1984/85:8"): "metadataonly",
("prop", "1984/85:9"): "metadataonly",
("prop", "1984/85:17"): "metadataonly",
("prop", "1984/85:27"): "metadataonly",
("prop", "1984/85:52"): "metadataonly",
("prop", "1984/85:64"): "metadataonly",
("prop", "1984/85:63"): "metadataonly",
("prop", "1984/85:112"): "metadataonly",
("prop", "1984/85:117"): "default",
("prop", "1984/85:130"): "metadataonly",
("prop", "1984/85:136"): "metadataonly",
("prop", "1984/85:138"): "metadataonly",
("prop", "1984/85:144"): "metadataonly",
("prop", "1984/85:146"): "metadataonly",
("prop", "1984/85:153"): "metadataonly",
("prop", "1984/85:167"): "metadataonly",
("prop", "1984/85:186"): "metadataonly",
("prop", "1984/85:188"): "metadataonly",
("prop", "1984/85:207"): "metadataonly",
("prop", "1984/85:221"): "metadataonly",
("prop", "1985/86:32"): "metadataonly",
("prop", "1985/86:40"): "metadataonly",
("prop", "1985/86:50"): "metadataonly",
("prop", "1985/86:66"): "metadataonly",
("prop", "1985/86:72"): "metadataonly",
("prop", "1985/86:76"): "metadataonly",
("prop", "1985/86:20"): "metadataonly",
("prop", "1985/86:81"): "metadataonly",
("prop", "1985/86:75"): "metadataonly",
("prop", "1985/86:103"): "metadataonly",
("prop", "1985/86:134"): "metadataonly",
("prop", "1985/86:149"): "metadataonly",
("prop", "1985/86:157"): "metadataonly",
("prop", "1985/86:165"): "metadataonly",
("prop", "1985/86:164"): "metadataonly",
("prop", "1985/86:176"): "metadataonly",
("prop", "1985/86:153"): "metadataonly",
("prop", "1985/86:166"): "metadataonly",
("prop", "1986/87:4"): "metadataonly",
("prop", "1986/87:9"): "metadataonly",
("prop", "1986/87:19"): "metadataonly",
("prop", "1986/87:13"): "metadataonly",
("prop", "1986/87:28"): "metadataonly",
("prop", "1986/87:29"): "metadataonly",
("prop", "1986/87:32"): "metadataonly",
("prop", "1986/87:27"): "metadataonly",
("prop", "1986/87:38"): "metadataonly",
("prop", "1986/87:45"): "metadataonly",
("prop", "1986/87:41"): "metadataonly",
("prop", "1986/87:51"): "metadataonly",
("prop", "1986/87:57"): "metadataonly",
("prop", "1986/87:59"): "metadataonly",
("prop", "1986/87:63"): "metadataonly",
("prop", "1986/87:66"): "metadataonly",
("prop", "1986/87:68"): "metadataonly",
("prop", "1986/87:70"): "metadataonly",
("prop", "1986/87:73"): "metadataonly",
("prop", "1986/87:75"): "metadataonly",
("prop", "1986/87:76"): "metadataonly",
("prop", "1986/87:83"): "metadataonly",
("prop", "1986/87:79"): "metadataonly",
("prop", "1986/87:92"): "metadataonly",
("prop", "1986/87:101"): "metadataonly",
("prop", "1986/87:104"): "metadataonly",
("prop", "1986/87:105"): "metadataonly",
("prop", "1986/87:108"): "metadataonly",
("prop", "1986/87:111"): "metadataonly",
("prop", "1986/87:117"): "metadataonly",
("prop", "1986/87:120"): "metadataonly",
("prop", "1986/87:118"): "metadataonly",
("prop", "1986/87:102"): "metadataonly",
("prop", "1986/87:126"): "metadataonly",
("prop", "1986/87:125"): "metadataonly",
("prop", "1986/87:130"): "metadataonly",
("prop", "1986/87:132"): "metadataonly",
("prop", "1986/87:133"): "metadataonly",
("prop", "1986/87:138"): "metadataonly",
("prop", "1986/87:139"): "metadataonly",
("prop", "1986/87:140"): "metadataonly",
("prop", "1986/87:142"): "metadataonly",
("prop", "1986/87:141"): "metadataonly",
("prop", "1986/87:144"): "metadataonly",
("prop", "1986/87:145"): "metadataonly",
("prop", "1986/87:152"): "metadataonly",
("prop", "1986/87:155"): "metadataonly",
("prop", "1986/87:156"): "metadataonly",
("prop", "1986/87:164"): "metadataonly",
("prop", "1986/87:165"): "metadataonly",
("prop", "1987/88:2"): "metadataonly",
("prop", "1987/88:7"): "metadataonly",
("prop", "1987/88:8"): "metadataonly",
("prop", "1987/88:9"): "metadataonly",
("prop", "1987/88:12"): "metadataonly",
("prop", "1987/88:18"): "metadataonly",
("prop", "1987/88:28"): "metadataonly",
("prop", "1987/88:33"): "metadataonly",
("prop", "1987/88:34"): "metadataonly",
("prop", "1987/88:32"): "metadataonly",
("prop", "1987/88:36"): "metadataonly",
("prop", "1987/88:39"): "metadataonly",
("prop", "1987/88:44"): "metadataonly",
("prop", "1987/88:35"): "metadataonly",
("prop", "1987/88:48"): "metadataonly",
("prop", "1987/88:51"): "metadataonly",
("prop", "1987/88:47"): "metadataonly",
("prop", "1987/88:59"): "metadataonly",
("prop", "1987/88:67"): "metadataonly",
("prop", "1987/88:70"): "metadataonly",
("prop", "1987/88:72"): "metadataonly",
("prop", "1987/88:76"): "metadataonly",
("prop", "1987/88:75"): "metadataonly",
("prop", "1987/88:79"): "metadataonly",
("prop", "1987/88:83"): "metadataonly",
("prop", "1987/88:89"): "metadataonly",
("prop", "1987/88:91"): "metadataonly",
("prop", "1987/88:94"): "metadataonly",
("prop", "1987/88:97"): "metadataonly",
("prop", "1987/88:106"): "metadataonly",
("prop", "1987/88:112"): "metadataonly",
("prop", "1987/88:111"): "metadataonly",
("prop", "1987/88:117"): "metadataonly",
("prop", "1987/88:119"): "metadataonly",
("prop", "1987/88:114"): "metadataonly",
("prop", "1987/88:127"): "metadataonly",
("prop", "1987/88:126"): "metadataonly",
("prop", "1987/88:131"): "metadataonly",
("prop", "1987/88:134"): "metadataonly",
("prop", "1987/88:136"): "metadataonly",
("prop", "1987/88:142"): "metadataonly",
("prop", "1987/88:141"): "metadataonly",
("prop", "1987/88:151"): "metadataonly",
("prop", "1987/88:152"): "metadataonly",
("prop", "1987/88:156"): "metadataonly",
("prop", "1987/88:163"): "metadataonly",
("prop", "1987/88:166"): "metadataonly",
("prop", "1987/88:168"): "metadataonly",
("prop", "1987/88:169"): "metadataonly",
("prop", "1987/88:173"): "metadataonly",
("prop", "1987/88:170"): "metadataonly",
("prop", "1987/88:178"): "metadataonly",
("prop", "1987/88:175"): "metadataonly",
("prop", "1989/90:8"): "metadataonly",
("prop", "1989/90:6"): "metadataonly",
("prop", "1989/90:19"): "metadataonly",
("prop", "1989/90:36"): "metadataonly",
("prop", "1989/90:48"): "metadataonly",
("prop", "1989/90:52"): "metadataonly",
("prop", "1989/90:57"): "metadataonly",
("prop", "1989/90:65"): "metadataonly",
("prop", "1989/90:83"): "metadataonly",
("prop", "1989/90:94"): "metadataonly",
("prop", "1989/90:96"): "metadataonly",
("prop", "1989/90:106"): "metadataonly",
("prop", "1989/90:108"): "metadataonly",
("prop", "1989/90:109"): "metadataonly",
("prop", "1989/90:113"): "metadataonly",
("prop", "1989/90:115"): "metadataonly",
("prop", "1989/90:114"): "metadataonly",
("prop", "1989/90:122"): "metadataonly",
("prop", "1989/90:126"): "metadataonly",
("prop", "1989/90:127"): "metadataonly",
("prop", "1989/90:128"): "metadataonly",
("prop", "1989/90:132"): "metadataonly",
("prop", "1989/90:133"): "metadataonly",
("prop", "1989/90:134"): "metadataonly",
("prop", "1989/90:140"): "metadataonly",
("prop", "1989/90:142"): "metadataonly",
("prop", "1989/90:141"): "metadataonly",
("prop", "1989/90:145"): "metadataonly",
("prop", "1989/90:147"): "metadataonly",
("prop", "1989/90:148"): "metadataonly",
("prop", "1989/90:149"): "metadataonly",
("prop", "1989/90:152"): "metadataonly",
("prop", "1989/90:159"): "metadataonly",
("prop", "1989/90:155"): "metadataonly",
("prop", "1990/91:13"): "metadataonly",
| |
"""
Code associated with the left-hand tree view for tests
"""
import gtk, gobject, pango, guiutils, plugins, logging
from ordereddict import OrderedDict
class TestColumnGUI(guiutils.SubGUI):
def __init__(self, dynamic, testCount):
guiutils.SubGUI.__init__(self)
self.addedCount = 0
self.totalNofTests = testCount
self.totalNofDistinctTests = testCount
self.nofSelectedTests = 0
self.nofDistinctSelectedTests = 0
self.totalNofTestsShown = 0
self.versionString = ""
self.column = None
self.dynamic = dynamic
self.testSuiteSelection = False
self.diag = logging.getLogger("Test Column GUI")
self.allSuites = []
def addSuites(self, suites):
self.allSuites = suites
def createView(self):
testRenderer = gtk.CellRendererText()
self.column = gtk.TreeViewColumn(self.getTitle(), testRenderer, text=0, background=1, foreground=7)
self.column.set_data("name", "Test Name") # Not a widget, so we can't set a name, do this instead
self.column.set_resizable(True)
self.column.set_cell_data_func(testRenderer, self.renderSuitesBold)
if not self.dynamic:
self.column.set_clickable(True)
self.column.connect("clicked", self.columnClicked)
if guiutils.guiConfig.getValue("auto_sort_test_suites") == 1:
self.column.set_sort_indicator(True)
self.column.set_sort_order(gtk.SORT_ASCENDING)
elif guiutils.guiConfig.getValue("auto_sort_test_suites") == -1:
self.column.set_sort_indicator(True)
self.column.set_sort_order(gtk.SORT_DESCENDING)
return self.column
def renderSuitesBold(self, dummy, cell, model, iter):
if model.get_value(iter, 2)[0].classId() == "test-case":
cell.set_property('font', "")
else:
cell.set_property('font', "bold")
def columnClicked(self, *args):
if not self.column.get_sort_indicator():
self.column.set_sort_indicator(True)
self.column.set_sort_order(gtk.SORT_ASCENDING)
order = 1
else:
order = self.column.get_sort_order()
if order == gtk.SORT_ASCENDING:
self.column.set_sort_order(gtk.SORT_DESCENDING)
order = -1
else:
self.column.set_sort_indicator(False)
order = 0
self.notify("ActionStart")
self.setSortingOrder(order)
if order == 1:
self.notify("Status", "Tests sorted in alphabetical order.")
elif order == -1:
self.notify("Status", "Tests sorted in descending alphabetical order.")
else:
self.notify("Status", "Tests sorted according to testsuite file.")
self.notify("RefreshTestSelection")
self.notify("ActionStop")
def setSortingOrder(self, order, suite = None):
if not suite:
for suite in self.allSuites:
self.setSortingOrder(order, suite)
else:
self.notify("Status", "Sorting suite " + suite.name + " ...")
self.notify("ActionProgress")
suite.autoSortOrder = order
suite.updateOrder()
for test in suite.testcases:
if test.classId() == "test-suite":
self.setSortingOrder(order, test)
def getTitle(self):
title = "Tests: "
if self.versionString and len(self.versionString) > 40:
reducedVersionString = self.versionString[:40] + "..."
else:
reducedVersionString = self.versionString
if self.testSuiteSelection:
# We don't care about totals with test suites
title += plugins.pluralise(self.nofSelectedTests, "suite") + " selected"
if self.versionString:
title += ", " + reducedVersionString
elif self.nofDistinctSelectedTests != self.nofSelectedTests:
title += ", " + str(self.nofDistinctSelectedTests) + " distinct"
return title
if self.nofSelectedTests == self.totalNofTests:
title += "All " + str(self.totalNofTests) + " selected"
else:
title += str(self.nofSelectedTests) + "/" + str(self.totalNofTests) + " selected"
if not self.dynamic:
if self.versionString:
title += ", " + reducedVersionString
elif self.totalNofDistinctTests != self.totalNofTests:
if self.nofDistinctSelectedTests == self.totalNofDistinctTests:
title += ", all " + str(self.totalNofDistinctTests) + " distinct"
else:
title += ", " + str(self.nofDistinctSelectedTests) + "/" + str(self.totalNofDistinctTests) + " distinct"
if self.totalNofTestsShown == self.totalNofTests:
if self.dynamic and self.totalNofTests > 0:
title += ", none hidden"
elif self.totalNofTestsShown == 0:
title += ", all hidden"
else:
title += ", " + str(self.totalNofTests - self.totalNofTestsShown) + " hidden"
return title
def updateTitle(self, initial=False):
if self.column:
self.column.set_title(self.getTitle())
def notifyTestTreeCounters(self, totalDelta, totalShownDelta, totalRowsDelta, initial=False):
self.addedCount += totalDelta
if not initial or self.totalNofTests < self.addedCount:
self.totalNofTests += totalDelta
self.totalNofDistinctTests += totalRowsDelta
self.totalNofTestsShown += totalShownDelta
self.updateTitle(initial)
def notifyAllRead(self):
if self.addedCount != self.totalNofTests:
self.totalNofTests = self.addedCount
self.updateTitle()
def countTests(self, tests):
if self.dynamic:
return len(tests), False
testCount, suiteCount = 0, 0
for test in tests:
if test.classId() == "test-case":
testCount += 1
else:
suiteCount += 1
if suiteCount and not testCount:
return suiteCount, True
else:
return testCount, False
def getVersionString(self, tests, distinctTestCount):
if not self.dynamic and distinctTestCount == 1 and self.totalNofTests != self.totalNofDistinctTests:
versions = [ test.app.getFullVersion().replace("_", "__") or "<default>" for test in tests ]
return "version" + ("s" if len(versions) > 1 else "") + " " + ",".join(versions)
else:
return ""
    def notifyNewTestSelection(self, tests, dummyApps, distinctTestCount, *args, **kw):
        """Observer hook for selection changes: delegate to updateTestInfo."""
        self.updateTestInfo(tests, distinctTestCount)
def updateTestInfo(self, tests, distinctTestCount):
newCount, suitesOnly = self.countTests(tests)
if distinctTestCount > newCount:
distinctTestCount = newCount
newVersionStr = self.getVersionString(tests, distinctTestCount)
if self.nofSelectedTests != newCount or newVersionStr != self.versionString or \
self.nofDistinctSelectedTests != distinctTestCount or suitesOnly != self.testSuiteSelection:
self.diag.info("New selection count = " + repr(newCount) + ", distinct = " + str(distinctTestCount) + ", test suites only = " + repr(suitesOnly))
self.nofSelectedTests = newCount
self.nofDistinctSelectedTests = distinctTestCount
self.testSuiteSelection = suitesOnly
self.versionString = newVersionStr
self.updateTitle()
def notifyVisibility(self, tests, newValue):
testCount = sum((int(test.classId() == "test-case") for test in tests))
if newValue:
self.totalNofTestsShown += testCount
else:
self.totalNofTestsShown -= testCount
self.updateTitle()
class TestIteratorMap:
    """Map from tests to their tree-model iterators.

    In the dynamic GUI the test object itself is the key; statically the key
    is the (parent application, relative path) pair so that the same logical
    test is shared across application 'extras'.
    """
    def __init__(self, dynamic, allApps):
        self.dict = OrderedDict()
        self.dynamic = dynamic
        self.parentApps = {}
        for app in allApps:
            for extra in [ app ] + app.extras:
                self.parentApps[extra] = app
    def getKey(self, test):
        """Return the lookup key for 'test' (None maps to None statically)."""
        if self.dynamic:
            return test
        elif test is not None:
            return self.parentApps.get(test.app, test.app), test.getRelPath()
    def store(self, test, iter):
        self.dict[self.getKey(test)] = iter
    def updateIterator(self, test, oldRelPath):
        # relative path of test has changed: re-file the iterator under the
        # new key, dropping the stale entry.
        oldKey = self.parentApps.get(test.app, test.app), oldRelPath
        storedIter = self.dict.get(oldKey)
        if storedIter is not None:
            self.store(test, storedIter)
            del self.dict[oldKey]
            return storedIter
        else:
            return self.getIterator(test)
    def getIterator(self, test):
        """Return the stored iterator for 'test', or None."""
        return self.dict.get(self.getKey(test))
    def remove(self, test):
        # Bug fix: dict.has_key() is Python-2-only; the 'in' operator is
        # equivalent and works on both Python 2 and 3.
        key = self.getKey(test)
        if key in self.dict:
            del self.dict[key]
class TestTreeGUI(guiutils.ContainerGUI):
    def __init__(self, dynamic, allApps, popupGUI, subGUI):
        """Set up the tree store, iterator map and view state.

        Model columns (as bound later in createView/addSuiteWithParent):
        0 node name, 1 colour, 2 list of test objects, 3 details text,
        4 details background, 5 visibility flag (filter column),
        6 recalculation stock icon, 7 name foreground colour.
        """
        guiutils.ContainerGUI.__init__(self, [ subGUI ])
        self.model = gtk.TreeStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,\
                                   gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_BOOLEAN, \
                                   gobject.TYPE_STRING, gobject.TYPE_STRING)
        self.popupGUI = popupGUI
        self.itermap = TestIteratorMap(dynamic, allApps)
        self.selection = None
        self.selecting = False  # guards against reacting to our own selection changes
        self.selectedTests = []
        self.clipboardTests = set()
        self.dynamic = dynamic
        self.collapseStatic = self.getCollapseStatic()
        self.successPerSuite = {} # map from suite to tests succeeded
        self.collapsedRows = {}  # rows the user collapsed by hand, keyed by real path
        self.filteredModel = None
        self.treeView = None
        self.newTestsVisible = guiutils.guiConfig.showCategoryByDefault("not_started")
        self.diag = logging.getLogger("Test Tree")
        self.longActionRunning = False
        self.recreateOnActionStop = False
    def notifyDefaultVisibility(self, newValue):
        # Controls whether rows added from now on start out visible.
        self.newTestsVisible = newValue
def isExpanded(self, iter):
parentIter = self.filteredModel.iter_parent(iter)
return not parentIter or self.treeView.row_expanded(self.filteredModel.get_path(parentIter))
def getCollapseStatic(self):
if self.dynamic:
return False
else:
return guiutils.guiConfig.getValue("static_collapse_suites")
    def notifyAllRead(self, *args):
        """All tests have been read: wire up dynamic-GUI row tracking, or
        reveal everything in the static GUI, then forward the notification."""
        if self.dynamic:
            # From now on, newly inserted rows should expand their parents.
            self.filteredModel.connect('row-inserted', self.rowInserted)
        else:
            self.newTestsVisible = True
            self.model.foreach(self.makeRowVisible)
            if self.collapseStatic:
                # Expand only the root level, leaving suites collapsed.
                self.expandLevel(self.treeView, self.filteredModel.get_iter_root())
            else:
                self.treeView.expand_all()
        self.notify("AllRead")
    def makeRowVisible(self, model, dummyPath, iter):
        # foreach callback: column 5 is the visibility flag that the
        # filtered model keys on (set_visible_column(5) in createView).
        model.set_value(iter, 5, True)
def getNodeName(self, suite, parent):
nodeName = suite.name
if parent == None:
appName = suite.app.name + suite.app.versionSuffix()
if appName != nodeName:
nodeName += " (" + appName + ")"
return nodeName
    def addSuiteWithParent(self, suite, parent, follower=None):
        """Insert a row for 'suite' under 'parent' (before 'follower', if given)
        and record its iterator; expand to it when it starts out visible."""
        nodeName = self.getNodeName(suite, parent)
        self.diag.info("Adding node with name " + nodeName)
        colour = guiutils.guiConfig.getTestColour("not_started")
        # Row layout matches the TreeStore columns declared in __init__.
        row = [ nodeName, colour, [ suite ], "", colour, self.newTestsVisible, "", "black" ]
        iter = self.model.insert_before(parent, follower, row)
        # Store a copy of the iterator — presumably so later model changes
        # cannot invalidate the stored reference; TODO confirm.
        storeIter = iter.copy()
        self.itermap.store(suite, storeIter)
        path = self.model.get_path(iter)
        if self.newTestsVisible and parent is not None:
            filterPath = self.filteredModel.convert_child_path_to_path(path)
            self.treeView.expand_to_path(filterPath)
        return iter
    def createView(self):
        """Build the filtered tree view, columns, and signal connections.

        Returns the scrolled widget containing the tree view.
        """
        # Only rows whose column-5 flag is True are shown.
        self.filteredModel = self.model.filter_new()
        self.filteredModel.set_visible_column(5)
        self.treeView = gtk.TreeView(self.filteredModel)
        self.treeView.set_search_column(0)
        self.treeView.set_name("Test Tree")
        self.treeView.expand_all()
        self.selection = self.treeView.get_selection()
        self.selection.set_mode(gtk.SELECTION_MULTIPLE)
        if self.dynamic:
            # Dynamic GUI: only test cases may be selected (see canSelect).
            self.selection.set_select_function(self.canSelect)
        testsColumn = self.subguis[0].createView()
        self.treeView.append_column(testsColumn)
        if self.dynamic:
            # Dynamic GUI gets an extra "Details" column with wrapped text
            # (model column 3/4) and a recalculation icon (model column 6).
            detailsRenderer = gtk.CellRendererText()
            detailsRenderer.set_property('wrap-width', 350)
            detailsRenderer.set_property('wrap-mode', pango.WRAP_WORD_CHAR)
            recalcRenderer = gtk.CellRendererPixbuf()
            detailsColumn = gtk.TreeViewColumn("Details")
            detailsColumn.pack_start(detailsRenderer, expand=True)
            detailsColumn.pack_start(recalcRenderer, expand=False)
            detailsColumn.add_attribute(detailsRenderer, 'text', 3)
            detailsColumn.add_attribute(detailsRenderer, 'background', 4)
            detailsColumn.add_attribute(recalcRenderer, 'stock_id', 6)
            detailsColumn.set_resizable(True)
            guiutils.addRefreshTips(self.treeView, "test", recalcRenderer, detailsColumn, 6)
            self.treeView.append_column(detailsColumn)
        self.treeView.connect('row-expanded', self.rowExpanded)
        self.expandLevel(self.treeView, self.filteredModel.get_iter_root())
        self.treeView.connect("button_press_event", self.popupGUI.showMenu)
        self.selection.connect("changed", self.userChangedSelection)
        self.treeView.show()
        self.popupGUI.createView()
        return self.addScrollBars(self.treeView, hpolicy=gtk.POLICY_NEVER)
    def notifyTopWindow(self, *args):
        """Observer hook fired when the top-level window appears."""
        # avoid the quit button getting initial focus, give it to the tree view (why not?)
        self.treeView.grab_focus()
def canSelect(self, path):
pathIter = self.filteredModel.get_iter(path)
test = self.filteredModel.get_value(pathIter, 2)[0]
return test.classId() == "test-case"
def rowExpanded(self, treeview, iter, path):
if self.dynamic:
realPath = self.filteredModel.convert_path_to_child_path(path)
if self.collapsedRows.has_key(realPath):
del self.collapsedRows[realPath]
self.expandLevel(treeview, self.filteredModel.iter_children(iter), not self.collapseStatic)
    def rowInserted(self, model, dummy, iter):
        # A new row appeared in the filtered model: make sure its parent row
        # is expanded so the new entry is actually visible.
        self.expandRow(model.iter_parent(iter), False)
def expandRow(self, iter, recurse):
if iter == None:
return
path = self.filteredModel.get_path(iter)
realPath = self.filteredModel.convert_path_to_child_path(path)
# Iterate over children, call self if they have children
if not self.collapsedRows.has_key(realPath):
self.diag.info("Expanding path at " + repr(realPath))
self.treeView.expand_row(path, open_all=False)
if recurse:
childIter = self.filteredModel.iter_children(iter)
while (childIter != None):
if self.filteredModel.iter_has_child(childIter):
self.expandRow(childIter, True)
childIter = self.filteredModel.iter_next(childIter)
    def collapseRow(self, iter):
        """Collapse the row at 'iter' and remember it as manually collapsed."""
        # To make sure that the path is marked as 'collapsed' even if the row cannot be collapsed
        # (if the suite is empty, or not shown at all), we set self.collapsedRow manually, instead of
        # waiting for rowCollapsed() to do it at the 'row-collapsed' signal (which will not be emitted
        # in the above cases)
        path = self.model.get_path(iter)
        self.diag.info("Collapsed path " + repr(path))
        self.collapsedRows[path] = 1
        # Collapsing rows can cause indirect changes of selection, make sure we indicate this.
        self.selecting = True
        filterPath = self.filteredModel.convert_child_path_to_path(path)
        if filterPath is not None: # don't collapse if it's already hidden
            self.selection.get_tree_view().collapse_row(filterPath)
        self.selecting = False
        self.selectionChanged(direct=False)
    def userChangedSelection(self, *args):
        # Ignore selection churn we caused ourselves (self.selecting) and
        # selections flagged with 'unseen_changes'.
        if not self.selecting and not hasattr(self.selection, "unseen_changes"):
            self.selectionChanged(direct=True)
def selectionChanged(self, direct):
newSelection = self.getSelected()
if newSelection != self.selectedTests:
| |
"""
common_sim_browndye2
Base objects and routines for preparing and running Browndye2
simulations.
"""
import xml.etree.ElementTree as ET
from xml.dom import minidom
import os
import glob
import re
import parmed
import numpy as np
# File-name conventions shared by the Browndye2 preparation routines.
BROWNDYE_TRAJ_PREFIX = "traj"  # prefix for trajectory output files
BROWNDYE_INPUT_FILENAME = "input.xml"  # main Browndye2 input file
BROWNDYE_APBS_INPUT_FILENAME = "apbs_input.xml"  # input for the make_apbs_inputs program
BROWNDYE_RECEPTOR = "receptor"  # label for the receptor molecule
BROWNDYE_LIGAND = "ligand"  # label for the ligand molecule
class Ion():
    """
    Represent an Ion object in Browndye2 for generating APBS grids
    and Debye Length.

    Attributes:
    -----------
    radius : float
        The radius of the ion in units of Angstroms.
    charge : float
        The charge of the ion (the original docs leave the unit
        unstated; presumably units of elementary charge).
    conc : float
        The concentration of the ion in solution in units of moles per
        liter.
    """
    def __init__(self):
        # Negative sentinels mean "not yet assigned".
        self.radius = -1.0
        self.charge = -1.0
        self.conc = -1.0
        return

    def serialize(self, xmlIon):
        """
        Serialize this object to XML.

        Parameters:
        -----------
        xmlIon : ElementTree.SubElement
            All sub elements get added to this root.
        """
        assert self.radius >= 0.0, "Ion radius must be set"
        ET.SubElement(xmlIon, 'radius').text = str(self.radius)
        ET.SubElement(xmlIon, 'charge').text = str(self.charge)
        assert self.conc >= 0.0, "Ion concentration must be set"
        ET.SubElement(xmlIon, 'conc').text = str(self.conc)
        return
class Solvent():
    """
    Parameters to represent the solvent within the BD simulation.

    Attributes:
    -----------
    debye_length : float
        The Debye length is a distance inversely related to the
        strength and concentration of ions in solution.
    dielectric : float, Default 78.0
        The dielectric of solvent, relative to vacuum permittivity.
    relative_viscosity : float, Default 1.0
        Relative to water viscosity.
    kT : float
        Thermal energy relative to Boltzmann's constant times 298 K.
    desolvation_parameter : float, Default 1.0
        Factor that multiplies desolvation energy.
    ions : list
        A list of Ion() objects for APBS input
    """
    def __init__(self):
        self.debye_length = -1.0  # negative sentinel: not yet assigned
        self.dielectric = 78.0
        self.relative_viscosity = 1.0
        self.kT = -1.0  # negative sentinel: not yet assigned
        self.desolvation_parameter = 1.0
        self.ions = []
        return

    def serialize(self, xmlSolvent, make_apbs_mode=True):
        """
        Serialize this object to XML.

        Parameters:
        -----------
        xmlSolvent : ElementTree.SubElement
            All sub elements get added to this root.
        make_apbs_mode : bool
            Whether this object should be serialized for the
            make_apbs_inputs program. In that mode the ion list is
            written and the Debye length is omitted (make_apbs_inputs
            derives it); otherwise the Debye length must be assigned.
        """
        if not make_apbs_mode:
            assert self.debye_length > 0.0, "Solvent Debye length must be assigned."
            xmlSolventDebye = ET.SubElement(xmlSolvent, 'debye_length')
            xmlSolventDebye.text = str(self.debye_length)
        assert self.dielectric > 0.0
        xmlSolventDielectric = ET.SubElement(xmlSolvent, 'dielectric')
        xmlSolventDielectric.text = str(self.dielectric)
        assert self.relative_viscosity > 0.0
        xmlSolventRelVisc = ET.SubElement(xmlSolvent, 'relative_viscosity')
        xmlSolventRelVisc.text = str(self.relative_viscosity)
        assert self.kT > 0.0
        xmlSolventKT = ET.SubElement(xmlSolvent, 'kT')
        xmlSolventKT.text = str(self.kT)
        assert self.desolvation_parameter > 0.0
        xmlSolventDesolv = ET.SubElement(xmlSolvent, 'desolvation_parameter')
        xmlSolventDesolv.text = str(self.desolvation_parameter)
        if make_apbs_mode:
            xmlSolventIons = ET.SubElement(xmlSolvent, 'ions')
            for ion in self.ions:
                xmlIon = ET.SubElement(xmlSolventIons, 'ion')
                # Bug fix: Ion.serialize() populates xmlIon in place and
                # returns None, so its result must not be assigned to
                # xmlIon.text (which set the element text to None).
                ion.serialize(xmlIon)
        return
class Time_step_tolerances():
    """
    Parameters to represent the limitations on the time step sizes
    within the BD simulation.

    Attributes:
    -----------
    force : float or None, Default None
        Dimensionless parameter that governs the splitting of the
        adaptive time step in response to changes in force and torque.
    reaction : float or None, Default None
        Dimensionless parameter that governs the splitting of the
        adaptive time step in response to changes in the reaction
        coordinate near a reaction.
    minimum_core_dt : float, default 0.0
        For a system with only rigid cores and frozen chains, this is
        the smallest possible time step size in picoseconds.
    minimum_chain_dt : float or None, default None
        For a system with unfrozen chains, this is the smallest
        possible time step size in picoseconds.
    minimum_core_reaction_dt : float, default 0.0
        If a reaction coordinate is small, then the minimum step size
        given by minimum_core_dt can be overridden by the value given
        here.
    minimum_chain_reaction_dt : float or None, default None
        This has the same relation to minimum_chain_dt.
    """
    def __init__(self):
        self.force = None
        self.reaction = None
        self.minimum_core_dt = 0.0
        self.minimum_chain_dt = None
        self.minimum_core_reaction_dt = 0.0
        self.minimum_chain_reaction_dt = None
        return

    def serialize(self, xmlTime):
        """
        Serialize this object to XML. Optional (None) attributes are
        simply omitted from the output.

        Parameters:
        -----------
        xmlTime : ElementTree.SubElement
            All sub elements get added to this root.
        """
        def _add(tag, value):
            # Append one child element with the stringified value.
            ET.SubElement(xmlTime, tag).text = str(value)

        if self.force is not None:
            assert self.force > 0.0
            _add("force", self.force)
        if self.reaction is not None:
            assert self.reaction > 0.0
            _add("reaction", self.reaction)
        assert self.minimum_core_dt >= 0.0
        _add("minimum_core_dt", self.minimum_core_dt)
        if self.minimum_chain_dt is not None:
            assert self.minimum_chain_dt >= 0.0
            _add("minimum_chain_dt", self.minimum_chain_dt)
        assert self.minimum_core_reaction_dt >= 0.0
        _add("minimum_core_reaction_dt", self.minimum_core_reaction_dt)
        if self.minimum_chain_reaction_dt is not None:
            assert self.minimum_chain_reaction_dt >= 0.0
            _add("minimum_chain_reaction_dt", self.minimum_chain_reaction_dt)
        return
class Electric_field():
    """
    In Browndye2, Electric_field tags define the various electric
    forces applied to a molecule.

    Attributes:
    -----------
    grid_list : list, Default []
        A list of DX file names output from APBS.
    multipole_field : str or None, Default None
        A file of multipole extension outside grids.
    """
    def __init__(self):
        self.grid_list = []
        self.multipole_field = None
        return

    def serialize(self, xmlE):
        """
        Serialize this object to XML.

        Parameters:
        -----------
        xmlE : ElementTree.SubElement
            All sub elements get added to this root.
        """
        for grid_filename in self.grid_list:
            ET.SubElement(xmlE, 'grid').text = grid_filename
        if self.multipole_field is not None:
            # A set-but-empty filename is a caller error.
            assert self.multipole_field
            ET.SubElement(xmlE, "multipole_field").text = self.multipole_field
        return
class Link():
    """
    In Browndye2, chains are connected to cores - defining the Link.

    Attributes:
    -----------
    core_name : str
        The name of the Core to link.
    core_residue : int
        The residue index within the Core to link to.
    chain_residue : int
        The residue index within the chain to link to.
    """
    def __init__(self):
        # Empty/negative sentinels mean "not yet assigned".
        self.core_name = ""
        self.core_residue = -1
        self.chain_residue = -1
        return

    def serialize(self, xmlLink):
        """
        Serialize this object to XML.

        Parameters:
        -----------
        xmlLink : ElementTree.SubElement
            All sub elements get added to this root.
        """
        assert self.core_name, "A name for the core must be provided"
        ET.SubElement(xmlLink, "core_name").text = self.core_name
        assert self.core_residue >= 0
        ET.SubElement(xmlLink, "core_residue").text = str(self.core_residue)
        assert self.chain_residue >= 0
        ET.SubElement(xmlLink, "chain_residue").text = str(self.chain_residue)
        return
class Chain():
    """
    In Browndye2, chains are flexible assemblies of spheres that can
    interact with each other and the cores with user-specified force
    fields.

    Attributes:
    -----------
    name : str
        The name of this Chain.
    atoms : str
        The filename of the atoms in this chain. When using the
        COFFDROP model, the atoms files for cores and chains
        must contain the coarse-grained "beads".
    link_list : list
        A list of Links, or connections between the chain and core. The
        name of the core, the number of the linking residue, and the
        number of the chain's linking residue are specified.
    """
    def __init__(self):
        self.name = ""
        self.atoms = ""
        self.link_list = []
        return

    def serialize(self, xmlChain):
        """
        Serialize this object to XML.

        Parameters:
        -----------
        xmlChain : ElementTree.SubElement
            All sub elements get added to this root.
        """
        assert self.name, "A name for this chain must be provided"
        xmlChainName = ET.SubElement(xmlChain, "name")
        xmlChainName.text = self.name
        assert self.atoms, "An atoms file must be provided for this Chain"
        xmlChainAtoms = ET.SubElement(xmlChain, "atoms")
        xmlChainAtoms.text = self.atoms
        for link in self.link_list:
            xmlLink_i = ET.SubElement(xmlChain, 'link')
            # Bug fix: Link.serialize() fills xmlLink_i in place and returns
            # None, so its result must not be assigned to xmlLink_i.text
            # (which set the element text to None).
            link.serialize(xmlLink_i)
        return
class Core():
"""
In Browndye2, cores are large rigid bodies composed of many smaller
atoms.
Attributes:
-----------
name : str
The name to identify this core.
atoms : str
The PQRXML file of the atoms of this core, created by the
program pqr2xml.
all_in_surface : str or None, Default "false"
If this is true, then all of the atoms of the core are
included, not just a shell made of the surface atoms.
electric_field : Electric_field() or None
Object containing APBS grids, desolvation grids, and other
information pertaining to electrical forces.
desolvation_field : str or None
This block describes the desolvation field; it is just like the
electric-field block but does not use the multipole field.
eff_charges : str or None
A file with effective charges and lumping information; output
of lumped_charges.
eff_charges_squared : str or None
File like eff_charges above, but with the charges | |
# -*- coding: utf-8 -*-
import re
import sys
from unittest.mock import MagicMock
from asyncy.Exceptions import AsyncyError, \
TypeAssertionRuntimeError, TypeValueRuntimeError
from asyncy.Stories import Stories
from asyncy.processing import Story
from asyncy.processing.internal import File, Http, Json, Log
from pytest import mark
import storyscript
from .Assertions import ContextAssertion, IsANumberAssertion, \
ListItemAssertion, MapValueAssertion, RuntimeExceptionAssertion
class TestCase:
    """One scenario run against a TestSuite's preparation lines.

    'prepend'/'append' are story lines added before/after the suite's
    preparation block; 'assertion' is one assertion object or a list.
    """

    def __init__(self, append=None, prepend=None, assertion=None):
        self.prepend = prepend
        self.append = append
        self.assertion = assertion
class TestSuite:
    """A group of TestCases sharing a common block of preparation lines."""

    def __init__(self, cases, preparation_lines=''):
        self.preparation_lines = preparation_lines
        self.cases = cases
@mark.parametrize('suite', [ # See pydoc below for how this runs.
TestSuite(
preparation_lines='a = {}\n'
'b = 0\n',
cases=[
TestCase(append='if a["foo"] != null\n'
' b = 1',
assertion=ContextAssertion(key='b', expected=0)),
TestCase(append='if a["foo"] == null\n'
' b = 1',
assertion=ContextAssertion(key='b', expected=1))
]
),
TestSuite(
preparation_lines='a = {"a": "b"}',
cases=[
TestCase(append='b = "{1} {a}"',
assertion=ContextAssertion(key='b',
expected='1 {\'a\': \'b\'}'))
]
),
TestSuite(
preparation_lines='a = json stringify content: {"a": "b"}',
cases=[
TestCase(assertion=ContextAssertion(key='a',
expected='{"a": "b"}'))
]
),
TestSuite(
preparation_lines='a = 1\n'
'if false and true\n'
' a = 2',
cases=[
TestCase(assertion=ContextAssertion(key='a', expected=1))
]
),
TestSuite(
preparation_lines='a = 1\n'
'if true and false\n'
' a = 2',
cases=[
TestCase(assertion=ContextAssertion(key='a', expected=1))
]
),
TestSuite(
preparation_lines='a = 1283',
cases=[
TestCase(append='b = a + ""',
assertion=ContextAssertion(key='b', expected='1283'))
]
),
TestSuite(
preparation_lines='function is_even n:int returns boolean\n'
' if n % 2 == 0\n'
' return true\n'
' else\n'
' return false\n'
'\n'
'even = is_even(n: a)', # a is prepended.
cases=[
TestCase(prepend='a = 10',
assertion=ContextAssertion(key='even', expected=True)),
TestCase(prepend='a = 11',
assertion=ContextAssertion(key='even', expected=False))
]
),
TestSuite(
preparation_lines='function echo i:int returns int\n'
' return i\n'
'\n'
'function add a:int b:int returns int\n'
' return a + b\n'
'\n'
'function get_28 returns int\n'
' return 28\n'
'\n'
'function do_nothing\n'
' a = "nothing meaningful happened"\n',
cases=[
TestCase(append='a = echo(i: 200)',
assertion=ContextAssertion(key='a', expected=200)),
TestCase(append='a = echo(i: -1)',
assertion=ContextAssertion(key='a', expected=-1)),
TestCase(append='a = echo(i: 28)',
assertion=ContextAssertion(key='a', expected=28)),
TestCase(append='echo(i: 28)',
assertion=[]),
TestCase(append='a = add(a: 10 b: 20)',
assertion=ContextAssertion(key='a', expected=30)),
TestCase(append='a = add(a: 10 b: 20) + get_28()',
assertion=ContextAssertion(key='a', expected=58)),
TestCase(append='a = get_28()',
assertion=ContextAssertion(key='a', expected=28)),
TestCase(append='do_nothing()',
assertion=ContextAssertion(key='a', expected=None)),
]
),
TestSuite(
cases=[
TestCase(append='a = echo(i: 200)\n'
'function echo i:int returns int\n'
' return i\n',
assertion=ContextAssertion(key='a', expected=200)),
]
),
TestSuite(
preparation_lines='my_list = [1, 2, 3]',
cases=[
TestCase(append='a = (my_list length) + 4',
assertion=ContextAssertion(key='a', expected=7)),
TestCase(append='a = my_list[0]',
assertion=ContextAssertion(key='a', expected=1)),
TestCase(append='a = my_list[-1]',
assertion=ContextAssertion(key='a', expected=3)),
]
),
TestSuite(
preparation_lines='status = "opened"\n'
'tag = "priority"\n'
'if status == "opened" and '
'(["important", "priority"] contains item: tag)\n'
' a = 1',
cases=[
TestCase(assertion=ContextAssertion(key='a', expected=1))
]
),
TestSuite(
preparation_lines='hello = "hello"\n'
'world = "world"',
cases=[
TestCase(append='a = hello + world',
assertion=ContextAssertion(
key='a', expected='helloworld')),
TestCase(append='a = hello + " " + world',
assertion=ContextAssertion(
key='a', expected='hello world')),
TestCase(append='a = hello + " "', # Test for no auto trim.
assertion=ContextAssertion(
key='a', expected='hello ')),
TestCase(append='a = "{hello}"',
assertion=ContextAssertion(
key='a', expected='hello')),
TestCase(append='a = "{hello} {world}"',
assertion=ContextAssertion(
key='a', expected='hello world')),
TestCase(append='a = "{hello}{world}"',
assertion=ContextAssertion(
key='a', expected='helloworld'))
]
),
TestSuite(
preparation_lines='labels = [{"name": "a"}]\n'
'found = false',
cases=[
TestCase(
append='foreach labels as label\n'
' if label["name"] == "a" or label["name"] == "b"\n'
' found = true\n'
'outside = true',
assertion=[ContextAssertion(key='found', expected=True),
ContextAssertion(key='outside', expected=True)]
)
]
),
TestSuite(
preparation_lines='a = 1\n'
'b = 5\n'
'c = null\n',
cases=[
TestCase(append='if true or false\n'
' c = "true"',
assertion=ContextAssertion(key='c', expected='true')),
TestCase(append='if false or true\n'
' c = "true"',
assertion=ContextAssertion(key='c', expected='true')),
TestCase(append='if true\n'
' c = "true"',
assertion=ContextAssertion(key='c', expected='true')),
TestCase(append='if false\n'
' c = "wtf"',
assertion=ContextAssertion(key='c', expected=None)),
TestCase(append='if a == 100 or b == 100\n'
' c = "wtf"',
assertion=ContextAssertion(key='c', expected=None)),
TestCase(append='if a == 100 or b == 5\n'
' c = "b"',
assertion=ContextAssertion(key='c', expected='b')),
TestCase(append='if a == 1 or b == 100\n'
' c = "a"',
assertion=ContextAssertion(key='c', expected='a')),
TestCase(append='if a == 1 or b == 5\n'
' c = "a"',
assertion=ContextAssertion(key='c', expected='a')),
TestCase(append='if a == 100 or b == 100 or true\n'
' c = "true"',
assertion=ContextAssertion(key='c', expected='true'))
]
),
TestSuite(
preparation_lines='a = [1, 2, 3, 4, 5]\n'
'b = []\n'
'c = []\n',
cases=[
TestCase(append='foreach a as elem\n'
' b append item: elem\n'
' foreach b as elem2\n'
' if elem2 > 1\n'
' break\n'
' c append item: elem2\n',
assertion=[
ContextAssertion(key='b', expected=[1, 2, 3, 4, 5]),
ContextAssertion(key='c', expected=[1, 1, 1, 1, 1])
])
]
),
TestSuite(
preparation_lines='a = [1, 1, 1, 2, 3, 4, 5]\n'
'b = 0\n',
cases=[
TestCase(append='b = a[b]',
assertion=ContextAssertion(key='b', expected=1)),
TestCase(append='foreach a as elem\n'
' b = b + elem\n'
' if b == 3\n'
' break',
assertion=ContextAssertion(key='b', expected=3))
]
),
TestSuite(
preparation_lines='a = []',
cases=[
TestCase(append='b = a[10]',
assertion=ContextAssertion(key='b', expected=None))
]
),
TestSuite(
preparation_lines='if colour == "blue"\n'
' result = "blue"\n'
'else if colour == "red"\n'
' result = "red"\n'
'else if colour == "yellow"\n'
' result = "yellow"\n'
'else if colour == "green"\n'
' result = "green"\n'
'else\n'
' result = "unknown"\n'
'outside_var = "executed"\n',
cases=[
TestCase(prepend='colour = "blue"',
assertion=[ContextAssertion(key='result',
expected='blue'),
ContextAssertion(key='outside_var',
expected='executed')]),
TestCase(prepend='colour = "red"',
assertion=[ContextAssertion(key='result',
expected='red'),
ContextAssertion(key='outside_var',
expected='executed')]),
TestCase(prepend='colour = "yellow"',
assertion=[ContextAssertion(key='result',
expected='yellow'),
ContextAssertion(key='outside_var',
expected='executed')]),
TestCase(prepend='colour = "green"',
assertion=[ContextAssertion(key='result',
expected='green'),
ContextAssertion(key='outside_var',
expected='executed')]),
TestCase(prepend='colour = "pink"',
assertion=[ContextAssertion(key='result',
expected='unknown'),
ContextAssertion(key='outside_var',
expected='executed')])
]
),
TestSuite(
preparation_lines='str = "hello world!"',
cases=[
TestCase(append='len = str length',
assertion=ContextAssertion(key='len', expected=12)),
TestCase(append='r = str contains item: "hello"',
assertion=ContextAssertion(key='r', expected=True)),
TestCase(append='r = str contains item: "hello1"',
assertion=ContextAssertion(key='r', expected=False)),
TestCase(append='r = str contains pattern: /llo/',
assertion=ContextAssertion(key='r', expected=True)),
TestCase(append='r = str contains pattern: /f/',
assertion=ContextAssertion(key='r', expected=False)),
TestCase(append='parts = str split by: " "',
assertion=ContextAssertion(
key='parts', expected=['hello', 'world!'])),
TestCase(append='a = str uppercase',
assertion=ContextAssertion(
key='a', expected='HELLO WORLD!')),
TestCase(append='a = str lowercase',
assertion=ContextAssertion(
key='a', expected='hello world!')),
TestCase(append='a = str capitalize',
assertion=ContextAssertion(
key='a', expected='Hello World!')),
TestCase(append='a = str substring start: 2',
assertion=ContextAssertion(
key='a', expected='llo world!')),
TestCase(append='a = str substring end: 5',
assertion=ContextAssertion(
key='a', expected='hello')),
TestCase(append='a = str substring start: 6 end: 11',
assertion=ContextAssertion(
key='a', expected='world')),
TestCase(append='a = str substring start: 6 end: -2',
assertion=ContextAssertion(
key='a', expected='worl')),
TestCase(append='a = str substring start: 6 end: -6',
assertion=ContextAssertion(
key='a', expected='')),
TestCase(append='a = str substring start: 20',
assertion=ContextAssertion(
key='a', expected='')),
TestCase(append='a = str substring start: 10 end:20',
assertion=ContextAssertion(
key='a', expected='d!')),
TestCase(append='a = str substring start: -3',
assertion=ContextAssertion(
key='a', expected='ld!')),
TestCase(append='a = str startswith prefix: "hello"',
assertion=ContextAssertion(
key='a', expected=True)),
TestCase(append='a = str startswith prefix: "ello"',
assertion=ContextAssertion(
key='a', expected=False)),
TestCase(append='a = str endswith suffix: "!"',
assertion=ContextAssertion(
key='a', expected=True)),
TestCase(append='a = str endswith suffix: "."',
assertion=ContextAssertion(
key='a', expected=False)),
]
),
TestSuite(
preparation_lines='str = "hello."',
cases=[
TestCase(append='r = str replace item: "hello" by:"foo"',
assertion=ContextAssertion(key='r', expected='foo.')),
TestCase(append='r = str replace item: "l" by:"o"',
assertion=ContextAssertion(key='r', expected='heooo.')),
TestCase(append='r = str replace item: "k" by:"$"',
assertion=ContextAssertion(key='r', expected='hello.')),
TestCase(append='r = str replace pattern: /hello/ by:"foo"',
assertion=ContextAssertion(key='r', expected='foo.')),
TestCase(append='r = str replace pattern: /l/ by:"o"',
assertion=ContextAssertion(key='r', expected='heooo.')),
TestCase(append='r = str replace pattern: /k/ by:"$"',
assertion=ContextAssertion(key='r', expected='hello.')),
]
),
TestSuite(
preparation_lines='str = " text "',
cases=[
TestCase(append='a = str trim',
assertion=ContextAssertion(
key='a', expected='text')),
],
),
TestSuite(
preparation_lines='e = 10\n'
'o = -3',
cases=[
TestCase(append='a = e is_odd',
assertion=ContextAssertion(key='a', expected=False)),
TestCase(append='a = o is_odd',
assertion=ContextAssertion(key='a', expected=True)),
TestCase(append='a = e is_even',
assertion=ContextAssertion(key='a', expected=True)),
TestCase(append='a = o is_even',
assertion=ContextAssertion(key='a', expected=False)),
TestCase(append='a = o absolute',
assertion=[
ContextAssertion(key='a', expected=3),
ContextAssertion(key='o', expected=-3)
]),
TestCase(append='a = e increment',
assertion=[
ContextAssertion(key='a', expected=11),
ContextAssertion(key='e', expected=10)
]),
TestCase(append='a = e decrement',
assertion=[
ContextAssertion(key='a', expected=9),
ContextAssertion(key='e', expected=10)
]),
TestCase(append='e decrement',
assertion=ContextAssertion(key='e', expected=10)),
TestCase(append='e increment',
assertion=ContextAssertion(key='e', expected=10))
]
),
TestSuite(
preparation_lines='m = {"a": 1, "b": 2}',
cases=[
TestCase(append='s = m length',
assertion=ContextAssertion(key='s', expected=2)),
TestCase(append='s = m keys',
assertion=ContextAssertion(key='s', expected=['a', 'b'])),
TestCase(append='s = m values',
assertion=ContextAssertion(key='s', expected=[1, 2])),
TestCase(append='s = m flatten',
assertion=ContextAssertion(
key='s', expected=[['a', 1], ['b', 2]])),
TestCase(append='s = m pop key: "a"',
assertion=[
ContextAssertion(key='s', expected=1),
ContextAssertion(key='m', expected={'b': 2})
]),
TestCase(append='s = m get key: "a" default: 3',
assertion=[
ContextAssertion(key='s', expected=1),
ContextAssertion(key='m', expected={'a': 1, 'b': 2})
]),
TestCase(append='s = m get key: "c" default: 42',
assertion=ContextAssertion(key='s', expected=42)),
TestCase(append='s = m contains key: "d"',
assertion=ContextAssertion(key='s', expected=False)),
TestCase(append='s = m contains key: "a"',
assertion=ContextAssertion(key='s', expected=True)),
TestCase(append='s = m contains value: 3',
assertion=ContextAssertion(key='s', expected=False)),
TestCase(append='s = m contains value: 1',
assertion=ContextAssertion(key='s', expected=True)),
TestCase(append='key = "a"\ns = m[key]',
assertion=ContextAssertion(key='s', expected=1)),
]
),
TestSuite(
preparation_lines=r'm = "\n\t"',
cases=[
TestCase(append='s = m',
assertion=ContextAssertion(key='s', expected='\n\t')),
TestCase(append=r's = "{m}\n"',
assertion=ContextAssertion(key='s', | |
# -*- coding: utf-8 -*-
from app.const import *
from app.base.logger import log
from app.base.db import get_db, SQL
from app.model import syslog
from app.model import policy
from app.base.utils import AttrDict, tp_timestamp_sec
def get_by_id(pid):
    """Fetch a single audit policy (id, name, desc) by its id.

    Returns (error_code, row_dict); the dict is empty on any failure.
    """
    query = SQL(get_db())
    query.select_from('audit_policy', ['id', 'name', 'desc'], alt_name='p')
    query.where('p.id={}'.format(pid))
    err = query.query()
    if err != TPE_OK:
        return err, {}
    if not query.recorder:
        return TPE_NOT_EXISTS, {}
    return TPE_OK, query.recorder[0]
def get_policies(sql_filter, sql_order, sql_limit):
    """List audit policies, ordered by rank ascending.

    sql_filter supports 'search' (matches name/desc) and 'state';
    sql_limit carries 'page_index'/'per_page'.  sql_order is accepted for
    interface symmetry but ordering is fixed to p.rank.
    Returns (error_code, total_count, page_index, rows).

    Fix: the filter dispatch was `if`/`if`/`else`, so a 'search' key also
    hit the second branch's `else` and the call bailed out with TPE_PARAM;
    it is now an `if`/`elif`/`else` chain, matching get_auditors below.

    NOTE(review): filter values are interpolated into the SQL text;
    presumably sanitized upstream — confirm before exposing externally.
    """
    s = SQL(get_db())
    s.select_from('audit_policy', ['id', 'rank', 'name', 'desc', 'state'], alt_name='p')

    str_where = ''
    _where = list()

    if len(sql_filter) > 0:
        for k in sql_filter:
            if k == 'search':
                _where.append('(p.name LIKE "%{filter}%" OR p.desc LIKE "%{filter}%")'.format(filter=sql_filter[k]))
            elif k == 'state':
                _where.append('p.state={}'.format(sql_filter[k]))
            else:
                log.e('unknown filter field: {}\n'.format(k))
                return TPE_PARAM, s.total_count, 0, s.recorder

    if len(_where) > 0:
        str_where = '( {} )'.format(' AND '.join(_where))

    s.where(str_where)
    s.order_by('p.rank', True)

    if len(sql_limit) > 0:
        s.limit(sql_limit['page_index'], sql_limit['per_page'])

    err = s.query()
    return err, s.total_count, s.page_index, s.recorder
def create_policy(handler, args):
    """Create a new audit policy named args['name'] with args['desc'].

    The policy is appended at the end of the rank order (COUNT(*)+1).
    Returns (TPE_* error code, new row id); the id is 0 on failure.

    NOTE(review): args values are interpolated straight into the SQL text
    below — presumably validated upstream; confirm before exposing.
    """
    db = get_db()
    _time_now = tp_timestamp_sec()
    # 1. Refuse to create a policy whose name already exists.
    s = SQL(db)
    err = s.reset().select_from('audit_policy', ['id']).where('audit_policy.name="{}"'.format(args['name'])).query()
    if err != TPE_OK:
        return err, 0
    if len(s.recorder) > 0:
        return TPE_EXISTS, 0
    # 2. Count existing policies so the new one takes the last rank.
    sql = 'SELECT COUNT(*) FROM {}audit_policy'.format(db.table_prefix)
    db_ret = db.query(sql)
    if not db_ret or len(db_ret) == 0:
        return TPE_DATABASE, 0
    rank = db_ret[0][0] + 1
    # 3. Insert the new policy row.
    sql = 'INSERT INTO `{}audit_policy` (`rank`, `name`, `desc`, `creator_id`, `create_time`) VALUES ' \
          '({rank}, "{name}", "{desc}", {creator_id}, {create_time});' \
          ''.format(db.table_prefix,
                    rank=rank, name=args['name'], desc=args['desc'],
                    creator_id=handler.get_current_user()['id'],
                    create_time=_time_now)
    db_ret = db.exec(sql)
    if not db_ret:
        return TPE_DATABASE, 0
    _id = db.last_insert_id()
    # Record the creation in the system log (message text is user-facing).
    syslog.sys_log(handler.get_current_user(), handler.request.remote_ip, TPE_OK, "创建审计授权策略:{}".format(args['name']))
    return TPE_OK, _id
def update_policy(handler, args):
    """Rename/re-describe an existing audit policy.

    args must carry 'id', 'name' and 'desc'.  Returns a TPE_* code.
    """
    db = get_db()
    # Verify the target policy exists before issuing the UPDATE.
    checker = SQL(db)
    err = checker.reset().select_from('audit_policy', ['id']).where('audit_policy.id={}'.format(args['id'])).query()
    if err != TPE_OK:
        return err
    if not checker.recorder:
        return TPE_NOT_EXISTS
    sql = 'UPDATE `{}audit_policy` SET `name`="{name}", `desc`="{desc}" WHERE `id`={p_id};' \
          ''.format(db.table_prefix,
                    name=args['name'], desc=args['desc'], p_id=args['id']
                    )
    if not db.exec(sql):
        return TPE_DATABASE
    return TPE_OK
def update_policies_state(handler, p_ids, state):
    """Set the state flag on the given policies, propagating it to the
    denormalized copies in audit_auz and audit_map, atomically.

    Returns TPE_OK on success, TPE_DATABASE if the transaction fails.
    """
    db = get_db()
    id_csv = ','.join(str(i) for i in p_ids)
    # (table, state column, policy-id column) triples that mirror policy state.
    targets = (
        ('audit_policy', 'state', 'id'),
        ('audit_auz', 'state', 'policy_id'),
        ('audit_map', 'p_state', 'p_id'),
    )
    sql_list = [
        {'s': 'UPDATE `{tp}{tbl}` SET `{col}`={ph} WHERE `{key}` IN ({p_ids});'.format(
            tp=db.table_prefix, tbl=tbl, col=col, ph=db.place_holder, key=key, p_ids=id_csv),
         'v': (state, )}
        for tbl, col, key in targets
    ]
    return TPE_OK if db.transaction(sql_list) else TPE_DATABASE
def remove_policies(handler, p_ids):
    """Hard-delete the given audit policies together with their
    audit_auz and audit_map rows, in a single transaction.

    Returns TPE_OK on success, TPE_DATABASE if the transaction fails.
    """
    db = get_db()
    id_csv = ','.join(str(i) for i in p_ids)
    # (table, policy-id column) pairs that hold rows for these policies.
    targets = (
        ('audit_policy', 'id'),
        ('audit_auz', 'policy_id'),
        ('audit_map', 'p_id'),
    )
    sql_list = [
        {'s': 'DELETE FROM `{tp}{tbl}` WHERE `{key}` IN ({p_ids});'.format(
            tp=db.table_prefix, tbl=tbl, key=key, p_ids=id_csv),
         'v': None}
        for tbl, key in targets
    ]
    return TPE_OK if db.transaction(sql_list) else TPE_DATABASE
def add_members(handler, policy_id, policy_type, ref_type, members):
    """Attach members to one side of an audit policy.

    policy_type selects the side (0 = auditor, 1 = auditee per the queries
    below); ref_type is the member kind; members is a list of dicts with
    'id' and 'name'.  Members already attached are skipped.  On success
    the denormalized audit map is rebuilt.  Returns a TPE_* code.
    """
    # step 1: collect the rid values already attached to this policy side
    # so duplicates can be skipped below.
    s = SQL(get_db())
    s.select_from('audit_auz', ['rid'], alt_name='p')
    _where = list()
    _where.append('p.policy_id={}'.format(policy_id))
    _where.append('p.type={}'.format(policy_type))
    _where.append('p.rtype={}'.format(ref_type))
    s.where('( {} )'.format(' AND '.join(_where)))
    err = s.query()
    if err != TPE_OK:
        return err
    exists_ids = [r['rid'] for r in s.recorder]
    operator = handler.get_current_user()
    db = get_db()
    _time_now = tp_timestamp_sec()
    sql = []
    # step 2: queue one parameterized INSERT per member not yet attached.
    for m in members:
        if m['id'] in exists_ids:
            continue
        sql_s = 'INSERT INTO `{tp}audit_auz` (`policy_id`,`type`,`rtype`,`rid`,`name`,`creator_id`,`create_time`) VALUES ' \
                '({ph},{ph},{ph},{ph},{ph},{ph},{ph});' \
                ''.format(tp=db.table_prefix, ph=db.place_holder)
        sql_v = (policy_id, policy_type, ref_type, m['id'], m['name'], operator['id'], _time_now)
        sql.append({'s': sql_s, 'v': sql_v})
    if db.transaction(sql):
        # keep the denormalized audit map in sync with the auz table
        return policy.rebuild_audit_auz_map()
    else:
        return TPE_DATABASE
def remove_members(handler, policy_id, policy_type, ids):
    """Detach the given audit_auz rows (by primary id) from one side of a
    policy, then rebuild the denormalized audit map.

    Returns a TPE_* code.
    """
    id_csv = ','.join(str(i) for i in ids)
    cond = 'policy_id={} AND type={} AND id IN ({})'.format(policy_id, policy_type, id_csv)
    runner = SQL(get_db())
    err = runner.reset().delete_from('audit_auz').where(cond).exec()
    if err != TPE_OK:
        return err
    # Keep the audit map consistent with the auz table.
    return policy.rebuild_audit_auz_map()
def get_auditors(sql_filter, sql_order, sql_limit):
    """List auditor-side members of audit policies (audit_auz, type=0).

    sql_filter supports 'policy_id' and 'search' (matches name);
    sql_order may sort by 'name' or 'rtype'; sql_limit carries
    'page_index'/'per_page'.
    Returns (error_code, total_count, page_index, rows).

    NOTE(review): filter values are interpolated into SQL text —
    presumably sanitized upstream; confirm before exposing externally.
    """
    ss = SQL(get_db())
    ss.select_from('audit_auz', ['id', 'policy_id', 'rtype', 'rid', 'name'], alt_name='p')
    _where = list()
    _where.append('p.type=0')  # type=0 selects the auditor side
    if len(sql_filter) > 0:
        for k in sql_filter:
            if k == 'policy_id':
                _where.append('p.policy_id={}'.format(sql_filter[k]))
            elif k == 'search':
                _where.append('(p.name LIKE "%{filter}%")'.format(filter=sql_filter[k]))
            else:
                log.e('unknown filter field: {}\n'.format(k))
                return TPE_PARAM, 0, 0, {}
    if len(_where) > 0:
        ss.where('( {} )'.format(' AND '.join(_where)))
    if sql_order is not None:
        _sort = False if not sql_order['asc'] else True
        if 'name' == sql_order['name']:
            ss.order_by('p.name', _sort)
        elif 'rtype' == sql_order['name']:
            ss.order_by('p.rtype', _sort)
        else:
            log.e('unknown order field: {}\n'.format(sql_order['name']))
            return TPE_PARAM, ss.total_count, 0, ss.recorder
    if len(sql_limit) > 0:
        ss.limit(sql_limit['page_index'], sql_limit['per_page'])
    err = ss.query()
    if err != TPE_OK:
        return err, 0, 0, {}
    return TPE_OK, ss.total_count, ss.page_index, ss.recorder
def get_auditees(sql_filter, sql_order, sql_limit):
    """List auditee-side members of audit policies (audit_auz, type=1).

    Mirrors get_auditors except for the fixed `p.type=1` predicate.
    sql_filter supports 'policy_id' and 'search'; sql_order may sort by
    'name' or 'rtype'; sql_limit carries 'page_index'/'per_page'.
    Returns (error_code, total_count, page_index, rows).

    NOTE(review): filter values are interpolated into SQL text —
    presumably sanitized upstream; confirm before exposing externally.
    """
    ss = SQL(get_db())
    ss.select_from('audit_auz', ['id', 'policy_id', 'rtype', 'rid', 'name'], alt_name='p')
    _where = list()
    _where.append('p.type=1')  # type=1 selects the auditee side
    if len(sql_filter) > 0:
        for k in sql_filter:
            if k == 'policy_id':
                _where.append('p.policy_id={}'.format(sql_filter[k]))
            elif k == 'search':
                _where.append('(p.name LIKE "%{filter}%")'.format(filter=sql_filter[k]))
            else:
                log.e('unknown filter field: {}\n'.format(k))
                return TPE_PARAM, 0, 0, {}
    if len(_where) > 0:
        ss.where('( {} )'.format(' AND '.join(_where)))
    if sql_order is not None:
        _sort = False if not sql_order['asc'] else True
        if 'name' == sql_order['name']:
            ss.order_by('p.name', _sort)
        elif 'rtype' == sql_order['name']:
            ss.order_by('p.rtype', _sort)
        else:
            log.e('unknown order field: {}\n'.format(sql_order['name']))
            return TPE_PARAM, ss.total_count, 0, ss.recorder
    if len(sql_limit) > 0:
        ss.limit(sql_limit['page_index'], sql_limit['per_page'])
    err = ss.query()
    if err != TPE_OK:
        return err, 0, 0, {}
    return TPE_OK, ss.total_count, ss.page_index, ss.recorder
def rank_reorder(handler, pid, new_rank, start_rank, end_rank, direct):
    """Move policy `pid` to `new_rank`, shifting the ranks in between.

    The caller precomputes the affected window [start_rank, end_rank] and
    `direct` (the shift, e.g. '+1' or '-1', spliced into the SQL).
    Rebuilds the audit map on success.  Returns a TPE_* code.
    """
    db = get_db()
    # Reordering scheme:
    #   Let p_rank be the current rank of policy `pid`.
    #   Moving forward  (p_rank > new_rank): every entry with
    #     new_rank <= rank < p_rank gets rank+1.
    #   Moving backward (p_rank < new_rank): every entry with
    #     p_rank < rank <= new_rank gets rank-1.
    #   Finally the moved policy itself is assigned new_rank.
    # 1. Make sure the policy exists, and grab its name/rank for the log.
    s = SQL(db)
    err = s.select_from('audit_policy', ['id', 'name', 'rank']).where('audit_policy.id={}'.format(pid)).query()
    if err != TPE_OK:
        return err
    if len(s.recorder) == 0:
        return TPE_NOT_EXISTS
    p_name = s.recorder[0]['name']
    p_rank = s.recorder[0]['rank']
    # 2. Shift every rank inside the affected window by `direct`.
    sql = 'UPDATE `{dbtp}audit_policy` SET rank=rank{direct} WHERE (rank>={start_rank} AND rank<={end_rank});' \
          ''.format(dbtp=db.table_prefix, direct=direct, start_rank=start_rank, end_rank=end_rank)
    db_ret = db.exec(sql)
    if not db_ret:
        return TPE_DATABASE
    # 3. Drop the moved policy into its new slot.
    sql = 'UPDATE `{dbtp}audit_policy` SET rank={new_rank} WHERE id={pid};' \
          ''.format(dbtp=db.table_prefix, new_rank=new_rank, pid=pid)
    db_ret = db.exec(sql)
    if not db_ret:
        return TPE_DATABASE
    syslog.sys_log(handler.get_current_user(), handler.request.remote_ip, TPE_OK, "调整审计授权策略顺序:{},从{}到{}".format(p_name, p_rank, new_rank))
    # Keep the denormalized audit map in sync after the reorder.
    return policy.rebuild_audit_auz_map()
def get_auth(auth_id):
    """Look up one audit_map row by its uni_id.

    Returns (row, TPE_OK) on success, otherwise (None, error_code).
    """
    s = SQL(get_db())
    err = s.select_from('audit_map', ['id', 'h_id', 'u_id', 'a_id']).where('audit_map.uni_id="{}"'.format(auth_id)).query()
    if err != TPE_OK:
        return None, err
    matches = s.recorder
    if not matches:
        return None, TPE_NOT_EXISTS
    if len(matches) != 1:
        # uni_id is expected to be unique; multiple hits means bad data.
        return None, TPE_FAILED
    return matches[0], TPE_OK
def build_auz_map():
_users = {}
_hosts = {}
# _accs = {}
_gusers = {}
_ghosts = {}
# _gaccs = {}
_groups = {}
_policies = {}
_p_users = {}
_p_assets = {}
_map = []
db = get_db()
dbtp = db.table_prefix
db.exec('DELETE FROM {}audit_map'.format(dbtp))
s = SQL(get_db())
# 加载所有策略
err = s.reset().select_from('audit_policy', ['id', 'rank', 'state'], alt_name='p').query()
if err != TPE_OK:
return err
if 0 == len(s.recorder):
return TPE_OK
for i in s.recorder:
_policies[i.id] = i
# 加载所有的用户
err = s.reset().select_from('user', ['id', 'username', 'surname', 'state'], alt_name='u').query()
if err != TPE_OK:
return err
if 0 == len(s.recorder):
return TPE_OK
for i in s.recorder:
_users[i.id] = i
# 加载所有的主机
err = s.reset().select_from('host', ['id', 'name', 'ip', 'router_ip', 'router_port', 'state'], alt_name='h').query()
if err != TPE_OK:
return err
if 0 == len(s.recorder):
return TPE_OK
for i in s.recorder:
_hosts[i.id] = i
# # 加载所有的账号
# err = s.reset().select_from('acc', ['id', 'host_id', 'username', 'protocol_type', 'protocol_port', 'auth_type', 'state'], alt_name='a').query()
# if err != TPE_OK:
# return err
# if 0 == len(s.recorder):
# return TPE_OK
# for i in s.recorder:
# _accs[i.id] = i
# 加载所有的组
err = s.reset().select_from('group', ['id', 'type', 'state'], alt_name='g').query()
if err != TPE_OK:
return err
for i in s.recorder:
_groups[i.id] = i
if i.type == TP_GROUP_USER:
_gusers[i.id] = []
elif i.type == TP_GROUP_HOST:
_ghosts[i.id] = []
# elif i.type == TP_GROUP_ACCOUNT:
# _gaccs[i.id] = []
# 加载所有的组
err = s.reset().select_from('group_map', ['id', 'type', 'gid', 'mid'], alt_name='g').query()
if err != TPE_OK:
return err
for g in s.recorder:
if g.type == TP_GROUP_USER:
# if g.gid not in _gusers:
# _gusers[g.gid] = []
_gusers[g.gid].append(_users[g.mid])
elif g.type == TP_GROUP_HOST:
# if g.gid not in _ghosts:
# _ghosts[g.gid] = | |
#!/usr/bin/env python3
import re
import glob
import json
import lzma
import argparse
from functools import reduce
from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
def main():
    """Command-line entry point: parse options, render the report, write it out."""
    ap = argparse.ArgumentParser(description="Parse result files and render an HTML page with a status summary")
    ap.add_argument('resultfiles', type=str, nargs='+', metavar='results.json', help='path to a result file')
    ap.add_argument('--ignore', type=str, action='append', help='Ignore tests with the specified name; can be used more than once.')
    ap.add_argument('--by-test', action='store_true', help="print results by test (distro)")
    ap.add_argument('-o', '--output-file', type=str, help="file name to write report")
    ap.add_argument('--compare-weekday-num', type=int, help="integer weekday number to hinge the summary report on", default=None)
    opts = ap.parse_args()

    # Two report flavours: grouped by test/distro, or the plain per-file table.
    if opts.by_test:
        report = print_table_by_distro_report(opts.resultfiles, opts.ignore, opts.compare_weekday_num)
    else:
        report = print_table_report(opts.resultfiles, opts.ignore)

    if not opts.output_file:
        print(report)
    else:
        with open(opts.output_file, 'w') as out:
            out.write(report)
def get_wheels_with_result(wheel_dict, key='test-passed', result=False, ignore_tests=None):
    """Return the wheel names that have at least one test where ``key == result``.

    wheel_dict maps wheel name -> {test name -> result dict}.
    ignore_tests: wheel names to skip entirely (default: none).

    Fixes: the mutable default argument (``ignore_tests=[]``) is replaced
    with the None sentinel idiom, and the inner scan breaks as soon as one
    test matches (one match is enough to include the wheel).
    """
    if ignore_tests is None:
        ignore_tests = []
    wheels = set()
    for wheel_name, wheel_results in wheel_dict.items():
        if wheel_name in ignore_tests:
            continue
        for test_results in wheel_results.values():
            if test_results[key] == result:
                wheels.add(wheel_name)
                break  # no need to scan this wheel's remaining tests
    return list(wheels)
def get_failing_tests(wheel_dict, ignore_tests=[]):
    """Wheel names with at least one test whose 'test-passed' flag is False."""
    return get_wheels_with_result(wheel_dict, key='test-passed', result=False, ignore_tests=ignore_tests)
def get_build_required(wheel_dict, ignore_tests=[]):
    """Wheel names with at least one test whose 'build-required' flag is True.

    Fixes: this function was defined twice, identically, back to back —
    the duplicate is dropped.  It also called ``get_tests_with_result``,
    which is not defined anywhere in this module; ``get_wheels_with_result``
    has exactly the signature the call used, so this looks like a leftover
    from a rename and the call is redirected there (NOTE(review): confirm
    no other module was expected to provide get_tests_with_result).
    """
    return get_wheels_with_result(wheel_dict, 'build-required', True, ignore_tests)
def print_report(all_wheels):
    """Render a minimal HTML summary: lists of passing and failing wheels.

    A wheel is "passing" when get_failing_tests finds nothing for it.
    """
    passing, failing = [], []
    for wheel, wheel_dict in all_wheels.items():
        bucket = failing if get_failing_tests(wheel_dict) else passing
        bucket.append((wheel, wheel_dict))

    parts = []
    for title, group in (('Passing', passing), ('Failing', failing)):
        parts.append(f'<h1>{title} - {len(group)}</h1>')
        parts.append('<ul>')
        for wheel, _ in group:
            parts.append(f'<li>{wheel}</li>')
        parts.append('</ul>')
    return '\n'.join(parts)
def get_wheel_report_cell(wheel, wheel_dict, ignore_tests):
    """Build one HTML table cell summarizing a wheel's results.

    Returns (html, badges): html is the cell markup, badges is the set of
    badge labels shown — the caller compares these sets across result
    files to flag wheels whose status changed.

    Fix: the slow-install lookup called ``get_tests_with_result``, which
    is undefined in this module; ``get_wheels_with_result`` matches the
    call's signature exactly (rename leftover — NOTE(review): confirm).
    """
    failing = get_failing_tests(wheel_dict, ignore_tests=ignore_tests)
    build_required = get_build_required(wheel_dict, ignore_tests=ignore_tests)
    slow_install = get_wheels_with_result(wheel_dict, 'slow-install', True, ignore_tests=ignore_tests)

    badges = set()
    cell_text = []
    cell_text.append('<div>')
    # Headline badge: perfect score beats all-passed; a failing wheel gets neither.
    if len(failing) == 0 and len(build_required) == 0 and len(slow_install) == 0:
        cell_text.append('<span class="perfect-score badge">perfect score</span> ')
        badges.add('perfect-score')
    elif len(failing) == 0:
        cell_text.append('<span class="all-passed badge">all-passed</span> ')
        badges.add('all-passed')
    if len(build_required) > 0:
        cell_text.append('<span class="build-required badge">build required</span> ')
        badges.add('build-required')
    if len(slow_install) > 0:
        cell_text.append('<span class="slow-install badge">slow-install</span> ')
        badges.add('slow-install')
    # One badge per failing test name.
    for test_name in failing:
        cell_text.append(f'<span class="test-name badge">{test_name}</span>')
        badges.add(test_name)
    cell_text.append('</div>')
    return ('\n'.join(cell_text), badges)
def load_result_files(test_results_fname_list):
    """Yield (parsed_json, filename) for each result file, transparently
    decompressing files whose name ends in ``.xz``."""
    for fname in test_results_fname_list:
        opener = lzma.open if fname.endswith('.xz') else open
        with opener(fname) as f:
            yield json.load(f), fname
def print_table_report(test_results_fname_list, ignore_tests=[]):
    """Render an HTML table: one row per wheel, one column per result file.

    Wheel rows whose badge sets differ between files are tagged with the
    'different' CSS class.  Returns the complete HTML document as a string.
    """
    test_results_list = []
    if ignore_tests is None:
        ignore_tests = []
    all_keys = set()
    # Load every result file and collect the union of wheel names.
    for test_results, fname in load_result_files(test_results_fname_list):
        test_results_list.append(test_results)
        all_keys.update(test_results.keys())
    all_keys = sorted(list(all_keys), key=str.lower)
    html = []
    html.append(HTML_HEADER)
    html.append('<table class="python-wheel-report">')
    # Header row: one column per input file (named by its path).
    html.append('<tr>')
    html.append('<th></th>')
    for i, test_results in enumerate(test_results_list):
        html.append(f'<th>{test_results_fname_list[i]}</th>')
    html.append('</tr>')
    for i, wheel in enumerate(all_keys):
        # Render each file's cell once up front, keeping the badge sets so
        # differences between files can be detected below.
        test_results_cache = {}
        for test_results_i, test_results in enumerate(test_results_list):
            if wheel in test_results:
                wheel_dict = test_results[wheel]
                test_results_cache[test_results_i] = get_wheel_report_cell(wheel, wheel_dict, ignore_tests)
        # check to see if the sets returned as item index 1 are all the same
        badge_set = None
        wheel_differences = False
        for s in map(lambda x: x[1][1], test_results_cache.items()):
            if badge_set is None:
                badge_set = s
            elif badge_set != s:
                wheel_differences = True
                break
        wheel_differences = 'different' if wheel_differences else ''
        odd_even = 'even' if (i+1) % 2 == 0 else 'odd'
        html.append(f'<tr class="wheel-line {odd_even}">')
        html.append(f'<td class="wheel-name {wheel_differences}">{wheel}</td>')
        # Emit the cached cell for each file; empty cell if the wheel is absent.
        for test_results_i, test_results in enumerate(test_results_list):
            html.append('<td class="wheel-report">')
            if wheel in test_results:
                html.append(test_results_cache[test_results_i][0])
            html.append('</td>')
        html.append('</tr>')
    html.append('</table>')
    html.append(HTML_FOOTER)
    html = '\n'.join(html)
    return html
def make_badge(classes=None, text=""):
    """Return an HTML <span> badge carrying the given CSS classes plus 'badge'.

    Fix: the original used a mutable default (``classes=[]``) and appended
    to it, so every default call accumulated another 'badge' entry, and
    callers' lists were mutated in place.  The argument is now copied.
    """
    css = list(classes) if classes else []
    css.append('badge')
    return f'<span class="{" ".join(css)}">{text}</span>'
def get_package_name_class(test_name):
    """Map a test name to the CSS class of its package source.

    'conda' wins over apt/yum; anything unrecognized is treated as pip.
    """
    if 'conda' in test_name:
        return 'package-conda'
    if 'apt' in test_name or 'yum' in test_name:
        return 'package-os'
    return 'package-pip'
def get_distribution_name(test_name):
    """Return the known distro name embedded in a test name, or None."""
    known = ("amazon-linux2", "centos8", "focal")
    return next((d for d in known if d in test_name), None)
def get_package_manager_name(test_name):
    """Return the package manager named in a test name; default is 'pip'."""
    return next((pm for pm in ('yum', 'apt', 'conda') if pm in test_name), 'pip')
class TestResultFile():
    """One parsed result file plus per-wheel metadata derived from it."""

    def __init__(self, fname):
        self.fname = fname
        self.content = None   # raw parsed JSON: {wheel: {test_name: result dict}}
        self.date = None      # run timestamp, parsed from the file name by the caller
        self.wheels = {}      # wheel -> derived metadata (filled by add_inferred_meta_data)

    def add_inferred_meta_data(self):
        """Annotate each test result with distro / package-manager fields and
        record, per wheel, whether every distribution has a passing option."""
        for wheel, results_by_test in self.content.items():
            passed_by_distro = defaultdict(lambda: False)
            for test_name, result in results_by_test.items():
                distro = get_distribution_name(test_name)
                result['distribution'] = distro
                result['package_manager'] = get_package_manager_name(test_name)
                passed_by_distro[distro] |= result['test-passed']
            self.wheels[wheel] = {
                'results': results_by_test,
                # key spelling ('disribution') kept as-is: it is read elsewhere
                'passed-by-disribution': passed_by_distro,
                'each-distribution-has-passing-option': all(passed_by_distro.values()),
            }
def print_table_by_distro_report(test_results_fname_list, ignore_tests=[], compare_weekday_num=None):
    """Render the by-test (per-distro) HTML report.

    test_results_fname_list: result files (.json / .json.xz) whose names
        embed a run timestamp; rows are shown newest-first and collapsed
        when nothing changed between runs.
    ignore_tests: accepted for interface symmetry with print_table_report;
        currently unused here.
    compare_weekday_num: optional weekday number the summary diff is hinged
        on; when absent (the default) the oldest run is the reference.

    Fixes: the reference variable was initialized under the misspelled name
    ``referene_test_file``, so reading ``reference_test_file`` later raised
    NameError whenever compare_weekday_num was None (the default); a None
    reference (no older run found) also crashed the summary loop — both now
    fall back to the oldest run.  The date-extraction regex is now a raw
    string with escaped dots.
    """
    test_results_list = []
    for fname in test_results_fname_list:
        test_result_file = TestResultFile(fname)
        if re.search(r'\.xz$', fname) is not None:
            with lzma.open(fname) as f:
                test_result_file.content = json.load(f)
        else:
            with open(fname) as f:
                test_result_file.content = json.load(f)
        # Pull the run timestamp out of the file name (xz-compressed results).
        mo = re.search(r'[^/]-([0-9\-_]+)\.json\.xz', fname)
        if mo is not None:
            test_result_file.date = datetime.strptime(mo.group(1), "%Y-%m-%d_%H-%M-%S")
        test_result_file.add_inferred_meta_data()
        test_results_list.append(test_result_file)

    # Sort the test result files by date because code that follows assumes this order.
    test_results_list = sorted(test_results_list, key=lambda x: x.date, reverse=True)

    # Union of all wheel names across files.
    wheel_name_set = set()
    # Union of all test names (distros, plus extras, e.g. centos-python38).
    all_test_names = set()
    for test_result in test_results_list:
        wheel_name_set.update(test_result.content.keys())
        for wheel, wheel_dict in test_result.content.items():
            for test_name, test_name_results in wheel_dict.items():
                all_test_names.add(test_name)
    wheel_name_set = sorted(list(wheel_name_set), key=str.lower)
    all_test_names = sorted(list(all_test_names))

    html = []
    html.append(HTML_HEADER)
    pretty_date = test_results_list[0].date.strftime("%B %d, %Y")
    html.append(f'<h1>Python Wheels on aarch64 test results from {pretty_date}</h1>')
    html.append('<section class="summary">')

    # Find the result file to compare against for the top-level summary:
    # the newest run strictly older than the requested weekday hinge.
    reference_test_file = None
    if type(compare_weekday_num) is int:
        reference_date = test_results_list[0].date
        reference_date = reference_date.replace(hour=23, minute=59)
        reference_date = reference_date - timedelta(days=reference_date.weekday()) + timedelta(days=compare_weekday_num)
        current_weekday = test_results_list[0].date.weekday()
        if current_weekday <= compare_weekday_num:
            reference_date -= timedelta(days=7)
        for test_result_file in test_results_list:
            if test_result_file.date < reference_date:
                reference_test_file = test_result_file
                break
    if reference_test_file is None:
        # No weekday hinge requested, or no run older than the hinge date:
        # fall back to the oldest run so the summary still has a reference.
        reference_test_file = test_results_list[-1]

    summary_table = [['date', 'number of wheels', 'all tests passed', 'some tests failed', 'each dist has passing option']]
    for test_result_file in [reference_test_file, test_results_list[0]]:
        count = len(test_result_file.content)
        failures = len(get_failing_tests(test_result_file.content))
        all_passing = count - failures
        date = test_result_file.date.strftime("%A, %B %d, %Y")
        passing_options = len(list(filter(lambda wheel: wheel['each-distribution-has-passing-option'], test_result_file.wheels.values())))
        summary_table.append([date, count, all_passing, failures, passing_options])

    html.append('<table class="summary">')
    for index in range(len(summary_table[0])):
        html.append('<tr>')
        for column_index, column_data in enumerate(summary_table):
            element = 'th' if column_index == 0 else 'td'
            html.append(f'<{element}>{column_data[index]}</{element}>')
        # Last column: delta between the current run and the reference run.
        if summary_table[0][index] != 'date':
            difference = summary_table[2][index] - summary_table[1][index]
            plus = '+' if difference >= 0 else ''
            html.append(f'<td>{plus}{difference}</td>')
        else:
            html.append('<td></td>')
        html.append('</tr>')
    html.append('</table>')
    html.append('<p>The table shows test results from the current test run and differences, if any, with previous runs.')
    html.append('When differences exist, the first test report exhibting the difference is shown. The current test result')
    html.append('is always shown, regardless of whether there is any difference.</p>')
    html.append('</section>')
    html.append('<section class="display-controls">')
    html.append('<input type="checkbox" checked="true" name="pip" class="package-pip" /><label for="pip">pip</label>')
    html.append('<input type="checkbox" name="os" class="package-os" /><label for="os">apt/yum</label>')
    html.append('<input type="checkbox" name="conda" class="package-conda"/><label for="conda">anaconda</label>')
    html.append('<table class="python-wheel-report">')
    html.append('<tr>')
    html.append('<th></th>')
    html.append('<th>at least one passing option per distribution?</th>')
    for test_name in all_test_names:
        html.append(f'<th class="test-column {get_package_name_class(test_name)}">{test_name}</th>')
    html.append('</tr>')

    # Iterate over the sorted list of wheel names.
    for i, wheel in enumerate(wheel_name_set):
        # Make a list of result files to display by finding changes between runs.
        previous_wheel_test_results = None
        displayed_test_rows = []
        # Iterating over each input file, oldest first.
        for test_result_file in test_results_list[::-1]:
            wheel_test_results = {}
            # Iterating over each test (centos, focal, ...).
            for test_name in all_test_names:
                try:
                    test_result = test_result_file.content[wheel][test_name]
                except KeyError:
                    # This file does not have a result for this wheel and test name. Skip it.
                    continue
                wheel_test_results[test_name] = (test_result['test-passed'], test_result['build-required'])
            if previous_wheel_test_results is None:
                displayed_test_rows.append(test_result_file)
            elif wheel_test_results != previous_wheel_test_results:
                displayed_test_rows.append(test_result_file)
            previous_wheel_test_results = wheel_test_results
        # If there are no changes to the results, only show the latest result.
        if len(displayed_test_rows) == 1:
            displayed_test_rows = [test_results_list[0]]
        # Always display results from the most recent run.
        elif test_results_list[0] not in displayed_test_rows:
            displayed_test_rows.append(test_results_list[0])
        different = len(displayed_test_rows) > 1
        odd_even = 'even' if (i+1) % 2 == 0 else 'odd'
        different_class = 'different' if different else ''
        for test_result_file in displayed_test_rows:
            if different:
                pretty_date = test_result_file.date.strftime("%B %d, %Y")
                file_indicator = f'<br /><span class="file-indicator">{pretty_date}</span>'
            else:
                file_indicator = ''
            html.append(f'<tr class="wheel-line {odd_even}">')
            html.append(f'<td class="wheel-name {different_class}">{wheel}{file_indicator}</td>')
            html.append('<td class="">')
            if wheel in test_result_file.wheels:
                distro_passing = test_result_file.wheels[wheel]['each-distribution-has-passing-option']
                if distro_passing:
                    html.append(make_badge(classes=['passed'], text='yes'))
                else:
                    html.append(make_badge(classes=['failed'], text='no'))
            html.append('</td>')
            for test_name in all_test_names:
                html.append(f'<td class="test-column {get_package_name_class(test_name)}">')
                if wheel in test_result_file.content and test_name in test_result_file.content[wheel]:
                    result = test_result_file.content[wheel][test_name]
                    if result['test-passed']:
                        html.append(make_badge(classes=['passed'], text='passed'))
                    else:
                        html.append(make_badge(classes=['failed'], text='failed'))
                    if result['build-required']:
                        html.append(make_badge(classes=['warning'], text='build required'))
                    if result['slow-install']:
                        html.append(make_badge(classes=['warning'], text='slow install'))
                    if 'timeout' in result and result['timeout']:
                        html.append(make_badge(classes=['failed'], text='timed out'))
                html.append('</td>')
            html.append('</tr>')
            if not different:
                break
    html.append('</table>')
    html.append('</section>')
    html.append(HTML_FOOTER)
    html = '\n'.join(html)
    return html
HTML_HEADER = '''
<!doctype html>
<html>
<head>
<style type="text/css">
h1 {
text-align: center;
}
section.summary {
margin: 0 auto;
width: 900px;
font-family: sans-serif;
}
section.summary table {
margin: 0 auto;
width: 700px;
border-collapse: collapse;
}
section.summary | |
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 1.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j],
[ 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
1.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j,
0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, 0.+0.j, | |
= Var(within=Reals,bounds=(0,None),initialize=0)
m.x598 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x599 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x600 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x601 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x602 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x603 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x604 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x605 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x606 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x607 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x608 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x609 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x610 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x611 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x612 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x613 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x614 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x615 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x616 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x617 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x618 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x619 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x620 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x621 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x622 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x623 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x624 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x625 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x626 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x627 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x628 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x629 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x630 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x631 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x632 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x633 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x634 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x635 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x636 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x637 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x638 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x639 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x640 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x641 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x642 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x643 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x644 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x645 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x646 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x647 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x648 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x649 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x650 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x651 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x652 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x653 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x654 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x655 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x656 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x657 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x658 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x659 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x660 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x661 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x662 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x663 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x664 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x665 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x666 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x667 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x668 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x669 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x670 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x671 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x672 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x673 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x674 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x675 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x676 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x677 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x678 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x679 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x680 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x681 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x682 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x683 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x684 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x685 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x686 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x687 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x688 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x689 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x690 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x691 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x692 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x693 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x694 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x695 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x696 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x697 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x698 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x699 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x700 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x701 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x702 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x703 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x704 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x705 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x706 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x707 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x708 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x709 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x710 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x711 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x712 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x713 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x714 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x715 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x716 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x717 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x718 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x719 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x720 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x721 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x722 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x723 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x724 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x725 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x726 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x727 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x728 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x729 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x730 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x731 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x732 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x733 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x734 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x735 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x736 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x737 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x738 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x739 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x740 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x741 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x742 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x743 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x744 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x745 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x746 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x747 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x748 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x749 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x750 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x751 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x752 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x753 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x754 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x755 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x756 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x757 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x758 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x759 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x760 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x761 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x762 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x763 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x764 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x765 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x766 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x767 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x768 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x769 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x770 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x771 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x772 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x773 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x774 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x775 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x776 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x777 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x778 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x779 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x780 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x781 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x782 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x783 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x784 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x785 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x786 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x787 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x788 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x789 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x790 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x791 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x792 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x793 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x794 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x795 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x796 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x797 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x798 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x799 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x800 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x801 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x802 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x803 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x804 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x805 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x806 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x807 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x808 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x809 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x810 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x811 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x812 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x813 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x814 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x815 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x816 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x817 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x818 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x819 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x820 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x821 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x822 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x823 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x824 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x825 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x826 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x827 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x828 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x829 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x830 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x831 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x832 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x833 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x834 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x835 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x836 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x837 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x838 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x839 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x840 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x841 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x842 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x843 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x844 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x845 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x846 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x847 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x848 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x849 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x850 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x851 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x852 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x853 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x854 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x855 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x856 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x857 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x858 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x859 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x860 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x861 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x862 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x863 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x864 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x865 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x866 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x867 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x868 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x869 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x870 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x871 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x872 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x873 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x874 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x875 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x876 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x877 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x878 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x879 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x880 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x881 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x882 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x883 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x884 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x885 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x886 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x887 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x888 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x889 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x890 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x891 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x892 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x893 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x894 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x895 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x896 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x897 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x898 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x899 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x900 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x901 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x902 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x903 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x904 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x905 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x906 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x907 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b908 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b909 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b910 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b911 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b912 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b913 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b914 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b915 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b916 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b917 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b918 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b919 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b920 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b921 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b922 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b923 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b924 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b925 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b926 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b927 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b928 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b929 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b930 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b931 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b932 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b933 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b934 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b935 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b936 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b937 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b938 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b939 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b940 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b941 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b942 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b943 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b944 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b945 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b946 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b947 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b948 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b949 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b950 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b951 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b952 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b953 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b954 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b955 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b956 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b957 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b958 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b959 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b960 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b961 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b962 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b963 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b964 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b965 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b966 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b967 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b968 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b969 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b970 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b971 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b972 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b973 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b974 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b975 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b976 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b977 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b978 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b979 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b980 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b981 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b982 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b983 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b984 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b985 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b986 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b987 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b988 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b989 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b990 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b991 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b992 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b993 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b994 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b995 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b996 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b997 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b998 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b999 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1000 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1001 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1002 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1003 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1004 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1005 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1006 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1007 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1008 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1009 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1010 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1011 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1012 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1013 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1014 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1015 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1016 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1017 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1018 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1019 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1020 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1021 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1022 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1023 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1024 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1025 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1026 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1027 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1028 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1029 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1030 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1031 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1032 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1033 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1034 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1035 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1036 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1037 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1038 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1039 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1040 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1041 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1042 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1043 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1044 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1045 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1046 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1047 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1048 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1049 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1050 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1051 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1052 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1053 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1054 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1055 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1056 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1057 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1058 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1059 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1060 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1061 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1062 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1063 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1064 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1065 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1066 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1067 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1068 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1069 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1070 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1071 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1072 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1073 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1074 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1075 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1076 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1077 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1078 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1079 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1080 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1081 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1082 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1083 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1084 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1085 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1086 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1087 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1088 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1089 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1090 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1091 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1092 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1093 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1094 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1095 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1096 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1097 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1098 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1099 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1100 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1101 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1102 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1103 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1104 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1105 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1106 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1107 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1108 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1109 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1110 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1111 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1112 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1113 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1114 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1115 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1116 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1117 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1118 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1119 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1120 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1121 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1122 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1123 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1124 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1125 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1126 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1127 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1128 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1129 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1130 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1131 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1132 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1133 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1134 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1135 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1136 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1137 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1138 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1139 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1140 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1141 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1142 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1143 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1144 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1145 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1146 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b1147 = Var(within=Binary,bounds=(0,1),initialize=0)
m.obj = Objective(expr= - m.x122 - m.x123 - m.x124 + 5*m.x140 + 10*m.x141 + 5*m.x142 - 2*m.x155 - m.x156 - 2*m.x157
- 10*m.x206 - 5*m.x207 - 5*m.x208 - 5*m.x209 - 5*m.x210 - 5*m.x211 + 40*m.x230 + 30*m.x231
+ 15*m.x232 + 15*m.x233 + 20*m.x234 + 25*m.x235 + 10*m.x236 + 30*m.x237 + 40*m.x238 + 30*m.x239
+ 20*m.x240 + 20*m.x241 + 35*m.x242 + 50*m.x243 + 20*m.x244 + 20*m.x245 + 30*m.x246 + 35*m.x247
+ 25*m.x248 + 50*m.x249 + 10*m.x250 + 15*m.x251 + 20*m.x252 + 20*m.x253 + 30*m.x275 + 40*m.x276
+ 40*m.x277 - m.x290 - m.x291 - m.x292 - 5*m.x341 - 3*m.x342 - 4*m.x343 - m.x344 - m.x345
- m.x346 + 120*m.x365 + 110*m.x366 + 150*m.x367 + 140*m.x368 + 120*m.x369 + 100*m.x370
+ 90*m.x371 + 60*m.x372 + 150*m.x373 + 80*m.x374 + 90*m.x375 + 120*m.x376 + 285*m.x377
+ 390*m.x378 + 350*m.x379 + 290*m.x380 + 405*m.x381 + 190*m.x382 + 280*m.x383 + 400*m.x384
+ 430*m.x385 + 290*m.x386 + 300*m.x387 + 240*m.x388 + 350*m.x389 + 250*m.x390 + 300*m.x391
- 5*m.b1028 - 4*m.b1029 - 6*m.b1030 - 8*m.b1031 - 7*m.b1032 - 6*m.b1033 - 6*m.b1034 - 9*m.b1035
- 4*m.b1036 - 10*m.b1037 | |
# <gh_stars>100-1000
# imports
import hashlib
import json
import os
import os.path
import random
import re
import shutil
import stat
import urllib.parse
# constants used by the export_table/import_table test run
world_x_schema = "world_x_cities"
world_x_table = "cities"
# schema name can consist of 51 reserved characters max, server supports up to
# 64 chars but when it creates the directory for schema, it encodes non-letters
# ASCII using five bytes, meaning the directory name is going to be 255
# characters long (max on most platforms)
# schema name consists of 49 reserved characters + UTF-8 character
test_schema = "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ó"
test_table_primary = "first"
test_table_unique = "second"
# mysql_data_home + schema + table name + path separators cannot exceed 512
# characters
# table name consists of 48 reserved characters + UTF-8 character
test_table_non_unique = "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ą"
test_table_no_index = "fóurth"
test_table_json = "metadata"
test_table_trigger = "sample_trigger"
test_schema_procedure = "sample_procedure"
test_schema_function = "sample_function"
test_schema_event = "sample_event"
test_view = "sample_view"
verification_schema = "wl13804_ver"
types_schema = "xtest"
if __os_type == "windows":
    # on windows server would have to be installed in the root folder to
    # handle long schema/table names
    if __version_num >= 80000:
        test_schema = "@ó"
        test_table_non_unique = "@ą"
    else:
        # when using latin1 encoding, UTF-8 characters sent by shell are converted by server
        # to latin1 representation, then upper case characters are converted to lower case,
        # which leads to invalid UTF-8 sequences when names are transfered back to shell
        # use ASCII in this case
        test_schema = "@o"
        test_table_non_unique = "@a"
        test_table_no_index = "fourth"
all_schemas = [world_x_schema, types_schema, test_schema, verification_schema]
test_schema_tables = [test_table_primary, test_table_unique, test_table_non_unique, test_table_no_index, test_table_json]
test_schema_views = [test_view]
uri = __sandbox_uri1
# X-protocol URI: same sandbox, classic port + "0" (e.g. 3306 -> 33060)
xuri = __sandbox_uri1.replace("mysql://", "mysqlx://") + "0"
test_output_relative_parent = "dump_output"
test_output_relative = os.path.join(test_output_relative_parent, "data.txt")
test_output_absolute = os.path.abspath(test_output_relative)
test_output_absolute_parent = os.path.dirname(test_output_absolute)
# helpers
if __os_type == "windows":
    def filename_for_file(filename):
        """Normalize a Windows path to forward slashes for output comparisons."""
        return filename.replace("\\", "/")
else:
    def filename_for_file(filename):
        """POSIX paths need no normalization; return *filename* unchanged."""
        return filename
if __os_type == "windows":
    def absolute_path_for_output(path):
        """Prefix *path* with the Windows long-path marker so paths over
        MAX_PATH still work."""
        return "\\\\?\\" + path
else:
    def absolute_path_for_output(path):
        """POSIX paths are used verbatim."""
        return path
def setup_session(u = uri):
    """Connect the global shell to *u* and prepare the session for the tests."""
    shell.connect(u)
    # keep client/server charset consistent for the UTF-8 schema/table names above
    session.run_sql("SET NAMES 'utf8mb4';")
    # util.import_table() used later requires local_infile enabled on the server
    session.run_sql("SET GLOBAL local_infile = true;")
def drop_all_schemas(exclude=None):
    """Drop every schema except the built-in system ones and *exclude*.

    Args:
        exclude: optional list of additional schema names to keep.

    Fix: the original used a mutable default argument (``exclude=[]``);
    ``None`` is the safe idiom and is behaviorally identical for callers.
    """
    keep = ["information_schema", "mysql", "performance_schema", "sys"] + (exclude or [])
    for schema in session.run_sql("SELECT SCHEMA_NAME FROM information_schema.schemata;").fetch_all():
        if schema[0] not in keep:
            session.run_sql("DROP SCHEMA IF EXISTS !;", [ schema[0] ])
def create_all_schemas():
    """Create every schema listed in `all_schemas`."""
    for schema in all_schemas:
        session.run_sql("CREATE SCHEMA !;", [ schema ])
def recreate_verification_schema():
    """Drop and re-create the schema used to verify re-imported data."""
    session.run_sql("DROP SCHEMA IF EXISTS !;", [ verification_schema ])
    session.run_sql("CREATE SCHEMA !;", [ verification_schema ])
def count_files_with_basename(directory, basename):
    """Count direct entries of *directory* whose names start with *basename*."""
    return sum(1 for entry in os.listdir(directory) if entry.startswith(basename))
def has_file_with_basename(directory, basename):
    """Return True when *directory* has at least one entry starting with *basename*."""
    return any(entry.startswith(basename) for entry in os.listdir(directory))
def count_files_with_extension(directory, ext):
    """Count direct entries of *directory* whose names end with *ext*."""
    return sum(1 for entry in os.listdir(directory) if entry.endswith(ext))
def quote(schema, table = None):
    """Backtick-quote an identifier, or a `schema`.`table` pair when *table* is given.

    Embedded backticks are doubled per MySQL quoting rules.
    """
    escaped = "`{0}`".format(schema.replace("`", "``"))
    if table is None:
        return escaped
    return "{0}.{1}".format(escaped, quote(table))
def hash_file(path):
    """Return the MD5 hex digest of the file at *path*, read in 64 KiB chunks."""
    digest = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()
def EXPECT_SUCCESS(table, outputUrl, options = {}):
    """Run util.export_table() and assert it created the output file.

    Recreates the output directory first so every call starts from a clean
    state.  NOTE(review): `options` is a mutable default; it is only read
    here, but confirm no caller mutates it.
    """
    shutil.rmtree(test_output_absolute_parent, True)
    os.mkdir(test_output_absolute_parent)
    EXPECT_FALSE(os.path.isfile(test_output_absolute))
    util.export_table(table, outputUrl, options)
    EXPECT_TRUE(os.path.isfile(test_output_absolute))
def EXPECT_FAIL(error, msg, table, outputUrl, options = {}, expect_file_created = False):
    """Run util.export_table() and assert it throws "<error>: Util.export_table: <msg>".

    When *expect_file_created* is True, additionally assert that the output
    file was created before the failure occurred.
    """
    shutil.rmtree(test_output_absolute_parent, True)
    os.mkdir(test_output_absolute_parent)
    EXPECT_THROWS(lambda: util.export_table(table, outputUrl, options), "{0}: Util.export_table: {1}".format(error, msg))
    if expect_file_created:
        EXPECT_TRUE(os.path.isfile(test_output_absolute))
def TEST_BOOL_OPTION(option):
    """Assert that a Bool-typed option rejects every non-Bool value type."""
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Bool, but is Null".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: None })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' Bool expected, but value is String".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: "dummy" })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Bool, but is Array".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Bool, but is Map".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: {} })
def TEST_STRING_OPTION(option):
    """Assert that a String-typed option rejects every non-String value type."""
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Null".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: None })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: 5 })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: -5 })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Array".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Map".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: {} })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type String, but is Bool".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: False })
def TEST_UINT_OPTION(option):
    """Assert that a UInteger-typed option rejects every non-UInteger value."""
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type UInteger, but is Null".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: None })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' UInteger expected, but Integer value is out of range".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: -5 })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' UInteger expected, but value is String".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: "dummy" })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type UInteger, but is Array".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type UInteger, but is Map".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: {} })
def TEST_ARRAY_OF_STRINGS_OPTION(option):
    """Assert an Array-of-Strings option rejects non-Array values and non-String elements."""
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Array, but is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: 5 })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Array, but is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: -5 })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Array, but is String".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: "dummy" })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Array, but is Map".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: {} })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' is expected to be of type Array, but is Bool".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: False })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' String expected, but value is Null".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [ None ] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' String expected, but value is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [ 5 ] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' String expected, but value is Integer".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [ -5 ] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' String expected, but value is Map".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [ {} ] })
    EXPECT_FAIL("TypeError", "Argument #3: Option '{0}' String expected, but value is Bool".format(option), quote(types_schema, types_schema_tables[0]), test_output_relative, { option: [ False ] })
def get_all_columns(schema, table):
    """Return the column names of *schema*.*table* in ordinal order."""
    rows = session.run_sql("SELECT COLUMN_NAME FROM information_schema.columns WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? ORDER BY ORDINAL_POSITION;", [schema, table]).fetch_all()
    return [row[0] for row in rows]
def compute_crc(schema, table, columns):
    """Compute an order-sensitive MD5 checksum over all rows of schema.table.

    Each row is folded into a running hash held in the @crc user variable.
    The format placeholder {0} expands to "!,!,...,!" (one ! per column),
    which mysqlsh substitutes with the quoted column names — hence the
    argument list `columns + [schema, table] + columns` (SELECT list,
    FROM target, ORDER BY list).
    """
    session.run_sql("SET @crc = '';")
    session.run_sql("SELECT @crc := MD5(CONCAT_WS('#',@crc,{0})) FROM !.! ORDER BY {0};".format(("!," * len(columns))[:-1]), columns + [schema, table] + columns)
    return session.run_sql("SELECT @crc;").fetch_one()[0]
def TEST_LOAD(schema, table, options = {}):
    """Round-trip test: export schema.table, re-import it, compare checksums.

    Exports the table with *options*, loads the dump into a fresh
    verification table, then asserts the CRC of source and target match.
    """
    print("---> testing: `{0}`.`{1}` with options: {2}".format(schema, table, options))
    # prepare the options
    target_table = "verification"
    run_options = { "showProgress": False }
    # add extra options
    run_options.update(options)
    # export the table
    EXPECT_SUCCESS(quote(schema, table), test_output_absolute, run_options)
    # create target table
    recreate_verification_schema()
    session.run_sql("CREATE TABLE !.! LIKE !.!;", [verification_schema, target_table, schema, table])
    # prepare options for load
    run_options.update({ "schema": verification_schema, "table": target_table, "characterSet": "utf8mb4" })
    # rename the character set key (if it was provided) — import_table uses
    # "characterSet" while export uses "defaultCharacterSet"
    if "defaultCharacterSet" in run_options:
        run_options["characterSet"] = run_options["defaultCharacterSet"]
        del run_options["defaultCharacterSet"]
    # add decoded columns: binary-ish types are dumped base64-encoded, so the
    # import has to decode them with FROM_BASE64
    all_columns = get_all_columns(schema, table)
    decoded_columns = {}
    for column in all_columns:
        ctype = session.run_sql("SELECT DATA_TYPE FROM information_schema.columns WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ? AND COLUMN_NAME = ?;", [schema, table, column]).fetch_one()[0].lower()
        if (ctype.endswith("binary") or
            ctype.endswith("bit") or
            ctype.endswith("blob") or
            ctype.endswith("geometry") or
            ctype.endswith("geomcollection") or
            ctype.endswith("geometrycollection") or
            ctype.endswith("linestring") or
            ctype.endswith("point") or
            ctype.endswith("polygon")):
            decoded_columns[column] = "FROM_BASE64"
    if decoded_columns:
        run_options["columns"] = all_columns
        run_options["decodeColumns"] = decoded_columns
    # load in chunks
    run_options["bytesPerChunk"] = "1k"
    # load data
    util.import_table(test_output_absolute, run_options)
    # compute CRC
    EXPECT_EQ(compute_crc(schema, table, all_columns), compute_crc(verification_schema, target_table, all_columns))
def get_magic_number(path, count):
    """Return the first *count* bytes of *path* as an upper-case hex string."""
    with open(path, "rb") as f:
        header = f.read(count)
    return header.hex().upper()
# Leading bytes of compressed output files, as upper-case hex (see get_magic_number).
GZIP_MAGIC_NUMBER = "1F8B"
ZSTD_MAGIC_NUMBER = "28B52FFD"
#@<> WL13804-FR2.1 - If there is no open global Shell session, an exception must be thrown. (no global session)
# WL13804-TSFR_2_1_2
EXPECT_FAIL("RuntimeError", "An open session is required to perform this operation.", quote('mysql', 'user'), test_output_relative)
#@<> deploy sandbox
testutil.deploy_raw_sandbox(__mysql_sandbox_port1, "root")
#@<> wait for server
testutil.wait_sandbox_alive(uri)
shell.connect(uri)
#@<> WL13804-FR2.1 - If there is no open global Shell session, an exception must be thrown. (no open session)
# close the session right away so the "no open session" variant can be tested
# WL13804-TSFR_2_1_1
session.close()
EXPECT_FAIL("RuntimeError", "An open | |
await self.account.blockchain.getcid(hash=content["hash"])
await content_collection.find_one_and_update({"txid":txid}, {"$set":{"cid":int(cid)}})
await content_collection.find_one_and_update({"txid":txid}, {"$set":{"hash":None}})
updated = await content_collection.find_one({"txid":txid})
return {i:updated[i] for i in updated if i != "_id"}
#@verify
async def set_access_string(self, **params):
"""Writes content access string to database
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
cid = int(params.get("cid", "0"))
seller_access_string = params.get("seller_access_string")
seller_pubkey = params.get("seller_pubkey")
coinid = params.get("coinid")
try:
coinid = coinid.replace("TEST", "")
except:
pass
database = client[coinid]
collection = database[settings.CONTENT]
content = await collection.find_one({"cid":cid})
if not content:
return {"error":404, "reason":"Content not found"}
if not all([cid, seller_access_string, seller_pubkey]):
return {"error":400, "reason":"Missed required fields"}
await collection.find_one_and_update({"cid":cid},
{"$set":{"seller_access_string":seller_access_string}})
await collection.find_one_and_update({"cid":cid},
{"$set":{"seller_pubkey":seller_pubkey}})
content = await collection.find_one({"cid":cid})
return {i:content[i] for i in content if i != "_id"}
#@verify
async def get_reviews(self, **params):
"""Receives all reviews by cid
Accepts:
- cid
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
cid = params.get("cid", 0)
coinid = params.get("coinid")
if not cid and not coinid:
return {"error":400, "reason":"Missed cid"}
reviews = []
database = client[coinid]
collection = database[settings.REVIEW]
async for document in collection.find({"confirmed":None, "cid":int(cid)}):
reviews.append({i:document[i] for i in document if i == "confirmed"})
return reviews
#@verify
    async def set_review(self, **params):
        """Create an (unconfirmed) review record for a content item.

        Accepts:
        - cid
        - review
        - public_key
        - rating
        - txid
        - coinid

        NOTE(review): only cid/txid/coinid are persisted here — review,
        public_key and rating are accepted but not stored; confirm this is
        intended.
        """
        if params.get("message"):
            params = json.loads(params.get("message", "{}"))
        if not params:
            return {"error":400, "reason":"Missed required fields"}
        cid = int(params.get("cid", 0))
        txid = params.get("txid")
        coinid = params.get("coinid")
        try:
            # "TESTQTUM" etc. map onto the same database as "QTUM"
            coinid = coinid.replace("TEST", "")
        except:
            pass
        # Get content
        database = client[coinid]
        content_collection = database[settings.CONTENT]
        content = await content_collection.find_one({"cid":cid})
        if not content:
            return {"error":404, "reason":"Not found current content"}
        database = client[coinid]
        review_collection = database[settings.REVIEW]
        await review_collection.insert_one({"cid":cid, "confirmed":None,
                                            "txid":txid, "coinid":coinid})
        return {"result":"ok"}
#@verify
async def update_review(self, **params):
"""Update review after transaction confirmation
Accepts:
- txid
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
# Check if required fields exists
txid = params.get("txid")
coinid = params.get("coinid").upper()
try:
coinid = coinid.replace("TEST", "")
except:
pass
# Try to find offer with account id and cid
database = client[coinid]
collection = database[settings.REVIEW]
review = await collection.find_one({"txid":txid})
if not review:
return {"error":404,
"reason":"Review with txid %s not found" % txid }
# Update review
await collection.find_one_and_update(
{"txid":txid}, {"$set":{"confirmed":1}})
# Get updated offer
updated = await collection.find_one({"txid":txid})
return {i:updated[i] for i in updated if i != "_id"}
#@verify
    async def write_deal(self, **params):
        """Record a purchase deal for a content item.

        Accepts:
        - cid
        - access_type
        - buyer public key
        - seller public key
        - price
        - coinid

        NOTE(review): coinid is used to pick the database but is not part of
        the required-field check below — confirm whether it may be empty.
        """
        if params.get("message"):
            params = json.loads(params.get("message", "{}"))
        if not params:
            return {"error":400, "reason":"Missed required fields"}
        cid = int(params.get("cid", 0))
        access_type = params.get("access_type")
        buyer = params.get("buyer")
        seller = params.get("seller")
        price = params.get("price")
        coinid = params.get("coinid")
        try:
            # "TESTQTUM" etc. map onto the same database as "QTUM"
            coinid = coinid.replace("TEST", "")
        except:
            pass
        if not all([cid, access_type, buyer, seller, price]):
            return {"error":400, "reason":"Missed required fields"}
        database = client[coinid]
        collection = database[settings.DEAL]
        await collection.insert_one({
            "cid":cid,
            "access_type": access_type,
            "buyer":buyer,
            "seller":seller,
            "price":price,
            "coinid":coinid
            })
        # return the stored deal without Mongo's _id
        result = await collection.find_one({"cid":cid, "buyer":buyer})
        return {i:result[i] for i in result if i != "_id"}
#@verify
async def update_description(self, **params):
"""Set description to unconfirmed status
after updating by user.
Accepts:
- cid
- description
- transaction id
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
# Check if required fields exists
cid = params.get("cid")
description = params.get("description")
txid = params.get("txid")
coinid = params.get("coinid")
try:
coinid = coinid.replace("TEST", "")
except:
pass
# Check if required fileds
if not all([cid, description, txid, coinid]):
return {"error":400, "reason":"Missed required fields"}
# Try to find offer with account id and cid
database = client[coinid]
collection = database[settings.CONTENT]
content = await collection.find_one({"cid":int(cid)})
if not content:
return {"error":404,
"reason":"Content with cid %s not found" % cid }
# Update offer
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"description":description}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"confirmed":None}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"txid":txid}})
# Get updated offer
updated = await collection.find_one({"cid":int(cid)})
return {i:updated[i] for i in updated if i != "_id"}
#@verify
async def set_write_price(self, **params):
"""Updates write access price of content
Accespts:
- cid
- price
- txid
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
# Check if required fields exists
cid = params.get("cid")
price = params.get("write_price")
txid = params.get("txid")
coinid = params.get("coinid")
# Check if required fileds
if not all([cid, price, txid]):
return {"error":400, "reason":"Missed required fields"}
# Try to find offer with account id and cid
database = client[coinid]
collection = database[settings.CONTENT]
# Check if content exists
content = await collection.find_one({"cid":int(cid)})
if not content:
return {"error":404,
"reason":"Content with cid %s not found" % cid }
# Update content
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"write_access":price}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"confirmed":None}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"txid":txid}})
# Get updated content
updated = await collection.find_one({"cid":int(cid)})
return {i:updated[i] for i in updated if i != "_id"}
#@verify
async def set_read_price(self, **params):
"""Updates write access price of content
Accespts:
- cid
- price
- txid
- coinid
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
# Check if required fields exists
cid = params.get("cid")
price = params.get("read_price")
txid = params.get("txid")
coinid = params.get("coinid")
# Check if required fileds
if not all([cid, price, txid]):
return {"error":400, "reason":"Missed required fields"}
# Try to find offer with account id and cid
database = client[coinid]
collection = database[settings.CONTENT]
# Check if content exists
content = await collection.find_one({"cid":int(cid)})
if not content:
return {"error":404,
"reason":"Content with cid %s not found" % cid }
# Update content
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"read_access":price}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"confirmed":None}})
await collection.find_one_and_update(
{"cid":int(cid)}, {"$set":{"txid":txid}})
# Get updated content
updated = await collection.find_one({"cid":int(cid)})
return {i:updated[i] for i in updated if i != "_id"}
#@verify
    async def change_owner(self, **params):
        """Set *public_key* as the owner of content *cid* in database *coinid*.

        NOTE(review): find_one_and_update without return_document returns the
        PRE-update document, so the dict returned here still shows the old
        owner — confirm callers expect that.
        """
        if params.get("message"):
            params = json.loads(params.get("message", "{}"))
        if not params:
            return {"error":400, "reason":"Missed required fields"}
        coinid = params.get("coinid")
        cid = params.get("cid")
        public_key = params.get("public_key")
        database = client[coinid]
        content_collection = database[settings.CONTENT]
        content = await content_collection.find_one_and_update({"cid":int(cid)},
                                            {"$set":{"owner":public_key}})
        if not content:
            return {"error":404, "reason":"Change owner. Content with cid %s not found" % cid}
        return {i:content[i] for i in content if i != "_id"}
#@verify
async def share_content(self, **params):
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
coinid = params.get("coinid")
cid = params.get("cid")
public_key = params.get("public_key")
database = client[coinid]
content_collection = database[settings.DEAL]
await content_collection.insert_one({"cid":int(cid), "user":public_key})
content = await content_collection.find_one({"cid":int(cid), "user":public_key})
logging.debug(content)
if not content:
return {"error":404,
"reason":"Shared content. Content with cid %s not created" % cid}
return {i:content[i] for i in content if i != "_id"}
#@verify
    async def get_deals(self, **params):
        """Collect a buyer's deals across every configured blockchain.

        Accepts:
        - buyer public key
        Returns {coinid: [(cid, txid), ...], ...}.

        NOTE(review): this queries the "owner" field, but write_deal() stores
        deals with a "buyer" field — confirm which key deals are written with,
        otherwise this lookup never matches.
        """
        if params.get("message"):
            params = json.loads(params.get("message", "{}"))
        if not params:
            return {"error":400, "reason":"Missed required fields"}
        buyer = params.get("buyer")
        if not buyer:
            return {"error":400, "reason":"Missed public key"}
        deals = {i:[] for i in list(settings.bridges.keys())}
        for coinid in list(settings.bridges.keys()):
            database = client[coinid]
            collection = database[settings.DEAL]
            async for document in collection.find({"owner":buyer}):
                deals[coinid].append((document["cid"],document.get("txid")))
        return deals
#@verify
async def log_source(self, **params):
""" Logging users request sources
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
# Insert new source if does not exists the one
database = client[settings.DBNAME]
source_collection = database[settings.SOURCE]
await source_collection.update({"public_key":params.get("public_key")},
{"$addToSet":{"source":params.get("source")}},
upsert=True)
return {"result": "ok"}
#@verify
async def log_transaction(self, **params):
"""Writing transaction to database
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
coinid = params.get("coinid")
if not coinid in ["QTUM", "PUT"]:
return {"error":400, "reason": "Missed or invalid coinid"}
database = client[settings.TXS]
source_collection = database[coinid]
await source_collection.find_one_and_update({"txid":params.get("txid")},{"$set":{
"blocknumber":params.get("blocknumber"),
"blockhash":params.get("blockhash"),
"gasLimit":params.get("gasLimit"),
"gasPrice":params.get("gasPrice"),
}})
return {"success":True}
async def save_transaction(self, **params):
"""
"""
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
coinid = params.get("coinid")
if not coinid in ["QTUM", "PUT"]:
return {"error":400, "reason": "Missed or invalid coinid"}
database = client[settings.TXS]
source_collection = database[coinid]
await source_collection.insert_one({
"txid":params.get("txid"),
"address":params.get("address"),
"amount":params.get("amount")
})
return {"success":True}
#@verify
async def get_transactions(self, **params):
if params.get("message"):
params = json.loads(params.get("message", "{}"))
if not params:
return {"error":400, "reason":"Missed required fields"}
if not params.get("address"):
return {"error":400, "reason":"Missed address field. "}
coinid = params.get("coinid")
address = params.get("address")
if not coinid in ["QTUM", "PUT"]:
return {"error":400, "reason":"Missed or invalid coinid. "}
if not address:
return {"error":400, "reason":"Missed address field. "}
database = client[settings.TXS]
source_collection = database[coinid]
result = [{i:t[i] for i in t if i != "_id"}
async for t in source_collection.find({"address":address})]
return {"txs":result}
# Default storage backend for the JSON-RPC handlers below: the accounts collection.
table = StorageTable(dbname=settings.DBNAME, collection=settings.ACCOUNTS)
@methods.add
async def createaccount(**params):
    """JSON-RPC: create an account document; return it without Mongo's _id."""
    document = await table.create_account(**params)
    return {i:document[i] for i in document if i != "_id"}
@methods.add
async def getaccountdata(**params):
    """JSON-RPC: look up an account by public_key or id.

    `message` may arrive either as a JSON string or as an already-decoded dict.
    """
    if isinstance(params.get("message"), str):
        params = json.loads(params.get("message", "{}"))
    elif isinstance(params.get("message"), dict):
        params = params.get("message")
    # keep only the lookup keys the storage layer understands
    data = {i:params[i] for i in params if i == "public_key" or i == "id"}
    document = await table.find(**data)
    return {i:document[i] for i in document if i != "_id"}
@methods.add
async def createwallet(**params):
    """JSON-RPC: attach a wallet to an account; return it without Mongo's _id."""
    document = await table.insert_wallet(**params)
    return {i:document[i] for i in document if i != "_id"}
@methods.add
async def getnews(**params):
    """JSON-RPC: return recent news entries."""
    news = await table.find_recent_news(**params)
    return news
@methods.add
async def setnews(**params):
    """JSON-RPC: insert a news entry."""
    result = await table.insert_news(**params)
    return result
@methods.add
async def getaccountbywallet(**params):
    """JSON-RPC: resolve an account from one of its wallet addresses.

    Accepts:
    - wallet: public key, hex or checksum format
    Searches the wallet collection of every configured coin; the for/else
    below returns 404 only when no coin's collection contained the wallet.
    """
    if params.get("message"):
        params = json.loads(params.get("message"))
    for coinid in coin_ids:
        database = client[coinid]
        wallet_collection = database[settings.WALLET]
        wallet = await wallet_collection.find_one({"wallet":params["wallet"]})
        if not wallet:
            continue
        else:
            database = client[settings.DBNAME]
            accounts_collection = database[settings.ACCOUNTS]
            account = await accounts_collection.find_one({"id":wallet["account_id"]})
            if not account:
                return {"error":404, "reason":"Account was not found"}
            return {i:account[i] for i in account if i != "_id"}
    else:
        return {"error":404, "reason":"Account was not found"}
@methods.add
async def updatelevel(**params):
    """JSON-RPC: set an account's level."""
    message = json.loads(params.get("message"))
    document = await table.find(**{"id":message["id"]})
    # NOTE(review): "_id" is populated with the account's own "id" value, not
    # Mongo's ObjectId — confirm StorageTable.update() expects that.
    data = {"_id":document["id"],
            "level":message["level"]}
    updated = await table.update(**data)
    return {i:updated[i] for i in updated if i != "_id"}
@methods.add
async def insertoffer(**params):
    """JSON-RPC: insert an offer document."""
    offertable = StorageTable(dbname=settings.DBNAME,
                              collection=settings.OFFER)
    result = await offertable.insert_offer(**params)
    return result
@methods.add
async def getoffer(**params):
    """JSON-RPC: fetch a single offer."""
    offertable = StorageTable(dbname=settings.DBNAME,
                              collection=settings.OFFER)
    result = await offertable.get_offer(**params)
    return result
@methods.add
async def removeoffer(**params):
    """JSON-RPC: delete an offer."""
    offertable = StorageTable(dbname=settings.DBNAME,
                              collection=settings.OFFER)
    result = await offertable.remove_offer(**params)
    return result
@methods.add
async def updateoffer(**params):
    """JSON-RPC: update an existing offer."""
    offertable = StorageTable(dbname=settings.DBNAME,
                              collection=settings.OFFER)
    result = await offertable.update_offer(**params)
    return result
@methods.add
async def mailedconfirm(**params):
    """JSON-RPC: mark an offer as confirmed by mail."""
    offertable = StorageTable(dbname=settings.DBNAME,
                              collection=settings.OFFER)
    result = await offertable.mailed_confirm(**params)
    return result
@methods.add
async def getoffers(**params):
    """JSON-RPC: list offers via the default accounts table."""
    result = await table.get_offers(**params)
    return result
@methods.add
async def getuserscontent(**params):
    """JSON-RPC: list a user's content items."""
    result = await table.get_contents(**params)
    return result
@methods.add
async def setuserscontent(**params):
    """JSON-RPC: store a user's content items."""
    result = await table.set_contents(**params)
    return result
@methods.add
async def updateuserscontent(**params):
    """JSON-RPC: update a user's content items."""
    result = await table.update_contents(**params)
    return result
@methods.add
async def getallcontent(**params):
    """JSON-RPC: list all content items."""
    result = await table.get_all_content(**params)
    return result
@methods.add
async def getsinglecontent(**params):
    """JSON-RPC: fetch one content item (uses the CONTENT collection)."""
    # shadows the module-level accounts `table` with a content-backed one
    table = StorageTable(dbname=settings.DBNAME, collection=settings.CONTENT)
    result = await table.get_single_content(**params)
    return result
@methods.add
async def changecontentowner(**params):
    """JSON-RPC: transfer ownership of a content item."""
    result = await table.change_content_owner(**params)
    return result
@methods.add
async def setaccessstring(**params):
    """JSON-RPC: store a seller access string (uses the CONTENT collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.CONTENT)
    result = await table.set_access_string(**params)
    return result
@methods.add
async def getreviews(**params):
    """JSON-RPC: list reviews for a content item (uses the REVIEW collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.REVIEW)
    result = await table.get_reviews(**params)
    return result
@methods.add
async def setreview(**params):
    """JSON-RPC: create a review (uses the REVIEW collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.REVIEW)
    result = await table.set_review(**params)
    return result
@methods.add
async def updatereview(**params):
    """JSON-RPC: confirm a review (uses the REVIEW collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.REVIEW)
    result = await table.update_review(**params)
    return result
@methods.add
async def writedeal(**params):
    """JSON-RPC: record a purchase deal (uses the DEAL collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.DEAL)
    result = await table.write_deal(**params)
    return result
@methods.add
async def getdeals(**params):
    """JSON-RPC: list a buyer's deals (uses the DEAL collection)."""
    table = StorageTable(dbname=settings.DBNAME, collection=settings.DEAL)
    result = await table.get_deals(**params)
    return result
@methods.add
async def updatedescription(**params):
table = StorageTable(dbname=settings.DBNAME, collection=settings.CONTENT)
result = await | |
all regs"""
res_str = ''
for i in range(2, self.NUM_GPRS):
res_str += ('r' + str(i)).rjust(3) + ': ' + hex(self.get_gpr(i)) + '\n'
return res_str
def get_s_reg_table(self, header):
"""Get a table with hexstrings of all special registers"""
res_str = ''
if header:
res_str += self.get_limb_header()
res_str += 'mod: ' + self.get_xlen_hex_str(self.get_reg('mod')) + '\n'
res_str += 'rfp: ' + self.get_xlen_hex_str(self.get_reg('rfp')) + '\n'
res_str += 'dmp: ' + self.get_xlen_hex_str(self.get_reg('dmp')) + '\n'
res_str += ' lc: ' + self.get_xlen_hex_str(self.get_reg('lc')) + '\n'
res_str += 'rnd: ' + self.get_xlen_hex_str(self.get_reg('rnd'))
return res_str
def get_all_reg_table(self, header):
"""Get a table with hex strings of all registers (general purpose and special)"""
res_str = ''
if header:
res_str += self.get_limb_header()
res_str += self.get_s_reg_table(False) + '\n' + self.get_reg_table(False)
return res_str
def get_all_flags_table(self):
"""Ger a table with the state of all flags (extended and standard)"""
res_str = ''
res_str += '|C|Z|M|L| X|C|Z|M|L|\n'
res_str += '|' + str(int(self.get_flag('C'))) + '|' + str(int(self.get_flag('Z'))) + '|' \
+ str(int(self.get_flag('M'))) + '|' + str(int(self.get_flag('L'))) + '|'
res_str += ' |' + str(int(self.get_flag('XC'))) + '|' + str(int(self.get_flag('XZ'))) + '|' \
+ str(int(self.get_flag('XM'))) + '|' + str(int(self.get_flag('XL'))) + '|'
return res_str
def get_dmem_table(self, low, high):
    """Format the dmem words in [low, high] as hex, four entries per row.

    The range is clamped to the dmem depth; output ends with a newline.
    """
    upper = min(high + 1, self.DMEM_DEPTH)
    pieces = []
    for idx in range(low, upper):
        # Start a new row before every index that is a positive multiple of 4.
        if idx > 0 and idx % 4 == 0:
            pieces.append('\n')
        pieces.append(str(idx).rjust(4) + ': ' + self.get_xlen_hex_str(self.dmem[idx]))
    pieces.append('\n')
    return ''.join(pieces)
def get_breakpoints(self):
    """Describe every configured breakpoint, one line per address."""
    lines = []
    # Each breakpoint maps address -> (passes required to stop, pass counter).
    for addr, (stop_pass, counter) in self.breakpoints.items():
        lines.append('Address: %s, stop at pass: %s, passed: %s\n'
                     % (addr, stop_pass, counter - 1))
    return ''.join(lines)
def toggle_breakpoint(self, bp, passes=1, msg=False):
    """Toggle a breakpoint"""
    # breakpoints is a dictionary with the address as key and the values
    # are tuples of number of passes required to break and the pass counter
    #
    # *bp* may be an int address, a decimal string, a '0x'-prefixed hex
    # string, or (when an assembly context is loaded) a function/label name.
    if isinstance(bp, int):
        addr = int(bp)
    else:
        if bp.isdigit():
            addr = int(bp)
        elif bp.lower().startswith('0x'):
            addr = int(bp[2:], 16)
        else:
            # Symbolic breakpoints need the assembly context for resolution.
            if not self.ctx:
                print('\nError: Label/function breakpoints only possible when assembly context is available\n')
                return
            else:
                # ctx maps address -> name; invert for name -> address lookup.
                rev_functions = {v: k for k, v in self.ctx.functions.items()}
                rev_labels = {v: k for k, v in self.ctx.labels.items()}
                if bp in rev_functions:
                    addr = rev_functions[bp]
                elif bp in rev_labels:
                    addr = rev_labels[bp]
                else:
                    print('\nError: function or label \'' + bp + '\' not found.\n')
                    return
    if addr in self.breakpoints:
        # Toggling an existing breakpoint removes it.
        del self.breakpoints[addr]
        if msg:
            print('\nBreakpoint deleted at address ' + str(addr) + '\n')
    else:
        # New breakpoint: only accept addresses inside imem; counter starts at 1.
        if addr in range(0, self.IMEM_DEPTH):
            self.breakpoints.update({addr: (passes, 1)})
            if msg:
                print('\nBreakpoint set at address ' + str(addr) + '\n')
        else:
            print('\nError: breakpoint address out of range\n')
def set_breakpoint(self, bp, passes=1, msg=False):
    """Set a breakpoint.

    Args:
        bp: breakpoint location -- an int address, a decimal string, a
            '0x'-prefixed hex string, or (when an assembly context is
            available) a function or label name.
        passes: number of passes over the address before execution stops.
        msg: when True, print a confirmation message.
    """
    # Fix: the docstring used to sit mid-function (a no-op string statement
    # after the address parsing); it now documents the method properly.
    #
    # breakpoints is a dictionary with the address as key and the values
    # are tuples of number of passes required to break and the pass counter
    if isinstance(bp, int):
        addr = bp
    else:
        if bp.isdigit():
            addr = int(bp)
        elif bp.lower().startswith('0x'):
            addr = int(bp[2:], 16)
        else:
            # Symbolic breakpoints need the assembly context for resolution.
            if not self.ctx:
                print('\nError: Label/function breakpoints only possible when assembly context is available\n')
                return
            # ctx maps address -> name; invert for name -> address lookup.
            rev_functions = {v: k for k, v in self.ctx.functions.items()}
            rev_labels = {v: k for k, v in self.ctx.labels.items()}
            if bp in rev_functions:
                addr = rev_functions[bp]
            elif bp in rev_labels:
                addr = rev_labels[bp]
            else:
                print('\nError: function or label \'' + bp + '\' not found.\n')
                return
    # Only accept addresses inside imem; the pass counter starts at 1.
    if 0 <= addr < self.IMEM_DEPTH:
        self.breakpoints[addr] = (passes, 1)
        if msg:
            print('\nBreakpoint set at address ' + str(addr) + '\n')
    else:
        print('\nError: breakpoint address out of range\n')
def __check_break(self):
    """check if current PC is in list of Breakpoints, if so and the number of required passes are reached, break,
    otherwise increment the pass counter for the address."""
    # Returns a tuple (should_break, passes).
    #
    # force_break is a 5-tuple: (armed, consider_callstack, callstack,
    # consider_loopstack, loopstack) -- armed by the step/step-over/step-out
    # commands and cleared once its condition is met.
    if self.force_break[0]:
        force_break, consider_callstack, callstack, consider_loopstack, loopstack = self.force_break
        # "step out of loop": break once the loop stack reaches the target depth
        if consider_loopstack and len(self.loop_stack) == loopstack:
            self.__clear_force_break()
            return True, 0
        # "step over"/"step out": break once the call stack reaches the target depth
        if consider_callstack and len(self.call_stack) == callstack:
            self.__clear_force_break()
            return True, 0
        # plain single step: break unconditionally
        if not consider_callstack and not consider_loopstack:
            self.__clear_force_break()
            return True, 0
    if self.breakpoints:
        # check if address is breakpoint
        if self.get_pc() in self.breakpoints:
            # break address found, check for number passes
            passes, cnt = self.breakpoints[self.get_pc()]
            if cnt == passes:
                # required passes reached: re-arm the counter and break
                self.breakpoints[self.get_pc()] = (passes, 1)
                return True, passes
            else:
                # not yet: just bump the pass counter
                self.breakpoints[self.get_pc()] = (passes, cnt + 1)
                return False, 0
    return False, 0
def __loop_depth(self, address):
    """Count how many loop ranges of the assembly context contain *address*."""
    # Without an assembly context there is no loop information.
    if not self.ctx:
        return 0
    return sum(1 for rng in self.ctx.loopranges if address in rng)
def print_asm(self, address, before=5, after=5):
    """Print range of assembly instructions before and after current program counter"""
    # Clamp the print window to the imem bounds.
    if address - before - 1 < 0:
        s_addr = 0
    else:
        s_addr = address - before - 1
    if address + after + 1 > len(self.imem) - 1:
        e_addr = len(self.imem) - 1 + 1
    else:
        e_addr = address + after + 1
    for i in range(s_addr, e_addr):
        asm_str = ''
        # ' ->' marks the current instruction.
        if address == i:
            asm_str += ' ->'
        else:
            asm_str += ' '
        # Breakpoint markers: '?' = pass count not yet reached, 'x' = armed.
        if i in self.breakpoints:
            if self.breakpoints[i][0] != self.breakpoints[i][1]:
                asm_str += ' ? '
            else:
                asm_str += ' x '
        else:
            asm_str += ' '
        asm_str += str(i).zfill(4) + ': '
        # Indent by the loop nesting depth at this address.
        for k in range(0, self.__loop_depth(i)):
            asm_str += ' '
        asm_str += self.get_instruction(i).get_asm_str()[1]
        # Function/label headers are printed before the instruction line.
        if self.ctx:
            if i in self.ctx.functions:
                print('\nfunction ' + self.ctx.functions[i] + ':')
        if self.ctx:
            if i in self.ctx.labels:
                print(self.ctx.labels[i] + ':')
        print(asm_str)
def get_full_dmem(self):
    """Return the complete dmem contents (the live list, not a copy)."""
    return self.dmem
def dump_dmem(self, length, filename):
    """Dump the first *length* dmem words to *filename* as hex text.

    Each line has the form 'NNNN: <hexstring>'. The length is clamped to
    the dmem depth.
    """
    # Use a context manager so the file is closed even if formatting fails
    # (the original left the handle open on an exception).
    with open(filename, 'w') as outfile:
        for i in range(0, min(length, self.DMEM_DEPTH)):
            outfile.write(str(i).zfill(4) + ': ' + self.get_xlen_hex_str(self.dmem[i]) + '\n')
@staticmethod
def __print_break_help():
    """Print the interactive debugger's command reference."""
    # Table-driven so new commands are a one-line addition; also fixes the
    # stray ')' that used to trail 'print mul accumulator'.
    for line in (
        'h - show this help message',
        'c - continue',
        's - step into',
        'n - step over',
        'o - step out',
        'ol - step out of loop',
        'r - print register file',
        'rs - print special registers',
        'ra - print all registers',
        'x - print GPRs (OTBN only)',
        'm - print mul accumulator',
        'd [len] [start] - print dmem words',
        'f - print flags',
        'ls - print loop stack',
        'cs - print call stack',
        'a - print assembly around current instruction',
        'b <addr> [pass] - toggle breakpoint',
        'lp - list breakpoints',
        'dump <length> [filename] - dump dmem content to hex file',
        'q - quit',
    ):
        print(line)
def __set_force_break(self, consider_callstack=False, callstack=0, consider_loopstack=False, loopstack=0):
    # Arm the force-break request consumed by __check_break. Tuple layout:
    # (armed, consider_callstack, target call depth, consider_loopstack,
    #  target loop depth).
    self.force_break = (True, consider_callstack, callstack, consider_loopstack, loopstack)
def __clear_force_break(self):
    # Disarm any pending force-break request (see __set_force_break for layout).
    self.force_break = (False, False, 0, False, 0)
def __handle_break_command(self, passes):
if passes:
print('Breakpoint hit at address ' + str(self.get_pc()) + ' at pass ' + str(passes) + '.')
else:
print('Breakpoint hit at address ' + str(self.get_pc()) + '.')
self.print_asm(self.get_pc(), 5, 5)
while 1:
inp = input('Press \'c\' to continue, \'h\' for help: ')
if inp == 'h':
self.__print_break_help()
elif inp == 'q':
exit()
elif inp == 'c':
break
elif inp == 's':
self.__set_force_break()
break
elif inp == 'n':
self.__set_force_break(consider_callstack=True, callstack=len(self.call_stack))
break
elif inp == 'ol':
if len(self.loop_stack) <= 0:
print('Nothing on loop stack, can\'t \"step out\".')
else:
self.__set_force_break(consider_loopstack=True, loopstack=len(self.loop_stack) - 1)
break
elif inp == 'o':
if len(self.call_stack) <= 0:
print('Nothing on call stack, can\'t \"step out\".')
else:
self.__set_force_break(consider_callstack=True, callstack=len(self.call_stack) - 1)
break
elif inp == 'r':
print(self.get_reg_table(True))
elif inp == 'rs':
print(self.get_s_reg_table(True))
elif inp == 'ra':
print(self.get_all_reg_table(True))
elif inp == 'x':
print(self.get_gprs())
elif inp == 'm':
print(hex(self.get_acc()))
elif inp == 'f':
print(self.get_all_flags_table())
elif inp == 'ls':
print(self.loop_stack)
elif inp == 'cs':
print(self.call_stack)
elif inp.split()[0] == 'd':
dmem_cmd = inp.split()
if len(dmem_cmd) == 1 and dmem_cmd[0] == 'd':
print(self.get_dmem_table(0, len(self.dmem) - 1))
elif len(dmem_cmd) == 2:
if not dmem_cmd[1].isdigit():
print('Invalid print dmem command')
else:
print(self.get_dmem_table(0, int(dmem_cmd[1]) - 1))
elif len(dmem_cmd) == 3:
if not (dmem_cmd[1].isdigit() and dmem_cmd[2].isdigit()):
print('Invalid print dmem command')
else:
print(self.get_dmem_table(int(dmem_cmd[2]), int(dmem_cmd[2]) + int(dmem_cmd[1]) - 1))
else:
print('Invalid print dmem command')
elif inp == 'a':
self.print_asm(self.get_pc(), 5, 5)
elif inp == 'lp':
print(self.get_breakpoints())
elif inp.split()[0] == 'b':
p_cmd = inp.split()
if len(p_cmd) == 1 and p_cmd[0] == 'b':
self.toggle_breakpoint(self.get_pc(), msg=True)
self.print_asm(self.get_pc())
elif len(p_cmd) | |
from flask import Flask, render_template, session, request, redirect, url_for
from CreateForm import CreateInputForm
from collections import defaultdict
from grab_data import grab_data, grab_works
from os import urandom
import re
app = Flask(__name__)
# Random per-process secret: sessions are invalidated on every restart.
app.secret_key = urandom(24)
# Tanakh books in traditional order (Torah, Nevi'im, Ketuvim).
books = ["Genesis", "Exodus", "Leviticus", "Numbers", "Deuteronomy", "Joshua", "Judges", "I Samuel", "II Samuel",
"I Kings", "II Kings", "Isaiah", "Jeremiah", "Ezekiel", "Hosea", "Joel", "Amos", "Obadiah", "Jonah", "Micah",
"Nahum", "Habakkuk", "Zephaniah", "Haggai", "Zechariah", "Malachi", "Psalms", "Proverbs", "Job", "Song of Songs",
"Ruth", "Lamentations", "Ecclesiastes", "Esther", "Daniel", "Ezra", "Nehemiah", "I Chronicles", "II Chronicles"]
books_dict ={'Genesis': [31, 25, 24, 26, 32, 22, 24, 22, 29, 32, 32, 20, 18, 24, 21, 16, 27, 33, 38, 18, 34, 24, 20, 67, 34, 35, 46, 22, 35, 43, 54, 33, 20, 31, 29, 43, 36, 30, 23, 23, 57, 38, 34, 34, 28, 34, 31, 22, 33, 26],
'Exodus': [22, 25, 22, 31, 23, 30, 29, 28, 35, 29, 10, 51, 22, 31, 27, 36, 16, 27, 25, 23, 37, 30, 33, 18, 40, 37, 21, 43, 46, 38, 18, 35, 23, 35, 35, 38, 29, 31, 43, 38],
'Leviticus': [17, 16, 17, 35, 26, 23, 38, 36, 24, 20, 47, 8, 59, 57, 33, 34, 16, 30, 37, 27, 24, 33, 44, 23, 55, 46, 34],
'Numbers': [54, 34, 51, 49, 31, 27, 89, 26, 23, 36, 35, 16, 33, 45, 41, 35, 28, 32, 22, 29, 35, 41, 30, 25, 18, 65, 23, 31, 39, 17, 54, 42, 56, 29, 34, 13],
'Deuteronomy': [46, 37, 29, 49, 30, 25, 26, 20, 29, 22, 32, 31, 19, 29, 23, 22, 20, 22, 21, 20, 23, 29, 26, 22, 19, 19, 26, 69, 28, 20, 30, 52, 29, 12],
'Joshua': [18, 24, 17, 24, 15, 27, 26, 35, 27, 43, 23, 24, 33, 15, 63, 10, 18, 28, 51, 9, 45, 34, 16, 33],
'Judges': [36, 23, 31, 24, 31, 40, 25, 35, 57, 18, 40, 15, 25, 20, 20, 31, 13, 31, 30, 48, 25],
'I Samuel': [28, 36, 21, 22, 12, 21, 17, 22, 27, 27, 15, 25, 23, 52, 35, 23, 58, 30, 24, 42, 16, 23, 28, 23, 44, 25, 12, 25, 11, 31, 13],
'II Samuel': [27, 32, 39, 12, 25, 23, 29, 18, 13, 19, 27, 31, 39, 33, 37, 23, 29, 32, 44, 26, 22, 51, 39, 25],
'I Kings': [53, 46, 28, 20, 32, 38, 51, 66, 28, 29, 43, 33, 34, 31, 34, 34, 24, 46, 21, 43, 29, 54],
'II Kings': [18, 25, 27, 44, 27, 33, 20, 29, 37, 36, 20, 22, 25, 29, 38, 20, 41, 37, 37, 21, 26, 20, 37, 20, 30],
'Isaiah': [31, 22, 26, 6, 30, 13, 25, 23, 20, 34, 16, 6, 22, 32, 9, 14, 14, 7, 25, 6, 17, 25, 18, 23, 12, 21, 13, 29, 24, 33, 9, 20, 24, 17, 10, 22, 38, 22, 8, 31, 29, 25, 28, 28, 25, 13, 15, 22, 26, 11, 23, 15, 12, 17, 13, 12, 21, 14, 21, 22, 11, 12, 19, 11, 25, 24],
'Jeremiah': [19, 37, 25, 31, 31, 30, 34, 23, 25, 25, 23, 17, 27, 22, 21, 21, 27, 23, 15, 18, 14, 30, 40, 10, 38, 24, 22, 17, 32, 24, 40, 44, 26, 22, 19, 32, 21, 28, 18, 16, 18, 22, 13, 30, 5, 28, 7, 47, 39, 46, 64, 34],
'Ezekiel': [28, 10, 27, 17, 17, 14, 27, 18, 11, 22, 25, 28, 23, 23, 8, 63, 24, 32, 14, 44, 37, 31, 49, 27, 17, 21, 36, 26, 21, 26, 18, 32, 33, 31, 15, 38, 28, 23, 29, 49, 26, 20, 27, 31, 25, 24, 23, 35],
'Hosea': [9, 25, 5, 19, 15, 11, 16, 14, 17, 15, 11, 15, 15, 10],
'Joel': [20, 27, 5, 21],
'Amos': [15, 16, 15, 13, 27, 14, 17, 14, 15],
'Obadiah': [21],
'Jonah': [16, 11, 10, 11],
'Micah': [16, 13, 12, 14, 14, 16, 20],
'Nahum': [14, 14, 19],
'Habakkuk': [17, 20, 19],
'Zephaniah': [18, 15, 20],
'Haggai': [15, 23],
'Zechariah': [17, 17, 10, 14, 11, 15, 14, 23, 17, 12, 17, 14, 9, 21],
'Malachi': [14, 17, 24],
'Psalms': [6, 12, 9, 9, 13, 11, 18, 10, 21, 18, 7, 9, 6, 7, 5, 11, 15, 51, 15, 10, 14, 32, 6, 10, 22, 12, 14, 9, 11, 13, 25, 11, 22, 23, 28, 13, 40, 23, 14, 18, 14, 12, 5, 27, 18, 12, 10, 15, 21, 23, 21, 11, 7, 9, 24, 14, 12, 12, 18, 14, 9, 13, 12, 11, 14, 20, 8, 36, 37, 6, 24, 20, 28, 23, 11, 13, 21, 72, 13, 20, 17, 8, 19, 13, 14, 17, 7, 19, 53, 17, 16, 16, 5, 23, 11, 13, 12, 9, 9, 5, 8, 29, 22, 35, 45, 48, 43, 14, 31, 7, 10, 10, 9, 8, 18, 19, 2, 29, 176, 7, 8, 9, 4, 8, 5, 6, 5, 6, 8, 8, 3, 18, 3, 3, 21, 26, 9, 8, 24, 14, 10, 8, 12, 15, 21, 10, 20, 14, 9, 6],
'Proverbs': [33, 22, 35, 27, 23, 35, 27, 36, 18, 32, 31, 28, 25, 35, 33, 33, 28, 24, 29, 30, 31, 29, 35, 34, 28, 28, 27, 28, 27, 33, 31],
'Job': [22, 13, 26, 21, 27, 30, 21, 22, 35, 22, 20, 25, 28, 22, 35, 22, 16, 21, 29, 29, 34, 30, 17, 25, 6, 14, 23, 28, 25, 31, 40, 22, 33, 37, 16, 33, 24, 41, 30, 32, 26, 17],
'Song of Songs': [17, 17, 11, 16, 16, 12, 14, 14],
'Ruth': [22, 23, 18, 22],
'Lamentations': [22, 22, 66, 22, 22],
'Ecclesiastes': [18, 26, 22, 17, 19, 12, 29, 17, 18, 20, 10, 14],
'Esther': [22, 23, 15, 17, 14, 14, 10, 17, 32, 3],
'Daniel': [21, 49, 33, 34, 30, 29, 28, 27, 27, 21, 45, 13],
'Ezra': [11, 70, 13, 24, 17, 22, 28, 36, 15, 44],
'Nehemiah': [11, 20, 38, 17, 19, 19, 72, 18, 37, 40, 36, 47, 31],
'I Chronicles': [54, 55, 24, 43, 41, 66, 40, 40, 44, 14, 47, 41, 14, 17, 29, 43, 27, 17, 19, 8, 30, 19, 32, 31, 31, 32, 34, 21, 30],
'II Chronicles': [18, 17, 17, 22, 14, 42, 22, 18, 31, 19, 23, 16, 23, 14, 19, 14, 19, 34, 11, 37, 20, 12, 21, 27, 28, 23, 9, 27, 36, 27, 21, 33, 25, 33, 27, 23]}
works = [u'Abravanel', u'Al HaTorah', u'And you shall count', u"Ba'al HaTurim on the Torah", u'Baal HaTurim', u'Bartenura', u'Bedek HaBayit', u'Bekhor Shor', u'Bemidbar Rabbah', u'Bereishit Rabbah', u'Buber footnotes', u'Chidushei HaRashba', u'Chizkuni', u"Da'at Mikreh", u'<NAME>', u'Derushim', u'Derushim Al HaTorah', u'Devarim Rabbah', u'<NAME>', u'<NAME>', u'Eichah Rabbah', u'Ein Yaakov', u'Ein Yaakov (Glick Edition)', u'Ephod Bad', u'Est<NAME>abbah', u'Ethics', u'<NAME>', u'Footnotes', u"HaK'tav V'haKabbalah", u'<NAME>', u'Hirsch', u'I<NAME>', u'Ikar <NAME>', u'Ikar Tosafot Yom Tov', u'<NAME>', u'<NAME> on Mishneh Torah',
u'<NAME>', u'<NAME>', u'K<NAME>', u'Kohelet Rabbah', u'Kos <NAME>u', u'Koveitz Meforshim on Meseches Archin', u'Legends of the Jews', u'Maarechet Heidenheim', u'<NAME>', u'<NAME>', u'Malbim', u'<NAME>', u'Marbeh Lisaper', u'Meforash', u'<NAME>', u'Mekhilta', u'Mekhilta DeRabbi <NAME>', u'Meor Shemesh: Parshas Pinchos', u'Meseches Archin', u'Metzudat David', u'Metzudat Zion', u"Midrash B'not Zelophehad", u'Midrash Mishlei', u'Midrash Tanchuma', u'Mishmeret HaBayit', u'Mishnah',
u'<NAME>', u'Nehama Leibowitz on Noach', u'Oh<NAME>', u'Or HaChaim', u'Pe<NAME>', u'<NAME>', u'<NAME>', u'<NAME>', u"Ra'avad", u'Raavad on Mishneh Torah', u'<NAME>', u'<NAME>', u'<NAME>', u'<NAME>', u'<NAME>', u'Radak', u'Ralbag', u'Rambam',
u'Ramban', u'Ran', u'Rashba on rosh hashanah', u'Rashbam', u'Rashi', u'<NAME>', u'Reading the Women of the Bible', u'Recanati', u'Recanati Al HaTorah', u'Recanati on the HaTorah', u'Ritva', u'R<NAME>ba', u'<NAME>', u'<NAME>', u'Se<NAME>', u'Sforno', u'Shadal', u'She<NAME>', u'Shibbolei HaLeket', u'Shir HaShirim Rabbah', u'<NAME>', u'<NAME>', u'Sifra', u'<NAME>', u'<NAME>', u'Tafsir of <NAME>', u'<NAME>', u'<NAME>', u'The Fathers According to <NAME>', u'<NAME>', u"<NAME>", u'<NAME>', u'Tosafot',
u'<NAME>',
u'<NAME>', u'<NAME>', u'<NAME>', u'<NAME>', u'Wikisource Footnotes', u'<NAME>', u'<NAME>imoni on Nach', u'Yalkut Shimoni on Torah', u'Yevamos', u'<NAME>']
@app.route('/', methods=["GET", "POST"])
def index():
form = CreateInputForm(csrf_enabled=False)
if form.validate_on_submit():
book = form.book.data
chapter = form.chapter.data
verse | |
# repo: SaitoYutaka/microbitAnim -- file: microbitAnim.py
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Aug 8 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class MyFrame1
###########################################################################
class MyFrame1 ( wx.Frame ):
def __init__(self, parent):
    """Build the 5x5 LED-button grid, the menu bar, and wire all events.

    The 25 buttons are created in a loop instead of the generated
    copy-paste blocks; each button is still exposed as ``self.m_buttonRC``
    and bound to the matching ``onButtonRCClick`` virtual handler, so the
    external interface is unchanged.
    """
    wx.Frame.__init__(self, parent, id=wx.ID_ANY, title=wx.EmptyString,
                      pos=wx.Point(0, 0), size=wx.Size(767, 507),
                      style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL)
    self.SetSizeHints(wx.DefaultSize, wx.DefaultSize)

    gbSizer1 = wx.GridBagSizer(0, 0)
    gbSizer1.SetFlexibleDirection(wx.BOTH)
    gbSizer1.SetNonFlexibleGrowMode(wx.FLEX_GROWMODE_SPECIFIED)

    # 5x5 grid of 50x50 red buttons, one per micro:bit LED.
    for row in range(5):
        for col in range(5):
            button = wx.Button(self, wx.ID_ANY, wx.EmptyString,
                               wx.DefaultPosition, wx.Size(50, 50), 0)
            button.SetBackgroundColour(wx.Colour(255, 0, 0))
            gbSizer1.Add(button, wx.GBPosition(row, col), wx.GBSpan(1, 1),
                         wx.ALL, 5)
            # Keep the generated attribute names (m_button00 .. m_button44).
            setattr(self, 'm_button%d%d' % (row, col), button)
            # Bind to the matching virtual handler (onButton00Click ...).
            button.Bind(wx.EVT_BUTTON,
                        getattr(self, 'onButton%d%dClick' % (row, col)))

    self.SetSizer(gbSizer1)
    self.Layout()

    # File menu (Open / Save / quit) and an "export" menu ("python").
    self.m_menubar1 = wx.MenuBar(0)
    self.m_menu1 = wx.Menu()
    self.m_menuItem3 = wx.MenuItem(self.m_menu1, wx.ID_ANY, u"Open", wx.EmptyString, wx.ITEM_NORMAL)
    self.m_menu1.Append(self.m_menuItem3)
    self.m_menuItem1 = wx.MenuItem(self.m_menu1, wx.ID_ANY, u"Save", wx.EmptyString, wx.ITEM_NORMAL)
    self.m_menu1.Append(self.m_menuItem1)
    self.m_menuItem2 = wx.MenuItem(self.m_menu1, wx.ID_ANY, u"quit", wx.EmptyString, wx.ITEM_NORMAL)
    self.m_menu1.Append(self.m_menuItem2)
    self.m_menubar1.Append(self.m_menu1, u"File")

    self.m_menu2 = wx.Menu()
    self.m_menuItem4 = wx.MenuItem(self.m_menu2, wx.ID_ANY, u"python", wx.EmptyString, wx.ITEM_NORMAL)
    self.m_menu2.Append(self.m_menuItem4)
    self.m_menubar1.Append(self.m_menu2, u"export")

    self.SetMenuBar(self.m_menubar1)
    self.Centre(wx.BOTH)

    # Menu events.
    self.Bind(wx.EVT_MENU, self.OnMenuOpenSelect, id=self.m_menuItem3.GetId())
    self.Bind(wx.EVT_MENU, self.OnMenuSaveSelect, id=self.m_menuItem1.GetId())
    self.Bind(wx.EVT_MENU, self.OnMenuQuitSelect, id=self.m_menuItem2.GetId())
    self.Bind(wx.EVT_MENU, self.OnExportPythonSelect, id=self.m_menuItem4.GetId())
def __del__( self ):
    # Generated stub; no explicit cleanup is required.
    pass
# Virtual event handlers, override them in your derived class
def onButton00Click( self, event ):
    # Virtual handler for grid button (0,0); override in a derived class.
    event.Skip()
def onButton01Click( self, event ):
    # Virtual handler for grid button (0,1); override in a derived class.
    event.Skip()
def onButton02Click( self, event ):
    # Virtual handler for grid button (0,2); override in a derived class.
    event.Skip()
def onButton03Click( self, event ):
    # Virtual handler for grid button (0,3); override in a derived class.
    event.Skip()
def onButton04Click( self, event ):
    # Virtual handler for grid button (0,4); override in a derived class.
    event.Skip()
def onButton10Click( self, event ):
    # Virtual handler for grid button (1,0); override in a derived class.
    event.Skip()
def onButton11Click( self, event ):
    # Virtual handler for grid button (1,1); override in a derived class.
    event.Skip()
def onButton12Click( self, event ):
    # Virtual handler for grid button (1,2); override in a derived class.
    event.Skip()
def onButton13Click( self, event ):
    # Virtual handler for grid button (1,3); override in a derived class.
    event.Skip()
def onButton14Click( self, event ):
    # Virtual handler for grid button (1,4); override in a derived class.
    event.Skip()
def onButton20Click( self, event ):
    # Virtual handler for grid button (2,0); override in a derived class.
    event.Skip()
def onButton21Click( self, event ):
    # Virtual handler for grid button (2,1); override in a derived class.
    event.Skip()
def onButton22Click( self, event ):
    # Virtual handler for grid button (2,2); override in a derived class.
    event.Skip()
def onButton23Click( self, event ):
    # Virtual handler for grid button (2,3); override in a derived class.
    event.Skip()
def onButton24Click( self, event ):
    # Virtual handler for grid button (2,4); override in a derived class.
    event.Skip()
def onButton30Click( self, event ):
    # Virtual handler for grid button (3,0); override in a derived class.
    event.Skip()
def | |
:obj:`str`: The full path to the 'psadilookup.dat' file.
"""
return os.path.join(os.path.dirname(__file__), "data", "psadilookup.dat")
def get_filepath(obj):
    """Return a file path for *obj*.

    Tries, in order: ``obj.filepath()``, ``obj.file.path``, and finally a
    synthesized ``wrfout_<time>`` name built from the first file time.

    Raises:
        ValueError: if no path information can be determined.
    """
    try:
        return obj.filepath()
    except AttributeError:
        pass
    try:
        return obj.file.path
    # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit still
    # propagate; any other failure falls through to the time-based name.
    except Exception:
        pass
    # Let's make up a filename from the first file time
    times = extract_times(obj, None, meta=False, do_xtime=False)
    for t in times:
        return "wrfout_{}".format(str(t))
    raise ValueError("file contains no path information")
def get_id(obj, prefix=''):
    """Return the cache id.

    The cache id is used as a caching key for various routines. If the
    object type is a mapping, then the result will also be a
    mapping of each key to the object id for the value.

    Args:
        obj (:obj:`object`): Any object type.
        prefix (:obj:`str`): A string to help with recursive calls.

    Returns:
        :obj:`int` or :obj:`dict`: If the *obj* parameter is not a mapping,
            then the object id is returned. Otherwise, a mapping of each
            key to the object id for the value is returned.

    """
    # Single file: hash the prefix plus its path.
    if not is_multi_file(obj):
        return hash(prefix + get_filepath(obj))
    # Mapping: recurse per key until a non-mapping value is reached.
    if is_mapping(obj):
        return {key: get_id(val, prefix) for key, val in viewitems(obj)}
    # Sequence: key on this sequence's id plus the first file's path.
    first = next(iter(get_iterable(obj)))
    return get_id(first, prefix + str(id(obj)))
def geo_bounds(var=None, wrfin=None, varname=None, timeidx=0, method="cat",
               squeeze=True, cache=None):
    """Return the geographic boundaries for the variable or file(s).

    When using a :class:`xarray.DataArray` as the *var* parameter, the variable
    must contain latitude and longitude coordinates. If these coordinate
    dimensions are greater than two dimensions, then an array of
    :class:`wrf.GeoBounds` objects will be returned with the same shape as the
    leftmost dimensions of the coordinate arrays.

    When using a WRF file, or sequence of WRF files, by supplying the
    *wrfin* parameter, an array of :class:`wrf.GeoBounds` objects will be
    returned if the domain is moving and :data:`wrf.ALL_TIMES` is selected as
    the *timeidx* parameter when using *wrfin*. Otherwise, a single
    :class:`wrf.GeoBounds` object is returned.

    Args:
        var (:class:`xarray.DataArray`, optional): A :class:`xarray.DataArray`
            variable that includes latitude,longitude coordinate information.
            If not used, then *wrfin* must be provided.
        wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
            iterable, optional): WRF-ARW NetCDF
            data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
            or an iterable sequence of the aforementioned types. If not used,
            then *var* must be provided.
        varname (:obj:`str`, optional): If using *wrfin*, then this will be the
            variable name to use to determine the geobounds. The variable
            can be a coordinate variable, or a regular variable that contains
            coordinate attributes. If None,
            then the 'XLAT', 'XLAT_M', 'XLONG', 'XLONG_M' variables
            will be used.
        timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
            desired time index when *wrfin* is not None. This value can be a
            positive integer, negative integer, or
            :data:`wrf.ALL_TIMES` (an alias for None) to return
            all times in the file or sequence. Default is 0. This value is
            ignored when *var* is used.
        method (:obj:`str`, optional): The aggregation method to use for
            sequences when *wrfin* is not None. Must be either 'cat' or
            'join'. 'cat' combines the data along the Time dimension.
            'join' creates a new dimension for the file index.
            The default is 'cat'.
        squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
            with a size of 1 from being automatically removed from the shape
            of the output. Only used when *wrfin* is used. Default is True.
        cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
            that can be used to supply pre-extracted NetCDF variables to the
            computational routines. It is primarily used for internal
            purposes, but can also be used to improve performance by
            eliminating the need to repeatedly extract the same variables
            used in multiple diagnostics calculations, particularly when using
            large sequences of files. Only used when *wrfin* is used.
            Default is None.

    Returns:
        :class:`wrf.GeoBounds`: The domain geographic bounds.

    """
    if var is None and wrfin is None:
        raise ValueError("'var' or 'wrfin' parameter is required")

    # Getting lat/lon from xarray coordinates
    if var is not None:
        if not xarray_enabled():
            raise ValueError("xarray is not installed or is disabled")

        is_moving = None
        try:
            var_coords = var.coords
        except AttributeError:
            raise ValueError("'var' object does not contain coordinate "
                             "attributes")

        latname, lonname, _ = _find_coord_names(var_coords)
        try:
            lats = to_np(var_coords[latname])
        except KeyError:
            raise ValueError("'var' object does not contain a latitude "
                             "coordinate")
        try:
            lons = to_np(var_coords[lonname])
        except KeyError:
            raise ValueError("'var' object does not contain a longitude "
                             "coordinate")

    # Getting lat/lon from the file
    elif wrfin is not None:
        _key = get_id(wrfin)
        is_moving = is_moving_domain(wrfin, varname=varname,
                                     latvar=either("XLAT", "XLAT_M"),
                                     lonvar=either("XLONG", "XLONG_M"),
                                     _key=_key)
        if varname is not None:
            if xarray_enabled():
                var = extract_vars(wrfin, timeidx, varname, method, squeeze,
                                   cache, meta=True, _key=_key)[varname]
                return geo_bounds(var)
            else:
                lat_coord, lon_coord, _ = _get_coord_names(wrfin, varname)
        else:
            lat_coord = either("XLAT", "XLAT_M")(wrfin)
            lon_coord = either("XLONG", "XLONG_M")(wrfin)

        # If requesting all times but the domain isn't moving, just
        # extract one time
        _timeidx = timeidx
        if timeidx is None and not is_moving:
            _timeidx = 0

        coord_data = extract_vars(wrfin, _timeidx, (lat_coord, lon_coord),
                                  method, squeeze, cache, meta=False,
                                  _key=_key)
        lats = coord_data[lat_coord]
        lons = coord_data[lon_coord]

    # Moving domains
    if lats.ndim > 2:
        # Requesting all times, works for 'cat' and 'join' data
        # and always for xarray data
        extra_dims = lats.shape[0:-2]
        # Fix: the 'np.object' alias was removed in NumPy 1.24; the builtin
        # 'object' is the correct dtype for an array of GeoBounds objects.
        out_geo = np.full(extra_dims, NullGeoBounds(), object)

        for left_idxs in iter_left_indexes(extra_dims):
            latlon_idx = left_idxs + (slice(None),)
            out_geo[left_idxs] = GeoBounds(lats=lats[latlon_idx],
                                           lons=lons[latlon_idx])
        return out_geo

    # Non-moving domains
    return GeoBounds(lats=lats, lons=lons)
def _get_wrf_proj_geobnds(var, wrfin, varname, timeidx, method, squeeze,
cache):
"""Return the :class:`wrf.WrfProj` subclass and :class:`wrf.GeoBounds`.
Args:
var (:class:`xarray.DataArray`): A :class:`xarray.DataArray`
variable that includes latitude,longitude coordinate information.
If not used, then *wrfin* must be provided.
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types. If not used,
then *var* must be provided.
varname (:obj:`str`): If using *wrfin*, then this will be the
variable name to use to determine the geobounds. The variable
can be a coordinate variable, or a regular variable that contains
coordinate attributes. If None,
then the 'XLAT', 'XLAT_M', 'XLONG', 'XLONG_M' variables
will be used.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. Default is 0.
method (:obj:`str`): The aggregation method to use for
sequences. Must be either 'cat' or 'join'.
'cat' combines the data along the Time dimension.
'join' creates a new dimension for the file index.
The default is 'cat'.
squeeze (:obj:`bool`): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
cache (:obj:`dict`): A dictionary of (varname, ndarray)
that can be used to supply pre-extracted NetCDF variables to the
computational routines. It is primarily used for internal
purposes, but can also be used to improve performance by
eliminating the need to repeatedly extract the same variables
used in multiple diagnostics calculations, particularly when using
large sequences of files.
Default is None.
Returns:
:obj:`tuple`: A tuple of :class:`wrf.WrfProj`
and :class:`wrf.GeoBounds`
"""
# Using a variable
if var is not None:
if not | |
<reponame>topnoom259/railblock<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# RaiBlocks Telegram bot
# @RaiWalletBot https://t.me/RaiWalletBot
#
# Source code:
# https://github.com/SergiySW/RaiWalletBot
#
# Released under the BSD 3-Clause License
#
"""
Usage:
Press Ctrl-C on the command line or send a signal to the process to stop the bot.
"""
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
from telegram.ext.dispatcher import run_async
from telegram import Bot, ParseMode, ReplyKeyboardMarkup, ReplyKeyboardRemove, ChatAction
from telegram.error import BadRequest, RetryAfter, TimedOut, NetworkError
import logging
import urllib3, certifi, socket, json, re
import hashlib, binascii, string, math
from mysql.connector import ProgrammingError
from time import sleep
import os, sys
# Parse config
# Reads bot.cfg once at import time; a missing option raises ConfigParser.NoOptionError.
import ConfigParser
config = ConfigParser.ConfigParser()
config.read('bot.cfg')
# Telegram / web endpoint settings
api_key = config.get('main', 'api_key')
url = config.get('main', 'url')
log_file = config.get('main', 'log_file')
log_file_messages = config.get('main', 'log_file_messages')
domain = config.get('main', 'domain')
listen_port = config.get('main', 'listen_port')
qr_folder_path = config.get('main', 'qr_folder_path')
# Node wallet credentials used for all sends
wallet = config.get('main', 'wallet')
wallet_password = config.get('main', 'password')
# Fee and welcome-payment accounts; raw_* values are in raw units (1 Mrai = 10**24 raw)
fee_account = config.get('main', 'fee_account')
fee_amount = int(config.get('main', 'fee_amount'))
raw_fee_amount = fee_amount * (10 ** 24)
welcome_account = config.get('main', 'welcome_account')
welcome_amount = int(config.get('main', 'welcome_amount'))
raw_welcome_amount = welcome_amount * (10 ** 24)
incoming_fee_text = '\n'
min_send = int(config.get('main', 'min_send'))
# Flood protection and admin/feeless user lists (JSON arrays in the cfg file)
ddos_protect_seconds = config.get('main', 'ddos_protect_seconds')
admin_list = json.loads(config.get('main', 'admin_list'))
extra_limit = int(config.get('main', 'extra_limit'))
LIST_OF_FEELESS = json.loads(config.get('main', 'feeless_list'))
# NOTE(review): '<PASSWORD>' looks like a scrubbed option name from the published
# source -- restore the real option key before running.
salt = config.get('password', '<PASSWORD>')
block_count_difference_threshold = int(config.get('monitoring', 'block_count_difference_threshold'))
# Enable logging
# All INFO+ records go to the configured log file.
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                    level=logging.INFO, filename=log_file)
logger = logging.getLogger(__name__)
# Public block-explorer / faucet endpoints used in user-facing links and
# in the admin block-count cross-check.
account_url = 'https://raiblockscommunity.net/account/index.php?acc='
hash_url = 'https://raiblockscommunity.net/block/index.php?h='
faucet_url = 'https://faucet.raiblockscommunity.net/form.php'
summary_url = 'https://raiblockscommunity.net/page/summary.php?json=1'
# MySQL requests
from common_mysql import *
# QR code handler
from common_qr import *
# Request to node
from common_rpc import *
# Common functions
# NOTE(review): these wildcard imports supply many names used below
# (text_reply, mrai_text, message_markdown, rpc, unlock, time, ...).
from common import *
# Unlock the node wallet once at startup so sends work without prompting.
unlock(wallet, wallet_password)
# Restrict access to admins only
from functools import wraps
def restricted(func):
    """Decorator: only let users listed in admin_list run the handler."""
    @wraps(func)
    def wrapped(bot, update, *args, **kwargs):
        user_id = None
        # An update can arrive as a message, inline query, chosen inline
        # result or callback query; probe the candidates in that order.
        for source_name in ('message', 'inline_query',
                            'chosen_inline_result', 'callback_query'):
            source = getattr(update, source_name, None)
            if source is None:
                continue
            try:
                user_id = source.from_user.id
            except (NameError, AttributeError):
                continue
            break
        if user_id is None:
            print("No user_id available in update.")
            return
        if user_id not in admin_list:
            print("Unauthorized access denied for {0}.".format(user_id))
            return
        return func(bot, update, *args, **kwargs)
    return wrapped
# Define a few command handlers. These usually take the two arguments bot and
# update. Error handlers also receive the raised TelegramError object in error.
# Translation table: language id -> text id -> localized string.
with open('language.json') as lang_file:
    language = json.load(lang_file)
def lang(user_id, text_id):
    """Return *text_id* localized for the user's stored language (English fallback)."""
    user_lang = mysql_select_language(user_id)
    return lang_text(text_id, user_lang)
def lang_text(text_id, lang_id):
    """Return the translation of *text_id* for *lang_id*, defaulting to English."""
    translations = language.get(lang_id, {})
    if text_id in translations:
        return translations[text_id]
    # Unknown language or missing key: fall back to the English table.
    return language['en'][text_id]
@run_async
def custom_keyboard(bot, chat_id, buttons, text):
    """Send *text* (markdown) with a custom reply keyboard, retrying on errors.

    BadRequest (usually broken markdown) is retried with unsafe characters
    replaced; RetryAfter waits out the Telegram flood limit; any other error
    is retried once after a short pause.
    """
    reply_markup = ReplyKeyboardMarkup(buttons, resize_keyboard = True)
    try:
        bot.sendMessage(chat_id=chat_id,
                        text=text,
                        parse_mode=ParseMode.MARKDOWN,
                        disable_web_page_preview=True,
                        reply_markup=reply_markup)
    except BadRequest:
        bot.sendMessage(chat_id=chat_id,
                        text=replace_unsafe(text),
                        parse_mode=ParseMode.MARKDOWN,
                        disable_web_page_preview=True,
                        reply_markup=reply_markup)
    except RetryAfter:
        sleep(240)
        bot.sendMessage(chat_id=chat_id,
                        text=text,
                        parse_mode=ParseMode.MARKDOWN,
                        disable_web_page_preview=True,
                        reply_markup=reply_markup)
    except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
        sleep(1)
        bot.sendMessage(chat_id=chat_id,
                        text=text,
                        parse_mode=ParseMode.MARKDOWN,
                        disable_web_page_preview=True,
                        reply_markup=reply_markup)
@run_async
def default_keyboard(bot, chat_id, text):
    """Send *text* together with the user's localized main menu keyboard."""
    menu_buttons = lang(chat_id, 'menu')
    custom_keyboard(bot, chat_id, menu_buttons, text)
@run_async
def lang_keyboard(lang_id, bot, chat_id, text):
    """Like default_keyboard, but for an explicitly given language id."""
    menu_buttons = lang_text('menu', lang_id)
    custom_keyboard(bot, chat_id, menu_buttons, text)
@run_async
def hide_keyboard(bot, chat_id, text):
    """Send *text* while removing any custom reply keyboard; retry once on failure."""
    reply_markup = ReplyKeyboardRemove()
    try:
        bot.sendMessage(chat_id=chat_id, text=text, reply_markup=reply_markup)
    except Exception:  # narrowed from a bare except; retry once after a pause
        sleep(1)
        bot.sendMessage(chat_id=chat_id, text=text, reply_markup=reply_markup)
@run_async
def typing_illusion(bot, chat_id):
    """Show the 'typing...' chat action so long operations feel responsive."""
    try:
        bot.sendChatAction(chat_id=chat_id, action=ChatAction.TYPING) # typing illusion
    except Exception:  # narrowed from a bare except; retry once after a pause
        sleep(1)
        bot.sendChatAction(chat_id=chat_id, action=ChatAction.TYPING) # typing illusion
@run_async
def ddos_protection(bot, update, callback):
    """Flood-protection gate: run *callback* only if the request passes.

    mysql_ddos_protector returns True for a duplicate/replayed message_id,
    False for a too-fast request, and anything else when the request is OK
    (the == comparisons are kept to preserve that tri-state contract).
    """
    user_id = update.message.from_user.id
    message_id = int(update.message.message_id)
    ddos = mysql_ddos_protector(user_id, message_id)
    if (ddos == True):
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('DDoS or double message by user {0} message {1}'.format(user_id, message_id))
    elif (ddos == False):
        text_reply(update, lang(user_id, 'ddos_error').format(ddos_protect_seconds))
        logging.warning('Too fast request by user {0}'.format(user_id))
    else:
        callback(bot, update)
@run_async
def ddos_protection_args(bot, update, args, callback):
    """Same flood-protection gate as ddos_protection, for handlers taking args."""
    user_id = update.message.from_user.id
    message_id = int(update.message.message_id)
    ddos = mysql_ddos_protector(user_id, message_id)
    if (ddos == True):
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('DDoS or double message by user {0} message {1}'.format(user_id, message_id))
    elif (ddos == False):
        text_reply(update, lang(user_id, 'ddos_error').format(ddos_protect_seconds))
        logging.warning('Too fast request by user {0}'.format(user_id))
    else:
        callback(bot, update, args)
@run_async
def info_log(update):
    """Log the sender metadata and text of an incoming message at INFO level."""
    message = update.message
    # NOTE(review): `time` is not imported explicitly in this file; it
    # presumably arrives via one of the wildcard imports above -- confirm.
    record = {
        'text': message.text,
        'user_id': message.from_user.id,
        'username': message.from_user.username,
        'first_name': message.from_user.first_name,
        'last_name': message.from_user.last_name,
        'timestamp': int(time.mktime(message.date.timetuple())),
        'message_id': message.message_id,
    }
    logging.info(record)
@run_async
def language_select(bot, update, args):
    """/language command entry point: log, then run behind flood protection."""
    info_log(update)
    ddos_protection_args(bot, update, args, language_select_callback)
@run_async
def language_select_callback(bot, update, args):
    """Handle /language <id>: validate the id, persist it, re-send the intro."""
    user_id = update.message.from_user.id
    if (len(args) > 0):
        lang_id = args[0].lower()
        if (lang_id in language['common']['language_list']):
            try:
                mysql_set_language(user_id, lang_id)
                start_text(bot, update)
            except Exception:  # narrowed from a bare except
                text_reply(update, lang(user_id, 'language_error'))
        else:
            text_reply(update, lang(user_id, 'language_error'))
            logging.info('Language change failed for user {0}'.format(user_id))
    else:
        # No argument given: explain the command syntax.
        text_reply(update, lang(user_id, 'language_command'))
@run_async
def start(bot, update):
    """/start command entry point: log, then run behind flood protection."""
    info_log(update)
    ddos_protection(bot, update, start_text)
@run_async
def start_text(bot, update):
    """Send the /start introduction, choosing a language for new users."""
    user_id = update.message.from_user.id
    chat_id = update.message.chat_id
    lang_id = mysql_exist_language(user_id)
    if (lang_id is False):
        # No stored preference yet: try the Telegram client's language code.
        try:
            lang_id = update.message.from_user.language_code
            if (lang_id in language['common']['language_list']):
                mysql_set_language(user_id, lang_id)
            else:
                lang_id = mysql_select_language(user_id)
        except Exception:
            lang_id = mysql_select_language(user_id)
    text_reply(update, lang_text('start_introduce', lang_id))
    sleep(1)
    basic_commands = lang_text('start_basic_commands', lang_id).format(
        mrai_text(fee_amount), mrai_text(min_send), incoming_fee_text)
    lang_keyboard(lang_id, bot, chat_id, basic_commands)
    sleep(1)
    message_markdown(bot, chat_id, lang_text('start_learn_more', lang_id))
    # First-time users additionally get the language selection keyboard.
    if (mysql_user_existance(user_id) is False):
        sleep(1)
        custom_keyboard(bot, chat_id,
                        lang_text('language_keyboard', 'common'),
                        lang_text('language_selection', 'common'))
@run_async
def help(bot, update):
    """/help command entry point (name intentionally mirrors the bot command)."""
    info_log(update)
    ddos_protection(bot, update, help_callback)
@run_async
def help_callback(bot, update):
    """Send the advanced-usage help followed by a 'learn more' message."""
    user_id = update.message.from_user.id
    chat_id = update.message.chat_id
    lang_id = mysql_select_language(user_id)
    advanced_usage = lang_text('help_advanced_usage', lang_id).format(
        mrai_text(fee_amount), mrai_text(min_send), incoming_fee_text)
    lang_keyboard(lang_id, bot, chat_id, advanced_usage)
    sleep(1)
    message_markdown(bot, chat_id, lang_text('help_learn_more', lang_id))
@run_async
def help_text(bot, update):
    """Send the basic-commands help followed by a 'learn more' message."""
    user_id = update.message.from_user.id
    chat_id = update.message.chat_id
    lang_id = mysql_select_language(user_id)
    basic_commands = lang_text('start_basic_commands', lang_id).format(
        mrai_text(fee_amount), mrai_text(min_send), incoming_fee_text)
    lang_keyboard(lang_id, bot, chat_id, basic_commands)
    sleep(1)
    message_markdown(bot, chat_id, lang_text('help_learn_more', lang_id))
def user_id(bot, update):
    """Reply with the sender's numeric Telegram user id."""
    text_reply(update, update.message.from_user.id)
@run_async
def block_count(bot, update):
    """/block_count command entry point: log, then run behind flood protection."""
    info_log(update)
    ddos_protection(bot, update, block_count_callback)
@run_async
def block_count_callback(bot, update):
    """Reply with the local node's block count.

    For admins, additionally cross-check against community explorers and,
    if the local count deviates by more than block_count_difference_threshold,
    report the alternative counts one by one.
    """
    user_id = update.message.from_user.id
    count = rpc({"action": "block_count"}, 'count')
    text_reply(update, "{:,}".format(int(count)))
    # default_keyboard(bot, update.message.chat_id, r)
    # Admin block count check from raiblockscommunity.net
    if (user_id in admin_list):
        http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',ca_certs=certifi.where())
        response = http.request('GET', summary_url)
        json_data = json.loads(response.data)
        community_count = json_data['blocks']
        # Only spam the details when the local node deviates noticeably.
        if (math.fabs(int(community_count) - int(count)) > block_count_difference_threshold):
            text_reply(update, 'Community: {0}'.format("{:,}".format(int(community_count))))
            reference_count = int(reference_block_count())
            sleep(1)
            text_reply(update, 'Reference: {0}'.format("{:,}".format(reference_count)))
            response = http.request('GET', 'https://raiwallet.info/api/block_count.php')
            raiwallet_count = int(response.data)
            sleep(1)
            text_reply(update, 'raiwallet.info: {0}'.format("{:,}".format(raiwallet_count)))
# broadcast
@restricted
def broadcast(bot, update):
    """Admin command: push the message text to every funded account's owner.

    Each user id is blacklisted just before sending and removed right after a
    successful push; if the push raises (e.g. the user deleted the chat) the
    id stays blacklisted so it cannot break future broadcasts.
    """
    info_log(update)
    bot = Bot(api_key)
    # list users from MySQL
    accounts_list = mysql_select_accounts_balances()
    # some users are bugged & stop broadcast - they deleted chat with bot. So we blacklist them
    BLACK_LIST = mysql_select_blacklist()
    for account in accounts_list:
        # if not in blacklist and has balance
        if ((account[0] not in BLACK_LIST) and (int(account[1]) > 0)):
            mysql_set_blacklist(account[0])
            print(account[0])
            push_simple(bot, account[0], update.message.text.replace('/broadcast ', ''))
            sleep(0.2)
            mysql_delete_blacklist(account[0]) # if someone deleted chat, broadcast will fail and he will remain in blacklist
# bootstrap
@restricted
def bootstrap(bot, update):
    """Admin command: trigger a node bootstrap and acknowledge in chat."""
    info_log(update)
    bootstrap_multi()
    bot.sendMessage(update.message.chat_id, "Bootstraping...")
@restricted
def restart(bot, update):
    """Admin command: announce, pause briefly, then re-exec the bot process."""
    bot.sendMessage(update.message.chat_id, "Bot is restarting...")
    sleep(0.2)
    # Replace the current process image; never returns on success.
    os.execl(sys.executable, sys.executable, *sys.argv)
#@restricted
@run_async
def account(bot, update):
    """/account command: show the user's account overview (flood-protected)."""
    info_log(update)
    ddos_protection(bot, update, account_text)
@run_async
def account_list(bot, update):
    """Command entry point for the full account list (flood-protected)."""
    info_log(update)
    ddos_protection(bot, update, account_text_list)
@run_async
def account_text_list(bot, update):
    """Render the account overview in full-list mode."""
    account_text(bot, update, True)
@run_async
def accounts_hide(bot, update):
    """Toggle-hide command entry point (flood-protected)."""
    info_log(update)
    ddos_protection(bot, update, accounts_hide_callback)
@run_async
def accounts_hide_callback(bot, update):
    """Flip the 'hide extra accounts' flag, then re-render the overview."""
    user_id = update.message.from_user.id
    hide = mysql_select_hide(user_id)
    # Hiding only makes sense when the user actually owns extra accounts.
    if (hide == 0) and (len(mysql_select_user_extra(user_id)) > 0):
        mysql_set_hide(user_id, 1)
    else:
        mysql_set_hide(user_id, 0)
    account_text(bot, update)
@run_async
def account_text(bot, update, list = False):
    """Render the user's account overview, creating an account on first use.

    With list=True every extra account is printed; otherwise only the first
    three are shown, subject to the user's hide flag.  An unknown user (no DB
    row) raises TypeError on m[2], which is caught below to run the
    registration path.  NOTE(review): the parameter name `list` shadows the
    builtin but is kept for caller compatibility; the .encode("utf8") calls
    rely on Python 2 str/unicode semantics.
    """
    user_id = update.message.from_user.id
    chat_id = update.message.chat_id
    lang_id = mysql_select_language(user_id)
    username=update.message.from_user.username
    if (username is None):
        username = ''
    #print(username)
    m = mysql_select_user(user_id)
    try:
        # m is None/short for unregistered users -> TypeError -> creation path.
        r = m[2]
        qr_by_account(r)
        balance = account_balance(r)
        total_balance = balance
        # FEELESS
        if ((user_id in LIST_OF_FEELESS) or (mysql_select_send_time(user_id) is not False)):
            final_fee_amount = 0
        else:
            final_fee_amount = fee_amount
        # FEELESS
        max_send = balance - final_fee_amount
        extra_accounts = mysql_select_user_extra(user_id)
        extra_array = []
        for extra_account in extra_accounts:
            extra_array.append(extra_account[3])
        if (len(extra_accounts) > 0):
            balances = accounts_balances(extra_array)
        hide = mysql_select_hide(user_id)
        num = 0
        for extra_account in extra_accounts:
            num = num + 1
            total_balance = total_balance + balances[extra_account[3]]
        # price
        # Volume-weighted average over two sources when the first reports volume.
        price = mysql_select_price()
        if (int(price[0][0]) > 0):
            last_price = ((float(price[0][0]) * float(price[0][6])) + (float(price[1][0]) * float(price[1][6]))) / (float(price[0][6]) + float(price[1][6]))
        else:
            last_price = int(price[1][0])
        btc_price = last_price / (10 ** 14)
        btc_balance = ('%.8f' % (btc_price * total_balance))
        # price
        if (list is not False):
            # Full-list mode: total, then every account one message at a time.
            text = 'Total: *{0} XRB (Mrai)*\n~ {1} BTC\n/{3}\n{4}'.format(mrai_text(total_balance), btc_balance, '', lang_text('account_add', lang_id).encode("utf8").replace("_", "\_"), lang_text('send_all', lang_id).encode("utf8"))
            message_markdown(bot, chat_id, text)
            sleep(1)
            message_markdown(bot, chat_id, '*0.* {0} XRB (Mrai)'.format(mrai_text(balance)))
            sleep(1)
            message_markdown(bot, chat_id, '*{0}*'.format(r))
            sleep(1)
            for extra_account in extra_accounts:
                message_markdown(bot, chat_id, '*{0}.* {1} XRB (Mrai) /{2} {0}'.format(extra_account[2], mrai_text(balances[extra_account[3]]), lang_text('send_from_command', lang_id).encode("utf8").replace("_", "\_")))
                sleep(1)
                text_reply(update, extra_account[3])
                sleep(1)
        else:
            # Summary mode: pick the message matching the balance situation.
            if ((balance == 0) and (list is False)):
                text = lang_text('account_balance_zero', lang_id).format(faucet_url, r)
            elif ((max_send < min_send) and (list is False)):
                text = lang_text('account_balance_low', lang_id).format(faucet_url, r, mrai_text(balance), mrai_text(final_fee_amount), mrai_text(min_send))
            else:
                if (balance == total_balance):
                    text = lang_text('account_balance', lang_id).format(mrai_text(balance), btc_balance, mrai_text(max_send))
                else:
                    text = lang_text('account_balance_total', lang_id).format(mrai_text(balance), btc_balance, mrai_text(max_send), mrai_text(total_balance))
            text = '{0}\n\n{1}'.format(text.encode("utf8"), lang_text('account_your', lang_id).encode("utf8"))
            message_markdown(bot, chat_id, text)
            sleep(1)
            message_markdown(bot, chat_id, '*{0}*'.format(r))
            sleep(1)
            if ((num > 3) and (hide == 0)):
                message_markdown(bot, chat_id, lang_text('account_history', lang_id).encode("utf8").format(r, account_url, faucet_url, '').replace(lang_text('account_add', lang_id).encode("utf8").replace("_", "\_"), lang_text('account_list', lang_id).encode("utf8").replace("_", "\_"))) # full accounts list
            elif (hide == 1):
                message_markdown(bot, chat_id, lang_text('account_history', lang_id).encode("utf8").format(r, account_url, faucet_url, '').replace(lang_text('account_add', lang_id).encode("utf8").replace("_", "\_"), lang_text('account_list', lang_id).encode("utf8").replace("_", "\_")).replace(lang_text('accounts_hide', lang_id).encode("utf8").replace("_", "\_"), lang_text('accounts_expand', lang_id).encode("utf8").replace("_", "\_"))) # hide-expand
            else:
                message_markdown(bot, chat_id, lang_text('account_history', lang_id).encode("utf8").format(r, account_url, faucet_url, ''))
            sleep(1)
            # list
            if (hide == 0):
                n = 0
                for extra_account in extra_accounts:
                    n = n + 1
                    if (n <= 3):
                        message_markdown(bot, chat_id, '*{0}.* {1} XRB (Mrai) /{2} {0}'.format(extra_account[2], mrai_text(balances[extra_account[3]]), lang_text('send_from_command', lang_id).encode("utf8").replace("_", "\_")))
                        sleep(1)
                        text_reply(update, extra_account[3])
                        sleep(1)
            # list
        #bot.sendPhoto(chat_id=update.message.chat_id, photo=open('{1}{0}.png'.format(r, qr_folder_path), 'rb'), caption=r)
        # QR code of the primary account, with escalating retry pauses.
        try:
            bot.sendPhoto(chat_id=update.message.chat_id, photo=open('{1}{0}.png'.format(r, qr_folder_path), 'rb'))
        except (urllib3.exceptions.ProtocolError) as e:
            sleep(3)
            bot.sendPhoto(chat_id=update.message.chat_id, photo=open('{1}{0}.png'.format(r, qr_folder_path), 'rb'))
        except TimedOut as e:
            sleep(10)
            bot.sendPhoto(chat_id=update.message.chat_id, photo=open('{1}{0}.png'.format(r, qr_folder_path), 'rb'))
        except NetworkError as e:
            sleep(20)
            bot.sendPhoto(chat_id=update.message.chat_id, photo=open('{1}{0}.png'.format(r, qr_folder_path), 'rb'))
        # Remind users without a password-protected seed to export it.
        seed = mysql_select_seed(user_id)
        check = mysql_check_password(user_id)
        if ((seed is False) and (check is False)):
            sleep(1)
            seed_callback(bot, update, [0])
        elif (check is not False):
            sleep(1)
            text_reply(update, lang_text('seed_protected', lang_id))
    except (TypeError):
        # Unregistered user: create a wallet account and register them.
        r = rpc({"action": "account_create", "wallet": wallet}, 'account')
        qr_by_account(r)
        if ('xrb_' in r): # check for errors
            insert_data = {
                'user_id': user_id,
                'account': r,
                'chat_id': chat_id,
                'username': username,
            }
            mysql_insert(insert_data)
            text_reply(update, lang_text('account_created', lang_id))
            sleep(1)
            message_markdown(bot, chat_id, '*{0}*'.format(r))
            sleep(1)
            message_markdown(bot, chat_id, lang_text('account_explorer', lang_id).format(r, account_url))
            sleep(1)
            message_markdown(bot, chat_id, lang_text('account_balance_start', lang_id).format(faucet_url, r))
            sleep(1)
            custom_keyboard(bot, chat_id, lang_text('language_keyboard', 'common'), lang_text('language_selection', 'common'))
            # Best-effort welcome payment; failures are only logged.
            try:
                welcome = rpc_send(wallet, welcome_account, r, raw_welcome_amount)
                new_balance = account_balance(welcome_account)
                mysql_update_balance(welcome_account, new_balance)
                mysql_update_frontier(welcome_account, welcome)
            except Exception as e:
                logging.exception("message")
            logging.info('New user registered {0} {1}'.format(user_id, r))
            sleep(2)
            seed_callback(bot, update, [0])
        else:
            text_reply(update, lang_text('account_error', lang_id))
#@restricted
@run_async
def account_add(bot, update):
    """Command entry point for creating an extra account (flood-protected)."""
    info_log(update)
    ddos_protection(bot, update, account_add_callback)
def account_add_callback(bot, update):
    """Create an additional account for the user, bounded by extra_limit."""
    user_id = update.message.from_user.id
    chat_id = update.message.chat_id
    lang_id = mysql_select_language(user_id)
    extra_accounts = mysql_select_user_extra(user_id)
    if (len(extra_accounts) >= extra_limit):
        text_reply(update, lang_text('account_extra_limit', lang_id).format(extra_limit))
    else:
        r = rpc({"action": "account_create", "wallet": wallet}, 'account')
        # Reuse the list fetched above instead of querying MySQL a second time.
        extra_id = len(extra_accounts) + 1
        if ('xrb_' in r): # check for errors
            insert_data = {
                'user_id': user_id,
                'account': r,
                'extra_id': extra_id,
            }
            mysql_insert_extra(insert_data)
            text_reply(update, lang_text('account_created', lang_id))
            sleep(1)
            message_markdown(bot, chat_id, '[{0}]({1}{0})'.format(r, account_url))
            logging.info('New account registered {0} {1}'.format(user_id, r))
        else:
            text_reply(update, lang_text('account_error', lang_id))
@run_async
def send(bot, update, args):
    """/send command entry point (flood-protected)."""
    info_log(update)
    ddos_protection_args(bot, update, args, send_callback)
@run_async
def send_from(bot, update, args):
    """/send_from command entry point (flood-protected)."""
    info_log(update)
    ddos_protection_args(bot, update, args, send_from_callback)
@run_async
def send_from_callback(bot, update, args):
    """Handle /send_from <id|account|default> ...: resolve the source account,
    verify the caller owns it, then delegate to send_callback.

    Fix: 'default' is now recognized before any int() conversion; previously
    `int(args[0]) == 0` was evaluated first, so int('default') raised
    ValueError and the documented 'default' keyword never worked.
    """
    user_id = update.message.from_user.id
    if (len(args) > 0):
        if ('xrb_' in args[0]):
            from_account = mysql_select_by_account_extra(args[0])
        elif (args[0] == 'default'):
            # 'default' explicitly selects the primary account; don't parse it.
            from_account = False
        else:
            try:
                extra_id = int(args[0].replace('.',''))
                from_account = mysql_select_by_id_extra(user_id, extra_id)
            except (ValueError, ProgrammingError) as e:
                from_account = False
                text_reply(update, lang(user_id, 'value_error'))
        try:
            if (from_account is not False):
                if (int(user_id) == int(from_account[0])):
                    args = args[1:]
                    send_callback(bot, update, args, from_account)
                else:
                    # Ownership check failed: the account belongs to someone else.
                    text_reply(update, lang(user_id, 'send_from_id_error').format(args[0]))
                    logging.warning('User {0} trying to steal funds from {1}'.format(user_id, args[0]))
            elif ((args[0] == 'default') or (int(args[0]) == 0)):
                # Primary (default) account selected.
                args = args[1:]
                send_callback(bot, update, args)
            else:
                text_reply(update, lang(user_id, 'send_from_id_error').format(args[0]))
        except ValueError as e:
            text_reply(update, lang(user_id, 'value_error'))
    else:
        # No arguments: show usage help with the user's default account.
        m = mysql_select_user(user_id)
        chat_id = update.message.chat_id
        lang_id = mysql_select_language(user_id)
        lang_keyboard(lang_id, bot, chat_id, lang_text('send_wrong_command', lang_id).format(mrai_text(min_send), m[2]))
# Instant receiving
@run_async
def receive(destination, send_hash):
destination_local = mysql_select_by_account(destination)
if (destination_local is False):
destination_local = mysql_select_by_account_extra(destination)
if (destination_local is not False):
receive = | |
<reponame>mariusleu/openstack-nannies
#!/usr/bin/env python
#
# Copyright (c) 2018 SAP SE
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import click
import logging
import os
import six
import time
import sys
from openstack import connection, exceptions, utils
from keystoneauth1 import loading
from keystoneauth1 import session
from cinderclient import client
# prometheus export functionality
from prometheus_client import start_http_server, Gauge
log = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
# cmdline handling
# NOTE(review): decorating the class itself with @click.command makes click
# call Cleanup(**parsed_options) as the command callback, i.e. instantiating
# the class runs the nanny (see __init__ calling run_me()).
@click.command()
# every how many minutes the check should be performed
@click.option('--interval', prompt='Interval in minutes')
# how often a vm should be continously a canditate for some action (delete etc.) before
# we actually do it - the idea behind is that we want to avoid actions due to short
# temporary technical problems of any kind
@click.option('--iterations', prompt='Iterations')
# work on nova db (vms) or cinder db (volumes)?
@click.option('--nova', is_flag=True)
@click.option('--cinder', is_flag=True)
# dry run mode - only say what we would do without actually doing it
@click.option('--dry-run', is_flag=True)
# port to use for prometheus exporter, otherwise we use 9456 as default
@click.option('--port')
class Cleanup:
    def __init__(self, interval, iterations, nova, cinder, dry_run, port):
        """Store cmdline options, create prometheus gauges, start the exporter
        http server and immediately enter the endless cleanup loop (run_me)."""
        self.interval = interval
        self.iterations = iterations
        self.novacmdline = nova
        self.cindercmdline = cinder
        self.dry_run = dry_run
        self.port = port
        # a dict of all projects we have in openstack
        self.projects = dict()
        # dicts for the ids we have seen and the ones we want to do something with
        self.seen_dict = dict()
        self.to_be_dict = dict()
        # list of servers, snapshots and volumes we have seen or plan to delete
        self.servers_seen = dict()
        self.servers_to_be_deleted = dict()
        self.snapshots_seen = dict()
        self.snapshots_to_be_deleted = dict()
        self.volumes_seen = dict()
        self.volumes_to_be_deleted = dict()
        # define the state to verbal name mapping
        self.state_to_name_map = dict()
        self.state_to_name_map["delete_server"] = "delete of server"
        self.state_to_name_map["delete_volume"] = "delete of volume"
        self.state_to_name_map["delete_snapshot"] = "delete of snapshot"
        # (kind, action) -> count, exported to prometheus each loop run
        self.gauge_value = dict()
        if self.novacmdline:
            which_service = "nova"
            self.gauge_delete_server = Gauge(which_service + '_nanny_delete_server',
                                             'server deletes of the ' + which_service + ' nanny', ['kind'])
        if self.cindercmdline:
            which_service = "cinder"
            self.gauge_delete_volume = Gauge(which_service + '_nanny_delete_volume',
                                             'volume deletes of the ' + which_service + ' nanny', ['kind'])
            self.gauge_delete_snapshot = Gauge(which_service + '_nanny_delete_snapshot',
                                               'snapshot deletes of the ' + which_service + ' nanny', ['kind'])
        # Start http server for exported data
        if port:
            prometheus_exporter_port = self.port
        else:
            prometheus_exporter_port = 9456
        try:
            start_http_server(prometheus_exporter_port)
        except Exception as e:
            logging.error("failed to start prometheus exporter http server: " + str(e))
        # hand over control to the endless main loop
        self.run_me()
    def connection_buildup(self):
        """(Re)connect to openstack and refresh the project id -> name map.

        On connection failure only a warning is logged and self.projects stays
        empty, which makes run_me skip the cleanup for this iteration.
        """
        # a dict of all projects we have in openstack
        self.projects = dict()
        # openstack connection
        try:
            # NOTE(review): 'password=<PASSWORD>(...)' looks like a scrubbed
            # os.getenv call in the published source -- restore before running.
            self.conn = connection.Connection(auth_url=os.getenv('OS_AUTH_URL'),
                                              project_name=os.getenv('OS_PROJECT_NAME'),
                                              project_domain_name=os.getenv('OS_PROJECT_DOMAIN_NAME'),
                                              username=os.getenv('OS_USERNAME'),
                                              user_domain_name=os.getenv('OS_USER_DOMAIN_NAME'),
                                              password=<PASSWORD>('OS_PASSWORD'),
                                              identity_api_version="3")
        except Exception as e:
            log.warn("- PLEASE CHECK MANUALLY - problems connecting to openstack: %s - retrying in next loop run",
                     str(e))
        else:
            # get all openstack projects
            # no exception handling is done here as it would complicate things and we just
            # successfully created the connection, so that chance is low to fail
            for project in self.conn.identity.projects():
                self.projects[project.id] = project.name
            if not self.projects:
                raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any keystone projects back from the keystone api - this should in theory never happen ...')
            if self.cindercmdline:
                # cinder client session reusing the auth from the openstacksdk connection session
                # this is needed to set the state of volumes and snapshots, which is not yet implemented in the openstacksdk
                auth = self.conn.session.auth
                sess = session.Session(auth=auth)
                self.cinder = client.Client("2.0", session=sess)
def init_seen_dict(self):
for i in self.seen_dict:
self.seen_dict[i] = 0
# reset dict of all vms or volumes we plan to delete from the db
def reset_to_be_dict(self):
for i in self.seen_dict:
if self.seen_dict[i] == 0:
self.to_be_dict[i] = 0
def run_me(self):
if self.novacmdline or self.cindercmdline:
while True:
self.connection_buildup()
if len(self.projects) > 0:
self.os_cleanup_items()
self.send_to_prometheus_exporter()
self.wait_a_moment()
else:
log.info("either the --nova or the --cinder flag should be given - giving up!")
sys.exit(0)
    # main cleanup function
    def os_cleanup_items(self):
        """One cleanup pass: flag servers/snapshots/volumes with orphaned projects.

        Servers are always fetched (even in cinder mode) because the volume
        pass needs them to resolve attachments.  Any API error aborts the
        whole pass; it will be retried on the next loop run.
        """
        # reset all gauge counters
        for kind in ["plan", "dry_run", "done"]:
            if self.novacmdline:
                self.gauge_value[(kind, "delete_server")] = 0
            if self.cindercmdline:
                self.gauge_value[(kind, "delete_volume")] = 0
                self.gauge_value[(kind, "delete_snapshot")] = 0
        # get all instances from nova sorted by their id
        try:
            self.servers = sorted(self.conn.compute.servers(details=True, all_projects=1), key=lambda x: x.id)
            if not self.servers:
                raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any nova instances back from the nova api - this should in theory never happen ...')
        except exceptions.HttpException as e:
            log.warn("- PLEASE CHECK MANUALLY - got an http exception: %s - retrying in next loop run", str(e))
            return
        except exceptions.SDKException as e:
            log.warn("- PLEASE CHECK MANUALLY - got an sdk exception: %s - retrying in next loop run", str(e))
            return
        if self.novacmdline:
            self.seen_dict = self.servers_seen
            self.to_be_dict = self.servers_to_be_deleted
            self.entity = self.servers
            self.check_for_project_id("server")
        if self.cindercmdline:
            # get all snapshots from cinder sorted by their id - do the snapshots before the volumes,
            # as they are created from them and thus should be deleted first
            try:
                self.snapshots = sorted(self.conn.block_store.snapshots(details=True, all_projects=1), key=lambda x: x.id)
                if not self.snapshots:
                    raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any cinder snapshots back from the cinder api - this should in theory never happen ...')
            except exceptions.HttpException as e:
                log.warn("- PLEASE CHECK MANUALLY - got an http exception: %s - retrying in next loop run", str(e))
                return
            except exceptions.SDKException as e:
                log.warn("- PLEASE CHECK MANUALLY - got an sdk exception: %s - retrying in next loop run", str(e))
                return
            self.snapshot_from = dict()
            # build a dict to check which volume a snapshot was created from quickly
            for i in self.snapshots:
                self.snapshot_from[i.id] = i.volume_id
            self.seen_dict = self.snapshots_seen
            self.to_be_dict = self.snapshots_to_be_deleted
            self.entity = self.snapshots
            self.check_for_project_id("snapshot")
            self.is_server = dict()
            self.attached_to = dict()
            self.volume_project_id = dict()
            # get all volumes from cinder sorted by their id
            try:
                self.volumes = sorted(self.conn.block_store.volumes(details=True, all_projects=1), key=lambda x: x.id)
                if not self.volumes:
                    raise RuntimeError('- PLEASE CHECK MANUALLY - did not get any cinder volumes back from the cinder api - this should in theory never happen ...')
            except exceptions.HttpException as e:
                log.warn("- PLEASE CHECK MANUALLY - got an http exception: %s - retrying in next loop run", str(e))
                return
            except exceptions.SDKException as e:
                log.warn("- PLEASE CHECK MANUALLY - got an sdk exception: %s - retrying in next loop run", str(e))
                return
            # build a dict to check later if a server exists quickly
            for i in self.servers:
                self.is_server[i.id] = i.id
            # build a dict to check which server a volume is possibly attached to quickly
            for i in self.volumes:
                self.volume_project_id[i.id] = i.project_id
                # only record attachments where we have any
                try:
                    self.attached_to[i.attachments[0]["id"]] = i.attachments[0]["server_id"]
                except IndexError:
                    pass
            self.seen_dict = self.volumes_seen
            self.to_be_dict = self.volumes_to_be_deleted
            self.entity = self.volumes
            self.check_for_project_id("volume")
def wait_a_moment(self):
    """Sleep for the configured interval (minutes) between cleanup loop runs."""
    log.info("waiting %s minutes before starting the next loop run", str(self.interval))
    time.sleep(int(self.interval) * 60)
def check_for_project_id(self, type):
    """Schedule every entity whose project id no longer exists for deletion.

    Walks self.entity (volumes or snapshots, selected by the caller) and
    hands orphans to now_or_later(); entities with a live project are kept.
    """
    self.init_seen_dict()
    for element in self.entity:
        if self.projects.get(element.project_id):
            # project still exists - keep the element
            log.debug("%s %s has a valid project id: %s", type, str(element.id), str(element.project_id))
        else:
            # orphaned element - queue it for (eventual) deletion
            log.debug("%s %s has no valid project id!", type, str(element.id))
            self.now_or_later(element.id, "delete_" + type)
    # reset the dict of instances we plan to delete from the db for all
    # machines we did not see or which disappeared
    self.reset_to_be_dict()
# here we decide to wait longer before doing the delete from the db or finally doing it
def now_or_later(self, id, what_to_do):
default = 0
self.seen_dict[id] = 1
# if we did not see this more often than iteration times, do or dry-run print what to do - otherwise do not print anything, so that dry-run mode looks like real mode
if self.to_be_dict.get(id, default) <= int(self.iterations):
# we have seen it iteration times, time | |
import pbge
from game.content.plotutility import LMSkillsSelfIntro
from game.content import backstory
from pbge.plots import Plot
from pbge.dialogue import Offer, ContextTag
from game.ghdialogue import context
import gears
import game.content.gharchitecture
import game.content.ghterrain
import random
from game import memobrowser
Memo = memobrowser.Memo
# *******************
# *** UTILITIES ***
# *******************
def get_hire_cost(camp, npc):
    """Return the cost of hiring npc, scaled by renown and the PC's reaction score."""
    reaction_penalty = 200 - npc.get_reaction_score(camp.pc, camp)
    return (npc.renown ** 2 * reaction_penalty) // 10
# **************************
# *** RANDOM_LANCEMATE ***
# **************************
class UtterlyRandomLancemate(Plot):
    """Recruitment plot: a completely random combatant placed in a public scene."""
    LABEL = "RANDOM_LANCEMATE"

    def custom_init(self, nart):
        # Rank is the lower of two rolls, biasing recruits toward the weak end.
        new_npc = gears.selector.random_character(
            rank=min(random.randint(10, 50), random.randint(10, 50)),
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=tuple(self.elements["METROSCENE"].attributes),
            combatant=True)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        # NPCs with few noncombat skills have a chance at a small bonus skill.
        known_skills = [sk for sk in gears.stats.NONCOMBAT_SKILLS if sk in new_npc.statline]
        if random.randint(-12, 3) > len(known_skills):
            new_npc.statline[random.choice(gears.stats.NONCOMBAT_SKILLS)] += random.randint(1, 4)
        self.register_element("NPC", new_npc, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class UtterlyGenericLancemate(Plot):
    """Recruitment plot: a random pilot drawn from the generic combat jobs."""
    LABEL = "RANDOM_LANCEMATE"
    JOBS = ("Mecha Pilot", "Arena Pilot", "Recon Pilot", "Mercenary", "Bounty Hunter")

    def custom_init(self, nart):
        new_npc = gears.selector.random_character(
            rank=min(random.randint(10, 50), random.randint(10, 50)),
            job=gears.jobs.ALL_JOBS[random.choice(self.JOBS)],
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=tuple(self.elements["METROSCENE"].attributes),
            combatant=True)
        # One pilot in twenty gets a small noncombat skill boost.
        if random.randint(1, 20) == 1:
            new_npc.statline[random.choice(gears.stats.NONCOMBAT_SKILLS)] += random.randint(1, 4)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        self.register_element("NPC", new_npc, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class GiftedNewbieLancemate(Plot):
    """Recruitment plot: a young recruit with amazing stats and amazingly crap skills."""
    LABEL = "RANDOM_LANCEMATE"
    JOBS = ("Mecha Pilot", "Arena Pilot", "Citizen", "Explorer", "Factory Worker")
    UNIQUE = True

    def custom_init(self, nart):
        # High stat budget (100-110) but a very low rank, i.e. raw talent.
        newbie = gears.selector.random_character(
            statline=gears.base.Being.random_stats(random.randint(100, 110)),
            rank=random.randint(5, 15),
            job=gears.jobs.ALL_JOBS[random.choice(self.JOBS)],
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=tuple(self.elements["METROSCENE"].attributes),
            combatant=True, birth_year=nart.camp.year - random.randint(18, 23))
        # Rarely, grant a small noncombat skill bump.
        if random.randint(1, 10) == 1:
            newbie.statline[random.choice(gears.stats.NONCOMBAT_SKILLS)] += random.randint(1, 4)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        self.register_element("NPC", newbie, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class OlderMentorLancemate(Plot):
    """Recruitment plot: a veteran pilot decades older than a fresh cavalier."""
    LABEL = "RANDOM_LANCEMATE"
    UNIQUE = True

    def custom_init(self, nart):
        mentor = gears.selector.random_character(
            rank=random.randint(41, 85),
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=tuple(self.elements["METROSCENE"].attributes),
            combatant=True, birth_year=nart.camp.year - random.randint(32, 50))
        # Mentors always carry at least one extra noncombat skill.
        mentor.statline[random.choice(gears.stats.NONCOMBAT_SKILLS)] += random.randint(1, 4)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        self.register_element("NPC", mentor, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class DeadzonerInGreenZoneLancemate(Plot):
    """Recruitment plot: a dead zone native looking for work in a green zone town."""
    LABEL = "RANDOM_LANCEMATE"
    JOBS = ("Mercenary", "Bandit", "Scavenger", "Aristo", "Tekno", "Sheriff")
    UNIQUE = True

    @classmethod
    def matches(cls, pstate):
        """Returns True if this plot matches the current plot state."""
        # Only fires in green zone metro areas.
        return gears.personality.GreenZone in pstate.elements["METROSCENE"].attributes

    def custom_init(self, nart):
        # Note: local_tags are DeadZone, not the metro's own tags.
        drifter = gears.selector.random_character(
            rank=min(random.randint(20, 55), random.randint(20, 55)),
            job=gears.jobs.ALL_JOBS[random.choice(self.JOBS)],
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=(gears.personality.DeadZone,),
            combatant=True)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        self.register_element("NPC", drifter, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class GladiatorLancemate(Plot):
    """Recruitment plot: a dead zone gladiator, possibly cyberized."""
    LABEL = "RANDOM_LANCEMATE"
    UNIQUE = True

    @classmethod
    def matches(cls, pstate):
        """Returns True if this plot matches the current plot state."""
        # Only fires in dead zone metro areas.
        return gears.personality.DeadZone in pstate.elements["METROSCENE"].attributes

    def custom_init(self, nart):
        gladiator = gears.selector.random_character(
            rank=min(random.randint(25, 65), random.randint(25, 65)),
            can_cyberize=True,
            job=gears.jobs.ALL_JOBS["Gladiator"],
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=(gears.personality.DeadZone,),
            combatant=True)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        self.register_element("NPC", gladiator, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate: gears.GearHeadScene):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class MutantLancemate(Plot):
    """Recruitment plot: a pilot carrying a random mutation."""
    LABEL = "RANDOM_LANCEMATE"
    UNIQUE = True

    @classmethod
    def matches(cls, pstate):
        """Returns True if this plot matches the current plot state."""
        # Fires in either green zone or dead zone metro areas.
        zones = {gears.personality.GreenZone, gears.personality.DeadZone}
        return zones.intersection(pstate.elements["METROSCENE"].attributes)

    def custom_init(self, nart):
        mutant = gears.selector.random_character(
            rank=random.randint(20, 45),
            mecha_colors=gears.color.random_mecha_colors(),
            local_tags=tuple(self.elements["METROSCENE"].attributes),
            combatant=True)
        self.seek_element(nart, "LOCALE", self._is_best_scene,
                          scope=self.elements["METROSCENE"])
        # Apply one random mutation and record it in the NPC's personality.
        mutation = random.choice(gears.personality.MUTATIONS)
        mutation.apply(mutant)
        mutant.personality.add(mutation)
        # NPCs with few noncombat skills have a chance at a small bonus skill.
        known_skills = [sk for sk in gears.stats.NONCOMBAT_SKILLS if sk in mutant.statline]
        if random.randint(-12, 3) > len(known_skills):
            mutant.statline[random.choice(gears.stats.NONCOMBAT_SKILLS)] += random.randint(1, 4)
        self.register_element("NPC", mutant, dident="LOCALE")
        self.add_sub_plot(nart, "RLM_Relationship")
        return True

    def _is_best_scene(self, nart, candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate, pbge.scenes.Scene) and gears.tags.SCENE_PUBLIC in candidate.attributes
class FormerLancemateReturns(Plot):
    """A former lancemate from the player's campaign egg shows up in town.

    custom_init returns the npc (truthy) on success or None when no suitable
    character exists, in which case the plot fails to initialize.
    """
    LABEL = "RANDOM_LANCEMATE"
    active = True
    scope = "METRO"

    def custom_init(self, nart):
        # Pull a character tagged as a past lancemate out of the campaign egg.
        npc: gears.base.Character = nart.camp.egg.seek_dramatis_person(nart.camp, self._is_good_npc, self)
        if npc:
            scene = self.seek_element(nart, "LOCALE", self._is_best_scene, scope=self.elements["METROSCENE"])
            self.register_element("NPC", npc, dident="LOCALE")
            #print(npc,scene)
            # Backstory keyed on the NPC's tags drives the reunion dialogue below.
            self.bs = backstory.Backstory(("LONGTIMENOSEE",),keywords=[t.name.upper() for t in npc.get_tags()])
        return npc

    def _is_good_npc(self,nart,candidate):
        # Must be a character who previously had the lancemate relationship tag.
        return isinstance(candidate, gears.base.Character) and candidate.relationship and gears.relationships.RT_LANCEMATE in candidate.relationship.tags

    def _is_best_scene(self,nart,candidate):
        # Any public scene in the metro area is acceptable.
        return isinstance(candidate,gears.GearHeadScene) and gears.tags.SCENE_PUBLIC in candidate.attributes

    def _get_dialogue_grammar(self, npc, camp):
        """Backstory grammar for the returning NPC; gossip for everyone else."""
        mygram = dict()
        if npc is self.elements["NPC"]:
            for k in self.bs.results.keys():
                mygram[k] = [self.bs.get_one(k),]
        else:
            mygram["[News]"] = ["{NPC} has been hanging out at {LOCALE}".format(**self.elements), ]
        return mygram

    def NPC_offers(self, camp):
        # A single personal-info offer; hearing it ends the plot.
        mylist = list()
        mylist.append(Offer("[INFO_PERSONAL]",
                            context=ContextTag([context.PERSONAL]),
                            no_repeats=True, effect=self.end_plot))
        return mylist

    def t_START(self, camp):
        # If the NPC rejoined the party, this reunion plot is no longer needed.
        if self.elements["NPC"] in camp.party:
            self.end_plot(camp)
# **************************
# *** RLM_Relationship ***
# **************************
# Elements:
# NPC: The NPC who needs a personality
# METROSCENE: The city or whatever that the NPC calls home
#
# These subplots contain a personality for a random (potential) lancemate.
# Also include a means for the lancemate to gain the "RT_LANCEMATE" tag.
class RLM_Beginner(Plot):
    """Lancemate personality: an eager junior who admires cavaliers.

    Elements:
        NPC: The NPC who needs a personality.
        METROSCENE: The city or whatever that the NPC calls home.
    """
    LABEL = "RLM_Relationship"
    active = True
    scope = True
    UNIQUE = True

    @classmethod
    def matches( self, pstate ):
        """Returns True if this plot matches the current plot state."""
        # Only low-renown (rookie) NPCs qualify.
        return pstate.elements["NPC"].renown < 25

    def custom_init(self, nart):
        npc = self.elements["NPC"]
        # Junior attitude: this NPC looks up to the PC.
        npc.relationship = gears.relationships.Relationship(attitude=gears.relationships.A_JUNIOR)
        # This character gets fewer mecha points.
        npc.relationship.data["mecha_level_bonus"] = -10
        self._got_rumor = False
        return True

    def NPC_offers(self, camp):
        """Dialogue offers; a JOIN offer appears while the lance has room."""
        mylist = list()
        npc = self.elements["NPC"]
        if gears.relationships.RT_LANCEMATE not in npc.relationship.tags:
            if camp.can_add_lancemate():
                mylist.append(Offer("I can't believe you asked me... [LETSGO]",
                                    context=ContextTag((context.JOIN,)),
                                    effect=self._join_lance
                                    ))
            mylist.append(Offer(
                "[HELLO] Some day I want to become a cavalier like you.", context=ContextTag((context.HELLO,))
            ))
            mylist.append(LMSkillsSelfIntro(npc))
        return mylist

    def _get_dialogue_grammar(self, npc, camp):
        """Give other townsfolk some news about this NPC."""
        mygram = dict()
        if camp.scene.get_root_scene() is self.elements["METROSCENE"] and npc is not self.elements["NPC"]:
            # This is an NPC in Wujung. Give them some news.
            mygram["[News]"] = ["{} has dreams of someday becoming a cavalier".format(self.elements["NPC"]), ]
        return mygram

    def _join_lance(self, camp):
        # Tag the NPC as a lancemate and add them to the party.
        npc = self.elements["NPC"]
        npc.relationship.tags.add(gears.relationships.RT_LANCEMATE)
        effect = game.content.plotutility.AutoJoiner(npc)
        effect(camp)
        self.end_plot(camp)

    def _get_generic_offers(self, npc, camp):
        """Get any offers that could apply to non-element NPCs."""
        goffs = list()
        if camp.scene.get_root_scene() is self.elements["METROSCENE"] and npc is not self.elements["NPC"] and not self._got_rumor:
            mynpc = self.elements["NPC"]
            goffs.append(Offer(
                msg="As far as I know {} usually hangs out at {}.".format(mynpc,mynpc.get_scene()),
                context=ContextTag((context.INFO,)), effect=self._get_rumor,
                subject=str(mynpc), data={"subject": str(mynpc)}, no_repeats=True
            ))
        return goffs

    def _get_rumor(self,camp):
        # Hearing the rumor records a memo pointing at the NPC's hangout.
        mynpc = self.elements["NPC"]
        self._got_rumor = True
        self.memo = Memo( "{} dreams of becoming a cavalier.".format(mynpc)
                        , mynpc.get_scene()
                        )
class RLM_Friendly(Plot):
    """Lancemate personality: a friendly pilot who joins if the PC is liked.

    Elements:
        NPC: The NPC who needs a personality.
        METROSCENE: The city or whatever that the NPC calls home.
    """
    LABEL = "RLM_Relationship"
    active = True
    scope = True
    UNIQUE = True

    def custom_init(self, nart):
        npc = self.elements["NPC"]
        npc.relationship = gears.relationships.Relationship(attitude=gears.relationships.A_FRIENDLY)
        self._got_rumor = False
        return True

    def NPC_offers(self, camp):
        """Dialogue offers; joining requires room AND a positive reaction score."""
        mylist = list()
        npc = self.elements["NPC"]
        if gears.relationships.RT_LANCEMATE not in npc.relationship.tags:
            if camp.can_add_lancemate() and npc.get_reaction_score(camp.pc, camp) > 0:
                mylist.append(Offer("[THANKS_FOR_CHOOSING_ME] [LETSGO]",
                                    context=ContextTag((context.JOIN,)),
                                    effect=self._join_lance
                                    ))
            mylist.append(Offer(
                "[HELLO] [WAITINGFORMISSION]", context=ContextTag((context.HELLO,))
            ))
            mylist.append(LMSkillsSelfIntro(npc))
        return mylist

    def _join_lance(self, camp):
        # Tag the NPC as a lancemate and add them to the party.
        npc = self.elements["NPC"]
        npc.relationship.tags.add(gears.relationships.RT_LANCEMATE)
        effect = game.content.plotutility.AutoJoiner(npc)
        effect(camp)
        self.end_plot(camp)

    def _get_dialogue_grammar(self, npc, camp):
        """Give other townsfolk some news about this NPC (until the rumor is heard)."""
        mygram = dict()
        if camp.scene.get_root_scene() is self.elements["METROSCENE"] and npc is not self.elements["NPC"] and not self._got_rumor:
            # This is an NPC in Wujung. Give them some news.
            mygram["[News]"] = ["{} is looking for a lance to join".format(self.elements["NPC"]), ]
        return mygram

    def _get_generic_offers(self, npc, camp):
        """Get any offers that could apply to non-element NPCs."""
        goffs = list()
        if camp.scene.get_root_scene() is self.elements["METROSCENE"] and npc is not self.elements["NPC"] and not self._got_rumor:
            mynpc = self.elements["NPC"]
            goffs.append(Offer(
                msg="You can usually find {} at {}, if you're planning to invite {} to join your lance.".format(mynpc,mynpc.get_scene(),mynpc.gender.object_pronoun),
                context=ContextTag((context.INFO,)), effect=self._get_rumor,
                subject=str(mynpc), data={"subject": str(mynpc)}, no_repeats=True
            ))
        return goffs

    def _get_rumor(self,camp):
        # Hearing the rumor records a memo pointing at the NPC's hangout.
        mynpc = self.elements["NPC"]
        self._got_rumor = True
        self.memo = Memo( "{} is looking for a lance to join.".format(mynpc)
                        , mynpc.get_scene()
                        )
class RLM_Medic(Plot):
    """Lancemate personality: an idealistic medic eager to get back into the field.

    Elements:
        NPC: The NPC who needs a personality (must hold a Medic-tagged job).
        METROSCENE: The city or whatever that the NPC calls home.
    """
    LABEL = "RLM_Relationship"
    active = True
    scope = True
    UNIQUE = True
    # One of these virtues may be added to the NPC's personality on init.
    VIRTUES = (gears.personality.Peace,gears.personality.Fellowship)

    @classmethod
    def matches( self, pstate ):
        """Returns True if this plot matches the current plot state."""
        return pstate.elements["NPC"].job and gears.tags.Medic in pstate.elements["NPC"].job.tags

    def custom_init(self, nart):
        npc = self.elements["NPC"]
        npc.relationship = gears.relationships.Relationship(expectation=gears.relationships.E_GREATERGOOD)
        new_virtue = random.choice(self.VIRTUES)
        if new_virtue not in npc.personality:
            npc.personality.add(new_virtue)
        return True

    def NPC_offers(self, camp):
        """JOIN offer when the lance has room; otherwise the medic defers joining."""
        mylist = list()
        npc = self.elements["NPC"]
        if gears.relationships.RT_LANCEMATE not in npc.relationship.tags:
            if camp.can_add_lancemate():
                mylist.append(Offer("[THANKS_FOR_CHOOSING_ME] [LETSGO]",
                                    context=ContextTag((context.JOIN,)),
                                    effect=self._join_lance
                                    ))
            else:
                mylist.append(Offer("You've got a full crew right now, but if you ever find yourself in need of a qualified medic come back and find me.",
                                    context=ContextTag((context.JOIN,)),
                                    effect=self._defer_join
                                    ))
            mylist.append(Offer(
                "[HELLO] Lately I've been spending too much time here, when I'd rather be out in the danger zone saving lives.", context=ContextTag((context.HELLO,))
            ))
            mylist.append(LMSkillsSelfIntro(npc))
        return mylist

    def _get_dialogue_grammar(self, npc, camp):
        """Give other townsfolk some news about this NPC."""
        mygram = dict()
        if camp.scene.get_root_scene() is self.elements["METROSCENE"] and npc is not self.elements["NPC"]:
            # This is an NPC in Wujung. Give them some news.
            mygram["[News]"] = ["{} wants to leave {} so {} can make a positive difference in the world".format(self.elements["NPC"],self.elements["NPC"].get_scene(),self.elements["NPC"].gender.subject_pronoun), ]
        return mygram

    def _join_lance(self, camp):
        # Tag the NPC as a lancemate and add them to the party now.
        npc = self.elements["NPC"]
        npc.relationship.tags.add(gears.relationships.RT_LANCEMATE)
        effect = game.content.plotutility.AutoJoiner(npc)
        effect(camp)
        self.end_plot(camp)

    def _defer_join(self, camp):
        # NOTE(review): grants the lancemate tag without joining the party now,
        # presumably so the medic can be recruited later — confirm intended.
        npc = self.elements["NPC"]
        npc.relationship.tags.add(gears.relationships.RT_LANCEMATE)
        self.end_plot(camp)
class RLM_Mercenary(Plot):
LABEL = "RLM_Relationship"
active = True
scope = True
UNIQUE = True
@classmethod
def matches( self, pstate ):
"""Returns True if this plot matches the current plot state."""
return pstate.elements["NPC"].job and {gears.tags.Adventurer,gears.tags.Military}.intersection(pstate.elements["NPC"].job.tags)
def custom_init(self, nart):
npc = self.elements["NPC"]
| |
# <gh_stars>10-100  (dataset artifact; not valid Python — kept as a comment)
# coding: utf-8
# Copyright (c) Max-Planck-Institut für Eisenforschung GmbH - Computational Materials Design (CM) Department
# Distributed under the terms of "New BSD License", see the LICENSE file.
import unittest
import numpy as np
import pandas as pd
import os
import re
from pyiron_base import Project, ProjectHDFio
from pyiron_atomistics.atomistics.structure.atoms import Atoms
from pyiron_atomistics.lammps.lammps import Lammps
from pyiron_atomistics.lammps.base import LammpsStructure, UnfoldingPrism
from pyiron_atomistics.lammps.units import LAMMPS_UNIT_CONVERSIONS, UnitConverter
import ase.units as units
class TestLammps(unittest.TestCase):
@classmethod
def setUpClass(cls):
    """Create the shared test project and all Lammps job fixtures."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    cls.project = Project(os.path.join(cls.execution_path, "lammps"))

    def make_job(job_name, file_name="lammps"):
        # Helper: one Lammps job inside the shared test project; each job
        # gets its own ProjectHDFio handle, exactly as before.
        return Lammps(
            project=ProjectHDFio(project=cls.project, file_name=file_name),
            job_name=job_name,
        )

    cls.job = make_job("lammps")
    cls.job_water = make_job("lammps_water", file_name="lammps_water")
    cls.job_water_dump = make_job("lammps_water_dump", file_name="lammps_water_dump")
    cls.job_dump = make_job("lammps_dump_static", file_name="lammps_dump_static")
    cls.job_vcsgc_input = make_job("lammps_vcsgc_input", file_name="lammps_vcsgc_input")
    cls.minimize_job = make_job("minimize_lammps")
    cls.minimize_control_job = make_job("minimize_control_lammps")
    cls.md_job = make_job("md_lammps")
    cls.md_control_job = make_job("md_control_lammps")
    cls.job_read_restart = make_job("read_restart")
    cls.job_average = make_job("average")
    cls.job_fail = make_job("fail")
@classmethod
def tearDownClass(cls):
    """Remove every job and the project directory created by the tests."""
    cls.execution_path = os.path.dirname(os.path.abspath(__file__))
    test_project = Project(os.path.join(cls.execution_path, "lammps"))
    test_project.remove_jobs_silently(recursive=True)
    test_project.remove(enable=True)
def test_selective_dynamics(self):
    """Constraint groups are generated only for partially frozen atoms."""
    atoms = Atoms("Fe8", positions=np.zeros((8, 3)), cell=np.eye(3))
    atoms.add_tag(selective_dynamics=[True, True, True])
    self.job.structure = atoms
    self.job._set_selective_dynamics()
    # Fully mobile atoms -> no constraint group in the control file.
    self.assertFalse("group" in self.job.input.control._dataset["Parameter"])
    atoms.add_tag(selective_dynamics=None)
    # One atom per constraint combination (atom 0 keeps full mobility).
    per_atom_flags = {
        1: [True, True, False],
        2: [True, False, True],
        3: [False, True, True],
        4: [False, True, False],
        5: [False, False, True],
        6: [True, False, False],
        7: [False, False, False],
    }
    for atom_index, flags in per_atom_flags.items():
        atoms.selective_dynamics[atom_index] = flags
    self.job.structure = atoms
    self.job._set_selective_dynamics()
    # Every single-, double-, and triple-axis constraint group must exist.
    for axes in ("x", "y", "z", "xy", "yz", "xz", "xyz"):
        self.assertTrue(
            "group___constraint" + axes in self.job.input.control._dataset["Parameter"]
        )
def test_structure_atomic(self):
    """LammpsStructure writes a minimal atomic-style data file for one Fe atom."""
    atoms = Atoms("Fe1", positions=np.zeros((1, 3)), cell=np.eye(3))
    lmp_structure = LammpsStructure()
    lmp_structure._el_eam_lst = ["Fe"]
    lmp_structure.structure = atoms
    # The generated dataset must match the expected LAMMPS data file line by line.
    self.assertEqual(
        lmp_structure._dataset["Value"],
        [
            "Start File for LAMMPS",
            "1 atoms",
            "1 atom types",
            "",
            "0. 1.000000000000000 xlo xhi",
            "0. 1.000000000000000 ylo yhi",
            "0. 1.000000000000000 zlo zhi",
            "",
            "Masses",
            "",
            "1 55.845000",
            "",
            "Atoms",
            "",
            "1 1 0.000000000000000 0.000000000000000 0.000000000000000",
            "",
        ],
    )
def test_structure_charge(self):
    """Charge-style data file includes the per-atom charge column (here 2.0)."""
    atoms = Atoms("Fe1", positions=np.zeros((1, 3)), cell=np.eye(3))
    atoms.add_tag(charge=2.0)
    lmp_structure = LammpsStructure()
    lmp_structure.atom_type = "charge"
    lmp_structure._el_eam_lst = ["Fe"]
    lmp_structure.structure = atoms
    # Same layout as the atomic test, but the Atoms line carries the charge.
    self.assertEqual(
        lmp_structure._dataset["Value"],
        [
            "Start File for LAMMPS",
            "1 atoms",
            "1 atom types",
            "",
            "0. 1.000000000000000 xlo xhi",
            "0. 1.000000000000000 ylo yhi",
            "0. 1.000000000000000 zlo zhi",
            "",
            "Masses",
            "",
            "1 55.845000",
            "",
            "Atoms",
            "",
            "1 1 2.000000 0.000000000000000 0.000000000000000 0.000000000000000",
            "",
        ],
    )
def test_avilable_versions(self):
    """Executable version is parsed from the wrapper script file name.

    NOTE(review): the method name misspells "available"; kept unchanged so
    test discovery and any references to the test name stay stable.
    """
    # Both the serial and the MPI wrapper must yield the same version triple.
    for script_name in ("run_lammps_2018.03.16.sh", "run_lammps_2018.03.16_mpi.sh"):
        self.job.executable = os.path.abspath(
            os.path.join(
                self.execution_path,
                "..",
                "static",
                "lammps",
                "bin",
                script_name,
            )
        )
        self.assertTrue([2018, 3, 16] == self.job._get_executable_version_number())
def test_lammps_water(self):
    """Collect a pre-computed water MD run and check the parsed generic output.

    Fix: the ``nodes`` list previously contained ``"positions"`` twice; the
    duplicate entry was redundant for the membership check and is removed.
    """
    density = 1.0e-24  # g/A^3
    n_mols = 27
    mol_mass_water = 18.015  # g/mol
    # Determining the supercell size
    mass = mol_mass_water * n_mols / units.mol  # g
    vol_h2o = mass / density  # in A^3
    a = vol_h2o ** (1.0 / 3.0)  # A
    # Constructing the unitcell
    n = int(round(n_mols ** (1.0 / 3.0)))
    dx = 0.7
    r_O = [0, 0, 0]
    r_H1 = [dx, dx, 0]
    r_H2 = [-dx, dx, 0]
    unit_cell = (a / n) * np.eye(3)
    water = Atoms(
        elements=["H", "H", "O"], positions=[r_H1, r_H2, r_O], cell=unit_cell, pbc=True)
    water.set_repeat([n, n, n])
    self.job_water.structure = water
    # Assigning this potential is expected to emit a UserWarning.
    with self.assertWarns(UserWarning):
        self.job_water.potential = "H2O_tip3p"
    # Invalid MD parameter combinations must be rejected with ValueError.
    with self.assertRaises(ValueError):
        self.job_water.calc_md(temperature=350, seed=0)
    with self.assertRaises(ValueError):
        self.job_water.calc_md(temperature=[0, 100])
    with self.assertRaises(ValueError):
        self.job_water.calc_md(pressure=0)
    with self.assertRaises(ValueError):
        self.job_water.calc_md(temperature=[0, 100, 200])
    self.job_water.calc_md(
        temperature=350,
        initial_temperature=350,
        time_step=1,
        n_ionic_steps=1000,
        n_print=200,
    )
    # Reuse pre-computed output files instead of actually running LAMMPS.
    file_directory = os.path.join(
        self.execution_path, "..", "static", "lammps_test_files"
    )
    self.job_water.restart_file_list.append(
        os.path.join(file_directory, "dump.out")
    )
    self.job_water.restart_file_list.append(
        os.path.join(file_directory, "log.lammps")
    )
    self.job_water.run(run_mode="manual")
    self.job_water.status.collect = True
    self.job_water.run()
    # All of these nodes must be present in output/generic after collection.
    nodes = [
        "positions",
        "temperature",
        "energy_tot",
        "energy_pot",
        "steps",
        "forces",
        "cells",
        "pressures",
        "unwrapped_positions",
    ]
    with self.job_water.project_hdf5.open("output/generic") as h_gen:
        hdf_nodes = h_gen.list_nodes()
        self.assertTrue(all([node in hdf_nodes for node in nodes]))
    # 6 snapshots (1000 steps / 200 print interval + initial), 81 atoms.
    self.assertTrue(
        np.array_equal(self.job_water["output/generic/positions"].shape, (6, 81, 3))
    )
    self.assertTrue(
        np.array_equal(
            self.job_water["output/generic/positions"].shape,
            self.job_water["output/generic/forces"].shape,
        )
    )
    self.assertEqual(len(self.job_water["output/generic/steps"]), 6)
def test_dump_parser_water(self):
    """End-to-end parse of a reference water MD run plus trajectory/neighbor checks.

    Fixes:
    - The final assertion used ``assertTrue(self.job_water_dump.units, "real")``,
      which passes for ANY truthy ``units`` because the second argument is the
      msg parameter; replaced with ``assertEqual``.
    - ``np.alltrue`` (removed in NumPy 2.0) replaced by ``np.all``.
    """
    density = 1.0e-24  # g/A^3
    n_mols = 27
    mol_mass_water = 18.015  # g/mol
    # Determining the supercell size
    mass = mol_mass_water * n_mols / units.mol  # g
    vol_h2o = mass / density  # in A^3
    a = vol_h2o ** (1.0 / 3.0)  # A
    # Constructing the unitcell
    n = int(round(n_mols ** (1.0 / 3.0)))
    dx = 0.7
    r_O = [0, 0, 0]
    r_H1 = [dx, dx, 0]
    r_H2 = [-dx, dx, 0]
    unit_cell = (a / n) * np.eye(3)
    # Small off-diagonal tilt so the non-orthogonal cell code path is exercised.
    unit_cell[0][1] += 0.01
    water = Atoms(
        elements=["H", "H", "O"], positions=[r_H1, r_H2, r_O], cell=unit_cell, pbc=True)
    water.set_repeat([n, n, n])
    self.job_water_dump.structure = water
    # Assigning this potential is expected to emit a UserWarning.
    with self.assertWarns(UserWarning):
        self.job_water_dump.potential = "H2O_tip3p"
    self.job_water_dump.calc_md(
        temperature=350,
        initial_temperature=350,
        time_step=1,
        n_ionic_steps=1000,
        n_print=200,
        pressure=0,
    )
    # The generated ensemble fix must not contain NaN entries.
    self.assertFalse('nan' in self.job_water_dump.input.control['fix___ensemble'])
    # Reuse pre-computed output files instead of actually running LAMMPS.
    file_directory = os.path.join(
        self.execution_path, "..", "static", "lammps_test_files"
    )
    self.job_water_dump.restart_file_list.append(
        os.path.join(file_directory, "log.lammps")
    )
    self.job_water_dump.restart_file_list.append(
        os.path.join(file_directory, "dump.out")
    )
    self.job_water_dump.run(run_mode="manual")
    self.job_water_dump.status.collect = True
    self.job_water_dump.run()
    # Reference arrays shipped with the test data.
    positions = np.loadtxt(os.path.join(file_directory, "positions_water.dat"))
    positions = positions.reshape(len(positions), -1, 3)
    forces = np.loadtxt(os.path.join(file_directory, "forces_water.dat"))
    forces = forces.reshape(len(forces), -1, 3)
    self.assertTrue(
        np.allclose(
            self.job_water_dump["output/generic/unwrapped_positions"], positions
        )
    )
    # Parsed quantities must be converted from LAMMPS units to pyiron units.
    uc = UnitConverter(self.job_water_dump.input.control["units"])
    self.assertTrue(
        np.allclose(self.job_water_dump["output/generic/forces"], uc.convert_array_to_pyiron_units(forces,
                                                                                                   "forces"))
    )
    self.assertEqual(self.job_water_dump["output/generic/energy_tot"][-1], -5906.46836142123 *
                     uc.lammps_to_pyiron("energy"))
    self.assertEqual(self.job_water_dump["output/generic/energy_pot"][-1], -5982.82004785158 *
                     uc.lammps_to_pyiron("energy"))
    self.assertAlmostEqual(self.job_water_dump["output/generic/pressures"][-2][0, 0], 515832.570508186 /
                           uc.pyiron_to_lammps("pressure"), 2)
    # Trajectory export in various slicing / overwrite combinations.
    self.job_water_dump.write_traj(filename="test.xyz",
                                   file_format="xyz")
    atom_indices = self.job_water_dump.structure.select_index("H")
    snap_indices = [1, 3, 4]
    orig_pos = self.job_water_dump.output.positions
    self.job_water_dump.write_traj(filename="test.xyz",
                                   file_format="xyz",
                                   atom_indices=atom_indices,
                                   snapshot_indices=snap_indices)
    self.job_water_dump.write_traj(filename="test.xyz",
                                   file_format="xyz",
                                   atom_indices=atom_indices,
                                   snapshot_indices=snap_indices,
                                   overwrite_positions=np.zeros_like(orig_pos))
    # Length-mismatched overwrite_positions must raise.
    self.assertRaises(ValueError, self.job_water_dump.write_traj, filename="test.xyz",
                      file_format="xyz",
                      atom_indices=atom_indices,
                      snapshot_indices=snap_indices,
                      overwrite_positions=np.zeros_like(orig_pos)[:-1])
    self.job_water_dump.write_traj(filename="test.xyz",
                                   file_format="xyz",
                                   atom_indices=atom_indices,
                                   snapshot_indices=snap_indices,
                                   overwrite_positions=np.zeros_like(orig_pos),
                                   overwrite_cells=self.job_water_dump.trajectory()._cells)
    self.job_water_dump.write_traj(filename="test.xyz",
                                   file_format="xyz",
                                   atom_indices=atom_indices,
                                   snapshot_indices=snap_indices,
                                   overwrite_positions=np.zeros_like(orig_pos)[:-1],
                                   overwrite_cells=self.job_water_dump.trajectory()._cells[:-1])
    # Cells/positions length mismatch must raise.
    self.assertRaises(ValueError, self.job_water_dump.write_traj, filename="test.xyz",
                      file_format="xyz",
                      atom_indices=atom_indices,
                      snapshot_indices=snap_indices,
                      overwrite_positions=np.zeros_like(orig_pos),
                      overwrite_cells=self.job_water_dump.trajectory()._cells[:-1])
    os.remove("test.xyz")
    # Trajectory slicing must agree with direct numpy indexing.
    self.assertTrue(np.array_equal(self.job_water_dump.trajectory()._positions,
                                   orig_pos))
    self.assertTrue(np.array_equal(self.job_water_dump.trajectory(stride=2)._positions,
                                   orig_pos[::2]))
    self.assertTrue(np.array_equal(
        self.job_water_dump.trajectory(atom_indices=atom_indices,
                                       snapshot_indices=snap_indices)._positions,
        orig_pos[snap_indices][:, atom_indices, :]))
    nx, ny, nz = orig_pos.shape
    random_array = np.random.rand(nx, ny, nz)
    random_cell = np.random.rand(nx, 3, 3)
    self.assertTrue(np.array_equal(
        self.job_water_dump.trajectory(atom_indices=atom_indices,
                                       snapshot_indices=snap_indices,
                                       overwrite_positions=random_array)._positions,
        random_array[snap_indices][:, atom_indices, :]))
    self.assertTrue(np.array_equal(
        self.job_water_dump.trajectory(atom_indices=atom_indices,
                                       snapshot_indices=snap_indices,
                                       overwrite_positions=random_array,
                                       overwrite_cells=random_cell)._cells,
        random_cell[snap_indices]))
    self.assertIsInstance(self.job_water_dump.get_structure(-1), Atoms)
    # Test for clusters
    with self.job_water_dump.project_hdf5.open("output/generic") as h_out:
        h_out["cells"] = None
    self.assertTrue(np.array_equal(
        self.job_water_dump.trajectory(atom_indices=atom_indices,
                                       snapshot_indices=snap_indices)._positions,
        orig_pos[snap_indices][:, atom_indices, :]))
    with self.job_water_dump.project_hdf5.open("output/generic") as h_out:
        h_out["cells"] = np.repeat([np.array(water.cell)], len(h_out["positions"]), axis=0)
    self.assertTrue(np.array_equal(
        self.job_water_dump.trajectory(atom_indices=atom_indices,
                                       snapshot_indices=snap_indices)._positions,
        orig_pos[snap_indices][:, atom_indices, :]))
    # Neighbor analysis over the trajectory.
    neigh_traj_obj = self.job_water_dump.get_neighbors()
    self.assertTrue(np.allclose(np.linalg.norm(neigh_traj_obj.vecs, axis=-1),
                                neigh_traj_obj.distances))
    h_indices = self.job_water_dump.structure.select_index("H")
    o_indices = self.job_water_dump.structure.select_index("O")
    # O-H bond lengths stay within a physically sensible window.
    self.assertLessEqual(neigh_traj_obj.distances[:, o_indices, :2].max(), 1.2)
    self.assertGreaterEqual(neigh_traj_obj.distances[:, o_indices, :2].min(), 0.8)
    # The two nearest neighbors of every O must be H atoms.
    self.assertTrue(np.all([np.in1d(np.unique(ind_mat.flatten()), h_indices) for ind_mat in
                            neigh_traj_obj.indices[:, o_indices, :2]]))
    neigh_traj_obj_snaps = self.job_water_dump.get_neighbors_snapshots(snapshot_indices=[2, 3, 4])
    self.assertTrue(np.allclose(neigh_traj_obj.vecs[2:], neigh_traj_obj_snaps.vecs))
    # Round-trip the neighbor object through HDF5.
    neigh_traj_obj.to_hdf(self.job_water_dump.project_hdf5)
    neigh_traj_obj_loaded = self.job_water_dump["neighbors_traj"].to_object()
    # self.assertEqual(neigh_traj_obj._init_structure, neigh_traj_obj_loaded._init_structure)
    self.assertEqual(neigh_traj_obj._num_neighbors, neigh_traj_obj_loaded._num_neighbors)
    self.assertTrue(np.allclose(neigh_traj_obj.indices, neigh_traj_obj_loaded.indices))
    self.assertTrue(np.allclose(neigh_traj_obj.distances, neigh_traj_obj_loaded.distances))
    self.assertTrue(np.allclose(neigh_traj_obj.vecs, neigh_traj_obj_loaded.vecs))
    # Fixed: was assertTrue(x, msg), which never fails for truthy x.
    self.assertEqual(self.job_water_dump.units, "real")
def test_dump_parser(self):
    """Parsing a static dump file yields arrays of the expected shapes."""
    structure = Atoms(
        elements=2 * ["Fe"],
        cell=2.78 * np.eye(3),
        positions=2.78 * np.outer(np.arange(2), np.ones(3)) * 0.5,
    )
    self.job_dump.structure = structure
    self.job_dump.potential = self.job_dump.list_potentials()[0]
    file_directory = os.path.join(
        self.execution_path, "..", "static", "lammps_test_files"
    )
    self.job_dump.collect_dump_file(cwd=file_directory, file_name="dump_static.out")
    # One snapshot, two atoms: check every generic output node's shape.
    expected_shapes = {
        "forces": (1, 2, 3),
        "positions": (1, 2, 3),
        "cells": (1, 3, 3),
        "indices": (1, 2),
    }
    for node, shape in expected_shapes.items():
        self.assertTrue(
            np.array_equal(self.job_dump["output/generic/" + node].shape, shape)
        )
def test_vcsgc_input(self):
unit_cell = Atoms(
elements=['Al', 'Al', 'Al', 'Mg'],
positions=[
[0., 0., 0.],
[0., 2., 2.],
[2., 0., 2.],
[2., 2., 0.]
],
cell=4 * np.eye(3)
)
self.job_vcsgc_input.structure = unit_cell
self.job_vcsgc_input.potential = self.job_vcsgc_input.list_potentials()[0]
symbols = self.job_vcsgc_input.input.potential.get_element_lst()
bad_element = {s: 0. for s in symbols}
bad_element.update({'X': 1.}) # Non-existant chemical symbol
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, mu=bad_element, temperature_mc=300.
)
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, target_concentration=bad_element, temperature_mc=300.
)
bad_conc = {s: 0. for s in symbols}
bad_conc['Al'] = 0.99
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, target_concentration=bad_conc, temperature_mc=300.
)
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, window_moves=-1, temperature_mc=300.
)
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, window_moves=1.1, temperature_mc=300.
)
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, window_size=0.3, temperature_mc=300.
)
mu = {s: 0. for s in symbols}
mu[symbols[0]] = 1.
self.assertRaises(
ValueError, self.job_vcsgc_input.calc_vcsgc, mu=mu, temperature_mc=None, temperature=None
)
args = dict(
mu=mu,
target_concentration=None,
kappa=1000.0,
mc_step_interval=100,
swap_fraction=0.1,
temperature_mc=None,
window_size=None,
window_moves=None,
seed=1,
temperature=300.0,
)
input_string = 'all sgcmc {0} {1} {2} {3} randseed {4}'.format(
args['mc_step_interval'],
args['swap_fraction'],
args['temperature'],
' '.join([str(args['mu'][symbol] - args['mu'][symbols[0]]) for symbol in symbols[1:]]),
args['seed']
)
self.job_vcsgc_input.calc_vcsgc(**args)
self.assertEqual(self.job_vcsgc_input.input.control['fix___vcsgc'], input_string)
args['temperature_mc'] = 100.
input_string = 'all sgcmc {0} {1} {2} {3} randseed {4}'.format(
args['mc_step_interval'],
args['swap_fraction'],
args['temperature_mc'],
' '.join([str(args['mu'][symbol] - args['mu'][symbols[0]]) for symbol in symbols[1:]]),
args['seed']
)
self.job_vcsgc_input.calc_vcsgc(**args)
self.assertEqual(self.job_vcsgc_input.input.control['fix___vcsgc'], input_string)
conc = {s: 0. for s in symbols}
conc[symbols[0]] = 0.5
conc[symbols[-1]] = 0.5
args['target_concentration'] = conc
input_string += ' variance {0} {1}'.format(
args['kappa'],
' '.join([str(conc[symbol]) for symbol in symbols[1:]])
)
self.job_vcsgc_input.calc_vcsgc(**args)
self.assertEqual(self.job_vcsgc_input.input.control['fix___vcsgc'], input_string)
args['window_moves'] = 10
input_string += ' window_moves {0}'.format(args['window_moves'])
self.job_vcsgc_input.calc_vcsgc(**args)
self.assertEqual(self.job_vcsgc_input.input.control['fix___vcsgc'], input_string)
args['window_size'] = 0.75
input_string += ' window_size {0}'.format(args['window_size'])
self.job_vcsgc_input.calc_vcsgc(**args)
self.assertEqual(self.job_vcsgc_input.input.control['fix___vcsgc'], input_string)
self.job_vcsgc_input.to_hdf()
for k, v in args.items():
if k not in ("mu", "target_concentration", "mc_step_interval", "swap_fraction", "temperature_mc"):
continue
self.assertEqual(self.job_vcsgc_input._generic_input[k], v,
| |
import jwt
from datetime import datetime, timedelta
import unittest
from unittest import mock
import pytest
import os
import json
from lib.TokenService import TokenService
from pactman import Consumer, Provider
from src import bootstrap
from RDS import Token, OAuth2Token, BaseService, OAuth2Service, User
from lib.Exceptions.ServiceException import *
def create_app():
    """Bootstrap the Flask application and switch it into testing mode."""
    flask_app = bootstrap().app
    # TESTING=True makes the test client re-raise exceptions instead of
    # turning them into 500 responses.
    flask_app.config["TESTING"] = True
    return flask_app
# Module-level Pact pair shared by every test: this service acts as the
# "UseCaseTokenStorage" consumer against a mock "CentralServiceTokenStorage"
# provider listening on localhost:3000 (matches TokenService(testing=...)).
pact = Consumer("UseCaseTokenStorage").has_pact_with(
    Provider("CentralServiceTokenStorage"), port=3000
)
class Test_TokenService(unittest.TestCase):
    # One Flask app / test client is shared by the whole test class.
    app = create_app()
    client = app.test_client()
    def run(self, result=None):
        """Run every test inside the ``pact`` context manager.

        Entering ``pact`` starts the mock provider, so each test method can
        register interactions and have them verified when the context exits.
        """
        with pact as p:
            super(Test_TokenService, self).run(result)
    def setUp(self):
        """Build the fixtures used by name throughout the test class.

        Creates a TokenService pointed at the pact mock provider, two OAuth2
        services (an ownCloud file storage and a Zenodo metadata service),
        two users, and one token per service for ``user1``.
        """
        # Point the service under test at the pact mock provider (port 3000).
        self.tokenService = TokenService(testing="http://localhost:3000")
        self.url1 = "https://10.14.29.60/owncloud/index.php/apps/oauth2/authorize?response_type=code&client_id={}&redirect_uri={}".format(
            1, "http://localhost:8080"
        )
        self.url2 = "http://zenodo.org/oauth/authorize?response_type=code&client_id={}&redirect_uri={}".format(
            2, "http://localhost:8080"
        )
        self.servicename1 = "owncloud-local"
        self.servicename2 = "sandbox.zenodo.org"
        self.user1 = User("user")
        self.user2 = User("user_refresh")
        self.service1 = OAuth2Service(
            servicename=self.servicename1,
            implements=["fileStorage"],
            authorize_url=self.url1,
            refresh_url="https://10.14.29.60/owncloud/index.php/apps/oauth2/api/v1/token",
            client_id="ABC",
            client_secret="XYZ",
        )
        self.service2 = OAuth2Service(
            servicename=self.servicename2,
            implements=["metadata"],
            authorize_url=self.url2,
            refresh_url="https://sandbox.zenodo.org/oauth/token",
            client_id="DEF",
            client_secret="UVW",
        )
        # token1 is a plain token for service1; token2 an OAuth2 token
        # (with refresh token) for service2 — both owned by user1.
        self.token1 = Token(self.user1, self.service1, "ABC")
        self.token2 = OAuth2Token(self.user1, self.service2, "ABC", "XYZ")
    def test_get_all_service_oauth(self):
        """getAllOAuthURIForService returns one authorize URI per registered
        service, for 0, 1 and 2 registered services."""
        # test to get all service, where no service is
        pact.given("No services are registered.").upon_receiving(
            "a request to get all services."
        ).with_request("GET", "/service").will_respond_with(
            200, body={"length": 0, "list": []}
        )
        all_services = self.tokenService.getAllOAuthURIForService()
        self.assertEqual(all_services, [])
        # test to get all service, where one service is
        pact.given("One service is registered.").upon_receiving(
            "a request to get all services."
        ).with_request("GET", "/service").will_respond_with(
            200, body={"length": 1, "list": [json.dumps(self.service1)]}
        )
        all_services = self.tokenService.getAllOAuthURIForService()
        self.assertEqual(all_services, [self.url1])
        # test to get all service, where two services are
        pact.given("Two services are registered.").upon_receiving(
            "a request to get all services."
        ).with_request("GET", "/service").will_respond_with(
            200,
            body={
                "length": 2,
                "list": [json.dumps(self.service1), json.dumps(self.service2)],
            },
        )
        all_services = self.tokenService.getAllOAuthURIForService()
        self.assertEqual(
            all_services, [self.url1, self.url2], msg=all_services)
    def test_get_specific_service(self):
        """getOAuthURIForService raises ServiceNotFoundError on a 500
        provider response and returns the authorize URI on success."""
        # test to get one specific service, where no service is
        pact.given("No services are registered.").upon_receiving(
            "a request to get one specific service."
        ).with_request(
            "GET", f"/service/{self.service1.servicename}"
        ).will_respond_with(
            500,
            body={
                "error": "ServiceNotFoundError",
                "http_code": 500,
                "description": f"{self.service1} not found.",
            },
        )
        with self.assertRaises(ServiceNotFoundError):
            self.tokenService.getOAuthURIForService(self.service1)
        # test to get one specific service, where one different service is
        pact.given("One service are registered.").upon_receiving(
            "a request to get one other specific service."
        ).with_request(
            "GET", f"/service/{self.service1.servicename}"
        ).will_respond_with(
            500,
            body={
                "error": "ServiceNotFoundError",
                "http_code": 500,
                "description": f"{self.service1} not found.",
            },
        )
        with self.assertRaises(ServiceNotFoundError):
            self.tokenService.getOAuthURIForService(self.service1)
        # test to get one specific service, where the same services are
        pact.given("one service was registered.").upon_receiving(
            "a request to get this one specific service."
        ).with_request(
            "GET", f"/service/{self.service1.servicename}"
        ).will_respond_with(
            200, body=json.dumps(self.service1)
        )
        svc = self.tokenService.getOAuthURIForService(self.service1)
        self.assertEqual(svc, self.url1)
        # test to get one specific service, where the same services are
        # NOTE(review): this success block is registered (and exercised) twice,
        # presumably to verify the lookup is repeatable — confirm intent.
        pact.given("one service was registered.").upon_receiving(
            "a request to get this one specific service."
        ).with_request(
            "GET", f"/service/{self.service1.servicename}"
        ).will_respond_with(
            200, body=json.dumps(self.service1)
        )
        svc = self.tokenService.getOAuthURIForService(self.service1)
        self.assertEqual(svc, self.url1)
    def test_get_services_for_user(self):
        """getAllServicesForUser returns one summary dict per token
        (service name, access token, projects, implements, sanitized service
        info) and raises UserNotFoundError on a 404 from the provider."""
        # test to get all services from one user, with no service
        pact.given("no service was registered.").upon_receiving(
            "a request to get services from one specific user."
        ).with_request("GET", f"/user/{self.user1.username}/token").will_respond_with(
            200, body={"length": 0, "list": []}
        )
        with pact:
            self.assertEqual(
                self.tokenService.getAllServicesForUser(self.user1), [])
        # test to get all services from one user, with one service
        pact.given("one service was registered.").upon_receiving(
            "a request to get services from one specific user."
        ).with_request("GET", f"/user/{self.user1.username}/token").will_respond_with(
            200, body={"length": 1, "list": [json.dumps(self.token1)]}
        )
        # The service also fetches the user's projects with the token.
        expected_projects = []
        pact.given("Given token to access port").upon_receiving(
            "projects from port taken from token with proj length {}".format(
                len(expected_projects)
            )
        ).with_request("GET", f"/metadata/project").will_respond_with(
            200, body=expected_projects
        )
        # client_secret must never be exposed in the returned service info.
        info = self.token1.service.to_dict()
        del info["client_secret"]
        with pact:
            data = self.tokenService.getAllServicesForUser(self.user1)
            self.assertEqual(
                data,
                [
                    {
                        "id": 0,
                        "servicename": self.servicename1,
                        "access_token": self.token1.access_token,
                        "projects": [],
                        "implements": self.token1.service.implements,
                        "informations": info
                    }
                ],
                msg=str(data[0]),
            )
        # test to get all services from one user, with two services
        pact.given("two services were registered.").upon_receiving(
            "a request to get services from one specific user."
        ).with_request("GET", f"/user/{self.user1.username}/token").will_respond_with(
            200,
            body={
                "length": 2,
                "list": [json.dumps(self.token1), json.dumps(self.token2)],
            },
        )
        expected_projects = []
        pact.given("Given token to access port").upon_receiving(
            "projects from port taken from token with proj length {}".format(
                len(expected_projects)
            )
        ).with_request("GET", f"/metadata/project").will_respond_with(
            200, body=expected_projects
        )
        expected_projects = []
        pact.given("Given token to access port 2").upon_receiving(
            "projects from port taken from token with proj length {}".format(
                len(expected_projects)
            )
        ).with_request("GET", f"/metadata/project").will_respond_with(
            200, body=expected_projects
        )
        info1 = self.token1.service.to_dict()
        del info1["client_secret"]
        info2 = self.token2.service.to_dict()
        del info2["client_secret"]
        with pact:
            self.assertEqual(
                self.tokenService.getAllServicesForUser(self.user1),
                [
                    {
                        "id": 0,
                        "servicename": self.servicename1,
                        "access_token": self.token1.access_token,
                        "projects": [],
                        "implements": self.token1.service.implements,
                        "informations": info1
                    },
                    {
                        "id": 1,
                        "servicename": self.servicename2,
                        "access_token": self.token2.access_token,
                        "projects": [],
                        "implements": self.token2.service.implements,
                        "informations": info2
                    },
                ],
            )
        # Unknown user: provider answers 404, which maps to UserNotFoundError.
        pact.given("two services were registered.").upon_receiving(
            "a request to get services from one specific user, which not exists."
        ).with_request("GET", f"/user/{self.user2.username}/token").will_respond_with(
            404, body={}
        )
        with self.assertRaises(UserNotFoundError):
            self.tokenService.getAllServicesForUser(self.user2)
# FIXME: addService not through this use case? directly to central service?
"""
def test_add_one_service(self):
# test to add one service, where no service is
self.assertEqual(self.tokenService.addService(self.service), True)
# test to add one service, where one different service is
self.assertEqual(self.tokenService.addService(self.service), True)
# test to add one service, where the same service is
with self.assertRaises(ServiceAlreadyRegisteredError):
self.tokenService.addService(self.service)
pass
def test_remove_one_service(self):
with self.assertRaises(ServiceNotFoundError):
self.tokenService.removeService(self.service1)
# test to remove one service, where one different service is
with self.assertRaises(ServiceNotFoundError):
self.tokenService.removeService(self.service1)
# test to remove one service, where the same service is
self.assertEqual(self.tokenService.removeService(self.service1), True)
"""
    def test_add_user(self):
        """addUser returns True on success and raises
        UserAlreadyRegisteredError when the provider answers 500."""
        # test to add one user, where no user is
        pact.given("no user was registered.").upon_receiving(
            "a request to add an user."
        ).with_request("POST", f"/user").will_respond_with(200, body={"success": True})
        self.assertEqual(self.tokenService.addUser(self.user1), True)
        # test to add one user, where one different user is
        pact.given("one different user was registered.").upon_receiving(
            "a request to add an user."
        ).with_request("POST", f"/user").will_respond_with(200, body={"success": True})
        self.assertEqual(self.tokenService.addUser(self.user1), True)
        # test to add one user, where the same user is
        pact.given("the same user was registered.").upon_receiving(
            "a request to add an user."
        ).with_request("POST", f"/user").will_respond_with(
            500,
            body={
                "error": "UserAlreadyRegisteredError",
                "http_code": 500,
                "description": f"{self.user1} already registered.",
            },
        )
        with self.assertRaises(UserAlreadyRegisteredError):
            self.tokenService.addUser(self.user1)
    def test_remove_user(self):
        """removeUser raises UserNotFoundError on 404 and returns True on
        success."""
        # test to remove one user, where no user is
        pact.given("no user was registered.").upon_receiving(
            "a request to remove an user."
        ).with_request("DELETE", f"/user/{self.user1.username}").will_respond_with(
            404, body={"description": "User not found"}
        )
        with self.assertRaises(UserNotFoundError):
            self.tokenService.removeUser(self.user1)
        # test to remove one user, where one different user is
        pact.given("one different user was registered.").upon_receiving(
            "a request to remove an user."
        ).with_request("DELETE", f"/user/{self.user1.username}").will_respond_with(
            404, body={"description": "User not found"}
        )
        with self.assertRaises(UserNotFoundError):
            self.tokenService.removeUser(self.user1)
        # test to remove one user, where the same user is
        pact.given("the user was registered.").upon_receiving(
            "a request to remove an user."
        ).with_request("DELETE", f"/user/{self.user1.username}").will_respond_with(
            200, body={"success": True}
        )
        self.assertEqual(self.tokenService.removeUser(self.user1), True)
    def test_add_token(self):
        """addTokenToUser maps the provider's error payloads to the matching
        exceptions and returns True on success."""
        # test to add one token, where no service and user is
        pact.given("no service and user was registered.").upon_receiving(
            "a request to add a token."
        ).with_request("POST", f"/user/{self.user1.username}/token").will_respond_with(
            500, body={"error": "ServiceNotFoundError"}
        )
        with self.assertRaises(ServiceNotFoundError):
            self.tokenService.addTokenToUser(self.token1, self.user1)
        # test to add one token, where no service but user is
        pact.given("no service was registered.").upon_receiving(
            "a request to add a token."
        ).with_request("POST", f"/user/{self.user1.username}/token").will_respond_with(
            500, body={"error": "ServiceNotFoundError"}
        )
        with self.assertRaises(ServiceNotFoundError):
            self.tokenService.addTokenToUser(self.token1, self.user1)
        # test to add one token, where service but no user is
        # NOTE: provider says "UserNotExistsError"; the client raises its own
        # UserNotFoundError for it.
        pact.given("no user was registered.").upon_receiving(
            "a request to add a token."
        ).with_request("POST", f"/user/{self.user1.username}/token").will_respond_with(
            500, body={"error": "UserNotExistsError"}
        )
        with self.assertRaises(UserNotFoundError):
            self.tokenService.addTokenToUser(self.token1, self.user1)
        # test to add one token, where service and user exists
        pact.given("user and service were registered.").upon_receiving(
            "a request to add a token."
        ).with_request("POST", f"/user/{self.user1.username}/token").will_respond_with(
            200, body={"success": True}
        )
        self.assertEqual(
            self.tokenService.addTokenToUser(self.token1, self.user1), True
        )
        # test to add one token, where service and user exists and user has token already for service
        pact.given(
            "user and service were registered, user has token for service already."
        ).upon_receiving("a request to add a token.").with_request(
            "POST", f"/user/{self.user1.username}/token"
        ).will_respond_with(
            500, body={"error": "UserHasTokenAlreadyError"}
        )
        with self.assertRaises(UserHasTokenAlreadyError):
            self.tokenService.addTokenToUser(self.token1, self.user1)
def test_remove_token(self):
# test to remove one token, where no user is
pact.given("no user registered.").upon_receiving(
"a request to remove a token."
).with_request(
"DELETE", f"/user/{self.user1.username}/token/{self.token1.servicename}"
).will_respond_with(
500, body={"error": "UserNotExistsError"}
)
with self.assertRaises(UserNotFoundError):
self.tokenService.removeTokenFromUser(self.token1, self.user1)
# test to remove one token, where no token is
pact.given("no token registered.").upon_receiving(
"a request to remove a token."
).with_request(
"DELETE", f"/user/{self.user1.username}/token/{self.token1.servicename}"
).will_respond_with(
500, body={"error": "TokenNotExistsError"}
)
with self.assertRaises(TokenNotFoundError):
self.tokenService.removeTokenFromUser(self.token1, self.user1)
# test to remove one token, where one different token is
pact.given("one different token registered.").upon_receiving(
"a request to remove a token."
).with_request(
"DELETE", f"/user/{self.user1.username}/token/{self.token1.servicename}"
).will_respond_with(
500, body={"error": "TokenNotExistsError"}
)
with pact:
with self.assertRaises(TokenNotFoundError):
self.tokenService.removeTokenFromUser(self.token1, self.user1)
# test to remove one token, where the same token is
pact.given("the token registered.").upon_receiving(
"a request to remove a token."
).with_request(
"DELETE", f"/user/{self.user1.username}/token/{self.token1.servicename}"
).will_respond_with(
| |
<gh_stars>0
#!/usr/bin/env python2
"""
builtin_assign.py
"""
from __future__ import print_function
from _devbuild.gen import arg_types
from _devbuild.gen.option_asdl import builtin_i
from _devbuild.gen.runtime_asdl import (
value, value_e, value_t, value__Bool, value__Str, value__MaybeStrArray,
value__AssocArray,
lvalue, lvalue_e, scope_e, cmd_value__Argv, cmd_value__Assign,
)
from _devbuild.gen.syntax_asdl import source
from core import alloc
from core import error
from core.pyerror import e_usage, log
from core import state
from core import ui
from core import vm
from frontend import flag_spec
from frontend import args
from qsn_ import qsn
from typing import cast, Optional, Dict, List, TYPE_CHECKING
if TYPE_CHECKING:
from _devbuild.gen.syntax_asdl import command__ShFunction
from core import optview
from core.state import Mem
from core.ui import ErrorFormatter
from frontend.args import _Attributes
from frontend.parse_lib import ParseContext
from osh.sh_expr_eval import ArithEvaluator
_ = log
# Filter mode for _PrintVariables: which builtin is asking, so that
# 'readonly -p' only prints readonly cells and 'export -p' only exported ones.
_OTHER = 0
_READONLY = 1
_EXPORT = 2
def _PrintVariables(mem, cmd_val, attrs, print_flags, builtin=_OTHER):
  # type: (Mem, cmd_value__Assign, _Attributes, bool, int) -> int
  """Print shell variables for declare/typeset/local/readonly/export -p.

  Args:
    print_flags: whether to print flags (the "declare -rx " prefix)
    builtin: is it the readonly or export builtin?

  Returns:
    0 if all names were printed (or everything was requested);
    1 if some requested name was invalid or unset, or for 'local -g'
    outside the global scope.
  """
  flag = attrs.attrs
  # Turn dynamic vars to static.
  tmp_g = flag.get('g')
  tmp_a = flag.get('a')
  tmp_A = flag.get('A')
  flag_g = cast(value__Bool, tmp_g).b if tmp_g and tmp_g.tag_() == value_e.Bool else False
  flag_a = cast(value__Bool, tmp_a).b if tmp_a and tmp_a.tag_() == value_e.Bool else False
  flag_A = cast(value__Bool, tmp_A).b if tmp_A and tmp_A.tag_() == value_e.Bool else False
  tmp_n = flag.get('n')
  tmp_r = flag.get('r')
  tmp_x = flag.get('x')
  #log('FLAG %r', flag)
  # SUBTLE: export -n vs. declare -n. flag vs. OPTION.
  # flags are value.Bool, while options are Undef or Str.
  # '+', '-', or None
  flag_n = cast(value__Str, tmp_n).s if tmp_n and tmp_n.tag_() == value_e.Str else None # type: Optional[str]
  flag_r = cast(value__Str, tmp_r).s if tmp_r and tmp_r.tag_() == value_e.Str else None # type: Optional[str]
  flag_x = cast(value__Str, tmp_x).s if tmp_x and tmp_x.tag_() == value_e.Str else None # type: Optional[str]
  # Which scope to enumerate: LocalOnly for 'local', GlobalOnly for -g,
  # otherwise the usual dynamic lookup.
  lookup_mode = scope_e.Dynamic
  if cmd_val.builtin_id == builtin_i.local:
    if flag_g and not mem.IsGlobalScope():
      return 1
    lookup_mode = scope_e.LocalOnly
  elif flag_g:
    lookup_mode = scope_e.GlobalOnly
  if len(cmd_val.pairs) == 0:
    # No names given: print every cell in the chosen scope, sorted.
    print_all = True
    cells = mem.GetAllCells(lookup_mode)
    names = sorted(cells) # type: List[str]
  else:
    print_all = False
    names = []
    cells = {}
    for pair in cmd_val.pairs:
      name = pair.var_name
      if pair.rval and pair.rval.tag_() == value_e.Str:
        # Invalid: declare -p foo=bar
        # Add a sentinel so we skip it, but know to exit with status 1.
        s = cast(value__Str, pair.rval).s
        invalid = "%s=%s" % (name, s)
        names.append(invalid)
        cells[invalid] = None
      else:
        names.append(name)
        cells[name] = mem.GetCell(name, lookup_mode)
  # Emit one line per cell that survives the flag filters below.
  count = 0
  for name in names:
    cell = cells[name]
    if cell is None: continue # Invalid
    val = cell.val
    #log('name %r %s', name, val)
    if val.tag_() == value_e.Undef: continue
    if builtin == _READONLY and not cell.readonly: continue
    if builtin == _EXPORT and not cell.exported: continue
    # '-' selects cells WITH the attribute, '+' cells WITHOUT it.
    if flag_n == '-' and not cell.nameref: continue
    if flag_n == '+' and cell.nameref: continue
    if flag_r == '-' and not cell.readonly: continue
    if flag_r == '+' and cell.readonly: continue
    if flag_x == '-' and not cell.exported: continue
    if flag_x == '+' and cell.exported: continue
    if flag_a and val.tag_() != value_e.MaybeStrArray: continue
    if flag_A and val.tag_() != value_e.AssocArray: continue
    decl = [] # type: List[str]
    if print_flags:
      flags = [] # type: List[str]
      if cell.nameref: flags.append('n')
      if cell.readonly: flags.append('r')
      if cell.exported: flags.append('x')
      if val.tag_() == value_e.MaybeStrArray:
        flags.append('a')
      elif val.tag_() == value_e.AssocArray:
        flags.append('A')
      if len(flags) == 0: flags.append('-')
      decl.extend(["declare -", ''.join(flags), " ", name])
    else:
      decl.append(name)
    if val.tag_() == value_e.Str:
      str_val = cast(value__Str, val)
      decl.extend(["=", qsn.maybe_shell_encode(str_val.s)])
    elif val.tag_() == value_e.MaybeStrArray:
      array_val = cast(value__MaybeStrArray, val)
      # mycpp rewrite: None in array_val.strs
      has_holes = False
      for s in array_val.strs:
        if s is None:
          has_holes = True
          break
      if has_holes:
        # Note: Arrays with unset elements are printed in the form:
        # declare -p arr=(); arr[3]='' arr[4]='foo' ...
        decl.append("=()")
        first = True
        for i, element in enumerate(array_val.strs):
          if element is not None:
            if first:
              decl.append(";")
              first = False
            decl.extend([" ", name, "[", str(i), "]=",
                         qsn.maybe_shell_encode(element)])
      else:
        body = [] # type: List[str]
        for element in array_val.strs:
          if len(body) > 0: body.append(" ")
          body.append(qsn.maybe_shell_encode(element))
        decl.extend(["=(", ''.join(body), ")"])
    elif val.tag_() == value_e.AssocArray:
      assoc_val = cast(value__AssocArray, val)
      body = []
      # Sort keys for deterministic output.
      for key in sorted(assoc_val.d):
        if len(body) > 0: body.append(" ")
        key_quoted = qsn.maybe_shell_encode(key, flags=qsn.MUST_QUOTE)
        value_quoted = qsn.maybe_shell_encode(assoc_val.d[key])
        body.extend(["[", key_quoted, "]=", value_quoted])
      if len(body) > 0:
        decl.extend(["=(", ''.join(body), ")"])
    else:
      pass # note: other types silently ignored
    print(''.join(decl))
    count += 1
  if print_all or count == len(names):
    return 0
  else:
    return 1
class Export(vm._AssignBuiltin):
  """The 'export' builtin: mark variables for export, or print them with -p.

  -n removes the export flag; -f is rejected as dangerous.
  """
  def __init__(self, mem, errfmt):
    # type: (Mem, ErrorFormatter) -> None
    self.mem = mem
    self.errfmt = errfmt
  def Run(self, cmd_val):
    # type: (cmd_value__Assign) -> int
    arg_r = args.Reader(cmd_val.argv, spids=cmd_val.arg_spids)
    arg_r.Next()
    attrs = flag_spec.Parse('export_', arg_r)
    arg = arg_types.export_(attrs.attrs)
    #arg = attrs
    if arg.f:
      e_usage(
          "doesn't accept -f because it's dangerous. "
          "(The code can usually be restructured with 'source')")
    # -p (or no names at all) means: print exported variables.
    if arg.p or len(cmd_val.pairs) == 0:
      return _PrintVariables(self.mem, cmd_val, attrs, True, builtin=_EXPORT)
    if arg.n:
      # export -n: clear the export flag; an RHS makes no sense here.
      for pair in cmd_val.pairs:
        if pair.rval is not None:
          e_usage("doesn't accept RHS with -n", span_id=pair.spid)
        # NOTE: we don't care if it wasn't found, like bash.
        self.mem.ClearFlag(pair.var_name, state.ClearExport, scope_e.Dynamic)
    else:
      for pair in cmd_val.pairs:
        # NOTE: when rval is None, only flags are changed
        self.mem.SetVar(lvalue.Named(pair.var_name), pair.rval, scope_e.Dynamic,
                        flags=state.SetExport)
    return 0
def _ReconcileTypes(rval, flag_a, flag_A, span_id):
  # type: (Optional[value_t], bool, bool, int) -> value_t
  """Check that -a and -A flags are consistent with RHS.

  Special case: () is allowed to mean empty indexed array or empty assoc array
  if the context is clear.
  Shared between NewVar and Readonly.

  Raises a usage error (via e_usage) when the RHS type contradicts the flag.
  """
  if flag_a and rval is not None and rval.tag_() != value_e.MaybeStrArray:
    e_usage("Got -a but RHS isn't an array", span_id=span_id)
  if flag_A and rval:
    # Special case: declare -A A=() is OK. The () is changed to mean an empty
    # associative array.
    if rval.tag_() == value_e.MaybeStrArray:
      array_val = cast(value__MaybeStrArray, rval)
      if len(array_val.strs) == 0:
        return value.AssocArray({})
      #return value.MaybeStrArray([])
    if rval.tag_() != value_e.AssocArray:
      e_usage("Got -A but RHS isn't an associative array", span_id=span_id)
  return rval
class Readonly(vm._AssignBuiltin):
  """The 'readonly' builtin: mark variables read-only, or print them with -p."""
  def __init__(self, mem, errfmt):
    # type: (Mem, ErrorFormatter) -> None
    self.mem = mem
    self.errfmt = errfmt
  def Run(self, cmd_val):
    # type: (cmd_value__Assign) -> int
    arg_r = args.Reader(cmd_val.argv, spids=cmd_val.arg_spids)
    arg_r.Next()
    attrs = flag_spec.Parse('readonly', arg_r)
    arg = arg_types.readonly(attrs.attrs)
    # -p (or no names at all) means: print readonly variables.
    if arg.p or len(cmd_val.pairs) == 0:
      return _PrintVariables(self.mem, cmd_val, attrs, True, builtin=_READONLY)
    for pair in cmd_val.pairs:
      if pair.rval is None:
        # 'readonly -a x' / 'readonly -A x' with no RHS initializes an
        # empty array of the requested kind.
        if arg.a:
          rval = value.MaybeStrArray([]) # type: value_t
        elif arg.A:
          rval = value.AssocArray({})
        else:
          rval = None
      else:
        rval = pair.rval
      rval = _ReconcileTypes(rval, arg.a, arg.A, pair.spid)
      # NOTE:
      # - when rval is None, only flags are changed
      # - dynamic scope because flags on locals can be changed, etc.
      self.mem.SetVar(lvalue.Named(pair.var_name), rval, scope_e.Dynamic,
                      flags=state.SetReadOnly)
    return 0
class NewVar(vm._AssignBuiltin):
"""declare/typeset/local."""
def __init__(self, mem, funcs, errfmt):
# type: (Mem, Dict[str, command__ShFunction], ErrorFormatter) -> None
self.mem = mem
self.funcs = funcs
self.errfmt = errfmt
def _PrintFuncs(self, names):
# type: (List[str]) -> int
status = 0
for name in names:
if name in self.funcs:
print(name)
# TODO: Could print LST for -f, or render LST. Bash does this. 'trap'
# could use that too.
else:
status = 1
return status
def Run(self, cmd_val):
# type: (cmd_value__Assign) -> int
arg_r = args.Reader(cmd_val.argv, spids=cmd_val.arg_spids)
arg_r.Next()
attrs = flag_spec.Parse('new_var', arg_r)
arg = arg_types.new_var(attrs.attrs)
status = 0
if arg.f:
names = arg_r.Rest()
if len(names):
# NOTE: in bash, -f shows the function body, while -F shows the name.
# Right now we just show the name.
status = self._PrintFuncs(names)
else:
e_usage('passed -f without args')
return status
if arg.F:
names = arg_r.Rest()
if len(names):
status = self._PrintFuncs(names)
else: # weird bash quirk: they're printed in a different format!
for func_name in sorted(self.funcs):
print('declare -f %s' % (func_name))
return status
if arg.p: # Lookup and print variables.
return _PrintVariables(self.mem, cmd_val, attrs, True)
elif len(cmd_val.pairs) == 0:
return _PrintVariables(self.mem, cmd_val, attrs, False)
#
# Set variables
#
#raise error.Usage("doesn't understand %s" % cmd_val.argv[1:])
if cmd_val.builtin_id == builtin_i.local:
lookup_mode = scope_e.LocalOnly
else: # declare/typeset
if arg.g:
lookup_mode = scope_e.GlobalOnly
else:
lookup_mode = scope_e.LocalOnly
flags = 0
if arg.x == '-':
flags |= state.SetExport
if arg.r == '-':
flags |= state.SetReadOnly
if arg.n == | |
for characters
* ``'-1'`` for strings
* ``True`` for boolean values
* XXX: I just obtained these values empirically
"""
# Only one item in the input sequence ?
if (len(seqarrays) == 1):
seqarrays = np.asanyarray(seqarrays[0])
# Do we have a single ndarray as input ?
if isinstance(seqarrays, (ndarray, np.void)):
seqdtype = seqarrays.dtype
# Make sure we have named fields
if seqdtype.names is None:
seqdtype = np.dtype([('', seqdtype)])
if not flatten or _zip_dtype((seqarrays,), flatten=True) == seqdtype:
# Minimal processing needed: just make sure everything's a-ok
seqarrays = seqarrays.ravel()
# Find what type of array we must return
if usemask:
if asrecarray:
seqtype = MaskedRecords
else:
seqtype = MaskedArray
elif asrecarray:
seqtype = recarray
else:
seqtype = ndarray
return seqarrays.view(dtype=seqdtype, type=seqtype)
else:
seqarrays = (seqarrays,)
else:
# Make sure we have arrays in the input sequence
seqarrays = [np.asanyarray(_m) for _m in seqarrays]
# Find the sizes of the inputs and their maximum
sizes = tuple(a.size for a in seqarrays)
maxlength = max(sizes)
# Get the dtype of the output (flattening if needed)
newdtype = _zip_dtype(seqarrays, flatten=flatten)
# Initialize the sequences for data and mask
seqdata = []
seqmask = []
# If we expect some kind of MaskedArray, make a special loop.
if usemask:
for (a, n) in zip(seqarrays, sizes):
nbmissing = (maxlength - n)
# Get the data and mask
data = a.ravel().__array__()
mask = ma.getmaskarray(a).ravel()
# Get the filling value (if needed)
if nbmissing:
fval = _check_fill_value(fill_value, a.dtype)
if isinstance(fval, (ndarray, np.void)):
if len(fval.dtype) == 1:
fval = fval.item()[0]
fmsk = True
else:
fval = np.array(fval, dtype=a.dtype, ndmin=1)
fmsk = np.ones((1,), dtype=mask.dtype)
else:
fval = None
fmsk = True
# Store an iterator padding the input to the expected length
seqdata.append(itertools.chain(data, [fval] * nbmissing))
seqmask.append(itertools.chain(mask, [fmsk] * nbmissing))
# Create an iterator for the data
data = tuple(_izip_records(seqdata, flatten=flatten))
output = ma.array(np.fromiter(data, dtype=newdtype, count=maxlength),
mask=list(_izip_records(seqmask, flatten=flatten)))
if asrecarray:
output = output.view(MaskedRecords)
else:
# Same as before, without the mask we don't need...
for (a, n) in zip(seqarrays, sizes):
nbmissing = (maxlength - n)
data = a.ravel().__array__()
if nbmissing:
fval = _check_fill_value(fill_value, a.dtype)
if isinstance(fval, (ndarray, np.void)):
if len(fval.dtype) == 1:
fval = fval.item()[0]
else:
fval = np.array(fval, dtype=a.dtype, ndmin=1)
else:
fval = None
seqdata.append(itertools.chain(data, [fval] * nbmissing))
output = np.fromiter(tuple(_izip_records(seqdata, flatten=flatten)),
dtype=newdtype, count=maxlength)
if asrecarray:
output = output.view(recarray)
# And we're done...
return output
def _drop_fields_dispatcher(base, drop_names, usemask=None, asrecarray=None):
    # __array_function__ dispatcher: only `base` participates in dispatch.
    return (base,)
@array_function_dispatch(_drop_fields_dispatcher)
def drop_fields(base, drop_names, usemask=True, asrecarray=False):
    """
    Return a new array with fields in `drop_names` dropped.
    Nested fields are supported.
    ..versionchanged: 1.18.0
        `drop_fields` returns an array with 0 fields if all fields are dropped,
        rather than returning ``None`` as it did previously.
    Parameters
    ----------
    base : array
        Input array
    drop_names : string or sequence
        String or sequence of strings corresponding to the names of the
        fields to drop.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : string or sequence, optional
        Whether to return a recarray or a mrecarray (`asrecarray=True`) or
        a plain ndarray or masked array with flexible dtype. The default
        is False.
    Examples
    --------
    >>> from numpy_demo.lib import recfunctions as rfn
    >>> a = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
    ...   dtype=[('a', np.int64), ('b', [('ba', np.double), ('bb', np.int64)])])
    >>> rfn.drop_fields(a, 'a')
    array([((2., 3),), ((5., 6),)],
          dtype=[('b', [('ba', '<f8'), ('bb', '<i8')])])
    >>> rfn.drop_fields(a, 'ba')
    array([(1, (3,)), (4, (6,))], dtype=[('a', '<i8'), ('b', [('bb', '<i8')])])
    >>> rfn.drop_fields(a, ['ba', 'bb'])
    array([(1,), (4,)], dtype=[('a', '<i8')])
    """
    # Accept either a single field name or an iterable of names.
    if _is_string_like(drop_names):
        drop_names = [drop_names]
    else:
        drop_names = set(drop_names)

    def _prune(dtype_in):
        # Rebuild the dtype description without the dropped names,
        # recursing into nested structured fields.
        kept = []
        for field_name in dtype_in.names:
            if field_name in drop_names:
                continue
            field_dtype = dtype_in[field_name]
            if field_dtype.names is not None:
                sub_descr = _prune(field_dtype)
                if sub_descr:
                    kept.append((field_name, sub_descr))
            else:
                kept.append((field_name, field_dtype))
        return kept

    pruned_dtype = _prune(base.dtype)
    result = recursive_fill_fields(base, np.empty(base.shape, dtype=pruned_dtype))
    return _fix_output(result, usemask=usemask, asrecarray=asrecarray)
def _keep_fields(base, keep_names, usemask=True, asrecarray=False):
    """
    Return a new array keeping only the fields in `keep_names`,
    and preserving the order of those fields.

    Parameters
    ----------
    base : array
        Input array
    keep_names : string or sequence
        String or sequence of strings corresponding to the names of the
        fields to keep. Order of the names will be preserved.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : string or sequence, optional
        Whether to return a recarray or a mrecarray (`asrecarray=True`) or
        a plain ndarray or masked array with flexible dtype. The default
        is False.
    """
    # The output dtype is simply the requested fields, in request order.
    subset_descr = [(name, base.dtype[name]) for name in keep_names]
    kept = np.empty(base.shape, dtype=subset_descr)
    kept = recursive_fill_fields(base, kept)
    return _fix_output(kept, usemask=usemask, asrecarray=asrecarray)
def _rec_drop_fields_dispatcher(base, drop_names):
    # __array_function__ dispatcher for rec_drop_fields: only `base`
    # participates in override resolution.
    return (base,)
@array_function_dispatch(_rec_drop_fields_dispatcher)
def rec_drop_fields(base, drop_names):
    """
    Returns a new numpy_demo.recarray with fields in `drop_names` dropped.
    """
    # Thin convenience wrapper: delegate to drop_fields, forcing a plain
    # (unmasked) recarray result.
    return drop_fields(base, drop_names, asrecarray=True, usemask=False)
def _rename_fields_dispatcher(base, namemapper):
    # __array_function__ dispatcher for rename_fields: only `base`
    # participates in override resolution.
    return (base,)
@array_function_dispatch(_rename_fields_dispatcher)
def rename_fields(base, namemapper):
    """
    Rename the fields from a flexible-datatype ndarray or recarray.

    Nested fields are supported.

    Parameters
    ----------
    base : ndarray
        Input array whose fields must be modified.
    namemapper : dictionary
        Dictionary mapping old field names to their new version.

    Examples
    --------
    >>> from numpy_demo.lib import recfunctions as rfn
    >>> a = np.array([(1, (2, [3.0, 30.])), (4, (5, [6.0, 60.]))],
    ...   dtype=[('a', int),('b', [('ba', float), ('bb', (float, 2))])])
    >>> rfn.rename_fields(a, {'a':'A', 'bb':'BB'})
    array([(1, (2., [ 3., 30.])), (4, (5., [ 6., 60.]))],
          dtype=[('A', '<i8'), ('b', [('ba', '<f8'), ('BB', '<f8', (2,))])])
    """
    def _renamed_descr(dtype, mapping):
        # Rebuild the dtype description, substituting mapped names at every
        # nesting level; unmapped names pass through unchanged.
        descr = []
        for old_name in dtype.names:
            new_name = mapping.get(old_name, old_name)
            sub_dtype = dtype[old_name]
            if sub_dtype.names is None:
                descr.append((new_name, sub_dtype))
            else:
                descr.append((new_name, _renamed_descr(sub_dtype, mapping)))
        return descr

    # A view suffices: only the field names change, not the data layout.
    return base.view(_renamed_descr(base.dtype, namemapper))
def _append_fields_dispatcher(base, names, data, dtypes=None,
                              fill_value=None, usemask=None, asrecarray=None):
    # __array_function__ dispatcher for append_fields: `base` and every
    # array in `data` participate in override resolution (yielded lazily,
    # so this dispatcher returns a generator rather than a tuple).
    yield base
    yield from data
@array_function_dispatch(_append_fields_dispatcher)
def append_fields(base, names, data, dtypes=None,
                  fill_value=-1, usemask=True, asrecarray=False):
    """
    Add new fields to an existing array.

    The names of the fields are given with the `names` arguments,
    the corresponding values with the `data` arguments.
    If a single field is appended, `names`, `data` and `dtypes` do not have
    to be lists but just values.

    Parameters
    ----------
    base : array
        Input array to extend.
    names : string, sequence
        String or sequence of strings corresponding to the names
        of the new fields.
    data : array or sequence of arrays
        Array or sequence of arrays storing the fields to add to the base.
    dtypes : sequence of datatypes, optional
        Datatype or sequence of datatypes.
        If None, the datatypes are estimated from the `data`.
    fill_value : {float}, optional
        Filling value used to pad missing data on the shorter arrays.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : {False, True}, optional
        Whether to return a recarray (MaskedRecords) or not.
    """
    # Check the names: lists must pair one-to-one with data arrays; a bare
    # string means a single new field, so wrap both names and data.
    if isinstance(names, (tuple, list)):
        if len(names) != len(data):
            msg = "The number of arrays does not match the number of names"
            raise ValueError(msg)
    elif isinstance(names, str):
        names = [names, ]
        data = [data, ]
    # Coerce each data array to a one-field structured view named after its
    # field, either inferring the dtype or applying the caller-supplied one
    # (a single dtype is broadcast across all fields).
    if dtypes is None:
        data = [np.array(a, copy=False, subok=True) for a in data]
        data = [a.view([(name, a.dtype)]) for (name, a) in zip(names, data)]
    else:
        if not isinstance(dtypes, (tuple, list)):
            dtypes = [dtypes, ]
        if len(data) != len(dtypes):
            if len(dtypes) == 1:
                dtypes = dtypes * len(data)
            else:
                msg = "The dtypes argument must be None, a dtype, or a list."
                raise ValueError(msg)
        data = [np.array(a, copy=False, subok=True, dtype=d).view([(n, d)])
                for (a, n, d) in zip(data, names, dtypes)]
    # Normalize base (and, when several fields are appended, the new data)
    # through merge_arrays; a single new field is just popped from the list.
    base = merge_arrays(base, usemask=usemask, fill_value=fill_value)
    if len(data) > 1:
        data = merge_arrays(data, flatten=True, usemask=usemask,
                            fill_value=fill_value)
    else:
        data = data.pop()
    # Allocate a fully-masked output long enough for the longer of the two,
    # then fill base fields and new fields in turn — rows present in only
    # one side stay masked.
    output = ma.masked_all(
        max(len(base), len(data)),
        dtype=_get_fieldspec(base.dtype) + _get_fieldspec(data.dtype))
    output = recursive_fill_fields(base, output)
    output = recursive_fill_fields(data, output)
    # Convert to the requested container (masked/plain, recarray or not).
    return _fix_output(output, usemask=usemask, asrecarray=asrecarray)
def _rec_append_fields_dispatcher(base, names, data, dtypes=None):
    # __array_function__ dispatcher for rec_append_fields: `base` and every
    # array in `data` participate in override resolution.
    yield base
    yield from data
@array_function_dispatch(_rec_append_fields_dispatcher)
def rec_append_fields(base, names, data, dtypes=None):
"""
Add new fields to an existing array.
The names of the fields are given with the `names` arguments,
the corresponding values with the `data` arguments.
If a single field is appended, `names`, `data` and `dtypes` do not have
to be | |
__syncthreads(); // 循环前同步
callback_for_runge_kutta4_for_magnet_with_single_qs(t0, Y0, k1, k, kls, p0s, *current_element_number, qs_data); // 已同步
if(tid == 0){
vct6_dot_a_v_ret(dt / 2., k1, temp); // temp = dt / 2 * k1
vct6_add_local(temp, Y0); // temp = Y0 + temp
}
__syncthreads();
callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt / 2., temp, k2, k, kls, p0s, *current_element_number, qs_data);
if(tid == 0){
vct6_dot_a_v_ret(dt / 2., k2, temp); // temp = dt / 2 * k2
vct6_add_local(temp, Y0); // temp = Y0 + temp
}
__syncthreads();
callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt / 2., temp, k3, k, kls, p0s, *current_element_number, qs_data);
if(tid == 0){
vct6_dot_a_v_ret(dt, k3, temp); // temp = dt * k3
vct6_add_local(temp, Y0); // temp = Y0 + temp
}
__syncthreads();
callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt, temp, k4, k, kls, p0s, *current_element_number, qs_data);
t0 += dt;
if(tid == 0){
vct6_add(k1, k4, temp); // temp = k1 + k4
vct6_dot_a_v(2.0, k2);
vct6_dot_a_v(2.0, k3);
vct6_add(k2, k3, k1); // k1 已经没用了,所以装 k1 = k2 + k3
vct6_add_local(temp, k1);
vct6_dot_a_v(dt / 6.0, temp);
vct6_add_local(Y0, temp);
// Y0 += (dt / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
}
}
// 写回 particle
if(tid == 0){
vct6_copy(Y0 ,particle); // 写 Y0
particle[DISTANCE] = *distance;
}
__syncthreads();
}
// __global__ (kernel-entry) version of the __device__ tracker above:
// integrates one particle through a magnet with a single QS element using
// classic fixed-step RK4.
//
//   distance               : total path length to integrate over
//   footstep               : target step length; step count = ceil(distance/footstep)
//   kls, p0s               : per-element field data consumed by the callback
//   current_element_number : index of the active beamline element
//   qs_data                : QS magnet parameters
//   particle               : particle record — [position, velocity] in the
//                            first 6 slots, plus SPEED/E/RM/DISTANCE slots
//
// Thread 0 of the block does all the vector arithmetic; the callback is
// invoked by every thread and synchronizes internally (per its comment).
// NOTE(review): if *distance == 0 then number == 0 and dt divides by zero —
// confirm callers guarantee distance > 0.
__global__ void track_for_magnet_with_single_qs_g(FLOAT *distance, FLOAT *footstep,
    FLOAT *kls, FLOAT* p0s, int *current_element_number,
    FLOAT *qs_data, FLOAT *particle)
{
    int tid = threadIdx.x;
    FLOAT t0 = 0.0; // integration starts at t = 0
    FLOAT t_end = (*distance) / particle[SPEED]; // total time = distance / speed
#ifdef FLOAT32
    int number = (int)(ceilf( (*distance) / (*footstep) ));
#else
    int number = (int)(ceil( (*distance) / (*footstep)));
#endif
    FLOAT dt = (t_end - t0) / ((FLOAT)(number)); // fixed RK4 time step
    FLOAT k = particle[E] / particle[RM]; // k: float = particle.e / particle.relativistic_mass
    __shared__ FLOAT Y0[DIM*2]; // Y0 is [P, v] — position and velocity, the first two 3-vectors of `particle`
    __shared__ FLOAT k1[DIM*2]; // RK4 stage derivatives k1..k4
    __shared__ FLOAT k2[DIM*2];
    __shared__ FLOAT k3[DIM*2];
    __shared__ FLOAT k4[DIM*2];
    __shared__ FLOAT temp[DIM*2]; // scratch vector for intermediate states
    if(tid == 0){
        vct6_copy(particle, Y0); // load Y0 from the particle record
    }
    for(int ignore = 0; ignore < number; ignore++){
        __syncthreads(); // make Y0 visible to all threads before the step
        callback_for_runge_kutta4_for_magnet_with_single_qs(t0, Y0, k1, k, kls, p0s, *current_element_number, qs_data); // synchronizes internally
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k1, temp); // temp = dt / 2 * k1
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt / 2., temp, k2, k, kls, p0s, *current_element_number, qs_data);
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k2, temp); // temp = dt / 2 * k2
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt / 2., temp, k3, k, kls, p0s, *current_element_number, qs_data);
        if(tid == 0){
            vct6_dot_a_v_ret(dt, k3, temp); // temp = dt * k3
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_single_qs(t0 + dt, temp, k4, k, kls, p0s, *current_element_number, qs_data);
        t0 += dt;
        if(tid == 0){
            vct6_add(k1, k4, temp); // temp = k1 + k4
            vct6_dot_a_v(2.0, k2); // k2 *= 2
            vct6_dot_a_v(2.0, k3); // k3 *= 2
            vct6_add(k2, k3, k1); // k1 is no longer needed, so reuse it: k1 = k2 + k3
            vct6_add_local(temp, k1); // temp = k1 + 2*k2 + 2*k3 + k4
            vct6_dot_a_v(dt / 6.0, temp); // temp *= dt / 6
            vct6_add_local(Y0, temp);
            // Y0 += (dt / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
        }
    }
    // write the result back to `particle`
    if(tid == 0){
        vct6_copy(Y0 ,particle); // store updated [P, v]
        particle[DISTANCE] = *distance; // record the distance travelled
    }
    __syncthreads();
}
// ------------------------- multi-QS version ------------------------------------------ //
// Single-particle tracking, multi-QS version: same fixed-step RK4 scheme as
// the single-QS tracker, but the callback receives an array of QS magnet
// parameter blocks (`qs_datas`) together with its length (`qs_number`).
// Thread 0 of the block does all the vector arithmetic; the callback is
// invoked by every thread and synchronizes internally (per its comment).
// NOTE(review): if *distance == 0 then number == 0 and dt divides by zero —
// confirm callers guarantee distance > 0.
/*__global__*/ __device__ void track_for_magnet_with_multi_qs(FLOAT *distance, FLOAT *footstep,
    FLOAT *kls, FLOAT* p0s, int *current_element_number,
    FLOAT *qs_datas, int *qs_number, FLOAT *particle)
{
    int tid = threadIdx.x;
    FLOAT t0 = 0.0; // integration starts at t = 0
    FLOAT t_end = (*distance) / particle[SPEED]; // total time = distance / speed
#ifdef FLOAT32
    int number = (int)(ceilf( (*distance) / (*footstep) ));
#else
    int number = (int)(ceil( (*distance) / (*footstep)));
#endif
    FLOAT dt = (t_end - t0) / ((FLOAT)(number)); // fixed RK4 time step
    FLOAT k = particle[E] / particle[RM]; // k: float = particle.e / particle.relativistic_mass
    __shared__ FLOAT Y0[DIM*2]; // Y0 is [P, v] — position and velocity, the first two 3-vectors of `particle`
    __shared__ FLOAT k1[DIM*2]; // RK4 stage derivatives k1..k4
    __shared__ FLOAT k2[DIM*2];
    __shared__ FLOAT k3[DIM*2];
    __shared__ FLOAT k4[DIM*2];
    __shared__ FLOAT temp[DIM*2]; // scratch vector for intermediate states
    if(tid == 0){
        vct6_copy(particle, Y0); // load Y0 from the particle record
    }
    for(int ignore = 0; ignore < number; ignore++){
        __syncthreads(); // make Y0 visible to all threads before the step
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0, Y0, k1, k, kls, p0s, *current_element_number, qs_datas, *qs_number); // synchronizes internally
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k1, temp); // temp = dt / 2 * k1
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt / 2., temp, k2, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k2, temp); // temp = dt / 2 * k2
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt / 2., temp, k3, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        if(tid == 0){
            vct6_dot_a_v_ret(dt, k3, temp); // temp = dt * k3
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt, temp, k4, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        t0 += dt;
        if(tid == 0){
            vct6_add(k1, k4, temp); // temp = k1 + k4
            vct6_dot_a_v(2.0, k2); // k2 *= 2
            vct6_dot_a_v(2.0, k3); // k3 *= 2
            vct6_add(k2, k3, k1); // k1 is no longer needed, so reuse it: k1 = k2 + k3
            vct6_add_local(temp, k1); // temp = k1 + 2*k2 + 2*k3 + k4
            vct6_dot_a_v(dt / 6.0, temp); // temp *= dt / 6
            vct6_add_local(Y0, temp);
            // Y0 += (dt / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
        }
    }
    // write the result back to `particle`
    if(tid == 0){
        vct6_copy(Y0 ,particle); // store updated [P, v]
        particle[DISTANCE] = *distance; // record the distance travelled
    }
    __syncthreads();
}
// __global__ (kernel-entry) version of the multi-QS tracker above: same
// fixed-step RK4 integration, launched directly as a kernel rather than
// called from device code.
// NOTE(review): if *distance == 0 then number == 0 and dt divides by zero —
// confirm callers guarantee distance > 0.
__global__ void track_for_magnet_with_multi_qs_g(FLOAT *distance, FLOAT *footstep,
    FLOAT *kls, FLOAT* p0s, int *current_element_number,
    FLOAT *qs_datas, int *qs_number, FLOAT *particle)
{
    int tid = threadIdx.x;
    FLOAT t0 = 0.0; // integration starts at t = 0
    FLOAT t_end = (*distance) / particle[SPEED]; // total time = distance / speed
#ifdef FLOAT32
    int number = (int)(ceilf( (*distance) / (*footstep) ));
#else
    int number = (int)(ceil( (*distance) / (*footstep)));
#endif
    FLOAT dt = (t_end - t0) / ((FLOAT)(number)); // fixed RK4 time step
    FLOAT k = particle[E] / particle[RM]; // k: float = particle.e / particle.relativistic_mass
    __shared__ FLOAT Y0[DIM*2]; // Y0 is [P, v] — position and velocity, the first two 3-vectors of `particle`
    __shared__ FLOAT k1[DIM*2]; // RK4 stage derivatives k1..k4
    __shared__ FLOAT k2[DIM*2];
    __shared__ FLOAT k3[DIM*2];
    __shared__ FLOAT k4[DIM*2];
    __shared__ FLOAT temp[DIM*2]; // scratch vector for intermediate states
    if(tid == 0){
        vct6_copy(particle, Y0); // load Y0 from the particle record
    }
    for(int ignore = 0; ignore < number; ignore++){
        __syncthreads(); // make Y0 visible to all threads before the step
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0, Y0, k1, k, kls, p0s, *current_element_number, qs_datas, *qs_number); // synchronizes internally
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k1, temp); // temp = dt / 2 * k1
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt / 2., temp, k2, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        if(tid == 0){
            vct6_dot_a_v_ret(dt / 2., k2, temp); // temp = dt / 2 * k2
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt / 2., temp, k3, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        if(tid == 0){
            vct6_dot_a_v_ret(dt, k3, temp); // temp = dt * k3
            vct6_add_local(temp, Y0); // temp = Y0 + temp
        }
        __syncthreads();
        callback_for_runge_kutta4_for_magnet_with_multi_qs(t0 + dt, temp, k4, k, kls, p0s, *current_element_number, qs_datas, *qs_number);
        t0 += dt;
        if(tid == 0){
            vct6_add(k1, k4, temp); // temp = k1 + k4
            vct6_dot_a_v(2.0, k2); // k2 *= 2
            vct6_dot_a_v(2.0, k3); // k3 *= 2
            vct6_add(k2, k3, k1); // k1 is no longer needed, so reuse it: k1 = k2 + k3
            vct6_add_local(temp, k1); // temp = k1 + 2*k2 + 2*k3 + k4
            vct6_dot_a_v(dt / 6.0, temp); // temp *= dt / 6
            vct6_add_local(Y0, temp);
            // Y0 += (dt / 6) * (k1 + 2 * k2 + 2 * k3 + k4);
        }
    }
    // write the result back to `particle`
    if(tid == 0){
        vct6_copy(Y0 ,particle); // store updated [P, v]
        particle[DISTANCE] = *distance; // record the distance travelled
    }
    __syncthreads();
}
"""
cuda_code_09_run_multi_particle = """
// Multi-particle tracking, serial: run the single-particle tracker over each
// PARTICLE_DIM-wide record of `particle`, one particle after another.
__device__ void track_multi_particle_for_magnet_with_single_qs(FLOAT *distance, FLOAT *footstep,
    FLOAT *kls, FLOAT *p0s, int *current_element_number,
    FLOAT *qs_data, FLOAT *particle, int *particle_number)
{
    for(int idx = 0; idx < (*particle_number); idx++){
        // idx-th particle record starts idx * PARTICLE_DIM floats in.
        FLOAT *record = particle + idx * PARTICLE_DIM;
        track_for_magnet_with_single_qs(distance, footstep, kls, p0s,
            current_element_number, qs_data, record);
    }
}
// Kernel entry point: same serial per-particle loop as the __device__
// variant, launched directly from the host.
__global__ void track_multi_particle_for_magnet_with_single_qs_g(FLOAT *distance, FLOAT *footstep,
    FLOAT *kls, FLOAT *p0s, int *current_element_number,
    FLOAT *qs_data, FLOAT *particle, int *particle_number)
{
    for(int idx = 0; idx < (*particle_number); idx++){
        // idx-th particle record starts idx * PARTICLE_DIM floats in.
        FLOAT *record = particle + idx * PARTICLE_DIM;
        track_for_magnet_with_single_qs(distance, footstep, kls, p0s,
            current_element_number, qs_data, record);
    }
}
// 多 qs 版本
__device__ void track_multi_particle_for_magnet_with_multi_qs(FLOAT | |
\
as JSONSchemaValidatorAba4991D4E9B8747_v2_1_2
from .validators.v2_1_2.jsd_aeb4dad04a99bbe3 \
import JSONSchemaValidatorAeb4Dad04A99Bbe3 \
as JSONSchemaValidatorAeb4Dad04A99Bbe3_v2_1_2
from .validators.v2_1_2.jsd_aeb9eb67460b92df \
import JSONSchemaValidatorAeb9Eb67460B92Df \
as JSONSchemaValidatorAeb9Eb67460B92Df_v2_1_2
from .validators.v2_1_2.jsd_af8d7b0e470b8ae2 \
import JSONSchemaValidatorAf8D7B0E470B8Ae2 \
as JSONSchemaValidatorAf8D7B0E470B8Ae2_v2_1_2
from .validators.v2_1_2.jsd_b0b7eabc4f4b9b28 \
import JSONSchemaValidatorB0B7Eabc4F4B9B28 \
as JSONSchemaValidatorB0B7Eabc4F4B9B28_v2_1_2
from .validators.v2_1_2.jsd_b199685d4d089a67 \
import JSONSchemaValidatorB199685D4D089A67 \
as JSONSchemaValidatorB199685D4D089A67_v2_1_2
from .validators.v2_1_2.jsd_b2b8cb91459aa58f \
import JSONSchemaValidatorB2B8Cb91459AA58F \
as JSONSchemaValidatorB2B8Cb91459AA58F_v2_1_2
from .validators.v2_1_2.jsd_b3a1c8804c8b9b8b \
import JSONSchemaValidatorB3A1C8804C8B9B8B \
as JSONSchemaValidatorB3A1C8804C8B9B8B_v2_1_2
from .validators.v2_1_2.jsd_b68a6bd8473a9a25 \
import JSONSchemaValidatorB68A6Bd8473A9A25 \
as JSONSchemaValidatorB68A6Bd8473A9A25_v2_1_2
from .validators.v2_1_2.jsd_b78329674878b815 \
import JSONSchemaValidatorB78329674878B815 \
as JSONSchemaValidatorB78329674878B815_v2_1_2
from .validators.v2_1_2.jsd_b7bcaa084e2b90d0 \
import JSONSchemaValidatorB7BcAa084E2B90D0 \
as JSONSchemaValidatorB7BcAa084E2B90D0_v2_1_2
from .validators.v2_1_2.jsd_b888792d43baba46 \
import JSONSchemaValidatorB888792D43BaBa46 \
as JSONSchemaValidatorB888792D43BaBa46_v2_1_2
from .validators.v2_1_2.jsd_b9855ad54ae98156 \
import JSONSchemaValidatorB9855Ad54Ae98156 \
as JSONSchemaValidatorB9855Ad54Ae98156_v2_1_2
from .validators.v2_1_2.jsd_b9b48ac8463a8aba \
import JSONSchemaValidatorB9B48Ac8463A8Aba \
as JSONSchemaValidatorB9B48Ac8463A8Aba_v2_1_2
from .validators.v2_1_2.jsd_ba9dc85b4b8a9a17 \
import JSONSchemaValidatorBa9DC85B4B8A9A17 \
as JSONSchemaValidatorBa9DC85B4B8A9A17_v2_1_2
from .validators.v2_1_2.jsd_bab6c9e5440885cc \
import JSONSchemaValidatorBab6C9E5440885Cc \
as JSONSchemaValidatorBab6C9E5440885Cc_v2_1_2
from .validators.v2_1_2.jsd_bc8aab4746ca883d \
import JSONSchemaValidatorBc8AAb4746Ca883D \
as JSONSchemaValidatorBc8AAb4746Ca883D_v2_1_2
from .validators.v2_1_2.jsd_bca339d844c8a3c0 \
import JSONSchemaValidatorBca339D844C8A3C0 \
as JSONSchemaValidatorBca339D844C8A3C0_v2_1_2
from .validators.v2_1_2.jsd_be892bd84a78865a \
import JSONSchemaValidatorBe892Bd84A78865A \
as JSONSchemaValidatorBe892Bd84A78865A_v2_1_2
from .validators.v2_1_2.jsd_bead7b3443b996a7 \
import JSONSchemaValidatorBead7B3443B996A7 \
as JSONSchemaValidatorBead7B3443B996A7_v2_1_2
from .validators.v2_1_2.jsd_bf859ac64a0ba19c \
import JSONSchemaValidatorBf859Ac64A0BA19C \
as JSONSchemaValidatorBf859Ac64A0BA19C_v2_1_2
from .validators.v2_1_2.jsd_c085eaf54f89ba34 \
import JSONSchemaValidatorC085Eaf54F89Ba34 \
as JSONSchemaValidatorC085Eaf54F89Ba34_v2_1_2
from .validators.v2_1_2.jsd_c0bca85643c8b58d \
import JSONSchemaValidatorC0BcA85643C8B58D \
as JSONSchemaValidatorC0BcA85643C8B58D_v2_1_2
from .validators.v2_1_2.jsd_c1a359b14c89b573 \
import JSONSchemaValidatorC1A359B14C89B573 \
as JSONSchemaValidatorC1A359B14C89B573_v2_1_2
from .validators.v2_1_2.jsd_c1ba9a424c08a01b \
import JSONSchemaValidatorC1Ba9A424C08A01B \
as JSONSchemaValidatorC1Ba9A424C08A01B_v2_1_2
from .validators.v2_1_2.jsd_c2a43ad24098baa7 \
import JSONSchemaValidatorC2A43Ad24098Baa7 \
as JSONSchemaValidatorC2A43Ad24098Baa7_v2_1_2
from .validators.v2_1_2.jsd_c2b5fb764d888375 \
import JSONSchemaValidatorC2B5Fb764D888375 \
as JSONSchemaValidatorC2B5Fb764D888375_v2_1_2
from .validators.v2_1_2.jsd_c3b3c9ef4e6b8a09 \
import JSONSchemaValidatorC3B3C9Ef4E6B8A09 \
as JSONSchemaValidatorC3B3C9Ef4E6B8A09_v2_1_2
from .validators.v2_1_2.jsd_c5acd9fa4c1a8abc \
import JSONSchemaValidatorC5AcD9Fa4C1A8Abc \
as JSONSchemaValidatorC5AcD9Fa4C1A8Abc_v2_1_2
from .validators.v2_1_2.jsd_c78c9ad245bb9657 \
import JSONSchemaValidatorC78C9Ad245Bb9657 \
as JSONSchemaValidatorC78C9Ad245Bb9657_v2_1_2
from .validators.v2_1_2.jsd_c7a6592b4b98a369 \
import JSONSchemaValidatorC7A6592B4B98A369 \
as JSONSchemaValidatorC7A6592B4B98A369_v2_1_2
from .validators.v2_1_2.jsd_c8bf6b65414a9bc7 \
import JSONSchemaValidatorC8Bf6B65414A9Bc7 \
as JSONSchemaValidatorC8Bf6B65414A9Bc7_v2_1_2
from .validators.v2_1_2.jsd_c9809b6744f8a502 \
import JSONSchemaValidatorC9809B6744F8A502 \
as JSONSchemaValidatorC9809B6744F8A502_v2_1_2
from .validators.v2_1_2.jsd_ca91da84401abba1 \
import JSONSchemaValidatorCa91Da84401ABba1 \
as JSONSchemaValidatorCa91Da84401ABba1_v2_1_2
from .validators.v2_1_2.jsd_caa3ea704d78b37e \
import JSONSchemaValidatorCaa3Ea704D78B37E \
as JSONSchemaValidatorCaa3Ea704D78B37E_v2_1_2
from .validators.v2_1_2.jsd_cb81b93540baaab0 \
import JSONSchemaValidatorCb81B93540BaAab0 \
as JSONSchemaValidatorCb81B93540BaAab0_v2_1_2
from .validators.v2_1_2.jsd_cb868b2142898159 \
import JSONSchemaValidatorCb868B2142898159 \
as JSONSchemaValidatorCb868B2142898159_v2_1_2
from .validators.v2_1_2.jsd_cba5b8b14edb81f4 \
import JSONSchemaValidatorCba5B8B14Edb81F4 \
as JSONSchemaValidatorCba5B8B14Edb81F4_v2_1_2
from .validators.v2_1_2.jsd_cca519ba45ebb423 \
import JSONSchemaValidatorCca519Ba45EbB423 \
as JSONSchemaValidatorCca519Ba45EbB423_v2_1_2
from .validators.v2_1_2.jsd_cd8469e647caab0e \
import JSONSchemaValidatorCd8469E647CaAb0E \
as JSONSchemaValidatorCd8469E647CaAb0E_v2_1_2
from .validators.v2_1_2.jsd_cd98780f4888a66d \
import JSONSchemaValidatorCd98780F4888A66D \
as JSONSchemaValidatorCd98780F4888A66D_v2_1_2
from .validators.v2_1_2.jsd_cdab9b474899ae06 \
import JSONSchemaValidatorCdab9B474899Ae06 \
as JSONSchemaValidatorCdab9B474899Ae06_v2_1_2
from .validators.v2_1_2.jsd_cf9418234d9ab37e \
import JSONSchemaValidatorCf9418234D9AB37E \
as JSONSchemaValidatorCf9418234D9AB37E_v2_1_2
from .validators.v2_1_2.jsd_cfa049a644bb8a07 \
import JSONSchemaValidatorCfa049A644Bb8A07 \
as JSONSchemaValidatorCfa049A644Bb8A07_v2_1_2
from .validators.v2_1_2.jsd_cfbd3870405aad55 \
import JSONSchemaValidatorCfbd3870405AAd55 \
as JSONSchemaValidatorCfbd3870405AAd55_v2_1_2
from .validators.v2_1_2.jsd_d09b08a3447aa3b9 \
import JSONSchemaValidatorD09B08A3447AA3B9 \
as JSONSchemaValidatorD09B08A3447AA3B9_v2_1_2
from .validators.v2_1_2.jsd_d0a1abfa435b841d \
import JSONSchemaValidatorD0A1Abfa435B841D \
as JSONSchemaValidatorD0A1Abfa435B841D_v2_1_2
from .validators.v2_1_2.jsd_d0a4b88145aabb51 \
import JSONSchemaValidatorD0A4B88145AaBb51 \
as JSONSchemaValidatorD0A4B88145AaBb51_v2_1_2
from .validators.v2_1_2.jsd_d0aafa694f4b9d7b \
import JSONSchemaValidatorD0AaFa694F4B9D7B \
as JSONSchemaValidatorD0AaFa694F4B9D7B_v2_1_2
from .validators.v2_1_2.jsd_d2b4d9d04a4b884c \
import JSONSchemaValidatorD2B4D9D04A4B884C \
as JSONSchemaValidatorD2B4D9D04A4B884C_v2_1_2
from .validators.v2_1_2.jsd_d49af9b84c6aa8ea \
import JSONSchemaValidatorD49AF9B84C6AA8Ea \
as JSONSchemaValidatorD49AF9B84C6AA8Ea_v2_1_2
from .validators.v2_1_2.jsd_d6b8ca774739adf4 \
import JSONSchemaValidatorD6B8Ca774739Adf4 \
as JSONSchemaValidatorD6B8Ca774739Adf4_v2_1_2
from .validators.v2_1_2.jsd_d7a6392845e8969d \
import JSONSchemaValidatorD7A6392845E8969D \
as JSONSchemaValidatorD7A6392845E8969D_v2_1_2
from .validators.v2_1_2.jsd_d888ab6d4d59a8c1 \
import JSONSchemaValidatorD888Ab6D4D59A8C1 \
as JSONSchemaValidatorD888Ab6D4D59A8C1_v2_1_2
from .validators.v2_1_2.jsd_d8a619974a8a8c48 \
import JSONSchemaValidatorD8A619974A8A8C48 \
as JSONSchemaValidatorD8A619974A8A8C48_v2_1_2
from .validators.v2_1_2.jsd_d9a1fa9c4068b23c \
import JSONSchemaValidatorD9A1Fa9C4068B23C \
as JSONSchemaValidatorD9A1Fa9C4068B23C_v2_1_2
from .validators.v2_1_2.jsd_db8e09234a988bab \
import JSONSchemaValidatorDb8E09234A988Bab \
as JSONSchemaValidatorDb8E09234A988Bab_v2_1_2
from .validators.v2_1_2.jsd_dcaa6bde4feb9152 \
import JSONSchemaValidatorDcaa6Bde4Feb9152 \
as JSONSchemaValidatorDcaa6Bde4Feb9152_v2_1_2
from .validators.v2_1_2.jsd_dd85c91042489a3f \
import JSONSchemaValidatorDd85C91042489A3F \
as JSONSchemaValidatorDd85C91042489A3F_v2_1_2
from .validators.v2_1_2.jsd_e0b5599b4f2997b7 \
import JSONSchemaValidatorE0B5599B4F2997B7 \
as JSONSchemaValidatorE0B5599B4F2997B7_v2_1_2
from .validators.v2_1_2.jsd_e2adba7943bab3e9 \
import JSONSchemaValidatorE2AdBa7943BaB3E9 \
as JSONSchemaValidatorE2AdBa7943BaB3E9_v2_1_2
from .validators.v2_1_2.jsd_e39588a5494982c4 \
import JSONSchemaValidatorE39588A5494982C4 \
as JSONSchemaValidatorE39588A5494982C4_v2_1_2
from .validators.v2_1_2.jsd_e487f8d3481b94f2 \
import JSONSchemaValidatorE487F8D3481B94F2 \
as JSONSchemaValidatorE487F8D3481B94F2_v2_1_2
from .validators.v2_1_2.jsd_e6b3db8046c99654 \
import JSONSchemaValidatorE6B3Db8046C99654 \
as JSONSchemaValidatorE6B3Db8046C99654_v2_1_2
from .validators.v2_1_2.jsd_e78bb8a2449b9eed \
import JSONSchemaValidatorE78BB8A2449B9Eed \
as JSONSchemaValidatorE78BB8A2449B9Eed_v2_1_2
from .validators.v2_1_2.jsd_e9b99b2248c88014 \
import JSONSchemaValidatorE9B99B2248C88014 \
as JSONSchemaValidatorE9B99B2248C88014_v2_1_2
from .validators.v2_1_2.jsd_eab7abe048fb99ad \
import JSONSchemaValidatorEab7Abe048Fb99Ad \
as JSONSchemaValidatorEab7Abe048Fb99Ad_v2_1_2
from .validators.v2_1_2.jsd_eb8249e34f69b0f1 \
import JSONSchemaValidatorEb8249E34F69B0F1 \
as JSONSchemaValidatorEb8249E34F69B0F1_v2_1_2
from .validators.v2_1_2.jsd_eb8c2a8345aa871f \
import JSONSchemaValidatorEb8C2A8345Aa871F \
as JSONSchemaValidatorEb8C2A8345Aa871F_v2_1_2
from .validators.v2_1_2.jsd_eba669054e08a60e \
import JSONSchemaValidatorEba669054E08A60E \
as JSONSchemaValidatorEba669054E08A60E_v2_1_2
from .validators.v2_1_2.jsd_ee9aab01487a8896 \
import JSONSchemaValidatorEe9AAb01487A8896 \
as JSONSchemaValidatorEe9AAb01487A8896_v2_1_2
from .validators.v2_1_2.jsd_eeb168eb41988e07 \
import JSONSchemaValidatorEeb168Eb41988E07 \
as JSONSchemaValidatorEeb168Eb41988E07_v2_1_2
from .validators.v2_1_2.jsd_eeb7eb4b4bd8a1dd \
import JSONSchemaValidatorEeb7Eb4B4Bd8A1Dd \
as JSONSchemaValidatorEeb7Eb4B4Bd8A1Dd_v2_1_2
from .validators.v2_1_2.jsd_f083cb13484a8fae \
import JSONSchemaValidatorF083Cb13484A8Fae \
as JSONSchemaValidatorF083Cb13484A8Fae_v2_1_2
from .validators.v2_1_2.jsd_f09319674049a7d4 \
import JSONSchemaValidatorF09319674049A7D4 \
as JSONSchemaValidatorF09319674049A7D4_v2_1_2
from .validators.v2_1_2.jsd_f393abe84989bb48 \
import JSONSchemaValidatorF393Abe84989Bb48 \
as JSONSchemaValidatorF393Abe84989Bb48_v2_1_2
from .validators.v2_1_2.jsd_f3b26b5544cabab9 \
import JSONSchemaValidatorF3B26B5544CaBab9 \
as JSONSchemaValidatorF3B26B5544CaBab9_v2_1_2
from .validators.v2_1_2.jsd_f49548c54be8a3e2 \
import JSONSchemaValidatorF49548C54Be8A3E2 \
as JSONSchemaValidatorF49548C54Be8A3E2_v2_1_2
from .validators.v2_1_2.jsd_f5947a4c439a8bf0 \
import JSONSchemaValidatorF5947A4C439A8Bf0 \
as JSONSchemaValidatorF5947A4C439A8Bf0_v2_1_2
from .validators.v2_1_2.jsd_f5a13ab24c5aaa91 \
import JSONSchemaValidatorF5A13Ab24C5AAa91 \
as JSONSchemaValidatorF5A13Ab24C5AAa91_v2_1_2
from .validators.v2_1_2.jsd_f5a269c44f2a95fa \
import JSONSchemaValidatorF5A269C44F2A95Fa \
as JSONSchemaValidatorF5A269C44F2A95Fa_v2_1_2
from .validators.v2_1_2.jsd_f5ac590c4ca9975a \
import JSONSchemaValidatorF5Ac590C4Ca9975A \
as JSONSchemaValidatorF5Ac590C4Ca9975A_v2_1_2
from .validators.v2_1_2.jsd_f6826a8e41bba242 \
import JSONSchemaValidatorF6826A8E41BbA242 \
as JSONSchemaValidatorF6826A8E41BbA242_v2_1_2
from .validators.v2_1_2.jsd_f6ac994f451ba011 \
import JSONSchemaValidatorF6Ac994F451BA011 \
as JSONSchemaValidatorF6Ac994F451BA011_v2_1_2
from .validators.v2_1_2.jsd_f6b119ad4d4aaf16 \
import JSONSchemaValidatorF6B119Ad4D4AAf16 \
as JSONSchemaValidatorF6B119Ad4D4AAf16_v2_1_2
from .validators.v2_1_2.jsd_f6bd6bf64e6890be \
import JSONSchemaValidatorF6Bd6Bf64E6890Be \
as JSONSchemaValidatorF6Bd6Bf64E6890Be_v2_1_2
from .validators.v2_1_2.jsd_f6bfc880435aae2a \
import JSONSchemaValidatorF6BfC880435AAe2A \
as JSONSchemaValidatorF6BfC880435AAe2A_v2_1_2
from .validators.v2_1_2.jsd_f793192a43dabed9 \
import JSONSchemaValidatorF793192A43DaBed9 \
as JSONSchemaValidatorF793192A43DaBed9_v2_1_2
from .validators.v2_1_2.jsd_f9bd99c74bba8832 \
import JSONSchemaValidatorF9Bd99C74Bba8832 \
as JSONSchemaValidatorF9Bd99C74Bba8832_v2_1_2
from .validators.v2_1_2.jsd_fa9219bf45c8b43b \
import JSONSchemaValidatorFa9219Bf45C8B43B \
as JSONSchemaValidatorFa9219Bf45C8B43B_v2_1_2
from .validators.v2_1_2.jsd_fa9a98174129af50 \
import JSONSchemaValidatorFa9A98174129Af50 \
as JSONSchemaValidatorFa9A98174129Af50_v2_1_2
from .validators.v2_1_2.jsd_fb9beb664f2aba4c \
import JSONSchemaValidatorFb9BEb664F2ABa4C \
as JSONSchemaValidatorFb9BEb664F2ABa4C_v2_1_2
from .validators.v2_1_2.jsd_fb9bf80f491a9851 \
import JSONSchemaValidatorFb9BF80F491A9851 \
as JSONSchemaValidatorFb9BF80F491A9851_v2_1_2
from .validators.v2_1_2.jsd_fba0d80747eb82e8 \
import JSONSchemaValidatorFba0D80747Eb82E8 \
as JSONSchemaValidatorFba0D80747Eb82E8_v2_1_2
from .validators.v2_1_2.jsd_fbb95b37484a9fce \
import JSONSchemaValidatorFbb95B37484A9Fce \
as JSONSchemaValidatorFbb95B37484A9Fce_v2_1_2
from .validators.v2_1_2.jsd_fc9538fe43d9884d \
import JSONSchemaValidatorFc9538Fe43D9884D \
as JSONSchemaValidatorFc9538Fe43D9884D_v2_1_2
from .validators.v2_1_2.jsd_ff816b8e435897eb \
import JSONSchemaValidatorFf816B8E435897Eb \
as JSONSchemaValidatorFf816B8E435897Eb_v2_1_2
from .validators.v2_1_2.jsd_ffa748cc44e9a437 \
import JSONSchemaValidatorFfa748Cc44E9A437 \
as JSONSchemaValidatorFfa748Cc44E9A437_v2_1_2
from .validators.v2_2_1.jsd_e01233fa258e393239c4b41882806 \
import JSONSchemaValidatorE01233Fa258E393239C4B41882806 \
as JSONSchemaValidatorE01233Fa258E393239C4B41882806_v2_2_1
from .validators.v2_2_1.jsd_aa1e5957ac977603b5cef72f9f \
import JSONSchemaValidatorAa1E5957Ac977603B5Cef72F9F \
as JSONSchemaValidatorAa1E5957Ac977603B5Cef72F9F_v2_2_1
from .validators.v2_2_1.jsd_bdc3bc8a35908aba5858e78805d22 \
import JSONSchemaValidatorBdc3BC8A35908Aba5858E78805D22 \
as JSONSchemaValidatorBdc3BC8A35908Aba5858E78805D22_v2_2_1
from .validators.v2_2_1.jsd_f2f039811951c0af53e3381ae91225 \
import JSONSchemaValidatorF2F039811951C0Af53E3381Ae91225 \
as JSONSchemaValidatorF2F039811951C0Af53E3381Ae91225_v2_2_1
from .validators.v2_2_1.jsd_f73101d5d5e409f571084ab4c6049 \
import JSONSchemaValidatorF73101D5D5E409F571084Ab4C6049 \
as JSONSchemaValidatorF73101D5D5E409F571084Ab4C6049_v2_2_1
from .validators.v2_2_1.jsd_e22c99a82f5764828810acb45e7a9e \
import JSONSchemaValidatorE22C99A82F5764828810Acb45E7A9E \
as JSONSchemaValidatorE22C99A82F5764828810Acb45E7A9E_v2_2_1
from .validators.v2_2_1.jsd_cb88b50dd5ead96ecfb4ab0390f47 \
import JSONSchemaValidatorCb88B50Dd5Ead96EcFb4Ab0390F47 \
as JSONSchemaValidatorCb88B50Dd5Ead96EcFb4Ab0390F47_v2_2_1
from .validators.v2_2_1.jsd_97e350a7a690cdfeffa5eaca \
import JSONSchemaValidator97E350A7A690Cdfeffa5Eaca \
as JSONSchemaValidator97E350A7A690Cdfeffa5Eaca_v2_2_1
from .validators.v2_2_1.jsd_fd6083b0c65d03b2d53f10b3ece59d \
import JSONSchemaValidatorFd6083B0C65D03B2D53F10B3Ece59D \
as JSONSchemaValidatorFd6083B0C65D03B2D53F10B3Ece59D_v2_2_1
from .validators.v2_2_1.jsd_a0a8d545698d1d59a9be90e51 \
import JSONSchemaValidatorA0A8D545698D1D59A9Be90E51 \
as JSONSchemaValidatorA0A8D545698D1D59A9Be90E51_v2_2_1
from .validators.v2_2_1.jsd_a20c25e0fa518bb186fd7747450ef6 \
import JSONSchemaValidatorA20C25E0Fa518BB186Fd7747450Ef6 \
as JSONSchemaValidatorA20C25E0Fa518BB186Fd7747450Ef6_v2_2_1
from .validators.v2_2_1.jsd_d89e1c3e150ef9faaff44fa483de5 \
import JSONSchemaValidatorD89E1C3E150Ef9FaaFf44Fa483De5 \
as JSONSchemaValidatorD89E1C3E150Ef9FaaFf44Fa483De5_v2_2_1
from .validators.v2_2_1.jsd_f790a930d452708353c374f5c0f90f \
import JSONSchemaValidatorF790A930D452708353C374F5C0F90F \
as JSONSchemaValidatorF790A930D452708353C374F5C0F90F_v2_2_1
from .validators.v2_2_1.jsd_a59a448c5c25f1e8246d6827e6e3215 \
import JSONSchemaValidatorA59A448C5C25F1E8246D6827E6E3215 \
as JSONSchemaValidatorA59A448C5C25F1E8246D6827E6E3215_v2_2_1
from .validators.v2_2_1.jsd_d23f3e54f8c59caac3ca905f7bf543a \
import JSONSchemaValidatorD23F3E54F8C59CaAc3CA905F7Bf543A \
as JSONSchemaValidatorD23F3E54F8C59CaAc3CA905F7Bf543A_v2_2_1
from .validators.v2_2_1.jsd_d999a1d36ee52babb6b619877dad734 \
import JSONSchemaValidatorD999A1D36Ee52BaBb6B619877Dad734 \
as JSONSchemaValidatorD999A1D36Ee52BaBb6B619877Dad734_v2_2_1
from .validators.v2_2_1.jsd_da44fbc3e415a99aac0bdd291e9a87a \
import JSONSchemaValidatorDa44Fbc3E415A99Aac0Bdd291E9A87A \
as JSONSchemaValidatorDa44Fbc3E415A99Aac0Bdd291E9A87A_v2_2_1
from .validators.v2_2_1.jsd_c7266d89581c9601b79b7304fda3 \
import JSONSchemaValidatorC7266D89581C9601B79B7304Fda3 \
as JSONSchemaValidatorC7266D89581C9601B79B7304Fda3_v2_2_1
from .validators.v2_2_1.jsd_e1a76c121857a085149e62e56caadd \
import JSONSchemaValidatorE1A76C121857A085149E62E56Caadd \
as JSONSchemaValidatorE1A76C121857A085149E62E56Caadd_v2_2_1
from .validators.v2_2_1.jsd_f5a13405ba69f3957b98db8663a \
import JSONSchemaValidatorF5A13405Ba69F3957B98Db8663A \
as JSONSchemaValidatorF5A13405Ba69F3957B98Db8663A_v2_2_1
from .validators.v2_2_1.jsd_ed48fc373506cb1688cff36c2cb0f \
import JSONSchemaValidatorEd48FC373506CB1688Cff36C2Cb0F \
as JSONSchemaValidatorEd48FC373506CB1688Cff36C2Cb0F_v2_2_1
from .validators.v2_2_1.jsd_e2202e5f7586e68778ed7772b1 \
import JSONSchemaValidatorE2202E5F7586E68778Ed7772B1 \
as JSONSchemaValidatorE2202E5F7586E68778Ed7772B1_v2_2_1
from .validators.v2_2_1.jsd_e3a724a35854758d65a83823c88435 \
import JSONSchemaValidatorE3A724A35854758D65A83823C88435 \
as JSONSchemaValidatorE3A724A35854758D65A83823C88435_v2_2_1
from .validators.v2_2_1.jsd_cb9f8ad5359b2b2cbc151ac3a842a \
import JSONSchemaValidatorCb9F8Ad5359B2B2CbC151Ac3A842A \
as JSONSchemaValidatorCb9F8Ad5359B2B2CbC151Ac3A842A_v2_2_1
from .validators.v2_2_1.jsd_b16bff74ae54ca88a02b34df169218 \
import JSONSchemaValidatorB16Bff74Ae54Ca88A02B34Df169218 \
as JSONSchemaValidatorB16Bff74Ae54Ca88A02B34Df169218_v2_2_1
from .validators.v2_2_1.jsd_ce6d91900556839c09184d8a11c04d \
import JSONSchemaValidatorCe6D91900556839C09184D8A11C04D \
as JSONSchemaValidatorCe6D91900556839C09184D8A11C04D_v2_2_1
from .validators.v2_2_1.jsd_f256e33af7501a8bdae2742ca9f6d6 \
import JSONSchemaValidatorF256E33Af7501A8BdaE2742Ca9F6D6 \
as JSONSchemaValidatorF256E33Af7501A8BdaE2742Ca9F6D6_v2_2_1
from .validators.v2_2_1.jsd_b85e4ce533d5ff49ddd3b2f9657cfa5 \
import JSONSchemaValidatorB85E4Ce533D5Ff49Ddd3B2F9657Cfa5 \
as JSONSchemaValidatorB85E4Ce533D5Ff49Ddd3B2F9657Cfa5_v2_2_1
from .validators.v2_2_1.jsd_bb187b0c0a55e7e8089ac78eb29d8a2 \
import JSONSchemaValidatorBb187B0C0A55E7E8089Ac78Eb29D8A2 \
as JSONSchemaValidatorBb187B0C0A55E7E8089Ac78Eb29D8A2_v2_2_1
from .validators.v2_2_1.jsd_d1845268faf55f98bc952872259f16f \
import JSONSchemaValidatorD1845268Faf55F98Bc952872259F16F \
as JSONSchemaValidatorD1845268Faf55F98Bc952872259F16F_v2_2_1
from .validators.v2_2_1.jsd_df400c60659589599f2a0e3e1171985 \
import JSONSchemaValidatorDf400C60659589599F2A0E3E1171985 \
as JSONSchemaValidatorDf400C60659589599F2A0E3E1171985_v2_2_1
from .validators.v2_2_1.jsd_ea24b22ce355a229b7fd067401ddf3a \
import JSONSchemaValidatorEa24B22Ce355A229B7FD067401Ddf3A \
as JSONSchemaValidatorEa24B22Ce355A229B7FD067401Ddf3A_v2_2_1
from .validators.v2_2_1.jsd_ee2008494d158e7bff7f106519a64c5 \
import JSONSchemaValidatorEe2008494D158E7Bff7F106519A64C5 \
as JSONSchemaValidatorEe2008494D158E7Bff7F106519A64C5_v2_2_1
from .validators.v2_2_1.jsd_f77386a48895fa59dcddcc7dd4addb5 \
import JSONSchemaValidatorF77386A48895Fa59DcdDcc7Dd4Addb5 \
as JSONSchemaValidatorF77386A48895Fa59DcdDcc7Dd4Addb5_v2_2_1
from .validators.v2_2_1.jsd_ffa347eb411567a9c793696795250a5 \
import JSONSchemaValidatorFfa347EB411567A9C793696795250A5 \
as JSONSchemaValidatorFfa347EB411567A9C793696795250A5_v2_2_1
from .validators.v2_2_1.jsd_ffcaccdd9f2530abf66adc98c3f0201 \
import JSONSchemaValidatorFfcaccdD9F2530ABf66Adc98C3F0201 \
as JSONSchemaValidatorFfcaccdD9F2530ABf66Adc98C3F0201_v2_2_1
from .validators.v2_2_1.jsd_fa310ab095148bdb00d7d3d5e1676 \
import JSONSchemaValidatorFa310Ab095148Bdb00D7D3D5E1676 \
as JSONSchemaValidatorFa310Ab095148Bdb00D7D3D5E1676_v2_2_1
from .validators.v2_2_1.jsd_a9136d5513985f15e91a19da66c \
import JSONSchemaValidatorA9136D5513985F15E91A19Da66C \
as JSONSchemaValidatorA9136D5513985F15E91A19Da66C_v2_2_1
from .validators.v2_2_1.jsd_d3d71136d95562afc211b40004d109 \
import JSONSchemaValidatorD3D71136D95562Afc211B40004D109 \
as JSONSchemaValidatorD3D71136D95562Afc211B40004D109_v2_2_1
from .validators.v2_2_1.jsd_c1cf6d5d5f0fa2e92539134b6c1d \
import JSONSchemaValidatorC1Cf6D5D5F0FA2E92539134B6C1D \
as JSONSchemaValidatorC1Cf6D5D5F0FA2E92539134B6C1D_v2_2_1
from .validators.v2_2_1.jsd_c141467ea25ec0aa91cbcaff070354 \
import JSONSchemaValidatorC141467Ea25Ec0Aa91Cbcaff070354 \
as JSONSchemaValidatorC141467Ea25Ec0Aa91Cbcaff070354_v2_2_1
from .validators.v2_2_1.jsd_c033291ec4591886bd6ed25f900c1b \
import JSONSchemaValidatorC033291Ec4591886Bd6Ed25F900C1B \
as JSONSchemaValidatorC033291Ec4591886Bd6Ed25F900C1B_v2_2_1
from .validators.v2_2_1.jsd_cfb1d6e52878d057740de275896 \
import JSONSchemaValidatorCfb1D6E52878D057740De275896 \
as JSONSchemaValidatorCfb1D6E52878D057740De275896_v2_2_1
from .validators.v2_2_1.jsd_d84253559e9d3e81881a4bd2fc \
import JSONSchemaValidatorD84253559E9D3E81881A4Bd2Fc \
as JSONSchemaValidatorD84253559E9D3E81881A4Bd2Fc_v2_2_1
from .validators.v2_2_1.jsd_bdc981805b5fad0a038966d52558 \
import JSONSchemaValidatorBdc981805B5FAd0A038966D52558 \
as JSONSchemaValidatorBdc981805B5FAd0A038966D52558_v2_2_1
from .validators.v2_2_1.jsd_bd26b08b64545bae20f60c56891576 \
import JSONSchemaValidatorBd26B08B64545BAe20F60C56891576 \
as JSONSchemaValidatorBd26B08B64545BAe20F60C56891576_v2_2_1
from .validators.v2_2_1.jsd_df9908ad265e83ab77d73803925678 \
import JSONSchemaValidatorDf9908Ad265E83Ab77D73803925678 \
as JSONSchemaValidatorDf9908Ad265E83Ab77D73803925678_v2_2_1
from .validators.v2_2_1.jsd_a3a1bf404bf5772828f66f1e10f074d \
import JSONSchemaValidatorA3A1Bf404Bf5772828F66F1E10F074D \
as JSONSchemaValidatorA3A1Bf404Bf5772828F66F1E10F074D_v2_2_1
from .validators.v2_2_1.jsd_b60f9f312235959812d49dc4c469e83 \
import JSONSchemaValidatorB60F9F312235959812D49Dc4C469E83 \
as JSONSchemaValidatorB60F9F312235959812D49Dc4C469E83_v2_2_1
from .validators.v2_2_1.jsd_bfde206eb445821a5722511f138814a \
import JSONSchemaValidatorBfde206Eb445821A5722511F138814A \
as JSONSchemaValidatorBfde206Eb445821A5722511F138814A_v2_2_1
from .validators.v2_2_1.jsd_e69d02d71905aecbd10b782469efbda \
import JSONSchemaValidatorE69D02D71905AecBd10B782469Efbda \
as JSONSchemaValidatorE69D02D71905AecBd10B782469Efbda_v2_2_1
from .validators.v2_2_1.jsd_e722e05046d5262b55c125237e9b67d \
import JSONSchemaValidatorE722E05046D5262B55C125237E9B67D \
as JSONSchemaValidatorE722E05046D5262B55C125237E9B67D_v2_2_1
from .validators.v2_2_1.jsd_af5f0aa1ed56ab9b98eb602dbd8366 \
import JSONSchemaValidatorAf5F0AA1Ed56Ab9B98Eb602Dbd8366 \
as JSONSchemaValidatorAf5F0AA1Ed56Ab9B98Eb602Dbd8366_v2_2_1
from .validators.v2_2_1.jsd_a2868ff45f5621965f6ece01a742ce \
import JSONSchemaValidatorA2868FF45F5621965F6Ece01A742Ce \
as JSONSchemaValidatorA2868FF45F5621965F6Ece01A742Ce_v2_2_1
from .validators.v2_2_1.jsd_d7d4e55d6bbb21c34ce863a131 \
import JSONSchemaValidatorD7D4E55D6BBb21C34Ce863A131 \
as JSONSchemaValidatorD7D4E55D6BBb21C34Ce863A131_v2_2_1
from .validators.v2_2_1.jsd_b1c03688485b44b1547c428a887c5d \
import JSONSchemaValidatorB1C03688485B44B1547C428A887C5D \
as JSONSchemaValidatorB1C03688485B44B1547C428A887C5D_v2_2_1
from .validators.v2_2_1.jsd_b7d6c62ea6522081fcf55de7eb9fd7 \
import JSONSchemaValidatorB7D6C62Ea6522081FcF55De7Eb9Fd7 \
as JSONSchemaValidatorB7D6C62Ea6522081FcF55De7Eb9Fd7_v2_2_1
from .validators.v2_2_1.jsd_d86f657f8592f97014d2ebf8d37ac \
import JSONSchemaValidatorD86F657F8592F97014D2Ebf8D37Ac \
as JSONSchemaValidatorD86F657F8592F97014D2Ebf8D37Ac_v2_2_1
from .validators.v2_2_1.jsd_e31c795964b3bdf85da1b5a2a5 \
import JSONSchemaValidatorE31C795964B3BdF85Da1B5A2A5 \
as JSONSchemaValidatorE31C795964B3BdF85Da1B5A2A5_v2_2_1
from .validators.v2_2_1.jsd_b3f79d3b45b98849d9180cc08018e \
import JSONSchemaValidatorB3F79D3B45B98849D9180Cc08018E \
as JSONSchemaValidatorB3F79D3B45B98849D9180Cc08018E_v2_2_1
from .validators.v2_2_1.jsd_af29516f0c8591da2a92523b5ab3386 \
import JSONSchemaValidatorAf29516F0C8591DA2A92523B5Ab3386 \
as JSONSchemaValidatorAf29516F0C8591DA2A92523B5Ab3386_v2_2_1
from .validators.v2_2_1.jsd_b21d2947d715c198f5e62ba3149839a \
import JSONSchemaValidatorB21D2947D715C198F5E62Ba3149839A \
as JSONSchemaValidatorB21D2947D715C198F5E62Ba3149839A_v2_2_1
from .validators.v2_2_1.jsd_ce4a30581da554591309dd423a91e7a \
import JSONSchemaValidatorCe4A30581Da554591309Dd423A91E7A \
as JSONSchemaValidatorCe4A30581Da554591309Dd423A91E7A_v2_2_1
from .validators.v2_2_1.jsd_d1944177c95598ebd1986582dc8069a \
import JSONSchemaValidatorD1944177C95598EBd1986582Dc8069A \
as JSONSchemaValidatorD1944177C95598EBd1986582Dc8069A_v2_2_1
from .validators.v2_2_1.jsd_dc0a72537a3578ca31cc5ef29131d35 \
import JSONSchemaValidatorDc0A72537A3578CA31CC5Ef29131D35 \
as JSONSchemaValidatorDc0A72537A3578CA31CC5Ef29131D35_v2_2_1
from .validators.v2_2_1.jsd_dc74c2052a3a4eb7e2a01eaa8e7 \
import JSONSchemaValidatorDc74C2052A3A4Eb7E2A01Eaa8E7 \
as JSONSchemaValidatorDc74C2052A3A4Eb7E2A01Eaa8E7_v2_2_1
from .validators.v2_2_1.jsd_d8cf995d9d99bdc31707817456 \
import JSONSchemaValidatorD8Cf995D9D99BdC31707817456 \
as JSONSchemaValidatorD8Cf995D9D99BdC31707817456_v2_2_1
from .validators.v2_2_1.jsd_d420225889bb16f99ec7ba099a \
import JSONSchemaValidatorD420225889Bb16F99Ec7Ba099A \
as JSONSchemaValidatorD420225889Bb16F99Ec7Ba099A_v2_2_1
from .validators.v2_2_1.jsd_b199c175281977a7e9e6bd9255b \
import JSONSchemaValidatorB199C175281977A7E9E6Bd9255B \
as JSONSchemaValidatorB199C175281977A7E9E6Bd9255B_v2_2_1
from .validators.v2_2_1.jsd_b70d8c6f85254a053ab281fd9e8fc \
import JSONSchemaValidatorB70D8C6F85254A053Ab281Fd9E8Fc \
as JSONSchemaValidatorB70D8C6F85254A053Ab281Fd9E8Fc_v2_2_1
from .validators.v2_2_1.jsd_eb4ab5a978fe8785516c8af42 \
import JSONSchemaValidatorEB4Ab5A978Fe8785516C8Af42 \
as JSONSchemaValidatorEB4Ab5A978Fe8785516C8Af42_v2_2_1
from .validators.v2_2_1.jsd_da8e5cdd435db0b1da1684be8f15b8 \
import JSONSchemaValidatorDa8E5CDd435Db0B1Da1684Be8F15B8 \
as JSONSchemaValidatorDa8E5CDd435Db0B1Da1684Be8F15B8_v2_2_1
from .validators.v2_2_1.jsd_fd269fe156e4b5ad3f4210b7b168 \
import JSONSchemaValidatorFd269Fe156E4B5Ad3F4210B7B168 \
as JSONSchemaValidatorFd269Fe156E4B5Ad3F4210B7B168_v2_2_1
from .validators.v2_2_1.jsd_fdd2af215b9b8327a3e24a3dea89 \
import JSONSchemaValidatorFdd2Af215B9B8327A3E24A3Dea89 \
as JSONSchemaValidatorFdd2Af215B9B8327A3E24A3Dea89_v2_2_1
from .validators.v2_2_1.jsd_eb1bf346225a4ba24f18408ffca7c9 \
import JSONSchemaValidatorEb1Bf346225A4BA24F18408Ffca7C9 \
as JSONSchemaValidatorEb1Bf346225A4BA24F18408Ffca7C9_v2_2_1
from .validators.v2_2_1.jsd_b7335c6b5057b183a339aa30e7c233 \
import JSONSchemaValidatorB7335C6B5057B183A339Aa30E7C233 \
as JSONSchemaValidatorB7335C6B5057B183A339Aa30E7C233_v2_2_1
from .validators.v2_2_1.jsd_d9ccfce8451809129ec5de42c5048 \
import JSONSchemaValidatorD9CcfCe8451809129Ec5De42C5048 \
as JSONSchemaValidatorD9CcfCe8451809129Ec5De42C5048_v2_2_1
from .validators.v2_2_1.jsd_cda740c5bdc92fd150c334d0e4e \
import JSONSchemaValidatorCda740C5Bdc92Fd150C334D0E4E \
as JSONSchemaValidatorCda740C5Bdc92Fd150C334D0E4E_v2_2_1
from .validators.v2_2_1.jsd_a1de7ff46fa5da09c5051c06ad07f2c \
import JSONSchemaValidatorA1De7Ff46Fa5Da09C5051C06Ad07F2C \
as JSONSchemaValidatorA1De7Ff46Fa5Da09C5051C06Ad07F2C_v2_2_1
from .validators.v2_2_1.jsd_b0753b63045528194f2f5bbf8ae432d \
import JSONSchemaValidatorB0753B63045528194F2F5Bbf8Ae432D \
as JSONSchemaValidatorB0753B63045528194F2F5Bbf8Ae432D_v2_2_1
from .validators.v2_2_1.jsd_d65f9b9d8ad5426bdf7e55461fcf761 \
import JSONSchemaValidatorD65F9B9D8Ad5426Bdf7E55461Fcf761 \
as JSONSchemaValidatorD65F9B9D8Ad5426Bdf7E55461Fcf761_v2_2_1
from .validators.v2_2_1.jsd_e4f91ea42515ccdbc24549b84ca1e90 \
import JSONSchemaValidatorE4F91Ea42515CcdBc24549B84Ca1E90 \
as JSONSchemaValidatorE4F91Ea42515CcdBc24549B84Ca1E90_v2_2_1
from .validators.v2_2_1.jsd_e6317a46c835f0881f08071959bb026 \
import JSONSchemaValidatorE6317A46C835F0881F08071959Bb026 \
as JSONSchemaValidatorE6317A46C835F0881F08071959Bb026_v2_2_1
from .validators.v2_2_1.jsd_f5d13316c8f53a0b78d881c738a15c6 \
import JSONSchemaValidatorF5D13316C8F53A0B78D881C738A15C6 \
as JSONSchemaValidatorF5D13316C8F53A0B78D881C738A15C6_v2_2_1
from .validators.v2_2_1.jsd_bbf7ce025bc2a291b90c37a6b898 \
import JSONSchemaValidatorBbf7Ce025Bc2A291B90C37A6B898 \
as JSONSchemaValidatorBbf7Ce025Bc2A291B90C37A6B898_v2_2_1
from .validators.v2_2_1.jsd_c1cb24a2b53ce8d29d119c6ee1112 \
import | |
that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == services.GetJobTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, resources.JobTemplate)
assert response.name == "name_value"
def test_get_job_template_from_dict():
    # Re-run the positional test with a dict request to cover dict coercion.
    test_get_job_template(request_type=dict)
def test_get_job_template_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the underlying gRPC stub so no real RPC is made.
    stub_type = type(client.transport.get_job_template)
    with mock.patch.object(stub_type, "__call__") as stub_call:
        client.get_job_template()
        stub_call.assert_called()
    # The stub must have received a default (empty) request object.
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0] == services.GetJobTemplateRequest()
@pytest.mark.asyncio
async def test_get_job_template_async(
    transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest
):
    """get_job_template (async): stub is invoked and response is correctly typed."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request is sufficient.
    request = request_type()
    # Patch the underlying gRPC stub and designate an awaitable fake response.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate(name="name_value",)
        )
        response = await client.get_job_template(request)
    # The stub was invoked with the request we supplied.
    assert len(stub_call.mock_calls)
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0] == services.GetJobTemplateRequest()
    # The response is the type and value we designated.
    assert isinstance(response, resources.JobTemplate)
    assert response.name == "name_value"
@pytest.mark.asyncio
async def test_get_job_template_async_from_dict():
    # Re-run the async test with a dict request to cover dict coercion.
    await test_get_job_template_async(request_type=dict)
def test_get_job_template_field_headers():
    """Values that are part of the HTTP/1.1 URI are sent as a request-params header."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the URI-relevant field with a non-empty value.
    request = services.GetJobTemplateRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub_call:
        stub_call.return_value = resources.JobTemplate()
        client.get_job_template(request)
    # Exactly one stub invocation, carrying our request unchanged.
    assert len(stub_call.mock_calls) == 1
    _, call_args, call_kwargs = stub_call.mock_calls[0]
    assert call_args[0] == request
    # The routing header derived from request.name was attached.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_get_job_template_field_headers_async():
    """URI-relevant fields are sent as a request-params header (async client)."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the URI-relevant field with a non-empty value.
    request = services.GetJobTemplateRequest()
    request.name = "name/value"
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        await client.get_job_template(request)
    # The stub was invoked with our request unchanged.
    assert len(stub_call.mock_calls)
    _, call_args, call_kwargs = stub_call.mock_calls[0]
    assert call_args[0] == request
    # The routing header derived from request.name was attached.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_get_job_template_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub_call:
        stub_call.return_value = resources.JobTemplate()
        # Invoke with a truthy value for each flattened field.
        client.get_job_template(name="name_value",)
    # The request object the stub received reflects the flattened field.
    assert len(stub_call.mock_calls) == 1
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0].name == "name_value"
def test_get_job_template_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    bad_request = services.GetJobTemplateRequest()
    with pytest.raises(ValueError):
        client.get_job_template(bad_request, name="name_value")
@pytest.mark.asyncio
async def test_get_job_template_flattened_async():
    """Flattened keyword arguments are folded into the request object (async client)."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
        # Designate an appropriate (awaitable) return value for the call.
        # A redundant assignment of a plain JobTemplate was removed here: the
        # async client awaits the stub's result, so only the FakeUnaryUnaryCall
        # wrapper is meaningful, and the earlier assignment was dead code.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_job_template(name="name_value",)
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_job_template_flattened_error_async():
    """Passing both a request object and flattened fields raises ValueError (async)."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    bad_request = services.GetJobTemplateRequest()
    with pytest.raises(ValueError):
        await client.get_job_template(bad_request, name="name_value")
def test_delete_job_template(
    transport: str = "grpc", request_type=services.DeleteJobTemplateRequest
):
    """delete_job_template: the stub is invoked and the call returns None."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request is sufficient.
    request = request_type()
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub_call:
        stub_call.return_value = None
        response = client.delete_job_template(request)
    # Exactly one stub invocation, with a default request object.
    assert len(stub_call.mock_calls) == 1
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0] == services.DeleteJobTemplateRequest()
    # Deletion produces no payload.
    assert response is None
def test_delete_job_template_from_dict():
    # Re-run the positional test with a dict request to cover dict coercion.
    test_delete_job_template(request_type=dict)
def test_delete_job_template_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Patch the underlying gRPC stub so no real RPC is made.
    stub_type = type(client.transport.delete_job_template)
    with mock.patch.object(stub_type, "__call__") as stub_call:
        client.delete_job_template()
        stub_call.assert_called()
    # The stub must have received a default (empty) request object.
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0] == services.DeleteJobTemplateRequest()
@pytest.mark.asyncio
async def test_delete_job_template_async(
    transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest
):
    """delete_job_template (async): the stub is invoked and the call returns None."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request is sufficient.
    request = request_type()
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub_call:
        # Deletion has no payload; designate an awaitable None.
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_job_template(request)
    # The stub was invoked with a default request object.
    assert len(stub_call.mock_calls)
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0] == services.DeleteJobTemplateRequest()
    # Deletion produces no payload.
    assert response is None
@pytest.mark.asyncio
async def test_delete_job_template_async_from_dict():
    # Re-run the async test with a dict request to cover dict coercion.
    await test_delete_job_template_async(request_type=dict)
def test_delete_job_template_field_headers():
    """Values that are part of the HTTP/1.1 URI are sent as a request-params header."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    # Populate the URI-relevant field with a non-empty value.
    request = services.DeleteJobTemplateRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub_call:
        stub_call.return_value = None
        client.delete_job_template(request)
    # Exactly one stub invocation, carrying our request unchanged.
    assert len(stub_call.mock_calls) == 1
    _, call_args, call_kwargs = stub_call.mock_calls[0]
    assert call_args[0] == request
    # The routing header derived from request.name was attached.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
@pytest.mark.asyncio
async def test_delete_job_template_field_headers_async():
    """URI-relevant fields are sent as a request-params header (async client)."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the URI-relevant field with a non-empty value.
    request = services.DeleteJobTemplateRequest()
    request.name = "name/value"
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub_call:
        stub_call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_job_template(request)
    # The stub was invoked with our request unchanged.
    assert len(stub_call.mock_calls)
    _, call_args, call_kwargs = stub_call.mock_calls[0]
    assert call_args[0] == request
    # The routing header derived from request.name was attached.
    assert ("x-goog-request-params", "name=name/value",) in call_kwargs["metadata"]
def test_delete_job_template_flattened():
    """Flattened keyword arguments are folded into the request object."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub_call:
        stub_call.return_value = None
        # Invoke with a truthy value for each flattened field.
        client.delete_job_template(name="name_value",)
    # The request object the stub received reflects the flattened field.
    assert len(stub_call.mock_calls) == 1
    _, call_args, _ = stub_call.mock_calls[0]
    assert call_args[0].name == "name_value"
def test_delete_job_template_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
    bad_request = services.DeleteJobTemplateRequest()
    with pytest.raises(ValueError):
        client.delete_job_template(bad_request, name="name_value")
@pytest.mark.asyncio
async def test_delete_job_template_flattened_async():
client = TranscoderServiceAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# | |
"""Cabal packages"""
load("@bazel_skylib//lib:dicts.bzl", "dicts")
load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("@bazel_tools//tools/cpp:toolchain_utils.bzl", "find_cpp_toolchain")
load(":cc.bzl", "cc_interop_info")
load(":private/actions/info.bzl", "library_info_output_groups")
load(":private/context.bzl", "haskell_context", "render_env")
load(":private/dependencies.bzl", "gather_dep_info")
load(":private/expansions.bzl", "expand_make_variables")
load(":private/mode.bzl", "is_profiling_enabled")
load(
":private/path_utils.bzl",
"create_rpath_entry",
"join_path_list",
"relative_rpath_prefix",
"truly_relativize",
)
load(":private/set.bzl", "set")
load(":private/typing.bzl", "typecheck_stackage_extradeps")
load(":haddock.bzl", "generate_unified_haddock_info")
load(
":private/workspace_utils.bzl",
_execute_or_fail_loudly = "execute_or_fail_loudly",
)
load(
":providers.bzl",
"HaddockInfo",
"HaskellInfo",
"HaskellLibraryInfo",
"all_dependencies_package_ids",
)
load(
":private/cc_libraries.bzl",
"deps_HaskellCcLibrariesInfo",
"get_cc_libraries",
"get_ghci_library_files",
"get_library_files",
"haskell_cc_libraries_aspect",
)
def _so_extension(hs):
    """Shared-library file extension for the current platform.

    Returns "dylib" when the toolchain targets Darwin, "so" otherwise.
    """
    if hs.toolchain.is_darwin:
        return "dylib"
    return "so"
def _dirname(file):
    """Return the directory name of a File (usable as a map_each callback)."""
    return file.dirname
def _version(name):
    """Return the version component of a package name.

    For a name without any "-" separator the whole name is returned,
    mirroring str.rpartition semantics.
    """
    _, _, version = name.rpartition("-")
    return version
def _has_version(name):
    """Check whether a package identifier has a version component.

    True when the text after the last "-" is all digits and dots.
    """
    candidate = name.rpartition("-")[2]
    return candidate.replace(".", "").isdigit()
def _chop_version(name):
    """Remove any version component from the given package name.

    Returns the empty string for a name without a "-" separator,
    mirroring str.rpartition semantics.
    """
    prefix, _, _ = name.rpartition("-")
    return prefix
def _find_cabal(hs, srcs):
    """Locate the root .cabal file among srcs; fail if none is present.

    Among multiple .cabal files the one with the lexicographically
    smallest dirname (the root-most one) is chosen.
    """
    cabal = None
    for src in srcs:
        if src.extension != "cabal":
            continue
        if cabal == None or src.dirname < cabal.dirname:
            cabal = src
    if not cabal:
        fail("A .cabal file was not found in the srcs attribute.")
    return cabal
def _find_setup(hs, cabal, srcs):
    """Return the Setup script from srcs, or generate a default one.

    When several Setup.hs/Setup.lhs files exist, the one with the
    lexicographically smallest dirname wins. When none exists, a canonical
    defaultMain stub is written next to the .cabal file.
    """
    setup = None
    for src in srcs:
        if src.basename not in ["Setup.hs", "Setup.lhs"]:
            continue
        if setup == None or src.dirname < setup.dirname:
            setup = src
    if setup:
        return setup

    # No Setup script shipped with the package: generate the canonical one.
    setup = hs.actions.declare_file("Setup.hs", sibling = cabal)
    hs.actions.write(
        output = setup,
        content = """
module Main where
import Distribution.Simple
main :: IO ()
main = defaultMain
""",
    )
    return setup
# Build tools Cabal knows by name; each one found among a target's tools is
# forwarded to `configure` via a --with-PROG=PATH flag (see _cabal_tool_flag).
_CABAL_TOOLS = ["alex", "c2hs", "cpphs", "doctest", "happy"]

# Some old packages are empty compatibility shims. Empty packages
# cause Cabal to not produce the outputs it normally produces. Instead
# of detecting that, we blacklist the offending packages, on the
# assumption that such packages are old and rare.
#
# TODO: replace this with a more general solution.
_EMPTY_PACKAGES_BLACKLIST = [
    "bytestring-builder",
    "doctest-discover",
    "fail",
    "mtl-compat",
    "nats",
]
def _cabal_tool_flag(tool):
    """Return a --with-PROG=PATH flag if input is a recognized Cabal tool. None otherwise."""
    if tool.basename not in _CABAL_TOOLS:
        return None
    return "--with-{}={}".format(tool.basename, tool.path)
def _binary_paths(binaries):
    """List the directory of every binary in the given depset (for PATH)."""
    dirs = []
    for binary in binaries.to_list():
        dirs.append(binary.dirname)
    return dirs
def _concat(sequences):
    """Flatten one level: concatenate the given sequences into a single list."""
    out = []
    for seq in sequences:
        out.extend(seq)
    return out
def _prepare_cabal_inputs(
        hs,
        cc,
        posix,
        dep_info,
        cc_info,
        direct_cc_info,
        component,
        package_id,
        tool_inputs,
        tool_input_manifests,
        cabal,
        setup,
        setup_deps,
        setup_dep_info,
        srcs,
        compiler_flags,
        flags,
        generate_haddock,
        cabal_wrapper,
        package_database,
        verbose,
        transitive_haddocks,
        dynamic_binary = None):
    """Compute Cabal wrapper, arguments, inputs.

    Assembles the command line, environment, and input depset for a single
    cabal_wrapper invocation that builds one Cabal component.

    Args:
      hs: Haskell context (provides toolchain, actions, env, tools).
      cc: C interop info; carries cc_libraries_info and library lists.
      posix: POSIX toolchain (its `paths` extend the action PATH).
      dep_info: dependency info for regular dependencies.
      cc_info: merged CcInfo of all dependencies.
      direct_cc_info: merged CcInfo of direct C-only dependencies.
      component: component argument forwarded to the wrapper.
      package_id: package identifier, forwarded to the wrapper.
      tool_inputs: depset of build-tool binaries.
      tool_input_manifests: runfiles manifests for those tools.
      cabal: the .cabal File.
      setup: the Setup script File.
      setup_deps: package ids of Setup-script dependencies.
      setup_dep_info: dependency info for Setup-script dependencies.
      srcs: package source files.
      compiler_flags: user GHC options (passed as --ghc-option=...).
      flags: Cabal flags (passed as a single --flags=... argument).
      generate_haddock: forwarded to the wrapper.
      cabal_wrapper: the wrapper executable, returned in the struct.
      verbose: if truthy, sets CABAL_VERBOSE=True in the action env.
      package_database: the package db File whose dirname is forwarded.
      transitive_haddocks: haddock files of transitive deps (inputs only).
      dynamic_binary: optional dynamic output; when set, RUNPATH flags for
        all dynamic libraries are added.

    Returns:
      struct(cabal_wrapper, args, inputs, input_manifests, env, runfiles).
    """
    with_profiling = is_profiling_enabled(hs)

    # Haskell library dependencies or indirect C library dependencies are
    # already covered by their corresponding package-db entries. We only need
    # to add libraries and headers for direct C library dependencies to the
    # command line.
    direct_libs = get_ghci_library_files(hs, cc.cc_libraries_info, cc.cc_libraries)

    # The regular Haskell rules perform mostly static linking, i.e. where
    # possible all C library dependencies are linked statically. Cabal has no
    # such mode, and since we have to provide dynamic C libraries for
    # compilation, they will also be used for linking. Hence, we need to add
    # RUNPATH flags for all dynamic C library dependencies. Cabal also produces
    # a dynamic and a static Haskell library in one go. The dynamic library
    # will link other Haskell libraries dynamically. For those we need to also
    # provide RUNPATH flags for dynamic Haskell libraries.
    (_, dynamic_libs) = get_library_files(
        hs,
        cc.cc_libraries_info,
        cc.transitive_libraries,
        dynamic = True,
    )

    # Executables build by Cabal will link Haskell libraries statically, so we
    # only need to include dynamic C libraries in the runfiles tree.
    (_, runfiles_libs) = get_library_files(
        hs,
        cc.cc_libraries_info,
        get_cc_libraries(cc.cc_libraries_info, cc.transitive_libraries),
        dynamic = True,
    )

    # Setup dependencies are loaded by runghc.
    setup_libs = get_ghci_library_files(hs, cc.cc_libraries_info, cc.setup_libraries)

    # The regular Haskell rules have separate actions for linking and
    # compilation to which we pass different sets of libraries as inputs. The
    # Cabal rules, in contrast, only have a single action for compilation and
    # linking, so we must provide both sets of libraries as inputs to the same
    # action.
    transitive_compile_libs = get_ghci_library_files(hs, cc.cc_libraries_info, cc.transitive_libraries)
    transitive_link_libs = _concat(get_library_files(hs, cc.cc_libraries_info, cc.transitive_libraries))

    env = dict(hs.env)
    env["PATH"] = join_path_list(hs, _binary_paths(tool_inputs) + posix.paths)
    if hs.toolchain.is_darwin:
        env["SDKROOT"] = "macosx"  # See haskell/private/actions/link.bzl
    if verbose:
        env["CABAL_VERBOSE"] = "True"

    args = hs.actions.args()
    package_databases = dep_info.package_databases
    transitive_headers = cc_info.compilation_context.headers
    direct_include_dirs = depset(transitive = [
        direct_cc_info.compilation_context.includes,
        direct_cc_info.compilation_context.quote_includes,
        direct_cc_info.compilation_context.system_includes,
    ])
    direct_lib_dirs = [file.dirname for file in direct_libs]

    # Positional arguments consumed by the wrapper (before the "--" separator).
    args.add_all([component, package_id, generate_haddock, setup, cabal.dirname, package_database.dirname])

    # GHC arguments that make the Setup script's own dependencies visible
    # (package ids and package dbs), joined into a single --ghc-arg string.
    args.add_joined([
        arg
        for package_id in setup_deps
        for arg in ["-package-id", package_id]
    ] + [
        arg
        for package_db in setup_dep_info.package_databases.to_list()
        for arg in ["-package-db", "./" + _dirname(package_db)]
    ], join_with = " ", format_each = "--ghc-arg=%s", omit_if_empty = False)
    args.add("--flags=" + " ".join(flags))
    args.add_all(compiler_flags, format_each = "--ghc-option=%s")
    if dynamic_binary:
        # RUNPATH entries from the dynamic binary to each dynamic library.
        args.add_all(
            [
                "--ghc-option=-optl-Wl,-rpath," + create_rpath_entry(
                    binary = dynamic_binary,
                    dependency = lib,
                    keep_filename = False,
                    prefix = relative_rpath_prefix(hs.toolchain.is_darwin),
                )
                for lib in dynamic_libs
            ],
            uniquify = True,
        )
    args.add("--")

    # Everything after "--" is forwarded to Cabal configure.
    args.add_all(package_databases, map_each = _dirname, format_each = "--package-db=%s")
    args.add_all(direct_include_dirs, format_each = "--extra-include-dirs=%s")
    args.add_all(direct_lib_dirs, format_each = "--extra-lib-dirs=%s", uniquify = True)
    if with_profiling:
        args.add("--enable-profiling")

    # Redundant with _binary_paths() above, but better be explicit when we can.
    args.add_all(tool_inputs, map_each = _cabal_tool_flag)

    inputs = depset(
        [setup, hs.tools.ghc, hs.tools.ghc_pkg, hs.tools.runghc],
        transitive = [
            depset(srcs),
            depset(cc.files),
            package_databases,
            setup_dep_info.package_databases,
            transitive_headers,
            depset(setup_libs),
            depset(transitive_compile_libs),
            depset(transitive_link_libs),
            depset(transitive_haddocks),
            setup_dep_info.interface_dirs,
            setup_dep_info.hs_libraries,
            dep_info.interface_dirs,
            dep_info.hs_libraries,
            tool_inputs,
        ],
    )
    input_manifests = tool_input_manifests + hs.toolchain.cc_wrapper.manifests

    return struct(
        cabal_wrapper = cabal_wrapper,
        args = args,
        inputs = inputs,
        input_manifests = input_manifests,
        env = env,
        runfiles = depset(direct = runfiles_libs),
    )
def _gather_transitive_haddocks(deps):
    """Collect haddock files from every dependency that carries HaddockInfo."""
    haddocks = []
    for dep in deps:
        if HaddockInfo not in dep:
            continue
        for haddock_files in dep[HaddockInfo].transitive_haddocks.values():
            haddocks.extend(haddock_files)
    return depset(direct = haddocks)
def _shorten_library_symlink(dynamic_library):
    """Build a short symlink path for a dynamic library.

    The workspace root is escaped into a single path segment
    ("_" -> "_U", "/" -> "_S") and joined with the library's basename.
    """
    escaped_root = dynamic_library.owner.workspace_root.replace("_", "_U").replace("/", "_S")
    return paths.join(escaped_root, dynamic_library.basename)
def _haskell_cabal_library_impl(ctx):
hs = haskell_context(ctx)
dep_info = gather_dep_info(ctx, ctx.attr.deps)
setup_dep_info = gather_dep_info(ctx, ctx.attr.setup_deps)
setup_deps = all_dependencies_package_ids(ctx.attr.setup_deps)
cc = cc_interop_info(ctx)
# All C and Haskell library dependencies.
cc_info = cc_common.merge_cc_infos(
cc_infos = [dep[CcInfo] for dep in ctx.attr.deps if CcInfo in dep],
)
# Separate direct C library dependencies.
direct_cc_info = cc_common.merge_cc_infos(
cc_infos = [
dep[CcInfo]
for dep in ctx.attr.deps
if CcInfo in dep and not HaskellInfo in dep
],
)
posix = ctx.toolchains["@rules_sh//sh/posix:toolchain_type"]
package_name = ctx.attr.package_name if ctx.attr.package_name else hs.label.name
package_id = "{}-{}".format(
package_name,
ctx.attr.version,
)
with_profiling = is_profiling_enabled(hs)
user_compile_flags = _expand_make_variables("compiler_flags", ctx, ctx.attr.compiler_flags)
cabal = _find_cabal(hs, ctx.files.srcs)
setup = _find_setup(hs, cabal, ctx.files.srcs)
package_database = hs.actions.declare_file(
"_install/{}.conf.d/package.cache".format(package_id),
sibling = cabal,
)
interfaces_dir = hs.actions.declare_directory(
"_install/{}_iface".format(package_id),
sibling = cabal,
)
data_dir = hs.actions.declare_directory(
"_install/{}_data".format(package_id),
sibling = cabal,
)
if ctx.attr.haddock:
haddock_file = hs.actions.declare_file(
"_install/{}_haddock/{}.haddock".format(package_id, package_name),
sibling = cabal,
)
haddock_html_dir = hs.actions.declare_directory(
"_install/{}_haddock_html".format(package_id),
sibling = cabal,
)
else:
haddock_file = None
haddock_html_dir = None
vanilla_library = hs.actions.declare_file(
"_install/lib/libHS{}.a".format(package_id),
sibling = cabal,
)
if with_profiling:
profiling_library = hs.actions.declare_file(
"_install/lib/libHS{}_p.a".format(package_id),
sibling = cabal,
)
static_library = profiling_library
else:
profiling_library = None
static_library = vanilla_library
if hs.toolchain.is_static:
dynamic_library = None
else:
dynamic_library = hs.actions.declare_file(
"_install/lib/libHS{}-ghc{}.{}".format(
package_id,
hs.toolchain.version,
_so_extension(hs),
),
sibling = cabal,
)
(tool_inputs, tool_input_manifests) = ctx.resolve_tools(tools = ctx.attr.tools)
c = _prepare_cabal_inputs(
hs,
cc,
posix,
dep_info,
cc_info,
direct_cc_info,
component = "lib:{}".format(
ctx.attr.package_name if ctx.attr.package_name else hs.label.name,
),
package_id = package_id,
tool_inputs = tool_inputs,
tool_input_manifests = tool_input_manifests,
cabal = cabal,
setup = setup,
setup_deps = setup_deps,
setup_dep_info = setup_dep_info,
srcs = ctx.files.srcs,
compiler_flags = user_compile_flags,
flags = ctx.attr.flags,
generate_haddock = ctx.attr.haddock,
cabal_wrapper = ctx.executable._cabal_wrapper,
package_database = package_database,
verbose = ctx.attr.verbose,
dynamic_binary = dynamic_library,
transitive_haddocks = _gather_transitive_haddocks(ctx.attr.deps),
)
outputs = [
package_database,
interfaces_dir,
vanilla_library,
data_dir,
]
if ctx.attr.haddock:
outputs.extend([haddock_file, haddock_html_dir])
if dynamic_library != None:
outputs.append(dynamic_library)
if with_profiling:
outputs.append(profiling_library)
ctx.actions.run(
executable = c.cabal_wrapper,
arguments = [c.args],
inputs = c.inputs,
input_manifests = c.input_manifests,
tools = [c.cabal_wrapper],
outputs = outputs,
env = c.env,
mnemonic = "HaskellCabalLibrary",
progress_message = "HaskellCabalLibrary {}".format(hs.label),
)
default_info = DefaultInfo(
files = depset([static_library] + ([dynamic_library] if dynamic_library != None else [])),
runfiles = ctx.runfiles(
files = [data_dir],
collect_default = True,
),
)
hs_info = HaskellInfo(
package_databases = depset([package_database], transitive = [dep_info.package_databases]),
version_macros = set.empty(),
source_files = depset(),
extra_source_files = depset(),
import_dirs = set.empty(),
hs_libraries = depset(
direct = [lib for lib in [vanilla_library, dynamic_library, profiling_library] if lib],
transitive = [dep_info.hs_libraries],
order = "topological",
),
interface_dirs = depset([interfaces_dir], transitive = [dep_info.interface_dirs]),
compile_flags = [],
)
lib_info = HaskellLibraryInfo(package_id = package_id, version = None, exports = [])
if ctx.attr.haddock:
doc_info = generate_unified_haddock_info(
this_package_id = package_id,
this_package_html = haddock_html_dir,
this_package_haddock = haddock_file,
deps = ctx.attr.deps,
)
else:
doc_info = None
cc_toolchain = find_cpp_toolchain(ctx)
feature_configuration = cc_common.configure_features(
ctx = | |
# Repository: Sharath302/popmon
# Copyright (c) 2021 ING Wholesale Banking Advanced Analytics
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import numpy as np
import pandas as pd
from numpy.lib.stride_tricks import as_strided
from scipy import linalg, stats
from scipy.stats import linregress, norm
from ..analysis.hist_numpy import (
check_similar_hists,
get_consistent_numpy_2dgrids,
get_consistent_numpy_entries,
set_2dgrid,
)
from ..hist.hist_utils import COMMON_HIST_TYPES, is_numeric
from ..stats.numpy import probability_distribution_mean_covariance
def pull(row, suffix_mean="_mean", suffix_std="_std", cols=None):
    """Calculate normalized residual (pull) for list of cols
    Function can be used by ApplyFunc module.
    :param pd.Series row: row to apply pull function to
    :param str suffix_mean: suffix of mean. mean column = metric + suffix_mean
    :param str suffix_std: suffix of std. std column = metric + suffix_std
    :param list cols: list of cols to calculate pull of
    :return: pd.Series of pulls, indexed by metric name (NaN when undefined)
    """
    # Explicit dtype: an empty pd.Series() without one defaults to object
    # dtype in modern pandas (and was deprecated before that).
    x = pd.Series(dtype=float)
    if cols is None or len(cols) == 0:
        # if no columns are given, find columns for which pulls can be calculated.
        # e.g. to calculate x_pull, need to have [x, x_mean, x_std] present. If so, put x in cols.
        cols = []
        for m in row.index.to_list():
            if m not in cols:
                required = [m, m + suffix_mean, m + suffix_std]
                if all(r in row for r in required):
                    cols.append(m)
    for m in cols:
        # default to NaN; overwritten below only when the pull is well-defined
        x[m] = np.nan
        required = [m, m + suffix_mean, m + suffix_std]
        if not all(r in row for r in required):
            continue
        if any(pd.isnull(row[required])):
            continue
        if row[m + suffix_std] == 0.0:
            # zero spread: pull is undefined, keep NaN
            continue
        x[m] = (row[m] - row[m + suffix_mean]) / row[m + suffix_std]
    return x
def expanding_mean(df, shift=1):
    """Expanding mean of all numeric columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int shift: size of shift. default is 1.
    :return: df with expanding means of columns
    """
    shifted = df.shift(shift)
    return shifted.expanding().mean()
def expanding_std(df, shift=1):
    """Expanding standard deviation of all numeric columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int shift: size of shift. default is 1.
    :return: df with expanding std of columns
    """
    shifted = df.shift(shift)
    return shifted.expanding().std()
def expanding_apply(df, func, shift=1, *args, **kwargs):
    """Expanding apply() over all columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param func: function to be applied
    :param int shift: size of shift. default is 1.
    :param args: args passed on to function
    :param kwargs: kwargs passed on to function
    :return: df with expanding results of function applied to all columns
    """
    window = df.shift(shift).expanding()
    return window.apply(func, args=args, **kwargs)
def rolling_std(df, window, shift=1):
    """Rolling standard deviation of all numeric columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int shift: size of shift. default is 1.
    :param int window: size of rolling window.
    :return: df with rolling std of columns
    """
    shifted = df.shift(shift)
    return shifted.rolling(window).std()
def rolling_mean(df, window, shift=1):
    """Rolling mean of all numeric columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int shift: size of shift. default is 1.
    :param int window: size of rolling window.
    :return: df with rolling mean of columns
    """
    shifted = df.shift(shift)
    return shifted.rolling(window).mean()
def rolling_apply(df, window, func, shift=1, *args, **kwargs):
    """Rolling apply() over all columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int window: size of rolling window.
    :param func: function to be applied
    :param int shift: size of shift. default is 1.
    :param args: args passed on to function
    :param kwargs: kwargs passed on to function
    :return: df with rolling results of function applied to all columns
    """
    # raw=False: each window is passed to func as a pandas Series
    roller = df.shift(shift).rolling(window)
    return roller.apply(func, raw=False, args=args, **kwargs)
def rolling_lr(df, window, index=0, shift=0):
    """Rolling scipy linregress() over all columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int window: size of rolling window.
    :param int index: index of linregress results to return. default is 0 (slope).
    :param int shift: size of shift. default is 0.
    :return: df with rolling results of linregress() applied to all columns
    """
    def _fit(values):
        # regress the window values against their sample positions
        return linregress(np.arange(len(values)), values)[index]

    # raw=True passes plain numpy arrays (and suppresses a FutureWarning)
    return df.shift(shift).rolling(window).apply(_fit, raw=True)
def rolling_lr_zscore(df, window, shift=0):
    """Rolling z-score of scipy linregress() over all columns of a pandas dataframe.
    Function can be used by ApplyFunc module.
    :param pd.DataFrame df: input pandas dataframe
    :param int window: size of rolling window.
    :param int shift: size of shift. default is 0.
    :return: df with rolling z-score results of linregress() applied to all columns
    """
    # df.rolling is not used directly as it does not accept timestamps;
    # `roll` below also handles non-numerical cells such as histograms.
    def _row_zscore(window_row):
        out = pd.Series(index=window_row.index, dtype=float)
        for col in window_row.index:
            try:
                values = window_row[col].astype(float)
                # index 3 of linregress results is the p-value; convert to z-score
                pvalue = linregress(np.arange(len(values)), values)[3]
                out[col] = norm.ppf(pvalue)
            except Exception:
                # incomplete windows (None fill) end up here
                out[col] = np.nan
        return out

    return roll(df, window=window, shift=shift).apply(_row_zscore, axis=1)
def roll(df, window, shift=1):
    """Implementation of rolling window that can handle non-numerical columns such as histograms
    :param pd.DataFrame df: input dataframe to apply rolling function to.
    :param int window: size of rolling window
    :param int shift: shift of dataframe, default is 1 (optional)
    :return: pd.DataFrame with the same index as ``df``; each cell holds the
        array of the ``window`` previous (shifted) values of that column, with
        ``None`` entries for rows where a full window is not yet available.
    """
    assert shift >= 0
    assert isinstance(
        df, (pd.DataFrame, pd.Series)
    ), "input should be a dataframe or series"
    cols = df.columns if isinstance(df, pd.DataFrame) else [df.name]
    x = df.values
    # apply shift
    x = x[:-shift] if shift > 0 else x
    # apply windowing, use numpy's as_strided function to step through x and create sub-arrays
    # (these are zero-copy views onto x, one sub-array per window position)
    if isinstance(df, pd.DataFrame):
        d0, d1 = x.shape
        s0, s1 = x.strides
        # shape: (number of full windows, window length, number of columns)
        # NOTE(review): d0 - (window - 1) can go negative when window > len(x);
        # only the Series branch below clamps — TODO confirm intended.
        arr = as_strided(x, (d0 - (window - 1), window, d1), (s0, s0, s1))
    elif isinstance(df, pd.Series):
        hopsize = 1
        nrows = ((x.size - window) // hopsize) + 1
        if nrows < 0:
            nrows = 0
        n = x.strides[0]
        arr = as_strided(x, shape=(nrows, window), strides=(hopsize * n, n))
    # fill up missing values b/c off window & shift with Nones
    # (the leading rows that do not have a full shifted window behind them)
    arr_shape = list(arr.shape)
    arr_shape[0] = len(df.index) - len(arr)
    arr_shape = tuple(arr_shape)
    n_fill = len(cols) * window * (len(df.index) - len(arr))
    fill_value = np.array([[None] * n_fill]).reshape(arr_shape)
    arr = np.append(fill_value, arr, axis=0)
    # reshape to new data frame
    def reshape(vs, i):
        # pick column i out of a 2-d window; 1-d windows (Series input) pass through
        return vs if len(vs.shape) == 1 else vs[:, i]
    d = [{c: reshape(vals, i) for i, c in enumerate(cols)} for vals in arr]
    rolled_df = pd.DataFrame(data=d, index=df.index)
    return rolled_df
def expand(df, shift=1):
"""Implementation of expanding window that can handle non-numerical values such as histograms
Split up input array into expanding sub-arrays
:param pd.DataFrame df: input dataframe to apply rolling function to.
:param int shift: shift of dataframe, default is 1 (optional)
:param fillvalue: default value to fill dataframe in case shift > 0 (optional)
"""
assert shift >= 0
assert isinstance(
df, (pd.DataFrame, pd.Series)
), "input should be a dataframe or series"
cols = df.columns if isinstance(df, pd.DataFrame) else [df.name]
x = df.values
arr = [x[: max(i + 1 - shift, 0)] for i in range(x.shape[0])]
# fill up missing values b/c off shift with Nones
fill_value = | |
######################################################################
# #
# Copyright 2009-2019 <NAME>. #
# This file is part of gdspy, distributed under the terms of the #
# Boost Software License - Version 1.0. See the accompanying #
# LICENSE file or <http://www.boost.org/LICENSE_1_0.txt> #
# #
######################################################################
"""
Curve class.
"""
import numpy
from gdspy import _func_bezier, _hobby, _zero
class Curve(object):
"""
Generation of curves loosely based on SVG paths.
Short summary of available methods:
====== =============================
Method Primitive
====== =============================
L/l Line segments
H/h Horizontal line segments
V/v Vertical line segments
C/c Cubic Bezier curve
S/s Smooth cubic Bezier curve
Q/q Quadratic Bezier curve
T/t Smooth quadratic Bezier curve
B/b General degree Bezier curve
I/i Smooth interpolating curve
arc Elliptical arc
====== =============================
The uppercase version of the methods considers that all coordinates
are absolute, whereas the lowercase considers that they are relative
to the current end point of the curve.
Parameters
----------
x : number
X-coordinate of the starting point of the curve. If this is a
complex number, the value of `y` is ignored and the starting
point becomes ``(x.real, x.imag)``.
y : number
Y-coordinate of the starting point of the curve.
tolerance : number
Tolerance used to calculate a polygonal approximation to the
curve.
Notes
-----
In all methods of this class that accept coordinate pairs, a single
complex number can be passed to be split into its real and imaginary
parts.
This feature can be useful in expressing coordinates in polar form.
All commands follow the SVG 2 specification, except for elliptical
arcs and smooth interpolating curves, which are inspired by the
Metapost syntax.
Examples
--------
>>> curve = gdspy.Curve(3, 4).H(1).q(0.5, 1, 2j).L(2 + 3j, 2, 2)
>>> pol = gdspy.Polygon(curve.get_points())
"""
__slots__ = "points", "tol", "last_c", "last_q"
def __init__(self, x, y=0, tolerance=0.01):
self.last_c = self.last_q = None
self.tol = tolerance ** 2
if isinstance(x, complex):
self.points = [numpy.array((x.real, x.imag))]
else:
self.points = [numpy.array((x, y))]
def get_points(self):
"""
Get the polygonal points that approximate this curve.
Returns
-------
out : Numpy array[N, 2]
Vertices of the polygon.
"""
delta = (self.points[-1] - self.points[0]) ** 2
if delta[0] + delta[1] < self.tol:
return numpy.array(self.points[:-1])
return numpy.array(self.points)
def L(self, *xy):
"""
Add straight line segments to the curve.
Parameters
----------
xy : numbers
Endpoint coordinates of the line segments.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
i = 0
while i < len(xy):
if isinstance(xy[i], complex):
self.points.append(numpy.array((xy[i].real, xy[i].imag)))
i += 1
else:
self.points.append(numpy.array((xy[i], xy[i + 1])))
i += 2
return self
def l(self, *xy):
"""
Add straight line segments to the curve.
Parameters
----------
xy : numbers
Endpoint coordinates of the line segments relative to the
current end point.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
o = self.points[-1]
i = 0
while i < len(xy):
if isinstance(xy[i], complex):
self.points.append(o + numpy.array((xy[i].real, xy[i].imag)))
i += 1
else:
self.points.append(o + numpy.array((xy[i], xy[i + 1])))
i += 2
return self
def H(self, *x):
"""
Add horizontal line segments to the curve.
Parameters
----------
x : numbers
Endpoint x-coordinates of the line segments.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
y0 = self.points[-1][1]
self.points.extend(numpy.array((xx, y0)) for xx in x)
return self
def h(self, *x):
"""
Add horizontal line segments to the curve.
Parameters
----------
x : numbers
Endpoint x-coordinates of the line segments relative to the
current end point.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
x0, y0 = self.points[-1]
self.points.extend(numpy.array((x0 + xx, y0)) for xx in x)
return self
def V(self, *y):
"""
Add vertical line segments to the curve.
Parameters
----------
y : numbers
Endpoint y-coordinates of the line segments.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
x0 = self.points[-1][0]
self.points.extend(numpy.array((x0, yy)) for yy in y)
return self
def v(self, *y):
"""
Add vertical line segments to the curve.
Parameters
----------
y : numbers
Endpoint y-coordinates of the line segments relative to the
current end point.
Returns
-------
out : `Curve`
This curve.
"""
self.last_c = self.last_q = None
x0, y0 = self.points[-1]
self.points.extend(numpy.array((x0, y0 + yy)) for yy in y)
return self
    def arc(self, radius, initial_angle, final_angle, rotation=0):
        """
        Add an elliptical arc to the curve.
        Parameters
        ----------
        radius : number, array-like[2]
            Arc radius. An elliptical arc can be created by passing an
            array with 2 radii.
        initial_angle : number
            Initial angle of the arc (in *radians*).
        final_angle : number
            Final angle of the arc (in *radians*).
        rotation : number
            Rotation of the axis of the ellipse.
        Returns
        -------
        out : `Curve`
            This curve.
        """
        self.last_c = self.last_q = None
        if hasattr(radius, "__iter__"):
            rx, ry = radius
            # the larger radius drives the sampling density below
            radius = max(radius)
        else:
            rx = ry = radius
        full_angle = abs(final_angle - initial_angle)
        # Sample count chosen so each chord stays within tolerance
        # (self.tol holds the squared tolerance); always at least 3 points.
        number_of_points = max(
            3,
            1
            + int(0.5 * full_angle / numpy.arccos(1 - self.tol ** 0.5 / radius) + 0.5),
        )
        angles = numpy.linspace(
            initial_angle - rotation, final_angle - rotation, number_of_points
        )
        # Sample the axis-aligned ellipse, then rotate if requested.
        pts = numpy.vstack((rx * numpy.cos(angles), ry * numpy.sin(angles))).T
        if rotation != 0:
            rot = numpy.empty_like(pts)
            c = numpy.cos(rotation)
            s = numpy.sin(rotation)
            rot[:, 0] = pts[:, 0] * c - pts[:, 1] * s
            rot[:, 1] = pts[:, 0] * s + pts[:, 1] * c
        else:
            rot = pts
        # Translate so the arc starts at the current end point of the curve;
        # the first sample coincides with that point and is dropped.
        pts = rot[1:] - rot[0] + self.points[-1]
        self.points.extend(xy for xy in pts)
        return self
    def C(self, *xy):
        """
        Add cubic Bezier curves to the curve.
        Parameters
        ----------
        xy : numbers
            Coordinate pairs. Each set of 3 pairs are interpreted as
            the control point at the beginning of the curve, the control
            point at the end of the curve and the endpoint of the curve.
        Returns
        -------
        out : `Curve`
            This curve.
        """
        self.last_q = None
        i = 0
        while i < len(xy):
            # Build the 4 control points: current end point + 3 consumed pairs
            # (complex numbers count as one pair, plain numbers as two values).
            ctrl = numpy.empty((4, 2))
            ctrl[0] = self.points[-1]
            for j in range(1, 4):
                if isinstance(xy[i], complex):
                    ctrl[j, 0] = xy[i].real
                    ctrl[j, 1] = xy[i].imag
                    i += 1
                else:
                    ctrl[j, 0] = xy[i]
                    ctrl[j, 1] = xy[i + 1]
                    i += 2
            f = _func_bezier(ctrl)
            # Adaptively refine the parameter values until the midpoint of
            # each segment deviates from the true curve by less than the
            # (squared) tolerance.
            uu = [0, 0.2, 0.5, 0.8, 1]
            fu = [f(u) for u in uu]
            iu = 1
            while iu < len(fu):
                test_u = 0.5 * (uu[iu - 1] + uu[iu])
                test_pt = f(test_u)
                test_err = 0.5 * (fu[iu - 1] + fu[iu]) - test_pt
                if test_err[0] ** 2 + test_err[1] ** 2 > self.tol:
                    uu.insert(iu, test_u)
                    fu.insert(iu, test_pt)
                else:
                    iu += 1
            self.points.extend(xy for xy in fu[1:])
            # Store the second control point; presumably used by the smooth
            # continuation commands (S/s) — see the class docstring table.
            self.last_c = ctrl[2]
        return self
    def c(self, *xy):
        """
        Add cubic Bezier curves to the curve.
        Parameters
        ----------
        xy : numbers
            Coordinate pairs. Each set of 3 pairs are interpreted as
            the control point at the beginning of the curve, the control
            point at the end of the curve and the endpoint of the curve.
            All coordinates are relative to the current end point.
        Returns
        -------
        out : `Curve`
            This curve.
        """
        self.last_q = None
        # All control points are offset from the end point at call time.
        x0, y0 = self.points[-1]
        i = 0
        while i < len(xy):
            # Build the 4 control points: current end point + 3 consumed pairs
            # (complex numbers count as one pair, plain numbers as two values).
            ctrl = numpy.empty((4, 2))
            ctrl[0, 0] = x0
            ctrl[0, 1] = y0
            for j in range(1, 4):
                if isinstance(xy[i], complex):
                    ctrl[j, 0] = x0 + xy[i].real
                    ctrl[j, 1] = y0 + xy[i].imag
                    i += 1
                else:
                    ctrl[j, 0] = x0 + xy[i]
                    ctrl[j, 1] = y0 + xy[i + 1]
                    i += 2
            f = _func_bezier(ctrl)
            # Adaptively refine the parameter values until the midpoint of
            # each segment deviates from the true curve by less than the
            # (squared) tolerance.
            uu = [0, 0.2, 0.5, 0.8, 1]
            fu = [f(u) for u in uu]
            iu = 1
            while iu < len(fu):
                test_u = 0.5 * (uu[iu - 1] + uu[iu])
                test_pt = f(test_u)
                test_err = 0.5 * (fu[iu - 1] + fu[iu]) - test_pt
                if test_err[0] ** 2 + test_err[1] ** 2 > self.tol:
                    uu.insert(iu, test_u)
                    fu.insert(iu, test_pt)
                else:
                    iu += 1
            self.points.extend(xy for xy in fu[1:])
            # Store the second control point; presumably used by the smooth
            # continuation commands (S/s) — see the class docstring table.
            self.last_c = ctrl[2]
        return self
def S(self, *xy):
"""
Add smooth cubic Bezier curves to the curve.
Parameters
----------
xy : numbers
Coordinate | |
# GitHub stars: 10-100
from __future__ import division, print_function
import numpy as np
import matplotlib.pyplot as plt
from NGSIM_env.vehicle.control import ControlledVehicle
from NGSIM_env import utils
from NGSIM_env.vehicle.dynamics import Vehicle
from NGSIM_env.vehicle.behavior import IDMVehicle
from NGSIM_env.vehicle.control import MDPVehicle
from NGSIM_env.vehicle.planner import planner
class NGSIMVehicle(IDMVehicle):
    """
    Use NGSIM human driving trajectories.

    The vehicle replays its recorded NGSIM trajectory until it gets too close
    to a leader vehicle; from that point on it is marked ``overtaken`` and is
    controlled by the IDM/MOBIL policies of the base class instead.
    """
    # Longitudinal policy parameters
    ACC_MAX = 5.0 # [m/s2] """Maximum acceleration."""
    COMFORT_ACC_MAX = 3.0 # [m/s2] """Desired maximum acceleration."""
    COMFORT_ACC_MIN = -3.0 # [m/s2] """Desired maximum deceleration."""
    DISTANCE_WANTED = 1.0 # [m] """Desired jam distance to the front vehicle."""
    TIME_WANTED = 0.5 # [s] """Desired time gap to the front vehicle."""
    DELTA = 4.0 # [] """Exponent of the velocity term."""
    # Lateral policy parameters [MOBIL]
    POLITENESS = 0.1 # in [0, 1]
    LANE_CHANGE_MIN_ACC_GAIN = 0.2 # [m/s2]
    LANE_CHANGE_MAX_BRAKING_IMPOSED = 2.0 # [m/s2]
    LANE_CHANGE_DELAY = 1.0 # [s]
    # Driving scenario
    SCENE = 'us-101'
    def __init__(self, road, position,
                 heading=0,
                 velocity=0,
                 target_lane_index=None,
                 target_velocity=None,
                 route=None,
                 enable_lane_change=False, # only changed here
                 timer=None,
                 vehicle_ID=None, v_length=None, v_width=None, ngsim_traj=None):
        super(NGSIMVehicle, self).__init__(road, position, heading, velocity, target_lane_index, target_velocity, route, enable_lane_change, timer)
        # Recorded NGSIM trajectory to replay, and the simulated trajectory
        self.ngsim_traj = ngsim_traj
        self.traj = np.array(self.position)
        self.vehicle_ID = vehicle_ID
        self.sim_steps = 0
        # True once the replayed trajectory has been overridden by IDM control
        self.overtaken = False
        # NGSIM rows with x == 0 mean the vehicle is not (yet) in the scene
        self.appear = True if self.position[0] != 0 else False
        # Per-step histories for later analysis
        self.velocity_history = []
        self.heading_history = []
        self.crash_history = []
        self.overtaken_history = []
        # Vehicle length [m]
        self.LENGTH = v_length
        # Vehicle width [m]
        self.WIDTH = v_width
    @classmethod
    def create(cls, road, vehicle_ID, position, v_length, v_width, ngsim_traj, heading=0, velocity=15):
        """
        Create a new NGSIM vehicle .
        :param road: the road where the vehicle is driving
        :param vehicle_id: NGSIM vehicle ID
        :param position: the position where the vehicle start on the road
        :param v_length: vehicle length
        :param v_width: vehicle width
        :param ngsim_traj: NGSIM trajectory
        :param velocity: initial velocity in [m/s]. If None, will be chosen randomly
        :param heading: initial heading
        :return: A vehicle with NGSIM position and velocity
        """
        v = cls(road, position, heading, velocity, vehicle_ID=vehicle_ID, v_length=v_length, v_width=v_width, ngsim_traj=ngsim_traj)
        return v
    def act(self):
        """
        Execute an action when NGSIM vehicle is overriden.
        :param action: the action
        """
        # Only act when the replay has been overridden and there is no crash.
        if not self.overtaken:
            return
        if self.crashed:
            return
        action = {}
        front_vehicle, rear_vehicle = self.road.neighbour_vehicles(self)
        # Lateral: MOBIL
        self.follow_road()
        if self.enable_lane_change:
            self.change_lane_policy()
        action['steering'] = self.steering_control(self.target_lane_index)
        # Longitudinal: IDM
        action['acceleration'] = self.acceleration(ego_vehicle=self, front_vehicle=front_vehicle, rear_vehicle=rear_vehicle)
        action['acceleration'] = np.clip(action['acceleration'], -self.ACC_MAX, self.ACC_MAX)
        # NOTE(review): the next statement reads the bound method without
        # calling it, so it has no effect (check_collision also expects an
        # `other` argument) — TODO confirm what was intended here.
        self.check_collision
        self.action = action
    def step(self, dt):
        """
        Update the state of a NGSIM vehicle.
        If the front vehicle is too close, use IDM model to override the NGSIM vehicle.
        """
        self.appear = True if self.ngsim_traj[self.sim_steps][0] != 0 else False
        self.timer += dt
        self.sim_steps += 1
        self.heading_history.append(self.heading)
        self.velocity_history.append(self.velocity)
        self.crash_history.append(self.crashed)
        self.overtaken_history.append(self.overtaken)
        # Check if need to overtake
        front_vehicle, rear_vehicle = self.road.neighbour_vehicles(self)
        if front_vehicle is not None and isinstance(front_vehicle, NGSIMVehicle) and front_vehicle.overtaken:
            gap = self.lane_distance_to(front_vehicle)
            desired_gap = self.desired_gap(self, front_vehicle)
        elif front_vehicle is not None and (isinstance(front_vehicle, HumanLikeVehicle) or isinstance(front_vehicle, MDPVehicle)):
            gap = self.lane_distance_to(front_vehicle)
            desired_gap = self.desired_gap(self, front_vehicle)
        else:
            # no relevant leader: treat the gap as comfortably large
            gap = 100
            desired_gap = 50
        if gap >= desired_gap and not self.overtaken:
            # Replay the recorded NGSIM state (0.1 s timestep between rows);
            # heading is derived from the lateral displacement.
            self.position = self.ngsim_traj[self.sim_steps][:2]
            lateral_velocity = (self.ngsim_traj[self.sim_steps+1][1] - self.position[1])/0.1
            heading = np.arcsin(np.clip(lateral_velocity/utils.not_zero(self.velocity), -1, 1))
            self.heading = np.clip(heading, -np.pi/4, np.pi/4)
            self.velocity = (self.ngsim_traj[self.sim_steps+1][0] - self.position[0])/0.1 if self.position[0] != 0 else 0
            self.target_velocity = self.velocity
            self.lane_index = self.road.network.get_closest_lane_index(self.position)
            self.lane = self.road.network.get_lane(self.lane_index)
        elif int(self.ngsim_traj[self.sim_steps][3]) == 0 and self.overtaken:
            # Recorded lane id 0: fall back to the recorded position/velocity
            self.position = self.ngsim_traj[self.sim_steps][:2]
            self.velocity = self.ngsim_traj[self.sim_steps][2]
        else:
            # Too close to the leader: hand control over to IDM/MOBIL
            self.overtaken = True
            # Determine the target lane
            # (positions divided by 3.281 — presumably NGSIM feet converted
            # to metres; verify against the data loader)
            target_lane = int(self.ngsim_traj[self.sim_steps][3])
            if self.SCENE == 'us-101':
                if target_lane <= 5:
                    if 0 < self.position[0] <= 560/3.281:
                        self.target_lane_index = ('s1', 's2', target_lane-1)
                    elif 560/3.281 < self.position[0] <= (698+578+150)/3.281:
                        self.target_lane_index = ('s2', 's3', target_lane-1)
                    else:
                        self.target_lane_index = ('s3', 's4', target_lane-1)
                elif target_lane == 6:
                    self.target_lane_index = ('s2', 's3', -1)
                elif target_lane == 7:
                    self.target_lane_index = ('merge_in', 's2', -1)
                elif target_lane == 8:
                    self.target_lane_index = ('s3', 'merge_out', -1)
            elif self.SCENE == 'i-80':
                if target_lane <= 6:
                    if 0 < self.position[0] <= 600/3.281:
                        self.target_lane_index = ('s1','s2', target_lane-1)
                    elif 600/3.281 < self.position[0] <= 700/3.281:
                        self.target_lane_index = ('s2','s3', target_lane-1)
                    elif 700/3.281 < self.position[0] <= 900/3.281:
                        self.target_lane_index = ('s3','s4', target_lane-1)
                    else:
                        self.target_lane_index = ('s4','s5', target_lane-1)
                elif target_lane == 7:
                    self.target_lane_index = ('s1', 's2', -1)
            super(NGSIMVehicle, self).step(dt)
        self.traj = np.append(self.traj, self.position, axis=0)
    def check_collision(self, other):
        """
        Check for collision with another vehicle.
        :param other: the other vehicle
        """
        if not self.COLLISIONS_ENABLED or not other.COLLISIONS_ENABLED or self.crashed or other is self:
            return
        # Fast spherical pre-check
        if np.linalg.norm(other.position - self.position) > self.LENGTH:
            return
        # if both vehicles are NGSIM vehicles and have not been overriden
        # (pure replay cannot crash by construction)
        if isinstance(self, NGSIMVehicle) and not self.overtaken and isinstance(other, NGSIMVehicle) and not other.overtaken:
            return
        # Accurate rectangular check
        if utils.rotated_rectangles_intersect((self.position, 0.9*self.LENGTH, 0.9*self.WIDTH, self.heading),
                                              (other.position, 0.9*other.LENGTH, 0.9*other.WIDTH, other.heading)) and self.appear:
            # On impact both vehicles take the slower speed and are flagged crashed
            self.velocity = other.velocity = min([self.velocity, other.velocity], key=abs)
            self.crashed = other.crashed = True
class HumanLikeVehicle(IDMVehicle):
"""
Create a human-like (IRL) driving agent.
"""
TAU_A = 0.2 # [s]
TAU_DS = 0.1 # [s]
PURSUIT_TAU = 1.5*TAU_DS # [s]
KP_A = 1 / TAU_A
KP_HEADING = 1 / TAU_DS
KP_LATERAL = 1 / 0.2 # [1/s]
MAX_STEERING_ANGLE = np.pi / 3 # [rad]
MAX_VELOCITY = 30 # [m/s]
def __init__(self, road, position,
heading=0,
velocity=0,
acc=0,
target_lane_index=None,
target_velocity=15, # Speed reference
route=None,
timer=None,
vehicle_ID=None, v_length=None, v_width=None, ngsim_traj=None, human=False, IDM=False):
super(HumanLikeVehicle, self).__init__(road, position, heading, velocity, target_lane_index, target_velocity, route, timer)
self.ngsim_traj = ngsim_traj
self.traj = np.array(self.position)
self.sim_steps = 0
self.vehicle_ID = vehicle_ID
self.planned_trajectory = None
self.human = human
self.IDM = IDM
self.velocity_history = []
self.heading_history = []
self.crash_history = []
self.acc = acc
self.steering_noise = None
self.acc_noise = None
self.LENGTH = v_length # Vehicle length [m]
self.WIDTH = v_width # Vehicle width [m]
@classmethod
def create(cls, road, vehicle_ID, position, v_length, v_width, ngsim_traj, heading=0, velocity=0, acc=0, target_velocity=15, human=False, IDM=False):
"""
Create a human-like (IRL) driving vehicle in replace of a NGSIM vehicle.
"""
v = cls(road, position, heading, velocity, acc, target_velocity=target_velocity,
vehicle_ID=vehicle_ID, v_length=v_length, v_width=v_width, ngsim_traj=ngsim_traj, human=human, IDM=IDM)
return v
def trajectory_planner(self, target_point, target_speed, time_horizon):
"""
Plan a trajectory for the human-like (IRL) vehicle.
"""
s_d, s_d_d, s_d_d_d = self.position[0], self.velocity * np.cos(self.heading), self.acc # Longitudinal
c_d, c_d_d, c_d_dd = self.position[1], self.velocity * np.sin(self.heading), 0 # Lateral
target_area, speed, T = target_point, target_speed, time_horizon
if not self.human:
target_area += np.random.normal(0, 0.2)
path = planner(s_d, s_d_d, s_d_d_d, c_d, c_d_d, c_d_dd, target_area, speed, T)
self.planned_trajectory = np.array([[x, y] for x, y in zip(path[0].x, path[0].y)])
if self.IDM:
self.planned_trajectory = None
# if constant velocity:
#time = np.arange(0, T*10, 1)
#path_x = self.position[0] + self.velocity * np.cos(self.heading) * time/10
#path_y = self.position[1] + self.velocity * np.sin(self.heading) * time/10
#self.planned_trajectory = np.array([[x, y] for x, y in zip(path_x, path_y)])
def act(self, step):
if self.planned_trajectory is not None:
self.action = {'steering': self.steering_control(self.planned_trajectory, step),
'acceleration': self.velocity_control(self.planned_trajectory, step)}
elif self.IDM:
super(HumanLikeVehicle, self).act()
else:
return
def steering_control(self, trajectory, step):
"""
Steer the vehicle to follow the given trajectory.
1. Lateral position is controlled by a proportional controller yielding a lateral velocity command
2. Lateral velocity command is converted to a heading reference
3. Heading is controlled by a proportional controller yielding a heading rate command
4. Heading rate command is converted to a steering angle
:param trajectory: the trajectory to follow
:return: a steering wheel angle command [rad]
"""
target_coords = trajectory[step]
# Lateral position control
lateral_velocity_command = self.KP_LATERAL * (target_coords[1] - self.position[1])
# Lateral velocity to heading
heading_command = np.arcsin(np.clip(lateral_velocity_command/utils.not_zero(self.velocity), -1, 1))
heading_ref = np.clip(heading_command, -np.pi/4, np.pi/4)
# Heading control
heading_rate_command = self.KP_HEADING * utils.wrap_to_pi(heading_ref - self.heading)
# Heading rate to steering angle
steering_angle = np.arctan(self.LENGTH / utils.not_zero(self.velocity) * heading_rate_command)
steering_angle = np.clip(steering_angle, -self.MAX_STEERING_ANGLE, self.MAX_STEERING_ANGLE)
return steering_angle
def velocity_control(self, trajectory, step):
"""
Control the velocity of the vehicle.
Using a simple proportional controller.
:param trajectory: the trajectory to follow
:return: an acceleration command [m/s2]
"""
target_velocity = (trajectory[step][0] - trajectory[step-1][0]) / 0.1
acceleration = self.KP_A * (target_velocity - self.velocity)
return acceleration
def step(self, dt):
self.sim_steps += 1
self.heading_history.append(self.heading)
self.velocity_history.append(self.velocity)
self.crash_history.append(self.crashed)
super(HumanLikeVehicle, self).step(dt)
self.traj = np.append(self.traj, self.position, axis=0)
def calculate_human_likeness(self):
original_traj = self.ngsim_traj[:self.sim_steps+1,:2]
ego_traj = self.traj.reshape(-1, 2)
ADE = np.mean([np.linalg.norm(original_traj[i] - ego_traj[i]) for | |
default store object in cache
if cache:
# Check if the user has defined that the parameter has or not to be
# cache explicitly
if name in self.decorator_arguments:
use_cache = self.decorator_arguments[name].cache
else:
if is_vararg(name):
vararg_name = get_name_from_vararg(name)
use_cache = self.decorator_arguments[vararg_name].cache
else:
# if not explicitly said, the object is candidate to be
# cached
use_cache = True
argument.cache = use_cache
if cache:
logger.debug("\t\t - Save in cache: " + str(use_cache))
if np and cache and use_cache:
# Check if the object is already in cache
if in_cache(original_path, self.cache_ids):
# The object is cached
retrieved, existing_shm = retrieve_object_from_cache(logger,
self.cache_ids,
original_path)
self.cached_references.append(existing_shm)
return retrieved
else:
# Not in cache. Retrieve from file and put in cache if possible
# source name : destination name : keep source : is write final value : original name
# out o inout + is write final ==> no meter en cache ? (ahora solo dice si es diferente a un read)
# out + keep source ==> imposible
# noqa inout + keep source ==> buscar el segundo (destination name) + meter en cache despues con destination name
# si keep source = False -- voy a buscar el source name en vez de destination name.
# no meter en cache si es IN y keep source == False
# si keep source = True -- hay que meterlo si no esta.
obj = deserialize_from_file(original_path)
if argument.file_name.keep_source and \
argument.direction != parameter.DIRECTION.IN_DELETE:
insert_object_into_cache_wrapper(logger,
self.cache_queue,
obj,
original_path)
return obj
else:
return deserialize_from_file(original_path)
def segregate_objects(self, args):
# type: (tuple) -> (list, dict, list)
""" Split a list of arguments.
Segregates a list of arguments in user positional, variadic and
return arguments.
:return: list of user arguments, dictionary of user kwargs and a list
of return parameters.
"""
# User args
user_args = []
# User named args (kwargs)
user_kwargs = {}
# Return parameters, save them apart to match the user returns with
# the internal parameters
ret_params = []
for arg in args:
# Just fill the three data structures declared above
# Deal with the self parameter (if any)
if not isinstance(arg, Parameter):
user_args.append(arg)
# All these other cases are all about regular parameters
elif is_return(arg.name):
ret_params.append(arg)
elif is_kwarg(arg.name):
user_kwargs[get_name_from_kwarg(arg.name)] = arg.content
else:
if is_vararg(arg.name):
self.param_varargs = get_name_from_vararg(arg.name)
# Apart from the names we preserve the original order, so it
# is guaranteed that named positional arguments will never be
# swapped with variadic ones or anything similar
user_args.append(arg.content)
return user_args, user_kwargs, ret_params
    @emit_event(EXECUTE_USER_CODE_EVENT, master=False, inside=True)
    def execute_user_code(self, user_args, user_kwargs, tracing):
        # type: (list, dict, bool) -> (object, COMPSsException, dict)
        """ Executes the user code.
        Disables the tracing hook if tracing is enabled. Restores it
        at the end of the user code execution. If the @task declares a
        numba mode, the user function is compiled with the matching numba
        decorator before being invoked.
        :param user_args: Function args.
        :param user_kwargs: Function kwargs.
        :param tracing: If tracing enabled.
        :return: The user function returns, the compss exception (if any)
                 and the default values (only set when the failure policy
                 is IGNORE and an exception occurred).
        """
        # Tracing hook is disabled by default during the user code of the task.
        # The user can enable it with tracing_hook=True in @task decorator for
        # specific tasks or globally with the COMPSS_TRACING_HOOK=true
        # environment variable.
        restore_hook = False
        pro_f = None
        if tracing:
            global_tracing_hook = False
            if TRACING_HOOK_ENV_VAR in os.environ:
                hook_enabled = os.environ[TRACING_HOOK_ENV_VAR] == "true"
                global_tracing_hook = hook_enabled
            if self.decorator_arguments['tracing_hook'] or global_tracing_hook:
                # The user wants to keep the tracing hook
                pass
            else:
                # When Extrae library implements the function to disable,
                # use it, as:
                #     import pyextrae
                #     pro_f = pyextrae.shutdown()
                # Since it is not available yet, we manage the tracing hook
                # by ourselves: remember the current profile function and
                # clear it for the duration of the user code.
                pro_f = sys.getprofile()
                sys.setprofile(None)
                restore_hook = True
        user_returns = None
        compss_exception = None
        default_values = None
        if self.decorator_arguments['numba']:
            # Import all supported functionalities
            from numba import jit
            from numba import njit
            from numba import generated_jit
            from numba import vectorize
            from numba import guvectorize
            from numba import stencil
            from numba import cfunc
            numba_mode = self.decorator_arguments['numba']
            numba_flags = self.decorator_arguments['numba_flags']
            # numba=True (or a flags dict) defaults to plain jit mode.
            if type(numba_mode) is dict or \
                    numba_mode is True or \
                    numba_mode == 'jit':
                # Use the flags defined by the user
                numba_flags['cache'] = True  # Always force cache
                user_returns = jit(self.user_function,
                                   **numba_flags)(*user_args, **user_kwargs)
                # Alternative way of calling:
                # user_returns = jit(cache=True)(self.user_function) \
                #                   (*user_args, **user_kwargs)
            elif numba_mode == 'generated_jit':
                user_returns = generated_jit(self.user_function,
                                             **numba_flags)(*user_args,
                                                            **user_kwargs)
            elif numba_mode == 'njit':
                numba_flags['cache'] = True  # Always force cache
                user_returns = njit(self.user_function,
                                    **numba_flags)(*user_args, **user_kwargs)
            elif numba_mode == 'vectorize':
                numba_signature = self.decorator_arguments['numba_signature']  # noqa: E501
                user_returns = vectorize(
                    numba_signature,
                    **numba_flags
                )(self.user_function)(*user_args, **user_kwargs)
            elif numba_mode == 'guvectorize':
                numba_signature = self.decorator_arguments['numba_signature']  # noqa: E501
                numba_decl = self.decorator_arguments['numba_declaration']
                user_returns = guvectorize(
                    numba_signature,
                    numba_decl,
                    **numba_flags
                )(self.user_function)(*user_args, **user_kwargs)
            elif numba_mode == 'stencil':
                user_returns = stencil(
                    **numba_flags
                )(self.user_function)(*user_args, **user_kwargs)
            elif numba_mode == 'cfunc':
                numba_signature = self.decorator_arguments['numba_signature']  # noqa: E501
                user_returns = cfunc(
                    numba_signature
                )(self.user_function).ctypes(*user_args, **user_kwargs)
            else:
                raise PyCOMPSsException("Unsupported numba mode.")
        else:
            try:
                # Normal task execution
                user_returns = self.user_function(*user_args, **user_kwargs)
            except COMPSsException as ce:
                # Perform any required action on failure
                user_returns, default_values = self.manage_exception()
                compss_exception = ce
                # Check old targetDirection (backwards-compatible camelCase
                # spelling of the target_direction decorator argument).
                if 'targetDirection' in self.decorator_arguments:
                    target_label = 'targetDirection'
                else:
                    target_label = 'target_direction'
                compss_exception.target_direction = self.decorator_arguments[target_label]  # noqa: E501
            except Exception as exc:  # noqa
                if self.on_failure == "IGNORE":
                    # Perform any required action on failure
                    user_returns, default_values = self.manage_exception()
                else:
                    # Re-raise the exception
                    raise exc
        # Reestablish the hook if it was disabled
        if restore_hook:
            sys.setprofile(pro_f)
        return user_returns, compss_exception, default_values
def manage_exception(self):
# type () -> (tuple, dict)
""" Deal with exceptions (on failure action).
:return: The default return and values.
"""
user_returns = None
default_values = None
if self.on_failure == "IGNORE":
# Provide default return
user_returns = self.defaults.pop("returns", None)
# Provide defaults to the runtime
default_values = self.defaults
return user_returns, default_values
def manage_defaults(self, args, default_values):
# type: (tuple, dict) -> None
""" Deal with default values. Updates args with the appropriate object
or file.
:param args: Argument list.
:param default_values: Dictionary containing the default values.
:return: None
"""
if __debug__:
logger.debug("Dealing with default values")
for arg in args:
# Skip non-task-parameters
if not isinstance(arg, Parameter):
continue
# Skip returns
if is_return(arg.name):
continue
if self.is_parameter_an_object(arg.name):
# Update object
arg.content = default_values[arg.name]
else:
# Update file
shutil.copyfile(default_values[arg.name], arg.content)
def manage_inouts(self, args, python_mpi):
# type: (tuple, bool) -> None
""" Deal with INOUTS. Serializes the result of INOUT parameters.
:param args: Argument list.
:param python_mpi: Boolean if python mpi.
:return: None
"""
if __debug__:
logger.debug("Dealing with INOUTs and OUTS")
if python_mpi:
logger.debug("\t - Managing with MPI policy")
# Manage all the possible outputs of the task and build the return new
# types and values
for arg in args:
# Handle only task parameters that are objects
# Skip files and non-task-parameters
if not isinstance(arg, Parameter) or \
not self.is_parameter_an_object(arg.name):
continue
original_name = get_name_from_kwarg(arg.name)
real_direction = self.get_default_direction(original_name)
param = self.decorator_arguments.get(original_name, real_direction)
# Update args
arg.direction = param.direction
# File collections are objects, but must be skipped as well
if self.is_parameter_file_collection(arg.name):
continue
# Skip psco: since param.content_type has the old type, we can
# not use: param.content_type != parameter.TYPE.EXTERNAL_PSCO
_is_psco_true = (arg.content_type ==
parameter.TYPE.EXTERNAL_PSCO or
is_psco(arg.content))
if _is_psco_true:
continue
# skip non-inouts or non-col_outs
_is_col_out = (arg.content_type == parameter.TYPE.COLLECTION and
param.direction == parameter.DIRECTION.OUT)
_is_dict_col_out = (arg.content_type == parameter.TYPE.DICT_COLLECTION and
param.direction == parameter.DIRECTION.OUT)
_is_inout = (param.direction == parameter.DIRECTION.INOUT or
param.direction == parameter.DIRECTION.COMMUTATIVE)
if not (_is_inout or _is_col_out or _is_dict_col_out):
continue
# Now it is 'INOUT' or 'COLLECTION_OUT' or 'DICT_COLLECTION_OUT'
# object param, serialize to a file.
if arg.content_type == parameter.TYPE.COLLECTION:
if __debug__:
logger.debug("Serializing collection: " + str(arg.name))
# handle collections recursively
for (content, elem) in __get_collection_objects__(arg.content, arg): # noqa: E501
if elem.file_name:
f_name = elem.file_name.original_path
if __debug__:
logger.debug("\t - Serializing element: " +
str(arg.name) + " to " + str(f_name))
if python_mpi:
serialize_to_file_mpienv(content, f_name, False)
else:
serialize_to_file(content, f_name)
self.update_object_in_cache(content, arg)
else:
# It is None --> PSCO
pass
elif arg.content_type == parameter.TYPE.DICT_COLLECTION:
if __debug__:
logger.debug("Serializing dictionary collection: " + str(arg.name))
# | |
import csv
import datetime
import os
import sys
import time
import traceback
from sys import platform
'Driver'
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
'Driver Exceptions'
from selenium.common.exceptions import *
'Parser'
from bs4 import BeautifulSoup
from bs4.element import Comment
"Display for headless mode"
from pyvirtualdisplay import Display
"Only use this if running on a non linux machine"
driverPath = 'Driver/chromedriver'
inline_tags = ["b", "big", "i", "small", "tt", "abbr", "acronym", "cite", "dfn",
"em", "kbd", "strong", "samp", "var", "bdo", "map", "object", "q",
"span", "sub", "sup"]
def readCSV(filename) -> list:
    """Read the school CSV file and build a School per data row.

    The header row (source line 1) is skipped. Rows whose School
    constructor rejects the URL (URL == "0") are reported and dropped.
    :param filename: path of the CSV file to parse
    :return: list of School objects
    """
    schools = []
    with open(filename, newline='', encoding="Latin-1") as csvFile:
        reader = csv.reader(csvFile, delimiter=',')
        for row in reader:
            if reader.line_num == 1:
                continue  # header row
            try:
                schools.append(School(row[0], row[1], row[2], row[4]))
            except ValueError:
                print("ERROR: School " + str(row[1]) + " was not scraped as it did not have a URL")
    return schools
class School(object):
    """Holds one school scraped from the input CSV.

    Each school has an ID number, a name, a geographical address and the URL
    of its homepage. ``matcher`` is the domain token used to filter out links
    that leave the school's main domain (e.g. Facebook or Instagram).
    ``links`` collects the Link objects found on the homepage, and the
    remaining counters track how many links of each kind were found/clicked.
    Relies on the module-level selenium ``driver``.
    """

    def __init__(self, id, name, address, mainURL):
        # A URL of "0" in the CSV means "no URL": refuse to build the school.
        if mainURL == str(0):
            raise ValueError("ERROR: URL cannot be 0")
        self.id = id
        self.name = name
        self.address = address
        self.mainURL = mainURL
        self.links = []  # Link objects gathered from the homepage
        # Derive the domain token used to recognise same-site links:
        # "http://www.foo.org" -> "foo", "http://foo.org" -> "foo".
        if self.mainURL.split("://")[1].startswith("www"):
            self.matcher = self.mainURL.split(".")[1]
        else:
            self.matcher = self.mainURL.split("://")[1].split(".")[0]
        self.filePath = "results/" + self.name
        self.totalNumberofLinks = 0
        self.htmlLinks = 0
        self.htmlLinksClicked = 0
        self.scriptLinks = 0
        self.scriptLinksClicked = 0
        self.linksClicked = 0

    def gatherLinks(self) -> None:
        """Collect every anchor href on the homepage into self.links.

        The element list is re-queried after every read (oldElems/newElems)
        — presumably a workaround for stale element references when the page
        mutates; TODO confirm this is still needed.
        """
        driver.get(self.mainURL)
        oldElems = driver.find_elements_by_xpath("//a[@href]")
        hrefAttributes = []
        count = 0
        for x in oldElems:
            try:
                if count == 0:
                    hrefAttributes.append(oldElems[count].get_attribute("href"))
                else:
                    hrefAttributes.append(newElems[count].get_attribute("href"))
            except IndexError:
                # The re-queried list is shorter than the original: stop.
                break
            newElems = driver.find_elements_by_xpath("//a[@href]")
            count += 1
        for i in range(len(hrefAttributes)):
            try:
                link = Link(hrefAttributes[i], self.mainURL, self.matcher, i)
                self.links.append(link)
                print(str(link))
            except LinkException:
                # Link was off-domain or not longer than the main URL.
                print(str(hrefAttributes[i]) + (
                    "href") + " was not added as it did not match the main url or was not longer than main url")
        self.totalNumberofLinks = len(self.links)

    def clickLinks(self):
        """Visit every gathered link, then write the scraped text to files."""
        if not checkPathExists(self.filePath):
            os.makedirs(self.filePath)
        counter = 1
        for link in self.links:
            try:
                # Count the link by type before clicking; the *Clicked
                # counters below only advance if click() did not raise.
                if link.type == "html":
                    self.htmlLinks += 1
                elif link.type == "JavaScript":
                    self.scriptLinks += 1
                print("Clicking Link " + str(counter) + " out of " + str(self.totalNumberofLinks))
                link.click()
                self.linksClicked += 1
                if link.type == "html":
                    self.htmlLinksClicked += 1
                elif link.type == "JavaScript":
                    self.scriptLinksClicked += 1
            except LinkException:
                print("Could not click link:" + str(link))
            counter += 1
        scriptCount = 0
        print("Done Clickling links")
        for link in self.links:
            print("Writing link to file")
            if link.type == "html":
                link.writeFile(self.filePath, 0)
            elif link.type == "JavaScript" and link.text != "":
                # JavaScript links have no usable name; number them instead.
                link.writeFile(self.filePath, scriptCount)
                scriptCount += 1

    def __str__(self) -> str:
        """Debug representation listing all of the school's fields."""
        s = ""
        s += "mainURL:" + self.mainURL + " "
        s += "Matcher:" + self.matcher + " "
        s += "links:" + str(self.links) + " "
        s += "ID:" + self.id + " "
        s += "Name:" + self.name + " "
        s += "Address:" + self.address + " "
        return s
class LinkException(Exception):
    """Raised by the Link class for unusable or unclickable links.

    The ``switch`` argument selects the error message:
      0 -> link type was neither html nor JavaScript
      1 -> link could not be clicked (default)
      2 -> JavaScript link is missing its index value
    Any other value yields a generic "misused exception" message.
    """

    def __init__(self, switch=1):
        if switch == 0:
            self.value = "ERROR: Link type was not html or JavaScript"
        elif switch == 1:
            self.value = "ERROR: Link was Unclickable"
        elif switch == 2:
            self.value = "ERROR: Link is JavaScript based but an index value was not set"
        else:
            # Bug fix: previously only switch == -1 set this message, so any
            # other unrecognised switch left self.value unset and __str__
            # raised AttributeError instead of describing the problem.
            self.value = "No value was specified in LinkException Switch. Make sure you are properly calling this exception"

    def __str__(self) -> str:
        return str(self.value)
class Link(object):
    """Stores all of the information regarding a single homepage link.

    Each link has a type (either "html" or "JavaScript"), the href attribute
    (what the link redirects to), a fallback URL (the page the link was found
    on), and an index value used to relocate JavaScript links on that page.
    Relies on the module-level selenium ``driver`` for clicking and scraping.
    """

    def __init__(self, hrefAttribute, callingURL, matcher, index):
        """Classify and store a raw href found on ``callingURL``.

        http(s) hrefs are accepted as "html" only if they stay on the
        school's domain (``matcher``) and are longer than the calling URL;
        javascript: hrefs become "JavaScript" links located by ``index``.
        Anything else raises LinkException(0).
        """
        self.type = ""
        self.hrefAttribute = ""
        self.fallbackURL = callingURL
        self.index = None
        self.matcher = matcher
        self.index = 0
        self.text = ""
        if hrefAttribute.startswith("http"):
            # Same-domain test mirrors School.matcher derivation: handle both
            # "www.<domain>..." and "<domain>..." host shapes.
            if (hrefAttribute.split("://")[1].startswith("www") and hrefAttribute.split(".")[1] == matcher and len(
                    hrefAttribute) > len(callingURL)) or (
                    hrefAttribute.split("://")[1].split(".")[0] == matcher and len(hrefAttribute) > len(
                    callingURL)):
                self.type = "html"
                self.hrefAttribute = hrefAttribute
            else:
                raise LinkException(0)
        elif hrefAttribute.startswith("javascript"):
            self.type = "JavaScript"
            self.hrefAttribute = hrefAttribute
            self.index = index
        else:
            raise LinkException(0)
        self.gatherName(delimiter="-")

    def tag_visible(self, element) -> bool:
        """Return False for elements that never hold user-visible text."""
        if element.parent.name in ['style', 'script', 'head', 'title', 'meta', '[document]']:
            return False
        if isinstance(element, Comment):
            return False
        return True

    def gatherText(self, driver) -> None:
        """Extract the visible text of the current page into self.text."""
        page_source_replaced = driver.page_source
        # Remove inline tags so their contents merge with surrounding text.
        for it in inline_tags:
            page_source_replaced = page_source_replaced.replace("<" + it + ">", "")
            page_source_replaced = page_source_replaced.replace("</" + it + ">", "")
        # Random string used as a tag delimiter that cannot occur in the page.
        random_string = "".join(map(chr, os.urandom(75)))
        soup = BeautifulSoup(page_source_replaced, 'lxml')
        # Drop non-visible tags entirely.
        [s.extract() for s in soup(['style', 'script', 'head', 'title', 'meta', '[document]'])]
        visible_text = soup.getText(random_string).replace("\n", "")
        visible_text = visible_text.split(random_string)
        # Keep only fragments that contain non-whitespace characters.
        self.text = "\n".join(list(filter(lambda vt: vt.split() != [], visible_text)))

    def click(self) -> bool:
        """Navigate to the link and scrape its page text.

        html links are visited directly. JavaScript links reload the fallback
        page and click the anchor at self.index; if that fails, the element
        is scrolled into view with ActionChains and clicked once more before
        giving up with LinkException(1).
        """
        if self.type == "html":
            driver.get(self.hrefAttribute)
            self.gatherText(driver)
            return True
        elif self.type == "JavaScript":
            if self.index is None:
                raise LinkException(2)
            driver.get(self.fallbackURL)
            try:
                driver.find_elements_by_xpath("//a[@href]")[self.index].click()
                self.gatherText(driver)
            except (WebDriverException, ElementNotVisibleException, ElementNotInteractableException,
                    ElementNotSelectableException, IndexError):
                # Retry after moving the mouse to the element (it may be
                # off-screen or covered).
                link = driver.find_elements_by_xpath("//a[@href]")[self.index]
                move = ActionChains(driver).move_to_element(link)
                move.perform()
                try:
                    link.click()
                    self.gatherText(driver)
                except (WebDriverException, ElementNotVisibleException, ElementNotInteractableException,
                        ElementNotSelectableException, IndexError):
                    raise LinkException(1)
        else:
            raise LinkException(0)

    def gatherName(self, delimiter=" ") -> None:
        """Derive a filesystem-safe name from the href's path components.

        ``delimiter`` replaces "/" between path parts (and so must not be
        "/"); JavaScript links get an empty name and are numbered later.
        """
        if delimiter == "/":
            raise ValueError("ERROR: Delimiter cannot be a slash")
        if self.type == "html":
            unfilteredName = self.hrefAttribute[self.hrefAttribute.index(self.matcher):len(self.hrefAttribute)]
            unfilteredName = unfilteredName.split("/")
            self.name = ""
            if len(unfilteredName) != 1:
                for i in range(len(unfilteredName)):
                    self.name += unfilteredName[i] + delimiter
            else:
                self.name = unfilteredName[0]
        elif self.type == "JavaScript":
            self.name = ""

    def writeFile(self, filepath, counter):
        """Write the scraped text to <filepath>/<name>.txt.

        ``counter`` is only used to number JavaScript links, whose names are
        empty.
        """
        fileName = self.name
        if self.type == "html":
            file = open(str(filepath) + "/" + fileName + ".txt", "w")
        elif self.type == "JavaScript":
            file = open(str(filepath) + "/" + "JavaScript Link " + str(counter) + ".txt", "w")
        else:
            raise LinkException(0)
        file.write(str(self.text.encode('utf-8'), encoding='utf-8'))
        file.close()

    def __str__(self) -> str:
        """Debug representation listing all of the link's fields."""
        s = ""
        s += "Link Type:" + self.type + " "
        s += "hrefAttribute:" + self.hrefAttribute + " "
        s += "name:" + self.name + " "
        s += "FallbackURL(Only used for JS):" + self.fallbackURL + " "
        s += "Index (Only used for JS):" + str(self.index) + " "
        return s
def tag_visible(element) -> bool:
    """Return True if *element* can hold user-visible page text.

    Elements nested in style/script/head/title/meta/[document] tags and
    HTML comments are considered invisible.
    """
    hidden_parents = ['style', 'script', 'head', 'title', 'meta', '[document]']
    if element.parent.name in hidden_parents:
        return False
    return not isinstance(element, Comment)
def checkPathExists(path) -> bool:
    """Return True if *path* exists on disk (file or directory).

    Thin wrapper kept for readability at the call sites; returns the
    boolean directly instead of the original if/return-True/return-False.
    """
    return os.path.exists(path)
if __name__ == '__main__':
if platform.startswith("linux"):
display = Display(visible=0, size=(1920, 1080))
display.start()
chromeOptions = webdriver.ChromeOptions()
chromeOptions.add_argument('headless')
chromeOptions.add_argument('window-size=1920x1080')
chromeOptions.add_argument('--no-sandbox')
driver = webdriver.Chrome('/usr/local/bin/chromedriver', chrome_options=chromeOptions)
elif platform.startswith("darwin") or platform.startswith("win32"):
driver = webdriver.Chrome(executable_path="Driver/chromedriver")
if not checkPathExists("results"):
os.mkdir("results")
if not checkPathExists("diagnostics"):
os.mkdir("diagnostics")
schools = readCSV("data/micro-sample13_coded.csv")
numberofLinksClicked = 0
totalNumberOfLinks = 0
htmlLinks = 0
htmlLinksClicked = 0
scriptLinks = 0
scriptLinksClicked = 0
"Time doesn't really account for timezones now, many be an issue later"
now = datetime.datetime.now()
formattedTime = now.strftime("%Y-%m-%d %H:%M:%S")
diagnosticsFile = open("diagnostics/" + str(formattedTime) + ".txt", "w")
diagnosticsFile.write("Program was run at " + formattedTime + "\n")
startTime = time.time()
try:
for school in schools:
school.gatherLinks()
schoolStartTime = time.time()
school.clickLinks()
endTime = time.time()
schoolTimeElapsed = endTime - schoolStartTime
print("Elapsed Time :%s (seconds) %s (minutes)" % (
str(schoolTimeElapsed), str(schoolTimeElapsed / 60)))
totalNumberOfLinks += school.totalNumberofLinks
numberofLinksClicked += school.linksClicked
htmlLinks += school.htmlLinks
htmlLinksClicked += school.htmlLinksClicked
scriptLinks += school.scriptLinks
scriptLinks += school.scriptLinksClicked
try:
diagnosticsFile.write(
"School " + str(school.name) + " had " + str(school.totalNumberofLinks) + " links and " + str(
school.linksClicked) + " were clicked(" + str(
(school.linksClicked / school.totalNumberofLinks) * 100) + "%)\n")
except ZeroDivisionError:
diagnosticsFile.write("School " + str(school.name) + " had 0 links. Check the matcher for this school ")
try:
diagnosticsFile.write(
"There were " + str(school.htmlLinks) + " html links and " + str(
school.htmlLinksClicked) + " were clicked(" + str(
round((school.htmlLinksClicked / school.htmlLinks) * 100, 3)) + "%)\n"
)
except ZeroDivisionError:
diagnosticsFile.write("This school had 0 html links \n")
try:
diagnosticsFile.write(
"There were " + str(school.scriptLinks) + " JavaScript links and " + str(
school.scriptLinksClicked) + " were clicked(" + str(round(
(school.scriptLinksClicked / school.scriptLinks) * 100, 3)) + "%)\n"
| |
import tensorflow as tf
import numpy as np
import collections
from rpn_tools.my_anchor_target_layer_modified import AnchorTargetLayer
from rpn_tools.proposal_layer_modified import ProposalLayer_Chunk
from rpn_tools.proposal_target_layer_modified import ProposalTargetLayer
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
import csv
np.set_printoptions(threshold=np.inf)
import cv2
import glob
from roi_pool_tools import roi_pooling_op
from roi_pool_tools import roi_pooling_op_grad
from cnn_tools.tools import *
# image_files are original images, while gt_files are csv annotations in
# matching (sorted) order — one annotation CSV per JPEG.
image_files = sorted(glob.glob('/home/alfonso/VOCdevkit2012/VOC2012/JPEGImages/*.jpg'))
gt_files = sorted(glob.glob('/home/alfonso/VOCdevkit2012/VOC2012/Annotations_3/*.csv'))
# hyperparameter settings
resnet_length = 101  # resnet lengths can be {50, 101, 152}
_tr_bn = True  # train BN parameters
_add_l2 = True  # include weight decay of 1e-4: set to False
_rpn_stat = True  # train RPN weights
_rcnn_stat = True  # train RCNN weights
_fc_stat = True  # train "fc" -- which are hconv5 residual layers and up
_layer0_stat = False  # train hconv2
_layer1_stat = True  # train hconv3
_layer2_stat = True  # train hconv4
lr_w = 0.001  # learning rate for weights
lr_b = 0.002  # learning rate for biases (2x weights, per Fast R-CNN practice)
s1 = 3  # smoothL1 loss sigma hyperparameter for RPN anchor bbox_pred
s2 = 1  # smoothL1 loss sigma hyperparameter for classification bbox_pred
# other hyperparameter settings: modify with care...
A = 9  # anchors per feature-map location
height = 38  # RPN feature map height for a 600x1000 input (stride 16)
width = 63  # RPN feature map width
rpn_size = 256
rpn_batch_size = 128
im_info = [600, 1000]  # network input image size (H, W)
def anchor(x, g):
    """Compute RPN anchor targets for one image (py_func wrapper).

    Wraps AnchorTargetLayer: given RPN class scores ``x`` and ground-truth
    boxes ``g``, produces per-anchor labels, bbox regression targets and
    their inside/outside weights, plus the number of sampled anchors.
    """
    x = np.array(x)
    g = np.array(g)
    target_layer = AnchorTargetLayer()
    target_layer.setup(x, g)
    labels, bbox_targets, inside_weights, outside_weights, n = \
        target_layer.forward(x, g)
    return (labels.astype(np.int64), bbox_targets, inside_weights,
            outside_weights, np.array(n).astype(np.float32))
def proposal(cls, bbox):
    """Turn RPN scores and bbox deltas into region proposals (py_func wrapper)."""
    cls = np.array(cls)
    bbox = np.array(bbox)
    layer = ProposalLayer_Chunk()
    layer.setup(cls, bbox)
    return layer.forward(cls, bbox)
def proposal_target(rpn_rois, gt):
    """Sample RPN proposals against ground truth for the RCNN stage
    (py_func wrapper around ProposalTargetLayer)."""
    rpn_rois = np.array(rpn_rois)
    gt = np.array(gt)
    sampler = ProposalTargetLayer()
    sampler.setup()
    rois, labels, bbox_targets, inside_weights, outside_weights = \
        sampler.forward(rpn_rois, gt)
    return (rois.astype(np.int32), labels.astype(np.int64), bbox_targets,
            inside_weights, outside_weights)
def resnet(inpt, n, loc):
    """Build the shared ResNet trunk up to the end of the conv4 stage.

    :param inpt: input image batch tensor (NHWC).
    :param n: resnet depth; one of {34, 50, 101, 152}.
    :param loc: variable-collection tag forwarded to the layer helpers.
    :return: output tensor of the last conv4 residual block.
    :raises ValueError: if ``n`` is not a supported depth.
    """
    # Residual blocks per stage for each supported depth (conv2..conv5
    # counts; the conv5 count is kept for completeness but unused here).
    stage_blocks = {
        34: [3, 4, 6, 3],
        50: [3, 4, 6, 3],
        101: [3, 4, 23, 3],
        152: [3, 8, 36, 3],
    }
    if n not in stage_blocks:
        # Bug fix: an unsupported depth previously fell through the if/elif
        # chain and crashed later with UnboundLocalError on num_conv.
        raise ValueError("Unsupported resnet length: %s" % n)
    num_conv = stage_blocks[n]
    layers = []
    with tf.variable_scope('conv1'):
        conv1 = conv_layer(inpt, [7, 7, 3, 64], 2, loc, tr_stat = _layer0_stat, \
            bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, state = "split")
        max1 = max_pool_3x3(conv1)
        layers.append(max1)
    # conv2 stage: no downsampling, trained only if _layer0_stat.
    for i in range(num_conv[0]):
        with tf.variable_scope('conv2_%d' % (i + 1)):
            conv2 = residual_block(layers[-1], 64, False, loc, tr_stat = _layer0_stat, \
                bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "near")
            layers.append(conv2)
    # conv3 stage: first block downsamples.
    for i in range(num_conv[1]):
        down_sample = True if i == 0 else False
        with tf.variable_scope('conv3_%d' % (i + 1)):
            conv3 = residual_block(layers[-1], 128, down_sample, loc, tr_stat = _layer1_stat, \
                bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "far")
            layers.append(conv3)
    # conv4 stage: first block downsamples; output feeds RPN and ROI pooling.
    for i in range(num_conv[2]):
        down_sample = True if i == 0 else False
        with tf.variable_scope('conv4_%d' % (i + 1)):
            conv4 = residual_block(layers[-1], 256, down_sample, loc, tr_stat = _layer2_stat, \
                bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "far")
            layers.append(conv4)
    return layers[-1]
# ------------------------------------------------------------------
# Top-level Faster R-CNN graph construction.
# NOTE(review): tf.pack / tf.sub / tf.mul / the 2-arg softmax-loss call are
# pre-TF-1.0 APIs (renamed to tf.stack / tf.subtract / tf.multiply in 1.0);
# this script targets TF <= 0.12.
# ------------------------------------------------------------------
#SETUP
num_labels = 21  # 20 VOC classes + background
batch_size = 1
prep_img = tf.placeholder(tf.float32, [1, 600, 1000, 3])
gt_box = tf.placeholder(tf.int64)
gt_boxbatch = tf.reshape(tf.pack(gt_box), [-1, 5])  # [x1, y1, x2, y2, class]
post_img = tf.py_func(process_img, [prep_img, gt_boxbatch], [tf.float32])
im_batch = tf.reshape(post_img, [1, 600, 1000, 3])
#VGG_TRUNK
with tf.name_scope("trunk"):
    h_conv13 = resnet(im_batch, resnet_length, "trunk")
#RPN
with tf.name_scope("rpn"):
    gate = tf.placeholder(tf.float32)
    h_rpn_input = (h_conv13 * (1-gate)) # gate is a residue of alternative-optimization
    # 3x3 intermediate conv, then 1x1 heads for objectness (2 per anchor)
    # and bbox deltas (4 per anchor), A = 9 anchors.
    W_rpn3 = weight_variable([3,3,1024,1024], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    b_rpn3 = bias_variable([1024], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    h_rpn3 = tf.nn.relu(conv2d(h_rpn_input, W_rpn3) + b_rpn3)
    W_cls_score = weight_variable([1,1,1024,18], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    b_cls_score = bias_variable([18], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    rpn_cls_score = (conv2d_nopad(h_rpn3, W_cls_score) + b_cls_score)
    W_bbox_pred = weight_variable_bbox([1,1,1024,36], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    b_bbox_pred = bias_variable([36], "rpn", tr_stat = _rpn_stat, add_l2_stat = _add_l2)
    rpn_bbox_pred = (conv2d_nopad(h_rpn3, W_bbox_pred) + b_bbox_pred)
#RPN loss and accuracy calculation
rpn_bbox_pred = tf.reshape(rpn_bbox_pred, [1, height, width, A * 4])
rpn_cls_score_reshape = tf.reshape(rpn_cls_score, [-1, 2]) + 1e-20  # epsilon for numerical stability
# Anchor targets are computed on the CPU through the py_func wrapper above.
rpn_labels_ind, rpn_bbox_targets, rpn_bbox_inside_weights, rpn_bbox_outside_weights, rpn_size = \
    tf.py_func(anchor, [rpn_cls_score, gt_boxbatch], [tf.int64, tf.float32, tf.float32, tf.float32, tf.float32])
rpn_labels_ind = tf.reshape(tf.pack(rpn_labels_ind), [-1])
rpn_bbox_targets = tf.reshape(tf.pack(rpn_bbox_targets), [1, height, width, A * 4])
rpn_bbox_inside_weights = tf.reshape(tf.pack(rpn_bbox_inside_weights), [1, height, width, A * 4])
rpn_bbox_outside_weights = tf.reshape(tf.pack(rpn_bbox_outside_weights), [1, height, width, A * 4])
rpn_cls_soft = tf.nn.softmax(rpn_cls_score_reshape)
# Only anchors with label != -1 ("don't care") contribute to the cls loss.
rpn_cls_score_x = tf.reshape(tf.gather(rpn_cls_score_reshape,tf.where(tf.not_equal(rpn_labels_ind,-1))),[-1,2])
rpn_label = tf.reshape(tf.gather(rpn_labels_ind, tf.where(tf.not_equal(rpn_labels_ind,-1))),[-1])
rpn_loss_cls = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(rpn_cls_score_x, rpn_label))
unique_rpn_cls, o_cls, o_cls_ind = tf.py_func(cls_unique, \
    [rpn_cls_soft, rpn_labels_ind], [tf.float32, tf.float32, tf.float32])
unique_rpn_cls = tf.pack(unique_rpn_cls)
rpn_correct_prediction = tf.py_func(rpn_accuracy, [rpn_cls_soft, rpn_labels_ind], [tf.float32])
rpn_correct_prediction = tf.reshape(tf.pack(rpn_correct_prediction), [-1])
rpn_cls_accuracy = tf.reduce_mean(tf.cast(rpn_correct_prediction, tf.float32))
# Smooth-L1 regression loss with sigma = s1 (piecewise quadratic/linear,
# switching at |x| = 1/sigma^2), masked by the inside/outside weights.
sigma1 = s1 * s1
smoothL1_sign = tf.cast(tf.less(tf.abs(tf.sub(rpn_bbox_pred, rpn_bbox_targets)),1/sigma1),tf.float32)
rpn_loss_bbox = tf.mul(tf.reduce_mean(tf.reduce_sum(tf.mul(rpn_bbox_outside_weights,tf.add( \
    tf.mul(tf.mul(tf.pow(tf.mul(rpn_bbox_inside_weights, \
    tf.sub(rpn_bbox_pred, rpn_bbox_targets)),2),0.5*sigma1), smoothL1_sign), \
    tf.mul(tf.sub(tf.abs(tf.sub(rpn_bbox_pred, rpn_bbox_targets)),0.5/sigma1),\
    tf.abs(smoothL1_sign-1)))), reduction_indices=[1,2])),1)
rpn_loss_bbox_label = rpn_loss_bbox
zero_count, one_count = tf.py_func(bbox_counter, [rpn_labels_ind], [tf.float32, tf.float32])
#ROI PROPOSAL
rpn_cls_prob = rpn_cls_soft
rpn_cls_prob_reshape = tf.reshape(rpn_cls_prob, [1, height, width, 18])
rpn_rois = tf.py_func(proposal, [rpn_cls_prob_reshape, rpn_bbox_pred], [tf.float32])
rpn_rois = tf.reshape(rpn_rois, [-1, 5])
rcnn_rois, rcnn_labels_ind, rcnn_bbox_targets, rcnn_bbox_inside_w, rcnn_bbox_outside_w = \
    tf.py_func(proposal_target, [rpn_rois, gt_boxbatch], [tf.int32, tf.int64, tf.float32, tf.float32, tf.float32])
rcnn_rois = tf.cast(tf.reshape(tf.pack(rcnn_rois), [-1, 5]), tf.float32)
rcnn_labels_ind = tf.reshape(tf.pack(rcnn_labels_ind), [-1])
rcnn_bbox_targets = tf.reshape(tf.pack(rcnn_bbox_targets), [-1, 21 * 4])
rcnn_bbox_inside_w = tf.reshape(tf.pack(rcnn_bbox_inside_w), [-1, 21 * 4])
rcnn_bbox_outside_w = tf.reshape(tf.pack(rcnn_bbox_outside_w), [-1, 21 * 4])
#RCNN
with tf.name_scope("rcnn"):
    output_dim_tensor = tf.constant((7,7))
    h_conv13 = tf.transpose(h_conv13, [0,3,1,2]) #transpose since roi pool takes in NCHW
    [pool5, argmax] = roi_pooling_op.roi_pool(h_conv13, rcnn_rois, 7, 7, 1.0/16)
    pool5 = tf.transpose(pool5, [0,2,3,1]) #revert back to NHWC
    h_fc6 = tf.reshape(pool5, [-1, 7, 7, 1024])
    # Three conv5-style residual blocks per ROI, then global average pooling.
    r7 = residual_block(h_fc6, 512, False, "rcnn", tr_stat = _rcnn_stat, bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "near")
    r8 = residual_block(r7, 512, False, "rcnn", tr_stat = _rcnn_stat, bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "far")
    r9 = residual_block(r8, 512, False, "rcnn", tr_stat = _rcnn_stat, bn_tr_stat = _tr_bn, add_l2_stat = _add_l2, branch = "far")
    gp = tf.reduce_mean(r9, [1,2])
with tf.name_scope("fc"):
    # Per-ROI classification (21 classes) and class-specific bbox refinement
    # (21 * 4 deltas) heads.
    W_end_cls = weight_variable([2048, 21], "fc", tr_stat = _fc_stat, add_l2_stat = _add_l2)
    b_end_cls = bias_variable([21], "fc", tr_stat = _fc_stat, add_l2_stat = _add_l2)
    end_cls = tf.matmul(gp, W_end_cls) + b_end_cls + 1e-20
    W_end_bbox = weight_variable([2048, 84], "fc", tr_stat = _fc_stat, add_l2_stat = _add_l2)
    b_end_bbox = bias_variable([84], "fc", tr_stat = _fc_stat, add_l2_stat = _add_l2)
    end_bbox = tf.matmul(gp, W_end_bbox) + b_end_bbox + 1e-20
#END_LOSS
end_cls_soft = tf.nn.softmax(end_cls)
loss_cls = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(end_cls, rcnn_labels_ind))
loss_cls_label = loss_cls
pred = tf.argmax(end_cls_soft, 1)
end_correct_prediction = tf.equal(pred, rcnn_labels_ind)
end_cls_accuracy = tf.reduce_mean(tf.cast(end_correct_prediction, tf.float32))
# RCNN smooth-L1 bbox loss, sigma = s2 (same form as the RPN loss above).
# NOTE(review): the "*1" inside tf.pow(...) below looks like a leftover
# no-op factor — confirm against the RPN formula before removing.
sigma2 = s2 * s2
smoothL1_sign_bbox = tf.cast(tf.less(tf.abs(tf.sub(end_bbox, rcnn_bbox_targets)),1/sigma2),tf.float32)
loss_bbox = tf.mul(tf.reduce_mean(tf.reduce_sum(tf.mul(rcnn_bbox_outside_w,tf.add( \
    tf.mul(tf.mul(tf.pow(tf.mul(rcnn_bbox_inside_w, tf.sub(end_bbox, rcnn_bbox_targets))*1,2),0.5*sigma2), smoothL1_sign_bbox), \
    tf.mul(tf.sub(tf.abs(tf.sub(end_bbox, rcnn_bbox_targets)),0.5/sigma2),tf.abs(smoothL1_sign_bbox-1)))), reduction_indices=[1])),1)
# Alternating-optimization losses kept for reference (joint training is used):
#loss1 = rpn_loss_cls + rpn_loss_bbox #+ (tf.add_n(tf.get_collection('weight_losses_trunkbase')) + tf.add_n(tf.get_collection('weight_losses_rpn')))/256
#loss2 = loss_cls + loss_bbox #+ ((tf.add_n(tf.get_collection('weight_losses_trunk')) + tf.add_n(tf.get_collection('weight_losses_rcnn')))/rpn_batch_size)
#loss3 = rpn_loss_cls + rpn_loss_bbox #+ tf.add_n(tf.get_collection('weight_losses_rpn'))/256
#loss4 = loss_cls + loss_bbox #+ tf.add_n(tf.get_collection('weight_losses_rcnn'))/rpn_batch_size
total_loss = rpn_loss_cls + rpn_loss_bbox + loss_cls + loss_bbox + tf.add_n(tf.get_collection('weight_losses_trunk')) + tf.add_n(tf.get_collection('weight_losses_rpn')) +\
    tf.add_n(tf.get_collection('weight_losses_rcnn')) + tf.add_n(tf.get_collection('weight_losses_fc'))
#VARIABLES, OPTIMIZERS, AND LOSSES
# Split trainable variables per sub-network into weights vs biases so that
# biases can use a 2x learning rate (Fast R-CNN convention).
trunk_weights = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "trunk") if "weights" in v.name]
trunk_biases = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "trunk") if "biases" in v.name]
rpn_weights = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "rpn") if "weights" in v.name]
rpn_biases = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "rpn") if "biases" in v.name]
rcnn_weights = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "rcnn") if "weights" in v.name]
rcnn_biases = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "rcnn") if "biases" in v.name]
fc_weights = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "fc") if "weights" in v.name]
fc_biases = [v for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "fc") if "biases" in v.name]
lr = tf.placeholder(tf.float32)
var_list1_w = rcnn_weights + fc_weights + trunk_weights + rpn_weights
var_list1_b = rcnn_biases + fc_biases + trunk_biases + rpn_biases
opt1_w = tf.train.MomentumOptimizer(lr, momentum=0.9, use_nesterov=True)
opt1_b = tf.train.MomentumOptimizer(lr*2, momentum=0.9, use_nesterov=True)
grads1 = tf.gradients(total_loss, var_list1_w + var_list1_b)
grads1_w = grads1[:len(var_list1_w)]
grads1_b = grads1[len(var_list1_w):]
train_op1_w = opt1_w.apply_gradients(zip(grads1_w, var_list1_w))
train_op1_b = opt1_b.apply_gradients(zip(grads1_b, var_list1_b))
batchnorm_updates = tf.get_collection('update_ops')
batchnorm_updates_op = tf.group(*batchnorm_updates)
first_train_op = tf.group(train_op1_w, train_op1_b, batchnorm_updates_op)
anchor_fraction = one_count / (zero_count + one_count)
#TRAINING
trunk_vars = [v for v in tf.all_variables() if np.logical_and("trunk" in v.name, "Momentum" not in v.name)]
rpn_vars = [v for v in tf.all_variables() if np.logical_and("rpn" in v.name, "Momentum" not in v.name)]
rcnn_vars = [v for v in tf.all_variables() if np.logical_and("rcnn" in v.name, "Momentum" not in v.name)]
saver_all_trunkrcnn = tf.train.Saver(trunk_vars + rcnn_vars)
saver_all = tf.train.Saver()
run_size = len(image_files)
accu_rpn_losscls = 0
accu_rpn_lossbbox = 0
accu_rcnn_losscls = 0
accu_rcnn_lossbbox = 0
accu_rpn_accuracy = 0
accu_rcnn_accuracy = 0
accu_anchor_fraction = 0
accu_anchor_count = 0
epoch = 40
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init, feed_dict = {gate : [0.0]})
#saver_all_trunkrcnn.restore(sess, "./imagenet_resnet.ckpt")
#saver_all_trunkrcnn.restore(sess, "./caltech_160k.ckpt")
#saver_all.restore(sess,"./z7_300k.ckpt")
saver_all.restore(sess, "./z7_end_to_end_girschick.ckpt")
for x in range(epoch):
rand_id = np.random.permutation(np.arange(run_size))
for i in range(run_size):
if i % 10 == 0:
print "epoch: " + str(x) + " iter: " + str(x*run_size+i)
#PROCESS IMAGE AND LABELS
img_train = cv2.imread(image_files[rand_id[i]])
img_train = cv2.resize(img_train, (1000, 600))
lb_train = np.array(list(csv.reader(open(gt_files[rand_id[i]],"rb"),delimiter=','))).astype('int')
gt_train = np.zeros((lb_train.shape[0], 5),dtype=np.int64)
gt_train[:,4] = lb_train[:,0]
gt_train[:,:4] = lb_train[:,1:]
img_train, gt_train = | |
# -*- coding: utf-8 -*-
# Standard library imports
from shutil import which
from ctypes import WINFUNCTYPE, windll
from ctypes.wintypes import BOOL, DWORD
import codecs
import os
import re
import shlex
import signal
import socket
import subprocess
import threading
import time
from shutil import which
# Local imports
from .winpty import PTY
class PtyProcess(object):
    """This class represents a process running in a pseudoterminal.

    The main constructor is the :meth:`spawn` classmethod.
    """

    def __init__(self, pty, emit_cursors=True):
        """Wrap an already-spawned ``PTY`` and start pumping its output.

        A daemon thread copies pty output into a loopback TCP socket pair
        so that ``self.fd`` is a selectable file descriptor.
        """
        assert isinstance(pty, PTY)
        self.pty = pty
        self.pid = pty.pid
        # self.fd = pty.fd
        self.read_blocking = bool(int(os.environ.get('PYWINPTY_BLOCK', 1)))
        self.closed = False
        self.flag_eof = False
        # Whether write() should also record what was sent in _echo_buf.
        # BUGFIX: this attribute was never initialized, so every call to
        # write() raised AttributeError.
        self.echo = False

        # Used by terminate() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterterminate = 0.1
        # Used by close() to give kernel time to update process status.
        # Time in seconds.
        self.delayafterclose = 0.1

        # Set up our file reader sockets.
        self._server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._server.bind(("127.0.0.1", 0))
        address = self._server.getsockname()
        self._server.listen(1)

        # Read from the pty in a thread.
        self._thread = threading.Thread(target=_read_in_thread,
                                        args=(address, self.pty, emit_cursors))
        # setDaemon() is deprecated; assign the attribute directly.
        self._thread.daemon = True
        self._thread.start()

        self.fileobj, _ = self._server.accept()
        self.fd = self.fileobj.fileno()

        self._echo_buf = []

    @classmethod
    def spawn(cls, argv, cwd=None, env=None, dimensions=(24, 80),
              emit_cursors=True, backend=None):
        """Start the given command in a child process in a pseudo terminal.

        This does all the setting up the pty, and returns an instance of
        PtyProcess.

        Dimensions of the psuedoterminal used for the subprocess can be
        specified as a tuple (rows, cols), or the default (24, 80) will be
        used.

        ``backend`` optionally selects the winpty backend; when None, the
        PYWINPTY_BACKEND environment variable is consulted instead.
        """
        if isinstance(argv, str):
            argv = shlex.split(argv, posix=False)

        if not isinstance(argv, (list, tuple)):
            raise TypeError("Expected a list or tuple for argv, got %r" % argv)

        # Shallow copy of argv so we can modify it
        argv = argv[:]
        command = argv[0]
        env = env or os.environ

        path = env.get('PATH', os.defpath)
        command_with_path = which(command, path=path)
        if command_with_path is None:
            raise FileNotFoundError(
                'The command was not found or was not ' +
                'executable: %s.' % command
            )
        command = command_with_path
        argv[0] = command
        cmdline = ' ' + subprocess.list2cmdline(argv[1:])
        cwd = cwd or os.getcwd()

        # BUGFIX: `backend` is now a keyword parameter; previously it was
        # read here before any assignment, raising UnboundLocalError on
        # every call.
        backend = backend or os.environ.get('PYWINPTY_BACKEND', None)
        backend = int(backend) if backend is not None else backend
        proc = PTY(dimensions[1], dimensions[0],
                   backend=backend)

        # Create the environment string: NUL-separated "key=value" pairs.
        envStrs = []
        for (key, value) in env.items():
            envStrs.append('%s=%s' % (key, value))
        env = '\0'.join(envStrs) + '\0'

        # command = bytes(command, encoding)
        # cwd = bytes(cwd, encoding)
        # cmdline = bytes(cmdline, encoding)
        # env = bytes(env, encoding)

        if len(argv) == 1:
            proc.spawn(command, cwd=cwd, env=env)
        else:
            proc.spawn(command, cwd=cwd, env=env, cmdline=cmdline)

        inst = cls(proc, emit_cursors)
        inst._winsize = dimensions

        # Set some informational attributes
        inst.argv = argv
        if env is not None:
            inst.env = env
        if cwd is not None:
            inst.launch_dir = cwd

        return inst

    @property
    def exitstatus(self):
        """The exit status of the process.
        """
        return self.pty.get_exitstatus()

    def fileno(self):
        """This returns the file descriptor of the pty for the child.
        """
        return self.fd

    def close(self, force=False):
        """This closes the connection with the child application. Note that
        calling close() more than once is valid. This emulates standard Python
        behavior with files. Set force to True if you want to make sure that
        the child is terminated (SIGKILL is sent if the child ignores
        SIGINT)."""
        if not self.closed:
            self.fileobj.close()
            self._server.close()
            # Give kernel time to update process status.
            time.sleep(self.delayafterclose)
            if self.isalive():
                if not self.terminate(force):
                    raise IOError('Could not terminate the child.')
            self.fd = -1
            self.closed = True

    def __del__(self):
        """This makes sure that no system resources are left open. Python only
        garbage collects Python objects. OS file descriptors are not Python
        objects, so they must be handled explicitly. If the child file
        descriptor was opened outside of this class (passed to the constructor)
        then this does not close it.
        """
        # It is possible for __del__ methods to execute during the
        # teardown of the Python VM itself. Thus self.close() may
        # trigger an exception because os.close may be None.
        try:
            self.close()
        except Exception:
            pass

    def flush(self):
        """This does nothing. It is here to support the interface for a
        File-like object. """
        pass

    def isatty(self):
        """This returns True if the file descriptor is open and connected to a
        tty(-like) device, else False."""
        return self.isalive()

    def read(self, size=1024):
        """Read and return at most ``size`` characters from the pty.

        Can block if there is nothing to read. Raises :exc:`EOFError` if the
        terminal was closed.
        """
        if self.flag_eof:
            raise EOFError('Pty is closed')
        with _allow_interrupt(self.close):
            data = self.fileobj.recv(size)
        if not data:
            self.flag_eof = True
            raise EOFError('Pty is closed')
        if data == b'0011Ignore':
            # BUGFIX: was `data = ''` (str), which made the final
            # decode() call below fail with AttributeError.
            data = b''
        # If a multibyte UTF-8 sequence was cut in half, pull extra bytes
        # until the buffer decodes cleanly.
        err = True
        while err and data:
            try:
                data.decode('utf-8')
                err = False
            except UnicodeDecodeError:
                data += self.fileobj.recv(1)
        return data.decode('utf-8')

    def readline(self):
        """Read one line from the pseudoterminal as bytes.

        Can block if there is nothing to read. Raises :exc:`EOFError` if the
        terminal was closed.
        """
        if self.flag_eof:
            raise EOFError('Pty is closed')
        buf = []
        while 1:
            try:
                ch = self.read(1)
            except EOFError:
                return ''.join(buf)
            buf.append(ch)
            if ch == '\n':
                return ''.join(buf)

    def write(self, s):
        """Write the string ``s`` to the pseudoterminal.

        Returns the number of bytes written.
        """
        if not self.pty.isalive():
            raise EOFError('Pty is closed')
        success, nbytes = self.pty.write(s)
        if self.echo:
            self._echo_buf += [s]
        if not success:
            raise IOError('Write failed')
        return nbytes

    def terminate(self, force=False):
        """This forces a child process to terminate."""
        if not self.isalive():
            return True
        self.kill(signal.SIGINT)
        time.sleep(self.delayafterterminate)
        if not self.isalive():
            return True
        if force:
            self.kill(signal.SIGTERM)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
            else:
                return False

    def wait(self):
        """This waits until the child exits. This is a blocking call. This will
        not read any data from the child.
        """
        while self.isalive():
            time.sleep(0.1)
        return self.exitstatus

    def isalive(self):
        """This tests if the child process is running or not. This is
        non-blocking. If the child was terminated then this will read the
        exitstatus or signalstatus of the child. This returns True if the child
        process appears to be running or False if not.
        """
        alive = self.pty.isalive()
        self.closed = not alive
        return alive

    def kill(self, sig=None):
        """Kill the process with the given signal.
        """
        os.kill(self.pid, sig)

    def sendcontrol(self, char):
        '''Helper method that wraps send() with mnemonic access for sending control
        character to the child (such as Ctrl-C or Ctrl-D). For example, to send
        Ctrl-G (ASCII 7, bell, '\a')::

            child.sendcontrol('g')

        See also, sendintr() and sendeof().
        '''
        char = char.lower()
        a = ord(char)
        if 97 <= a <= 122:
            a = a - ord('a') + 1
            byte = str(bytes([a]))
            return self.pty.write(byte), byte
        d = {'@': 0, '`': 0,
             '[': 27, '{': 27,
             '\\': 28, '|': 28,
             ']': 29, '}': 29,
             '^': 30, '~': 30,
             '_': 31,
             '?': 127}
        if char not in d:
            return 0, ''
        byte = str(bytes([d[char]]))
        return self.pty.write(byte), byte

    def sendeof(self):
        """This sends an EOF to the child. This sends a character which causes
        the pending parent output buffer to be sent to the waiting child
        program without waiting for end-of-line. If it is the first character
        of the line, the read() in the user program returns 0, which signifies
        end-of-file. This means to work as expected a sendeof() has to be
        called at the beginning of a line. This method does not send a newline.
        It is the responsibility of the caller to ensure the eof is sent at the
        beginning of a line."""
        # Send control character 4 (Ctrl-D)
        return self.pty.write('\x04'), '\x04'

    def sendintr(self):
        """This sends a SIGINT to the child. It does not require
        the SIGINT to be the first character on a line. """
        # Send control character 3 (Ctrl-C)
        return self.pty.write('\x03'), '\x03'

    def eof(self):
        """This returns True if the EOF exception was ever raised.
        """
        return self.flag_eof

    def getwinsize(self):
        """Return the window size of the pseudoterminal as a tuple (rows, cols).
        """
        return self._winsize

    def setwinsize(self, rows, cols):
        """Set the terminal window size of the child tty.
        """
        self._winsize = (rows, cols)
        self.pty.set_size(cols, rows)
def _read_in_thread(address, pty, emit_cursors):
"""Read data from the pty in a thread.
"""
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client.connect(address)
buf = b''
cursors = [b'\x1b[0K', b'\x1b[5G', b'\x1b[?25h', b'\x1b[?25l']
matcher = re.compile(rb'(\x1b\[?\??(\d+)?)\Z', | |
from misc import ln, softmax
import numpy as np
import scipy.special as scs
from misc import D_KL_nd_dirichlet, D_KL_dirichlet_categorical
class HierarchicalPerception(object):
    """Active-inference perception model.

    Infers hidden states, policies and contexts with forward/backward
    message passing, and learns Dirichlet parameters over reward
    contingencies and policy choice.

    Axis conventions (inferred from the indexing below — confirm):
    nh = number of hidden states, T = trial length, npi = number of
    policies, nc = number of contexts.
    """

    def __init__(self,
                 generative_model_observations,
                 generative_model_states,
                 generative_model_rewards,
                 transition_matrix_context,
                 prior_states,
                 prior_rewards,
                 prior_policies,
                 dirichlet_pol_params = None,
                 dirichlet_rew_params = None,
                 generative_model_context = None,
                 T=5, pol_lambda=0, r_lambda=0, non_decaying=0, dec_temp=1.):
        """Store copies of all model components and initialize the reward
        model from the Dirichlet reward parameters (normalized counts)."""
        self.generative_model_observations = generative_model_observations.copy()
        # Ensure the state transition model always carries a context axis.
        if len(generative_model_states.shape) <= 3:
            self.generative_model_states = generative_model_states.copy()[:,:,:,None]
        else:
            self.generative_model_states = generative_model_states.copy()
        self.generative_model_rewards = generative_model_rewards.copy()
        self.transition_matrix_context = transition_matrix_context.copy()
        self.prior_rewards = prior_rewards.copy()
        self.prior_states = prior_states.copy()
        self.prior_policies = prior_policies.copy()
        self.npi = prior_policies.shape[0]
        self.T = T
        self.nh = prior_states.shape[0]
        self.pol_lambda = pol_lambda      # forgetting rate for policy counts
        self.r_lambda = r_lambda          # forgetting rate for reward counts
        self.non_decaying = non_decaying  # first states exempt from decay
        self.dec_temp = dec_temp          # decision temperature (policy posterior)
        if len(generative_model_rewards.shape) > 2:
            self.infer_context = True
            self.nc = generative_model_rewards.shape[2]
        else:
            self.nc = 1
            self.generative_model_rewards = self.generative_model_rewards[:,:,np.newaxis]
        if dirichlet_pol_params is not None:
            self.dirichlet_pol_params = dirichlet_pol_params.copy()
        if dirichlet_rew_params is not None:
            self.dirichlet_rew_params = dirichlet_rew_params.copy()
        if generative_model_context is not None:
            self.generative_model_context = generative_model_context.copy()

        # NOTE(review): assumes dirichlet_rew_params was provided — when it
        # is None, self.dirichlet_rew_params does not exist here; confirm
        # callers always pass it.
        for c in range(self.nc):
            for state in range(self.nh):
                # Reward model = mean of the Dirichlet over reward outcomes.
                self.generative_model_rewards[:,state,c] = self.dirichlet_rew_params[:,state,c] / self.dirichlet_rew_params[:,state,c].sum()
#                self.generative_model_rewards[:,state,c] =\
#                np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
#                       -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
#                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()

    def reset(self, params, fixed):
        """Reset learned quantities: reward model and its Dirichlet counts
        from `fixed`, flat policy prior built from concentration `params`."""
        alphas = np.zeros((self.npi, self.nc)) + params
        self.generative_model_rewards[:] = fixed['rew_mod'].copy()
        self.dirichlet_rew_params[:] = fixed['beta_rew'].copy()
        self.prior_policies[:] = alphas / alphas.sum(axis=0)[None,:]
        self.dirichlet_pol_params = alphas

    def instantiate_messages(self, policies):
        """Allocate and initialize forward/backward/observation/reward
        messages for a new trial, then seed the backward pass."""
        self.bwd_messages = np.zeros((self.nh, self.T, self.npi, self.nc))
        self.bwd_messages[:,-1,:, :] = 1./self.nh
        self.fwd_messages = np.zeros((self.nh, self.T, self.npi, self.nc))
        self.fwd_messages[:, 0, :, :] = self.prior_states[:, np.newaxis, np.newaxis]

        self.fwd_norms = np.zeros((self.T+1, self.npi, self.nc))
        self.fwd_norms[0,:,:] = 1.

        self.obs_messages = np.zeros((self.nh, self.T, self.nc)) + 1/self.nh#self.prior_observations.dot(self.generative_model_observations)
        #self.obs_messages = np.tile(self.obs_messages,(self.T,1)).T

        self.rew_messages = np.zeros((self.nh, self.T, self.nc))
        #self.rew_messages[:] = np.tile(self.prior_rewards.dot(self.generative_model_rewards),(self.T,1)).T

        for c in range(self.nc):
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            for pi, cstates in enumerate(policies):
                # Backward pass seeded from the final time step.
                for t, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - t
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u,c])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()

    def update_messages(self, t, pi, cs, c=0):
        """Refresh backward messages before time t and forward messages
        after time t for policy pi in context c."""
        if t > 0:
            for i, u in enumerate(np.flip(cs[:t], axis = 0)):
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-i,pi,c]*\
                    self.obs_messages[:,t-i,c]*\
                    self.rew_messages[:, t-i,c]
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-1-i,pi,c]\
                    .dot(self.generative_model_states[:,:,u,c])
                norm = self.bwd_messages[:,t-1-i,pi,c].sum()
                if norm > 0:
                    self.bwd_messages[:,t-1-i, pi,c] /= norm

        if len(cs[t:]) > 0:
            for i, u in enumerate(cs[t:]):
                self.fwd_messages[:, t+1+i, pi,c] = self.fwd_messages[:,t+i, pi,c]*\
                    self.obs_messages[:, t+i,c]*\
                    self.rew_messages[:, t+i,c]
                self.fwd_messages[:, t+1+i, pi,c] = \
                    self.generative_model_states[:,:,u,c].\
                    dot(self.fwd_messages[:, t+1+i, pi,c])
                self.fwd_norms[t+1+i,pi,c] = self.fwd_messages[:,t+1+i,pi,c].sum()
                if self.fwd_norms[t+1+i, pi,c] > 0: #???? Shouldn't this not happen?
                    self.fwd_messages[:,t+1+i, pi,c] /= self.fwd_norms[t+1+i,pi,c]

    def reset_preferences(self, t, new_preference, policies):
        """Replace the reward preference and recompute the reward and
        backward messages accordingly."""
        self.prior_rewards = new_preference.copy()

        for c in range(self.nc):
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            for pi, cstates in enumerate(policies[t:]):
                for i, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - i
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u,c])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()

    def update_beliefs_states(self, tau, t, observation, reward, policies, possible_policies):
        """Condition on the new observation/reward and return the posterior
        over states (nh x T x npi x nc)."""
        #estimate expected state distribution
        if t == 0:
            self.instantiate_messages(policies)

        self.obs_messages[:,t,:] = self.generative_model_observations[observation][:,np.newaxis]

        self.rew_messages[:,t,:] = self.generative_model_rewards[reward]

        for c in range(self.nc):
            for pi, cs in enumerate(policies):
                if self.prior_policies[pi,c] > 1e-15 and pi in possible_policies:
                    self.update_messages(t, pi, cs, c)
                else:
                    self.fwd_messages[:,:,pi,c] = 0#1./self.nh

        #estimate posterior state distribution
        posterior = self.fwd_messages*self.bwd_messages*self.obs_messages[:,:,np.newaxis,:]*self.rew_messages[:,:,np.newaxis,:]
        norm = posterior.sum(axis = 0)
        self.fwd_norms[-1] = norm[-1]
        non_zero = norm > 0
        posterior[:,non_zero] /= norm[non_zero]
        return np.nan_to_num(posterior)

    def update_beliefs_policies(self, tau, t):
        """Posterior over policies: likelihood**dec_temp times prior,
        normalized per context. Returns (posterior, likelihood)."""
        #print((prior_policies>1e-4).sum())
        likelihood = self.fwd_norms.prod(axis=0)
        posterior = np.power(likelihood, self.dec_temp) * self.prior_policies
        likelihood /= likelihood.sum(axis=0)[np.newaxis,:]
        posterior/= posterior.sum(axis=0)[np.newaxis,:]
        posterior = np.nan_to_num(posterior)

        #posterior = softmax(ln(self.fwd_norms).sum(axis = 0)+ln(self.prior_policies))

        #np.testing.assert_allclose(post, posterior)

        return posterior, likelihood

    def update_beliefs_context(self, tau, t, reward, posterior_states, posterior_policies, prior_context, policies, context=None):
        """Posterior over contexts from outcome surprise, policy surprise,
        policy entropy and (optionally) an observed context cue."""
        post_policies = (prior_context[np.newaxis,:] * posterior_policies).sum(axis=1)
        beta = self.dirichlet_rew_params.copy()
        states = (posterior_states[:,t,:] * post_policies[np.newaxis,:,np.newaxis]).sum(axis=1)
        beta_prime = self.dirichlet_rew_params.copy()
        beta_prime[reward] = beta[reward] + states

#        for c in range(self.nc):
#            for state in range(self.nh):
#                self.generative_model_rewards[:,state,c] =\
#                np.exp(scs.digamma(beta_prime[:,state,c])\
#                       -scs.digamma(beta_prime[:,state,c].sum()))
#                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
#
#            self.rew_messages[:,t+1:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
#
#        for c in range(self.nc):
#            for pi, cs in enumerate(policies):
#                if self.prior_policies[pi,c] > 1e-15:
#                    self.update_messages(t, pi, cs, c)
#                else:
#                    self.fwd_messages[:,:,pi,c] = 1./self.nh #0

        alpha = self.dirichlet_pol_params.copy()
        if t == self.T-1:
            chosen_pol = np.argmax(post_policies)
            inf_context = np.argmax(prior_context)
            alpha_prime = self.dirichlet_pol_params.copy()
            alpha_prime[chosen_pol,:] += prior_context
            #alpha_prime[chosen_pol,inf_context] = self.dirichlet_pol_params[chosen_pol,inf_context] + 1
        else:
            alpha_prime = alpha

        if self.nc == 1:
            posterior = np.ones(1)
        else:
            # todo: recalc
            #outcome_surprise = ((states * prior_context[np.newaxis,:]).sum(axis=1)[:,np.newaxis] * (scs.digamma(beta_prime[reward]) - scs.digamma(beta_prime.sum(axis=0)))).sum(axis=0)
            if t>0:
                outcome_surprise = (posterior_policies * ln(self.fwd_norms.prod(axis=0))).sum(axis=0)
                entropy = - (posterior_policies * ln(posterior_policies)).sum(axis=0)
                #policy_surprise = (post_policies[:,np.newaxis] * scs.digamma(alpha_prime)).sum(axis=0) - scs.digamma(alpha_prime.sum(axis=0))
                policy_surprise = (posterior_policies * scs.digamma(alpha_prime)).sum(axis=0) - scs.digamma(alpha_prime.sum(axis=0))
            else:
                outcome_surprise = 0
                entropy = 0
                policy_surprise = 0

            if context is not None:
                context_obs_suprise = ln(self.generative_model_context[context]+1e-10)
            else:
                context_obs_suprise = 0
            posterior = outcome_surprise + policy_surprise + entropy + context_obs_suprise

            #+ np.nan_to_num((posterior_policies * ln(self.fwd_norms).sum(axis = 0))).sum(axis=0)#\

#            if tau in range(90,120) and t == 1:
#                #print(tau, np.exp(outcome_surprise), np.exp(policy_surprise))
#                print(tau, np.exp(outcome_surprise[1])/np.exp(outcome_surprise[0]), np.exp(policy_surprise[1])/np.exp(policy_surprise[0]))

            posterior = np.nan_to_num(softmax(posterior+ln(prior_context)))

        return posterior

    def update_beliefs_dirichlet_pol_params(self, tau, t, posterior_policies, posterior_context = [1]):
        """At the last time step: decay the Dirichlet policy counts toward
        1 (rate pol_lambda), add the chosen policy weighted by the context
        posterior, and renormalize the policy prior."""
        assert(t == self.T-1)
        chosen_pol = np.argmax(posterior_policies, axis=0)
#        self.dirichlet_pol_params[chosen_pol,:] += posterior_context.sum(axis=0)/posterior_context.sum()
        self.dirichlet_pol_params = (1-self.pol_lambda) * self.dirichlet_pol_params + 1 - (1-self.pol_lambda)
        self.dirichlet_pol_params[chosen_pol,:] += posterior_context
        self.prior_policies[:] = self.dirichlet_pol_params.copy() #np.exp(scs.digamma(self.dirichlet_pol_params) - scs.digamma(self.dirichlet_pol_params.sum(axis=0))[np.newaxis,:])
        self.prior_policies /= self.prior_policies.sum(axis=0)[np.newaxis,:]

        return self.dirichlet_pol_params, self.prior_policies

    def update_beliefs_dirichlet_rew_params(self, tau, t, reward, posterior_states, posterior_policies, posterior_context = [1]):
        """Decay (rate r_lambda, sparing the first `non_decaying` states)
        and update the Dirichlet reward counts with the observed reward
        weighted by state and context posteriors; refresh the expected
        reward model (digamma form) and the reward messages."""
        states = (posterior_states[:,t,:,:] * posterior_policies[np.newaxis,:,:]).sum(axis=1)
        old = self.dirichlet_rew_params.copy()
#        c = np.argmax(posterior_context)
#        self.dirichlet_rew_params[reward,:,c] += states[:,c]
        self.dirichlet_rew_params[:,self.non_decaying:,:] = (1-self.r_lambda) * self.dirichlet_rew_params[:,self.non_decaying:,:] +1 - (1-self.r_lambda)
        self.dirichlet_rew_params[reward,:,:] += states * posterior_context[np.newaxis,:]
        for c in range(self.nc):
            for state in range(self.nh):
                #self.generative_model_rewards[:,state,c] = self.dirichlet_rew_params[:,state,c] / self.dirichlet_rew_params[:,state,c].sum()
                self.generative_model_rewards[:,state,c] =\
                np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
                       -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()

            self.rew_messages[:,t+1:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]

#        for c in range(self.nc):
#            for pi, cs in enumerate(policies):
#                if self.prior_policies[pi,c] > 1e-15:
#                    self.update_messages(t, pi, cs, c)
#                else:
#                    self.fwd_messages[:,:,pi,c] = 1./self.nh #0

        return self.dirichlet_rew_params
class TwoStepPerception(object):
    def __init__(self,
                 generative_model_observations,
                 generative_model_states,
                 generative_model_rewards,
                 transition_matrix_context,
                 prior_states,
                 prior_rewards,
                 prior_policies,
                 dirichlet_pol_params = None,
                 dirichlet_rew_params = None,
                 T=5):
        """Store copies of all model components and build the reward model
        as the (normalized) exponentiated digamma of the Dirichlet reward
        parameters."""
        self.generative_model_observations = generative_model_observations.copy()
        self.generative_model_states = generative_model_states.copy()
        self.generative_model_rewards = generative_model_rewards.copy()
        self.transition_matrix_context = transition_matrix_context.copy()
        self.prior_rewards = prior_rewards.copy()
        self.prior_states = prior_states.copy()
        self.prior_policies = prior_policies.copy()
        self.T = T
        self.nh = prior_states.shape[0]
        if len(generative_model_rewards.shape) > 2:
            self.infer_context = True
            self.nc = generative_model_rewards.shape[2]
        else:
            self.nc = 1
            self.generative_model_rewards = self.generative_model_rewards[:,:,np.newaxis]
        if dirichlet_pol_params is not None:
            self.dirichlet_pol_params = dirichlet_pol_params.copy()
        if dirichlet_rew_params is not None:
            self.dirichlet_rew_params = dirichlet_rew_params.copy()

        # NOTE(review): assumes dirichlet_rew_params was provided — when it
        # is None, self.dirichlet_rew_params does not exist here.
        for c in range(self.nc):
            for state in range(self.nh):
                self.generative_model_rewards[:,state,c] =\
                np.exp(scs.digamma(self.dirichlet_rew_params[:,state,c])\
                       -scs.digamma(self.dirichlet_rew_params[:,state,c].sum()))
                self.generative_model_rewards[:,state,c] /= self.generative_model_rewards[:,state,c].sum()
    def instantiate_messages(self, policies):
        """Allocate forward/backward/observation/reward messages for a new
        trial and seed the backward pass from the final time step."""
        npi = policies.shape[0]
        self.bwd_messages = np.zeros((self.nh, self.T, npi, self.nc))
        self.bwd_messages[:,-1,:, :] = 1./self.nh
        self.fwd_messages = np.zeros((self.nh, self.T, npi, self.nc))
        self.fwd_messages[:, 0, :, :] = self.prior_states[:, np.newaxis, np.newaxis]

        self.fwd_norms = np.zeros((self.T+1, npi, self.nc))
        self.fwd_norms[0,:,:] = 1.

        self.obs_messages = np.zeros((self.nh, self.T, self.nc)) + 1/self.nh#self.prior_observations.dot(self.generative_model_observations)
        #self.obs_messages = np.tile(self.obs_messages,(self.T,1)).T

        self.rew_messages = np.zeros((self.nh, self.T, self.nc))
        #self.rew_messages[:] = np.tile(self.prior_rewards.dot(self.generative_model_rewards),(self.T,1)).T

        for c in range(self.nc):
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            for pi, cstates in enumerate(policies):
                for t, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - t
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()
    def update_messages(self, t, pi, cs, c=0):
        """Refresh backward messages before time t and forward messages
        after time t for policy pi in context c."""
        if t > 0:
            for i, u in enumerate(np.flip(cs[:t], axis = 0)):
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-i,pi,c]*\
                    self.obs_messages[:,t-i,c]*\
                    self.rew_messages[:, t-i,c]
                self.bwd_messages[:,t-1-i,pi,c] = self.bwd_messages[:,t-1-i,pi,c]\
                    .dot(self.generative_model_states[:,:,u])
                norm = self.bwd_messages[:,t-1-i,pi,c].sum()
                if norm > 0:
                    self.bwd_messages[:,t-1-i, pi,c] /= norm

        if len(cs[t:]) > 0:
            for i, u in enumerate(cs[t:]):
                self.fwd_messages[:, t+1+i, pi,c] = self.fwd_messages[:,t+i, pi,c]*\
                    self.obs_messages[:, t+i,c]*\
                    self.rew_messages[:, t+i,c]
                self.fwd_messages[:, t+1+i, pi,c] = \
                    self.generative_model_states[:,:,u].\
                    dot(self.fwd_messages[:, t+1+i, pi,c])
                self.fwd_norms[t+1+i,pi,c] = self.fwd_messages[:,t+1+i,pi,c].sum()
                if self.fwd_norms[t+1+i, pi,c] > 0: #???? Shouldn't this not happen?
                    self.fwd_messages[:,t+1+i, pi,c] /= self.fwd_norms[t+1+i,pi,c]
    def reset_preferences(self, t, new_preference, policies):
        """Replace the reward preference and recompute the reward and
        backward messages accordingly."""
        self.prior_rewards = new_preference.copy()

        for c in range(self.nc):
            self.rew_messages[:,:,c] = self.prior_rewards.dot(self.generative_model_rewards[:,:,c])[:,np.newaxis]
            for pi, cstates in enumerate(policies[t:]):
                for i, u in enumerate(np.flip(cstates, axis = 0)):
                    tp = self.T - 2 - i
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp+1,pi,c]*\
                        self.obs_messages[:, tp+1,c]*\
                        self.rew_messages[:, tp+1,c]
                    self.bwd_messages[:,tp,pi,c] = self.bwd_messages[:,tp,pi,c]\
                        .dot(self.generative_model_states[:,:,u])
                    self.bwd_messages[:,tp, pi,c] /= self.bwd_messages[:,tp,pi,c].sum()
    def update_beliefs_states(self, tau, t, observation, reward, policies, possible_policies):
        """Condition on the new observation/reward and return the posterior
        over states (nh x T x npi x nc)."""
        #estimate expected state distribution
        if t == 0:
            self.instantiate_messages(policies)

        self.obs_messages[:,t,:] = self.generative_model_observations[observation][:,np.newaxis]

        self.rew_messages[:,t,:] = self.generative_model_rewards[reward]

        for c in range(self.nc):
            for pi, cs in enumerate(policies):
                if self.prior_policies[pi,c] > 1e-15 and pi in possible_policies:
                    self.update_messages(t, pi, cs, c)
                else:
                    self.fwd_messages[:,:,pi,c] = 0#1./self.nh #0

        #estimate posterior state distribution
        posterior = self.fwd_messages*self.bwd_messages*self.obs_messages[:,:,np.newaxis,:]*self.rew_messages[:,:,np.newaxis,:]
        norm = posterior.sum(axis = 0)
        self.fwd_norms[-1] = norm[-1]
        # NOTE(review): unlike HierarchicalPerception, this divides without
        # masking zero norms — relies on nan_to_num below to clean up.
        posterior /= norm
        return np.nan_to_num(posterior)
    def update_beliefs_policies(self, tau, t, gamma=4):
        """Posterior over policies: likelihood**gamma times prior,
        normalized per context. Returns (posterior, likelihood)."""
        #print((prior_policies>1e-4).sum())
        likelihood = self.fwd_norms.prod(axis=0)
        posterior = np.power(likelihood,gamma) * self.prior_policies
        posterior/= posterior.sum(axis=0)[np.newaxis,:]
        #posterior = softmax(ln(self.fwd_norms).sum(axis = 0)+ln(self.prior_policies))

        #np.testing.assert_allclose(post, posterior)

        return posterior, likelihood
def update_beliefs_context(self, tau, t, reward, posterior_states, posterior_policies, prior_context, policies):
post_policies = (prior_context[np.newaxis,:] * posterior_policies).sum(axis=1)
beta = self.dirichlet_rew_params.copy()
states | |
<gh_stars>0
#Es.1 Given an array containing only 0s and 1s, write a program that sorts it in linear time without using auxiliary arrays or lists
def ordina(A):
    """Sort an array of 0s and 1s in place, in linear time.

    One pass counts the zeros; the array is then overwritten with that
    many 0s followed by 1s. Returns the same list object.
    """
    zeros = sum(1 for value in A if value == 0)
    print("v0: " + str(zeros))
    for k in range(len(A)):
        A[k] = 0 if k < zeros else 1
    return A
# Demo for Es.1: sort a binary array in place and show the result.
vet = [0,1,0,1,0,1,1,1,0,0,0,1,0,1]
print(ordina(vet))
#Es.2 Write a program that, given an array A containing only the values -1, 35 and 27685, sorts it in linear time without using auxiliary arrays or lists
def ordina2(A):
    """Sort, in place and in linear time, an array containing only the
    values -1, 35 and 27685 (a three-way counting sort).

    Returns the same list object.
    """
    minus_ones = A.count(-1)
    thirty_fives = A.count(35)
    for k in range(len(A)):
        if k < minus_ones:
            A[k] = -1
        elif k < minus_ones + thirty_fives:
            A[k] = 35
        else:
            A[k] = 27685
    return A
# Demo for Es.2: sort the three-valued array in place and show the result.
vet = [35,35,-1,27685,35,27685,-1,-1,-1,27685,35,-1,35,35]
print(ordina2(vet))
#Es.3 Write a program that, given an array A containing integers between 0 and m-1 with m<len(A), sorts it in linear time
def scambia(v, i, j):
    """Swap v[i] and v[j] in place (no-op when the indices coincide)."""
    if i == j:
        return
    v[j], v[i] = v[i], v[j]
def quicksort(v,l,r):#average n*log(n), worst case n^2
    """In-place Hoare-style quicksort of v[l..r] (inclusive bounds)."""
    if l>=r:
        return
    i=l
    j=r
    x=v[(l+r)//2]#Pivot: always the middle element of the range
    while i<j:
        while x>v[i]:
            i=i+1
        while v[j]>x:
            j=j-1
        if i<=j:
            scambia(v,i,j)
            i=i+1
            j=j-1
    # Recurse on the two partitions around the pivot.
    if l<j:
        quicksort(v,l,j)
    if i<r:
        quicksort(v,i,r)
    #When the loop exits, this partition step is done
def ordina3(A, m):
    """Sort A (integers in [0, m-1]) in place by delegating to quicksort.

    Note: quicksort is O(n log n) on average, not linear — see ordina3_2
    for the truly linear counting-sort variant.
    """
    last = len(A) - 1
    quicksort(A, 0, last)
    return A
#Auxiliary arrays or lists are allowed here, so an ascending comparison sort works; quicksort is the best choice so far
def ordina3_2(A, m):
    """Counting sort: sort A in place in O(n+m) time.

    A must contain integers in [0, m-1]. One pass tallies how many times
    each value occurs; a second pass rewrites A from those tallies.
    Returns the same list object.
    """
    counts = [0] * m
    for value in A:
        counts[value] += 1
    pos = 0
    for value in range(m):
        for _ in range(counts[value]):
            A[pos] = value
            pos += 1
    return A
# Demo for Es.3: vet is rebound for the new exercise.
vet = [0, 3, 6, 2, 7, 5, 9, 4, 1, 8, 7]
print(ordina3(vet, 10))
print(ordina3_2(vet, 10))
#Es.4 Dare un programma che dato un array A di numeri restituisce la somma di quelli di posto pari
def sommaPostoPari(A):
    """Return the sum of the elements at even positions of A (Es.4)."""
    return sum(x for i, x in enumerate(A) if i % 2 == 0)
def sommaPostoPari2(A):
    """Sum of the elements at even positions, using a stride-2 slice."""
    return sum(A[0::2])
# Demo for Es.4: both variants must agree.
pari1 = sommaPostoPari(vet)
pari2 = sommaPostoPari2(vet)
print("sommaPari1: " + str(pari1))
print("sommaPari2: " + str(pari2))
#Es.5 Dare un programma che dato un array A di numeri interi restituisce la somma di quelli di valore pari
def sommaNumPari(A):
    """Return the sum of the even-valued elements of A (Es.5)."""
    return sum(x for x in A if x % 2 == 0)
# Demo for Es.5.
somma_valori_pari = sommaNumPari(vet)
print(somma_valori_pari)
#Es.6 Dare un programma che dato un array A ordinato contenente numeri interi determina in tempo lineare credo con O(n) se vi sono due elementi la cui somma è k
def sommaK(A, k):
    """Decide in linear time whether two elements of the SORTED array A
    sum to k (Es.6), using the classic two-pointer scan.

    If the current endpoint sum is below k the left pointer advances
    (we need bigger numbers); otherwise the right pointer retreats.
    """
    lo = 0
    hi = len(A) - 1
    while lo < hi:
        somma = A[lo] + A[hi]
        if somma == k:
            return True
        if somma < k:
            lo += 1
        else:
            hi -= 1
    return False
# Demo for Es.6. The exercise requires a SORTED array; the original data
# had 17 before 16, violating the two-pointer precondition — fixed here.
vet = [3, 5, 8, 12, 14, 16, 17, 22, 24]
print(sommaK(vet, 15))
#Es.7 Scrivere un programma che accetta in input due array ordinati di uguale lunghezza n contenenti interi distinti e restituisce in output il numero di elementi che occorrono in entrambi gli array. La complessità deve essere lineare.
def ordinati(A, B):
    """Count the values common to two sorted arrays of distinct
    integers, in linear time (Es.7), with a merge-style scan."""
    ia = 0
    ib = 0
    comuni = 0
    while ia < len(A) and ib < len(B):
        if A[ia] == B[ib]:
            comuni += 1
            ia += 1
            ib += 1
        elif A[ia] < B[ib]:
            # A is behind: advance it to catch up.
            ia += 1
        else:
            ib += 1
    return comuni
# Demo for Es.7: two sorted arrays sharing the values 3 and 9.
v1 = [3, 6, 7, 8, 9, 12]
v2 = [0, 1, 2, 3, 4, 9]
print(ordinati(v1, v2))
#Es.8 Scrivere due programmi: uno encode(A) che riceve in input un array contenente gli elementi di A a partire dalla posizione 0 e restituisce un array B contenente gli elementi di B a partire dalla posizione 0, e uno decode(B)
#che riceve in input un array B contenente gli elementi di B a partire dalla posizione 0 e rende un array A contenente gli elementi di A a partire dalla posizione 0
def encodeMIO(A):
    """Naive 'encoding': simply return a shallow copy of A."""
    return list(A)
def encode(A):
    """Encode a SORTED array A of positive integers as a histogram B,
    where B[v-1] holds the number of occurrences of the value v in A.

    B is sized from A[-1], which for sorted input is the maximum value.
    """
    B = [0] * A[-1]
    for valore in A:
        # Use the value itself (shifted to 0-based) as the bucket index.
        B[valore - 1] += 1
    return B
#Così da poter decodificare l'array anche senza avere A ottengo un array A ordinato in tempo lineare.
def decode(B):
    """Rebuild from the histogram B the sorted array it encodes:
    the value i+1 appears B[i] times."""
    # Summing the counts recovers the original length of A.
    lunghezza = sum(B)
    A = [0] * lunghezza
    k = 0
    for i, conteggio in enumerate(B):
        for _ in range(conteggio):
            A[k] = i + 1
            k += 1
    return A
#A=[5,2,3,5,2,2,7,8]#Infatti in posizione x-1 cioè 4 per il primo ciclo di encode ci finirà il valore 2 mentre in 2 posizione il 3 un pò come una tabella dove all'indirizzo di quel valore vengono sommate le occorrenze
#Le posizioni che non vengono toccate restano a zero perciò in decode mi creo un vettore che sembra essere per forza lungo quanto era A all'inizio
# Demo for Es.8. A must be sorted for encode/decode to round-trip
# (implicit precondition, as in Es.3).
A = [1, 1, 1, 2, 3, 3, 5, 6, 7]
print(A)
B = encode(A)
print(B)
C = decode(B)
print(C)
#Es.9 scrivere un programma solomon(m) che rende un array A, di lunghezza m+1, contenente i primi m elementi della sequenza di Solomon Golomb a partire dalla posizione 1
#Non ho capito come viene generata questa sequenza di solomon quindi provo direttamente la soluzione:
def solomon(m):
    """Return an array of length m+1 holding the first m terms of the
    Golomb (Solomon Golomb) self-describing sequence in positions 1..m
    (Es.9); position 0 is unused and holds ''.

    The sequence starts 1, 2, 2, 3, 3, 4, 4, 4, ...: its n-th term is
    the number of times n occurs in the sequence itself, so the value i
    is written A[i] times.

    Fixes over the original version:
    - the final `return A` was missing, so any m small enough for the
      loop to finish (e.g. m=3) returned None;
    - positions 1 and 2 are only written when they exist, so m < 2 no
      longer raises IndexError.
    """
    A = [0] * (m + 1)
    A[0] = ''  # 1-based convention: slot 0 is deliberately unused
    if m >= 1:
        A[1] = 1
    if m >= 2:
        A[2] = 2
    h = 2  # next position of A to fill
    for i in range(2, m):
        # Write the value i exactly A[i] times (self-describing rule).
        for _ in range(A[i]):
            if h > m:
                return A
            A[h] = i
            h = h + 1
    return A
# Demo for Es.9.
sequenza = solomon(10)
print("Solomon" + str(sequenza))
def nodo(x, s, d):
    """Build a binary-tree node as the triple [value, left, right]."""
    return [x, s, d]
def foglia(x):
    """Build a leaf: a node holding x with no children."""
    return [x, None, None]
def vuoto():
    """The empty tree, represented by None."""
    return None
def radice(A):
    """Value stored at the root of the (non-empty) tree A."""
    return A[0]
def sinistro(A):
    """Left subtree of the (non-empty) tree A."""
    return A[1]
def destro(A):
    """Right subtree of the (non-empty) tree A."""
    return A[2]
def isVuoto(A):
    """True exactly when A is the empty tree (None)."""
    return A is None
def isFoglia(A):
    """True when the (non-empty) node A has no children at all."""
    return isVuoto(sinistro(A)) and isVuoto(destro(A))
def ricerca(x, A):
    """Search x in the binary search tree A; True if present."""
    if isVuoto(A):
        return False
    valore = radice(A)
    if x == valore:
        return True
    # BST invariant: smaller keys live in the left subtree.
    if x < valore:
        return ricerca(x, sinistro(A))
    return ricerca(x, destro(A))
def inserzione(x, A):
    """Insert x into the binary search tree A, returning the new tree
    (the original tree is not mutated; shared subtrees are reused).

    Fixes over the original version:
    - `forglia` was a typo for `foglia` and raised NameError on every
      insertion into an empty tree;
    - both recursive cases passed the children to nodo() in swapped
      order (nodo(x, s, d) stores s as the LEFT child and d as the
      RIGHT child, per sinistro/destro), which destroyed the BST
      ordering on every insert.
    """
    if isVuoto(A):
        return foglia(x)
    if x < radice(A):
        # x belongs on the left; the right subtree is kept unchanged.
        return nodo(radice(A), inserzione(x, sinistro(A)), destro(A))
    return nodo(radice(A), sinistro(A), inserzione(x, destro(A)))
#Solo per albero binario di ricerca pesato
def massimo(A):
    """Maximum key of a binary search tree (valid only for BSTs, where
    right descendants are larger): walk right from the root until a
    node without a right child is found, and return its value."""
    corrente = A
    while not isVuoto(destro(corrente)):
        corrente = destro(corrente)
    return radice(corrente)
#La roba da qui in poi vale solo per alberi HEAP (dove la radice e maggiore di entrambi i suoi figli e i nodi dell'ultimo livello sono tutti addossati a sinistra)
def padre(i):
    """Index of the parent of heap node i (undefined for the root)."""
    return (i - 1) // 2
def primofiglio(i):
    """Index of the first (left) child of heap node i."""
    # 2*(i+1)-1 simplifies to 2*i + 1.
    return 2 * i + 1
def heaptest(h,i):
if i==0:
return True
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.