# Dataset-viewer residue (not Python) — commented out so the module stays importable:
# input stringlengths 2.65k 237k | output stringclasses 1
# value |
# |---|---|
# reponame: oguzhan-yilmaz/balcony
from __future__ import annotations
import services
import nodes
from utils import camel_case_split
import json
from thefuzz import fuzz
from collections import namedtuple
from typing import List
import os
import boto3
from pprint import pprint
from collections import namedtuple
from itertools import combinations
import json
from thefuzz import fuzz
from utils import camel_case_split, deep_key_search
import factories
import itertools
# Groups every relation discovered for one parameter name.
RelationsByParameterName = namedtuple(
    'RelationsByParameterName', 'parameter_name relations')
class MethodParameterRelationNotCreatedException(Exception): pass
class MethodParameterRelation:
    """Relation from a method's required parameter to another method whose
    output attributes can supply that parameter's value.
    """

    def __init__(self, method, parameter, target_method, target_attributes):
        # A method cannot supply its own required parameter.
        if target_method == method:
            raise MethodParameterRelationNotCreatedException(
                'Target method and method can not be same.')
        self.method = method
        # `parameter` may be a plain name (str) or a parameter dict with a 'name' key.
        self.parameter = parameter
        self.target_method = target_method
        self.target_attributes = target_attributes

    def __hash__(self):
        # NOTE(review): default repr() includes the object id, so two relations
        # that compare equal hash differently — confirm nothing relies on
        # hash-based dedup (``in list`` below uses __eq__, which is fine).
        return hash(repr(self))

    def __eq__(self, other):
        match_methods = self.method == other.method
        # compare_nouns tolerates singular/plural parameter-name differences
        match_parameter_names = services.compare_nouns(
            self.parameter_name, other.parameter_name)
        match_target_methods = self.target_method == other.target_method
        # every one of our target attributes must appear in the other's
        match_target_attributes = all(
            s_attr in other.target_attributes for s_attr in self.target_attributes)
        return match_methods \
            and match_parameter_names \
            and match_target_methods \
            and match_target_attributes

    def __str__(self):
        return f'MethodParameterRelation ** {self.method}({self.parameter_name}) -> {self.target_method} | attrs:{str(self.target_attributes)}'

    @staticmethod
    def from_json(json):
        """Reconstruct a relation from the dict produced by ``json()``.

        NOTE: the parameter name shadows the stdlib ``json`` module; kept
        unchanged for backward compatibility with keyword callers.
        """
        return MethodParameterRelation(
            method=json.get('method'),
            parameter=json.get('parameter'),
            target_method=json.get('target_method'),
            target_attributes=json.get('target_attributes'),
        )

    def json(self):
        """Return a JSON-serializable dict; method objects are stored as repr strings."""
        method_repr = self.method if isinstance(self.method, str) else repr(self.method)
        target_method = self.target_method if isinstance(self.target_method, str) else repr(self.target_method)
        return {
            "method": method_repr,
            "parameter": self.parameter_name,
            "target_method": target_method,
            "target_attributes": self.target_attributes
        }

    @property
    def parameter_name(self):
        """Parameter name, whether ``parameter`` is a dict or a plain string."""
        if isinstance(self.parameter, dict):
            return self.parameter.get('name')
        return self.parameter
def find_common_target_relation_combination_in_relations_by_parameter_name(relations_by_parameter_name):
    """Pick one relation per parameter such that all picks share a target method.

    Args:
        relations_by_parameter_name: list of RelationsByParameterName-like
            objects, each exposing a ``.relations`` list. Only called with
            multiple parameter groups.

    Returns:
        The first tuple of relations (one per parameter) that all point to the
        same target method, or ``False`` when no such combination exists.
    """
    def has_the_same_target_methods(method_parameter_relation_list):
        # A combination needs at least two relations to compare.
        if len(method_parameter_relation_list) < 2:
            return False
        first_target_method = method_parameter_relation_list[0].target_method
        return all(
            first_target_method == rel.target_method
            for rel in method_parameter_relation_list[1:]
        )

    # One relation-list per parameter; the cartesian product enumerates every
    # way of picking a single relation for each parameter.
    different_parameter_relations_matrix = [
        rbpn.relations for rbpn in relations_by_parameter_name]
    for possible_relations_combination in itertools.product(
            *different_parameter_relations_matrix):
        if has_the_same_target_methods(possible_relations_combination):
            return possible_relations_combination
    return False
# def generate_relations_by_parameter_name_from_relations(relations):
# unique_parameters = list(
# set([relation.parameter_name for relation in relations]))
# result = []
# for uniq_param in unique_parameters:
# parameters_relations = []
# for relation in relations:
# has_target_and_method_same = relation.target_method == relation.method
# if relation.parameter == uniq_param and not has_target_and_method_same:
# parameters_relations.append(relation)
# created_obj = RelationsByParameterName(
# uniq_param, parameters_relations)
# result.append(created_obj)
# return result
def filter_method_relations(method, method_relations=False, relations_by_parameter_name=False):
    """Select the relations able to satisfy all of ``method``'s required parameters.

    Args:
        method: object exposing ``get_required_parameters()`` -> list of param dicts.
        method_relations: optional flat relation list; grouped by parameter
            name when ``relations_by_parameter_name`` is not supplied.
        relations_by_parameter_name: optional pre-grouped relations.

    Returns:
        ``(success, message, selected_relations)``; ``selected_relations`` is
        always a list on failure.
    """
    success = True
    message = 'default message'
    selected_relations = None
    methods_required_parameters = method.get_required_parameters()
    required_parameter_names = [p.get('name') for p in methods_required_parameters]
    # BUGFIX: the original tested `not filter_method_relations` — the function
    # object itself, always truthy — so this branch could never run; it also
    # passed a spurious second argument to the grouping helper.
    if not relations_by_parameter_name and method_relations:
        relations_by_parameter_name = generate_relations_by_parameter_name_from_relations(
            method_relations)
    if not relations_by_parameter_name:
        success = False
        message = 'No method relations found for method.'
    else:
        if len(relations_by_parameter_name) > 1:
            # multiple parameters: need one relation per parameter, all
            # pointing at the same target method
            selected_relations = find_common_target_relation_combination_in_relations_by_parameter_name(
                relations_by_parameter_name)
            if not selected_relations:
                # there were relations but no combination shared a target method
                success = False
                message = 'Failure. There were relations but no combination of them had the same target method.'
            else:
                message = 'Success. '
        elif len(relations_by_parameter_name) == 1:
            # case: one parameter, possibly multiple candidate relations
            single_parameter_relations = relations_by_parameter_name[0].relations
            selected_relations = []
            for sp_rel in single_parameter_relations:
                if len(sp_rel.target_attributes) == 1 and sp_rel.target_attributes[0].startswith('REQUIRED-PARAMETER>'):
                    # only has a required-parameter attr. so ignoring.
                    continue
                selected_relations.append(sp_rel)
    # final check -- selected relations must cover all required parameters
    if selected_relations and bool(methods_required_parameters):
        selected_relation_parameter_names = list(set(
            sel_rel.parameter_name for sel_rel in selected_relations))
        is_all_required_parameter_has_relations = all(
            req_param_name in selected_relation_parameter_names
            for req_param_name in required_parameter_names)
        if not is_all_required_parameter_has_relations:
            # method has required parameters with no covering relation
            success = False
            message = f'Failure. Method required parameters: {required_parameter_names}. Only relations found for: {selected_relation_parameter_names}'
    if not success:
        print('MESSAGE:', message)
        selected_relations = []
    return success, message, selected_relations
def filter_node_relations(service_node, node, node_relations=False):
    """Filter a node's relations down to those targeting the same service.

    NOTE(review): the early return below hard-codes the 'finding' node name
    and short-circuits every other node — this looks like leftover debugging
    scaffolding; confirm before removing it.
    """
    if node.name != 'finding':
        return []
    if not node_relations:
        # derive the relations from the node's read-style methods
        node_methods = filter_node_methods(node)
        node_relations = find_nodes_relations(service_node, node, node_methods)
    filtered_relations = filter_relations_having_targets_inside_the_service_node(
        service_node, node_relations)
    return filtered_relations
def find_nodes_relations(service_node, node, node_methods=False):
    """Collect the service-level relations whose method belongs to this node.

    Relations store methods as repr strings, so membership is checked against
    the repr of each node method.
    """
    methods = node_methods if node_methods else node.methods
    method_reprs = [repr(method) for method in methods]
    return [
        relation
        for relation in service_node.relations
        if relation.method in method_reprs
    ]
def filter_relations_having_targets_inside_the_service_node(service_node, node_relations):
    """Keep only relations whose target method lives in the same service.

    Target methods are encoded as '<service>.<operation>…'; the prefix before
    the first dot is compared against the service node's name.
    """
    own_service = service_node.service_name
    return [
        relation
        for relation in node_relations
        if relation.target_method.split('.')[0] == own_service
    ]
def generate_relations_by_parameter_name_from_relations(relations):
    """Group relations by their parameter name.

    Returns:
        list of RelationsByParameterName, one entry per distinct parameter name
        (order of names is unspecified because of the intermediate set).
    """
    unique_parameters = list(
        set(relation.parameter_name for relation in relations))
    result = []
    for uniq_param in unique_parameters:
        parameters_relations = [
            relation for relation in relations
            if relation.parameter_name == uniq_param
        ]
        result.append(RelationsByParameterName(uniq_param, parameters_relations))
    return result
def filter_node_methods(node, accepted_method_verbs=('get', 'describe', 'list', 'search')):
    """Return the node's methods whose verb is one of the accepted read verbs."""
    selected = []
    for method in node.methods:
        if method.get_verb() in accepted_method_verbs:
            selected.append(method)
    return selected
def does_parameter_exists_in_other_methods_attributes(methods_list, method, parameter, accepted_method_verbs):
    """Find other methods whose output attributes can supply ``parameter``.

    Args:
        methods_list: candidate methods to search.
        method: the method requiring ``parameter`` (excluded from the search).
        parameter: parameter dict (with a 'name' key) or plain name string.
        accepted_method_verbs: verbs a candidate method may have.

    Returns:
        list of unique MethodParameterRelation objects.
    """
    result = []
    # exclude the method itself and anything with a non-read verb
    other_methods = [m for m in methods_list if m != method]
    other_methods = [m for m in other_methods if m.get_verb()
                     in accepted_method_verbs]
    # normalize once; `parameter` may be a dict or a plain string
    parameter_name = parameter.get('name') if isinstance(parameter, dict) else parameter
    for other_method in other_methods:
        target_attributes = services.is_required_parameter_exist_in_methods_attributes(
            other_method, parameter, return_bool=False)
        if not target_attributes:
            continue
        try:
            method_parameter_obj = MethodParameterRelation(
                repr(method), parameter_name, repr(other_method), target_attributes)
            if method_parameter_obj not in result:
                result.append(method_parameter_obj)
        except MethodParameterRelationNotCreatedException:
            # BUGFIX: the original printed parameter.get('name'), which raises
            # AttributeError when `parameter` is a plain string.
            print("Can't create MethodParameterRelation object with:",
                  repr(method), parameter_name, repr(other_method), target_attributes)
    return result
def extract_parameter_relations(node_list, node: 'Node', method, parameter, accepted_method_verbs):
    """Search every node's methods for ones able to supply ``parameter``.

    ``node`` is currently unused but kept for interface compatibility.
    """
    methods_list = []
    for n in node_list:
        # skip the method we are resolving the parameter for
        methods_list.extend(m for m in n.methods if m != method)
    return does_parameter_exists_in_other_methods_attributes(
        methods_list, method, parameter, accepted_method_verbs)
def extend_safe(og_list, extend_list):
    """Return a new list: ``og_list`` followed by ``extend_list`` (inputs untouched)."""
    return [*og_list, *extend_list]
# def find_nodes_related_to_parameter_name(parameter_to_node_mappings, parameter_name):
# found_item = get_item_from_collection(
# parameter_to_node_mappings, 'parameter', parameter_name)
# return found_item.get('nodes', [])
def extract_node_methods_relations(all_service_nodes, service_node, node, accepted_method_verbs=('get', 'describe', 'list', 'search')):
    """Collect parameter relations for every accepted method of ``node``.

    Returns:
        list of MethodParameterRelation objects (possibly empty).
    """
    node_method_parameter_relations = []
    # filter the node's methods with accepted method verbs
    accepted_methods = [
        method for method in node.methods
        if method.get_verb() in accepted_method_verbs
    ]
    if not accepted_methods:
        # Node has no accepted methods, failing to choose any method.
        return node_method_parameter_relations
    print('--**--*' * 20)
    print(f"\n{service_node.service_name} -- {node.name}\n")
    flat_nodes = service_node.get_nodes(flat=True)  # NOTE(review): unused — confirm safe to drop
    related_nodes_by_parent = list(service_node.generator_nodes_parents(node))  # NOTE(review): unused
    flat_service_nodes = service_node.get_nodes(flat=True)
    for a_method in accepted_methods:
        print("\t", a_method)
        print("\t", '--' * 15)
        print()
        required_parameter_names = [
            required_parameter.get('name')
            for required_parameter in a_method.get_required_parameters()
        ]
        for required_parameter in required_parameter_names:
            # look across the whole service for methods able to fill this parameter
            parental_parameter_relations = extract_parameter_relations(
                flat_service_nodes, node, a_method, required_parameter, accepted_method_verbs)
            if parental_parameter_relations:
                node_method_parameter_relations.extend(parental_parameter_relations)
    return node_method_parameter_relations
def extract_service_relations(all_service_nodes, service_node, accepted_method_verbs=('get', 'describe', 'list', 'search')):
    """Walk every node of the service, print and gather its method relations."""
    all_method_parameter_relations = []
    for node in service_node.get_nodes(flat=True):
        method_parameter_relations = extract_node_methods_relations(
            all_service_nodes, service_node, node, accepted_method_verbs)
        for relation in method_parameter_relations:
            print(relation)
        print()
        filtered_relations = filter_node_relations(
            service_node, node, node_relations=method_parameter_relations)
        print('filtered:')
        for relation in filtered_relations:
            print(relation)
        if method_parameter_relations:
            all_method_parameter_relations.extend(method_parameter_relations)
    return all_method_parameter_relations
def extract_all_relations(all_service_nodes, accepted_method_verbs=('get', 'describe', 'list', 'search')):
    """Extract and persist relations for every service node."""
    folderpath = 'computed/relations'
    for service_node in all_service_nodes:
        relations = extract_service_relations(
            all_service_nodes, service_node, accepted_method_verbs)
        save_service_relations(folderpath, service_node, relations)
        print()
def save_service_relations(folderpath, service_node, service_relations):
    """Serialize a service's relations to '<folderpath>/<service_name>.json'.

    Args:
        folderpath: existing directory to write into.
        service_node: object exposing ``service_name``.
        service_relations: relation objects exposing a ``.json()`` dict method.
    """
    filename = os.path.join(folderpath, service_node.service_name)
    # BUGFIX: `filename` was computed but never used — the output path was a
    # hard-coded literal. Build the real '<folderpath>/<service>.json' path.
    filepath = f"{filename}.json"
    with open(filepath, 'w') as file:
        service_relations_dicts = [sr.json() for sr in service_relations]
        json.dump(service_relations_dicts, file, indent=2)
    print(f"Service Relations are saved to: {filepath}")
def get_item_from_collection(collection, key, target):
    """Return the first item whose ``item[key] == target``, or None."""
    for item in collection:
        if item[key] == target:
            return item
    return None
def compare_service_node_names(service_node, other_service_node, both_ways=False):
    """Fuzzy-compare two service node names, printing close matches.

    NOTE: always returns False — match results are only logged. ``both_ways``
    is currently unused; kept for interface compatibility.
    """
    name1, name2 = service_node.get_name(), other_service_node.get_name()
    name1_lower = name1.lower()
    name2_lower = name2.lower()
    # BUGFIX: partial_ratio previously mixed the lowered name1 with the
    # original-case name2, skewing the score; compare the lowered forms.
    partial_ratio = fuzz.partial_ratio(name1_lower, name2_lower)
    normal_ratio = fuzz.ratio(name1_lower, name2_lower)
    # split camel case so token_sort_ratio compares word sets
    t_name1 = ' '.join(camel_case_split(name1))
    t_name2 = ' '.join(camel_case_split(name2))
    # e.g. acm-pca -> acm : normal 60 partial 100 token 67
    token_sort_ratio = fuzz.token_sort_ratio(t_name1, t_name2)
    if partial_ratio >= 76 and normal_ratio > 60 and token_sort_ratio > 50:
        print(t_name1, '\t|\t', t_name2, '|\t', partial_ratio, normal_ratio,
              token_sort_ratio, 'total:', partial_ratio + normal_ratio + token_sort_ratio)
        print()
    return False
# client._PY_TO_OP_NAME
if __name__ == '__main__':
os.environ['AWS_PROFILE'] = 'default'
os.environ['AWS_REGION'] = 'eu-central-1'
# main()
# service = services.Boto3Service('ec2', boto3_session)
# analysis_all_services_paginator_functions()
service_node_factory = factories.ServiceNodeFactory()
accepted_method_verbs = ('get', 'list', 'describe', 'search')
result = []
all_service_nodes = list(service_node_factory.generator(
yield_only_service_nodes=True, force_populate_all=True))
# for service_node in all_service_nodes[:5]:
# for | |
stair.p0
v1 = stair.p1
manipulator.set_pts([(v0.x, v0.y, stair.top), (v1.x, v1.y, stair.top), (side, 0, 0)])
manipulator.type_key = 'SIZE'
manipulator.prop1_name = 'length'
for i in range(stair.n_step):
stair.make_step(i, verts, faces, matids, uvs, nose_y=nose_y)
if s < len(self.stairs) - 1 and self.steps_type != 'OPEN' and \
'Landing' in type(stair).__name__ and stair.next_type != "LANDING":
f = len(verts) - 10
faces.append((f, f + 1, f + 8, f + 9))
matids.append(self.stairs[-1].idmat_bottom)
u = verts[f + 1][2] - verts[f][2]
v = (Vector(verts[f]) - Vector(verts[f + 9])).length
uvs.append([(0, 0), (0, u), (v, u), (v, 0)])
if self.steps_type != 'OPEN' and len(self.stairs) > 0:
f = len(verts) - 10
faces.append((f, f + 1, f + 2, f + 3, f + 4, f + 5, f + 6, f + 7, f + 8, f + 9))
matids.append(self.stairs[-1].idmat_bottom)
uvs.append([(0, 0), (.1, 0), (.2, 0), (.3, 0), (.4, 0), (.4, 1), (.3, 1), (.2, 1), (.1, 1), (0, 1)])
def setup_user_defined_post(self, o, post_x, post_y, post_z):
    """Cache a user-supplied Blender object ``o`` as the railing-post template.

    Stores the object, a per-axis scale mapping its bounding box onto the
    requested post dimensions, and its per-vertex vertex-group names, UVs and
    material indices for later instancing by get_user_defined_post().
    """
    self.user_defined_post = o
    # bounding-box extents: corner 6 is (+x,+y,+z), corner 0 is (-x,-y,-z)
    x = o.bound_box[6][0] - o.bound_box[0][0]
    y = o.bound_box[6][1] - o.bound_box[0][1]
    z = o.bound_box[6][2] - o.bound_box[0][2]
    # NOTE(review): y is negated — presumably the template is modeled facing
    # -y; confirm against the template objects.
    self.user_defined_post_scale = Vector((post_x / x, post_y / -y, post_z / z))
    m = o.data
    # create vertex group lookup dictionary for names
    vgroup_names = {vgroup.index: vgroup.name for vgroup in o.vertex_groups}
    # create dictionary of vertex group assignments per vertex
    self.vertex_groups = [[vgroup_names[g.group] for g in v.groups] for v in m.vertices]
    # uvs: copy the active UV layer per polygon, or default to (0, 0)
    uv_act = m.uv_layers.active
    if uv_act is not None:
        uv_layer = uv_act.data
        self.user_defined_uvs = [[uv_layer[li].uv for li in p.loop_indices] for p in m.polygons]
    else:
        self.user_defined_uvs = [[(0, 0) for i in p.vertices] for p in m.polygons]
    # material ids per polygon
    self.user_defined_mat = [p.material_index for p in m.polygons]
def get_user_defined_post(self, tM, z0, z1, z2, slope, post_z, verts, faces, matids, uvs):
    """Instance the cached template post into the output mesh arrays (in place).

    Vertex z-offsets depend on the vertex groups captured by
    setup_user_defined_post: 'Top' -> +z2, 'Bottom' -> +0, otherwise +z1;
    'Slope' additionally tilts along y. ``tM`` is the object-to-world matrix
    for this post.
    NOTE(review): z0 and post_z are unused here — confirm intentional.
    """
    # face indices will be offset by the vertex count present before this post
    f = len(verts)
    m = self.user_defined_post.data
    for i, g in enumerate(self.vertex_groups):
        co = m.vertices[i].co.copy()
        # apply per-axis template scaling
        co.x *= self.user_defined_post_scale.x
        co.y *= self.user_defined_post_scale.y
        co.z *= self.user_defined_post_scale.z
        if 'Top' in g:
            co.z += z2
        elif 'Bottom' in g:
            co.z += 0
        else:
            co.z += z1
        if 'Slope' in g:
            co.z += co.y * slope
        verts.append(tM * co)
    matids += self.user_defined_mat
    faces += [tuple([i + f for i in p.vertices]) for p in m.polygons]
    uvs += self.user_defined_uvs
def get_post(self, post, post_x, post_y, post_z, post_alt, sub_offset_x,
             id_mat, verts, faces, matids, uvs, bottom="STEP"):
    """Emit one railing post (user template or procedural box) into the mesh.

    ``post`` is a (n, dz, zs, zl) sample: location/direction, slope, z on the
    step, z on the rail line. ``bottom`` selects whether the post base sits on
    the step ("STEP") or follows the rail line. Appends to
    verts/faces/matids/uvs in place.
    """
    n, dz, zs, zl = post
    slope = dz * post_y
    if self.user_defined_post is not None:
        # use the template captured by setup_user_defined_post
        if bottom == "STEP":
            z0 = zs
        else:
            z0 = zl
        z1 = zl - z0
        z2 = zl - z0
        x, y = -n.v.normalized()
        # object-to-world: rotate into the post's direction, lift to z0 + alt
        tM = Matrix([
            [x, y, 0, n.p.x],
            [y, -x, 0, n.p.y],
            [0, 0, 1, z0 + post_alt],
            [0, 0, 0, 1]
        ])
        self.get_user_defined_post(tM, z0, z1, z2, dz, post_z, verts, faces, matids, uvs)
        return
    # procedural post: an 8-vertex box whose top follows the rail slope
    z3 = zl + post_z + post_alt - slope
    z4 = zl + post_z + post_alt + slope
    if bottom == "STEP":
        z0 = zs + post_alt
        z1 = zs + post_alt
    else:
        z0 = zl + post_alt - slope
        z1 = zl + post_alt + slope
    vn = n.v.normalized()
    dx = post_x * vn
    dy = post_y * Vector((vn.y, -vn.x))
    oy = sub_offset_x * vn
    # four base corners around the post center
    x0, y0 = n.p - dx + dy + oy
    x1, y1 = n.p - dx - dy + oy
    x2, y2 = n.p + dx - dy + oy
    x3, y3 = n.p + dx + dy + oy
    f = len(verts)
    verts.extend([(x0, y0, z0), (x0, y0, z3),
                  (x1, y1, z1), (x1, y1, z4),
                  (x2, y2, z1), (x2, y2, z4),
                  (x3, y3, z0), (x3, y3, z3)])
    # four sides, bottom, top
    faces.extend([(f, f + 1, f + 3, f + 2),
                  (f + 2, f + 3, f + 5, f + 4),
                  (f + 4, f + 5, f + 7, f + 6),
                  (f + 6, f + 7, f + 1, f),
                  (f, f + 2, f + 4, f + 6),
                  (f + 7, f + 5, f + 3, f + 1)])
    matids.extend([id_mat, id_mat, id_mat, id_mat, id_mat, id_mat])
    # uv rectangles matching each face's real dimensions
    x = [(0, 0), (0, post_z), (post_x, post_z), (post_x, 0)]
    y = [(0, 0), (0, post_z), (post_y, post_z), (post_y, 0)]
    z = [(0, 0), (post_x, 0), (post_x, post_y), (0, post_y)]
    uvs.extend([x, y, x, y, z, z])
def get_panel(self, subs, altitude, panel_x, panel_z, sub_offset_x, idmat, verts, faces, matids, uvs):
    """Sweep a rectangular panel profile along the sub-post sample points.

    ``subs`` is a list of (n, dz, zs, zl) samples along the rail; the profile
    is a panel_x-wide, panel_z-tall rectangle offset by sub_offset_x.
    Geometry is appended to verts/faces/matids/uvs in place via a Lofter.
    """
    n_subs = len(subs)
    if n_subs < 1:
        return
    f = len(verts)
    # rectangular cross-section in the rail's local frame
    x0 = sub_offset_x - 0.5 * panel_x
    x1 = sub_offset_x + 0.5 * panel_x
    z0 = 0
    z1 = panel_z
    profile = [Vector((x0, z0)), Vector((x1, z0)), Vector((x1, z1)), Vector((x0, z1))]
    user_path_uv_v = []
    n_sections = n_subs - 1
    n, dz, zs, zl = subs[0]
    p0 = n.p
    v0 = n.v.normalized()
    for s, section in enumerate(subs):
        n, dz, zs, zl = section
        p1 = n.p
        if s < n_sections:
            # at the last sample, v1 keeps the previous segment direction
            v1 = subs[s + 1][0].v.normalized()
        # miter between segments: average direction, widened by 1/cos(half
        # angle) so the panel keeps constant width through the corner
        dir = (v0 + v1).normalized()
        scale = 1 / cos(0.5 * acos(min(1, max(-1, v0 * v1))))
        for p in profile:
            x, y = n.p + scale * p.x * dir
            z = zl + p.y + altitude
            verts.append((x, y, z))
        if s > 0:
            # uv v-coordinate advances by the real segment length
            user_path_uv_v.append((p1 - p0).length)
        p0 = p1
        v0 = v1
    # build faces using Panel
    lofter = Lofter(
        # closed_shape, index, x, y, idmat
        True,
        [i for i in range(len(profile))],
        [p.x for p in profile],
        [p.y for p in profile],
        [idmat for i in range(len(profile))],
        closed_path=False,
        user_path_uv_v=user_path_uv_v,
        user_path_verts=n_subs
    )
    faces += lofter.faces(16, offset=f, path_type='USER_DEFINED')
    matids += lofter.mat(16, idmat, idmat, path_type='USER_DEFINED')
    v = Vector((0, 0))
    uvs += lofter.uv(16, v, v, v, v, 0, v, 0, 0, path_type='USER_DEFINED')
def reset_shapes(self):
    """Restore each curved stair's left/right shape from its source part."""
    for index, stair in enumerate(self.stairs):
        if 'Curved' not in type(stair).__name__:
            continue
        part = self.parts[index]
        stair.l_shape = part.left_shape
        stair.r_shape = part.right_shape
def make_subs(self, height, step_depth, x, y, z, post_y, altitude, bottom, side, slice,
post_spacing, sub_spacing, respect_edges, move_x, x_offset, sub_offset_x, mat,
verts, faces, matids, uvs):
n_steps = self.n_steps(step_depth)
self.set_height(height / n_steps)
n_stairs = len(self.stairs) - 1
subs = []
if side == "LEFT":
offset = move_x - x_offset
# offset_sub = offset - sub_offset_x
else:
offset = move_x + x_offset
# offset_sub = offset + sub_offset_x
for s, stair in enumerate(self.stairs):
if 'Curved' in type(stair).__name__:
if side == "LEFT":
part = stair.l_arc
shape = stair.l_shape
else:
part = stair.r_arc
shape = stair.r_shape
# Note: use left part as reference for post distances
# use right part as reference for panels
stair.l_arc, stair.l_t0, stair.l_t1, stair.l_tc = stair.set_offset(offset, shape)
stair.r_arc, stair.r_t0, stair.r_t1, stair.r_tc = stair.set_offset(offset, shape)
else:
stair.l_line = stair.offset(offset)
stair.r_line = stair.offset(offset)
part = stair.l_line
lerp_z = 0
edge_t = 1
edge_size = 0
# interpolate z near end landing
if 'Landing' in type(stair).__name__ and stair.next_type == 'STAIR':
if not slice:
line = stair.normal(1).offset(self.stairs[s + 1].step_depth)
res, p, t_part = part.intersect(line)
# does perpendicular line intersects circle ?
if res:
edge_size = self.stairs[s + 1].step_depth / stair.get_length(side)
edge_t = 1 - edge_size
else:
# in this case, lerp z over one step
lerp_z = stair.step_height
t_step, n_step = stair.n_posts(post_spacing, side, respect_edges)
# space between posts
sp = stair.get_length(side)
# post size
t_post = post_y / sp
if s == n_stairs:
n_step += 1
for i in range(n_step):
res_t = stair.get_lerp_vect([], side, i, t_step, respect_edges)
# subs
if s < n_stairs or i < n_step - 1:
res_t.append((i + 1) * t_step)
for j in range(len(res_t) - 1):
t0 = res_t[j] + t_post
t1 = res_t[j + 1] - t_post
dt = t1 - t0
n_subs = int(sp * dt / sub_spacing)
if n_subs > 0:
t_subs = dt / n_subs
for k in range(1, n_subs):
t = t0 + k * t_subs
stair.get_lerp_vect(subs, side, 1, t0 + k * t_subs, False)
if t > edge_t:
n, dz, z0, z1 = subs[-1]
subs[-1] = n, dz, z0, z1 + (t - edge_t) / edge_size * stair.step_height
if lerp_z > 0:
n, dz, z0, z1 = subs[-1]
subs[-1] = n, dz, z0, z1 + t * stair.step_height
for i, post in | |
voutdc = dsn_specs['vout_list'][0]
f_unit = dsn_specs['f_unit']
gain_max = max(self._amp_info['gain_tot'])
f_bw_log = int(np.floor(np.log10(f_unit / gain_max)))
f_unit_log = int(np.ceil(np.log10(f_unit)))
top_specs['layout_params'].update(self._amp_info['layout_info'])
meas = top_specs['measurements'][0]
meas['cfb'] = self._amp_info['cfb']
meas['rfb'] = self._amp_info['rfb']
ac_tb = meas['testbenches']['ac']
ac_tb['fstart'] = 10 ** (f_bw_log - 1)
ac_tb['fstop'] = 10 ** (f_unit_log + 1)
ac_sim_vars = ac_tb['sim_vars']
ac_sim_vars['vdd'] = vdd
ac_sim_vars['cload'] = dsn_specs['cload']
ac_sim_vars['vincm'] = vindc
ac_sim_vars['voutcm'] = voutdc
ac_sim_vars['ibias'] = ibias
ac_sim_vars['vdd'] = vdd
ac_sim_vars['vinac'] = 1.0
ac_sim_vars['vindc'] = 0.0
"""
top_specs['tb_dc']['tb_params']['vimax'] = vdd
top_specs['tb_dc']['tb_params']['vimin'] = -vdd
top_specs['tb_dc']['tb_params']['vindc'] = vindc
top_specs['tb_dc']['tb_params']['voutcm'] = voutdc
top_specs['tb_dc']['tb_params']['ibias'] = ibias
top_specs['tb_dc']['tb_params']['vdd'] = vdd
top_specs['tb_dc']['tb_params']['voutref'] = voutdc
top_specs['tb_dc']['tb_params']['vout_start'] = -vdd + 0.15
top_specs['tb_dc']['tb_params']['vout_stop'] = vdd - 0.15
"""
return top_specs
def _design_stage2(self, gm_db, load_db, vtail_list, vg_list, vmid_list, vout_list, vbias_list,
                   vb_gm, vb_load, cload, cpar1, w_dict, th_dict, stack_dict, seg_dict,
                   gm2_list, res_var, phase_margin, f_unit, max_ref_ratio):
    """Size the second stage: find the smallest unit multiple that is stable
    and meets the unity-gain-frequency target across corners.

    Mutates ``seg_dict`` in place (adds 'tail2'/'diode2'/'ngm2'/'tailcm'/'ref')
    and returns a dict with rz, cf and per-corner gain/f_3db/f_unity/phase_margin.

    Raises:
        ValueError: if the stage-1 segment gcd is odd.
        StageOneCurrentError: if no stage-2 size reaches f_unit.
    """
    seg_tail1 = seg_dict['tail1']
    seg_diode1 = seg_dict['diode1']
    seg_ngm1 = seg_dict['ngm1']
    # step 1: find stage 2 unit size
    seg_gcd = gcd(gcd(seg_tail1, seg_diode1), seg_ngm1)
    if seg_gcd % 2 != 0:
        raise ValueError('All segment numbers must be even.')
    # divide seg_gcd by 2 to make sure all generated segment numbers are even
    seg_gcd //= 2
    # make sure we have enough tail fingers for common mode feedback
    min_size = 2 if seg_tail1 // seg_gcd == 2 else 1

    def ac_results_fun(cur_size):
        # scale stage-2 devices to cur_size units and rerun the AC analysis
        seg_dict['tail2'] = seg_tail1 // seg_gcd * cur_size
        seg_dict['diode2'] = seg_diode1 // seg_gcd * cur_size
        seg_dict['ngm2'] = seg_ngm1 // seg_gcd * cur_size
        cur_scale2 = cur_size / seg_gcd
        cur_gm2_list = [gm2 * cur_scale2 for gm2 in gm2_list]
        ac_results = self._find_rz_cf(gm_db, load_db, vtail_list, vg_list, vmid_list, vout_list,
                                      vbias_list, vb_gm, vb_load, cload, cpar1, w_dict, th_dict,
                                      stack_dict, seg_dict, cur_gm2_list, res_var, phase_margin)
        return ac_results

    def funity_fun(cur_size):
        # worst-corner unity-gain frequency; -1 marks an unstable design
        ac_results_tmp = ac_results_fun(cur_size)
        fu_list = ac_results_tmp[0]
        if fu_list is None:
            return -1
        # noinspection PyTypeChecker
        ans = min(fu_list)
        return ans

    # find min_size such that amplifier is stable
    min_bin_iter = BinaryIterator(min_size, None)
    while min_bin_iter.has_next():
        test_size = min_bin_iter.get_next()
        test_fu = funity_fun(test_size)
        if test_fu >= 0:
            min_bin_iter.save()
            min_bin_iter.down()
        else:
            min_bin_iter.up()

    # golden-section search for the smallest size reaching f_unit,
    # starting from the smallest stable size found above
    min_result = minimize_cost_golden(funity_fun, f_unit, offset=min_bin_iter.get_last_save())
    if min_result.x is None:
        msg = 'Insufficient stage 1 current.  funity_max=%.4g'
        raise StageOneCurrentError(msg % min_result.vmax)

    funity_list, rz_nom, cf_min, gain_list, f3db_list, pm_list = ac_results_fun(min_result.x)
    # split the stage-2 tail into differential tail + common-mode feedback fingers
    seg_tail2_tot = seg_dict['tail2']
    seg_tail2 = (seg_tail2_tot // 4) * 2
    seg_tailcm = seg_tail2_tot - seg_tail2
    seg_tail_tot = 2 * (seg_dict['tail1'] + seg_tail2)
    seg_dict['tail2'] = seg_tail2
    seg_dict['tailcm'] = seg_tailcm
    # bias reference mirror: even, at least 2, ~ tail_total / max_ref_ratio (ceil)
    seg_dict['ref'] = max(2, -((-seg_tail_tot // max_ref_ratio) // 2) * 2)
    return dict(
        rz=rz_nom,
        cf=cf_min,
        gain=gain_list,
        f_3db=f3db_list,
        f_unity=funity_list,
        phase_margin=pm_list,
    )
@classmethod
def _get_stage2_ss(cls, gm2_list, gds2_list, c2_list, cg2_list, cload, seg_gcd, cur_size):
cur_gm2_list, cur_gds2_list, cur_c2_list, cur_cg2_list = [], [], [], []
for gm2, gds2, c2, cg2 in zip(gm2_list, gds2_list, c2_list, cg2_list):
cur_gm2_list.append(gm2 * cur_size / seg_gcd)
cur_gds2_list.append(gds2 * cur_size / seg_gcd)
cur_c2_list.append(cload + c2 * cur_size / seg_gcd)
cur_cg2_list.append(cg2 * cur_size / seg_gcd)
return cur_gm2_list, cur_gds2_list, cur_c2_list, cur_cg2_list
def _find_rz_cf(self, gm_db, load_db, vtail_list, vg_list, vmid_list, vout_list, vbias_list,
                vb_gm, vb_load, cload, cpar1, w_dict, th_dict, stack_dict, seg_dict,
                gm2_list, res_var, phase_margin, cap_tol=1e-15, cap_step=10e-15, cap_min=1e-15,
                cap_max=1e-9):
    """Find minimum miller cap that stabilizes the system.

    The zero-nulling conductance is sized from the worst-case (smallest)
    stage-2 gm derated by ``res_var``; a per-corner binary search finds the
    smallest feedback cap meeting ``phase_margin``, then the final cap is
    evaluated at the nominal conductance across all corners.

    Returns:
        (funity_list, 1/gz_nom, cf_min, gain_list, f3db_list, pm_list), or six
        Nones when some corner cannot be stabilized below ``cap_max``.

    NOTE: This function assume phase of system for any miller cap value will not loop
    around 360, otherwise it may get the phase margin wrong. This assumption should be valid
    for this op amp.
    """
    gz_worst = float(min(gm2_list))
    gz_nom = gz_worst * (1 - res_var)
    # find maximum Cf needed to stabilize all corners
    cf_min = cap_min
    for env_idx, (vtail, vg, vmid, vout, vbias) in \
            enumerate(zip(vtail_list, vg_list, vmid_list, vout_list, vbias_list)):
        cir = self._make_circuit(env_idx, gm_db, load_db, vtail, vg, vmid, vout, vbias, vb_gm,
                                 vb_load, cload, cpar1, w_dict, th_dict, stack_dict, seg_dict,
                                 gz_worst)
        bin_iter = FloatBinaryIterator(cf_min, None, cap_tol, search_step=cap_step)
        while bin_iter.has_next():
            cur_cf = bin_iter.get_next()
            # temporarily insert the candidate miller caps
            cir.add_cap(cur_cf, 'outp', 'xp')
            cir.add_cap(cur_cf, 'outn', 'xn')
            num, den = cir.get_num_den('in', 'out')
            cur_pm, _ = get_stability_margins(num, den)
            if cur_pm < phase_margin:
                if cur_cf > cap_max:
                    # no way to make amplifier stable, just return
                    return None, None, None, None, None, None
                bin_iter.up()
            else:
                bin_iter.save()
                bin_iter.down()
            # remove the candidate caps before the next iteration
            cir.add_cap(-cur_cf, 'outp', 'xp')
            cir.add_cap(-cur_cf, 'outn', 'xn')
        # bin_iter is guaranteed to save at least one value, so don't need to worry about
        # cf_min being None
        cf_min = bin_iter.get_last_save()
    # find gain, unity gain bandwidth, and phase margin across corners
    gain_list, f3db_list, funity_list, pm_list = [], [], [], []
    for env_idx, (vtail, vg, vmid, vout, vbias) in \
            enumerate(zip(vtail_list, vg_list, vmid_list, vout_list, vbias_list)):
        cir = self._make_circuit(env_idx, gm_db, load_db, vtail, vg, vmid, vout, vbias, vb_gm,
                                 vb_load, cload, cpar1, w_dict, th_dict, stack_dict, seg_dict,
                                 gz_nom)
        cir.add_cap(cf_min, 'outp', 'xp')
        cir.add_cap(cf_min, 'outn', 'xn')
        num, den = cir.get_num_den('in', 'out')
        pn = np.poly1d(num)
        pd = np.poly1d(den)
        # DC gain is the ratio of the numerator/denominator constants
        gain_list.append(abs(pn(0) / pd(0)))
        f3db_list.append(get_w_3db(num, den) / 2 / np.pi)
        funity_list.append(get_w_crossings(num, den)[0] / 2 / np.pi)
        pm_list.append(get_stability_margins(num, den)[0])
    return funity_list, 1 / gz_nom, cf_min, gain_list, f3db_list, pm_list
@classmethod
def _make_circuit(cls, env_idx, gm_db, load_db, vtail, vg, vmid, vout, vbias, vb_gm, vb_load,
                  cload, cpar1, w_dict, th_dict, stack_dict, seg_dict, gz, neg_cap=False,
                  no_fb=False):
    """Build the small-signal LTICircuit of the two-stage amplifier.

    Queries device small-signal parameters from the gm/load databases at the
    given corner (env_idx) and bias voltages, then assembles both stages,
    parasitic and load caps, the zero-nulling feedback conductance ``gz``
    (unless ``no_fb``) and the differential/single-ended conversion sources.
    """
    cur_env = gm_db.env_list[env_idx]
    # tail devices: stage 1 and stage 2 share geometry, differ only in vds
    gm_db.set_dsn_params(w=w_dict['tail'], intent=th_dict['tail'], stack=stack_dict['tail'])
    tail1_params = gm_db.query(env=cur_env, vbs=0, vds=vtail - vb_gm, vgs=vbias - vb_gm)
    tail2_params = gm_db.query(env=cur_env, vbs=0, vds=vout - vb_gm, vgs=vbias - vb_gm)
    # input differential pair
    gm_db.set_dsn_params(w=w_dict['in'], intent=th_dict['in'], stack=stack_dict['in'])
    gm1_params = gm_db.query(env=cur_env, vbs=vb_gm - vtail, vds=vmid - vtail, vgs=vg - vtail)
    # diode-connected and cross-coupled (negative-gm) load devices
    load_db.set_dsn_params(w=w_dict['load'], intent=th_dict['load'], stack=stack_dict['diode'])
    diode1_params = load_db.query(env=cur_env, vbs=0, vds=vmid - vb_load, vgs=vmid - vb_load)
    diode2_params = load_db.query(env=cur_env, vbs=0, vds=vout - vb_load, vgs=vmid - vb_load)
    load_db.set_dsn_params(stack=stack_dict['ngm'])
    ngm1_params = load_db.query(env=cur_env, vbs=0, vds=vmid - vb_load, vgs=vmid - vb_load)
    ngm2_params = load_db.query(env=cur_env, vbs=0, vds=vout - vb_load, vgs=vmid - vb_load)
    cir = LTICircuit()
    # stage 1
    cir.add_transistor(tail1_params, 'tail', 'gnd', 'gnd', 'gnd', fg=seg_dict['tail1'],
                       neg_cap=neg_cap)
    cir.add_transistor(gm1_params, 'midp', 'inn', 'tail', 'gnd', fg=seg_dict['in'],
                       neg_cap=neg_cap)
    cir.add_transistor(gm1_params, 'midn', 'inp', 'tail', 'gnd', fg=seg_dict['in'],
                       neg_cap=neg_cap)
    cir.add_transistor(diode1_params, 'midp', 'midp', 'gnd', 'gnd', fg=seg_dict['diode1'],
                       neg_cap=neg_cap)
    cir.add_transistor(diode1_params, 'midn', 'midn', 'gnd', 'gnd', fg=seg_dict['diode1'],
                       neg_cap=neg_cap)
    cir.add_transistor(ngm1_params, 'midn', 'midp', 'gnd', 'gnd', fg=seg_dict['ngm1'],
                       neg_cap=neg_cap)
    cir.add_transistor(ngm1_params, 'midp', 'midn', 'gnd', 'gnd', fg=seg_dict['ngm1'],
                       neg_cap=neg_cap)
    # stage 2
    cir.add_transistor(tail2_params, 'outp', 'gnd', 'gnd', 'gnd', fg=seg_dict['tail2'],
                       neg_cap=neg_cap)
    cir.add_transistor(tail2_params, 'outn', 'gnd', 'gnd', 'gnd', fg=seg_dict['tail2'],
                       neg_cap=neg_cap)
    cir.add_transistor(diode2_params, 'outp', 'midn', 'gnd', 'gnd', fg=seg_dict['diode2'],
                       neg_cap=neg_cap)
    cir.add_transistor(diode2_params, 'outn', 'midp', 'gnd', 'gnd', fg=seg_dict['diode2'],
                       neg_cap=neg_cap)
    cir.add_transistor(ngm2_params, 'outp', 'midn', 'gnd', 'gnd', fg=seg_dict['ngm2'],
                       neg_cap=neg_cap)
    cir.add_transistor(ngm2_params, 'outn', 'midp', 'gnd', 'gnd', fg=seg_dict['ngm2'],
                       neg_cap=neg_cap)
    # parasitic cap
    cir.add_cap(cpar1, 'midp', 'gnd')
    cir.add_cap(cpar1, 'midn', 'gnd')
    # load cap
    cir.add_cap(cload, 'outp', 'gnd')
    cir.add_cap(cload, 'outn', 'gnd')
    # feedback resistors
    if not no_fb:
        cir.add_conductance(gz, 'xp', 'midn')
        cir.add_conductance(gz, 'xn', 'midp')
    # diff-to-single conversion
    cir.add_vcvs(0.5, 'inp', 'gnd', 'in', 'gnd')
    cir.add_vcvs(-0.5, 'inn', 'gnd', 'in', 'gnd')
    cir.add_vcvs(1, 'out', 'gnd', 'outp', 'outn')
    return cir
class OpAmpTwoStageChar(MeasurementManager):
def __init__(self,
             data_dir,  # type: str
             meas_name,  # type: str
             impl_lib,  # type: str
             specs,  # type: Dict[str, Any]
             wrapper_lookup,  # type: Dict[str, str]
             sim_view_list,  # type: Sequence[Tuple[str, str]]
             env_list,  # type: Sequence[str]
             ):
    """Forward all construction arguments to MeasurementManager.

    This subclass adds no state of its own; it only customizes the FSM
    behavior through the overridden methods below.
    """
    super(OpAmpTwoStageChar, self).__init__(data_dir, meas_name, impl_lib, specs,
                                            wrapper_lookup, sim_view_list, env_list)
def get_initial_state(self):
    # type: () -> str
    """Return the name of the first FSM state; characterization starts at 'ac0'."""
    return 'ac0'
def get_testbench_info(self, state, prev_output):
    """Build the AC testbench configuration for the given FSM state.

    On state 'ac0' (when ``find_cfb`` is enabled, the default) rfb is reduced
    by ``res_var`` and cfb is swept linearly over
    ``[cmin_scale, cmax_scale] * cfb0``; on later states the nominal rfb0 and
    the cfb chosen by 'ac0' (or the configured cfb0) are used.

    Returns the (tb_name, tb_type, tb_specs, tb_params) tuple produced by the
    base class, with ``tb_specs['sim_vars']`` filled in.
    """
    specs = self.specs
    # Read every spec up front so a missing key fails fast in either branch.
    rfb0 = specs['rfb']
    cfb0 = specs['cfb']
    find_cfb = specs.get('find_cfb', True)
    res_var = specs['res_var']
    cmin_scale = specs['cmin_scale']
    cmax_scale = specs['cmax_scale']
    num_pts = specs['num_pts']
    tb_name, tb_type, tb_specs, tb_params = \
        super(OpAmpTwoStageChar, self).get_testbench_info('ac', prev_output)
    sim_vars = tb_specs['sim_vars']
    if state == 'ac0' and find_cfb:
        # First pass: worst-case (low) rfb with a cfb sweep.
        sim_vars['rfb'] = rfb0 * (1 - res_var)
        sim_vars['cfb'] = np.linspace(cfb0 * cmin_scale, cfb0 * cmax_scale,
                                      num_pts).tolist()
    else:
        # Later passes: nominal rfb with the selected (or configured) cfb.
        sim_vars['rfb'] = rfb0
        sim_vars['cfb'] = self.get_state_output('ac0')['cfb'] if find_cfb else cfb0
    return tb_name, tb_type, tb_specs, tb_params
def process_output(self, state, data, tb_manager):
    # type: (str, Dict[str, Any], ACTB) -> Tuple[bool, str, Dict[str, Any]]
    """Post-process AC simulation data for the current FSM state.

    On 'ac0' (with ``find_cfb`` enabled) this selects the minimum cfb meeting
    the phase-margin spec and moves on to 'ac1'; otherwise it reports
    per-corner gain, bandwidth, unity-gain frequency and phase margin and
    terminates the FSM.
    """
    phase_margin = self.specs['phase_margin']
    find_cfb = self.specs.get('find_cfb', True)
    out_names = ['vout']
    results = tb_manager.get_ugb_and_pm(data, out_names)
    if state == 'ac0' and find_cfb:
        # First pass: pick the smallest cfb in the sweep that satisfies PM.
        return False, 'ac1', dict(cfb=self._find_min_cfb(phase_margin, results))
    # Final pass: collect performance metrics with the chosen/configured cfb.
    cfb = self.get_state_output('ac0')['cfb'] if find_cfb else self.specs['cfb']
    gain_results = tb_manager.get_gain_and_w3db(data, out_names, output_dict=results)
    output = dict(
        cfb=cfb,
        corners=results['corner'].tolist(),
        gain=gain_results['gain_vout'].tolist(),
        bw=gain_results['w3db_vout'].tolist(),
        funity=results['funity_vout'].tolist(),
        pm=results['pm_vout'].tolist(),
    )
    return True, '', output
@classmethod
def _find_min_cfb(cls, phase_margin, results):
axis_names = ['corner', 'cfb']
corner_list = results['corner']
corner_sort_arg = np.argsort(corner_list) # type: Sequence[int]
# rearrange array axis
sweep_vars = results['sweep_params']['pm_vout']
order = [sweep_vars.index(name) for name in axis_names]
pm_data = np.transpose(results['pm_vout'], axes=order)
# determine minimum cfb
cfb_vec | |
from unittest.mock import MagicMock
import pytest
import prefect
from prefect.tasks.kubernetes import (
CreateNamespacedService,
DeleteNamespacedService,
ListNamespacedService,
PatchNamespacedService,
ReadNamespacedService,
ReplaceNamespacedService,
)
from prefect.utilities.configuration import set_temporary_config
@pytest.fixture(autouse=True)
def kube_secret():
    """Expose a fake KUBERNETES_API_KEY local secret to every test in this module."""
    with set_temporary_config({"cloud.use_local_secrets": True}), prefect.context(
        secrets=dict(KUBERNETES_API_KEY="test_key")
    ):
        yield
class TestCreateNamespacedServiceTask:
    """Unit tests for the CreateNamespacedService task."""

    @staticmethod
    def _mock_core_api(monkeypatch):
        # Replace the kubernetes config/client modules so CoreV1Api() hands
        # back a MagicMock whose call args the tests can inspect.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
        core = MagicMock()
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client",
            MagicMock(CoreV1Api=MagicMock(return_value=core)),
        )
        return core

    @staticmethod
    def _mock_api_key(monkeypatch):
        # Patch client.Configuration so the task writes its auth header into
        # a dict we keep a reference to.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
        captured = {}
        conf = MagicMock()
        conf.return_value.api_key = captured
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client.Configuration", conf
        )
        return captured

    def test_empty_initialization(self):
        t = CreateNamespacedService()
        assert t.body == {}
        assert t.namespace == "default"
        assert t.kube_kwargs == {}
        assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self):
        t = CreateNamespacedService(
            body={"test": "test"},
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert t.body == {"test": "test"}
        assert t.namespace == "test"
        assert t.kube_kwargs == {"test": "test"}
        assert t.kubernetes_api_key_secret == "test"

    def test_empty_body_raises_error(self):
        with pytest.raises(ValueError):
            CreateNamespacedService().run()

    def test_invalid_body_raises_error(self):
        with pytest.raises(ValueError):
            CreateNamespacedService().run(body=None)

    def test_api_key_pulled_from_secret(self, monkeypatch):
        captured = self._mock_api_key(monkeypatch)
        CreateNamespacedService(body={"test": "test"}).run()
        assert captured == {"authorization": "test_key"}

    def test_body_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        CreateNamespacedService(body={"test": "a"}).run(body={"test": "b"})
        assert core.create_namespaced_service.call_args[1]["body"] == {"test": "b"}

    def test_body_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        CreateNamespacedService(body={"test": "a"}).run(body={"a": "test"})
        assert core.create_namespaced_service.call_args[1]["body"] == {
            "a": "test",
            "test": "a",
        }

    def test_empty_body_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        CreateNamespacedService().run(body={"test": "a"})
        assert core.create_namespaced_service.call_args[1]["body"] == {"test": "a"}

    def test_kube_kwargs_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = CreateNamespacedService(body={"test": "a"}, kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert core.create_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = CreateNamespacedService(body={"test": "a"}, kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        kwargs = core.create_namespaced_service.call_args[1]
        assert kwargs["a"] == "test"
        assert kwargs["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        CreateNamespacedService(body={"test": "a"}).run(kube_kwargs={"test": "a"})
        assert core.create_namespaced_service.call_args[1]["test"] == "a"
class TestDeleteNamespacedServiceTask:
    """Unit tests for the DeleteNamespacedService task."""

    @staticmethod
    def _mock_core_api(monkeypatch):
        # Replace config/client so CoreV1Api() returns an inspectable mock.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
        core = MagicMock()
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client",
            MagicMock(CoreV1Api=MagicMock(return_value=core)),
        )
        return core

    def test_empty_initialization(self):
        t = DeleteNamespacedService()
        assert not t.service_name
        assert t.namespace == "default"
        assert t.kube_kwargs == {}
        assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self):
        t = DeleteNamespacedService(
            service_name="test",
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert t.service_name == "test"
        assert t.namespace == "test"
        assert t.kube_kwargs == {"test": "test"}
        assert t.kubernetes_api_key_secret == "test"

    def test_empty_name_raises_error(self):
        with pytest.raises(ValueError):
            DeleteNamespacedService().run()

    def test_invalid_body_raises_error(self):
        with pytest.raises(ValueError):
            DeleteNamespacedService().run(service_name=None)

    def test_api_key_pulled_from_secret(self, monkeypatch):
        monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
        captured = {}
        conf = MagicMock()
        conf.return_value.api_key = captured
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client.Configuration", conf
        )
        DeleteNamespacedService(service_name="test").run()
        assert captured == {"authorization": "test_key"}

    def test_kube_kwargs_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = DeleteNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert core.delete_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = DeleteNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        kwargs = core.delete_namespaced_service.call_args[1]
        assert kwargs["a"] == "test"
        assert kwargs["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        DeleteNamespacedService(service_name="test").run(kube_kwargs={"test": "a"})
        assert core.delete_namespaced_service.call_args[1]["test"] == "a"
class TestListNamespacedServiceTask:
    """Unit tests for the ListNamespacedService task."""

    @staticmethod
    def _mock_core_api(monkeypatch):
        # Replace config/client so CoreV1Api() returns an inspectable mock.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
        core = MagicMock()
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client",
            MagicMock(CoreV1Api=MagicMock(return_value=core)),
        )
        return core

    def test_empty_initialization(self):
        t = ListNamespacedService()
        assert t.namespace == "default"
        assert t.kube_kwargs == {}
        assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self):
        t = ListNamespacedService(
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert t.namespace == "test"
        assert t.kube_kwargs == {"test": "test"}
        assert t.kubernetes_api_key_secret == "test"

    def test_api_key_pulled_from_secret(self, monkeypatch):
        monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
        captured = {}
        conf = MagicMock()
        conf.return_value.api_key = captured
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client.Configuration", conf
        )
        ListNamespacedService().run()
        assert captured == {"authorization": "test_key"}

    def test_kube_kwargs_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        ListNamespacedService(kube_kwargs={"test": "a"}).run(kube_kwargs={"test": "b"})
        assert core.list_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        ListNamespacedService(kube_kwargs={"test": "a"}).run(kube_kwargs={"a": "test"})
        kwargs = core.list_namespaced_service.call_args[1]
        assert kwargs["a"] == "test"
        assert kwargs["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        ListNamespacedService().run(kube_kwargs={"test": "a"})
        assert core.list_namespaced_service.call_args[1]["test"] == "a"
class TestPatchNamespacedServiceTask:
    """Unit tests for the PatchNamespacedService task."""

    @staticmethod
    def _mock_core_api(monkeypatch):
        # Replace config/client so CoreV1Api() returns an inspectable mock.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
        core = MagicMock()
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client",
            MagicMock(CoreV1Api=MagicMock(return_value=core)),
        )
        return core

    def test_empty_initialization(self):
        t = PatchNamespacedService()
        assert not t.service_name
        assert t.body == {}
        assert t.namespace == "default"
        assert t.kube_kwargs == {}
        assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self):
        t = PatchNamespacedService(
            service_name="test",
            body={"test": "test"},
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert t.service_name == "test"
        assert t.body == {"test": "test"}
        assert t.namespace == "test"
        assert t.kube_kwargs == {"test": "test"}
        assert t.kubernetes_api_key_secret == "test"

    def test_empty_body_raises_error(self):
        with pytest.raises(ValueError):
            PatchNamespacedService().run()

    def test_invalid_body_raises_error(self):
        with pytest.raises(ValueError):
            PatchNamespacedService().run(body=None)

    def test_invalid_service_name_raises_error(self):
        with pytest.raises(ValueError):
            PatchNamespacedService().run(body={"test": "test"}, service_name=None)

    def test_api_key_pulled_from_secret(self, monkeypatch):
        monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
        captured = {}
        conf = MagicMock()
        conf.return_value.api_key = captured
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client.Configuration", conf
        )
        PatchNamespacedService(body={"test": "test"}, service_name="test").run()
        assert captured == {"authorization": "test_key"}

    def test_body_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        PatchNamespacedService(body={"test": "a"}, service_name="test").run(
            body={"test": "b"}
        )
        assert core.patch_namespaced_service.call_args[1]["body"] == {"test": "b"}

    def test_body_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        PatchNamespacedService(body={"test": "a"}, service_name="test").run(
            body={"a": "test"}
        )
        assert core.patch_namespaced_service.call_args[1]["body"] == {
            "a": "test",
            "test": "a",
        }

    def test_empty_body_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        PatchNamespacedService(service_name="test").run(body={"test": "a"})
        assert core.patch_namespaced_service.call_args[1]["body"] == {"test": "a"}

    def test_kube_kwargs_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = PatchNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"test": "b"})
        assert core.patch_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = PatchNamespacedService(
            body={"test": "a"}, kube_kwargs={"test": "a"}, service_name="test"
        )
        task.run(kube_kwargs={"a": "test"})
        kwargs = core.patch_namespaced_service.call_args[1]
        assert kwargs["a"] == "test"
        assert kwargs["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        PatchNamespacedService(body={"test": "a"}, service_name="test").run(
            kube_kwargs={"test": "a"}
        )
        assert core.patch_namespaced_service.call_args[1]["test"] == "a"
class TestReadNamespacedServiceTask:
    """Unit tests for the ReadNamespacedService task."""

    @staticmethod
    def _mock_core_api(monkeypatch):
        # Replace config/client so CoreV1Api() returns an inspectable mock.
        monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
        core = MagicMock()
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client",
            MagicMock(CoreV1Api=MagicMock(return_value=core)),
        )
        return core

    def test_empty_initialization(self):
        t = ReadNamespacedService()
        assert not t.service_name
        assert t.namespace == "default"
        assert t.kube_kwargs == {}
        assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"

    def test_filled_initialization(self):
        t = ReadNamespacedService(
            service_name="test",
            namespace="test",
            kube_kwargs={"test": "test"},
            kubernetes_api_key_secret="test",
        )
        assert t.service_name == "test"
        assert t.namespace == "test"
        assert t.kube_kwargs == {"test": "test"}
        assert t.kubernetes_api_key_secret == "test"

    def test_empty_name_raises_error(self):
        with pytest.raises(ValueError):
            ReadNamespacedService().run()

    def test_invalid_body_raises_error(self):
        with pytest.raises(ValueError):
            ReadNamespacedService().run(service_name=None)

    def test_api_key_pulled_from_secret(self, monkeypatch):
        monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
        captured = {}
        conf = MagicMock()
        conf.return_value.api_key = captured
        monkeypatch.setattr(
            "prefect.tasks.kubernetes.service.client.Configuration", conf
        )
        ReadNamespacedService(service_name="test").run()
        assert captured == {"authorization": "test_key"}

    def test_kube_kwargs_value_is_replaced(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = ReadNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"test": "b"})
        assert core.read_namespaced_service.call_args[1]["test"] == "b"

    def test_kube_kwargs_value_is_appended(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        task = ReadNamespacedService(service_name="test", kube_kwargs={"test": "a"})
        task.run(kube_kwargs={"a": "test"})
        kwargs = core.read_namespaced_service.call_args[1]
        assert kwargs["a"] == "test"
        assert kwargs["test"] == "a"

    def test_empty_kube_kwargs_value_is_updated(self, monkeypatch):
        core = self._mock_core_api(monkeypatch)
        ReadNamespacedService(service_name="test").run(kube_kwargs={"test": "a"})
        assert core.read_namespaced_service.call_args[1]["test"] == "a"
class TestReplaceNamespacedServiceTask:
def test_empty_initialization(self):
    """A bare ReplaceNamespacedService carries the documented defaults."""
    t = ReplaceNamespacedService()
    assert not t.service_name
    assert t.body == {}
    assert t.namespace == "default"
    assert t.kube_kwargs == {}
    assert t.kubernetes_api_key_secret == "KUBERNETES_API_KEY"
def test_filled_initialization(self):
    """Constructor keyword arguments are stored unchanged on the task."""
    t = ReplaceNamespacedService(
        service_name="test",
        body={"test": "test"},
        namespace="test",
        kube_kwargs={"test": "test"},
        kubernetes_api_key_secret="test",
    )
    assert t.service_name == "test"
    assert t.body == {"test": "test"}
    assert t.namespace == "test"
    assert t.kube_kwargs == {"test": "test"}
    assert t.kubernetes_api_key_secret == "test"
def test_empty_body_raises_error(self):
    """Running without any body is rejected."""
    with pytest.raises(ValueError):
        ReplaceNamespacedService().run()
def test_invalid_body_raises_error(self):
    """Passing body=None at run time is rejected."""
    with pytest.raises(ValueError):
        ReplaceNamespacedService().run(body=None)
def test_invalid_service_name_raises_error(self):
    """A valid body without a service name is still rejected."""
    with pytest.raises(ValueError):
        ReplaceNamespacedService().run(body={"test": "test"}, service_name=None)
def test_api_key_pulled_from_secret(self, monkeypatch):
    """The auth header is populated from the KUBERNETES_API_KEY secret."""
    monkeypatch.setattr("prefect.tasks.kubernetes.service.client", MagicMock())
    captured = {}
    conf = MagicMock()
    conf.return_value.api_key = captured
    monkeypatch.setattr(
        "prefect.tasks.kubernetes.service.client.Configuration", conf
    )
    ReplaceNamespacedService(body={"test": "test"}, service_name="test").run()
    assert captured == {"authorization": "test_key"}
def test_body_value_is_replaced(self, monkeypatch):
    """A run-time body value overrides the same key from the constructor."""
    monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
    core = MagicMock()
    monkeypatch.setattr(
        "prefect.tasks.kubernetes.service.client",
        MagicMock(CoreV1Api=MagicMock(return_value=core)),
    )
    ReplaceNamespacedService(body={"test": "a"}, service_name="test").run(
        body={"test": "b"}
    )
    assert core.replace_namespaced_service.call_args[1]["body"] == {"test": "b"}
def test_body_value_is_appended(self, monkeypatch):
    """Run-time body keys merge with (not replace) the constructor body."""
    monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
    core = MagicMock()
    monkeypatch.setattr(
        "prefect.tasks.kubernetes.service.client",
        MagicMock(CoreV1Api=MagicMock(return_value=core)),
    )
    ReplaceNamespacedService(body={"test": "a"}, service_name="test").run(
        body={"a": "test"}
    )
    assert core.replace_namespaced_service.call_args[1]["body"] == {
        "a": "test",
        "test": "a",
    }
def test_empty_body_value_is_updated(self, monkeypatch):
    """With no constructor body, the run-time body is used as-is."""
    monkeypatch.setattr("prefect.tasks.kubernetes.service.config", MagicMock())
    core = MagicMock()
    monkeypatch.setattr(
        "prefect.tasks.kubernetes.service.client",
        MagicMock(CoreV1Api=MagicMock(return_value=core)),
    )
    ReplaceNamespacedService(service_name="test").run(body={"test": "a"})
    assert core.replace_namespaced_service.call_args[1]["body"] == {"test": "a"}
def | |
% len(ipo.getCurve('ScaleX').bezierPoints) + r'">')
spacer = ''
for i in range(len(ipo.getCurve('ScaleX').bezierPoints)):
if use_no_shift_over:
if ipo.getCurve('ScaleX').bezierPoints[i].pt[0] != 1:
fout.write(spacer + "%0.6f" % (round((ipo.getCurve('ScaleX').bezierPoints[i].pt[0] - 0) / anim_fps, 6),))
else:
fout.write(spacer + "%0.6f" % (round((ipo.getCurve('ScaleX').bezierPoints[i].pt[0] - 1) / anim_fps, 6),))
else:
fout.write(spacer + "%0.6f" % (round((ipo.getCurve('ScaleX').bezierPoints[i].pt[0] - 1) / anim_fps, 6),))
spacer = ', '
fout.write(r'</scl_time_indicies>' + endl)
fout.write(4*tab4 + r'<scl_key_values count="' + "%d" % len(ipo.getCurve('ScaleX').bezierPoints) + r'">')
spacer = ''
for i in range(len(ipo.getCurve('ScaleX').bezierPoints)):
fout.write(spacer + "%0.6f %0.6f %0.6f" % (round(ipo.getCurve('ScaleX').bezierPoints[i].pt[1], 6), round(ipo.getCurve('ScaleZ').bezierPoints[i].pt[1], 6), round(ipo.getCurve('ScaleY').bezierPoints[i].pt[1], 6)))
spacer = ', '
fout.write(r'</scl_key_values>' + endl)
fout.write(3*tab4 + r'</keyframes>' + endl)
fout.write(3*tab4 + r'<controller>' + endl)
if use_unique_timer:
fout.write(4*tab4 + r'<timer id="' + gamx_name_prefix('tmr_', gamx_name_prefix('ipo_', ipo.name)) + r'" type="basic">' + endl)
fout.write(4*tab4 + r'</timer>' + endl)
else:
fout.write(4*tab4 + r'<timer ref="' + gamx_name_prefix('tmr_', gamx_name_prefix('obj_', gamx_first_scene_name())) + r'"/>' + endl)
fout.write(3*tab4 + r'</controller>' + endl)
if ipo.getCurve('LocX') != None:
fout.write(3*tab4 + r'<pos_polation>')
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:3] == "Loc":
mode |= ipoc.interpolation
if mode & Blender.IpoCurve.InterpTypes["CONST"]:
fout.write(r'ipo_const ')
elif mode & Blender.IpoCurve.InterpTypes["LINEAR"]:
fout.write(r'ipo_linear ')
elif mode & Blender.IpoCurve.InterpTypes["BEZIER"]:
if use_linear_over_cubic_cr:
fout.write(r'ipo_linear ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'linear' position interpolation.\r\n"
else:
fout.write(r'ipo_cubiccr ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'cubic_cr' position interpolation.\r\n"
else:
fout.write(r'ipo_linear ') # default
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:3] == "Loc":
mode |= ipoc.extend
if mode & Blender.IpoCurve.ExtendTypes["CONST"]:
fout.write(r'epo_const')
elif mode & Blender.IpoCurve.ExtendTypes["EXTRAP"]:
fout.write(r'epo_linear')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC"]:
fout.write(r'epo_cyclic')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC_EXTRAP"]:
fout.write(r'epo_cyclicadd')
else:
fout.write(r'epo_const') # default
fout.write(r'</pos_polation>' + endl)
if ipo.getCurve('QuatW') != None or ipo.getCurve('RotX') != None:
fout.write(3*tab4 + r'<rot_polation>')
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:3] == "Rot" or ipoc.name[:4] == "Quat":
mode |= ipoc.interpolation
if mode & Blender.IpoCurve.InterpTypes["CONST"]:
fout.write(r'ipo_const ')
elif mode & Blender.IpoCurve.InterpTypes["LINEAR"]:
fout.write(r'ipo_linear ')
elif mode & Blender.IpoCurve.InterpTypes["BEZIER"]:
if use_linear_over_cubic_cr:
fout.write(r'ipo_linear ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'linear' rotation interpolation.\r\n"
else:
fout.write(r'ipo_cubiccr ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'cubic_cr' rotation interpolation.\r\n"
else:
fout.write(r'ipo_linear ') # default
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:3] == "Rot" or ipoc.name[:4] == "Quat":
mode |= ipoc.extend
if mode & Blender.IpoCurve.ExtendTypes["CONST"]:
fout.write(r'epo_const')
elif mode & Blender.IpoCurve.ExtendTypes["EXTRAP"]:
fout.write(r'epo_linear')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC"]:
fout.write(r'epo_cyclic')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC_EXTRAP"]:
fout.write(r'epo_cyclicadd')
else:
fout.write(r'epo_const') # default
fout.write(r'</rot_polation>' + endl)
if ipo.getCurve('ScaleX') != None:
fout.write(3*tab4 + r'<scl_polation>')
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:5] == "Scale":
mode |= ipoc.interpolation
if mode & Blender.IpoCurve.InterpTypes["CONST"]:
fout.write(r'ipo_const ')
elif mode & Blender.IpoCurve.InterpTypes["LINEAR"]:
fout.write(r'ipo_linear ')
elif mode & Blender.IpoCurve.InterpTypes["BEZIER"]:
if use_linear_over_cubic_cr:
fout.write(r'ipo_linear ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'linear' scale interpolation.\r\n"
else:
fout.write(r'ipo_cubiccr ') # no direct tag
print "Info: GAMX_Export: Warning: BEZIER IpoCurve interpolator setting not supported; using 'cubic_cr' scale interpolation.\r\n"
else:
fout.write(r'ipo_linear ') # default
mode = 0
for ipoc in ipo.curves:
if ipoc.name[:5] == "Scale":
mode |= ipoc.extend
if mode & Blender.IpoCurve.ExtendTypes["CONST"]:
fout.write(r'epo_const')
elif mode & Blender.IpoCurve.ExtendTypes["EXTRAP"]:
fout.write(r'epo_linear')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC"]:
fout.write(r'epo_cyclic')
elif mode & Blender.IpoCurve.ExtendTypes["CYCLIC_EXTRAP"]:
fout.write(r'epo_cyclicadd')
else:
fout.write(r'epo_const') # default
fout.write(r'</scl_polation>' + endl)
fout.write(2*tab4 + r'</interpolator>' + endl)
# Write materials
for mat in bpy.data.materials:
assets += 1
if assets > 1:
fout.write(endl)
fout.write(2*tab4 + r'<material id="' + gamx_name_prefix('mat_', mat.name) + r'" type="material">' + endl)
R, G, B, A = round(mat.R * mat.amb, 6), round(mat.G * mat.amb, 6), round(mat.B * mat.amb, 6), round(mat.alpha, 6)
fout.write(3*tab4 + r'<ambient>' + "%0.6f %0.6f %0.6f %0.6f" % (R,G,B,A) + r'</ambient>' + endl)
R, G, B, A = round(mat.R, 6), round(mat.G, 6), round(mat.B, 6), round(mat.alpha, 6)
fout.write(3*tab4 + r'<diffuse>' + "%0.6f %0.6f %0.6f %0.6f" % (R,G,B,A) + r'</diffuse>' + endl)
R, G, B, A = round(mat.specR, 6), round(mat.specG, 6), round(mat.specB, 6), round(mat.alpha, 6)
fout.write(3*tab4 + r'<specular>' + "%0.6f %0.6f %0.6f %0.6f" % (R,G,B,A) + r'</specular>' + endl)
R, G, B, A = round(mat.R * mat.emit, 6), round(mat.G * mat.emit, 6), round(mat.B * mat.emit, 6), round(mat.alpha, 6)
fout.write(3*tab4 + r'<emmisive>' + "%0.6f %0.6f %0.6f %0.6f" % (R,G,B,A) + r'</emmisive>' + endl)
S = round((mat.hard - 1.0) / 510.0, 6) # [1,511]
fout.write(3*tab4 + r'<shininess>' + "%0.6f" % (S,) + r'</shininess>' + endl)
fout.write(2*tab4 + r'</material>' + endl)
# Write textures
for tex in bpy.data.textures:
if tex.getImage() == None:
print "Info: GAMX_Export: Error: Texture '%s' does not have an image. Only image textures are supported.\r\n" % (tex.name,)
else:
mtex = None
# Find corresponding MTex through materials (texture class doesn't directly link)
for mat in bpy.data.materials:
for mtex in mat.getTextures():
if mtex is not None and gamx_name_prefix('tex_', mtex.tex.name) == gamx_name_prefix('tex_', tex.name):
break # layer 2
else:
mtex = None
if mtex is not None: # layer 1
break
else:
print "Info: GAMX_Export: Error: Cannot find corresponding MTex material structure for texture '%s'.\r\n" % (tex.name,)
continue # MTex not found, cannot extract texture data
# Although MTex at this point isn't necessarily the exact correspondent, for most types it's close enough
assets += 1
if assets > 1:
fout.write(endl)
fout.write(2*tab4 + r'<texture id="' + gamx_name_prefix('tex_', tex.name) + r'" type="static">' + endl)
fout.write(3*tab4 + r'<surface source="external">' + endl)
fout.write(4*tab4 + r'<url>' + gamx_file_prefix(tex.getImage().getFilename()[2:]) + r'</url>' + endl)
fout.write(4*tab4 + r'<transforms>')
spacer = ''
if tex.flags & Blender.Texture.Flags["NEGALPHA"]:
fout.write(spacer + 'invert_ac')
spacer = ' '
if mtex.noRGB:
fout.write(spacer + 'force_gs')
spacer = ' '
#else: # implication of forcing rgb is not well enough implied
# fout.write(spacer + 'force_rgb')
# spacer = ' '
if tex.useAlpha or tex.imageFlags & Blender.Texture.ImageFlags["USEALPHA"]:
fout.write(spacer + 'force_ac')
spacer = ' '
else: # very implied that if alpha is not to be used to get rid of it
fout.write(spacer + 'force_no_ac')
spacer = ' '
if tex.flags & Blender.Texture.Flags["FLIPBLEND"]:
fout.write(spacer + 'flip_vert flip_horz')
spacer = ' '
fout.write(r'</transforms>' + endl)
fout.write(3*tab4 + r'</surface>' + endl)
fout.write(3*tab4 + r'<environment>')
# Figure out the environment setting, most of which don't have enough information to determine full range of options
if tex.normalMap or tex.imageFlags & Blender.Texture.ImageFlags["NORMALMAP"]:
fout.write('dot3')
elif mtex.blendmode == Blender.Texture.BlendModes["DARKEN"]: # no direct tag
fout.write('replace')
print "Info: GAMX_Export: Warning: DARKEN BlendModes fragmentation environment setting not supported; using 'replace' fragmentation environment.\r\n"
elif mtex.blendmode == Blender.Texture.BlendModes["DIVIDE"]: # no direct tag
fout.write('decal')
print "Info: GAMX_Export: Warning: DIVIDE BlendModes fragmentation environment setting not supported; using 'decal' fragmentation environment.\r\n"
elif mtex.blendmode == Blender.Texture.BlendModes["LIGHTEN"]: # no direct tag
fout.write('replace')
print "Info: GAMX_Export: Warning: LIGHTEN BlendModes fragmentation environment setting not supported; using 'replace' fragmentation environment.\r\n"
#elif mtex.blendmode == Blender.Texture.BlendModes["MIX"]:
# fout.write('modulate') # x1,x2,x4 implemented in else block
elif mtex.blendmode == Blender.Texture.BlendModes["ADD"]:
fout.write('add')
#elif mtex.blendmode == Blender.Texture.BlendModes["MULTIPLY"]: # no direct tag
# fout.write('modulate') # x1,x2,x4 implemented in else block
elif mtex.blendmode == Blender.Texture.BlendModes["DIFFERENCE"]: # no direct tag
fout.write('subtract')
print "Info: GAMX_Export: Warning: DIFFERENCE BlendModes fragmentation environment setting not supported; using 'subtract' fragmentation environment.\r\n"
elif mtex.blendmode == Blender.Texture.BlendModes["SUBTRACT"]:
fout.write('subtract')
#elif mtex.blendmode == Blender.Texture.BlendModes["SCREEN"]: # no direct tag
# fout.write('modulate') # x1,x2,x4 implemented in else block
else:
if mtex.blendmode != Blender.Texture.BlendModes["MIX"]:
if mtex.blendmode == Blender.Texture.BlendModes["MULTIPLY"]:
print "Info: GAMX_Export: Warning: MULTIPLY BlendModes fragmentation environment setting not supported; using 'modulate' fragmentation environment.\r\n"
elif mtex.blendmode == Blender.Texture.BlendModes["SCREEN"]:
print "Info: GAMX_Export: Warning: SCREEN BlendModes fragmentation environment setting not supported; using 'modulate' fragmentation environment.\r\n"
else:
print "Info: GAMX_Export: Warning: UNKNOWN BlendModes fragmentation environment setting not supported; using 'modulate' fragmentation environment.\r\n"
if mtex.varfac == 4.0:
fout.write('modulate_x4')
elif mtex.varfac == 2.0:
fout.write('modulate_x2')
else:
fout.write('modulate')
fout.write(r'</environment>' + endl)
fout.write(3*tab4 + r'<filter>')
if tex.mipmap or tex.imageFlags & Blender.Texture.ImageFlags["MIPMAP"]:
if tex.interpol: # not enough information to determine full range of options
fout.write('trilinear')
else:
if use_bilinear_over_unilinear:
fout.write('bilinear')
print "Info: GAMX_Export: Warning: No interpolation & MIPMAP ImageFlags filter setting is ambiguous; using 'bilinear' filtering.\r\n"
else:
fout.write('unilinear')
print "Info: GAMX_Export: Warning: No interpolation & MIPMAP ImageFlags filter setting is ambiguous; | |
namedtuple('Extent', 'left right')
extents = tuple(Extent(i, j) for i, j in zip(left, right))
return EnrichedTuple(*extents, getters=self.dimensions, left=left, right=right)
@cached_property
def _mask_modulo(self):
"""
A boolean mask telling which :class:`Dimension`s support modulo-indexing.
"""
return tuple(True if i.is_Stepping else False for i in self.dimensions)
@cached_property
def _mask_domain(self):
"""
A mask to access the domain region of the allocated data.
"""
return tuple(slice(i, -j) if j != 0 else slice(i, None)
for i, j in self._offset_domain)
@cached_property
def _mask_inhalo(self):
"""
A mask to access the domain+inhalo region of the allocated data.
"""
return tuple(slice(i, -j) if j != 0 else slice(i, None)
for i, j in self._offset_inhalo)
    @cached_property
    def _mask_outhalo(self):
        """
        A mask to access the domain+outhalo region of the allocated data.
        """
        # Widen each domain slice by the outhalo extent on both sides.
        # The ``stop and ... or None`` chain is deliberate: it maps a None
        # stop (open-ended slice) to None, *and* also maps a widened stop
        # of exactly 0 to None ("up to the end"), which a plain
        # ``x if x is not None else None`` rewrite would get wrong.
        return tuple(slice(i.start - j.left, i.stop and i.stop + j.right or None)
                     for i, j in zip(self._mask_domain, self._extent_outhalo))
@cached_property
def _decomposition(self):
"""
A tuple of :class:`Decomposition`s, representing the domain
decomposition. None is used as a placeholder for non-decomposed
Dimensions.
"""
if self._distributor is None:
return (None,)*self.ndim
mapper = {d: self._distributor.decomposition[d] for d in self._dist_dimensions}
return tuple(mapper.get(d) for d in self.dimensions)
@cached_property
def _decomposition_outhalo(self):
"""
A tuple of :class:`Decomposition`s, representing the domain+outhalo
decomposition. None is used as a placeholder for non-decomposed
Dimensions.
"""
if self._distributor is None:
return (None,)*self.ndim
return tuple(v.reshape(*self._extent_inhalo[d]) if v is not None else v
for d, v in zip(self.dimensions, self._decomposition))
    @property
    def data(self):
        """
        The domain data values, as a :class:`numpy.ndarray`.
        Elements are stored in row-major format.
        Notes
        -----
        With this accessor you are claiming that you will modify the values you
        get back. If you only need to look at the values, use :meth:`data_ro`
        instead.
        """
        # Plain alias: :meth:`data_domain` performs the actual work
        return self.data_domain
    @property
    @_allocate_memory
    def data_domain(self):
        """
        The domain data values.
        Elements are stored in row-major format.
        Notes
        -----
        Alias to ``self.data``.
        With this accessor you are claiming that you will modify the values you
        get back. If you only need to look at the values, use
        :meth:`data_ro_domain` instead.
        """
        # Caller may write through the returned view, so the halo must be
        # considered stale from now on
        self._is_halo_dirty = True
        # Restrict the allocated buffer to the domain region, retaining
        # global-indexing support via the decomposition
        return self._data._global(self._mask_domain, self._decomposition)
    @property
    @_allocate_memory
    def data_with_halo(self):
        """
        The domain+outhalo data values.
        Elements are stored in row-major format.
        Notes
        -----
        With this accessor you are claiming that you will modify the values you
        get back. If you only need to look at the values, use
        :meth:`data_ro_with_halo` instead.
        """
        # Writable access: mark the halo stale, then refresh it so the
        # returned view reflects the neighbouring ranks' current values
        self._is_halo_dirty = True
        self._halo_exchange()
        return self._data._global(self._mask_outhalo, self._decomposition_outhalo)
    _data_with_outhalo = data_with_halo  # internal alias
    @property
    @_allocate_memory
    def _data_with_inhalo(self):
        """
        The domain+inhalo data values.
        Elements are stored in row-major format.
        Notes
        -----
        This accessor does *not* support global indexing.
        With this accessor you are claiming that you will modify the values you
        get back. If you only need to look at the values, use
        :meth:`data_ro_with_inhalo` instead.
        Typically, this accessor won't be used in user code to set or read data
        values. Instead, it may come in handy for testing or debugging
        """
        self._is_halo_dirty = True
        self._halo_exchange()
        # np.asarray drops the global-indexing wrapper: local indices only
        return np.asarray(self._data[self._mask_inhalo])
    @property
    @_allocate_memory
    def _data_allocated(self):
        """
        The allocated data values, that is domain+inhalo+padding.
        Elements are stored in row-major format.
        Notes
        -----
        This accessor does *not* support global indexing.
        With this accessor you are claiming that you will modify the values you
        get back. If you only need to look at the values, use
        :meth:`data_ro_allocated` instead.
        Typically, this accessor won't be used in user code to set or read data
        values. Instead, it may come in handy for testing or debugging
        """
        self._is_halo_dirty = True
        self._halo_exchange()
        # Entire buffer, no masking: domain + inhalo + padding
        return np.asarray(self._data)
    @property
    @_allocate_memory
    def data_ro_domain(self):
        """
        A read-only view of the domain data values.
        """
        # Read-only: no halo bookkeeping needed, just forbid writes
        view = self._data._global(self._mask_domain, self._decomposition)
        view.setflags(write=False)
        return view
    @property
    @_allocate_memory
    def data_ro_with_halo(self):
        """
        A read-only view of the domain+outhalo data values.
        """
        # NOTE: unlike ``data_with_halo``, no halo exchange is triggered
        # here; the view exposes the halo values as-is
        view = self._data._global(self._mask_outhalo, self._decomposition_outhalo)
        view.setflags(write=False)
        return view
    _data_ro_with_outhalo = data_ro_with_halo  # internal alias
    @property
    @_allocate_memory
    def _data_ro_with_inhalo(self):
        """
        A read-only view of the domain+inhalo data values.
        Notes
        -----
        This accessor does *not* support global indexing.
        """
        view = self._data[self._mask_inhalo]
        view.setflags(write=False)
        # np.asarray drops the global-indexing wrapper: local indices only
        return np.asarray(view)
    @property
    @_allocate_memory
    def _data_ro_allocated(self):
        """
        A read-only view of the domain+inhalo+padding data values.
        Notes
        -----
        This accessor does *not* support global indexing.
        """
        # Entire buffer, no masking, writes forbidden
        view = self._data
        view.setflags(write=False)
        return np.asarray(view)
@cached_property
def local_indices(self):
"""
A tuple of slices representing the global indices that logically
belong to the calling MPI rank.
Notes
-----
Given a Function ``f(x, y)`` with shape ``(nx, ny)``, when *not* using
MPI this property will return ``(slice(0, nx-1), slice(0, ny-1))``. On
the other hand, when MPI is used, the local ranges depend on the domain
decomposition, which is carried by ``self.grid``.
"""
if self._distributor is None:
return tuple(slice(0, s) for s in self.shape)
else:
return tuple(self._distributor.glb_slices.get(d, slice(0, s))
for s, d in zip(self.shape, self.dimensions))
@cached_property
def space_dimensions(self):
"""Tuple of :class:`Dimension`s that define physical space."""
return tuple(d for d in self.indices if d.is_Space)
@cached_property
def _dist_dimensions(self):
"""Tuple of MPI-distributed :class:`Dimension`s."""
if self._distributor is None:
return ()
return tuple(d for d in self.indices if d in self._distributor.dimensions)
    @property
    def initializer(self):
        """
        The initializer for the data: the current values (domain+halo) as a
        plain ndarray if data has been allocated, otherwise the stored
        ``_initializer`` (set elsewhere in the class — not visible here).
        """
        if self._data is not None:
            # Expose allocated values as a plain ndarray (no project wrapper)
            return self.data_with_halo.view(np.ndarray)
        else:
            return self._initializer
    @cached_property
    def symbolic_shape(self):
        """
        The symbolic shape of the object. This includes: ::
            * the domain, halo, and padding regions. While halo and padding
              are known quantities (integers), the domain size is represented
              by a symbol.
            * the shifting induced by the ``staggered`` mask.
        """
        symbolic_shape = super(TensorFunction, self).symbolic_shape
        # Shrink each extent by the corresponding staggering offset
        ret = tuple(Add(i, -j) for i, j in zip(symbolic_shape, self.staggered))
        return EnrichedTuple(*ret, getters=self.dimensions)
    def _halo_exchange(self):
        """Perform the halo exchange with the neighboring processes.

        No-op when MPI is not initialized or there is a single rank.
        Raises RuntimeError if run with multiple ranks but no Grid attached,
        or if a previous exchange is still in flight.
        """
        if not MPI.Is_initialized() or MPI.COMM_WORLD.size == 1:
            # Nothing to do
            return
        if MPI.COMM_WORLD.size > 1 and self._distributor is None:
            raise RuntimeError("`%s` cannot perform a halo exchange as it has "
                               "no Grid attached" % self.name)
        if self._in_flight:
            raise RuntimeError("`%s` cannot initiate a halo exchange as previous "
                               "exchanges are still in flight" % self.name)
        # Blocking exchange, one space Dimension at a time: each begin is
        # immediately completed before moving to the next Dimension
        for i in self.space_dimensions:
            self.__halo_begin_exchange(i)
            self.__halo_end_exchange(i)
        self._is_halo_dirty = False
        assert not self._in_flight
    def __halo_begin_exchange(self, dim):
        """Begin a halo exchange along a given :class:`Dimension`.

        Posts non-blocking send/receive pairs for both the LEFT and RIGHT
        sides of `dim`, recording them in ``self._in_flight``.
        """
        neighbours = self._distributor.neighbours
        comm = self._distributor.comm
        for i in [LEFT, RIGHT]:
            neighbour = neighbours[dim][i]
            # Send the owned region on this side; receive into the halo
            owned_region = self._get_view(OWNED, dim, i)
            halo_region = self._get_view(HALO, dim, i)
            # MPI buffers must be contiguous, so the owned view is copied
            sendbuf = np.ascontiguousarray(owned_region)
            recvbuf = np.ndarray(shape=halo_region.shape, dtype=self.dtype)
            # A None payload marks a send; a real buffer marks a receive
            self._in_flight.append((dim, i, recvbuf, comm.Irecv(recvbuf, neighbour)))
            self._in_flight.append((dim, i, None, comm.Isend(sendbuf, neighbour)))
    def __halo_end_exchange(self, dim):
        """End a halo exchange along a given :class:`Dimension`.

        Waits for all in-flight requests posted for `dim`, scattering
        received payloads into the halo, and drops them from
        ``self._in_flight``.
        """
        # Iterate over a copy since entries are removed while iterating
        for d, i, payload, req in list(self._in_flight):
            if d == dim:
                status = MPI.Status()
                req.Wait(status=status)
                # PROC_NULL means there was no actual neighbour on this side,
                # so there is nothing to scatter
                if payload is not None and status.source != MPI.PROC_NULL:
                    # The MPI.Request `req` originated from a `comm.Irecv`
                    # Now need to scatter the data to the right place
                    self._get_view(HALO, d, i)[:] = payload
                self._in_flight.remove((d, i, payload, req))
@property
def _arg_names(self):
"""Return a tuple of argument names introduced by this function."""
return (self.name,)
    @memoized_meth
    def _arg_defaults(self, alias=None):
        """
        Returns a map of default argument values defined by this symbol.
        :param alias: (Optional) name under which to store values.
        """
        key = alias or self
        # The data buffer is keyed under the (possibly aliased) name
        args = ReducerMap({key.name: self._data_buffer})
        # Collect default dimension arguments from all indices
        for i, s, o in zip(key.indices, self.shape, self.staggered):
            args.update(i._arg_defaults(start=0, size=s+o))
        # Add MPI-related data structures
        if self.grid is not None:
            args.update(self.grid._arg_defaults())
        return args
    def _arg_values(self, **kwargs):
        """
        Returns a map of argument values after evaluating user input. If no
        user input is provided, return a default value.
        :param kwargs: Dictionary of user-provided argument overrides.
        """
        # Add value override for own data if it is provided, otherwise
        # use defaults
        if self.name in kwargs:
            new = kwargs.pop(self.name)
            if isinstance(new, TensorFunction):
                # Set new values and re-derive defaults
                values = new._arg_defaults(alias=self).reduce_all()
            else:
                # We've been provided a pure-data replacement (array)
                values = {self.name: new}
                # Add value overrides for all associated dimensions;
                # the size excludes the domain offsets (halo/padding)
                for i, s, o in zip(self.indices, new.shape, self.staggered):
                    values.update(i._arg_defaults(size=s+o-sum(self._offset_domain[i])))
                # Add MPI-related data structures
                if self.grid is not None:
                    values.update(self.grid._arg_defaults())
        else:
            values = self._arg_defaults(alias=self).reduce_all()
        return values
def _arg_check(self, args, intervals):
"""
Check that ``args`` contains legal runtime values bound to ``self``.
:raises InvalidArgument: If, given the runtime arguments ``args``, an
out-of-bounds access will be performed.
"""
if self.name not in args:
raise InvalidArgument("No runtime value | |
None,
client_session_max_lifespan: Optional[pulumi.Input[str]] = None,
consent_required: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
direct_access_grants_enabled: Optional[pulumi.Input[bool]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
exclude_session_state_from_auth_response: Optional[pulumi.Input[bool]] = None,
extra_config: Optional[pulumi.Input[Mapping[str, Any]]] = None,
full_scope_allowed: Optional[pulumi.Input[bool]] = None,
implicit_flow_enabled: Optional[pulumi.Input[bool]] = None,
login_theme: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
pkce_code_challenge_method: Optional[pulumi.Input[str]] = None,
realm_id: Optional[pulumi.Input[str]] = None,
resource_server_id: Optional[pulumi.Input[str]] = None,
root_url: Optional[pulumi.Input[str]] = None,
service_account_user_id: Optional[pulumi.Input[str]] = None,
service_accounts_enabled: Optional[pulumi.Input[bool]] = None,
standard_flow_enabled: Optional[pulumi.Input[bool]] = None,
use_refresh_tokens: Optional[pulumi.Input[bool]] = None,
valid_redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
web_origins: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Client resources.
:param pulumi.Input[str] access_token_lifespan: The amount of time in seconds before an access token expires. This will override the default for the realm.
:param pulumi.Input[str] access_type: Specifies the type of client, which can be one of the following:
:param pulumi.Input[str] admin_url: URL to the admin interface of the client.
:param pulumi.Input['ClientAuthenticationFlowBindingOverridesArgs'] authentication_flow_binding_overrides: Override realm authentication flow bindings
:param pulumi.Input['ClientAuthorizationArgs'] authorization: When this block is present, fine-grained authorization will be enabled for this client. The client's `access_type` must be `CONFIDENTIAL`, and `service_accounts_enabled` must be `true`. This block has the following arguments:
:param pulumi.Input[bool] backchannel_logout_revoke_offline_sessions: Specifying whether a "revoke_offline_access" event is included in the Logout Token when the Backchannel Logout URL is used. Keycloak will revoke offline sessions when receiving a Logout Token with this event.
:param pulumi.Input[bool] backchannel_logout_session_required: When `true`, a sid (session ID) claim will be included in the logout token when the backchannel logout URL is used. Defaults to `true`.
:param pulumi.Input[str] backchannel_logout_url: The URL that will cause the client to log itself out when a logout request is sent to this realm. If omitted, no logout request will be sent to the client is this case.
:param pulumi.Input[str] base_url: Default URL to use when the auth server needs to redirect or link back to the client.
:param pulumi.Input[str] client_id: The Client ID for this client, referenced in the URI during authentication and in issued tokens.
:param pulumi.Input[str] client_offline_session_idle_timeout: Time a client session is allowed to be idle before it expires. Tokens are invalidated when a client session is expired. If not set it uses the standard SSO Session Idle value.
:param pulumi.Input[str] client_offline_session_max_lifespan: Max time before a client session is expired. Tokens are invalidated when a client session is expired. If not set, it uses the standard SSO Session Max value.
:param pulumi.Input[str] client_secret: The secret for clients with an `access_type` of `CONFIDENTIAL` or `BEARER-ONLY`. This value is sensitive and should be treated with the same care as a password. If omitted, this will be generated by Keycloak.
:param pulumi.Input[str] client_session_idle_timeout: Time a client offline session is allowed to be idle before it expires. Offline tokens are invalidated when a client offline session is expired. If not set it uses the Offline Session Idle value.
:param pulumi.Input[str] client_session_max_lifespan: Max time before a client offline session is expired. Offline tokens are invalidated when a client offline session is expired. If not set, it uses the Offline Session Max value.
:param pulumi.Input[bool] consent_required: When `true`, users have to consent to client access.
:param pulumi.Input[str] description: The description of this client in the GUI.
:param pulumi.Input[bool] direct_access_grants_enabled: When `true`, the OAuth2 Resource Owner Password Grant will be enabled for this client. Defaults to `false`.
:param pulumi.Input[bool] enabled: When `false`, this client will not be able to initiate a login or obtain access tokens. Defaults to `true`.
:param pulumi.Input[bool] exclude_session_state_from_auth_response: When `true`, the parameter `session_state` will not be included in OpenID Connect Authentication Response.
:param pulumi.Input[bool] full_scope_allowed: Allow to include all roles mappings in the access token.
:param pulumi.Input[bool] implicit_flow_enabled: When `true`, the OAuth2 Implicit Grant will be enabled for this client. Defaults to `false`.
:param pulumi.Input[str] login_theme: The client login theme. This will override the default theme for the realm.
:param pulumi.Input[str] name: The display name of this client in the GUI.
:param pulumi.Input[str] pkce_code_challenge_method: The challenge method to use for Proof Key for Code Exchange. Can be either `plain` or `S256` or set to empty value ``.
:param pulumi.Input[str] realm_id: The realm this client is attached to.
:param pulumi.Input[str] resource_server_id: (Computed) When authorization is enabled for this client, this attribute is the unique ID for the client (the same value as the `.id` attribute).
:param pulumi.Input[str] root_url: When specified, this URL is prepended to any relative URLs found within `valid_redirect_uris`, `web_origins`, and `admin_url`. NOTE: Due to limitations in the Keycloak API, when the `root_url` attribute is used, the `valid_redirect_uris`, `web_origins`, and `admin_url` attributes will be required.
:param pulumi.Input[str] service_account_user_id: (Computed) When service accounts are enabled for this client, this attribute is the unique ID for the Keycloak user that represents this service account.
:param pulumi.Input[bool] service_accounts_enabled: When `true`, the OAuth2 Client Credentials grant will be enabled for this client. Defaults to `false`.
:param pulumi.Input[bool] standard_flow_enabled: When `true`, the OAuth2 Authorization Code Grant will be enabled for this client. Defaults to `false`.
:param pulumi.Input[bool] use_refresh_tokens: If this is `true`, a refresh_token will be created and added to the token response. If this is `false` then no refresh_token will be generated. Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] valid_redirect_uris: A list of valid URIs a browser is permitted to redirect to after a successful login or logout. Simple
wildcards in the form of an asterisk can be used here. This attribute must be set if either `standard_flow_enabled` or `implicit_flow_enabled`
is set to `true`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] web_origins: A list of allowed CORS origins. `+` can be used to permit all valid redirect URIs, and `*` can be used to permit all origins.
"""
if access_token_lifespan is not None:
pulumi.set(__self__, "access_token_lifespan", access_token_lifespan)
if access_type is not None:
pulumi.set(__self__, "access_type", access_type)
if admin_url is not None:
pulumi.set(__self__, "admin_url", admin_url)
if authentication_flow_binding_overrides is not None:
pulumi.set(__self__, "authentication_flow_binding_overrides", authentication_flow_binding_overrides)
if authorization is not None:
pulumi.set(__self__, "authorization", authorization)
if backchannel_logout_revoke_offline_sessions is not None:
pulumi.set(__self__, "backchannel_logout_revoke_offline_sessions", backchannel_logout_revoke_offline_sessions)
if backchannel_logout_session_required is not None:
pulumi.set(__self__, "backchannel_logout_session_required", backchannel_logout_session_required)
if backchannel_logout_url is not None:
pulumi.set(__self__, "backchannel_logout_url", backchannel_logout_url)
if base_url is not None:
pulumi.set(__self__, "base_url", base_url)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if client_offline_session_idle_timeout is not None:
pulumi.set(__self__, "client_offline_session_idle_timeout", client_offline_session_idle_timeout)
if client_offline_session_max_lifespan is not None:
pulumi.set(__self__, "client_offline_session_max_lifespan", client_offline_session_max_lifespan)
if client_secret is not None:
pulumi.set(__self__, "client_secret", client_secret)
if client_session_idle_timeout is not None:
pulumi.set(__self__, "client_session_idle_timeout", client_session_idle_timeout)
if client_session_max_lifespan is not None:
pulumi.set(__self__, "client_session_max_lifespan", client_session_max_lifespan)
if consent_required is not None:
pulumi.set(__self__, "consent_required", consent_required)
if description is not None:
pulumi.set(__self__, "description", description)
if direct_access_grants_enabled is not None:
pulumi.set(__self__, "direct_access_grants_enabled", direct_access_grants_enabled)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if exclude_session_state_from_auth_response is not None:
pulumi.set(__self__, "exclude_session_state_from_auth_response", exclude_session_state_from_auth_response)
if extra_config is not None:
pulumi.set(__self__, "extra_config", extra_config)
if full_scope_allowed is not None:
pulumi.set(__self__, "full_scope_allowed", full_scope_allowed)
if implicit_flow_enabled is not None:
pulumi.set(__self__, "implicit_flow_enabled", implicit_flow_enabled)
if login_theme is not None:
pulumi.set(__self__, "login_theme", login_theme)
if name is not None:
pulumi.set(__self__, "name", name)
if pkce_code_challenge_method is not None:
pulumi.set(__self__, "pkce_code_challenge_method", pkce_code_challenge_method)
if realm_id is not None:
pulumi.set(__self__, "realm_id", realm_id)
if resource_server_id is not None:
pulumi.set(__self__, "resource_server_id", resource_server_id)
if root_url is not None:
pulumi.set(__self__, "root_url", root_url)
if service_account_user_id is not None:
pulumi.set(__self__, "service_account_user_id", service_account_user_id)
if service_accounts_enabled is not None:
pulumi.set(__self__, "service_accounts_enabled", service_accounts_enabled)
if standard_flow_enabled is not None:
pulumi.set(__self__, "standard_flow_enabled", standard_flow_enabled)
if use_refresh_tokens is not None:
pulumi.set(__self__, "use_refresh_tokens", use_refresh_tokens)
if valid_redirect_uris is not None:
pulumi.set(__self__, "valid_redirect_uris", valid_redirect_uris)
if web_origins is not None:
pulumi.set(__self__, "web_origins", web_origins)
    @property
    @pulumi.getter(name="accessTokenLifespan")
    def access_token_lifespan(self) -> Optional[pulumi.Input[str]]:
        """
        The amount of time in seconds before an access token expires. This will override the default for the realm.
        """
        # Delegate to Pulumi's typed property store; None means "not set"
        # (fall back to the realm default).
        return pulumi.get(self, "access_token_lifespan")
    @access_token_lifespan.setter
    def access_token_lifespan(self, value: Optional[pulumi.Input[str]]):
        # Write through to Pulumi's typed property store.
        pulumi.set(self, "access_token_lifespan", value)
    @property
    @pulumi.getter(name="accessType")
    def access_type(self) -> Optional[pulumi.Input[str]]:
        """
        Specifies the type of client, which can be one of the following:
        """
        # Delegate to Pulumi's typed property store; None means "not set".
        return pulumi.get(self, "access_type")
    @access_type.setter
    def access_type(self, value: Optional[pulumi.Input[str]]):
        # Write through to Pulumi's typed property store.
        pulumi.set(self, "access_type", value)
@property
@pulumi.getter(name="adminUrl")
def admin_url(self) -> Optional[pulumi.Input[str]]:
"""
URL to the admin interface of the client.
"""
return | |
domains
extend from TSS a basal region.
"""
gene_iterator = GTF.gene_iterator(iterator)
counter = E.Counter()
upstream, downstream = options.upstream, options.downstream
radius = options.radius
outfile = options.stdout
regions = []
####################################################################
# define basal regions for each gene
# take all basal regions per transcript and merge them
# Thus, the basal region of a gene might be larger than the sum
# of options.upstream + options.downstream
for gene in gene_iterator:
counter.genes += 1
is_negative_strand = Genomics.IsNegativeStrand(gene[0][0].strand)
lcontig = fasta.getLength(gene[0][0].contig)
regulons = []
transcript_ids = []
# collect every basal region per transcript
for transcript in gene:
counter.transcripts += 1
mi, ma = min([x.start for x in transcript]), max(
[x.end for x in transcript])
# add range to both sides of tss
if is_negative_strand:
interval = ma - options.downstream, ma + options.upstream
else:
interval = mi - options.upstream, mi + options.downstream
interval = (min(lcontig, max(0, interval[0])),
min(lcontig, max(0, interval[1])))
regulons.append(interval)
transcript_ids.append(transcript[0].transcript_id)
# take first/last entry
start, end = min(x[0] for x in regulons), max(x[1] for x in regulons)
gtf = GTF.Entry()
gtf.fromGTF(gene[0][0], gene[0][0].gene_id, gene[0][0].gene_id)
gtf.source = "greatdomain"
gtf.start, gtf.end = start, end
regions.append(gtf)
regions.sort(key=lambda x: (x.contig, x.start))
outf = IOTools.openFile("test.gff", "w")
for x in regions:
outf.write(str(x) + "\n")
outf.close()
####################################################################
# extend basal regions
regions.sort(key=lambda x: (x.contig, x.start))
# iterate within groups of overlapping basal regions
groups = list(GTF.iterator_overlaps(iter(regions)))
counter.groups = len(groups)
last_end = 0
reset = False
for region_id, group in enumerate(groups):
# collect basal intervals in group
intervals = [(x.start, x.end) for x in group]
        def overlapsBasalRegion(pos):
            # True if *pos* falls strictly inside one of the basal intervals,
            # excluding any interval that *pos* itself delimits (so an
            # interval never "overlaps" at its own boundary).
            for start, end in intervals:
                if start == pos or end == pos:
                    continue
                if start <= pos < end:
                    return True
                if start > pos:
                    # Early exit: intervals come from a group sorted by start
                    # (see caller) — presumably no later interval can contain
                    # pos; TODO confirm the sort invariant holds here.
                    return False
            return False
# deal with boundary cases - end of contig
if region_id < len(groups) - 1:
nxt = groups[region_id + 1]
if nxt[0].contig == group[0].contig:
next_start = min([x.start for x in nxt])
else:
next_start = fasta.getLength(group[0].contig)
reset = True
else:
next_start = fasta.getLength(group[0].contig)
reset = True
# last_end = basal extension of previous group
# next_start = basal_extension of next group
# extend region to previous/next group always extend
# dowstream, but upstream only extend if basal region of an
# interval is not already overlapping another basal region
# within the group
save_end = 0
for gtf in group:
save_end = max(save_end, gtf.end)
if gtf.strand == "+":
if not overlapsBasalRegion(gtf.start):
gtf.start = max(gtf.start - radius, last_end)
# always extend downstream
gtf.end = min(gtf.end + radius, next_start)
else:
# always extend downstream
gtf.start = max(gtf.start - radius, last_end)
if not overlapsBasalRegion(gtf.end):
gtf.end = min(gtf.end + radius, next_start)
outfile.write(str(gtf) + "\n")
counter.regulons += 1
if len(group) > 1:
counter.overlaps += len(group)
else:
counter.nonoverlaps += 1
if reset:
last_end = 0
reset = False
else:
last_end = save_end
E.info("%s" % str(counter))
def annotateTTS(iterator, fasta, options):
    """Annotate transcription termination sites (TTS) within iterator.

    For each gene, emit one GTF entry per transcript covering a window of
    ``options.promotor`` bases around the transcript's termination site
    (strand-aware, clamped to the contig bounds). With
    ``options.merge_promotors``, overlapping windows of a gene are merged
    and renumbered first.

    Entries specified with ``--restrict-source`` are annotated.
    """
    gene_iterator = GTF.gene_iterator(iterator)
    ngenes, ntranscripts, ntss = 0, 0, 0
    for gene in gene_iterator:
        ngenes += 1
        is_negative_strand = Genomics.IsNegativeStrand(gene[0][0].strand)
        lcontig = fasta.getLength(gene[0][0].contig)
        tts = []
        transcript_ids = []
        for transcript in gene:
            ntranscripts += 1
            # Transcript extent: leftmost start / rightmost end of its features
            mi, ma = min([x.start for x in transcript]), max(
                [x.end for x in transcript])
            transcript_ids.append(transcript[0].transcript_id)
            # The termination site is at the left end (mi) on the negative
            # strand, at the right end (ma) otherwise; windows are clamped
            # so they stay within [0, lcontig].
            # if tts is directly at start/end of contig, the tss will
            # be within an exon. otherwise, it is outside an exon.
            if is_negative_strand:
                tts.append(
                    (max(0, mi - options.promotor), max(options.promotor, mi)))
            else:
                tts.append(
                    (min(ma, lcontig - options.promotor),
                     min(lcontig, ma + options.promotor)))
        if options.merge_promotors:
            # merge the windows and rename - as sort order might have changed
            tts = Intervals.combine(tts)
            transcript_ids = ["%i" % (x + 1) for x in range(len(tts))]
        gtf = GTF.Entry()
        gtf.fromGTF(gene[0][0], gene[0][0].gene_id, gene[0][0].gene_id)
        gtf.source = "tts"
        # Pair each interval with its transcript id (lists are equal length),
        # replacing the original hand-maintained index counter
        for transcript_id, (start, end) in zip(transcript_ids, tts):
            gtf.start, gtf.end = start, end
            gtf.transcript_id = transcript_id
            options.stdout.write("%s\n" % str(gtf))
            ntss += 1
    if options.loglevel >= 1:
        options.stdlog.write(
            "# ngenes=%i, ntranscripts=%i, ntss=%i\n" %
            (ngenes, ntranscripts, ntss))
def annotateGenes(iterator, fasta, options):
    """annotate gene structures

    This method outputs intervals for first/middle/last exon/intron,
    UTRs and flanking regions.

    This method annotates per transcript. In order to achieve a unique
    tiling, use only a single transcript per gene and remove any overlap
    between genes.
    """
    gene_iterator = GTF.gene_iterator(iterator)
    ngenes, ntranscripts, nskipped = 0, 0, 0
    increment = options.increment
    introns_detail = "introns" in options.detail
    exons_detail = "exons" in options.detail
    for gene in gene_iterator:
        ngenes += 1
        is_negative_strand = Genomics.IsNegativeStrand(gene[0][0].strand)
        try:
            lcontig = fasta.getLength(gene[0][0].contig)
        except KeyError:
            # Contig absent from the genome file - skip the whole gene
            nskipped += 1
            continue
        results = []
        for transcript in gene:
            def _add(interval, anno):
                # Record one annotated interval for the current transcript
                gtf = GTF.Entry()
                gtf.contig = transcript[0].contig
                gtf.gene_id = transcript[0].gene_id
                gtf.transcript_id = transcript[0].transcript_id
                gtf.strand = transcript[0].strand
                gtf.feature = anno
                gtf.start, gtf.end = interval
                results.append(gtf)
            ntranscripts += 1
            exons = [(x.start, x.end)
                     for x in transcript if x.feature == "exon"]
            if len(exons) == 0:
                # BUGFIX: previously fell through after counting the skip and
                # raised an IndexError on ``exons[0]``; skip the transcript.
                nskipped += 1
                continue
            exons.sort()
            # Introns are the gaps between consecutive exons
            introns = []
            end = exons[0][1]
            for exon in exons[1:]:
                introns.append((end, exon[0]))
                end = exon[1]
            # add flanking windows of `increment` bases up to options.flank
            start, end = exons[0][0], exons[-1][1]
            upstream, downstream = [], []
            for x in range(0, options.flank, increment):
                upstream.append((start - increment, start))
                start -= increment
                downstream.append((end, end + increment))
                end += increment
            # remove out-of-bounds coordinates
            upstream = [x for x in upstream if x[0] >= 0]
            downstream = [x for x in downstream if x[1] <= lcontig]
            if is_negative_strand:
                # first/last and up/downstream are strand-relative
                exons.reverse()
                introns.reverse()
                upstream, downstream = downstream, upstream
            # add exons
            if exons_detail:
                _add(exons[0], "first_exon")
                if len(exons) > 1:
                    _add(exons[-1], "last_exon")
                for e in exons[1:-1]:
                    _add(e, "middle_exon")
            else:
                for e in exons:
                    _add(e, "exon")
            # add introns
            if introns_detail:
                if len(introns) > 0:
                    _add(introns[0], "first_intron")
                if len(introns) > 1:
                    _add(introns[-1], "last_intron")
                for i in introns[1:-1]:
                    _add(i, "middle_intron")
            else:
                for i in introns:
                    _add(i, "intron")
            for x, u in enumerate(upstream):
                _add(u, "upstream_%i" % (increment * (x + 1)))
            for x, u in enumerate(downstream):
                _add(u, "downstream_%i" % (increment * (x + 1)))
        # Merge intervals of the same feature; groupby requires the sort
        results.sort(key=lambda x: x.feature)
        cache = []
        for key, vals in itertools.groupby(results, key=lambda x: x.feature):
            v = list(vals)
            intervals = [(x.start, x.end) for x in v]
            intervals = Intervals.combine(intervals)
            for start, end in intervals:
                r = GTF.Entry()
                r.copy(v[0])
                r.start, r.end = start, end
                cache.append(r)
        cache.sort(key=lambda x: x.start)
        for r in cache:
            options.stdout.write("%s\n" % str(r))
    E.info("ngenes=%i, ntranscripts=%i, nskipped=%i\n" %
           (ngenes, ntranscripts, nskipped))
def main(argv=None):
if not argv:
argv = sys.argv
parser = E.OptionParser(
version="%prog version: $Id$",
usage=globals()["__doc__"])
parser.add_option("-g", "--genome-file", dest="genome_file", type="string",
help="filename with genome [default=%default].")
parser.add_option("-i", "--ignore-missing", dest="ignore_missing",
action="store_true",
help="Ignore transcripts on contigs that are not "
"in the genome-file [default=%default].")
parser.add_option("-s", "--restrict-source", dest="restrict_source",
type="choice",
choices=("protein_coding", "pseudogene", "lncRNA"),
help="restrict input by source [default=%default].")
parser.add_option("-m", "--method", dest="method", type="choice",
choices=("full", "genome", "exons",
"promotors", "tts",
"regulons", "tts-regulons",
"genes",
"territories", "tss-territories",
"great-domains",
),
help="method for defining segments [default=%default].")
parser.add_option(
"-r", "--territory-extension", dest="radius", type="int",
help="radius of a territory [default=%default].")
parser.add_option(
"-f", "--flank-size", dest="flank", type="int",
help="size of the flanking region next to a gene [default=%default].")
parser.add_option(
"--flank-increment-size", dest="increment", type="int",
help="size of increment in flank in genestructure annotation "
"[default=%default].")
parser.add_option(
"-p", "--promotor-size", dest="promotor", type="int",
help="size of a promotor region [default=%default].")
parser.add_option(
"-u", "--upstream-extension", dest="upstream", type="int",
help="size of region upstream of tss [default=%default].")
parser.add_option(
"-d", "--downstream-extension", dest="downstream", type="int",
help="size of region downstream of tss [default=%default].")
parser.add_option(
"--gene-detail", dest="detail", type="choice",
choices=("introns+exons", "exons", "introns"),
help="level of detail for gene structure annotation "
"[default=%default].")
parser.add_option(
"--merge-overlapping-promotors", dest="merge_promotors",
action="store_true",
help="merge overlapping promotors [default=%default].")
parser.add_option(
"--min-intron-length", dest="min_intron_length",
type="int",
help="minimum intron length. If the distance between two "
"consecutive exons is smaller, the region will be marked "
"'unknown' [default=%default].")
parser.add_option(
"--is-unsorted", dest="is_sorted", action="store_false",
help="sort input before processing. Otherwise, the input is assumed "
"to be sorted [default=%default].")
parser.set_defaults(
genome_file=None,
flank=1000,
increment=1000,
max_frameshift_length=4,
min_intron_length=30,
ignore_missing=False,
restrict_source=None,
method="genome",
radius=50000,
promotor=5000,
merge_promotors=False,
upstream=5000,
downstream=5000,
detail="exons",
is_sorted=True,
)
(options, args) = E.Start(parser)
if options.genome_file:
fasta = IndexedFasta.IndexedFasta(options.genome_file)
else:
raise ValueError("please specify a --genome-file")
if not options.restrict_source:
iterator = GTF.iterator(options.stdin)
elif options.restrict_source:
iterator = GTF.iterator_filtered(GTF.iterator(options.stdin),
source=options.restrict_source)
# elif options.method in ("promotors", "tts", "regulons"):
# iterator = GTF.iterator_filtered( GTF.iterator(options.stdin), source = | |
<filename>metaflow/metadata/metadata.py
import json
import os
import re
import time
from collections import namedtuple
from datetime import datetime
from metaflow.exception import MetaflowInternalError
from metaflow.util import get_username, resolve_identity
# A single data artifact produced by a task: logical name, datastore
# type/root, storage URL, python type and content hash.
DataArtifact = namedtuple('DataArtifact',
                          'name ds_type ds_root url type sha')
# One piece of metadata attached to a task: field name, value, type and tags.
MetaDatum = namedtuple('MetaDatum',
                       'field value type tags')
# Extracts the numeric attempt id from strings like "attempt_id:3".
attempt_id_re = re.compile(r"attempt_id:([0-9]+)")
class MetadataProviderMeta(type):
    """Metaclass giving every provider *class* a lazily computed ``INFO`` property.

    Reading ``INFO`` populates it from ``default_info()`` on first access;
    assigning to it routes the new value through ``compute_info()``.
    """

    def __new__(mcls, name, bases, namespace):
        return super(MetadataProviderMeta, mcls).__new__(
            mcls, name, bases, namespace)

    def __init__(cls, name, bases, namespace):
        # Each concrete provider class starts with no computed information.
        cls._INFO = None

    def _read_info(cls):
        # Lazily fill in the default on first access (or after a falsy value).
        if not cls._INFO:
            cls._INFO = cls.default_info()
        return cls._INFO

    def _write_info(cls, value):
        cls._INFO = cls.compute_info(value)

    INFO = property(_read_info, _write_info)
# From https://stackoverflow.com/questions/22409430/portable-meta-class-between-python2-and-python3
def with_metaclass(mcls):
    """Class decorator rebuilding the class under metaclass *mcls* (py2/py3 portable)."""
    def decorator(cls):
        # Re-create the class body, dropping the per-class descriptors that
        # type() would regenerate anyway.
        body = {key: value for key, value in vars(cls).items()
                if key not in ('__dict__', '__weakref__')}
        return mcls(cls.__name__, cls.__bases__, body)
    return decorator
@with_metaclass(MetadataProviderMeta)
class MetadataProvider(object):
    @classmethod
    def compute_info(cls, val):
        '''
        Compute the new information for this provider

        This hook is invoked by the metaclass when ``cls.INFO`` is assigned;
        the returned value becomes the new ``INFO``.
        This information will be printed by the client when describing this metadata provider

        Parameters
        ----------
        val : str
            Provider specific information used in computing the new information. For example, this
            can be a path.

        Returns
        -------
        str :
            Value to be set to INFO
        '''
        # Base implementation stores no information.
        return ''
    @classmethod
    def default_info(cls):
        '''
        Returns the default information for this provider

        Invoked lazily by the metaclass on the first read of ``cls.INFO`` when
        no value has been set. This should compute and return the default value
        for the information regarding this provider.
        For example, this can compute where the metadata is stored

        Returns
        -------
        str
            Value to be set by default in INFO
        '''
        # Base implementation has no default information.
        return ''
    def version(self):
        '''
        Returns the version of this provider

        Returns
        -------
        str
            Version of the provider; the base implementation reports an
            empty string.
        '''
        return ''
    def new_run_id(self, tags=None, sys_tags=None):
        '''
        Creates an ID and registers this new run.

        The run ID will be unique within a given flow.

        Parameters
        ----------
        tags : list, optional
            Tags to apply to this particular run, by default None
        sys_tags : list, optional
            System tags to apply to this particular run, by default None

        Returns
        -------
        int
            Run ID for the run

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete providers must override.
        '''
        raise NotImplementedError()
    def register_run_id(self, run_id, tags=None, sys_tags=None):
        '''
        Registers a run ID with this provider.

        NOTE: an earlier docstring called this a "no-op"; the base
        implementation actually raises NotImplementedError and must be
        overridden by concrete providers.

        Parameters
        ----------
        run_id : int
            Run ID for this run
        tags : list, optional
            Tags to apply to this particular run, by default None
        sys_tags : list, optional
            System tags to apply to this particular run, by default None

        Raises
        ------
        NotImplementedError
            Always, in this base class.
        '''
        raise NotImplementedError()
    def new_task_id(self, run_id, step_name, tags=None, sys_tags=None):
        '''
        Creates an ID and registers this new task.

        The task ID will be unique within a flow, run and step

        Parameters
        ----------
        run_id : int
            ID of the run
        step_name : string
            Name of the step
        tags : list, optional
            Tags to apply to this particular task, by default None
        sys_tags : list, optional
            System tags to apply to this particular task, by default None

        Returns
        -------
        int
            Task ID for the task

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete providers must override.
        '''
        raise NotImplementedError()
    def register_task_id(
            self, run_id, step_name, task_id, attempt=0, tags=None, sys_tags=None):
        '''
        Registers a task ID with this provider.

        NOTE: an earlier docstring called this a "no-op"; the base
        implementation actually raises NotImplementedError and must be
        overridden by concrete providers.

        Parameters
        ----------
        run_id : int or convertible to int
            Run ID for this run
        step_name : string
            Name of the step
        task_id : int
            Task ID
        attempt : int, optional
            Attempt number for this task, by default 0
        tags : list, optional
            Tags to apply to this particular run, by default []
        sys_tags : list, optional
            System tags to apply to this particular run, by default []

        Raises
        ------
        NotImplementedError
            Always, in this base class.
        '''
        raise NotImplementedError()
def get_runtime_environment(self, runtime_name):
'''
Returns a dictionary of environment variables to be set
Parameters
----------
runtime_name : string
Name of the runtime for which to get the environment
Returns
-------
dict[string] -> string
Environment variables from this metadata provider
'''
return {'METAFLOW_RUNTIME_NAME': runtime_name,
'USER': get_username()}
    def register_data_artifacts(self,
                                run_id,
                                step_name,
                                task_id,
                                attempt_id,
                                artifacts):
        '''
        Registers the fact that the data-artifacts are associated with
        the particular task.

        Artifacts produced by a given task can be associated with the
        task using this call

        Parameters
        ----------
        run_id : int
            Run ID for the task
        step_name : string
            Step name for the task
        task_id : int
            Task ID for the task
        attempt_id : int
            Attempt for the task
        artifacts : List of DataArtifact
            Artifacts associated with this task

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete providers must override.
        '''
        raise NotImplementedError()
    def register_metadata(self, run_id, step_name, task_id, metadata):
        '''
        Registers metadata with a task.

        Note that the same metadata can be registered multiple times for the same task (for example
        by multiple attempts). Internally, the timestamp of when the registration call is made is
        also recorded allowing the user to determine the latest value of the metadata.

        Parameters
        ----------
        run_id : int
            Run ID for the task
        step_name : string
            Step name for the task
        task_id : int
            Task ID for the task
        metadata : List of MetaDatum
            Metadata associated with this task

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete providers must override.
        '''
        raise NotImplementedError()
    def start_task_heartbeat(self, flow_id, run_id, step_name, task_id):
        # Optional hook; this base implementation does nothing.
        pass

    def start_run_heartbeat(self, flow_id, run_id):
        # Optional hook; this base implementation does nothing.
        pass

    def stop_heartbeat(self):
        # Optional hook to stop whatever the start_*_heartbeat hooks began;
        # this base implementation does nothing.
        pass
    @classmethod
    def _get_object_internal(
            cls, obj_type, obj_order, sub_type, sub_order, filters, attempt, *args):
        '''
        Return objects for the implementation of this class

        See get_object_internal for the description of what this function does

        Parameters
        ----------
        obj_type : string
            One of 'root', 'flow', 'run', 'step', 'task', 'artifact'
        obj_order: int
            Order in the list ['root', 'flow', 'run', 'step', 'task', 'artifact']
        sub_type : string
            Same as obj_type with the addition of 'metadata', 'self'
        sub_order:
            Order in the same list as the one for obj_order + ['metadata', 'self']
        filters : dict
            Dictionary with keys 'any_tags', 'tags' and 'system_tags'. If specified
            will return only objects that have the specified tags present. Filters
            are ANDed together so all tags must be present for the object to be returned.
        attempt : int or None
            If None, returns artifacts for latest *done* attempt and all metadata. Otherwise,
            returns artifacts for that attempt (existent, done or not) and *all* metadata

        NOTE: Unlike its external facing `get_object`, this function should
        return *all* metadata; the base class will properly implement the
        filter. For artifacts, this function should filter artifacts at
        the backend level.

        Return
        ------
        object or list :
            Depending on the call, the type of object return varies

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete providers must override.
        '''
        raise NotImplementedError()
def add_sticky_tags(self, tags=None, sys_tags=None):
'''
Adds tags to be added to every run and task
Tags can be added to record information about a run/task. Such tags can be specified on a
per run or task basis using the new_run_id/register_run_id or new_task_id/register_task_id
functions but can also be set globally using this function. Tags added here will be
added to every run/task created after this call is made.
Parameters
----------
tags : list, optional
Tags to add to every run/task, by default None
sys_tags : list, optional
System tags to add to every run/task, by default None
'''
if tags:
self.sticky_tags.update(tags)
if sys_tags:
self.sticky_sys_tags.update(sys_tags)
@classmethod
def get_object(cls, obj_type, sub_type, filters, attempt, *args):
'''Returns the requested object depending on obj_type and sub_type
obj_type can be one of 'root', 'flow', 'run', 'step', 'task',
or 'artifact'
sub_type describes the aggregation required and can be either:
'metadata', 'self' or any of obj_type provided that it is slotted below
the object itself. For example, if obj_type is 'flow', you can
specify 'run' to get all the runs in that flow.
A few special rules:
- 'metadata' is only allowed for obj_type 'task'
- For obj_type 'artifact', only 'self' is allowed
A few examples:
- To get a list of all flows:
- set obj_type to 'root' and sub_type to 'flow'
- To get a list of all tasks:
- set obj_type to 'root' and sub_type to 'task'
- To get a list of all artifacts in a task:
- set obj_type to 'task' and sub_type to 'artifact'
- To get information about a specific flow:
- set obj_type to 'flow' and sub_type to 'self'
Parameters
----------
obj_type : string
One of 'root', 'flow', 'run', 'step', 'task', 'artifact' or 'metadata'
sub_type : string
Same as obj_type with the addition of 'self'
filters : dict
Dictionary with keys 'any_tags', 'tags' and 'system_tags'. If specified
will return only objects that have the specified tags present. Filters
are ANDed together so all tags must be present for the object to be returned.
| |
from . import bits
from . import defs
from . import opcode
from . import (OrderedDict,
FRAME_TYPE_OBJECT,
FRAME_TYPE_LITERAL_FRAME,
FRAME_TYPE_BYTECODE_FRAME,
FRAME_TYPE_EXCEPTIONS_FRAME)
from . import (behavior_label,
cclass_label,
class_label,
cfun_label,
fun_label,
cmod_label,
mod_label,
closure_name)
from pdb import set_trace as br
#!(fnobj.current_bytecode_pos()):bpos or fnobj.update_line_mapping(bpos, ast)
def emitter(fn):
    """Decorator for emit methods: remember the bytecode position before
    emitting and map it to the AST node (the first positional argument)."""
    def wrapper(self, *args):
        node = args[0]
        pos_before = self.current_bytecode_pos()
        result = fn(self, *args)
        self.update_line_mapping(pos_before, node)
        return result
    return wrapper
class Entry(object):
    """Base for vmem-emittable entries: holds the emitted oop and hands out
    unique per-entry label numbers."""

    def __init__(self):
        self.oop = None           # set once fill() has emitted this entry
        self._label_counter = 0   # next unused label number

    def label_counter(self):
        """Return the next unused label number for this entry."""
        current = self._label_counter
        self._label_counter += 1
        return current
class Object(Entry):
    """A named top-level object to be emitted into vmem.

    Slots are recorded as ``{'type': ..., 'name': ..., 'value': ...}`` dicts;
    ``fill()`` uses slot 0's value as the object's leading label ref (its vt).
    """
    def __init__(self, name, imod):
        super(Object, self).__init__()
        self.name = name
        self.slots = []   # ordered slot descriptors (see add_slot_* methods)
        self.imod = imod  # owning internal module
    def label(self):
        return self.name
    def add_slot_literal_array(self, name, value):
        # only empty literal arrays are supported so far
        if len(value) > 0:
            raise Exception('todo')
        self.add_slot_empty_list(name)
    def add_slot_literal_dict(self, name, value):
        # only empty literal dicts are supported so far
        if len(value) > 0:
            raise Exception('todo')
        self.add_slot_empty_dict(name)
    def add_slot_ref(self, name, value):
        # slot referencing another labeled entry
        self.slots.append({'type': 'ref', 'name': name, 'value': value})
    def add_slot_literal_string(self, name, value):
        self.slots.append({'type': 'string', 'name': name, 'value': value})
    def add_slot_empty_dict(self, name):
        # TODO: this will become dict
        self.slots.append({'type': 'empty_dict', 'name': name})
    def add_slot_empty_list(self, name):
        # TODO: this will become list
        self.slots.append({'type': 'empty_list', 'name': name})
    def add_slot_literal_null(self, name):
        self.slots.append({'type': 'null', 'name': name})
    def add_slot_literal_num(self, name, value):
        self.slots.append({'type': 'int', 'name': name, 'value': value})
    def fill(self, vmem):
        """Emit this object into vmem; records and returns its oop."""
        # synth necessary objects: the first pass creates the literal objects
        # the slots point to, keyed by the slot's index within self.slots[1:]
        refs_to_literals = {}
        for idx, slot in enumerate(self.slots[1:]):
            if slot['type'] == 'string':
                refs_to_literals[idx] = vmem.append_string_instance(slot['value'])
            elif slot['type'] == 'empty_list':
                refs_to_literals[idx] = vmem.append_empty_list()
            elif slot['type'] == 'empty_dict':
                refs_to_literals[idx] = vmem.append_empty_dict()
            elif slot['type'] == 'dict':
                # method dictionary: emit each CompiledFunction, wrapped in a
                # Function bound to this object's module (sorted for stability)
                fun_dict = {}
                for name, cfun in sorted(slot['value'].items()):
                    cfun.fill(vmem)
                    fun_dict[name] = Function(self.imod, cfun)
                refs_to_literals[idx] = vmem.append_sym_dict_emiting_entries(fun_dict)
        # emit our object
        vmem.append_int(FRAME_TYPE_OBJECT)
        vmem.append_int(len(self.slots) * bits.WSIZE)
        # slot 0 provides the vt label; this append also names the object
        oop = vmem.append_label_ref(self.slots[0]['value'], self.name)
        # second pass: emit the remaining slots in declaration order
        for idx, slot in enumerate(self.slots[1:]):
            if slot['type'] == 'ref':
                vmem.append_label_ref(slot['value'])
            elif slot['type'] == 'null':
                vmem.append_null()
            elif slot['type'] == 'string':
                vmem.append_pointer_to(refs_to_literals[idx])
            elif slot['type'] == 'empty_list':
                vmem.append_pointer_to(refs_to_literals[idx])
            elif slot['type'] == 'empty_dict':
                vmem.append_pointer_to(refs_to_literals[idx])
            elif slot['type'] == 'dict':
                vmem.append_pointer_to(refs_to_literals[idx])
            elif slot['type'] == 'int':
                vmem.append_tagged_int(slot['value'])
            else:
                raise Exception('TODO')
        self.oop = oop
        return oop
## these need to know the CompiledFunction owner (which is Object_CompiledClass)
class Object_ObjectBehavior(Object):
    """Behavior side of a top-level object: functions registered here are
    owned by the Object_CompiledClass object."""

    def _method_dict_slot(self):
        """Return the 'dict' slot holding this behavior's methods, creating
        it at index 2 if absent (dict is slot in idx==2)."""
        existing = [s for s in self.slots if s['type'] == 'dict']
        if existing:
            return existing[0]
        # Fix: the slot's 'name' key was previously written as
        # {name: "dict"} (using the method name as the key) by mistake.
        dslot = {'type': 'dict', 'name': 'dict', 'value': {}}
        self.slots.insert(2, dslot)
        return dslot

    def new_ctor(self, name):
        """Create and register a constructor CompiledFunction under *name*."""
        owner = self.imod.object_by_name('Object_CompiledClass')
        dslot = self._method_dict_slot()
        fn = CompiledFunction(self.imod.cmod, owner, name, ctor=True)
        dslot['value'][name] = fn
        return fn

    def new_function(self, name, params):
        """Create and register a plain CompiledFunction under *name* taking *params*."""
        owner = self.imod.object_by_name('Object_CompiledClass')
        dslot = self._method_dict_slot()
        fn = CompiledFunction(self.imod.cmod, owner, name, params)
        dslot['value'][name] = fn
        return fn
class Object_Object(Object):
    """Instance side of a top-level object: functions registered here are
    owned by the object itself."""

    def new_function(self, name, params):
        """Create and register a CompiledFunction under *name* in this
        object's 'dict' slot (created at index 2 on first use)."""
        existing = [s for s in self.slots if s['type'] == 'dict']
        if existing:
            dslot = existing[0]
        else:
            # Fix: the slot's 'name' key was previously written as
            # {name: "dict"} (using the method name as the key) by mistake.
            dslot = {'type': 'dict', 'name': 'dict', 'value': {}}
            self.slots.insert(2, dslot)  # dict is slot in idx==2
        fn = CompiledFunction(self.imod.cmod, self, name, params)
        dslot['value'][name] = fn
        return fn
class Behavior(Entry):
    """The class-side vt of a class: its own method dictionary plus a
    delegate link to the parent behavior."""
    def __init__(self, name, parent_name): #, dictionary):
        super(Behavior, self).__init__()
        self.name = name
        self.parent_name = parent_name
        self.parent_label = behavior_label(parent_name)  # delegate's label
        self.dictionary = {}  # name -> Function: this behavior's own methods
        # if parent_name != 'Object':
        #     br()
        #     raise Exception('TODO')
    def label(self):
        return behavior_label(self.name)
    def fill(self, vmem):
        """Emit the method dict, then the 4-word behavior frame; return the oop."""
        oop_dict = vmem.append_sym_dict_emiting_entries(self.dictionary)
        vmem.append_int(FRAME_TYPE_OBJECT)
        vmem.append_int(4 * bits.WSIZE)
        oop = vmem.append_label_ref('Behavior', self.label()) # vt
        vmem.append_label_ref(self.parent_label) # delegate
        vmem.append_pointer_to(oop_dict) # dict: "own methods"
        vmem.append_int(256) # size: 256 = dummy flag: "I am a behavior. no more fields after this"
        # no compiled_class
        self.oop = oop
        return oop
class Class(Entry):
    """Runtime class object pairing a CompiledClass with its Behavior (vt)."""
    def __init__(self, imod, cclass):
        super(Class, self).__init__()
        self.imod = imod
        self.behavior = Behavior(cclass.name, cclass.super_name)
        self.cclass = cclass
        self.dictionary = {}  # name -> Function: instance-side methods
    def label(self):
        return class_label(self.cclass.name)
    def new_instance_method(self, name):
        # instance methods live in this class' own dictionary
        cfun = self.cclass.new_instance_method(name)
        fun = Function(self.imod, cfun)
        self.dictionary[name] = fun
        return fun
    def new_class_method(self, name):
        # class-side methods live in the behavior's dictionary
        cfun = self.cclass.new_class_method(name)
        fun = Function(self.imod, cfun)
        self.behavior.dictionary[name] = fun
        return fun
    def new_ctor(self, name):
        # constructors are class-side as well
        cfun = self.cclass.new_ctor(name)
        fun = Function(self.imod, cfun)
        self.behavior.dictionary[name] = fun
        return fun
    def fill(self, vmem):
        """Emit behavior, method dict and the 5-word class frame; return the oop."""
        oop_vt = self.behavior.fill(vmem)
        oop_dict = vmem.append_sym_dict_emiting_entries(self.dictionary)
        vmem.append_int(FRAME_TYPE_OBJECT)
        vmem.append_int(5 * bits.WSIZE)
        # oop = vmem.append_label_ref(behavior_label(self.cclass.name), self.label()) # vt
        oop = vmem.append_pointer_to(oop_vt, self.label()) # vt
        vmem.append_label_ref(class_label(self.cclass.super_name)) # delegate
        vmem.append_pointer_to(oop_dict) # dict: "methods"
        vmem.append_int(len(self.cclass.fields)) # payload
        vmem.append_label_ref(cclass_label(self.cclass.name)) # compiled_class
        # vmem.append_pointer_to(self.cclass.oop) # <-
        self.oop = oop
        return oop
class CompiledClass(Entry):
    """Compile-time description of a class: name, superclass, fields and
    its instance/class method tables."""
    def __init__(self, cmod, name, super_name, fields):
        super(CompiledClass, self).__init__()
        self.cmod = cmod              # owning compiled module
        self.name = name
        self.super_name = super_name
        self.fields = fields          # instance field names
        self.instance_methods = {}    # name -> CompiledFunction
        self.class_methods = {}       # name -> CompiledFunction (incl. ctors)
    def label(self):
        return cclass_label(self.name) #cmod.label() + '_' + self.name + "_CompiledClass"
    def new_ctor(self, name):
        # Constructors are stored with the class-side methods.
        # NOTE(review): unlike new_instance_method, duplicates silently
        # overwrite here — confirm that is intended.
        fn = CompiledFunction(self.cmod, self, name, ctor=True)
        self.class_methods[name] = fn
        return fn
    def new_instance_method(self, name):
        fn = CompiledFunction(self.cmod, self, name)
        if name in self.instance_methods:
            raise Exception("duplicated method: " + name + " in " + self.name)
        self.instance_methods[name] = fn
        return fn
    def new_class_method(self, name):
        # NOTE(review): no duplicate check here, unlike new_instance_method.
        fn = CompiledFunction(self.cmod, self, name)
        self.class_methods[name] = fn
        return fn
    def fill(self, vmem):
        """Emit this compiled class (8-word frame) into vmem; return its oop."""
        # vt: CompiledClass
        # delegate: ...
        # module: ...
        # name: ...
        # super_class_name: ...
        # fields
        # methods
        # class_methods ...
        delegate = vmem.append_object_instance()
        oop_name = vmem.append_string_instance(self.name)
        oop_super = vmem.append_string_instance(self.super_name)
        oop_fields = vmem.append_list_of_strings(self.fields)
        oop_methods = vmem.append_sym_dict_emiting_entries(self.instance_methods)
        oop_class_methods = vmem.append_sym_dict_emiting_entries(self.class_methods)
        vmem.append_int(FRAME_TYPE_OBJECT)
        vmem.append_int(8 * bits.WSIZE)
        oop = vmem.append_external_ref('CompiledClass', self.label()) # vt: CompiledClass
        vmem.append_pointer_to(delegate)
        vmem.append_pointer_to(oop_name)
        vmem.append_pointer_to(oop_super)
        vmem.append_label_ref(self.cmod.label()) ####
        vmem.append_pointer_to(oop_fields)
        vmem.append_pointer_to(oop_methods)
        vmem.append_pointer_to(oop_class_methods)
        self.oop = oop
        return oop
class VariableStorage(object):
    # requirements:
    #
    # -add: requires current scope only (to check redeclaration)
    # -index: requires stack of vars to lookup lexically enclosed vars
    #    ie. sibling closures with the same var name should use different locations
    #    for it.
    # -size/env_offset: requires accumulated scopes
    def __init__(self, cfun, outer_cfun=None, storage=None):
        # When `storage` is given this scope shares the variable table of the
        # enclosing function, so closure variables get distinct slots.
        self.outer_cfun = outer_cfun
        self.parent_storage = storage
        if storage:
            self.variables = storage.variables
        else:
            self.variables = OrderedDict()
        self.variables[cfun] = []

    def is_visible(self, cfun, name):
        """True if `name` is declared in `cfun`'s scope or any enclosing one."""
        # list(...) keeps this working on Python 3, where dict views are
        # neither indexable nor sliceable (the original py2-only code did
        # `self.variables.keys().index(cfun)` / sliced `.values()`).
        scopes = list(self.variables.keys())
        idx = scopes.index(cfun)
        return name in self._flat(list(self.variables.values())[:idx + 1])

    def add_names(self, cfun, names):
        for name in names:
            self.add(cfun, name)

    def add(self, cfun, name):
        """Declare `name` in `cfun`'s scope; raises on redeclaration."""
        if name in self.variables[cfun]:
            raise Exception('redeclaration of ' + name + " in " + cfun.name)
        self.variables[cfun].append(name)
        return self.index(cfun, name)

    def index(self, cfun, name):
        return self._index(cfun, name)

    def _index(self, cfun, name):
        # Returns a thunk: offsets may still shift while later declarations
        # are added to enclosing scopes, so resolution is deferred.
        if name not in self.variables[cfun]:
            if self.parent_storage is None:
                raise Exception("Undeclared " + name)
            else:
                return self.parent_storage.index(self.outer_cfun, name)
        else:
            return lambda: self.env_offset(cfun) + self.variables[cfun].index(name)

    def total(self):
        """Total number of variables across all accumulated scopes."""
        return len(self._flat(self.variables.values()))

    def _flat(self, lst):
        # Flatten a list of lists; avoids the py2-only builtin `reduce`.
        return [name for scope in lst for name in scope]

    def env_offset(self, cfun):
        """Offset of `cfun`'s first variable within the flattened env table."""
        scopes = list(self.variables.keys())
        idx = scopes.index(cfun)
        return len(self._flat(list(self.variables.values())[:idx]))

    def env_table(self):
        return self._flat(self.variables.values())
class CompiledFunction(Entry):
def __init__(self, cmod, owner, name, params = [], ctor=False, env_storage=None, is_top_level=True, outer_cfun=None, top_level_cfun=None):
super(CompiledFunction, self).__init__()
self.cmod = cmod
self.name = name
self.params = params
self.is_ctor = ctor
self.is_prim = False
self.prim_name = ''
self.owner = owner
self.is_top_level = is_top_level
self.outer_cfun = outer_cfun
self.top_level_cfun = top_level_cfun
self.text = ''
self.start_line = -1
self.line_mapping = {}
self.location_mapping = {}
if env_storage:
self.has_env = True
self.var_declarations = VariableStorage(self, outer_cfun, env_storage)
else:
self.has_env = False
self.var_declarations = VariableStorage(self)
self.var_declarations.add_names(self, params)
self.literal_frame = []
self.bytecodes = opcode.Bytecodes()
self.exceptions_frame = []
self.accessor_flag = 0
self.accessor_field = 0
self.closures = []
self.should_wrap_catch_for_non_local_return = False
self._label = None
self.var_arg = False
    def set_text(self, text):
        # Full source text of the function.
        self.text = text
    def set_line(self, head):
        # Record the source line where this function starts.
        self.start_line = head.start_line
    def set_params(self, params):
        # Declare the formal parameters as the first local variables.
        self.params = params
        self.var_declarations.add_names(self, params)
    def set_vararg(self, name):
        # NOTE(review): only the flag is recorded; `name` is ignored — confirm.
        self.var_arg = True
    def declare_var(self, name):
        self.var_declarations.add(self, name)
    def declare_vars(self, names):
        self.var_declarations.add_names(self, names)
    def set_getter(self, idx):
        self.accessor_flag = 1 # normal=0/getter=1/setter=2
        self.accessor_field = idx
    def body_processor(self):
        # The function itself accumulates its body's bytecode.
        return self
    def uses_env(self, val):
        self.has_env = val
def set_primitive(self, prim_name):
self.prim_name = prim_name
self.is_prim | |
<reponame>ivis-tsukioka/datalad
# emacs: -*- mode: python-mode; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil; coding: utf-8 -*-
# ex: set sts=4 ts=4 sw=4 et:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the datalad package for the
# copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Test the thread based runner (aka. non asyncio based runner).
"""
import asyncio
import os
import queue
import signal
import subprocess
import sys
from itertools import count
from time import sleep
from typing import (
List,
Optional,
)
from unittest.mock import (
patch,
MagicMock,
)
from datalad.tests.utils import (
assert_false,
assert_raises,
assert_true,
eq_,
known_failure_osx,
known_failure_windows,
with_tempfile,
)
from datalad.utils import on_windows
from .. import (
NoCapture,
Protocol,
Runner,
StdOutCapture,
StdOutErrCapture,
)
from ..nonasyncrunner import (
IOState,
ThreadedRunner,
run_command,
)
from ..protocol import (
GeneratorMixIn,
)
from ..runnerthreads import (
ReadThread,
WriteThread,
)
from .utils import py2cmd
# Protocol classes used for a set of generator tests later
class GenStdoutStderr(GeneratorMixIn, StdOutErrCapture):
    """Generator-style protocol capturing stdout and stderr (for the
    generator tests below)."""
    def __init__(self,
                 done_future=None,
                 encoding=None):
        # Initialize both bases explicitly; GeneratorMixIn takes no arguments.
        StdOutErrCapture.__init__(
            self,
            done_future=done_future,
            encoding=encoding)
        GeneratorMixIn.__init__(self)

    def timeout(self, fd: Optional[int]) -> bool:
        # Report the timeout as handled for any file descriptor (incl. None).
        return True
class GenNothing(GeneratorMixIn, NoCapture):
    """Generator-style protocol that captures no output (for the generator
    tests below)."""
    def __init__(self,
                 done_future=None,
                 encoding=None):
        # Initialize both bases explicitly; GeneratorMixIn takes no arguments.
        NoCapture.__init__(
            self,
            done_future=done_future,
            encoding=encoding)
        GeneratorMixIn.__init__(self)
def test_subprocess_return_code_capture():
    """A signal sent right after startup kills the child and is reported as a
    negative return code (on POSIX)."""

    class KillProtocol(Protocol):
        # Sends a signal to the subprocess as soon as it starts and records
        # lifecycle callbacks in a shared result pool.
        proc_out = True
        proc_err = True

        def __init__(self, signal_to_send: int, result_pool: dict):
            super().__init__()
            self.signal_to_send = signal_to_send
            self.result_pool = result_pool

        def connection_made(self, process):
            super().connection_made(process)
            process.send_signal(self.signal_to_send)

        def connection_lost(self, exc):
            self.result_pool["connection_lost_called"] = (True, exc)

        def process_exited(self):
            self.result_pool["process_exited_called"] = True

    # windows doesn't support SIGINT but would need a Ctrl-C
    signal_to_send = signal.SIGTERM if on_windows else signal.SIGINT
    result_pool = dict()
    # Long-sleeping command that only terminates because of the signal.
    result = run_command(["waitfor", "/T", "10000", "TheComputerTurnsIntoATulip"]
                         if on_windows
                         else ["sleep", "10000"],
                         KillProtocol,
                         None,
                         {
                             "signal_to_send": signal_to_send,
                             "result_pool": result_pool
                         },
                         exception_on_error=False)
    if not on_windows:
        # this one specifically tests the SIGINT case, which is not supported
        # on windows
        eq_(result["code"], -signal_to_send)
    assert_true(result_pool["connection_lost_called"][0])
    assert_true(result_pool["process_exited_called"])
def test_interactive_communication():
    """Drive an interactive `python -i` session through a protocol that
    writes to stdin in response to received output."""

    class BidirectionalProtocol(Protocol):
        proc_out = True
        proc_err = True

        def __init__(self, result_pool: dict):
            super().__init__()
            self.state = 0
            self.result_pool = result_pool

        def connection_made(self, process):
            super().connection_made(process)
            os.write(self.process.stdin.fileno(), b"1 + 1\n")

        def connection_lost(self, exc):
            self.result_pool["connection_lost_called"] = True

        def process_exited(self):
            self.result_pool["process_exited_called"] = True

        def pipe_data_received(self, fd, data):
            super().pipe_data_received(fd, data)
            # NOTE(review): these are two independent `if`s, so the first
            # received chunk advances state 0 -> 1 and then immediately sends
            # exit(0) as well — confirm this cascade is intended.
            if self.state == 0:
                self.state += 1
                os.write(self.process.stdin.fileno(), b"2 ** 3\n")
            if self.state == 1:
                self.state += 1
                os.write(self.process.stdin.fileno(), b"exit(0)\n")

    result_pool = dict()
    result = run_command([sys.executable, "-i"],
                         BidirectionalProtocol,
                         stdin=subprocess.PIPE,
                         protocol_kwargs={
                             "result_pool": result_pool
                         })
    # Expect the evaluation results of "1 + 1" and "2 ** 3".
    lines = [line.strip() for line in result["stdout"].splitlines()]
    eq_(lines, ["2", "8"])
    # NOTE(review): the second argument is assert_true's message parameter,
    # passing True here looks like a leftover; assert_true(x) would suffice.
    assert_true(result_pool["connection_lost_called"], True)
    assert_true(result_pool["process_exited_called"], True)
def test_blocking_thread_exit():
    """A ReadThread honors request_exit() even while blocked on a read."""
    read_queue = queue.Queue()
    (read_descriptor, write_descriptor) = os.pipe()
    read_file = os.fdopen(read_descriptor, "rb")
    read_thread = ReadThread(
        identifier="test thread",
        user_info=read_descriptor,
        source=read_file,
        destination_queue=read_queue,
        signal_queues=[]
    )
    read_thread.start()
    os.write(write_descriptor, b"some data")
    assert_true(read_thread.is_alive())
    identifier, state, data = read_queue.get()
    eq_(data, b"some data")
    read_thread.request_exit()
    # Check the blocking part: the thread stays alive while blocked in read
    sleep(.3)
    assert_true(read_thread.is_alive())
    # Check actual exit, we will not get
    # "more data" when exit was requested,
    # because the thread will not attempt
    # a write
    os.write(write_descriptor, b"more data")
    read_thread.join()
    # (removed a leftover debug print of read_queue.queue)
    assert_true(read_queue.empty())
def test_blocking_read_exception_catching():
    """A ReadThread reports EOF (data=None) when the write side is closed."""
    read_queue = queue.Queue()
    (read_descriptor, write_descriptor) = os.pipe()
    read_file = os.fdopen(read_descriptor, "rb")
    read_thread = ReadThread(
        identifier="test thread",
        user_info=read_descriptor,
        source=read_file,
        destination_queue=read_queue,
        # EOF/None is delivered via the signal queue, here the same queue
        signal_queues=[read_queue]
    )
    read_thread.start()
    os.write(write_descriptor, b"some data")
    assert_true(read_thread.is_alive())
    identifier, state, data = read_queue.get()
    eq_(data, b"some data")
    # Closing the write end ends the blocked read ...
    os.close(write_descriptor)
    read_thread.join()
    # ... which the thread reports as a final None element.
    identifier, state, data = read_queue.get()
    eq_(data, None)
def test_blocking_read_closing():
    # Expect that a reader thread exits when os.read throws an error.
    class FakeFile:
        # Minimal file stand-in; fileno() is invalid on purpose.
        def fileno(self):
            return -1

        def close(self):
            pass

    def fake_read(*args):
        raise ValueError("test exception")

    read_queue = queue.Queue()
    with patch("datalad.runner.runnerthreads.os.read") as read:
        read.side_effect = fake_read
        read_thread = ReadThread(
            identifier="test thread",
            user_info=None,
            source=FakeFile(),
            destination_queue=None,
            signal_queues=[read_queue])
        read_thread.start()
        read_thread.join()
    # The failure is reported as a final None on the signal queue.
    identifier, state, data = read_queue.get()
    eq_(data, None)
def test_blocking_write_exception_catching():
    # Expect that a blocking writer catches exceptions and exits gracefully.
    write_queue = queue.Queue()
    signal_queue = queue.Queue()
    (read_descriptor, write_descriptor) = os.pipe()
    # NOTE(review): the write end is fdopen-ed with mode "rb"; presumably the
    # thread writes via the raw descriptor so the mode is irrelevant — confirm.
    write_file = os.fdopen(write_descriptor, "rb")
    write_thread = WriteThread(
        identifier="test thread",
        user_info=write_descriptor,
        source_queue=write_queue,
        destination=write_file,
        signal_queues=[signal_queue]
    )
    write_thread.start()
    write_queue.put(b"some data")
    data = os.read(read_descriptor, 1024)
    eq_(data, b"some data")
    # Closing both ends makes the next write fail; the thread must exit cleanly.
    os.close(read_descriptor)
    os.close(write_descriptor)
    write_queue.put(b"more data")
    write_thread.join()
    # Exit is signaled as (descriptor, ok, None).
    eq_(signal_queue.get(), (write_descriptor, IOState.ok, None))
def test_blocking_writer_closing():
    # Expect that a blocking writer closes its file when `None` is sent to it.
    write_queue = queue.Queue()
    signal_queue = queue.Queue()
    (read_descriptor, write_descriptor) = os.pipe()
    # NOTE(review): mode "rb" on the write end — presumably irrelevant since
    # the thread writes via the raw descriptor; confirm.
    write_file = os.fdopen(write_descriptor, "rb")
    write_thread = WriteThread(
        identifier="test thread",
        user_info=write_descriptor,
        source_queue=write_queue,
        destination=write_file,
        signal_queues=[signal_queue]
    )
    write_thread.start()
    write_queue.put(b"some data")
    data = os.read(read_descriptor, 1024)
    eq_(data, b"some data")
    # `None` is the close sentinel; the thread must exit and signal it.
    write_queue.put(None)
    write_thread.join()
    eq_(signal_queue.get(), (write_descriptor, IOState.ok, None))
def test_blocking_writer_closing_timeout_signal():
    """Writer must not block forever on a full signal queue."""
    source_queue = queue.Queue()
    # Bounded, pre-filled queue: the thread cannot enqueue its exit signal.
    signal_queue = queue.Queue(1)
    signal_queue.put("This is data")
    read_fd, write_fd = os.pipe()
    destination = os.fdopen(write_fd, "rb")
    writer = WriteThread(
        identifier="test thread",
        user_info=write_fd,
        source_queue=source_queue,
        destination=destination,
        signal_queues=[signal_queue])
    writer.start()
    source_queue.put(b"some data")
    eq_(os.read(read_fd, 1024), b"some data")
    source_queue.put(None)
    writer.join()
    # The pre-filled element is still the only content of the queue.
    eq_(signal_queue.get(), "This is data")
def test_blocking_writer_closing_no_signal():
    """Writer must exit even when it cannot place a shutdown signal."""
    source_queue = queue.Queue()
    # Bounded, pre-filled queue: no room for the writer's exit signal.
    signal_queue = queue.Queue(1)
    signal_queue.put("This is data")
    read_fd, write_fd = os.pipe()
    destination = os.fdopen(write_fd, "rb")
    writer = WriteThread(
        identifier="test thread",
        user_info=write_fd,
        source_queue=source_queue,
        destination=destination,
        signal_queues=[signal_queue])
    writer.start()
    source_queue.put(b"some data")
    eq_(os.read(read_fd, 1024), b"some data")
    # Shutdown request; join() succeeding is the actual assertion here.
    source_queue.put(None)
    writer.join()
def test_inside_async():
    """Runner.run must work when invoked from within a coroutine."""
    async def run_echo():
        runner = Runner()
        cmd = (["cmd.exe", "/c"] if on_windows else []) + ["echo", "abc"]
        return runner.run(cmd, StdOutCapture)

    loop = asyncio.get_event_loop()
    result = loop.run_until_complete(run_echo())
    eq_(result["stdout"], "abc" + os.linesep)
# Both Windows and OSX suffer from wrapt's object proxy insufficiency
# NotImplementedError: object proxy must define __reduce_ex__()
@known_failure_osx
@known_failure_windows
@with_tempfile(mkdir=True)
@with_tempfile
def test_popen_invocation(src_path, dest_path):
    """Fetching data in a child process must not hang.

    Regression test for https://github.com/ReproNim/testkraken/issues/93
    """
    from multiprocessing import Process
    from datalad.api import clone
    from datalad.distribution.dataset import Dataset

    origin = Dataset(src_path).create()
    (origin.pathobj / "file.dat").write_bytes(b"\000")
    origin.save(message="got data")
    dest = clone(source=src_path, path=dest_path)
    child = Process(target=dest.get, kwargs={"path": 'file.dat'})
    child.start()
    # Five seconds are plenty; a hung child means the regression is back.
    child.join(5.0)
    assert_false(child.is_alive(), "Child is stuck!")
def test_timeout():
    """Short timeouts on a long-running process trigger timeout callbacks."""

    class TestProtocol(StdOutErrCapture):
        # Collected on the class so the test body can inspect them afterwards.
        received_timeouts = list()

        def __init__(self):
            StdOutErrCapture.__init__(self)
            self.counter = count()

        def timeout(self, fd: Optional[int]):
            TestProtocol.received_timeouts.append((self.counter.__next__(), fd))

    cmd = (["waitfor", "/T", "1", "TheComputerTurnsIntoATulip"]
           if on_windows
           else ["sleep", "1"])
    run_command(cmd, stdin=None, protocol=TestProtocol, timeout=.1)
    assert_true(len(TestProtocol.received_timeouts) > 0)
    # Timeouts may stem from stdout (1), stderr (2), or the process (None).
    assert_true(all(entry[1] in (1, 2, None)
                    for entry in TestProtocol.received_timeouts))
def test_timeout_nothing():
    """Without captured streams, only process timeouts are reported."""

    class TestProtocol(NoCapture):
        def __init__(self, timeout_queue: List):
            NoCapture.__init__(self)
            self.timeout_queue = timeout_queue
            self.counter = count()

        def timeout(self, fd: Optional[int]) -> bool:
            self.timeout_queue.append(fd)
            return False

    stdin_queue = queue.Queue()
    for _ in range(12):
        stdin_queue.put(b"\x00" * 1024)
    stdin_queue.put(None)

    recorded = []
    run_command(
        py2cmd("import time; time.sleep(.4)\n"),
        stdin=stdin_queue,
        protocol=TestProtocol,
        timeout=.1,
        protocol_kwargs=dict(timeout_queue=recorded))
    # Only process timeouts (fd is None), and at least one of them.
    assert_true(all(fd is None for fd in recorded))
    assert_true(len(recorded) > 0)
def test_timeout_stdout_stderr():
    """Expect at least one timeout each for stdout and stderr."""

    class TestProtocol(StdOutErrCapture):
        def __init__(self, timeout_queue: List):
            StdOutErrCapture.__init__(self)
            self.timeout_queue = timeout_queue
            self.counter = count()

        def timeout(self, fd: Optional[int]) -> bool:
            self.timeout_queue.append((self.counter.__next__(), fd))
            return False

    stdin_queue = queue.Queue()
    for _ in range(12):
        stdin_queue.put(b"\x00" * 1024)
    stdin_queue.put(None)

    recorded = []
    run_command(
        py2cmd("import time;time.sleep(.5)\n"),
        stdin=stdin_queue,
        protocol=TestProtocol,
        timeout=.1,
        protocol_kwargs=dict(timeout_queue=recorded))
    # stdout is fd 1, stderr is fd 2; more timeouts than that may occur.
    expected_sources = (1, 2)
    assert_true(len(recorded) >= len(expected_sources))
    for source in expected_sources:
        assert_true(any(entry[1] == source for entry in recorded))
def test_timeout_process():
    """Expect stream timeouts on a slow process.

    NOTE(review): this test is effectively identical to
    test_timeout_stdout_stderr and, despite its name, never asserts a
    process timeout (an entry with fd ``None``) — confirm the intent.
    """

    class TestProtocol(StdOutErrCapture):
        def __init__(self, timeout_queue: List):
            StdOutErrCapture.__init__(self)
            self.timeout_queue = timeout_queue
            self.counter = count()

        def timeout(self, fd: Optional[int]) -> bool:
            self.timeout_queue.append((self.counter.__next__(), fd))
            return False

    stdin_queue = queue.Queue()
    for _ in range(12):
        stdin_queue.put(b"\x00" * 1024)
    stdin_queue.put(None)

    recorded = []
    run_command(
        py2cmd("import time;time.sleep(.5)\n"),
        stdin=stdin_queue,
        protocol=TestProtocol,
        timeout=.1,
        protocol_kwargs=dict(timeout_queue=recorded))
    # At least one timeout each for stdout (1) and stderr (2).
    expected_sources = (1, 2)
    assert_true(len(recorded) >= len(expected_sources))
    for source in expected_sources:
        assert_true(any(entry[1] == source for entry in recorded))
def test_exit_3():
    """The process must be terminated after the result generator exits."""
    runner = ThreadedRunner(cmd=["sleep", "4"],
                            stdin=None,
                            protocol_class=GenStdoutStderr,
                            timeout=.5,
                            exception_on_error=False)
    # Drain the generator; afterwards poll() must report an exit code.
    tuple(runner.run())
    assert_true(runner.process.poll() is not None)
def test_exit_4():
    """Same as test_exit_3, but with a protocol that captures nothing."""
    runner = ThreadedRunner(cmd=["sleep", "4"],
                            stdin=None,
                            protocol_class=GenNothing,
                            timeout=.5)
    tuple(runner.run())
    assert_true(runner.process.poll() is not None)
def test_generator_throw():
    """throw() on the result generator must propagate the exception."""
    runner = ThreadedRunner(cmd=["sleep", "4"],
                            stdin=None,
                            protocol_class=GenNothing,
                            timeout=.5)
    result_generator = runner.run()
    assert_raises(ValueError,
                  result_generator.throw,
                  ValueError,
                  ValueError("abcdefg"))
def test_exiting_process():
    """A process that sleeps, prints and exits must yield exit code 0."""
    result = run_command(
        py2cmd("import time\ntime.sleep(3)\nprint('exit')"),
        protocol=NoCapture,
        stdin=None)
    eq_(result["code"], 0)
def test_stalling_detection_1():
    """A runner without any active worker threads must log a stall warning."""
    runner = ThreadedRunner("something", StdOutErrCapture, None)
    # Simulate all worker threads being gone.
    runner.stdout_enqueueing_thread = None
    runner.stderr_enqueueing_thread = None
    runner.process_waiting_thread = None
    with patch("datalad.runner.nonasyncrunner.lgr") as logger:
        runner.process_queue()
        first_call = logger.method_calls[0]
        eq_(first_call[0], "warning")
        eq_(first_call[1][0], "ThreadedRunner.process_queue(): stall detected")
def test_stalling_detection_2():
thread_mock = MagicMock()
thread_mock.is_alive.return_value = False
runner | |
CHW = Au.shape[1]
C_out = Au.shape[2]
H_out = Au.shape[3]
W_out = Au.shape[4]
Au_temp = Au.view(batch, CHW, -1) # shape (batch, CHW, C_out H_out W_out)
Au_temp = torch.transpose(Au_temp, 1, 2) # shape (batch, C_out H_out W_out, CHW)
Al_temp = Al.view(batch, CHW, -1) # shape (batch, CHW, C_out H_out W_out)
Al_temp = torch.transpose(Al_temp, 1, 2) # shape (batch, C_out H_out W_out, CHW)
if (p_norm != np.inf) or (x_U is None):
# print('Use x0 and eps to compute closed form bound')
if p_norm == 1:
dual_norm = np.inf
elif p_norm == np.inf:
dual_norm = 1
else:
dual_norm = 1/(1-1/p_norm)
x0_temp = x0.view(batch, -1).unsqueeze(2) # (batch, CHW, 1)
# this part may have problem, we should figure out
# whether eps is for the original data or normalized data
upper = Au_temp.matmul(x0_temp).squeeze(2) + eps*torch.norm(Au_temp, p=dual_norm, dim=2) #shape (batch, C_out H_out W_out)
upper = upper.view(batch, C_out, H_out, W_out) + bu
lower = Al_temp.matmul(x0_temp).squeeze(2) - eps*torch.norm(Al_temp, p=dual_norm, dim=2) #shape (batch, C_out H_out W_out)
lower = lower.view(batch, C_out, H_out, W_out) + bl
# upper and lower are of shape (batch, C_out, H_out, W_out)
else: # if norm=np.inf and x_U, x_L are not None
# x_L, x_U maybe tighter than x0-eps, x0+eps
# because we need to clamp x0-eps, x0+eps to the range [0,1]
# before feed it to the network
# print('Use x_L and x_U to compute closed form bound')
x_U_temp = x_U.view(batch, -1).unsqueeze(2) # (batch, CHW, 1)
x_L_temp = x_L.view(batch, -1).unsqueeze(2) # (batch, CHW, 1)
# x_L <= x <= x_U
# Au x + bu <= relu(Au) x_U + neg(Bu) x_L + bu
Au_relu = torch.clamp(Au_temp, min=0) # shape (batch, C_out H_out W_out, CHW)
Au_neg = torch.clamp(Au_temp, max=0) # shape (batch, C_out H_out W_out, CHW)
upper = Au_relu.matmul(x_U_temp).squeeze(2) + Au_neg.matmul(x_L_temp).squeeze(2) # shape (batch, C_out H_out W_out)
upper = upper.view(batch, C_out, H_out, W_out) + bu # shape (batch, C_out, H_out, W_out)
# Al x + bl >= relu(Al) x_L + neg(Bl) x_U + bl
Al_relu = torch.clamp(Al_temp, min=0)
Al_neg = torch.clamp(Al_temp, max=0)
lower = Al_relu.matmul(x_L_temp).squeeze(2) + Al_neg.matmul(x_U_temp).squeeze(2) # shape (batch, C_out H_out W_out)
lower = lower.view(batch, C_out, H_out, W_out) + bl # shape (batch, C_out, H_out, W_out)
return upper, lower
def get_linear_bound_for_relu(l, u, bound_opts):
    """Compute linear bounding lines for ReLU over the interval [l, u].

    Returns ``(kl, bl, ku, bu)`` such that, elementwise for every z in
    [l, u]:  kl*z + bl <= relu(z) <= ku*z + bu.

    ``bound_opts`` is a dict selecting the lower-line strategy for
    unstable neurons (l < 0 < u); exactly one key should be True:
      * 'use-constant': both lines are constants (slope 0) in every case
      * 'same-slope':   lower line parallel to the tight upper line
      * 'zero-lb':      lower slope 0
      * 'one-lb':       lower slope 1
      * 'adaptive-lb':  slope 1 where |u| >= |l|, else 0
    Stable neurons (l >= 0 or u <= 0) always get the tight identity/zero
    lines, except under 'use-constant'.
    """
    device = l.device
    # Zero initialization doubles as the answer for the u <= 0 case;
    # do not change it.
    kl = torch.zeros(l.shape, device=device)
    bl = torch.zeros(l.shape, device=device)
    ku = torch.zeros(u.shape, device=device)
    bu = torch.zeros(u.shape, device=device)

    if bound_opts.get('use-constant', False):
        # Constant bounds: relu(z) lies within [max(l, 0), max(u, 0)].
        return kl, torch.clamp(l, min=0), ku, torch.clamp(u, min=0)

    # Stable active neurons (l >= 0): identity on both sides.
    active = (l >= 0)
    kl[active] = 1
    ku[active] = 1

    # Unstable neurons (l < 0 < u): tight upper chord through (l, 0), (u, u).
    unstable = (l < 0) & (u > 0)
    slope = (u / (u - l))[unstable]
    # slope*u + b = u  =>  b = (1 - slope) * u
    ku[unstable] = slope
    bu[unstable] = (1 - slope) * u[unstable]

    # The lower line always passes through the origin; pick its slope.
    if bound_opts.get('same-slope', False):
        kl[unstable] = slope
    elif bound_opts.get('zero-lb', False):
        pass  # kl is already 0
    elif bound_opts.get('one-lb', False):
        kl[unstable] = 1
    elif bound_opts.get('adaptive-lb', False):
        # Slope 1 where the positive side dominates, else keep 0.
        kl[unstable & (u.abs() >= l.abs())] = 1
    else:
        print('bound_opts:', bound_opts)
        raise Exception('bound-opts not supported')
    return kl, bl, ku, bu
class BoundReLU(ReLU):
def __init__(self, prev_layer, inplace=False, bound_opts=None):
super(BoundReLU, self).__init__(inplace)
# ReLU needs the previous layer's bounds
# self.prev_layer = prev_layer
self.bound_opts = bound_opts
self.upper_u = None # shape (batch, this_layer_shape)
self.lower_l = None
# the lower and upper bounds of the preactivation will be recorded
# as self.upper_u and self.lower_l if interval_propagate or linear_propagate is called
self.dead = None
self.alive = None
self.unstable = None
# assume input of this relu layer is z and output is a: a = relu(z)
# self.alpha_l = None
# self.beta_l = None
# self.alpha_u = None
# self.beta_u = None
# these quantities records the linear functions of x to bound z
# alpha_l * z + beta_l <= z <= alpha_u * z + beta_u
# For relu between linear layers
# z is of shape (batch, n, 1)
# x is of shape (batch, n0, 1)
# alpha is of shape (batch, n, n0n)
# beta is of shape (batch, n, 1)
# In reality, those dimensions of width 1 may be squeezed
    ## Convert a ReLU layer to a BoundReLU layer (documents convert() below)
    # @param act_layer ReLU layer object
    # @param prev_layer Pre-activation layer, used to get preactivation bounds
def update_neuron_status(self):
self.dead = (self.upper_u<=0).float().mean()
self.alive = (self.lower_l>=0).float().mean()
self.unstable = ((self.lower_l<0) * (self.upper_u>0)).float().mean()
@staticmethod
def convert(act_layer, prev_layer, bound_opts=None):
l = BoundReLU(prev_layer, act_layer.inplace, bound_opts)
return l
def interval_propagate(self, norm, h_U, h_L, eps):
assert norm == np.inf
guard_eps = 1e-5
self.unstab = ((h_L < -guard_eps) & (h_U > guard_eps))
# self.unstab indicates that this neuron's activation is unsure
# stored upper and lower bounds will be used for backward bound propagation
# this is the upper and lower bounds of the input of this relu layer
self.upper_u = h_U
self.lower_l = h_L
self.update_neuron_status()
tightness_loss = self.unstab.sum()
# tightness_loss = torch.min(h_U_unstab * h_U_unstab, h_L_unstab * h_L_unstab).sum()
return norm, F.relu(h_U), F.relu(h_L), tightness_loss, tightness_loss, \
(h_U < 0).sum(), (h_L > 0).sum()
def bound_backward(self, last_uA, last_lA):
# in this relu layer we assume the input is z and output is a: a = relu(z)
# we already know the quantity in interest, obj, can be bounded by two linear functions of a
# last_uA a + last_ub <= obj <= last_lA a + last_lb
# this function finds two linear functions of z to bound obj
# this function returns uA, ubias, lA, lbias such that
# uA * z + ubias + last_ub <= obj <= lA * z + lbias + last_lb
# last_uA and last_lA are of shape (batch, obj_dim, this_layer_shape)
# define this_layer_dim = products of elements in this_layer_shape
# this_layer_shape may have multi dimensions
lb_r = self.lower_l.clamp(max=0) # shape (batch, this_layer_shape), same as a or z
ub_r = self.upper_u.clamp(min=0) # shape (batch, this_layer_shape), same as a or z
# this step guarantees upper_d = 0, upper_b=0 if upper_u <= 0
# upper_d = 1, upper_b=0 if lower_l >=0
# avoid division by 0 when both lb_r and ub_r are 0
ub_r = torch.max(ub_r, lb_r + 1e-8)
# CROWN upper and lower linear bounds
upper_d = ub_r / (ub_r - lb_r)
upper_b = - lb_r * upper_d
# note that there is no lower_b because the lower bounding line always passes the origin
upper_d = upper_d.unsqueeze(1) # shape (batch, 1, this_layer_shape)
if | |
<reponame>PaulPauls/Bluetooth_LE_MITM
import logging
import scapy.layers.bluetooth as bt
from .helpers import setup_logging
class BluetoothHandler:
    """
    Represents a Bluetooth LE interface, offering scanning, connecting,
    advertising and ATT data forwarding.

    Built upon the socket of the underlying socket_handler and Scapy HCI
    commands.
    """
    # Programming handles, etc
    socket_handler = None               # socket handler providing the HCI socket
    logger = None                       # logging.Logger named after this handler
    name = None                         # custom peripheral/central name, used in output
    forwarding_bt_handler_dest = None   # peer BluetoothHandler receiving forwarded ATT data
    # NOTE(review): class-level mutable list — shared by ALL instances.  If
    # more than one BluetoothHandler exists they see the same scan results;
    # confirm this is intended, otherwise initialize it in __init__.
    seen_advertising_packets = []
    # Connection parameters (populated on an LE Connection Complete Event)
    connected_flag = None
    peer_bd_addr = None
    peer_addr_type = None
    connection_handle = None
    connection_role = None
    connection_interval = None
    min_connection_interval = None
    max_connection_interval = None
    connection_latency = None
    connection_timeout = None
def __init__(self, name, socket_handler):
# Overwrite __name__ with custom peripheral/central name and set up logging
self.name = name
setup_logging()
self.logger = logging.getLogger(self.name)
# Register socket_handler and define bluetooth connection as 'not connected to any device'
self.socket_handler = socket_handler
self.connected_flag = False
def scan_enable(self):
""" Set Scan Parameters and enable scanning """
self.socket_handler.hci_send_command(bt.HCI_Cmd_LE_Set_Scan_Parameters())
self.socket_handler.hci_send_command(bt.HCI_Cmd_LE_Set_Scan_Enable())
print("{}: Scanning...".format(self.name))
self.logger.info("Scanning...")
def scan_disable(self):
""" Disable Scanning """
self.socket_handler.hci_send_command(bt.HCI_Cmd_LE_Set_Scan_Enable(enable=0x00))
print("{}: Stopped Scanning.".format(self.name))
self.logger.info("Stopped Scanning.")
def connect(self, bd_addr, addr_type=None):
"""
Send a Connection request to the specified BD_Addr; Do not populate the connection parameters yet, as connection
is not yet confirmed. The connection parameters are populated once a Connection Complete Event occurs.
"""
# If addr_type not specified, search for it in saved advertising packets
if addr_type is None:
# Find addr_type of
for packet in self.seen_advertising_packets:
if packet.addr == bd_addr:
addr_type = packet.atype
# If addr_type not specified and not found in advertising packets, return
if addr_type is None:
print("{}: [-] Error. No address type (addr_type) for bd_addr to connect to found.".format(self.name))
self.logger.error("{}: [-] Error. No address type (addr_type) for bd_addr to connect to found.")
return
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Create_Connection(paddr=bd_addr, patype=addr_type))
print("{}: Connection Request sent to {}.".format(self.name, bd_addr))
self.logger.info("Connection Request sent to {}.".format(bd_addr))
def disconnect(self):
""" Disconnect the existing Bluetooth connection; Reset Connection Parameters """
self.socket_handler.hci_send(bt.HCI_Cmd_Disconnect(handle=self.connection_handle))
self.connected_flag = None
self.peer_bd_addr = None
self.peer_addr_type = None
self.connection_handle = None
self.connection_role = None
self.connection_interval = None
self.min_connection_interval = None
self.max_connection_interval = None
self.connection_latency = None
self.connection_timeout = None
print("{}: Disconnected and reset parameters.".format(self.name))
self.logger.info("Disconnected and reset parameters.")
def advertise_enable(self, spoofed_bd_addr=False):
""" Advertise with scapy Standard Parameters and real device bd_addr if no spoofed_bd_addr specified """
if spoofed_bd_addr:
# adv_bd_addr = fake_bd_addr
# self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertising_Parameters())
print("{}: Error. BD Address spoofing not yet implemented.".format(self.name))
self.logger.error("{}: Error. BD Address spoofing not yet implemented.")
raise NotImplemented
else:
adv_bd_addr = self.socket_handler.adapter_bd_addr
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertising_Parameters())
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertise_Enable(enable=0x01))
print("{}: Enabled Advertising with BD_Addr: {}.".format(self.name, adv_bd_addr))
self.logger.info("Enabled Advertising with BD_Addr: {}.".format(adv_bd_addr))
def advertise_disable(self):
""" Disable Advertising """
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertise_Enable(enable=0x00))
print("{}: Stopped Advertising.".format(self.name))
self.logger.info("Stopped Advertising.")
def imitate_advertise_enable(self, adv_packets_to_imitate, spoofed_bd_addr):
""" advertised with imitated packet for 10s """
# Spoof BD_Addr
self.socket_handler.spoof_bd_addr(spoofed_bd_addr)
# Set Advertising Parameters (Here only: set advertised addr_type to random)
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertising_Parameters(oatype=1))
# TODO Not yet properly concatenated multiple EIR_Hdr / multiple Advertising
# data. Therefore for now only copy The EIR_Hdr with the CompleteLocalName, as most important Advert. Data
adv_data = None
for packet in adv_packets_to_imitate:
if bt.EIR_CompleteLocalName in packet:
adv_data = packet[bt.EIR_Hdr]
# Set Advertising Data
if adv_data is not None:
self.logger.debug("Imitated Advertising Data: {}".format(adv_data.show2(dump=True)))
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertising_Data(data=adv_data))
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertising_Data(data=adv_data))
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Set_Advertise_Enable(enable=0x01))
imitated_bd_addr = adv_packets_to_imitate[0].addr
print("{}: [+] Advertising Imitation of BD_Addr {} with spoofed BD_Addr {}."
.format(self.name, imitated_bd_addr, spoofed_bd_addr))
self.logger.info("[+] Advertising Imitation of BD_Addr {} with spoofed BD_Addr {}."
.format(imitated_bd_addr, spoofed_bd_addr))
def register_att_forwarding_destination(self, bluetooth_handler):
""" Register a Bluetooth Handler as the destination for ATT Data forwarding """
self.forwarding_bt_handler_dest = bluetooth_handler
def receive_att_data(self, received_att_packet):
""" Send received forwarded ATT Data through own connection, given that a connection already exists """
if self.connected_flag:
# WARNING: The received packet is send as it is, also copying the connection header etc.
# This could potentially lead to errors as also the connection handle and connection parameters
# are copied, which could disagree with that different BT connection
self.socket_handler.send_raw(received_att_packet)
print("{}: Received ATT Data and sent it.".format(self.name))
self.logger.info("Received ATT Data and sent it.")
else:
print("{}: [-] Received ATT Data, though discarded it as no connection established.".format(self.name))
self.logger.info("[-] Received ATT Data, though discarded it as no connection established.")
def handle_incoming_data(self):
""" Receive incoming data, identify it and then either print it or call appropriate method in response """
self.logger.debug("'handle_incoming_data' called. Receiving data from socket...")
incoming_packet = self.socket_handler.receive_packet()
if not incoming_packet:
return
self.logger.debug("[+] Received incoming_packet:\n###[ Structure ]###\n {}\n{}"
.format(incoming_packet.summary(), incoming_packet.show2(dump=True)))
# Identify incoming packet
packet_type = self.identify_packet(incoming_packet)
# Handle incoming packet according to identified type
if packet_type == "ATT Data":
# forward complete packet to registered Bluetooth_Handler, which sends it via his connection
# For Better Verbosity print ATT Requests and Responses
if bt.ATT_Read_By_Type_Request in incoming_packet:
print("{}: Received Request:\n{}"
.format(self.name, incoming_packet[bt.ATT_Read_By_Type_Request].show2(dump=True)))
elif bt.ATT_Read_By_Type_Response in incoming_packet:
print("{}: Received Response:\n{}"
.format(self.name, incoming_packet[bt.ATT_Read_By_Type_Response].show2(dump=True)))
else:
print("{}: Received:\n{}".format(self.name, incoming_packet[bt.ATT_Hdr].show2(dump=True)))
self.forwarding_bt_handler_dest.receive_att_data(incoming_packet)
print("{}: Received and forwarded ATT Data.".format(self.name))
self.logger.info("Received and forwarded ATT Data.")
elif packet_type == "Connection Parameter Update Request":
# Updates connection parameters according to request and responds with successful completion; p1771
request_id = incoming_packet.id
# update parameters according to request
self.min_connection_interval = incoming_packet.min_interval
self.max_connection_interval = incoming_packet.max_interval
self.connection_latency = incoming_packet.slave_latency
self.connection_timeout = incoming_packet.timeout_mult
# update connection
self.socket_handler.hci_send(bt.HCI_Cmd_LE_Connection_Update(
handle=self.connection_handle, min_interval=self.min_connection_interval,
max_interval=self.max_connection_interval, latency=self.connection_latency,
timeout=self.connection_timeout))
# send response about successfully updated connection
self.socket_handler.l2cap_send(bt.L2CAP_Connection_Parameter_Update_Response(move_result=0x0000),
self.connection_handle, request_id)
print("{}: Received Connection Parameter Update Request; Updated Connection accordingly.".format(self.name))
self.logger.info("Received Connection Parameter Update Request; Updated Connection accordingly.")
elif packet_type == "Disconnection Complete Event":
# Set connected flag and connected_bd_addr accordingly; print disconnection event; p1110
self.connected_flag = False
self.peer_bd_addr = None
disconnection_msg = "[+] Disconnected." if incoming_packet.status == 0x00 else "[-] Disconnection failed."
print("{}: {}".format(self.name, disconnection_msg))
self.logger.info(disconnection_msg)
elif packet_type == "Command Status Event":
# Print current status of Command; p1123
status_msg = "Command pending." if incoming_packet.status == 0x00 else "[-] Command failed."
print("{}: {}".format(self.name, status_msg))
self.logger.info(status_msg)
elif packet_type == "Number of Complete Packets Event":
# Print number of completed packets; p1128
print("{}: Number of completed packets: {}".format(self.name, incoming_packet.number))
self.logger.info("Number of completed packets: {}".format(incoming_packet.number))
elif packet_type == "LE Connection Complete Event":
# Populate Connection Parameters of stack; Print Status and important Parameters; p1190
self.connected_flag = True
self.peer_bd_addr = incoming_packet.paddr
self.peer_addr_type = incoming_packet.patype
self.connection_handle = incoming_packet.handle
self.connection_role = incoming_packet.role
self.connection_interval = incoming_packet.interval
self.connection_latency = incoming_packet.latency
self.connection_timeout = incoming_packet.supervision
connection_msg = "[+] Connection with {} succesful. Handle: {}" \
.format(self.peer_bd_addr, self.connection_handle) if incoming_packet.status == 0x00 else \
"[-] Connection failed."
print("{}: {}".format(self.name, connection_msg))
self.logger.info(connection_msg)
elif packet_type == "LE Advertising Report Event":
# Append advertising packets to list of seen adv packets; Then print scanned infos; p1193
# Check if incoming packet already seen
seen_flag = False
for packet in self.seen_advertising_packets:
if packet.addr == incoming_packet.addr and packet.length == incoming_packet.length:
seen_flag = True
# If not seen, append it to the list of seen advertising packets
if not seen_flag:
self.seen_advertising_packets.append(incoming_packet)
possible_event_types = {0x00: "ADV_IND", 0x01: "ADV_DIRECT_IND", 0x02: "ADV_SCAN_IND",
0x03: "ADV_NONCONN_IND", 0x04: "SCAN_RSP"}
event_type = possible_event_types[incoming_packet.type]
possible_addr_types = {0x00: "public", 0x01: "random", 0x02: "public identity",
0x03: "random (static) identity"}
addr_type = possible_addr_types[incoming_packet.atype]
print("{}: Advertising: {} ({}) ({})".format(self.name, incoming_packet.addr, addr_type, event_type))
self.logger.info("Advertising: {} ({}) ({})".format(incoming_packet.addr, addr_type, event_type))
elif packet_type == "LE Connection Update Complete Event":
# Check if confirmed Connection Update Parameters coincide with stack parameters; Print Status; p1195
coincide_flag = True
if incoming_packet.handle != self.connection_handle \
or incoming_packet.latency != self.connection_latency \
or incoming_packet != self.connection_interval \
or incoming_packet.timeout != self.connection_timeout:
coincide_flag = False
update_msg_1 = "[+] Connection Update Complete." if incoming_packet.status == 0x00 else \
"[-] Connection Update Failed."
update_msg_2 = "New Connection Parameters coincide." if coincide_flag else \
"[-] Though new Connection Parameters differ."
print("{}: {} {}".format(self.name, update_msg_1, update_msg_2))
self.logger.info("{} {}".format(update_msg_1, update_msg_2))
elif packet_type == "Unidentified":
# TODO Implement Proper Handling of Command Complete Events
# Though very small importance
# Only Print warning of non-Command Complete Events, as those are actually important
if bt.HCI_Event_Command_Complete not in incoming_packet:
print("{}: WARNING, received unidentified package whose handling not yet implemented: {}".format(
self.name, incoming_packet.summary()))
self.logger.warning(
"WARNING, received unidentified package whose handling not yet implemented: {}".format(
incoming_packet.summary()))
else:
print("{}: WARNING, identified an incoming packet as '{}', however handling not yet implemented."
.format(self.name, type))
self.logger.warning("WARNING, identified an incoming packet as '{}', however handling | |
<reponame>cogerk/walk-up-music-scrapper<gh_stars>0
"""
Public API for all plots supported by HoloViews, regardless of
plotting package or backend. Every plotting classes must be a subclass
of this Plot baseclass.
"""
from itertools import groupby, product
from collections import Counter, defaultdict
import numpy as np
import param
from ..core import OrderedDict
from ..core import util, traversal
from ..core.element import Element
from ..core.overlay import Overlay, CompositeOverlay
from ..core.layout import Empty, NdLayout, Layout
from ..core.options import Store, Compositor, SkipRendering
from ..core.overlay import NdOverlay
from ..core.spaces import HoloMap, DynamicMap
from ..core.util import stream_parameters, isfinite
from ..element import Table
from .util import (get_dynamic_mode, initialize_unbounded, dim_axis_label,
attach_streams, traverse_setter, get_nested_streams,
compute_overlayable_zorders, get_plot_frame,
split_dmap_overlay)
class Plot(param.Parameterized):
    """
    Base class of all Plot classes in HoloViews, designed to be
    general enough to use any plotting package or backend.
    """

    # Style options that may be supplied to the plotting call.
    style_opts = []
    # Common aliases the backend (e.g. matplotlib) does not support;
    # listed here so they can be rejected as invalid style options.
    _disabled_opts = []

    def initialize_plot(self, ranges=None):
        """Initialize the backend figure; implemented by subclasses."""
        raise NotImplementedError

    def update(self, key):
        """
        Update the internal state of the Plot to represent the given
        key tuple (where integers represent frames). Returns this
        state.
        """
        return self.state

    @property
    def state(self):
        """
        The plotting state that gets updated via the update method and
        used by the renderer to generate output.
        """
        raise NotImplementedError

    def cleanup(self):
        """
        Cleans up references to the plot on the attached Stream
        subscribers.
        """
        plots = self.traverse(lambda x: x, [Plot])
        relevant_types = (GenericCompositePlot, GenericElementPlot,
                          GenericOverlayPlot)
        for plot in plots:
            if not isinstance(plot, relevant_types):
                continue
            for stream in set(plot.streams):
                # Drop subscribers whose owner is one of our plots.
                stream._subscribers = [
                    (prio, subscriber)
                    for prio, subscriber in stream._subscribers
                    if util.get_method_owner(subscriber) not in plots]

    @property
    def id(self):
        # Comm id when a comm exists, otherwise the identity of the state.
        return self.comm.id if self.comm else id(self.state)

    def __len__(self):
        """
        Returns the total number of available frames.
        """
        raise NotImplementedError

    @classmethod
    def lookup_options(cls, obj, group):
        """Look up options of *group* for *obj*, filtered to what the
        resolved plotting class supports."""
        plot_class = None
        try:
            plot_class = Store.renderers[cls.backend].plotting_class(obj)
            style_opts = plot_class.style_opts
        except SkipRendering:
            style_opts = None
        node = Store.lookup_options(cls.backend, obj, group)
        if group == 'style' and style_opts is not None:
            return node.filtered(style_opts)
        if group == 'plot' and plot_class:
            return node.filtered(list(plot_class.params().keys()))
        return node
class PlotSelector(object):
    """
    Proxy that allows dynamic selection of a plotting class based on a
    function of the plotted object. Behaves like a Plot class and
    presents the same parameterized interface.
    """

    _disabled_opts = []

    def __init__(self, selector, plot_classes, allow_mismatch=False):
        """
        The selector function accepts a component instance and returns
        the appropriate key to index the plot_classes dictionary.
        """
        self.selector = selector
        self.plot_classes = OrderedDict(plot_classes)
        interface = self._define_interface(self.plot_classes.values(), allow_mismatch)
        self.style_opts, self.plot_options = interface

    def _define_interface(self, plots, allow_mismatch):
        """Collect the union of plot parameters and style options across
        all selectable classes; unless allow_mismatch, verify they agree."""
        parameters = [{k: v.precedence for k, v in plot.params().items()
                       if ((v.precedence is None) or (v.precedence >= 0))}
                      for plot in plots]
        param_sets = [set(params.keys()) for params in parameters]
        if not allow_mismatch and not all(pset == param_sets[0] for pset in param_sets):
            raise Exception("All selectable plot classes must have identical plot options.")
        styles = [plot.style_opts for plot in plots]
        if not allow_mismatch and not all(style == styles[0] for style in styles):
            raise Exception("All selectable plot classes must have identical style options.")
        plot_params = {p: v for params in parameters for p, v in params.items()}
        return [s for style in styles for s in style], plot_params

    def __call__(self, obj, **kwargs):
        plot_class = self.get_plot_class(obj)
        return plot_class(obj, **kwargs)

    def get_plot_class(self, obj):
        """Return the plot class chosen by the selector for *obj*; raise
        if the selector returns an unknown key."""
        key = self.selector(obj)
        if key not in self.plot_classes:
            msg = "Key %s returned by selector not in set: %s"
            raise Exception(msg % (key, ', '.join(self.plot_classes.keys())))
        return self.plot_classes[key]

    def __setattr__(self, label, value):
        try:
            return super(PlotSelector, self).__setattr__(label, value)
        except Exception as e:
            # BUGFIX: this was a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit; narrow it and chain the cause.
            raise Exception("Please set class parameters directly on classes %s"
                            % ', '.join(str(cls) for cls in self.__dict__['plot_classes'].values())) from e

    def params(self):
        return self.plot_options
class DimensionedPlot(Plot):
    """
    DimensionedPlot implements a number of useful methods
    to compute dimension ranges and titles containing the
    dimension values.
    """

    fontsize = param.Parameter(default=None, allow_None=True, doc="""
       Specifies various fontsizes of the displayed text.
       Finer control is available by supplying a dictionary where any
       unmentioned keys reverts to the default sizes, e.g:
          {'ticks':20, 'title':15,
           'ylabel':5, 'xlabel':5,
           'legend':8, 'legend_title':13}
       You can set the fontsize of both 'ylabel' and 'xlabel' together
       using the 'labels' key.""")

    # Allowed fontsize keys; anything else supplied in the fontsize dict
    # is popped with a warning (see _fontsize).
    _fontsize_keys = ['xlabel','ylabel', 'labels', 'ticks',
                      'title', 'legend', 'legend_title', 'xticks',
                      'yticks']

    show_title = param.Boolean(default=True, doc="""
        Whether to display the plot title.""")

    title_format = param.String(default="{label} {group}\n{dimensions}", doc="""
        The formatting string for the title of this plot, allows defining
        a label group separator and dimension labels.""")

    normalize = param.Boolean(default=True, doc="""
        Whether to compute ranges across all Elements at this level
        of plotting. Allows selecting normalization at different levels
        for nested data containers.""")

    projection = param.Parameter(default=None, doc="""
        Allows supplying a custom projection to transform the axis
        coordinates during display. Example projections include '3d'
        and 'polar' projections supported by some backends. Depending
        on the backend custom projection objects may be supplied.""")
def __init__(self, keys=None, dimensions=None, layout_dimensions=None,
             uniform=True, subplot=False, adjoined=None, layout_num=0,
             style=None, subplots=None, dynamic=False, renderer=None, **params):
    """
    Initialize plot state: layout/dimension bookkeeping, rendering
    handles and the renderer instance.
    """
    self.subplots = subplots
    self.adjoined = adjoined
    self.dimensions = dimensions
    self.layout_num = layout_num
    self.layout_dimensions = layout_dimensions
    self.subplot = subplot
    self.keys = keys
    self.uniform = uniform
    self.dynamic = dynamic
    self.drawn = False
    self.handles = {}
    self.group = None
    self.label = None
    self.current_frame = None
    self.current_key = None
    self.ranges = {}
    # Fall back to the registered renderer for this backend when none
    # is supplied explicitly.
    self.renderer = renderer if renderer else Store.renderers[self.backend].instance()
    self.comm = None
    self._force = False
    self._updated = False # Whether the plot should be marked as updated
    # Drop any keyword arguments that are not declared parameters before
    # forwarding to the parameterized superclass.
    params = {k: v for k, v in params.items()
              if k in self.params()}
    super(DimensionedPlot, self).__init__(**params)
def __getitem__(self, frame):
    """
    Get the state of the Plot for a given frame number.
    """
    # NOTE(review): this only warns when the frame exceeds the number of
    # available frames; it does not clamp, so self.keys[frame] below may
    # still raise IndexError -- confirm intended behavior.
    if isinstance(frame, int) and frame > len(self):
        self.warning("Showing last frame available: %d" % len(self))
    # Lazily draw the figure on first access.
    if not self.drawn: self.handles['fig'] = self.initialize_plot()
    # Integer frames index into self.keys; tuple frames are used directly.
    if not isinstance(frame, tuple):
        frame = self.keys[frame]
    self.update_frame(frame)
    return self.state
def _get_frame(self, key):
    """
    Required on each MPLPlot type to get the data corresponding
    just to the current frame out from the object.

    Base implementation is a no-op returning None; subclasses override.
    """
    pass
def matches(self, spec):
    """
    Matches a specification against the current Plot.

    A spec may be a Plot type (matched with isinstance) or a callable
    taking the plot and returning a boolean.
    """
    if isinstance(spec, type):
        return isinstance(self, spec)
    if callable(spec):
        return spec(self)
    raise ValueError("Matching specs have to be either a type or a callable.")
def traverse(self, fn=None, specs=None, full_breadth=True):
    """
    Traverses any nested DimensionedPlot returning a list
    of all plots that match the specs. The specs should
    be supplied as a list of either Plot types or callables,
    which should return a boolean given the plot class.
    """
    # No specs means everything matches; otherwise any single spec match
    # suffices (short-circuits like the original loop-and-break).
    matched = specs is None or any(self.matches(spec) for spec in specs)
    results = []
    if matched:
        results.append(fn(self) if fn else self)
    # Composite plots are assumed to expose iterable children via .subplots
    if hasattr(self, 'subplots') and self.subplots:
        for child in self.subplots.values():
            if child is None:
                continue
            results += child.traverse(fn, specs, full_breadth)
            if not full_breadth:
                break
    return results
def _frame_title(self, key, group_size=2, separator='\n'):
    """
    Returns the formatted dimension group strings
    for a particular frame.
    """
    if self.layout_dimensions is not None:
        dimensions, key = zip(*self.layout_dimensions.items())
    elif not self.dynamic and (not self.uniform or len(self) == 1) or self.subplot:
        # Single-frame, non-uniform or subplot cases get no title suffix.
        return ''
    else:
        key = key if isinstance(key, tuple) else (key,)
        dimensions = self.dimensions
    dimension_labels = [dim.pprint_value_string(k) for dim, k in
                        zip(dimensions, key)]
    # Chunk labels into groups of group_size; ranging over the full label
    # count yields trailing empty chunks which are filtered out below.
    groups = [', '.join(dimension_labels[i*group_size:(i+1)*group_size])
              for i in range(len(dimension_labels))]
    return util.bytes_to_unicode(separator.join(g for g in groups if g))
def _fontsize(self, key, label='fontsize', common=True):
if not self.fontsize: return {}
if not isinstance(self.fontsize, dict):
return {label:self.fontsize} if common else {}
unknown_keys = set(self.fontsize.keys()) - set(self._fontsize_keys)
if unknown_keys:
msg = "Popping unknown keys %r from fontsize dictionary.\nValid keys: %r"
self.warning(msg % (list(unknown_keys), self._fontsize_keys))
for key in unknown_keys: self.fontsize.pop(key, None)
if key in self.fontsize:
return {label:self.fontsize[key]}
elif key in ['ylabel', 'xlabel'] and 'labels' in self.fontsize:
return {label:self.fontsize['labels']}
elif key in ['xticks', 'yticks'] and 'ticks' in self.fontsize:
return {label:self.fontsize['ticks']}
else:
return {}
def compute_ranges(self, obj, key, ranges):
"""
Given an object, a specific key and the normalization options
this method will find the specified normalization options on
the appropriate OptionTree, group the elements according to
the selected normalization option (i.e. either per frame or
over the whole animation) and finally compute the dimension
ranges in each group. | |
<filename>shb-vgg/exp/12-05_13-19_SHHB_VGG_1e-05_[norm+flip]/code/old-cca/loaders.py
import csv
import math
import os
from glob import glob
import cv2
import numpy as np
from scipy.io import loadmat
def get_density_map_gaussian(im, points):
    """
    Create a Gaussian density map from the points.
    Credits: https://github.com/ZhengPeng7/Multi_column_CNN_in_Keras/blob/master/data_preparation/get_density_map_gaussian.py
    :param im: Original image, used only for getting needed shape of the density map.
    :param points: List of (X, Y) tuples that point at where human heads are located in a picture.
    :return: Density map constructed from the points.
    """
    im_density = np.zeros_like(im[:, :, 0], dtype=np.float64)
    h, w = im_density.shape
    if points is None:
        return im_density
    # Single head: mark one clamped pixel instead of stamping a kernel.
    if points.shape[0] == 1:
        x1 = max(0, min(w-1, round(points[0, 0])))
        y1 = max(0, min(h-1, round(points[0, 1])))
        im_density[y1, x1] = 255
        return im_density
    for j in range(points.shape[0]):
        # Fixed 15x15 separable Gaussian kernel with sigma 4.
        f_sz = 15
        sigma = 4.0
        H = np.multiply(cv2.getGaussianKernel(f_sz, sigma), (cv2.getGaussianKernel(f_sz, sigma)).T)
        # Head center, clamped into the image.
        x = min(w-1, max(0, abs(int(math.floor(points[j, 0])))))
        y = min(h-1, max(0, abs(int(math.floor(points[j, 1])))))
        # NOTE(review): x and y are already clamped to w-1/h-1 above, so
        # this guard appears unreachable -- kept for fidelity to the
        # original implementation.
        if x >= w or y >= h:
            continue
        # Kernel footprint around (x, y); half-open on the right/bottom.
        x1 = x - f_sz//2 + 0
        y1 = y - f_sz//2 + 0
        x2 = x + f_sz//2 + 1
        y2 = y + f_sz//2 + 1
        # df* record how far the footprint spilled past each border.
        dfx1, dfy1, dfx2, dfy2 = 0, 0, 0, 0
        change_H = False
        if x1 < 0:
            dfx1 = abs(x1) + 0
            x1 = 0
            change_H = True
        if y1 < 0:
            dfy1 = abs(y1) + 0
            y1 = 0
            change_H = True
        if x2 > w:
            dfx2 = x2 - w
            x2 = w
            change_H = True
        if y2 > h:
            dfy2 = y2 - h
            y2 = h
            change_H = True
        # When clipped, rebuild a smaller kernel matching the clipped
        # footprint (y2h-y1h+1 rows by x2h-x1h+1 columns).
        x1h, y1h, x2h, y2h = 1 + dfx1, 1 + dfy1, f_sz - dfx2, f_sz - dfy2
        if change_H is True:
            H = np.multiply(cv2.getGaussianKernel(y2h-y1h+1, sigma), (cv2.getGaussianKernel(x2h-x1h+1, sigma)).T)
        im_density[y1:y2, x1:x2] += H
    return im_density
class Loader:
    """
    Abstract base loader that should return an iterable of samples, either
    images, lists of points or density maps.
    """

    def load(self):
        """Yield samples; must be implemented by subclasses."""
        raise NotImplementedError("load not implemented in the child class")

    @staticmethod
    def _prepare_args(local_vars):
        """Drop the unwanted 'self' entry from a locals() snapshot so the
        remaining constructor arguments can be persisted for pipelines."""
        args = dict(local_vars)
        args.pop('self', None)
        return args

    def get_number_of_loadable_samples(self):
        """
        Number of samples this loader can and will load (including any
        already loaded), or None when unknown.
        """
        return None
class BasicImageFileLoader(Loader):
    """
    Loader for images stored in image files. Allows reading any files that
    opencv-python can handle - e.g. JPG, PNG.
    """

    def __init__(self, img_paths):
        """
        Create a new image loader that reads all image files from paths.
        :param img_paths: Paths to all images that are to be loaded.
        """
        Loader.__init__(self)
        self.args = self._prepare_args(locals())
        self.img_paths = img_paths

    def get_number_of_loadable_samples(self):
        """
        Number of images to load, i.e. the number of specified paths.
        :return: Number of images.
        """
        return len(self.img_paths)

    def load(self):
        """
        Load every image from the provided file paths.
        :return: Generator of images in BGR format.
        """
        for img_path in self.img_paths:
            yield cv2.imread(img_path, cv2.IMREAD_COLOR)
class ImageFileLoader(BasicImageFileLoader):
    """
    Loader for all images of some type in a given directory.
    """

    def __init__(self, img_dir, file_extension="jpg"):
        """
        Create a new image loader that reads all the images with specified
        file extension in a given directory.
        :param img_dir: Directory to be searched.
        :param file_extension: Desired extension of files to be loaded.
        """
        # Snapshot constructor arguments before introducing more locals.
        ctor_args = locals().copy()
        matching_paths = sorted(glob(os.path.join(img_dir, f"*.{file_extension}")))
        BasicImageFileLoader.__init__(self, matching_paths)
        self.args = self._prepare_args(ctor_args)
class BasicGTPointsMatFileLoader(Loader):
    """
    Loader for ground truth data stored as lists of head positions in
    Matlab files.
    """

    def __init__(self, gt_paths, getter):
        """
        Create a loader that loads all data from the provided file paths
        using a given getter.
        :param gt_paths: Paths of files that are to be read.
        :param getter: Lambda that takes Matlab file content and returns
            list of head positions in form of (X, Y) tuples.
        """
        Loader.__init__(self)
        self.args = self._prepare_args(locals())
        self.gt_paths = gt_paths
        self.getter = getter

    def get_number_of_loadable_samples(self):
        """
        Number of GTs to load, i.e. the number of specified paths.
        :return: Number of GTs.
        """
        return len(self.gt_paths)

    def load(self):
        """
        Load all Matlab files from paths.
        :return: Generator of lists of head positions - (X, Y) tuples.
        """
        for gt_path in self.gt_paths:
            yield self.getter(loadmat(gt_path))
class GTPointsMatFileLoader(BasicGTPointsMatFileLoader):
    """
    Loader for head positions in all Matlab files in a given directory.
    """

    def __init__(self, gt_dir, getter, file_extension="mat"):
        """
        Create a loader that searches for files with specified extension in
        a given directory and loads them.
        :param gt_dir: Directory to be searched.
        :param getter: Callable mapping Matlab file content to a list of
            (X, Y) head positions.
        :param file_extension: Desired file extension of Matlab files.
        """
        # Snapshot constructor arguments before introducing more locals.
        ctor_args = locals().copy()
        matching_paths = sorted(glob(os.path.join(gt_dir, f"*.{file_extension}")))
        BasicGTPointsMatFileLoader.__init__(self, matching_paths, getter)
        self.args = self._prepare_args(ctor_args)
class BasicDensityMapCSVFileLoader(Loader):
    """
    Loader for density maps stored in separate CSV files.
    """

    def __init__(self, dm_paths):
        """
        Create a loader that loads density maps at specified paths.
        :param dm_paths: Paths to CSV files with density maps.
        """
        Loader.__init__(self)
        self.args = self._prepare_args(locals())
        self.dm_paths = dm_paths

    def get_number_of_loadable_samples(self):
        """
        Get number of density maps to load, according to number of
        specified paths.
        :return: Number of density maps.
        """
        return len(self.dm_paths)

    def load(self):
        """
        Load all density maps from all specified paths.
        :return: Generator of density maps (2D float arrays).
        """
        for path in self.dm_paths:
            with open(path, 'r', newline='') as f:
                # Idiom fix: nested comprehensions replace the original
                # manual append loops; one row of floats per CSV line.
                rows = [[float(cell) for cell in row]
                        for row in csv.reader(f)]
            yield np.array(rows)
class DensityMapCSVFileLoader(BasicDensityMapCSVFileLoader):
    """
    Loader for density maps stored in all CSV files in a given directory.
    """

    def __init__(self, den_map_dir, file_extension="csv"):
        """
        Create a loader that searches for files with the given extension in
        the given directory and loads them.
        :param den_map_dir: Directory to be searched.
        :param file_extension: Desired extension of files to be loaded.
        """
        # Snapshot constructor arguments before introducing more locals.
        ctor_args = locals().copy()
        matching_paths = sorted(glob(os.path.join(den_map_dir, f"*.{file_extension}")))
        BasicDensityMapCSVFileLoader.__init__(self, matching_paths)
        self.args = self._prepare_args(ctor_args)
class VariableLoader(Loader):
    """
    Loader that loads from a variable (list or array) instead of file.
    May be useful when connecting pipelines.
    """

    def __init__(self, data):
        """
        Create a loader that reads from a variable (list or array most
        probably) and yields the results.
        :param data: Iterable that has len() with either images or density maps.
        """
        # Deliberately not persisted: saving dataset variables, possibly
        # consisting of thousands of samples, to a json file would be
        # dangerous.
        self.args = None
        self.data = data

    def get_number_of_loadable_samples(self):
        """
        Length of the dataset held in the variable.
        :return: Number of samples.
        """
        return len(self.data)

    def load(self):
        """
        Yield the stored samples one by one.
        :return: Generator of either images or density maps.
        """
        yield from self.data
class ConcatenatingLoader(Loader):
    """
    Loader that doesn't perform any loading on its own but rather
    concatenates samples from a few sources.
    """

    def __init__(self, loaders):
        """
        Create a loader that concatenates loading results from a few loaders.
        :param loaders: Loaders whose results will be concatenated.
        """
        Loader.__init__(self)
        self.args = [{'name': loader.__class__.__name__, 'args': loader.args}
                     for loader in loaders]
        self.loaders = loaders

    def get_number_of_loadable_samples(self):
        """
        Get number of samples to load throughout loaders.
        :return: Cumulative number of samples.
        """
        return sum(loader.get_number_of_loadable_samples()
                   for loader in self.loaders)

    def load(self):
        """
        Load all samples from all connected loaders.
        :return: Generator of samples, be it images, GT point lists or
            density maps.
        """
        for loader in self.loaders:
            # Bug fix: the original iterated ``loader`` directly, but Loader
            # instances are not iterable -- samples come from loader.load().
            yield from loader.load()
class CombinedLoader(Loader):
"""
Loader that should be primarily used with a pipeline - zips or combines an iterable of images with an iterable of
density maps (be it straight from a loader or from transformed on-the-fly GT points).
"""
def __init__(self, img_loader, gt_loader, den_map_loader=None):
"""
Create a combined loader. Either `gt_loader` or `den_map_loader` must be specified (but not both) in order to
provide density maps related to the images loaded using `img_loader`.
:param img_loader: Loader that provides an iterable of images.
:param gt_loader: Loader that provides an iterable of lists of points.
:param den_map_loader: Loader that provides an iterable of density maps.
"""
| |
0 0 0 0 0 0 0 0 0]
...
sage: L = M[0].integral_structure(); L
Free module of degree 19 and rank 2 over Integer Ring
Echelon basis matrix:
[ 0 1 1 0 -2 1 -1 1 -1 -2 2 0 0 0 0 0 0 0 0]
[ 0 0 3 0 -3 2 -1 2 -1 -4 2 -1 -2 1 2 0 0 -1 1]
sage: K.coordinate_module(L)
Free module of degree 8 and rank 2 over Integer Ring
User basis matrix:
[ 1 1 1 -1 1 -1 0 0]
[ 0 3 2 -1 2 -1 -1 -2]
sage: K.coordinate_module(L).basis_matrix() * K.basis_matrix()
[ 0 1 1 0 -2 1 -1 1 -1 -2 2 0 0 0 0 0 0 0 0]
[ 0 0 3 0 -3 2 -1 2 -1 -4 2 -1 -2 1 2 0 0 -1 1]
"""
if not is_FreeModule(V):
raise ValueError, "V must be a free module"
A = self.basis_matrix()
A = A.matrix_from_columns(A.pivots()).transpose()
B = V.basis_matrix()
B = B.matrix_from_columns(self.basis_matrix().pivots()).transpose()
S = A.solve_right(B).transpose()
return (self.base_ring()**S.ncols()).span_of_basis(S.rows())
def degree(self):
    """
    Return the degree of this free module. This is the dimension of the
    ambient vector space in which it is embedded.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 10)
        sage: W = M.submodule([M.gen(0), 2*M.gen(3) - M.gen(0), M.gen(0) + M.gen(3)])
        sage: W.degree()
        10
        sage: W.rank()
        2
    """
    # Name-mangled attribute set by the concrete class's constructor.
    return self.__degree
def dimension(self):
    """
    Return the dimension of this free module.

    Synonym for rank().

    EXAMPLES::

        sage: M = FreeModule(FiniteField(19), 100)
        sage: W = M.submodule([M.gen(50)])
        sage: W.dimension()
        1
    """
    return self.rank()
def discriminant(self):
    """
    Return the discriminant of this free module, i.e. the determinant
    of its gram matrix.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 3)
        sage: M.discriminant()
        1
        sage: W = M.span([[1,2,3]])
        sage: W.discriminant()
        14
        sage: W2 = M.span([[1,2,3], [1,1,1]])
        sage: W2.discriminant()
        6
    """
    return self.gram_matrix().determinant()
def free_module(self):
    """
    Return this free module. (This is used by the
    ``FreeModule`` functor, and simply returns self.)

    EXAMPLES::

        sage: M = FreeModule(ZZ, 3)
        sage: M.free_module()
        Ambient free module of rank 3 over the principal ideal domain Integer Ring
    """
    return self
def gen(self, i=0):
    """
    Return ith generator for self, where i is between 0 and rank-1,
    inclusive.

    INPUT:

    - ``i`` - an integer

    OUTPUT: i-th basis vector for self.

    EXAMPLES::

        sage: n = 5
        sage: V = QQ^n
        sage: B = [ V.gen(i) for i in range(n) ]
        sage: B
        [(1, 0, 0, 0, 0),
        (0, 1, 0, 0, 0),
        (0, 0, 1, 0, 0),
        (0, 0, 0, 1, 0),
        (0, 0, 0, 0, 1)]
        sage: V.gens() == tuple(B)
        True

    TESTS::

        sage: (QQ^3).gen(4/3)
        Traceback (most recent call last):
        ...
        TypeError: rational is not an integer
    """
    if i < 0 or i >= self.rank():
        # Fixed: use the parenthesized raise form, valid in both
        # Python 2 and Python 3 (the old ``raise ValueError, msg``
        # syntax is Python-2 only).
        raise ValueError("Generator %s not defined."%i)
    return self.basis()[i]
def gram_matrix(self):
    r"""
    Return the gram matrix associated to this free module, defined to
    be `G = B*A*B.transpose()`, where A is the inner product matrix
    (induced from the ambient space), and B the basis matrix.

    EXAMPLES::

        sage: V = VectorSpace(QQ,4)
        sage: u = V([1/2,1/2,1/2,1/2])
        sage: v = V([0,1,1,0])
        sage: w = V([0,0,1,1])
        sage: M = span([u,v,w], ZZ)
        sage: M.inner_product_matrix() == V.inner_product_matrix()
        True
        sage: L = M.submodule_with_basis([u,v,w])
        sage: L.inner_product_matrix() == M.inner_product_matrix()
        True
        sage: L.gram_matrix()
        [1 1 1]
        [1 2 1]
        [1 1 2]
    """
    # Ambient modules have the identity gram matrix (sparse).
    if self.is_ambient():
        return sage.matrix.matrix_space.MatrixSpace(self.base_ring(), self.degree(), sparse=True)(1)
    # Non-ambient: compute B*B^T once and cache the result.
    if self._gram_matrix is None:
        basis = self.basis_matrix()
        self._gram_matrix = basis * basis.transpose()
    return self._gram_matrix
def has_user_basis(self):
    """
    Return ``True`` if the basis of this free module is
    specified by the user, as opposed to being the default echelon
    form.

    This base implementation always returns False; subclasses with
    user-specified bases override it.

    EXAMPLES::

        sage: V = QQ^3
        sage: W = V.subspace([[2,'1/2', 1]])
        sage: W.has_user_basis()
        False
        sage: W = V.subspace_with_basis([[2,'1/2',1]])
        sage: W.has_user_basis()
        True
    """
    return False
def inner_product_matrix(self):
    """
    Return the default identity inner product matrix associated to this
    module.

    By definition this is the inner product matrix of the ambient
    space, hence may be of degree greater than the rank of the module.

    TODO: Differentiate the image ring of the inner product from the
    base ring of the module and/or ambient space. E.g. On an integral
    module over ZZ the inner product pairing could naturally take
    values in ZZ, QQ, RR, or CC.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 3)
        sage: M.inner_product_matrix()
        [1 0 0]
        [0 1 0]
        [0 0 1]
    """
    # Sparse identity matrix of size degree() over the base ring.
    return sage.matrix.matrix_space.MatrixSpace(self.base_ring(), self.degree(), sparse=True)(1)
def _inner_product_is_dot_product(self):
    """
    Return whether or not the inner product on this module is induced
    by the dot product on the ambient vector space. This is used
    internally by the inner_product function for optimization.

    This base implementation always returns True; subclasses with a
    custom inner product matrix override it.

    EXAMPLES::

        sage: FreeModule(ZZ, 3)._inner_product_is_dot_product()
        True
        sage: FreeModule(ZZ, 3, inner_product_matrix=1)._inner_product_is_dot_product()
        True
        sage: FreeModule(ZZ, 2, inner_product_matrix=[1,0,-1,0])._inner_product_is_dot_product()
        False

    ::

        sage: M = FreeModule(QQ, 3)
        sage: M2 = M.span([[1,2,3]])
        sage: M2._inner_product_is_dot_product()
        True
    """
    return True
def is_ambient(self):
    """
    Returns False since this is not an ambient free module.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 3).span([[1,2,3]]); M
        Free module of degree 3 and rank 1 over Integer Ring
        Echelon basis matrix:
        [1 2 3]
        sage: M.is_ambient()
        False
        sage: M = (ZZ^2).span([[1,0], [0,1]])
        sage: M
        Free module of degree 2 and rank 2 over Integer Ring
        Echelon basis matrix:
        [1 0]
        [0 1]
        sage: M.is_ambient()
        False
        sage: M == M.ambient_module()
        True
    """
    return False
def is_dense(self):
    """
    Return ``True`` if the underlying representation of
    this module uses dense vectors, and False otherwise.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).is_dense()
        True
        sage: FreeModule(ZZ, 2, sparse=True).is_dense()
        False
    """
    # Dense is simply the negation of sparse.
    return not self.is_sparse()
def is_full(self):
    """
    Return ``True`` if the rank of this module equals its
    degree.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).is_full()
        True
        sage: M = FreeModule(ZZ, 2).span([[1,2]])
        sage: M.is_full()
        False
    """
    return self.rank() == self.degree()
def is_finite(self):
    """
    Returns True if the underlying set of this free module is finite.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).is_finite()
        False
        sage: FreeModule(Integers(8), 2).is_finite()
        True
        sage: FreeModule(ZZ, 0).is_finite()
        True
    """
    # Finite base ring, or the rank-0 (trivial) module, which is finite
    # over any base ring.
    return self.base_ring().is_finite() or self.rank() == 0
def is_sparse(self):
    """
    Return ``True`` if the underlying representation of
    this module uses sparse vectors, and False otherwise.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).is_sparse()
        False
        sage: FreeModule(ZZ, 2, sparse=True).is_sparse()
        True
    """
    # Name-mangled attribute set by the constructor.
    return self.__is_sparse
def ngens(self):
    """
    Returns the number of basis elements of this free module.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).ngens()
        2
        sage: FreeModule(ZZ, 0).ngens()
        0
        sage: FreeModule(ZZ, 2).span([[1,1]]).ngens()
        1
    """
    # Lazily computed and cached: equal to the rank.
    try:
        return self.__ngens
    except AttributeError:
        self.__ngens = self.rank()
    return self.__ngens
def nonembedded_free_module(self):
    """
    Returns an ambient free module that is isomorphic to this free
    module.

    Thus if this free module is of rank `n` over a ring
    `R`, then this function returns `R^n`, as an
    ambient free module.

    EXAMPLES::

        sage: FreeModule(ZZ, 2).span([[1,1]]).nonembedded_free_module()
        Ambient free module of rank 1 over the principal ideal domain Integer Ring
    """
    return FreeModule(self.base_ring(), self.rank())
def random_element(self, prob=1.0, *args, **kwds):
    """
    Returns a random element of self.

    INPUT:

    -- ``prob`` - float. Each coefficient will be set to zero with
       probability `1-prob`. Otherwise coefficients will be chosen
       randomly from base ring (and may be zero).

    -- ``*args, **kwds`` - passed on to ``random_element()`` function
       of base ring.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 2).span([[1,1]])
        sage: M.random_element()
        (-1, -1)
        sage: M.random_element()
        (2, 2)
        sage: M.random_element()
        (1, 1)

    Passes extra positional or keyword arguments through::

        sage: M.random_element(5,10)
        (9, 9)
    """
    rnd = current_randstate().python_random().random
    ring = self.base_ring()
    prob = float(prob)
    # One coefficient per basis element; zeroed with probability 1-prob.
    coefficients = []
    for _ in range(self.rank()):
        if rnd() > prob:
            coefficients.append(0)
        else:
            coefficients.append(ring.random_element(*args, **kwds))
    return self.linear_combination_of_basis(coefficients)
def rank(self):
    """
    Return the rank of this free module.

    EXAMPLES::

        sage: FreeModule(Integers(6), 10000000).rank()
        10000000
        sage: FreeModule(ZZ, 2).span([[1,1], [2,2], [3,4]]).rank()
        2
    """
    # Name-mangled attribute set by the constructor.
    return self.__rank
def uses_ambient_inner_product(self):
    r"""
    Return ``True`` if the inner product on this module is
    the one induced by the ambient inner product.

    EXAMPLES::

        sage: M = FreeModule(ZZ, 2)
        sage: W = M.submodule([[1,2]])
        sage: W.uses_ambient_inner_product()
        True
        sage: W.inner_product_matrix()
        [1 0]
        [0 1]

    ::

        sage: W.gram_matrix()
        [5]
    """
    # Name-mangled attribute set by the constructor.
    return self.__uses_ambient_inner_product
def zero_vector(self):
"""
Returns the zero vector in this free module.
EXAMPLES::
sage: M = FreeModule(ZZ, 2)
sage: M.zero_vector()
(0, 0)
sage: M(0)
(0, 0)
sage: M.span([[1,1]]).zero_vector()
(0, 0)
sage: M.zero_submodule().zero_vector()
(0, 0)
"""
# Do *not* cache this -- it must be | |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test suite for zcomx/modules/tumblr.py
"""
import datetime
import unittest
import uuid
from bs4 import BeautifulSoup
from pydal.objects import Row
from applications.zcomx.modules.activity_logs import ActivityLog
from applications.zcomx.modules.book_pages import BookPage
from applications.zcomx.modules.book_types import BookType
from applications.zcomx.modules.books import Book
from applications.zcomx.modules.creators import \
AuthUser, \
Creator
from applications.zcomx.modules.tumblr import \
Authenticator, \
BookListingCreator, \
BookListingCreatorWithTumblr, \
OngoingBookListing, \
PhotoDataPreparer, \
Poster, \
TextDataPreparer, \
book_listing_creator, \
postable_activity_log_ids
from applications.zcomx.modules.stickon.dal import RecordGenerator
from applications.zcomx.modules.tests.runner import LocalTestCase
# C0111: Missing docstring
# R0904: Too many public methods
# pylint: disable=C0111,R0904
class WithObjectsTestCase(LocalTestCase):
    """Base test case creating a creator, a two-page ongoing book and two
    'page added' activity_log records used by the tumblr tests."""

    _activity_log_1 = None
    _activity_log_2 = None
    _auth_user = None
    _book = None
    _book_page = None
    _book_page_2 = None
    _creator = None

    # C0103: *Invalid name "%s" (should match %s)*
    # pylint: disable=C0103
    def setUp(self):
        # Records are created in foreign-key dependency order:
        # auth_user -> creator -> book -> pages -> activity logs.
        self._auth_user = self.add(AuthUser, dict(
            name='<NAME>'
        ))
        self._creator = self.add(Creator, dict(
            auth_user_id=self._auth_user.id,
            email='<EMAIL>',
            name_for_url='FirstLast',
            tumblr='http://firstlast.tumblr.com',
        ))
        self._book = self.add(Book, dict(
            name='My Book',
            number=1,
            creator_id=self._creator.id,
            book_type_id=BookType.by_name('ongoing').id,
            name_for_url='MyBook-001',
        ))
        page = self.add(BookPage, dict(
            book_id=self._book.id,
            page_no=1,
        ))
        # Re-fetch so the stored records carry any db-side defaults.
        self._book_page = BookPage.from_id(page.id)
        page_2 = self.add(BookPage, dict(
            book_id=self._book.id,
            page_no=2,
        ))
        self._book_page_2 = BookPage.from_id(page_2.id)
        self._activity_log_1 = self.add(ActivityLog, dict(
            book_id=self._book.id,
            book_page_ids=[self._book_page.id],
            action='page added',
            ongoing_post_id=None,
        ))
        self._activity_log_2 = self.add(ActivityLog, dict(
            book_id=self._book.id,
            book_page_ids=[self._book_page_2.id],
            action='page added',
            ongoing_post_id=None,
        ))
        super().setUp()
class WithDateTestCase(LocalTestCase):
    """Base test case providing today's date in self._date."""

    _date = None

    # C0103: *Invalid name "%s" (should match %s)*
    # pylint: disable=C0103
    def setUp(self):
        self._date = datetime.date.today()
        super().setUp()
class DubClient(object):
    """Stub pytumblr client that records posts in memory."""

    def __init__(self):
        # Map of post_id -> kwargs supplied when the post was created.
        self.posts = {}

    def _record_post(self, kwargs):
        # Shared implementation for create_photo/create_text: store the
        # kwargs under a fresh uuid and return that uuid as the post id.
        post_id = uuid.uuid4()
        self.posts[post_id] = kwargs
        return post_id

    def create_photo(self, unused_username, **kwargs):
        """Record a photo post and return its post id."""
        return self._record_post(kwargs)

    def create_text(self, unused_username, **kwargs):
        """Record a text post and return its post id."""
        return self._record_post(kwargs)

    def delete_post(self, post_id):
        """Delete a recorded post; raises KeyError if unknown."""
        del self.posts[post_id]
class TestAuthenticator(LocalTestCase):

    def test____init__(self):
        authenticator = Authenticator({})
        self.assertTrue(authenticator)

    def test__authenticate(self):
        # Empty credentials: tumblr should reject the request with a 401.
        credentials = {
            'consumer_key': '',
            'consumer_secret': '',
            'oauth_token': '',
            'oauth_secret': '',
        }
        authenticator = Authenticator(credentials)
        client = authenticator.authenticate()
        info = client.info()
        # Bug fix: the original used assertTrue(x, '401') where '401' was
        # silently treated as the assertion *message*, so the test passed
        # for any truthy status. Compare the status value for real.
        self.assertEqual(str(info['meta']['status']), '401')
class TestBookListingCreator(WithObjectsTestCase):

    def test____init__(self):
        self.assertTrue(BookListingCreator(self._creator))

    def test__link(self):
        link = BookListingCreator(self._creator).link()
        soup = BeautifulSoup(str(link), 'html.parser')
        # Expect: <a href="http://123.zco.mx">First Last</a>
        anchor = soup.find('a')
        self.assertEqual(anchor.string, 'First Last')
        self.assertEqual(
            anchor['href'],
            'http://{cid}.zco.mx'.format(cid=self._creator.id)
        )
class TestBookListingCreatorWithTumblr(WithObjectsTestCase):

    def test__link(self):
        link = BookListingCreatorWithTumblr(self._creator).link()
        soup = BeautifulSoup(str(link), 'html.parser')
        # Expect: <a href="http://firstlast.tumblr.com">First Last</a>
        anchor = soup.find('a')
        self.assertEqual(anchor.string, 'First Last')
        self.assertEqual(anchor['href'], 'http://firstlast.tumblr.com')
class TestOngoingBookListing(WithObjectsTestCase):

    def test____init__(self):
        listing = OngoingBookListing(
            Row({'name': 'test____init__'}),
            BookPage({'name_for_url': 'FirstLast'}),
            []
        )
        self.assertTrue(listing)

    def test__components(self):
        listing = OngoingBookListing(
            self._book, [self._book_page, self._book_page_2], self._creator)
        parts = listing.components()
        self.assertEqual(len(parts), 7)
        # Plain-text separators between the link components.
        for idx, text in [(1, ' by '), (3, ' - '), (5, ' ')]:
            self.assertEqual(parts[idx], text)
        # <a href="http://zco.mx/FirstLast/MyBook-001">My Book 001</a>
        anchor = BeautifulSoup(str(parts[0]), 'html.parser').find('a')
        self.assertEqual(anchor.string, 'My Book 001')
        self.assertEqual(anchor['href'], 'http://zco.mx/FirstLast/MyBook-001')
        # <a href="http://firstlast.tumblr.com">First Last</a>
        anchor = BeautifulSoup(str(parts[2]), 'html.parser').find('a')
        self.assertEqual(anchor.string, 'First Last')
        self.assertEqual(anchor['href'], 'http://firstlast.tumblr.com')
        # <a href="http://zco.mx/FirstLast/MyBook-001/001">p01</a>
        anchor = BeautifulSoup(str(parts[4]), 'html.parser').find('a')
        self.assertEqual(anchor.string, 'p01')
        self.assertEqual(
            anchor['href'],
            'http://zco.mx/FirstLast/MyBook-001/001'
        )
        # Abridged list: long page runs are elided with '...'
        listing = OngoingBookListing(
            self._book, [self._book_page] * 10, self._creator)
        parts = listing.components()
        self.assertEqual(len(parts), 11)
        for idx, text in [(1, ' by '), (3, ' - '), (5, ' '),
                          (7, ' '), (8, '...'), (9, ' ')]:
            self.assertEqual(parts[idx], text)

    def test__from_activity_log(self):
        activity_log = Row(dict(
            book_id=self._book.id,
            book_page_ids=[self._book_page.id, self._book_page_2.id],
        ))
        listing = OngoingBookListing.from_activity_log(activity_log)
        self.assertTrue(isinstance(listing, OngoingBookListing))
        self.assertEqual(listing.book, self._book)
        self.assertEqual(
            listing.book_pages[0],
            BookPage.from_id(self._book_page.id)
        )
        self.assertEqual(
            listing.book_pages[1],
            BookPage.from_id(self._book_page_2.id)
        )
        self.assertEqual(listing.creator, self._creator)
class TestTextDataPreparer(WithObjectsTestCase, WithDateTestCase):
def test____init__(self):
    preparer = TextDataPreparer(self._date, RecordGenerator(db.activity_log))
    self.assertTrue(preparer)
def test__body(self):
    # A third activity log covering both pages; combined with the
    # page-2-only log from setUp it produces two listing <li> entries.
    activity_log = self.add(ActivityLog, dict(
        book_id=self._book.id,
        book_page_ids=[self._book_page.id, self._book_page_2.id],
        action='page added',
        ongoing_post_id=None,
    ))
    log_ids = [activity_log.id, self._activity_log_2.id]
    query = (db.activity_log.id.belongs(log_ids))
    preparer = TextDataPreparer(
        self._date,
        RecordGenerator(query)
    )
    body = preparer.body()
    soup = BeautifulSoup(body, 'html.parser')
    # Expected structure:
    # <ul>
    #   <li>
    #     <i>
    #       <a href="http://zco.mx/FirstLast/MyBook-001">
    #         My Book 001
    #       </a>
    #     </i>
    #     by
    #     <a href="http://firstlast.tumblr.com">
    #       First Last
    #     </a>
    #     -
    #     <a href="http://zco.mx/FirstLast/MyBook-001/002">
    #       02
    #     </a>
    #   </li>
    #   <li>
    #     <span class="hidden"> --- </span>
    #     <i>
    #       <a href="http://zco.mx/FirstLast/MyBook-001">
    #         My Book 001
    #       </a>
    #     </i>
    #     by
    #     <a href="http://firstlast.tumblr.com">
    #       First Last
    #     </a>
    #     -
    #     <a href="http://zco.mx/FirstLast/MyBook-001/001">
    #       01
    #     </a>
    #     ,
    #     <a href="http://zco.mx/FirstLast/MyBook-001/002">
    #       02
    #     </a>
    #   </li>
    # </ul>
    ul = soup.ul
    lis = ul.findAll('li')
    self.assertEqual(len(lis), 2)
    # First <li>: single-page entry (book link, creator link, p02 link).
    li_1 = lis[0]
    li_1_i = li_1.findAll('i')
    self.assertEqual(len(li_1_i), 1)
    li_1_anchors = li_1.findAll('a')
    self.assertEqual(len(li_1_anchors), 3)
    self.assertEqual(li_1_anchors[0].string, 'My Book 001')
    self.assertEqual(
        li_1_anchors[0]['href'], 'http://zco.mx/FirstLast/MyBook-001')
    self.assertEqual(li_1_anchors[1].string, 'First Last')
    self.assertEqual(
        li_1_anchors[1]['href'], 'http://firstlast.tumblr.com')
    self.assertEqual(li_1_anchors[2].string, 'p02')
    self.assertEqual(
        li_1_anchors[2]['href'],
        'http://zco.mx/FirstLast/MyBook-001/002'
    )
    self.assertEqual(len(li_1.contents), 5)
    self.assertEqual(li_1.contents[1], ' by ')
    self.assertEqual(li_1.contents[3], ' - ')
    # Second <li>: two-page entry, prefixed with a hidden separator span.
    li_2 = lis[1]
    li_2_spans = li_2.findAll('span')
    self.assertEqual(len(li_2_spans), 1)
    self.assertEqual(li_2_spans[0].string, ' --- ')
    li_2_i = li_2.findAll('i')
    self.assertEqual(len(li_2_i), 1)
    li_2_anchors = li_2.findAll('a')
    self.assertEqual(len(li_2_anchors), 4)
    self.assertEqual(li_2_anchors[0].string, 'My Book 001')
    self.assertEqual(
        li_2_anchors[0]['href'], 'http://zco.mx/FirstLast/MyBook-001')
    self.assertEqual(li_2_anchors[1].string, 'First Last')
    self.assertEqual(
        li_2_anchors[1]['href'], 'http://firstlast.tumblr.com')
    self.assertEqual(li_2_anchors[2].string, 'p01')
    self.assertEqual(
        li_2_anchors[2]['href'],
        'http://zco.mx/FirstLast/MyBook-001/001'
    )
    self.assertEqual(li_2_anchors[3].string, 'p02')
    self.assertEqual(
        li_2_anchors[3]['href'],
        'http://zco.mx/FirstLast/MyBook-001/002'
    )
    self.assertEqual(len(li_2.contents), 8)
    self.assertEqual(li_2.contents[2], ' by ')
    self.assertEqual(li_2.contents[4], ' - ')
    self.assertEqual(li_2.contents[6], ' ')
def test__book_listing_generator(self):
log_ids = [self._activity_log_1.id, self._activity_log_2.id]
query = (db.activity_log.id.belongs(log_ids))
preparer = TextDataPreparer(
self._date,
RecordGenerator(query)
)
generator = preparer.book_listing_generator()
got = next(generator)
self.assertTrue(isinstance(got, OngoingBookListing))
self.assertEqual(got.book, self._book)
self.assertEqual(got.book_pages, [self._book_page])
self.assertEqual(got.creator, self._creator)
got = next(generator)
self.assertTrue(isinstance(got, OngoingBookListing))
self.assertEqual(got.book, self._book)
self.assertEqual(got.book_pages, [self._book_page_2])
self.assertEqual(got.creator, self._creator)
self.assertRaises(StopIteration, generator.__next__)
def test__data(self):
date = datetime.date(1999, 12, 31)
log_ids = [self._activity_log_1.id, self._activity_log_2.id]
query = (db.activity_log.id.belongs(log_ids))
preparer = TextDataPreparer(
date,
RecordGenerator(query)
)
# C0301 (line-too-long): *Line too long (%%s/%%s)*
# pylint: disable=C0301
self.assertEqual(
preparer.data(),
{
'body': '<ul><li><i><a href="http://zco.mx/FirstLast/MyBook-001">My Book 001</a></i> by <a href="http://firstlast.tumblr.com">First Last</a> - <a href="http://zco.mx/FirstLast/MyBook-001/001">p01</a></li><li><span class="hidden"> --- </span><i><a href="http://zco.mx/FirstLast/MyBook-001">My Book 001</a></i> by <a href="http://firstlast.tumblr.com">First Last</a> - <a href="http://zco.mx/FirstLast/MyBook-001/002">p02</a></li></ul>',
'format': 'html',
'slug': 'ongoing-books-update-1999-12-31',
'state': 'published',
'tags': ['comics', 'zco.mx'],
'title': 'Updated Ongoing Books for Fri, Dec 31, 1999'
}
)
def test__slug(self):
date = datetime.date(1999, 12, 31)
generator = RecordGenerator(db.activity_log)
preparer = TextDataPreparer(date, generator)
self.assertEqual(
preparer.slug(),
'ongoing-books-update-1999-12-31'
)
def test__tags(self):
generator = RecordGenerator(db.activity_log)
preparer = TextDataPreparer(self._date, generator)
self.assertEqual(
preparer.tags(),
['comics', 'zco.mx']
)
def test__title(self):
# C0301 (line-too-long): *Line too long (%%s/%%s)*
# pylint: disable=C0301
date = datetime.date(1999, 12, 31)
generator = RecordGenerator(db.activity_log)
preparer = TextDataPreparer(date, generator)
self.assertEqual(
preparer.title(),
'Updated Ongoing Books for Fri, Dec 31, 1999'
)
class TestPhotoDataPreparer(LocalTestCase):
    """Unit tests for PhotoDataPreparer, the tumblr photo-post data builder."""
    def test____init__(self):
        """A PhotoDataPreparer can be constructed from a bare dict."""
        preparer = PhotoDataPreparer({})
        self.assertTrue(preparer)
    def test__caption(self):
        """caption() renders book/creator data as an HTML caption."""
        # C0301 (line-too-long): *Line too long (%%s/%%s)*
        # pylint: disable=C0301
        data = {
            'book': {
                'formatted_name': 'My Book 001 (1999)',
                'description': 'This is my book!',
                'url': 'http://zco.mx/FirstLast/MyBook',
            },
            'creator': {
                'social_media': [
                    ('website', 'http://website.com'),
                    ('twitter', 'http://twitter.com'),
                    ('tumblr', 'http://tumblr.com'),
                ],
                'url': 'http://zco.mx/FirstLast',
            },
        }
        expect = """<h3><a href="http://zco.mx/FirstLast/MyBook">My Book 001 (1999)</a></h3><p>This is my book!</p><p>by <a href="http://zco.mx/FirstLast">http://zco.mx/FirstLast</a> | <a href="http://website.com">website</a> | <a href="http://twitter.com">twitter</a> | <a href="http://tumblr.com">tumblr</a></p>"""
        preparer = PhotoDataPreparer(data)
        self.assertEqual(preparer.caption(), expect)
        # No description, no social media
        data['book']['description'] = None
        data['creator']['social_media'] = []
        expect = """<h3><a href="http://zco.mx/FirstLast/MyBook">My Book 001 (1999)</a></h3><p>by <a href="http://zco.mx/FirstLast">http://zco.mx/FirstLast</a></p>"""
        preparer = PhotoDataPreparer(data)
        self.assertEqual(preparer.caption(), expect)
    def test__data(self):
        """data() assembles the complete tumblr photo post payload."""
        # C0301 (line-too-long): *Line too long (%%s/%%s)*
        # pylint: disable=C0301
        data = {
            'book': {
                'description': None,
                'download_url': 'http://source',
                'formatted_name': 'My Book 001 (1999)',
                'name': 'My Book',
                'name_for_search': 'my-book-001',
                'url': 'http://zco.mx/FirstLast/MyBook',
            },
            'creator': {
                'name_for_search': 'first-last',
                'social_media': [],
                'name_for_url': 'FirstLast',
                'url': 'http://zco.mx/FirstLast',
            },
            'site': {
                'name': 'zco.mx'
            }
        }
        expect = {
            'state': 'published',
            'tags': ['My Book', 'FirstLast', 'comics', 'zco.mx'],
            'tweet': None,
            'slug': 'first-last-my-book-001',
            'format': 'html',
            'source': 'http://source',
            'link': 'http://zco.mx/FirstLast/MyBook',
            'caption': '<h3><a href="http://zco.mx/FirstLast/MyBook">My Book 001 (1999)</a></h3><p>by <a href="http://zco.mx/FirstLast">http://zco.mx/FirstLast</a></p>',
        }
        preparer = PhotoDataPreparer(data)
        self.assertEqual(preparer.data(), expect)
    def test__slug(self):
        """slug() joins the creator and book search names."""
        data = {
            'book': {
                'name_for_search': 'my-book-001',
            },
            'creator': {
                'name_for_search': 'first-last',
            },
        }
        preparer = PhotoDataPreparer(data)
        self.assertEqual(preparer.slug(), 'first-last-my-book-001')
    def test__tags(self):
        """tags() combines book name, creator url name and site tags."""
        data = {
            'book': {
                'name': 'My Book',
            },
            'creator': {
                'name_for_url': 'First Last',
            },
            'site': {
                'name': 'zco.mx'
            }
        }
        preparer = PhotoDataPreparer(data)
        self.assertEqual(
            preparer.tags(),
            ['My Book', 'First Last', 'comics', 'zco.mx']
        )
class TestPoster(LocalTestCase):
    """Unit tests for the Poster wrapper around a tumblr client."""
    def test____init__(self):
        """A Poster can be constructed from any client."""
        self.assertTrue(Poster(DubClient()))
    def test__delete_post(self):
        """delete_post removes a previously created post from the client."""
        stub_client = DubClient()
        self.assertEqual(stub_client.posts, {})
        poster = Poster(stub_client)
        created_id = poster.post_photo('username', {})
        self.assertEqual(stub_client.posts, {created_id: {}})
        poster.delete_post(created_id)
        self.assertEqual(stub_client.posts, {})
    def test__post_photo(self):
        """post_photo forwards the photo settings to the client unchanged."""
        stub_client = DubClient()
        created_id = Poster(stub_client).post_photo('username', {'aaa': 'bbb'})
        self.assertEqual(stub_client.posts, {created_id: {'aaa': 'bbb'}})
    def test__post_text(self):
        """post_text forwards the text settings to the client unchanged."""
        stub_client = DubClient()
        created_id = Poster(stub_client).post_text('username', {'aaa': 'bbb'})
        self.assertEqual(stub_client.posts, {created_id: {'aaa': 'bbb'}})
class TestFunctions(WithObjectsTestCase, WithDateTestCase):
def test__book_listing_creator(self):
creator = Row({
'tumblr': None,
})
got = book_listing_creator(creator)
self.assertTrue(isinstance(got, BookListingCreator))
creator = Row({
'tumblr': 'http://user.tumblr.com',
})
got = book_listing_creator(creator)
self.assertTrue(isinstance(got, BookListingCreatorWithTumblr))
def test__postable_activity_log_ids(self):
book = self.add(Book, dict(
name='test__activity_log_ids'
))
activity_log = self.add(ActivityLog, dict(
book_id=book.id,
))
def reset():
book.update_record(release_date=None)
| |
in deleted_sample_files:
deleted_sample_files.append( ctx_file_name )
else:
sample_files.append( ctx_file_name )
tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
fh = open( tmp_ctx_file_name, 'wb' )
fh.write( fctx.data() )
fh.close()
return sample_files, deleted_sample_files
def get_or_create_tool_section( trans, tool_panel_section_id, new_tool_panel_section=None ):
    """Return a ( tool_panel_section_key, ToolSection ) pair for the received section id.

    If the section already exists in trans.app.toolbox.tool_panel it is reused;
    otherwise a new section is created from new_tool_panel_section, which may be
    either an object with a .name attribute or a plain string.  Returns
    ( None, None ) when no section name can be determined.
    """
    tool_panel_section_key = 'section_%s' % str( tool_panel_section_id )
    if tool_panel_section_key in trans.app.toolbox.tool_panel:
        # Appending a tool to an existing section in trans.app.toolbox.tool_panel
        tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
        log.debug( "Appending to tool panel section: %s" % str( tool_section.name ) )
    else:
        # Appending a new section to trans.app.toolbox.tool_panel.  The received
        # new_tool_panel_section may be an object carrying a .name attribute or a
        # plain string, so fall back to the value itself when no attribute exists.
        # (This replaces a bare try/except that swallowed every exception type.)
        new_tool_panel_section_name = getattr( new_tool_panel_section, 'name', new_tool_panel_section )
        if new_tool_panel_section_name:
            elem = XmlET.Element( 'section' )
            elem.attrib[ 'name' ] = new_tool_panel_section_name
            elem.attrib[ 'id' ] = tool_panel_section_id
            elem.attrib[ 'version' ] = ''
            tool_section = galaxy.tools.ToolSection( elem )
            trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
            log.debug( "Loading new tool panel section: %s" % str( tool_section.name ) )
        else:
            log.debug( "Unable to create new tool pane section using received new_tool_panel_section: %s" % str( new_tool_panel_section ))
            return None, None
    return tool_panel_section_key, tool_section
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
    """Return ( tool_path, relative_install_dir ) for the tool matching tool_dict[ 'guid' ].

    Searches the received config elements, descending one level into <section>
    elements, and returns ( None, None ) when no matching <tool> element is found.
    """
    target_guid = tool_dict[ 'guid' ]
    for elem in config_elems:
        # Collect <tool> candidates both at the top level and nested in a <section>.
        if elem.tag == 'tool':
            candidates = [ elem ]
        elif elem.tag == 'section':
            candidates = [ child for child in elem if child.tag == 'tool' ]
        else:
            candidates = []
        for tool_elem in candidates:
            if tool_elem.get( 'guid' ) == target_guid:
                tool_path = shed_tool_conf_dict[ 'tool_path' ]
                return tool_path, os.path.join( tool_path, partial_install_dir )
    return None, None
def get_tool_index_sample_files( sample_files ):
    """Try to return the list of all appropriate tool data sample files included in the repository.

    Only file names ending in .loc.sample, .xml.sample or .txt.sample qualify
    as tool index sample files; everything else is ignored.
    """
    # The problem with this is that Galaxy does not follow a standard naming convention for file names.
    # str.endswith accepts a tuple of suffixes, replacing the chained 'or' tests.
    return [ str( s ) for s in sample_files if s.endswith( ( '.loc.sample', '.xml.sample', '.txt.sample' ) ) ]
def get_tool_version( app, tool_id ):
    """Return the first ToolVersion record whose tool_id matches, or None."""
    sa_session = app.model.context.current
    query = sa_session.query( app.model.ToolVersion )
    query = query.filter( app.model.ToolVersion.table.c.tool_id == tool_id )
    return query.first()
def get_tool_version_association( app, parent_tool_version, tool_version ):
    """Return a ToolVersionAssociation if one exists that associates the two received tool_versions"""
    sa_session = app.model.context.current
    table_columns = app.model.ToolVersionAssociation.table.c
    association_filter = and_( table_columns.parent_id == parent_tool_version.id,
                               table_columns.tool_id == tool_version.id )
    return sa_session.query( app.model.ToolVersionAssociation ).filter( association_filter ).first()
def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
    """
    Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
    tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed
    or reinstalled.
    """
    missing_data_table_entry = False
    for index, repository_tools_tup in enumerate( repository_tools_tups ):
        tup_path, guid, repository_tool = repository_tools_tup
        if repository_tool.params_with_missing_data_table_entry:
            missing_data_table_entry = True
            break
    if missing_data_table_entry:
        # The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
        sample_tool_data_table_conf = suc.get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
        if sample_tool_data_table_conf:
            # Add entries to the ToolDataTableManager's in-memory data_tables dictionary.
            error, message = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
            if error:
                # TODO: Do more here than logging an exception.
                log.debug( message )
            # Reload the tool into the local list of repository_tools_tups.
            # NOTE(review): index / tup_path / guid come from the loop iteration that
            # hit 'break' above, so only the first tool found with a missing data
            # table entry is reloaded here -- confirm the remaining tools do not
            # also need reloading.
            repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
            repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
        # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
        reset_tool_data_tables( app )
    return repository_tools_tups
def handle_missing_index_file( app, tool_path, sample_files, repository_tools_tups, sample_files_copied ):
    """
    Inspect each tool to see if it has any input parameters that are dynamically generated select lists that depend on a .loc file.
    This method is not called from the tool shed, but from Galaxy when a repository is being installed.

    Returns the updated ( repository_tools_tups, sample_files_copied ) pair; note that
    sample_files_copied is mutated in place as well as returned.
    """
    for index, repository_tools_tup in enumerate( repository_tools_tups ):
        tup_path, guid, repository_tool = repository_tools_tup
        params_with_missing_index_file = repository_tool.params_with_missing_index_file
        for param in params_with_missing_index_file:
            options = param.options
            missing_file_name = suc.strip_path( options.missing_index_file )
            if missing_file_name not in sample_files_copied:
                # The repository must contain the required xxx.loc.sample file.
                for sample_file in sample_files:
                    sample_file_name = suc.strip_path( sample_file )
                    if sample_file_name == '%s.sample' % missing_file_name:
                        copy_sample_file( app, sample_file )
                        if options.tool_data_table and options.tool_data_table.missing_index_file:
                            options.tool_data_table.handle_found_index_file( options.missing_index_file )
                        # Track the copied file so other tools needing the same index
                        # file do not copy it again.
                        sample_files_copied.append( options.missing_index_file )
                        break
        # Reload the tool into the local list of repository_tools_tups.
        repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
        repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
    return repository_tools_tups, sample_files_copied
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, repository_id, tool_config_filepath, work_dir ):
    """Copy the repository's sample files into work_dir and load the tool from disk.

    Returns a ( tool, valid, message, sample_files ) tuple.
    """
    # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
    message = ''
    sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
    if sample_files and 'tool_data_table_conf.xml.sample' in sample_files:
        # Load entries into the tool_data_tables if the tool requires them.
        tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
        error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
    tool, valid, message2 = load_tool_from_config( trans.app, repository_id, tool_config_filepath )
    message = concat_messages( message, message2 )
    return tool, valid, message, sample_files
def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, repository_id, changeset_revision, tool_config_filename, work_dir ):
    """Copy sample files named in the repository manifest into work_dir and load the tool from a temporary config.

    Returns a ( tool, message, sample_files ) tuple; tool is None when the tool
    config cannot be located in the manifest for the changeset revision.
    """
    tool = None
    message = ''
    ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
    # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
    # the manifest, but have been deleted from disk.
    sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
    if sample_files:
        # Point the app's tool data path at the work dir so files copied above are found.
        trans.app.config.tool_data_path = work_dir
        if 'tool_data_table_conf.xml.sample' in sample_files:
            # Load entries into the tool_data_tables if the tool requires them.
            tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
            if tool_data_table_config:
                error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
                if error:
                    log.debug( message )
    manifest_ctx, ctx_file = suc.get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
    if manifest_ctx and ctx_file:
        tool, message2 = load_tool_from_tmp_config( trans, repo, repository_id, manifest_ctx, ctx_file, work_dir )
        message = concat_messages( message, message2 )
    return tool, message, sample_files
def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
    """
    Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
    if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.

    Returns an ( error, message ) pair; error is True when parsing raised or the
    manager reported a problem via a non-empty message.
    """
    error = False
    message = ''
    try:
        new_table_elems, message = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
                                                                                          tool_data_path=app.config.tool_data_path,
                                                                                          shed_tool_data_table_config=app.config.shed_tool_data_table_config,
                                                                                          persist=persist )
        # A non-empty message from the manager indicates a problem even without an exception.
        if message:
            error = True
    # NOTE: Python 2 'except ..., e' syntax -- this module targets Python 2.
    except Exception, e:
        message = str( e )
        error = True
    return error, message
def handle_tool_panel_selection( trans, metadata, no_changes_checked, tool_panel_section, new_tool_panel_section ):
    """Handle the selected tool panel location for loading tools included in tool shed repositories when installing or reinstalling them.

    Returns a ( tool_section, new_tool_panel_section, tool_panel_section_key )
    triple; the first and last are None when metadata contains no tools or no
    section could be resolved.
    """
    # Get the location in the tool panel in which each tool was originally loaded.
    tool_section = None
    tool_panel_section_key = None
    if 'tools' in metadata:
        # This forces everything to be loaded into the same section (or no section) in the tool panel.
        if no_changes_checked:
            # Make sure the no_changes check box overrides the new_tool_panel_section if the user checked the check box and entered something into the field.
            new_tool_panel_section = None
            if 'tool_panel_section' in metadata:
                tool_panel_dict = metadata[ 'tool_panel_section' ]
                if not tool_panel_dict:
                    tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
            else:
                tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata[ 'tools' ] )
            if tool_panel_dict:
                # The tool_panel_dict is empty when tools exist but are not installed into a tool panel section.
                # NOTE: .keys()[ 0 ] relies on Python 2 dict.keys() returning a list.
                tool_section_dicts = tool_panel_dict[ tool_panel_dict.keys()[ 0 ] ]
                tool_section_dict = tool_section_dicts[ 0 ]
                original_section_id = tool_section_dict[ 'id' ]
                original_section_name = tool_section_dict[ 'name' ]
                if original_section_id:
                    tool_panel_section_key, tool_section = get_or_create_tool_section( trans,
                                                                                      tool_panel_section_id=original_section_id,
                                                                                      new_tool_panel_section=new_tool_panel_section )
        else:
            # The user elected to change the tool panel section to contain the tools.
            tool_panel_section_key, tool_section = handle_tool_panel_section( trans,
                                                                             tool_panel_section=tool_panel_section,
                                                                             new_tool_panel_section=new_tool_panel_section )
    return tool_section, new_tool_panel_section, tool_panel_section_key
def handle_tool_panel_section( trans, tool_panel_section=None, new_tool_panel_section=None ):
    """Resolve the tool panel section to use, preferring a newly named section.

    Returns a ( tool_panel_section_key, tool_section ) pair, or ( None, None )
    when neither a new section name nor an existing section id was received.
    """
    if new_tool_panel_section:
        # Derive a section id from the new section name and create it on demand.
        section_id = new_tool_panel_section.lower().replace( ' ', '_' )
        return get_or_create_tool_section( trans,
                                           tool_panel_section_id=section_id,
                                           new_tool_panel_section=new_tool_panel_section )
    if tool_panel_section:
        # Look up the existing section in the in-memory tool panel.
        tool_panel_section_key = 'section_%s' % str( tool_panel_section )
        return tool_panel_section_key, trans.app.toolbox.tool_panel[ tool_panel_section_key ]
    return None, None
def handle_tool_versions( app, tool_version_dicts, tool_shed_repository ):
"""
Using the list of tool_version_dicts retrieved from the tool shed (one per changeset revison up to the currently installed changeset revision),
create the parent / child pairs of tool versions. Each dictionary contains { tool id : parent tool id } pairs.
"""
sa_session = app.model.context.current
for | |
<reponame>WinterSnowfall/gog_gles<filename>scripts/gog_builds_scan.py<gh_stars>0
#!/usr/bin/env python3
'''
@author: <NAME>
@version: 3.00
@date: 20/04/2022
Warning: Built for use with python 3.6+
'''
import json
import threading
import sqlite3
import signal
import requests
import logging
import argparse
import difflib
import re
import os
from sys import argv
from shutil import copy2
from configparser import ConfigParser
from datetime import datetime
from time import sleep
from queue import Queue
from collections import OrderedDict
from logging.handlers import RotatingFileHandler
#uncomment for debugging purposes only
#import traceback
##global parameters init
configParser = ConfigParser()
db_lock = threading.Lock()
config_lock = threading.Lock()
terminate_signal = False
##conf file block
conf_file_full_path = os.path.join('..', 'conf', 'gog_builds_scan.conf')
##logging configuration block
log_file_full_path = os.path.join('..', 'logs', 'gog_builds_scan.log')
logger_file_handler = RotatingFileHandler(log_file_full_path, maxBytes=8388608, backupCount=1, encoding='utf-8')
logger_format = '%(asctime)s %(levelname)s >>> %(message)s'
logger_file_handler.setFormatter(logging.Formatter(logger_format))
#logging level for other modules
logging.basicConfig(format=logger_format, level=logging.ERROR) #DEBUG, INFO, WARNING, ERROR, CRITICAL
logger = logging.getLogger(__name__)
#logging level for current logger
logger.setLevel(logging.INFO) #DEBUG, INFO, WARNING, ERROR, CRITICAL
logger.addHandler(logger_file_handler)
##db configuration block
db_file_full_path = os.path.join('..', 'output_db', 'gog_gles.db')
##CONSTANTS
INSERT_BUILD_QUERY = 'INSERT INTO gog_builds VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)'
UPDATE_BUILD_QUERY = ('UPDATE gog_builds SET gb_int_updated = ?, '
'gb_int_json_payload = ?, '
'gb_int_json_diff = ?, '
'gb_total_count = ?, '
'gb_count = ?, '
'gb_main_version_names = ?, '
'gb_branch_version_names = ?, '
'gb_has_private_branches = ? WHERE gb_int_id = ? AND gb_int_os = ?')
INSERT_INSTALLERS_DELTA_QUERY = 'INSERT INTO gog_installers_delta VALUES (?,?,?,?,?,?,?,?,?)'
OPTIMIZE_QUERY = 'PRAGMA optimize'
#static regex pattern for removing GOG version strings from builds/installers
GOG_VERSION_REMOVAL_REGEX = re.compile('GOG[0-9]{0,5}')
#value separator for multi-valued fields
MVF_VALUE_SEPARATOR = '; '
def sigterm_handler(signum, frame):
    """Signal handler: convert SIGTERM into a graceful SystemExit."""
    logger.info('Stopping scan due to SIGTERM...')
    raise SystemExit(0)
def terminate_script():
    """Flush OS buffers and kill the whole process (all threads) with SIGKILL."""
    logger.critical('Forcefully stopping script!')
    #flush buffers
    os.sync()
    #forcefully terminate script process; SIGKILL also takes down worker threads
    os.kill(os.getpid(), signal.SIGKILL)
def gog_builds_query(product_id, os, scan_mode, session, db_connection):
    """Query the GOG content-system API for one product/OS pair and insert or
    update the corresponding row in the gog_builds table.

    Returns True when the query completed (even when nothing changed), False on
    transient SSL/connection/parsing errors so the caller can retry.

    NOTE: the 'os' parameter (operating system string) shadows the os module
    inside this function; the module is not used here.
    """
    builds_url = f'https://content-system.gog.com/products/{product_id}/os/{os}/builds?generation=2'
    try:
        response = session.get(builds_url, timeout=HTTP_TIMEOUT)
        logger.debug(f'BQ >>> HTTP response code: {response.status_code}.')
        if response.status_code == 200:
            try:
                #OrderedDict preserves the API's key order so dumps/diffs are stable
                json_parsed = json.loads(response.text, object_pairs_hook=OrderedDict)
                total_count = json_parsed['total_count']
                logger.debug(f'BQ >>> Total count: {total_count}.')
            except:
                logger.warning(f'BQ >>> Unable to retrieve total_count for {product_id}, {os}.')
                raise Exception()
            if total_count > 0:
                logger.debug(f'BQ >>> Found builds for id {product_id}, {os}...')
                db_cursor = db_connection.execute('SELECT COUNT(*) FROM gog_builds WHERE gb_int_id = ? AND gb_int_os = ?', (product_id, os))
                entry_count = db_cursor.fetchone()[0]
                #no need to do any processing if an entry is found in 'full', 'products' or 'manual' scan modes,
                #since that entry will be skipped anyway
                if not (entry_count == 1 and (scan_mode == 'full' or scan_mode == 'products' or scan_mode == 'manual')):
                    json_formatted = json.dumps(json_parsed, sort_keys=True, indent=4, separators=(',', ': '), ensure_ascii=False)
                    count = json_parsed['count']
                    #main and branch version names splitting and annotation logic
                    if len(json_parsed['items']) != 0:
                        main_item_list = []
                        branch_item_list = []
                        for item in json_parsed['items']:
                            if item['version_name'] != '':
                                current_branch = item['branch']
                                current_version_name = item['version_name']
                                #there are no blank string branches as of now, only null ones
                                if current_branch is not None:
                                    branch_item_list.append(f'{current_version_name} ||| {current_branch}')
                                else:
                                    main_item_list.append(current_version_name)
                        main_version_names = MVF_VALUE_SEPARATOR.join(main_item_list)
                        branch_version_names = MVF_VALUE_SEPARATOR.join(branch_item_list)
                        #older entries may contain only a single un-named version
                        if main_version_names == '': main_version_names = None
                        if branch_version_names == '': branch_version_names = None
                    else:
                        main_version_names = None
                        branch_version_names = None
                    has_private_branches = json_parsed['has_private_branches']
                    db_cursor.execute('SELECT gp_title FROM gog_products WHERE gp_id = ?', (product_id, ))
                    result = db_cursor.fetchone()
                    #entries with just hidden builds will not link to any gog_product entry
                    product_name = result[0] if result is not None else None
                    if entry_count == 0:
                        #gb_int_nr, gb_int_added, gb_int_updated, gb_int_json_payload,
                        #gb_int_json_diff, gb_id, gb_product_title, gb_os,
                        #gb_total_count, gb_count, gb_main_version_names,
                        #gb_branch_version_names, gb_has_private_branches
                        with db_lock:
                            db_cursor.execute(INSERT_BUILD_QUERY, (None, datetime.now(), None, json_formatted,
                                                                   None, product_id, product_name, os,
                                                                   total_count, count, main_version_names,
                                                                   branch_version_names, has_private_branches))
                            db_connection.commit()
                        logger.info(f'BQ +++ Added a new DB entry for {product_id}: {product_name}, {os}.')
                    elif entry_count == 1:
                        #do not update existing entries in a full, products or manual scan, since update/delta scans will take care of that
                        if scan_mode == 'full' or scan_mode == 'products' or scan_mode == 'manual':
                            logger.info(f'BQ >>> Found an existing db entry with id {product_id}, {os}. Skipping.')
                        else:
                            db_cursor.execute('SELECT gb_int_json_payload, gb_int_title FROM gog_builds '
                                              'WHERE gb_int_id = ? AND gb_int_os = ?', (product_id, os))
                            existing_json_formatted, existing_product_name = db_cursor.fetchone()
                            if product_name is not None and existing_product_name != product_name:
                                logger.info(f'BQ >>> Found a valid (or new) product name: {product_name}. Updating...')
                                with db_lock:
                                    db_cursor.execute('UPDATE gog_builds SET gb_int_title = ? WHERE gb_int_id = ? AND gb_int_os = ?',
                                                      (product_name, product_id, os))
                                    db_connection.commit()
                                logger.info(f'BQ ~~~ Successfully updated product name for DB entry with id {product_id}, {os}.')
                            if existing_json_formatted != json_formatted:
                                logger.debug(f'BQ >>> Existing entry for {product_id}, {os} is outdated. Updating...')
                                #calculate the diff between the new json and the previous one
                                #(applying the diff on the new json will revert to the previous version)
                                diff_formatted = ''.join([line for line in difflib.unified_diff(json_formatted.splitlines(1),
                                                                                               existing_json_formatted.splitlines(1), n=0)])
                                #gb_int_latest_update, gb_int_json_payload, gb_int_previous_json_diff,
                                #gb_total_count, gb_count, gb_main_version_names, gb_branch_version_names,
                                #gb_has_private_branches, gb_id (WHERE clause), gb_os (WHERE clause)
                                with db_lock:
                                    db_cursor.execute(UPDATE_BUILD_QUERY, (datetime.now(), json_formatted, diff_formatted,
                                                                           total_count, count, main_version_names, branch_version_names,
                                                                           has_private_branches, product_id, os))
                                    db_connection.commit()
                                logger.info(f'BQ ~~~ Updated the DB entry for {product_id}: {product_name}, {os}.')
        else:
            logger.warning(f'BQ >>> HTTP error code {response.status_code} received for {product_id}, {os}.')
            raise Exception()
        return True
    #sometimes the HTTPS connection encounters SSL errors
    except requests.exceptions.SSLError:
        logger.warning(f'BQ >>> Connection SSL error encountered for {product_id}, {os}.')
        return False
    #sometimes the HTTPS connection gets rejected/terminated
    except requests.exceptions.ConnectionError:
        logger.warning(f'BQ >>> Connection error encountered for {product_id}, {os}.')
        return False
    except:
        logger.debug(f'BQ >>> Builds query has failed for {product_id}, {os}.')
        #uncomment for debugging purposes only
        #logger.error(traceback.format_exc())
        return False
def worker_thread(thread_number, scan_mode):
    """Worker: consume (product_id, os) pairs from the shared queue and scan each.

    Retries failed queries with incremental backoff and forcefully terminates
    the whole script once RETRY_COUNT is exceeded.  On 'windows' scans, progress
    is checkpointed to the conf file every ID_SAVE_INTERVAL ids.
    """
    global terminate_signal
    threadConfigParser = ConfigParser()
    with requests.Session() as threadSession:
        with sqlite3.connect(db_file_full_path) as thread_db_connection:
            while not terminate_signal:
                # NOTE: 'queue' and ID_SAVE_INTERVAL are module-level globals defined
                # outside this chunk; 'os' here is the OS string, shadowing the os module.
                product_id, os = queue.get()
                retry_counter = 0
                retries_complete = False
                while not retries_complete and not terminate_signal:
                    if retry_counter > 0:
                        logger.debug(f'T#{thread_number} >>> Retry count: {retry_counter}.')
                        #incremental backoff sleep between retry iterations
                        sleep((retry_counter ** RETRY_AMPLIFICATION_FACTOR) * RETRY_SLEEP_INTERVAL)
                    retries_complete = gog_builds_query(product_id, os, scan_mode, threadSession, thread_db_connection)
                    if retries_complete:
                        if retry_counter > 0:
                            logger.info(f'T#{thread_number} >>> Succesfully retried for {product_id}, {os}.')
                    else:
                        retry_counter += 1
                        #terminate the scan if the RETRY_COUNT limit is exceeded
                        if retry_counter > RETRY_COUNT:
                            logger.critical(f'T#{thread_number} >>> Request most likely blocked/invalidated by GOG. Terminating process.')
                            terminate_signal = True
                            #forcefully terminate script
                            terminate_script()
                #only do product_id processing on 'windows' build scans
                if not terminate_signal and os == 'windows' and product_id % ID_SAVE_INTERVAL == 0:
                    with config_lock:
                        threadConfigParser.read(conf_file_full_path)
                        threadConfigParser['FULL_SCAN']['start_id'] = str(product_id)
                        with open(conf_file_full_path, 'w') as file:
                            threadConfigParser.write(file)
                        logger.info(f'T#{thread_number} >>> Processed up to id: {product_id}...')
                queue.task_done()
##main thread start
logger.info('*** Running BUILDS scan script ***')
#command-line interface; the scan modes are mutually exclusive
parser = argparse.ArgumentParser(description=('GOG builds scan (part of gog_gles) - a script to call publicly available GOG APIs '
                                              'in order to retrieve builds information and updates.'))
group = parser.add_mutually_exclusive_group()
group.add_argument('-u', '--update', help='Perform an update builds scan', action='store_true')
group.add_argument('-f', '--full', help='Perform a full builds scan', action='store_true')
group.add_argument('-p', '--products', help='Perform a products-based builds scan', action='store_true')
group.add_argument('-m', '--manual', help='Perform a manual builds scan', action='store_true')
group.add_argument('-d', '--delta', help='Produce a list of ids whose latest builds are exclusive to Galaxy', action='store_true')
args = parser.parse_args()
try:
    #reading from config file
    configParser.read(conf_file_full_path)
    general_section = configParser['GENERAL']
    #parsing generic parameters
    conf_backup = general_section.get('conf_backup')
    db_backup = general_section.get('db_backup')
    scan_mode = general_section.get('scan_mode')
    #parsing constants
    HTTP_TIMEOUT = general_section.getint('http_timeout')
    RETRY_COUNT = general_section.getint('retry_count')
    RETRY_SLEEP_INTERVAL = general_section.getint('retry_sleep_interval')
    RETRY_AMPLIFICATION_FACTOR = general_section.getint('retry_amplification_factor')
except:
    logger.critical('Could not parse configuration file. Please make sure the appropriate structure is in place!')
    raise SystemExit(1)
#detect any parameter overrides and set the scan_mode accordingly
#(command-line flags take precedence over the conf file's scan_mode)
if len(argv) > 1:
    logger.info('Command-line parameter mode override detected.')
    if args.update:
        scan_mode = 'update'
    elif args.full:
        scan_mode = 'full'
    elif args.products:
        scan_mode = 'products'
    elif args.manual:
        scan_mode = 'manual'
    elif args.delta:
        scan_mode = 'delta'
#boolean 'true' or scan_mode specific activation
if conf_backup == 'true' or conf_backup == scan_mode:
    if os.path.exists(conf_file_full_path):
        #create a backup of the existing conf file - mostly for debugging/recovery
        copy2(conf_file_full_path, conf_file_full_path + '.bak')
        logger.info('Successfully created conf file backup.')
    else:
        logger.critical('Could find specified conf file!')
        raise SystemExit(2)
#boolean 'true' or scan_mode specific activation
if db_backup == 'true' or db_backup == scan_mode:
    if os.path.exists(db_file_full_path):
        #create a backup of the existing db - mostly for debugging/recovery
        copy2(db_file_full_path, db_file_full_path + '.bak')
        logger.info('Successfully created DB backup.')
    else:
        #subprocess.run(['python', 'gog_create_db.py'])
        logger.critical('Could find specified DB file!')
        raise SystemExit(3)
if scan_mode == 'full':
logger.info('--- Running in FULL scan mode ---')
#catch SIGTERM and exit gracefully
signal.signal(signal.SIGTERM, sigterm_handler)
#theads sync (on exit) timeout interval (seconds)
THREAD_SYNC_TIMEOUT = 30
| |
in ['AccessKeyId', 'SecretAccessKey', 'SessionToken']):
return {
'AccessKeyId': creds['AccessKeyId'],
'SecretAccessKey': creds['SecretAccessKey'],
'Token': creds['SessionToken']
}, 'webidentity:' + ','.join([role_arn, token_file])
# Fail if credentials cannot be fetched from the given aws_creds_uri
if is_fatal:
fatal_error(unsuccessful_resp, unsuccessful_resp)
else:
return None, None
def get_aws_security_credentials_from_instance_metadata(iam_role_name):
    """Fetch AWS credentials for the given IAM role from EC2 instance metadata.

    Returns a (credentials_dict, 'metadata:') pair on success, or (None, None)
    when the lookup fails or the response lacks any of the required keys.
    """
    lookup_url = INSTANCE_IAM_URL + iam_role_name
    unsuccessful_resp = 'Unsuccessful retrieval of AWS security credentials at %s.' % lookup_url
    url_error_msg = 'Unable to reach %s to retrieve AWS security credentials. See %s for more info.' % \
                    (lookup_url, SECURITY_CREDS_IAM_ROLE_HELP_URL)
    iam_security_dict = url_request_helper(lookup_url, unsuccessful_resp,
                                           url_error_msg, retry_with_new_header_token=True)
    if not iam_security_dict:
        return None, None
    if any(k not in iam_security_dict for k in CREDENTIALS_KEYS):
        return None, None
    return iam_security_dict, 'metadata:'
def get_iam_role_name():
    """Retrieve the IAM role name attached to this instance via the instance
    metadata service; returns whatever url_request_helper yields (None on failure).
    """
    failure_msg = 'Unsuccessful retrieval of IAM role name at %s.' % INSTANCE_IAM_URL
    unreachable_msg = ('Unable to reach %s to retrieve IAM role name. See %s for more info.'
                       % (INSTANCE_IAM_URL, SECURITY_CREDS_IAM_ROLE_HELP_URL))
    return url_request_helper(INSTANCE_IAM_URL, failure_msg, unreachable_msg,
                              retry_with_new_header_token=True)
def credentials_file_helper(file_path, awsprofile):
    """Read AWS credentials for `awsprofile` from an AWS credentials/config file.

    Returns a dict with keys 'AccessKeyId', 'SecretAccessKey' and 'Token';
    values are None for anything not found. A profile missing only the
    session token still yields its key id and secret.
    """
    config = read_config(file_path)
    credentials = {'AccessKeyId': None, 'SecretAccessKey': None, 'Token': None}
    try:
        # Read all three options before assigning anything, so a missing
        # option leaves the credentials dict untouched (handled below).
        fetched = [config.get(awsprofile, option) for option in
                   ('aws_access_key_id', 'aws_secret_access_key', 'aws_session_token')]
        credentials['AccessKeyId'], credentials['SecretAccessKey'], credentials['Token'] = fetched
    except NoOptionError as e:
        missing = str(e)
        if 'aws_access_key_id' in missing or 'aws_secret_access_key' in missing:
            logging.debug('aws_access_key_id or aws_secret_access_key not found in %s under named profile [%s]', file_path,
                          awsprofile)
        if 'aws_session_token' in missing:
            logging.debug('aws_session_token not found in %s', file_path)
            # Long-term credentials have no session token; keep key id + secret.
            credentials['AccessKeyId'] = config.get(awsprofile, 'aws_access_key_id')
            credentials['SecretAccessKey'] = config.get(awsprofile, 'aws_secret_access_key')
    except NoSectionError:
        logging.debug('No [%s] section found in config file %s', awsprofile, file_path)
    return credentials
def get_aws_profile(options, use_iam):
    """Determine which AWS profile to use for the mount.

    Prefers the explicit 'awsprofile' mount option. When absent and IAM is in
    use, falls back to 'default' if a [default] aws_access_key_id exists in the
    AWS credentials or config file; otherwise returns the (possibly None)
    option value unchanged.
    """
    awsprofile = options.get('awsprofile')
    if awsprofile or not use_iam:
        return awsprofile
    for file_path in [AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE]:
        aws_credentials_configs = read_config(file_path)
        # check if aws access key id is found under [default] section in current file and return 'default' if so
        try:
            if aws_credentials_configs.get('default', 'aws_access_key_id') is not None:
                return 'default'
        except (NoSectionError, NoOptionError):
            continue
    return awsprofile
def url_request_helper(url, unsuccessful_resp, url_error_msg, headers=None, retry_with_new_header_token=False):
    """GET `url` and return its decoded body (via get_resp_obj), or None.

    Fix: `headers` previously used a mutable default argument (`{}`), which is
    shared across calls; it now defaults to None (backward-compatible).

    :param url: URL to request.
    :param unsuccessful_resp: message passed through to get_resp_obj for non-200 logging.
    :param url_error_msg: message logged when the URL is unreachable.
    :param headers: optional dict of extra request headers.
    :param retry_with_new_header_token: retry once with an IMDSv2 metadata token on 401.
    """
    try:
        req = Request(url)
        for k, v in (headers or {}).items():
            req.add_header(k, v)
        request_resp = urlopen(req, timeout=1)
        return get_resp_obj(request_resp, url, unsuccessful_resp)
    except HTTPError as e:
        # For instance enabled with IMDSv2, an Unauthorized 401 error will be
        # thrown; to retrieve metadata the header must embed a metadata token.
        if e.code == 401 and retry_with_new_header_token:
            token = get_aws_ec2_metadata_token()
            req.add_header('X-aws-ec2-metadata-token', token)
            request_resp = urlopen(req, timeout=1)
            return get_resp_obj(request_resp, url, unsuccessful_resp)
        err_msg = 'Unable to reach the url at %s: status=%d, reason is %s' % (url, e.code, e.reason)
    except URLError as e:
        err_msg = 'Unable to reach the url at %s, reason is %s' % (url, e.reason)
    if err_msg:
        logging.debug('%s %s', url_error_msg, err_msg)
    return None
def get_resp_obj(request_resp, url, unsuccessful_resp):
    """Decode an HTTP response body, preferring JSON.

    Returns None for non-200 responses; a parsed JSON object when the body is
    valid JSON; otherwise the raw body as text.
    """
    status = request_resp.getcode()
    if status != 200:
        logging.debug(unsuccessful_resp + ' %s: ResponseCode=%d', url, status)
        return None
    body = request_resp.read()
    body_is_str = type(body) is str
    try:
        if body_is_str:
            return json.loads(body)
        # Decode bytes using the response charset, defaulting to US-ASCII.
        charset = request_resp.headers.get_content_charset() or 'us-ascii'
        return json.loads(body.decode(charset))
    except ValueError as e:
        logging.info('ValueError parsing "%s" into json: %s. Returning response body.' % (str(body), e))
        return body if body_is_str else body.decode('utf-8')
def parse_options(options):
    """Parse a comma-separated mount-option string into a dict.

    'key=value' entries map key -> value; bare flags map to None.

    Fix: split on the first '=' only (maxsplit=1) so values that themselves
    contain '=' no longer raise ValueError from tuple unpacking.
    """
    opts = {}
    for o in options.split(','):
        if '=' in o:
            k, v = o.split('=', 1)
            opts[k] = v
        else:
            opts[o] = None
    return opts
def get_tls_port_range(config):
    """Return the configured (lower, upper) TLS port range.

    Aborts via fatal_error unless upper is strictly greater than lower.
    """
    lower, upper = (config.getint(CONFIG_SECTION, option)
                    for option in ('port_range_lower_bound', 'port_range_upper_bound'))
    if lower >= upper:
        fatal_error('Configuration option "port_range_upper_bound" defined as %d '
                    'must be strictly greater than "port_range_lower_bound" defined as %d.'
                    % (upper, lower))
    return lower, upper
def choose_tls_port(config, options):
    """Pick a bindable local TCP port for the stunnel listener.

    An explicit 'tlsport' mount option is the only candidate when present;
    otherwise the configured range is tried starting from a random midpoint
    (spreading concurrent mounts across the range). Aborts via fatal_error
    when no candidate can be bound.
    """
    if 'tlsport' in options:
        candidates = [int(options['tlsport'])]
    else:
        lower_bound, upper_bound = get_tls_port_range(config)
        all_ports = list(range(lower_bound, upper_bound))
        # Choose a random midpoint, and then try ports in-order from there
        pivot = random.randrange(len(all_ports))
        candidates = all_ports[pivot:] + all_ports[:pivot]
        assert len(all_ports) == len(candidates)
    sock = socket.socket()
    for candidate in candidates:
        try:
            sock.bind(('localhost', candidate))
            sock.close()
            return candidate
        except socket.error:
            continue
    sock.close()
    if 'tlsport' in options:
        fatal_error('Specified port [%s] is unavailable. Try selecting a different port.' % options['tlsport'])
    else:
        fatal_error('Failed to locate an available port in the range [%d, %d], try specifying a different port range in %s'
                    % (lower_bound, upper_bound, CONFIG_FILE))
def is_ocsp_enabled(config, options):
    """Decide whether OCSP certificate-validity checking is enabled.

    Per-mount options override the config file: 'ocsp' forces it on,
    'noocsp' forces it off, otherwise the 'stunnel_check_cert_validity'
    config setting decides.
    """
    if 'ocsp' in options:
        return True
    if 'noocsp' in options:
        return False
    return config.getboolean(CONFIG_SECTION, 'stunnel_check_cert_validity')
def get_mount_specific_filename(fs_id, mountpoint, tls_port):
    """Build the unique per-mount identifier '<fs-id>.<dotted-mountpoint>.<port>',
    where the mountpoint is its absolute path with separators turned into dots."""
    dotted_mountpoint = os.path.abspath(mountpoint).replace(os.sep, '.').lstrip('.')
    return '%s.%s.%d' % (fs_id, dotted_mountpoint, tls_port)
def serialize_stunnel_config(config, header=None):
    """Serialize a stunnel config dict to a list of 'key = value' lines.

    stunnel's file format is not INI-compatible, so it is hand-serialized.
    A list value is emitted as one line per item under the same key.

    Fix: use isinstance() instead of the non-idiomatic `type(v) is list`
    exact-type check (also accepts list subclasses).
    """
    lines = []
    if header:
        lines.append('[%s]' % header)
    for k, v in config.items():
        if isinstance(v, list):
            for item in v:
                lines.append('%s = %s' % (k, item))
        else:
            lines.append('%s = %s' % (k, v))
    return lines
def add_stunnel_ca_options(efs_config, config, options):
    """Set the stunnel 'CAfile' entry in `efs_config`.

    Precedence: the mount's 'cafile' option, then the config file's
    'stunnel_cafile', then the bundled default. Aborts via fatal_error when
    the chosen file does not exist on disk.
    """
    if 'cafile' in options:
        cafile = options['cafile']
    else:
        try:
            cafile = config.get(CONFIG_SECTION, 'stunnel_cafile')
        except NoOptionError:
            logging.debug('No CA file configured, using default CA file %s', DEFAULT_STUNNEL_CAFILE)
            cafile = DEFAULT_STUNNEL_CAFILE
    if not os.path.exists(cafile):
        fatal_error('Failed to find certificate authority file for verification',
                    'Failed to find CAfile "%s"' % cafile)
    efs_config['CAfile'] = cafile
def is_stunnel_option_supported(stunnel_output, stunnel_option_name):
    """Return True when any line of `stunnel -help` output starts with the
    option name; logs a warning when the option is unsupported."""
    supported = any(line.startswith(stunnel_option_name) for line in stunnel_output)
    if not supported:
        logging.warning('stunnel does not support "%s"', stunnel_option_name)
    return supported
def get_version_specific_stunnel_options():
    """Probe `stunnel -help` for optional features.

    Returns (check_host_supported, ocsp_aia_supported). stunnel prints its
    help text to stderr, so the options are searched for there.

    Fix: removed the `proc.wait()` call before communicate() — waiting first
    while both stdout and stderr are PIPEs can deadlock if the child fills a
    pipe buffer before exiting; communicate() already waits for the process.
    """
    stunnel_command = [_stunnel_bin(), '-help']
    proc = subprocess.Popen(stunnel_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
    _, err = proc.communicate()
    stunnel_output = err.splitlines()
    check_host_supported = is_stunnel_option_supported(stunnel_output, b'checkHost')
    ocsp_aia_supported = is_stunnel_option_supported(stunnel_output, b'OCSPaia')
    return check_host_supported, ocsp_aia_supported
def _stunnel_bin():
    """Locate the stunnel executable, aborting with install guidance if absent."""
    install_hint = ('Please install it following the instructions at '
                    'https://docs.aws.amazon.com/efs/latest/ug/using-amazon-efs-utils.html#upgrading-stunnel')
    return find_command_path('stunnel', install_hint)
def find_command_path(command, install_method):
    """Resolve `command` to a path string using a fixed system PATH.

    Aborts via fatal_error (including `install_method` guidance) when the
    command cannot be located by `which`.
    """
    env_path = '/sbin:/usr/sbin:/usr/local/sbin:/root/bin:/usr/local/bin:/usr/bin:/bin'
    try:
        # putenv affects child processes (like `which`) without mutating os.environ.
        os.putenv('PATH', env_path)
        path = subprocess.check_output(['which', command])
    except subprocess.CalledProcessError as e:
        fatal_error('Failed to locate %s in %s - %s' % (command, env_path, install_method), e)
    return path.strip().decode()
def get_system_release_version():
    """Best-effort OS version string.

    Tries the system release file first, then PRETTY_NAME from the os-release
    file; returns 'unknown' when neither yields a value.
    """
    def _read_release(path, extract):
        # Read `path` and apply `extract`; None when unreadable.
        try:
            with open(path) as f:
                return extract(f)
        except IOError:
            logging.debug('Unable to read %s', path)
            return None

    version = _read_release(SYSTEM_RELEASE_PATH, lambda f: f.read().strip())
    if version is not None:
        return version
    version = _read_release(
        OS_RELEASE_PATH,
        lambda f: next((line.split('=')[1].strip() for line in f if 'PRETTY_NAME' in line), None))
    return version if version is not None else 'unknown'
def write_stunnel_config_file(config, state_file_dir, fs_id, mountpoint, tls_port, dns_name, verify_level, ocsp_enabled,
                              options, log_dir=LOG_DIR, cert_details=None):
    """
    Serializes stunnel configuration to a file. Unfortunately this does not conform to Python's config file format, so we have to
    hand-serialize it.

    Returns the path of the written per-mount stunnel config file
    ('stunnel-config.<mount_filename>' inside state_file_dir).
    """
    mount_filename = get_mount_specific_filename(fs_id, mountpoint, tls_port)
    # Global stunnel options; debug logging and its output file are only
    # configured when stunnel_debug_enabled is set.
    global_config = dict(STUNNEL_GLOBAL_CONFIG)
    if config.getboolean(CONFIG_SECTION, 'stunnel_debug_enabled'):
        global_config['debug'] = 'debug'
        if config.has_option(CONFIG_SECTION, 'stunnel_logs_file'):
            global_config['output'] = config.get(CONFIG_SECTION, 'stunnel_logs_file').replace('{fs_id}', fs_id)
        else:
            global_config['output'] = os.path.join(log_dir, '%s.stunnel.log' % mount_filename)
    # Per-mount [efs] service section: local accept port, remote endpoint,
    # certificate verification level, and optional client cert/key.
    efs_config = dict(STUNNEL_EFS_CONFIG)
    efs_config['accept'] = efs_config['accept'] % tls_port
    efs_config['connect'] = efs_config['connect'] % dns_name
    efs_config['verify'] = verify_level
    if verify_level > 0:
        add_stunnel_ca_options(efs_config, config, options)
    if cert_details:
        efs_config['cert'] = cert_details['certificate']
        efs_config['key'] = cert_details['privateKey']
    # Older stunnel builds may lack checkHost/OCSPaia; requesting a missing
    # control is a fatal error rather than a silent downgrade.
    check_host_supported, ocsp_aia_supported = get_version_specific_stunnel_options()
    tls_controls_message = 'WARNING: Your client lacks sufficient controls to properly enforce TLS. Please upgrade stunnel, ' \
                           'or disable "%%s" in %s.\nSee %s for more detail.' % (CONFIG_FILE,
                                                                                 'https://docs.aws.amazon.com/console/efs/troubleshooting-tls')
    if config.getboolean(CONFIG_SECTION, 'stunnel_check_cert_hostname'):
        if check_host_supported:
            efs_config['checkHost'] = dns_name
        else:
            fatal_error(tls_controls_message % 'stunnel_check_cert_hostname')
    # Only use the config setting if the override is not set
    if ocsp_enabled:
        if ocsp_aia_supported:
            efs_config['OCSPaia'] = 'yes'
        else:
            fatal_error(tls_controls_message % 'stunnel_check_cert_validity')
    # Disable libwrap except on releases known not to need the workaround.
    system_release_version = get_system_release_version()
    if not any(release in system_release_version for release in SKIP_NO_LIBWRAP_RELEASES):
        efs_config['libwrap'] = 'no'
    stunnel_config = '\n'.join(serialize_stunnel_config(global_config) + serialize_stunnel_config(efs_config, 'efs'))
    logging.debug('Writing stunnel configuration:\n%s', stunnel_config)
    stunnel_config_file = os.path.join(state_file_dir, 'stunnel-config.%s' % mount_filename)
    with open(stunnel_config_file, 'w') as f:
        f.write(stunnel_config)
    return stunnel_config_file
def write_tls_tunnel_state_file(fs_id, mountpoint, tls_port, tunnel_pid, command, files, state_file_dir, cert_details=None):
    """Persist TLS-tunnel state (pid, command, files, optional cert details)
    as JSON under a '~'-prefixed temporary name.

    The caller renames the file to its non-temporary name after a successful
    mount; the temporary file name is returned.
    """
    state_file = '~' + get_mount_specific_filename(fs_id, mountpoint, tls_port)
    state = {'pid': tunnel_pid, 'cmd': command, 'files': files}
    if cert_details:
        state.update(cert_details)
    state_path = os.path.join(state_file_dir, state_file)
    with open(state_path, 'w') as f:
        json.dump(state, f)
    return state_file
def test_tunnel_process(tunnel_proc, fs_id):
    """Abort (via fatal_error) if the TLS tunnel process has already exited."""
    tunnel_proc.poll()
    if tunnel_proc.returncode is None:
        return
    out, err = tunnel_proc.communicate()
    fatal_error('Failed to initialize TLS tunnel for %s' % fs_id,
                'Failed to start TLS tunnel (errno=%d). stdout="%s" stderr="%s"'
                % (tunnel_proc.returncode, out.strip(), err.strip()))
def poll_tunnel_process(tunnel_proc, fs_id, mount_completed):
    """
    poll the tunnel process health every .5s during the mount attempt to fail fast if the tunnel dies - since this is not called
    from the main thread, if the tunnel fails, exit uncleanly with os._exit
    """
    while True:
        if mount_completed.is_set():
            return
        try:
            test_tunnel_process(tunnel_proc, fs_id)
        except SystemExit as e:
            os._exit(e.code)
        mount_completed.wait(0.5)
def get_init_system(comm_file='/proc/1/comm'):
    """Identify the running init system by reading PID 1's command name.

    Falls back to 'unknown' when the comm file cannot be read.
    """
    try:
        with open(comm_file) as f:
            detected = f.read().strip()
    except IOError:
        logging.warning('Unable to read %s', comm_file)
        detected = 'unknown'
    logging.debug('Identified init system: %s', detected)
    return detected
def check_network_target(fs_id):
with open(os.devnull, 'w') as devnull:
rc = subprocess.call(['systemctl', 'status', 'network.target'], stdout=devnull, stderr=devnull, close_fds=True)
if rc != 0:
fatal_error('Failed to | |
args.eph_de_rotated:
num_ephs = len(psr[0].roemer.keys())
ephnames = psr[0].roemer.keys()
elif args.eph_de_rotated:
num_ephs = len(sorted(glob.glob(nxdir+'/data/de_rot/de*-rot.npy')))
ephnames = ['DE'+ii.split('rot/de')[-1].split('-rot.npy')[0]
for ii in sorted(glob.glob(nxdir+'/data/de_rot/de*-rot.npy'))]
else:
num_ephs = len(args.which_ephs.split(','))
ephnames = args.which_ephs.split(',')
if num_ephs > 1:
pmin = np.append(pmin,np.zeros(num_ephs-1)) # weights
elif args.eph_physmodel:
# mass priors are 10x larger than IAU uncertainties
pmin = np.append(pmin,np.array([-10e-10, -9.29860141e-11, -4.90383710e-11,
-3.43154016e-10, -4.77662313e-10]))
# jupiter orbit
if args.incJuporb:
if args.jup_orbmodel == 'angles':
pmin = np.append(pmin,np.array([-100e-8, -100e-8, -100e-8]))
elif args.jup_orbmodel == 'orbelements':
pmin = np.append(pmin,-5e-2*np.ones(6))
# saturn orbit
if args.incSatorb:
if args.sat_orbmodel == 'angles':
pmin = np.append(pmin,np.array([-100e-8, -100e-8, -100e-8]))
elif args.sat_orbmodel == 'orbelements':
pmin = np.append(pmin,-5e-1*np.ones(6))
elif args.eph_roemermix_dx:
if args.which_ephs == 'fitted':
num_ephs = 1
ephnames = [psr[0].ephemname]
elif args.which_ephs == 'all':
if not args.eph_de_rotated:
num_ephs = len(psr[0].roemer.keys())
ephnames = psr[0].roemer.keys()
elif args.eph_de_rotated:
num_ephs = len(sorted(glob.glob(nxdir+'/data/de_rot/de*-rot.npy')))
ephnames = ['DE'+ii.split('rot/de')[-1].split('-rot.npy')[0]
for ii in sorted(glob.glob(nxdir+'/data/de_rot/de*-rot.npy'))]
else:
num_ephs = len(args.which_ephs.split(','))
ephnames = args.which_ephs.split(',')
if num_ephs > 1:
pmin = np.append(pmin,-50.0*np.ones(num_ephs)) # weights
# Upper bounds (pmax) of the uniform sampling prior; entries are appended in
# the same order as the matching pmin entries above (cf. my_prior below).
pmax = np.array([])
# Per-pulsar red noise: (log-amplitude, spectral index) or per-mode spectrum.
if not args.fixRed:
    if args.redSpecModel == 'powerlaw':
        pmax = np.append(pmax,-11.0*np.ones(len(psr)))
        pmax = np.append(pmax,7.0*np.ones(len(psr)))
    elif args.redSpecModel == 'spectrum':
        pmax = np.append(pmax,3.0*np.ones(len(psr)*nmodes_red))
# Per-pulsar DM variations.
if args.incDM and not args.fixDM:
    if args.dmSpecModel == 'powerlaw':
        pmax = np.append(pmax,-8.0*np.ones(len(psr)))
        pmax = np.append(pmax,7.0*np.ones(len(psr)))
    elif args.dmSpecModel == 'spectrum':
        pmax = np.append(pmax,3.0*np.ones(len(psr)*nmodes_dm))
# Per-pulsar white noise: EFAC, EQUAD per backend system, plus ECORR when
# 'nano-f' systems are present.
if args.varyWhite:
    for ii,p in enumerate(psr):
        systems = p.sysflagdict[args.sysflag_target]
        pmax = np.append(pmax,10.0*np.ones(len(systems)))
        pmax = np.append(pmax,-3.0*np.ones(len(systems)))
        if 'nano-f' in p.sysflagdict.keys() and len(p.sysflagdict['nano-f'].keys())>0:
            pmax = np.append(pmax, -3.0*np.ones(len(p.sysflagdict['nano-f'].keys())))
# Band-dependent red noise (one process per band beyond the first).
if args.incBand:
    if args.bandSpecModel == 'powerlaw':
        pmax = np.append(pmax,-11.0*np.ones(len(bands)-1))
        pmax = np.append(pmax,7.0*np.ones(len(bands)-1))
    elif args.bandSpecModel == 'spectrum':
        pmax = np.append(pmax,-3.0*np.ones((len(bands)-1)*nmodes_band))
# Clock-error process (common to all pulsars).
if args.incClk:
    if args.clkSpecModel == 'powerlaw':
        pmax = np.append(pmax,-11.0)
        pmax = np.append(pmax,7.0)
    elif args.clkSpecModel == 'spectrum':
        pmax = np.append(pmax,3.0*np.ones(nmodes_red))
# Uncorrelated common-mode process.
if args.incCm:
    if args.cmSpecModel == 'powerlaw':
        pmax = np.append(pmax,-11.0)
        pmax = np.append(pmax,7.0)
    elif args.cmSpecModel == 'spectrum':
        pmax = np.append(pmax,3.0*np.ones(nmodes_red))
# Ephemeris-error process; the JPL basis carries no sampled hyperparameters.
if args.incEph:
    if args.jplBasis:
        pass
    else:
        if args.ephSpecModel == 'powerlaw':
            # one (amplitude, index) pair per x,y,z component
            pmax = np.append(pmax,np.array([-11.0,-11.0,-11.0]))
            pmax = np.append(pmax,np.array([7.0,7.0,7.0]))
        elif args.ephSpecModel == 'spectrum':
            pmax = np.append(pmax,-3.0*np.ones(3*nmodes_eph))
# Dipole-correlated process.
if args.incDip:
    if args.dipSpecModel == 'powerlaw':
        pmax = np.append(pmax,-11.0)
        pmax = np.append(pmax,7.0)
    elif args.dipSpecModel == 'spectrum':
        pmax = np.append(pmax,3.0*np.ones(nmodes_red))
# Gravitational-wave background (GWB) upper prior bounds, by spectral model.
if args.incGWB:
    if args.gwbSpecModel == 'powerlaw':
        if args.gwbAmpRange is None:
            pmax = np.append(pmax,-11.0)
        elif args.gwbAmpRange is not None:
            pmax = np.append(pmax,amp_range[1])
        if args.fix_slope is None:
            pmax = np.append(pmax,7.0)
    elif args.gwbSpecModel == 'spectrum':
        if args.gwbPrior != 'gaussProc':
            pmax = np.append(pmax,3.0*np.ones(nmodes_red))
        elif args.gwbPrior == 'gaussProc':
            # per-mode spectrum plus Agwb and the GP population parameter(s)
            pmax = np.append(pmax,5.0*np.ones(nmodes_red))
            pmax = np.append(pmax,-11.0) # Agwb
            if gwb_popparam_ndims == 1:
                if gwb_popparam == 'stars' and args.gwbStarsRange is not None:
                    pmax = np.append(pmax,stars_range[1])
                elif gwb_popparam == 'ecc' and args.gwbEccRange is not None:
                    pmax = np.append(pmax,ecc_range[1])
                else:
                    # default to the trained GP's input domain
                    pmax = np.append(pmax,gppkl[0].x.max())
            else:
                if gwb_popparam == 'starsecc' and args.gwbStarsRange is not None \
                    and args.gwbEccRange is not None:
                    pmax = np.append(pmax,stars_range[1])
                    pmax = np.append(pmax,ecc_range[1])
                elif gwb_popparam == 'alphastarsecc' and args.gwbAlphaRange is not None \
                    and args.gwbStarsRange is not None and args.gwbEccRange is not None:
                    pmax = np.append(pmax,alpha_range[1])
                    pmax = np.append(pmax,stars_range[1])
                    pmax = np.append(pmax,ecc_range[1])
                elif gwb_popparam == 'cosmicstring':
                    if args.gwbGmuRange is not None \
                        and args.gwbStringProbRange is not None:
                        pmax = np.append(pmax,gmu_range[1])
                        pmax = np.append(pmax,stringprob_range[1])
                    else:
                        pmax = np.append(pmax,gppkl[0].x.max())
                        pmax = np.append(pmax,0.0)
                else:
                    # generic multi-dimensional GP: bound each input column
                    for col in range(gppkl[0].x.shape[1]):
                        pmax = np.append(pmax,gppkl[0].x[:,col].max())
    elif args.gwbSpecModel == 'turnover':
        pmax = np.append(pmax,-11.0) # Agwb
        if args.gwb_fb2env is not None:
            # kappa is fixed by choice of env
            if args.gwb_fb2env == 'stars':
                pmax = np.append(pmax,6.0) # log(rho)
            elif args.gwb_fb2env == 'gas':
                pmax = np.append(pmax,2.0) # log(acc-rate)
        elif args.gwb_fb2env is None:
            pmax = np.append(pmax,np.array([7.0,-7.0]))
    elif args.gwbSpecModel == 'gpEnvInterp':
        pmax = np.append(pmax,np.array([-11.0,0.9]))
    # Bounds for the GWB spatial-correlation parameters, by correlation model.
    if args.incCorr:
        if args.gwbTypeCorr == 'modelIndep':
            pmax = np.append(pmax,np.pi*np.ones(num_corr_params))
        elif args.gwbTypeCorr == 'pointSrc':
            if args.fixPointSrcPhi is None and args.fixPointSrcTheta is None:
                pmax = np.append(pmax,np.tile([2.0*np.pi,1.0],tmp_nwins))
        elif args.gwbTypeCorr == 'spharmAnis':
            pmax = np.append(pmax,10.0*np.ones(num_corr_params))
        elif args.gwbTypeCorr == 'dipoleOrf':
            pmax = np.append(pmax,np.tile([2.0*np.pi,1.0,1.0],tmp_nwins))
        elif args.gwbTypeCorr == 'gwDisk':
            pmax = np.append(pmax,np.tile([2.0*np.pi,1.0,np.pi,6.0],tmp_nwins))
        elif args.gwbTypeCorr == 'psrlocsVary':
            pmax = np.append(pmax,np.tile(2.0*np.pi*np.ones(len(psr)),tmp_nwins))
            pmax = np.append(pmax,np.tile(np.ones(len(psr)),tmp_nwins))
    if args.gwbModelSelect:
        pmax = np.append(pmax,1.5)
# Single GW line: (log-amp, log-freq, phi, cos-theta)-style 4-parameter block.
if args.incGWline:
    pmax = np.append(pmax,np.array([3.0,-7.0,2.0*np.pi,1.0]))
# Deterministic-signal upper prior bounds (continuous waves, bursts with
# memory, and ephemeris models).
if args.det_signal:
    if args.cgw_search:
        if args.cgwFreqRange is None:
            pmax = np.append(pmax,np.array([10.0,1.0,4.0,-11.0,-7.301,2.0*np.pi,
                                            1.0,1.0,np.pi,np.pi,2.0*np.pi]))
        elif args.cgwFreqRange is not None:
            # same block, but with a user-supplied orbital-frequency upper bound
            pmax = np.append(pmax,np.array([10.0,1.0,4.0,-11.0,cgw_orbfreq_range[1],2.0*np.pi,
                                            1.0,1.0,np.pi,np.pi,2.0*np.pi]))
        if args.ecc_search:
            pmax = np.append(pmax,0.9)
        if args.psrTerm:
            # psr distances, pterm-gamma0, pterm-l0
            pmax = np.append(pmax,10.0*np.ones(len(psr)))
            pmax = np.append(pmax,2.0*np.pi*np.ones(len(psr)))
            pmax = np.append(pmax,2.0*np.pi*np.ones(len(psr)))
        if args.cgwModelSelect:
            pmax = np.append(pmax,1.5)
    if args.bwm_search:
        # burst epoch bounded by the latest TOA in the array
        pmax = np.append(pmax,[np.max([np.max(p.toas) for p in psr]),
                               -11.0,2.0*np.pi,1.0,np.pi])
        if args.bwm_model_select:
            pmax = np.append(pmax,1.5)
    if args.eph_quadratic:
        pmax = np.append(pmax,1e-8*np.ones(9)) # amps
        #pmax = np.append(pmax,np.tile([0.0,0.0],3)) # amps
        #pmax = np.append(pmax,np.tile([1.0,1.0],3)) # signs
    if args.eph_planetdelta:
        if args.eph_planetmass:
            if args.eph_planetmassprior == 'official':
                iau_upperrange = np.array([4.62893610e-11, 2.87611795e-13, 3.78879896e-14,
                                           1.24974433e-14, 9.29860141e-11, 4.90383710e-11,
                                           3.43154016e-10, 4.77662313e-10, 9.00975861e-12])
                pmax = np.append(pmax,iau_upperrange[planet_tags-1])
            elif args.eph_planetmassprior == 'loguniform':
                pmax = np.append(pmax,-5.0*np.ones(num_planets)) # amps
                pmax = np.append(pmax,1.0*np.ones(num_planets)) # signs
            if num_ephs > 1:
                pmax = np.append(pmax,np.ones((num_ephs-1)*num_planets)) # weights
        if args.eph_planetoffset:
            pmax = np.append(pmax,1e8*np.ones(3*num_planets)) # x,y,z displacements [km]
    elif args.eph_roemermix:
        if num_ephs > 1:
            pmax = np.append(pmax,np.ones(num_ephs-1)) # weights
    elif args.eph_physmodel:
        # mass priors are 10x larger than IAU uncertainties
        pmax = np.append(pmax,np.array([10e-10, 9.29860141e-11, 4.90383710e-11,
                                        3.43154016e-10, 4.77662313e-10]))
        # jupiter orbit
        if args.incJuporb:
            if args.jup_orbmodel == 'angles':
                pmax = np.append(pmax,np.array([100e-8, 100e-8, 100e-8]))
            elif args.jup_orbmodel == 'orbelements':
                pmax = np.append(pmax,5e-2*np.ones(6))
        # saturn orbit
        if args.incSatorb:
            if args.sat_orbmodel == 'angles':
                pmax = np.append(pmax,np.array([100e-8, 100e-8, 100e-8]))
            elif args.sat_orbmodel == 'orbelements':
                pmax = np.append(pmax,5e-1*np.ones(6))
    elif args.eph_roemermix_dx:
        if num_ephs > 1:
            pmax = np.append(pmax,50.0*np.ones(num_ephs)) # weights
##################################################################################
## Collecting rotated ephemeris time-series
# Loads SSB position series for each ephemeris, projects them onto each
# pulsar's (equatorial) unit position vector, and interpolates onto the
# pulsar TOAs to form per-pulsar Roemer-delay series.
if ((args.det_signal and args.eph_roemermix and args.eph_de_rotated) or
    (args.det_signal and args.eph_roemermix_dx and args.eph_de_rotated)):
    # All loaded .npy data is in equatorial coordinates.
    # Pulsar position vector must also be equatorial.
    mjd = np.load(nxdir+'/data/de_rot/mjd-rot.npy')
    ssb_position_orig = OrderedDict.fromkeys([psr[0].ephemname])
    ssb_position_orig[psr[0].ephemname] = np.load(nxdir+'/data/de_rot/de{0}-orig.npy'.format(psr[0].ephemname.split('DE')[1]))
    ssb_position_rot = OrderedDict.fromkeys(ephnames)
    for key in ssb_position_rot:
        ssb_position_rot[key] = np.load(nxdir+'/data/de_rot/de{0}-rot.npy'.format(key.split('DE')[1]))
    psr_roemer_orig = OrderedDict.fromkeys([p.name for p in psr])
    psr_roemer_rot = OrderedDict.fromkeys([p.name for p in psr])
    for ii, p in enumerate(psr):
        # Unit vector to the pulsar in equatorial coordinates (RA/dec).
        psrposeq = np.array([np.sin(np.pi/2.-p.decj) * np.cos(p.raj),
                             np.sin(np.pi/2.-p.decj) * np.sin(p.raj),
                             np.cos(np.pi/2.-p.decj)])
        psr_roemer_orig[p.name] = OrderedDict.fromkeys([p.ephemname])
        # Dot each interpolated SSB position (x,y,z) with the pulsar direction.
        psr_roemer_orig[p.name][p.ephemname] = np.dot(np.array([np.interp(p.toas, mjd, ssb_position_orig[p.ephemname][:,aa])
                                                                for aa in range(3)]).T, psrposeq)
        psr_roemer_rot[p.name] = OrderedDict.fromkeys(ephnames)
        for key in ephnames:
            psr_roemer_rot[p.name][key] = np.dot(np.array([np.interp(p.toas, mjd, ssb_position_rot[key][:,aa])
                                                           for aa in range(3)]).T, psrposeq)
##################################################################################
## If epochTOAs, interpolate all planet position vectors onto epoch-averaged TOAs
if args.eph_physmodel and args.epochTOAs:
    planet_epochposvecs = []
    psr_epochposvecs = []
    for ii,p in enumerate(psr):
        # Interpolating all planet position vectors onto epoch TOAs
        # (shape: n_epoch_toas x 9 planets x 3 coordinates)
        planet_epochposvec_tmp = np.zeros((len(p.detsig_avetoas),9,3))
        for jj in range(9):
            planet_epochposvec_tmp[:,jj,:] = np.array([np.interp(p.detsig_avetoas,
                                                                 p.toas,
                                                                 p.planet_ssb[p.ephemname][:,jj,aa])
                                                       for aa in range(3)]).T
        planet_epochposvecs.append(planet_epochposvec_tmp)
        # Interpolating the pulsar position vectors onto epoch TOAs
        psr_epochposvec_tmp = np.array([np.interp(p.detsig_avetoas,
                                                  p.toas,
                                                  p.psrPos[:,aa])
                                        for aa in range(3)]).T
        psr_epochposvecs.append(psr_epochposvec_tmp)
## Gather data on the partial derivatives of planetary orbital elements
# jup_mjd/jup_orbelxyz (and the Saturn equivalents) stay None unless the
# 'orbelements' orbit model is selected, in which case the precomputed
# partial-derivative arrays are loaded from disk.
if args.eph_physmodel:
    # Jupiter
    if args.incJuporb:
        if args.jup_orbmodel == 'angles':
            jup_mjd = None
            jup_orbelxyz = None
        elif args.jup_orbmodel == 'orbelements':
            jup_mjd = np.load(nxdir+'/data/jupiter_orbitpartials/jupiter-orbel-mjd.npy')
            jup_orbelxyz = np.load(nxdir+'/data/jupiter_orbitpartials/jupiter-orbel-xyz-svd.npy')
    elif not args.incJuporb:
        jup_mjd = None
        jup_orbelxyz = None
    # Saturn
    if args.incSatorb:
        if args.sat_orbmodel == 'angles':
            sat_mjd = None
            sat_orbelxyz = None
        elif args.sat_orbmodel == 'orbelements':
            sat_mjd = np.load(nxdir+'/data/saturn_orbitpartials/saturn-orbel-mjd.npy')
            sat_orbelxyz = np.load(nxdir+'/data/saturn_orbitpartials/saturn-orbel-xyz-svd.npy')
    elif not args.incSatorb:
        sat_mjd = None
        sat_orbelxyz = None
##################################################################################
def my_prior(xx):
    """Log of the uniform prior over the hypercube [pmin, pmax].

    Returns the constant log-density sum(log(1/(pmax - pmin))) when every
    element of `xx` lies within the bounds, and -inf otherwise.
    """
    within_bounds = np.all(xx <= pmax) and np.all(xx >= pmin)
    if not within_bounds:
        return -np.inf
    return np.sum(np.log(1/(pmax-pmin)))
def lnprob(xx):
npsr = len(psr)
logLike = 0
if not args.varyWhite:
loglike1_tmp = loglike1
dtmp = list(d)
TtNT_tmp = list(TtNT)
Jamp_tmp = list(Jamp)
logdet_Ntmp = list(logdet_N)
bigTtNT_tmp = bigTtNT.copy()
mode_count = 2*nmodes_red
if args.incDM:
mode_count += 2*nmodes_dm
if args.incEph:
if args.jplBasis:
mode_count += nmodes_eph
else:
# 2*nmode for x,y,z
mode_count += 6*nmodes_eph
if args.incClk and args.clkDesign:
mode_count += 2*nmodes_red
if args.incBand and ((len(bands)-1)>0):
mode_count += 2*(len(bands)-1)*nmodes_band
###############################
# Splitting up parameter vector
param_ct = 0
###############################
# Including per-pulsar red noise
if not args.fixRed:
if args.redSpecModel == 'powerlaw':
Ared = 10.0**xx[:npsr]
gam_red = xx[npsr:2*npsr]
param_ct += 2*npsr
elif args.redSpecModel == 'spectrum':
red_spec = (xx[:nmodes_red*npsr].copy()).reshape((npsr,nmodes_red))
param_ct += npsr*nmodes_red
####################################
# Including per-pulsar DM variations
if args.incDM and not args.fixDM:
if args.dmSpecModel == 'powerlaw':
Adm = 10.0**xx[param_ct:param_ct+npsr]
gam_dm = xx[param_ct+npsr:param_ct+2*npsr]
param_ct += 2*npsr
elif args.dmSpecModel == 'spectrum':
dm_spec = (xx[param_ct:param_ct+nmodes_dm*npsr].copy()).reshape((npsr,nmodes_dm))
param_ct += npsr*nmodes_dm
####################################
# Including per-pulsar white-noise
if args.varyWhite:
EFAC = []
EQUAD = []
ECORR = []
for ii,p in enumerate(psr):
systems = p.sysflagdict[args.sysflag_target]
EFAC.append( xx[param_ct:param_ct+len(systems)] )
param_ct += len(systems)
EQUAD.append( 10.0**xx[param_ct:param_ct+len(systems)] )
param_ct += len(systems)
if 'nano-f' in p.sysflagdict.keys() and len(p.sysflagdict['nano-f'].keys())>0:
ECORR.append( 10.0**xx[param_ct:param_ct+len(p.sysflagdict['nano-f'].keys())] )
param_ct += len(p.sysflagdict['nano-f'].keys())
#########################################
# Including band-dependent red noise
if args.incBand:
if | |
return response
def reset_instance(
self,
request: Union[service.ResetInstanceRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Resets a notebook instance.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.ResetInstanceRequest, dict]):
The request object. Request for reseting a notebook
instance
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.notebooks_v1beta1.types.Instance`
The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.ResetInstanceRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.ResetInstanceRequest):
request = service.ResetInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.reset_instance]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
instance.Instance,
metadata_type=service.OperationMetadata,
)
# Done; return the response.
return response
def report_instance_info(
self,
request: Union[service.ReportInstanceInfoRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Allows notebook instances to
report their latest instance information to the
Notebooks API server. The server will merge the reported
information to the instance metadata store. Do not use
this method directly.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.ReportInstanceInfoRequest, dict]):
The request object. Request for notebook instances to
report information to Notebooks API.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.notebooks_v1beta1.types.Instance`
The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.ReportInstanceInfoRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.ReportInstanceInfoRequest):
request = service.ReportInstanceInfoRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.report_instance_info]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
instance.Instance,
metadata_type=service.OperationMetadata,
)
# Done; return the response.
return response
def is_instance_upgradeable(
self,
request: Union[service.IsInstanceUpgradeableRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> service.IsInstanceUpgradeableResponse:
r"""Check if a notebook instance is upgradable.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableRequest, dict]):
The request object. Request for checking if a notebook
instance is upgradeable.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.notebooks_v1beta1.types.IsInstanceUpgradeableResponse:
Response for checking if a notebook
instance is upgradeable.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.IsInstanceUpgradeableRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.IsInstanceUpgradeableRequest):
request = service.IsInstanceUpgradeableRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.is_instance_upgradeable]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("notebook_instance", request.notebook_instance),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def upgrade_instance(
self,
request: Union[service.UpgradeInstanceRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Upgrades a notebook instance to the latest version.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.UpgradeInstanceRequest, dict]):
The request object. Request for upgrading a notebook
instance
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.notebooks_v1beta1.types.Instance`
The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.UpgradeInstanceRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.UpgradeInstanceRequest):
request = service.UpgradeInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.upgrade_instance]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
instance.Instance,
metadata_type=service.OperationMetadata,
)
# Done; return the response.
return response
def upgrade_instance_internal(
self,
request: Union[service.UpgradeInstanceInternalRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation.Operation:
r"""Allows notebook instances to
call this endpoint to upgrade themselves. Do not use
this method directly.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.UpgradeInstanceInternalRequest, dict]):
The request object. Request for upgrading a notebook
instance from within the VM
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.notebooks_v1beta1.types.Instance`
The definition of a notebook instance.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.UpgradeInstanceInternalRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.UpgradeInstanceInternalRequest):
request = service.UpgradeInstanceInternalRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.upgrade_instance_internal
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
response,
self._transport.operations_client,
instance.Instance,
metadata_type=service.OperationMetadata,
)
# Done; return the response.
return response
def list_environments(
self,
request: Union[service.ListEnvironmentsRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEnvironmentsPager:
r"""Lists environments in a project.
Args:
request (Union[google.cloud.notebooks_v1beta1.types.ListEnvironmentsRequest, dict]):
The request object. Request for listing environments.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.notebooks_v1beta1.services.notebook_service.pagers.ListEnvironmentsPager:
Response for listing environments.
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a service.ListEnvironmentsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, service.ListEnvironmentsRequest):
request | |
<filename>A3/code/others/utils.py<gh_stars>1-10
"""
All functions combined
"""
import math
import cv2
import matplotlib.pyplot as plt
import numpy as np
# --- Tuning constants for the marker-detection / AR pipeline ---
HARRIS = False  # when True, descriptors are computed at Harris corners
FEATURE_EXTRACTOR = 'brisk' # 'sift'|'surf'|'brisk'|'orb'
FEATURE_MATCHER = 'bf' # 'bf'|'knn'
NUM_GOOD_MATCHES = 200  # cap on brute-force matches kept after sorting
LOWES_RATIO = 0.75  # Lowe's ratio-test threshold for knn matching
SCALING = 20 # Percent scale down
# MARKERS_PATHS = ['../markers/0.png', '../markers/1.png', '../markers/2.png']
MARKERS_PATHS = ['../../markers/0.png', '../../markers/2.png']
# Reference marker corner coordinates (700x700 squares) used as the source
# plane for homography estimation.
REF_IMAGE1 = np.array([[0, 0],
                       [0, 700],
                       [700, 700],
                       [700, 0]], dtype=np.float32)
REF_IMAGE2 = np.array([[0, 0],
                       [0, 700],
                       [700, 700],
                       [700, 0]], dtype=np.float32)
# NOTE(review): REF_IMAGE3 is rewritten in place by do_everything with a
# rotated corner ordering — it is mutable state, not a constant.
REF_IMAGE3 = np.array([[0, 0],
                       [0, 700],
                       [700, 700],
                       [700, 0]], dtype=np.float32)
# 2x2 grid of corner indices; np.rot90(ROT) re-orders marker corners.
ROT = np.array([[0,1],[3,2]])
def do_everything(FRAMED, REF_IMAGES, OBJ, CAM_MAT, REACHED_X, REACHED_Y):
    """One AR pipeline step over a single video frame.

    Detects marker-sized quadrilaterals in FRAMED, matches them to the
    reference markers, and (when two confident matches exist) renders OBJ
    moving stepwise from the first marker toward the second.

    Side effects: draws contours/model into FRAMED in place and rewrites
    the module-global REF_IMAGE3 with a rotated corner ordering.

    Returns the updated (REACHED_X, REACHED_Y) animation offsets.
    """
    FRAME = convert_to_grayscale(FRAMED)
    # ret, thresh = cv2.threshold(FRAME,127,255,cv2.THRESH_BINARY)
    # Blur + adaptive threshold: binarize robustly under uneven lighting.
    FRAME = cv2.GaussianBlur(FRAME,(5,5),0)
    thresh = cv2.adaptiveThreshold(FRAME,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 2)
    # cv2.imshow("thresh", thresh)
    # print(thresh)
    contours, hierarchy = cv2.findContours(thresh, 1, 2)
    # Keep only contours large enough to plausibly be markers, then fit
    # min-area rectangles and rectify each candidate patch.
    area_contour = [contour for contour in contours if cv2.contourArea(contour) >= 500]
    rects = [cv2.minAreaRect(i) for i in area_contour]
    boxes = [np.int0(cv2.boxPoints(i)) for i in rects]
    warped = [four_point_transform(FRAME, box) for box in boxes]
    selected_markers = compare_markers(warped, REF_IMAGES)
    # cv2.imshow("0", selected_markers[0][0])
    # cv2.imshow("1", selected_markers[1][0])
    for sel in selected_markers:
        try:
            # sel[3] is the difference score (lower = better); outline
            # confident detections in red.
            if sel[3] <= 4:
                cv2.drawContours(FRAMED,[boxes[sel[2]]],-1,(0,0,255),3)
        except:
            # Best-effort drawing: malformed tuples / missing boxes are
            # silently skipped.
            pass
    MATCH_DATA = [None, None]
    try:
        if selected_markers[0][3] <= 4:
            # Re-order REF_IMAGE3's corners by one 90-degree rotation of
            # the index grid, then estimate a homography per marker.
            ROT3 = np.rot90(ROT)
            REF_IMAGE3[0] = REF_IMAGE1[ROT3[0,0]]
            REF_IMAGE3[1] = REF_IMAGE1[ROT3[0,1]]
            REF_IMAGE3[2] = REF_IMAGE1[ROT3[1,1]]
            REF_IMAGE3[3] = REF_IMAGE1[ROT3[1,0]]
            h1 = get_homography_from_corners(boxes[selected_markers[0][2]], REF_IMAGE3)[0]
            h2 = get_homography_from_corners(boxes[selected_markers[1][2]], REF_IMAGE2)[0]
            MATCH_DATA = [h1, h2]
    except:
        # Fewer than two detections (or homography failure): render nothing.
        pass
    if MATCH_DATA[0] is not None and MATCH_DATA[1] is not None:
        HOMOGRAPHY1 = MATCH_DATA[0]
        HOMOGRAPHY2 = MATCH_DATA[1]
        corner1 = boxes[selected_markers[0][2]]
        corner2 = boxes[selected_markers[1][2]]
        PROJ_MAT1 = get_matrix(CAM_MAT, HOMOGRAPHY1)
        # Advance the model by 1/10th of the marker-to-marker distance per
        # frame, wrapping back to the start once the target is reached.
        DIST = calculate_dist_corners(corner1, corner2)
        DIST_X = DIST[0]
        DIST_Y = DIST[1]
        STEP_X = DIST_X/10
        STEP_Y = DIST_Y/10
        if abs(REACHED_X) >= abs(DIST_X) or abs(REACHED_Y) >= abs(DIST_Y):
            REACHED_X = 0
            REACHED_Y = 0
        else:
            REACHED_X += STEP_X
            REACHED_Y += STEP_Y
        # Translate the projection by the accumulated offset and render.
        TRANS = np.array(
            [[1, 0, REACHED_X], [0, 1, REACHED_Y], [0, 0, 1]])
        PROJ_MAT = np.dot(TRANS, PROJ_MAT1)
        FRAMED = render(FRAMED, OBJ, PROJ_MAT, REF_IMAGE1, False)
    return REACHED_X, REACHED_Y
def get_middle(arr):
    """Sample the centre value of a 2-D array.

    Odd-sized axes use the exact centre element; even-sized axes average
    the two central entries along that axis (a fully even array averages
    the central 2x2 patch).

    Fixes the original logic, which tested parity of shape/2 instead of
    the shape itself and sampled a window biased toward the bottom-right.
    """
    rows, cols = arr.shape[0], arr.shape[1]
    r, c = rows // 2, cols // 2
    if rows % 2 == 1 and cols % 2 == 1:
        return arr[r, c]
    if rows % 2 == 0 and cols % 2 == 0:
        return np.average(arr[r - 1:r + 1, c - 1:c + 1])
    if rows % 2 == 1:
        # Odd rows, even cols: average the two central columns.
        return np.average(arr[r, c - 1:c + 1])
    # Even rows, odd cols: average the two central rows.
    return np.average(arr[r - 1:r + 1, c])
def get_binary(images, divsions):
    """Convert each grayscale image to a divsions x divsions 0/1 matrix.

    Each image is Gaussian-blurred and Otsu-thresholded to {0, 1}; the
    image is then split into a divsions x divsions grid and a cell becomes
    1 when the sampled centre of that grid cell is >= 0.5.
    """
    blurred = [cv2.GaussianBlur(thresh,(5,5),0) for thresh in images]
    threshed = [cv2.threshold(warp, 0, 255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)[1]/255 for warp in blurred]
    mats = []
    for thr in threshed:
        shap = thr.shape
        jump = np.int0(np.array(shap)/divsions)
        mat = np.zeros([divsions,divsions])
        for i in range(divsions):
            for j in range(divsions):
                try:
                    avg = get_middle(thr[i*jump[0]:(i+1)*jump[0], j*jump[1]:(j+1)*jump[1]])
                except Exception:
                    # Degenerate cell (image smaller than the grid):
                    # treat as black instead of crashing. Was a bare
                    # `except:` which also swallowed KeyboardInterrupt.
                    avg = 0
                if avg >= 0.5:
                    mat[i][j] = 1
                else:
                    mat[i][j] = 0
        mats.append(mat)
    return mats
def compare_markers(matrices, ref_images):
    """Match each reference marker against the candidate patches.

    For every reference marker, finds the candidate patch and 90-degree
    rotation with the smallest absolute pixel difference, preferring
    candidates not already claimed by the first match.

    Returns a list of (patch, rotation_index, patch_index, score) tuples,
    one per reference; lower score means a better match.
    """
    thresh = get_binary(matrices, 7)
    refs = get_binary([convert_to_grayscale(ref) for ref in ref_images], 7)
    if not thresh:
        # No candidate patches detected in this frame.
        return []
    ret = []
    for ref in refs:
        # Seed the search with THIS reference against the first candidate.
        # (The original seeded with refs[0] for every reference, and stored
        # the reference array where the rotation index belongs.)
        mini = np.sum(np.absolute(ref - thresh[0]))
        rot = (thresh[0], 0, 0, mini)
        for thr in range(0, len(thresh)):
            for i in range(4):
                su = np.sum(np.absolute(np.rot90(ref, i) - thresh[thr]))
                if su < mini:
                    # Avoid re-claiming the candidate already taken by the
                    # first matched reference.
                    if len(ret) > 0 and ret[0][2] != thr:
                        mini = su
                        rot = (thresh[thr], i, thr, mini)
                    elif len(ret) == 0:
                        mini = su
                        rot = (thresh[thr], i, thr, mini)
        ret.append(rot)
    return ret
def order_points(pts):
    """Order 4 quad corners as top-left, top-right, bottom-right, bottom-left.

    Uses the classic x+y / y-x heuristics: the top-left corner has the
    smallest coordinate sum, the bottom-right the largest; the top-right
    has the smallest y-x difference, the bottom-left the largest.
    """
    coord_sum = pts.sum(axis = 1)
    coord_diff = np.diff(pts, axis = 1)
    ordered = np.zeros((4, 2), dtype = "float32")
    ordered[0] = pts[np.argmin(coord_sum)]   # top-left
    ordered[1] = pts[np.argmin(coord_diff)]  # top-right
    ordered[2] = pts[np.argmax(coord_sum)]   # bottom-right
    ordered[3] = pts[np.argmax(coord_diff)]  # bottom-left
    return ordered
def four_point_transform(image, pts):
    """Perspective-warp the quadrilateral `pts` to an axis-aligned rectangle.

    The output size is taken from the quad's longest opposing edges, so the
    rectified patch keeps roughly the original resolution.
    """
    rect = order_points(pts)
    (tl, tr, br, bl) = rect

    def edge_len(p, q):
        # Euclidean distance between two corner points.
        return np.sqrt(((p[0] - q[0]) ** 2) + ((p[1] - q[1]) ** 2))

    maxWidth = max(int(edge_len(br, bl)), int(edge_len(tr, tl)))
    maxHeight = max(int(edge_len(tr, br)), int(edge_len(tl, bl)))
    dst = np.array([
        [0, 0],
        [maxWidth - 1, 0],
        [maxWidth - 1, maxHeight - 1],
        [0, maxHeight - 1]], dtype = "float32")
    transform = cv2.getPerspectiveTransform(rect, dst)
    return cv2.warpPerspective(image, transform, (maxWidth, maxHeight))
def create_matcher():
    """
    Build the brute-force descriptor matcher for the configured extractor:
    L2 norm for float descriptors (SIFT/SURF), Hamming for binary ones
    (BRISK/ORB). Cross-checking is enabled only for plain 'bf' matching.
    """
    cross_check = FEATURE_MATCHER == 'bf'
    if FEATURE_EXTRACTOR in ('sift', 'surf'):
        norm = cv2.NORM_L2
    else:
        norm = cv2.NORM_HAMMING
    return cv2.BFMatcher(norm, crossCheck=cross_check)
# Module-level matcher instance, built once from the config constants above.
MATCHER = create_matcher()
def average(lst):
    """
    Mean of x.distance over a list of match objects.

    Returns 10 as a sentinel "poor match" distance for an empty list, so
    callers comparing averages treat "no matches" as a bad match.
    """
    # Guard first (the original summed the whole list before checking).
    if not lst:
        return 10
    return sum(m.distance for m in lst) / len(lst)
def scale_image(img):
    """Downscale `img` to SCALING percent of its original size."""
    new_width = (img.shape[1] * SCALING) // 100
    new_height = (img.shape[0] * SCALING) // 100
    return cv2.resize(img, (new_width, new_height))
def load_image(image_path, scale=True):
    """Read an image from disk, optionally downscaled by SCALING percent."""
    image = cv2.imread(image_path)
    if scale and SCALING != 100:
        image = scale_image(image)
    return image
def convert_to_grayscale(image):
    """
    Convert images from RGB to Grayscale
    """
    # NOTE(review): cv2.imread / VideoCapture produce BGR frames, so
    # COLOR_RGB2GRAY swaps the red/blue channel weights here. Confirm
    # whether BGR2GRAY was intended — the thresholds downstream may have
    # been tuned against this behavior, so do not change it blindly.
    return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
def get_harris_keypoints(gray):
    """
    Detect Harris corners, refine them to sub-pixel accuracy, and return
    them as cv2.KeyPoint objects (fixed size 13).
    """
    gray = np.float32(gray)
    # Harris response, dilated and thresholded at 1% of the maximum.
    response = cv2.cornerHarris(gray, 2, 3, 0.04)
    response = cv2.dilate(response, None)
    _, response = cv2.threshold(response, 0.01 * response.max(), 255, 0)
    response = np.uint8(response)
    # Collapse each corner blob to a single centroid.
    _, _, _, centroids = cv2.connectedComponentsWithStats(response)
    # Refine centroids to sub-pixel precision.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 100, 0.001)
    refined = cv2.cornerSubPix(gray, np.float32(centroids), (5, 5), (-1, -1), criteria)
    return [cv2.KeyPoint(pt[0], pt[1], 13) for pt in refined]
def get_descriptors(gray_image):
    """
    Compute (keypoints, descriptors) for a grayscale image with the
    configured FEATURE_EXTRACTOR. With HARRIS enabled, descriptors are
    computed at Harris-corner keypoints instead of detected ones.

    Raises:
        ValueError: if FEATURE_EXTRACTOR names an unknown extractor.
            (Previously an unknown value fell through and crashed later
            with an unhelpful NameError on `descriptor`.)
    """
    if FEATURE_EXTRACTOR == 'surf':
        descriptor = cv2.xfeatures2d.SURF_create()
    elif FEATURE_EXTRACTOR == 'sift':
        descriptor = cv2.xfeatures2d.SIFT_create()
    elif FEATURE_EXTRACTOR == 'brisk':
        descriptor = cv2.BRISK_create()
    elif FEATURE_EXTRACTOR == 'orb':
        descriptor = cv2.ORB_create()
    else:
        raise ValueError('Unknown FEATURE_EXTRACTOR: %r' % (FEATURE_EXTRACTOR,))
    if HARRIS:
        kps = get_harris_keypoints(gray_image)
        keypoints, des = descriptor.compute(gray_image, kps)
        return (keypoints, des)
    return descriptor.detectAndCompute(gray_image, None)
def get_matches(kp1_loc, dsc1_loc, kp2_loc, dsc2_loc):
    """
    Get the matching descriptors according to feature matcher.

    'bf' mode keeps the NUM_GOOD_MATCHES closest matches; 'knn' mode keeps
    matches passing Lowe's ratio test against the second-best candidate.

    Returns (average match distance, Nx2 points in image 1, Nx2 points in
    image 2, the kept match objects).
    """
    if FEATURE_MATCHER == 'bf':
        # Sort by distance and keep only the best matches.
        raw_matches = MATCHER.match(dsc1_loc, dsc2_loc)
        raw_matches.sort(key=lambda x: x.distance)
        matches_loc = raw_matches[:NUM_GOOD_MATCHES]
        # print("Brute Force #matches = ", len(raw_matches),
        # " and avd_dist: ", average(matches_loc))
    else:
        # knn: keep m only when clearly better than the runner-up n.
        raw_matches = MATCHER.knnMatch(dsc1_loc, dsc2_loc, 2)
        # print("KNN #matches = ", len(raw_matches))
        matches_loc = []
        for m_val, n_val in raw_matches:
            if m_val.distance < n_val.distance * LOWES_RATIO:
                matches_loc.append(m_val)
    # Gather the matched keypoint coordinates from both images.
    points1_loc = np.zeros((len(matches_loc), 2), dtype=np.float32)
    points2_loc = np.zeros((len(matches_loc), 2), dtype=np.float32)
    for i, match in enumerate(matches_loc):
        points1_loc[i, :] = kp1_loc[match.queryIdx].pt
        points2_loc[i, :] = kp2_loc[match.trainIdx].pt
    return average(matches_loc), points1_loc, points2_loc, matches_loc
def display_image(img):
    """Show a single image in a matplotlib window (blocking)."""
    plt.imshow(img)
    plt.show()
def draw_harris_kps(img):
    """Return a copy of `img` with its Harris keypoints drawn in red."""
    keypoints = get_harris_keypoints(convert_to_grayscale(img))
    return cv2.drawKeypoints(img, keypoints, None, color=(255, 0, 0))
def display_image_with_matches(img1_loc, kps1_loc, img2_loc, kps2_loc, matches_loc):
    """
    Show two images side by side with matched keypoints joined by lines.
    """
    combined = cv2.drawMatches(img1_loc, kps1_loc, img2_loc, kps2_loc, matches_loc,
                               None, flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
    plt.imshow(combined)
    plt.show()
def get_hom(img1_descriptors, img2):
    """
    Estimate the homography from image 1 (precomputed descriptors) to img2.

    Returns (homography, kept matches, average match distance, img2
    keypoints).
    """
    gray2 = convert_to_grayscale(img2)
    (kp1, dsc1) = img1_descriptors
    (kp2, dsc2) = get_descriptors(gray2)
    avg_dist, points1, points2, matches = get_matches(kp1, dsc1, kp2, dsc2)
    hom, _ = cv2.findHomography(points1, points2, cv2.RANSAC)
    return hom, matches, avg_dist, kp2
def get_homography_from_corners(corners, ref_image):
    """
    Homography mapping the reference marker square onto the detected
    corner quadrilateral. Returns cv2's (homography, status) pair.
    """
    pts_dst = np.array([[corner[0], corner[1]] for corner in corners[:4]])
    homography, status = cv2.findHomography(ref_image, pts_dst)
    return homography, status
def get_matrix(camera_params, homography, translate=None):
    """
    Recover a 3x4 projection matrix from the camera intrinsics and a
    planar homography, orthonormalising the two rotation columns.

    `translate`, when given, is a 3x3 transform applied between the
    intrinsics and the recovered [R|t].
    """
    # Strip the intrinsics to get the raw [r1 r2 t] columns.
    rot_trans = np.dot(np.linalg.inv(camera_params), homography)
    col1 = rot_trans[:, 0]
    col2 = rot_trans[:, 1]
    trans = rot_trans[:, 2]
    # Normalise by the geometric mean of the column norms.
    scale = math.sqrt(np.linalg.norm(col1, 2) * np.linalg.norm(col2, 2))
    col1 = col1 / scale
    col2 = col2 / scale
    trans = trans / scale
    # Build an orthonormal basis closest to (col1, col2).
    c_sum = col1 + col2
    c_cross = np.cross(col1, col2)
    d_axis = np.cross(c_sum, c_cross)
    rot1 = (1 / math.sqrt(2)) * (c_sum / np.linalg.norm(c_sum, 2) +
                                 d_axis / np.linalg.norm(d_axis, 2))
    rot2 = (1 / math.sqrt(2)) * (c_sum / np.linalg.norm(c_sum, 2) -
                                 d_axis / np.linalg.norm(d_axis, 2))
    rot3 = np.cross(rot1, rot2)
    proj = np.stack((rot1, rot2, rot3, trans)).T
    if translate is None:
        return np.dot(camera_params, proj)
    return np.dot(camera_params, np.dot(translate, proj))
def render(img, obj, projection, model, color=False):
"""
Render a loaded obj model into the current video frame
"""
old_img = img.copy()
vertices = obj.vertices
| |
+ "$root",
name : "",
data : {
"$type" : "none",
"$width" : 1,
"$height" : 1
},
children : ch
};
delegate.loadJSON(root);
this.normalizeDims();
delegate.compute();
delegate.select(delegate.root);
if (animate) {
if (isH) {
delegate.fx.animate({
modes : ["node-property:width:dimArray"],
duration : 1500,
/**
* @return {undefined}
*/
onComplete : function() {
/** @type {boolean} */
that.busy = false;
}
});
} else {
delegate.fx.animate({
modes : ["node-property:height:dimArray"],
duration : 1500,
/**
* @return {undefined}
*/
onComplete : function() {
/** @type {boolean} */
that.busy = false;
}
});
}
} else {
/** @type {boolean} */
this.busy = false;
}
},
  /**
   * Merge new values from `json` into the existing chart and re-layout.
   * Each entry of json.values is matched to a node by label and has its
   * valueArray (and, when json.label is set, its stringArray) replaced;
   * the chart is then recomputed and, when configured, animated.
   * `onComplete.onComplete()` fires once the animation finishes.
   * No-op while a previous update is still running (busy flag).
   * @param {Object} json new data: {values: [{label, values}], label?}
   * @param {Object} onComplete optional holder with an onComplete callback
   * @return {undefined}
   */
  updateJSON : function(json, onComplete) {
    if (this.busy) {
      return;
    }
    /** @type {boolean} */
    this.busy = true;
    this.select(false, false, false);
    var delegate = this.delegate;
    var graph = delegate.graph;
    var attributes = json.values;
    var animate = this.config.animate;
    var that = this;
    /** @type {boolean} */
    var isH = this.config.orientation == "horizontal";
    // Push the incoming values into the matching graph nodes.
    $.each(attributes, function(v) {
      var n = graph.getByName(v.label);
      if (n) {
        n.setData("valueArray", $.splat(v.values));
        if (json.label) {
          n.setData("stringArray", $.splat(json.label));
        }
      }
    });
    this.normalizeDims();
    delegate.compute();
    delegate.select(delegate.root);
    if (animate) {
      // Horizontal charts animate bar widths, vertical ones bar heights.
      if (isH) {
        delegate.fx.animate({
          modes : ["node-property:width:dimArray"],
          duration : 1500,
          /**
           * Clear the busy flag and notify the caller.
           * @return {undefined}
           */
          onComplete : function() {
            /** @type {boolean} */
            that.busy = false;
            if (onComplete) {
              onComplete.onComplete();
            }
          }
        });
      } else {
        delegate.fx.animate({
          modes : ["node-property:height:dimArray"],
          duration : 1500,
          /**
           * Clear the busy flag and notify the caller.
           * @return {undefined}
           */
          onComplete : function() {
            /** @type {boolean} */
            that.busy = false;
            if (onComplete) {
              onComplete.onComplete();
            }
          }
        });
      }
    }
  },
/**
* @param {string} id
* @param {boolean} lab
* @return {undefined}
*/
select : function(id, lab) {
if (!this.config.hoveredColor) {
return;
}
var s = this.selected;
if (s.id != id || s.name != lab) {
/** @type {string} */
s.id = id;
/** @type {boolean} */
s.name = lab;
s.color = this.config.hoveredColor;
this.delegate.graph.eachNode(function(n) {
if (id == n.id) {
n.setData("border", s);
} else {
n.setData("border", false);
}
});
this.delegate.plot();
}
},
/**
* @return {?}
*/
getLegend : function() {
var legend = {};
var n;
this.delegate.graph.getNode(this.delegate.root).eachAdjacency(function(adj) {
n = adj.nodeTo;
});
var colors = n.getData("colorArray");
var colorsLen = colors.length;
$.each(n.getData("stringArray"), function(s, i) {
legend[s] = colors[i % colorsLen];
});
return legend;
},
/**
* @return {?}
*/
getMaxValue : function() {
/** @type {number} */
var maxValue = 0;
/** @type {boolean} */
var stacked = this.config.type.split(":")[0] == "stacked";
this.delegate.graph.eachNode(function(n) {
var attributes = n.getData("valueArray");
/** @type {number} */
var acum = 0;
if (!attributes) {
return;
}
if (stacked) {
$.each(attributes, function(v) {
acum += +v;
});
} else {
/** @type {number} */
acum = Math.max.apply(null, attributes);
}
maxValue = maxValue > acum ? maxValue : acum;
});
return maxValue;
},
/**
* @param {string} type
* @return {undefined}
*/
setBarType : function(type) {
/** @type {string} */
this.config.type = type;
this.delegate.config.Node.type = "barchart-" + type.split(":")[0];
},
  /**
   * Recompute every node's width/height/dimArray so the bars fill the
   * canvas: the cross axis is split evenly between the root's children,
   * and the main axis is scaled so the maximum bar value spans the free
   * space left after margins, labels and aggregate labels.
   * @return {undefined}
   */
  normalizeDims : function() {
    var root = this.delegate.graph.getNode(this.delegate.root);
    // Count the root's children (one bar per child).
    /** @type {number} */
    var l = 0;
    root.eachAdjacency(function() {
      l++;
    });
    var maxValue = this.getMaxValue() || 1;
    var size = this.delegate.canvas.getSize();
    var config = this.config;
    var margin = config.Margin;
    var marginWidth = margin.left + margin.right;
    var marginHeight = margin.top + margin.bottom;
    /** @type {boolean} */
    var horz = config.orientation == "horizontal";
    // Cross-axis space per bar, after margins and inter-bar offsets.
    /** @type {number} */
    var fixedDim = (size[horz ? "height" : "width"] - (horz ? marginHeight : marginWidth) - (l - 1) * config.barsOffset) / l;
    var animate = config.animate;
    // Main-axis space available for the tallest bar.
    /** @type {number} */
    var height = size[horz ? "width" : "height"] - (horz ? marginWidth : marginHeight) - (!horz && (config.showAggregates && config.Label.size + config.labelOffset)) - (config.showLabels && config.Label.size + config.labelOffset);
    /** @type {string} */
    var dim1 = horz ? "height" : "width";
    /** @type {string} */
    var dim2 = horz ? "width" : "height";
    this.delegate.graph.eachNode(function(n) {
      /** @type {number} */
      var acum = 0;
      /** @type {Array} */
      var animateValue = [];
      $.each(n.getData("valueArray"), function(v) {
        acum += +v;
        animateValue.push(0);
      });
      n.setData(dim1, fixedDim);
      if (animate) {
        // Animate from zeroed dims to the final scaled dims.
        n.setData(dim2, acum * height / maxValue, "end");
        n.setData("dimArray", $.map(n.getData("valueArray"), function(n) {
          return n * height / maxValue;
        }), "end");
        var dimArray = n.getData("dimArray");
        if (!dimArray) {
          n.setData("dimArray", animateValue);
        }
      } else {
        n.setData(dim2, acum * height / maxValue);
        n.setData("dimArray", $.map(n.getData("valueArray"), function(n) {
          return n * height / maxValue;
        }));
      }
    });
  }
});
// Default option set for PieChart; $extend merges it over the base Options.
Options.PieChart = {
  $extend : true,
  // Animate slice transitions when the data updates.
  animate : true,
  offset : 25,
  sliceOffset : 0,
  labelOffset : 3,
  // Render type; the part before ":" selects the renderer variant.
  type : "stacked",
  // Border color applied to the hovered slice.
  hoveredColor : "#9fd4ff",
  Events : {
    enable : false,
    /** @type {function (): undefined} */
    onClick : $.empty
  },
  Tips : {
    enable : false,
    /** @type {function (): undefined} */
    onShow : $.empty,
    /** @type {function (): undefined} */
    onHide : $.empty
  },
  showLabels : true,
  resizeLabels : false,
  updateHeights : false
};
// Radial layout: places the root at the origin and each subtree on
// concentric rings, allocating angular sectors proportional to subtree
// "angular width".
Layout.Radial = new Class({
  /**
   * Compute node positions for the given property set(s): propagate
   * levels from the root, derive the per-level ring distance function,
   * compute angular widths, then assign polar coordinates.
   * @param {?} adj property name(s); defaults to current/start/end
   * @return {undefined}
   */
  compute : function(adj) {
    var lab = $.splat(adj || ["current", "start", "end"]);
    column.compute(this.graph, lab, this.config);
    this.graph.computeLevels(this.root, 0, "ignore");
    var lengthFunc = this.createLevelDistanceFunc();
    this.computeAngularWidths(lab);
    this.computePositions(lab, lengthFunc);
  },
  /**
   * Assign a polar position to every node for each property in `node`
   * (a list of property names): the root gets the full 2*PI span, and
   * each child receives a sector proportional to its subtree's angular
   * width within its parent's span.
   * @param {?} node list of property names to write positions into
   * @param {Object} getLength maps a node to its ring radius
   * @return {undefined}
   */
  computePositions : function(node, getLength) {
    var employees = node;
    var graph = this.graph;
    var root = graph.getNode(this.root);
    var parent = this.parent;
    var config = this.config;
    /** @type {number} */
    var i = 0;
    var l = employees.length;
    // Root sits at the origin with the full circle as its span.
    for (;i < l;i++) {
      var pi = employees[i];
      root.setPos($P(0, 0), pi);
      root.setData("span", Math.PI * 2, pi);
    }
    root.angleSpan = {
      begin : 0,
      end : 2 * Math.PI
    };
    graph.eachBFS(this.root, function(elem) {
      // Sector inherited from the parent for this node's children.
      /** @type {number} */
      var angleSpan = elem.angleSpan.end - elem.angleSpan.begin;
      var angleInit = elem.angleSpan.begin;
      var len = getLength(elem);
      /** @type {number} */
      var totalAngularWidths = 0;
      /** @type {Array} */
      var subnodes = [];
      var maxDim = {};
      // Gather children, their total angular width and max "dim".
      elem.eachSubnode(function(sib) {
        totalAngularWidths += sib._treeAngularWidth;
        /** @type {number} */
        var i = 0;
        var l = employees.length;
        for (;i < l;i++) {
          var pi = employees[i];
          var dim = sib.getData("dim", pi);
          maxDim[pi] = pi in maxDim ? dim > maxDim[pi] ? dim : maxDim[pi] : dim;
        }
        subnodes.push(sib);
      }, "ignore");
      // Keep sibling order stable by distance when available.
      if (parent && (parent.id == elem.id && (subnodes.length > 0 && subnodes[0].dist))) {
        subnodes.sort(function(a, b) {
          return(a.dist >= b.dist) - (a.dist <= b.dist);
        });
      }
      /** @type {number} */
      var i = 0;
      /** @type {number} */
      var valuesLen = subnodes.length;
      for (;i < valuesLen;i++) {
        var child = subnodes[i];
        if (!child._flag) {
          // Sector share proportional to the child's subtree width;
          // position the child at the sector's bisector.
          /** @type {number} */
          var angleProportion = child._treeAngularWidth / totalAngularWidths * angleSpan;
          var theta = angleInit + angleProportion / 2;
          /** @type {number} */
          var padIndex = 0;
          var l = employees.length;
          for (;padIndex < l;padIndex++) {
            var pi = employees[padIndex];
            child.setPos($P(theta, len), pi);
            child.setData("span", angleProportion, pi);
            child.setData("dim-quotient", child.getData("dim", pi) / maxDim[pi], pi);
          }
          child.angleSpan = {
            begin : angleInit,
            end : angleInit + angleProportion
          };
          angleInit += angleProportion;
        }
      }
    }, "ignore");
  },
  /**
   * Seed each node's own angular width from its "angularWidth" data
   * (default 5), attenuated by its BFS depth.
   * @param {?} prop property names; only prop[0] is read
   * @return {undefined}
   */
  setAngularWidthForNodes : function(prop) {
    this.graph.eachBFS(this.root, function(elem, i) {
      var diamValue = elem.getData("angularWidth", prop[0]) || 5;
      /** @type {number} */
      elem._angularWidth = diamValue / i;
    }, "ignore");
  },
  /**
   * Propagate subtree angular widths across the whole graph.
   * @return {undefined}
   */
  setSubtreesAngularWidth : function() {
    var paragraph = this;
    this.graph.eachNode(function(child) {
      paragraph.setSubtreeAngularWidth(child);
    }, "ignore");
  },
  /**
   * Recursively set elem._treeAngularWidth to the larger of the node's
   * own width and the sum of its children's subtree widths.
   * @param {?} elem
   * @return {undefined}
   */
  setSubtreeAngularWidth : function(elem) {
    var paragraph = this;
    var nodeAW = elem._angularWidth;
    /** @type {number} */
    var sumAW = 0;
    elem.eachSubnode(function(child) {
      paragraph.setSubtreeAngularWidth(child);
      sumAW += child._treeAngularWidth;
    }, "ignore");
    /** @type {number} */
    elem._treeAngularWidth = Math.max(nodeAW, sumAW);
  },
  /**
   * Compute per-node then per-subtree angular widths for `prop`.
   * @param {?} prop
   * @return {undefined}
   */
  computeAngularWidths : function(prop) {
    this.setAngularWidthForNodes(prop);
    this.setSubtreesAngularWidth();
  }
});
$jit.Sunburst = new Class({
Implements : [valid, Extras, Layout.Radial],
/**
* @param {?} controller
* @return {undefined}
*/
initialize : function(controller) {
var $Sunburst = $jit.Sunburst;
var config = {
interpolation : "linear",
levelDistance : 100,
Node : | |
<gh_stars>0
import pytest
import requests
import logging
import json
import copy
import random
# ----------------------------------- basic -----------------------------------
class TestHTTPBasic:
default_conf = {'cpu_cache_capacity': 4, 'cache_insert_data': False, 'use_blas_threshold': 1100, 'gpu_search_threshold': 1000}
def get_mode(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'system/mode')
return res.json()['reply'].upper()
def test_server_state(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_1(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/myparams')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_2(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state?q=myparams')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_3(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/10')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_4(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state?q=10')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_5(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '/state')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_6(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '//state')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_7(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '/////state')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_8(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_9(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '/state/')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_10(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state//')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_11(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '//state//')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_12(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/////')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_13(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '/////state/////')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_14(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/?')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_15(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state%20')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_16(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state?')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_17(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state??')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_18(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state\/')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_19(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + '\/state')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_20(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/%20')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_21(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + r'%20state')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_22(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + r'%20/state')
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_23(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state#')
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_24(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'state/ ')
logging.getLogger().info(res.text)
assert res.status_code == 404
assert 'Current url has no mapping' in res.text
def test_server_state_25(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
rurl = base_url + 'state#'
logging.getLogger().info(rurl)
mm = rurl.replace('.', '%2E')
res = requests.get(mm)
assert res.status_code == 200
assert res.json()['code'] == 0
assert res.json()['message'] == 'Success'
def test_server_state_26(self, args):
    """A query string on /state does not affect routing."""
    endpoint = 'http://{}:{}/'.format(args['ip'], args['port']) + 'state?a=1&b=2'
    res = requests.get(endpoint)
    payload = res.json()
    assert res.status_code == 200
    assert payload['code'] == 0
    assert payload['message'] == 'Success'
def test_server_state_27(self, args):
    """A query string with an encoded ampersand is still routed to /state."""
    endpoint = 'http://{}:{}/'.format(args['ip'], args['port']) + 'state?a=1%26b=2'
    res = requests.get(endpoint)
    payload = res.json()
    assert res.status_code == 200
    assert payload['code'] == 0
    assert payload['message'] == 'Success'
def test_server_post_state_1(self, args):
    """POST is not mapped for /state; expect the 404 handler."""
    res = requests.post('http://{}:{}/'.format(args['ip'], args['port']) + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
def test_server_post_state_2(self, args):
    """POST /state returns 404 both without and with an empty JSON body.

    Bug fix: the original discarded the response of the data-carrying
    request and re-asserted on the body-less one, so the JSON-body case
    was never actually checked.
    """
    base_url = 'http://%s:%s/' % (args['ip'], args['port'])
    res = requests.post(base_url + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
    req = {}
    res_with_body = requests.post(base_url + 'state', data=json.dumps(req))
    assert res_with_body.status_code == 404
    assert 'Current url has no mapping' in res_with_body.text
def test_server_post_state_3(self, args):
    """POST /state returns 404 both without and with a non-empty JSON body.

    Bug fix: the original discarded the response of the data-carrying
    request and re-asserted on the body-less one, so the JSON-body case
    was never actually checked.
    """
    base_url = 'http://%s:%s/' % (args['ip'], args['port'])
    res = requests.post(base_url + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
    req = {'a': 1, 'b': 2}
    res_with_body = requests.post(base_url + 'state', data=json.dumps(req))
    assert res_with_body.status_code == 404
    assert 'Current url has no mapping' in res_with_body.text
def test_server_put_state_1(self, args):
    """PUT is not mapped for /state; expect the 404 handler."""
    res = requests.put('http://{}:{}/'.format(args['ip'], args['port']) + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
def test_server_put_state_2(self, args):
    """PUT /state returns 404 both without and with an empty JSON body.

    Bug fixes: the data-carrying request used requests.post in a PUT test
    (copy-paste error), and its response was discarded instead of asserted.
    """
    base_url = 'http://%s:%s/' % (args['ip'], args['port'])
    res = requests.put(base_url + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
    req = {}
    res_with_body = requests.put(base_url + 'state', data=json.dumps(req))
    assert res_with_body.status_code == 404
    assert 'Current url has no mapping' in res_with_body.text
def test_server_put_state_3(self, args):
    """PUT /state returns 404 both without and with a non-empty JSON body.

    Bug fixes: the data-carrying request used requests.post in a PUT test
    (copy-paste error), and its response was discarded instead of asserted.
    """
    base_url = 'http://%s:%s/' % (args['ip'], args['port'])
    res = requests.put(base_url + 'state')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
    req = {'a': 1, 'b': 2}
    res_with_body = requests.put(base_url + 'state', data=json.dumps(req))
    assert res_with_body.status_code == 404
    assert 'Current url has no mapping' in res_with_body.text
def test_server_options_state_1(self, args):
    """OPTIONS on /state answers 204 No Content."""
    res = requests.options('http://{}:{}/'.format(args['ip'], args['port']) + 'state')
    assert res.status_code == 204
def test_server_options_state_2(self, args):
    """OPTIONS on /state with a query string still answers 204."""
    res = requests.options('http://{}:{}/'.format(args['ip'], args['port']) + 'state?a=1')
    assert res.status_code == 204
def test_server_options_state_3(self, args):
    """OPTIONS on an unknown /state sub-path gets the 404 handler."""
    res = requests.options('http://{}:{}/'.format(args['ip'], args['port']) + 'state/aaa')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
def test_server_devices(self, args):
    """GET /devices reports both the cpu and gpus entries."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + 'devices')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_1(self, args):
    """A sub-path under /devices is not a valid route."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + 'devices/myparams')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
def test_server_devices_2(self, args):
    """A query string on /devices does not affect routing."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + 'devices?q=myparams')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_3(self, args):
    """A numeric sub-path under /devices is not a valid route."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + 'devices/10')
    assert res.status_code == 404
    assert 'Current url has no mapping' in res.text
def test_server_devices_4(self, args):
    """A numeric query value on /devices does not affect routing."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + 'devices?q=10')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_5(self, args):
    """One extra leading slash is collapsed and /devices still resolves."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + '/devices')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_6(self, args):
    """Two extra leading slashes are collapsed and /devices still resolves."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + '//devices')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_7(self, args):
    """Many extra leading slashes are collapsed and /devices still resolves."""
    res = requests.get('http://{}:{}/'.format(args['ip'], args['port']) + '/////devices')
    payload = res.json()
    assert res.status_code == 200
    assert 'cpu' in payload
    assert 'gpus' in payload
def test_server_devices_8(self, args):
base_url = 'http://%s:%s/' % (args['ip'], args['port'])
res = requests.get(base_url + 'devices/')
assert res.status_code == | |
#@+leo-ver=5-thin
#@+node:peckj.20150428142633.1: * @file python_terminal.py
#@@language python
#@@tabwidth -4
# this code from http://stackoverflow.com/questions/12431555/enabling-code-completion-in-an-embedded-python-interpreter
# with modifications from <NAME>
# to do:
# styling
#@+<< docstring >>
#@+node:peckj.20150428142633.2: ** << docstring >>
'''Provides an interactive python terminal in the log pane.
**Warning**: Use at your own risk.
Numerous significant problems have been reported, including segfaults.
By <NAME>
Usage
=====
Enabling this plugin will add a new tab to the Log pane, labeled "Python Console". This is a fully interactive
python command shell, with access to `g`, `c`, and `p` included!
Features:
- Includes support for g, c, and p
- Each outline tab has a separate python console, with a separate namespace (aside from g, of course)
- Extremely primitive tab-completion
- Command history (use !hist to list, and !hist(n) to recall the n'th entry)
- !clear to clear the console
Caveats:
Stdout and stderr are properly redirected to the interactive console pane while
it has focus, but properly reset to their previous values when focus is lost. If
code executed inside the interactive console pane needs to output to the
command-line stdout/stderr, please use sys.__stdout__ and sys.__stderr__. - Just
as with scripts, if you do something dangerous, you're on your own
This code is largely lifted from
http://stackoverflow.com/questions/12431555/
enabling-code-completion-in-an-embedded-python-interpreter,
with some modifications made for Leo embedding.
'''
#@-<< docstring >>
#@+<< imports >>
#@+node:peckj.20150428142729.2: ** << imports >>
import leo.core.leoGlobals as g
# import os
import re
import sys
import code
from rlcompleter import Completer
from leo.core.leoQt import QtWidgets,QtCore
#@-<< imports >>
#@+others
#@+node:peckj.20150428142729.3: ** class MyInterpreter
if QtWidgets:

    class MyInterpreter(QtWidgets.QWidget):
        """Thin QWidget wrapper hosting a single PyInterp console in a tight HBox."""

        def __init__(self, parent, c):
            super(MyInterpreter, self).__init__(parent)
            box = QtWidgets.QHBoxLayout()
            self.setLayout(box)
            self.textEdit = PyInterp(self, c)
            # Seed the interpreter namespace from this frame's locals
            # (PyInterp.initInterpreter only keeps the 'self' entry).
            self.textEdit.initInterpreter(locals())
            box.addWidget(self.textEdit)
            box.setContentsMargins(0, 0, 0, 0)
            box.setSpacing(0)
#@+node:peckj.20150428142729.5: ** class PyInterp (QTextEdit)
if QtWidgets:
class PyInterp(QtWidgets.QTextEdit):
#@+others
#@+node:peckj.20150428142729.6: *3* class InteractiveInterpreter (code.InteractiveInterpreter)
class InteractiveInterpreter(code.InteractiveInterpreter):
    """A code.InteractiveInterpreter whose namespace is pre-seeded with g, c and p."""
    #@+others
    #@+node:peckj.20150428142729.7: *4* InteractiveInterpreter.__init__
    def __init__(self, locals, c):
        """Inject g, c and p into *locals*, then delegate to the base ctor."""
        self.c = c
        ns = locals
        ns['c'] = c
        ns['g'] = g
        ns['p'] = c.p
        code.InteractiveInterpreter.__init__(self, ns)
    #@+node:peckj.20150428142729.8: *4* InteractiveInterpreter.runIt
    def runIt(self, command):
        """Compile and execute one source string inside this interpreter."""
        code.InteractiveInterpreter.runsource(self, command)
    #@-others
#@+node:peckj.20150428142729.9: *3* PyInterp.__init__
def __init__(self, parent, c):
    """Ctor: build the console text widget, print the banner and hook Leo events."""
    super(PyInterp, self).__init__(parent)
    #
    # this widget swallows stdout + stderr while focused,
    # but resets them upon losing focus
    if not g.user_dict.get('old_stdout', None):
        g.user_dict['old_stdout'] = sys.stdout
    if not g.user_dict.get('old_stderr', None):
        g.user_dict['old_stderr'] = sys.stderr
    #
    # init ivars.
    self.indent = 0  # auto-indent (in spaces) applied after the prompt marker
    self.refreshMarker = False # to change back to >>> from ...
    self.multiLine = False # code spans more than one line
    ### self.command = '' # command to be ran
    self.printBanner() # print sys info
    self.insert_marker() # make the >>> or ... marker
    self.history = [] # list of commands entered, most recent first
    self.historyIndex = -1  # -1 means "no history entry selected"
    self.interpreterLocals = {}
    self.c = c
    #
    # initialize interpreter with self locals
    self.initInterpreter(locals())
    #
    # update p when new node selected
    g.registerHandler('select2', self.select2_hook)
#@+node:peckj.20150428142729.10: *3* PyInterp.select2_hook
def select2_hook(self, tag, keywords):
    """Leo 'select2' handler: refresh the console's `p` after a node switch."""
    self.interpreter.runIt('p = c.p')
#@+node:peckj.20150428142729.11: *3* PyInterp.printBanner
def printBanner(self):
    """Write the console's greeting lines into the pane."""
    greeting = (
        'Type !hist for a history view and !hist(n) history index recall\n',
        'Type !clear to clear this pane\n',
    )
    for text in greeting:
        self.write(text)
#@+node:peckj.20150428142729.12: *3* PyInterp.insert_marker
def insert_marker(self):
    """Insert the prompt marker ('... ' in continuation mode, else '>>> ')
    followed by the current auto-indent."""
    if self.multiLine:
        marker = '... '
    else:
        marker = '>>> '
    self.insertPlainText(marker + ' ' * self.indent)
#@+node:peckj.20150428142729.13: *3* PyInterp.initInterpreter
def initInterpreter(self, interpreterLocals=None):
    """(Re)create self.interpreter, folding *interpreterLocals* into its namespace."""
    if interpreterLocals:
        # when we pass in locals, we don't want it to be named "self"
        # so we rename it with the name of the class that did the passing
        # and reinsert the locals back into the interpreter dictionary
        selfName = interpreterLocals['self'].__class__.__name__
        interpreterLocalVars = interpreterLocals.pop('self')
        self.interpreterLocals[selfName] = interpreterLocalVars
    else:
        # NOTE(review): a falsy argument (None or {}) *replaces* the existing
        # dict — calling initInterpreter() with no args sets
        # self.interpreterLocals to None. Confirm that is intended.
        self.interpreterLocals = interpreterLocals
    self.interpreter = self.InteractiveInterpreter(self.interpreterLocals, self.c)
#@+node:peckj.20150428142729.14: *3* PyInterp.updateInterpreterLocals
def updateInterpreterLocals(self, newLocals):
    """Expose *newLocals* in the interpreter namespace under its class name."""
    self.interpreterLocals[newLocals.__class__.__name__] = newLocals
#@+node:peckj.20150428142729.15: *3* PyInterp.write
def write(self, line):
    """Append *line* to the pane and keep the cursor scrolled into view."""
    self.insertPlainText(line)
    self.ensureCursorVisible()
#@+node:peckj.20150428142729.16: *3* PyInterp.clearCurrentBlock
def clearCurrentBlock(self):
    """Delete the text of the current (last) block after the 4-char marker.

    Returns True when something was deleted, None for an empty block.
    """
    count = len(self.document().lastBlock().text()[4:])
    if not count:
        return None
    # Qt offers no bulk delete here, so remove one character at a time.
    for _ in range(count):
        self.textCursor().deletePreviousChar()
    return True
#@+node:peckj.20150428142729.17: *3* PyInterp.recallHistory
def recallHistory(self):
    """Replace the current input line with history[historyIndex].

    Used when scrolling through history with the arrow keys; -1 means
    "no entry selected" and leaves the line cleared.
    """
    self.clearCurrentBlock()
    if self.historyIndex != -1:
        self.insertPlainText(self.history[self.historyIndex])
    return True
#@+node:peckj.20150428142729.18: *3* PyInterp.customCommands
def customCommands(self, command):
    """Handle console meta-commands: !hist, !hist(n), !clear, quit()/exit().

    Returns True when *command* was consumed here, False to let the
    interpreter run it normally.

    Fixes: raw strings for the regexes (the originals relied on
    anomalous backslash escapes), removed a stray `line = line =`
    double assignment, and `endswith(':')` instead of `command[-1]`
    (which raised IndexError on an empty history entry).
    """
    if command == '!hist': # display history
        self.append('') # move down one line
        backup = self.interpreterLocals.copy()
        history = self.history[:]
        history.reverse()
        width = len(str(len(history)))
        for i, entry in enumerate(history):
            # right-align the index so the entries line up
            line = ' ' * (width - len(str(i))) + '%i: %s' % (i, entry) + '\n'
            self.write(line)
        self.updateInterpreterLocals(backup)
        self.insert_marker()
        return True
    if re.match(r'!hist\(\d+\)', command): # recall command from history
        backup = self.interpreterLocals.copy()
        history = self.history[:]
        history.reverse()
        index = int(command[6:-1])
        self.clearCurrentBlock()
        command = history[index]
        if command.endswith(':'):
            # recalled compound statement: re-enter continuation mode
            self.multiLine = True
        self.write(command)
        self.updateInterpreterLocals(backup)
        return True
    if re.match(r'(quit|exit)\(\)', command): # prevent quitting!
        self.append('')
        self.write('Cannot quit() from an embedded console.\n')
        self.insert_marker()
        return True
    if re.match('!clear', command): # clear the screen
        self.clear()
        self.insert_marker()
        return True
    return False
#@+node:peckj.20150428142729.19: *3* PyInterp.keyPressEvent & helper
def keyPressEvent(self, event):
    """Intercept console keys: Tab completion, Up/Down history scrolling,
    Home, marker protection, and Enter; all other keys pass through."""
    qt = QtCore.Qt
    if event.key() == qt.Key_Tab:
        # Tab: complete the current input (the text after the 4-char marker).
        line = str(self.document().lastBlock().text())[4:]
        completer = Completer(self.interpreter.locals)
        suggestion = completer.complete(line, 0)
        if suggestion is not None:
            # insert only the part the user has not typed yet
            self.insertPlainText(suggestion[len(line):])
        return None
    if event.key() == qt.Key_Down:
        # Down arrow: step towards more recent history entries.
        if self.historyIndex == len(self.history):
            self.historyIndex -= 1
        try:
            if self.historyIndex > -1:
                self.historyIndex -= 1
                self.recallHistory()
            else:
                self.clearCurrentBlock()
        except Exception:
            # swallow out-of-range scrolling rather than crash the console
            pass
        return None
    if event.key() == qt.Key_Up:
        # Up arrow: step towards older history entries.
        try:
            if len(self.history) - 1 > self.historyIndex:
                self.historyIndex += 1
                self.recallHistory()
            else:
                self.historyIndex = len(self.history)
        except Exception:
            pass
        return None
    if event.key() == qt.Key_Home:
        # set cursor to position 4 in current block. 4 because that's where
        # the marker stops
        blockLength = len(self.document().lastBlock().text()[4:])
        lineLength = len(self.document().toPlainText())
        position = lineLength - blockLength
        textCursor = self.textCursor()
        textCursor.setPosition(position)
        self.setTextCursor(textCursor)
        return None
    if event.key() in [qt.Key_Left, qt.Key_Backspace]:
        # don't allow deletion of marker
        if self.textCursor().positionInBlock() == 4:
            return None
    if event.key() in [qt.Key_Return, qt.Key_Enter]:
        self.doEnter(event)
        return None
    # allow all other key events
    super(PyInterp, self).keyPressEvent(event)
#@+node:ekr.20180307132016.1: *4* PyInterp.doEnter
def doEnter(self, event):
    '''Handle the <return> key: gather the current input (possibly a pasted
    multi-line block), run meta-commands, update history/indent, and either
    continue the statement or compile and run it.'''
    #
    # Set cursor to end of line to avoid line splitting
    textCursor = self.textCursor()
    position = len(self.document().toPlainText())
    textCursor.setPosition(position)
    self.setTextCursor(textCursor)
    lines = []
    block = self.document().lastBlock()
    #
    # Scan backward, looking for lines.
    pasteFlag = False
    while block:
        line = g.toUnicode(block.text())
        line = line.replace('\t',' '*4)
        block = block.previous()
        if line.startswith('>>> '):
            # primary prompt: start of this statement — stop scanning
            lines.insert(0, line[4:].rstrip())
            break
        elif line.startswith('... '):
            # continuation prompt: part of the same statement
            lines.insert(0, line[4:].rstrip())
        else:
            # no prompt at all: this text was pasted in
            lines.insert(0, line.rstrip())
            pasteFlag = True
    # Enter continuation mode for pasted lines.
    if pasteFlag and lines and lines[-1].strip():
        self.multiLine = True
    #
    # Always end the input.
    self.append('')
    #
    # Handle special lines.
    last_line = lines and lines[-1]
    if self.customCommands(last_line):
        return
    # Handle the history and indent.
    if last_line.strip():
        self.history.insert(0, last_line.rstrip())
        self.indent = len(last_line) - len(last_line.lstrip())
        if last_line.rstrip().endswith(':'):
            self.indent += 4
    #
    # Just insert a marker and return if the last line is a non-blank
    # continued line (the statement is not finished yet).
    if len(lines) > 1 and last_line.strip():
        self.insert_marker()
        return
    #
    # Clean the lines.
    lines = [z for z in lines if z.strip()]
    #
    # Just add the marker if we are done.
    if not lines:
        self.insert_marker()
        return
    the_code = self.compile_lines(lines)
    if the_code is None:
        # Continuation mode.
        self.multiLine = True
        self.insert_marker()
        return
    #
    # End continuation mode.
    self.multiLine = False
    self.indent = 0
    if the_code != 'error':
        self.run_code(the_code)
    self.insert_marker()
#@+node:ekr.20180525110448.1: *5* PyInterp.compile_lines
def compile_lines(self, lines):
    '''Carefully call code.compile_command and return the result.

    Returns a code object when the source is complete, None when more
    input is required (continuation), or the string 'error' after a
    syntax error or other failure has been reported to the console.
    '''
    #
    # The compile command saves a lot of guessing...
    # https://docs.python.org/3/library/code.html#code.compile_command
    try:
        interp = self.interpreter
        source = '\n'.join(lines).rstrip() + '\n'
        return code.compile_command(source)
    except SyntaxError:
        # When pasting, try to separate lines with semicolons.
        if len(lines) > 1:
            try:
                source = ';'.join(lines).rstrip() + '\n'
                return code.compile_command(source)
            except SyntaxError:
                interp.showsyntaxerror()
            except Exception:
                interp.showtraceback()
        else:
            interp.showsyntaxerror()
    except Exception:
        interp.showtraceback()
    #
    # End the previous editing if there is any error.
    self.multiLine = False
    return 'error'
#@+node:ekr.20180525110907.1: *5* PyInterp.run_code
def run_code(self, the_code):
interp = self.interpreter
try:
interp.runcode(the_code)
| |
CIN HAU SANDHI GLOTTAL STOP FINAL': 72426,
'PAU CIN HAU SANDHI TONE': 72428,
'PAU CIN HAU SANDHI TONE FINAL': 72430,
'PAU CIN HAU SANDHI TONE LONG': 72427,
'PAU CIN HAU SANDHI TONE LONG FINAL': 72429,
'PAW PRINTS': 128062,
'PDF': 983220,
'PDI': 983229,
'PEACH': 127825,
'PEAR': 127824,
'PEDESTRIAN': 128694,
'PEN OVER STAMPED ENVELOPE': 128390,
'PENGUIN': 128039,
'PENSIVE FACE': 128532,
'PENTAGRAM': 9956,
'PERFORMING ARTS': 127917,
'PERSEVERING FACE': 128547,
'PERSON BOWING DEEPLY': 128583,
'PERSON FROWNING': 128589,
'PERSON RAISING BOTH HANDS IN CELEBRATION': 128588,
'PERSON WITH BLOND HAIR': 128113,
'PERSON WITH FOLDED HANDS': 128591,
'PERSON WITH POUTING FACE': 128590,
'PERSONAL COMPUTER': 128187,
'PIG': 128022,
'PIG FACE': 128055,
'PIG NOSE': 128061,
'PILE OF POO': 128169,
'PILL': 128138,
'PINE DECORATION': 127885,
'PINEAPPLE': 127821,
'PISTOL': 128299,
'PLACE OF WORSHIP': 128720,
'PLAYING CARD ACE OF CLUBS': 127185,
'PLAYING CARD ACE OF DIAMONDS': 127169,
'PLAYING CARD ACE OF HEARTS': 127153,
'PLAYING CARD ACE OF SPADES': 127137,
'PLAYING CARD BACK': 127136,
'PLAYING CARD BLACK JOKER': 127183,
'PLAYING CARD EIGHT OF CLUBS': 127192,
'PLAYING CARD EIGHT OF DIAMONDS': 127176,
'PLAYING CARD EIGHT OF HEARTS': 127160,
'PLAYING CARD EIGHT OF SPADES': 127144,
'PLAYING CARD FIVE OF CLUBS': 127189,
'PLAYING CARD FIVE OF DIAMONDS': 127173,
'PLAYING CARD FIVE OF HEARTS': 127157,
'PLAYING CARD FIVE OF SPADES': 127141,
'PLAYING CARD FOOL': 127200,
'PLAYING CARD FOUR OF CLUBS': 127188,
'PLAYING CARD FOUR OF DIAMONDS': 127172,
'PLAYING CARD FOUR OF HEARTS': 127156,
'PLAYING CARD FOUR OF SPADES': 127140,
'PLAYING CARD JACK OF CLUBS': 127195,
'PLAYING CARD JACK OF DIAMONDS': 127179,
'PLAYING CARD JACK OF HEARTS': 127163,
'PLAYING CARD JACK OF SPADES': 127147,
'PLAYING CARD KING OF CLUBS': 127198,
'PLAYING CARD KING OF DIAMONDS': 127182,
'PLAYING CARD KING OF HEARTS': 127166,
'PLAYING CARD KING OF SPADES': 127150,
'PLAYING CARD KNIGHT OF CLUBS': 127196,
'PLAYING CARD KNIGHT OF DIAMONDS': 127180,
'PLAYING CARD KNIGHT OF HEARTS': 127164,
'PLAYING CARD KNIGHT OF SPADES': 127148,
'PLAYING CARD NINE OF CLUBS': 127193,
'PLAYING CARD NINE OF DIAMONDS': 127177,
'PLAYING CARD NINE OF HEARTS': 127161,
'PLAYING CARD NINE OF SPADES': 127145,
'PLAYING CARD QUEEN OF CLUBS': 127197,
'PLAYING CARD QUEEN OF DIAMONDS': 127181,
'PLAYING CARD QUEEN OF HEARTS': 127165,
'PLAYING CARD QUEEN OF SPADES': 127149,
'PLAYING CARD RED JOKER': 127167,
'PLAYING CARD SEVEN OF CLUBS': 127191,
'PLAYING CARD SEVEN OF DIAMONDS': 127175,
'PLAYING CARD SEVEN OF HEARTS': 127159,
'PLAYING CARD SEVEN OF SPADES': 127143,
'PLAYING CARD SIX OF CLUBS': 127190,
'PLAYING CARD SIX OF DIAMONDS': 127174,
'PLAYING CARD SIX OF HEARTS': 127158,
'PLAYING CARD SIX OF SPADES': 127142,
'PLAYING CARD TEN OF CLUBS': 127194,
'PLAYING CARD TEN OF DIAMONDS': 127178,
'PLAYING CARD TEN OF HEARTS': 127162,
'PLAYING CARD TEN OF SPADES': 127146,
'PLAYING CARD THREE OF CLUBS': 127187,
'PLAYING CARD THREE OF DIAMONDS': 127171,
'PLAYING CARD THREE OF HEARTS': 127155,
'PLAYING CARD THREE OF SPADES': 127139,
'PLAYING CARD TRUMP-1': 127201,
'PLAYING CARD TRUMP-10': 127210,
'PLAYING CARD TRUMP-11': 127211,
'PLAYING CARD TRUMP-12': 127212,
'PLAYING CARD TRUMP-13': 127213,
'PLAYING CARD TRUMP-14': 127214,
'PLAYING CARD TRUMP-15': 127215,
'PLAYING CARD TRUMP-16': 127216,
'PLAYING CARD TRUMP-17': 127217,
'PLAYING CARD TRUMP-18': 127218,
'PLAYING CARD TRUMP-19': 127219,
'PLAYING CARD TRUMP-2': 127202,
'PLAYING CARD TRUMP-20': 127220,
'PLAYING CARD TRUMP-21': 127221,
'PLAYING CARD TRUMP-3': 127203,
'PLAYING CARD TRUMP-4': 127204,
'PLAYING CARD TRUMP-5': 127205,
'PLAYING CARD TRUMP-6': 127206,
'PLAYING CARD TRUMP-7': 127207,
'PLAYING CARD TRUMP-8': 127208,
'PLAYING CARD TRUMP-9': 127209,
'PLAYING CARD TWO OF CLUBS': 127186,
'PLAYING CARD TWO OF DIAMONDS': 127170,
'PLAYING CARD TWO OF HEARTS': 127154,
'PLAYING CARD TWO OF SPADES': 127138,
'PLAYING CARD WHITE JOKER': 127199,
'PLD': 983147,
'PLU': 983150,
'PM': 983193,
'POCKET CALCULATOR': 128425,
'POLICE CAR': 128659,
'POLICE CARS REVOLVING LIGHT': 128680,
'POLICE OFFICER': 128110,
'POODLE': 128041,
'POP DIRECTIONAL ISOLATE': 8297,
'POPCORN': 127871,
'PORTABLE STEREO': 128254,
'POSTAL HORN': 128239,
'POSTBOX': 128238,
'POT OF FOOD': 127858,
'POTABLE WATER SYMBOL': 128688,
'POUCH': 128093,
'POULTRY LEG': 127831,
'POUTING CAT FACE': 128574,
'POUTING FACE': 128545,
'PRAYER BEADS': 128255,
'PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRACKET': 983236,
'PRINCESS': 128120,
'PRINTER': 128424,
'PRINTER ICON': 128438,
'PRIVACY MESSAGE': 983192,
'PRIVATE USE ONE': 983162,
'PRIVATE USE TWO': 983165,
'PRIVATE USE-1': 983163,
'PRIVATE USE-2': 983166,
'PROHIBITED SIGN': 128711,
'PSALTER PAHLAVI FOUR DOTS WITH CROSS': 68507,
'PSALTER PAHLAVI FOUR DOTS WITH DOT': 68508,
'PSALTER PAHLAVI LETTER ALEPH': 68480,
'PSALTER PAHLAVI LETTER BETH': 68481,
'PSALTER PAHLAVI LETTER DALETH': 68483,
'PSALTER PAHLAVI LETTER GIMEL': 68482,
'PSALTER PAHLAVI LETTER HE': 68484,
'PSALTER PAHLAVI LETTER HETH': 68487,
'PSALTER PAHLAVI LETTER KAPH': 68489,
'PSALTER PAHLAVI LETTER LAMEDH': 68490,
'PSALTER PAHLAVI LETTER MEM-QOPH': 68491,
'PSALTER PAHLAVI LETTER NUN': 68492,
'PSALTER PAHLAVI LETTER PE': 68494,
'PSALTER PAHLAVI LETTER SADHE': 68495,
'PSALTER PAHLAVI LETTER SAMEKH': 68493,
'PSALTER PAHLAVI LETTER SHIN': 68496,
'PSALTER PAHLAVI LETTER TAW': 68497,
'PSALTER PAHLAVI LETTER WAW-AYIN-RESH': 68485,
'PSALTER PAHLAVI LETTER YODH': 68488,
'PSALTER PAHLAVI LETTER ZAYIN': 68486,
'PSALTER PAHLAVI NUMBER FOUR': 68524,
'PSALTER PAHLAVI NUMBER ONE': 68521,
'PSALTER PAHLAVI NUMBER ONE HUNDRED': 68527,
'PSALTER PAHLAVI NUMBER TEN': 68525,
'PSALTER PAHLAVI NUMBER THREE': 68523,
'PSALTER PAHLAVI NUMBER TWENTY': 68526,
'PSALTER PAHLAVI NUMBER TWO': 68522,
'PSALTER PAHLAVI SECTION MARK': 68505,
'PSALTER PAHLAVI TURNED SECTION MARK': 68506,
'PU1': 983164,
'PU2': 983167,
'PUBLIC ADDRESS LOUDSPEAKER': 128226,
'PURPLE HEART': 128156,
'PURSE': 128091,
'PUSHPIN': 128204,
'PUT LITTER IN ITS PLACE SYMBOL': 128686,
'RABBIT': 128007,
'RABBIT FACE': 128048,
'RACING CAR': 127950,
'RACING MOTORCYCLE': 127949,
'RADIO': 128251,
'RADIO BUTTON': 128280,
'RAILWAY CAR': 128643,
'RAILWAY TRACK': 128740,
'RAINBOW': 127752,
'RAISED COMMA': 11828,
'RAISED DOT': 11827,
'RAISED FIST': 9994,
'RAISED HAND': 9995,
'RAISED HAND WITH FINGERS SPLAYED': 128400,
'RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS': 128406,
'RAISED MC SIGN': 127338,
'RAISED MD SIGN': 127339,
'RAM': 128015,
'RAT': 128000,
'RECORD SEPARATOR': 983112,
'RECREATIONAL VEHICLE': 128665,
'RED APPLE': 127822,
'REGIONAL INDICATOR SYMBOL LETTER A': 127462,
'REGIONAL INDICATOR SYMBOL LETTER B': 127463,
'REGIONAL INDICATOR SYMBOL LETTER C': 127464,
'REGIONAL INDICATOR SYMBOL LETTER D': 127465,
'REGIONAL INDICATOR SYMBOL LETTER E': 127466,
'REGIONAL INDICATOR SYMBOL LETTER F': 127467,
'REGIONAL INDICATOR SYMBOL LETTER G': 127468,
'REGIONAL INDICATOR SYMBOL LETTER H': 127469,
'REGIONAL INDICATOR SYMBOL LETTER I': 127470,
'REGIONAL INDICATOR SYMBOL LETTER J': 127471,
'REGIONAL INDICATOR SYMBOL LETTER K': 127472,
'REGIONAL INDICATOR SYMBOL LETTER L': 127473,
'REGIONAL INDICATOR SYMBOL LETTER M': 127474,
'REGIONAL INDICATOR SYMBOL LETTER N': 127475,
'REGIONAL INDICATOR SYMBOL LETTER O': 127476,
'REGIONAL INDICATOR SYMBOL LETTER P': 127477,
'REGIONAL INDICATOR SYMBOL LETTER Q': 127478,
'REGIONAL INDICATOR SYMBOL LETTER R': 127479,
'REGIONAL INDICATOR SYMBOL LETTER S': 127480,
'REGIONAL INDICATOR SYMBOL LETTER T': 127481,
'REGIONAL INDICATOR SYMBOL LETTER U': 127482,
'REGIONAL INDICATOR SYMBOL LETTER V': 127483,
'REGIONAL INDICATOR SYMBOL LETTER W': 127484,
'REGIONAL INDICATOR SYMBOL LETTER X': 127485,
'REGIONAL INDICATOR SYMBOL LETTER Y': 127486,
'REGIONAL INDICATOR SYMBOL LETTER Z': 127487,
'RELIEVED FACE': 128524,
'REMINDER RIBBON': 127895,
'RESTROOM': 128699,
'RETURN LEFT': 11152,
'RETURN RIGHT': 11153,
'REVERSE CHECKER BOARD': 128639,
'REVERSE INDEX': 983152,
'REVERSE LIGHT FOUR POINTED PINWHEEL STAR': 128968,
'REVERSE LINE FEED': 983151,
'REVERSED COMMA': 11841,
'REVERSED HAND WITH MIDDLE FINGER EXTENDED': 128405,
'REVERSED RAISED HAND WITH FINGERS SPLAYED': 128401,
'REVERSED THUMBS DOWN SIGN': 128403,
'REVERSED THUMBS UP SIGN': 128402,
'REVERSED VICTORY HAND': 128404,
'REVOLVING HEARTS': 128158,
'RI': 983153,
'RIBBON': 127872,
'RIBBON ARROW DOWN LEFT': 11184,
'RIBBON ARROW DOWN RIGHT': 11185,
'RIBBON ARROW LEFT DOWN': 11190,
'RIBBON ARROW LEFT UP': 11188,
'RIBBON ARROW RIGHT DOWN': 11191,
'RIBBON ARROW RIGHT UP': 11189,
'RIBBON ARROW UP LEFT': 11186,
'RIBBON ARROW UP RIGHT': 11187,
'RICE BALL': 127833,
'RICE CRACKER': 127832,
'RIGHT ANGER BUBBLE': 128495,
'RIGHT HAND TELEPHONE RECEIVER': 128381,
'RIGHT SPEAKER': 128360,
'RIGHT SPEAKER WITH ONE SOUND WAVE': 128361,
'RIGHT SPEAKER WITH THREE SOUND WAVES': 128362,
'RIGHT SPEECH BUBBLE': 128489,
'RIGHT THOUGHT BUBBLE': 128493,
'RIGHT-FACING ARMENIAN ETERNITY SIGN': 1421,
'RIGHT-HANDED INTERLACED PENTAGRAM': 9957,
'RIGHT-POINTING MAGNIFYING GLASS': 128270,
'RIGHT-TO-LEFT ISOLATE': 8295,
'RIGHTWARDS ARROW WITH EQUILATERAL ARROWHEAD': 129046,
'RIGHTWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD': 129034,
'RIGHTWARDS ARROW WITH MEDIUM TRIANGLE ARROWHEAD': 129030,
'RIGHTWARDS ARROW WITH NOTCHED TAIL': 129178,
'RIGHTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD': 129042,
'RIGHTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD': 129026,
'RIGHTWARDS BACK-TILTED SHADOWED WHITE ARROW': 129193,
'RIGHTWARDS BLACK ARROW': 11157,
'RIGHTWARDS BLACK CIRCLED WHITE ARROW': 11146,
'RIGHTWARDS BOTTOM SHADED WHITE ARROW': 129185,
'RIGHTWARDS COMPRESSED ARROW': 129086,
'RIGHTWARDS FINGER-POST ARROW': 129078,
'RIGHTWARDS FRONT-TILTED SHADOWED WHITE ARROW': 129195,
'RIGHTWARDS HEAVY ARROW': 129094,
'RIGHTWARDS HEAVY COMPRESSED ARROW': 129090,
'RIGHTWARDS LEFT-SHADED WHITE ARROW': 129191,
'RIGHTWARDS RIGHT-SHADED WHITE ARROW': 129189,
'RIGHTWARDS ROCKET': 128622,
'RIGHTWARDS SANS-SERIF ARROW': 129106,
'RIGHTWARDS SQUARED ARROW': 129082,
'RIGHTWARDS TOP SHADED WHITE ARROW': 129187,
'RIGHTWARDS TRIANGLE ARROWHEAD': 129170,
'RIGHTWARDS TRIANGLE-HEADED ARROW': 11106,
'RIGHTWARDS TRIANGLE-HEADED ARROW OVER LEFTWARDS TRIANGLE-HEADED ARROW': 11138,
'RIGHTWARDS TRIANGLE-HEADED ARROW TO BAR': 11122,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH BOLD SHAFT': 129066,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH DOUBLE HORIZONTAL STROKE': 11132,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH DOUBLE VERTICAL STROKE': 983234,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH HEAVY SHAFT': 129070,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP DOWNWARDS': 11175,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH LONG TIP UPWARDS': 11173,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH MEDIUM SHAFT': 129062,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH NARROW SHAFT': 129058,
'RIGHTWARDS TRIANGLE-HEADED ARROW WITH VERY HEAVY SHAFT': 129074,
'RIGHTWARDS TRIANGLE-HEADED DASHED ARROW': 11116,
'RIGHTWARDS TRIANGLE-HEADED PAIRED ARROWS': 11142,
'RIGHTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS': 11246,
'RIGHTWARDS WHITE ARROW WITHIN TRIANGLE ARROWHEAD': 129174,
'RING': 128141,
'RINGING BELL': 128365,
'RLE': 983219,
'RLI': 983227,
'RLM': 983217,
'RLO': 983222,
'ROASTED SWEET POTATO': 127840,
'ROBOT FACE': 129302,
'ROCKET': 128640,
'ROLLED-UP NEWSPAPER': 128478,
'ROLLER COASTER': 127906,
'ROOSTER': 128019,
'ROSE': 127801,
'ROSETTE': 127989,
'ROTATED LIGHT FOUR POINTED BLACK CUSP': 11213,
'ROTATED WHITE FOUR POINTED CUSP': 11215,
'ROUND PUSHPIN': 128205,
'ROUND TARGET': 128907,
'ROWBOAT': 128675,
'RS': 983113,
'RUBLE SIGN': 8381,
'RUGBY FOOTBALL': 127945,
'RUNIC LETTER FRANKS CASKET AC': 5879,
'RUNIC LETTER FRANKS CASKET AESC': 5880,
'RUNIC LETTER FRANKS CASKET EH': 5878,
'RUNIC LETTER FRANKS CASKET IS': 5877,
'RUNIC LETTER FRANKS CASKET OS': 5876,
'RUNIC LETTER K': 5873,
'RUNIC LETTER OO': 5875,
'RUNIC LETTER SH': 5874,
'RUNNER': 127939,
'RUNNING SHIRT WITH SASH': 127933,
'SAKE BOTTLE AND CUP': 127862,
'SANS-SERIF HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT': 128631,
'SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT': 128630,
'SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT': 128632,
'SANS-SERIF INTERROBANG ORNAMENT': 128634,
'SATELLITE': 128752,
'SATELLITE ANTENNA': 128225,
'SAXOPHONE': 127927,
'SCHOOL': 127979,
'SCHOOL SATCHEL': 127890,
'SCI': 983185,
'SCORPION': 129410,
'SCREEN': 128437,
'SCRIPT LIGATURE ET ORNAMENT': 128624,
'SCROLL': 128220,
'SEAT': 128186,
'SEE-NO-EVIL MONKEY': 128584,
'SEEDLING': 127793,
'SET TRANSMIT | |
<reponame>dvds/mkvedit<gh_stars>0
"""Contains functions to edit a matroska file."""
from collections import namedtuple
from inspect import getmembers, isfunction
from os import SEEK_CUR
from sys import argv, modules
from ebml.core import encode_element_id, encode_element_size, encode_unicode_string, encode_unsigned_integer, MAXIMUM_ELEMENT_SIZE_LENGTH
from ebml.schema.matroska import AttachmentsElement, AttachedFileElement, DateUTCElement, FileNameElement, FileUIDElement, InfoElement, MatroskaDocument, MuxingAppElement, SegmentElement, TracksElement, TrackEntryElement, TrackNumberElement, TrackUIDElement, WritingAppElement
def remove_dateutc(input_filename, output_filename):
    """Copy the matroska file, dropping the Info/DateUTC element.

    Removing DateUTC shrinks the Info element, which in turn shrinks the
    Segment element, so both parents' size fields must be re-encoded and
    the surrounding bytes stream-copied around the edited headers.
    """
    with open(input_filename, "rb") as input_file:
        input_matroska_document = MatroskaDocument(input_file)
        # retrieve element metadata (element + absolute file offset)
        segment_element_metadata = __find_element_metadata(input_matroska_document.roots, SegmentElement, 0)
        info_element_metadata = __find_element_metadata(segment_element_metadata.element.value, InfoElement, segment_element_metadata.offset + segment_element_metadata.element.head_size)
        dateutc_element_metadata = __find_element_metadata(info_element_metadata.element.value, DateUTCElement, info_element_metadata.offset + info_element_metadata.element.head_size)
        # calculate edited element sizes: Info loses the whole DateUTC stream,
        # Segment is adjusted by the delta between old and new Info sizes.
        new_info_element_body_size = info_element_metadata.element.body_size - dateutc_element_metadata.element.stream.size
        new_info_element_head_size = len(encode_element_size(new_info_element_body_size)) + 4 # 4 byte element id (0x1549A966)
        new_segment_element_body_size = segment_element_metadata.element.body_size + new_info_element_head_size + new_info_element_body_size - info_element_metadata.element.stream.size
        new_segment_element_head_size = len(encode_element_size(new_segment_element_body_size)) + 4 # 4 byte element id (0x18538067)
        # write out the new file
        with open(output_filename, "wb") as output_file:
            # write the pre-segment header block
            input_file.seek(0)
            __buffered_file_copy(input_file, output_file, segment_element_metadata.offset)
            # write the segment header (size padded to the maximum length so
            # later edits don't shift the payload)
            output_file.write(encode_element_id(SegmentElement.id))
            output_file.write(encode_element_size(new_segment_element_body_size, MAXIMUM_ELEMENT_SIZE_LENGTH))
            # write the post-segment header block / pre-info header block
            input_file.seek(segment_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, info_element_metadata.offset - (segment_element_metadata.offset + segment_element_metadata.element.head_size))
            # write the info header
            output_file.write(encode_element_id(InfoElement.id))
            output_file.write(encode_element_size(new_info_element_body_size))
            # write the post-info header block / pre-dateutc header block
            input_file.seek(info_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, dateutc_element_metadata.offset - (info_element_metadata.offset + info_element_metadata.element.head_size))
            # write the post-dateutc block (skip DateUTC entirely, then copy
            # everything that follows it to end-of-file)
            input_file.seek(dateutc_element_metadata.element.stream.size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file)
    return
def change_muxingapp(input_filename, output_filename, new_muxingapp):
    """Copy a Matroska file, replacing the Segment/Info/MuxingApp string.

    The element tree is parsed once to locate the Segment, Info and MuxingApp
    elements, the affected ancestor sizes are recomputed for the new string,
    and the input is then streamed to ``output_filename`` with only those
    headers and the MuxingApp payload rewritten; all other bytes are copied
    through unchanged.
    """
    with open(input_filename, "rb") as input_file:
        input_matroska_document = MatroskaDocument(input_file)
        # retrieve element metadata (parsed element + its absolute file offset)
        segment_element_metadata = __find_element_metadata(input_matroska_document.roots, SegmentElement, 0)
        info_element_metadata = __find_element_metadata(segment_element_metadata.element.value, InfoElement, segment_element_metadata.offset + segment_element_metadata.element.head_size)
        muxingapp_element_metadata = __find_element_metadata(info_element_metadata.element.value, MuxingAppElement, info_element_metadata.offset + info_element_metadata.element.head_size)
        # calculate edited element sizes: each ancestor's body grows by the new
        # child's head+body and shrinks by the old child's total stream size
        new_muxingapp_element_body_size = len(encode_unicode_string(new_muxingapp))
        new_muxingapp_element_head_size = len(encode_element_size(new_muxingapp_element_body_size)) + 2 # 2 byte element id (0x4D80)
        new_info_element_body_size = info_element_metadata.element.body_size + new_muxingapp_element_head_size + new_muxingapp_element_body_size - muxingapp_element_metadata.element.stream.size
        new_info_element_head_size = len(encode_element_size(new_info_element_body_size)) + 4 # 4 byte element id (0x1549A966)
        new_segment_element_body_size = segment_element_metadata.element.body_size + new_info_element_head_size + new_info_element_body_size - info_element_metadata.element.stream.size
        # NOTE: unused -- the segment size is re-encoded below at the fixed
        # MAXIMUM_ELEMENT_SIZE_LENGTH, so its head size never changes.
        new_segment_element_head_size = len(encode_element_size(new_segment_element_body_size)) + 4 # 4 byte element id (0x18538067)
        # write out the new file
        with open(output_filename, "wb") as output_file:
            # write the pre-segment header block
            input_file.seek(0)
            __buffered_file_copy(input_file, output_file, segment_element_metadata.offset)
            # write the segment header (size padded to the maximum length)
            output_file.write(encode_element_id(SegmentElement.id))
            output_file.write(encode_element_size(new_segment_element_body_size, MAXIMUM_ELEMENT_SIZE_LENGTH))
            # skip the original segment header, then copy up to the info header
            input_file.seek(segment_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, info_element_metadata.offset - (segment_element_metadata.offset + segment_element_metadata.element.head_size))
            # write the info header (recomputed size)
            output_file.write(encode_element_id(InfoElement.id))
            output_file.write(encode_element_size(new_info_element_body_size))
            # write the post-info header block / pre-muxingapp header block
            input_file.seek(info_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, muxingapp_element_metadata.offset - (info_element_metadata.offset + info_element_metadata.element.head_size))
            # write the muxingapp header
            output_file.write(encode_element_id(MuxingAppElement.id))
            output_file.write(encode_element_size(new_muxingapp_element_body_size))
            # write the muxingapp
            output_file.write(encode_unicode_string(new_muxingapp))
            # skip the original muxingapp element and copy the remainder verbatim
            input_file.seek(muxingapp_element_metadata.element.stream.size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file)
    return
def change_writingapp(input_filename, output_filename, new_writingapp):
    """Copy a Matroska file, replacing the Segment/Info/WritingApp string.

    Mirrors change_muxingapp: the Segment, Info and WritingApp elements are
    located, the affected ancestor sizes are recomputed for the new string,
    and the input is streamed to ``output_filename`` with only those headers
    and the WritingApp payload rewritten.
    """
    with open(input_filename, "rb") as input_file:
        input_matroska_document = MatroskaDocument(input_file)
        # retrieve element metadata (parsed element + its absolute file offset)
        segment_element_metadata = __find_element_metadata(input_matroska_document.roots, SegmentElement, 0)
        info_element_metadata = __find_element_metadata(segment_element_metadata.element.value, InfoElement, segment_element_metadata.offset + segment_element_metadata.element.head_size)
        writingapp_element_metadata = __find_element_metadata(info_element_metadata.element.value, WritingAppElement, info_element_metadata.offset + info_element_metadata.element.head_size)
        # calculate edited element sizes: each ancestor's body grows by the new
        # child's head+body and shrinks by the old child's total stream size
        new_writingapp_element_body_size = len(encode_unicode_string(new_writingapp))
        new_writingapp_element_head_size = len(encode_element_size(new_writingapp_element_body_size)) + 2 # 2 byte element id (0x5741)
        new_info_element_body_size = info_element_metadata.element.body_size + new_writingapp_element_head_size + new_writingapp_element_body_size - writingapp_element_metadata.element.stream.size
        new_info_element_head_size = len(encode_element_size(new_info_element_body_size)) + 4 # 4 byte element id (0x1549A966)
        new_segment_element_body_size = segment_element_metadata.element.body_size + new_info_element_head_size + new_info_element_body_size - info_element_metadata.element.stream.size
        # NOTE: unused -- the segment size is re-encoded below at the fixed
        # MAXIMUM_ELEMENT_SIZE_LENGTH, so its head size never changes.
        new_segment_element_head_size = len(encode_element_size(new_segment_element_body_size)) + 4 # 4 byte element id (0x18538067)
        # write out the new file
        with open(output_filename, "wb") as output_file:
            # write the pre-segment header block
            input_file.seek(0)
            __buffered_file_copy(input_file, output_file, segment_element_metadata.offset)
            # write the segment header (size padded to the maximum length)
            output_file.write(encode_element_id(SegmentElement.id))
            output_file.write(encode_element_size(new_segment_element_body_size, MAXIMUM_ELEMENT_SIZE_LENGTH))
            # skip the original segment header, then copy up to the info header
            input_file.seek(segment_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, info_element_metadata.offset - (segment_element_metadata.offset + segment_element_metadata.element.head_size))
            # write the info header (recomputed size)
            output_file.write(encode_element_id(InfoElement.id))
            output_file.write(encode_element_size(new_info_element_body_size))
            # write the post-info header block / pre-writingapp header block
            input_file.seek(info_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, writingapp_element_metadata.offset - (info_element_metadata.offset + info_element_metadata.element.head_size))
            # write the writingapp header
            output_file.write(encode_element_id(WritingAppElement.id))
            output_file.write(encode_element_size(new_writingapp_element_body_size))
            # write the writingapp
            output_file.write(encode_unicode_string(new_writingapp))
            # skip the original writingapp element and copy the remainder verbatim
            input_file.seek(writingapp_element_metadata.element.stream.size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file)
    return
def change_trackuid(input_filename, output_filename, track_number, new_trackuid):
    """Copy a Matroska file, replacing the TrackUID of one track.

    The TrackEntry is selected by matching its TrackNumber child against
    ``track_number``; the Segment, Tracks and TrackEntry sizes are recomputed
    for the re-encoded UID and only those headers plus the UID payload are
    rewritten.

    NOTE: uses the ``long`` builtin, so this module is Python 2-only.
    """
    with open(input_filename, "rb") as input_file:
        input_matroska_document = MatroskaDocument(input_file)
        # retrieve element metadata (parsed element + its absolute file offset)
        segment_element_metadata = __find_element_metadata(input_matroska_document.roots, SegmentElement, 0)
        tracks_element_metadata = __find_element_metadata(segment_element_metadata.element.value, TracksElement, segment_element_metadata.offset + segment_element_metadata.element.head_size)
        # select the TrackEntry whose TrackNumber child equals track_number
        trackentry_element_metadata = __find_element_metadata(tracks_element_metadata.element.value, TrackEntryElement, tracks_element_metadata.offset + tracks_element_metadata.element.head_size, lambda e: e.id == TrackNumberElement.id and e.value == long(track_number))
        trackuid_element_metadata = __find_element_metadata(trackentry_element_metadata.element.value, TrackUIDElement, trackentry_element_metadata.offset + trackentry_element_metadata.element.head_size)
        # calculate edited element sizes: each ancestor's body grows by the new
        # child's head+body and shrinks by the old child's total stream size
        new_trackuid_element_body_size = len(encode_unsigned_integer(long(new_trackuid)))
        new_trackuid_element_head_size = len(encode_element_size(new_trackuid_element_body_size)) + 2 # 2 byte element id (0x73C5)
        new_trackentry_element_body_size = trackentry_element_metadata.element.body_size + new_trackuid_element_head_size + new_trackuid_element_body_size - trackuid_element_metadata.element.stream.size
        new_trackentry_element_head_size = len(encode_element_size(new_trackentry_element_body_size)) + 1 # 1 byte element id (0xAE)
        new_tracks_element_body_size = tracks_element_metadata.element.body_size + new_trackentry_element_head_size + new_trackentry_element_body_size - trackentry_element_metadata.element.stream.size
        new_tracks_element_head_size = len(encode_element_size(new_tracks_element_body_size)) + 4 # 4 byte element id (0x1654AE6B)
        new_segment_element_body_size = segment_element_metadata.element.body_size + new_tracks_element_head_size + new_tracks_element_body_size - tracks_element_metadata.element.stream.size
        # NOTE: unused -- the segment size is re-encoded below at the fixed
        # MAXIMUM_ELEMENT_SIZE_LENGTH, so its head size never changes.
        new_segment_element_head_size = len(encode_element_size(new_segment_element_body_size)) + 4 # 4 byte element id (0x18538067)
        # write out the new file
        with open(output_filename, "wb") as output_file:
            # write the pre-segment header block
            input_file.seek(0)
            __buffered_file_copy(input_file, output_file, segment_element_metadata.offset)
            # write the segment header (size padded to the maximum length)
            output_file.write(encode_element_id(SegmentElement.id))
            output_file.write(encode_element_size(new_segment_element_body_size, MAXIMUM_ELEMENT_SIZE_LENGTH))
            # write the post-segment header block / pre-tracks header block
            input_file.seek(segment_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, tracks_element_metadata.offset - (segment_element_metadata.offset + segment_element_metadata.element.head_size))
            # write the tracks header (recomputed size)
            output_file.write(encode_element_id(TracksElement.id))
            output_file.write(encode_element_size(new_tracks_element_body_size))
            # write the post-tracks header block / pre-trackentry header block
            input_file.seek(tracks_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, trackentry_element_metadata.offset - (tracks_element_metadata.offset + tracks_element_metadata.element.head_size))
            # write the trackentry header (recomputed size)
            output_file.write(encode_element_id(TrackEntryElement.id))
            output_file.write(encode_element_size(new_trackentry_element_body_size))
            # write the post-trackentry header block / pre-trackuid header block
            input_file.seek(trackentry_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, trackuid_element_metadata.offset - (trackentry_element_metadata.offset + trackentry_element_metadata.element.head_size))
            # write the trackuid header
            output_file.write(encode_element_id(TrackUIDElement.id))
            output_file.write(encode_element_size(new_trackuid_element_body_size))
            # write the trackuid
            output_file.write(encode_unsigned_integer(long(new_trackuid)))
            # skip the original trackuid element and copy the remainder verbatim
            input_file.seek(trackuid_element_metadata.element.stream.size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file)
    return
def change_attachment_fileuid(input_filename, output_filename, attachment_filename, new_fileuid):
    """Copy a Matroska file, replacing the FileUID of one attachment.

    The AttachedFile is selected by matching its FileName child against
    ``attachment_filename``; the Segment, Attachments and AttachedFile sizes
    are recomputed for the re-encoded UID and only those headers plus the UID
    payload are rewritten.

    NOTE: uses the ``long`` builtin, so this module is Python 2-only.
    """
    with open(input_filename, "rb") as input_file:
        input_matroska_document = MatroskaDocument(input_file)
        # retrieve element metadata (parsed element + its absolute file offset)
        segment_element_metadata = __find_element_metadata(input_matroska_document.roots, SegmentElement, 0)
        attachments_element_metadata = __find_element_metadata(segment_element_metadata.element.value, AttachmentsElement, segment_element_metadata.offset + segment_element_metadata.element.head_size)
        # select the AttachedFile whose FileName child equals attachment_filename
        attachedfile_element_metadata = __find_element_metadata(attachments_element_metadata.element.value, AttachedFileElement, attachments_element_metadata.offset + attachments_element_metadata.element.head_size, lambda e: e.id == FileNameElement.id and e.value == attachment_filename)
        fileuid_element_metadata = __find_element_metadata(attachedfile_element_metadata.element.value, FileUIDElement, attachedfile_element_metadata.offset + attachedfile_element_metadata.element.head_size)
        # calculate edited element sizes: each ancestor's body grows by the new
        # child's head+body and shrinks by the old child's total stream size
        new_fileuid_element_body_size = len(encode_unsigned_integer(long(new_fileuid)))
        new_fileuid_element_head_size = len(encode_element_size(new_fileuid_element_body_size)) + 2 # 2 byte element id (0x46AE)
        new_attachedfile_element_body_size = attachedfile_element_metadata.element.body_size + new_fileuid_element_head_size + new_fileuid_element_body_size - fileuid_element_metadata.element.stream.size
        new_attachedfile_element_head_size = len(encode_element_size(new_attachedfile_element_body_size)) + 2 # 2 byte element id (0x61A7)
        new_attachments_element_body_size = attachments_element_metadata.element.body_size + new_attachedfile_element_head_size + new_attachedfile_element_body_size - attachedfile_element_metadata.element.stream.size
        new_attachments_element_head_size = len(encode_element_size(new_attachments_element_body_size)) + 4 # 4 byte element id (0x1941A469)
        new_segment_element_body_size = segment_element_metadata.element.body_size + new_attachments_element_head_size + new_attachments_element_body_size - attachments_element_metadata.element.stream.size
        # NOTE: unused -- the segment size is re-encoded below at the fixed
        # MAXIMUM_ELEMENT_SIZE_LENGTH, so its head size never changes.
        new_segment_element_head_size = len(encode_element_size(new_segment_element_body_size)) + 4 # 4 byte element id (0x18538067)
        # write out the new file
        with open(output_filename, "wb") as output_file:
            # write the pre-segment header block
            input_file.seek(0)
            __buffered_file_copy(input_file, output_file, segment_element_metadata.offset)
            # write the segment header (size padded to the maximum length)
            output_file.write(encode_element_id(SegmentElement.id))
            output_file.write(encode_element_size(new_segment_element_body_size, MAXIMUM_ELEMENT_SIZE_LENGTH))
            # write the post-segment header block / pre-attachments header block
            input_file.seek(segment_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, attachments_element_metadata.offset - (segment_element_metadata.offset + segment_element_metadata.element.head_size))
            # write the attachments header (recomputed size)
            output_file.write(encode_element_id(AttachmentsElement.id))
            output_file.write(encode_element_size(new_attachments_element_body_size))
            # write the post-attachments header block / pre-attachedfile header block
            input_file.seek(attachments_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, attachedfile_element_metadata.offset - (attachments_element_metadata.offset + attachments_element_metadata.element.head_size))
            # write the attachedfile header (recomputed size)
            output_file.write(encode_element_id(AttachedFileElement.id))
            output_file.write(encode_element_size(new_attachedfile_element_body_size))
            # write the post-attachedfile header block / pre-fileuid header block
            input_file.seek(attachedfile_element_metadata.element.head_size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file, fileuid_element_metadata.offset - (attachedfile_element_metadata.offset + attachedfile_element_metadata.element.head_size))
            # write the fileuid header
            output_file.write(encode_element_id(FileUIDElement.id))
            output_file.write(encode_element_size(new_fileuid_element_body_size))
            # write the fileuid
            output_file.write(encode_unsigned_integer(long(new_fileuid)))
            # skip the original fileuid element and copy the remainder verbatim
            input_file.seek(fileuid_element_metadata.element.stream.size, SEEK_CUR)
            __buffered_file_copy(input_file, output_file)
    return
def __find_element_metadata(element_list, find_element, starting_offset, child_element_predicate = None):
    """Locate the first matching element in a list of sibling elements.

    Parameters:
        element_list -- sibling elements to scan, in file order
        find_element -- element class to match (compared by its ``id``)
        starting_offset -- absolute file offset of the first sibling
        child_element_predicate -- optional predicate applied to the
            candidate's children; the candidate matches only when at least
            one child satisfies it (e.g. selecting a TrackEntry by its
            TrackNumber child)

    Returns an ``ElementMetadata`` namedtuple ``(element, offset)`` where
    ``offset`` is the element's absolute position in the file.
    Raises Exception when no matching element is found.
    """
    ElementMetadata = namedtuple('ElementMetadata', 'element offset')
    offset = starting_offset
    # enumerate the elements in the list until the requested element is found...
    for element in element_list:
        if element.id != find_element.id:
            offset += element.stream.size
        else:
            if child_element_predicate is None:
                # ...no predicate so just return it with the starting offset
                return ElementMetadata(element, offset)
            # ...otherwise accept this candidate only if some child
            # satisfies the predicate
            for child_element in element.value:
                if child_element_predicate(child_element):
                    return ElementMetadata(element, offset)
            # BUGFIX: a `break` here used to abandon the scan after the first
            # id-matching element failed the predicate, so later siblings of
            # the same type (e.g. a second TrackEntry) could never be found.
            # Advance past this candidate and keep scanning instead.
            offset += element.stream.size
    raise Exception("No {0} element found.".format(find_element.name))
def __buffered_file_copy(input_file, output_file, number_of_bytes = None):
    """Stream ``number_of_bytes`` from input_file to output_file in chunks.

    When ``number_of_bytes`` is None, everything remaining in the input is
    copied. Chunked copying keeps memory use bounded for large files.
    """
    write = output_file.write
    for chunk in __yielding_read(input_file, number_of_bytes):
        write(chunk)
def __yielding_read(file_object, number_of_bytes = None):
    """Yield successive chunks read from ``file_object``.

    At most ``number_of_bytes`` are produced in total (unlimited when None);
    iteration also stops early on EOF. Chunks are at most 64kb each.
    """
    CHUNK_SIZE = 1024 * 64  # 64kb
    remaining = number_of_bytes
    while remaining is None or remaining > 0:
        # never request more than what is still owed to the caller
        request = CHUNK_SIZE if remaining is None else min(CHUNK_SIZE, remaining)
        piece = file_object.read(request)
        if not piece:
            # EOF reached before the byte budget was exhausted
            return
        if remaining is not None:
            remaining -= len(piece)
        yield piece
if __name__ == "__main__":
if len(argv) < 2:
raise Exception("Script expects at least a | |
= "fast"
# Not so bad
elif scenario == 3:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "fast"
speed_lower = "fast"
speed_right = "fast"
speed_upper = "fast"
elif scenario == 4:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 5:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 6:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 7:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 8:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 9:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 10:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 11:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 12:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 13:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
elif scenario == 14:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
else:
familiar_left = 0
familiar_lower = 1
familiar_right = 2
familiar_upper = 3
speed_left = "slow"
speed_lower = "slow"
speed_right = "slow"
speed_upper = "slow"
# Exiting agents in left spawn
group_follower_spawn_left = AgentGroup(
agent_type=Circular,
size=size_spawn_left,
attributes=attributes(simulation, familiar=familiar_left, has_target=True, is_follower=True))
simulation.agents.add_non_overlapping_group(
speed_left,
"spawn_left",
group_follower_spawn_left,
position_gen=False,
position_iter=iter([]),
spawn=0,
obstacles=geom_to_linear_obstacles(field.obstacles))
# Exiting agents in lower spawn
group_follower_spawn_lower = AgentGroup(
agent_type=Circular,
size=size_spawn_lower,
attributes=attributes(simulation, familiar=familiar_lower, has_target=True, is_follower=True))
simulation.agents.add_non_overlapping_group(
speed_lower,
"spawn_lower",
group_follower_spawn_lower,
position_gen=False,
position_iter=iter([]),
spawn=1,
obstacles=geom_to_linear_obstacles(field.obstacles))
# Exiting agents in right spawn
group_follower_spawn_right = AgentGroup(
agent_type=Circular,
size=size_spawn_right,
attributes=attributes(simulation, familiar=familiar_right, has_target=True, is_follower=True))
simulation.agents.add_non_overlapping_group(
speed_right,
"spawn_right",
group_follower_spawn_right,
position_gen=False,
position_iter=iter([]),
spawn=2,
obstacles=geom_to_linear_obstacles(field.obstacles))
# Exiting agents in upper spawn
group_follower_spawn_upper = AgentGroup(
agent_type=Circular,
size=size_spawn_upper,
attributes=attributes(simulation, familiar=familiar_upper, has_target=True, is_follower=True))
simulation.agents.add_non_overlapping_group(
speed_upper,
"spawn_upper",
group_follower_spawn_upper,
position_gen=False,
position_iter=iter([]),
spawn=3,
obstacles=geom_to_linear_obstacles(field.obstacles))
if n_leaders == 0:
# Check if the solution has already been evaluated, if it has print the penalty and total evacuation time and return
bank_evactime = solutionbank(cells, target_exits, n_leaders, scenario)
if bank_evactime != 0:
print(bank_evactime)
return
if n_leaders > 0:
# generate_leader_pos() should check that guides are not spawned in unfeasible positions
init_pos = generate_leader_pos(simulation, cells, n_leaders)
print(init_pos)
target_exits = iter(target_exits)
init_pos = iter(init_pos)
# Check if the solution has already been evaluated, if it has print the penalty and total evacuation time and return
bank_evactime = solutionbank(cells, target_exits, n_leaders, scenario)
if bank_evactime != 0:
print(bank_evactime)
return
# Add guides.
# NB! If there are multiple guides, the function that is set to create the guides should check that the guides do
# not overlap each other.
group_leader = AgentGroup(
agent_type=Circular,
size=n_leaders,
attributes=attributes_leader(simulation, target_iter=target_exits, has_target=True, is_follower=False))
# If it is not taken care before hand that leaders can't overlap, the function will terminate here.
simulation.agents.add_non_overlapping_group(
"dummy_speed",
"group_leader",
group_leader,
position_gen=True,
position_iter=init_pos,
spawn=0,
obstacles=geom_to_linear_obstacles(field.obstacles))
# We do not need to set the seed number, since we use the deterministic social force model
#np.random.seed(seed)
simulation.update()
simulation.run()
print(simulation.data['time_tot'])
# Write the solution to the solution bank
if n_leaders == 0:
banksolution = "{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, '\n')
if n_leaders == 1:
banksolution = "{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], '\n')
if n_leaders == 2:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], '\n')
if n_leaders == 3:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], '\n')
if n_leaders == 4:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], '\n')
if n_leaders == 5:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], '\n')
if n_leaders == 6:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], ' ', cells[5], ' ', exits[5], '\n')
if n_leaders == 7:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], ' ', cells[5], ' ', exits[5], ' ', cells[6], ' ', exits[6], '\n')
if n_leaders == 8:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], ' ', cells[5], ' ', exits[5], ' ', cells[6], ' ', exits[6], ' ', cells[7], ' ', exits[7], '\n')
if n_leaders == 9:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], ' ', cells[5], ' ', exits[5], ' ', cells[6], ' ', exits[6], ' ', cells[7], ' ', exits[7], ' ', cells[8], ' ', exits[8], '\n')
if n_leaders == 10:
banksolution = "{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}".format(simulation.data['time_tot'], ' ', scenario, ' ', cells[0], ' ', exits[0], ' ', cells[1], ' ', exits[1], ' ', cells[2], ' ', exits[2], ' ', cells[3], ' ', exits[3], ' ', cells[4], ' ', exits[4], ' ', cells[5], ' ', exits[5], ' ', cells[6], ' ', exits[6], ' ', cells[7], ' ', exits[7], ' ', cells[8], ' ', exits[8], ' ', cells[9], ' ', exits[9], '\n')
if os.path.isfile("complex/bank_complex.out"):
bankfile = open("complex/bank_complex.out", "a")
bankfile.write(banksolution)
bankfile.close()
else:
bankfile = open("complex/bank_complex.out", "w")
bankfile.write(banksolution)
bankfile.close()
return
if __name__ == '__main__':
arguments = sys.argv
arguments = arguments[1:]
gene_data = arguments[:-1]
# Seed number
scenario = int(arguments[len(arguments)-1])
# Cells of guides
cells_data = gene_data[0::2]
cells_data = [int(cells_data[i]) for i in range(len(cells_data))]
# Exits of guides
exits_data = gene_data[1::2]
exits_data = [int(exits_data[i]) for i in range(len(exits_data))]
cells=[]
exits=[]
for i in range(len(cells_data)):
cells.append(cells_data[i])
exits.append(exits_data[i])
# Number of guides
n_guides = len(cells)
# Run the evacuation simulation
if n_guides == 0:
run([[],[]], 0, scenario)
if n_guides == 1:
run([[exits[0], cells[0]]], 1, scenario)
elif n_guides == 2:
run([[exits[0], cells[0]], [exits[1], cells[1]]], 2, scenario)
elif n_guides == 3:
run([[exits[0], cells[0]], [exits[1], cells[1]], [exits[2], cells[2]]], 3, scenario)
elif n_guides == 4:
run([[exits[0], cells[0]], [exits[1], cells[1]], [exits[2], cells[2]], [exits[3], cells[3]]], 4, scenario)
elif n_guides == 5:
run([[exits[0], cells[0]], [exits[1], cells[1]], [exits[2], cells[2]], [exits[3], cells[3]], [exits[4], cells[4]]], 5, scenario)
elif n_guides == 6:
run([[exits[0], cells[0]], [exits[1], cells[1]], [exits[2], cells[2]], [exits[3], cells[3]], [exits[4], cells[4]], [exits[5], cells[5]]], 6, scenario)
elif n_guides == 7:
run([[exits[0], cells[0]], [exits[1], cells[1]], [exits[2], cells[2]], | |
"""This file contains the various potential functions we have defined."""
import numpy as np
import math
try:
import cPickle as pickle
except:
import pickle
def cosine_potential(coords):
    """
    Calculate the potential energy and force of a piecewise-scaled cosine.

    The landscape is 3*cos(x) for x < pi and 2*cos(x) - 1 otherwise.

    Parameters:
    -----------
    coords : float (or numpy array of floats)
        Location(s)

    Returns:
    --------
    V : float (or array of floats)
        Potential Energy
    F : float (or array of floats)
        Force
    Trigger : Boolean
        Has rare event occurred (True) or not (False); always False for
        array input
    Event : str
        'A', 'B', or '' when no event has occurred
    """
    is_scalar = not hasattr(coords, "__len__")
    if is_scalar:
        if coords < np.pi:
            V = np.cos(coords) * 3.0
            F = np.sin(coords) * 3.0
        else:
            V = np.cos(coords) * 2.0 - 1.0
            F = np.sin(coords) * 2.0
    else:
        # fill the two regimes with boolean masks instead of an index loop
        V = np.zeros_like(coords)
        F = np.zeros_like(coords)
        below_pi = coords < np.pi
        V[below_pi] = np.cos(coords[below_pi]) * 3.0
        F[below_pi] = np.sin(coords[below_pi]) * 3.0
        V[~below_pi] = np.cos(coords[~below_pi]) * 2.0 - 1.0
        F[~below_pi] = np.sin(coords[~below_pi]) * 2.0
    # rare-event detection only applies to scalar trajectories
    Trigger = False
    Event = ''
    if is_scalar:
        if coords < -2.0:
            Trigger, Event = True, 'A'
        elif coords > 8.28:
            Trigger, Event = True, 'B'
    return (V, F, Trigger, Event)
def two_gaussian_potential(coords):
    """
    Calculate the force and potential of a double-Gaussian well (1-D).

    Two inverted Gaussians of depth 5 and 10 are centered at +-(2/0.75).

    Parameters:
    -----------
    coords : float (or array of floats)
        Location(s)

    Returns:
    --------
    V : float (or array of floats)
        Potential Energy
    F : float (or array of floats)
        Force
    Trigger : Boolean
        Has rare event occurred (True) or not (False)
    Event : str
        Always 'A' for this potential
    """
    center = 2 / 0.75
    right_well = np.exp(-(coords - center) ** 2)
    left_well = np.exp(-(coords + center) ** 2)
    V = -5 * right_well - 10 * left_well
    # force is the negative gradient of V
    F = -(10 * (coords - center) * right_well + 20 * (coords + center) * left_well)
    Event = 'A'
    Trigger = False
    # the rare event fires only for scalar numpy values past the left well
    if type(coords) is np.float64 and coords < -2.0:
        Trigger = True
    return (V, F, Trigger, Event)
def pv_2D_potential(x, y):
    """
    Calculate the force and potential based on location (2-D).

    Parameters:
    -----------
    x : array of floats
        X coordinates
    y : array of floats
        Y coordinates

    Returns:
    --------
    V : float (or array of floats)
        Potential Energy
    Fpot : float (or array of floats)
        Force in x and y direction
    Trigger : Boolean
        Has rare event occurred (True) or not (False)
    Event : str
        'A', 'B', or '' when no event has occurred
    """
    # BUGFIX: Event was previously unset on the grid branch and on the
    # scalar no-trigger branch, so the return below raised NameError.
    Event = ''
    if hasattr(x, "__len__") is True:
        V = np.zeros([y.size, x.size])
        Fx = np.empty([y.size, x.size])
        Fy = np.empty([y.size, x.size])
        for k in range(0, y.size - 1):
            for j in range(0, x.size - 1):
                V[k, j] = (np.cos(2*math.pi*x[j])*(1 + 4*y[k]) +
                           math.pi*y[k]**2 - 0.75*np.cos(2*math.pi*x[j]/3))
                Fx = (((2*math.pi/3*0.75)*np.sin(2*math.pi*x[j]/3) -
                       2*math.pi*(1+4*y[k])*np.sin(2*math.pi*x[j])))
                Fy = ((2*math.pi*y[k]+4*np.cos(2*math.pi*x[j])))
        # NOTE(review): Fx/Fy are clobbered by scalars from the last grid
        # point and the loops stop at size-1, leaving the last row/column of
        # V zero -- preserved as-is; confirm before relying on this branch.
        Fpotx = Fx * -1
        Fpoty = Fy * -1
        Trigger = False
    else:
        V = (np.cos(2*math.pi*x)*(1+4*y) + math.pi*y**2 -
             0.75*np.cos(2*math.pi*x/3))
        # force components are the negative gradient of V
        Fpotx = (((2*math.pi/3*0.75)*np.sin(2*math.pi*x/3) -
                  2*math.pi*(1+4*y)*np.sin(2*math.pi*x)))*-1
        Fpoty = ((2*math.pi*y+4*np.cos(2*math.pi*x)))*-1
        if x < 0.75 and y > 0:
            Trigger = True
            Event = 'A'
        elif x > 2.25 and y > 0:
            Trigger = True
            Event = 'B'
        else:
            Trigger = False
    Fpot = np.array([Fpotx, Fpoty])
    return (V, Fpot, Trigger, Event)
def C_Cl_potential(x, y):
    """
    Calculate the force and potential based on location (2-D).

    The surface comes from a pickled interpolator ("kfpotential.p", loaded
    from the current working directory on every call) that is invoked as
    pot(row, col) and accepts dx=/dy= derivative arguments.

    Parameters:
    -----------
    x : array of floats
        X coordinates
    y : array of floats
        Y coordinates

    Returns:
    --------
    V : float (or array of floats)
        Potential Energy
    Fpot : float (or array of floats)
        Force in x and y direction
    Trigger : Boolean
        Has rare event occurred (True) or not (False)
    Event : str
        Always '' for this potential
    """
    # SECURITY NOTE(review): pickle.load executes arbitrary code if the file
    # is untrusted; the handle is also never closed and the file is reloaded
    # on every call.
    pot = pickle.load(open("kfpotential.p", "rb"))
    if hasattr(x, "__len__") is True:
        V = np.zeros([y.size, x.size])
        Fx = np.empty([y.size, x.size])
        Fy = np.empty([y.size, x.size])
        # NOTE(review): the loops stop at size-1 (last row/column of V stays
        # zero), Fx/Fy are overwritten with scalars from the last grid point,
        # pot() is called with loop indices rather than coordinate values,
        # and Fy is taken from dx=1 (and Fx from dy=1), which looks swapped
        # -- confirm intent before relying on this branch.
        for k in range(0, y.size - 1):
            for j in range(0, x.size - 1):
                V[k, j] = pot(k, j)
                Fy = pot(k, j, dx=1)
                Fx = pot(k, j, dy=1)
        Fpotx = Fx * -1
        Fpoty = Fy * -1
        Trigger = False
    else:
        # NOTE(review): this scalar branch is broken as written -- V, k and j
        # are unbound here, so it raises NameError before the comparison
        # below; it presumably should evaluate pot at (x, y).
        V[k, j] = pot(k, j)
        Fy = pot(k, j, dx=1)
        Fx = pot(k, j, dy=1)
        Fpotx = Fx * -1
        Fpoty = Fy * -1
        if x > 0.25 and y < 0.20:
            Trigger = True
        else:
            Trigger = False
    Fpot = np.array([Fpotx, Fpoty])
    Event = ''
    return (V, Fpot, Trigger, Event)
def muller_brown_potential(x, y):
    """
    Calculate the Muller-Brown force and potential based on location (2-D).

    Parameters:
    -----------
    x : np.float64 (scalar branch) or array of floats
        X coordinate(s); only np.float64 selects the scalar branch
    y : np.float64 or array of floats
        Y coordinate(s)

    Returns:
    --------
    V : float (or array of floats)
        Potential Energy
    Fpot : float (or array of floats)
        Force in x and y direction
    Trigger : Boolean
        Has rare event occurred (True) or not (False)
    Event : str
        Always '' for this potential (returned for consistency with the
        other potential functions in this module)
    """
    # Muller-Brown parameters. BUGFIX: these were previously defined only
    # inside the grid branch, so scalar calls raised NameError on V below;
    # hoist them so both branches can evaluate the potential.
    A = np.array([-200.0, -100.0, -170.0, 15.0])
    a = np.array([-1.0, -1.0, -6.50, 0.7])
    b = np.array([0.0, 0.0, 11.0, 0.6])
    c = np.array([-10.0, -10.0, -6.50, 0.7])
    x0 = np.array([1.0, 0.0, -0.50, -1.0])
    y0 = np.array([0.0, 0.5, 1.50, 1.0])
    if type(x) is not np.float64:
        V = np.zeros([y.size, x.size])
        Fx = np.empty([y.size, x.size])
        Fy = np.empty([y.size, x.size])
        # NOTE(review): as elsewhere in this module, the loops stop at
        # size-1 and Fx/Fy are clobbered with scalars from the last grid
        # point -- preserved as-is.
        for k in range(0, y.size - 1):
            for j in range(0, x.size - 1):
                V[k, j] = sum(A * np.exp(a*(x[j]-x0)**2 +
                              b*(x[j]-x0)*(y[k]-y0) +
                              c*(y[k]-y0)**2))
                Fx = (-400*np.exp(-1*(-1+x[j])**2 - 10*y[k]**2)*(-1+x[j]) -
                      200*np.exp(-x[j]**2-10*(-0.5+x[j])**2)*x[j] +
                      170*np.exp(-6.5*(0.5+x[j])**2+11*(0.5+x[j])*(-1.5+y[k]) -
                      6.5*(-1.5+y[k])**2)*(-13*(0.5+x[j])+11*(-1/5+y[k])) -
                      15*np.exp(0.7*(1+x[j])**2+0.6*(1+x[j])*(y[k]-1) +
                      0.7*(y[k]-1)**2)*(1.4*(1+x[j])+0.6*(y[k]-1)))
                Fy = (170*np.exp(-6.5*(0.5+x[j])**2 +
                      11*(0.5+x[j])*(-1.5+y[k]) -
                      6.5*(y[k]-1.5)**2)*(11*(0.5+x[j])-13*(y[k]-1.5)) -
                      15*np.exp(0.7*(1+x[j])**2+0.6*(1+x[j])*(y[k]-1) +
                      0.7*(y[k]-1)**2)*(0.6*(x[j]+1)+1.4*(y[k]-1)) -
                      1000*np.exp(-x[j]**2-10*(y[k]-0.5)**2)*(y[k]-0.5) -
                      4000*np.exp(-1*(x[j]-1)**2-10*y[k]**2)*y[k])
        Fpotx = Fx * -1
        Fpoty = Fy * -1
        Trigger = False
    else:
        V = sum(A * np.exp(a * (x-x0)**2 + b * (x-x0)*(y-y0) +
                c * (y-y0)**2))
        Fpotx = (-400*np.exp(-1*(-1+x)**2 - 10*y**2)*(-1+x) -
                 200*np.exp(-x**2-10*(-0.5+x)**2)*x +
                 170*np.exp(-6.5*(0.5+x)**2+11*(0.5+x)*(-1.5+y) -
                 6.5*(-1.5+y)**2)*(-13*(0.5+x)+11*(-1/5+y)) -
                 15*np.exp(0.7*(1+x)**2+0.6*(1+x)*(y-1) +
                 0.7*(y-1)**2)*(1.4*(1+x)+0.6*(y-1)))
        Fpoty = (170*np.exp(-6.5*(0.5+x)**2 +
                 11*(0.5+x)*(-1.5+y) -
                 6.5*(y-1.5)**2)*(11*(0.5+x)-13*(y-1.5)) -
                 15*np.exp(0.7*(1+x)**2+0.6*(1+x)*(y-1) +
                 0.7*(y-1)**2)*(0.6*(x+1)+1.4*(y-1)) -
                 1000*np.exp(-x**2-10*(y-0.5)**2)*(y-0.5) -
                 4000*np.exp(-1*(x-1)**2-10*y**2)*y)
        if x > 0.4 and y < 0.1:
            Trigger = True
        else:
            Trigger = False
    Fpot = np.array([Fpotx, Fpoty])
    Event = ''
    # BUGFIX: Event was computed but dropped from the return value; return a
    # 4-tuple like every other potential function in this module.
    return (V, Fpot, Trigger, Event)
def get_potential_dict():
    """Return two dicts keyed by potential name.

    The first maps each name to the potential function itself, the second
    maps each name to a label describing its dimensionality.
    """
    catalog = {
        'cosine_potential': (cosine_potential, '1-D Potential'),
        'two_gaussian_potential': (two_gaussian_potential, '1-D Potential'),
        'pv_2D_potential': (pv_2D_potential, '2-D Potential'),
        'muller_brown_potential': (muller_brown_potential, '2-D Potential'),
        'C_Cl_potential': (C_Cl_potential, '2-D Potential'),
    }
    potential_dict = {name: func for name, (func, _) in catalog.items()}
    dimension_dict = {name: dim for name, (_, dim) in catalog.items()}
    return potential_dict, dimension_dict
def get_GUI_presets_dict():
    """Return the default GUI input values, as string arrays, per potential."""
    raw_presets = {
        'cosine_potential': [3.14, -6.28, 12.57, 0.01, 0, 0, 0, 0],
        'two_gaussian_potential': [2.67, -4, 4, 0.01, 0, 0, 0, 0],
        'pv_2D_potential': [1.5, 0, 3.0, 0.01, 0.6, -2.0, 2.0, 0.01],
        'muller_brown_potential': [0, 0, 0, 0, 0, 0, 0, 0],
        'C_Cl_potential': [0, 0, 0, 0, 0, 0, 0, 0],
    }
    # np.array(...).astype(str) reproduces the original dtype behaviour:
    # all-integer rows stringify as '0', mixed rows as floats such as '0.0'.
    return {name: np.array(values).astype(str)
            for name, values in raw_presets.items()}
def two_gaussian_potential_bc(vnew, f2, coords):
    """Apply quartic-wall boundary conditions for the two-gaussian potential.

    Outside the interval [-4.3193, 4.25882] the potential is replaced by a
    steep quartic wall centred at -4 / +4; inside, all inputs pass through
    unchanged.

    Returns:
        (vnew, f2, coords, bcbias, is_periodic) where bcbias is the bias
        contributed by the wall and is_periodic is always False here.
    """
    if coords < -4.3193:
        offset = coords + 4.0
        wall_v = 100.0 * offset**4.0 - 1.690133
        wall_f = -100.0 * 4.0 * offset**3.0
        return (wall_v, wall_f, coords, wall_v - vnew, False)
    if coords > 4.25882:
        offset = coords - 4.0
        wall_v = 100.0 * offset**4.0 - 0.845067
        wall_f = -100.0 * 4.0 * offset**3.0
        return (wall_v, wall_f, coords, wall_v - vnew, False)
    return (vnew, f2, coords, 0, False)
def pv_2D_potential_bc(vnew, f2, coords):
    """Apply periodic boundary conditions in x for the 2-D pv potential.

    The x coordinate (coords[0]) is wrapped into [0, 3] in place; y is left
    untouched.  Potential and force pass through unchanged, so the boundary
    bias is always 0.

    Returns:
        (vnew, f2, coords, bcbias, is_periodic) where is_periodic is the
        per-axis flag array [True, False] (periodic in x only).
    """
    x = coords[0]
    if x < 0:
        coords[0] = x + 3  # wrap back in from the left edge
    elif x > 3.0:
        coords[0] = x - 3  # wrap back in from the right edge
    return (vnew, f2, coords, 0, np.array([True, False]))
def C_Cl_potential_bc(vnew, f2, coords):
"""
Apply Boundary Condition to the potential, force, and coordinates.
Parameters:
-----------
vnew : float (or array of floats)
Potential Energy
f2 : float (or array of floats)
Force
coords : float
coordinates
Returns:
--------
vnew : float (or array of floats)
Adjusted | |
# MIT License
#
# Copyright (c) 2016-2018 <NAME>, <NAME>, <NAME>, <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import time
from gurobipy import GRB, LinExpr
from alib import solutions, util, modelcreator, datamodel
from . import extendedcactusgraph
from collections import deque
# Historically this was ``import cPickle as pickle`` with a pure-Python
# fallback; after the Python 3 port both branches imported the very same
# module, so the try/except was dead code.
import pickle
class CactusDecompositionError(Exception):
    """Raised when the cactus decomposition hits an internal inconsistency."""
class DecompositionResult(modelcreator.AlgorithmResult):
    """Algorithm result bundling the computed solution, the temporal log of
    the solve, and the solver status."""

    def __init__(self, solution, temporal_log, solution_status):
        super(DecompositionResult, self).__init__()
        self.solution = solution
        self.temporal_log = temporal_log
        self.status = solution_status

    def get_solution(self):
        """Return the stored solution object."""
        return self.solution

    def _cleanup_references_raw(self, original_scenario):
        # Re-point every stored mapping from our private request/substrate
        # copies to the objects of the original scenario (matched by index).
        stored_scenario = self.solution.scenario
        for index, stored_request in enumerate(stored_scenario.requests):
            mapping = self.solution.request_mapping.pop(stored_request)
            original_request = original_scenario.requests[index]
            mapping.request = original_request
            mapping.substrate = original_scenario.substrate
            self.solution.request_mapping[original_request] = mapping
        self.solution.scenario = original_scenario
class ModelCreatorCactusDecomposition(modelcreator.AbstractEmbeddingModelCreator):
''' The Gurobi model for computing decomposable solutions to the VNEP for cactus request graphs.
Our construction was first described in our publiction:
"<NAME>, <NAME>: Service Chain and Virtual Network Embeddings: Approximations using Randomized Rounding. CoRR abs/1604.02180 (2016)"
Furthermore, the construction described henceforth is equivalent to the formulation described in our paper:
"<NAME>, <NAME>: Virtual Network Embedding Approximations: Leveraging Randomized Rounding. CoRR abs/1803.03622 (2018)"
'''
ALGORITHM_ID = "CactusDecomposition"
    def __init__(self,
                 scenario,
                 gurobi_settings=None,
                 logger=None,
                 lp_output_file=None,
                 pickle_decomposition_tasks=False,
                 decomposition_epsilon=1e-10,
                 absolute_decomposition_abortion_epsilon=1e-6,
                 relative_decomposition_abortion_epsilon=1e-3):
        """Set up the decomposition model creator for the given scenario.

        Parameters
        ----------
        scenario : the VNEP scenario (requests + substrate) to embed
        gurobi_settings : optional solver settings, forwarded to the base class
        logger : optional logger, forwarded to the base class
        lp_output_file : optional path the LP is written to (base class)
        pickle_decomposition_tasks : if True, decomposition tasks are pickled
            (presumably for offline debugging -- the consuming code is not in
            this excerpt; confirm against the decomposition routine)
        decomposition_epsilon : tolerance used by the decomposition
            (NOTE(review): usage lies outside this excerpt -- confirm)
        absolute_decomposition_abortion_epsilon : absolute threshold for
            aborting the decomposition (usage outside this excerpt)
        relative_decomposition_abortion_epsilon : relative threshold for
            aborting the decomposition (usage outside this excerpt)
        """
        super(ModelCreatorCactusDecomposition, self).__init__(scenario=scenario, gurobi_settings=gurobi_settings, logger=logger, lp_output_file=lp_output_file)
        self._originally_allowed_nodes = {}
        # one ExtendedCactusGraph per request; None marks unembeddable requests
        self.extended_graphs = {}
        epsilon = 0.0001
        # remember the full request list; under MAX_PROFIT, requests with
        # (near-)zero profit are dropped from the model right below
        self.all_requests = self.requests
        if self.scenario.objective == datamodel.Objective.MAX_PROFIT:
            self.requests = [r for r in self.requests if r.profit > epsilon]
        self.ext_graph_edges_node = {}
        self.ext_graph_edges_edge = {}
        self.var_node_flow = {}  # f+(r, i, u)
        self.var_edge_flow = {}  # f(r, e)
        self.var_request_load = {}  # l(r, x, y)
        self.decomposition_epsilon = decomposition_epsilon
        self.relative_decomposition_abortion_epsilon = relative_decomposition_abortion_epsilon
        self.absolute_decomposition_abortion_epsilon = absolute_decomposition_abortion_epsilon
        # timestamps bracketing the recovery of the fractional solution
        self._start_time_recovering_fractional_solution = None
        self._end_time_recovering_fractional_solution = None
        self.lost_flow_in_the_decomposition = 0.0
        self.pickle_decomposition_tasks = pickle_decomposition_tasks
def preprocess_input(self):
modelcreator.AbstractEmbeddingModelCreator.preprocess_input(self)
# filter node placement according to substrate capacities create extended graphs
for req in self.requests:
self.extended_graphs[req] = extendedcactusgraph.ExtendedCactusGraph(req, self.scenario.substrate)
self.logger.info(self.extended_graphs[req])
self.logger.info(self.extended_graphs[req].bfs_request.edges)
def create_variables_other_than_embedding_decision_and_request_load(self):
# source/sink node flow induction variables
for req in self.requests:
self.var_node_flow[req] = {}
ext_graph = self.extended_graphs[req]
if ext_graph is None:
continue
for i, ecg_node_dict in ext_graph.source_nodes.items():
if i not in self.var_node_flow[req]:
self.var_node_flow[req][i] = {}
for u, ecg_node in ecg_node_dict.items():
if u in self.var_node_flow[req][i]:
continue
variable_id = modelcreator.construct_name("flow_induction", req_name=req.name, vnode=i, snode=u)
self.var_node_flow[req][i][u] = self.model.addVar(lb=0.0,
ub=1.0,
obj=0.0,
vtype=GRB.BINARY,
name=variable_id)
for i, ecg_node_dict in ext_graph.sink_nodes.items():
if i not in self.var_node_flow[req]:
self.var_node_flow[req][i] = {}
for u, ecg_node in ecg_node_dict.items():
if u in self.var_node_flow[req][i]:
continue
variable_id = modelcreator.construct_name("flow_induction", req_name=req.name, vnode=i, snode=u)
self.var_node_flow[req][i][u] = self.model.addVar(lb=0.0,
ub=1.0,
obj=0.0,
vtype=GRB.BINARY,
name=variable_id)
# edge flow variables
for req in self.requests:
self.var_edge_flow[req] = {}
ext_graph = self.extended_graphs[req]
if ext_graph is None:
continue
for ecg_edge in ext_graph.edges:
variable_id = modelcreator.construct_name("flow_edge", req_name=req.name, other=ecg_edge)
self.var_edge_flow[req][ecg_edge] = self.model.addVar(lb=0.0,
ub=1.0,
obj=0.0,
vtype=GRB.BINARY,
name=variable_id)
self.model.update()
    def create_constraints_other_than_bounding_loads_by_capacities(self):
        """Add all decomposition constraints for each request (the numbers in
        the comments below refer to the constraints of the accompanying
        publication).  Requests without an extended graph are unembeddable
        and simply have their embedding decision variable fixed to zero.
        """
        for req in self.requests:
            if self.extended_graphs[req] is None:
                self.logger.info("Fixing request {} to zero, as it contains unembeddable nodes.".format(req.name))
                expr = LinExpr([(1.0, self.var_embedding_decision[req])])
                constr_name = modelcreator.construct_name("fix_infeasible_req_to_zero", req_name=req.name)
                self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
                continue
            # flow induction at root - constraint 8
            self._add_constraint_root_flow_induction(req)
            # flow induction for cycles (sources) - constraint 9
            self._add_constraint_cycle_flow_induction_at_source(req)
            # same flow in each cycle branch (commutativity) - constraint 10
            self._add_constraint_cycle_flow_commutativity(req)
            # flow induction for paths (sources) - constraint 11
            self._add_constraint_path_flow_induction_at_source(req)
            # flow preservation - constraint 12
            self._add_constraint_flow_preservation(req)
            # flow induction for cycles (sinks) - constraint 13
            self._add_constraint_cycle_flow_induction_at_sink(req)
            # flow induction for paths (sinks) - constraint 14
            self._add_constraint_path_flow_induction_at_sink(req)
            # flow inductions for subgraphs branching off from a cycle - constraint 15
            self._add_constraint_branching_flow_induction(req)
            # load computation for node resources - constraint 16
            self._add_constraint_track_node_load(req)
            # load computation for edge resources - constraint 17
            self._add_constraint_track_edge_load(req)
            # capacity constraint - constraint 18
            # already included by the AbstractEmbeddingModelCreator
        self.model.update()
def _add_constraint_root_flow_induction(self, req):
ext_graph = self.extended_graphs[req]
root = ext_graph.root
expr = LinExpr([(-1.0, self.var_embedding_decision[req])] +
[(1.0, self.var_node_flow[req][root][u]) for u in ext_graph.source_nodes[root].keys()]) # this iterates over all source nodes associated with the request root
constr_name = modelcreator.construct_name("flow_induction_root", req_name=req.name)
# print "Root flow induction ({}):".format(root).ljust(40), expr, "= 0"
self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
    def _add_constraint_cycle_flow_induction_at_sink(self, req):
        """Constraint 9: for each allowed substrate mapping u of a cycle's
        start node, the flow entering the cycle's first branch (summed over
        all feasible cycle end-node targets w) equals the node flow induction
        variable of the start node at u.
        """
        ext_graph = self.extended_graphs[req]
        for cycle in ext_graph.ecg_cycles:
            # substrate nodes with sufficient capacity for the cycle's end node
            valid_end_nodes = self.substrate.get_valid_nodes(req.get_type(cycle.end_node), req.get_node_demand(cycle.end_node))
            valid_cycle_targets = set(req.get_allowed_nodes(cycle.end_node)).intersection(valid_end_nodes)
            start_type = req.get_type(cycle.start_node)
            start_demand = req.get_node_demand(cycle.start_node)
            valid_start_nodes = self.substrate.get_valid_nodes(start_type, start_demand)
            for u in req.get_allowed_nodes(cycle.start_node):
                if u not in valid_start_nodes:
                    continue
                cycle_source = ext_graph.source_nodes[cycle.start_node][u]
                ij = cycle.original_branches[0][0]  # select the first edge of the first branch of the cycle
                if ij[0] != cycle.start_node:
                    raise CactusDecompositionError("Sanity check")
                expr = [(-1.0, self.var_node_flow[req][cycle.start_node][u])]
                for w in valid_cycle_targets:
                    # edge from the cycle source into the layer of target w
                    ext_edge = (cycle_source, ext_graph.cycle_layer_nodes[ij][u][w])
                    expr.append((1.0, self.var_edge_flow[req][ext_edge]))
                expr = LinExpr(expr)
                constr_name = modelcreator.construct_name("cycle_flow_induction_source", req_name=req.name, other="{}->{}".format(cycle.start_node, cycle.end_node))
                # print "Cycle flow induction ({}):".format(cycle_source).ljust(40), expr, "= 0"
                self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
    def _add_constraint_cycle_flow_commutativity(self, req):
        """Constraint 10: for each mapped start node u and each feasible cycle
        target w, the flow routed into the cycle's first branch must equal the
        flow routed into its second branch (both branches commit to the same
        end-node mapping).
        """
        ext_graph = self.extended_graphs[req]
        for cycle in ext_graph.ecg_cycles:
            valid_end_nodes = self.substrate.get_valid_nodes(req.get_type(cycle.end_node), req.get_node_demand(cycle.end_node))
            valid_cycle_targets = set(req.get_allowed_nodes(cycle.end_node)).intersection(valid_end_nodes)
            start_type = req.get_type(cycle.start_node)
            start_demand = req.get_node_demand(cycle.start_node)
            valid_start_nodes = self.substrate.get_valid_nodes(start_type, start_demand)
            for u in req.get_allowed_nodes(cycle.start_node):
                if u not in valid_start_nodes:
                    continue
                cycle_source = ext_graph.source_nodes[cycle.start_node][u]
                ij = cycle.original_branches[0][0]  # select the first edge of the first branch of the cycle
                ik = cycle.original_branches[1][0]  # select the first edge of the second branch of the cycle
                if ij[0] != cycle.start_node or ik[0] != cycle.start_node:
                    raise CactusDecompositionError("Sanity check")
                for w in valid_cycle_targets:
                    # layer entry nodes of both branches for target w
                    u_ij = ext_graph.cycle_layer_nodes[ij][u][w]
                    u_ik = ext_graph.cycle_layer_nodes[ik][u][w]
                    expr = LinExpr([
                        (1.0, self.var_edge_flow[req][(cycle_source, u_ij)]),
                        (-1.0, self.var_edge_flow[req][(cycle_source, u_ik)])
                    ])
                    constr_name = modelcreator.construct_name("flow_commutativity",
                                                              req_name=req.name,
                                                              vedge="{}->{}, {}->{}".format(ij[0], ij[1], ik[0], ik[1]),
                                                              snode="s={}, t={}".format(u, w))
                    # print "Cycle flow commutativity ({}, {}):".format(cycle_source, w).ljust(40), expr, "= 0"
                    self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
    def _add_constraint_path_flow_induction_at_source(self, req):
        """Constraint 11: for each allowed substrate mapping u of a path's
        start node, the flow entering the path's first extended edge equals
        the node flow induction variable of the start node at u.
        """
        ext_graph = self.extended_graphs[req]
        for path in ext_graph.ecg_paths:
            start_type = req.get_type(path.start_node)
            start_demand = req.get_node_demand(path.start_node)
            valid_start_nodes = self.substrate.get_valid_nodes(start_type, start_demand)
            for u in req.get_allowed_nodes(path.start_node):
                if u not in valid_start_nodes:
                    continue
                path_source = ext_graph.source_nodes[path.start_node][u]
                ij = path.original_path[0]  # select the first edge of the path
                ext_ij = (path_source, ext_graph.path_layer_nodes[ij][u])
                expr = LinExpr([
                    (1.0, self.var_edge_flow[req][ext_ij]),
                    (-1.0, self.var_node_flow[req][path.start_node][u])
                ])
                constr_name = modelcreator.construct_name("path_flow_induction_source",
                                                          req_name=req.name,
                                                          vedge="{}->{}".format(ij[0], ij[1]),
                                                          snode="s={}".format(u))
                # print "Path flow induction ({}):".format(path_source).ljust(40), expr, "= 0"
                self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
def _add_constraint_flow_preservation(self, req):
ext_graph = self.extended_graphs[req]
for node in ext_graph.layer_nodes:
expr = []
for neighbor in ext_graph.get_out_neighbors(node):
edge = node, neighbor
expr.append((1.0, self.var_edge_flow[req][edge]))
for neighbor in ext_graph.get_in_neighbors(node):
edge = neighbor, node
expr.append((-1.0, self.var_edge_flow[req][edge]))
expr = LinExpr(expr)
constr_name = modelcreator.construct_name("flow_preservation",
other=node)
self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
# print "Flow Preservation ({}):".format(node)[:37].ljust(40), expr, "= 0"
def _add_constraint_cycle_flow_induction_at_sink(self, req):
ext_graph = self.extended_graphs[req]
for cycle in ext_graph.ecg_cycles:
valid_cycle_end_nodes = self.substrate.get_valid_nodes(
req.get_type(cycle.end_node), req.get_node_demand(cycle.end_node)
)
for w in req.get_allowed_nodes(cycle.end_node):
if w not in valid_cycle_end_nodes:
continue
ij = cycle.original_branches[0][-1] # select the last edge of the first branch of the cycle
if ij[1] != cycle.end_node:
raise CactusDecompositionError("Sanity check")
cycle_sink = ext_graph.sink_nodes[cycle.end_node][w]
ext_ij = (ext_graph.cycle_layer_nodes[ij][w][w], cycle_sink)
expr = LinExpr([
(1.0, self.var_node_flow[req][cycle.end_node][w]),
(-1.0, self.var_edge_flow[req][ext_ij])
])
constr_name = modelcreator.construct_name("cycle_flow_induction_source", req_name=req.name, other="{}->{}".format(cycle.start_node, cycle.end_node))
# print "Cycle flow induction ({}):".format(cycle_sink).ljust(40), expr, "= 0"
self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
    def _add_constraint_path_flow_induction_at_sink(self, req):
        """Constraint 14: for each valid substrate mapping w of a path's end
        node, the flow arriving over the path's last extended edge equals the
        node flow induction variable of the end node at w.
        """
        ext_graph = self.extended_graphs[req]
        for path in ext_graph.ecg_paths:
            valid_end_nodes = self.substrate.get_valid_nodes(
                req.get_type(path.end_node), req.get_node_demand(path.end_node)
            )
            for w in req.get_allowed_nodes(path.end_node):
                if w not in valid_end_nodes:
                    continue
                path_sink = ext_graph.sink_nodes[path.end_node][w]
                ij = path.original_path[-1]  # select the last edge of the path
                ext_ij = (ext_graph.path_layer_nodes[ij][w], path_sink)
                expr = LinExpr([
                    (1.0, self.var_edge_flow[req][ext_ij]),
                    (-1.0, self.var_node_flow[req][path.end_node][w])
                ])
                constr_name = modelcreator.construct_name("path_flow_induction_sink",
                                                          req_name=req.name,
                                                          vedge="{}->{}".format(ij[0], ij[1]),
                                                          snode="s={}".format(w))
                # print "Path flow induction ({}):".format(path_sink).ljust(40), expr, "= 0"
                self.model.addConstr(expr, GRB.EQUAL, 0.0, name=constr_name)
def _add_constraint_branching_flow_induction(self, req):
ext_graph | |
self,
*,
id: str = None,
version: int = None,
created_at: datetime.datetime = None,
last_modified_at: datetime.datetime = None,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int = None,
resource: "Reference" = None,
resource_version: int = None,
type: str = None,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
removed_image_urls: list = None,
product_projection: "ProductProjection" = None,
scope: "ProductPublishScope" = None
) -> None:
self.removed_image_urls = removed_image_urls
self.product_projection = product_projection
self.scope = scope
super().__init__(
id=id,
version=version,
created_at=created_at,
last_modified_at=last_modified_at,
last_modified_by=last_modified_by,
created_by=created_by,
sequence_number=sequence_number,
resource=resource,
resource_version=resource_version,
type="ProductPublished",
resource_user_provided_identifiers=resource_user_provided_identifiers,
)
def __repr__(self) -> str:
return (
"ProductPublishedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, removed_image_urls=%r, product_projection=%r, scope=%r)"
% (
self.id,
self.version,
self.created_at,
self.last_modified_at,
self.last_modified_by,
self.created_by,
self.sequence_number,
self.resource,
self.resource_version,
self.type,
self.resource_user_provided_identifiers,
self.removed_image_urls,
self.product_projection,
self.scope,
)
)
class ProductPublishedMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductPublishedMessagePayloadSchema`."

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list
    #: :class:`commercetools.types.ProductProjection` `(Named` ``productProjection`` `in Commercetools)`
    product_projection: "ProductProjection"
    #: :class:`commercetools.types.ProductPublishScope`
    scope: "ProductPublishScope"

    def __init__(
        self,
        *,
        type: str = None,
        removed_image_urls: list = None,
        product_projection: "ProductProjection" = None,
        scope: "ProductPublishScope" = None
    ) -> None:
        # Payload-specific fields; the type discriminator is fixed to
        # "ProductPublished" regardless of the ``type`` argument.
        self.removed_image_urls = removed_image_urls
        self.product_projection = product_projection
        self.scope = scope
        super().__init__(type="ProductPublished")

    def __repr__(self) -> str:
        return (
            f"ProductPublishedMessagePayload(type={self.type!r}, "
            f"removed_image_urls={self.removed_image_urls!r}, "
            f"product_projection={self.product_projection!r}, scope={self.scope!r})"
        )
class ProductRemovedFromCategoryMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductRemovedFromCategoryMessageSchema`."

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        category: "CategoryReference" = None,
        staged: bool = None
    ) -> None:
        # Message-specific fields first; all shared fields are delegated to
        # the base class with the type discriminator pinned.
        self.category = category
        self.staged = staged
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductRemovedFromCategory",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductRemovedFromCategoryMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"category={self.category!r}, staged={self.staged!r})"
        )
class ProductRemovedFromCategoryMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductRemovedFromCategoryMessagePayloadSchema`."

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        type: str = None,
        category: "CategoryReference" = None,
        staged: bool = None
    ) -> None:
        # Payload-specific fields; the type discriminator is fixed below.
        self.category = category
        self.staged = staged
        super().__init__(type="ProductRemovedFromCategory")

    def __repr__(self) -> str:
        return (
            f"ProductRemovedFromCategoryMessagePayload(type={self.type!r}, "
            f"category={self.category!r}, staged={self.staged!r})"
        )
class ProductRevertedStagedChangesMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductRevertedStagedChangesMessageSchema`."

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        removed_image_urls: list = None
    ) -> None:
        # Message-specific field first; shared fields go to the base class
        # with the type discriminator pinned.
        self.removed_image_urls = removed_image_urls
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductRevertedStagedChanges",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductRevertedStagedChangesMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"removed_image_urls={self.removed_image_urls!r})"
        )
class ProductRevertedStagedChangesMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductRevertedStagedChangesMessagePayloadSchema`."

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(self, *, type: str = None, removed_image_urls: list = None) -> None:
        # Payload-specific field; the type discriminator is fixed below.
        self.removed_image_urls = removed_image_urls
        super().__init__(type="ProductRevertedStagedChanges")

    def __repr__(self) -> str:
        return (
            f"ProductRevertedStagedChangesMessagePayload(type={self.type!r}, "
            f"removed_image_urls={self.removed_image_urls!r})"
        )
class ProductSlugChangedMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductSlugChangedMessageSchema`."

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        slug: "LocalizedString" = None
    ) -> None:
        # Message-specific field first; shared fields go to the base class
        # with the type discriminator pinned.
        self.slug = slug
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductSlugChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductSlugChangedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"slug={self.slug!r})"
        )
class ProductSlugChangedMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductSlugChangedMessagePayloadSchema`."

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(self, *, type: str = None, slug: "LocalizedString" = None) -> None:
        # Payload-specific field; the type discriminator is fixed below.
        self.slug = slug
        super().__init__(type="ProductSlugChanged")

    def __repr__(self) -> str:
        return f"ProductSlugChangedMessagePayload(type={self.type!r}, slug={self.slug!r})"
class ProductStateTransitionMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductStateTransitionMessageSchema`."

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        state: "StateReference" = None,
        force: bool = None
    ) -> None:
        # Message-specific fields first; shared fields go to the base class
        # with the type discriminator pinned.
        self.state = state
        self.force = force
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductStateTransitionMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"state={self.state!r}, force={self.force!r})"
        )
class ProductStateTransitionMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductStateTransitionMessagePayloadSchema`."

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, type: str = None, state: "StateReference" = None, force: bool = None
    ) -> None:
        # Payload-specific fields; the type discriminator is fixed below.
        self.state = state
        self.force = force
        super().__init__(type="ProductStateTransition")

    def __repr__(self) -> str:
        return (
            f"ProductStateTransitionMessagePayload(type={self.type!r}, "
            f"state={self.state!r}, force={self.force!r})"
        )
class ProductUnpublishedMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductUnpublishedMessageSchema`."

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None
    ) -> None:
        # No message-specific fields; everything is delegated to the base
        # class with the type discriminator pinned.
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductUnpublished",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductUnpublishedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r})"
        )
class ProductUnpublishedMessagePayload(MessagePayload):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductUnpublishedMessagePayloadSchema`."

    def __init__(self, *, type: str = None) -> None:
        # No payload-specific fields; the type discriminator is fixed below.
        super().__init__(type="ProductUnpublished")

    def __repr__(self) -> str:
        return f"ProductUnpublishedMessagePayload(type={self.type!r})"
class ProductVariantDeletedMessage(Message):
    "Corresponding marshmallow schema is :class:`commercetools.schemas.ProductVariantDeletedMessageSchema`."

    #: :class:`commercetools.types.ProductVariant`
    variant: "ProductVariant"
    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        variant: "ProductVariant" = None,
        removed_image_urls: list = None
    ) -> None:
        # Message-specific fields first; shared fields go to the base class
        # with the type discriminator pinned.
        self.variant = variant
        self.removed_image_urls = removed_image_urls
        base_kwargs = dict(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductVariantDeleted",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
        super().__init__(**base_kwargs)

    def __repr__(self) -> str:
        return (
            f"ProductVariantDeletedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"variant={self.variant!r}, removed_image_urls={self.removed_image_urls!r})"
        )
class ProductVariantDeletedMessagePayload(MessagePayload):
    """Payload of a ``ProductVariantDeleted`` message.

    Corresponding marshmallow schema is
    :class:`commercetools.schemas.ProductVariantDeletedMessagePayloadSchema`.
    """
    #: :class:`commercetools.types.ProductVariant`
    variant: "ProductVariant"
    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self,
        *,
        type: str = None,
        variant: "ProductVariant" = None,
        removed_image_urls: list = None
    ) -> None:
        self.variant = variant
        self.removed_image_urls = removed_image_urls
        # The payload type is constant; ``type`` exists only for schema
        # loader compatibility.
        super().__init__(type="ProductVariantDeleted")

    def __repr__(self) -> str:
        values = (self.type, self.variant, self.removed_image_urls)
        return (
            "ProductVariantDeletedMessagePayload(type=%r, variant=%r, removed_image_urls=%r)"
            % values
        )
class ReviewCreatedMessage(Message):
"Corresponding marshmallow schema is :class:`commercetools.schemas.ReviewCreatedMessageSchema`."
#: :class:`commercetools.types.Review`
review: "Review"
def __init__(
self,
*,
id: str = None,
version: int = None,
created_at: datetime.datetime = None,
last_modified_at: datetime.datetime = None,
last_modified_by: typing.Optional["LastModifiedBy"] = None,
created_by: typing.Optional["CreatedBy"] = None,
sequence_number: int = None,
resource: "Reference" = None,
resource_version: int = None,
type: str = None,
resource_user_provided_identifiers: typing.Optional[
"UserProvidedIdentifiers"
] = None,
review: "Review" = None
) -> None:
self.review = review
super().__init__(
id=id,
version=version,
created_at=created_at,
| |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from ovsdbapp import api
@six.add_metaclass(abc.ABCMeta)
class API(api.API):
    @abc.abstractmethod
    def create_lswitch(self, name, may_exist=True, **columns):
        """Create a command to add an OVN lswitch.

        :param name: The id of the lswitch
        :type name: string
        :param may_exist: Do not fail if lswitch already exists
        :type may_exist: bool
        :param columns: Dictionary of lswitch columns
                        Supported columns: external_ids
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def set_lswitch_ext_id(self, name, ext_id, if_exists=True):
        """Create a command to set an OVN lswitch external id.

        :param name: The name of the lswitch
        :type name: string
        :param ext_id: The external id to set for the lswitch
        :type ext_id: pair of <ext_id_key, ext_id_value>
        :param if_exists: Do not fail if lswitch does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_lswitch(self, name=None, ext_id=None, if_exists=True):
        """Create a command to delete an OVN lswitch.

        The switch may be selected either by name or by external id.

        :param name: The name of the lswitch
        :type name: string
        :param ext_id: The external id of the lswitch
        :type ext_id: pair of <ext_id_key, ext_id_value>
        :param if_exists: Do not fail if the lswitch does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def create_lswitch_port(self, lport_name, lswitch_name, may_exist=True,
                            **columns):
        """Create a command to add an OVN logical switch port.

        :param lport_name: The name of the lport
        :type lport_name: string
        :param lswitch_name: The name of the lswitch the lport is created on
        :type lswitch_name: string
        :param may_exist: Do not fail if lport already exists
        :type may_exist: bool
        :param columns: Dictionary of port columns
                        Supported columns: macs, external_ids,
                        parent_name, tag, enabled
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def set_lswitch_port(self, lport_name, if_exists=True, **columns):
        """Create a command to set OVN logical switch port fields.

        :param lport_name: The name of the lport
        :type lport_name: string
        :param if_exists: Do not fail if lport does not exist
        :type if_exists: bool
        :param columns: Dictionary of port columns
                        Supported columns: macs, external_ids,
                        parent_name, tag, enabled
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_lswitch_port(self, lport_name=None, lswitch_name=None,
                            ext_id=None, if_exists=True):
        """Create a command to delete an OVN logical switch port.

        The port may be selected either by name or by external id.

        :param lport_name: The name of the lport
        :type lport_name: string
        :param lswitch_name: The name of the lswitch
        :type lswitch_name: string
        :param ext_id: The external id of the lport
        :type ext_id: pair of <ext_id_key, ext_id_value>
        :param if_exists: Do not fail if the lport does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def create_lrouter(self, name, may_exist=True, **columns):
        """Create a command to add an OVN lrouter.

        :param name: The id of the lrouter
        :type name: string
        :param may_exist: Do not fail if lrouter already exists
        :type may_exist: bool
        :param columns: Dictionary of lrouter columns
                        Supported columns: external_ids, default_gw, ip
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def update_lrouter(self, name, if_exists=True, **columns):
        """Create a command to update an OVN lrouter.

        :param name: The id of the lrouter
        :type name: string
        :param if_exists: Do not fail if the lrouter does not exist
        :type if_exists: bool
        :param columns: Dictionary of lrouter columns
                        Supported columns: external_ids, default_gw, ip
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_lrouter(self, name, if_exists=True):
        """Create a command to delete an OVN lrouter.

        :param name: The id of the lrouter
        :type name: string
        :param if_exists: Do not fail if the lrouter does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def add_lrouter_port(self, name, lrouter, if_exists=True,
                         **columns):
        """Create a command to add an OVN lrouter port.

        :param name: The unique name of the lrouter port
        :type name: string
        :param lrouter: The unique name of the lrouter
        :type lrouter: string
        :param if_exists: Do not fail if lrouter port already exists
        :type if_exists: bool
        :param columns: Dictionary of lrouter columns
                        Supported columns: external_ids, mac, network
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def update_lrouter_port(self, name, if_exists=True, **columns):
        """Create a command to update an OVN lrouter port.

        :param name: The unique name of the lrouter port
        :type name: string
        :param if_exists: Do not fail if the lrouter port does not exist
        :type if_exists: bool
        :param columns: Dictionary of lrouter columns
                        Supported columns: networks
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_lrouter_port(self, name, lrouter, if_exists=True):
        """Create a command to delete an OVN lrouter port.

        :param name: The unique name of the lport
        :type name: string
        :param lrouter: The unique name of the lrouter
        :type lrouter: string
        :param if_exists: Do not fail if the lrouter port does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def set_lrouter_port_in_lswitch_port(self, lswitch_port, lrouter_port):
        """Create a command to set lswitch_port as lrouter_port.

        :param lswitch_port: The name of logical switch port
        :type lswitch_port: string
        :param lrouter_port: The name of logical router port
        :type lrouter_port: string
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def add_acl(self, lswitch, lport, **columns):
        """Create an ACL for a logical port.

        :param lswitch: The logical switch the port is attached to.
        :type lswitch: string
        :param lport: The logical port this ACL is associated with.
        :type lport: string
        :param columns: Dictionary of ACL columns
                        Supported columns: see ACL table in OVN_Northbound
        :type columns: dictionary
        """
    @abc.abstractmethod
    def delete_acl(self, lswitch, lport, if_exists=True):
        """Delete all ACLs for a logical port.

        :param lswitch: The logical switch the port is attached to.
        :type lswitch: string
        :param lport: The logical port this ACL is associated with.
        :type lport: string
        :param if_exists: Do not fail if the ACL for this lport does not
                          exist
        :type if_exists: bool
        """
    @abc.abstractmethod
    def update_acls(self, lswitch_names, port_list, acl_new_values_dict,
                    need_compare=True, is_add_acl=True):
        """Update the list of acls on logical switches with new values.

        :param lswitch_names: List of logical switch names
        :type lswitch_names: []
        :param port_list: Iterator of list of ports
        :type port_list: []
        :param acl_new_values_dict: Dictionary of acls indexed by port id
        :type acl_new_values_dict: {}
        :param need_compare: If acl_new_values_dict need compare
                             with existing acls
        :type need_compare: bool
        :param is_add_acl: If updating is caused by adding acl
        :type is_add_acl: bool
        """
    @abc.abstractmethod
    def add_static_route(self, lrouter, **columns):
        """Add a static route to a logical router.

        :param lrouter: The unique name of the lrouter
        :type lrouter: string
        :param columns: Dictionary of static route columns
                        Supported columns: prefix, nexthop, valid
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_static_route(self, lrouter, ip_prefix, nexthop, if_exists=True):
        """Delete a static route from a logical router.

        :param lrouter: The unique name of the lrouter
        :type lrouter: string
        :param ip_prefix: The prefix of the static route
        :type ip_prefix: string
        :param nexthop: The nexthop of the static route
        :type nexthop: string
        :param if_exists: Do not fail if router does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def create_address_set(self, name, may_exist=True, **columns):
        """Create an address set.

        :param name: The name of the address set
        :type name: string
        :param may_exist: Do not fail if address set already exists
        :type may_exist: bool
        :param columns: Dictionary of address set columns
                        Supported columns: external_ids, addresses
        :type columns: dictionary
        :returns: :class:`Command` with no result
        """
    @abc.abstractmethod
    def delete_address_set(self, name, if_exists=True):
        """Delete an address set.

        :param name: The name of the address set
        :type name: string
        :param if_exists: Do not fail if the address set does not exist
        :type if_exists: bool
        :returns: :class:`Command` with no result
        """
@abc.abstractmethod
def update_address_set(self, name, addrs_add, addrs_remove,
if_exists=True):
"""Updates addresses in an address set
:param name: The name of the address set
:type name: string
:param addrs_add: | |
from pyjamas_core import Supermodel
from pyjamas_core.util import Input, Output, Property
from datetime import datetime, timedelta
from Models._utils.time import datetime2utc_time, utc_time2datetime
import numpy as np
from pytz import timezone
import json
from scipy.interpolate import griddata
import pandas as pd
import os
# define the model class and inherit from class "Supermodel"
class Model(Supermodel):
# model constructor
def __init__(self, id, name: str):
# instantiate supermodel
super(Model, self).__init__(id, name)
# define inputs
self.inputs['mode'] = Input(name='modus', unit='-', info="modus (is live of not")
self.inputs['KW'] = Input(name='KW info', unit='-', info="KW informations (u.a. id, lat, lon)")
self.inputs['date'] = Input(name='Futures', unit='s', info="Time vector of futures in utc timestamp [s]")
# define outputs
self.outputs['KW_weather'] = Output(name='weather data of KWs', unit='date, °C, m/s, W/m^2', info='weather data of KWs')
self.outputs['Futures_weather'] = Output(name='weather data', unit='date, °C, m/s, W/m^2', info='(future) weather data (temperature, wind speed, radiation)')
# define properties
self.properties['T_offset'] = Property(default=0., data_type=float, name='temperature offset', unit='%', info="offset of temperature in %")
self.properties['u_offset'] = Property(default=0., data_type=float, name='wind speed offset', unit='%', info="offset of wind speed in %")
self.properties['P_offset'] = Property(default=0., data_type=float, name='radiation offset', unit='%', info="offset of radiation in %")
self.properties['ref_year'] = Property(default=2007, data_type=int, name='reference year', unit='-', info="reference year for modeled weather")
# define persistent variables
self.data_hist = None
self.data_hist_year = None
self.ref_year = None
    async def func_birth(self):
        """One-time startup hook: load the historic weather data from disk."""
        # read historic data from file
        self.data_hist = self.historic_data_read()
async def func_amend(self, keys=[]):
# if the refence year changes, select new historic reference data based on ref_year
if 'ref_year' in keys:
self.ref_year = self.get_property('ref_year')
self.data_hist_year = self.historic_select_year()
    async def func_peri(self, prep_to_peri=None):
        """Periodic step: compute interpolated weather for the power plants
        (KW) and the raw future weather table, and publish both outputs."""
        # get inputs
        islife = await self.get_input('mode')  # NOTE(review): fetched but unused — 'islive' below is hard-coded
        KW_data_orig = await self.get_input('KW')
        KW_data = {k: KW_data_orig[k] for k in ('id', 'kw_bezeichnung', 'latitude', 'longitude')}
        futures = await self.get_input('date')
        # prepare weather data, dependent on modus live or not
        islive = False  # live branch disabled: prepare_API_weather() is not functional yet
        if islive:
            # live: take current weather data forecast by API
            weather_data = self.prepare_API_weather()
        else:
            # not live: take historic weather data from a reference year
            weather_data = self.prepare_historic_weather(futures)
        # KW weather
        # interpolate weather data in times and locations for the different KW's
        KW_weather_data = self.KW_weather_data(KW_data, weather_data, futures)
        # futures weather
        # editing weather data for further use (e.g. power demand model)
        futures_weather_data = weather_data.tolist() #self.future_weather_data(futures, weather_data)
        # set output
        self.set_output("KW_weather", KW_weather_data)
        self.set_output("Futures_weather", futures_weather_data)
@staticmethod
def historic_data_read():
# read historic weather data
path = os.path.abspath(__file__)
dir_path = os.path.dirname(path)
filename = os.path.join(dir_path, 'confidential', 'dict_hist')
with open(filename, 'r') as f:
data_hist = json.load(f)
return data_hist
    def historic_select_year(self):
        """Return a copy of the historic data restricted to ``self.ref_year``.

        The stacked matrix holds one time row followed by per-location series:
        rows 1:26 temperature, 26:51 wind speed, 51:76 radiation — presumably
        25 grid locations; TODO confirm against the data file.
        """
        # copy all historical data to new dict
        data = dict((k, v) for k, v in self.data_hist.items())
        # define start and end date of the reference year (as utc timestamps)
        start_date = datetime(self.ref_year, 1, 1, 0, 0)
        start_date = datetime2utc_time(start_date)
        end_date = datetime(self.ref_year+1, 1, 1, 0, 0)
        end_date = datetime2utc_time(end_date)
        # extract time, temperature, wind speed and radiation from dict
        time = np.array(data["times"])
        time = time[np.newaxis, :]
        temp = np.array(data["temperature"]["values"])
        wind = np.array(data["windspeed"]["values"])
        rad = np.array(data["radiation"]["values"])
        # create numpy array of time, temperature, wind speed and radiation
        # and select the ones within the reference year
        matrix = np.append(time, temp, axis=0)
        matrix = np.append(matrix, wind, axis=0)
        matrix = np.append(matrix, rad, axis=0)
        matrix = matrix.transpose()
        matrix = matrix[(matrix[:, 0] >= start_date) & (matrix[:, 0] <= end_date)]
        matrix = matrix.transpose()
        # write selected data back to dict (same row layout as above)
        data["times"] = matrix[0, :].tolist()
        data["temperature"]["values"] = matrix[1:26, :].tolist()
        data["windspeed"]["values"] = matrix[26:51, :].tolist()
        data["radiation"]["values"] = matrix[51:76, :].tolist()
        return data
    def prepare_API_weather(self):
        """Fetch live forecast data from the weather API.

        NOTE(review): not functional yet — only reads the API key and returns
        ``None``; the actual request/dump code is commented out below.
        """
        # not working so far
        path = os.path.abspath(__file__)
        dir_path = os.path.dirname(path)
        filename = os.path.join(dir_path, 'confidential', 'API_Key')
        with open(filename, "r") as f:
            API_key = f.readline()
        url_ad = API_key
        #weather_API_read = requests.get(url_ad).json()
        #with open('confidential/dict_API', 'w') as fp:
        #    json.dump(weather_API_read, fp)
    def prepare_historic_weather(self, futures):
        """Build the weather data base for the given futures from the
        historic reference-year data.

        :param futures: time vector of futures (utc timestamps [s])
        :returns: numpy array, one row per (location, time) sample
        """
        # set futures back to ref_year
        futures_shifted = self.dates_shift(futures)
        # filter historic weather data
        # - around the shifted futures
        data_filtered = self.data_filter(futures_shifted)
        # create data base of historic weather data
        # - formatting filtered data from dict to numpy array
        data_base = self.create_database(data_filtered)
        # forecast weather data (shift of historic weather data)
        # not implemented so far
        #forecast_data = self.datahist_shift(data_base, futures[0])
        return data_base
    def dates_shift(self, dates):
        """Shift a list of utc timestamps back into the reference year.

        The offset between the first date and its counterpart in
        ``self.ref_year`` is subtracted from every date; afterwards all dates
        are forced into the first shifted date's year (handles a turn of the
        year within the future window).

        :param dates: list of utc timestamps [s]
        :returns: list of shifted utc timestamps [s]
        """
        # shift dates (futures) back to reference year
        dates = [utc_time2datetime(x) for x in dates]
        date_1 = dates[0]
        date_1_ref = date_1.replace(year=self.ref_year)
        date_shift = date_1-date_1_ref
        dates_shifted = [x-date_shift for x in dates]
        year_1 = dates_shifted[0].year
        dates_shifted = [x.replace(year=year_1) for x in dates_shifted]
        dates_shifted = [datetime2utc_time(x) for x in dates_shifted]
        return dates_shifted
    def data_filter(self, dates):
        """Cut the reference-year weather data down to the window spanned by
        ``dates`` (plus one sample on each side) and return it as a new dict.

        :param dates: shifted futures (utc timestamps [s]), assumed sorted
        :returns: dict with the same layout as ``self.data_hist_year``
        """
        # create dict copy of historical reference year weather data
        data = {k: v for k, v in self.data_hist_year.items()}
        # extract the reference sample times
        ref_times = data['times']
        ref_times = np.array(ref_times)
        # first and last date of futures
        date_first = dates[0]
        date_last = dates[len(dates)-1]
        # extract limit reference times around futures
        # - last reference time before first future
        # - first reference time after last future
        date_before_first = np.max(ref_times[ref_times <= date_first])
        date_after_last = np.min(ref_times[ref_times >= date_last])
        # extract futures, temperature, wind speed and radiation from data dict
        time = np.array(data["times"])
        time = time[np.newaxis, :]
        temp = np.array(data["temperature"]["values"])
        wind = np.array(data["windspeed"]["values"])
        rad = np.array(data["radiation"]["values"])
        # create data matrix with futures, temperature, wind speed and radiation
        matrix = np.append(time, temp, axis=0)
        matrix = np.append(matrix, wind, axis=0)
        matrix = np.append(matrix, rad, axis=0)
        matrix = matrix.transpose()
        # filter reference weather data within limit reference times
        # - all futures within a year
        if date_first < date_last:
            matrix = matrix[(matrix[:, 0] >= date_before_first) & (matrix[:, 0] <= date_after_last)]
        # - futures with turn of the year (window wraps around year end)
        else:
            matrix = matrix[(matrix[:, 0] <= date_after_last) | (matrix[:, 0] >= date_before_first)]
        matrix = matrix.transpose()
        # update dict (row layout: 0 time, 1:26 temp, 26:51 wind, 51:76 rad)
        data2 = {"ids": data["ids"], "lat": data["lat"], "lon": data["lon"], "asl": data["asl"],
                 "times": matrix[0, :].tolist(),
                 "temperature": {'height': data["temperature"]['height'],
                                 'unit': data["temperature"]['unit'],
                                 "values": matrix[1:26, :].tolist()},
                 "windspeed": {'height': data["windspeed"]['height'],
                               'unit': data["windspeed"]['unit'],
                               "values": matrix[26:51, :].tolist()},
                 "radiation": {'height': data["radiation"]['height'],
                               'unit': data["radiation"]['unit'],
                               "values": matrix[51:76, :].tolist()}
                 }
        return data2
    def create_database(self, data_filtered):
        """Flatten the filtered weather dict into a 2-D numpy array.

        Each row is one (location, time) sample with columns
        ``[lat, lon, time, temperature, windspeed, radiation]``; the
        configured percentage offsets are applied to the three value columns.

        :param data_filtered: dict as returned by :meth:`data_filter`
        :returns: numpy array of shape (num_points * num_times, 6)
        """
        # extract number of locations (lat/lon) and number of futures
        num_points = data_filtered["lat"].__len__()
        num_times = data_filtered["times"].__len__()
        # initialize latitude, longitude, time, temperature, wind speed and radiation vectors
        # and fill them by extraction of dict
        lat_vec = []
        lon_vec = []
        time_vec = np.tile(np.array(data_filtered["times"]), num_points)
        temp_vec = []
        wind_vec = []
        rad_vec = []
        for it in range(0, num_points):
            lat_vec.append(np.repeat(data_filtered["lat"][it], num_times))
            lon_vec.append(np.repeat(data_filtered["lon"][it], num_times))
            temp_vec.append(data_filtered["temperature"]["values"][it])
            wind_vec.append(data_filtered["windspeed"]["values"][it])
            rad_vec.append(data_filtered["radiation"]["values"][it])
        # change format to array and transposing (each becomes a column vector)
        lat_vec = np.array([lat_vec]).ravel()
        lon_vec = np.array(lon_vec).ravel()
        time_vec = np.array(time_vec).ravel()
        temp_vec = np.array(temp_vec).ravel()
        wind_vec = np.array(wind_vec).ravel()
        rad_vec = np.array(rad_vec).ravel()
        lat_vec = lat_vec[np.newaxis, :].transpose()
        lon_vec = lon_vec[np.newaxis, :].transpose()
        time_vec = time_vec[np.newaxis, :].transpose()
        temp_vec = temp_vec[np.newaxis, :].transpose()
        wind_vec = wind_vec[np.newaxis, :].transpose()
        rad_vec = rad_vec[np.newaxis, :].transpose()
        # offset for temperature, wind speed and radiation (properties in %)
        temp_vec = np.multiply(temp_vec, (1 + self.get_property('T_offset') / 100))
        wind_vec = np.multiply(wind_vec, (1 + self.get_property('u_offset') / 100))
        rad_vec = np.multiply(rad_vec, (1 + self.get_property('P_offset') / 100))
        # create matrix
        data_base = np.concatenate((lat_vec, lon_vec, time_vec, temp_vec, wind_vec, rad_vec), axis=1)
        return data_base
def KW_weather_data(self, KW_data, weather_data, futures):
# naming of columns
# - of KW_data (ones to be extracted)
KW_data_columns = ['id', 'kw_bezeichnung', 'latitude', 'longitude']
# shift futures back (to agree with historic data
futures = self.dates_shift(futures)
# create data frame from KW_data dict
KW_data_df = pd.DataFrame(KW_data)
# select only photovoltaic and wind turbine data
PV_data = KW_data_df.loc[KW_data_df[KW_data_columns[1]] == 'Photovoltaik']
WT_data = KW_data_df.loc[KW_data_df[KW_data_columns[1]] == 'Windturbine']
# create data frame from weather data base (array)
weather_df = pd.DataFrame(data=weather_data, columns=['lat', 'lon', 'time', 'temperature', 'windspeed', 'radiation'])
# select relevant columns for photovoltaics and wind turbines
weather_PV = weather_df[['lat', 'lon', 'time', 'radiation']]
weather_WT = weather_df[['lat', 'lon', 'time', 'windspeed']]
# 2D interpolation over KW locations (latitude/longitude)
time_vec = weather_df['time'].unique()
PV_weather_2D = pd.DataFrame()
WT_weather_2D = pd.DataFrame()
PV_weather_2D_ft_df = PV_data.copy()
PV_weather_2D_ft_df['radiation'] = [None] * PV_weather_2D_ft_df['id'].__len__()
PV_weather_2D_ft_df['time'] = [None] * PV_weather_2D_ft_df['id'].__len__()
WT_weather_2D_ft_df = WT_data.copy()
WT_weather_2D_ft_df['windspeed'] = [None] * WT_weather_2D_ft_df['id'].__len__()
WT_weather_2D_ft_df['time'] = [None] * WT_weather_2D_ft_df['id'].__len__()
for tt in time_vec:
weather_PV_tt = weather_PV.loc[weather_PV['time'] == tt]
weather_WT_tt = weather_WT.loc[weather_WT['time'] == tt]
PV_weather_2D_ft = self.interpol_2d(list(weather_PV_tt['lat']), list(weather_PV_tt['lon']), list(weather_PV_tt['radiation']),
list(PV_data[KW_data_columns[2]]), list(PV_data[KW_data_columns[3]]))
WT_weather_2D_ft = self.interpol_2d(list(weather_WT_tt['lat']), list(weather_WT_tt['lon']), list(weather_WT_tt['windspeed']),
list(WT_data[KW_data_columns[2]]), list(WT_data[KW_data_columns[3]]))
PV_weather_2D_ft_df['radiation'] = PV_weather_2D_ft.tolist()
PV_weather_2D_ft_df['time'] = [tt] * PV_weather_2D_ft_df['id'].__len__()
WT_weather_2D_ft_df['windspeed'] = WT_weather_2D_ft.tolist()
WT_weather_2D_ft_df['time'] = [tt] * WT_weather_2D_ft_df['id'].__len__()
PV_weather_2D = PV_weather_2D.append(PV_weather_2D_ft_df)
WT_weather_2D = WT_weather_2D.append(WT_weather_2D_ft_df)
# 1D interpolation over time | |
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit test template for ONTAP Ansible module '''
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
from ansible_collections.netapp.ontap.tests.unit.compat import unittest
from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock, call
import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
from ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm \
import NetAppOntapSVM as svm_module # module under test
# Skip every test in this module when the NetApp ZAPI client library is absent.
if not netapp_utils.has_netapp_lib():
    pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
# REST API canned responses when mocking send_request.
# Each entry is a (http_status, response_body, error_message) triple.
SRR = {
    # common responses
    'is_rest': (200, dict(version=dict(generation=9, major=9, minor=1, full='dummy_9_9_1')), None),
    'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy_9_6_0')), None),
    'is_zapi': (400, {}, "Unreachable"),
    'empty_good': (200, {'num_records': 0}, None),
    'end_of_sequence': (500, None, "Unexpected call to send_request"),
    'generic_error': (400, None, "Expected error"),
    # module specific responses
    'svm_record': (200,
                   {'records': [{"uuid": "09e9fd5e-8ebd-11e9-b162-005056b39fe7",
                                 "name": "test_svm",
                                 "subtype": "default",
                                 "language": "c.utf_8",
                                 "aggregates": [{"name": "aggr_1",
                                                 "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"},
                                                {"name": "aggr_2",
                                                 "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"}],
                                 "comment": "new comment",
                                 "ipspace": {"name": "ansible_ipspace",
                                             "uuid": "2b760d31-8dfd-11e9-b162-005056b39fe7"},
                                 "snapshot_policy": {"uuid": "3b611707-8dfd-11e9-b162-005056b39fe7",
                                                     "name": "old_snapshot_policy"},
                                 "nfs": {"enabled": True, "allowed": True},
                                 "cifs": {"enabled": False},
                                 "iscsi": {"enabled": False},
                                 "fcp": {"enabled": False},
                                 "nvme": {"enabled": False}}]}, None),
    'svm_record_ap': (200,
                      {'records': [{"name": "test_svm",
                                    "aggregates": [{"name": "aggr_1",
                                                    "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"},
                                                   {"name": "aggr_2",
                                                    "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"}],
                                    "ipspace": {"name": "ansible_ipspace",
                                                "uuid": "2b760d31-8dfd-11e9-b162-005056b39fe7"},
                                    "snapshot_policy": {"uuid": "3b611707-8dfd-11e9-b162-005056b39fe7",
                                                        "name": "old_snapshot_policy"},
                                    "nfs": {"enabled": False},
                                    "cifs": {"enabled": True, "allowed": True},
                                    "iscsi": {"enabled": True, "allowed": True},
                                    "fcp": {"enabled": False},
                                    "nvme": {"enabled": False}}]}, None),
    'cli_record': (200,
                   {'records': [{"max_volumes": 100, "allowed_protocols": ['nfs', 'iscsi']}]}, None)
}
def set_module_args(args):
    """Serialize *args* so AnsibleModule picks them up during creation."""
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)  # pylint: disable=protected-access
class AnsibleExitJson(Exception):
    """Raised by the patched module.exit_json so tests can capture results."""
class AnsibleFailJson(Exception):
    """Raised by the patched module.fail_json so tests can capture failures."""
def exit_json(*args, **kwargs):  # pylint: disable=unused-argument
    """Patch target for exit_json; packages the return data into an exception."""
    kwargs.setdefault('changed', False)
    raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):  # pylint: disable=unused-argument
    """Patch target for fail_json; packages the failure data into an exception."""
    kwargs.update(failed=True)
    raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
    ''' mock server connection to ONTAP host '''

    def __init__(self, kind=None, data=None):
        ''' save arguments '''
        self.type = kind      # which canned response to build ('vserver' or None)
        self.params = data    # vserver attribute dict used by build_vserver_info
        self.xml_in = None    # last request received
        self.xml_out = None   # last response returned
    def invoke_successfully(self, xml, enable_tunneling):  # pylint: disable=unused-argument
        ''' mock invoke_successfully returning xml data '''
        self.xml_in = xml
        if self.type == 'vserver':
            xml = self.build_vserver_info(self.params)
        self.xml_out = xml
        return xml
    @staticmethod
    def build_vserver_info(vserver):
        ''' build xml data for vserver-info '''
        xml = netapp_utils.zapi.NaElement('xml')
        data = {'num-records': 1, 'attributes-list': {'vserver-info': {
            'vserver-name': vserver['name'],
            'ipspace': vserver['ipspace'],
            'root-volume': vserver['root_volume'],
            'root-volume-aggregate': vserver['root_volume_aggregate'],
            'language': vserver['language'],
            'comment': vserver['comment'],
            'snapshot-policy': vserver['snapshot_policy'],
            'vserver-subtype': vserver['subtype'],
            'allowed-protocols': [{'protocol': 'nfs'}, {'protocol': 'cifs'}],
            'aggr-list': [{'aggr-name': 'aggr_1'}, {'aggr-name': 'aggr_2'}],
        }}}
        xml.translate_struct(data)
        return xml
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
    def setUp(self):
        """Patch AnsibleModule's exit/fail handlers and build shared fixtures."""
        self.mock_module_helper = patch.multiple(basic.AnsibleModule,
                                                 exit_json=exit_json,
                                                 fail_json=fail_json)
        self.mock_module_helper.start()
        self.addCleanup(self.mock_module_helper.stop)
        self.server = MockONTAPConnection()
        # canned vserver attributes reused by mock_args() and the ZAPI mock
        self.mock_vserver = {
            'name': 'test_svm',
            'root_volume': 'ansible_vol',
            'root_volume_aggregate': 'ansible_aggr',
            'aggr_list': 'aggr_1,aggr_2',
            'ipspace': 'ansible_ipspace',
            'subtype': 'default',
            'language': 'c.utf_8',
            'snapshot_policy': 'old_snapshot_policy',
            'comment': 'new comment'
        }
    def mock_args(self, rest=False):
        """Return module arguments for a REST (rest=True) or ZAPI run."""
        if rest:
            return {'name': self.mock_vserver['name'],
                    'aggr_list': self.mock_vserver['aggr_list'],
                    'ipspace': self.mock_vserver['ipspace'],
                    'comment': self.mock_vserver['comment'],
                    'subtype': 'default',
                    'hostname': 'test',
                    'username': 'test_user',
                    'password': '<PASSWORD>!'}
        else:
            return {
                'name': self.mock_vserver['name'],
                'root_volume': self.mock_vserver['root_volume'],
                'root_volume_aggregate': self.mock_vserver['root_volume_aggregate'],
                'aggr_list': self.mock_vserver['aggr_list'],
                'ipspace': self.mock_vserver['ipspace'],
                'comment': self.mock_vserver['comment'],
                'subtype': 'default',
                'hostname': 'test',
                'username': 'test_user',
                'password': '<PASSWORD>!',
                'use_rest': 'never'
            }
    def get_vserver_mock_object(self, kind=None, data=None, cx_type='zapi'):
        """
        Helper method to return an na_ontap_svm module object
        :param kind: passes this param to MockONTAPConnection()
        :param data: passes this param to MockONTAPConnection()
        :param cx_type: 'zapi' wires up the mocked ZAPI connection
        :return: NetAppOntapSVM object
        """
        vserver_obj = svm_module()
        if cx_type == 'zapi':
            # disable the autosupport log call and the cluster connection
            vserver_obj.asup_log_for_cserver = Mock(return_value=None)
            vserver_obj.cluster = Mock()
            vserver_obj.cluster.invoke_successfully = Mock()
            if kind is None:
                vserver_obj.server = MockONTAPConnection()
            elif data is None:
                vserver_obj.server = MockONTAPConnection(kind='vserver', data=self.mock_vserver)
            else:
                vserver_obj.server = MockONTAPConnection(kind='vserver', data=data)
        return vserver_obj
    def test_module_fail_when_required_args_missing(self):
        ''' required arguments are reported as errors '''
        # module creation with empty args must call the patched fail_json
        with pytest.raises(AnsibleFailJson) as exc:
            set_module_args({})
            svm_module()
        print('Info: %s' % exc.value.args[0]['msg'])
    def test_get_nonexistent_vserver(self):
        ''' test that get_vserver() returns None when the vserver does not exist '''
        data = self.mock_args()
        set_module_args(data)
        result = self.get_vserver_mock_object().get_vserver()
        assert result is None
    def test_create_error_missing_name(self):
        ''' Test if create throws an error if name is not specified'''
        data = self.mock_args()
        del data['name']
        set_module_args(data)
        with pytest.raises(AnsibleFailJson) as exc:
            self.get_vserver_mock_object('vserver').create_vserver()
        msg = 'missing required arguments: name'
        assert exc.value.args[0]['msg'] == msg
    @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm.NetAppOntapSVM.create_vserver')
    def test_successful_create(self, create_vserver):
        '''Test successful create (create_vserver mocked out)'''
        data = self.mock_args()
        set_module_args(data)
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object().apply()
        assert exc.value.args[0]['changed']
        create_vserver.assert_called_with()
    def test_successful_create_zapi(self):
        '''Test successful create through the real ZAPI code path'''
        data = self.mock_args()
        set_module_args(data)
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object().apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm.NetAppOntapSVM.create_vserver')
    def test_create_idempotency(self, create_vserver):
        '''Test create idempotency: no change when the vserver already exists'''
        data = self.mock_args()
        set_module_args(data)
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object('vserver').apply()
        assert not exc.value.args[0]['changed']
        create_vserver.assert_not_called()
    def test_successful_delete(self):
        '''Test successful delete'''
        # state=absent against an existing record must report 'changed'
        self._modify_options_with_expected_change('state', 'absent')
    @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm.NetAppOntapSVM.delete_vserver')
    def test_delete_idempotency(self, delete_vserver):
        '''Test delete idempotency'''
        data = self.mock_args()
        data['state'] = 'absent'
        set_module_args(data)
        # no existing record in the mock -> nothing to delete, no change
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object().apply()
        assert not exc.value.args[0]['changed']
        delete_vserver.assert_not_called()
    @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm.NetAppOntapSVM.get_vserver')
    def test_successful_rename(self, get_vserver):
        '''Test successful rename'''
        data = self.mock_args()
        data['from_name'] = 'test_svm'
        data['name'] = 'test_new_svm'
        set_module_args(data)
        current = {
            'name': 'test_svm',
            'root_volume': 'ansible_vol',
            'root_volume_aggregate': 'ansible_aggr',
            'ipspace': 'ansible_ipspace',
            'subtype': 'default',
            'language': 'c.utf_8'
        }
        # first lookup (new name) finds nothing, second lookup (from_name)
        # returns the existing record -> module takes the rename path
        get_vserver.side_effect = [
            None,
            current
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object().apply()
        assert exc.value.args[0]['changed']
    def test_successful_modify_language(self):
        '''Test successful modify language'''
        self._modify_options_with_expected_change('language', 'c')
    def test_successful_modify_snapshot_policy(self):
        '''Test successful modify snapshot policy'''
        self._modify_options_with_expected_change(
            'snapshot_policy', 'new_snapshot_policy'
        )
    def test_successful_modify_allowed_protocols(self):
        '''Test successful modify allowed protocols'''
        self._modify_options_with_expected_change(
            'allowed_protocols', 'nvme,fcp'
        )
    def test_successful_modify_aggr_list(self):
        '''Test successful modify aggr-list'''
        self._modify_options_with_expected_change(
            'aggr_list', 'aggr_3,aggr_4'
        )
    def _modify_options_with_expected_change(self, arg0, arg1):
        '''helper: set option arg0 to value arg1 and assert apply() reports a change
        against a mock that already holds an existing 'vserver' record'''
        data = self.mock_args()
        data[arg0] = arg1
        set_module_args(data)
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object('vserver').apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_error(self, mock_request):
        '''a generic REST error on the first real call must surface as a module failure'''
        data = self.mock_args(rest=True)
        set_module_args(data)
        # responses consumed in order: REST detection, then the failing call
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['generic_error'],
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleFailJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['msg'] == 'calling: svm/svms: got %s.' % SRR['generic_error'][2]
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_error_unsupported_parm(self, mock_request):
        '''use_rest=Always plus a ZAPI-only option must fail with a clear message'''
        data = self.mock_args(rest=True)
        data['use_rest'] = 'Always'
        data['root_volume'] = 'not_supported_by_rest'
        set_module_args(data)
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleFailJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['msg'] == "REST API currently does not support 'root_volume'"
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successfully_create(self, mock_request):
        '''REST create: empty GET means no existing SVM, POST creates it'''
        data = self.mock_args(rest=True)
        data['state'] = 'present'
        set_module_args(data)
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['empty_good'],  # get
            SRR['empty_good'],  # post
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_create_idempotency(self, mock_request):
        '''REST create idempotency: GET returns an existing record -> no change'''
        data = self.mock_args(rest=True)
        data['state'] = 'present'
        set_module_args(data)
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['svm_record'],  # get
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert not exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successful_delete(self, mock_request):
        '''Test successful delete'''
        data = self.mock_args(rest=True)
        data['state'] = 'absent'
        set_module_args(data)
        # GET finds the record, DELETE removes it -> changed
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['svm_record'],  # get
            SRR['empty_good'],  # delete
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_delete_idempotency(self, mock_request):
        '''Test delete idempotency'''
        data = self.mock_args(rest=True)
        data['state'] = 'absent'
        set_module_args(data)
        # GET finds nothing -> nothing to delete, no change
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['empty_good'],  # get
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert not exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successful_rename(self, mock_request):
        '''Test successful rename'''
        data = self.mock_args(rest=True)
        data['from_name'] = 'test_svm'
        data['name'] = 'test_new_svm'
        set_module_args(data)
        # two GETs (new name, then from_name) followed by the PATCH doing the rename
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['svm_record'],  # get
            SRR['svm_record'],  # get
            SRR['empty_good'],  # patch
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successful_modify_language(self, mock_request):
        '''Test successful modify language'''
        data = self.mock_args(rest=True)
        data['language'] = 'c'
        set_module_args(data)
        # GET returns the record with a different language -> PATCH applies the change
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['svm_record'],  # get
            SRR['svm_record'],  # get
            SRR['empty_good'],  # patch
            SRR['end_of_sequence']
        ]
        with pytest.raises(AnsibleExitJson) as exc:
            self.get_vserver_mock_object(cx_type='rest').apply()
        assert exc.value.args[0]['changed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successful_get(self, mock_request):
        '''Test successful get'''
        data = self.mock_args(rest=True)
        data['language'] = 'c'
        set_module_args(data)
        # first GET returns the plain record, second returns the
        # 'all protocols' variant -- each call consumes one response
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['svm_record'],  # get
            SRR['svm_record_ap'],  # get AP
            SRR['end_of_sequence']
        ]
        na_ontap_svm_object = self.get_vserver_mock_object(cx_type='rest')
        current = na_ontap_svm_object.get_vserver()
        print(current)
        # plain record: nfs allowed, cifs disabled
        assert current['services']['nfs']['allowed']
        assert not current['services']['cifs']['enabled']
        current = na_ontap_svm_object.get_vserver()
        print(current)
        # all-protocols record: nfs disabled, cifs and iscsi allowed
        assert not current['services']['nfs']['enabled']
        assert current['services']['cifs']['allowed']
        assert current['services']['iscsi']['allowed']
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successfully_create_ignore_zapi_option(self, mock_request):
        '''ZAPI-only options are tolerated when ignore_rest_unsupported_options is set'''
        data = self.mock_args(rest=True)
        data['state'] = 'present'
        data['root_volume'] = 'whatever'
        data['aggr_list'] = '*'
        data['ignore_rest_unsupported_options'] = 'true'
        set_module_args(data)
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['empty_good'],  # get
            SRR['empty_good'],  # post
            SRR['end_of_sequence']
        ]
        module = self.get_vserver_mock_object(cx_type='rest')
        with pytest.raises(AnsibleExitJson) as exc:
            module.apply()
        assert exc.value.args[0]['changed']
        # module must have stayed on the REST code path despite ZAPI-only options
        assert module.use_rest
    @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
    def test_rest_successfully_create_with_service(self, mock_request):
        '''REST create with a services dict (nfs allowed and enabled)'''
        data = self.mock_args(rest=True)
        data['state'] = 'present'
        data['services'] = {'nfs': {'allowed': True, 'enabled': True}}
        set_module_args(data)
        mock_request.side_effect = [
            SRR['is_rest'],
            SRR['empty_good'],  # get
            SRR['empty_good'],  # post
            SRR['end_of_sequence']
        ]
        module = self.get_vserver_mock_object(cx_type='rest')
        with pytest.raises(AnsibleExitJson) as exc:
            module.apply()
        assert exc.value.args[0]['changed']
        assert module.use_rest
@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
def test_rest_successfully_modify_with_service(self, mock_request):
data = self.mock_args(rest=True)
data['state'] = 'present'
data['services'] = {'nfs': {'allowed': True, 'enabled': True}, 'fcp': {'allowed': True, 'enabled': True}}
set_module_args(data)
mock_request.side_effect | |
True
else:
end_point = (x, y)
if prior_rect:
graph.delete_figure(prior_rect)
if upper_back:
graph.delete_figure(upper_back)
graph.delete_figure(lower_back)
if None not in (start_point, end_point):
ymin = min(start_point[1], end_point[1])
ymax = max(start_point[1], end_point[1])
width = ymax - ymin
if background:
upper_back = graph.draw_rectangle((0, ymax + width // 2),
(imx, ymax + width // 2 + width), line_color='green')
lower_back = graph.draw_rectangle((0, ymin - width // 2 - width),
(imx, ymin - width // 2), line_color='green')
prior_rect = graph.draw_rectangle((0, ymin),
(imx, ymax), line_color='red')
elif event is not None and event.endswith('+UP'):
# The drawing has ended because mouse up
y0 = int(0.5 * (start_point[1] + end_point[1]))
info = f"selected lines from {ymin} to {ymax}"
winselect["info"].update(value=info)
start_point, end_point = None, None # enable grabbing a new rect
dragging = False
restext += info + '\n'
window['-RESULT3-'].update(regtext + restext)
elif event == '-APPLY_TS-':
if ymax == 0:
sg.PopupError('select rows first', keep_on_top=True)
else:
try:
tilt = float(values['-TILT-'])
slant = float(values['-SLANT-'])
image = im
center = (image.shape[1] / 2, y0)
warp_args = {'center': center,
'dx': slant,
'dy': tilt}
imtilt = tf.warp(image, _slant_tilt_mapping, map_args=warp_args,
order=1, mode='constant', cval=0)
fits_dict['M_TILT'] = str(tilt)
fits_dict['M_SLANT'] = str(slant)
fits_dict['M_ROWMIN'] = str(ymin)
fits_dict['M_ROWMAX'] = str(ymax)
fits_dict['COMMENT'] = str(fits_dict['COMMENT']) # [:20] # shorten to max size
restext += f'tilt = {tilt:8.4f}, slant = {slant:7.3f}' + '\n'
window['-RESULT3-'].update(regtext + restext, autoscroll=True)
except Exception as e:
sg.PopupError(f'bad values for tilt or slant, try again\n{e}',
title='apply_tilt_slant', keep_on_top=True)
write_fits_image(imtilt, '_st.fit', fits_dict, dist=dist)
image_data, actual_file = draw_scaled_image('_st' + '.fit', window['-R_IMAGE-'],
opt_dict, contr=contr, tmp_image=True)
graph.draw_image(data=image_data, location=(0, imy))
# graph.draw_rectangle((0, ymin), (imx, ymax), line_color='red')
for figure in (prior_rect, upper_back, lower_back):
if figure:
graph.BringFigureToFront(figure)
graph.update()
elif event == 'Ok':
write_fits_image(imtilt, outfile + 'st.fit', fits_dict, dist=dist)
image_data, actual_file = draw_scaled_image(outfile + 'st.fit', window['-R_IMAGE-'],
opt_dict, contr=contr, tmp_image=True)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
im = imtilt / np.max(imtilt) * 255 * contr
im = np.clip(im, 0.0, 255)
ios.imsave(outfile + 'st.png', np.flipud(im.astype(np.uint8)))
logging.info(f' file {outfile}.fit loaded for addition of rows')
logging.info(f"start = {fits_dict['M_STARTI']}, nim = {fits_dict['M_NIM']}")
logging.info(f'added from {ymin} to {ymax}, {(ymax - ymin + 1)} rows')
logging.info(f'tilt = {tilt:8.4f}, slant = {slant:7.3f}')
if len(imtilt.shape) == 3:
imbw = np.sum(imtilt, axis=2)
else:
imbw = imtilt
row_sum = np.sum(imbw[ymin:ymax, :], axis=0) # Object spectrum extraction and flat
i = np.arange(0, np.size(row_sum), 1) # create pixels vector
np.savetxt(outfile + '.dat', np.transpose([i, row_sum]), fmt='%6i %8.5f')
# background correction for reference star images
if background:
row_sum -= 0.5*np.sum(imbw[ymax + width // 2:ymax + 3 * width // 2, :], axis=0)
row_sum -= 0.5*np.sum(imbw[ymin - 3 * width // 2:ymin - width // 2, :], axis=0)
np.savetxt(outfile + '_bg.dat', np.transpose([i, row_sum]), fmt='%6i %8.5f')
fits_dict.pop('M_TILT', None)
fits_dict.pop('M_SLANT', None)
fits_dict.pop('M_ROWMIN', None)
fits_dict.pop('M_ROWMAX', None)
winselect_active = False
(x, y) = winselect.current_location()
wlocw = (x, y)
if idg: graph.delete_figure(idg)
winselect.close()
window['-SAVE_RAW-'].update(disabled=False, button_color=bc_enabled)
window['-CAL_R-'].update(disabled=False, button_color=bc_enabled)
window['-RADD-'].update(outfile)
elif event in ('Cancel', None):
# save original image with 'st' added
if event == 'Cancel':
write_fits_image(im_ori, outfile + 'st.fit', fits_dict, dist=dist)
if idg: graph.delete_figure(idg)
winselect_active = False
winselect.close()
return event, tilt, slant, wlocw
# -------------------------------------------------------------------
def select_calibration_line(x0, w, lam, name, lcal, ical, graph, table, abs_sign=1):
    """
    fits a parabola to peak of selected line, determines peak position, intensity and width
    results are appended to table together with wavelength of selected line
    :param x0: peak position in pixel
    :param w: half width of the search window around x0 (pixel)
    :param lam: wavelength of selected calibration line
    :param name: identifier for calibration line
    :param lcal: pixel array
    :param ical: intensity array
    :param graph: displayed window
    :param table: table with calibration results
    :param abs_sign: sign for emission = + 1, absorption = -1
    :return:
        coeff[1]: peak position of parabolic fit (0 if no fit was made)
        fwp: peak width (approximate)
        caltext: updated info ('' if no peak was found)
    """
    def parab(x, *p):
        # inverted parabola a*(1 - b*(x - mu)^2): maximum aa at x = mu
        aa, mu, b = p
        return aa * (1 - b * (x - mu) ** 2)
    coeff = [-1, 0, 0]
    fwp = 0
    # BUG FIX: caltext (and m) were unbound when the file was already calibrated
    # or the fit failed, causing an UnboundLocalError at the return statement
    caltext = ''
    m = 0
    lmin = lcal[0]
    ic = lcal  # pixel (x) axis
    lc = ical  # intensity (y) axis
    icleft = int(x0 - w)  # index of left border
    icright = int(x0 + w + 1)
    if lmin not in (0.0, 1.0):
        # a calibrated spectrum starts at its minimum wavelength, not at pixel 0 or 1
        sg.PopupError('- raw files only, load uncalibrated file!.......', title='Wavelength calibration',
                      line_width=60)
    else:
        try:
            lcr0 = abs_sign*lc[icleft:icright]
            lmax0 = np.max(lcr0)
            lmin0 = (lcr0[0] + lcr0[icright - icleft - 1]) / 2  # mean of window edges
            for i in range(icright - icleft):
                if (lcr0[i] - lmax0 + 1.e-5) > 0:
                    m = i
            peak0 = icleft + m  # index of max value, center for parabolic fit
            icr = ic[peak0 - 2:peak0 + 3]
            lcr = abs_sign*lc[peak0 - 2:peak0 + 3]
            # initial guess: height, position, curvature
            coeff[0] = lmax0
            coeff[1] = peak0
            coeff[2] = 1 / (w + 1)
            coeff, var_matrix = optimize.curve_fit(parab, icr, lcr, p0=coeff)
            # lcp_fit = parab(icr, *coeff) # function for display results
            x0p = coeff[1]
            fwp = np.sqrt(1.5 / abs(coeff[2]))
            if abs_sign < 0:  # Absorption line, correct peak height
                fwp *= np.sqrt(abs((coeff[0]-lmin0) / coeff[0]))
            # parabolic fit
            if debug:
                print(f'x0p ={x0p:8.2f} FWHMP={fwp:8.3f}')
            # draw the fitted parabola over the 5 fit points
            points = []
            l0: int
            for l0 in range(peak0 - 2, peak0 + 3):
                points.append((lcal[l0], abs_sign*parab(lcal[l0], *coeff)))
            for l0 in range(1, 5):
                graph.DrawLine(points[l0 - 1], points[l0], 'blue', 1)
            table.append((coeff[1], lam))
            info = f'{coeff[1]:8.2f} {fwp:6.2f} {lam:8.2f} {name}'
            caltext = info + '\n'
            logging.info(info)
        except Exception as e:
            sg.PopupError(f'No peak found, try again\n{e}', title='Select line')
    return coeff[1], fwp, caltext
# -------------------------------------------------------------------
def create_line_list_combo(m_linelist, window, combo=True):
    """
    shows values of table m_linelist in Combobox
    :param m_linelist: table with wavelength, line identifier (space separated)
    :param window: Combobox for selecting wavelength
    :param combo: if True: update Combo, else only create list
    :return: label_str, lam_calib
    """
    label_str = []
    lam_calib = []
    # BUG FIX: index0 was unbound when no entry with |wavelength| < 0.1 exists,
    # crashing the Combo update; default to the first entry instead
    index0 = 0
    try:
        with open(change_extension(m_linelist, '.txt')) as f:
            for i, x in enumerate(f):
                x = x.lstrip()
                (l, name) = x.split(' ', 1)
                lam_calib.append(x)
                label_str.append((float(l), name))
                if abs(float(l)) < 0.1:
                    index0 = i  # default index for list
        if combo:
            window['-LAMBDA-'].update(values=lam_calib, set_to_index=index0)
    except Exception as e:
        sg.PopupError(f'error with calibration lines {m_linelist}.txt\n{e}', keep_on_top=True)
    return label_str, lam_calib
# -------------------------------------------------------------------
def read_video_list(file):
    """
    reads list of latest converted video files from table
    :param file: table of video files, one name per line
    :return: list of video files (empty if the table does not exist)
    """
    video_list = []
    if path.exists(file):
        with open(file, 'r') as f:
            for line in f:
                # BUG FIX: strip only the trailing newline; the old line[:-1]
                # chopped the last character when the file has no final newline
                video_list.append(line.rstrip('\n'))
    return video_list
# -------------------------------------------------------------------
def update_video_list(file, avifile):
    """
    updates list of latest converted video files from table
    :param file: filename of video file table, e.g. 'videolist.txt'
    :param avifile: filename to be added to video file table
    """
    video_list = read_video_list(file)
    video_name, ext = path.splitext(path.basename(avifile))
    # for UFO Capture videos, replace M by S:
    if video_name[:1] == 'M':
        video_name = 'S' + video_name[1:]
    # BUG FIX: rebuild the list instead of removing entries while iterating
    # over it, which skips the element after each removal
    video_list = [v for v in video_list if v not in (video_name, ' ')]
    # keep the table bounded to video_list_length entries (including the new one)
    while len(video_list) >= video_list_length:
        del video_list[-1]
    video_list.insert(0, video_name)
    # BUG FIX: write back to the given table file, not hard-coded 'videolist.txt'
    with open(file, 'w') as f:
        for v in video_list:
            print(v, file=f)
# -------------------------------------------------------------------
def calibrate_raw_spectrum(rawspec, xcalib, lcalib, deg, c):
"""
calculates the fit for the calibration table with residuals
from the polynomial fit
and apply those to the pixels vector
:param rawspec: uncalibrated spectrum
:param xcalib: measured pixel positions
:param lcalib: calibration wavelengths
:param deg: degree of fit polynom
:param c: fit polynom
:return:
caldat: calibrated spectrum with extension .dat
cal2dat: calibrated spectrum with constant wavelength spacing with extension .dat
lmin, lmax: wavelength range of calibrated spectrum
caltext: calibration info
"""
np.set_printoptions(precision=4, suppress=False)
lcal, ical = np.loadtxt(rawspec, unpack=True, ndmin=2)
logging.info(f'polynom for fit lambda c: {c}')
i = np.arange(0, len(lcal), 1) # create pixels vector for uncalibrated image
lam = np.poly1d(c)(i)
res = np.poly1d(c)(xcalib) - lcalib
rms_x = np.sqrt(np.average(np.square(res)))
logging.info(' pixel lambda fit error')
caltext = ' Pixel lambda fit error\n'
for i in range(0, len(xcalib)):
logging.info(f'{xcalib[i]:10.2f},{lcalib[i]:10.2f},{(lcalib[i] + res[i]):10.2f}, {res[i]:10.4f}')
caltext += f'{xcalib[i]:9.2f} {lcalib[i]:9.2f} {(lcalib[i] + res[i]):9.2f} {res[i]:8.2f}\n'
logging.info(f'rms_x = {rms_x:8.4f}')
caldat = change_extension(rawspec, 'cal.dat')
np.savetxt(caldat, np.transpose([lam, ical]), fmt='%8.3f %8.5f')
logging.info(f'spectrum {caldat} saved')
caltext += f'polynom degree: {deg}\npolynom for fit lambda c: {c}\n'
caltext += f'rms_x = {rms_x:8.4f}\nspectrum {caldat} saved\n'
# for compatibility save *.dat with linear spacing
lmin = np.int(np.min(lam)) + 1
lmax = np.int(np.max(lam)) - 1
dell = abs(int(5 * c[deg - 1]) / 10)
# wavelength spacing of interpolated linear array, about double of original
llin = np.arange(lmin, lmax, | |
import collections
from itertools import dropwhile
import logging
from typing import Iterator, Optional
from .plugin import SimStatePlugin
from ..errors import AngrError, SimEmptyCallStackError
l = logging.getLogger(name=__name__)
class CallStack(SimStatePlugin):
    """
    Stores the address of the function you're in and the value of SP
    at the VERY BOTTOM of the stack, i.e. points to the return address.

    A CallStack instance is one frame; frames form a singly-linked list
    through ``next``, with ``self`` as the most recent (top) frame.
    """
    def __init__(self, call_site_addr=0, func_addr=0, stack_ptr=0, ret_addr=0, jumpkind='Ijk_Call', next_frame: Optional['CallStack'] = None,
                 invoke_return_variable=None):
        """
        :param call_site_addr: Address of the call site that created this frame.
        :param func_addr:      Address of the function this frame belongs to.
        :param stack_ptr:      Stack pointer value at function entry.
        :param ret_addr:       Address this frame will return to.
        :param jumpkind:       VEX-style jump kind of the call (e.g. 'Ijk_Call').
        :param next_frame:     The next (older) frame, or None for the bottom frame.
        :param invoke_return_variable: Variable receiving the call's return value, if any.
        """
        super(CallStack, self).__init__()
        self.state = None
        self.call_site_addr = call_site_addr
        self.func_addr = func_addr
        self.stack_ptr = stack_ptr
        self.ret_addr = ret_addr
        self.jumpkind = jumpkind
        self.next = next_frame
        self.invoke_return_variable = invoke_return_variable
        self.block_counter = collections.Counter()
        self.procedure_data = None
        self.locals = {}

    # deprecated as SHIT
    @property
    def call_target(self):
        raise Exception("FIX ME")

    @property
    def return_target(self):
        raise Exception("FIX ME")

    @property
    def stack_pointer(self):
        raise Exception("FIX ME")

    #
    # Public methods
    #

    @SimStatePlugin.memo
    def copy(self, memo, with_tail=True):  # pylint: disable=unused-argument,arguments-differ
        """
        Copy this frame. With ``with_tail`` the older frames are shared (not
        copied); without it the copy is a single detached frame.
        """
        n = CallStack(
            call_site_addr=self.call_site_addr,
            func_addr=self.func_addr,
            stack_ptr=self.stack_ptr,
            ret_addr=self.ret_addr,
            jumpkind=self.jumpkind,
            next_frame=self.next if with_tail else None,
            invoke_return_variable=self.invoke_return_variable)
        n.block_counter = collections.Counter(self.block_counter)
        n.procedure_data = self.procedure_data
        n.locals = dict(self.locals)
        return n

    def set_state(self, state):
        """
        Attach this plugin to a state, initializing the stack pointer to the
        architecture's maximum value if it has not been set yet.
        """
        super(CallStack, self).set_state(state)
        # make the stack pointer as large as possible as soon as we know how large that actually is
        if self.stack_ptr == 0:
            try:
                bits = state.arch.registers['sp'][1] * state.arch.byte_width
            except KeyError:
                bits = state.arch.bits
            self.stack_ptr = 2**bits - 1

    def merge(self, others, merge_conditions, common_ancestor=None):  # pylint: disable=unused-argument
        """Merging callstacks is not supported; only warn on disparity."""
        for o in others:
            if o != self:
                l.error("Trying to merge states with disparate callstacks!")

    def widen(self, others):  # pylint: disable=unused-argument
        """Widening is not implemented for callstacks."""
        l.warning("Widening not implemented for callstacks")

    def __iter__(self) -> Iterator['CallStack']:
        """
        Iterate through the callstack, from top to bottom
        (most recent first).
        """
        i = self
        while i is not None:
            yield i
            i = i.next

    def __getitem__(self, k):
        """
        Returns the CallStack at index k, indexing from the top of the stack.
        """
        orig_k = k
        for i in self:
            if k == 0:
                return i
            k -= 1
        raise IndexError(orig_k)

    def __len__(self):
        """
        Get how many frames there are in the current call stack.

        :return: Number of frames
        :rtype: int
        """
        o = 0
        for _ in self:
            o += 1
        return o

    def __repr__(self):
        """
        Get a string representation.

        :return: A printable representation of the CallStack object
        :rtype: str
        """
        return "<CallStack (depth %d)>" % len(self)

    def __str__(self):
        return "Backtrace:\n%s" % "\n".join("Frame %d: %#x => %#x, sp = %#x" % (i, f.call_site_addr, f.func_addr, f.stack_ptr) for i, f in enumerate(self))

    def __eq__(self, other):
        # two stacks are equal when every frame matches on function address,
        # stack pointer and return address (call_site_addr is ignored, matching __hash__)
        if not isinstance(other, CallStack):
            return False
        if self.func_addr != other.func_addr or self.stack_ptr != other.stack_ptr or self.ret_addr != other.ret_addr:
            return False
        return self.next == other.next

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return hash(tuple((c.func_addr, c.stack_ptr, c.ret_addr) for c in self))

    #
    # Properties
    #

    @property
    def current_function_address(self):
        """
        Address of the current function.

        :return: the address of the function
        :rtype: int
        """
        return self.func_addr

    @current_function_address.setter
    def current_function_address(self, func_addr):
        """
        Set the address of the current function. Note that we must make a copy of the CallStackFrame as CallStackFrame
        is considered to be immutable.

        :param int func_addr: The function address.
        :return: None
        """
        self.func_addr = func_addr

    @property
    def current_stack_pointer(self):
        """
        Get the value of the stack pointer.

        :return: Value of the stack pointer
        :rtype: int
        """
        return self.stack_ptr

    @property
    def current_return_target(self):
        """
        Get the return target.

        :return: The address of return target.
        :rtype: int
        """
        return self.ret_addr

    #
    # Static methods
    #

    @staticmethod
    def stack_suffix_to_string(stack_suffix):
        """
        Convert a stack suffix to a human-readable string representation.

        :param tuple stack_suffix: The stack suffix.
        :return: A string representation
        :rtype: str
        """
        s = "[" + ",".join([("0x%x" % i) if i is not None else "Unspecified" for i in stack_suffix]) + "]"
        return s

    @staticmethod
    def _rfind(lst, item):
        """
        Reverse look-up.

        :param list lst: The list to look up in.
        :param item: The item to look for.
        :return: Offset of the item if found. A ValueError is raised if the item is not in the list.
        :rtype: int
        """
        # BUG FIX: the previous implementation called dropwhile() with an int as
        # its iterable and could never return an offset; do a reverse scan instead.
        for i in reversed(range(len(lst))):
            if lst[i] == item:
                return i
        raise ValueError("%s not in the list" % item)

    @property
    def top(self):
        """
        Returns the element at the top of the callstack without removing it.

        :return: A CallStack.
        """
        return self

    #
    # Public methods
    #

    def push(self, cf):
        """
        Push the frame cf onto the stack. Return the new stack.
        """
        cf.next = self
        if self.state is not None:
            self.state.register_plugin('callstack', cf)
            self.state.history.recent_stack_actions.append(CallStackAction(
                hash(cf), len(cf), 'push', callframe=cf.copy({}, with_tail=False)
            ))
        return cf

    def pop(self):
        """
        Pop the top frame from the stack. Return the new stack.
        """
        if self.next is None:
            raise SimEmptyCallStackError("Cannot pop a frame from an empty call stack.")
        new_list = self.next.copy({})
        if self.state is not None:
            self.state.register_plugin('callstack', new_list)
            self.state.history.recent_stack_actions.append(CallStackAction(
                hash(new_list), len(new_list), 'pop', ret_site_addr=self.ret_addr
            ))
        return new_list

    def call(self, callsite_addr, addr, retn_target=None, stack_pointer=None):
        """
        Push a stack frame into the call stack. This method is called when calling a function in CFG recovery.

        :param int callsite_addr: Address of the call site
        :param int addr: Address of the call target
        :param int or None retn_target: Address of the return target
        :param int stack_pointer: Value of the stack pointer
        :return: The new call stack (with the pushed frame on top)
        """
        frame = CallStack(call_site_addr=callsite_addr, func_addr=addr, ret_addr=retn_target,
                          stack_ptr=stack_pointer)
        return self.push(frame)

    def ret(self, retn_target=None):
        """
        Pop one or many call frames from the stack. This method is called when returning from a function in CFG
        recovery.

        :param int retn_target: The target to return to.
        :return: The new call stack.
        """
        if retn_target is None:
            return self.pop()
        # We may want to return to several levels up there, not only a
        # single stack frame
        return_target_index = self._find_return_target(retn_target)
        if return_target_index is not None:
            o = self
            while return_target_index >= 0:
                o = o.pop()
                return_target_index -= 1
            return o
        l.warning("Returning to an unexpected address %#x", retn_target)
        return self
        # For Debugging
        # raise Exception()
        # There are cases especially in ARM where return is used as a jump
        # So we don't pop anything out

    def dbg_repr(self):
        """
        Debugging representation of this CallStack object.

        :return: Details of this CalLStack
        :rtype: str
        """
        stack = [ ]
        for i, frame in enumerate(self):
            # BUG FIX: use frame.ret_addr; frame.return_target is a deprecated
            # property that unconditionally raises, so this always crashed
            s = "%d | %s -> %s, returning to %s" % (
                i,
                "None" if frame.call_site_addr is None else "%#x" % frame.call_site_addr,
                "None" if frame.func_addr is None else "%#x" % frame.func_addr,
                "None" if frame.ret_addr is None else "%#x" % frame.ret_addr,
            )
            stack.append(s)
        return "\n".join(stack)

    def stack_suffix(self, context_sensitivity_level):
        """
        Generate the stack suffix. A stack suffix can be used as the key to a SimRun in CFG recovery.

        :param int context_sensitivity_level: Level of context sensitivity.
        :return: A tuple of stack suffix.
        :rtype: tuple
        """
        ret = ()
        for frame in self:
            if len(ret) >= context_sensitivity_level*2:
                break
            ret = (frame.call_site_addr, frame.func_addr) + ret
        # pad with (None, None) pairs up to the requested context depth
        while len(ret) < context_sensitivity_level*2:
            ret = (None, None) + ret
        return ret

    #
    # Private methods
    #

    def _find_return_target(self, target):
        """
        Check if the return target exists in the stack, and return the index if exists. We always search from the most
        recent call stack frame since the most recent frame has a higher chance to be hit in normal CFG recovery.

        :param int target: Target of the return.
        :return: The index of the object, or None if not found.
        :rtype: int
        """
        for i, frame in enumerate(self):
            if frame.ret_addr == target:
                return i
        return None
class CallStackAction(object):
"""
Used in callstack backtrace, which is a history of callstacks along a path, to record individual actions occurred
each time the callstack is changed.
"""
def __init__(self, callstack_hash, callstack_depth, action, callframe=None, ret_site_addr=None):
self.callstack_hash = callstack_hash
self.callstack_depth = callstack_depth
self.action = action
if action not in ('push', 'pop'):
raise AngrError('Unsupported action string "%s".' % action)
self.callframe = callframe
self.ret_site_addr = ret_site_addr
if action == 'push' and self.callframe is None:
raise AngrError('callframe must be specified when action is "push".')
elif action == 'pop' and self.callframe is not None:
raise AngrError('callframe must not be specified when action is "pop".')
def __repr__(self):
if self.action == 'push':
return "<CallStackAction push with %s>" % self.callframe
else: # pop
return "<CallStackAction pop, | |
<reponame>RisjioMaujio/Portal-Of-Programs<filename>Webportal/webportal.py
import webbrowser
import datetime
import os
from tabulate import *
import csv
import pandas as pd
import sys
import os
# OS-appropriate path separator used for locating asset files throughout the script.
navigator_symbol = "/"
if os.name == "nt":
    navigator_symbol = "\\"

# Show the startup banner; 'with' guarantees the handle is closed even if
# reading fails (the original leaked the file object on error).
with open(r"assets" + navigator_symbol + "website.txt", "r") as display:
    print(display.read())
def _show_asset(name):
    """Print the contents of an asset text file (banner / menu art)."""
    with open(r"assets" + navigator_symbol + name, "r") as f:
        print(f.read())


def def_main():
    """Main menu loop: show the category menu and dispatch to the chosen section."""
    _show_asset("website.txt")
    # category name -> (menu asset file, handler function)
    categories = {
        "Search": ("search.txt", search),
        "Social": ("social.txt", social),
        "Gservice": ("Gservices.txt", gservices),
        "Mservice": ("Mservices.txt", mservices),
        "Entertainment": ("entertainment.txt", entertainment),
        "Shooping": ("shooping.txt", shooping),
        "Fooding": ("fooding.txt", fooding),
        "Travelling": ("travelling.txt", travelling),
    }
    while True:
        _show_asset("category.txt")
        end_option = str(input("\tPlease Type The Category of Website Which You Want to Visit : ")).capitalize()
        print("\n" * 3)
        if end_option in categories:
            asset, handler = categories[end_option]
            _show_asset(asset)
            handler()
            # CONSISTENCY FIX: the Shooping/Fooding/Travelling branches were
            # missing the break every other branch had (copy-paste defect)
            break
        else:
            print("\n" * 8 +"\t\tYou Have Entered { " + str(end_option) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
def search():
    """Search-engine submenu.

    Prompts for an engine name, then for a function ("Home" opens the
    engine's home URL, "Query" asks for a term and opens the engine's
    search URL with the term appended).  Invalid input re-prompts.
    After a successful open the original code recursed into
    ``def_main()`` and then fell back into this loop — that flow is
    preserved.

    Fixes over the original: the four near-identical engine branches are
    collapsed into one data-driven branch, and the option menu file is
    closed via ``with`` (it was leaked before).  All prompts and URLs are
    unchanged.
    """
    # Engine name -> (home URL, query URL prefix).
    engines = {
        "Google": ("https://www.google.com/?#q=",
                   "https://www.google.com/?#q="),
        "Bing": ("https://www.bing.com/search?q=",
                 "https://www.bing.com/search?q="),
        "Yahoo": ("https://in.yahoo.com/",
                  "https://in.search.yahoo.com/search?p="),
        "Ask": ("https://www.ask.com/",
                "https://www.ask.com/web?o=0&l=dir&qo=homepageSearchBox&q="),
    }
    while True:
        option = str(input("\t Please Type Your Desired Search Engine Name : ")).capitalize()
        print("\n" * 4)
        if option in engines:
            home_url, query_url = engines[option]
            print("\tOk You Have Selected " + str(option) + " as Your Search Engine")
            print("\n" * 4)
            print("\t Now Select The Function to be Performed on " + str(option) + " ")
            print("\n" * 4)
            with open(r"assets" + navigator_symbol + "option.txt", "r") as f:
                print(f.read())
            fun = str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
            if fun == "Home":
                print("\n" * 4)
                print("\t \t \t Home Page of " + str(option) + " is Opening ......")
                webbrowser.open(home_url)
                print("\n" * 4)
                print("\t\t\tReturning To Category of Website Selection ")
                def_main()
            elif fun == "Query":
                print("\n" * 4)
                commmand = str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
                print("\n" * 4)
                print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
                webbrowser.open(query_url + commmand)
                print("\n" * 4)
                print("\t\t\tReturning To Category of Website Selection ")
                def_main()
            else:
                print("\n" * 8 + "\t\tYou Have Entered { " + str(fun) + " } Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
        else:
            print("\n" * 8 + "\t\tYou Have Entered { " + str(option) + " } Which is Inappropriate. Please Try Again ;) " + "\n" * 8)
def social():
while True:
option=str(input("\t Please Type Your Desired Social Website Name : ")).capitalize()
print("\n" * 4)
if(option=="Facebook"):
print("\tOk You Have Selected " + str(option) + " as Your Social Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.facebook.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.bing.com/search?q=' + commmand+'%20site:facebook.com&FORM=QBDCRD')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Instagram"):
print("\tOk You Have Selected " + str(option) + " as Your Social Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.instgram.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.bing.com/search?q=' + commmand+'%20site:instagram.com&FORM=QBDCRD')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inappropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Twitter"):
print("\tOk You Have Selected " + str(option) + " as Your Social Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.twitter.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://twitter.com/search?q='+commmand+'&src=typed_query')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inapprropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Blog"):
print("\tOk You Have Selected " + str(option) + " as Your Social Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.blogger.com/about/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " on " + str(option) + " is Now Searching.......")
webbrowser.open('https://www.searchblogspot.com/search?q=' + commmand)
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
else:
print("\n" * 8 +"\t\tYou Have Entered { " + str(fun) + " } "" Which is Inapprropriate. Please Try Again ;) "+"\n" * 8)
elif(option=="Pinterest"):
print("\tOk You Have Selected " + str(option) + " as Your Social Website")
print("\n" * 4)
print("\t Now Select The Function to be Performed on " + str(option) + " ")
print("\n"*4)
dis=open(r"assets"+navigator_symbol+"option.txt","r")
rp=dis.read()
print(rp)
fun=str(input("\t Please Type You Desired Function To be Perform in " + str(option) + " : ")).capitalize()
if(fun=="Home"):
print("\n" * 4)
print("\t \t \t Home Page of " + str(option) + " is Opening ......")
webbrowser.open('https://www.pinterest.com/')
print("\n" * 4)
print("\t\t\tReturning To Category of Website Selection ")
def_main()
elif(fun=="Query"):
print("\n" * 4)
commmand=str(input("\t Please Type You Desired Query To Seacrh On " + str(option) + " : ")).capitalize()
print("\n" * 4)
print("\t\t\t\t" + str(commmand) + " | |
##
# filegdb.py
#
# Description: Read file geodatabase, create tables for subtypes and domains
# Prepare sql scripts for indexes and foreign key constraints
# Author: <NAME>
# Copyright: Cartologic 2017-2020
#
##
from os import getcwd, mkdir, path
from ruamel.yaml import YAML
from slugify import slugify
import arcpy
# Module-wide ruamel.yaml parser instance, used by FileGDB.parse_yaml().
yaml = YAML()
class FileGDB:
    def __init__(self, workspace, a_srs):
        """Wrap a file geodatabase on disk.

        workspace -- path of the .gdb folder, resolved relative to the CWD
        a_srs     -- spatial reference identifier (only displayed by info()
                     in the code visible here)
        """
        self.workspace = workspace
        self.a_srs = a_srs
        # Absolute paths, filled in by init_paths().
        self.workspace_path = ""
        self.sqlfolder_path = ""
        self.yamlfile_path = ""
        # Schema mapping loaded from the companion YAML file by parse_yaml().
        self.schemas = []
        self.feature_datasets = {}
        self.feature_classes = {}
        self.tables = {}
        # Accumulators for generated index / constraint SQL (managed by
        # methods outside this view — presumably create_index and
        # create_foreign_key_constraint; confirm).
        self.indexes = []
        self.constraints = []
        # Order matters: derive paths first, then point arcpy at the
        # workspace, then read (or create) the YAML mapping.
        self.init_paths()
        self.setenv()
        self.parse_yaml()
# -------------------------------------------------------------------------------
# Initialize file geodatabase environment
#
def init_paths(self):
# workspace path
workspace_path = path.join(getcwd(), self.workspace)
workspace_dir = path.dirname(workspace_path)
workspace_base = path.basename(workspace_path)
# sqlfolder, yamlfile path
sqlfolder_base = "{}.sql".format(workspace_base)
yamlfile_base = "{}.yml".format(workspace_base)
sqlfolder_path = path.join(workspace_dir, sqlfolder_base)
yamlfile_path = path.join(workspace_dir, yamlfile_base)
# set current object instance props
self.workspace_path = workspace_path
self.sqlfolder_path = sqlfolder_path
self.yamlfile_path = yamlfile_path
def info(self):
print("\nFileGDB Info:")
print(" Workspace: {0} ({1})".format(self.workspace_path, self.a_srs))
print(" Sqlfolder: {0}".format(self.sqlfolder_path))
print(" Yamlfile: {0}".format(self.yamlfile_path))
    def setenv(self):
        """Point arcpy at this geodatabase and allow overwriting outputs."""
        print("\nSetting arcpy environment ...")
        # arcpy resolves the (possibly relative) workspace against the CWD,
        # matching the path logic in init_paths().
        arcpy.env.workspace = self.workspace
        arcpy.env.overwriteOutput = True
# -------------------------------------------------------------------------------
# Parse the yaml file and map data to schemas
#
def parse_yaml(self):
# parse yaml file and map datasets, feature classes, tables to schemas
if not path.exists(self.yamlfile_path):
print("\nCreating default YAML file ...")
self.create_yaml()
with open(self.yamlfile_path, 'r') as ymlfile:
data_map = yaml.load(ymlfile)
for key_type, value_items in data_map.items():
if (key_type == "Schemas"):
self.schemas = value_items
elif (key_type == "FeatureDatasets"):
self.feature_datasets = value_items
elif (key_type == "FeatureClasses"):
self.feature_classes = value_items
elif (key_type == "Tables"):
self.tables = value_items
# lookup_tables is a default schema and it will host subtypes, domains
if 'lookup_tables' not in self.schemas:
self.schemas.append('lookup_tables')
# -------------------------------------------------------------------------------
# Open sql files
#
def open_files(self):
print("\nInitializing sql files ...")
if not path.exists(self.sqlfolder_path):
mkdir(self.sqlfolder_path)
self.f_create_schemas = open(
path.join(self.sqlfolder_path, "create_schemas.sql"), "w")
self.f_split_schemas = open(
path.join(self.sqlfolder_path, "split_schemas.sql"), "w")
self.f_create_indexes = open(
path.join(self.sqlfolder_path, "create_indexes.sql"), "w")
self.f_create_constraints = open(
path.join(self.sqlfolder_path, "create_constraints.sql"), "w")
self.f_find_data_errors = open(
path.join(self.sqlfolder_path, "find_data_errors.sql"), "w")
self.f_fix_data_errors = open(
path.join(self.sqlfolder_path, "fix_data_errors.sql"), "w")
self.write_headers()
# -------------------------------------------------------------------------------
# close sql files
#
def close_files(self):
print("\nClosing sql files ...")
self.f_create_schemas.close()
self.f_split_schemas.close()
self.f_create_indexes.close()
self.f_create_constraints.close()
self.f_find_data_errors.close()
self.f_fix_data_errors.close()
# -------------------------------------------------------------------------------
# Process domains
# Convert domains to tables
#
def process_domains(self):
print("\nProcessing domains ...")
self.write_it(self.f_create_indexes, "\n-- Domains")
self.write_it(self.f_create_constraints, "\n-- Domains")
self.write_it(self.f_split_schemas, "\n-- Domains")
# create table for each domain
domains_list = arcpy.da.ListDomains(self.workspace)
for domain in domains_list:
self.create_domain_table(domain)
# create fk constraints for data tables referencing domain tables
tables_list = arcpy.ListTables()
tables_list.sort()
for table in tables_list:
self.create_constraints_referencing_domains(table)
# create fk constraints for feature classes referencing domain tables
# stand-alone feature classes
fc_list = arcpy.ListFeatureClasses("*", "")
fc_list.sort()
for fc in fc_list:
self.create_constraints_referencing_domains(fc)
# feature classes in feature datasets
fds_list = arcpy.ListDatasets("*", "Feature")
fds_list.sort()
for fds in fds_list:
fc_list = arcpy.ListFeatureClasses("*", "", fds)
fc_list.sort()
for fc in fc_list:
self.create_constraints_referencing_domains(fc)
# -------------------------------------------------------------------------------
# Create domain table (list of values)
#
def create_domain_table(self, domain):
domain_name = slugify(domain.name, separator='_', lowercase=False)
domain_table = "{}_lut".format(domain_name)
domain_field = "Code"
domain_field_desc = "Description"
print(" {}".format(domain_table))
if not arcpy.Exists(domain_table):
arcpy.DomainToTable_management(
self.workspace, domain.name, domain_table, domain_field, domain_field_desc)
# create index
self.create_index(domain_table, domain_field)
self.split_schemas(domain_table, "lookup_tables")
# -------------------------------------------------------------------------------
# Create foraign key constraints to tables referencing domain tables
#
def create_constraints_referencing_domains(self, layer):
dmcode = "Code"
dmcode_desc = "Description"
subtypes = arcpy.da.ListSubtypes(layer)
for stcode, v1 in subtypes.items():
for k2, v2 in v1.items():
if k2 == 'Default':
stdefault = v2
elif k2 == 'Name':
stname = v2
elif k2 == 'SubtypeField':
if v2 != '':
stfield = v2
sttable = "{0}_{1}_lut".format(layer, stfield)
else:
stfield = '--'
sttable = '--'
elif k2 == 'FieldValues':
for dmfield, v3 in v2.items():
if v3[1] is not None:
dmname = slugify(
v3[1].name, separator='_', lowercase=False)
dmtable = dmname + '_lut'
self.create_foreign_key_constraint(
layer, dmfield, dmtable, dmcode)
# -------------------------------------------------------------------------------
# Process subtypes
# Convert subtypes to tables
#
def process_subtypes(self):
print("\nProcessing subtypes ...")
self.write_it(self.f_create_indexes, "\n-- Subtypes")
self.write_it(self.f_create_constraints, "\n-- Subtypes")
self.write_it(self.f_split_schemas, "\n-- Subtypes")
# create subtypes table for tables
tables_list = arcpy.ListTables()
tables_list.sort()
for table in tables_list:
self.create_subtypes_table(table)
# create subtypes table for stand-alone featureclasses
fc_list = arcpy.ListFeatureClasses("*", "")
fc_list.sort()
for fc in fc_list:
self.create_subtypes_table(fc)
# create subtypes table for featureclasses in datasets
fds_list = arcpy.ListDatasets("*", "Feature")
fds_list.sort()
for fds in fds_list:
fc_list = arcpy.ListFeatureClasses("*", "", fds)
fc_list.sort()
for fc in fc_list:
self.create_subtypes_table(fc)
# -------------------------------------------------------------------------------
# Create subtypes table for layer/field and insert records (list of values)
#
    def create_subtypes_table(self, layer):
        """If *layer* defines subtypes, materialize them as a
        ``<layer>_<field>_lut`` table (code + Description) and register the
        index, FK constraint and schema placement for it.
        """
        subtypes_dict = arcpy.da.ListSubtypes(layer)
        # code -> subtype field name / display name.  All entries share the
        # same 'SubtypeField', so only the first is inspected below.
        subtype_fields = {key: value['SubtypeField']
                          for key, value in subtypes_dict.items()}
        subtype_values = {key: value['Name']
                          for key, value in subtypes_dict.items()}
        key, field = list(subtype_fields.items())[0]
        # An empty field name means the layer has no subtypes at all.
        if len(field) > 0:
            # Find the subtype field's type so the lookup column matches it.
            field_type = None
            for f in arcpy.ListFields(layer):
                if f.name == field:
                    field_type = f.type
            # Retry case-insensitively if the exact name was not found.
            # NOTE(review): if still not found, field_type stays None and is
            # passed to AddField_management below — presumably an error in
            # that case; confirm.
            if field_type == None:
                field = field.upper()
                for f in arcpy.ListFields(layer):
                    if f.name.upper() == field:
                        field_type = f.type
            subtypes_table = "{0}_{1}_lut".format(layer, field)
            subtypes_table = slugify(
                subtypes_table, separator='_', lowercase=False)
            print(" {}".format(subtypes_table))
            if not arcpy.Exists(subtypes_table):
                # Create the lookup table and fill it with one row per code.
                arcpy.CreateTable_management(self.workspace, subtypes_table)
                arcpy.AddField_management(subtypes_table, field, field_type)
                arcpy.AddField_management(
                    subtypes_table, "Description", "String")
                # "*" targets all fields: OID, code, description — hence the
                # explicit running oid starting at 1.
                cur = arcpy.da.InsertCursor(subtypes_table, "*")
                oid = 1
                for code, desc in subtype_values.items():
                    # print(" {0} {1}".format(code, desc))
                    cur.insertRow([oid, code, desc])
                    oid += 1
                # Release the cursor (and its lock on the table) promptly.
                del cur
            self.create_index(subtypes_table, field)
            self.create_foreign_key_constraint(
                layer, field, subtypes_table, field)
            self.split_schemas(subtypes_table, "lookup_tables")
# -------------------------------------------------------------------------------
# Process relations
# Create necessary indexes and foreign key constraints to support each relation
#
    def process_relations(self):
        """For every (non-attachment) relationship class in the workspace,
        emit the supporting index and FK constraint plus data-error
        find/fix SQL for the origin/destination key pair.
        """
        print("\nProcessing relations ...")
        self.write_it(self.f_create_indexes,
                      "\n-- Relations (tables and feature classes)")
        self.write_it(self.f_create_constraints,
                      "\n-- Relations (tables and feature classes)")
        relClassSet = self.get_relationship_classes()
        for relClass in relClassSet:
            rel = arcpy.Describe(relClass)
            # Attachment relationships are managed by ArcGIS itself; skip.
            if rel.isAttachmentRelationship:
                continue
            rel_origin_table = rel.originClassNames[0]
            rel_destination_table = rel.destinationClassNames[0]
            # originClassKeys: [0] = origin primary key, [1] = foreign key
            # in the destination table.
            rel_primary_key = rel.originClassKeys[0][0]
            rel_foreign_key = rel.originClassKeys[1][0]
            # Fall back to upper-case key names when the reported casing
            # does not match the actual field names.
            if rel_primary_key not in [field.name for field in arcpy.ListFields(rel_origin_table)]:
                rel_primary_key = rel.originClassKeys[0][0].upper()
            if rel_foreign_key not in [field.name for field in arcpy.ListFields(rel_destination_table)]:
                rel_foreign_key = rel.originClassKeys[1][0].upper()
            print(" {}".format(rel.name))
            # print(" {0} -> {1}".format(rel_origin_table, rel_destination_table))
            self.create_index(rel_origin_table, rel_primary_key)
            self.create_foreign_key_constraint(
                rel_destination_table, rel_foreign_key, rel_origin_table, rel_primary_key)
            # find_data_errors.sql: count orphaned destination rows (rows
            # whose FK value has no matching origin row).
            str_data_errors_fk = '\\echo {0} ({1}) -> {2} ({3});'.format(
                rel_destination_table, rel_foreign_key, rel_origin_table, rel_primary_key)
            self.write_it(self.f_find_data_errors, str_data_errors_fk)
            str_data_errors = 'SELECT COUNT(*) FROM "{0}" dest WHERE NOT EXISTS (SELECT 1 FROM "{1}" orig WHERE dest."{2}" = orig."{3}");'
            str_data_errors = str_data_errors.format(
                rel_destination_table, rel_origin_table, rel_foreign_key, rel_primary_key)
            self.write_it(self.f_find_data_errors, str_data_errors)
            # fix_data_errors.sql: insert the missing origin key values.
            # NOTE(review): the join condition tests master.id IS NULL, i.e.
            # it assumes every origin table has an "id" column — confirm
            # against the migrated schema.
            str_fix_errors_1 = 'INSERT INTO "{0}" ("{1}")'.format(
                rel_origin_table, rel_primary_key)
            str_fix_errors_2 = 'SELECT DISTINCT detail."{0}" \n FROM "{1}" AS detail \n LEFT JOIN "{2}" AS master ON detail."{3}" = master."{4}" \n WHERE master.id IS NULL;\n'
            str_fix_errors_2 = str_fix_errors_2.format(
                rel_foreign_key, rel_destination_table, rel_origin_table, rel_foreign_key, rel_primary_key)
            self.write_it(self.f_fix_data_errors, str_fix_errors_1)
            self.write_it(self.f_fix_data_errors, str_fix_errors_2)
# -------------------------------------------------------------------------------
# Create relationship classes Set and return it to the calling routine
#
def get_relationship_classes(self):
# get featureclasses outside of datasets
fc_list = arcpy.ListFeatureClasses("*")
# get fetatureclasses within datasets
fds_list = arcpy.ListDatasets("*", "Feature")
for fds in fds_list:
fc_list += arcpy.ListFeatureClasses("*", "", fds)
# get tables
fc_list += arcpy.ListTables("*")
# create relationship classes set
relClasses = set()
for i, fc in enumerate(fc_list):
desc = arcpy.Describe(fc)
for j, rel in enumerate(desc.relationshipClassNames):
relClasses.add(rel)
return relClasses
# -------------------------------------------------------------------------------
# Process Schemas
# Prepare sql to split Tables and Feature Classes in Schemas
#
    def process_schemas(self):
        """Emit the schema DDL (create_schemas.sql) and the statements that
        move each dataset/feature class/table into its target schema
        (split_schemas.sql), per the YAML mapping.  Anything mapped to
        'public' is left where PostGIS puts it by default.
        """
        print("\nProcessing schemas ...")
        # PostGIS is required by the migrated geometry columns.
        str_create_extension = "\nCREATE EXTENSION IF NOT EXISTS postgis;"
        self.write_it(self.f_create_schemas, str_create_extension)
        # Recreate every non-public schema from scratch.
        for schema in self.schemas:
            if schema == 'public':
                continue
            str_drop_schema = '\nDROP SCHEMA IF EXISTS \"{0}\" CASCADE;'.format(
                schema)
            str_create_schema = 'CREATE SCHEMA \"{0}\";'.format(schema)
            self.write_it(self.f_create_schemas, str_drop_schema)
            self.write_it(self.f_create_schemas, str_create_schema)
        # Feature classes that live inside feature datasets.
        self.write_it(self.f_split_schemas, "\n-- FeatureDatasets:")
        print(" FeatureDatasets")
        for schema, datasets in self.feature_datasets.items():
            if schema == 'public':
                continue
            for fds in datasets:
                fc_list = arcpy.ListFeatureClasses("*", "", fds)
                fc_list.sort()
                for fc in fc_list:
                    self.split_schemas(fc, schema)
        # Stand-alone feature classes listed explicitly in the YAML file.
        self.write_it(self.f_split_schemas, "\n-- FeatureClasses:")
        print(" FeatureClasses")
        for schema, fcs in self.feature_classes.items():
            if schema == 'public':
                continue
            for fc in fcs:
                # Guard against stale YAML entries.
                if arcpy.Exists(fc):
                    self.split_schemas(fc, schema)
        # Plain tables listed in the YAML file.
        self.write_it(self.f_split_schemas, "\n-- Tables:")
        print(" Tables")
        for schema, tables in self.tables.items():
            if schema == 'public':
                continue
            for table in tables:
                if arcpy.Exists(table):
                    self.split_schemas(table, schema)
# -------------------------------------------------------------------------------
| |
# -*- test-case-name: epsilon.test.test_juice -*-
# Copyright 2005 Divmod, Inc. See LICENSE file for details
import warnings, pprint
import keyword
import io
import six
from twisted.internet.main import CONNECTION_LOST
from twisted.internet.defer import Deferred, maybeDeferred, fail
from twisted.internet.protocol import ServerFactory, ClientFactory
from twisted.internet.ssl import Certificate
from twisted.python.failure import Failure
from twisted.python import log, filepath
from epsilon.liner import LineReceiver
from epsilon.compat import long
from epsilon import extime
# Reserved header keys of the JUICE wire protocol.  The leading underscore
# distinguishes protocol metadata from user-defined box fields.
ASK = '_ask'          # tag of a request that expects an answer
ANSWER = '_answer'    # tag of the request being answered
COMMAND = '_command'  # name of the command to dispatch
ERROR = '_error'      # tag of the request that failed
ERROR_CODE = '_error_code'                # machine-readable error identifier
ERROR_DESCRIPTION = '_error_description'  # human-readable error text
LENGTH = '_length'    # byte length of the body; only appears on the wire
BODY = 'body'         # payload field, transmitted after the headers
# When True, every received box is logged via log.msg.
debug = False
class JuiceBox(dict):
    """ I am a packet in the JUICE protocol. """

    def __init__(self, __body='', **kw):
        self.update(kw)
        if __body:
            assert isinstance(__body, str), "body must be a string: %r" % ( repr(__body),)
            self['body'] = __body

    # 'body' used to be a real attribute; it is now just the 'body' field.
    # These accessors keep old attribute-style callers working, with a
    # deprecation warning.
    @property
    def body(self):
        warnings.warn("body attribute of boxes is now just a regular field",
                      stacklevel=2)
        return self['body']

    @body.setter
    def body(self, newbody):
        warnings.warn("body attribute of boxes is now just a regular field",
                      stacklevel=2)
        self['body'] = newbody

    def copy(self):
        """Return a shallow copy that preserves the concrete box class."""
        duplicate = self.__class__()
        duplicate.update(self)
        return duplicate

    def serialize(self,
                  delimiter=b'\r\n',
                  escaped=b'\r\n '):
        """Flatten this box to wire format: Title-Cased headers (with '_'
        shown as '-'), a blank line, then the raw body.  The body field is
        replaced on the wire by a Length header.
        """
        assert LENGTH not in self
        delimiter = six.ensure_binary(delimiter)
        escaped = six.ensure_binary(escaped)
        chunks = []
        for key, value in six.viewitems(self):
            if key == BODY:
                key = LENGTH
                value = str(len(self[BODY]))
            chunks.append(six.ensure_binary(key).replace(b'_', b'-').title())
            chunks.append(b': ')
            # Delimiters embedded in a value are escaped as continuations.
            chunks.append(six.ensure_binary(value).replace(delimiter, escaped))
            chunks.append(delimiter)
        chunks.append(delimiter)
        if BODY in self:
            chunks.append(six.ensure_binary(self[BODY]))
        return b''.join(chunks)

    def sendTo(self, proto):
        """
        Serialize and send this box to a Juice instance. By the time it is
        being sent, several keys are required. I must have exactly ONE of::
            -ask
            -answer
            -error
        If the '-ask' header is set, then the '-command' header must also be
        set.
        """
        proto.sendPacket(self)

# juice.Box => JuiceBox
Box = JuiceBox
class TLSBox(JuiceBox):
    """A box that starts TLS on the connection right after being sent.

    The positional constructor arguments (certificate, optional verifier,
    optional zero-argument callback fired once TLS is up) are kept as
    attributes; remaining keywords become ordinary box fields.
    """

    def __init__(self, __certificate, __verify=None, __sslstarted=None, **kw):
        super(TLSBox, self).__init__(**kw)
        self.certificate = __certificate
        self.verify = __verify
        self.sslstarted = __sslstarted

    def __repr__(self):
        return 'TLS(**%s)' % (super(TLSBox, self).__repr__(),)

    def sendTo(self, proto):
        # Transmit normally first; then switch the transport to TLS.
        super(TLSBox, self).sendTo(proto)
        tls_args = [self.certificate]
        if self.verify is not None:
            tls_args.append(self.verify)
        proto.startTLS(*tls_args)
        if self.sslstarted is not None:
            self.sslstarted()
class QuitBox(JuiceBox):
    """A box that hangs up the connection after it has been sent."""
    def __repr__(self):
        return 'Quit(**%s)' % (super(QuitBox, self).__repr__(),)
    def sendTo(self, proto):
        # Serialize/transmit normally, then drop the transport.
        super(QuitBox, self).sendTo(proto)
        proto.transport.loseConnection()
class _SwitchBox(JuiceBox):
    """A box that, once sent, hands the connection over to another protocol
    instance (the Juice protocol stops parsing afterwards)."""
    def __repr__(self):
        return 'Switch(**%s)' % (super(_SwitchBox, self).__repr__(),)
    def __init__(self, __proto, **kw):
        super(_SwitchBox, self).__init__(**kw)
        # Protocol instance that takes over after this box is transmitted.
        self.innerProto = __proto
    def sendTo(self, proto):
        super(_SwitchBox, self).sendTo(proto)
        # _switchTo is provided by the Juice protocol class (not visible in
        # this chunk).
        proto._switchTo(self.innerProto)
class NegotiateBox(JuiceBox):
    """A box that pins the negotiated protocol version after being sent."""
    def __repr__(self):
        return 'Negotiate(**%s)' % (super(NegotiateBox, self).__repr__(),)
    def sendTo(self, proto):
        super(NegotiateBox, self).sendTo(proto)
        # The 'version' field is required; int() raises if it is missing
        # or malformed.
        proto._setProtocolVersion(int(self['version']))
class JuiceError(Exception):
    """Base class of all Juice protocol errors."""
    pass
class RemoteJuiceError(JuiceError):
    """
    This error indicates that something went wrong on the remote end of the
    connection, and the error was serialized and transmitted to you.
    """
    def __init__(self, errorCode, description, fatal=False):
        """Create a remote error with an error code and description.

        errorCode   -- machine-readable identifier from the -error-code header
        description -- human-readable text from the -error-description header
        fatal       -- True when the remote end closed the connection
        """
        Exception.__init__(self, "Remote[%s]: %s" % (errorCode, description))
        self.errorCode = errorCode
        self.description = description
        self.fatal = fatal
class UnhandledRemoteJuiceError(RemoteJuiceError):
    """Remote error whose code was not declared by the command; the remote
    side did not handle the underlying exception."""
    def __init__(self, description):
        errorCode = b"UNHANDLED"
        RemoteJuiceError.__init__(self, errorCode, description)
class JuiceBoxError(JuiceError):
    """Base class for errors about a single (mal)formed box."""
    pass
class MalformedJuiceBox(JuiceBoxError):
    """Raised when a received packet cannot be parsed as a box."""
    pass
class UnhandledCommand(JuiceError):
    """Raised when no handler method exists for a received command."""
    pass
class IncompatibleVersions(JuiceError):
    """Raised when protocol version negotiation finds no common version."""
    pass
class _Transactor:
def __init__(self, store, callable):
self.store = store
self.callable = callable
def __call__(self, box):
return self.store.transact(self.callable, box)
def __repr__(self):
return '<Transaction in: %s of: %s>' % (self.store, self.callable)
class DispatchMixin:
    """Mixin that maps received command names onto handler methods.

    Two dispatch styles are supported: new-style ``command_<NAME>`` methods
    (decorated elsewhere with a ``command`` attribute describing arguments,
    namespaces and declared errors) and legacy ``juice_<NAME>`` methods
    called directly with the raw box.
    """
    baseDispatchPrefix = 'juice_'
    autoDispatchPrefix = 'command_'
    # Optional callable wrapped around every handler before invocation
    # (e.g. a _Transactor); None means handlers are called directly.
    wrapper = None
    def _auto(self, aCallable, proto, namespace=None):
        """Wrap a ``command_*`` method into a box-taking callable, or return
        None when the method is absent or not exposed in *namespace*."""
        if aCallable is None:
            return None
        # 'command' metadata is attached by the Command machinery defined
        # elsewhere in this module (not visible in this chunk).
        command = aCallable.command
        if namespace not in command.namespaces:
            # if you're in the wrong namespace, you are very likely not allowed
            # to invoke the command you are trying to invoke.  some objects
            # have commands exposed in a separate namespace for security
            # reasons, since the security model is a role : namespace mapping.
            log.msg('WRONG NAMESPACE: %r, %r' % (namespace, command.namespaces))
            return None
        def doit(box):
            # Decode wire strings into python objects per the command's
            # argument schema, then add transport-derived extras.
            kw = stringsToObjects(box, command.arguments, proto)
            for name, extraArg in command.extra:
                kw[name] = extraArg.fromTransport(proto.transport)
            # def checkIsDict(result):
            #     if not isinstance(result, dict):
            #         raise RuntimeError("%r returned %r, not dictionary" % (
            #             aCallable, result))
            #     return result
            def checkKnownErrors(error):
                # Translate declared exception types into RemoteJuiceErrors
                # carrying the command's error code; undeclared errors
                # propagate (trap re-raises them).
                key = error.trap(*command.allErrors)
                code = command.allErrors[key]
                desc = str(error.value)
                # NOTE(review): `error in command.fatalErrors` tests the
                # Failure object itself against that collection — if
                # fatalErrors holds exception *types* this is presumably
                # always False; confirm against the Command class.
                return Failure(RemoteJuiceError(
                    code, desc, error in command.fatalErrors))
            return maybeDeferred(aCallable, **kw).addCallback(
                command.makeResponse, proto).addErrback(
                checkKnownErrors)
        return doit
    def _wrap(self, aCallable):
        """Apply the optional ``wrapper`` hook to a handler (None-safe)."""
        if aCallable is None:
            return None
        wrap = self.wrapper
        if wrap is not None:
            return wrap(aCallable)
        else:
            return aCallable
    def normalizeCommand(self, cmd):
        """Return the canonical form of a command.
        """
        return cmd.upper().strip().replace('-', '_')
    def lookupFunction(self, proto, name, namespace):
        """Return a callable to invoke when executing the named command.
        """
        # Try to find a method to be invoked in a transaction first
        # Otherwise fallback to a "regular" method
        fName = self.autoDispatchPrefix + name
        fObj = getattr(self, fName, None)
        if fObj is not None:
            # pass the namespace along
            return self._auto(fObj, proto, namespace)
        # Legacy juice_* dispatch has no namespace support.
        assert namespace is None, 'Old-style parsing'
        # Fall back to simplistic command dispatching - we probably want to get
        # rid of this eventually, there's no reason to do extra work and write
        # fewer docs all the time.
        fName = self.baseDispatchPrefix + name
        return getattr(self, fName, None)
    def dispatchCommand(self, proto, cmd, box, namespace=None):
        """Invoke the handler for *cmd* with *box*; returns a Deferred that
        fails with UnhandledCommand when no handler exists."""
        fObj = self.lookupFunction(proto, self.normalizeCommand(cmd), namespace)
        if fObj is None:
            return fail(UnhandledCommand(cmd))
        return maybeDeferred(self._wrap(fObj), box)
def normalizeKey(key):
    """Turn a wire header name (bytes or str) into a safe python key:
    lower-cased, '-' replaced by '_', and Title-cased when the result
    would collide with a python keyword (e.g. 'pass' -> 'Pass')."""
    candidate = six.ensure_str(key).lower().replace('-', '_')
    return candidate.title() if keyword.iskeyword(candidate) else candidate
def parseJuiceHeaders(lines):
    """
    Create a JuiceBox from a list of header lines.

    Returns a ``(bodyLength, box)`` tuple: the integer value of the Length
    header (0 when absent; it is popped out of the box) and the populated
    JuiceBox with normalized string keys/values.

    @param lines: a list of lines.
    @type lines: a list of L{bytes}
    """
    b = JuiceBox()
    key = None
    for L in lines:
        if L[0:1] == b' ':
            # continuation: a leading space folds this line into the
            # previous header's value, restoring the escaped CRLF.
            assert key is not None
            b[key] += six.ensure_str(b'\r\n' + L[1:])
            continue
        parts = L.split(b': ', 1)
        if len(parts) != 2:
            raise MalformedJuiceBox("Wrong number of parts: %r" % (L,))
        key, value = parts
        key = normalizeKey(key)
        b[key] = six.ensure_str(value)
    return int(b.pop(LENGTH, 0)), b
class JuiceParserBase(DispatchMixin):
    def __init__(self):
        """Initialize the request bookkeeping."""
        # Maps outstanding request tags -> Deferreds awaiting their answer
        # or error box; set to None once the connection can no longer send.
        self._outstandingRequests = {}
    def _puke(self, failure):
        """Last-resort errback: log *failure* and drop the connection.

        Fires when a command's Deferred fails and the application attached
        no errback of its own.
        """
        log.msg("Juice server or network failure "
                "unhandled by client application:")
        log.err(failure)
        log.msg(
            "Dropping connection! "
            "To avoid, add errbacks to ALL remote commands!")
        # self.transport is supplied by the concrete protocol subclass (not
        # visible in this chunk); None once the connection is already gone.
        if self.transport is not None:
            self.transport.loseConnection()
    # Monotonic counter used to tag outgoing requests.  long() comes from
    # epsilon.compat (plain int on Python 3).  Incrementing through
    # self._counter creates a per-instance attribute that shadows this
    # class-level default.
    _counter = long(0)
    def _nextTag(self):
        """Return the next unique request tag as a lowercase hex string."""
        self._counter += 1
        return '%x' % (self._counter,)
def failAllOutgoing(self, reason):
OR = self._outstandingRequests.items()
self._outstandingRequests = None # we can never send another request
for key, value in OR:
value.errback(reason)
def juiceBoxReceived(self, box):
if debug:
log.msg("Juice receive: %s" % pprint.pformat(dict(six.viewitems(box))))
if ANSWER in box:
question = self._outstandingRequests.pop(box[ANSWER])
question.addErrback(self._puke)
self._wrap(question.callback)(box)
elif ERROR in box:
question = self._outstandingRequests.pop(box[ERROR])
question.addErrback(self._puke)
self._wrap(question.errback)(
Failure(RemoteJuiceError(box[ERROR_CODE],
box[ERROR_DESCRIPTION])))
elif COMMAND in box:
cmd = box[COMMAND]
def sendAnswer(answerBox):
if ASK not in box:
return
if self.transport is None:
return
answerBox[ANSWER] = box[ASK]
answerBox.sendTo(self)
def sendError(error):
if ASK not in box:
return error
if error.check(RemoteJuiceError):
code = error.value.errorCode
desc = error.value.description
if error.value.fatal:
errorBox = QuitBox()
else:
errorBox = JuiceBox()
else:
errorBox = QuitBox()
log.err(error) # here is where server-side logging happens
# if the error isn't handled
code = 'UNHANDLED'
desc = "Unhandled Remote System Exception "
errorBox[ERROR] = box[ASK]
errorBox[ERROR_DESCRIPTION] = desc
errorBox[ERROR_CODE] = code
if self.transport is not None:
errorBox.sendTo(self)
return None # intentionally stop the error here: don't log the
# traceback if it's handled, do log it (earlier) if
# it isn't
self.dispatchCommand(self, cmd, box).addCallbacks(sendAnswer, sendError
).addErrback(self._puke)
else:
raise RuntimeError(
"Empty packet received over connection-oriented juice: %r" % (box,))
def sendBoxCommand(self, command, box, requiresAnswer=True):
"""
Send a command across the wire with the given C{juice.Box}.
Returns a Deferred which fires with the response C{juice.Box} when it
is received, or fails with a C{juice.RemoteJuiceError} if an error is
received.
If the Deferred fails and the error is not handled by the caller of
this method, the failure will be logged and the connection dropped.
"""
if self._outstandingRequests is None:
return fail(CONNECTION_LOST)
box[COMMAND] = command
tag = self._nextTag()
if requiresAnswer:
box[ASK] = tag
result = self._outstandingRequests[tag] = Deferred()
else:
result = None
box.sendTo(self)
return result
class Argument:
optional = False
def __init__(self, optional=False):
    """
    @param optional: if true, a missing value is tolerated and deserializes
        to C{None} instead of raising (see L{retrieve}).
    """
    self.optional = optional
def retrieve(self, d, name):
    """Remove *name* from mapping *d* and return its value.

    Required arguments raise KeyError (via dict.pop) when missing.
    Optional arguments return None when missing; note that a key whose
    stored value is None is left in place, matching the original
    get-then-delete behaviour.
    """
    if not self.optional:
        return d.pop(name)
    value = d.get(name)
    if value is not None:
        del d[name]
    return value
def fromBox(self, name, strings, objects, proto):
    """Deserialize this argument out of *strings* into *objects*."""
    wire = self.retrieve(strings, name)
    if wire is None and self.optional:
        # optional and absent: deserializes to None
        objects[name] = None
    else:
        objects[name] = self.fromStringProto(wire, proto)
def toBox(self, name, strings, objects, proto):
    """Serialize this argument out of *objects* into *strings*."""
    value = self.retrieve(objects, name)
    if value is None and self.optional:
        # optional and absent: emit nothing for this argument
        return
    strings[name] = self.toStringProto(value, proto)
def fromStringProto(self, inString, proto):
    # Default implementation ignores the protocol; subclasses that need
    # per-connection state may override this instead of fromString.
    return self.fromString(inString)
def toStringProto(self, inObject, proto):
    # Default implementation ignores the protocol; subclasses that need
    # per-connection state may override this instead of toString.
    return self.toString(inObject)
def fromString(self, inString):
| |
<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# standard modules
import glob
import itertools
import json
import os
from collections import defaultdict
# third party modules
import luigi
import unicodecsv as csv
# local modules
from alleletraj import utils
from alleletraj.ancient.snps import AncientSNPsPipeline
from alleletraj.const import GENERATION_TIME
from alleletraj.db.conn import Database
from alleletraj.modern.demog import DadiBestModel, DADI_FOLDED
from alleletraj.qtl.load import MIN_DAF
# number of independent MCMC replicates to run
MCMC_NUM_CHAINS = 2

# maximum number of MCMC replicates to run in search of converged runs
MCMC_MAX_CHAINS = 6

# number of MCMC cycles to run
MCMC_CYCLES = int(1e7)

# fraction of MCMC cycles to discard as burn in
MCMC_BURN_PCT = 0.5

# thinning is analytically unnecessary, but makes the MCMC run much faster (as Josh's method writes directly to disk)
# see https://besjournals.onlinelibrary.wiley.com/doi/full/10.1111/j.2041-210X.2011.00131.x
MCMC_THIN = 100

# fraction of the path to update (i.e. length/F)
MCMC_FRACTION = 20

# frequency of printing output to the log
MCMC_PRINT = 10000

# the minimum ESS (effective sample size) threshold for an MCMC run
MCMC_MIN_ESS = 100

# the maximum MPSRF threshold for a set of MCMC runs
MCMC_MAX_MPSRF = 1.2

# genetic model -- the h coefficient passed to `selection` via the -h flag
MODEL_RECESSIVE = 0
MODEL_ADDITIVE = 0.5
MODEL_DOMINANT = 1

# minimum number of time bins needed to run `selection`
MCMC_MIN_BINS = 3

# number of DAF paired neutral SNPs to run for every non-neutral SNP
NEUTRAL_REPLICATES = 5
def selection_fetch_neutral_snps(species, population, modsnp_id, mispolar=False):
    """
    Fetch the 'neutral' SNPs paired to the modsnp in this population.

    Checks the `selection_neutrals` table first; when no cached pairings
    exist, computes and persists new ones via selection_pair_neutral_snps().
    Prints a warning when the number of pairs differs from
    NEUTRAL_REPLICATES.
    """
    dbc = Database(species)

    params = {
        'population': population,
        'modsnp_id': modsnp_id,
        'mispolar': int(mispolar)  # stored as 0/1 in the DB
    }

    modsnps = dbc.get_records('selection_neutrals', params, key='neutral_id')

    # no cached pairings — compute and persist them now
    if not modsnps:
        modsnps = selection_pair_neutral_snps(species, population, modsnp_id, mispolar)

    if len(modsnps) != NEUTRAL_REPLICATES:
        print('WARNING: Insufficient neutral SNPs for `selection` {} (n={})'.format([population, modsnp_id, mispolar],
                                                                                    len(modsnps)))

    return modsnps
def selection_pair_neutral_snps(species, population, modsnp_id, mispolar):
    """
    Find 'neutral' SNPs, by pairing the non-neutral SNP based on chromosome, mutation and DAF.

    If the SNP is flagged as mispolar then the allele polarization and DAF are inverted.

    Matches are persisted to `selection_neutrals` so repeat calls can hit the
    cache (see selection_fetch_neutral_snps).  Returns at most
    NEUTRAL_REPLICATES records.
    """
    dbc = Database(species)

    bins = dbc.get_records('sample_bins')

    # per-bin SQL fragments: call counts per bin, and the squared error
    # between the candidate's bin counts and the target SNP's bin counts
    bid = []
    lsq = []

    # we need to ensure that there is a comparable number of calls in each bin, so we sort the neutrals by the least
    # squared error of the differences in bin counts, then randomise
    for bin_id in bins:
        bid.append('SUM(s.bin_id = {id}) AS bin{id}'.format(id=bin_id))
        lsq.append('POW(ABS(SUM(s.bin_id = {id}) - nn.bin{id}), 2)'.format(id=bin_id))

    bin_sql = ','.join(bid)
    sqr_sql = '+'.join(lsq)

    # inner query: the target (non-neutral) SNP with its (possibly inverted)
    # alleles, DAF and per-bin counts; outer query: neutral candidates on the
    # same chromosome with the same mutation and matching (rounded) DAF.
    # RAND({modsnp}) seeds the shuffle deterministically per target SNP.
    modsnps = dbc.get_records_sql("""
        SELECT nn.population,
               nn.id AS modsnp_id,
               {mispolar} AS mispolar,
               ms.id AS neutral_id
          FROM (
                SELECT ms.id,
                       msd.population,
                       ms.chrom,
                       IF({mispolar}, ms.ancestral, ms.derived) AS derived,
                       IF({mispolar}, ms.derived, ms.ancestral) AS ancestral,
                       IF({mispolar}, 1-msd.daf, msd.daf) AS daf,
                       {bin_sql}
                  FROM modern_snps ms
                  JOIN modern_snp_daf msd
                    ON msd.modsnp_id = ms.id
                  JOIN sample_reads sr
                    ON sr.chrom = ms.chrom
                   AND sr.site = ms.site
                  JOIN samples s
                    ON s.id = sr.sample_id
                 WHERE msd.population = '{population}'
                   AND ms.id = {modsnp}
              GROUP BY ms.id
               ) AS nn
          JOIN modern_snps ms
            ON ms.chrom = nn.chrom
           AND ms.derived = nn.derived
           AND ms.ancestral = nn.ancestral
           AND ms.neutral = 1
           AND ms.mispolar IS NULL
           AND ms.variant_id IS NOT NULL
           AND ms.id != nn.id
          JOIN modern_snp_daf msd
            ON msd.modsnp_id = ms.id
           AND msd.population = nn.population
           AND round(msd.daf, 2) = round(nn.daf, 2)
          JOIN sample_reads sr
            ON sr.chrom = ms.chrom
           AND sr.site = ms.site
          JOIN samples s
            ON s.id = sr.sample_id
      GROUP BY ms.id
      ORDER BY round(({sqr_sql})/{bins})*{bins}, RAND({modsnp})
         LIMIT {num}
           """.format(sqr_sql=sqr_sql, bins=len(bins), mispolar=int(mispolar), bin_sql=bin_sql, population=population,
                      modsnp=modsnp_id, num=NEUTRAL_REPLICATES), key=None)

    # add them to the table so we don't have to query a second time
    for modsnp in modsnps:
        dbc.save_record('selection_neutrals', modsnp)

    return modsnps
class SelectionInputFile(utils.DatabaseTask):
    """
    Generate the 4-column sample input file for `selection`.

    See https://github.com/Schraiber/selection

    The sample input is a 4-column, white-space-separated file. Each line corresponds to a single sample, which can be
    just one individual or many individuals from approximately the same time period pooled together.

    For each sample, each column is
    1. the number of derived alleles
    2. the sample size (in haploid genomes)
    3. the most ancient end of the possible age of the sample (i.e. the oldest it could be)
    4. the most recent end of the possible age of the sample (i.e. the youngest it could be)

    :type species: str
    :type population: str
    :type modsnp: int
    :type no_modern: bool
    :type mispolar: bool
    :type const_pop: bool
    :type no_age: bool
    """
    species = luigi.Parameter()
    population = luigi.Parameter()
    modsnp = luigi.IntParameter()
    no_modern = luigi.BoolParameter()
    mispolar = luigi.BoolParameter()
    const_pop = luigi.BoolParameter()
    no_age = luigi.BoolParameter()

    # do not retry after failure, as this just chews CPU cycles
    retry_count = 0

    def requires(self):
        """Depend on the best demographic model and the ancient SNP calls."""
        yield DadiBestModel(self.species, self.population, DADI_FOLDED, self.const_pop)
        yield AncientSNPsPipeline(self.species)

    def output(self):
        """The 4-column `selection` input file."""
        return luigi.LocalTarget('data/selection/{}.input'.format(self.basename))

    def run(self):
        # unpack the inputs
        (_, nref_file, _), _ = self.input()

        # get the Nref population size
        with nref_file.open() as fin:
            pop_size = int(fin.read())

        # time is measured in diffusion units (2 * Nref generations, in years)
        diff_units = 2 * pop_size * GENERATION_TIME[self.species]

        # NOTE some SNPs may be mispolarised, so we switch the derived/ancestral alleles in those cases
        derived = 'derived' if not self.mispolar else 'ancestral'

        params = {
            'derived': derived,
            'modsnp': self.modsnp,
            'population': self.population,
            'units': diff_units,
        }

        # ages are negative diffusion-time offsets from the present
        sql = """
              # get the ancient frequencies in each bin
              SELECT SUM(sr.base = ms.{derived}) AS derived_count,
                     COUNT(sr.id) AS sample_size,
                     -round((sb.max / {units}), 4) AS max,
                     -round(((sb.max + sb.min) / 2 / {units}), 4) AS median,
                     -round((sb.min / {units}), 4) AS min
                FROM modern_snps ms
                JOIN sample_reads sr
                  ON sr.chrom = ms.chrom
                 AND sr.site = ms.site
                JOIN samples s
                  ON s.id = sr.sample_id
                JOIN sample_bins sb
                  ON sb.id = s.bin_id
               WHERE ms.id = {modsnp}
                 AND s.population = '{population}'
            GROUP BY sb.id
               """.format(**params)

        # noinspection SqlResolve
        modern_sql = """
              # get the modern frequencies
              SELECT {derived}_count AS derived_count,
                     ancestral_count + derived_count AS sample_size,
                     0.0 AS max,
                     0.0 AS median,
                     0.0 AS min
                FROM modern_snps ms
                JOIN modern_snp_daf msd
                  ON msd.modsnp_id = ms.id
               WHERE ms.id = {modsnp}
                 AND msd.population = '{population}'
               """.format(**params)

        # optionally append the modern frequencies as a present-day sample
        if not self.no_modern:
            sql += " UNION " + modern_sql

        bins = self.dbc.get_records_sql(sql + " ORDER BY max", key=None)

        if len(bins) < MCMC_MIN_BINS:
            raise RuntimeError('ERROR: Insufficient time bins to run `selection` (n={})'.format(len(bins)))

        # write the sample input file
        with self.output().open('w') as tsv_file:
            if self.no_age:
                # collapse the age interval to its midpoint
                fields = ['derived_count', 'sample_size', 'median', 'median']
            else:
                fields = ['derived_count', 'sample_size', 'max', 'min']

            writer = csv.DictWriter(tsv_file, fieldnames=fields, delimiter='\t', extrasaction='ignore')

            # write the data to disk
            for b in bins:
                writer.writerow(b)
class SelectionRunMCMC(utils.PipelineTask):
    """
    Run `selection` for the given SNP.

    Executes one MCMC chain of Schraiber's `selection` (the `sr` binary)
    against the sample input file and the fitted population size history.

    :type species: str
    :type population: str
    :type modsnp: int
    :type no_modern: bool
    :type mispolar: bool
    :type const_pop: bool
    :type no_age: bool
    :type n: int
    :type s: int
    :type h: float
    :type F: int
    :type chain: int
    """
    species = luigi.Parameter()
    population = luigi.Parameter()
    modsnp = luigi.IntParameter()
    no_modern = luigi.BoolParameter()
    mispolar = luigi.BoolParameter()
    const_pop = luigi.BoolParameter()
    no_age = luigi.BoolParameter()
    n = luigi.IntParameter()
    s = luigi.IntParameter()
    h = luigi.FloatParameter()
    F = luigi.IntParameter()
    chain = luigi.IntParameter()

    # TODO remove when done with SLURM jobs on the cluster
    # resources = {'SLURM': 2}

    @property
    def priority(self):
        """Do chains in numerical order"""
        return 10 - self.chain

    # do not retry after failure, as this just chews CPU cycles
    retry_count = 0

    def requires(self):
        """Depend on the demographic model and the `selection` input file."""
        yield DadiBestModel(self.species, self.population, DADI_FOLDED, self.const_pop)
        yield SelectionInputFile(self.species, self.population, self.modsnp, self.no_modern, self.mispolar,
                                 self.const_pop, self.no_age)

    def output(self):
        """The MCMC outputs (parameters, times, trajectory) plus the run log."""
        return [luigi.LocalTarget('data/selection/{}.{}'.format(self.basename, ext))
                for ext in ['param.gz', 'time.gz', 'traj.gz', 'log']]

    def run(self):
        # compose the input and output file paths
        (pop_file, _, _), input_file = self.input()
        param_file, time_file, traj_file, log_file = self.output()

        output_prefix = utils.trim_ext(log_file.path)

        # make a deterministic random seed (helps keep everything easily reproducible)
        seed = int('{}{}'.format(self.modsnp, self.chain))

        with log_file.open('w') as fout:
            # run `selection`
            cmd = ['sr',
                   '-D', input_file.path,  # path to data input file
                   '-P', pop_file.path,  # path to population size history file
                   '-o', output_prefix,  # output file prefix
                   '-A', MIN_DAF,  # ascertainment in modern individuals
                   '-n', self.n,  # number of MCMC cycles to run
                   '-s', self.s,  # frequency of sampling from the posterior
                   '-h', self.h,  # genetic model (additive, recessive, dominant)
                   '-F', self.F,  # fraction of the path to update (i.e. length/F)
                   '-f', MCMC_PRINT,  # frequency of printing output to the screen
                   '-e', seed]  # random number seed

            if not self.no_age:
                cmd += ['-a']  # flag to infer allele age

            utils.run_cmd(cmd, stdout=fout)
class | |
self.weight_percentile:
temp_w_min = w_min.clone()
temp_w_max = w_max.clone()
# temp_w_min = x_transform.min(dim=1)[0]
# temp_w_max = x_transform.max(dim=1)[0]
for i in range(self.group_number):
w_min[i * group_length: (i + 1) * group_length] = \
temp_w_min[i * group_length: (i + 1) * group_length].min().repeat(group_length)
w_max[i * group_length: (i + 1) * group_length] = \
temp_w_max[i * group_length: (i + 1) * group_length].max().repeat(group_length)
# print("shape = ", temp_w_max[i * group_length: (i + 1) * group_length].max().shape)
# print("enlarged shape = ", temp_w_max[i * group_length: (i + 1) * group_length] \
# .max().repeat(group_length).shape)
# if i == 1:
# print("w_max_1_2 = ", w_max[i * group_length: (i + 1) * group_length])
elif self.weight_percentile:
# print("percentile = ", self.percentile)
for i in range(self.group_number):
temp_w_min, temp_w_max = get_percentile_min_max(x_transform
[i * group_length: (i + 1) * group_length].view(-1), 0.1, 99.9, output_tensor=True)
w_min[i * group_length: (i + 1) * group_length] = temp_w_min.repeat(group_length)
w_max[i * group_length: (i + 1) * group_length] = temp_w_max.repeat(group_length)
elif not self.per_channel:
if not self.weight_percentile:
w_min = w.data.min().expand(1)
w_max = w.data.max().expand(1)
elif self.weight_percentile:
w_min, w_max = get_percentile_min_max(w.clone().view(-1), 0.1, 99.9)
# print("w_min: ", w_min)
# print("w_min size: ", w_min.size())
# Initialization
# if self.x_min.size()[0] == 3072:
# print("self.x_max = ", self.x_max[7:11])
# print("self.x_min: ", self.x_min)
# print("self.x_min size: ", self.x_min.size())
if self.x_min.size()[0] == 1:
# print("1")
if self.x_min == self.x_max:
# print("2")
self.x_min = w_min
self.x_max = w_max
# print("w_min size: ", w_min.size())
# print("True x_min = ", self.x_min[0:8])
# if self.per_channel:
# self.x_min = self.x_min.expand(self.channel_num).cuda()
# self.x_max = self.x_max.expand(self.channel_num).cuda()
# print(self.x_max)
# print("self.x_min 2: ", self.x_min)
# exponential moving average (EMA)
# use momentum to prevent the quantized values change greatly every
# iteration
self.x_min = self.momentum * self.x_min + (1. - self.momentum) * w_min
self.x_max = self.momentum * self.x_max + (1. - self.momentum) * w_max
# print("self.x_min 3: ", self.x_min)
# if self.x_min.size()[0] == 3072:
# print("True self.x_max = ", self.x_max[7:11])
# print("True self.x_min size:", self.x_min.size())
if not self.full_precision_flag:
w = self.weight_function(self.weight, self.weight_bit, self.x_min,
self.x_max, self.per_channel, self.weight_percentile)
else:
w = self.weight
# print("self.x_min 4: ", self.x_min)
if self.alpha is None:
return F.linear(x, w, bias=self.bias)
else:
assert self.full_precision_flag == False
# quantized = self.alpha * F.linear(x, w, bias=self.bias)
# non_quantized = (1 - self.alpha) * F.linear(
# x, self.weight, bias=self.bias)
quantized = self.alpha * w
non_quantized = (1 - self.alpha) * self.weight
return F.linear(x, quantized + non_quantized, bias=self.bias)
class QuantLinear_Act(QuantLinear):
    """Quantized linear layer that also quantizes its output activations.

    Rebuilds a QuantLinear from a donor layer and chains a QuantAct_bert
    stage after it, using a fixed activation bit-width.
    """

    def __init__(self, quant_linear, bit=32):
        # Reconstruct the underlying quantized linear layer from the donor.
        super(QuantLinear_Act, self).__init__(
            quant_linear.input_size, quant_linear.output_size,
            quant_linear.weight_bit, quant_linear.full_precision_flag,
            quant_linear.quant_mode)
        self.weight_bit_act = bit  # ACT_QUANT_BITS_MAP[self.weight_bit]
        self.percentile = False
        self.quant_act = QuantAct_bert(
            activation_bit=self.weight_bit_act,
            full_precision_flag=self.full_precision_flag,
            percentile=self.percentile)

    def reset_bits(self, weight_bit=8):
        """Switch to quantized mode for both weights and activations."""
        super(QuantLinear_Act, self).reset_bits(weight_bit)
        # activation bit-width is kept at its construction-time value
        self.quant_act.reset_bits(self.weight_bit_act)

    def forward(self, x):
        out = super(QuantLinear_Act, self).forward(x)
        if self.full_precision_flag:
            # full precision: behave exactly like the parent layer
            return out
        # otherwise quantize the activations produced by the linear layer
        return self.quant_act(out)
class QuantAct_bert(Module):
    """Activation quantizer with a running (EMA) min/max range.

    Tracks the observed activation range with an exponential moving average
    and quantizes inputs to `activation_bit` bits using a symmetric or
    asymmetric quantization function.
    """

    def __init__(self,
                 activation_bit=32,
                 momentum=0.99,
                 full_precision_flag=True,
                 running_stat=True,
                 quant_mode="asymmetric",
                 show_flag=False,
                 percentile=False):
        super(QuantAct_bert, self).__init__()
        self.activation_bit = activation_bit
        self.momentum = momentum  # EMA weight for the running range
        self.full_precision_flag = full_precision_flag
        self.running_stat = running_stat  # update x_min/x_max on forward()
        self.quant_mode = quant_mode
        self.show_flag = show_flag
        self.percentile = percentile  # use [0.1, 99.9] percentiles for range
        # running activation range; plain attributes, not registered buffers
        # NOTE(review): these will not appear in state_dict — confirm intended
        self.x_min = 0.
        self.x_max = 0.
        if quant_mode == "symmetric":
            self.act_function = SymmetricQuantFunction.apply
        elif quant_mode == "asymmetric":
            self.act_function = AsymmetricQuantFunction.apply
        else:
            raise ValueError("unknown quant mode: {}".format(quant_mode))

    def __repr__(self):
        return "{}(activation_bit={}, " \
               "full_precision_flag={}, Act_min: {}, " \
               "Act_max: {})".format(self.__class__.__name__, self.activation_bit,
                                     self.full_precision_flag, self.x_min, self.x_max)

    def reset_bits(self, weight_bit=8):
        """Leave full-precision mode and set the activation bit-width."""
        self.full_precision_flag = False
        self.activation_bit = weight_bit

    def forward(self, x):
        if self.running_stat:
            if not self.percentile:
                x_min = x.data.min()
                x_max = x.data.max()
            else:
                x_min, x_max = get_percentile_min_max(x.clone().view(-1), 0.1, 99.9)
            # Initialization: seed the running range on the first batch
            # (x_min == x_max holds while both are still the initial 0.)
            if self.x_min == self.x_max:
                self.x_min = x_min
                self.x_max = x_max
            # exponential moving average (EMA)
            # use momentum to prevent the quantized values changing greatly
            # every iteration
            self.x_min = self.momentum * self.x_min + \
                         (1. - self.momentum) * x_min
            self.x_max = self.momentum * self.x_max + \
                         (1. - self.momentum) * x_max
        if not self.full_precision_flag:
            if self.quant_mode == "asymmetric":
                quant_act = self.act_function(x, self.activation_bit,
                                              self.x_min, self.x_max)
            elif self.quant_mode == "symmetric":
                # symmetric mode quantizes to [-magnitude, magnitude]
                magnitude = max(abs(self.x_min), abs(self.x_max))
                quant_act = self.act_function(x, self.activation_bit, magnitude)
            return quant_act
        else:
            return x
class QuantAct(Module):
    """Activation quantizer with running min/max, optionally per channel.

    Like QuantAct_bert but the range statistics are registered buffers
    (saved in state_dict) and may be tracked per output channel.
    """

    def __init__(self,
                 activation_bit,
                 out_channels=1,
                 momentum=0.99,
                 full_precision_flag=True,
                 running_stat=True,
                 quant_mode="asymmetric",
                 per_channel=False,
                 show_flag=False):
        super(QuantAct, self).__init__()
        self.activation_bit = activation_bit
        self.momentum = momentum  # EMA weight for the running range
        self.full_precision_flag = full_precision_flag
        self.running_stat = running_stat
        self.quant_mode = quant_mode
        self.per_channel = per_channel
        self.show_flag = show_flag
        # running range as buffers so it persists in state_dict
        self.register_buffer("x_min", torch.Tensor(out_channels).zero_())
        self.register_buffer("x_max", torch.Tensor(out_channels).zero_())
        if quant_mode == "symmetric":
            self.act_function = SymmetricQuantFunction.apply
        elif quant_mode == "asymmetric":
            self.act_function = AsymmetricQuantFunction.apply
        else:
            raise ValueError("unknown quant mode: {}".format(quant_mode))

    def __repr__(self):
        return "{}(activation_bit={}, " \
               "full_precision_flag={}, Act_min: {}, " \
               "Act_max: {})".format(self.__class__.__name__, self.activation_bit,
                                     self.full_precision_flag, self.x_min[0], self.x_max[0])

    def forward(self, x):
        # channel axis is assumed to be dim 1 of the input
        self.channel_num = x.data.size()[1]
        if self.running_stat:
            if self.per_channel:
                # flatten everything except the channel axis, then reduce
                x_transform = x.data.transpose(0, 1).contiguous().view(
                    self.channel_num, -1)
                x_min = x_transform.min(dim=1)[0]
                x_max = x_transform.max(dim=1)[0]
            else:
                x_min = x.data.min()
                x_max = x.data.max()
            # Initialization: seed the running range on the first batch
            # (only applies while the buffers are still their default size 1)
            if self.x_min.size()[0] == 1:
                if self.x_min == self.x_max:
                    self.x_min = x_min
                    self.x_max = x_max
                    if self.per_channel:
                        # NOTE(review): .cuda() hard-codes GPU placement here
                        self.x_min = self.x_min.expand(self.channel_num).cuda()
                        self.x_max = self.x_max.expand(self.channel_num).cuda()
            # exponential moving average (EMA)
            # use momentum to prevent the quantized values change greatly every
            # iteration
            self.x_min = self.momentum * self.x_min + \
                         (1. - self.momentum) * x_min
            self.x_max = self.momentum * self.x_max + \
                         (1. - self.momentum) * x_max
        if not self.full_precision_flag:
            if self.quant_mode == "asymmetric":
                if self.per_channel:
                    quant_act = self.act_function(x, self.activation_bit,
                                                  self.x_min, self.x_max, True)
                else:
                    quant_act = self.act_function(x, self.activation_bit,
                                                  self.x_min.item(),
                                                  self.x_max.item())
                if self.show_flag:
                    # debugging aid: dump the tracked range and quantized output
                    print(self.x_min, self.x_max)
                    print(x.data.min(), x.data.max())
                    print(quant_act)
                return quant_act
            elif self.quant_mode == "symmetric":
                magnitude = max(abs(self.x_min[0]), abs(self.x_max[0]))
                return self.act_function(x, self.activation_bit, magnitude)
        else:
            return x
class QuantDynaLinear(nn.Linear):
def __init__(self, in_features, out_features, num_heads, bias=True, dyna_dim=None,
             weight_bit=8,
             full_precision_flag=True,
             quant_mode="asymmetric",
             alpha=None,
             per_channel=True,
             group_quantization=True,
             group_number=1,
             weight_percentile=False):
    """Quantized, width-adjustable linear layer.

    Args:
        in_features/out_features: maximum layer dimensions.
        num_heads: attention head count (stored for width slicing).
        dyna_dim: pair of flags for which dimensions are dynamic;
            defaults to [True, True].  (Fixed: the original used a mutable
            default list, which is shared across all instances.)
        weight_bit: weight quantization bit-width.
        full_precision_flag: when True, quantization is bypassed.
        quant_mode: 'symmetric' or 'asymmetric'.
        alpha: optional quantized/full-precision blending coefficient.
        per_channel / group_quantization / group_number / weight_percentile:
            weight-range calculation options.

    Raises:
        ValueError: for an unknown quant_mode.
    """
    super(QuantDynaLinear, self).__init__(
        in_features, out_features, bias=bias)
    self.in_features_max = in_features
    self.out_features_max = out_features
    self.num_heads = num_heads
    self.width_mult = 1.
    # avoid the shared-mutable-default pitfall: build a fresh list per instance
    self.dyna_dim = [True, True] if dyna_dim is None else dyna_dim
    self.full_precision_flag = full_precision_flag
    self.weight_bit = weight_bit
    self.alpha = alpha
    self.quant_mode = quant_mode
    self.input_size = in_features
    self.output_size = out_features
    self.momentum = 0.99  # EMA weight for the running weight range
    # running weight range, persisted in state_dict
    self.register_buffer('x_min', torch.zeros(1))
    self.register_buffer('x_max', torch.zeros(1))
    self.per_channel = per_channel
    self.weight_percentile = weight_percentile
    self.group_quantization = group_quantization
    self.group_number = group_number
    if quant_mode == "symmetric":
        self.weight_function = SymmetricQuantFunction.apply
    elif quant_mode == "asymmetric":
        self.weight_function = AsymmetricQuantFunction.apply
    else:
        raise ValueError("unknown quant mode: {}".format(quant_mode))
def reset_bits(self, weight_bit=8):
    """Leave full-precision mode and set the weight bit-width."""
    self.full_precision_flag = False
    self.weight_bit = weight_bit
def reset_alpha(self, alpha):
    """Set the quantized/full-precision blending coefficient.

    `alpha` weights the quantized weights and (1 - alpha) the full-precision
    weights when the layer blends them in its forward pass.

    Raises:
        ValueError: if alpha is outside [0.0, 1.0].
    """
    # Validate with a real exception rather than `assert`, which is silently
    # stripped when Python runs with the -O flag.
    if not 0.0 <= alpha <= 1.0:
        raise ValueError('alpha must be in [0.0, 1.0], got {!r}'.format(alpha))
    self.alpha = alpha
def forward(self, input):
# print(x.shape)
w = self.weight
self.channel_num = w.shape[1]
# print("w shape:", w.shape)
if self.per_channel:
if not self.group_quantization:
# print(1, time.time())
# x_transform = w.data.transpose(0, 1).contiguous().view(self.channel_num, -1) # output_dim as channel
x_transform = w.data.transpose(0, 1).contiguous()
# print("x_transform, shape:", x_transform.shape)
w_min = x_transform.min(dim=1)[0]
w_max = x_transform.max(dim=1)[0]
# print("w_min shape:", w_min.shape)
# print("w_max shape:", w_max.shape)
# w_min = torch.zeros(self.channel_num).cuda()
# w_max = torch.zeros(self.channel_num).cuda()
# for i in range(self.channel_num):
# w_min[i] = w.data[:, i, :, :].min()
# w_max[i] = w.data[:, i, :, :].max()
# print(w_min)
# print(w_max)
if not self.weight_percentile:
pass
elif self.weight_percentile:
# print("percentile = ", self.percentile)
lower_percentile = 0.1
upper_percentile = 99.9
input_length = x_transform[0].view(-1).shape[0]
# print("self.channel_num = ", self.channel_num)
# print("input_length = ", input_length)
lower_index = round(input_length * lower_percentile * 0.01)
upper_index = round(input_length * upper_percentile * 0.01)
lower_bound, _ = torch.topk(x_transform, lower_index, largest=False, sorted=False)
upper_bound, _ = torch.topk(x_transform, input_length - upper_index, largest=True, sorted=False)
# print("lower_bound.shape = ", lower_bound.shape)
# print("w_min shape:", w_min.shape)
w_min = lower_bound.max(dim=1)[0]
w_max = upper_bound.min(dim=1)[0]
# print("w_min_new shape:", w_min.shape)
# for i in range(self.channel_num):
# w_min[i], w_max[i] = get_percentile_min_max(
# x_transform[i].view(-1), 0.1, 99.9, output_tensor=True)
elif self.group_quantization:
x_transform = w.data.transpose(0, 1).contiguous()
# w_min = torch.zeros(x_transform.size()[0])
# w_max = torch.zeros(x_transform.size()[0])
w_min = x_transform.min(dim=1)[0]
w_max = x_transform.max(dim=1)[0]
# please make sure group_length is an integer
group_length = w_max.size()[0] // self.group_number
if not self.weight_percentile:
temp_w_min = w_min.clone()
temp_w_max = w_max.clone()
# temp_w_min = x_transform.min(dim=1)[0]
# temp_w_max = x_transform.max(dim=1)[0]
for i in range(self.group_number):
w_min[i * group_length: (i + 1) * group_length] = \
temp_w_min[i * group_length: (i + 1) * group_length].min().repeat(group_length)
w_max[i * group_length: (i + 1) * group_length] = \
temp_w_max[i * group_length: (i + 1) * group_length].max().repeat(group_length)
# print("shape = ", temp_w_max[i * group_length: | |
encountered an uncategorized error condition
"""
Arg1 = self
return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
def ResumeEchoRequest(self, SessionIndices):
    """Executes the resumeEchoRequest operation on the server.

    Resume Sending Echo Request Messages

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): auto-generated overload — a later def of the same name
    # shadows this one at class-creation time.  The payload is built from
    # locals(), so the local/parameter names are part of the wire contract;
    # do not rename them.
    Arg1 = self
    return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
def ResumeEchoRequest(self, SessionIndices):
    """Executes the resumeEchoRequest operation on the server.

    Resume Sending Echo Request Messages

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by the next definition of
    # the same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
def ResumeEchoRequest(self, Arg2):
    """Executes the resumeEchoRequest operation on the server.

    Resume Sending Echo Request Messages.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): last generated definition of this name — in Python this
    # is the one that survives.  Payload names come from locals().
    Arg1 = self.href
    return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
def SendBarrierRequest(self):
    """Executes the sendBarrierRequest operation on the server.

    Send Barrier Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by later definitions of the
    # same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
def SendBarrierRequest(self, SessionIndices):
    """Executes the sendBarrierRequest operation on the server.

    Send Barrier Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by later definitions of the
    # same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
def SendBarrierRequest(self, SessionIndices):
    """Executes the sendBarrierRequest operation on the server.

    Send Barrier Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by the next definition of
    # the same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
def SendBarrierRequest(self, Arg2):
    """Executes the sendBarrierRequest operation on the server.

    Send Barrier Request to Switch.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): last generated definition of this name — in Python this
    # is the one that survives.  Payload names come from locals().
    Arg1 = self.href
    return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
def SendConfigRequest(self):
    """Executes the sendConfigRequest operation on the server.

    Send Config Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by later definitions of the
    # same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, SessionIndices):
    """Executes the sendConfigRequest operation on the server.

    Send Config Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by later definitions of the
    # same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, SessionIndices):
    """Executes the sendConfigRequest operation on the server.

    Send Config Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): generated overload shadowed by the next definition of
    # the same name; payload names come from locals() — do not rename.
    Arg1 = self
    return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, Arg2):
"""Executes the sendConfigRequest operation on the server.
Send Config Request to Switch.
Args:
Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
Returns:
list(str): ID to associate each async action invocation
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
Arg1 = self.href
return self._execute('SendConfigRequest', payload=locals(), response_object=None)
# NOTE(review): duplicated defs — only the LAST SendDescriptionStatRequest
# survives class creation (generated-code artifact); earlier ones document
# the alternative server call signatures.
def SendDescriptionStatRequest(self):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)

def SendDescriptionStatRequest(self, SessionIndices):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)

def SendDescriptionStatRequest(self, SessionIndices):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)

def SendDescriptionStatRequest(self, Arg2):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stats Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self.href  # this overload targets a single href, not the list
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)
# NOTE(review): duplicated defs — only the LAST SendEchoRequest survives class
# creation (generated-code artifact); earlier ones document the alternative
# server call signatures.
def SendEchoRequest(self, EnableEchoTimeout, EchoTimeoutVal):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)

def SendEchoRequest(self, EnableEchoTimeout, EchoTimeoutVal, SessionIndices):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)

def SendEchoRequest(self, SessionIndices, EnableEchoTimeout, EchoTimeoutVal):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
    """
    # Docstring fix: the generated Args descriptions were rotated by one
    # parameter (SessionIndices was documented as enableEchoTimeout, etc.);
    # they now match the signature and the sibling overloads.
    # Raises:
    #     NotFoundError: The requested resource does not exist on the server
    #     ServerError: The server has encountered an uncategorized error condition
    Arg1 = self  # bound as a local so payload=locals() carries it to the server
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)

def SendEchoRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request Messages.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (bool): Enable Echo Timeout
        Arg4 (number): Echo Timeout Value
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    Arg1 = self.href  # this overload targets a single href, not the list
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)
def SendExperimenterMessage(self, Arg2, Arg3, Arg4, Arg5, Arg6):
"""Executes the sendExperimenterMessage operation on the server.
Send Experimenter Message.
Args:
Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
Arg3 (number): Experimenter Data Length.
Arg4 (number): Experimenter ID.
Arg5 (number): Experimenter ID
Arg6 (str): Experimenter Data | |
fs.username = request.user.username
fs.repo_id = repo_id
fs.path = path
fs.token = token
try:
fs.save()
except IntegrityError, e:
return api_err(status.HTTP_500_INTERNAL_SERVER_ERROR, e.msg)
http_or_https = request.is_secure() and 'https' or 'http'
domain = RequestSite(request).domain
file_shared_link = '%s://%s%sf/%s/' % (http_or_https, domain,
settings.SITE_ROOT, token)
return Response(file_shared_link)
def get_repo_file(request, repo_id, file_id, file_name, op):
    """Dispatch a file request according to *op* (Python 2 code).

    op == 'download'     : return a JSON-encoded redirect URL to the file content
    op == 'downloadblks' : return the file's block id list plus a block-server URL
    op == 'sharelink'    : create/return a public share link ('p' query param required)

    The file object id is echoed back in the 'oid' response header.
    NOTE(review): an unrecognized *op* falls through and returns None — confirm
    callers only pass the three ops above.
    """
    if op == 'download':
        token = seafserv_rpc.web_get_access_token(repo_id, file_id,
                                                  op, request.user.username)
        redirect_url = gen_file_get_url(token, file_name)
        response = HttpResponse(json.dumps(redirect_url), status=200,
                                content_type=json_content_type)
        response["oid"] = file_id
        return response

    if op == 'downloadblks':
        blklist = []
        encrypted = False
        encversion = 0
        if file_id != EMPTY_SHA1:  # empty files have no blocks to list
            try:
                blks = wingufile_api.list_file_by_file_id(file_id)
            except SearpcError, e:
                return api_error(status.HTTP_520_OPERATION_FAILED,
                                 'Failed to get file block list')
            # one 40-char block id per line; drop any malformed/blank lines
            blklist = blks.split('\n')
            blklist = [i for i in blklist if len(i) == 40]
        if len(blklist) > 0:
            # encryption info is only relevant when there are blocks to fetch
            repo = get_repo(repo_id)
            encrypted = repo.encrypted
            encversion = repo.encversion
        token = seafserv_rpc.web_get_access_token(repo_id, file_id,
                                                  op, request.user.username)
        url = gen_block_get_url(token, None)
        res = {
            'blklist':blklist,
            'url':url,
            'encrypted':encrypted,
            'encversion': encversion,
        }
        response = HttpResponse(json.dumps(res), status=200,
                                content_type=json_content_type)
        response["oid"] = file_id
        return response

    if op == 'sharelink':
        path = request.GET.get('p', None)
        # NOTE(review): assert is stripped under `python -O`; a 400 api_error
        # would be a safer validation here.
        assert path, 'path must be passed in the url'
        return get_shared_link(request, repo_id, path)
def reloaddir(request, repo_id, parent_dir):
    """Return a fresh listing of *parent_dir* at the repo's current head
    commit, or an api_error response when the commit/path cannot be resolved.
    """
    current_commit = get_commits(repo_id, 0, 1)[0]
    if not current_commit:
        return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                         'Failed to get current commit of repo %s.' % repo_id)

    try:
        # resolve the directory object id for the path at this commit
        dir_id = seafserv_threaded_rpc.get_dirid_by_path(current_commit.id,
                                                         parent_dir.encode('utf-8'))
    except SearpcError, e:
        return api_error(HTTP_520_OPERATION_FAILED,
                         "Failed to get dir id by path")
    if not dir_id:
        return api_error(status.HTTP_404_NOT_FOUND, "Path does not exist")
    return get_dir_entrys_by_id(request, repo_id, parent_dir, dir_id)
def reloaddir_if_neccessary(request, repo_id, parent_dir):
    """Re-list *parent_dir* only when the client sent ``reloaddir=true``.

    Returns the refreshed directory listing in that case, otherwise a plain
    'success' response.  (The misspelled name is kept for caller compatibility.)
    """
    flag = request.GET.get('reloaddir', None)
    if flag and flag.lower() == 'true':
        return reloaddir(request, repo_id, parent_dir)
    return Response('success')
# deprecated
class OpDeleteView(APIView):
    """
    Delete one or more files from a repo in a single POST.
    Deprecated (see comment above); file names arrive ':'-separated in the
    'file_names' POST field, the parent directory in the 'p' query param.
    """
    authentication_classes = (TokenAuthentication, )
    permission_classes = (IsAuthenticated, IsRepoWritable, )

    def post(self, request, repo_id, format=None):
        repo = get_repo(repo_id)
        if not repo:
            return api_error(status.HTTP_404_NOT_FOUND, 'Repo not found.')
        resp = check_repo_access_permission(request, repo)
        if resp:
            return resp

        parent_dir = request.GET.get('p', '/')
        file_names = request.POST.get("file_names")
        if not parent_dir or not file_names:
            return api_error(status.HTTP_404_NOT_FOUND,
                             'File or directory not found.')

        # delete each listed name individually; the first RPC failure aborts
        # the loop and reports a 520, leaving earlier deletions applied
        for file_name in file_names.split(':'):
            file_name = unquote(file_name.encode('utf-8'))
            try:
                seafserv_threaded_rpc.del_file(repo_id, parent_dir,
                                               file_name, request.user.username)
            except SearpcError,e:
                return api_error(HTTP_520_OPERATION_FAILED,
                                 "Failed to delete file.")
        return reloaddir_if_neccessary (request, repo_id, parent_dir)
def append_starred_files(array, files):
    """Append one summary dict per starred-file record in *files* to *array*.

    Each record is expected to expose org_id, repo (with .id), path,
    last_modified, is_dir and size attributes.
    """
    for entry in files:
        array.append({
            'org': entry.org_id,
            'repo': entry.repo.id,
            'path': entry.path,
            'mtime': entry.last_modified,
            'dir': entry.is_dir,
            'size': entry.size,
        })
def api_starred_files(request):
    """Return the requesting user's personal starred files as a JSON list."""
    entries = []
    # -1: personal (non-organization) starred files
    personal = get_starred_files(request.user.username, -1)
    append_starred_files(entries, personal)
    return Response(entries)
class StarredFileView(APIView):
    """
    Support uniform interface for starred file operation,
    including add/delete/list starred files.
    """
    authentication_classes = (TokenAuthentication, )
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle, )

    def get(self, request, format=None):
        # List the requesting user's starred files.
        return api_starred_files(request)

    def post(self, request, format=None):
        # Star a single file; requires 'repo_id' and a URL-quoted path 'p'.
        repo_id = request.POST.get('repo_id', '')
        path = unquote(request.POST.get('p', '').encode('utf-8'))
        if not (repo_id and path):
            return api_error(status.HTTP_400_BAD_REQUEST,
                             'Repo_id or path is missing.')
        if path[-1] == '/':  # Should not contain '/' at the end of path.
            return api_error(status.HTTP_400_BAD_REQUEST, 'Invalid file path.')

        # org_id=-1 marks a personal (non-organization) star
        star_file(request.user.username, repo_id, path, is_dir=False,
                  org_id=-1)
        resp = Response('success', status=status.HTTP_201_CREATED)
        resp['Location'] = reverse('starredfiles')
        return resp

    def delete(self, request, format=None):
        # Unstar a single file; mirrors post() but reads the query string
        # (note: the path is NOT unquoted here, unlike in post()).
        repo_id = request.GET.get('repo_id', '')
        path = request.GET.get('p', '')
        if not (repo_id and path):
            return api_error(status.HTTP_400_BAD_REQUEST,
                             'Repo_id or path is missing.')
        if path[-1] == '/':  # Should not contain '/' at the end of path.
            return api_error(status.HTTP_400_BAD_REQUEST, 'Invalid file path.')
        unstar_file(request.user.username, repo_id, path)
        return Response('success', status=status.HTTP_200_OK)
class FileView(APIView):
"""
Support uniform interface for file related operations,
including create/delete/rename/view, etc.
"""
authentication_classes = (TokenAuthentication, )
permission_classes = (IsAuthenticated, IsRepoWritable, )
throttle_classes = (UserRateThrottle, )
def get(self, request, repo_id, format=None):
# view file
repo = get_repo(repo_id)
if not repo:
return api_error(status.HTTP_404_NOT_FOUND, 'Repo not found.')
resp = check_repo_access_permission(request, repo)
if resp:
return resp
path = request.GET.get('p', None)
if not path:
return api_error(status.HTTP_400_BAD_REQUEST, 'Path is missing.')
file_name = os.path.basename(path)
file_id = None
try:
file_id = seafserv_threaded_rpc.get_file_id_by_path(repo_id,
path.encode('utf-8'))
except SearpcError, e:
return api_error(HTTP_520_OPERATION_FAILED,
"Failed to get file id by path.")
if not file_id:
return api_error(status.HTTP_404_NOT_FOUND, "File not found")
op = request.GET.get('op', 'download')
return get_repo_file(request, repo_id, file_id, file_name, op)
def post(self, request, repo_id, format=None):
# rename or move file
repo = get_repo(repo_id)
if not repo:
return api_error(status.HTTP_404_NOT_FOUND, 'Repo not found.')
resp = check_repo_access_permission(request, repo)
if resp:
return resp
path = request.GET.get('p', '')
if not path or path[0] != '/':
return api_error(status.HTTP_400_BAD_REQUEST,
'Path is missing or invalid.')
operation = request.POST.get('operation', '')
if operation.lower() == 'rename':
newname = request.POST.get('newname', '')
if not newname:
return api_error(status.HTTP_400_BAD_REQUEST,
'Newname is missing')
newname = unquote(newname.encode('utf-8'))
if len(newname) > settings.MAX_UPLOAD_FILE_NAME_LEN:
return api_error(status.HTTP_400_BAD_REQUEST, 'Newname too long')
parent_dir = os.path.dirname(path)
parent_dir_utf8 = parent_dir.encode('utf-8')
oldname = os.path.basename(path)
oldname_utf8 = oldname.encode('utf-8')
if oldname == newname:
return api_error(status.HTTP_409_CONFLICT,
'The new name is the same to the old')
newname = check_filename_with_rename(repo_id, parent_dir, newname)
newname_utf8 = newname.encode('utf-8')
try:
seafserv_threaded_rpc.rename_file (repo_id, parent_dir_utf8,
oldname_utf8, newname,
request.user.username)
except SearpcError,e:
return api_error(HTTP_520_OPERATION_FAILED,
"Failed to rename file: %s" % e)
if request.GET.get('reloaddir', '').lower() == 'true':
return reloaddir(request, repo_id, parent_dir)
else:
resp = Response('success', status=status.HTTP_301_MOVED_PERMANENTLY)
uri = reverse('FileView', args=[repo_id], request=request)
resp['Location'] = uri + '?p=' + quote(parent_dir_utf8) + quote(newname_utf8)
return resp
elif operation.lower() == 'move':
src_dir = os.path.dirname(path)
src_dir_utf8 = src_dir.encode('utf-8')
src_repo_id = repo_id
dst_repo_id = request.POST.get('dst_repo', '')
dst_dir = request.POST.get('dst_dir', '')
dst_dir_utf8 = dst_dir.encode('utf-8')
if dst_dir[-1] != '/': # Append '/' to the end of directory if necessary
dst_dir += '/'
# obj_names = request.POST.get('obj_names', '')
if not (dst_repo_id and dst_dir):
return api_error(status.HTTP_400_BAD_REQUEST, 'Missing arguments.')
if src_repo_id == dst_repo_id and src_dir == dst_dir:
return Response('success', status=status.HTTP_200_OK)
# names = obj_names.split(':')
# names = map(lambda x: unquote(x).decode('utf-8'), names)
# if dst_dir.startswith(src_dir):
# for obj_name in names:
# if dst_dir.startswith('/'.join([src_dir, obj_name])):
# return api_error(status.HTTP_409_CONFLICT,
# 'Can not move a dirctory to its subdir')
filename = os.path.basename(path)
filename_utf8 = filename.encode('utf-8')
new_filename = check_filename_with_rename(dst_repo_id, dst_dir,
filename)
new_filename_utf8 = new_filename.encode('utf-8')
try:
seafserv_threaded_rpc.move_file(src_repo_id, src_dir_utf8,
filename_utf8, dst_repo_id,
dst_dir_utf8, new_filename_utf8,
request.user.username)
except SearpcError, e:
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
"SearpcError:" + e.msg)
if request.GET.get('reloaddir', '').lower() == 'true':
return reloaddir(request, dst_repo_id, dst_dir)
else:
resp = Response('success', status=status.HTTP_301_MOVED_PERMANENTLY)
uri = reverse('FileView', args=[repo_id], request=request)
resp['Location'] = uri + '?p=' + quote(dst_dir_utf8) + quote(new_filename_utf8)
return resp
elif operation.lower() == 'create':
parent_dir = os.path.dirname(path)
parent_dir_utf8 = parent_dir.encode('utf-8')
new_file_name = os.path.basename(path)
new_file_name = check_filename_with_rename(repo_id, parent_dir,
new_file_name)
new_file_name_utf8 = new_file_name.encode('utf-8')
try:
seafserv_threaded_rpc.post_empty_file(repo_id, parent_dir,
new_file_name,
request.user.username)
except SearpcError, e:
return api_error(HTTP_520_OPERATION_FAILED,
'Failed to make directory.')
if request.GET.get('reloaddir', '').lower() == 'true':
return reloaddir(request, repo_id, parent_dir)
else:
resp = Response('success', status=status.HTTP_201_CREATED)
uri = reverse('FileView', args=[repo_id], request=request)
resp['Location'] = uri + '?p=' + quote(parent_dir_utf8) + \
quote(new_file_name_utf8)
return resp
else:
return api_error(status.HTTP_400_BAD_REQUEST,
"Operation can only be rename, create or move.")
def put(self, request, repo_id, format=None):
# update file
# TODO
pass
def delete(self, request, repo_id, format=None):
# delete file
repo = get_repo(repo_id)
if not repo:
return api_error(status.HTTP_404_NOT_FOUND, 'Repo not found.')
resp = check_repo_access_permission(request, repo)
if resp:
return resp
path = request.GET.get('p', None)
if not path:
return api_error(status.HTTP_400_BAD_REQUEST, 'Path is missing.')
parent_dir = os.path.dirname(path)
parent_dir_utf8 = os.path.dirname(path).encode('utf-8')
file_name_utf8 = os.path.basename(path).encode('utf-8')
try:
seafserv_threaded_rpc.del_file(repo_id, parent_dir_utf8,
file_name_utf8,
request.user.username)
except SearpcError, e:
return api_error(HTTP_520_OPERATION_FAILED,
"Failed to delete file.")
return reloaddir_if_neccessary(request, repo_id, parent_dir)
class FileSharedLinkView(APIView):
"""
Support uniform interface for file shared link.
"""
authentication_classes = (TokenAuthentication, )
permission_classes = (IsAuthenticated, )
throttle_classes = (UserRateThrottle, )
def put(self, request, repo_id, format=None):
# generate file shared link
path = unquote(request.DATA.get('p', '').encode('utf-8'))
if not path:
return api_error(status.HTTP_400_BAD_REQUEST, 'Path is missing.')
if path[-1] == '/':
path = path[:-1]
l = FileShare.objects.filter(repo_id=repo_id).filter(
username=request.user.username).filter(path=path)
if len(l) > 0:
fileshare = l[0]
token = fileshare.token
else:
token = gen_token(max_length=10)
fs = FileShare()
fs.username = request.user.username
fs.repo_id = repo_id
fs.path = path
fs.token = token
try:
fs.save()
except IntegrityError, e:
return api_err(status.HTTP_500_INTERNAL_SERVER_ERROR, e.msg)
http_or_https = request.is_secure() and 'https' or 'http'
domain = RequestSite(request).domain
file_shared_link = '%s://%s%sf/%s/' % (http_or_https, domain,
settings.SITE_ROOT, token)
resp = Response(status=status.HTTP_201_CREATED)
resp['Location'] = file_shared_link
return resp
class DirView(APIView):
"""
Support uniform interface for directory operations, including
create/delete/rename/list, etc.
"""
authentication_classes = (TokenAuthentication, )
permission_classes = (IsAuthenticated, IsRepoWritable, )
throttle_classes = (UserRateThrottle, )
def get(self, request, repo_id, format=None):
# list dir
repo = get_repo(repo_id)
if not repo:
return api_error(status.HTTP_404_NOT_FOUND, 'Repo not | |
#
# Sublime Text 2/3 SimpleSync plugin
#
# based on https://github.com/tnhu/SimpleSync
#
import os
# import sys
import platform
import subprocess
import threading
# import re
import sublime
import sublime_plugin
import zipfile
# print(os.path.join(sublime.packages_path(), 'Default'))
# Caches
#__name__ # ST3 bug with __name__
BASE_PATH = os.path.abspath(os.path.dirname(__file__))  # directory containing this plugin file
PACKAGE_NAME = 'SublimeSimpleSync'
PACKAGE_SETTINGS = PACKAGE_NAME + '.sublime-settings'  # settings file name
OS = platform.system()  # 'Windows' / 'Darwin' / other; selects the copy strategy
IS_GTE_ST3 = int(sublime.version()[0]) >= 3  # True on Sublime Text 3 and later
VERSION = '20140721'  # release stamp, compared against the installed 'version' file
class syncCommand():
    """Shared helpers for the sync commands: settings access, active-file
    lookup, rule matching and dispatching the copy (scp or local)."""

    def getSetting(self):
        """Load the plugin's settings file."""
        return sublime.load_settings(PACKAGE_SETTINGS)

    def getPath(self):
        """Return the active view's file name; when there is no active view,
        fall back to prompting for a pasted path and return False."""
        view = self.window.active_view()
        if view:
            return view.file_name()
        self.syncPastePath()
        return False

    def getSyncItem(self, localFile):
        """Collect every rule whose 'local' prefix matches *localFile*
        (multiple rules may apply to the same file)."""
        return [rule for rule in self.rules
                if localFile.startswith(rule['local'])]

    def syncFile(self, localFile):
        """Start one copier per matching rule for *localFile*."""
        for rule in self.getSyncItem(localFile):
            relPath = localFile.replace(rule['local'], '')
            remoteFile = rule['remote'] + '/' + relPath
            if rule['type'] == 'ssh':
                secret = rule['password'] if 'password' in rule else ''
                ScpCopier(rule['host'], rule['username'], secret,
                          localFile, remoteFile,
                          port=rule['port'], relPath=relPath).start()
            elif rule['type'] == 'local':
                LocalCopier(localFile, remoteFile).start()

    def syncPastePath(self):
        """Prompt the user to paste a local file path, then sync it."""
        def on_done(file_path):
            if not file_path:
                return
            self.syncFile(file_path)
        self.window.show_input_panel(
            '[%s] Copy and paste local file path :' % (PACKAGE_NAME),
            '', on_done, None, None)
# Bound to: { "keys": ["alt+shift+s"], "command": "sublime_simple_sync_path" }
class SublimeSimpleSyncPathCommand(sublime_plugin.WindowCommand, syncCommand):
    """Window command: prompt for a pasted local path and sync it."""

    def run(self):
        self.rules = self.getSetting().get('rules')
        self.syncPastePath()
# Bound to: { "keys": ["alt+s"], "command": "sublime_simple_sync" }
class SublimeSimpleSyncCommand(sublime_plugin.WindowCommand, syncCommand):
    """Window command: save the active file, then sync it."""

    def run(self):
        self.rules = self.getSetting().get('rules')
        # Save first so the copy picks up the latest buffer contents.
        self.window.run_command('save')
        localFile = self.getPath()
        if localFile is not False:
            self.syncFile(localFile)
# Auto-run listener (sublime_plugin.EventListener)
class SimpleSync(sublime_plugin.EventListener, syncCommand):
    """Event listener: when 'autoSync' is enabled in the config, sync every
    file as it is saved."""

    def on_post_save(self, view):
        settings = self.getSetting()
        config = settings.get('config', [])
        auto_sync = config['autoSync'] if 'autoSync' in config else False
        if auto_sync:
            self.rules = settings.get('rules')
            self.syncFile(view.file_name())
class Command(object):
    """Run a shell command on a worker thread, killing it after *timeout*.

    Example::
        command = Command("echo 'Process started'; sleep 2; echo 'Process finished'")
        command.run(timeout=3)
    """

    def __init__(self, cmd, debug=False, expect_cmd=None):
        self.cmd = cmd
        self.expect_cmd = expect_cmd  # optional expect(1) wrapper (scp password feeding)
        self.process = None
        self.msg = None  # combined stdout+stderr text once the command finishes
        self.debug = debug

    def run(self, timeout=10, shell=True):
        """Execute the command, waiting at most *timeout* seconds before
        terminating it."""
        def worker():
            if self.debug:
                print('Thread started')
            command_line = self.expect_cmd if self.expect_cmd else self.cmd
            self.process = subprocess.Popen(command_line,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT,
                                            shell=shell)
            stdout, _stderr = self.process.communicate()
            if self.debug:
                print('Thread finished')
            self.msg = stdout.decode('utf-8')

        thread = threading.Thread(target=worker)
        thread.start()
        thread.join(timeout)
        if thread.is_alive():
            if self.debug:
                print('Terminating process')
            # command outlived the timeout: kill it and wait for the thread
            self.process.terminate()
            thread.join()

    def store_key(self, shell=True):
        """Open a real terminal running the command so the user can accept /
        store the remote host key interactively.  Returns False on failure."""
        ok = True
        if OS != 'Windows':
            self.cmd = self.cmd.replace('"', '\\"')
        if OS == 'Darwin':
            args = [
                'osascript -e \'tell app "Terminal" to do script "%s"\'' % (self.cmd),
                'open -W -a Terminal'
            ]
        else:
            args = [
                'gnome-terminal --tab -e "%s"' % (self.cmd)
            ]
        if subprocess.call(';'.join(args), shell=shell) != 0:
            ok = False
        return ok
# ScpCopier does actual copying using threading to avoid UI blocking
class ScpCopier(threading.Thread, syncCommand):
    """Copy one file to a remote host via scp (pscp.exe on Windows) on a
    background thread, showing progress/errors through the Sublime UI."""

    def __init__(self, host, username, password, localFile, remoteFile, port=22, relPath=''):
        self.host = host
        self.port = port
        self.username = username
        self.password = password  # empty string: rely on SSH keys instead
        self.localFile = localFile
        self.remoteFile = remoteFile
        self.relPath = relPath  # path relative to the rule's 'local' root (for messages)
        settings = self.getSetting()
        config = settings.get('config')
        self.debug = config['debug'] if 'debug' in config else False
        self.timeout = config['timeout'] if 'timeout' in config else 10
        threading.Thread.__init__(self)

    def run(self):
        packageDir = os.path.join(sublime.packages_path(), PACKAGE_NAME)
        # normalise Windows path separators in the remote path
        self.remoteFile = self.remoteFile.replace('\\', '/').replace('//', '/')
        remote = self.username + '@' + self.host + ':' + self.remoteFile
        pw = []
        # common scp options: recursive, compressed, explicit port, quoted paths
        ext = ['-r', '-C', '-P', str(self.port), '\"%s\"' % (self.localFile), '\"%s\"' % (remote)]
        shell = True
        if OS == 'Windows':
            # the plugin ships PuTTY's pscp.exe alongside this file
            scp = '\"%s\"' % (os.path.join(packageDir, 'pscp.exe'))
            args = [scp]
            if self.password:
                pw = ['-pw', self.password]
                args.extend(pw)
            shell = False
        else:
            args = ['scp']
        args.extend(ext)
        run_cmd = ' '.join(args)
        print(PACKAGE_NAME + ': ' + run_cmd)

        expect_cmd = None
        if OS != 'Windows' and self.password:  # use password, ignore authorized_keys
            # drive scp through expect(1) so the password can be typed in
            expect_cmd = r'''
            expect -c "
            set timeout {timeout};
            spawn {cmd};
            expect *password* {{ send \"{password}\r\" }};
            expect "100%"
            expect eof"
            '''.format(cmd=run_cmd, password=self.password, timeout=self.timeout)

        self.i = 1
        self.done = False
        self.success = False

        def show_loading():
            # animate a small marker in the status bar until the copy finishes
            if self.i % 2 == 0:
                s = 0
                e = 3
            else:
                s = 3
                e = 0
            if not self.done:
                sublime.status_message('%s [%s=%s]' % (PACKAGE_NAME, ' ' * s, ' ' * e))
                sublime.set_timeout(show_loading, 500)
                self.i += 1
            else:
                msg = 'Completed!' if self.success else 'Sync failed!'
                sublime.status_message('%s: %s' % (PACKAGE_NAME, msg))
        show_loading()

        def sync_folder():
            # retry one directory level up (e.g. the remote folder is missing)
            self.localFile = os.path.dirname(self.localFile)
            self.remoteFile = os.path.dirname(os.path.dirname(self.remoteFile))
            ScpCopier(self.host, self.username, self.password, self.localFile, self.remoteFile, self.port).start()

        def show_msg(msg):
            # crude classification of scp/pscp output into user-facing actions
            find_msg = msg.lower()
            if find_msg.find('No such file or directory'.lower()) != -1:
                if sublime.ok_cancel_dialog('No such file or directory\n' + self.relPath + '\n' + '* Do you want to sync the parent folder?'):
                    sync_folder()
            elif find_msg.find('continue connecting'.lower()) != -1:
                # unknown host key: open a real terminal so the user can accept it
                msg = 'Please run this command once: \n'
                msg += run_cmd + '\n'
                msg += '*** Also, you can copy this command via "Console"(ctrl+` shortcut).'
                if not command.store_key(shell=shell):
                    self.success = False
                    sublime.message_dialog(msg)
            elif find_msg.find('Host key verification failed'.lower()) != -1:
                msg = 'Please generate SSH public-key and run: \n'
                # NOTE(review): if the rule's 'port' is an int, this string
                # concatenation with self.port raises TypeError — confirm the
                # configured type.
                msg += 'ssh -p ' + self.port + ' ' + self.username + '@' + self.host + " 'mkdir -p ~/.ssh && cat >> ~/.ssh/authorized_keys' < ~/.ssh/id_rsa.pub \n"
                sublime.message_dialog(msg)
            elif find_msg.find('Permission denied (publickey,password)'.lower()) != -1:  # authorization failed
                msg = 'Scp auth faild. Please check your sshd_config, and enable AuthorizedKeysFile!'
                sublime.message_dialog(msg)
            elif find_msg.find('100%') != -1:
                # scp progress output reports '100%' on completion
                self.success = True
            elif find_msg.find('s password:') != -1:
                # expect timed out while still sitting at the password prompt
                msg = 'Please enlarge the ["config"]["timeout"] in %s settings (Default: 10)' % (PACKAGE_NAME)
                sublime.message_dialog(msg)
            else:
                if msg:
                    sublime.message_dialog(msg)
                else:
                    self.success = True

        try:
            if OS == 'Windows':
                retcode = subprocess.call(run_cmd)
                if self.debug:
                    print('returncode:', retcode)
                if retcode != 0:
                    # sync failed
                    self.success = False
                    msg = 'Please verify that your settings(username, password, host, port) is correct and try again'
                    sublime.message_dialog(msg)
                else:
                    self.success = True
            else:
                command = Command(run_cmd, debug=self.debug, expect_cmd=expect_cmd)
                command.run(timeout=self.timeout, shell=shell)
                if self.debug:
                    print('msg:', command.msg, 'returncode:', command.process.returncode)
                show_msg(command.msg)
            self.done = True
        except Exception as exception:
            # Alert "SimpleSync: No file_name", if the file size is zero.
            sublime.error_message(PACKAGE_NAME + ': ' + str(exception))
# LocalCopier does local copying using threading to avoid UI blocking
class LocalCopier(threading.Thread, syncCommand):
    """Copy a file to another local path on a background thread."""

    def __init__(self, localFile, remoteFile):
        self.localFile = localFile
        self.remoteFile = remoteFile
        threading.Thread.__init__(self)

    def run(self):
        if OS == 'Windows':
            cmd = ['xcopy', '/y', '/s', '/h']
            # normalise 'C:\test/\test\' style joins, then keep only the
            # destination *directory* (xcopy expects a folder target)
            target = self.remoteFile.replace('/\\', '\\').rstrip('/')
            self.remoteFile = os.path.dirname(target) + '\\'
        else:
            cmd = ['cp']
        cmd.extend([self.localFile, self.remoteFile])
        command_line = ' '.join(cmd)
        print(PACKAGE_NAME + ': ' + command_line)
        try:
            retcode = subprocess.call(command_line, shell=True)
            print(retcode)
            status = 'Completed!' if retcode == 0 else 'Sync failed!'
            sublime.status_message('%s: %s' % (PACKAGE_NAME, status))
        except Exception as exception:
            sublime.error_message(PACKAGE_NAME + ': ' + str(exception))
def plugin_loaded(): # for ST3 >= 3016
PACKAGES_PATH = sublime.packages_path()
TARGET_PATH = os.path.join(PACKAGES_PATH, PACKAGE_NAME)
# print(TARGET_PATH);
# first run
version_file = os.path.join(TARGET_PATH, 'version')
# print(PACKAGE_NAME, VERSION, version_file)
version = 0
if os.path.isfile(version_file):
f = open(version_file, 'r')
version = f.read().strip() # lines = f.readlines()
f.close()
# print(VERSION, version, VERSION == version)
if not os.path.isdir(TARGET_PATH) or not VERSION == version:
# copy files
file_list = [
'Main.sublime-menu', 'pscp.exe',
# | |
def test_simple_array_alloc_scatter(self):
    """fill_dynamic allocates a zeroed array from a runtime shape; scatter
    then writes `value` into row index 1 of that array."""
    alloc_shape = [2, 3, 4]
    value_shape = [1, 3, 4]
    input_features = [('alloc_shape', datatypes.Array(len(alloc_shape))),
                      ('value', datatypes.Array(*value_shape)),
                      ('index', datatypes.Array(1))]
    output_features = [('output', None)]
    builder = neural_network.NeuralNetworkBuilder(
        input_features, output_features, disable_rank5_shape_mapping=True)
    # BUGFIX: np.float was a deprecated alias of the builtin `float` and is
    # removed in NumPy >= 1.24 (AttributeError); pass a plain float instead.
    builder.add_fill_dynamic(name='fill_dynamic_layer', input_name='alloc_shape',
                             output_name='array', value=0.0)
    # CoreML input order: container (array), indices, slices (value)
    builder.add_scatter(name='scatter_layer',
                        input_names=['array', 'index', 'value'],
                        output_name='output')

    value = np.random.rand(*value_shape).astype('float')
    feed = {'alloc_shape': np.array(alloc_shape, dtype='float'),
            'value': value,
            'index': np.array([1], dtype='float')}
    # reference: zeros everywhere except the scattered slice
    ref = np.zeros(alloc_shape)
    ref[1, :, :] = value
    expected = {'output': ref}
    self._test_model(builder.spec, feed, expected, useCPUOnly=True)
def test_erf_activation_cpu(self, cpu_only=True):
    """Check the erf layer against math.erf applied elementwise."""
    shape = (10, 45)
    in_feats = [('data', datatypes.Array(*shape))]
    out_feats = [('output', datatypes.Array(*shape))]
    builder = neural_network.NeuralNetworkBuilder(
        in_feats, out_feats, disable_rank5_shape_mapping=True)
    builder.add_erf(name='erf', input_name='data',
                    output_name='output')
    data = np.random.rand(*shape)
    # Elementwise reference computed with the stdlib erf.
    reference = np.asarray(
        [math.erf(v) for v in data.flatten().tolist()]).reshape(*shape)
    self._test_model(builder.spec, {'data': data},
                     {'output': reference}, useCPUOnly=cpu_only)
def test_erf_activation_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_erf_activation_cpu(cpu_only=False)
def test_gelu_activation(self):
    """Exercise the GELU layer in all three modes over ranks 1-5.

    The reference is exact GELU; the approximation modes must match it
    within the model-comparison tolerance.
    """
    for mode in ['EXACT', 'TANH_APPROXIMATION', 'SIGMOID_APPROXIMATION']:
        for rank in range(1, 6):
            dims = np.random.randint(low=2, high=5, size=rank)
            builder = neural_network.NeuralNetworkBuilder(
                [('data', datatypes.Array(*dims))],
                [('output', None)],
                disable_rank5_shape_mapping=True)
            builder.add_gelu(name='gelu', input_name='data',
                             output_name='output', mode=mode)
            data = np.random.rand(*dims)
            # Exact GELU: 0.5 * x * (1 + erf(x / sqrt(2)))
            reference = np.asarray(
                [0.5 * v * (1.0 + math.erf(v / math.sqrt(2)))
                 for v in data.flatten().tolist()]).reshape(*dims)
            self._test_model(builder.spec, {'data': data},
                             {'output': reference}, useCPUOnly=True)
def test_lower_triangular_cpu(self, cpu_only=True):
    """Compare the tril layer with np.tril over ranks 2-5 and offsets k in [-3, 3]."""
    for rank in range(2, 6):
        for k in range(-3, 4):
            dims = np.random.randint(low=2, high=6, size=rank)
            builder = neural_network.NeuralNetworkBuilder(
                [('data', datatypes.Array(*dims))],
                [('output', None)],
                disable_rank5_shape_mapping=True)
            builder.add_lower_triangular('tril', 'data', 'output', k=k)
            data = np.random.rand(*dims)
            self._test_model(builder.spec, {'data': data},
                             {'output': np.tril(data, k=k)},
                             useCPUOnly=cpu_only)
def test_lower_triangular_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_lower_triangular_cpu(cpu_only=False)
def test_upper_triangular_cpu(self, cpu_only=True):
    """Compare the triu layer with np.triu over ranks 2-5 and offsets k in [-3, 3]."""
    for rank in range(2, 6):
        for k in range(-3, 4):
            dims = np.random.randint(low=2, high=6, size=rank)
            builder = neural_network.NeuralNetworkBuilder(
                [('data', datatypes.Array(*dims))],
                [('output', None)],
                disable_rank5_shape_mapping=True)
            builder.add_upper_triangular('triu', 'data', 'output', k=k)
            data = np.random.rand(*dims)
            self._test_model(builder.spec, {'data': data},
                             {'output': np.triu(data, k=k)},
                             useCPUOnly=cpu_only)
def test_upper_triangular_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_upper_triangular_cpu(cpu_only=False)
def test_where_broadcastable_cpu(self, cpu_only=True):
    """Fuzz the where_broadcastable layer against np.where.

    Draws 150 random shape triples for (cond, true, false), then rewrites
    their trailing dimensions so every pair is broadcast-compatible
    (each trailing dim is either a shared size or 1).
    """
    for _ in range(150):
        rank_cond = np.random.randint(low=1, high=6)
        rank_true = np.random.randint(low=1, high=6)
        rank_false = np.random.randint(low=1, high=6)
        rank_out = max(rank_cond, rank_true, rank_false)
        shape_cond = np.random.randint(low=2, high=8, size=rank_cond)
        shape_true = np.random.randint(low=2, high=8, size=rank_true)
        shape_false = np.random.randint(low=2, high=8, size=rank_false)
        # Walk trailing axes (i = -1, -2, ...): pick one of the sizes present
        # at this axis, then force each input's axis to that size or 1 so the
        # three shapes broadcast together.
        for i in range(-1, -rank_out - 1, -1):
            dims = []
            if -i <= rank_cond: dims.append(shape_cond[i])
            if -i <= rank_true: dims.append(shape_true[i])
            if -i <= rank_false: dims.append(shape_false[i])
            dim = np.random.choice(dims)
            if -i <= rank_cond: shape_cond[i] = np.random.choice([1, dim])
            if -i <= rank_true: shape_true[i] = np.random.choice([1, dim])
            if -i <= rank_false: shape_false[i] = np.random.choice([1, dim])
        input_features = [
            ('cond', datatypes.Array(*shape_cond)),
            ('true', datatypes.Array(*shape_true)),
            ('false', datatypes.Array(*shape_false))
        ]
        output_features = [('output', None)]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, output_features, disable_rank5_shape_mapping=True)
        builder.add_where_broadcastable('if_broadcastable', input_names=['cond', 'true', 'false'],
                                        output_name='output')
        cond = np.random.choice([1.0, 0.0], size=shape_cond)
        true = np.random.rand(*shape_true)
        false = np.random.rand(*shape_false)
        input = {'cond': cond, 'true': true, 'false': false}
        expected = {'output': np.where(cond, true, false)}
        self._test_model(builder.spec, input, expected, useCPUOnly=cpu_only)
        # Builder's inferred rank must match the broadcast result's rank.
        self.assertEqual(len(expected['output'].shape), builder._get_rank('output'))
def test_where_broadcastable_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_where_broadcastable_cpu(cpu_only=False)
def test_random_normal_like_cpu(self, cpu_only=True):
    """Check random_normal_like by comparing output moments to np.random.normal.

    Ranks 5..1 use roughly 1000-2000 elements with 2 moments; the extra
    rank-0 iteration swaps in a large fixed shape so 6 moments can be
    compared reliably.
    """
    mean, stddev, seed = 0., 1., 42
    for rank in range(5, -1, -1):
        if rank > 0:
            # Pick per-axis bounds so the total element count stays near
            # 1000-2000 regardless of rank.
            low_factor = np.random.randint(low=2, high=4)
            low = int(np.power(1000, 1. / rank)) * low_factor
            high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
            shape = np.random.randint(low=low, high=high, size=rank)
        else:  # one extra test to test more moments
            shape = np.array([10, 10, 10, 10, 10000])
        input_features = [('tensor', datatypes.Array(*shape))]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, [('output', None)], disable_rank5_shape_mapping=True)
        builder.add_random_normal_like(name='random_normal_like',
                                       input_name='tensor',
                                       output_name='output',
                                       mean=mean, stddev=stddev, seed=seed)
        inputs = {'tensor': np.random.rand(*shape)}
        # The reference distribution; only statistical moments are compared,
        # not individual samples.
        expected = {'output': np.random.normal(mean, stddev, shape)}
        if rank > 0:
            CorrectnessTest._compare_moments(builder.spec, inputs, expected, num_moments=2)
            self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        else:  # one extra test to test more moments
            CorrectnessTest._compare_moments(builder.spec, inputs, expected, num_moments=6)
def test_random_normal_like_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_normal_like_cpu(cpu_only=False)
def test_random_normal_static_cpu(self, cpu_only=True):
    """Check random_normal_static by comparing output moments to np.random.normal."""
    mean, stddev, seed = 0., 1., 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        dims = np.random.randint(low=low, high=high, size=rank)
        builder = neural_network.NeuralNetworkBuilder(
            [('data', datatypes.Array(*dims))],
            [('output', None)],
            disable_rank5_shape_mapping=True)
        builder.add_random_normal_static(name='random_normal_static',
                                         output_name='tmp',
                                         output_shape=list(dims),
                                         mean=mean, stddev=stddev, seed=seed)
        # Adding a zero input routes the random tensor through to 'output'.
        builder.add_elementwise('add_layer', ['data', 'tmp'], 'output', mode='ADD')
        zeros = np.zeros(dims)
        inputs = {'data': zeros}
        expected = {'output': zeros + np.random.normal(mean, stddev, dims)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected, num_moments=2)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        self.assertEqual(rank, builder._get_rank('output'))
def test_random_normal_static_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_normal_static_cpu(cpu_only=False)
def test_random_normal_dynamic_cpu(self, cpu_only=True):
    """Check random_normal_dynamic (shape fed at runtime) via moment comparison."""
    mean, stddev, seed = 0., 1., 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        shape = np.random.randint(low=low, high=high, size=rank)
        input_features = [('shape', datatypes.Array(len(shape)))]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, [('output', None)], disable_rank5_shape_mapping=True)
        builder.add_random_normal_dynamic(name='random_normal_dynamic',
                                          input_names=['shape'],
                                          output_name='output',
                                          mean=mean, stddev=stddev, seed=seed)
        # Fix: np.float is a deprecated alias removed in NumPy 1.24;
        # the builtin float gives the same float64 dtype.
        inputs = {'shape': np.array(shape, dtype=float)}
        expected = {'output': np.random.normal(mean, stddev, shape)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected, num_moments=2)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        # Rank is unknowable at build time for a dynamic shape, hence -1.
        self.assertEqual(-1, builder._get_rank('output'))
def test_random_normal_dynamic_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_normal_dynamic_cpu(cpu_only=False)
def test_random_uniform_like_cpu(self, cpu_only=True):
    """Check random_uniform_like by comparing output moments to np.random.uniform."""
    minval, maxval, seed = 0., 1., 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        dims = np.random.randint(low=low, high=high, size=rank)
        builder = neural_network.NeuralNetworkBuilder(
            [('tensor', datatypes.Array(*dims))],
            [('output', None)],
            disable_rank5_shape_mapping=True)
        builder.add_random_uniform_like(name='random_uniform_like',
                                        input_name='tensor',
                                        output_name='output',
                                        minval=minval, maxval=maxval, seed=seed)
        source = np.random.rand(*dims)
        inputs = {'tensor': source}
        expected = {'output': np.random.uniform(minval, maxval, dims)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        self.assertEqual(rank, builder._get_rank('output'))
def test_random_uniform_like_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_uniform_like_cpu(cpu_only=False)
def test_random_uniform_static_cpu(self, cpu_only=True):
    """Check random_uniform_static by comparing output moments to np.random.uniform."""
    minval, maxval, seed = 0., 1., 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        dims = np.random.randint(low=low, high=high, size=rank)
        builder = neural_network.NeuralNetworkBuilder(
            [('data', datatypes.Array(*dims))],
            [('output', None)],
            disable_rank5_shape_mapping=True)
        builder.add_random_uniform_static(name='random_uniform_static',
                                          output_name='tmp',
                                          output_shape=list(dims),
                                          minval=minval, maxval=maxval, seed=seed)
        # Adding a zero input routes the random tensor through to 'output'.
        builder.add_elementwise('add_layer', ['data', 'tmp'], 'output', mode='ADD')
        zeros = np.zeros(dims)
        inputs = {'data': zeros}
        expected = {'output': zeros + np.random.uniform(minval, maxval, dims)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        self.assertEqual(rank, builder._get_rank('output'))
def test_random_uniform_static_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_uniform_static_cpu(cpu_only=False)
def test_random_uniform_dynamic_cpu(self, cpu_only=True):
    """Check random_uniform_dynamic (shape fed at runtime) via moment comparison."""
    minval, maxval, seed = 0., 1., 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        shape = np.random.randint(low=low, high=high, size=rank)
        input_features = [('shape', datatypes.Array(len(shape)))]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, [('output', None)], disable_rank5_shape_mapping=True)
        builder.add_random_uniform_dynamic(name='random_uniform_dynamic',
                                           input_names=['shape'],
                                           output_name='output',
                                           minval=minval, maxval=maxval, seed=seed)
        # Fix: np.float is a deprecated alias removed in NumPy 1.24;
        # the builtin float gives the same float64 dtype.
        inputs = {'shape': np.array(shape, dtype=float)}
        expected = {'output': np.random.uniform(minval, maxval, shape)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
        # Rank is unknowable at build time for a dynamic shape, hence -1.
        self.assertEqual(-1, builder._get_rank('output'))
def test_random_uniform_dynamic_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_uniform_dynamic_cpu(cpu_only=False)
def test_random_bernoulli_like_cpu(self, cpu_only=True):
    """Check random_bernoulli_like by comparing output moments to np.random.binomial."""
    prob, seed = 0.5, 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        dims = np.random.randint(low=low, high=high, size=rank)
        builder = neural_network.NeuralNetworkBuilder(
            [('tensor', datatypes.Array(*dims))],
            [('output', None)],
            disable_rank5_shape_mapping=True)
        builder.add_random_bernoulli_like(name='random_bernoulli_like',
                                          input_name='tensor',
                                          output_name='output',
                                          prob=prob, seed=seed)
        source = np.random.rand(*dims)
        inputs = {'tensor': source}
        expected = {'output': np.random.binomial(1, prob, dims)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
def test_random_bernoulli_like_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_bernoulli_like_cpu(cpu_only=False)
def test_random_bernoulli_static_cpu(self, cpu_only=True):
    """Check random_bernoulli_static by comparing output moments to np.random.binomial.

    The random tensor is added to a zero input so it flows through to
    'output' unchanged.
    """
    prob, seed = 0.5, 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        shape = np.random.randint(low=low, high=high, size=rank)
        input_features = [('data', datatypes.Array(*shape))]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, [('output', None)], disable_rank5_shape_mapping=True)
        builder.add_random_bernoulli_static(name='random_bernoulli_static', output_name='tmp',
                                            output_shape=list(shape), prob=prob, seed=seed)
        builder.add_elementwise('add_layer', ['data', 'tmp'], 'output', mode='ADD')
        data = np.zeros(shape)
        inputs = {'data': data}
        expected = {'output': data + np.random.binomial(1, prob, shape)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
def test_random_bernoulli_static_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_bernoulli_static_cpu(cpu_only=False)
def test_random_bernoulli_dynamic_cpu(self, cpu_only=True):
    """Check random_bernoulli_dynamic (shape fed at runtime) via moment comparison."""
    prob, seed = 0.5, 42
    for rank in range(1, 6):
        # Pick per-axis bounds so the total element count stays near
        # 1000-2000 regardless of rank.
        low_factor = np.random.randint(low=2, high=4)
        low = int(np.power(1000, 1. / rank)) * low_factor
        high = int(np.power(2000, 1. / rank)) * np.random.randint(low=low_factor, high=4)
        shape = np.random.randint(low=low, high=high, size=rank)
        input_features = [('shape', datatypes.Array(len(shape)))]
        builder = neural_network.NeuralNetworkBuilder(
            input_features, [('output', None)], disable_rank5_shape_mapping=True)
        builder.add_random_bernoulli_dynamic(name='random_bernoulli_dynamic',
                                             input_names=['shape'],
                                             output_name='output',
                                             prob=prob, seed=seed)
        # Fix: np.float is a deprecated alias removed in NumPy 1.24;
        # the builtin float gives the same float64 dtype.
        inputs = {'shape': np.array(shape, dtype=float)}
        expected = {'output': np.random.binomial(1, prob, shape)}
        CorrectnessTest._compare_moments(builder.spec, inputs, expected)
        self._test_model(builder.spec, inputs, expected, useCPUOnly=cpu_only)
def test_random_bernoulli_dynamic_gpu(self):
    # GPU variant: rerun the CPU test without the CPU-only restriction.
    self.test_random_bernoulli_dynamic_cpu(cpu_only=False)
def test_categorical_distribution_cpu_shapes(self):
for rank in range(1, 6):
shape = np.random.randint(low=2, high=8, size=rank)
num_samples = np.random.randint(low=10, high=1000)
input_features = [('data', datatypes.Array(*shape))]
builder = neural_network.NeuralNetworkBuilder(
input_features, [('output', None)], disable_rank5_shape_mapping=True)
builder.add_categorical_distribution(name='categorical_distribution',
input_name='data',
output_name='output',
num_samples=num_samples)
x = np.random.randint(low=0, high=20, size=shape).astype(np.float32)
inputs = {'data': x}
shape[-1] = num_samples
expected = | |
<reponame>YoavLotem/NeuriteOutgrowth
import numpy as np
def segment_nuclei_by_quarters(dapi_im, nuclei_maskrcnn):
    """
    Performs Nuclei instance segmentation on the DAPI image by dividing it into 4 quarters,
    running instance segmentation on each quarter separately, then combining the results.
    This lets the model detect more cells with less memory: up to 1000 cells
    can be detected per quarter, i.e. up to 4000 cells per image.

    Parameters
    ----------
    dapi_im: ndarray
        Nuclei (DAPI) image.
        NOTE(review): the docstring originally said 2D, but the quarter slice
        below uses a trailing ``:`` index, so a 3-D (H, W, C) array is
        expected here — confirm against callers.
    nuclei_maskrcnn: Instance of class MaskRCNN
        Mask RCNN model for Nuclei instance segmentation

    Returns
    -------
    nuclei_segmentation: ndarray
        2D array containing data with int type
        Nuclei instance segmentation for the entire DAPI image; each positive
        integer labels a different nucleus, 0 is background.
    nuclei_count: int
        One past the highest label assigned (labels start at 1), i.e.
        number of detected nuclei + 1. Downstream code iterates
        ``range(1, nuclei_count)``, consistent with this convention.
    """
    # Initialize an image to aggregate the instance segmentation from all 4 quarters
    # and a global counter so labels stay unique across quarters.
    image_height, image_width = dapi_im.shape[:2]
    im_height_div2 = image_height//2
    im_width_div2 = image_width // 2
    nuclei_segmentation = np.zeros((image_height, image_width))
    nuclei_count = 1
    for i in range(2):
        for j in range(2):
            # detect the nuclei in a quarter of the image
            quarter_dapi = dapi_im[i * im_height_div2: (i + 1) * im_height_div2, j * im_width_div2: (j + 1) * im_width_div2, :]
            results = nuclei_maskrcnn.detect([quarter_dapi], verbose=0)
            # one boolean channel per detected nucleus marking its pixels
            mask_per_nucleus = results[0]['masks']
            num_nuclei = np.shape(mask_per_nucleus)[2]
            # aggregate the individual detected instance segmentation masks into a single 2d image
            nuclei_segmentation_quarter = np.zeros((im_height_div2, im_width_div2))
            for idx in range(num_nuclei):
                mask = mask_per_nucleus[:, :, idx]
                nuclei_segmentation_quarter[mask] = nuclei_count
                nuclei_count += 1
            # insert the instance segmentation result of a quarter image in the total image results
            nuclei_segmentation[i * im_height_div2: (i + 1) * im_height_div2, j * im_width_div2: (j + 1) * im_width_div2] = nuclei_segmentation_quarter
    return nuclei_segmentation, nuclei_count
def get_splitted_nuclei_indices(mask_per_nucleus_in_border):
    """
    Detect which nuclei overlap the borderline between the image quarters.

    Parameters
    ----------
    mask_per_nucleus_in_border: ndarray
        Array with a 2D binary mask for every individual nucleus detected
        close to the borderline of the quarters (H, W, num_nuclei).

    Returns
    -------
    splitted_nuclei_indices: ndarray
        1D boolean array; True at position i means mask i touches the
        borderline and therefore belongs to a nucleus that was split by
        the quarter-wise segmentation (possibly into multiple pieces).
    """
    image_height, image_width = mask_per_nucleus_in_border.shape[:2]
    im_height_div2 = image_height//2
    im_width_div2 = image_width // 2
    # Binary image that is 1 on the two-pixel-wide horizontal and vertical
    # borderlines separating the four quarters.
    borderline = np.zeros((image_height, image_width))
    borderline[im_height_div2-1: im_height_div2+1, :] = 1
    borderline[:, im_width_div2-1: im_width_div2+1] = 1
    # A nucleus is "splitted" iff its mask intersects the borderline.
    # (Comprehension replaces the original append loop; same result.)
    splitted_nuclei_indices = np.array([
        bool(np.any(np.logical_and(borderline, mask_per_nucleus_in_border[:, :, i])))
        for i in range(np.shape(mask_per_nucleus_in_border)[2])
    ])
    return splitted_nuclei_indices
def find_splitted_nuclei(dapi_im, nuclei_maskrcnn):
    """
    Segment nuclei near the quarter borderlines and flag the split ones.

    Nuclei close to the quarter borderlines (introduced by the image being
    divided into four parts in segment_nuclei_by_quarters) are re-detected on
    the full image, and the masks that actually cross a borderline are
    flagged as split.

    Parameters
    ----------
    dapi_im: ndarray
        Nuclei (DAPI) image.
    nuclei_maskrcnn: Instance of class MaskRCNN
        Mask RCNN model for Nuclei instance segmentation

    Returns
    -------
    mask_per_nucleus_in_border: ndarray
        2D binary mask per nucleus detected close to the quarter borderlines.
    splitted_nuclei_indices: ndarray
        1D boolean array flagging which of those masks cross a borderline.
    """
    height, width = dapi_im.shape[:2]
    half_h = height // 2
    half_w = width // 2
    # Black out everything except a 200-pixel-wide band around the two
    # borderlines so the model only sees nuclei that may have been cut.
    boundary_area = dapi_im.copy()
    boundary_area[0: half_h - 100, 0: half_w - 100] = 0      # top-left corner
    boundary_area[0: half_h - 100, half_w + 100:] = 0        # top-right corner
    boundary_area[half_h + 100:, half_w + 100:] = 0          # bottom-right corner
    boundary_area[half_h + 100:, 0: half_w - 100] = 0        # bottom-left corner
    # Segment nuclei in the borderline proximity.
    results = nuclei_maskrcnn.detect([boundary_area], verbose=0)
    mask_per_nucleus_in_border = results[0]['masks']
    # Flag the masks that actually cross a borderline.
    splitted_nuclei_indices = get_splitted_nuclei_indices(mask_per_nucleus_in_border)
    return mask_per_nucleus_in_border, splitted_nuclei_indices
def correct_splitted_nuclei(mask_per_nucleus_in_border, splitted_nuclei_indices, nuclei_segmentation):
    """
    Correct the quarter-wise instance segmentation so that nuclei that were
    partitioned by the division into quarters are merged back into a single
    labeled nucleus.

    Parameters
    ----------
    mask_per_nucleus_in_border: ndarray
        2D binary mask per nucleus detected close to the quarter borderlines.
    splitted_nuclei_indices: ndarray
        1D boolean array; True entries select the masks of nuclei that were
        split by the quarter-wise segmentation.
    nuclei_segmentation: ndarray
        2D int-labeled instance segmentation for the entire DAPI image
        (0 = background). Modified in place.

    Returns
    -------
    nuclei_segmentation: ndarray
        The corrected label image (same array, for convenience).
    """
    splitted_nuclei_full_masks = mask_per_nucleus_in_border[:, :, splitted_nuclei_indices]
    for mask_idx in range(np.shape(splitted_nuclei_full_masks)[2]):
        # For each full nucleus detected in the boundary area, see how many
        # distinct labels the quarter-wise segmentation put under it.
        full_nuc_mask = splitted_nuclei_full_masks[:, :, mask_idx]
        unique_nuclei = np.unique(nuclei_segmentation[full_nuc_mask])
        # Background pixels (label 0) do not count as a nucleus.
        num_unique_nuclei = len(unique_nuclei) if 0 not in unique_nuclei else len(unique_nuclei) - 1
        # Remove partitioned nuclei and replace them with the full nucleus mask.
        if num_unique_nuclei > 1:
            min_label = np.inf
            for nuc_label in unique_nuclei:
                if nuc_label == 0:  # background is irrelevant
                    continue
                splitted_mask = nuclei_segmentation == nuc_label
                # Erase the partition only if it mostly lies inside the full nucleus.
                overlap = np.sum(np.logical_and(splitted_mask, full_nuc_mask)) / np.sum(splitted_mask)
                if overlap > 0.5:
                    nuclei_segmentation[splitted_mask] = 0
                    if nuc_label < min_label:
                        min_label = nuc_label
            # Bug fix: only relabel when at least one partition was absorbed.
            # Previously, if no partition overlapped > 0.5, min_label stayed
            # np.inf and was written into the label image.
            if min_label != np.inf:
                # Give the full nucleus the smallest label it replaced.
                nuclei_segmentation[full_nuc_mask] = min_label
    return nuclei_segmentation
def keep_viable_nuclei(nuclei_segmentation, nuclei_count, cells_foreground_mask):
"""
Check for every nuclei in nuclei instance segmentation mask (nuclei_segmentation) if its viable by checking for
overlap with the cell foreground mask
Parameters
----------
nuclei_segmentation:ndarray
2D array containing data with int type
Nuclei instance segmentation results for the entire DAPI image after correction,
each integer represents a different nucleus
nuclei_count: int
Number of overall nuclei detected in the DAPI image
cells_foreground_mask: ndarray
2D array containing data with boolean type
predicted cell foreground segmentation mask
Returns
-------
nuclei_instance_segmentation_mask: ndarray
2D array containing data with int type
Final nuclei instance segmentation results for the entire DAPI image each integer represents a different nucleus
centroids: ndarray
array containing data with int type
containing [y,x] coordinates of nuclei centers
apoptosis_fraction: float
fraction of non-viable cells in the field (nuclei that do not overlap with cell foreground)
"""
centroids = [[0, 0]]
# todo initialize centroids_new as empty array - this should include changes to other functions
#initialize a new 2D array for segmentation results and a counter for viable cells (nuclei that overlap with foreground)
nuclei_instance_segmentation_mask = np.zeros(np.shape(nuclei_segmentation))
viable_cells_counter = 0
# iterate other the possible nuclei labels: 0 is the background label,
# and the maximal label possible is the nuclei_count which is the number
# of nuclei before correcting partitioned nuclei
for nuc_idx in range(1, nuclei_count):
current_nuc_indices = nuclei_segmentation == nuc_idx
if not np.any(current_nuc_indices): # it might have been combined with another nucleus in correctSplittedNuclei
continue
overlap_with_foreground = np.mean(cells_foreground_mask[current_nuc_indices])
if overlap_with_foreground > 0.7:
# If the nucleus overlaps with foreground it is considered viable
# and will appear in the instance segmenation results. In | |
possible, just after any existing gates on ``self``'s qubit wires.
* `late` - add the gate in the last open time moment in ``self``, unless a conflict arises, in which case, add the gate in the next (new) time moment.
* `next` - add the gate in the next (new) time moment.
copy (bool): copy the gate or not? (default - `True`)
name (str): name of the gate for use in CompositeGate
(default value `None` indicates default name)
ascii_symbols (list of str or None): ASCII symbols for use in CompositeGate
(default value `None` indicates default symbols)
return_key (bool): return ``self`` for chaining (default - `False`) or (**times**, **qubits**)
key (True) to determine gate placement.
Returns:
``self`` - for chaining
"""
# If gate is Circuit, make it a CompositeGate
gate = CompositeGate(gate, name, ascii_symbols) if isinstance(
gate, Circuit) else gate
# Make qubits a tuple regardless of input
qubits = (qubits, ) if isinstance(qubits, int) else qubits
# Make times a tuple (or None) regardless of input
times = (times, ) if isinstance(times, int) else times
# Qubit validation
if len(set(qubits)) != len(qubits):
raise RuntimeError("Qubit list must not contain repeated indices.")
# Time determination by rules
if times is None:
if time_start is not None:
times = tuple(range(time_start, time_start + gate.ntime))
elif time_placement == 'early':
timemax = self.min_time - 1
for time, qubit in self.times_and_qubits:
if qubit in qubits:
timemax = max(timemax, time)
times = tuple(range(timemax + 1, timemax + 1 + gate.ntime))
elif time_placement == 'late':
timemax = self.max_time
if any((timemax, qubit) in self.times_and_qubits
for qubit in qubits):
timemax += 1
times = tuple(range(timemax, timemax + gate.ntime))
elif time_placement == 'next':
times = tuple(
range(self.max_time + 1, self.max_time + 1 + gate.ntime))
else:
raise RuntimeError(
'Unknown time_placement: %s. Allowed values are early, late, next'
% time_placement)
# Check that qubits makes sense for gate.nqubit
if len(qubits) != gate.nqubit:
raise RuntimeError('%d qubit entries provided for %d-qubit gate' %
(len(qubits), gate.nqubit))
# Check that times makes sense for gate.ntime
if len(times) != gate.ntime:
raise RuntimeError('%d time entries provided for %d-time gate' %
(len(times), gate.ntime))
# Check that the times are sequential and contiguous
if len(times) > 1 and times != tuple(range(times[0], times[-1] + 1)):
raise RuntimeError('times are not sequential: %r' % times)
# Check that the requested circuit locations are open
for qubit in qubits:
for time in times:
if (time, qubit) in self.times_and_qubits:
raise RuntimeError(
'time=%d, qubit=%d circuit location is already occupied'
% (time, qubit))
# Add gate to circuit
self.gates[(times, qubits)] = gate.copy() if copy else gate
for qubit in qubits:
self.qubits.add(qubit)
for time in times:
self.times.add(time)
for qubit in qubits:
for time in times:
self.times_and_qubits.add((time, qubit))
return (tuple(times), tuple(qubits)) if return_key else self
def add_controlled_gate(self,
                        gate,
                        qubits,
                        controls=None,
                        name=None,
                        ascii_symbols=None,
                        **kwargs):
    """ Wrap **gate** in a ControlledGate and add it to ``self`` via add_gate.

    Args:
        gate (Gate or Circuit): gate to control. A Circuit is first wrapped
            in a CompositeGate (using **name** / **ascii_symbols**).
        qubits (int or tuple of int): qubit indices, forwarded to add_gate.
        controls: control specification forwarded to ControlledGate
            (default `None` lets ControlledGate apply its own default).
        name (str): name for the CompositeGate wrapper; only used when
            **gate** is a Circuit.
        ascii_symbols (list of str or None): ASCII symbols for the
            CompositeGate wrapper; only used when **gate** is a Circuit.
        kwargs: remaining placement arguments (times, time_start,
            time_placement, copy, return_key, ...) forwarded to add_gate.

    Returns:
        whatever add_gate returns (``self`` for chaining, or the placement
        key when ``return_key=True`` is passed through kwargs).
    """
    gate = CompositeGate(gate, name, ascii_symbols) if isinstance(
        gate, Circuit) else gate
    gate = ControlledGate(gate, controls=controls)
    return self.add_gate(gate=gate, qubits=qubits, **kwargs)
def add_gates(
    self,
    circuit,
    qubits,
    times=None,
    time_start=None,
    time_placement='early',
    copy=True,
):
    """ Add the gates of another circuit to ``self`` at specified qubits and
    times, updating self. Essentially a composite version of add_gate.
    The qubits to add circuit to are always explicitly specified. The
    times to add circuit to may be explicitly specified in the **time**
    argument (1st priority), the starting time moment may be explicitly
    specified and then the circuit added in a time-contiguous manner
    from that point using the **time_start argument** (2nd priority), or a
    recipe for determining the time-contiguous placement can be
    specified using the **time_placement** argument (3rd priority).
    ``self`` is updated with the added gates from **circuit**.
    Checks are performed to ensure that the addition is valid.
    Args:
        circuit (Circuit): the circuit containing the gates to add into ``self``.
        qubits (tuple of int): ordered qubit indices in ``self`` to add the qubit
            indices of **circuit** into.
        times (tuple of int): ordered time moments in ``self`` to add the time moments
            of **circuit** into. If default value `None`, the **time** argument
            will be considered next.
        time_start (int): starting time moment in ``self`` to add the time
            moments of **circuit** into. If default value `None`, the **time_placement**
            argument will be considered next.
        time_placement (str - 'early', 'late', or 'next'): recipe to determine starting time moment in
            ``self`` to add the time moments of **circuit** into. The rules are:
            * `early` (default) - start adding the circuit as early as possible, just after any existing gates on ``self``'s qubit wires.
            * `late` - start adding the circuit in the last open time moment in ``self``, unless a conflict arises, in which case, start adding the circuit in the next (new) time moment
            * `next` - start adding the circuit in the next (new) time moment.
        copy (bool): copy Gate elements to remove parameter dependencies between **circuit**
            and updated ``self`` (default - `True`) or not (`False`).
    Returns:
        ``self`` - for chaining
    """
    # Make qubits a tuple regardless of input
    qubits = (qubits, ) if isinstance(qubits, int) else qubits
    # Also make times a tuple if int
    times = (times, ) if isinstance(times, int) else times
    # circuit validation
    if circuit.nqubit != len(qubits):
        raise RuntimeError(
            "len(qubits) must be equal to the number of registers in circuit."
        )
    # Time determination: explicit times > time_start > time_placement recipe
    if times is None:
        if time_start is not None:
            times = list(range(time_start, time_start + circuit.ntime))
        else:
            if time_placement == 'early':
                # leads[w] = offset of the first occupied time moment on wire
                # w of the incoming circuit, so each wire can slide left past
                # its empty leading moments.
                leads = [circuit.ntime] * circuit.nqubit
                for time2, qubit2 in circuit.times_and_qubits:
                    leads[qubit2 - circuit.min_qubit] = min(
                        leads[qubit2 - circuit.min_qubit],
                        time2 - circuit.min_time)
                # Latest conflicting moment on any target wire, adjusted by
                # that wire's lead; place the circuit right after it.
                timemax = -1
                for time2, qubit2 in self.times_and_qubits:
                    if qubit2 in qubits:
                        timemax = max(timemax,
                                      time2 - leads[qubits.index(qubit2)])
                timemax += 1
                times = list(range(timemax, timemax + circuit.ntime))
            elif time_placement == 'late':
                # Reuse the last moment unless a target wire is occupied there.
                timemax = self.max_time
                if any((timemax, qubit) in self.times_and_qubits
                       for qubit in qubits):
                    timemax += 1
                times = list(range(timemax, timemax + circuit.ntime))
            elif time_placement == 'next':
                times = list(
                    range(self.max_time + 1,
                          self.max_time + 1 + circuit.ntime))
            else:
                raise RuntimeError(
                    'Unknown time_placement: %s. Allowed values are early, late, next'
                    % time_placement)
    # Final sanity checks on the resolved placement
    if len(qubits) != circuit.nqubit:
        raise RuntimeError('len(qubits) != circuit.nqubit')
    if len(times) != circuit.ntime:
        raise RuntimeError('len(times) != circuit.ntime')
    # Delegate the actual gate copying/remapping into self to slice()
    circuit.slice(
        qubits=list(range(circuit.min_qubit, circuit.max_qubit + 1)),
        qubits_to=qubits,
        times=list(range(circuit.min_time, circuit.max_time + 1)),
        times_to=times,
        copy=copy,
        circuit_to=self,
    )
    return self
def gate(
    self,
    qubits,
    times,
):
    """Return the gate stored at the given (times, qubits) key.

    Scalar int arguments are promoted to 1-tuples before the lookup,
    so one-qubit/one-moment gates may be addressed with plain ints.

    Args:
        qubits (int or tuple of int): ordered qubit indices of the gate.
        times (int or tuple of int): time moments of the gate.

    Returns:
        Gate: the gate registered at that key in ``self.gates``.
    """
    if isinstance(qubits, int):
        qubits = (qubits,)
    if isinstance(times, int):
        times = (times,)
    return self.gates[(times, qubits)]
def remove_gate(
    self,
    qubits,
    times,
):
    """Delete the gate at the given qubits/times and update ``self``.

    Scalar int arguments are promoted to 1-tuples. After the gate is
    removed, the ``qubits``, ``times`` and ``times_and_qubits`` index
    collections are rebuilt from the remaining gates.

    Args:
        qubits (int or tuple of int): ordered qubit indices in ``self``
            of the gate to remove.
        times (int or tuple of int): time moments in ``self`` of the
            gate to remove.

    Raises:
        RuntimeError: if no gate is registered at (times, qubits).

    Returns:
        ``self`` - for chaining
    """
    # Normalize scalar arguments to 1-tuples
    if isinstance(qubits, int):
        qubits = (qubits,)
    if isinstance(times, int):
        times = (times,)
    key = (times, qubits)
    # Print sensible error message if key is invalid
    if key not in self.gates:
        raise RuntimeError('Key is not in circuit: (times=%r, qubits=%r)' %
                           (times, qubits))
    # Delete the gate
    del self.gates[key]
    # Rebuild the indexing collections from the gates that are left
    self.qubits.clear()
    self.times.clear()
    self.times_and_qubits.clear()
    for times2, qubits2 in self.gates:
        for qubit in qubits2:
            self.qubits.add(qubit)
        for time in times2:
            self.times.add(time)
            for qubit in qubits2:
                self.times_and_qubits.add((time, qubit))
    return self
def replace_gate(
self,
gate,
qubits,
times,
name=None,
ascii_symbols=None,
):
""" Replace the gate of | |
+ 1
top_level_item = top_level_item.parent()
top_level_item = top_level_item.child(0) # cause the top level ist not the service, but the header
try:
resp_org = str(top_level_item.data(1, 0).respOrg)
self.dlg.labelOrga.setText(resp_org)
except:
QgsMessageLog.logMessage("No attribute respOrg for this resource", 'GeoPortal.rlp search',
level=Qgis.Info)
"""
try:
date = str(item.data(1, 0).date)
# TODO - use iso format
self.dlg.labelDate.setText(date)
except:
QgsMessageLog.logMessage("No attribute date for this resource", 'GeoPortal.rlp search',
level=Qgis.Info)
try:
resp_orga = str(item.data(1, 0).respOrg)
# TODO - use iso format
self.dlg.labelOrga.setText(resp_orga)
except:
QgsMessageLog.logMessage("No attribute respOrg for this resource", 'GeoPortal.rlp search',
level=Qgis.Info)
# license / restrictions
# hasConstraints, isopen, symbolLink, license_id
# first: generate link to use conditions of portal (srv.id)
resolve_domain = str(self.dlg.comboBoxSearchCatalogue.currentData())
resolve_path = "/mapbender/php/mod_getServiceDisclaimer.php"
resolve_parameters = {
"id": item.data(1, 0).id,
"type": "metadata",
"languageCode": "en",
"withHeader": "true",
}
# add parameters
request_url = resolve_domain + resolve_path + "?" + urllib.parse.urlencode(resolve_parameters)
self.dlg.labelLicence.setText('<a href="' + request_url + '">' + self.tr("Open in Browser") + '</a>')
self.dlg.labelLicence.setOpenExternalLinks(True)
# license logo
try:
license_logo = str(item.data(1, 0).symbolLink)
if license_logo != "":
result_content = self.open_remote(license_logo)
if result_content:
# build
pixmap = QPixmap()
pixmap.loadFromData(result_content)
# draw preview
self.dlg.labelLicenceLogo.setPixmap(pixmap)
else:
QgsMessageLog.logMessage("An error occured while try to open url: " + license_logo,
'GeoPortal.rlp search',
level=Qgis.Critical)
except:
pass
# open data
try:
if str(item.data(1, 0).isopen) == "1":
result_content = self.open_remote("https://www.geoportal.rlp.de/static/searchCatalogue/images/open-data.jpg")
if result_content:
# build
pixmap = QPixmap()
pixmap.loadFromData(result_content)
# draw preview
self.dlg.labelLicenceOpen.setPixmap(pixmap)
except:
pass
self.load_access_options(item)
def show_loader_img(self):
    """Display the search-in-progress indicator.

    NOTE: intended to show a loader animation for external resources;
    currently it only updates the status label text.
    """
    self.dlg.labelSearchAnimation.setText(self.tr("Searching..."))
def hide_loader_img(self):
    """Reset the search indicator to the idle state.

    NOTE: intended to hide a loader animation for external resources;
    currently it only updates the status label text.
    """
    self.dlg.labelSearchAnimation.setText(self.tr("Ready"))
def on_clicked_remote_dataset(self, item):
    """Show detailed information about the dataset which was found by remote CSW search.

    Populates the detail pane of the dialog (abstract, preview image,
    extent, load count, metadata link, date, responsible organisation)
    from the record stored in the tree item's data slot (column 1,
    role 0), then delegates to load_remote_access_options().

    Args:
        item: QTreeWidgetItem whose data(1, 0) holds the dataset record.
    """
    self.reset_resource_view()
    self.dlg.labelResourceType.setText(self.tr("Dataset"))
    # datasetId is mandatory - without it no details can be resolved
    try:
        resource_id = item.data(1, 0).datasetId
    except:
        QgsMessageLog.logMessage("No attribute datasetId for this resource - no detailed info available", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
        # BUG FIX: the original assigned `resource_id = 0` here right
        # before returning - a dead store, removed
        return
    self.dlg.pushButtonLoad.setEnabled(False)
    self.dlg.treeWidgetResourceDetail.clear()
    # TODO alter this for clicked item in tree widget
    self.dlg.textBrowserResourceAbstract.append(item.text(0))
    try:
        abstract = str(item.data(1, 0).abstract)
        self.dlg.textBrowserResourceAbstract.append(abstract)
    except:
        QgsMessageLog.logMessage("No attribute abstract for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    # load preview image
    try:
        preview_url = item.data(1, 0).previewURL
        result_content = self.open_remote(preview_url)
        if result_content:
            # build
            pixmap = QPixmap()
            pixmap.loadFromData(result_content)
            # draw preview
            self.dlg.labelPreview.setPixmap(pixmap)
        else:
            QgsMessageLog.logMessage("An error occured while try to open url: " + preview_url, 'GeoPortal.rlp search',
                                     level=Qgis.Critical)
    except:
        self.dlg.labelPreview.setText(self.tr("No preview"))
    # load extent - remote image is to slow!
    try:
        # get extent from json
        bbox = str(item.data(1, 0).bbox)
        self.dlg.labelExtent.setText(bbox)
    except:
        QgsMessageLog.logMessage("No attribute bbox for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Critical)
    # load extent image (disabled - kept for reference)
    """
    try:
        # get extent from json
        bbox = item.data(1, 0).bbox
        bbox_list = str(bbox).split(',')
        extent_url = "https://www.geoportal.rlp.de/cgi-bin/mapserv?map=/data/mapbender/tools/wms_extent/extents.map&VERSION=1.1.1&REQUEST=GetMap&SERVICE=WMS&LAYERS=demis,ows_layer_target,extent,metadata_polygon&STYLES=&SRS=EPSG:4326&BBOX=4,45,9,56&WIDTH=120&HEIGHT=120&FORMAT=image/png&BGCOLOR=0xffffff&TRANSPARENT=TRUE&EXCEPTIONS=application/vnd.ogc.se_inimage&minx=" + bbox_list[0] + "&miny=" + bbox_list[1] + "&maxx=" + bbox_list[2] + "&maxy=" + bbox_list[3]
        result_content = self.open_remote(extent_url)
        if result_content:
            # build
            pixmap = QPixmap()
            pixmap.loadFromData(result_content)
            # draw preview
            self.dlg.labelExtent.setPixmap(pixmap)
        else:
            QgsMessageLog.logMessage("An error occured while try to open url: " + extent_url, 'GeoPortal.rlp search',
                                     level=Qgis.Critical)
    except:
        self.dlg.labelExtent.setText("No Extent")
    """
    # add load count
    try:
        load_count = item.data(1, 0).loadCount
        # reuse the fetched value (the original re-read item.data here)
        self.dlg.labelLoadCount.setText(str(load_count))
    except:
        QgsMessageLog.logMessage("No attribute loadCount for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    # set access url and activate load button, if the url is available
    self.dlg.labelResourceId.setText(str(resource_id))
    """
    try:
        capabilities_url = str(item.data(1, 0).getCapabilitiesUrl)
        self.dlg.labelAccessUrl.setText('<a href="' + capabilities_url + '">GetCapabilities</a>')
        self.dlg.labelAccessUrl.setOpenExternalLinks(True)
        # add function to load button
        self.dlg.pushButtonLoad.setEnabled(True)
        self.dlg.pushButtonLoad.disconnect()
        self.dlg.pushButtonLoad.clicked.connect(lambda: self.add_ows(capabilities_url))
    except:
        QgsMessageLog.logMessage("No attribute getCapabilitiesUrl for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    """
    try:
        metadata_url = str(item.data(1, 0).htmlLink)
        self.dlg.labelMetadata.setText('<a href="' + metadata_url + '">' + self.tr("Online Metadata") + '</a>')
        self.dlg.labelMetadata.setOpenExternalLinks(True)
    except:
        # BUG FIX: the attribute read above is htmlLink, but the original
        # message reported "mdLink"
        QgsMessageLog.logMessage("No attribute htmlLink for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    """
    # pull top level information - this is the information of the service itself
    top_level_item = item
    i = 0
    while top_level_item.parent():
        i = i + 1
        top_level_item = top_level_item.parent()
    top_level_item = top_level_item.child(0)  # cause the top level ist not the service, but the header
    try:
        resp_org = str(top_level_item.data(1, 0).respOrg)
        self.dlg.labelOrga.setText(resp_org)
    except:
        QgsMessageLog.logMessage("No attribute respOrg for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    """
    try:
        date = str(item.data(1, 0).date)
        # TODO - use iso format
        self.dlg.labelDate.setText(date)
    except:
        QgsMessageLog.logMessage("No attribute date for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    try:
        resp_orga = str(item.data(1, 0).respOrg)
        self.dlg.labelOrga.setText(resp_orga)
    except:
        QgsMessageLog.logMessage("No attribute respOrg for this resource", 'GeoPortal.rlp search',
                                 level=Qgis.Info)
    # resolve and display the access (view/download) options
    self.load_remote_access_options(item)
def run(self):
    """Run method that performs all the real work.

    Creates the plugin dialog exactly once (combo boxes populated and
    signals connected on first start only), then shows it and enters
    its event loop.
    """
    # Only create GUI ONCE in callback, so that it will only load when the plugin is started
    if self.first_start:
        self.first_start = False
        self.dlg = GeoportalRlpMetadataSearchDialog()
        # populate the searchable resource types
        for resource in self.search_resources_list:
            self.dlg.comboBoxSearchResources.addItem(resource["title"], resource["value"])
        # search_catalogues
        for resource in self.search_catalogues:
            self.dlg.comboBoxSearchCatalogue.addItem(resource["title"], resource["value"])
        # important - events should only be added once - otherwise we will go into trouble!
        self.dlg.pushButton.clicked.connect(lambda: self.start_search(page=1, resource_type=self.dlg.comboBoxSearchResources.currentData()))
        self.dlg.pushButtonLoad.clicked.connect(self.load_ows_context)
        # if selected a remote search - the available CSW should be listed in the combo box below
        self.dlg.comboBoxSearchResources.currentIndexChanged.connect(self.on_select_search_resource)
        self.dlg.comboBoxSearchCatalogue.currentIndexChanged.connect(self.on_select_search_catalogue)
        # add logo
        logo_url = "https://www.geoportal.rlp.de/static/useroperations/images/logo-geoportal.png"
        result_content = self.open_remote(logo_url)
        if result_content:
            # build
            pixmap = QPixmap()
            pixmap.loadFromData(result_content)
            # draw preview
            self.dlg.labelLogo.setPixmap(pixmap.scaled(self.dlg.labelLogo.size(), Qt.KeepAspectRatio, Qt.SmoothTransformation))
        else:
            # BUG FIX: the original logged the undefined name `request_url`
            # here, raising NameError instead of reporting the failure
            QgsMessageLog.logMessage("An error occured while try to open url: " + logo_url, 'GeoPortal.rlp search',
                                     level=Qgis.Critical)
        # add link to github for help
        help_icon_path = os.path.join(os.path.dirname(__file__), "questionmark.png")
        pixmap_help = QPixmap()
        # BUG FIX: the original loaded and scaled `pixmap` (the logo, and
        # undefined when the logo download failed) instead of `pixmap_help`
        pixmap_help.load(help_icon_path)
        self.dlg.labelHelp.setPixmap(pixmap_help.scaled(self.dlg.labelHelp.size(), Qt.KeepAspectRatio, Qt.SmoothTransformation))
        # NOTE(review): setText() below replaces the pixmap set above -
        # a QLabel shows either text or a pixmap, not both; confirm intent
        self.dlg.labelHelp.setText('<a href="https://github.com/mrmap-community/gprlp_metadata_search">' +
                                   self.tr("Help") + '</a>')
        self.dlg.labelHelp.setOpenExternalLinks(True)
    # show the dialog
    self.dlg.show()
    # Run the dialog event loop
    result = self.dlg.exec_()
    # See if OK was pressed
    if result:
        # Do something useful here - delete the line containing pass and
        # substitute with your code.
        pass
def find_layer_by_id(self, layer, layer_id):
    """Recursively search the hierarchical layer tree for a layer id.

    Args:
        layer: iterable of layer objects from the search module; each
            object may carry a nested ``layer`` attribute holding its
            child layers.
        layer_id: id to look for (compared as int).

    Returns:
        The matching layer object, or None if no layer matches.
    """
    for sub_layer in layer:
        if int(sub_layer.id) == int(layer_id):
            return sub_layer
        if hasattr(sub_layer, "layer"):
            # BUG FIX: the original returned the recursion result
            # unconditionally, so siblings after the first subtree were
            # never searched. Only return when the subtree yields a hit.
            found = self.find_layer_by_id(sub_layer.layer, layer_id)
            if found is not None:
                return found
    return None
def load_access_options(self, item):
    """Load the access options from the search results into the detail tree.

    The options live in the record's ``coupledResources`` object:
    ``inspireAtomFeeds`` become download options, ``layer`` entries
    become view options. The resulting tree is shown in
    ``treeWidgetResourceDetail`` and its click handler is (re)connected.

    Args:
        item: QTreeWidgetItem whose data(1, 0) holds the service record.
    """
    # build
    parent_node = QTreeWidgetItem()
    parent_node.setText(0, self.tr("Access options"))
    try:
        coupled_resources = item.data(1, 0).coupledResources
    except:
        QgsMessageLog.logMessage("An error occured while reading coupledResources", 'GeoPortal.rlp search',
                                 level=Qgis.Critical)
        return
    # counts default to "0" when the corresponding attribute is missing
    try:
        download_count = str(len(coupled_resources.inspireAtomFeeds))
    except:
        download_count = "0"
    try:
        view_count = str(len(coupled_resources.layer))
    except:
        view_count = "0"
    download_node = QTreeWidgetItem()
    download_node.setText(0, self.tr("Download options") + " (" + download_count + ")")
    if download_count != "0":
        for download_option in coupled_resources.inspireAtomFeeds:
            download_option.service_type = "download"
            download_option_node = QTreeWidgetItem()
            download_option_node.setText(0, download_option.serviceTitle)
            download_option_node.setToolTip(0, download_option.serviceTitle)
            download_option_node.setData(1, 0, download_option)
            download_node.addChild(download_option_node)
    view_node = QTreeWidgetItem()
    view_node.setText(0, self.tr("View options") + " (" + view_count + ")")
    if view_count != "0":
        for view_option in coupled_resources.layer:
            view_option_node = QTreeWidgetItem()
            layer_id = view_option.id
            # TODO test if layer is not already set at top level!
            service_information = {}
            service_information["serviceId"] = view_option.srv.id
            # TODO: add this info to coupled_layer below!
            coupled_layer = self.find_layer_by_id(view_option.srv.layer, layer_id)
            if coupled_layer:
                # BUG FIX: log only after the None check -
                # find_layer_by_id may return None and the original
                # dereferenced coupled_layer.title before checking
                QgsMessageLog.logMessage("Coupled layer title: " + coupled_layer.title, 'GeoPortal.rlp search',
                                         level=Qgis.Info)
                # enrich the layer with service-level info for later use
                coupled_layer.service_type = "view"
                coupled_layer.serviceId = view_option.srv.id
                coupled_layer.respOrg = view_option.srv.respOrg
                coupled_layer.symbolLink = view_option.srv.symbolLink
                coupled_layer.isopen = view_option.srv.isopen
                coupled_layer.date = view_option.srv.date
            try:
                view_option_node.setText(0, coupled_layer.title + " (" + str(layer_id) + ")")
                view_option_node.setToolTip(0, coupled_layer.title + " (" + str(layer_id) + ")")
                view_option_node.setData(1, 0, coupled_layer)
            except:
                # fall back to the bare layer id when no coupled layer was found
                view_option_node.setText(0, str(layer_id))
            view_node.addChild(view_option_node)
    """
    for view_layer in coupled_resources.layer:
        pass
    """
    self.dlg.treeWidgetResourceDetail.clear()
    parent_node.addChild(view_node)
    parent_node.addChild(download_node)
    self.dlg.treeWidgetResourceDetail.addTopLevelItem(parent_node)
    self.dlg.treeWidgetResourceDetail.expandAll()
    # reconnect the click handler exactly once (disconnect may raise
    # when nothing was connected yet)
    try:
        self.dlg.treeWidgetResourceDetail.itemClicked.disconnect()
    except:
        pass
    self.dlg.treeWidgetResourceDetail.itemClicked.connect(self.on_clicked_service_option)
def load_remote_access_options(self, item):
"""Function to load the remote access options by using the GeoPortal.rlp coupled resource resolver."""
# build
parent_node = QTreeWidgetItem()
parent_node.setText(0, self.tr("Access options"))
# resolve coupled resources by csw request
"""
Build initial resolving request
"""
# read search domain from comboBox
resolve_domain = str(self.dlg.comboBoxSearchCatalogue.currentData())
#search_domain = "https://www.geoportal.rlp.de"
resolve_path = "/mapbender/php/mod_getCoupledResourcesForDataset.php"
resolve_parameters = {
"getRecordByIdUrl": item.data(1, 0).mdLink
}
# add parameters
request_url = resolve_domain + resolve_path + "?" + urllib.parse.urlencode(resolve_parameters)
QgsMessageLog.logMessage("Try to open url: " + request_url, 'GeoPortal.rlp search',
level=Qgis.Info)
result_content = self.open_remote(request_url)
result_object = False
if result_content:
result_object | |
<gh_stars>0
"""Copyright 2016 Mirantis, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
import os
from proboscis import test
from fuelweb_test import logger
from fuelweb_test.helpers.decorators import log_snapshot_after_test
from fuelweb_test.settings import CONTRAIL_PLUGIN_PACK_UB_PATH
from fuelweb_test.tests.base_test_case import SetupEnvironment
from fuelweb_test.tests.base_test_case import TestBasic
from helpers import vsrx
from helpers import plugin
from helpers import openstack
from helpers import baremetal
from helpers import fuel
from tests.test_contrail_check import TestContrailCheck
@test(groups=["plugins"])
class DPDKonVFTests(TestBasic):
"""DPDKTests."""
pack_copy_path = '/var/www/nailgun/plugins/contrail-5.0'
add_package = '/var/www/nailgun/plugins/contrail-5.0/'\
'repositories/ubuntu/contrail-setup*'
ostf_msg = 'OSTF tests passed successfully.'
cluster_id = ''
pack_path = CONTRAIL_PLUGIN_PACK_UB_PATH
CONTRAIL_DISTRIBUTION = os.environ.get('CONTRAIL_DISTRIBUTION')
bm_drv = baremetal.BMDriver()
@test(depends_on=[SetupEnvironment.prepare_slaves_9],
      groups=["contrail_ha_dpdk_on_vf", "contrail_dpdk_on_vf_tests"])
@log_snapshot_after_test
def contrail_ha_dpdk_on_vf(self):
    """Check Contrail deploy on HA environment with DPDK on VF.
    Scenario:
        1. Create an environment with "Neutron with tunneling
           segmentation" as a network configuration and CEPH storage
        2. Enable and configure Contrail plugin
        3. Enable DPDK on VF feature
        4. Deploy cluster with following node configuration:
            node-01: 'controller';
            node-02: 'controller';
            node-03: 'controller', 'ceph-osd';
            node-04: 'compute', 'ceph-osd';
            node-05: 'compute', 'ceph-osd';
            node-06: 'contrail-controller';
            node-07: 'contrail-analytics';
            node-08: 'contrail-analytics-db';
            node-dpdk: 'compute', dpdk';
        5. Run OSTF tests
        6. Run contrail health check tests
    Duration 120 min
    """
    # plugin setting: enable the "DPDK on VF" feature
    conf_contrail = {
        "dpdk_on_vf": True,
    }
    self.show_step(1)
    # environment with all Ceph backends enabled and LVM volumes disabled
    plugin.prepare_contrail_plugin(self, slaves=9,
                                   options={'images_ceph': True,
                                            'volumes_ceph': True,
                                            'ephemeral_ceph': True,
                                            'objects_ceph': True,
                                            'volumes_lvm': False})
    # prepare the baremetal host that backs the DPDK node
    self.bm_drv.host_prepare()
    plugin.show_range(self, 2, 4)  # displays scenario steps 2-3
    # activate plugin with DPDK feature
    plugin.activate_dpdk(self, **conf_contrail)
    # activate vSRX image; OSTF and health checks run only when available
    vsrx_setup_result = vsrx.activate()
    self.show_step(4)
    # register the baremetal node as a compute with dpdk + dpdk-on-vf roles
    self.bm_drv.setup_fuel_node(self,
                                cluster_id=self.cluster_id,
                                roles=['compute', 'dpdk', 'dpdk-on-vf'])
    # DPDK needs hugepages; reserve 40 x 1GB pages
    openstack.setup_hugepages(self, hp_1gb=40)
    conf_nodes = {
        'slave-01': ['controller'],
        'slave-02': ['controller'],
        'slave-03': ['controller', 'ceph-osd'],
        'slave-04': ['compute', 'ceph-osd'],
        'slave-05': ['compute', 'ceph-osd'],
        'slave-06': ['contrail-controller'],
        'slave-07': ['contrail-analytics'],
        'slave-08': ['contrail-analytics-db'],
    }
    # Cluster configuration
    self.fuel_web.update_nodes(self.cluster_id,
                               nodes_dict=conf_nodes,
                               update_interfaces=False)
    self.bm_drv.update_vm_node_interfaces(self, self.cluster_id)
    # Enable SRIOV on interface
    openstack.enable_sriov(self)
    # add mandatory kernel parameters for DPDK on VF
    fuel.add_kernel_params()
    # Deploy cluster
    openstack.deploy_cluster(self)
    # Run OSTF tests
    # FIXME: remove shouldfail, when livemigration+DPDK works
    if vsrx_setup_result:
        self.show_step(5)
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity', 'ha'],
            should_fail=2,
            failed_test_name=[
                'Instance live migration',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        self.show_step(6)
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
@test(depends_on=[SetupEnvironment.prepare_slaves_5],
      groups=["contrail_dpdk_on_vf_add_compute",
              "contrail_dpdk_on_vf_tests"])
@log_snapshot_after_test
def contrail_dpdk_on_vf_add_compute(self):
    """Verify that Contrail compute role can be added after deploying.
    Scenario:
        1. Create an environment with "Neutron with tunneling
           segmentation" as a network configuration
        2. Enable and configure Contrail plugin
        3. Enable DPDK on VF feature
        4. Deploy cluster with following node configuration:
            node-1: 'controller', 'ceph-osd';
            node-2: 'contrail-controller';
            node-3: 'compute', 'ceph-osd';
            node-4: 'contrail-analytics', 'contrail-analytics-db';
            node-dpdk: 'compute', 'dpdk';
        5. Run OSTF tests
        6. Add one node with following configuration:
            node-5: "compute", "ceph-osd";
        7. Deploy changes
        8. Run OSTF tests
        9. Run contrail health check tests
    """
    # plugin setting: enable the "DPDK on VF" feature
    conf_contrail = {
        "dpdk_on_vf": True,
    }
    self.show_step(1)
    # osd_pool_size '1': single-replica Ceph, few osd nodes at first deploy
    plugin.prepare_contrail_plugin(self, slaves=5,
                                   options={'images_ceph': True,
                                            'volumes_ceph': True,
                                            'ephemeral_ceph': True,
                                            'objects_ceph': True,
                                            'volumes_lvm': False,
                                            "osd_pool_size": '1'})
    # prepare the baremetal host that backs the DPDK node
    self.bm_drv.host_prepare()
    plugin.show_range(self, 2, 4)  # displays scenario steps 2-3
    # activate plugin with DPDK feature
    plugin.activate_dpdk(self, **conf_contrail)
    # activate vSRX image; OSTF and health checks run only when available
    vsrx_setup_result = vsrx.activate()
    self.show_step(4)
    self.bm_drv.setup_fuel_node(self,
                                cluster_id=self.cluster_id,
                                roles=['compute', 'dpdk', 'dpdk-on-vf'])
    openstack.setup_hugepages(self)
    conf_nodes = {
        'slave-01': ['controller', 'ceph-osd'],
        'slave-02': ['contrail-controller'],
        'slave-03': ['compute', 'ceph-osd'],
        # slave-06 here
        'slave-04': ['contrail-analytics',
                     'contrail-analytics-db'],
    }
    # the compute node that is added later in step 6
    conf_compute = {'slave-05': ['compute', 'ceph-osd']}
    # Cluster configuration
    self.fuel_web.update_nodes(self.cluster_id,
                               nodes_dict=conf_nodes,
                               update_interfaces=False)
    self.bm_drv.update_vm_node_interfaces(self, self.cluster_id)
    # Enable SRIOV on interface
    openstack.enable_sriov(self)
    # add mandatory kernel parameters for DPDK on VF
    fuel.add_kernel_params()
    # Deploy cluster
    openstack.deploy_cluster(self)
    # Run OSTF tests
    self.show_step(5)
    # FIXME: remove shouldfail, when livemigration+DPDK works
    if vsrx_setup_result:
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity', 'ha'],
            should_fail=2,
            failed_test_name=[
                'Instance live migration',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
    # Add Compute node and check again
    self.show_step(6)
    # Cluster configuration
    self.fuel_web.update_nodes(self.cluster_id,
                               nodes_dict=conf_compute,
                               update_interfaces=False)
    self.bm_drv.update_vm_node_interfaces(self, self.cluster_id)
    # Deploy cluster
    self.show_step(7)
    openstack.deploy_cluster(self)
    # Run OSTF tests
    self.show_step(8)
    # FIXME: remove shouldfail, when livemigration+DPDK works
    if vsrx_setup_result:
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            should_fail=2,
            failed_test_name=[
                'Instance live migration',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        self.show_step(9)
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
@test(depends_on=[SetupEnvironment.prepare_slaves_9],
      groups=["contrail_dpdk_on_vf_delete_compute",
              "contrail_dpdk_on_vf_tests"])
@log_snapshot_after_test
def contrail_dpdk_on_vf_delete_compute(self):
    """Verify that Contrail compute role can be deleted after deploying.
    Scenario:
        1. Create an environment with "Neutron with tunneling
           segmentation" as a network configuration
        2. Enable and configure Contrail plugin
        3. Enable DPDK on VF feature
        4. Deploy cluster with following node configuration:
            node-01: 'controller';
            node-02: 'contrail-controller';
            node-03: 'contrail-controller';
            node-04: 'compute', 'cinder';
            node-05: 'compute';
            node-06: 'contrail-analytics', 'contrail-analytics-db';
        5. Run OSTF tests
        6. Delete node-05 with "compute" role
        7. Deploy changes
        8. Run OSTF tests
        9. Run contrail health check tests
    """
    # NOTE(review): show_step() below runs 1..8 while the docstring
    # scenario is numbered 1..9 - confirm intended step numbering
    # plugin setting: enable the "DPDK on VF" feature
    conf_contrail = {
        "dpdk_on_vf": True,
    }
    self.show_step(1)
    plugin.prepare_contrail_plugin(self, slaves=9)
    # prepare the baremetal host that backs the DPDK node
    self.bm_drv.host_prepare()
    self.show_step(2)
    # activate plugin with DPDK feature
    plugin.activate_dpdk(self, **conf_contrail)
    # activate vSRX image; OSTF and health checks run only when available
    vsrx_setup_result = vsrx.activate()
    self.show_step(3)
    self.bm_drv.setup_fuel_node(self,
                                cluster_id=self.cluster_id,
                                roles=['compute', 'dpdk', 'dpdk-on-vf'])
    openstack.setup_hugepages(self)
    # all nodes except the compute that gets deleted in the second phase
    conf_no_compute = {
        'slave-01': ['controller'],
        'slave-02': ['contrail-controller'],
        'slave-03': ['contrail-controller'],
        'slave-04': ['compute', 'cinder'],
        # node-05
        'slave-06': ['contrail-analytics',
                     'contrail-analytics-db'],
    }
    # the compute node that is removed again later
    conf_compute = {'slave-05': ['compute']}
    self.fuel_web.update_nodes(
        self.cluster_id,
        nodes_dict=dict(conf_no_compute, **conf_compute),
        update_interfaces=False)
    self.bm_drv.update_vm_node_interfaces(self, self.cluster_id)
    # Enable SRIOV on interface
    openstack.enable_sriov(self)
    # add mandatory kernel parameters for DPDK on VF
    fuel.add_kernel_params()
    # Deploy cluster
    openstack.deploy_cluster(self)
    # Run OSTF tests
    if vsrx_setup_result:
        self.show_step(4)
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity', 'ha'],
            should_fail=2,
            failed_test_name=[
                'Instance live migration',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
    # Delete Compute node and check again
    self.show_step(5)
    self.fuel_web.update_nodes(
        self.cluster_id,
        nodes_dict=conf_compute,
        pending_addition=False, pending_deletion=True,
        update_interfaces=False)
    # Deploy cluster
    self.show_step(6)
    openstack.deploy_cluster(self)
    # Run OSTF tests
    if vsrx_setup_result:
        self.show_step(7)
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity'],
            should_fail=2,
            failed_test_name=[
                'Check that required services are running',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        self.show_step(8)
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
@test(depends_on=[SetupEnvironment.prepare_slaves_9],
      groups=["contrail_dpdk_on_vf_add_dpdk",
              "contrail_dpdk_on_vf_tests"])
@log_snapshot_after_test
def contrail_dpdk_on_vf_add_dpdk(self):
    """Verify that DPDK role can be added after deploying.
    Scenario:
        1. Create an environment with "Neutron with tunneling
           segmentation" as a network configuration
        2. Enable and configure Contrail plugin
        3. Enable DPDK on VF feature
        4. Deploy cluster with following node configuration:
            node-01: 'controller', 'ceph-osd';
            node-02: 'contrail-controller';
            node-03: 'compute', 'ceph-osd';
            node-04: 'compute', 'ceph-osd';
            node-05: 'controller', 'cinder';
            node-06: 'controller', 'cinder';
            node-07: 'contrail-analytics';
            node-08: 'contrail-analytics-db';
        5. Run OSTF tests
        6. Run contrail health check tests
        7. Add one node with following configuration:
            node-dpdk: "compute", "dpdk";
        8. Deploy changes
        9. Run OSTF tests
        10. Run contrail health check tests
    """
    # NOTE(review): unlike the other tests in this class, DPDK plugin
    # activation deliberately happens at step 7, after the initial
    # deployment - this test checks adding the DPDK role afterwards
    conf_contrail = {
        "dpdk_on_vf": True,
    }
    self.show_step(1)
    plugin.prepare_contrail_plugin(self, slaves=9,
                                   options={'images_ceph': True})
    # prepare the baremetal host that backs the DPDK node
    self.bm_drv.host_prepare()
    plugin.show_range(self, 2, 4)  # displays scenario steps 2-3
    # activate vSRX image; OSTF and health checks run only when available
    vsrx_setup_result = vsrx.activate()
    self.show_step(4)
    conf_nodes = {
        'slave-01': ['controller', 'ceph-osd'],
        'slave-02': ['contrail-controller'],
        'slave-03': ['compute', 'ceph-osd'],
        'slave-04': ['compute', 'ceph-osd'],
        'slave-05': ['controller', 'cinder'],
        'slave-06': ['controller', 'cinder'],
        'slave-07': ['contrail-analytics'],
        'slave-08': ['contrail-analytics-db'],
    }
    self.fuel_web.update_nodes(
        self.cluster_id,
        nodes_dict=conf_nodes,
        update_interfaces=False)
    self.bm_drv.update_vm_node_interfaces(self, self.cluster_id)
    # Enable SRIOV on interface
    openstack.enable_sriov(self)
    # add mandatory kernel parameters for DPDK on VF
    fuel.add_kernel_params()
    # Deploy cluster
    openstack.deploy_cluster(self)
    # Run OSTF tests
    self.show_step(5)
    if vsrx_setup_result:
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity', 'ha'],
            should_fail=1,
            failed_test_name=['Instance live migration']
        )
        self.show_step(6)
        TestContrailCheck(self).cloud_check(['contrail'])
    self.show_step(7)
    # activate plugin with DPDK feature
    plugin.activate_dpdk(self, **conf_contrail)
    self.bm_drv.setup_fuel_node(self,
                                cluster_id=self.cluster_id,
                                roles=['compute', 'dpdk', 'dpdk-on-vf'])
    openstack.setup_hugepages(self)
    self.show_step(8)
    openstack.deploy_cluster(self)
    self.show_step(9)
    if vsrx_setup_result:
        self.fuel_web.run_ostf(
            cluster_id=self.cluster_id,
            test_sets=['smoke', 'sanity'],
            should_fail=2,
            failed_test_name=[
                'Check that required services are running',
                'Check network connectivity from SRIOV instance via'
                ' floating IP']
        )
        self.show_step(10)
        TestContrailCheck(self).cloud_check(
            ['dpdk', 'sriov', 'contrail'],
            should_fail=[
                "test_dpdk_boot_snapshot_vm",
                "test_dpdk_check_public_connectivity_from_instance"])
@test(depends_on=[SetupEnvironment.prepare_slaves_9],
groups=["contrail_dpdk_on_vf_delete_dpdk",
"contrail_dpdk_on_vf_tests"])
@log_snapshot_after_test
def contrail_dpdk_on_vf_delete_dpdk(self):
"""Verify that DPDK role can be deleted after deploying.
Scenario:
1. Create an environment with "Neutron with tunneling
segmentation" as a network configuration
2. Enable and configure Contrail plugin
3. Enable DPDK on VF feature
4. Deploy cluster with following node configuration:
node-01: 'controller', 'ceph-osd', 'cinder';
node-02: 'contrail-controller';
node-03: 'compute', 'ceph-osd';
node-04: 'compute', 'ceph-osd';
node-05: 'contrail-analytics', 'contrail-analytics-db';
node-dpdk: 'compute', 'dpdk';
5. Run OSTF tests
6. Run contrail health check tests
7. Delete node "node-dpdk" with "dpdk" and "compute" roles
8. Deploy changes
9. Run OSTF tests
10. Run contrail health check tests
"""
self.show_step(1)
plugin.prepare_contrail_plugin(self, slaves=9,
options={'images_ceph': True})
self.bm_drv.host_prepare()
self.show_step(2)
# activate plugin with DPDK feature
conf_contrail = | |
AccountWithRestoreAccess: **[REQUIRED]**
The identifier of the AWS customer account that can no longer restore the specified snapshot.
:rtype: dict
:returns:
"""
pass
def rotate_encryption_key(self, ClusterIdentifier: str) -> Dict:
"""
Rotates the encryption keys for a cluster.
See also: `AWS API Documentation <https://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/RotateEncryptionKey>`_
**Request Syntax**
::
response = client.rotate_encryption_key(
ClusterIdentifier='string'
)
**Response Syntax**
::
{
'Cluster': {
'ClusterIdentifier': 'string',
'NodeType': 'string',
'ClusterStatus': 'string',
'ModifyStatus': 'string',
'MasterUsername': 'string',
'DBName': 'string',
'Endpoint': {
'Address': 'string',
'Port': 123
},
'ClusterCreateTime': datetime(2015, 1, 1),
'AutomatedSnapshotRetentionPeriod': 123,
'ManualSnapshotRetentionPeriod': 123,
'ClusterSecurityGroups': [
{
'ClusterSecurityGroupName': 'string',
'Status': 'string'
},
],
'VpcSecurityGroups': [
{
'VpcSecurityGroupId': 'string',
'Status': 'string'
},
],
'ClusterParameterGroups': [
{
'ParameterGroupName': 'string',
'ParameterApplyStatus': 'string',
'ClusterParameterStatusList': [
{
'ParameterName': 'string',
'ParameterApplyStatus': 'string',
'ParameterApplyErrorDescription': 'string'
},
]
},
],
'ClusterSubnetGroupName': 'string',
'VpcId': 'string',
'AvailabilityZone': 'string',
'PreferredMaintenanceWindow': 'string',
'PendingModifiedValues': {
'MasterUserPassword': '<PASSWORD>',
'NodeType': 'string',
'NumberOfNodes': 123,
'ClusterType': 'string',
'ClusterVersion': 'string',
'AutomatedSnapshotRetentionPeriod': 123,
'ClusterIdentifier': 'string',
'PubliclyAccessible': True|False,
'EnhancedVpcRouting': True|False,
'MaintenanceTrackName': 'string',
'EncryptionType': 'string'
},
'ClusterVersion': 'string',
'AllowVersionUpgrade': True|False,
'NumberOfNodes': 123,
'PubliclyAccessible': True|False,
'Encrypted': True|False,
'RestoreStatus': {
'Status': 'string',
'CurrentRestoreRateInMegaBytesPerSecond': 123.0,
'SnapshotSizeInMegaBytes': 123,
'ProgressInMegaBytes': 123,
'ElapsedTimeInSeconds': 123,
'EstimatedTimeToCompletionInSeconds': 123
},
'DataTransferProgress': {
'Status': 'string',
'CurrentRateInMegaBytesPerSecond': 123.0,
'TotalDataInMegaBytes': 123,
'DataTransferredInMegaBytes': 123,
'EstimatedTimeToCompletionInSeconds': 123,
'ElapsedTimeInSeconds': 123
},
'HsmStatus': {
'HsmClientCertificateIdentifier': 'string',
'HsmConfigurationIdentifier': 'string',
'Status': 'string'
},
'ClusterSnapshotCopyStatus': {
'DestinationRegion': 'string',
'RetentionPeriod': 123,
'ManualSnapshotRetentionPeriod': 123,
'SnapshotCopyGrantName': 'string'
},
'ClusterPublicKey': 'string',
'ClusterNodes': [
{
'NodeRole': 'string',
'PrivateIPAddress': 'string',
'PublicIPAddress': 'string'
},
],
'ElasticIpStatus': {
'ElasticIp': 'string',
'Status': 'string'
},
'ClusterRevisionNumber': 'string',
'Tags': [
{
'Key': 'string',
'Value': 'string'
},
],
'KmsKeyId': 'string',
'EnhancedVpcRouting': True|False,
'IamRoles': [
{
'IamRoleArn': 'string',
'ApplyStatus': 'string'
},
],
'PendingActions': [
'string',
],
'MaintenanceTrackName': 'string',
'ElasticResizeNumberOfNodeOptions': 'string',
'DeferredMaintenanceWindows': [
{
'DeferMaintenanceIdentifier': 'string',
'DeferMaintenanceStartTime': datetime(2015, 1, 1),
'DeferMaintenanceEndTime': datetime(2015, 1, 1)
},
],
'SnapshotScheduleIdentifier': 'string',
'SnapshotScheduleState': 'MODIFYING'|'ACTIVE'|'FAILED',
'ResizeInfo': {
'ResizeType': 'string',
'AllowCancelResize': True|False
}
}
}
**Response Structure**
- *(dict) --*
- **Cluster** *(dict) --*
Describes a cluster.
- **ClusterIdentifier** *(string) --*
The unique identifier of the cluster.
- **NodeType** *(string) --*
The node type for the nodes in the cluster.
- **ClusterStatus** *(string) --*
The current state of the cluster. Possible values are the following:
* ``available``
* ``available, prep-for-resize``
* ``available, resize-cleanup``
* ``cancelling-resize``
* ``creating``
* ``deleting``
* ``final-snapshot``
* ``hardware-failure``
* ``incompatible-hsm``
* ``incompatible-network``
* ``incompatible-parameters``
* ``incompatible-restore``
* ``modifying``
* ``rebooting``
* ``renaming``
* ``resizing``
* ``rotating-keys``
* ``storage-full``
* ``updating-hsm``
- **ModifyStatus** *(string) --*
The status of a modify operation, if any, initiated for the cluster.
- **MasterUsername** *(string) --*
The master user name for the cluster. This name is used to connect to the database that is specified in the **DBName** parameter.
- **DBName** *(string) --*
The name of the initial database that was created when the cluster was created. This same name is returned for the life of the cluster. If an initial database was not specified, a database named ``dev`` dev was created by default.
- **Endpoint** *(dict) --*
The connection endpoint.
- **Address** *(string) --*
The DNS address of the Cluster.
- **Port** *(integer) --*
The port that the database engine is listening on.
- **ClusterCreateTime** *(datetime) --*
The date and time that the cluster was created.
- **AutomatedSnapshotRetentionPeriod** *(integer) --*
The number of days that automatic cluster snapshots are retained.
- **ManualSnapshotRetentionPeriod** *(integer) --*
The default number of days to retain a manual snapshot. If the value is -1, the snapshot is retained indefinitely. This setting doesn't change the retention period of existing snapshots.
The value must be either -1 or an integer between 1 and 3,653.
- **ClusterSecurityGroups** *(list) --*
A list of cluster security group that are associated with the cluster. Each security group is represented by an element that contains ``ClusterSecurityGroup.Name`` and ``ClusterSecurityGroup.Status`` subelements.
Cluster security groups are used when the cluster is not created in an Amazon Virtual Private Cloud (VPC). Clusters that are created in a VPC use VPC security groups, which are listed by the **VpcSecurityGroups** parameter.
- *(dict) --*
Describes a cluster security group.
- **ClusterSecurityGroupName** *(string) --*
The name of the cluster security group.
- **Status** *(string) --*
The status of the cluster security group.
- **VpcSecurityGroups** *(list) --*
A list of Amazon Virtual Private Cloud (Amazon VPC) security groups that are associated with the cluster. This parameter is returned only if the cluster is in a VPC.
- *(dict) --*
Describes the members of a VPC security group.
- **VpcSecurityGroupId** *(string) --*
The identifier of the VPC security group.
- **Status** *(string) --*
The status of the VPC security group.
- **ClusterParameterGroups** *(list) --*
The list of cluster parameter groups that are associated with this cluster. Each parameter group in the list is returned with its status.
- *(dict) --*
Describes the status of a parameter group.
- **ParameterGroupName** *(string) --*
The name of the cluster parameter group.
- **ParameterApplyStatus** *(string) --*
The status of parameter updates.
- **ClusterParameterStatusList** *(list) --*
The list of parameter statuses.
For more information about parameters and parameter groups, go to `Amazon Redshift Parameter Groups <https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-parameter-groups.html>`__ in the *Amazon Redshift Cluster Management Guide* .
- *(dict) --*
Describes the status of a parameter group.
- **ParameterName** *(string) --*
The name of the parameter.
- **ParameterApplyStatus** *(string) --*
The status of the parameter that indicates whether the parameter is in sync with the database, waiting for a cluster reboot, or encountered an error when being applied.
The following are possible statuses and descriptions.
* ``in-sync`` : The parameter value is in sync with the database.
* ``pending-reboot`` : The parameter value will be applied after the cluster reboots.
* ``applying`` : The parameter value is being applied to the database.
* ``invalid-parameter`` : Cannot apply the parameter value because it has an invalid value or syntax.
* ``apply-deferred`` : The parameter contains static property changes. The changes are deferred until the cluster reboots.
* ``apply-error`` : Cannot connect to the cluster. The parameter change will be applied after the cluster reboots.
* ``unknown-error`` : Cannot apply the parameter change right now. The change will be applied after the cluster reboots.
- **ParameterApplyErrorDescription** *(string) --*
The error that prevented the parameter from being applied to the database.
- **ClusterSubnetGroupName** *(string) --*
The name of the subnet group that is associated with the cluster. This parameter is valid only when the cluster is in a VPC.
- **VpcId** *(string) --*
The identifier of the VPC the cluster is in, if the cluster is in a VPC.
- **AvailabilityZone** *(string) --*
The name of the Availability Zone in which the cluster is located.
- **PreferredMaintenanceWindow** *(string) --*
The weekly time range, in Universal Coordinated Time (UTC), during which system maintenance can occur.
- **PendingModifiedValues** *(dict) --*
A value that, if present, indicates that changes to the cluster are pending. Specific pending changes are identified by subelements.
- **MasterUserPassword** *(string) --*
The pending or in-progress change of the master user password for the cluster.
- **NodeType** *(string) --*
The pending or in-progress change of the cluster's node type.
- **NumberOfNodes** *(integer) --*
The pending or in-progress change of the number of nodes in the cluster.
- **ClusterType** *(string) --*
The pending or in-progress change of the cluster type.
- **ClusterVersion** *(string) --*
The pending or in-progress change of the service version.
- **AutomatedSnapshotRetentionPeriod** *(integer) --*
The pending or in-progress change of the automated snapshot retention period.
- | |
= api.datacenters_labels_find_by_key_with_http_info(datacenter_id, key, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the Data Center (required)
:type datacenter_id: str
:param key: The key of the Label (required)
:type key: str
:param pretty: Controls whether response is pretty-printed (with indentation and new lines)
:type pretty: bool
:param depth: Controls the details depth of response objects. Eg. GET /datacenters/[ID] - depth=0: only direct properties are included. Children (servers etc.) are not included - depth=1: direct properties and children references are included - depth=2: direct properties and children properties are included - depth=3: direct properties and children properties and children's children are included - depth=... and so on
:type depth: int
:param x_contract_number: Users having more than 1 contract need to provide contract number, against which all API requests should be executed
:type x_contract_number: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(LabelResource, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'key',
'pretty',
'depth',
'x_contract_number'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method datacenters_labels_find_by_key" % local_var_params_key
)
local_var_params[local_var_params_key] = local_var_params_val
del local_var_params['kwargs']
# verify the required parameter 'datacenter_id' is set
if self.api_client.client_side_validation and ('datacenter_id' not in local_var_params or # noqa: E501
local_var_params['datacenter_id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `datacenter_id` when calling `datacenters_labels_find_by_key`") # noqa: E501
# verify the required parameter 'key' is set
if self.api_client.client_side_validation and ('key' not in local_var_params or # noqa: E501
local_var_params['key'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `key` when calling `datacenters_labels_find_by_key`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] > 10: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_labels_find_by_key`, must be a value less than or equal to `10`") # noqa: E501
if self.api_client.client_side_validation and 'depth' in local_var_params and local_var_params['depth'] < 0: # noqa: E501
raise ApiValueError("Invalid value for parameter `depth` when calling `datacenters_labels_find_by_key`, must be a value greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'datacenter_id' in local_var_params:
path_params['datacenterId'] = local_var_params['datacenter_id'] # noqa: E501
if 'key' in local_var_params:
path_params['key'] = local_var_params['key'] # noqa: E501
query_params = []
if 'pretty' in local_var_params and local_var_params['pretty'] is not None: # noqa: E501
query_params.append(('pretty', local_var_params['pretty'])) # noqa: E501
if 'depth' in local_var_params and local_var_params['depth'] is not None: # noqa: E501
query_params.append(('depth', local_var_params['depth'])) # noqa: E501
header_params = {}
if 'x_contract_number' in local_var_params:
header_params['X-Contract-Number'] = local_var_params['x_contract_number'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic Authentication', 'Token Authentication'] # noqa: E501
response_type = 'LabelResource'
if 'response_type' in kwargs:
response_type = kwargs['response_type']
return self.api_client.call_api(
'/datacenters/{datacenterId}/labels/{key}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=response_type, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
_request_auth=local_var_params.get('_request_auth'))
def datacenters_labels_get(self, datacenter_id, **kwargs): # noqa: E501
"""List all Data Center Labels # noqa: E501
You can retrieve a list of all labels associated with a data center # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_labels_get(datacenter_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the Data Center (required)
:type datacenter_id: str
:param pretty: Controls whether response is pretty-printed (with indentation and new lines)
:type pretty: bool
:param depth: Controls the details depth of response objects. Eg. GET /datacenters/[ID] - depth=0: only direct properties are included. Children (servers etc.) are not included - depth=1: direct properties and children references are included - depth=2: direct properties and children properties are included - depth=3: direct properties and children properties and children's children are included - depth=... and so on
:type depth: int
:param x_contract_number: Users having more than 1 contract need to provide contract number, against which all API requests should be executed
:type x_contract_number: int
:param offset: the first element (of the total list of elements) to include in the response (use together with <code>limit</code> for pagination)
:type offset: int
:param limit: the maximum number of elements to return (use together with <code>offset</code> for pagination)
:type limit: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: LabelResources
"""
kwargs['_return_http_data_only'] = True
return self.datacenters_labels_get_with_http_info(datacenter_id, **kwargs) # noqa: E501
def datacenters_labels_get_with_http_info(self, datacenter_id, **kwargs): # noqa: E501
"""List all Data Center Labels # noqa: E501
You can retrieve a list of all labels associated with a data center # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.datacenters_labels_get_with_http_info(datacenter_id, async_req=True)
>>> result = thread.get()
:param datacenter_id: The unique ID of the Data Center (required)
:type datacenter_id: str
:param pretty: Controls whether response is pretty-printed (with indentation and new lines)
:type pretty: bool
:param depth: Controls the details depth of response objects. Eg. GET /datacenters/[ID] - depth=0: only direct properties are included. Children (servers etc.) are not included - depth=1: direct properties and children references are included - depth=2: direct properties and children properties are included - depth=3: direct properties and children properties and children's children are included - depth=... and so on
:type depth: int
:param x_contract_number: Users having more than 1 contract need to provide contract number, against which all API requests should be executed
:type x_contract_number: int
:param offset: the first element (of the total list of elements) to include in the response (use together with <code>limit</code> for pagination)
:type offset: int
:param limit: the maximum number of elements to return (use together with <code>offset</code> for pagination)
:type limit: int
:param async_req: Whether to execute the request asynchronously.
:type async_req: bool, optional
:param _return_http_data_only: response data without head status code
and headers
:type _return_http_data_only: bool, optional
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type _preload_content: bool, optional
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param _request_auth: set to override the auth_settings for an a single
request; this effectively ignores the authentication
in the spec for a single request.
:type _request_auth: dict, optional
:return: Returns the result object.
If the method is called asynchronously,
returns the request thread.
:rtype: tuple(LabelResources, status_code(int), headers(HTTPHeaderDict))
"""
local_var_params = locals()
all_params = [
'datacenter_id',
'pretty',
'depth',
'x_contract_number',
'offset',
'limit'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout',
'_request_auth',
'response_type'
]
)
for local_var_params_key, local_var_params_val in six.iteritems(local_var_params['kwargs']):
if local_var_params_key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
| |
Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#17: Users with empty passwords: {}\n'.format(users_emptypass))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#17: Check result: {}'.format(check_result))
log_file_logger.info('#17: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #17:Check:vManage:Validate there are no empty password users. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#18:Check:Controllers:Controller versions
print(' Warning Check:#18')
log_file_logger.info('#18:Check:Controllers:Controller versions')
writeFile(report_file, '#18:Check:Controllers:Controller versions\n\n')
try:
check_result, check_analysis, check_action = warningCheckseven(controllers_info)
if check_result == 'Failed':
warning_checks['18:Check:Controllers:Controller versions'] = [ check_analysis, check_action]
log_file_logger.error('#18: Check result: {}'.format(check_result))
log_file_logger.error('#18: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#18: Check result: {}'.format(check_result))
log_file_logger.info('#18: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #18:Check:Controllers:Controller versions. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#19:Check:Controllers:Confirm Certificate Expiration Dates
print(' Warning Check:#19')
log_file_logger.info('#19:Check:Controllers:Confirm Certificate Expiration Dates')
writeFile(report_file, '#19:Check:Controllers:Confirm Certificate Expiration Dates\n\n')
try:
controllers_exp, controllers_notexp, check_result, check_analysis, check_action = warningCheckeight(controllers_info)
if check_result == 'Failed':
warning_checks['#19:Check:Controllers:Confirm Certificate Expiration Dates'] = [ check_analysis, check_action]
log_file_logger.error('#19:Check result: {}'.format(check_result))
log_file_logger.error('#19: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#19: Controllers with certificates close to expiration:\n{}\n'.format(controllers_exp))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#19:Check result: {}'.format(check_result))
log_file_logger.info('#19: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #19:Check:Controllers:Confirm Certificate Expiration Dates. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#20:Check:Controllers:vEdge list sync
print(' Warning Check:#20')
log_file_logger.info('#20:Check:Controllers:vEdge list sync')
writeFile(report_file, '#20:Check:Controllers:vEdge list sync\n\n')
try:
state_vedgeList,check_result, check_analysis, check_action = warningChecknine(controllers_info)
if check_result == 'Failed':
warning_checks['#20:Check:Controllers:Controller versions'] = [ check_analysis, check_action]
log_file_logger.error('#20:Check result: {}'.format(check_result))
log_file_logger.error('#20: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#20: Controllers with inconsistent state_vedgeList: {}\n'.format(state_vedgeList))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#20:Check result: {}'.format(check_result))
log_file_logger.info('#20: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #20:Check:Controllers:vEdge list sync. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#21:Check:Controllers: Confirm control connections
print(' Warning Check:#21')
log_file_logger.info('#21:Check:Controllers: Confirm control connections')
writeFile(report_file, '#21:Check:Controllers: Confirm control connections\n\n')
try:
control_sum_tab, discrepancy,check_result, check_analysis, check_action = warningCheckten(vsmart_count, vbond_count)
if check_result == 'Failed':
warning_checks['#21:Check:Controllers: Confirm control connections'] = [check_analysis, check_action]
log_file_logger.error('#21: Check result: {}'.format(check_result))
log_file_logger.error('#21: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#21: Control Connections Summary:\n{}\n'.format(control_sum_tab))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#21:Check result: {}'.format(check_result))
log_file_logger.info('#21: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #21:Check:Controllers: Confirm control connections. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#Informational Checks
print('\n**** Performing Informational checks\n')
log_file_logger.info('*** Performing Informational Checks')
#22:Check:vManage:Disk controller type
print(' Informational Check:#22')
log_file_logger.info('#22:Check:vManage:Disk controller type')
writeFile(report_file, '#22:Check:vManage:Disk controller type\n\n')
try:
check_result, check_analysis, check_action = infoCheckone(server_type, disk_controller)
if check_result == 'Failed':
warning_checks['#22:Check:vManage:Disk controller type'] = [ check_analysis, check_action]
log_file_logger.error('#22: Check result: {}'.format(check_result))
log_file_logger.error('#22: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#22: Disk Controller type: {}\n'.format(disk_controller))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#22: Check result: {}'.format(check_result))
log_file_logger.info('#22: Check Analysis: {}'.format(check_analysis))
log_file_logger.info('#22: Disk Controller type: {}\n'.format(disk_controller))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #22:Check:vManage:Disk controller type. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#23:Check:Controllers:Validate there is at minimum vBond, vSmart present
print(' Informational Check:#23')
log_file_logger.info('#23:Check:Controllers:Validate there is at minimum vBond, vSmart present')
writeFile(report_file, '#23:Check:Controllers:Validate there is at minimum vBond, vSmart present\n\n')
try:
check_result, check_analysis, check_action = infoChecktwo(vsmart_count,vbond_count)
if check_result == 'Failed':
warning_checks['#23:Check:Controllers:Validate there is at minimum vBond, vSmart present'] = [ check_analysis, check_action]
log_file_logger.error('#23: Check result: {}'.format(check_result))
log_file_logger.error('#23: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#23: vSmart Count: {}'.format(vsmart_count))
log_file_logger.error('#23: vBond Count: {}\n'.format(vbond_count))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#23: Check result: {}'.format(check_result))
log_file_logger.info('#23: Check Analysis: {}'.format(check_analysis))
log_file_logger.info('#23: vSmart Count: {}'.format(vsmart_count))
log_file_logger.info('#23: vBond Count: {}\n'.format(vbond_count))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #23:Check:Controllers:Validate there is at minimum vBond, vSmart present. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#24:Check:Controllers:Validate all controllers are reachable
print(' Informational Check:#24')
log_file_logger.info('#24:Check:Controllers:Validate all controllers are reachable')
writeFile(report_file, '#24:Check:Controllers:Validate all controllers are reachable\n\n')
try:
unreach_controllers,check_result, check_analysis, check_action = infoChecktthree(controllers_info)
if check_result == 'Failed':
warning_checks['#24:Check:Controllers:Validate all controllers are reachable'] = [check_analysis, check_action]
log_file_logger.error('#24: Check result: {}'.format(check_result))
log_file_logger.error('#24: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#24: Unreachable Controllers: {}\n'.format(unreach_controllers))
writeFile(report_file, 'Result: WARNING - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#24: Check result: {}'.format(check_result))
log_file_logger.info('#24: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #24:Check:Controllers:Validate all controllers are reachable. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
if cluster_size>1:
cluster_checks = {}
log_file_logger.info('*** Performing Cluster Checks')
print('\n**** Performing Cluster checks\n')
#25:Check:Cluster:Version consistency
print(' Cluster Check:#25')
log_file_logger.info('#25:Check:Cluster:Version consistency')
writeFile(report_file, '#25:Check:Cluster:Version consistency\n\n')
try:
check_result,check_analysis, check_action = criticalChecktwelve(vmanage_info)
if check_result == 'Failed':
cluster_checks['#25:Check:Cluster:Version consistency'] = [ check_analysis, check_action]
log_file_logger.error('#25: Check result: {}'.format(check_result))
log_file_logger.error('#25: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#25: vManage info: {}\n'.format(vmanage_info))
writeFile(report_file, 'Result: ERROR - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#25: Check result: {}'.format(check_result))
log_file_logger.info('#25: Check Analysis: {}'.format(check_analysis))
log_file_logger.info('#25: vManage info: {}\n'.format(vmanage_info))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #25:Check:Cluster:Version consistency. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#26:Check:Cluster:Cluster health
print(' Cluster Check:#26')
log_file_logger.info('#26:Check:Cluster:Cluster health')
writeFile(report_file, '#26:Check:Cluster:Cluster health\n\n')
try:
cluster_health_data = json.loads(getRequest(version_tuple,vmanage_lo_ip,jsessionid, 'clusterManagement/list', args.vmanage_port, tokenid))
services_down, check_result, check_analysis, check_action = criticalCheckthirteen(cluster_health_data)
if check_result == 'Failed':
cluster_checks['#26:Check:Cluster:Cluster health'] = [ check_analysis, check_action]
log_file_logger.error('#26: Check result: {}'.format(check_result))
log_file_logger.error('#26: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#26: Relevant cluster services that are down: {}\n'.format(services_down))
writeFile(report_file, 'Result: ERROR - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#26: Check result: {}'.format(check_result))
log_file_logger.info('#26: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #26:Check:Cluster:Cluster health. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#27:Check:Cluster:Cluster ConfigDB topology
print(' Cluster Check:#27')
log_file_logger.info('#27:Check:Cluster:Cluster ConfigDB topology')
writeFile(report_file, '#27:Check:Cluster:Cluster ConfigDB topology\n\n')
try:
cluster_health_data = json.loads(getRequest(version_tuple,vmanage_lo_ip,jsessionid,'clusterManagement/list', args.vmanage_port, tokenid))
configDB_count, check_result, check_analysis, check_action = criticalCheckfourteen(cluster_health_data)
if check_result == 'Failed':
cluster_checks['#27:Check:Cluster:Cluster ConfigDB topology'] = [ check_analysis, check_action]
log_file_logger.error('#27: Check result: {}'.format(check_result))
log_file_logger.error('#27: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#27: No. of configDB servers in the cluster: {}\n'.format(configDB_count))
writeFile(report_file, 'Result: ERROR - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#27: Check result: {}'.format(check_result))
log_file_logger.info('#27: Check Analysis: {}'.format(check_analysis))
log_file_logger.info('#27: No. of configDB servers in the cluster: {}\n'.format(configDB_count))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #27:Check:Cluster:Cluster ConfigDB topology. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#28:Check:Cluster:Messaging server
print(' Cluster Check:#28')
log_file_logger.info('#28:Check:Cluster:Messaging server')
writeFile(report_file, '#28:Check:Cluster:Messaging server\n\n')
try:
cluster_health_data = json.loads(getRequest(version_tuple,vmanage_lo_ip, jsessionid,'clusterManagement/list', args.vmanage_port, tokenid ))
cluster_msdown,check_result,check_analysis, check_action = criticalCheckfifteen(cluster_health_data)
if check_result == 'Failed':
cluster_checks['#28:Check:Cluster:Messaging server'] = [ check_analysis, check_action]
log_file_logger.error('#28: Check result: {}'.format(check_result))
log_file_logger.error('#28: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#28: Relevant cluster services that are down: {}\n'.format(services_down))
writeFile(report_file, 'Result: ERROR - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#28: Check result: {}'.format(check_result))
log_file_logger.info('#28: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #28:Check:Cluster:Messaging server. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#29:Check:Cluster:DR replication status
print(' Cluster Check:#29')
log_file_logger.info('#29:Check:Cluster:DR replication status')
writeFile(report_file, '#29:Check:Cluster:DR replication status\n\n')
try:
dr_data = json.loads(getRequest(version_tuple,vmanage_lo_ip, jsessionid, 'disasterrecovery/details', args.vmanage_port, tokenid))
dr_status, check_action, check_analysis, check_result = criticalChecksixteen(dr_data)
if check_result == 'Failed':
cluster_checks['#29:Check:Cluster:DR replication status'] = [ check_analysis, check_action]
log_file_logger.error('#29: Check result: {}'.format(check_result))
log_file_logger.error('#29: Check Analysis: {}'.format(check_analysis))
log_file_logger.error('#29: DR Replication status: {}\n'.format(dr_status))
writeFile(report_file, 'Result: ERROR - {}\n'.format(check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(check_action))
else:
log_file_logger.info('#29: Check result: {}'.format(check_result))
log_file_logger.info('#29: Check Analysis: {}\n'.format(check_analysis))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #29:Check:Cluster:DR replication status. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#30:Check:Cluster:Intercluster communication
print(' Cluster Check:#30')
log_file_logger.info('#30:Check:Cluster:Intercluster communication')
writeFile(report_file, '#30:Check:Cluster:Intercluster communication\n\n')
try:
if criticalCheckseventeen.isAlive():
criticalCheckseventeen.join(10)
if not criticalCheckseventeen.result_queue.empty():
ping_output, ping_output_failed, ping_check_result, ping_check_analysis, ping_check_action = criticalCheckseventeen.result_queue.get()
if ping_check_result == 'Failed':
cluster_checks['#30:Check:Cluster:Intercluster communication'] = [ ping_check_analysis, ping_check_action]
log_file_logger.error('#30: Check result: {}'.format(ping_check_result))
log_file_logger.error('#30: Check Analysis: {}'.format(ping_check_analysis))
log_file_logger.error('#30: Cluster nodes with ping failure: {}\n'.format(ping_output_failed))
writeFile(report_file, 'Result: ERROR - {}\n'.format(ping_check_analysis))
writeFile(report_file, 'Action: {}\n\n'.format(ping_check_action))
else:
log_file_logger.info('#30: Check result: {}'.format(ping_check_result))
log_file_logger.info('#30: Check Analysis: {}'.format(ping_check_analysis))
log_file_logger.info('#30: Cluster nodes details: {}\n'.format(ping_output))
writeFile(report_file, 'Result: INFO - {}\n\n'.format(ping_check_analysis))
except Exception as e:
print('\033[1;31m ERROR: Error performing #30:Check:Cluster:Intercluster communication. \n Please check error details in log file: {}.\n If needed, please reach out to tool support at: <EMAIL>, with your report and log file. \033[0;0m'.format(log_file_path))
log_file_logger.exception('{}\n'.format(e))
#Logging out of the Session using jsessionid
log_file_logger.info('Logging out of the Session')
sessionLogout(vmanage_lo_ip, jsessionid, args.vmanage_port)
log_file_logger.info('Successfully closed the connection')
#version equal to or above 20.5
elif version_tuple[0:2] >= ('20','5'):
try:
log_file_logger.info('Generating a JSessionID')
jsessionid = generateSessionIDpy3(vmanage_lo_ip, args.username, | |
Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; fr; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Windows; U; Win98; de-AT; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; fr; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; es-ES; rv:1.7.3) Gecko/20040910',
'Mozilla/5.0 (X11; U; Linux i686; fr; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040906',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040906',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040810 Debian/1.7.2-2',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (X11; U; FreeBSD i386; ja-JP; rv:1.7.2) Gecko/20050330',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.2) Gecko/20040709',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; de-AT; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (Windows; ; Windows NT 5.1; rv:1.7.2) Gecko/20040804',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.7.2) Gecko/20040803',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.13) Gecko/20060509',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.7.13) Gecko/20060901',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060717 Debian/1.7.13-0.2ubuntu1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060427 Debian/1.7.13-0ubuntu05.04',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.13) Gecko/20060417',
'Mozilla/5.0 (X11; U; Linux i686 (x86_64); en-US; rv:1.7.13) Gecko/20060417',
'Mozilla/5.0 (X11; U; FreeBSD i386; en-US; rv:1.7.13) Gecko/20061230',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.13) Gecko/20060414',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.7.13) Gecko/20060414',
'Mozilla/4.0 (compatible; Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.13) Gecko/20060414; Windows NT 5.1)',
'Mozilla/5.0 (X11; U; Linux i686; es-ES; rv:1.7.12) Gecko/20050929',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060607 Debian/1.7.12-1.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060216 Debian/1.7.12-1.1ubuntu2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060205 Debian/1.7.12-1.1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20060202 Fedora/1.7.12-1.5.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051203',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051013 Debian/1.7.12-1ubuntu1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051010 Debian/1.7.12-0ubuntu2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20051007 Debian/1.7.12-1',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050926',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050923',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050921 Red Hat/1.7.12-1.1.3.2',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050921',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.12) Gecko/20050920',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.12) Gecko/20060205 Debian/1.7.12-1.1',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.12) Gecko/20050923 Fedora/1.7.12-1.5.1',
'Mozilla/5.0 (X11; U; Linux i686; cs-CZ; rv:1.7.12) Gecko/20050929',
'Mozilla/5.0 (X11; U; Linux i686 (x86_64); fr; rv:1.7.12) Gecko/20051010 Debian/1.7.12-0ubuntu2',
'Mozilla/5.0 (X11; U; AIX 5.3; en-US; rv:1.7.12) Gecko/20051025',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.7.12) Gecko/20050915',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7.11) Gecko/20050802',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.11) Gecko/20050729',
'Mozilla/5.0 (Windows; U; WinNT4.0; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.11) Gecko/20050728 (No IDN)',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Win95; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; de-AT; rv:1.7.11) Gecko/20050728',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; fr-FR; rv:1.7.11) Gecko/20050727',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7.10) Gecko/20050811 Fedora/1.7.10-1.2.1.legacy',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.10) Gecko/20050727',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.7.10) Gecko/20050722',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.10) Gecko/20050716',
'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; fr-FR; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (Windows; U; Win 9x 4.90; en-US; rv:1.7.1) Gecko/20040707',
'Mozilla/5.0 (X11; U; OpenBSD i386; en-US; rv:1.7.0.13) Gecko/20060901',
'Mozilla/5.0 (X11; U; SunOS sun4v; en-US; rv:1.7) Gecko/20060120',
'Mozilla/5.0 (X11; U; SunOS sun4u; fr-FR; rv:1.7) Gecko/20040621',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7) Gecko/20060629',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.7) Gecko/20060120',
'Mozilla/5.0 (X11; U; SunOS sun4u; de-DE; rv:1.7) Gecko/20070606',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20060627',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20051122',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20051027',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20050502',
'Mozilla/5.0 (X11; U; SunOS i86pc; en-US; rv:1.7) Gecko/20041221',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.7) Gecko/20040514',
'Mozilla/5.0 (X11; U; FreeBSD; i386; it-IT; rv:1.7) Gecko',
'Mozilla/5.0 (X11; U; FreeBSD; i386; en-US; rv:1.7) Gecko',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; es-ES; rv:1.7) Gecko/20040803 Firefox/0.9.3',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.7) Gecko/20040514',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.7) Gecko/20040616',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6a) Gecko/20031030',
'Mozilla/5.0 (X11; U; Linux x86_64; fr; rv:1.6) Gecko/20040115',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040413 Debian/1.6-5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040114',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (X11; U; Linux i686; en-GB; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.6) Gecko/20040115',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.6) Gecko/20040114',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; hu; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; fr-FR; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Photon; U; QNX x86pc; en-US; rv:1.6) Gecko/20040429',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.6) Gecko/20040113',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5b) Gecko/20030827',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5b) Gecko/20030827',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5a) Gecko/20030718',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5a) Gecko/20030718',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.5.1) Gecko/20031120',
'Mozilla/5.0 (X11; U; SunOS5.10 sun4u; ja-JP; rv:1.5) Gecko/20031022',
'Mozilla/5.0 (X11; U; Linux i686; fr-FR; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (X11; U; Linux i686; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.5) Gecko/20030916',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.5) Gecko/20030916',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; Win98; de-AT; rv:1.5) Gecko/20031007',
'Mozilla/5.0 (Windows; U; WinNT4.0; it-IT; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4b) Gecko/20030427',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4b) Gecko/20030507',
'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:1.4a) Gecko/20030318',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; WinNT4.0; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; de-AT; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Windows; U; Win98; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (Macintosh; U; PPC Mac OS X Mach-O; en-US; rv:1.4a) Gecko/20030401',
'Mozilla/5.0 (X11; U; IRIX64 IP35; en-US; rv:1.4.3) Gecko/20040909',
'Mozilla/5.0 (X11; U; Linux i686; en-US; | |
<reponame>mikeshardmind/salamander
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Original Author: https://github.com/Rapptz
# This file contains code which was copied (and modified)
# from: https://github.com/Rapptz/RoboDanny/tree/cd23371cee697d861182a037f90896d9d4a082dd
from __future__ import annotations
import asyncio
import itertools
from typing import TYPE_CHECKING, Any
import discord
from discord.ext import commands, menus
if TYPE_CHECKING:
from .bot import Salamander, SalamanderContext
else:
class Salamander:
"""If you encounter this during introspection,
it's a typehinting fiction to avoid a circular import."""
pass
class SalamanderContext:
"""If you encounter this during introspection,
it's a typehinting fiction to avoid a circular import."""
pass
class RoboPages(discord.ui.View):
    """Button-driven paginator view over a ``menus.PageSource``.

    Adapted from RoboDanny. Pages are rendered by editing a single message;
    navigation happens through the button callbacks defined below. Only the
    invoking author (or the bot owner) may operate the menu — see
    :meth:`interaction_check`.
    """

    def __init__(
        self,
        source: menus.PageSource,
        *,
        ctx: SalamanderContext,
        check_embeds: bool = True,
        compact: bool = False,
    ):
        super().__init__()
        self.source: menus.PageSource = source  # supplies and formats page content
        self.check_embeds: bool = check_embeds  # refuse to start without embed_links perm
        self.ctx: SalamanderContext = ctx
        self.message: discord.Message | None = None  # set by start() once the menu is sent
        self.current_page: int = 0
        self.compact: bool = compact  # compact mode drops the second button row
        self.input_lock = asyncio.Lock()  # serializes the "skip to page" prompt
        # Discard decorator-registered items and rebuild the layout explicitly.
        self.clear_items()
        self.fill_items()

    def fill_items(self) -> None:
        """Attach buttons appropriate to the source's page count and compact mode."""
        if not self.compact:
            # Full layout: push the extra controls onto a second row.
            self.numbered_page.row = 1
            self.stop_pages.row = 1
        if self.source.is_paginating():
            max_pages = self.source.get_max_pages()
            # First/last jump buttons only make sense with 2+ known pages.
            use_last_and_first = max_pages is not None and max_pages >= 2
            if use_last_and_first:
                self.add_item(self.go_to_first_page)
            self.add_item(self.go_to_previous_page)
            if not self.compact:
                self.add_item(self.go_to_current_page)
            self.add_item(self.go_to_next_page)
            if use_last_and_first:
                self.add_item(self.go_to_last_page)
            if not self.compact:
                self.add_item(self.numbered_page)
            self.add_item(self.stop_pages)

    async def _get_kwargs_from_page(self, page: int) -> dict[str, Any]:
        # Normalize format_page's return value (dict, str, or Embed) into the
        # kwargs accepted by Message.edit / send.
        value = await discord.utils.maybe_coroutine(self.source.format_page, self, page)
        if isinstance(value, dict):
            return value
        elif isinstance(value, str):
            return {"content": value, "embed": None}
        elif isinstance(value, discord.Embed):
            return {"embed": value, "content": None}
        else:
            return {}

    async def show_page(self, interaction: discord.Interaction, page_number: int) -> None:
        """Render *page_number* unconditionally (no bounds check)."""
        page = await self.source.get_page(page_number)
        self.current_page = page_number
        kwargs = await self._get_kwargs_from_page(page)
        self._update_labels(page_number)
        if kwargs:
            if interaction.response.is_done():
                # The interaction was already answered elsewhere; fall back to
                # editing the stored menu message directly.
                if self.message:
                    await self.message.edit(**kwargs, view=self)
            else:
                await interaction.response.edit_message(**kwargs, view=self)

    def _update_labels(self, page_number: int) -> None:
        """Refresh button labels/disabled state for the page about to be shown."""
        self.go_to_first_page.disabled = page_number == 0
        if self.compact:
            max_pages = self.source.get_max_pages()
            self.go_to_last_page.disabled = max_pages is None or (page_number + 1) >= max_pages
            self.go_to_next_page.disabled = max_pages is not None and (page_number + 1) >= max_pages
            self.go_to_previous_page.disabled = page_number == 0
            return
        # Non-compact layout: the nav buttons display neighbouring page numbers
        # (1-based), with "…" marking an edge that cannot be crossed.
        self.go_to_current_page.label = str(page_number + 1)
        self.go_to_previous_page.label = str(page_number)
        self.go_to_next_page.label = str(page_number + 2)
        self.go_to_next_page.disabled = False
        self.go_to_previous_page.disabled = False
        self.go_to_first_page.disabled = False
        max_pages = self.source.get_max_pages()
        if max_pages is not None:
            self.go_to_last_page.disabled = (page_number + 1) >= max_pages
            if (page_number + 1) >= max_pages:
                self.go_to_next_page.disabled = True
                self.go_to_next_page.label = "…"
            if page_number == 0:
                self.go_to_previous_page.disabled = True
                self.go_to_previous_page.label = "…"

    async def show_checked_page(self, interaction: discord.Interaction, page_number: int) -> None:
        """Like :meth:`show_page`, but silently ignores out-of-range requests."""
        max_pages = self.source.get_max_pages()
        try:
            if max_pages is None:
                # If it doesn't give maximum pages, it cannot be checked
                await self.show_page(interaction, page_number)
            elif max_pages > page_number >= 0:
                await self.show_page(interaction, page_number)
        except IndexError:
            # An error happened that can be handled, so ignore it.
            pass

    async def interaction_check(self, interaction: discord.Interaction) -> bool:
        # Restrict menu control to the command author and the bot owner.
        if interaction.user and interaction.user.id in (self.ctx.bot.owner_id, self.ctx.author.id):
            return True
        await interaction.response.send_message(
            "This pagination menu cannot be controlled by you, sorry!", ephemeral=True
        )
        return False

    async def on_timeout(self) -> None:
        # Strip the buttons but keep the final page's content visible.
        if self.message:
            await self.message.edit(view=None)

    async def on_error(self, interaction: discord.Interaction, error: Exception, item: discord.ui.Item) -> None:
        # Generic apology; route through followup if the response was consumed.
        if interaction.response.is_done():
            await interaction.followup.send("An unknown error occurred, sorry", ephemeral=True)
        else:
            await interaction.response.send_message("An unknown error occurred, sorry", ephemeral=True)

    async def start(self, *, content: str | None = None) -> None:
        """Send the first page and begin the pagination session.

        :param content: optional plain-text content to accompany the first page.
        """
        if self.check_embeds and not self.ctx.channel.permissions_for(self.ctx.me).embed_links:  # type: ignore
            await self.ctx.send("Bot does not have embed links permission in this channel.")
            return
        await self.source._prepare_once()
        page = await self.source.get_page(0)
        kwargs = await self._get_kwargs_from_page(page)
        if content:
            # setdefault: a dict page may already carry its own content.
            kwargs.setdefault("content", content)
        self._update_labels(0)
        self.message = await self.ctx.send(**kwargs, view=self)

    @discord.ui.button(label="≪", style=discord.ButtonStyle.grey)
    async def go_to_first_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        """go to the first page"""
        await self.show_page(interaction, 0)

    @discord.ui.button(label="<", style=discord.ButtonStyle.blurple)
    async def go_to_previous_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        """go to the previous page"""
        await self.show_checked_page(interaction, self.current_page - 1)

    @discord.ui.button(label="Current", style=discord.ButtonStyle.grey, disabled=True)
    async def go_to_current_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        # Display-only page indicator; never enabled.
        pass

    @discord.ui.button(label=">", style=discord.ButtonStyle.blurple)
    async def go_to_next_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        """go to the next page"""
        await self.show_checked_page(interaction, self.current_page + 1)

    @discord.ui.button(label="≫", style=discord.ButtonStyle.grey)
    async def go_to_last_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        """go to the last page"""
        # The call here is safe because it's guarded by skip_if
        await self.show_page(interaction, self.source.get_max_pages() - 1)  # type: ignore

    @discord.ui.button(label="Skip to page...", style=discord.ButtonStyle.grey)
    async def numbered_page(self, interaction: discord.Interaction, button: discord.ui.Button):
        """lets you type a page number to go to"""
        if self.input_lock.locked():
            await interaction.response.send_message("Already waiting for your response...", ephemeral=True)
            return
        if self.message is None:
            return
        async with self.input_lock:
            channel = self.message.channel
            author_id = interaction.user and interaction.user.id
            await interaction.response.send_message("What page do you want to go to?", ephemeral=True)

            def message_check(m):
                # Only accept a digits-only message from the same user in the same channel.
                return m.author.id == author_id and channel == m.channel and m.content.isdigit()

            try:
                msg = await self.ctx.bot.wait_for("message", check=message_check, timeout=30.0)
            except asyncio.TimeoutError:
                await interaction.followup.send("Took too long.", ephemeral=True)
                await asyncio.sleep(5)
            else:
                page = int(msg.content)
                await msg.delete()
                await self.show_checked_page(interaction, page - 1)

    @discord.ui.button(label="Quit", style=discord.ButtonStyle.red)
    async def stop_pages(self, interaction: discord.Interaction, button: discord.ui.Button):
        """stops the pagination session."""
        await interaction.response.defer()
        # NOTE(review): delete_original_message is the discord.py 2.0 name; newer
        # releases rename it to delete_original_response — confirm pinned version.
        await interaction.delete_original_message()
        self.stop()
class GroupHelpPageSource(menus.ListPageSource):
    """Page source rendering the commands of one group or cog.

    Each page holds up to six commands, shown as embed fields of
    signature -> short help text.
    """

    def __init__(
        self,
        group: commands.Group | commands.Cog,
        commands: list[commands.Command],
        *,
        prefix: str,
        color: discord.Color,
    ):
        super().__init__(entries=commands, per_page=6)
        self.group: commands.Group | commands.Cog = group
        self.prefix: str = prefix
        self.color: discord.Color = color
        # Title and description come straight from the group/cog itself.
        self.title: str = f"{self.group.qualified_name}"
        self.description: str = self.group.description

    async def format_page(self, menu: RoboPages, commands: list[commands.Command]):
        """Build the embed for one page worth of commands."""
        embed = discord.Embed(title=self.title, description=self.description, color=self.color)
        for cmd in commands:
            embed.add_field(
                name=f"{cmd.qualified_name} {cmd.signature}",
                value=cmd.short_doc or "No help given...",
                inline=False,
            )
        page_count = self.get_max_pages()
        if page_count > 1:
            author_text = f"Page {menu.current_page + 1}/{page_count} ({len(self.entries)} commands)"
            embed.set_author(name=author_text)
        embed.set_footer(text=f'Use "{self.prefix}help command" for more info on a command.')
        return embed
class FieldPageSource(menus.ListPageSource):
    """A page source that requires (field_name, field_value) tuple items."""

    def __init__(self, entries, *, per_page=12):
        super().__init__(entries, per_page=per_page)
        # A single embed instance is reused and re-populated for every page.
        self.embed = discord.Embed(colour=discord.Colour.blurple())

    async def format_page(self, menu, entries):
        """Fill the shared embed with this page's (name, value) fields."""
        embed = self.embed
        embed.clear_fields()
        embed.description = None
        for field_name, field_value in entries:
            embed.add_field(name=field_name, value=field_value, inline=False)
        total_pages = self.get_max_pages()
        if total_pages > 1:
            embed.set_footer(
                text=f"Page {menu.current_page + 1}/{total_pages} ({len(self.entries)} entries)"
            )
        return embed
class TextPageSource(menus.ListPageSource):
    """Splits a block of text into fenced chunks, one chunk per page."""

    def __init__(self, text, *, prefix="```", suffix="```", max_size=2000):
        # Paginator handles the fencing; keep 200 chars of headroom under
        # max_size for the page-counter line appended in format_page.
        paginator = commands.Paginator(prefix=prefix, suffix=suffix, max_size=max_size - 200)
        for line in text.split("\n"):
            paginator.add_line(line)
        super().__init__(entries=paginator.pages, per_page=1)

    async def format_page(self, menu, content):
        """Return the page text, appending a page counter when there are multiple pages."""
        page_count = self.get_max_pages()
        if page_count <= 1:
            return content
        return f"{content}\nPage {menu.current_page + 1}/{page_count}"
class SimplePageSource(menus.ListPageSource):
    """Numbers each entry and renders the page into the menu's shared embed."""

    async def format_page(self, menu, entries):
        # Continue the 1-based numbering across pages.
        start = menu.current_page * self.per_page
        numbered = [f"{idx + 1}. {entry}" for idx, entry in enumerate(entries, start=start)]
        total_pages = self.get_max_pages()
        if total_pages > 1:
            footer = f"Page {menu.current_page + 1}/{total_pages} ({len(self.entries)} entries)"
            menu.embed.set_footer(text=footer)
        menu.embed.description = "\n".join(numbered)
        return menu.embed
class SimplePages(RoboPages):
    """A simple pagination session reminiscent of the old Pages interface.

    Basically an embed with some normal formatting.
    """

    def __init__(self, entries, *, ctx: SalamanderContext, per_page: int = 12):
        source = SimplePageSource(entries, per_page=per_page)
        super().__init__(source, ctx=ctx)
        # Shared embed that SimplePageSource.format_page writes into.
        self.embed = discord.Embed(color=ctx.me.color)
class HelpSelectMenu(discord.ui.Select["HelpMenu"]):
    """Dropdown for the help menu: jumps between the index page and each cog's page.

    Fix: internal locals previously named ``commands`` shadowed the
    ``discord.ext.commands`` module imported at the top of the file; they are
    renamed ``cog_commands`` (the public parameter name is unchanged).
    """

    def __init__(self, commands: dict[commands.Cog, list[commands.Command]], bot: Salamander, color: discord.Color):
        super().__init__(
            placeholder="Select a category...",
            min_values=1,
            max_values=1,
            row=0,
        )
        self.commands: dict[commands.Cog, list[commands.Command]] = commands  # cog -> its commands
        self.bot: Salamander = bot
        self.color: discord.Color = color
        self.__fill_options()

    def __fill_options(self) -> None:
        # Populate the dropdown: a fixed "Index" entry plus one option per non-empty cog.
        self.add_option(
            label="Index",
            emoji="\N{WAVING HAND SIGN}",
            value="__index",
            description="The help page showing how to use the bot.",
        )
        for cog, cog_commands in self.commands.items():
            if not cog_commands:
                continue
            # Only the first line of the cog description fits in an option.
            description = cog.description.split("\n", 1)[0] or None
            self.add_option(label=cog.qualified_name, value=cog.qualified_name, description=description, emoji=None)

    async def callback(self, interaction: discord.Interaction):
        """Switch the parent view to the page matching the selected option."""
        assert self.view is not None
        value = self.values[0]
        if value == "__index":
            await self.view.bind_to_interaction(FrontPageSource(), interaction)
        else:
            cog = self.bot.get_cog(value)
            if cog is None:
                # Cog vanished between menu creation and selection (e.g. unloaded).
                await interaction.response.send_message("Did... did you break it?", ephemeral=True)
                return
            cog_commands = self.commands[cog]
            if not cog_commands:
                await interaction.response.send_message("This category appears to be empty right now.", ephemeral=True)
                return
            source = GroupHelpPageSource(cog, cog_commands, prefix=self.view.ctx.clean_prefix, color=self.color)
            await self.view.bind_to_interaction(source, interaction)
class FrontPageSource(menus.PageSource):
# next 3 methods are a means of forcing menu elements to be visible even as a single page
def is_paginating(self) -> bool:
return True
def get_max_pages(self) -> int | None:
return 2
async def get_page(self, page_number: int) -> Any:
self.index = page_number
return self
def format_page(self, menu: HelpMenu, page: Any):
if self.index == 0:
return discord.Embed(title="Help", color=menu.ctx.me.color, description="Take a look around.")
if self.index == 1:
embed = discord.Embed(title="Help", color=menu.ctx.me.color)
embed.description = (
"This is the help page."
"\nYou can use {hc_text} for help with a specific command."
"\nYou can use {hc_text} or select an entry from the dropdown menu for help with a category."
).format(hc_text=f'"{menu.ctx.clean_prefix}help command"')
embed.add_field(
name="How to read the help",
value="Command usage is shown with a few | |
<filename>quest/lib/structure/Structure.py
import os
import os.path
import tempfile
from collections import OrderedDict
from copy import deepcopy, copy

import numpy as np
import scipy.cluster.hierarchy as hclust
from scipy.cluster.hierarchy import fcluster

import lib
from lib.math.linalg import vector
from lib.structure import cStructure
from lib.io.PDB import Pdb
clusterCriteria = ['maxclust', 'inconsistent', 'distance']
def onRMSF(structures, selectedNbrs, atomName=None, weights=None):
    """
    Calculates the root mean square fluctuation with respect to the average structure
    for a given set of structures. The structures do not have to be aligned.

    :param structures: a list of structure object of type Structure
    :param selectedNbrs: list of integers with the selected structures out of the structure list
    :param atomName: atom-name used for calculation (e.g. 'CA') if not specified all atoms are used
    :param weights: optional per-structure weights; normalized internally to sum to 1
    :return: numpy array with one RMSF value per selected atom
    """
    print("onRMSF")
    if weights is None:
        print("using no weights")
        weights = np.ones(len(selectedNbrs), dtype=np.float32)
    else:
        print("using weights")
        # np.asarray so list inputs survive the in-place normalization below.
        weights = np.asarray(weights, dtype=np.float32)
    # Normalize in BOTH branches: previously the uniform-weight branch skipped
    # normalization, scaling the resulting RMSF by sqrt(N).
    weights /= weights.sum()
    candidateStructures = [deepcopy(structures[i]) for i in selectedNbrs]
    print("calculating average structure as a reference")
    reference = average(candidateStructures, weights=weights)
    print("aligning selected structures with respect to reference")
    for s in candidateStructures:
        super_impose(reference, s)
    print("Getting %s-atoms of reference" % atomName)
    ar = reference.getAtoms(atomName=atomName)
    cr = ar['coord']
    msf = np.zeros(len(ar), dtype=np.float32)
    # Weighted mean square fluctuation, accumulated per atom.
    for i, s in enumerate(candidateStructures):
        a = s.getAtoms(atomName=atomName)
        ca = a['coord']
        msf += weights[i] * np.sum((cr - ca) ** 2, axis=1)
    return np.sqrt(msf)
def rmsd(sa, sb, atom_indices=None):
    """Return the root-mean-square deviation between two structures.

    :param sa: first structure (must expose an ``xyz`` coordinate array)
    :param sb: second structure
    :param atom_indices: optional indices restricting the comparison to a subset
    :return: RMSD as a plain float

    >>> import lib
    >>> t = lib.TrajectoryFile('./sample_data/structure/2807_8_9_b.h5', mode='r', stride=1)
    >>> rmsd(t[10], t[0])
    6.960082250440536
    """
    if atom_indices is None:
        coords_a, coords_b = sa.xyz, sb.xyz
    else:
        coords_a, coords_b = sa.xyz[atom_indices], sb.xyz[atom_indices]
    mean_square = ((coords_a - coords_b) ** 2).sum() / coords_a.shape[0]
    return float(np.sqrt(mean_square))
def find_representative(trajectory, cl):
    """
    :param trajectory: a list of structures
    :param cl: a list of numbers (positions in structures) belonging to one cluster
    :return: index (into the trajectory) of the cluster's representative structure
    """
    members = [trajectory[i] for i in cl]
    # The representative is the member closest to the cluster average.
    cluster_average = average(members)
    local_idx, _representative = find_best(cluster_average, members)
    return cl[local_idx]
def cluster(structures, threshold=5000, criterion='maxclust', Z=None, distances=None, directory=None):
    """Hierarchical (UPGMA) clustering of structures on pairwise RMSD.

    :param structures: list of structures to cluster
    :param threshold: threshold ``t`` passed to scipy's fcluster
    :param criterion: flattening criterion ('maxclust', 'inconsistent', 'distance')
    :param Z: optional precomputed linkage matrix (skips the linkage step)
    :param distances: optional precomputed condensed distance matrix
    :param directory: if given, distances and linkage matrix are saved there
    :return: tuple (Z, cl, assignments, distances) where cl maps cluster id -> member indices
    """
    # http://www.mathworks.de/de/help/stats/hierarchical-clustering.html
    print("Performing cluster-analysis")
    k = 0
    #start_time = time.time()
    nStructures = len(structures)
    # Number of pairwise distances. Integer division is required: the original
    # float expression broke np.empty() on Python 3, and the old progress line
    # used (n*n - 1)/2 -- an operator-precedence bug.
    m = nStructures * (nStructures - 1) // 2
    if distances is None:
        distances = np.empty(m)
        for i in range(nStructures):
            for j in range(i + 1, nStructures):
                distances[k] = rmsd(structures[j], structures[i])
                k += 1
            if m > 0:  # guard against n < 2 (no pairs -> no progress to report)
                print('RMSD computation %s/%s : %.1f%%' % (k, m, float(k) / m * 100.0))
        if directory is not None:
            print("Saving distance-matrix")
            np.save(os.path.join(directory, 'clDistances.npy'), distances)
    print('mean pairwise distance ', np.mean(distances))
    print('stddev pairwise distance', np.std(distances))
    if Z is None:
        # run hierarchical clustering on the distance matrix
        print('\n\nRunning hierarchical clustering (UPGMA)...')
        Z = hclust.linkage(distances, method='average', preserve_input=True)
        # get flat clusters from the linkage matrix corresponding to states
        if directory is not None:
            print("Saving cluster-results")
            np.save(os.path.join(directory, 'clLinkage.npy'), Z)
    print('\n\nFlattening the clusters...')
    assignments = fcluster(Z, t=threshold, criterion=criterion)
    # Group member indices by cluster id.
    cl = dict()
    for c in np.unique(assignments):
        cl[c] = []
    for i, a in enumerate(assignments):
        cl[a] += [i]
    #print "Needed time: %.3f seconds" % (time.time() - start_time)
    print('Number of clusters found', len(np.unique(assignments)))
    return Z, cl, assignments, distances
def findSmallestCluster(clusters):
    """Return the key of the cluster with the fewest members.

    Ties resolve to the first such key in iteration order (same as the
    original manual scan).

    :param clusters: dict mapping cluster name -> list of member indices
    :return: key of the smallest cluster
    """
    print("findSmallestCluster")
    # Idiomatic replacement for the manual first-key/compare loop.
    return min(clusters, key=lambda name: len(clusters[name]))
def super_impose(structure_ref, structure_align, atom_indices=None):
    """Superimpose structure_align onto structure_ref (Kabsch/SVD fit).

    Modifies ``structure_align.xyz`` in place; returns None.

    :param structure_ref: reference structure (left unchanged)
    :param structure_align: structure that is translated/rotated in place
    :param atom_indices: optional atom indices used to compute the centroids
    """
    if atom_indices is not None:
        a_atoms = structure_align.xyz[atom_indices]
        r_atoms = structure_ref.xyz[atom_indices]
    else:
        a_atoms = structure_align.xyz
        r_atoms = structure_ref.xyz
    # Center coordinates
    n = r_atoms.shape[0]
    av1 = a_atoms.sum(axis=0) / n
    av2 = r_atoms.sum(axis=0) / n
    re = structure_ref.xyz - av2
    al = structure_align.xyz - av1
    # Calculate rotation matrix
    # NOTE(review): the rotation is computed from *all* centered atoms (re/al);
    # atom_indices only influences the centroids, not the fit -- confirm this
    # is intended rather than fitting on the selected subset.
    a = np.dot(np.transpose(al), re)
    u, d, vt = np.linalg.svd(a)
    rot = np.transpose(np.dot(np.transpose(vt), np.transpose(u)))
    if np.linalg.det(rot) < 0:
        # Reflection detected: flip the last singular vector so the result is
        # a proper rotation (det = +1).
        vt[2] = -vt[2]
        rot = np.transpose(np.dot(np.transpose(vt), np.transpose(u)))
    # Rotate structure
    structure_align.xyz = np.dot(al, rot)
def average(structures, weights=None, write=True, filename=None):
    """
    Calculates weighted average of a list of structures.

    saves to filename if write is True
    if filename not provided makes new "average.pdb" file in temp-folder
    of the system

    :param structures: list of Structure objects (all with identical atom layout)
    :param weights: optional per-structure weights (uniform if None)
    :param write: whether to write the averaged structure to disk
    :param filename: output path; defaults to <tempdir>/average.pdb
    :return: the averaged Structure

    Example:

    >>> import lib
    >>> t = lib.TrajectoryFile('./sample_data/structure/2807_8_9_b.h5', mode='r', stride=1)
    >>> avg = t.average
    >>> avg
    <lib.structure.Structure.Structure at 0x117ff770>
    >>> avg.filename
    'c:\\users\\peulen\\appdata\\local\\temp\\average.pdb'
    """
    if weights is None:
        weights = np.ones(len(structures), dtype=np.float64)
        weights /= weights.sum()
    else:
        # Accept lists as well as arrays.
        weights = np.array(weights, dtype=np.float64)
    avg = Structure()
    avg.atoms = np.copy(structures[0].atoms)
    avg.xyz *= 0.0
    for i, s in enumerate(structures):
        avg.xyz += weights[i] * s.xyz
    if filename is None:
        # Bug fix: tempfile.tempdir is None until the tempfile module has been
        # used; gettempdir() always resolves a valid directory.
        filename = os.path.join(tempfile.gettempdir(), "average.pdb")
    if write:
        avg.filename = filename
        avg.write()
    return avg
def find_best(target, reference, atom_indices=None):
    """
    target and reference are both of type mdtraj.Trajectory

    reference is of length 1, target of arbitrary length

    returns the index within the trajectory and the closest frame

    >>> import lib
    >>> t = t = lib.TrajectoryFile('./sample_data/structure/2807_8_9_b.h5', mode='r', stride=1)
    >>> find_best(t.mdtraj, t.mdtraj[2])
    (2,
     <mdtraj.Trajectory with 1 frames, 2495 atoms, 164 residues, without unitcells at 0x13570b30>)
    """
    # NOTE(review): `mdtraj` is not imported in this module's visible header --
    # verify it is brought into scope elsewhere before this is called.
    rmsds = mdtraj.rmsd(target, reference, atom_indices=atom_indices)
    # Frame with the smallest RMSD to the reference.
    iMin = np.argmin(rmsds)
    return iMin, target[iMin]
class Universe(object):
    """Couples a set of structures with scaled potential-energy terms."""

    def __init__(self, structure=None):
        self.structures = [] if structure is None else [structure]
        self.potentials = []  # potential objects exposing getEnergy()
        self.scaling = []     # per-potential scale factor (parallel to potentials)
        self.Es = []

    def addPotential(self, potential, scale=1.0):
        """Register a potential together with its scale factor."""
        print("addPotential")
        self.potentials.append(potential)
        self.scaling.append(scale)

    def removePotential(self, potentialNbr=None):
        """Remove the potential at index ``potentialNbr`` (the last one if None or -1)."""
        print("removePotential")
        # Bug fix: the default None previously fell through to list.pop(None),
        # raising TypeError. Treat None like -1, i.e. pop the last potential.
        if potentialNbr is None or potentialNbr == -1:
            self.potentials.pop()
            self.scaling.pop()
        else:
            self.potentials.pop(potentialNbr)
            self.scaling.pop(potentialNbr)

    def clearPotentials(self):
        """Drop all registered potentials and their scales."""
        self.potentials = []
        self.scaling = []

    def getEnergy(self, structure=None):
        """Return the total energy; optionally rebind all potentials to ``structure``."""
        if isinstance(structure, Structure):
            for p in self.potentials:
                p.structure = structure
        Es = self.getEnergies()
        # NOTE(review): the returned total ignores self.scaling (the scaled sum
        # is only stored as self.Es inside getEnergies) -- confirm intended.
        E = Es.sum()
        self.E = E
        if E < -10000:  # debug aid for implausibly low energies
            print(Es)
        return E

    def getEnergies(self, structure=None):
        """Return per-potential (unscaled) energies; stores the scaled total in self.Es."""
        if isinstance(structure, Structure):
            for p in self.potentials:
                p.structure = structure
        scales = np.array(self.scaling)
        Es = np.array([pot.getEnergy() for pot in self.potentials])
        self.Es = np.dot(scales, Es)
        return Es
# Compact integer codes for the atom names kept in the reduced internal
# (Z-matrix) representation.
internal_atom_numbers = [
    ('N', 0),
    ('CA', 1),
    ('C', 2),
    ('O', 3),
    ('CB', 4),
    ('H', 5),
    ('CG', 6),
    ('CD', 7),
]
# Atoms retained per residue type in the internal representation;
# GLY has no CB, PRO additionally keeps its ring atoms CG/CD.
residue_atoms_internal = OrderedDict([
    ('CYS', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('MET', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('PHE', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('ILE', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('LEU', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('VAL', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('TRP', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('TYR', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('ALA', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('GLY', ['N', 'C', 'CA', 'O', 'H']),
    ('THR', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('SER', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('GLN', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('ASN', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('GLU', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('ASP', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('HIS', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('ARG', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('LYS', ['N', 'C', 'CA', 'CB', 'O', 'H']),
    ('PRO', ['N', 'C', 'CA', 'CB', 'O', 'H', 'CG', 'CD']),
]
)
# Field names and numpy dtype strings for the Cartesian atom record.
# NOTE(review): 'f4int' is not a valid numpy dtype string, and code above
# (move_center_of_mass) reads atom['mass'], which is absent from these keys;
# this table looks corrupted/out of sync -- verify against the original source.
cartesian_keys = ['i', 'chain', 'res_id', 'res_name', 'atom_id', 'atom_name', 'element', 'coord',
                  'charge', 'radius', 'bfactor']
cartesian_formats = ['i4', '|S1', 'i4', '|S5', 'i4', '|S5', '|S1', '3f8', 'f4int', 'f4', 'f4']
# Internal-coordinate record: atom index, three reference atom indices,
# bond length, angle, dihedral.
internal_keys = ['i', 'ib', 'ia', 'id', 'b', 'a', 'd']
internal_formats = ['i4', 'i4', 'i4', 'i4', 'f8', 'f8', 'f8']
# Lookup helpers: atom name <-> internal id; residue name -> index.
a2id = dict(internal_atom_numbers)
id2a = dict([(a[1], a[0]) for a in internal_atom_numbers])
res2id = dict([(aa, i) for i, aa in enumerate(residue_atoms_internal)])
def r2i(coord_i, a1, a2, a3, an, ai):
    """Convert one atom from Cartesian to internal (Z-matrix) coordinates.

    Writes the record (atom index, three reference indices, bond, angle,
    dihedral) into ``coord_i[ai]`` and returns the next slot index.
    """
    new_xyz = an['coord']
    ref1_xyz, ref2_xyz, ref3_xyz = a1['coord'], a2['coord'], a3['coord']
    bond = vector.norm(ref3_xyz - new_xyz)
    angle = vector.angle(ref2_xyz, ref3_xyz, new_xyz)
    dihedral = vector.dihedral(ref1_xyz, ref2_xyz, ref3_xyz, new_xyz)
    coord_i[ai] = an['i'], a3['i'], a2['i'], a1['i'], bond, angle, dihedral
    return ai + 1
def move_center_of_mass(structure, all_atoms):
    """Fold side-chain mass into each residue's CB pseudo-atom, in place.

    For every residue, the CB coordinate is replaced by the mass-weighted
    centroid of CB plus all atoms that are NOT part of the reduced internal
    representation; ``structure.atoms`` is updated in place.
    """
    for i, res in enumerate(structure.residue_ids):
        at_nbr = np.where(all_atoms['res_id'] == res)[0]
        cb_nbr = structure.l_cb[i]
        # cb_nbr <= 0 appears to mark residues without a CB pseudo-atom (e.g. GLY);
        # NOTE(review): this also skips a legitimate CB at atom index 0 -- confirm.
        if cb_nbr > 0:
            cb = structure.atoms[cb_nbr]
            cb['coord'] *= cb['mass']  # switch to mass-weighted accumulation
            for at in at_nbr:
                atom = all_atoms[at]
                residue_name = atom['res_name']
                if atom['atom_name'] not in residue_atoms_internal[residue_name]:
                    cb['coord'] += atom['coord'] * atom['mass']
                    cb['mass'] += atom['mass']
            cb['coord'] /= cb['mass']  # normalize back to a plain coordinate
            structure.atoms[cb_nbr] = cb
def calc_internal_coordinates(structure):
structure.coord_i = np.zeros(structure.atoms.shape[0], dtype={'names': internal_keys, 'formats': internal_formats})
rp, ai = None, 0
res_nr = 0
for rn in list(structure.residue_dict.values()):
res_nr += 1
# BACKBONE
if rp is None:
structure.coord_i[ai] = rn['N']['i'], 0, 0, 0, 0.0, 0.0, 0.0
ai += 1
structure.coord_i[ai] = rn['CA']['i'], rn['N']['i'], 0, 0, \
vector.norm(rn['N']['coord'] - rn['CA']['coord']), 0.0, 0.0
ai += 1
structure.coord_i[ai] = rn['C']['i'], rn['CA']['i'], rn['N']['i'], 0, \
vector.norm(rn['CA']['coord'] - rn['C']['coord']), \
vector.angle(rn['C']['coord'], rn['CA']['coord'], rn['N']['coord']), \
0.0
ai += | |
<filename>mne/minimum_norm/resolution_matrix.py
# -*- coding: utf-8 -*-
"""Compute resolution matrix for linear estimators."""
# Authors: <EMAIL>
#
# License: BSD-3-Clause
from copy import deepcopy
import numpy as np
from .. import pick_channels_forward, EvokedArray, SourceEstimate
from ..io.constants import FIFF
from ..utils import logger, verbose
from ..forward.forward import convert_forward_solution
from ..minimum_norm import apply_inverse
from ..source_estimate import _prepare_label_extraction
from ..label import Label
@verbose
def make_inverse_resolution_matrix(forward, inverse_operator, method='dSPM',
                                   lambda2=1. / 9., verbose=None):
    """Compute resolution matrix for linear inverse operator.

    Parameters
    ----------
    forward : instance of Forward
        Forward Operator.
    inverse_operator : instance of InverseOperator
        Inverse operator.
    method : 'MNE' | 'dSPM' | 'sLORETA'
        Inverse method to use (MNE, dSPM, sLORETA).
    lambda2 : float
        The regularisation parameter.
    %(verbose)s

    Returns
    -------
    resmat: array, shape (n_orient_inv * n_dipoles, n_orient_fwd * n_dipoles)
        Resolution matrix (inverse operator times forward operator).
        The result of applying the inverse operator to the forward operator.
        If source orientations are not fixed, all source components will be
        computed (i.e. for n_orient_inv > 1 or n_orient_fwd > 1).
        The columns of the resolution matrix are the point-spread functions
        (PSFs) and the rows are the cross-talk functions (CTFs).
    """
    inv = inverse_operator
    # Bring the forward solution into the orientation convention of the
    # inverse operator.
    fwd = _convert_forward_match_inv(forward, inv)
    # Restrict the forward model to the inverse operator's good channels.
    bads = inv['info']['bads']
    good_names = [name for name in inv['info']['ch_names'] if name not in bads]
    fwd = pick_channels_forward(fwd, good_names, ordered=True)
    # Resolution matrix = inverse matrix applied to the leadfield.
    leadfield = fwd['sol']['data']
    invmat = _get_matrix_from_inverse_operator(inv, fwd, method=method,
                                               lambda2=lambda2)
    resmat = invmat.dot(leadfield)
    logger.info('Dimensions of resolution matrix: %d by %d.' % resmat.shape)
    return resmat
@verbose
def _get_psf_ctf(resmat, src, idx, func, mode, n_comp, norm, return_pca_vars,
                 verbose=None):
    """Get point-spread (PSFs) or cross-talk (CTFs) functions.

    Parameters
    ----------
    resmat : array, shape (n_dipoles, n_dipoles)
        Forward Operator.
    src : Source Space
        Source space used to compute resolution matrix.
    %(pctf_idx)s
    func : str ('psf' | 'ctf')
        Whether to produce PSFs or CTFs. Defaults to psf.
    %(pctf_mode)s
    %(pctf_n_comp)s
    %(pctf_norm)s
    %(pctf_return_pca_vars)s
    %(verbose)s

    Returns
    -------
    %(pctf_stcs)s
    %(pctf_pca_vars)s
    """
    # check for consistencies in input parameters
    _check_get_psf_ctf_params(mode, n_comp, return_pca_vars)
    # backward compatibility
    if norm is True:
        norm = 'max'
    # get relevant vertices in source space
    verts_all = _vertices_for_get_psf_ctf(idx, src)
    # vertices used in forward and inverse operator
    vertno_lh = src[0]['vertno']
    vertno_rh = src[1]['vertno']
    vertno = [vertno_lh, vertno_rh]
    # the following will operate on columns of funcs
    # (CTFs are rows of the resolution matrix, so transpose once up front)
    if func == 'ctf':
        resmat = resmat.T
    # Functions and variances per label
    stcs = []
    pca_vars = []
    for verts in verts_all:
        # get relevant PSFs or CTFs for specified vertices
        funcs = resmat[:, verts]
        # normalise PSFs/CTFs if requested
        if norm is not None:
            funcs = _normalise_psf_ctf(funcs, norm)
        # summarise PSFs/CTFs across vertices if requested
        pca_var = None  # variances computed only if return_pca_vars=True
        if mode is not None:
            funcs, pca_var = _summarise_psf_ctf(funcs, mode, n_comp,
                                                return_pca_vars)
        # convert to source estimate
        stc = SourceEstimate(funcs, vertno, tmin=0., tstep=1.)
        stcs.append(stc)
        pca_vars.append(pca_var)
    # if just one list or label specified, simplify output
    # (relies on `stc`/`pca_var` holding the single loop iteration's values)
    if len(stcs) == 1:
        stcs = stc
    if len(pca_vars) == 1:
        pca_vars = pca_var
    if pca_var is not None:
        return stcs, pca_vars
    else:
        return stcs
def _check_get_psf_ctf_params(mode, n_comp, return_pca_vars):
"""Check input parameters of _get_psf_ctf() for consistency."""
if mode in [None, 'sum', 'mean'] and n_comp > 1:
msg = 'n_comp must be 1 for mode=%s.' % mode
raise ValueError(msg)
if mode != 'pca' and return_pca_vars:
msg = 'SVD variances can only be returned if mode=''pca''.'
raise ValueError(msg)
def _vertices_for_get_psf_ctf(idx, src):
    """Resolve idx (labels, ints, or lists of ints) to per-item vertex lists.

    Returns a list with one entry per label / per list of vertex indices.
    """
    if type(idx[0]) is Label:
        # Labels: extract the vertex indices per label (no source time
        # courses needed, hence stc=None).
        verts_labs, _ = _prepare_label_extraction(
            stc=None, labels=idx, src=src, mode='mean', allow_empty=False,
            use_sparse=False)
        verts = []
        for label_verts in verts_labs:
            if type(label_verts) is list:
                # Label spans both hemispheres: merge the two index arrays.
                verts.append(np.concatenate((label_verts[0], label_verts[1])))
            else:
                verts.append(np.array(label_verts))
    elif type(idx[0]) is list:
        # Already a list of lists of integers.
        verts = idx
    else:
        # A flat list of integers: wrap it as a single group.
        verts = [idx]
    return verts
def _normalise_psf_ctf(funcs, norm):
"""Normalise PSFs/CTFs in _get_psf_ctf()."""
# normalise PSFs/CTFs if specified
if norm == 'max':
maxval = max(-funcs.min(), funcs.max())
funcs = funcs / maxval
elif norm == 'norm': # normalise to maximum norm across columns
norms = np.linalg.norm(funcs, axis=0)
funcs = funcs / norms.max()
return funcs
def _summarise_psf_ctf(funcs, mode, n_comp, return_pca_vars):
"""Summarise PSFs/CTFs across vertices."""
from scipy import linalg
s_var = None # only computed for return_pca_vars=True
if mode == 'maxval': # pick PSF/CTF with maximum absolute value
absvals = np.maximum(-np.min(funcs, axis=0), np.max(funcs, axis=0))
if n_comp > 1: # only keep requested number of sorted PSFs/CTFs
sortidx = np.argsort(absvals)
maxidx = sortidx[-n_comp:]
else: # faster if only one required
maxidx = absvals.argmax()
funcs = funcs[:, maxidx]
elif mode == 'maxnorm': # pick PSF/CTF with maximum norm
norms = np.linalg.norm(funcs, axis=0)
if n_comp > 1: # only keep requested number of sorted PSFs/CTFs
sortidx = np.argsort(norms)
maxidx = sortidx[-n_comp:]
else: # faster if only one required
maxidx = norms.argmax()
funcs = funcs[:, maxidx]
elif mode == 'sum': # sum across PSFs/CTFs
funcs = np.sum(funcs, axis=1)
elif mode == 'mean': # mean of PSFs/CTFs
funcs = np.mean(funcs, axis=1)
elif mode == 'pca': # SVD across PSFs/CTFs
# compute SVD of PSFs/CTFs across vertices
u, s, _ = linalg.svd(funcs, full_matrices=False)
funcs = u[:, :n_comp]
# if explained variances for SVD components requested
if return_pca_vars:
# explained variance of individual SVD components
s2 = s * s
s_var = 100 * s2[:n_comp] / s2.sum()
return funcs, s_var
@verbose
def get_point_spread(resmat, src, idx, mode=None, n_comp=1, norm=False,
                     return_pca_vars=False, verbose=None):
    """Get point-spread (PSFs) functions for vertices.

    Parameters
    ----------
    resmat : array, shape (n_dipoles, n_dipoles)
        Forward Operator.
    src : instance of SourceSpaces
        Source space used to compute resolution matrix.
    %(pctf_idx)s
    %(pctf_mode)s
    %(pctf_n_comp)s
    %(pctf_norm)s
    %(pctf_return_pca_vars)s
    %(verbose)s

    Returns
    -------
    %(pctf_stcs)s
    %(pctf_pca_vars)s
    """
    # Thin wrapper: PSFs are the columns of the resolution matrix
    # (func='psf' in the shared implementation).
    return _get_psf_ctf(resmat, src, idx, func='psf', mode=mode, n_comp=n_comp,
                        norm=norm, return_pca_vars=return_pca_vars)
@verbose
def get_cross_talk(resmat, src, idx, mode=None, n_comp=1, norm=False,
                   return_pca_vars=False, verbose=None):
    """Get cross-talk (CTFs) function for vertices.

    Parameters
    ----------
    resmat : array, shape (n_dipoles, n_dipoles)
        Forward Operator.
    src : instance of SourceSpaces
        Source space used to compute resolution matrix.
    %(pctf_idx)s
    %(pctf_mode)s
    %(pctf_n_comp)s
    %(pctf_norm)s
    %(pctf_return_pca_vars)s
    %(verbose)s

    Returns
    -------
    %(pctf_stcs)s
    %(pctf_pca_vars)s
    """
    # Thin wrapper: CTFs are the rows of the resolution matrix
    # (func='ctf' makes the shared implementation transpose it).
    return _get_psf_ctf(resmat, src, idx, func='ctf', mode=mode, n_comp=n_comp,
                        norm=norm, return_pca_vars=return_pca_vars)
def _convert_forward_match_inv(fwd, inv):
    """Ensure forward and inverse operators match.

    Inverse operator and forward operator must have same surface orientations,
    but can have different source orientation constraints.

    Returns the (possibly converted) forward operator.
    """
    # did inverse operator use fixed orientation?
    is_fixed_inv = _check_fixed_ori(inv)
    # did forward operator use fixed orientation?
    is_fixed_fwd = _check_fixed_ori(fwd)
    # if inv or fwd fixed: do nothing
    # if inv loose: surf_ori must be True
    # if inv free: surf_ori must be False
    if not is_fixed_inv and not is_fixed_fwd:
        # loose <=> not every orientation-prior entry is exactly 1
        is_loose_inv = not (inv['orient_prior']['data'] == 1.).all()
        if is_loose_inv:
            if not fwd['surf_ori']:
                fwd = convert_forward_solution(fwd, surf_ori=True)
        elif fwd['surf_ori']:  # free orientation, change fwd
            fwd = convert_forward_solution(fwd, surf_ori=False)
    return fwd
def _prepare_info(inverse_operator):
"""Get a usable dict."""
# in order to convert sub-leadfield matrix to evoked data type (pretending
# it's an epoch, see in loop below), uses 'info' from inverse solution
# because this has all the correct projector information
info = deepcopy(inverse_operator['info'])
info['sfreq'] = 1000. # necessary
info['projs'] = inverse_operator['projs']
return info
def _get_matrix_from_inverse_operator(inverse_operator, forward, method='dSPM',
lambda2=1. / 9.):
"""Get inverse matrix from an inverse operator.
Currently works only for fixed/loose orientation constraints
For loose orientation constraint, the CTFs are computed for the normal
component (pick_ori='normal').
Parameters
----------
inverse_operator : instance of InverseOperator
The inverse operator.
forward : instance of Forward
The forward operator.
method : 'MNE' | 'dSPM' | 'sLORETA'
Inverse methods (for apply_inverse).
lambda2 : float
The regularization parameter (for apply_inverse).
Returns
-------
invmat : array, shape (n_dipoles, n_channels)
Inverse matrix associated with inverse operator and specified
parameters.
"""
| |
"""
Working with Lackenby"s taut ideal triangulations as described in
http://arxiv.org/abs/math/0003132
A taut ideal triangulation gives an angle structure where all the angles are 0 or pi.
Such an angle structure gives instructions on how to turn the 2-skeleton of the ideal triangulation into a branched surface.
When this branched surface is orientable, one has a taut ideal triangulation in the sense of Lackenby's paper.
Based on code joint with <NAME>.
"""
from collections import Counter, namedtuple
from itertools import groupby
import networkx as nx
import snappy
import snappy.snap.t3mlite as t3m
from snappy.snap.t3mlite.simplex import E01, E02, E03, E12, E13, E23, F0
from snappy.snap.t3mlite.simplex import VerticesOfFaceCounterclockwise as VerticesOfFace
import flipper
EdgeToQuad = {E01: 2, E02: 1, E03: 0, E12: 0, E13: 1, E23: 2}
VerticesOfFaceIndex = dict(((face, vertex), index) for face, vertices in VerticesOfFace.items() for index, vertex in enumerate(vertices))
Compose = dict(
((face, vertex), (VerticesOfFace[face][VerticesOfFaceIndex[face, vertex] - 2], VerticesOfFace[face][VerticesOfFaceIndex[face, vertex] - 1]))
for face, vertex in VerticesOfFaceIndex
)
def walk(arrow, end=None):
    """ Yield all of the arrows around an edge in turn (until end is reached). """
    cursor = arrow.copy()
    for _ in range(cursor.axis().valence()):
        yield cursor.copy()
        cursor.next()
        if cursor == end:
            return
# Convenience monkey-patches on the t3m classes used throughout this module:
t3m.Edge.arrows_around = lambda self: walk(self.get_arrow())  # iterate all arrows around this edge
t3m.Arrow.face_index = lambda self: self.Tetrahedron.Class[self.Face].Index  # global index of the arrow's face
t3m.Arrow.oriented_face = lambda self: (self.Tetrahedron.Index, self.Face)  # (tetrahedron index, face) pair
class Surface(object):
    """ An oriented surface carried by the branched surface associated to a taut structure. """
    def __init__(self, taut_str, weights):
        # weights[i] is the number of sheets of the surface over face i.
        self.taut_str = taut_str
        self.weights = weights
        # NOTE(review): assumes each face contributes -1/2 per sheet to chi --
        # confirm this matches the branched-surface combinatorics intended.
        self.euler_characteristic = -sum(self.weights) // 2
    def __repr__(self):
        return "<Surface: %s>" % self.weights
    def __eq__(self, other):
        if isinstance(other, Surface):
            return self.taut_str == other.taut_str and self.weights == other.weights
        return NotImplemented
    def __ne__(self, other):
        return not self == other
    def __hash__(self):
        # Hash ignores taut_str; consistent with __eq__ but coarser.
        return hash(tuple(self.weights))
    def __add__(self, other):
        # Surfaces over the same taut structure add by summing face weights.
        if isinstance(other, Surface):
            assert self.taut_str == other.taut_str
            weights = [a + b for a, b in zip(self.weights, other.weights)]
            return Surface(self.taut_str, weights)
        return NotImplemented
    def __radd__(self, other):
        return self.__add__(other)
    def triangulation_data(self):
        """ Return a list of namedtuples:
        (triangle name, side, edge name, sign along locus)
        where triangle name is the pair: (sector index, sheet index)
        and edge_name is the pair: (locus index, sheet at locus)
        """
        Data = namedtuple("Data", ["triangle_name", "side", "edge_name", "sign"])
        data = []
        for index, sides in enumerate(self.taut_str.branch_loci):
            for side in sides:
                i = 0  # running sheet offset along this side of the locus
                for arrow in side:
                    sect_index = arrow.face_index()
                    weight = self.weights[sect_index]
                    triangle_side = VerticesOfFaceIndex[arrow.Face, arrow.Face ^ arrow.Edge]
                    sign = arrow.Tetrahedron.get_orientation_of_edge(*Compose[arrow.Face, arrow.Face ^ arrow.Edge])
                    # One Data record per sheet of this sector.
                    for w in range(weight):
                        data.append(Data((sect_index, w), triangle_side, (index, i + w), sign))
                    i += weight
        return sorted(data)
    def flipper_triangulation(self):
        """ Return this surface as a flipper triangulation. """
        data = self.triangulation_data()
        edges = sorted({d.edge_name for d in data})  # Remove duplicates.
        edge_lookup = dict((edge, index) for index, edge in enumerate(edges))
        # Each triangle appears as three consecutive Data records; ~ marks
        # reversed edge orientation for flipper.
        ans = [tuple(edge_lookup[datum.edge_name] if datum.sign == +1 else ~edge_lookup[datum.edge_name] for datum in x) for x in zip(data[::3], data[1::3], data[2::3])]
        return flipper.create_triangulation(ans)
    def connected_component(self):
        """ Return a connected subsurface together with the sorted list of edges used. """
        data = self.triangulation_data()
        # Dual graph: nodes are triangles, edges join triangles glued along an edge.
        G = nx.MultiGraph()
        G.add_nodes_from(datum.triangle_name for datum in data)  # Triangle_name = (sector index, sheet index).
        for edge, triangles in groupby(sorted(data, key=lambda d: d.edge_name), lambda d: d.edge_name):
            A, B = triangles  # Have to unpack the generator.
            G.add_edge(A.triangle_name, B.triangle_name, label=edge)
        component = nx.algorithms.components.node_connected_component(G, next(iter(G)))
        edges = sorted({x[2] for y in component for x in G.edges(y, data='label')})  # Get the edge_names.
        counts = Counter(section for section, _ in component)
        weights = [counts[i] for i in range(len(self.weights))]
        return Surface(self.taut_str, weights), edges
    def has_full_support(self):
        """ Return whether this surface has full support, that is, meets all faces of the TautStructure. """
        return min(self.weights) > 0
class TautStructure(object):
""" Represents a taut structure on a triangulation.
This is specified by a collection of SnapPy.Arrows which define a dihedral of a Tetrahedron.
These are produced in branch_loci blocks which, for each edge of the triangulation, is a pair of lists of arrows.
"""
    def __init__(self, manifold, angle_vector):
        self.manifold = manifold
        self.angle_vector = angle_vector
        # Quad type carrying the pi angle in each tetrahedron; the last entry
        # of angle_vector is the homogenising "pi" dummy variable, hence [:-1].
        self.pi_quads = [i % 3 for i, a in enumerate(angle_vector[:-1]) if a == 1]
        # For each edge, the arrows (in both directions) whose dihedral angle is pi.
        pi_arrows = [[parrow for arrow in edge.arrows_around() for parrow in [arrow, arrow.copy().reverse()] if self.angle_is_pi(parrow)] for edge in self.manifold.Edges]
        # Split the arrows around each edge into the two sides of its branch locus.
        blocks = [(list(walk(S0, E0)), list(walk(S1, E1)), list(walk(E0, S0)), list(walk(E1, S1))) for S0, S1, E0, E1 in pi_arrows]
        G = nx.Graph()
        G.add_nodes_from(arrow.oriented_face() for sides in blocks for side in sides for arrow in side) # Add a node for every arrow in the triangulation.
        G.add_edges_from((a1.oriented_face(), a2.oriented_face()) for A, B, C, D in blocks for S0, S1 in [(A, B), (C, D)] for a1, a2 in zip(S0 + S1, (S0 + S1)[1:]))
        # Two connected components <=> the branched surface admits a coherent
        # orientation; a single component means it is non-orientable.
        num_components = nx.algorithms.components.number_connected_components(G)
        if num_components == 1:
            raise ValueError("Branched surface is not orientable")
        assert num_components == 2
        an_orientation = nx.algorithms.components.node_connected_component(G, (0, F0))
        # These are all indexed according their edge's Index.
        self.branch_loci = [[side for side in sides if (side[0].Tetrahedron.Index, side[0].Face) in an_orientation] for sides in blocks]
        self.tops = [[side[0].copy() for side in locus] for locus in self.branch_loci]
        self.bottoms = [[side[-1].copy().next() for side in locus] for locus in self.branch_loci]
        self.surface = self.surface_with_maximal_support()
        if not self.surface.has_full_support():
            raise ValueError("Maximal surface does not have full support")
    @classmethod
    def from_manifold(cls, manifold, N=100, restarts=5):
        """ Return a TautStructure on some triangulation of the given manifold.

        Tries the given triangulation first, then up to `restarts` random
        retriangulations (each produced by N randomize moves), smallest
        triangulations first. Raises ValueError if none admits a taut structure.
        """
        M0 = manifold.without_hyperbolic_structure()
        M0 = M0.filled_triangulation()
        ans = []
        for _ in range(restarts):
            M = M0.copy()
            for _ in range(N):
                M.randomize()
            if M not in ans:
                ans.append(M.copy())
        ans = sorted(ans, key=lambda X: X.num_tetrahedra())
        manifolds = [manifold] + [M.with_hyperbolic_structure() for M in ans]
        for M in manifolds:
            try:
                return cls.from_triangulation(M)
            except ValueError:
                # This triangulation does not carry a taut structure; try the next.
                pass
        raise ValueError("Could not find taut structure on manifold")
    @classmethod
    def from_triangulation(cls, triangulation):
        """ Return a TautStructure on this triangulation.

        Enumerates the extremal rays of the homogenised angle equations and
        returns the first one that yields an orientable, fully-supported taut
        structure; raises ValueError otherwise.
        """
        triangulation = t3m.Mcomplex(triangulation)
        # Write down the angle equations. these are normally inhomogeneous, with:
        #  * sum(angles around edge) = 2pi, and
        #  * sum(angles in triangle) = pi
        # So we add an extra dummy variable (essentially "pi") to make them homogeneous.
        ntets, nedges = len(triangulation.Tetrahedra), len(triangulation.Edges)
        angle_matrix = t3m.linalg.Matrix(nedges + ntets, 3*ntets + 1)
        for i, edge in enumerate(triangulation.Edges):  # Edge equations.
            for arrow in edge.arrows_around():
                t = arrow.Tetrahedron.Index
                q = EdgeToQuad[arrow.Edge]
                angle_matrix[i, 3*t + q] += 1
            angle_matrix[i, 3*ntets] = -2
        for t in range(ntets):  # Triangle equations.
            for q in range(3):
                angle_matrix[nedges + t, 3*t + q] += 1
            angle_matrix[nedges + t, 3*ntets] = -1
        xrays = snappy.FXrays.find_Xrays(angle_matrix.nrows(), angle_matrix.ncols(), angle_matrix.entries(), filtering=True, print_progress=False)
        for xray in xrays:
            try:
                return cls(triangulation, xray)
            except ValueError:
                # Non-orientable or not fully supported; try the next ray.
                pass
        raise ValueError("Could not find taut structure on triangulation")
    def __repr__(self):
        # Identify the structure by the quad type carrying pi in each tetrahedron.
        return "<TautStructure: %s>" % self.pi_quads
    def angle_is_pi(self, arrow):
        """ Return whether the dihedral angle defined by this arrow is pi. """
        # True iff the arrow's edge lies in the pi-carrying quad of its tetrahedron.
        return self.pi_quads[arrow.Tetrahedron.Index] == EdgeToQuad[arrow.Edge]
    def empty_surface(self):
        """ Return the empty surface on this TautStructure. """
        # One zero weight per face (a triangulation has 2 faces per tetrahedron).
        return Surface(self, [0] * 2 * len(self.manifold.Tetrahedra))
    def surfaces(self):
        """ Return a list of extremal surface supported by this TautStructure. """
        # Branch (matching) equations: carried surfaces are the non-negative
        # solutions of W * weights = 0, one equation per branch locus.
        W = t3m.linalg.Matrix(len(self.manifold.Edges), len(self.manifold.Faces))
        for i, sides in enumerate(self.branch_loci):
            for j, side in enumerate(sides):
                for arrow in side:
                    W[i, arrow.face_index()] += (-1)**j  # opposite signs for the two sides
        xrays = snappy.FXrays.find_Xrays(W.nrows(), W.ncols(), W.entries(), filtering=False, print_progress=False)
        return [Surface(self, xray) for xray in xrays]
    def surface_with_maximal_support(self):
        """ A reasonably small surface with maximal support """
        # Summing all extremal surfaces gives a carried surface whose support
        # is the union of their supports.
        return sum(self.surfaces(), self.empty_surface())
def flipper_bundle_data(self):
""" Return the flipper triangulation, flip sequence and edge closer defined by following this TautStructure. """
# How to wrap from the bottom edge back to the top.
top = dict((arrow.Tetrahedron.Index, index) for index, starts in enumerate(self.tops) for arrow in starts)
bottom = dict((arrow.Tetrahedron.Index, index) for index, ends in enumerate(self.bottoms) for arrow in ends)
edge_advance = dict(((loci, sum(self.surface.weights[arrow.face_index()] for arrow in self.branch_loci[loci][0]) - 1), (top[tet], 0)) for tet, loci in bottom.items())
S0, E0 = self.surface.connected_component() # Get a component and its edges.
E_curr = E0
flips = []
while True:
flips += [i for i, edge in enumerate(E_curr) if edge in edge_advance] # Find the edges where we are not moving directly across.
E_curr = [edge_advance.get((loci, sheet), (loci, sheet+1)) for loci, sheet in E_curr] # Move across.
if sorted(E_curr) == E0:
break
F = S0.flipper_triangulation()
M0 = self.manifold.snappy_manifold()
image_edge = E0.index(E_curr[0])
for edge in [image_edge, ~image_edge]:
try:
h = F.encode_flips_and_close(flips, | |
ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGES.
# END OF TERMS AND CONDITIONS
# How to Apply These Terms to Your New Programs
# If you develop a new program, and you want it to be of the greatest
# possible use to the public, the best way to achieve this is to make it
# free software which everyone can redistribute and change under these terms.
# To do so, attach the following notices to the program. It is safest
# to attach them to the start of each source file to most effectively
# convey the exclusion of warranty; and each file should have at least
# the "copyright" line and a pointer to where the full notice is found.
# <one line to give the program's name and a brief idea of what it does.>
# Copyright (C) <year> <name of author>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Also add information on how to contact you by electronic and paper mail.
# If the program is interactive, make it output a short notice like this
# when it starts in an interactive mode:
# Gnomovision version 69, Copyright (C) year name of author
# Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
# This is free software, and you are welcome to redistribute it
# under certain conditions; type `show c' for details.
# The hypothetical commands `show w' and `show c' should show the appropriate
# parts of the General Public License. Of course, the commands you use may
# be called something other than `show w' and `show c'; they could even be
# mouse-clicks or menu items--whatever suits your program.
# You should also get your employer (if you work as a programmer) or your
# school, if any, to sign a "copyright disclaimer" for the program, if
# necessary. Here is a sample; alter the names:
# Yoyodyne, Inc., hereby disclaims all copyright interest in the program
# `Gnomovision' (which makes passes at compilers) written by <NAME>.
# <signature of Ty Coon>, 1 April 1989
# <NAME>, President of Vice
# This General Public License does not permit incorporating your program into
# proprietary programs. If your program is a subroutine library, you may
# consider it more useful to permit linking proprietary applications with the
# library. If this is what you want to do, use the GNU Lesser General
# Public License instead of this License.
# -*- coding: utf-8 -*-
import sys
import codecs
import struct
import json
import traceback
import os
# Module-global accumulator for the human-readable dump that ParseData builds
# (one text fragment per append; tabs encode nesting depth).
strings = []
def GetDynamicWireFormat(data, start, end):
    """Decode the protobuf tag (key) varint at ``data[start]``.

    Returns ``(next_offset, wire_type, field_number)`` on success, or
    ``(None, None, None)`` when the varint runs past ``end`` (truncated
    input).  The wire type is taken from the low 3 bits of the first
    tag byte; the field number is the decoded varint shifted right by 3.
    """
    first = data[start]
    wire_type = first & 0x7
    # Fast path: single-byte tag (continuation bit clear).
    if not (first & 0x80):
        return (start + 1, wire_type, first >> 3)
    # General case: collect 7-bit groups, least-significant group first.
    groups = []
    offset = start
    while True:
        if offset >= end:
            return (None, None, None)  # varint truncated before terminator byte
        byte = data[offset]
        groups.append(byte & 0x7F)
        offset += 1
        if not (byte & 0x80):
            break
    tag = 0
    for group in reversed(groups):
        tag = (tag << 7) | group
    return (offset, wire_type, tag >> 3)
def RetrieveInt(data, start, end):
    """Decode a base-128 (LEB128-style) varint at ``data[start]``.

    Returns ``(value, next_offset, True)`` on success, or
    ``(None, None, False)`` when the varint is truncated before ``end``.
    """
    value = 0
    shift = 0
    offset = start
    while True:
        if offset >= end:
            return (None, None, False)  # ran off the buffer mid-varint
        byte = data[offset]
        # Each byte contributes 7 payload bits, least-significant group first.
        value |= (byte & 0x7F) << shift
        shift += 7
        offset += 1
        if not (byte & 0x80):
            return (value, offset, True)
def ParseRepeatedField(data, start, end, message, depth = 0):
    """Decode ``data[start:end]`` as a packed repeated varint field.

    Each decoded integer is appended to ``message`` (a list).  Returns
    True when the whole span decodes, False as soon as a varint is
    truncated.  Note: values decoded before a failure remain appended.
    """
    cursor = start
    while cursor < end:
        (value, cursor, ok) = RetrieveInt(data, cursor, end)
        if not ok:
            return False
        message.append(value)
    return True
def ParseData(data, start, end, messages, depth = 0):
    """Recursively decode raw protobuf wire data in ``data[start:end]``.

    Decoded values are stored in the dict ``messages`` under keys of the
    form ``'FF:OO:kind'`` (zero-padded field number, per-level ordinal,
    wire kind such as 'Varint' / '64-bit' / 'embedded message' / 'string'
    / 'repeated' / 'bytes').  A printable dump is accumulated in the
    module-global ``strings`` list, indented by ``depth`` tabs.
    Returns True when the whole span decodes, False otherwise.

    Length-delimited fields are decoded by trial: embedded message first,
    then UTF-8 string, then packed repeated varints, then raw hex bytes.
    """
    global strings
    #print strings
    # Per-level ordinal: disambiguates repeated occurrences of the same
    # field number within one message level.
    ordinary = 0
    while start < end:
        (start, wire_type, field_number) = GetDynamicWireFormat(data, start, end)
        if start == None:
            return False  # truncated tag varint
        if wire_type == 0x00:#Varint
            #(num, start, success) = RetrieveInt(data, start+1, end)
            (num, start, success) = RetrieveInt(data, start, end)
            if success == False:
                return False
            if depth != 0:
                strings.append('\t'*depth)
            strings.append("(%d) Varint: %d\n" % (field_number, num))
            messages['%02d:%02d:Varint' % (field_number,ordinary)] = num
            ordinary = ordinary + 1
        elif wire_type == 0x01:#64-bit
            # Read 8 bytes little-endian (highest index first into the shifts).
            num = 0
            pos = 7
            while pos >= 0:
                #if start+1+pos >= end:
                if start+pos >= end:
                    return False
                #num = (num << 8) + ord(data[start+1+pos])
                num = (num << 8) + data[start+pos]
                pos = pos - 1
            #start = start + 9
            start = start + 8
            # Try to reinterpret the 64 bits as a double; pack('q') raises
            # struct.error when the unsigned accumulation exceeds the signed
            # 64-bit range, in which case the raw integer is kept instead.
            try:
                floatNum = struct.unpack('d',struct.pack('q',num))
                floatNum = floatNum[0]
            except:
                floatNum = None
            if depth != 0:
                strings.append('\t'*depth)
            if floatNum != None:
                strings.append("(%d) 64-bit: 0x%x / %f\n" % (field_number, num, floatNum))
                messages['%02d:%02d:64-bit' % (field_number,ordinary)] = floatNum
            else:
                strings.append("(%d) 64-bit: 0x%x\n" % (field_number, num))
                messages['%02d:%02d:64-bit' % (field_number,ordinary)] = num
            ordinary = ordinary + 1
        elif wire_type == 0x02:#Length-delimited
            # Remember where this field's dump output starts so failed
            # decode attempts can be rolled back from `strings`.
            curStrIndex = len(strings)
            #(stringLen, start, success) = RetrieveInt(data, start+1, end)
            (stringLen, start, success) = RetrieveInt(data, start, end)
            if success == False:
                return False
            #stringLen = ord(data[start+1])
            if depth != 0:
                strings.append('\t'*depth)
            strings.append("(%d) embedded message:\n" % field_number)
            messages['%02d:%02d:embedded message' % (field_number, ordinary)] = {}
            if start+stringLen > end:
                # NOTE(review): the +1 keeps the first appended fragment
                # (header, or indent tab when depth != 0) even on failure —
                # looks intentional as "keep the header", but at depth != 0
                # it leaves a dangling tab; verify against expected dumps.
                del strings[curStrIndex + 1:] #pop failed result
                messages.pop('%02d:%02d:embedded message' % (field_number, ordinary), None)
                return False
            ret = ParseData(data, start, start+stringLen, messages['%02d:%02d:embedded message' % (field_number, ordinary)], depth+1)
            #print '%d:%d:embedded message' % (field_number, ordinary)
            if ret == False:
                # Embedded-message interpretation failed: roll back and fall
                # through the string -> repeated -> bytes ladder.
                del strings[curStrIndex + 1:] #pop failed result
                #print 'pop: %d:%d:embedded message' % (field_number, ordinary)
                messages.pop('%02d:%02d:embedded message' % (field_number, ordinary), None)
                #print messages
                if depth != 0:
                    strings.append('\t'*depth)
                # NOTE(review): a "repeated:" header is emitted here even when
                # the next attempt is a *string* decode — quirk of the
                # original dump format, preserved as-is.
                strings.append("(%d) repeated:\n" % field_number)
                try:
                    # data[start:start+stringLen].decode('utf-8').encode('utf-8')
                    strings.append("(%d) string: %s\n" % (field_number, data[start:start+stringLen].decode('utf-8')))
                    messages['%02d:%02d:string' % (field_number, ordinary)] = data[start:start+stringLen].decode('utf-8')
                except:
                    # Not valid UTF-8: try packed repeated varints next.
                    if depth != 0:
                        strings.append('\t'*depth)
                    strings.append("(%d) repeated:\n" % field_number)
                    messages['%02d:%02d:repeated' % (field_number, ordinary)] = []
                    ret = ParseRepeatedField(data, start, start+stringLen, messages['%02d:%02d:repeated' % (field_number, ordinary)], depth+1)
                    if ret == False:
                        # Last resort: record the payload as colon-separated hex.
                        del strings[curStrIndex + 1:] #pop failed result
                        messages.pop('%02d:%02d:repeated' % (field_number, ordinary), None)
                        #print traceback.format_exc()
                        hexStr = ['0x%x' % x for x in data[start:start+stringLen]]
                        hexStr = ':'.join(hexStr)
                        strings.append("(%d) bytes: %s\n" % (field_number, hexStr))
                        messages['%02d:%02d:bytes' % (field_number, ordinary)] = hexStr
            ordinary = ordinary + 1
            #start = start+2+stringLen
            start = start+stringLen
        elif wire_type == 0x05:#32-bit
            # Read 4 bytes little-endian.
            num = 0
            pos = 3
            while pos >= 0:
                #if start+1+pos >= end:
                if start+pos >= end:
                    return False
                #num = (num << 8) + ord(data[start+1+pos])
                num = (num << 8) + data[start+pos]
                pos = pos - 1
            #start = start + 5
            start = start + 4
            # Reinterpret as IEEE float; pack('i') raises for values outside
            # the signed 32-bit range, falling back to the raw integer.
            try:
                floatNum = struct.unpack('f',struct.pack('i',num))
                floatNum = floatNum[0]
            except:
                floatNum = None
            if depth != 0:
                strings.append('\t'*depth)
            if floatNum != None:
                strings.append("(%d) 32-bit: 0x%x / %f\n" % (field_number, num, floatNum))
                messages['%02d:%02d:32-bit' % (field_number,ordinary)] = floatNum
            else:
                strings.append("(%d) 32-bit: 0x%x\n" % (field_number, num))
                messages['%02d:%02d:32-bit' % (field_number,ordinary)] = num
            ordinary = ordinary + 1
        else:
            # Unknown/unsupported wire type (3 and 4 are deprecated groups).
            return False
    return True
def ParseProto(fileName):
    """Parse the file at ``fileName`` as raw protobuf wire data.

    Returns the dict of decoded fields produced by ParseData (keys are
    ``'FF:OO:kind'`` strings).  The dict may be partially filled when
    parsing fails partway through; ParseData's boolean result is
    deliberately discarded here.
    """
    # Fix: use a context manager so the file handle is closed promptly
    # (the original `open(...).read()` left the handle to be closed by GC).
    with open(fileName, "rb") as f:
        data = f.read()
    messages = {}
    ParseData(data, 0, len(data), messages)
    return messages
def GenValueList(value):
    """Encode a non-negative integer as base-128 varint bytes.

    Returns a list of byte values, least-significant 7-bit group first;
    every byte except the last carries the 0x80 continuation flag.
    ``GenValueList(0)`` returns ``[0]``; a negative input yields ``[]``
    (mirrors the original guard).
    """
    if value < 0:
        return []
    encoded = []
    while True:
        group = value & 0x7F
        value >>= 7
        if value:
            group |= 0x80  # more 7-bit groups follow
        encoded.append(group)
        if not value:
            return encoded
def WriteValue(value, output):
byteWritten | |
<reponame>cuihantao/Andes<gh_stars>10-100
# [ANDES] (C)2015-2022 <NAME>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
"""
Module for ANDES models.
"""
import logging
import pprint
from collections import OrderedDict
from textwrap import wrap
from typing import Callable, Iterable, Union
import numpy as np
import scipy as sp
from andes.core.block import Block
from andes.core.common import Config, JacTriplet, ModelFlags
from andes.core.discrete import Discrete
from andes.core.documenter import Documenter
from andes.core.model.modelcache import ModelCache
from andes.core.model.modelcall import ModelCall
from andes.core.param import ExtParam
from andes.core.service import (ApplyFunc, BackRef, BaseService, ConstService,
DeviceFinder, ExtService, FlagValue,
InitChecker, NumReduce, NumRepeat, NumSelect,
ParamCalc, PostInitService, RandomService,
Replace, SubsService, SwBlock, VarService)
from andes.core.symprocessor import SymProcessor
from andes.core.var import Algeb, BaseVar, ExtAlgeb, ExtState, State
from andes.shared import jac_full_names, numba
from andes.utils.func import list_flatten
from andes.utils.tab import Tab
logger = logging.getLogger(__name__)
class Model:
r"""
Base class for power system DAE models.
After subclassing `ModelData`, subclass `Model`` to complete a DAE model.
Subclasses of `Model` define DAE variables, services, and other types of parameters,
in the constructor ``__init__``.
Attributes
----------
num_params : OrderedDict
{name: instance} of numerical parameters, including internal
and external ones
Examples
--------
Take the static PQ as an example, the subclass of `Model`, `PQ`, should look like ::
class PQ(PQData, Model):
def __init__(self, system, config):
PQData.__init__(self)
Model.__init__(self, system, config)
Since `PQ` is calling the base class constructors, it is meant to be the final class and not further
derived.
It inherits from `PQData` and `Model` and must call constructors in the order of `PQData` and `Model`.
If the derived class of `Model` needs to be further derived, it should only derive from `Model`
and use a name ending with `Base`. See :py:class:`andes.models.synchronous.GENBASE`.
Next, in `PQ.__init__`, set proper flags to indicate the routines in which the model will be used ::
self.flags.update({'pflow': True})
Currently, flags `pflow` and `tds` are supported. Both are `False` by default, meaning the model is
neither used in power flow nor time-domain simulation. **A very common pitfall is forgetting to set the flag**.
Next, the group name can be provided. A group is a collection of models with common parameters and variables.
Devices' idx of all models in the same group must be unique. To provide a group name, use ::
self.group = 'StaticLoad'
The group name must be an existing class name in :py:mod:`andes.models.group`.
The model will be added to the specified group and subject to the variable and parameter policy of the
group.
If not provided with a group class name, the model will be placed in the `Undefined` group.
Next, additional configuration flags can be added.
Configuration flags for models are load-time variables, specifying the behavior of a model.
They can be exported to an `andes.rc` file and automatically loaded when creating the `System`.
Configuration flags can be used in equation strings, as long as they are numerical values.
To add config flags, use ::
self.config.add(OrderedDict((('pq2z', 1), )))
It is recommended to use `OrderedDict` instead of `dict`, although the syntax is verbose.
Note that booleans should be provided as integers (1 or 0), since `True` or `False` is interpreted as
a string when loaded from the `rc` file and will cause an error.
Next, it's time for variables and equations! The `PQ` class does not have internal variables itself.
It uses its `bus` parameter to fetch the corresponding `a` and `v` variables of buses.
Equation wise, it imposes an active power and a reactive power load equation.
To define external variables from `Bus`, use ::
self.a = ExtAlgeb(model='Bus', src='a',
indexer=self.bus, tex_name=r'\theta')
self.v = ExtAlgeb(model='Bus', src='v',
indexer=self.bus, tex_name=r'V')
Refer to the subsection Variables for more details.
The simplest `PQ` model will impose constant P and Q, coded as ::
self.a.e_str = "u * p"
self.v.e_str = "u * q"
where the `e_str` attribute is the equation string attribute. `u` is the connectivity status.
Any parameter, config, service or variable can be used in equation strings.
Three additional scalars can be used in equations:
- ``dae_t`` for the current simulation time (can be used if the model has flag `tds`).
- ``sys_f`` for system frequency (from ``system.config.freq``).
- ``sys_mva`` for system base mva (from ``system.config.mva``).
The above example is overly simplified. Our `PQ` model wants a feature to switch itself to
a constant impedance if the voltage is out of the range `(vmin, vmax)`.
To implement this, we need to introduce a discrete component called `Limiter`, which yields three arrays
of binary flags, `zi`, `zl`, and `zu` indicating in-range, below lower-limit, and above upper-limit,
respectively.
First, create an attribute `vcmp` as a `Limiter` instance ::
self.vcmp = Limiter(u=self.v, lower=self.vmin, upper=self.vmax,
enable=self.config.pq2z)
where `self.config.pq2z` is a flag to turn this feature on or off.
After this line, we can use `vcmp_zi`, `vcmp_zl`, and `vcmp_zu` in other equation strings. ::
self.a.e_str = "u * (p0 * vcmp_zi + " \
"p0 * vcmp_zl * (v ** 2 / vmin ** 2) + " \
"p0 * vcmp_zu * (v ** 2 / vmax ** 2))"
self.v.e_str = "u * (q0 * vcmp_zi + " \
"q0 * vcmp_zl * (v ** 2 / vmin ** 2) + "\
"q0 * vcmp_zu * (v ** 2 / vmax ** 2))"
Note that `PQ.a.e_str` can use the three variables from `vcmp` even before defining `PQ.vcmp`, as long as
`PQ.vcmp` is defined, because `vcmp_zi` is just a string literal in `e_str`.
The two equations above implement a piece-wise power injection equation. It selects the original power demand
if within range, and uses the calculated power when out of range.
Finally, to let ANDES pick up the model, the model name needs to be added to `models/__init__.py`.
Follow the examples in the `OrderedDict`, where the key is the file name, and the value is the class name.
Notes
-----
To modify parameters or services use ``set()``, which writes directly to the given attribute,
or ``alter()``, which converts parameters to system base like that for input data.
"""
def __init__(self, system=None, config=None):
self.system = system
# duplicate attributes from ModelData. Keep for now.
self.n = 0
self.group = 'Undefined'
# params and vars that exist in the group but not in this model
# normally empty but can be used in special cases to bypass
# shared param/var checking
self.group_param_exception = list()
self.group_var_exception = list()
if not hasattr(self, 'num_params'):
self.num_params = OrderedDict()
if not hasattr(self, 'cache'):
self.cache = ModelCache()
# variables
self.states = OrderedDict() # internal states
self.states_ext = OrderedDict() # external states
self.algebs = OrderedDict() # internal algebraic variables
self.algebs_ext = OrderedDict() # external algebraic vars
self.vars_decl_order = OrderedDict() # variable in the order of declaration
self.params_ext = OrderedDict() # external parameters
self.discrete = OrderedDict() # discrete comp.
self.blocks = OrderedDict() # blocks
self.services = OrderedDict() # service/temporary variables
self.services_var = OrderedDict() # variable services updated each step/iter
self.services_var_seq = OrderedDict()
self.services_var_nonseq = OrderedDict()
self.services_post = OrderedDict() # post-initialization storage services
self.services_subs = OrderedDict() # to-be-substituted services
self.services_icheck = OrderedDict() # post-initialization check services
self.services_ref = OrderedDict() # BackRef
self.services_fnd = OrderedDict() # services to find/add devices
self.services_ext = OrderedDict() # external services (to be retrieved)
self.services_ops = OrderedDict() # operational services (for special usages)
self.tex_names = OrderedDict((('dae_t', 't_{dae}'),
('sys_f', 'f_{sys}'),
('sys_mva', 'S_{b,sys}'),
))
# Model behavior flags
self.flags = ModelFlags()
# `in_use` is used by models with `BackRef` when not reference
self.in_use = True # True if this model is in use, False removes this model from all calls
self.config = Config(name=self.class_name) # `config` that can be exported
if config is not None:
self.config.load(config)
# basic configs
self.config.add(OrderedDict((('allow_adjust', 1),
('adjust_lower', 0),
('adjust_upper', 1),
)))
self.config.add_extra("_help",
allow_adjust='allow adjusting upper or lower limits',
adjust_lower='adjust lower limit',
adjust_upper='adjust upper limit',
)
self.config.add_extra("_alt",
allow_adjust=(0, 1),
adjust_lower=(0, 1),
adjust_upper=(0, 1),
)
self.calls = ModelCall() # callback and LaTeX string storage
self.triplets = JacTriplet() # | |
<reponame>riseofthetigers/GDAL
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.12
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# Locate and load the private SWIG C extension `_osr`.  On Python >= 2.6
# the module is first searched next to this file (package-relative) via
# `imp`; on older interpreters a plain import is used.
# NOTE(review): the `imp` module was removed in Python 3.12 — this
# generated bootstrap only works on interpreters that still ship it.
if version_info >= (2,6,0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_osr', [dirname(__file__)])
        except ImportError:
            # Not found beside this file; fall back to the normal search path.
            import _osr
            return _osr
        if fp is not None:
            try:
                _mod = imp.load_module('_osr', fp, pathname, description)
            finally:
                # imp.find_module opened the file; always release the handle.
                fp.close()
            return _mod
    _osr = swig_import_helper()
    del swig_import_helper
else:
    import _osr
del version_info
del version_info
# Alias the builtin `property` under a private name for the generated
# proxy classes below; very old interpreters lacked it entirely.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shim: `_object` is the base class for the generated proxy
# classes and `_newclass` flags whether new-style classes (and therefore
# property descriptors) are available on this interpreter.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
SRS_WKT_WGS84 = _osr.SRS_WKT_WGS84
SRS_PT_ALBERS_CONIC_EQUAL_AREA = _osr.SRS_PT_ALBERS_CONIC_EQUAL_AREA
SRS_PT_AZIMUTHAL_EQUIDISTANT = _osr.SRS_PT_AZIMUTHAL_EQUIDISTANT
SRS_PT_CASSINI_SOLDNER = _osr.SRS_PT_CASSINI_SOLDNER
SRS_PT_CYLINDRICAL_EQUAL_AREA = _osr.SRS_PT_CYLINDRICAL_EQUAL_AREA
SRS_PT_BONNE = _osr.SRS_PT_BONNE
SRS_PT_ECKERT_I = _osr.SRS_PT_ECKERT_I
SRS_PT_ECKERT_II = _osr.SRS_PT_ECKERT_II
SRS_PT_ECKERT_III = _osr.SRS_PT_ECKERT_III
SRS_PT_ECKERT_IV = _osr.SRS_PT_ECKERT_IV
SRS_PT_ECKERT_V = _osr.SRS_PT_ECKERT_V
SRS_PT_ECKERT_VI = _osr.SRS_PT_ECKERT_VI
SRS_PT_EQUIDISTANT_CONIC = _osr.SRS_PT_EQUIDISTANT_CONIC
SRS_PT_EQUIRECTANGULAR = _osr.SRS_PT_EQUIRECTANGULAR
SRS_PT_GALL_STEREOGRAPHIC = _osr.SRS_PT_GALL_STEREOGRAPHIC
SRS_PT_GAUSSSCHREIBERTMERCATOR = _osr.SRS_PT_GAUSSSCHREIBERTMERCATOR
SRS_PT_GEOSTATIONARY_SATELLITE = _osr.SRS_PT_GEOSTATIONARY_SATELLITE
SRS_PT_GOODE_HOMOLOSINE = _osr.SRS_PT_GOODE_HOMOLOSINE
SRS_PT_IGH = _osr.SRS_PT_IGH
SRS_PT_GNOMONIC = _osr.SRS_PT_GNOMONIC
SRS_PT_HOTINE_OBLIQUE_MERCATOR_AZIMUTH_CENTER = _osr.SRS_PT_HOTINE_OBLIQUE_MERCATOR_AZIMUTH_CENTER
SRS_PT_HOTINE_OBLIQUE_MERCATOR = _osr.SRS_PT_HOTINE_OBLIQUE_MERCATOR
SRS_PT_HOTINE_OBLIQUE_MERCATOR_TWO_POINT_NATURAL_ORIGIN = _osr.SRS_PT_HOTINE_OBLIQUE_MERCATOR_TWO_POINT_NATURAL_ORIGIN
SRS_PT_LABORDE_OBLIQUE_MERCATOR = _osr.SRS_PT_LABORDE_OBLIQUE_MERCATOR
SRS_PT_LAMBERT_CONFORMAL_CONIC_1SP = _osr.SRS_PT_LAMBERT_CONFORMAL_CONIC_1SP
SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP = _osr.SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP
SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP_BELGIUM = _osr.SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP_BELGIUM
SRS_PT_LAMBERT_AZIMUTHAL_EQUAL_AREA = _osr.SRS_PT_LAMBERT_AZIMUTHAL_EQUAL_AREA
SRS_PT_MERCATOR_1SP = _osr.SRS_PT_MERCATOR_1SP
SRS_PT_MERCATOR_2SP = _osr.SRS_PT_MERCATOR_2SP
SRS_PT_MERCATOR_AUXILIARY_SPHERE = _osr.SRS_PT_MERCATOR_AUXILIARY_SPHERE
SRS_PT_MILLER_CYLINDRICAL = _osr.SRS_PT_MILLER_CYLINDRICAL
SRS_PT_MOLLWEIDE = _osr.SRS_PT_MOLLWEIDE
SRS_PT_NEW_ZEALAND_MAP_GRID = _osr.SRS_PT_NEW_ZEALAND_MAP_GRID
SRS_PT_OBLIQUE_STEREOGRAPHIC = _osr.SRS_PT_OBLIQUE_STEREOGRAPHIC
SRS_PT_ORTHOGRAPHIC = _osr.SRS_PT_ORTHOGRAPHIC
SRS_PT_POLAR_STEREOGRAPHIC = _osr.SRS_PT_POLAR_STEREOGRAPHIC
SRS_PT_POLYCONIC = _osr.SRS_PT_POLYCONIC
SRS_PT_ROBINSON = _osr.SRS_PT_ROBINSON
SRS_PT_SINUSOIDAL = _osr.SRS_PT_SINUSOIDAL
SRS_PT_STEREOGRAPHIC = _osr.SRS_PT_STEREOGRAPHIC
SRS_PT_SWISS_OBLIQUE_CYLINDRICAL = _osr.SRS_PT_SWISS_OBLIQUE_CYLINDRICAL
SRS_PT_TRANSVERSE_MERCATOR = _osr.SRS_PT_TRANSVERSE_MERCATOR
SRS_PT_TRANSVERSE_MERCATOR_SOUTH_ORIENTED = _osr.SRS_PT_TRANSVERSE_MERCATOR_SOUTH_ORIENTED
SRS_PT_TRANSVERSE_MERCATOR_MI_21 = _osr.SRS_PT_TRANSVERSE_MERCATOR_MI_21
SRS_PT_TRANSVERSE_MERCATOR_MI_22 = _osr.SRS_PT_TRANSVERSE_MERCATOR_MI_22
SRS_PT_TRANSVERSE_MERCATOR_MI_23 = _osr.SRS_PT_TRANSVERSE_MERCATOR_MI_23
SRS_PT_TRANSVERSE_MERCATOR_MI_24 = _osr.SRS_PT_TRANSVERSE_MERCATOR_MI_24
SRS_PT_TRANSVERSE_MERCATOR_MI_25 = _osr.SRS_PT_TRANSVERSE_MERCATOR_MI_25
SRS_PT_TUNISIA_MINING_GRID = _osr.SRS_PT_TUNISIA_MINING_GRID
SRS_PT_TWO_POINT_EQUIDISTANT = _osr.SRS_PT_TWO_POINT_EQUIDISTANT
SRS_PT_VANDERGRINTEN = _osr.SRS_PT_VANDERGRINTEN
SRS_PT_KROVAK = _osr.SRS_PT_KROVAK
SRS_PT_IMW_POLYCONIC = _osr.SRS_PT_IMW_POLYCONIC
SRS_PT_WAGNER_I = _osr.SRS_PT_WAGNER_I
SRS_PT_WAGNER_II = _osr.SRS_PT_WAGNER_II
SRS_PT_WAGNER_III = _osr.SRS_PT_WAGNER_III
SRS_PT_WAGNER_IV = _osr.SRS_PT_WAGNER_IV
SRS_PT_WAGNER_V = _osr.SRS_PT_WAGNER_V
SRS_PT_WAGNER_VI = _osr.SRS_PT_WAGNER_VI
SRS_PT_WAGNER_VII = _osr.SRS_PT_WAGNER_VII
SRS_PT_QSC = _osr.SRS_PT_QSC
SRS_PT_AITOFF = _osr.SRS_PT_AITOFF
SRS_PT_WINKEL_I = _osr.SRS_PT_WINKEL_I
SRS_PT_WINKEL_II = _osr.SRS_PT_WINKEL_II
SRS_PT_WINKEL_TRIPEL = _osr.SRS_PT_WINKEL_TRIPEL
SRS_PT_CRASTER_PARABOLIC = _osr.SRS_PT_CRASTER_PARABOLIC
SRS_PT_LOXIMUTHAL = _osr.SRS_PT_LOXIMUTHAL
SRS_PT_QUARTIC_AUTHALIC = _osr.SRS_PT_QUARTIC_AUTHALIC
SRS_PP_CENTRAL_MERIDIAN = _osr.SRS_PP_CENTRAL_MERIDIAN
SRS_PP_SCALE_FACTOR = _osr.SRS_PP_SCALE_FACTOR
SRS_PP_STANDARD_PARALLEL_1 = _osr.SRS_PP_STANDARD_PARALLEL_1
SRS_PP_STANDARD_PARALLEL_2 = _osr.SRS_PP_STANDARD_PARALLEL_2
SRS_PP_PSEUDO_STD_PARALLEL_1 = _osr.SRS_PP_PSEUDO_STD_PARALLEL_1
SRS_PP_LONGITUDE_OF_CENTER = _osr.SRS_PP_LONGITUDE_OF_CENTER
SRS_PP_LATITUDE_OF_CENTER = _osr.SRS_PP_LATITUDE_OF_CENTER
SRS_PP_LONGITUDE_OF_ORIGIN = _osr.SRS_PP_LONGITUDE_OF_ORIGIN
SRS_PP_LATITUDE_OF_ORIGIN = _osr.SRS_PP_LATITUDE_OF_ORIGIN
SRS_PP_FALSE_EASTING = _osr.SRS_PP_FALSE_EASTING
SRS_PP_FALSE_NORTHING = _osr.SRS_PP_FALSE_NORTHING
SRS_PP_AZIMUTH = _osr.SRS_PP_AZIMUTH
SRS_PP_LONGITUDE_OF_POINT_1 = _osr.SRS_PP_LONGITUDE_OF_POINT_1
SRS_PP_LATITUDE_OF_POINT_1 = _osr.SRS_PP_LATITUDE_OF_POINT_1
SRS_PP_LONGITUDE_OF_POINT_2 = _osr.SRS_PP_LONGITUDE_OF_POINT_2
SRS_PP_LATITUDE_OF_POINT_2 = _osr.SRS_PP_LATITUDE_OF_POINT_2
SRS_PP_LONGITUDE_OF_POINT_3 = _osr.SRS_PP_LONGITUDE_OF_POINT_3
SRS_PP_LATITUDE_OF_POINT_3 = _osr.SRS_PP_LATITUDE_OF_POINT_3
SRS_PP_RECTIFIED_GRID_ANGLE = _osr.SRS_PP_RECTIFIED_GRID_ANGLE
SRS_PP_LANDSAT_NUMBER = _osr.SRS_PP_LANDSAT_NUMBER
SRS_PP_PATH_NUMBER = _osr.SRS_PP_PATH_NUMBER
SRS_PP_PERSPECTIVE_POINT_HEIGHT = _osr.SRS_PP_PERSPECTIVE_POINT_HEIGHT
SRS_PP_SATELLITE_HEIGHT = _osr.SRS_PP_SATELLITE_HEIGHT
SRS_PP_FIPSZONE = _osr.SRS_PP_FIPSZONE
SRS_PP_ZONE = _osr.SRS_PP_ZONE
SRS_PP_LATITUDE_OF_1ST_POINT = _osr.SRS_PP_LATITUDE_OF_1ST_POINT
SRS_PP_LONGITUDE_OF_1ST_POINT = _osr.SRS_PP_LONGITUDE_OF_1ST_POINT
SRS_PP_LATITUDE_OF_2ND_POINT = _osr.SRS_PP_LATITUDE_OF_2ND_POINT
SRS_PP_LONGITUDE_OF_2ND_POINT = _osr.SRS_PP_LONGITUDE_OF_2ND_POINT
SRS_UL_METER = _osr.SRS_UL_METER
SRS_UL_FOOT = _osr.SRS_UL_FOOT
SRS_UL_FOOT_CONV = _osr.SRS_UL_FOOT_CONV
SRS_UL_US_FOOT = _osr.SRS_UL_US_FOOT
SRS_UL_US_FOOT_CONV = _osr.SRS_UL_US_FOOT_CONV
SRS_UL_NAUTICAL_MILE = _osr.SRS_UL_NAUTICAL_MILE
SRS_UL_NAUTICAL_MILE_CONV = _osr.SRS_UL_NAUTICAL_MILE_CONV
SRS_UL_LINK = _osr.SRS_UL_LINK
SRS_UL_LINK_CONV = _osr.SRS_UL_LINK_CONV
SRS_UL_CHAIN = _osr.SRS_UL_CHAIN
SRS_UL_CHAIN_CONV = _osr.SRS_UL_CHAIN_CONV
SRS_UL_ROD = _osr.SRS_UL_ROD
SRS_UL_ROD_CONV = _osr.SRS_UL_ROD_CONV
SRS_UL_LINK_Clarke = _osr.SRS_UL_LINK_Clarke
SRS_UL_LINK_Clarke_CONV = _osr.SRS_UL_LINK_Clarke_CONV
SRS_UL_KILOMETER = _osr.SRS_UL_KILOMETER
SRS_UL_KILOMETER_CONV = _osr.SRS_UL_KILOMETER_CONV
SRS_UL_DECIMETER = _osr.SRS_UL_DECIMETER
SRS_UL_DECIMETER_CONV = _osr.SRS_UL_DECIMETER_CONV
SRS_UL_CENTIMETER = _osr.SRS_UL_CENTIMETER
SRS_UL_CENTIMETER_CONV = _osr.SRS_UL_CENTIMETER_CONV
SRS_UL_MILLIMETER = _osr.SRS_UL_MILLIMETER
SRS_UL_MILLIMETER_CONV = _osr.SRS_UL_MILLIMETER_CONV
SRS_UL_INTL_NAUT_MILE = _osr.SRS_UL_INTL_NAUT_MILE
SRS_UL_INTL_NAUT_MILE_CONV = _osr.SRS_UL_INTL_NAUT_MILE_CONV
SRS_UL_INTL_INCH = _osr.SRS_UL_INTL_INCH
SRS_UL_INTL_INCH_CONV = _osr.SRS_UL_INTL_INCH_CONV
SRS_UL_INTL_FOOT = _osr.SRS_UL_INTL_FOOT
SRS_UL_INTL_FOOT_CONV = _osr.SRS_UL_INTL_FOOT_CONV
SRS_UL_INTL_YARD = _osr.SRS_UL_INTL_YARD
SRS_UL_INTL_YARD_CONV = _osr.SRS_UL_INTL_YARD_CONV
SRS_UL_INTL_STAT_MILE = _osr.SRS_UL_INTL_STAT_MILE
SRS_UL_INTL_STAT_MILE_CONV = _osr.SRS_UL_INTL_STAT_MILE_CONV
SRS_UL_INTL_FATHOM = _osr.SRS_UL_INTL_FATHOM
SRS_UL_INTL_FATHOM_CONV = _osr.SRS_UL_INTL_FATHOM_CONV
SRS_UL_INTL_CHAIN = _osr.SRS_UL_INTL_CHAIN
SRS_UL_INTL_CHAIN_CONV = _osr.SRS_UL_INTL_CHAIN_CONV
SRS_UL_INTL_LINK = _osr.SRS_UL_INTL_LINK
SRS_UL_INTL_LINK_CONV = _osr.SRS_UL_INTL_LINK_CONV
SRS_UL_US_INCH = _osr.SRS_UL_US_INCH
SRS_UL_US_INCH_CONV = _osr.SRS_UL_US_INCH_CONV
SRS_UL_US_YARD = _osr.SRS_UL_US_YARD
SRS_UL_US_YARD_CONV = _osr.SRS_UL_US_YARD_CONV
SRS_UL_US_CHAIN = _osr.SRS_UL_US_CHAIN
SRS_UL_US_CHAIN_CONV = _osr.SRS_UL_US_CHAIN_CONV
SRS_UL_US_STAT_MILE = _osr.SRS_UL_US_STAT_MILE
SRS_UL_US_STAT_MILE_CONV = _osr.SRS_UL_US_STAT_MILE_CONV
SRS_UL_INDIAN_YARD = _osr.SRS_UL_INDIAN_YARD
SRS_UL_INDIAN_YARD_CONV = _osr.SRS_UL_INDIAN_YARD_CONV
SRS_UL_INDIAN_FOOT = _osr.SRS_UL_INDIAN_FOOT
SRS_UL_INDIAN_FOOT_CONV = _osr.SRS_UL_INDIAN_FOOT_CONV
SRS_UL_INDIAN_CHAIN = _osr.SRS_UL_INDIAN_CHAIN
SRS_UL_INDIAN_CHAIN_CONV = _osr.SRS_UL_INDIAN_CHAIN_CONV
SRS_UA_DEGREE = _osr.SRS_UA_DEGREE
SRS_UA_DEGREE_CONV = _osr.SRS_UA_DEGREE_CONV
SRS_UA_RADIAN = _osr.SRS_UA_RADIAN
SRS_PM_GREENWICH = _osr.SRS_PM_GREENWICH
SRS_DN_NAD27 = _osr.SRS_DN_NAD27
SRS_DN_NAD83 = _osr.SRS_DN_NAD83
SRS_DN_WGS72 = _osr.SRS_DN_WGS72
SRS_DN_WGS84 = _osr.SRS_DN_WGS84
SRS_WGS84_SEMIMAJOR = _osr.SRS_WGS84_SEMIMAJOR
SRS_WGS84_INVFLATTENING = _osr.SRS_WGS84_INVFLATTENING
# Thin module-level wrappers delegating to the SWIG C extension `_osr`.
# The docstrings below are the SWIG-emitted C signatures.
def GetUseExceptions(*args):
    """GetUseExceptions() -> int"""
    return _osr.GetUseExceptions(*args)
def UseExceptions(*args):
    """UseExceptions()"""
    return _osr.UseExceptions(*args)
def DontUseExceptions(*args):
    """DontUseExceptions()"""
    return _osr.DontUseExceptions(*args)
def GetWellKnownGeogCSAsWKT(*args):
    """GetWellKnownGeogCSAsWKT(char const * name) -> OGRErr"""
    return _osr.GetWellKnownGeogCSAsWKT(*args)
def GetUserInputAsWKT(*args):
    """GetUserInputAsWKT(char const * name) -> OGRErr"""
    return _osr.GetUserInputAsWKT(*args)
class SpatialReference(_object):
    """Proxy of C++ OSRSpatialReferenceShadow class"""
    # SWIG plumbing: route all attribute access through the generated
    # per-class getter/setter tables via the module-level helpers.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, SpatialReference, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, SpatialReference, name)
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(OSRSpatialReferenceShadow self, char const * wkt="") -> SpatialReference"""
        this = _osr.new_SpatialReference(*args, **kwargs)
        # `self.this` may already hold a container supporting append, or be
        # unset, in which case the new C object is stored directly.
        try: self.this.append(this)
        except: self.this = this
    # C-side destructor hook used by SWIG; __del__ itself is a no-op stub.
    __swig_destroy__ = _osr.delete_SpatialReference
    __del__ = lambda self : None;
def __str__(self, *args):
"""__str__(SpatialReference self) -> retStringAndCPLFree *"""
return _osr.SpatialReference___str__(self, *args)
def IsSame(self, *args):
"""IsSame(SpatialReference self, SpatialReference rhs) -> int"""
return _osr.SpatialReference_IsSame(self, *args)
def IsSameGeogCS(self, *args):
"""IsSameGeogCS(SpatialReference self, SpatialReference rhs) -> int"""
return _osr.SpatialReference_IsSameGeogCS(self, *args)
def IsSameVertCS(self, *args):
"""IsSameVertCS(SpatialReference self, SpatialReference rhs) -> int"""
return _osr.SpatialReference_IsSameVertCS(self, *args)
def IsGeographic(self, *args):
"""IsGeographic(SpatialReference self) -> int"""
return _osr.SpatialReference_IsGeographic(self, *args)
def IsProjected(self, *args):
"""IsProjected(SpatialReference self) -> int"""
return _osr.SpatialReference_IsProjected(self, *args)
def IsCompound(self, *args):
"""IsCompound(SpatialReference self) -> int"""
return _osr.SpatialReference_IsCompound(self, *args)
def IsGeocentric(self, *args):
"""IsGeocentric(SpatialReference self) -> int"""
return _osr.SpatialReference_IsGeocentric(self, *args)
def IsLocal(self, *args):
"""IsLocal(SpatialReference self) -> int"""
return _osr.SpatialReference_IsLocal(self, *args)
def IsVertical(self, *args):
"""IsVertical(SpatialReference self) -> int"""
return _osr.SpatialReference_IsVertical(self, *args)
def EPSGTreatsAsLatLong(self, *args):
"""EPSGTreatsAsLatLong(SpatialReference self) -> int"""
return _osr.SpatialReference_EPSGTreatsAsLatLong(self, *args)
def EPSGTreatsAsNorthingEasting(self, *args):
"""EPSGTreatsAsNorthingEasting(SpatialReference self) -> int"""
return _osr.SpatialReference_EPSGTreatsAsNorthingEasting(self, *args)
def SetAuthority(self, *args):
"""SetAuthority(SpatialReference self, char const * pszTargetKey, char const * pszAuthority, int nCode) -> OGRErr"""
return _osr.SpatialReference_SetAuthority(self, *args)
def GetAttrValue(self, *args):
"""GetAttrValue(SpatialReference self, char const * name, int child=0) -> char const *"""
return _osr.SpatialReference_GetAttrValue(self, *args)
def SetAttrValue(self, *args):
"""SetAttrValue(SpatialReference self, char const * name, char const * value) -> OGRErr"""
return _osr.SpatialReference_SetAttrValue(self, *args)
def SetAngularUnits(self, *args):
"""SetAngularUnits(SpatialReference self, char const * name, double to_radians) -> OGRErr"""
return _osr.SpatialReference_SetAngularUnits(self, *args)
def GetAngularUnits(self, *args):
"""GetAngularUnits(SpatialReference self) -> double"""
return _osr.SpatialReference_GetAngularUnits(self, *args)
def SetTargetLinearUnits(self, *args):
"""SetTargetLinearUnits(SpatialReference self, char const * target, char const * name, double to_meters) -> OGRErr"""
return _osr.SpatialReference_SetTargetLinearUnits(self, *args)
def SetLinearUnits(self, *args):
"""SetLinearUnits(SpatialReference self, char const * name, double to_meters) -> OGRErr"""
return _osr.SpatialReference_SetLinearUnits(self, *args)
def SetLinearUnitsAndUpdateParameters(self, *args):
"""SetLinearUnitsAndUpdateParameters(SpatialReference self, char const * name, double to_meters) -> OGRErr"""
return _osr.SpatialReference_SetLinearUnitsAndUpdateParameters(self, *args)
def GetLinearUnits(self, *args):
"""GetLinearUnits(SpatialReference self) -> double"""
return _osr.SpatialReference_GetLinearUnits(self, *args)
def GetLinearUnitsName(self, *args):
"""GetLinearUnitsName(SpatialReference self) -> char const *"""
return _osr.SpatialReference_GetLinearUnitsName(self, *args)
def GetAuthorityCode(self, *args):
"""GetAuthorityCode(SpatialReference self, char const * target_key) -> char const *"""
return _osr.SpatialReference_GetAuthorityCode(self, *args)
def GetAuthorityName(self, *args):
"""GetAuthorityName(SpatialReference self, char const * target_key) -> char const *"""
return _osr.SpatialReference_GetAuthorityName(self, *args)
def SetUTM(self, *args):
"""SetUTM(SpatialReference self, int zone, int north=1) -> OGRErr"""
return _osr.SpatialReference_SetUTM(self, *args)
def GetUTMZone(self, *args):
"""GetUTMZone(SpatialReference self) -> int"""
return _osr.SpatialReference_GetUTMZone(self, *args)
def SetStatePlane(self, *args):
"""SetStatePlane(SpatialReference self, int zone, int is_nad83=1, char const * unitsname="", double units=0.0) -> OGRErr"""
return _osr.SpatialReference_SetStatePlane(self, *args)
def AutoIdentifyEPSG(self, *args):
"""AutoIdentifyEPSG(SpatialReference self) -> OGRErr"""
return _osr.SpatialReference_AutoIdentifyEPSG(self, *args)
def SetProjection(self, *args):
"""SetProjection(SpatialReference self, char const * arg) -> OGRErr"""
return _osr.SpatialReference_SetProjection(self, *args)
def SetProjParm(self, *args):
"""SetProjParm(SpatialReference self, char const * name, double val) -> OGRErr"""
return _osr.SpatialReference_SetProjParm(self, *args)
def GetProjParm(self, *args):
"""GetProjParm(SpatialReference self, char const * name, double default_val=0.0) -> double"""
return _osr.SpatialReference_GetProjParm(self, *args)
def SetNormProjParm(self, *args):
"""SetNormProjParm(SpatialReference self, char const * name, double val) -> OGRErr"""
return _osr.SpatialReference_SetNormProjParm(self, *args)
def GetNormProjParm(self, *args):
"""GetNormProjParm(SpatialReference self, char const * name, double default_val=0.0) -> double"""
return _osr.SpatialReference_GetNormProjParm(self, *args)
def GetSemiMajor(self, *args):
"""GetSemiMajor(SpatialReference self) -> double"""
return _osr.SpatialReference_GetSemiMajor(self, *args)
def GetSemiMinor(self, *args):
"""GetSemiMinor(SpatialReference self) -> double"""
return _osr.SpatialReference_GetSemiMinor(self, *args)
    def GetInvFlattening(self, *args):
        """GetInvFlattening(SpatialReference self) -> double"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_GetInvFlattening(self, *args)
    def SetACEA(self, *args, **kwargs):
        """SetACEA(SpatialReference self, double stdp1, double stdp2, double clat, double clong, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetACEA(self, *args, **kwargs)
    def SetAE(self, *args, **kwargs):
        """SetAE(SpatialReference self, double clat, double clong, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetAE(self, *args, **kwargs)
    def SetBonne(self, *args, **kwargs):
        """SetBonne(SpatialReference self, double stdp, double cm, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetBonne(self, *args, **kwargs)
    def SetCEA(self, *args, **kwargs):
        """SetCEA(SpatialReference self, double stdp1, double cm, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetCEA(self, *args, **kwargs)
    def SetCS(self, *args, **kwargs):
        """SetCS(SpatialReference self, double clat, double clong, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetCS(self, *args, **kwargs)
    def SetEC(self, *args, **kwargs):
        """SetEC(SpatialReference self, double stdp1, double stdp2, double clat, double clong, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetEC(self, *args, **kwargs)
    def SetEckertIV(self, *args, **kwargs):
        """SetEckertIV(SpatialReference self, double cm, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetEckertIV(self, *args, **kwargs)
    def SetEckertVI(self, *args, **kwargs):
        """SetEckertVI(SpatialReference self, double cm, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetEckertVI(self, *args, **kwargs)
    def SetEquirectangular(self, *args, **kwargs):
        """SetEquirectangular(SpatialReference self, double clat, double clong, double fe, double fn) -> OGRErr"""
        # SWIG-generated proxy: delegates to the native _osr extension module.
        return _osr.SpatialReference_SetEquirectangular(self, *args, **kwargs)
def SetEquirectangular2(self, *args, **kwargs):
"""SetEquirectangular2(SpatialReference self, double clat, | |
[]
for model_input_file in model_input_files:
arr = np.loadtxt(model_input_file)
org_file = df.loc[df.model_file==model_input_file,"org_file"].values
org_file = org_file[0]
org_arr = np.loadtxt(org_file)
if "zone_file" in df.columns:
zone_file = df.loc[df.model_file == model_input_file,"zone_file"].dropna().unique()
if len(zone_file) > 1:
zone_arr = np.zeros_like(arr)
for zf in zone_file:
za = np.loadtxt(zf)
zone_arr[za!=0] = 1
else:
zone_arr = np.loadtxt(zone_file[0])
arr[zone_arr==0] = np.NaN
org_arr[zone_arr==0] = np.NaN
for stat,func in stat_dict.items():
v = func(arr)
records[stat].append(v)
ov = func(org_arr)
records[stat+"_org"].append(ov)
records[stat+"_dif"].append(ov-v)
for q in quantiles:
v = np.nanquantile(arr,q)
ov = np.nanquantile(org_arr,q)
records["quantile_{0}".format(q)].append(v)
records["quantile_{0}_org".format(q)].append(ov)
records["quantile_{0}_dif".format(q)].append(ov-v)
ub = df.loc[df.model_file==model_input_file,"upper_bound"].max()
lb = df.loc[df.model_file == model_input_file, "lower_bound"].min()
if pd.isna(ub):
records["upper_bound"].append(0)
records["upper_bound_org"].append(0)
records["upper_bound_dif"].append(0)
else:
iarr = np.zeros_like(arr)
iarr[arr==ub] = 1
v = iarr.sum()
iarr = np.zeros_like(arr)
iarr[org_arr == ub] = 1
ov = iarr.sum()
records["upper_bound"].append(v)
records["upper_bound_org"].append(ov)
records["upper_bound_dif"].append(ov-v)
if pd.isna(lb):
records["lower_bound"].append(0)
records["lower_bound_org"].append(0)
records["lower_bound_dif"].append(0)
else:
iarr = np.zeros_like(arr)
iarr[arr==lb] = 1
v = iarr.sum()
iarr = np.zeros_like(arr)
iarr[org_arr == lb] = 1
ov = iarr.sum()
records["lower_bound"].append(v)
records["lower_bound_org"].append(ov)
records["lower_bound_dif"].append(ov-v)
#scrub model input files
model_input_files = [f.replace(".","_").replace("\\","_").replace("/","_") for f in model_input_files]
df = pd.DataFrame(records,index=model_input_files)
df.index.name = "model_file"
df.to_csv("arr_par_summary.csv")
return df
def apply_genericlist_pars(df, chunk_len=50):
    """a function to apply list style mult parameters
    Args:
        df (pandas.DataFrame): DataFrame that relates files containing
            multipliers to model input file names. Required columns include:
            {"model_file": file name of resulatant model input file,
            "org_file": file name of original file that multipliers act on,
            "fmt": format specifier for model input file (currently on 'free' supported),
            "sep": separator for model input file if 'free' formatted,
            "head_rows": Number of header rows to transfer from orig file to model file,
            "index_cols": list of columns (either indexes or strings) to be used to align mults, orig and model files,
            "use_cols": columns to mults act on,
            "upper_bound": ultimate upper bound for model input file parameter,
            "lower_bound": ultimate lower bound for model input file parameter}
        chunk_len (`int`): number of chunks for each multiprocessing instance to handle.
            Default is 50.
    Returns:
        None. Model input files are written as a side effect.
    """
    print("starting list mlt", datetime.now())
    uniq = df.model_file.unique()  # unique model input files to be produced
    num_uniq = len(uniq)  # number of input files to be produced
    # lazily split the files to be processed into even chunks
    num_chunk_floor = num_uniq // chunk_len  # number of whole chunks
    main_chunks = (
        uniq[: num_chunk_floor * chunk_len].reshape([-1, chunk_len]).tolist()
    )  # the list of files broken down into chunks
    remainder = uniq[num_chunk_floor * chunk_len:].tolist()  # remaining files
    # BUG FIX: the original unconditionally appended `remainder`, so when
    # num_uniq was an exact multiple of chunk_len (or zero) an empty chunk was
    # scheduled, wasting a worker task and inflating the reported chunk count.
    chunks = main_chunks + ([remainder] if remainder else [])
    print("number of chunks to process:", len(chunks))
    if len(chunks) <= 1:
        # nothing to parallelize; process in this process (if anything at all)
        if chunks:
            _process_chunk_list_files(chunks[0], 0, df)
    else:
        pool = mp.Pool()
        x = [
            pool.apply_async(_process_chunk_list_files, args=(chunk, i, df))
            for i, chunk in enumerate(chunks)
        ]
        # re-raise any worker exception in the parent
        [xx.get() for xx in x]
        pool.close()
        pool.join()
    print("finished list mlt", datetime.now())
def _process_chunk_list_files(chunk, i, df):
    """Worker body: run ``_process_list_file`` for every model file in *chunk*.

    ``i`` only tags this worker's progress message.
    """
    n_done = 0
    for mf in chunk:
        _process_list_file(mf, df)
        n_done += 1
    print("process", i, " processed ", n_done, "process_list_file calls")
def _process_list_file(model_file,df):
    """Build one list-style model input file by applying multipliers to its
    original data, enforcing bounds, and writing the result to *model_file*.

    Args:
        model_file (str): the model input file to (re)write.
        df (pandas.DataFrame): multiplier-info frame; rows matching
            ``df.model_file == model_file`` describe the org file, format,
            separator, header rows, index/use columns, mult files and bounds.
    """
    #print("processing model file:", model_file)
    df_mf = df.loc[df.model_file == model_file, :].copy()
    # read data stored in org (mults act on this)
    org_file = df_mf.org_file.unique()
    if org_file.shape[0] != 1:
        raise Exception("wrong number of org_files for {0}".format(model_file))
    org_file = org_file[0]
    #print("org file:", org_file)
    # resolve the output format: at most one non-"free" specifier is allowed
    notfree = df_mf.fmt[df_mf.fmt != "free"]
    if len(notfree) > 1:
        raise Exception(
            "too many different format specifiers for "
            "model file: {0}".format(model_file)
        )
    elif len(notfree) == 1:
        fmt = notfree.values[0]
    else:
        fmt = df_mf.fmt.values[-1]
    if fmt == "free":
        if df_mf.sep.dropna().nunique() > 1:
            raise Exception(
                "too many different sep specifiers for "
                "model file: {0}".format(model_file)
            )
        else:
            sep = df_mf.sep.dropna().values[-1]
    else:
        sep = None
    # number of header rows to carry over verbatim from the org file
    datastrtrow = df_mf.head_rows.values[-1]
    if fmt.lower() == "free" and sep == " ":
        # NOTE(review): this flag is assigned but never used below --
        # presumably it was meant to be passed to pd.read_csv; confirm.
        delim_whitespace = True
    if datastrtrow > 0:
        with open(org_file, "r") as fp:
            storehead = [next(fp) for _ in range(datastrtrow)]
    else:
        storehead = []
    # work out if headers are used for index_cols
    # big assumption here that int type index cols will not be written as headers
    index_col_eg = df_mf.index_cols.iloc[-1][0]
    if isinstance(index_col_eg, str):
        # TODO: add test for model file with headers
        # index_cols can be from header str
        header = 0
        hheader = True
    elif isinstance(index_col_eg, int):
        # index_cols are column numbers in input file
        header = None
        hheader = None
        # NOTE(review): if index_col_eg is neither str nor int, `header` and
        # `hheader` are left unbound and the read_csv below raises NameError.
        # actually do need index cols to be list of strings
        # to be compatible when the saved original file is read in.
        df_mf.loc[:, "index_cols"] = df_mf.index_cols.apply(
            lambda x: [str(i) for i in x]
        )
    # if writen by PstFrom this should always be comma delim - tidy
    org_data = pd.read_csv(org_file, skiprows=datastrtrow, header=header)
    # mult columns will be string type, so to make sure they align
    org_data.columns = org_data.columns.astype(str)
    #print("org_data columns:", org_data.columns)
    #print("org_data shape:", org_data.shape)
    new_df = org_data.copy()
    # apply each multiplier row in turn; each pass re-indexes new_df on the
    # multiplier's index_cols so the mults align row-wise
    for mlt in df_mf.itertuples():
        try:
            new_df = (
                new_df.reset_index()
                .rename(columns={"index": "oidx"})
                .set_index(mlt.index_cols)
            )
            new_df = new_df.sort_index()
        except Exception as e:
            print(
                "error setting mlt index_cols: ",
                str(mlt.index_cols),
                " for new_df with cols: ",
                list(new_df.columns),
            )
            raise Exception("error setting mlt index_cols: " + str(e))
        if not hasattr(mlt, "mlt_file") or pd.isna(mlt.mlt_file):
            print("null mlt file for org_file '" + org_file + "', continuing...")
        else:
            mlts = pd.read_csv(mlt.mlt_file)
            # get mult index to align with org_data;
            # mult idxs will always be written zero based if int
            # if original model files is not zero based need to add 1
            add1 = int(mlt.zero_based == False)
            # sidx holds stringified tuples like "(0, 'a')"; parse them back
            mlts.index = pd.MultiIndex.from_tuples(
                mlts.sidx.apply(
                    lambda x: [
                        add1 + int(xx) if xx.strip().isdigit() else xx.strip("'\" ")
                        for xx in x.strip("()").split(",")
                        if xx
                    ]
                ),
                names=mlt.index_cols,
            )
            if mlts.index.nlevels < 2:  # just in case only one index col is used
                mlts.index = mlts.index.get_level_values(0)
            # only rows present in both frames are multiplied
            common_idx = (
                new_df.index.intersection(mlts.index)
                .sort_values()
                .drop_duplicates()
            )
            mlt_cols = [str(col) for col in mlt.use_cols]
            new_df.loc[common_idx, mlt_cols] = (
                new_df.loc[common_idx, mlt_cols] * mlts.loc[common_idx, mlt_cols]
            ).values
        # bring mult index back to columns AND re-order
        new_df = (
            new_df.reset_index().set_index("oidx")[org_data.columns].sort_index()
        )
    # clip the result to the ultimate bounds, per use_col
    if "upper_bound" in df.columns:
        ub = df_mf.apply(
            lambda x: pd.Series(
                {str(c): b for c, b in zip(x.use_cols, x.upper_bound)}
            ),
            axis=1,
        ).max()
        if ub.notnull().any():
            for col, val in ub.items():
                new_df.loc[new_df.loc[:, col] > val, col] = val
    if "lower_bound" in df.columns:
        lb = df_mf.apply(
            lambda x: pd.Series(
                {str(c): b for c, b in zip(x.use_cols, x.lower_bound)}
            ),
            axis=1,
        ).min()
        if lb.notnull().any():
            for col, val in lb.items():
                new_df.loc[new_df.loc[:, col] < val, col] = val
    # write transferred header rows, then the (possibly clipped) data
    with open(model_file, "w") as fo:
        kwargs = {}
        if "win" in platform.platform().lower():
            kwargs = {"line_terminator": "\n"}
        if len(storehead) != 0:
            fo.write("\n".join(storehead))
            fo.flush()
        if fmt.lower() == "free":
            new_df.to_csv(
                fo, index=False, mode="a", sep=sep, header=hheader, **kwargs
            )
        else:
            np.savetxt(fo, np.atleast_2d(new_df.values), fmt=fmt)
def write_const_tpl(name, tpl_file, suffix, zn_array=None, shape=None, longnames=False):
    """write a constant (uniform) template file for a 2-D array
    Args:
        name (`str`): the base parameter name
        tpl_file (`str`): the template file to write
        suffix (`str`): suffix appended to the parameter name and group name
        zn_array (`numpy.ndarray`, optional): an array used to skip inactive cells,
            and optionally get shape info.
        shape (`tuple`): tuple nrow and ncol. Either `zn_array` or `shape`
            must be passed
        longnames (`bool`): flag to use longer names that exceed 12 chars in length.
            Default is False.
    Returns:
        `pandas.DataFrame`: a dataframe with parameter information
    """
    if shape is None and zn_array is None:
        raise Exception("must pass either zn_array or shape")
    elif shape is None:
        shape = zn_array.shape
    nrow, ncol = shape
    parnme = []
    with open(tpl_file, "w") as f:
        f.write("ptf ~\n")
        for i in range(nrow):
            for j in range(ncol):
                if zn_array is not None and zn_array[i, j] < 1:
                    # inactive cell: emit a fixed value instead of a parameter
                    token = " 1.0 "
                else:
                    template = "const_{0}_{1}" if longnames else "{0}{1}"
                    pname = template.format(name, suffix)
                    if len(pname) > 12:
                        warnings.warn(
                            "zone pname too long for pest:{0}".format(pname)
                        )
                    parnme.append(pname)
                    token = " ~ {0} ~".format(pname)
                f.write(token)
            f.write("\n")
    df = pd.DataFrame({"parnme": parnme}, index=parnme)
    df.loc[:, "pargp"] = "{0}_{1}".format(suffix.replace("_", ""), name)
    df.loc[:, "tpl"] = tpl_file
    return df
def write_grid_tpl(
name,
tpl_file,
suffix,
zn_array=None,
shape=None,
spatial_reference=None,
longnames=False,
):
"""write a grid-based template file for a 2-D array
Args:
name (`str`): the base parameter name
tpl_file (`str`): the template file to write - include path
zn_array (`numpy.ndarray`, optional): zone array to identify
inactive cells. Default is None
shape (`tuple`, optional): a length-two tuple of nrow and ncol. Either
`zn_array` or `shape` must be passed.
spatial_reference (`flopy.utils.SpatialReference`): | |
<reponame>Kodeworks/imu-fpv-processor<gh_stars>1-10
"""
Assumptions:
1. If any single value in a row of 5 values is NaN, then the entire row is NaN
Explanations:
Pose - Both position and orientation of an object
DOF/dof - Degree of freedom, here used to describe data from one of the six sensor readings (acc x-y-z, gyro x-y-z)
"""
import numpy as np
from scipy.signal import butter, filtfilt
from scipy.fft import fft
import warnings
import math
# The operating buffer size of FloatService should be a global variable so that it may be set once by some other process
# NOTE(review): a ``global`` statement at module level is a no-op; this line only
# documents the intent that ``n_rows`` is assigned by the embedding process
# before FloatService.process() is called.
global n_rows
class FloatService:
"""
FloatService serves the purpose of estimating the height and the two angles of the x- and y-axis to the horizontal
plane, of an IMU sensor. The FloatService process follows the steps of preprocessing, pose estimation and
post processing.
"""
    def __init__(self, name: str, input, output, dev_mode: bool = False):
        """
        Initialize the estimator state: buffers, bias trackers, filter
        coefficients and (optionally) development-mode diagnostics.

        :param name: str, unique ID of sensor
        :param input: np.memmap/np.ndarray, input buffer
        :param output: np.memmap/np.ndarray, output buffer
        :param dev_mode: bool, dev_mode enables features only usable in development
        """
        self.name = name
        self.input = input  # Input = [acc_x, acc_y, acc_z, gyro_x, gyro_y]
        self.output = output  # Output = [x_angle, y_angle, vertical_pos]
        self.input_len = np.shape(input)[0]
        self.last_row = -1
        self.gravitational_constant = 9.81
        # Index of highest valid index from last time the buffer counter was reset
        self.last_valid = self.input_len - 1
        # Miniburst control
        self.use_minibursts = True
        self.miniburst_size = 128
        # The following are variables to control how the burst is handled due to NaN values
        self.burst_contains_nan = False
        self.burst_is_discarded = False
        self.nan_in_burst = 0
        # TODO: The claim below was accurate when assumption 1 wasn't true. The current threshold is lower and
        #  should be examined
        # Tests show that the module handles 70% randomly placed NaN-values. Threshold is set to 50% to accommodate
        # for groupings of several NaN-values, which are more likely than a uniform distribution
        self.discard_burst_nan_threshold = 0.5
        # Internal storage
        self.processed_input = np.zeros(shape=[self.input_len, 5], dtype=float)
        self.actual_vertical_acceleration = np.zeros(shape=[self.input_len], dtype=float)
        self.proper_vertical_acceleration = np.zeros(shape=[self.input_len], dtype=float)
        self.dampened_vertical_velocity = np.zeros(shape=[self.input_len], dtype=float)
        self.dampened_vertical_position = np.zeros(shape=[self.input_len], dtype=float)
        self.vertical_acceleration = 0.0
        self.vertical_velocity = 0.0
        # Number of data points for calculating different means (prone to change)
        self.n_points_for_acc_mean = 4096
        self.n_points_for_gyro_mean = 4096
        self.n_points_for_proper_vert_acc_mean = 4096
        self.n_points_for_vel_mean = 512
        self.n_points_for_pos_mean = 512
        # Information on sensor bias is kept
        self.acc_bias_sliding_window = np.zeros(shape=3, dtype=float)
        self.gyro_bias_sliding_window = np.zeros(shape=2, dtype=float)
        # The _final-variables are the results from adaptive averaging
        self.acc_bias_final = np.zeros(shape=3, dtype=float)
        self.gyro_bias_final = np.zeros(shape=2, dtype=float)
        # Other bias that may emerge from the estimation process
        self.proper_vert_acc_bias = 0.0
        self.vert_vel_bias = 0.0
        self.vert_pos_bias = 0.0
        # Number of data rows to be processed before next bias update
        self.points_between_acc_bias_update = 256
        self.points_between_gyro_bias_update = 256
        self.points_between_proper_vert_acc_bias_update = 256
        self.points_between_vert_vel_bias_update = 32
        self.points_between_vert_pos_bias_update = 8
        # Row numbers of last bias updates
        self.last_acc_bias_update = -1
        self.last_gyro_bias_update = -1
        self.last_proper_vert_acc_bias_update = 0
        self.last_vert_vel_bias_update = 0
        self.last_vert_pos_bias_update = 0
        adav_alpha_max = 0.5
        adav_alpha_gain = 0.01
        # Adaptive averages used in extended sensor calibration
        self.adav_acc_x = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        self.adav_acc_y = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        self.adav_acc_z = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        self.adav_gyro_x = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        self.adav_gyro_y = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        self.adav_gyro_z = AdaptiveMovingAverage(
            alpha_min=0.01, alpha_max=adav_alpha_max, alpha_gain=adav_alpha_gain, dev_mode=dev_mode
        )
        # Weights for weighted averages
        self.vert_pos_average_weights = None
        self.set_position_average_weights()
        # Dampening factors to counteract integration drift
        self.vel_dampening_factor = 0.001
        self.pos_dampening_factor = 0.001
        # Dampening factors right after a discarded package
        self.vel_dampening_factor_big = 0.05
        self.pos_dampening_factor_big = 0.05
        # Dampening factor end points
        self.vel_dampening_factor_end = 0.001
        self.pos_dampening_factor_end = 0.001
        # Finally, a dampening factor to dampen the dampening factors
        self.dampening_factor_dampener = 0.05
        # Some constants for reversing input to match mathematical convention
        self.perform_axis_reversal = True
        self.gyro_reversal_coefficient = -1.0
        self.axis_reversal_coefficient = -1.0
        # With the current physical implementation, the y-axis (index 1) is reversed
        self.axis_reversal_index = 1
        # Sensor sampling frequency and period
        self.sampling_rate = 104.0
        self.sampling_period = 1.0 / self.sampling_rate
        # Low-pass filtering coefficients
        # NOTE(review): the filter order is derived from cutoff_rate, the
        # sampling rate and the Nyquist frequency in one expression -- the
        # intent behind this formula is not evident from here; confirm.
        nyquist_freq = self.sampling_rate / 2
        cutoff_rate = 0.1
        self.low_b, self.low_a = butter(int(1 / (cutoff_rate * self.sampling_rate) * nyquist_freq + 0.5),
                                        cutoff_rate,
                                        btype='lowpass',
                                        output='ba')
        # Kalman filter variables
        self.rows_per_kalman_use = 10
        self.kal_state_pri = np.array([0.0, 0.0])
        self.kal_state_post = np.array([0.0, 0.0])
        self.kal_p_pri = np.array([[0.5, 0.0],
                                   [0.0, 0.5]])
        self.kal_p_post = np.array([[0.5, 0.0],
                                    [0.0, 0.5]])
        _Q = 0.001 * np.pi  # Prone to change following testing but works fine
        _R = 0.001 * np.pi  # Prone to change following testing but works fine
        self.kal_Q = np.array([[_Q, 0.0],
                               [0.0, _Q]])
        self.kal_R = np.array([[_R, 0.0],
                               [0.0, _R]])
        self.kal_K = np.array([[0.0, 0.0],
                               [0.0, 0.0]])
        # Variables for generating and storing information on the wave function
        self.n_points_for_fft = int(self.sampling_rate)*10
        self.points_between_fft = int(self.sampling_rate)*5
        self.last_fft = -1
        self.n_saved_wave_functions = 50
        self.wave_function_buffer = np.zeros(shape=[self.n_saved_wave_functions, self.n_points_for_fft//2], dtype=float)
        # Pointer points to the last saved wave function
        self.wave_function_buffer_pointer = -1
        # Determines whether or not wave function information is used in vertical position bias control
        self.fft_aided_bias = False
        self.rotations = Rotations()
        # Development mode variables
        self.dev_mode = dev_mode
        if dev_mode:
            # Extended internal memory to examine different internal variables post processing
            self.dev_bank_angle = np.zeros(shape=[self.input_len], dtype=float)
            self.dev_vertical_velocity = np.zeros(shape=[self.input_len], dtype=float)
            self.dev_gyro_state = np.zeros(shape=[self.input_len, 2], dtype=float)
            self.dev_acc_state = np.zeros(shape=[self.input_len, 2], dtype=float)
            self.n_bias_updates = np.zeros(shape=[5], dtype=int)  # Gyro, xy acc, vert acc, vel, pos
            # Biases for each timestep are also kept for examination
            self.acc_bias_array = np.zeros(shape=[self.input_len, 3], dtype=float)
            self.gyro_bias_array = np.zeros(shape=[self.input_len, 2], dtype=float)
            self.vert_vel_bias_array = np.zeros(shape=[self.input_len], dtype=float)
            self.vert_pos_bias_array = np.zeros(shape=[self.input_len], dtype=float)
        # Some control variables for testing with a purpose of controling vertical position output
        self.no_vert_pos_bias = False
        self.no_vert_vel_bias = False
        # Controlling output post-processing
        self.use_output_filtering = True
        # NOTE(review): this escalates *all* warnings to exceptions process-wide,
        # a global side effect of constructing a FloatService -- confirm intended.
        warnings.filterwarnings('error')
    def process(self, number_of_rows: int):
        """
        Tell FloatService to process the next number_of_rows rows in input, starting from last_row + 1.
        :param number_of_rows: Number of input data rows to be processed.
        Format of output: N rows x [x-angle, y-angle, vertical position]
        """
        # NOTE(review): the wrap check uses the module-level global ``n_rows``
        # rather than ``self.input_len`` -- presumably they are kept equal by
        # the embedding process; confirm.
        if self.last_row + number_of_rows + 1 <= n_rows:
            start = self.last_row + 1
        else:
            # burst does not fit: restart from the top of the circular buffer
            start = 0
            # Information on last actual buffer index is kept
            self.last_valid = self.last_row
            self.update_counters_on_buffer_reuse()
            self.copy_data_to_last_index_on_buffer_reuse()
        end = start + number_of_rows
        if self.use_minibursts:
            # minibursts() performs its own discard checks per slice
            self.minibursts(start=start, end=end)
            self.postprocess_output(start=start, end=end)
        else:
            self.preprocess_data(start=start, end=end)
            # Check whether burst is declared discarded
            if not self.burst_is_discarded:
                self.run_processing_iterations(start=start, end=end)
                self.postprocess_output(start=start, end=end)
        self.last_row = end - 1
def minibursts(self, start: int, end: int):
"""
Minibursts are activated when a given burst is of greater size than some threshold. This is to make sure
some preprocessing steps like real time calibration of sensors (averaging of sensor input) is performed
regularily
:param start: Start index of miniburst
:param end: End index of miniburst
"""
s_i = start
e_i = min(end, s_i + self.miniburst_size)
while s_i < end:
self.preprocess_data(start=s_i, end=e_i)
# Check whether burst is declared discarded
if not self.burst_is_discarded:
self.run_processing_iterations(start=s_i, end=e_i)
s_i += self.miniburst_size
e_i = min(end, s_i+self.miniburst_size)
    def preprocess_data(self, start: int, end: int):
        """
        Prepare input rows [start, end) for pose estimation:
        NaN-handling
        Bias updates
        Set processed input
        Data transformations
        Outlier correction/smoothing

        May set ``self.burst_is_discarded``, in which case callers must skip
        the estimation step for this burst.

        :param start: first input row of the burst
        :param end: one past the last input row of the burst
        """
        # NaN-handling
        self.nan_handling(start=start, end=end)
        # Check whether burst was declared as discarded due to NaN_handling
        if self.burst_is_discarded:
            return
        # Update gyroscope and accelerometer bias
        self.update_acc_bias(row_no=start)
        self.update_gyro_bias(row_no=start)
        # If nan_handling() detected any NaN-values in the burst without discarding the burst, separate methods for
        # inserting processed input are used
        if self.burst_contains_nan:
            self.set_processed_acc_input_nan(start=start, end=end)
            self.set_processed_gyro_input_nan(start=start, end=end)
        else:
            # Insert raw acceleration data into processed_input
            self.set_processed_acc_input(start=start, end=end)
            # Adjust current gyro burst according to gyro bias and insert the result in preprocessed_input
            self.set_processed_gyro_input(start=start, end=end)
        # Convert angular velocities from deg/s to rad/s
        self.degrees_to_radians(start=start, end=end)
        # Transform gyro data and a single axis of accelerometer data so that the input matches mathematical convention
        self.reverse_some_processed_input(start=start, end=end)
        # Filtering of both accelerometer and gyroscope data using a low-pass filter
        self.low_pass_filter_input(start=start, end=end)
def run_processing_iterations(self, start: int, end: int):
for i in range(start, end):
self.wave_function(row_no=i)
self.estimate_pose(row_no=i)
# Adjust velocity and position dampening factors
self.adjust_pos_and_vel_dampening_factors()
def estimate_pose(self, row_no: int):
# A Kalman filter iteration is performed to estimate x- and y-angles,
# which later makes up the bank angle
| |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# justice-platform-service (4.10.0)
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
import re
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
from ....core import StrEnum
class ItemTypeEnum(StrEnum):
    """Wire values accepted for ``PaymentOrderCreate.item_type`` (generated)."""
    APP = "APP"
    COINS = "COINS"
    INGAMEITEM = "INGAMEITEM"
    BUNDLE = "BUNDLE"
    CODE = "CODE"
    SUBSCRIPTION = "SUBSCRIPTION"
    SEASON = "SEASON"
    MEDIA = "MEDIA"
class PaymentOrderCreate(Model):
"""Payment order create (PaymentOrderCreate)
Properties:
description: (description) REQUIRED str
ext_order_no: (extOrderNo) REQUIRED str
price: (price) REQUIRED int
title: (title) REQUIRED str
currency_code: (currencyCode) OPTIONAL str
currency_namespace: (currencyNamespace) OPTIONAL str
custom_parameters: (customParameters) OPTIONAL Dict[str, Any]
ext_user_id: (extUserId) OPTIONAL str
item_type: (itemType) OPTIONAL Union[str, ItemTypeEnum]
language: (language) OPTIONAL str
metadata: (metadata) OPTIONAL Dict[str, str]
notify_url: (notifyUrl) OPTIONAL str
omit_notification: (omitNotification) OPTIONAL bool
platform: (platform) OPTIONAL str
recurring_payment_order_no: (recurringPaymentOrderNo) OPTIONAL str
region: (region) OPTIONAL str
return_url: (returnUrl) OPTIONAL str
sandbox: (sandbox) OPTIONAL bool
sku: (sku) OPTIONAL str
subscription_id: (subscriptionId) OPTIONAL str
"""
# region fields
description: str # REQUIRED
ext_order_no: str # REQUIRED
price: int # REQUIRED
title: str # REQUIRED
currency_code: str # OPTIONAL
currency_namespace: str # OPTIONAL
custom_parameters: Dict[str, Any] # OPTIONAL
ext_user_id: str # OPTIONAL
item_type: Union[str, ItemTypeEnum] # OPTIONAL
language: str # OPTIONAL
metadata: Dict[str, str] # OPTIONAL
notify_url: str # OPTIONAL
omit_notification: bool # OPTIONAL
platform: str # OPTIONAL
recurring_payment_order_no: str # OPTIONAL
region: str # OPTIONAL
return_url: str # OPTIONAL
sandbox: bool # OPTIONAL
sku: str # OPTIONAL
subscription_id: str # OPTIONAL
# endregion fields
# region with_x methods
    def with_description(self, value: str) -> PaymentOrderCreate:
        """Set ``description`` and return self (builder-style)."""
        self.description = value
        return self
    def with_ext_order_no(self, value: str) -> PaymentOrderCreate:
        """Set ``ext_order_no`` and return self (builder-style)."""
        self.ext_order_no = value
        return self
    def with_price(self, value: int) -> PaymentOrderCreate:
        """Set ``price`` and return self (builder-style)."""
        self.price = value
        return self
    def with_title(self, value: str) -> PaymentOrderCreate:
        """Set ``title`` and return self (builder-style)."""
        self.title = value
        return self
    def with_currency_code(self, value: str) -> PaymentOrderCreate:
        """Set ``currency_code`` and return self (builder-style)."""
        self.currency_code = value
        return self
    def with_currency_namespace(self, value: str) -> PaymentOrderCreate:
        """Set ``currency_namespace`` and return self (builder-style)."""
        self.currency_namespace = value
        return self
    def with_custom_parameters(self, value: Dict[str, Any]) -> PaymentOrderCreate:
        """Set ``custom_parameters`` and return self (builder-style)."""
        self.custom_parameters = value
        return self
    def with_ext_user_id(self, value: str) -> PaymentOrderCreate:
        """Set ``ext_user_id`` and return self (builder-style)."""
        self.ext_user_id = value
        return self
    def with_item_type(self, value: Union[str, ItemTypeEnum]) -> PaymentOrderCreate:
        """Set ``item_type`` and return self (builder-style)."""
        self.item_type = value
        return self
    def with_language(self, value: str) -> PaymentOrderCreate:
        """Set ``language`` and return self (builder-style)."""
        self.language = value
        return self
    def with_metadata(self, value: Dict[str, str]) -> PaymentOrderCreate:
        """Set ``metadata`` and return self (builder-style)."""
        self.metadata = value
        return self
    def with_notify_url(self, value: str) -> PaymentOrderCreate:
        """Set ``notify_url`` and return self (builder-style)."""
        self.notify_url = value
        return self
    def with_omit_notification(self, value: bool) -> PaymentOrderCreate:
        """Set ``omit_notification`` and return self (builder-style)."""
        self.omit_notification = value
        return self
    def with_platform(self, value: str) -> PaymentOrderCreate:
        """Set ``platform`` and return self (builder-style)."""
        self.platform = value
        return self
    def with_recurring_payment_order_no(self, value: str) -> PaymentOrderCreate:
        """Set ``recurring_payment_order_no`` and return self (builder-style)."""
        self.recurring_payment_order_no = value
        return self
    def with_region(self, value: str) -> PaymentOrderCreate:
        """Set ``region`` and return self (builder-style)."""
        self.region = value
        return self
    def with_return_url(self, value: str) -> PaymentOrderCreate:
        """Set ``return_url`` and return self (builder-style)."""
        self.return_url = value
        return self
    def with_sandbox(self, value: bool) -> PaymentOrderCreate:
        """Set ``sandbox`` and return self (builder-style)."""
        self.sandbox = value
        return self
    def with_sku(self, value: str) -> PaymentOrderCreate:
        """Set ``sku`` and return self (builder-style)."""
        self.sku = value
        return self
    def with_subscription_id(self, value: str) -> PaymentOrderCreate:
        """Set ``subscription_id`` and return self (builder-style)."""
        self.subscription_id = value
        return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "description"):
result["description"] = str(self.description)
elif include_empty:
result["description"] = ""
if hasattr(self, "ext_order_no"):
result["extOrderNo"] = str(self.ext_order_no)
elif include_empty:
result["extOrderNo"] = ""
if hasattr(self, "price"):
result["price"] = int(self.price)
elif include_empty:
result["price"] = 0
if hasattr(self, "title"):
result["title"] = str(self.title)
elif include_empty:
result["title"] = ""
if hasattr(self, "currency_code"):
result["currencyCode"] = str(self.currency_code)
elif include_empty:
result["currencyCode"] = ""
if hasattr(self, "currency_namespace"):
result["currencyNamespace"] = str(self.currency_namespace)
elif include_empty:
result["currencyNamespace"] = ""
if hasattr(self, "custom_parameters"):
result["customParameters"] = {str(k0): v0 for k0, v0 in self.custom_parameters.items()}
elif include_empty:
result["customParameters"] = {}
if hasattr(self, "ext_user_id"):
result["extUserId"] = str(self.ext_user_id)
elif include_empty:
result["extUserId"] = ""
if hasattr(self, "item_type"):
result["itemType"] = str(self.item_type)
elif include_empty:
result["itemType"] = Union[str, ItemTypeEnum]()
if hasattr(self, "language"):
result["language"] = str(self.language)
elif include_empty:
result["language"] = ""
if hasattr(self, "metadata"):
result["metadata"] = {str(k0): str(v0) for k0, v0 in self.metadata.items()}
elif include_empty:
result["metadata"] = {}
if hasattr(self, "notify_url"):
result["notifyUrl"] = str(self.notify_url)
elif include_empty:
result["notifyUrl"] = ""
if hasattr(self, "omit_notification"):
result["omitNotification"] = bool(self.omit_notification)
elif include_empty:
result["omitNotification"] = False
if hasattr(self, "platform"):
result["platform"] = str(self.platform)
elif include_empty:
result["platform"] = ""
if hasattr(self, "recurring_payment_order_no"):
result["recurringPaymentOrderNo"] = str(self.recurring_payment_order_no)
elif include_empty:
result["recurringPaymentOrderNo"] = ""
if hasattr(self, "region"):
result["region"] = str(self.region)
elif include_empty:
result["region"] = ""
if hasattr(self, "return_url"):
result["returnUrl"] = str(self.return_url)
elif include_empty:
result["returnUrl"] = ""
if hasattr(self, "sandbox"):
result["sandbox"] = bool(self.sandbox)
elif include_empty:
result["sandbox"] = False
if hasattr(self, "sku"):
result["sku"] = str(self.sku)
elif include_empty:
result["sku"] = ""
if hasattr(self, "subscription_id"):
result["subscriptionId"] = str(self.subscription_id)
elif include_empty:
result["subscriptionId"] = ""
return result
# endregion to methods
# region static methods
@classmethod
def create(
    cls,
    description: str,
    ext_order_no: str,
    price: int,
    title: str,
    currency_code: Optional[str] = None,
    currency_namespace: Optional[str] = None,
    custom_parameters: Optional[Dict[str, Any]] = None,
    ext_user_id: Optional[str] = None,
    item_type: Optional[Union[str, ItemTypeEnum]] = None,
    language: Optional[str] = None,
    metadata: Optional[Dict[str, str]] = None,
    notify_url: Optional[str] = None,
    omit_notification: Optional[bool] = None,
    platform: Optional[str] = None,
    recurring_payment_order_no: Optional[str] = None,
    region: Optional[str] = None,
    return_url: Optional[str] = None,
    sandbox: Optional[bool] = None,
    sku: Optional[str] = None,
    subscription_id: Optional[str] = None,
) -> PaymentOrderCreate:
    """Build a PaymentOrderCreate from keyword values.

    The four required fields are always assigned. Optional fields are
    attached only when a non-None value is supplied, so that the
    ``hasattr`` checks used elsewhere in this model keep working.
    """
    instance = cls()
    # Mandatory attributes: assigned unconditionally.
    instance.description = description
    instance.ext_order_no = ext_order_no
    instance.price = price
    instance.title = title
    # Optional attributes: attach only the ones actually provided.
    optional_values = {
        "currency_code": currency_code,
        "currency_namespace": currency_namespace,
        "custom_parameters": custom_parameters,
        "ext_user_id": ext_user_id,
        "item_type": item_type,
        "language": language,
        "metadata": metadata,
        "notify_url": notify_url,
        "omit_notification": omit_notification,
        "platform": platform,
        "recurring_payment_order_no": recurring_payment_order_no,
        "region": region,
        "return_url": return_url,
        "sandbox": sandbox,
        "sku": sku,
        "subscription_id": subscription_id,
    }
    for attr_name, attr_value in optional_values.items():
        if attr_value is not None:
            setattr(instance, attr_name, attr_value)
    return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> PaymentOrderCreate:
instance = cls()
if not dict_:
return instance
if "description" in dict_ and dict_["description"] is not None:
instance.description = str(dict_["description"])
elif include_empty:
instance.description = ""
if "extOrderNo" in dict_ and dict_["extOrderNo"] is not None:
instance.ext_order_no = str(dict_["extOrderNo"])
elif include_empty:
instance.ext_order_no = ""
if "price" in dict_ and dict_["price"] is not None:
instance.price = int(dict_["price"])
elif include_empty:
instance.price = 0
if "title" in dict_ and dict_["title"] is not None:
instance.title = str(dict_["title"])
elif include_empty:
instance.title = ""
if "currencyCode" in dict_ and dict_["currencyCode"] is not None:
instance.currency_code = str(dict_["currencyCode"])
elif include_empty:
instance.currency_code = ""
if "currencyNamespace" in dict_ and dict_["currencyNamespace"] is not None:
instance.currency_namespace = str(dict_["currencyNamespace"])
elif include_empty:
instance.currency_namespace = ""
if "customParameters" in dict_ and dict_["customParameters"] is not None:
instance.custom_parameters = {str(k0): v0 for k0, v0 in dict_["customParameters"].items()}
elif include_empty:
instance.custom_parameters = {}
if "extUserId" in dict_ and dict_["extUserId"] is not None:
instance.ext_user_id = str(dict_["extUserId"])
elif include_empty:
instance.ext_user_id = ""
if "itemType" in dict_ and dict_["itemType"] is not None:
instance.item_type = str(dict_["itemType"])
elif include_empty:
instance.item_type = Union[str, ItemTypeEnum]()
if "language" in dict_ and dict_["language"] is not None:
instance.language = str(dict_["language"])
elif include_empty:
instance.language = ""
if "metadata" in dict_ and dict_["metadata"] is not None:
instance.metadata = {str(k0): str(v0) for k0, v0 in dict_["metadata"].items()}
elif include_empty:
instance.metadata = {}
if "notifyUrl" in dict_ and dict_["notifyUrl"] is not None:
instance.notify_url = str(dict_["notifyUrl"])
elif include_empty:
instance.notify_url = ""
if "omitNotification" in dict_ and dict_["omitNotification"] is not None:
instance.omit_notification = bool(dict_["omitNotification"])
elif include_empty:
instance.omit_notification = False
if "platform" in dict_ and dict_["platform"] is not None:
instance.platform = str(dict_["platform"])
elif include_empty:
instance.platform = ""
if "recurringPaymentOrderNo" in dict_ and dict_["recurringPaymentOrderNo"] is not None:
| |
<reponame>zhuokaizhao/artifice
"""Implements artifice's detection scheme from end to end.
"""
import os
from time import time
import itertools
import numpy as np
from stringcase import snakecase
import tensorflow as tf
from tensorflow import keras
from artifice.log import logger
from artifice import dat
from artifice import utils
from artifice import lay
def _get_optimizer(learning_rate):
    """Pick the Adadelta implementation matching the current execution mode.

    :param learning_rate: learning rate for the optimizer
    :returns: a tf.train optimizer under eager execution, a keras optimizer
    otherwise
    """
    # NOTE(review): returning the tf.train (v1) optimizer *in* eager mode
    # looks inverted -- confirm against the TF version this targets.
    if not tf.executing_eagerly():
        return keras.optimizers.Adadelta(learning_rate)
    return tf.train.AdadeltaOptimizer(learning_rate)
def _update_hist(a, b):
"""Concat the lists in b onto the lists in a.
If b has elements that a does not, includes them. Behavior is undefined for
elements that are not lists.
:param a:
:param b:
:returns:
:rtype:
"""
c = a.copy()
for k, v in b.items():
if isinstance(v, list) and isinstance(c.get(k), list):
c[k] += v
else:
c[k] = v
return c
def _unbatch_outputs(outputs):
"""Essentially transpose the batch dimension to the outer dimension outputs.
:param outputs: batched outputs of the model, like
[[pose 0, pose 1, ....],
[output_0 0, output_0 1, ...],
[output_1 0, output_1 1, ...]]
:returns: result after unbatching, like
[[pose 0, output_0 0, output_1 0, ...],
[pose 1, output_0 1, output_1 1, ...],
...]
"""
unbatched_outputs = []
for i in range(outputs[0].shape[0]):
unbatched_outputs.append([output[i] for output in outputs])
return unbatched_outputs
def crop(inputs, shape=None, size=None):
    """Center-crop `inputs` (NHWC tensor) to `size` or to `shape`'s H,W.

    :param inputs: 4D input tensor
    :param shape: full target shape; its dims 1:3 supply `size` when `size`
    is None
    :param size: (height, width) to crop to
    :returns: cropped tensor
    """
    if size is None:
        assert shape is not None, 'one of `size` or `shape` must be provided'
        size = shape[1:3]
    # Any odd remainder is taken from the bottom/right (floor on top/left,
    # ceiling on bottom/right), matching a centered crop.
    height_diff = int(inputs.shape[1]) - size[0]
    width_diff = int(inputs.shape[2]) - size[1]
    cropping = ((height_diff // 2, height_diff - height_diff // 2),
                (width_diff // 2, width_diff - width_diff // 2))
    return keras.layers.Cropping2D(cropping=cropping,
                                   input_shape=inputs.shape)(inputs)
def _crop_like_conv(inputs,
                    kernel_size=[3, 3],
                    padding='valid'):
    """Crop the H,W dims of `inputs` as a stride-1 convolution would.

    :param inputs: 4D input tensor
    :param kernel_size: (kh, kw) of the convolution being mimicked
    :param padding: 'same' (no crop) or 'valid'
    :returns: cropped tensor (or `inputs` unchanged for 'same')
    """
    assert padding in {'same', 'valid'}
    if padding == 'same':
        return inputs
    # A 'valid' conv removes kernel-1 rows/cols in total; the larger half
    # comes off the top/left, the smaller half off the bottom/right.
    lost_h = kernel_size[0] - 1
    lost_w = kernel_size[1] - 1
    cropping = (((lost_h + 1) // 2, lost_h // 2),
                ((lost_w + 1) // 2, lost_w // 2))
    return keras.layers.Cropping2D(cropping=cropping,
                                   input_shape=inputs.shape)(inputs)
def conv(inputs,
         filters,
         kernel_shape=[3, 3],
         activation='relu',
         padding='valid',
         kernel_initializer='glorot_normal',
         norm=True,
         mask=None,
         batch_size=None,
         activation_name=None,
         norm_name=None,
         **kwargs):
    """Convolution block: (sparse or dense) conv -> batch norm -> activation.

    :param inputs: input tensor
    :param filters: number of filters or kernels
    :param kernel_shape: spatial kernel dimensions
    :param activation: keras activation to use; None skips the activation
    :param padding: 'valid' or 'same'
    :param kernel_initializer: initializer for the conv kernel
    :param norm: whether or not to perform batch normalization on the output
    :param mask: if not None, performs a sparse convolution with mask
    :param batch_size: needed for sparse layers. Required if mask is not None
    :param activation_name: layer name for the activation op
    :param norm_name: layer name for the batch-norm op
    Other kwargs passed to the convolutional layer.
    :returns: output tensor
    """
    # Options common to the dense and sparse variants. The conv itself is
    # linear (activation=None, no bias); norm/activation are applied after.
    shared = dict(activation=None,
                  padding=padding,
                  use_bias=False,
                  kernel_initializer=kernel_initializer,
                  **kwargs)
    if mask is None:
        outputs = keras.layers.Conv2D(filters, kernel_shape,
                                      **shared)(inputs)
    else:
        outputs = lay.SparseConv2D(filters, kernel_shape,
                                   batch_size=batch_size,
                                   **shared)([inputs, mask])
    if norm:
        outputs = keras.layers.BatchNormalization(name=norm_name)(outputs)
    if activation is not None:
        outputs = keras.layers.Activation(activation,
                                          name=activation_name)(outputs)
    return outputs
def conv_upsample(inputs,
                  filters,
                  size=2,
                  activation='relu',
                  mask=None,
                  batch_size=None,
                  **kwargs):
    """Upsample dims 1,2 of `inputs` with a (possibly sparse) transpose conv.

    :param inputs: input tensor
    :param filters: number of filters
    :param size: int or 2-list of ints; used as both kernel size and stride,
    so spatial dims scale by exactly `size`
    :param activation: relu by default
    :param mask: if not None, use a SparseConv2DTranspose layer
    :param batch_size: required by the sparse layer when mask is not None
    Additional kwargs passed to the conv transpose layer.
    :returns: upsampled tensor
    """
    size = utils.listify(size, 2)
    shared = dict(strides=size,
                  padding='same',
                  activation=activation,
                  use_bias=False,
                  **kwargs)
    if mask is None:
        return keras.layers.Conv2DTranspose(filters, size,
                                            **shared)(inputs)
    return lay.SparseConv2DTranspose(filters, size,
                                     batch_size=batch_size,
                                     **shared)([inputs, mask])
def upsample(inputs, size=2, interpolation='nearest'):
    """Upsample the spatial dims of `inputs` by `size` via interpolation.

    :param inputs: input tensor
    :param size: int or 2-list of ints to scale the spatial dims by
    :param interpolation: interpolation mode for UpSampling2D
    :returns: upsampled tensor
    """
    resize = keras.layers.UpSampling2D(size, interpolation=interpolation)
    return resize(inputs)
class Builder(type):
    """Metaclass that invokes `build()` on every new instance.

    `build()` runs after `__init__` completes but before the instance is
    handed back to the caller, so subclasses can finish their own setup in
    `__init__` and rely on `build()` seeing the fully-initialized object.
    """

    def __call__(cls, *args, **kwargs):
        instance = super().__call__(*args, **kwargs)
        instance.build()
        return instance
class ArtificeModel(metaclass=Builder):
"""A wrapper around keras models.
If loading an existing model, this class is sufficient, since the save file
will have the model topology and optimizer. Otherwise, a subclass should
implement the `forward()` and `compile()` methods, which are called during
__init__. In this case, super().__init__() should be called last in the
subclass __init__() method.
"""
def __init__(self, input_shape, model_dir: str = '.', learning_rate: float = 0.1,
             overwrite: bool = False):
    """Describe a model using keras' functional API.

    Only bookkeeping happens here; the keras model itself is constructed in
    `build()` (invoked by the `Builder` metaclass after __init__ returns),
    so all other instantiation should be finished here.

    :param input_shape: shape of the tensor input into the model (passed to
    keras.layers.Input in build())
    :param model_dir: directory to save the model. Default is cwd.
    :param learning_rate: learning rate, stored for the subclass `compile()`
    implementation -- confirm usage in concrete subclasses
    :param overwrite: prefer to create a new model rather than load an existing
    one in `model_dir`. Note that if a subclass uses overwrite=False, then the
    loaded architecture may differ from the stated architecture in the
    subclass, although the structure of the saved model names should prevent
    this.
    """
    self.input_shape = input_shape
    self.overwrite = overwrite
    self.model_dir = model_dir
    self.learning_rate = learning_rate
    # Model name derives from the concrete subclass name, e.g. "MyNet" -> "my_net".
    self.name = snakecase(type(self).__name__).lower()
    # All artifacts live in model_dir, keyed by the derived model name.
    self.model_path = os.path.join(self.model_dir, f"{self.name}.hdf5")
    self.checkpoint_path = os.path.join(
        self.model_dir, f"{self.name}_ckpt.hdf5")
    self.history_path = os.path.join(
        self.model_dir, f"{self.name}_history.json")
def build(self):
    """Construct and compile the keras model.

    Runs automatically (via the Builder metaclass) once every subclass
    __init__ has finished. Loads existing weights unless `overwrite` is set.
    """
    input_layer = keras.layers.Input(self.input_shape)
    self.model = keras.Model(input_layer, self.forward(input_layer))
    self.compile()
    if not self.overwrite:
        self.load_weights()
def __str__(self):
    """Human-readable summary: model name plus one line per layer."""
    lines = [f"{self.name}:"]
    for layer in self.model.layers:
        lines.append(
            f"layer:{layer.input_shape} -> {layer.output_shape}:{layer.name}")
    # Trailing newline kept for parity with the line-per-layer format.
    return "\n".join(lines) + "\n"
@property
def layers(self):
    """Layers of the wrapped keras model."""
    return self.model.layers
def forward(self, inputs):
    """Map input tensor(s) to output tensor(s); defines the architecture.

    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError("subclasses should implement")
def compile(self):
    """Compile self.model (optimizer, loss, metrics).

    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError("subclasses should implement")
@property
def callbacks(self):
    """Keras callbacks used during fitting: checkpoint weights each epoch."""
    return [keras.callbacks.ModelCheckpoint(
        self.checkpoint_path, verbose=1, save_weights_only=True)]
def load_weights(self, checkpoint_path=None):
    """Update the model weights from the checkpoint file, if one exists.

    :param checkpoint_path: checkpoint path to use. If not provided, uses the
    class name to construct a checkpoint path.
    """
    path = self.checkpoint_path if checkpoint_path is None else checkpoint_path
    if not os.path.exists(path):
        logger.info(f"no checkpoint at {path}")
        return
    self.model.load_weights(path, by_name=True)  # todo: by_name?
    logger.info(f"loaded model weights from {path}")
def save(self, filename=None, overwrite=True):
    """Save the model (topology + weights, no optimizer) to `filename`.

    :param filename: target path; defaults to this model's standard path.
    :param overwrite: passed through to keras.models.save_model.
    """
    target = self.model_path if filename is None else filename
    return keras.models.save_model(self.model, target, overwrite=overwrite,
                                   include_optimizer=False)
# todo: would like include_optimizer=True, but the custom loss function can't
# be found in the keras library. Look into it during training. For now, we're
# fine with just weights in the checkpoint file.
def fit(self, art_data, hist=None, cache=False, **kwargs):
    """Thin wrapper around model.fit(). Preferred method is `train()`.

    :param art_data: ArtificeData set providing the training input
    :param hist: existing hist to extend. If None, starts from scratch. Use
    train for loading from existing hist.
    :param cache: cache the dataset.
    :returns: updated, json-serializable history dict (also saved to disk)
    :rtype: dict
    """
    # Always append the checkpointing callbacks to whatever the caller gave.
    kwargs['callbacks'] = kwargs.get('callbacks', []) + self.callbacks
    fit_result = self.model.fit(art_data.training_input(cache=cache),
                                steps_per_epoch=art_data.steps_per_epoch,
                                **kwargs)
    new_hist = utils.jsonable(fit_result.history)
    if hist is not None:
        new_hist = _update_hist(hist, new_hist)
    utils.json_save(self.history_path, new_hist)
    return new_hist
def train(self, art_data, initial_epoch=0, epochs=1, seconds=0,
          **kwargs):
    """Fits the model, saving it along the way, and reloads every epoch.

    :param art_data: ArtificeData set
    :param initial_epoch: epoch that training is starting from
    :param epochs: epoch number to stop at. If -1, training continues forever.
    :param seconds: seconds after which to stop reloading every epoch. If -1,
    reload is never stopped. If 0, dataset is loaded only once, at beginning.
    :returns: history dictionary
    """
    # Resume saved history only when continuing an earlier run.
    if (initial_epoch > 0
        and os.path.exists(self.history_path)
        and not self.overwrite):
        hist = utils.json_load(self.history_path)
    else:
        hist = {}
    start_time = time()
    epoch = initial_epoch

    def _keep_reloading():
        # Reload per-epoch while within the `seconds` budget (forever if -1;
        # never when seconds == 0).
        return seconds == -1 or (seconds > 0 and time() - start_time < seconds)

    # BUG FIX: the original condition `time() - start_time > seconds > 0`
    # was inverted with respect to the docstring -- it skipped reloading at
    # the start of training (elapsed < seconds) and never looped at all for
    # seconds == -1.
    while epoch != epochs and _keep_reloading():
        logger.info("reloading dataset (not cached)...")
        hist = self.fit(art_data, hist=hist, initial_epoch=epoch,
                        epochs=(epoch + 1), **kwargs)
        epoch += 1
    # Finish the remaining epochs in one fit call, without reloading.
    if epoch != epochs:
        hist = self.fit(art_data, hist=hist, initial_epoch=epoch,
                        epochs=epochs, **kwargs)
    self.save()
    return hist
def predict(self, art_data, multiscale=False):
    """Run prediction, reassembling tiles, with the Artifice data.

    :param art_data: ArtificeData object
    :param multiscale: subclass-specific flag; unused here -- confirm
    semantics in the implementing subclass
    :returns: iterator over predictions
    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError("subclasses should implement.")
def predict_visualization(self, art_data):
    """Run prediction, reassembling tiles, with the ArtificeData.

    Intended for visualization. Implementation will depend on the model.

    :param art_data: ArtificeData object
    :returns: iterator over (image, field, prediction)
    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError()
def predict_outputs(self, art_data):
    """Run prediction for single tiled images with the Artifice data.

    Returns the raw outputs, with no prediction. Depends on subclass
    implementation.

    :param art_data: ArtificeData object
    :returns: iterator over (tile, prediction, model_outputs)
    :raises NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError("subclasses should implement")
def evaluate(self, art_data):
"""Run evaluation for object detection with the ArtificeData object.
Depends on the structure of the model.
:param art_data: ArtificeData object
:returns: `errors, total_num_failed` | |
"""Manager utility implementations of authorization managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from ..osid import managers as osid_managers
from ..osid.osid_errors import NullArgument
from ..osid.osid_errors import Unimplemented
from ..type.objects import TypeList
from dlkit.abstract_osid.authorization import managers as abc_authorization_managers
class AuthorizationProfile(abc_authorization_managers.AuthorizationProfile, osid_managers.OsidProfile):
    """The ``AuthorizationProfile`` describes the interoperability among authorization services."""
    # NOTE(review): this is the "manager utility" stub profile -- every
    # capability query below answers False. Concrete service implementations
    # override the relevant ``supports_*`` methods to advertise what they
    # actually provide.

    def supports_visible_federation(self):
        """Tests if federation is visible.
        return: (boolean) - ``true`` if visible federation is supported
        ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    # -- Authorization service capabilities --------------------------------

    def supports_authorization(self):
        """Tests for the availability of an authorization service which is the basic service for checking authorizations.
        return: (boolean) - ``true`` if authorization is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_lookup(self):
        """Tests if an authorization lookup service is supported.
        An authorization lookup service defines methods to access
        authorizations.
        return: (boolean) - true if authorization lookup is supported,
        false otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_query(self):
        """Tests if an authorization query service is supported.
        return: (boolean) - ``true`` if authorization query is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_search(self):
        """Tests if an authorization search service is supported.
        return: (boolean) - ``true`` if authorization search is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_admin(self):
        """Tests if an authorization administrative service is supported.
        return: (boolean) - ``true`` if authorization admin is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_notification(self):
        """Tests if authorization notification is supported.
        Messages may be sent when authorizations are created, modified,
        or deleted.
        return: (boolean) - ``true`` if authorization notification is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_vault(self):
        """Tests if an authorization to vault lookup session is available.
        return: (boolean) - ``true`` if authorization vault lookup
        session is supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_vault_assignment(self):
        """Tests if an authorization to vault assignment session is available.
        return: (boolean) - ``true`` if authorization vault assignment
        is supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_authorization_smart_vault(self):
        """Tests if an authorization smart vaulting session is available.
        return: (boolean) - ``true`` if authorization smart vaulting is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    # -- Function service capabilities --------------------------------------

    def supports_function_lookup(self):
        """Tests if a function lookup service is supported.
        A function lookup service defines methods to access
        authorization functions.
        return: (boolean) - ``true`` if function lookup is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_query(self):
        """Tests if a function query service is supported.
        return: (boolean) - ``true`` if function query is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_search(self):
        """Tests if a function search service is supported.
        return: (boolean) - ``true`` if function search is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_admin(self):
        """Tests if a function administrative service is supported.
        return: (boolean) - ``true`` if function admin is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_notification(self):
        """Tests if function notification is supported.
        Messages may be sent when functions are created, modified, or
        deleted.
        return: (boolean) - ``true`` if function notification is
        supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_vault(self):
        """Tests if a function to vault lookup session is available.
        return: (boolean) - ``true`` if function vault lookup session is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_vault_assignment(self):
        """Tests if a function to vault assignment session is available.
        return: (boolean) - ``true`` if function vault assignment is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_function_smart_vault(self):
        """Tests if a function smart vaulting session is available.
        return: (boolean) - ``true`` if function smart vaulting is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    # -- Qualifier service capabilities --------------------------------------

    def supports_qualifier_lookup(self):
        """Tests if a qualifier lookup service is supported.
        A function lookup service defines methods to access
        authorization qualifiers.
        return: (boolean) - ``true`` if qualifier lookup is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_query(self):
        """Tests if a qualifier query service is supported.
        return: (boolean) - ``true`` if qualifier query is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_search(self):
        """Tests if a qualifier search service is supported.
        return: (boolean) - ``true`` if qualifier search is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_admin(self):
        """Tests if a qualifier administrative service is supported.
        return: (boolean) - ``true`` if qualifier admin is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_notification(self):
        """Tests if qualifier notification is supported.
        Messages may be sent when qualifiers are created, modified, or
        deleted.
        return: (boolean) - ``true`` if qualifier notification is
        supported ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_hierarchy(self):
        """Tests if a qualifier hierarchy traversal is supported.
        return: (boolean) - ``true`` if a qualifier hierarchy traversal
        is supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_hierarchy_design(self):
        """Tests if qualifier hierarchy design is supported.
        return: (boolean) - ``true`` if a qualifier hierarchy design is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_vault(self):
        """Tests if a qualifier to vault lookup session is available.
        return: (boolean) - ``true`` if qualifier vault lookup session
        is supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_vault_assignment(self):
        """Tests if a qualifier to vault assignment session is available.
        return: (boolean) - ``true`` if qualifier vault assignment is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_qualifier_smart_vault(self):
        """Tests if a qualifier smart vaulting session is available.
        return: (boolean) - ``true`` if qualifier smart vault session is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    # -- Vault service capabilities ------------------------------------------

    def supports_vault_lookup(self):
        """Tests if a vault lookup service is supported.
        A vault lookup service defines methods to access authorization
        vaults.
        return: (boolean) - ``true`` if function lookup is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        # NOTE(review): docstring says "function lookup" -- likely a
        # copy/paste typo inherited from the OSID spec text; confirm
        # before changing, since these docstrings mirror the spec.
        return False

    def supports_vault_query(self):
        """Tests if a vault query service is supported.
        return: (boolean) - ``true`` if vault query is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_vault_search(self):
        """Tests if a vault search service is supported.
        return: (boolean) - ``true`` if vault search is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_vault_admin(self):
        """Tests if a vault administrative service is supported.
        return: (boolean) - ``true`` if vault admin is supported,
        ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_vault_notification(self):
        """Tests if vault notification is supported.
        Messages may be sent when vaults are created, modified, or
        deleted.
        return: (boolean) - ``true`` if vault notification is supported
        ``,`` ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False

    def supports_vault_hierarchy(self):
        """Tests if a vault hierarchy traversal is supported.
        return: (boolean) - ``true`` if a vault hierarchy traversal is
        supported, ``false`` otherwise
        *compliance: mandatory -- This method must be implemented.*
        """
        return False
def supports_vault_hierarchy_design(self):
"""Tests if vault hierarchy design is supported.
return: (boolean) - ``true`` if a function hierarchy design is
| |
self.assertEqual(cert_pattern, f"software:x_misp_certificate = '{certificate}'")
self.assertEqual(domain_pattern, f"software:x_misp_domain = '{domain}'")
self._populate_documentation(
misp_object = event['Event']['Object'][0],
indicator = self.parser.stix_objects[-1]
)
def test_event_with_android_app_observable_object(self):
    """Android-app MISP object exports as a 'software' observable with
    name/certificate/domain mapped onto standard and x_misp_* properties."""
    event = get_event_with_android_app_object()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    software = observable[0]
    object_ref = object_refs[0]
    # Attribute order in the fixture: name, certificate, domain.
    name, certificate, domain = (attribute['value'] for attribute in attributes)
    self.assertEqual(object_ref, grouping_refs[0])
    self.assertEqual(software.type, 'software')
    # The observable id must reuse the MISP object's UUID.
    self._assert_multiple_equal(
        software.id,
        object_ref,
        f"software--{misp_object['uuid']}"
    )
    self.assertEqual(software.name, name)
    self.assertEqual(software.x_misp_certificate, certificate)
    self.assertEqual(software.x_misp_domain, domain)
    self._populate_documentation(
        misp_object = misp_object,
        observed_data = self.parser.stix_objects[-2:]
    )
def test_event_with_asn_indicator_object(self):
    """ASN MISP object exports as an indicator whose pattern covers number,
    name, and both announced subnets of an autonomous-system."""
    event = get_event_with_asn_object()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    asn, description, subnet1, subnet2 = (attribute['value'] for attribute in attributes)
    # Pattern is of the form "[a AND b AND c AND d]"; strip the brackets.
    asn_pattern, description_pattern, subnet1_pattern, subnet2_pattern = pattern[1:-1].split(' AND ')
    # MISP stores the ASN as e.g. "AS1234"; the pattern uses the bare number.
    self.assertEqual(asn_pattern, f"autonomous-system:number = '{int(asn[2:])}'")
    self.assertEqual(description_pattern, f"autonomous-system:name = '{description}'")
    self.assertEqual(
        subnet1_pattern,
        f"autonomous-system:x_misp_subnet_announced = '{subnet1}'"
    )
    self.assertEqual(
        subnet2_pattern,
        f"autonomous-system:x_misp_subnet_announced = '{subnet2}'"
    )
    self._populate_documentation(
        misp_object = event['Event']['Object'][0],
        indicator = self.parser.stix_objects[-1]
    )
def test_event_with_asn_observable_object(self):
    """ASN MISP object exports as an 'autonomous-system' observable; both
    announced subnets are collected into one x_misp list property."""
    event = get_event_with_asn_object()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    asn, description, subnet1, subnet2 = (attribute['value'] for attribute in attributes)
    autonomous_system = observable[0]
    # The observable id must reuse the MISP object's UUID.
    self._assert_multiple_equal(
        autonomous_system.id,
        grouping_refs[0],
        object_refs[0],
        f"autonomous-system--{misp_object['uuid']}"
    )
    self.assertEqual(autonomous_system.type, 'autonomous-system')
    # "ASxxxx" prefix stripped to the bare number.
    self.assertEqual(autonomous_system.number, int(asn[2:]))
    self.assertEqual(autonomous_system.name, description)
    self.assertEqual(
        autonomous_system.x_misp_subnet_announced,
        [subnet1, subnet2]
    )
    self._populate_documentation(
        misp_object = misp_object,
        observed_data = self.parser.stix_objects[-2:]
    )
def test_event_with_attack_pattern_object(self):
    """Attack-pattern MISP object exports as identity + grouping +
    attack-pattern SDO, with ids and refs tied together."""
    event = get_event_with_attack_pattern_object()
    orgc = event['Event']['Orgc']
    misp_object = deepcopy(event['Event']['Object'][0])
    self.parser.parse_misp_event(event)
    stix_objects = self.parser.stix_objects
    self._check_spec_versions(stix_objects)
    # Expected bundle layout: creator identity, grouping, then the SDO.
    identity, grouping, attack_pattern = stix_objects
    identity_id = self._check_identity_features(
        identity,
        orgc,
        self._datetime_from_timestamp(event['Event']['timestamp'])
    )
    args = (grouping, event['Event'], identity_id)
    object_ref = self._check_grouping_features(*args)[0]
    self._assert_multiple_equal(
        attack_pattern.id,
        grouping['object_refs'][0],
        object_ref,
        f"attack-pattern--{misp_object['uuid']}"
    )
    self._check_attack_pattern_object(attack_pattern, misp_object, identity_id)
    self._populate_documentation(misp_object=misp_object, attack_pattern=attack_pattern)
def test_event_with_course_of_action_object(self):
    """Course-of-action MISP object exports as identity + grouping +
    course-of-action SDO, with ids and refs tied together."""
    event = get_event_with_course_of_action_object()
    orgc = event['Event']['Orgc']
    misp_object = deepcopy(event['Event']['Object'][0])
    self.parser.parse_misp_event(event)
    stix_objects = self.parser.stix_objects
    self._check_spec_versions(stix_objects)
    # Expected bundle layout: creator identity, grouping, then the SDO.
    identity, grouping, course_of_action = stix_objects
    identity_id = self._check_identity_features(
        identity,
        orgc,
        self._datetime_from_timestamp(event['Event']['timestamp'])
    )
    args = (grouping, event['Event'], identity_id)
    object_ref = self._check_grouping_features(*args)[0]
    self._assert_multiple_equal(
        course_of_action.id,
        grouping['object_refs'][0],
        object_ref,
        f"course-of-action--{misp_object['uuid']}"
    )
    self._check_course_of_action_object(course_of_action, misp_object, identity_id)
    self._populate_documentation(misp_object=misp_object, course_of_action=course_of_action)
def test_event_with_cpe_asset_indicator_object(self):
    """CPE-asset MISP object exports as an indicator pattern over the
    'software' SCO (cpe/languages/name/vendor/version + x_misp_description)."""
    event = get_event_with_cpe_asset_object()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    # Attribute order in the fixture mirrors the pattern component order.
    cpe, language, product, vendor, version, description = (attribute['value'] for attribute in attributes)
    cpe_pattern, language_pattern, name, vendor_pattern, version_pattern, description_pattern = pattern[1:-1].split(' AND ')
    self.assertEqual(cpe_pattern, f"software:cpe = '{cpe}'")
    self.assertEqual(language_pattern, f"software:languages = '{language}'")
    self.assertEqual(name, f"software:name = '{product}'")
    self.assertEqual(vendor_pattern, f"software:vendor = '{vendor}'")
    self.assertEqual(version_pattern, f"software:version = '{version}'")
    self.assertEqual(description_pattern, f"software:x_misp_description = '{description}'")
    self._populate_documentation(
        misp_object = event['Event']['Object'][0],
        indicator = self.parser.stix_objects[-1]
    )
def test_event_with_cpe_asset_observable_object(self):
    """CPE-asset MISP object exports as a 'software' observable; the single
    language value is wrapped into the list-valued `languages` property."""
    event = get_event_with_cpe_asset_object()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    cpe, language, product, vendor, version, description = (attribute['value'] for attribute in attributes)
    software = observable[0]
    # The observable id must reuse the MISP object's UUID.
    self._assert_multiple_equal(
        software.id,
        grouping_refs[0],
        object_refs[0],
        f"software--{misp_object['uuid']}"
    )
    self.assertEqual(software.type, 'software')
    self.assertEqual(software.cpe, cpe)
    self.assertEqual(software.name, product)
    self.assertEqual(software.languages, [language])
    self.assertEqual(software.vendor, vendor)
    self.assertEqual(software.version, version)
    self.assertEqual(software.x_misp_description, description)
    self._populate_documentation(
        misp_object = misp_object,
        observed_data = self.parser.stix_objects[-2:]
    )
def test_event_with_credential_indicator_object(self):
    """Check the STIX pattern built from a MISP credential object."""
    event = get_event_with_credential_object()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    pairs = [(attribute['object_relation'], attribute['value']) for attribute in attributes]
    text, username, password = pairs[0], pairs[1], pairs[2]
    # `text` maps to a custom property, so it joins the remaining attributes
    custom_attributes = [text] + pairs[3:]
    username_pattern, password_pattern, *custom_patterns = pattern[1:-1].split(' AND ')
    self.assertEqual(username_pattern, f"user-account:user_id = '{username[1]}'")
    self.assertEqual(password_pattern, f"user-account:credential = '{password[1]}'")
    # Remaining pattern parts are custom x_misp_* comparisons, in order
    for pattern_part, (feature, value) in zip(custom_patterns, custom_attributes):
        self.assertEqual(pattern_part, f"user-account:x_misp_{feature} = '{value}'")
    self._populate_documentation(
        misp_object=event['Event']['Object'][0],
        indicator=self.parser.stix_objects[-1]
    )
def test_event_with_credential_observable_object(self):
    """Check the STIX User Account observable built from a MISP credential object."""
    event = get_event_with_credential_object()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    pairs = [(attribute['object_relation'], attribute['value']) for attribute in attributes]
    text, username, password = pairs[0], pairs[1], pairs[2]
    # `text` maps to a custom property, so it joins the remaining attributes
    custom_attributes = [text] + pairs[3:]
    user_account = observable[0]
    # The SCO id must match both ref lists and be derived from the object uuid
    self._assert_multiple_equal(
        user_account.id,
        grouping_refs[0],
        object_refs[0],
        f"user-account--{misp_object['uuid']}"
    )
    self.assertEqual(user_account.type, 'user-account')
    self.assertEqual(user_account.user_id, username[1])
    self.assertEqual(user_account.credential, password[1])
    # Any remaining attribute becomes a custom x_misp_* property
    for feature, value in custom_attributes:
        self.assertEqual(getattr(user_account, f'x_misp_{feature}'), value)
    self._populate_documentation(
        misp_object=misp_object,
        observed_data=self.parser.stix_objects[-2:]
    )
def test_event_with_custom_objects(self):
    """Check that unsupported MISP objects are exported as STIX Custom objects."""
    event = get_event_with_custom_objects()
    orgc = event['Event']['Orgc']
    misp_objects = deepcopy(event['Event']['Object'])
    self.parser.parse_misp_event(event)
    stix_objects = self.parser.stix_objects
    self._check_spec_versions(stix_objects)
    identity, grouping, *custom_objects = stix_objects
    event_timestamp = self._datetime_from_timestamp(event['Event']['timestamp'])
    identity_id = self._check_identity_features(identity, orgc, event_timestamp)
    object_refs = self._check_grouping_features(grouping, event['Event'], identity_id)
    # Each custom object is validated against its MISP source and grouping ref
    for misp_object, custom_object, object_ref in zip(misp_objects, custom_objects, object_refs):
        self._run_custom_object_tests(misp_object, custom_object, object_ref, identity_id)
def test_event_with_domain_ip_indicator_object(self):
    """Check the STIX pattern built from a custom MISP domain-ip object."""
    event = get_event_with_domain_ip_object_custom()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    _domain, _hostname, _ip, _port = (attribute['value'] for attribute in attributes)
    domain_, hostname_, ip_, port_ = pattern[1:-1].split(' AND ')
    # Each pattern part must mirror the corresponding MISP attribute value
    for observed, expected in (
        (domain_, f"domain-name:value = '{_domain}'"),
        (hostname_, f"domain-name:x_misp_hostname = '{_hostname}'"),
        (ip_, f"domain-name:resolves_to_refs[*].value = '{_ip}'"),
        (port_, f"domain-name:x_misp_port = '{_port}'")
    ):
        self.assertEqual(observed, expected)
    self._populate_documentation(
        misp_object=event['Event']['Object'][0],
        indicator=self.parser.stix_objects[-1]
    )
def test_event_with_domain_ip_observable_object_custom(self):
    """Check the Domain Name & IPv4 observables built from a custom domain-ip object."""
    event = get_event_with_domain_ip_object_custom()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    _domain, hostname, _ip, port = attributes
    domain_id, ip_id = grouping_refs
    domain_ref, ip_ref = object_refs
    domain_name, address = observable
    # Domain SCO id must match both ref lists and derive from the object uuid
    self._assert_multiple_equal(
        domain_name.id,
        domain_id,
        domain_ref,
        f"domain-name--{misp_object['uuid']}"
    )
    self.assertEqual(domain_name.type, 'domain-name')
    self.assertEqual(domain_name.value, _domain['value'])
    self.assertEqual(domain_name.x_misp_hostname, hostname['value'])
    self.assertEqual(domain_name.x_misp_port, port['value'])
    # The domain must resolve to the IP address observable
    self._assert_multiple_equal(
        address.id,
        domain_name.resolves_to_refs[0],
        ip_id,
        ip_ref,
        f"ipv4-addr--{_ip['uuid']}"
    )
    self.assertEqual(address.type, 'ipv4-addr')
    self.assertEqual(address.value, _ip['value'])
    self._populate_documentation(
        misp_object=misp_object,
        observed_data=self.parser.stix_objects[-3:]
    )
def test_event_with_domain_ip_observable_object_standard(self):
    """Check the observables built from a standard domain-ip object (2 domains, 2 IPs)."""
    event = get_event_with_domain_ip_object_standard()
    attributes, grouping_refs, object_refs, observable = self._run_observable_from_object_tests(event)
    _domain1, _domain2, _ip1, _ip2 = (
        (attribute['value'], attribute['uuid']) for attribute in attributes
    )
    for grouping_ref, object_ref in zip(grouping_refs, object_refs):
        self.assertEqual(grouping_ref, object_ref)
    ip1_ref, ip2_ref, domain1_ref, domain2_ref = object_refs
    ip1_, ip2_, domain1_, domain2_ = observable
    # Both domain SCOs resolve to both IP address SCOs
    for domain_object, domain_attribute, domain_ref in (
        (domain1_, _domain1, domain1_ref),
        (domain2_, _domain2, domain2_ref)
    ):
        self._check_SCO(domain_object, domain_attribute, domain_ref, 'domain-name')
        self.assertEqual(domain_object.resolves_to_refs, [ip1_ref, ip2_ref])
    self._check_SCO(ip1_, _ip1, ip1_ref, 'ipv4-addr')
    self._check_SCO(ip2_, _ip2, ip2_ref, 'ipv4-addr')
    self._populate_documentation(
        misp_object=event['Event']['Object'][0],
        observed_data=self.parser.stix_objects[-5:],
        name='domain-ip with the perfect domain & ip matching',
        summary='A tuple of IPv4/IPv6 Address & Network Objects for each associated domain & ip'
    )
def test_event_with_email_indicator_object(self):
    """Check the STIX pattern built from a MISP email object.

    The pattern parts are compared pairwise against the MISP attribute
    values, in the order the export maps them (recipients first, then
    sender, header fields, attachments and custom properties).
    """
    event = get_event_with_email_object()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    _from, _from_dn, _to, _to_dn, _cc1, _cc1_dn, _cc2, _cc2_dn, _bcc, _bcc_dn, _reply_to, _subject, _attachment1, _attachment2, _x_mailer, _user_agent, _boundary, _message_id = (attribute['value'] for attribute in attributes)
    to_, to_dn, cc1_, cc1_dn, cc2_, cc2_dn, bcc_, bcc_dn, from_, from_dn, message_id_, reply_to_, subject_, x_mailer_, attachment1_, content1, attachment2_, content2, user_agent_, boundary_ = pattern[1:-1].split(' AND ')
    self.assertEqual(from_, f"email-message:from_ref.value = '{_from}'")
    self.assertEqual(from_dn, f"email-message:from_ref.display_name = '{_from_dn}'")
    self.assertEqual(to_, f"email-message:to_refs[0].value = '{_to}'")
    self.assertEqual(to_dn, f"email-message:to_refs[0].display_name = '{_to_dn}'")
    self.assertEqual(cc1_, f"email-message:cc_refs[0].value = '{_cc1}'")
    self.assertEqual(cc1_dn, f"email-message:cc_refs[0].display_name = '{_cc1_dn}'")
    self.assertEqual(cc2_, f"email-message:cc_refs[1].value = '{_cc2}'")
    self.assertEqual(cc2_dn, f"email-message:cc_refs[1].display_name = '{_cc2_dn}'")
    self.assertEqual(bcc_, f"email-message:bcc_refs[0].value = '{_bcc}'")
    self.assertEqual(bcc_dn, f"email-message:bcc_refs[0].display_name = '{_bcc_dn}'")
    self.assertEqual(message_id_, f"email-message:message_id = '{_message_id}'")
    self.assertEqual(
        reply_to_,
        f"email-message:additional_header_fields.reply_to = '{_reply_to}'"
    )
    self.assertEqual(subject_, f"email-message:subject = '{_subject}'")
    self.assertEqual(
        attachment1_,
        f"email-message:body_multipart[0].body_raw_ref.name = '{_attachment1}'"
    )
    # The content_disposition comparisons are fixed text, so no f-string is needed
    self.assertEqual(
        content1,
        "email-message:body_multipart[0].content_disposition = 'attachment'"
    )
    self.assertEqual(
        attachment2_,
        f"email-message:body_multipart[1].body_raw_ref.name = '{_attachment2}'"
    )
    self.assertEqual(
        content2,
        "email-message:body_multipart[1].content_disposition = 'attachment'"
    )
    self.assertEqual(
        x_mailer_,
        f"email-message:additional_header_fields.x_mailer = '{_x_mailer}'"
    )
    self.assertEqual(user_agent_, f"email-message:x_misp_user_agent = '{_user_agent}'")
    self.assertEqual(boundary_, f"email-message:x_misp_mime_boundary = '{_boundary}'")
    self._populate_documentation(
        misp_object=event['Event']['Object'][0],
        indicator=self.parser.stix_objects[-1]
    )
def test_event_with_email_observable_object(self):
    """Check the Email Message, Email Address & File observables built from a
    MISP email object, including the cross references between them."""
    event = get_event_with_email_object()
    misp_object = deepcopy(event['Event']['Object'][0])
    attributes, grouping_refs, object_refs, observables = self._run_observable_from_object_tests(event)
    # Positional unpacking: order must match the attribute order in the fixture
    _from, _from_dn, _to, _to_dn, _cc1, _cc1_dn, _cc2, _cc2_dn, _bcc, _bcc_dn, _reply_to, _subject, _attachment1, _attachment2, _x_mailer, _user_agent, _boundary, _message_id = attributes
    message, address1, address2, address3, address4, address5, file1, file2 = observables
    message_id, address1_id, address2_id, address3_id, address4_id, address5_id, file1_id, file2_id = grouping_refs
    message_ref, address1_ref, address2_ref, address3_ref, address4_ref, address5_ref, file1_ref, file2_ref = object_refs
    # The message SCO id must match both ref lists and derive from the object uuid
    self._assert_multiple_equal(
        message.id,
        message_id,
        message_ref,
        f"email-message--{misp_object['uuid']}"
    )
    self.assertEqual(message.type, 'email-message')
    # Two attachments are exported, so the message is multipart
    self.assertEqual(message.is_multipart, True)
    self.assertEqual(message.subject, _subject['value'])
    self.assertEqual(message.message_id, _message_id['value'])
    additional_header = message.additional_header_fields
    self.assertEqual(additional_header['Reply-To'], _reply_to['value'])
    self.assertEqual(additional_header['X-Mailer'], _x_mailer['value'])
    self.assertEqual(message.x_misp_mime_boundary, _boundary['value'])
    self.assertEqual(message.x_misp_user_agent, _user_agent['value'])
    # Address refs: 1 = from, 2 = to, 3/4 = cc, 5 = bcc
    self.assertEqual(message.from_ref, address1_ref)
    self.assertEqual(message.to_refs, [address2_ref])
    self.assertEqual(message.cc_refs, [address3_ref, address4_ref])
    self.assertEqual(message.bcc_refs, [address5_ref])
    self._assert_multiple_equal(
        message.from_ref,
        address1.id,
        address1_id,
        address1_ref,
        f"email-addr--{_from['uuid']}"
    )
    self._check_email_address(address1, _from['value'], display_name=_from_dn['value'])
    self._assert_multiple_equal(
        message.to_refs[0],
        address2.id,
        address2_id,
        address2_ref,
        f"email-addr--{_to['uuid']}"
    )
    self._check_email_address(address2, _to['value'], display_name=_to_dn['value'])
    self._assert_multiple_equal(
        message.cc_refs[0],
        address3.id,
        address3_id,
        address3_ref,
        f"email-addr--{_cc1['uuid']}"
    )
    self._check_email_address(address3, _cc1['value'], display_name=_cc1_dn['value'])
    self._assert_multiple_equal(
        message.cc_refs[1],
        address4.id,
        address4_id,
        address4_ref,
        f"email-addr--{_cc2['uuid']}"
    )
    self._check_email_address(address4, _cc2['value'], display_name=_cc2_dn['value'])
    self._assert_multiple_equal(
        message.bcc_refs[0],
        address5.id,
        address5_id,
        address5_ref,
        f"email-addr--{_bcc['uuid']}"
    )
    self._check_email_address(address5, _bcc['value'], display_name=_bcc_dn['value'])
    # Each attachment becomes a multipart body entry referencing a File SCO
    body1, body2 = message.body_multipart
    self.assertEqual(
        body1['content_disposition'],
        f"attachment; filename='{_attachment1['value']}'"
    )
    self.assertEqual(
        body2['content_disposition'],
        f"attachment; filename='{_attachment2['value']}'"
    )
    self._assert_multiple_equal(
        body1['body_raw_ref'],
        file1.id,
        file1_id,
        file1_ref,
        f"file--{_attachment1['uuid']}"
    )
    self.assertEqual(file1.type, 'file')
    self.assertEqual(file1.name, _attachment1['value'])
    self._assert_multiple_equal(
        body2['body_raw_ref'],
        file2.id,
        file2_id,
        file2_ref,
        f"file--{_attachment2['uuid']}"
    )
    self.assertEqual(file2.type, 'file')
    self.assertEqual(file2.name, _attachment2['value'])
    self._populate_documentation(
        misp_object = misp_object,
        observed_data = self.parser.stix_objects[-9:]
    )
def test_event_with_email_indicator_object_with_display_names(self):
    """Check the STIX pattern built from an email object carrying display names."""
    event = get_event_with_email_object_with_display_names()
    attributes, pattern = self._run_indicator_from_object_tests(event)
    _from, _from_name, _to, _to_name, _cc1, _cc2_name, _bcc, _bcc_name = (
        attribute['value'] for attribute in attributes
    )
    to_, to_name_, cc1_, cc2_name_, bcc_, bcc_name_, from_, from_name_ = pattern[1:-1].split(' AND ')
    # Each pattern part must mirror the corresponding MISP attribute value
    for observed, expected in (
        (to_, f"email-message:to_refs[0].value = '{_to}'"),
        (to_name_, f"email-message:to_refs[0].display_name = '{_to_name}'"),
        (cc1_, f"email-message:cc_refs[0].value = '{_cc1}'"),
        (cc2_name_, f"email-message:cc_refs[1].display_name = '{_cc2_name}'"),
        (bcc_, f"email-message:bcc_refs[0].value = '{_bcc}'"),
        (bcc_name_, f"email-message:bcc_refs[0].display_name = '{_bcc_name}'"),
        (from_, f"email-message:from_ref.value = '{_from}'"),
        (from_name_, f"email-message:from_ref.display_name = '{_from_name}'")
    ):
        self.assertEqual(observed, expected)
    self._populate_documentation(
        misp_object=event['Event']['Object'][0],
        indicator=self.parser.stix_objects[-1],
        name='email with display names'
    )
def test_event_with_email_observable_object_with_display_names(self):
event = get_event_with_email_object_with_display_names()
misp_object = deepcopy(event['Event']['Object'][0])
attributes, grouping_refs, object_refs, observables = self._run_observable_from_object_tests(event)
_from, _from_name, _to, _to_name, _cc1, _cc2_name, _bcc, _bcc_name = attributes
message, from_, to_, cc_, bcc_ = observables
message_id, from_id, to_id, cc_id, bcc_id = grouping_refs
message_ref, from_ref, to_ref, cc_ref, bcc_ref = object_refs
self._assert_multiple_equal(
message.id,
message_id,
message_ref,
f"email-message--{misp_object['uuid']}"
)
self.assertEqual(message.type, 'email-message')
self.assertEqual(message.is_multipart, False)
self._assert_multiple_equal(
message.from_ref,
from_.id,
from_id,
from_ref,
f"email-addr--{_from['uuid']}"
)
self._check_email_address(from_, _from['value'], display_name=_from_name['value'])
self._assert_multiple_equal(
message.to_refs[0],
to_.id,
to_id,
to_ref,
f"email-addr--{_to['uuid']}"
)
self._check_email_address(to_, _to['value'], display_name=_to_name['value'])
self._assert_multiple_equal(
| |
""" Admin objects declaration for Nine CMS """
__author__ = '<NAME>'
__copyright__ = 'Copyright 2015, <NAME>'
__licence__ = 'BSD-3'
__email__ = '<EMAIL>'
from django.contrib import admin, messages
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.conf import settings
from django.conf.urls import url
from django.core.exceptions import PermissionDenied
from django.utils.translation import ugettext as _
from mptt.admin import MPTTModelAdmin
# noinspection PyPackageRequirements
from guardian.shortcuts import get_objects_for_user
from ninecms import models, forms, views
# noinspection PyMethodMayBeStatic
@admin.register(models.PageType)
class PageTypeAdmin(admin.ModelAdmin):
    """ Admin configuration for Page Types: list columns and custom urls """
    list_display = ('name', 'description', 'url_pattern', 'elements', 'operations')
    list_editable = ('description', 'url_pattern')
    search_fields = ['name']
    form = forms.PageTypeForm
    save_as = True

    def elements(self, obj):
        """ Custom column: number of blocks (layout elements) in the page type
        :param obj: a page type object
        :return: column output
        """
        return obj.pagelayoutelement_set.count()
    elements.short_description = "Blocks"

    def operations(self, obj):
        """ Custom column: edit & permissions links for the page type
        :param obj: a page type object
        :return: column output
        """
        edit_link = '<a href="%s">%s</a>' % (
            reverse('admin:ninecms_pagetype_change', args=(obj.id,)), _("edit"))
        perms_link = '<a href="%s">%s</a>' % (
            reverse('admin:ninecms_pagetype_perms', args=(obj.id,)), _("permissions"))
        return ' | '.join((edit_link, perms_link))
    operations.allow_tags = True

    def get_urls(self):
        """ Prepend the content type permissions view to the default admin urls
        :return: urls list
        """
        perms_url = url(
            r'^(?P<type_id>\d+)/perms/$',
            self.admin_site.admin_view(views.ContentTypePermsView.as_view()),
            name='ninecms_pagetype_perms'
        )
        return [perms_url] + super(PageTypeAdmin, self).get_urls()
class NodeRevisionInline(admin.StackedInline):
    """ Node Revision stacked inline to be displayed in Nodes (NodeAdmin) """
    model = models.NodeRevision
    # no extra empty forms by default; revisions are added explicitly
    extra = 0
class ImageInline(admin.StackedInline):
    """ Images inline to be displayed in Nodes (NodeAdmin) """
    model = models.Image
    form = forms.ImageForm
    # no extra empty forms by default
    extra = 0
    # custom template for rendering the stacked image forms
    template = 'admin/ninecms/image/stacked.html'
class FileInline(admin.StackedInline):
    """ Files inline to be displayed in Nodes (NodeAdmin) """
    model = models.File
    form = forms.FileForm
    # no extra empty forms by default
    extra = 0
class VideoInline(admin.StackedInline):
    """ Videos inline to be displayed in Nodes (NodeAdmin) """
    model = models.Video
    form = forms.VideoForm
    # no extra empty forms by default
    extra = 0
# noinspection PyMethodMayBeStatic
# noinspection PyUnusedLocal
@admin.register(models.Node)
class NodeAdmin(admin.ModelAdmin):
    """ Get a list of Nodes, also use inlines in Node form

    Change/delete/add access is checked per page type with django-guardian
    object permissions ('<action>_node_pagetype'), on top of the standard
    Django model permissions.
    """
    list_display = ('title', 'page_type', 'language', 'alias', 'user', 'status', 'promote', 'sticky', 'created',
                    'changed', 'original_translation', 'redirect', 'operations')
    list_editable = ('status', 'promote', 'sticky', 'redirect')
    list_filter = ['page_type', 'created', 'changed']
    search_fields = ['title', 'summary', 'body', 'highlight']
    actions = ['node_publish', 'node_unpublish', 'node_promote', 'node_demote', 'node_sticky', 'node_unsticky',
               'node_reset_alias']
    date_hierarchy = 'created'
    form = forms.ContentNodeEditForm
    # fieldsets returned from overridden get_fieldsets method below
    inlines = [ImageInline, FileInline, VideoInline, NodeRevisionInline]

    def operations(self, obj):
        """ Return a custom column with 9cms operations view, edit
        :param obj: a node object
        :return: column output
        """
        return ' | '.join((
            '<a href="%s" target="_blank">%s</a>' % (obj.get_absolute_url(), _("view")),
            '<a href="%s">%s</a>' % (reverse('admin:ninecms_node_change', args=(obj.id,)), _("edit")),
        ))
    operations.allow_tags = True

    def node_publish(self, request, queryset):
        """ Mark all selected nodes as published setting status True
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(status=True)
        messages.success(request, _("%d nodes successfully updated as published.") % r)
    node_publish.short_description = _("Mark selected nodes status as published")

    def node_unpublish(self, request, queryset):
        """ Mark all selected nodes as unpublished setting status False
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(status=False)
        messages.success(request, _("%d nodes successfully updated as not published.") % r)
    node_unpublish.short_description = _("Mark selected nodes status as not published")

    def node_promote(self, request, queryset):
        """ Mark all selected nodes as promoted setting promote True
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(promote=True)
        messages.success(request, _("%d nodes successfully updated as promoted.") % r)
    node_promote.short_description = _("Mark selected nodes as promoted")

    def node_demote(self, request, queryset):
        """ Mark all selected nodes as not promoted setting promote False
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(promote=False)
        messages.success(request, _("%d nodes successfully updated as not promoted.") % r)
    node_demote.short_description = _("Mark selected nodes as not promoted")

    def node_sticky(self, request, queryset):
        """ Mark all selected nodes as sticky setting True
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(sticky=True)
        messages.success(request, _("%d nodes successfully updated as sticky.") % r)
    node_sticky.short_description = _("Mark selected nodes as sticky")

    def node_unsticky(self, request, queryset):
        """ Mark all selected nodes as not sticky setting False
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        r = queryset.update(sticky=False)
        messages.success(request, _("%d nodes successfully updated as not sticky.") % r)
    node_unsticky.short_description = _("Mark selected nodes as not sticky")

    def node_reset_alias(self, request, queryset):
        """ Reset url alias for all selected nodes
        :param request: the request object
        :param queryset: the Node queryset
        :return: None
        """
        # save each node individually so that alias regeneration logic runs
        for node in queryset:
            node.alias = ''
            node.save()
        messages.success(request, _("%d nodes successfully updated.") % len(queryset))
    node_reset_alias.short_description = _("Reset url alias for all selected nodes")

    def check_perm(self, request, obj, perm):
        """ Check if a user has permission on the Node
        :param request: the request object
        :param obj: the Node object, if any
        :param perm: the permission to check: has meaning for values 'change', 'delete'
        :return: bool
        """
        # with no specific node, fall back to the standard model permission
        if not obj:
            return request.user.has_perm('ninecms.%s_node' % perm)
        # guardian: the page types this user holds the object permission for
        types = get_objects_for_user(request.user, 'ninecms.%s_node_pagetype' % perm, klass=models.PageType)
        return obj.page_type in types

    def has_change_permission(self, request, obj=None):
        """ Check user permission on Node change
        :param request: the request object
        :param obj: the Node object
        :return: bool
        """
        return self.check_perm(request, obj, 'change')

    def has_delete_permission(self, request, obj=None):
        """ Check user permission on Node delete
        :param request: the request object
        :param obj: the Node object
        :return: bool
        """
        return self.check_perm(request, obj, 'delete')

    def get_actions(self, request):
        """ Override actions list to check for perms
        If the user sees the actions, then he sees the list, so he already has the change perm
        :param request: the request object
        :return: actions list
        """
        actions = super(NodeAdmin, self).get_actions(request)
        if not request.user.has_perm('ninecms.delete_node') and 'delete_selected' in actions:
            del actions['delete_selected']
        return actions

    def get_queryset(self, request):
        """ Return only objects on which user has permission
        :param request: the request object
        :return: Node queryset
        """
        qs = super(NodeAdmin, self).get_queryset(request)
        # restrict the changelist to nodes of page types the user may change
        types = get_objects_for_user(request.user, 'ninecms.change_node_pagetype', klass=models.PageType)
        return qs.filter(page_type__id__in=types.values_list('id'))

    def get_form(self, request, obj=None, **kwargs):
        """ Override form to pass the current user
        :param request: the request object
        :param obj: the current node if any
        :param kwargs: keyword arguments
        :return: overridden form
        """
        form = super(NodeAdmin, self).get_form(request, obj, **kwargs)
        form.current_user = request.user
        return form

    def get_fieldsets(self, request, obj=None):
        """ Provide different fieldsets depending on user level
        :param request: the request object
        :param obj: the current node if any
        :return: a dictionary of fieldsets
        """
        # non-superusers do not get the 'alias' and 'redirect' fields
        if request.user.is_superuser:
            return (
                ("Node", {'fields': ('page_type', 'language', 'alias', 'title')}),
                ("Body", {'fields': ('highlight', 'summary', 'body', 'link')}),
                ("Node management", {'fields': ('status', 'promote', 'sticky', 'redirect', 'user',
                                                'created', 'original_translation', 'weight')}),
                ("Terms", {'fields': ('terms',)}),
            )
        else:
            return (
                ("Node", {'fields': ('page_type', 'language', 'title')}),
                ("Body", {'fields': ('highlight', 'summary', 'body', 'link')}),
                ("Node management", {'fields': ('status', 'promote', 'sticky', 'user',
                                                'created', 'original_translation', 'weight')}),
                ("Terms", {'fields': ('terms',)}),
            )

    def get_changeform_initial_data(self, request):
        """ Set initial values
        :param request: the request object
        :return: a dictionary with initial values
        """
        return {'user': request.user, 'promote': False, 'sticky': False, 'redirect': False}

    def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
        """ Override queryset of page types field to respect permissions
        Restrict users field for non-superusers to same user
        :param db_field: the database field name
        :param request: the request object
        :param kwargs: keyword arguments such as the queryset
        :return: parent method return
        """
        if db_field.name == 'page_type':
            page_types = get_objects_for_user(request.user, 'ninecms.add_node_pagetype', klass=models.PageType)
            # a non-superuser with no addable page types cannot add nodes at all
            if len(page_types) < 1 and not request.user.is_superuser:
                raise PermissionDenied
            kwargs['queryset'] = page_types
        elif db_field.name == 'user' and not request.user.is_superuser:
            # non-superusers may only assign themselves as the node user
            kwargs['queryset'] = User.objects.filter(pk=request.user.pk)
        return super(NodeAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)

    def formfield_for_choice_field(self, db_field, request=None, **kwargs):
        """ Override choices of languages field to respect settings
        :param db_field: the database field name
        :param request: the request object
        :param kwargs: keyword arguments such as the queryset
        :return: parent method return
        """
        # prepend an empty choice to the languages defined in settings
        if db_field.name == 'language':
            kwargs['choices'] = (('', '---------'),) + settings.LANGUAGES
        return super(NodeAdmin, self).formfield_for_choice_field(db_field, request, **kwargs)
@admin.register(models.MenuItem)
class MenuItemAdmin(MPTTModelAdmin):
""" Get a list of Menu Items """
list_display = ('title', 'language', 'path', 'disabled', 'weight')
search_fields = ['path', 'title']
def formfield_for_choice_field(self, db_field, request=None, **kwargs):
""" Override choices of languages field to respect settings
:param db_field: the database field name
:param request: the request object
:param kwargs: keyword arguments such as the queryset
:return: parent method return
"""
if db_field.name == 'language':
kwargs['choices'] = (('', | |
"heatsource/4/state"
]
self.checkEvents(send, expect)
def testHeatSourceSolarEast(self):
    """Exercise the solar heat source using the east panel elevation only:
    drive the temperatures so the east elevation becomes available, then
    request run/stop and verify the exact logged event sequence."""
    # test the heat source logic
    self._log.debug( "testHeatSourceSolarEast" )
    self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSource3, (heatSource3FileList,) )
    # the events we send
    # NOTE(review): each tuple pairs an event with an integer; presumably the
    # number of resulting log events to consume — confirm in checkEvents
    send = [
        (Events.evtMinute1,9), # startup
        (evtHS3PanelEastTemp5,2),
        (evtHS3HeatExTemp5,3),
        (evtHS3PanelEastTemp25,4),
        (evtHS3PanelEastTemp95,4),
        (evtHS3RequestRun,1),
        (Events.evtMinute1,7),
        (Events.evtMinute1,7),
        (evtHS3RequestStop,1),
        (Events.evtMinute1,7)
    ]
    # the events that we expect to be logged.
    expect = [
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3WestDoStop,
        evtHS3EastDoStop,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelEastTemp5,
        "heatsource/3/elevation/east",
        evtHS3HeatExTemp5,
        "heatsource/3/heatexbot",
        "heatsource/3/heatex",
        evtHS3PanelEastTemp25,
        "heatsource/3/elevation/east",
        ("heatsource/3/east/availability","availability",1),
        ("heatsource/3/availability","availability",1),
        evtHS3PanelEastTemp95,
        "heatsource/3/elevation/east",
        ("heatsource/3/east/availability","availability",2),
        ("heatsource/3/availability","availability",2),
        evtHS3RequestRun,
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3RequestStop,
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3EastDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state"
    ]
    self.checkEvents(send, expect)
def testHeatSourceSolarSouth(self):
    """Exercise the solar heat source using the south panel elevation only:
    same scenario as the east test, driven through the south temperatures."""
    # test the heat source logic
    self._log.debug( "testHeatSourceSolarSouth" )
    self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSource3, (heatSource3FileList,) )
    # the events we send
    # NOTE(review): each tuple pairs an event with an integer; presumably the
    # number of resulting log events to consume — confirm in checkEvents
    send = [
        (Events.evtMinute1,9), # startup
        (evtHS3PanelSouthTemp5,2),
        (evtHS3HeatExTemp5,3),
        (evtHS3PanelSouthTemp25,4),
        (evtHS3PanelSouthTemp95,4),
        (evtHS3RequestRun,1),
        (Events.evtMinute1,7),
        (Events.evtMinute1,7),
        (evtHS3RequestStop,1),
        (Events.evtMinute1,7)
    ]
    # the events that we expect to be logged.
    expect = [
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3WestDoStop,
        evtHS3EastDoStop,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp5,
        "heatsource/3/elevation/south",
        evtHS3HeatExTemp5,
        "heatsource/3/heatexbot",
        "heatsource/3/heatex",
        evtHS3PanelSouthTemp25,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",1),
        ("heatsource/3/availability","availability",1),
        evtHS3PanelSouthTemp95,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",2),
        ("heatsource/3/availability","availability",2),
        evtHS3RequestRun,
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3RequestStop,
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state"
    ]
    self.checkEvents(send, expect)
def testHeatSourceSolarWest(self):
    """Exercise the solar heat source using the west panel elevation only:
    same scenario as the east test, driven through the west temperatures."""
    # test the heat source logic
    self._log.debug( "testHeatSourceSolarWest" )
    self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSource3, (heatSource3FileList,) )
    # the events we send
    # NOTE(review): each tuple pairs an event with an integer; presumably the
    # number of resulting log events to consume — confirm in checkEvents
    send = [
        (Events.evtMinute1,9), # startup
        (evtHS3PanelWestTemp5,2),
        (evtHS3HeatExTemp5,3),
        (evtHS3PanelWestTemp25,4),
        (evtHS3PanelWestTemp95,4),
        (evtHS3RequestRun,1),
        (Events.evtMinute1,7),
        (Events.evtMinute1,7),
        (evtHS3RequestStop,1),
        (Events.evtMinute1,7),
    ]
    # the events that we expect to be logged.
    expect = [
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3WestDoStop,
        evtHS3EastDoStop,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelWestTemp5,
        "heatsource/3/elevation/west",
        evtHS3HeatExTemp5,
        "heatsource/3/heatexbot",
        "heatsource/3/heatex",
        evtHS3PanelWestTemp25,
        "heatsource/3/elevation/west",
        ("heatsource/3/west/availability","availability",1),
        ("heatsource/3/availability","availability",1),
        evtHS3PanelWestTemp95,
        "heatsource/3/elevation/west",
        ("heatsource/3/west/availability","availability",2),
        ("heatsource/3/availability","availability",2),
        evtHS3RequestRun,
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3WestDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3WestDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3RequestStop,
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3WestDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state"
    ]
    self.checkEvents(send, expect)
def testHeatSourceSolarOverall(self):
    """Exercise the solar heat source with several elevations changing
    availability while running: elevations join/leave the run set as their
    panel temperatures cross the availability thresholds."""
    # test the heat source logic
    self._log.debug( "testHeatSourceSolarOverall" )
    self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSource3, (heatSource3FileList,) )
    # the events we send
    # NOTE(review): each tuple pairs an event with an integer; presumably the
    # number of resulting log events to consume — confirm in checkEvents
    send = [
        (Events.evtMinute1,9), # startup
        (evtHS3HeatExTemp5,3),
        (evtHS3PanelEastTemp95,4),
        (evtHS3RequestRun,1),
        (Events.evtMinute1,7),
        (evtHS3PanelSouthTemp5,2),
        (Events.evtMinute1,7),
        (evtHS3PanelSouthTemp25,3),
        (Events.evtMinute1,7),
        (evtHS3PanelSouthTemp95,3),
        (Events.evtMinute1,8),
        (evtHS3PanelEastTemp25,3),
        (Events.evtMinute1,8),
        (evtHS3PanelSouthTemp25,4),
        (Events.evtMinute1,8),
        (evtHS3PanelSouthTemp95,4),
        (Events.evtMinute1,8),
        (evtHS3PanelSouthTemp5,4),
        (Events.evtMinute1,8),
        (evtHS3RequestStop,1),
        (Events.evtMinute1,7)
    ]
    # the events that we expect to be logged.
    expect = [
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3WestDoStop,
        evtHS3EastDoStop,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3HeatExTemp5,
        "heatsource/3/heatexbot",
        "heatsource/3/heatex",
        evtHS3PanelEastTemp95,
        "heatsource/3/elevation/east",
        ("heatsource/3/east/availability","availability",2),
        ("heatsource/3/availability","availability",2),
        evtHS3RequestRun,
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp5,
        "heatsource/3/elevation/south",
        #availability due to east elevation at 2
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp25,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",1),
        #availability due to east elevation at 2
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp95,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",2),
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelEastTemp25,
        "heatsource/3/elevation/east",
        ("heatsource/3/east/availability","availability",1),
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoStop,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp25,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",1),
        ("heatsource/3/availability","availability",1),
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp95,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",2),
        ("heatsource/3/availability","availability",2),
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoStop,
        evtHS3SouthDoRun,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3PanelSouthTemp5,
        "heatsource/3/elevation/south",
        ("heatsource/3/south/availability","availability",0),
        ("heatsource/3/availability","availability",1), #availability due to east elevation at 1
        Events.evtMinute1,
        evtHS3CommonDoRun,
        evtHS3EastDoRun,
        evtHS3SouthDoStop,
        "webbrick/903/DO/4",
        evtHS3Running,
        "heatsource/3/state",
        "heatsource/3/state",
        evtHS3RequestStop,
        Events.evtMinute1,
        evtHS3CommonDoStop,
        evtHS3EastDoStop,
        "webbrick/903/DO/4",
        evtHS3Stopped,
        "heatsource/3/state",
        "heatsource/3/state"
    ]
    self.checkEvents(send, expect)
    def testHeatSourceSolarAvailibility_1(self):
        """Scenario test: a demanding zone picks up the solar heat source as
        soon as that source becomes available.

        The `expect` list is golden data; entries are event objects, topic
        strings, or (topic, attribute, value) tuples.
        """
        # this is to test that a heatsource will start to run if it becomes available
        # initial conditions:
        #    only heatsource for the zone is not available (i.e. avail == 0)
        #    zone is demanding heat, therefore zone and zonegroup are running,
        # expected:
        #    heatsource will start to run as soon as it becomes available
        # NOTE: not completed yet
        self._log.debug( "testHeatSourceSolarAvailibility_1" )
        self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACSolar, (heatSource3FileList, masterMultipleFileList, groupFileList, zone17FileList) )
        # the events we send: (event, repeat-count) pairs
        send = [
            (Events.evtMinute1,12), # startup
            (Events.evtRuntime30,6),
            (evtZone17Temp15,3),
            (evtHS3PanelSouthTemp5,2),
            (evtHS3HeatExTemp5,3),
            (evtZone17SetPoint18,13),
            (evtHS3PanelSouthTemp25,8),
            (Events.evtMinute1,7),
            (evtHS3PanelSouthTemp95,4),
            (Events.evtMinute1,7),
            (evtHS3PanelSouthTemp5,8),
            (Events.evtMinute1,7),
            ]
        # the events that we expect to be logged.
        expect = [
            Events.evtMinute1,
            evtHS3CommonDoStop,
            evtHS3SouthDoStop,
            "zone17/stop",
            "zonegroup/6/stop",
            "webbrick/903/DO/4",
            "zone17/stopped",
            # "zonegroup/6/stop",
            "webbrick/906/DO/1",
            evtHS3Stopped,
            "heatsource/3/state",
            "zonegroup/6/stopped",
            "heatsource/3/state",
            # "zonegroup/6/stopped",
            "time/runtime",
            "zone17/stop",
            "zone17/targetset",
            "zone17/state",
            "zone17/name",
            "zone17/stopped",
            #11
            ("webbrick/17/CT/0","val",15.0),
            "zone17/sensor",
            ("zone17/state","cmdsource","Frost"),
            #14
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            #16
            evtHS3HeatExTemp5,
            "heatsource/3/heatexbot",
            "heatsource/3/heatex",
            #19
            evtZone17SetPoint18,
            "zone17/schedulesetpoint",
            ("zone17/targetset","val",18.0),
            evtZone17Run,
            ("zone17/state","status","Demand"),
            "zone17/schedulesetpoint",
            evtZone17Running,
            evtZG6Run,
            "webbrick/906/DO/1",
            evtZG6Running,
            evtMasterRun,
            "webbrick/900/DO/3",
            evtMasterRunning,
            #
            evtHS3PanelSouthTemp25,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",1),
            ("heatsource/3/availability","availability",1),
            evtHS3RequestRun,
            "zonemaster/zonegroup6/heatsource3/run",
            ("zonegroup/6/heatsource","name", "Multi Solar"),
            ("zone17/heatsource","name", "Multi Solar"),
            #34
            Events.evtMinute1,
            evtHS3CommonDoRun,
            evtHS3SouthDoRun,
            "webbrick/903/DO/4",
            evtHS3Running,
            "heatsource/3/state",
            "heatsource/3/state",
            evtHS3PanelSouthTemp95,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",2),
            ("heatsource/3/availability","availability",2),
            Events.evtMinute1,
            evtHS3CommonDoRun,
            evtHS3SouthDoRun,
            "webbrick/903/DO/4",
            evtHS3Running,
            "heatsource/3/state",
            "heatsource/3/state",
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",0),
            ("heatsource/3/availability","availability",0),
            "zonemaster/zonegroup6/heatsource3/stop",
            evtHS3RequestStop,
            ("zonegroup/6/heatsource","name", 'Idle'),
            ("zone17/heatsource","name", 'Idle'),
            Events.evtMinute1,
            evtHS3CommonDoStop,
            evtHS3SouthDoStop,
            "webbrick/903/DO/4",
            evtHS3Stopped,
            "heatsource/3/state",
            "heatsource/3/state"
            ]
        self.checkEvents(send, expect)
    def testHeatSourceSolarAvailibility_2(self):
        """Scenario test: requestrun/requeststop issued within a single minute.

        Verifies that when temperature fluctuations cause both a request-run
        and a request-stop between minute ticks, the last request wins.
        The `expect` list is golden data; entries are event objects, topic
        strings, or (topic, attribute, value) tuples.
        """
        # this is to test that a heatsource will start to run if it becomes available
        # initial conditions:
        #    only heatsource for the zone is not available (i.e. avail == 0)
        #    zone is demanding heat, therefore zone and zonegroup are running,
        # expected:
        #    heatsource will start to run as soon as it becomes available
        # NOTE
        # Focus of this test is to determine what happens if due to temperature fluctuations both request run and request stop
        # are issued within one minute (i.e. without a minute event inbetween)
        self._log.debug( "testHeatSourceSolarAvailibility_2" )
        self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACSolar, (heatSource3FileList, masterMultipleFileList, groupFileList, zone17FileList) )
        # the events we send: (event, repeat-count) pairs
        send = [
            (Events.evtMinute1,12), # startup
            (Events.evtRuntime30,6),
            (evtZone17Temp15,3),
            (evtHS3PanelSouthTemp5,2),
            (evtHS3HeatExTemp5,3),
            (evtZone17SetPoint18,13),
            (evtHS3PanelSouthTemp25,8), # results in requestrun
            (evtHS3PanelSouthTemp5,8),  # results in requeststop
            (Events.evtMinute1,1),   # nothing should happen since last command was requeststop, which overrides the requestrun
            (evtHS3PanelSouthTemp25,8),
            (Events.evtMinute1,7),
            (evtHS3PanelSouthTemp5,8),  # results in requeststop
            (evtHS3PanelSouthTemp25,8), # results in requestrun
            (Events.evtMinute1,7),   # should continue to run (dorun) since last command was requestrun, which overrides the requetstop
            (evtHS3PanelSouthTemp5,8),
            (Events.evtMinute1,7),
            ]
        # the events that we expect to be logged.
        expect = [
            Events.evtMinute1,
            evtHS3CommonDoStop,
            evtHS3SouthDoStop,
            "zone17/stop",
            "zonegroup/6/stop",
            "webbrick/903/DO/4",
            "zone17/stopped",
            # "zonegroup/6/stop",
            "webbrick/906/DO/1",
            evtHS3Stopped,
            "heatsource/3/state",
            "zonegroup/6/stopped",
            "heatsource/3/state",
            # "zonegroup/6/stopped",
            "time/runtime",
            "zone17/stop",
            "zone17/targetset",
            "zone17/state",
            "zone17/name",
            "zone17/stopped",
            ("webbrick/17/CT/0","val",15.0),
            "zone17/sensor",
            ("zone17/state","cmdsource","Frost"),
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            evtHS3HeatExTemp5,
            "heatsource/3/heatexbot",
            "heatsource/3/heatex",
            evtZone17SetPoint18,
            "zone17/schedulesetpoint",
            ("zone17/targetset","val",18.0),
            evtZone17Run,
            ("zone17/state","status","Demand"),
            "zone17/schedulesetpoint",
            evtZone17Running,
            evtZG6Run,
            "webbrick/906/DO/1",
            evtZG6Running,
            evtMasterRun,
            "webbrick/900/DO/3",
            evtMasterRunning,
            evtHS3PanelSouthTemp25,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",1),
            ("heatsource/3/availability","availability",1),
            evtHS3RequestRun,
            "zonemaster/zonegroup6/heatsource3/run",
            ("zonegroup/6/heatsource","name", "Multi Solar"),
            ("zone17/heatsource","name", "Multi Solar"),
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",0),
            ("heatsource/3/availability","availability",0),
            "zonemaster/zonegroup6/heatsource3/stop",
            evtHS3RequestStop,
            ("zonegroup/6/heatsource","name", 'Idle'),
            ("zone17/heatsource","name", 'Idle'),
            Events.evtMinute1,
            evtHS3PanelSouthTemp25,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",1),
            ("heatsource/3/availability","availability",1),
            evtHS3RequestRun,
            "zonemaster/zonegroup6/heatsource3/run",
            ("zonegroup/6/heatsource","name", "Multi Solar"),
            ("zone17/heatsource","name", "Multi Solar"),
            Events.evtMinute1,
            evtHS3CommonDoRun,
            evtHS3SouthDoRun,
            "webbrick/903/DO/4",
            evtHS3Running,
            "heatsource/3/state",
            "heatsource/3/state",
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",0),
            ("heatsource/3/availability","availability",0),
            "zonemaster/zonegroup6/heatsource3/stop",
            evtHS3RequestStop,
            ("zonegroup/6/heatsource","name", 'Idle'),
            ("zone17/heatsource","name", 'Idle'),
            evtHS3PanelSouthTemp25,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",1),
            ("heatsource/3/availability","availability",1),
            evtHS3RequestRun,
            "zonemaster/zonegroup6/heatsource3/run",
            ("zonegroup/6/heatsource","name", "Multi Solar"),
            ("zone17/heatsource","name", "Multi Solar"),
            Events.evtMinute1,
            evtHS3CommonDoRun,
            evtHS3SouthDoRun,
            "webbrick/903/DO/4",
            evtHS3Running,
            "heatsource/3/state",
            "heatsource/3/state",
            evtHS3PanelSouthTemp5,
            "heatsource/3/elevation/south",
            ("heatsource/3/south/availability","availability",0),
            ("heatsource/3/availability","availability",0),
            "zonemaster/zonegroup6/heatsource3/stop",
            evtHS3RequestStop,
            ("zonegroup/6/heatsource","name", 'Idle'),
            ("zone17/heatsource","name", 'Idle'),
            Events.evtMinute1,
            evtHS3CommonDoStop,
            evtHS3SouthDoStop,
            "webbrick/903/DO/4",
            evtHS3Stopped,
            "heatsource/3/state",
            "heatsource/3/state",
            ]
        self.checkEvents(send, expect)
    def testHeatSourceGroundSource(self):
        """Scenario test: simple enable/run/stop cycle for the ground-source
        heat source (heatsource 2).

        The `expect` list is golden data; entries are event objects, topic
        strings, or (topic, attribute, value) tuples.
        """
        # test the heat source logic
        self._log.debug( "testHeatSourceGroundSource" )
        self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSource2, (heatSource2FileList,) )
        # the events we send: (event, repeat-count) pairs
        send = [ (Events.evtRuntime20,2),
                 (evtHS2Enable, 2),
                 (evtHS2RequestRun,1),
                 (Events.evtMinute1,6),
                 (Events.evtMinute1,6),
                 (evtHS2RequestStop,1),
                 (Events.evtMinute1,6)
               ]
        # the events that we expect to be logged.
        expect = [ Events.evtRuntime20,
                   ("heatsource/2/availability","availability",0),
                   evtHS2Enable,
                   ("heatsource/2/availability","enabled",1),
                   evtHS2RequestRun,
                   Events.evtMinute1,
                   evtHS2DoRun,
                   "webbrick/902/DO/4",
                   evtHS2Running,
                   "heatsource/2/state",
                   "heatsource/2/state",
                   Events.evtMinute1,
                   evtHS2DoRun,
                   "webbrick/902/DO/4",
                   evtHS2Running,
                   "heatsource/2/state",
                   "heatsource/2/state",
                   evtHS2RequestStop,
                   Events.evtMinute1,
                   evtHS2DoStop,
                   "webbrick/902/DO/4",
                   evtHS2Stopped,
                   "heatsource/2/state",
                   "heatsource/2/state"
                 ]
        self.checkEvents(send, expect)
def testHeatSourceGeneric_1(self):
# tests that the heatsource is loaded with defualt parameters
self._log.debug( "testHeatSourceGeneric_1" )
self.loadPrimitive( "PersistZonesAll", TestHeatingVentilationACConfigHeatSourceGeneric, (heatSourceGenericFileList,) )
# the events we send
send = [
(Events.evtMinute1,7), # startup
(Events.evtRuntime20,3),
(Events.evtMinute1,1),
(Events.evtMinute1,1),
(evtHS1RequestRun,1),
(evtHS2RequestRun,1),
(Events.evtMinute1,1),
(Events.evtMinute1,1),
(Events.evtMinute1,1),
(Events.evtMinute1,4),
(Events.evtMinute1,4),
(Events.evtMinute1,4),
(evtHS1RequestStop,1),
(Events.evtMinute1,1),
(Events.evtMinute1,1),
(Events.evtMinute1,1),
(Events.evtMinute1,4),
(Events.evtMinute1,1),
]
# the events that we expect to be logged.
expect = [
Events.evtMinute1,
evtHS1DoStop,
evtHS2DoStop,
"webbrick/91/DO/0",
"webbrick/92/DO/0",
evtHS1Stopped,
evtHS2Stopped,
Events.evtRuntime20,
("heatsource/1/availability","availability",2),
("heatsource/2/availability","availability",2),
Events.evtMinute1,
Events.evtMinute1,
evtHS1RequestRun,
evtHS2RequestRun,
Events.evtMinute1,
Events.evtMinute1,
Events.evtMinute1,
| |
description
self._description=[]
for rn,newcol in self.col:
if newcol.iFldType==_SQL_NUMERIC or newcol.iFldType==_SQL_DECIMAL:
dsize=newcol.iUnits1+newcol.iUnits2+1
isize=newcol.iUnits1+newcol.iUnits2
elif newcol.iFldType==_SQL_SMALLINT:
dsize=5
isize=2
elif newcol.iFldType==_SQL_INTEGER:
dsize=10
isize=4
elif newcol.iFldType==_SQL_REAL:
dsize=7
isize=4
elif newcol.iFldType==_SQL_FLOAT or newcol.iFldType==_SQL_DOUBLE:
dsize=15
isize=8
elif newcol.iFldType==_SQL_TYPE_DATE:
dsize=10
isize=4
elif newcol.iFldType==_SQL_TYPE_TIME:
dsize=8
if newcol.iUnits2>0:
dsize=9+newcol.iUnits2
isize=8
elif newcol.iFldType==_SQL_TYPE_TIMESTAMP:
dsize=19
if newcol.iUnits2>0:
dsize=20+newcol.iUnits2
isize=12
else:
dsize=newcol.iUnits1
isize=newcol.iUnits1
self._description.append((newcol.colName, newcol.iFldType,
dsize, isize, newcol.iUnits1, newcol.iUnits2, None))
#else no late result set: leave as was (i.e. don't zeroise self.description)
#else young server cannot handle this
if self.resultCode==_SQL_SUCCESS or self.resultCode==_SQL_SUCCESS_WITH_INFO:
#we SQLendTran now if in autocommit mode & if not select/result-set
if not self.resultSet and self._Con._Autocommit:
self._Con.commit()
elif self.resultCode==_SQL_NEED_DATA:
if self.prepared:
rn=self._Con.marshalBuffer.getSQLSMALLINT() #this is the parameter id that's missing
raise InterfaceError, str(_seMissingParameter)+' '+_seMissingParameterText
def execute(self, operation=None, parameters=None):
'''Execute the specified SQL operation, possibly opening a result set.
'''
self._doPrepare(operation)
self._doExecute(parameters)
return None
def executemany(self, operation=None, seq_of_parameters=None):
'''Repeatedly execute the specified SQL operation, once for each set of
parameters in the sequence.
This is more efficient than repeatedly calling the execute method.
'''
self._doPrepare(operation)
self.prepared=True
for parameters in seq_of_parameters:
if self.resultSet: #close any existing query result set on this cursor before we re-execute
self._resultSetClose()
last=self._doExecute(parameters)
return last
def callproc(self, procname, parameters=None):
'''Call the specified stored procedure with the parameters.
May return a result set accessible using the fetchXXX methods.
'''
return self.execute('CALL '+procname+replace(replace(str(parameters),'[','('),']',')'))
    def _setArraySize(self, arraySize=1):
        '''Tell the server how many rows to return per fetch.

        Marshals a SQLSetDescField call (SQL_DESC_ARRAY_SIZE on the
        application row descriptor) over the wire, waits for the matching
        response and raises on any protocol or server error.
        The field order below mirrors the server-side unmarshalling and
        must not be changed.
        '''
        self._Con.marshalBuffer.clearToSend()
        self._Con.marshalBuffer.putFunction(_SQL_API_SQLSETDESCFIELD)
        self._Con.marshalBuffer.putSQLHSTMT(self.serverStatementHandle)
        self._Con.marshalBuffer.putSQLSMALLINT(_SQL_ATTR_APP_ROW_DESC)
        self._Con.marshalBuffer.putSQLSMALLINT(0)
        self._Con.marshalBuffer.putSQLSMALLINT(_SQL_DESC_ARRAY_SIZE)
        self._Con.marshalBuffer.putSQLUINTEGER(arraySize)
        self._Con.marshalBuffer.putSQLINTEGER(0) #bufferLength = n/a here
        if self._Con.marshalBuffer.send()!=_ok:
            raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
        #Wait for response
        if self._Con.marshalBuffer.read()!=_ok:
            raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
        functionId=self._Con.marshalBuffer.getFunction()
        #response must echo the function we invoked
        if functionId!=_SQL_API_SQLSETDESCFIELD:
            raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
        self.resultCode=self._Con.marshalBuffer.getRETCODE()
        #if error, then get error details: local-number, default-text
        self.errCount=self._Con.marshalBuffer.getSQLINTEGER() #error count
        if self.resultCode==_SQL_ERROR:
            for err in range(self.errCount):
                self.resultErrCode=self._Con.marshalBuffer.getSQLINTEGER()
                self.resultErrText=self._Con.marshalBuffer.getpUCHAR_SWORD()
            raise DatabaseError, str(self.resultErrCode)+' '+self.resultErrText
        #record the latest value so _fetch can avoid redundant round-trips
        self.lastArraySize=arraySize
    def _fetch(self, arraySize=None):
        '''Fetch up to arraySize rows from the open result set.

        Issues SQLFetchScroll(SQL_FETCH_NEXT) to the server, then decodes
        the marshalled rows into Python values.

        Returns:
          - arraySize > 1: a list of row tuples (possibly empty);
          - otherwise: a single row tuple, or None when no more data.

        Raises InterfaceError when no result set is open, ConnectionError
        on wire failures and DatabaseError on server-reported errors.
        The get/put sequence mirrors the server protocol exactly - do not
        reorder.
        '''
        if self.resultSet:
            res=[]
            #only re-negotiate the row-array size when it actually changed
            if arraySize!=None and arraySize!=self.lastArraySize:
                self._setArraySize(arraySize)
            #call server fetchScroll
            self._Con.marshalBuffer.clearToSend()
            self._Con.marshalBuffer.putFunction(_SQL_API_SQLFETCHSCROLL)
            self._Con.marshalBuffer.putSQLHSTMT(self.serverStatementHandle)
            self._Con.marshalBuffer.putSQLSMALLINT(_SQL_FETCH_NEXT)
            self._Con.marshalBuffer.putSQLINTEGER(0)
            if self._Con.marshalBuffer.send()!=_ok:
                raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
            #Wait for response
            if self._Con.marshalBuffer.read()!=_ok:
                raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
            functionId=self._Con.marshalBuffer.getFunction()
            if functionId!=_SQL_API_SQLFETCHSCROLL:
                raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
            #resultCode comes later: first we retrieve any result data
            #Read row count
            rowCount=self._Con.marshalBuffer.getSQLUINTEGER()
            for row in range(rowCount):
                rowres=[]
                #Now get the col count & data for this row
                self.colCount=self._Con.marshalBuffer.getSQLINTEGER()
                for i in range(self.colCount):
                    rn=self._Con.marshalBuffer.getSQLSMALLINT()
                    if rn<=self.colCount:
                        #Get the null flag
                        tempNull=self._Con.marshalBuffer.getSQLSMALLINT()
                        if tempNull==_SQL_TRUE:
                            rowres.append(None)
                        else:
                            #Note: we only get length+data if not null
                            self.col[i].data=self._Con.marshalBuffer.getpDataSDWORD()
                            #Convert the raw data to Python data
                            if self.col[i].iFldType==_SQL_CHAR or self.col[i].iFldType==_SQL_VARCHAR:
                                rowres.append(self.col[i].data)
                            elif self.col[i].iFldType==_SQL_NUMERIC or self.col[i].iFldType==_SQL_DECIMAL:
                                #64-bit scaled integer; iUnits2 = number of decimal places
                                if have_fixedpoint:
                                    fp=str(struct.unpack('<q',self.col[i].data)[0])
                                    fp=fp[:len(fp)-self.col[i].iUnits2]+'.'+fp[len(fp)-self.col[i].iUnits2:]
                                    rowres.append(FixedPoint(fp,self.col[i].iUnits2))
                                else:
                                    rowres.append((struct.unpack('<q',self.col[i].data)[0]) / float(10**self.col[i].iUnits2)) #i.e. shift scale decimal places to the right
                            elif self.col[i].iFldType==_SQL_INTEGER:
                                rowres.append(struct.unpack('<i',self.col[i].data)[0])
                            elif self.col[i].iFldType==_SQL_SMALLINT:
                                rowres.append(struct.unpack('<h',self.col[i].data)[0])
                            elif self.col[i].iFldType==_SQL_FLOAT or self.col[i].iFldType==_SQL_REAL or self.col[i].iFldType==_SQL_DOUBLE:
                                rowres.append(struct.unpack('<d',self.col[i].data)[0])
                            elif self.col[i].iFldType==_SQL_TYPE_DATE:
                                p=struct.unpack(_sqlDate,self.col[i].data)
                                rowres.append(datetime.date(p[0],p[1],p[2]))
                            elif self.col[i].iFldType==_SQL_TYPE_TIME:
                                p=struct.unpack(_sqlTime,self.col[i].data)
                                rowres.append(datetime.time(p[0],p[1],int(p[2] / float(10**_TIME_MAX_SCALE)))) #todo Adjust the scale? p[3]
                            elif self.col[i].iFldType==_SQL_TYPE_TIMESTAMP:
                                p=struct.unpack(_sqlTimestamp,self.col[i].data)
                                rowres.append(datetime.datetime(p[0],p[1],p[2],p[3],p[4],int(p[5] / float(10**_TIME_MAX_SCALE)))) #todo Adjust the scale? p[6]
                            elif self.col[i].iFldType==_SQL_LONGVARCHAR or self.col[i].iFldType==_SQL_LONGVARBINARY:
                                rowres.append(self.col[i].data)
                            #todo SQL_INTERVAL etc.
                            else:
                                rowres.append('?') #todo use raw data or None instead?
                    else:
                        raise OperationalError, str(_seInvalidColumnIndex)+' '+_seInvalidColumnIndexText
                #get row status
                sqlRowStatus=self._Con.marshalBuffer.getSQLUSMALLINT()
                if sqlRowStatus==_SQL_ROW_NOROW:
                    break #-> no more data
                if arraySize>1:
                    res.append(tuple(rowres))
                else:
                    res=tuple(rowres)
            self.resultCode=self._Con.marshalBuffer.getRETCODE()
            if self.resultCode==_SQL_NO_DATA and arraySize==1:
                res=None #-> no more data
            #if error, then get error details: local-number, default-text
            self.errCount=self._Con.marshalBuffer.getSQLINTEGER() #error count
            if self.resultCode==_SQL_ERROR:
                for err in range(self.errCount):
                    self.resultErrCode=self._Con.marshalBuffer.getSQLINTEGER()
                    self.resultErrText=self._Con.marshalBuffer.getpUCHAR_SWORD()
                raise DatabaseError, str(self.resultErrCode)+' '+self.resultErrText
            return res
        else:
            raise InterfaceError, str(_seResultSetNotOpen)+' '+_seResultSetNotOpenText
    def fetchone(self):
        '''Fetch the next row from the cursor's result set.

        Returns a single row tuple, or None when the result set is
        exhausted (see _fetch with arraySize == 1).
        '''
        return self._fetch(1)
def fetchmany(self, size=None):
'''Fetch a number of rows from the cursor's result set.
'''
if size is None:
size=self.lastArraySize
return self._fetch(size)
def fetchall(self):
'''Fetch all the remaining rows from the cursor's result set.
'''
res=[]
while 1:
r=self.fetchone()
if r is None:
break
res.append(r)
return res
    # Read-only DB-API cursor attributes, exposed as properties.
    def _getaffectedRowCount(self):
        # backing getter for the `rowcount` property
        return self._affectedRowCount
    rowcount=property(_getaffectedRowCount, doc='Number of affected row(s)')
    def _getdescription(self):
        # backing getter for the `description` property (built in _doExecute)
        return self._description
    def _getconnection(self):
        # DB-API optional extension: warn on use, per PEP 249 recommendation
        warnings.warn('DB-API extension cursor.connection used')
        return self._Con
    description=property(_getdescription, doc='Column description(s)')
    def _getarraysize(self):
        # backing getter for `arraysize`; setter is _setArraySize (round-trips to server)
        return self.lastArraySize
    arraysize=property(_getarraysize, _setArraySize, doc='Number of rows to fetch with fetchmany()')
    def __iter__(self):
        '''DB-API extension: the cursor is its own row iterator.'''
        return self
def next(self):
x=self.fetchone()
if x is None:
raise StopIteration
return x
    def __del__(self):
        # best-effort cleanup: release the server-side statement on GC
        self.close()
#Column/Parameter classes -----------------------------------------------------
class _columnSQL:
    '''Per-column metadata and fetch buffer for a result set.

    Attributes are populated from the server's column descriptions and
    consumed by _fetch when decoding row data.
    '''
    def __init__(self):
        self.iFldNum=None       # column ordinal
        self.iFldType=None      # SQL type code (_SQL_CHAR, _SQL_INTEGER, ...)
        self.iUnits1=None       # precision / length
        self.iUnits2=None       # scale (decimal places) for NUMERIC/DECIMAL/TIME
        self.iNullOffset=None   # offset of the null indicator (wire layout)
        self.colName=None       # column name as reported by the server
        self.data=None          # raw bytes of the latest fetched value
class _paramSQL:
    '''Per-parameter metadata and marshalling buffer for a statement.

    NOTE(review): field meanings below are inferred from the matching
    _columnSQL fields and naming - confirm against the marshalling code.
    '''
    def __init__(self):
        self.iParamNum=None     # parameter ordinal
        self.colName=None       # parameter name, if any
        self.iDataType=None     # SQL data type code
        self.iArgType=None      # presumably _ptInput/_ptOutput - TODO confirm
        self.iUnits1=None       # precision / length
        self.iUnits2=None       # scale (decimal places)
        self.buffer=None        # marshalled value bytes
        self.bufferLen=None     # length of the marshalled value
        self.isNull=None        # null indicator
#Protocol / driver constants ---------------------------------------------------
_clientCLIversion =100 #client parameter passing version
#client flavours, as declared to the server during handshake
_CLI_ODBC=1
_CLI_JDBC=2
_CLI_DBEXPRESS=3
_CLI_ADO_NET=4
_CLI_PYTHON_DBAPI=5
_DriverName='ThinkSQL'
_DriverVersion='1.03.01'
__version__ = 1, 3, 1  #tuple (major, minor, patch); was written 1, 03, 01 (deprecated octal-style literals)
_DriverMajorVersion=1
_DriverMinorVersion=3
#generic local success/failure codes used by the marshalling layer
_ok=0
_fail=-1
_failString=''
_stateClosed=0
_stateOpen=1
#wire sizes of the primitive types (bytes, unless noted)
_sizeof_short=2
_sizeof_int=4
_sizeof_long=8
_sizeof_float=4
_sizeof_double=8
_sizeof_byte=8 #in bits
_sizeof_date=4
_sizeof_dateY=2
_sizeof_dateM=1
_sizeof_dateD=1
_sizeof_time=7
_sizeof_timeH=1
_sizeof_timeM=1
_sizeof_timeS=4
_sizeof_timeSc=1
_TIME_MAX_SCALE=6
_MAX_COL_PER_TABLE=300
_MAX_PARAM_PER_QUERY=300
_SQL_FALSE =0
_SQL_TRUE =1
#parameter types
_ptInput = 0
_ptOutput = 1
_EscapeChar='\\'
#ODBC-style return codes
_SQL_ERROR=-1
_SQL_ERROR2=_SQL_ERROR
_SQL_SUCCESS=0
_SQL_SUCCESS_WITH_INFO=1
_SQL_STILL_EXECUTING=2
_SQL_NEED_DATA=99
_SQL_NO_DATA=100
#ODBC SQL type codes
_SQL_CHAR =1
_SQL_NUMERIC =2
_SQL_DECIMAL =3
_SQL_INTEGER =4
_SQL_SMALLINT =5
_SQL_FLOAT =6
_SQL_REAL =7
_SQL_DOUBLE =8
_SQL_DATETIME =9
_SQL_INTERVAL =10
_SQL_VARCHAR =12
_SQL_TYPE_DATE =91
_SQL_TYPE_TIME =92
_SQL_TYPE_TIMESTAMP =93
_SQL_LONGVARCHAR =-1
#SQL_BINARY =-2
#SQL_VARBINARY =-3
_SQL_LONGVARBINARY =-4
#future use: SQL_BIGINT =-5
#ODBC function ids marshalled on the wire
_SQL_API_SQLCONNECT =7
_SQL_API_SQLDISCONNECT =9
_SQL_API_SQLEXECUTE =12
_SQL_API_SQLPREPARE =19
_SQL_API_SQLGETDATA =43
_SQL_API_SQLGETINFO =45
_SQL_API_SQLALLOCHANDLE =1001
_SQL_API_SQLCLOSECURSOR =1003
_SQL_API_SQLENDTRAN =1005
_SQL_API_SQLFREEHANDLE =1006
_SQL_API_SQLSETDESCFIELD =1017
_SQL_API_SQLFETCHSCROLL =1021
_SQL_ATTR_APP_ROW_DESC =10010
_SQL_DESC_ARRAY_SIZE =20
_SQL_DESC_DATA_POINTER =1010
_SQL_ROW_SUCCESS =0
_SQL_ROW_NOROW =3
_SQL_API_handshake =9999
_SQL_HANDLE_STMT =3
_SQL_ROLLBACK =1
_SQL_COMMIT =0
_SQL_FETCH_NEXT =1
_SQL_DBMS_NAME =17
_SQL_DBMS_VERSION =18
#Errors:
_seNotImplementedYet=500
_seNotImplementedYetText='Not implemented yet'
_seHandshakeFailed=1500
_seHandshakeFailedText='Handshake failed'
_seConnectionFailed=1502
_seConnectionFailedText='Communication link failure'
_seInvalidColumnIndex=1600
_seInvalidColumnIndexText='Invalid column index'
_seInvalidConversion=1602
_seInvalidConversionText='Invalid data conversion'
_seInvalidParameterIndex=1604
_seInvalidParameterIndexText='Invalid parameter index'
_seConnectionNotOpen=1700
_seConnectionNotOpenText='Connection not open'
_seResultSetNotOpen=1702
_seResultSetNotOpenText='No result set'
_seMissingParameter=1704
_seMissingParameterText='Not enough parameters passed'
#SQLSTATE codes
_ss08001='08001'
_ss08S01='08S01'
_ss42000='42000'
_ssHY000='HY000'
_ssHY010='HY010' #fix: was the typo 'HT010'; HY010 = ODBC "function sequence error"
_ssHYC00='HYC00' #optional feature not implemented yet
_ssNA='NA'
#struct formats for the wire encodings of temporal types
_sqlDate='<hbb' #year:smallint; month:shortint; day:shortint
_sqlTimezone='<bbbx' #sign:shortint (-1=negative, +1=positive, 0=no timezone); hour:shortint; minute:shortint
_sqlTime='<bbxxibxxx' # hour:shortint; minute:shortint; second:integer; (stored normalised as SSFFFFFF where number of Fs=TIME_MAX_SCALE) scale:shortint (used when formatting to dictate how many fractional places to display)
_sqlTimestamp='<hbb bbxxibxxx'
_TIME_MAX_SCALE=6 #duplicate of the earlier definition (same value); kept for safety
#Exception classes ------------------------------------------------------------
# DB-API 2.0 (PEP 249) exception hierarchy.  Rooted at StandardError as the
# specification recommends for Python 2 drivers.
class Error(StandardError):
    '''Top-level DB API exception; base class of all driver errors.'''
class Warning(StandardError):
    '''Top-level DB API warning (important non-fatal conditions).'''
class InterfaceError(Error):
    '''Interface error: a problem in the driver itself or its misuse.'''
class DatabaseError(Error):
    '''Database error: a problem reported by the database.'''
class DataError(DatabaseError):
    '''Data error.'''
class OperationalError(DatabaseError):
    '''Operational error.'''
class IntegrityError(DatabaseError):
    '''Integrity error.'''
class InternalError(DatabaseError):
    '''Internal error.'''
class ProgrammingError(DatabaseError):
    '''Programming error.'''
class NotSupportedError(DatabaseError):
    '''Not supported error.'''
#ThinkSQL specific errors
class HandshakeError(OperationalError):
    '''Handshake error: the initial protocol exchange failed.'''
class ConnectionError(OperationalError):
    '''Connection error: the communication link failed.'''
class DBAPITypeObject:
    '''PEP 249 type object: compares equal to any of its member type codes.

    This is the sample implementation from the DB-API specification; it
    lets `description[i][1] == STRING` style tests work.
    '''
    def __init__(self, name, *values):
        self.name = name
        self.values = values
    def __repr__(self):
        return self.name
    # Python 2 rich-comparison via __cmp__ (ignored by Python 3).
    def __cmp__(self, other):
        if other in self.values:
            return 0  # equal to any member code
        elif other < self.values:
            # per the PEP 249 sample: compares the code against the tuple
            return 1
        else:
            return -1
#Type mappings: PEP 249 singletons grouping the SQL type codes
BINARY = DBAPITypeObject('BINARY', _SQL_LONGVARBINARY)
DATETIME = DBAPITypeObject('DATETIME', _SQL_DATETIME, _SQL_INTERVAL,
                           _SQL_TYPE_DATE, _SQL_TYPE_TIME, _SQL_TYPE_TIMESTAMP
                          )
NUMBER = DBAPITypeObject('NUMBER', _SQL_NUMERIC, _SQL_DECIMAL, _SQL_INTEGER, _SQL_SMALLINT,
                         _SQL_FLOAT, _SQL_REAL, _SQL_DOUBLE)
STRING = DBAPITypeObject('STRING', _SQL_CHAR, _SQL_VARCHAR, _SQL_LONGVARCHAR)
#used by the PEP 249 *FromTicks constructors below
from time import localtime
def Date(year, month, day):
    '''PEP 249 date constructor: return a "YYYY/MM/DD" literal.'''
    return '/'.join(('%04d' % year, '%02d' % month, '%02d' % day))
def Time(hour, minute, second):
    '''PEP 249 time constructor: return an "HH:MM:SS" literal.'''
    return ':'.join('%02d' % part for part in (hour, minute, second))
def Timestamp(year, month, day, hour, minute, second):
    '''PEP 249 timestamp constructor: "YYYY/MM/DD HH:MM:SS" literal.'''
    return ' '.join((Date(year, month, day), Time(hour, minute, second)))
def DateFromTicks(ticks):
    '''PEP 249: build a Date literal from seconds-since-epoch (local time).'''
    when = localtime(ticks)
    return Date(when[0], when[1], when[2])
def TimeFromTicks(ticks):
    '''PEP 249: build a Time literal from seconds-since-epoch (local time).'''
    when = localtime(ticks)
    return Time(when[3], when[4], when[5])
def TimestampFromTicks(ticks):
    '''PEP 249: build a Timestamp literal from seconds-since-epoch (local time).'''
    when = localtime(ticks)
    return Timestamp(when[0], when[1], when[2], when[3], when[4], when[5])
class BinaryString:
    '''Thin wrapper marking a string as binary data for parameter binding.'''
    def __init__(self,s):
        self.value=s  # the wrapped raw string
    def __str__(self):
        return self.value
def Binary(string):
    '''PEP 249 binary constructor: wrap a string as binary data.'''
    return BinaryString(string) #todo encode/make binary
#MarshalBuffer class ----------------------------------------------------------
class marshalBuffer:
    '''Buffered TCP marshalling layer for the ThinkSQL wire protocol.'''
    marshalBufSize=16384   # maximum bytes buffered before/after a send
    connectionTimeout=30   # socket timeout in seconds
    def __init__(self, host, port):
        '''Open a TCP connection to the server and initialise the buffer.

        Raises ConnectionError if the connect fails.
        '''
        socket.setdefaulttimeout(self.__class__.connectionTimeout)
        self.clientSocket=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.buffer=cStringIO.StringIO()  # Python 2 in-memory byte buffer
        try:
            self.clientSocket.connect((host, port))
        except:
            raise ConnectionError, str(_seConnectionFailed)+' '+_seConnectionFailedText
        self.clearToSend() #initially clear buffer
        self.bufferLen=0   # bytes currently held in the buffer
        self.bufferPtr=0   # read position within the buffer
    def __del__(self):
        # best-effort: close the socket when the buffer is garbage-collected
        self.close()
def clearToSend(self):
#Clear before send
self.bufferLen=0
self.bufferPtr=0
self.buffer.truncate(0)
self.buffer.seek(0)
return _ok
def clearToReceive(self):
#Clear before receive
self.bufferPtr=0
self.bufferLen=0
self.buffer.truncate(0)
self.buffer.seek(0)
return _ok
def sendHandshake(self):
#Send raw handshake
try:
s=struct.pack('<h',_SQL_API_handshake)
self.clientSocket.send(s)
return _ok
except:
return _fail
    def send(self):
        '''Send a response and then clear the buffer.

        Wire format: a 4-byte little-endian total length (inclusive of the
        length field itself), followed by the buffered payload.  Loops on
        socket.send because it may transmit fewer bytes than requested.
        Returns _ok on success, _fail on any error.
        '''
        try:
            i=(self.bufferLen+_sizeof_int)  # total message length, incl. header
            #todo assert i=buffer size
            s=struct.pack('<i', i)
            self.clientSocket.send(s)
            self.buffer.flush()
            sent=0
            # i-4 = payload bytes (header already sent above)
            while sent<(i-4):
                sent=sent+self.clientSocket.send(self.buffer.getvalue()[sent:])
        except:
            return _fail
        try:
            self.clearToSend() #clear send buffer, once sent ok
            return _ok
        except:
            return _fail
    def read(self):
        '''Wait for a response (clear buffer before receiving).

        Reads the 4-byte little-endian length header, then loops receiving
        until the full payload is in the buffer.  Returns _ok, or _fail on
        overflow.  NOTE(review): the single recv for the header assumes all
        4 bytes arrive in one call - a short read here would break the
        unpack; confirm against the server's send behaviour.
        '''
        self.clearToReceive()
        #try:
        s=self.clientSocket.recv(_sizeof_int)
        i=struct.unpack('<i', s)
        dataCount=i[0]
        dataCount=(dataCount-_sizeof_int) #inclusive
        if self.bufferLen+dataCount>self.__class__.marshalBufSize:
            return _fail #overflow
        #Read the block data into the marshal buffer
        while self.buffer.tell()<dataCount:
            self.buffer.write(self.clientSocket.recv(dataCount-self.buffer.tell()))
        self.bufferLen=(self.bufferLen+dataCount)
        self.buffer.seek(0) #reset the get pointer
        return _ok
        #except:
        #    return _fail
    def putSQLUSMALLINT(self, usi):
        '''Append an unsigned 16-bit value; auto-flushes a full buffer.'''
        if self.bufferLen+_sizeof_short>self.__class__.marshalBufSize:
            # buffer full: flush to the server, then re-check capacity
            if self.send()!=_ok:
                return _fail #buffer overflow
            if (self.bufferLen+_sizeof_short>self.__class__.marshalBufSize):
                return _fail
        s=struct.pack('<H', usi)
        self.buffer.write(s)
        self.bufferLen+=_sizeof_short
        return _ok
    def getSQLUSMALLINT(self):
        '''Read an unsigned 16-bit value; auto-refills an exhausted buffer.'''
        if self.bufferPtr+_sizeof_short>self.bufferLen:
            if self.bufferPtr==self.bufferLen:
                self.read()  # buffer exhausted exactly: fetch the next message
            else:
                return _fail
        s=self.buffer.read(_sizeof_short)
        self.bufferPtr+=_sizeof_short
        return struct.unpack('<H', s)[0]
    def getSQLSMALLINT(self):
        '''Read a signed 16-bit value; auto-refills an exhausted buffer.'''
        if self.bufferPtr+_sizeof_short>self.bufferLen:
            if self.bufferPtr==self.bufferLen:
                self.read()
            else:
                return _fail
        s=self.buffer.read(_sizeof_short)
        self.bufferPtr+=_sizeof_short
        return struct.unpack('<h', s)[0]
    def putSQLINTEGER(self, i):
        '''Append a signed 32-bit value; auto-flushes a full buffer.'''
        if self.bufferLen+_sizeof_int>self.__class__.marshalBufSize:
            if self.send()!=_ok:
                return _fail #buffer overflow
            if (self.bufferLen+_sizeof_int>self.__class__.marshalBufSize):
                return _fail
        s=struct.pack('<i', i)
        self.buffer.write(s)
        self.bufferLen+=_sizeof_int
        return _ok
    def getSQLINTEGER(self):
        '''Read a signed 32-bit value; auto-refills an exhausted buffer.'''
        if self.bufferPtr+_sizeof_int>self.bufferLen:
            if self.bufferPtr==self.bufferLen:
                self.read()
            else:
                return _fail
        s=self.buffer.read(_sizeof_int)
        self.bufferPtr+=_sizeof_int
        return struct.unpack('<i', s)[0]
    def putpUCHAR_SWORD(self,ss):
        '''Append a string with a 16-bit length prefix; auto-flushes a full buffer.'''
        if self.bufferLen+_sizeof_short+len(ss)>self.__class__.marshalBufSize:
            if self.send()!=_ok:
                return _fail #buffer overflow
            if (self.bufferLen+_sizeof_short+len(ss)>self.__class__.marshalBufSize):
                return _fail
        s=struct.pack('<h', len(ss))
        self.buffer.write(s)
        self.bufferLen+=_sizeof_short
        self.buffer.write(ss)
        self.bufferLen+=len(ss)
        return _ok
    def getpUCHAR_SWORD(self):
        '''Read a 16-bit-length-prefixed string from the buffer.

        NOTE(review): the put side packs the length as signed ('<h') but
        this reads it unsigned ('<H') - benign for lengths < 32768, but
        asymmetric.  Also, unlike the fixed-size getters, no bounds check
        is made for the string payload itself.
        '''
        if self.bufferPtr+_sizeof_short>self.bufferLen:
            if self.bufferPtr==self.bufferLen:
                self.read()
            else:
                return _fail
        s=self.buffer.read(_sizeof_short)
        self.bufferPtr+=_sizeof_short
        si=struct.unpack('<H', s)[0]
        self.bufferPtr+=si
        return self.buffer.read(si)
    def putpUCHAR_SDWORD(self, ss):
        '''Append a string with a 32-bit length prefix; auto-flushes a full buffer.'''
        if self.bufferLen+_sizeof_int+len(ss)>self.__class__.marshalBufSize:
            if self.send()!=_ok:
                return _fail #buffer overflow
            if (self.bufferLen+_sizeof_int+len(ss)>self.__class__.marshalBufSize):
                return _fail
        s=struct.pack('<i', len(ss))
        self.buffer.write(s)
        self.bufferLen+=_sizeof_int
        self.buffer.write(ss)
        self.bufferLen+=len(ss)
        return _ok
def putpDataSDWORD(self, ss):
if self.bufferLen>0 and self.bufferLen+_sizeof_int+len(ss)>self.__class__.marshalBufSize:
if self.send()!=_ok:
return _fail #buffer overflow
ui=len(ss)
s=struct.pack('<i', ui)
self.buffer.write(s)
self.bufferLen+=_sizeof_int
| |
import numpy as np
import os
import h5py
import json
from .utils import asymptotic_error_quantile, bootstrap,\
dict_to_margins, margins_to_dict, matrix_to_list
from sklearn.utils import check_random_state
from scipy.stats import gaussian_kde, norm
from .vinecopula import Conversion
class ListDependenceResult(list):
"""The result from the Conservative Estimation.
The results gather in the list must have the same configurations: the same
copula families, vine structure, grid.
Parameters
----------
margins : list of OpenTURNS distributions
The OT distributions.
families : array
The matrix array of the families.
vine_structure : array
The matrix array of the R-vine. If None, it is considered as Gaussian.
bounds_tau : array,
The matrix array of the bounds for the dependence parameters.
dep_param : array
The dependence parameters.
input_sample : array
The input sample.
output_sample : array
The output sample.
q_func : callable or None
The output quantity of intereset function.
run_type : str
The type of estimation: independence, grid-search, iterative, ...
grid_type : str
The type of grid use if it was a grid search.
random_state : int, RandomState or None,
The random state of the computation.
"""
def __init__(self,
             margins=None,
             families=None,
             vine_structure=None,
             bounds_tau=None,
             fixed_params=None,
             dep_params=None,
             input_samples=None,
             output_samples=None,
             q_func=None,
             run_type=None,
             n_evals=None,
             grid_type=None,
             random_state=None,
             **kwargs):
    """Build the list of DependenceResult instances.

    See the class docstring for the parameter descriptions. Depending on
    `run_type`, one DependenceResult per dependence parameter is appended
    ('grid-search'/'iterative'), or a single result ('independence',
    'incomplete').

    NOTE(review): the `n_evals` parameter is never read here (the class
    exposes an `n_evals` property instead) — confirm it can be dropped.
    """
    self.margins = margins
    self.families = families
    self.vine_structure = vine_structure
    self.bounds_tau = bounds_tau
    self.fixed_params = fixed_params
    self._q_func = q_func
    self.run_type = run_type
    self.grid_type = grid_type
    # NOTE(review): raises TypeError when margins is None (the declared
    # default) — presumably callers always pass margins; confirm.
    self.input_dim = len(margins)
    # Number of pairs of a d-dimensional problem: d*(d-1)/2.
    self.corr_dim = int(self.input_dim * (self.input_dim - 1) / 2)
    # Optional settings only forwarded through **kwargs.
    self.grid_filename = None
    if "grid_filename" in kwargs:
        self.grid_filename = kwargs["grid_filename"]
    self.lhs_grid_criterion = None
    if "lhs_grid_criterion" in kwargs:
        self.lhs_grid_criterion = kwargs["lhs_grid_criterion"]
    self.output_id = 0
    if "output_id" in kwargs:
        self.output_id = kwargs["output_id"]
    if run_type in ['grid-search', 'iterative']:
        assert output_samples is not None, \
            "Add some output sample if you're adding dependence parameters"
        # One DependenceResult per dependence parameter.
        for k, dep_param in enumerate(dep_params):
            input_sample = None if input_samples is None else input_samples[k]
            output_sample = output_samples[k]
            result = DependenceResult(margins=margins,
                                      families=families,
                                      vine_structure=vine_structure,
                                      fixed_params=fixed_params,
                                      dep_param=dep_param,
                                      input_sample=input_sample,
                                      output_sample=output_sample,
                                      q_func=q_func,
                                      random_state=random_state,
                                      output_id=self.output_id)
            self.append(result)
        # Infer the output dimension from the last sample of the loop.
        # NOTE(review): raises NameError when dep_params is empty — confirm
        # dep_params is guaranteed non-empty for these run types.
        if output_sample.shape[0] == output_sample.size:
            self.output_dim = 1
        else:
            self.output_dim = output_sample.shape[1]
    elif run_type == 'independence':
        # There is data and we suppose it's at independence or a fixed params
        result = DependenceResult(margins=margins,
                                  families=families,
                                  vine_structure=vine_structure,
                                  fixed_params=fixed_params,
                                  dep_param=0,
                                  input_sample=input_samples,
                                  output_sample=output_samples[0],
                                  q_func=q_func,
                                  random_state=random_state,
                                  output_id=self.output_id)
        # Dependence-related settings are meaningless at independence;
        # they are zeroed out rather than kept.
        self.families = 0
        self.vine_structure = 0
        self.bounds_tau = 0
        self.fixed_params = 0
        self.grid_type = 0
        self.append(result)
        self.output_dim = result.output_dim
    elif run_type == 'incomplete':
        # There is data and we suppose it's at independence or a fixed params
        result = DependenceResult(margins=margins,
                                  families=families,
                                  vine_structure=vine_structure,
                                  fixed_params=fixed_params,
                                  dep_param=0,
                                  input_sample=input_samples,
                                  output_sample=output_samples[0],
                                  q_func=q_func,
                                  random_state=random_state,
                                  output_id=self.output_id)
        self.grid_type = 0
        self.append(result)
        self.output_dim = result.output_dim
    # Normalized RandomState used for the bootstrap computations.
    self.rng = check_random_state(random_state)
    self._bootstrap_samples = None
def __add__(self, results):
    """Concatenate two result lists that share the same configuration.

    Parameters
    ----------
    results : ListDependenceResult
        Another result list with identical margins, families, structure,
        bounds, fixed params, dependence parameters and run settings.

    Returns
    -------
    ListDependenceResult
        A new list whose samples are the row-wise concatenation of both.
    """
    if self.n_params > 0:
        # Both operands must come from the exact same run configuration.
        np.testing.assert_equal(
            self.margins, results.margins, err_msg="Same margins")
        np.testing.assert_array_equal(
            self.families, results.families, err_msg="Different copula families")
        np.testing.assert_array_equal(
            self.vine_structure, results.vine_structure, err_msg="Different copula structures")
        np.testing.assert_array_equal(
            self.bounds_tau, results.bounds_tau, err_msg="Different bounds on Tau")
        np.testing.assert_array_equal(
            self.fixed_params, results.fixed_params, err_msg="Different fixed params")
        np.testing.assert_allclose(
            self.dep_params, results.dep_params, err_msg="Different dependence parameters")
        assert self.run_type == results.run_type, "Different run type"
        assert self.grid_type == results.grid_type, "Different grid type"
        # Bug fix: these two messages were copy-pasted "Different grid type".
        assert self.grid_filename == results.grid_filename, "Different grid filename"
        assert self.lhs_grid_criterion == results.lhs_grid_criterion, "Different LHS grid criterion"
    input_samples = []
    output_samples = []
    # Concatenate the samples of each pair of matching results.
    for res1, res2 in zip(self, results):
        if res1.input_sample is not None:
            input_samples.append(
                np.r_[res1.input_sample, res2.input_sample])
        output_samples.append(
            np.r_[res1.output_sample, res2.output_sample])
    if len(input_samples) == 0:
        input_samples = None
    new_results = ListDependenceResult(
        margins=self.margins,
        families=self.families,
        vine_structure=self.vine_structure,
        bounds_tau=self.bounds_tau,
        fixed_params=self.fixed_params,
        dep_params=self.dep_params,
        input_samples=input_samples,
        output_samples=output_samples,
        grid_type=self.grid_type,
        q_func=self.q_func,
        run_type=self.run_type,
        grid_filename=self.grid_filename,
        lhs_grid_criterion=self.lhs_grid_criterion,
        output_id=self.output_id)
    return new_results
def extend(self, value):
    """Extend the list with the results of another ListDependenceResult.

    NOTE(review): this also overwrites ``self.families`` with
    ``value.families`` without checking that the two configurations
    match (unlike ``__add__``) — confirm this is intended.
    """
    super(ListDependenceResult, self).extend(value)
    self.families = value.families
@property
def output_id(self):
    """Index of the output of interest."""
    return self._output_id

@output_id.setter
def output_id(self, output_id):
    # Propagate the new id to every stored result before recording it.
    for res in self:
        res.output_id = output_id
    self._output_id = output_id
@property
def q_func(self):
    """Callable computing the output quantity of interest."""
    return self._q_func

@q_func.setter
def q_func(self, q_func):
    assert callable(q_func), "Function must be callable"
    if self.n_params != 0:
        # Forward the quantity function to every stored result.
        for res in self:
            res.q_func = q_func
    else:
        print("There is no data...")
    self._q_func = q_func
@property
def pairs(self):
    """Indices of the dependent pairs, taken from the family matrix."""
    if self.families is not None:
        return matrix_to_list(self.families)[1]
    print('Family matrix was not defined')
@property
def dep_params(self):
    """Dependence parameter of every stored result, as an array."""
    if self.n_params != 0:
        return np.asarray([res.dep_param for res in self])
    print("There is no data...")
@property
def kendalls(self):
    """Kendall's tau dependence measure of every stored result."""
    if self.n_params != 0:
        return np.asarray([res.kendall_tau for res in self])
    print("There is no data...")
@property
def n_pairs(self):
    """Number of dependent pairs (entries with family > 0)."""
    return 0 if self.n_params == 0 else (self.families > 0).sum()
@property
def output_samples(self):
    """Output sample of each stored result."""
    if self.n_params != 0:
        return [res.output_sample for res in self]
    print("There is no data...")
@property
def input_samples(self):
    """Input sample of each stored result."""
    if self.n_params != 0:
        return [res.input_sample for res in self]
    print("There is no data...")
@property
def n_input_sample(self):
    """Sample size for each dependence parameter (0 when empty).

    NOTE: assumes every result holds the same number of observations;
    only the first result is inspected.
    """
    return 0 if self.n_params == 0 else self[0].n_sample
@property
def n_evals(self):
    """Total number of model evaluations across all parameters."""
    return self.n_input_sample * self.n_params
@property
def n_params(self):
    """The number of dependence parameters.

    Simply the list length: one stored result per dependence parameter.
    """
    return len(self)
@property
def quantities(self):
    """Quantity value of every stored result, as an array."""
    if self.n_params != 0:
        return np.asarray([res.quantity for res in self])
    print("There is no data...")
@property
def min_result(self):
    """The result whose output quantity is the smallest."""
    if self.n_params != 0:
        return self[self.quantities.argmin()]
    print("There is no data...")
@property
def min_quantity(self):
    """Smallest output quantity over all dependence parameters."""
    if self.n_params != 0:
        return self.quantities.min()
    print("There is no data...")
@property
def full_dep_params(self):
    """Dependence parameters including the fixed-parameter columns."""
    if self.n_params != 0:
        return np.asarray([res.full_dep_params for res in self])
    print("There is no data...")
@property
def bootstrap_samples(self):
    """Stacked bootstrap samples of all the dependence parameters.

    Raises AttributeError when any result lacks a computed bootstrap.
    """
    samples = [res._bootstrap_sample for res in self]
    if any(boot is None for boot in samples):
        raise AttributeError('The boostrap must be computed first')
    return np.asarray(samples)
def compute_bootstraps(self, n_bootstrap=1000, inplace=True):
    """Compute the quantity bootstrap of every element of the list.

    When `inplace` is False, the stacked bootstrap samples are returned.
    """
    if self.n_params != 0:
        for res in self:
            res.compute_bootstrap(n_bootstrap)
        if not inplace:
            return self.bootstrap_samples
    else:
        print("There is no data...")
def to_hdf(self, path_or_buf, input_names=[], output_names=[], verbose=False, with_input_sample=True):
"""Write the contained data to an HDF5 file using HDFStore.
Parameters
----------
path_or_buf : the path (string) or HDFStore object
The path of the file or an hdf instance.
input_names : list of strings, optional
The name of the inputs variables.
output_names : list of strings, optional
The name of the outputs.
"""
filename, extension = os.path.splitext(path_or_buf)
dirname = os.path.dirname(path_or_buf)
if not os.path.exists(dirname):
os.makedirs(dirname)
assert extension in ['.hdf', '.hdf5'], "File extension should be hdf"
# List of input variable names
if input_names:
assert len(input_names) == self.input_dim, \
AttributeError("Dimension problem for input_names")
else:
for i in range(self.input_dim):
input_names.append("x_%d" % (i + 1))
# List of output variable names
if output_names:
assert len(output_names) == self.output_dim, \
AttributeError("Dimension problem for output_names")
else:
for i in range(self.output_dim):
output_names.append("y_%d" % (i + 1))
margin_dict = margins_to_dict(self.margins)
filename_exists = True
k = 0
while filename_exists:
# If the file has the same run configuration
try:
with h5py.File(path_or_buf, 'a') as hdf_store:
# If the file already exists and already has data
if hdf_store.attrs.keys():
# Check the attributes of the file, if it already exists
np.testing.assert_allclose(
hdf_store['dependence_params'].value, self.dep_params, err_msg="Different dependence parameters")
assert hdf_store.attrs['Input Dimension'] == self.input_dim, "Different input dimension"
assert hdf_store.attrs['Output Dimension'] == self.output_dim, "Different output dimension"
assert hdf_store.attrs['Run Type'] == self.run_type, "Different run type"
np.testing.assert_array_equal(
hdf_store.attrs['Copula Families'], self.families, err_msg="Different copula families")
if 'Fixed Parameters' in hdf_store.attrs.keys():
np.testing.assert_array_equal(
hdf_store.attrs['Fixed Parameters'], self.fixed_params, err_msg="Different fixed copulas")
elif self._fixed_pairs:
# Save only if there is no fixed params
raise ValueError(
'It should not have constraints to be in the same output file.')
if 'Bounds Tau' in hdf_store.attrs.keys():
np.testing.assert_array_equal(
hdf_store.attrs['Bounds Tau'], self.bounds_tau, err_msg="Different bounds on Tau")
elif self._fixed_pairs:
raise ValueError(
'It should not have constraints to be in the same output file.')
np.testing.assert_array_equal(
hdf_store.attrs['Copula Structure'], self.vine_structure, err_msg="Different vine structures")
np.testing.assert_array_equal(
hdf_store.attrs['Input Names'], input_names, err_msg="Different Input Names")
np.testing.assert_array_equal(
hdf_store.attrs['Output Names'], output_names, err_msg="Different output Names")
loaded_margin_dict = json.loads(
hdf_store.attrs['Margins'])
assert all(loaded_margin_dict[str(
k)] == margin_dict[k] for k in margin_dict), "Not the same dictionary"
if self.run_type | |
# ==============================================================================
# This file is part of the SPNC project under the Apache License v2.0 by the
# Embedded Systems and Applications Group, TU Darmstadt.
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
# SPDX-License-Identifier: Apache-2.0
# ==============================================================================
import logging
import os
import numpy as np
import capnp
from spn.algorithms.Validity import is_valid
from spn.structure.Base import Product, Sum, rebuild_scopes_bottom_up, assign_ids, get_number_of_nodes
from spn.structure.StatisticalTypes import Type, MetaType
from spn.structure.leaves.histogram.Histograms import Histogram
from spn.structure.leaves.parametric.Parametric import Gaussian, Categorical
from xspn.structure.Model import SPNModel
from xspn.structure.Query import Query, JointProbability, ErrorModel, ErrorKind
# Magic import making the schema defined in the schema language available
from xspn.serialization.binary.capnproto import spflow_capnp
logger = logging.getLogger(__name__)
metaType2Enum = {MetaType.REAL : "real", MetaType.BINARY : "binary", MetaType.DISCRETE : "discrete"}
enum2MetaType = {v : k for k, v in metaType2Enum.items()}
type2Enum = {Type.REAL : "real",
Type.INTERVAL : "interval",
Type.POSITIVE : "positive",
Type.CATEGORICAL : "categorical",
Type.ORDINAL : "ordinal",
Type.COUNT : "count",
Type.BINARY : "binary"}
enum2Type = {v : k for k, v in type2Enum.items()}
class ListHandler:
    """Sequential cursor over a pre-allocated list of messages."""

    def __init__(self, list):
        self._list = list
        self._index = 0

    def getElement(self):
        """Return the element at the cursor and advance by one."""
        element = self._list[self._index]
        self._index += 1
        return element
class BinarySerializer:
    """Interface to serialize SPNs from SPFlow into an efficient binary format."""

    def __init__(self, fileName, bufferSize = 10 * (2**10), clearFile = True):
        """Initialize the serializer.

        Keyword arguments:
        fileName -- The name of the output file.
        bufferSize -- Buffer size (in KiB) used during writing.
        clearFile -- If set to True (default), completely erase the file before writing.
        """
        # IDs already handed out, kept across calls so node IDs stay
        # unique for the whole file.
        self.assignedIDs = {}
        self.fileName = fileName
        self.bufferSize = bufferSize
        if clearFile:
            # Clear the content of the file if not instructed otherwise.
            open(fileName, "w").close()

    def serialize_to_file(self, content):
        """Append `content` (an SPNModel or a Query) to the output file."""
        # Buffering write; the buffer size is given in KiB, scale to bytes.
        with open(self.fileName, "a+b", buffering=self.bufferSize*(2**10)) as outFile:
            header = spflow_capnp.Header.new_message()
            if isinstance(content, SPNModel):
                header.model = self._serialize_model(content)
            elif isinstance(content, Query):
                # Bug fix: removed a dead `serializer = self._serialize_query`
                # assignment that was never used.
                header.query = self._serialize_query(content)
            else:
                raise NotImplementedError(f"No serialization defined for content {content} of type {type(content)}")
            header.write(outFile)

    def _serialize_query(self, query):
        """Build the capnp Query message for `query`."""
        query_msg = spflow_capnp.Query.new_message()
        query_msg.batchSize = query.batchSize
        if query.errorModel.kind is ErrorKind.ABSOLUTE:
            query_msg.errorKind = "absolute"
        elif query.errorModel.kind is ErrorKind.RELATIVE:
            query_msg.errorKind = "relative"
        else:
            raise NotImplementedError(f"No serialization defined for error kind {query.errorModel.kind}")
        query_msg.maxError = query.errorModel.error
        if isinstance(query, JointProbability):
            query_msg.joint = self._serialize_joint(query)
        else:
            raise NotImplementedError(f"No serialization defined for query {query} of type {type(query)}")
        return query_msg

    def _serialize_joint(self, joint):
        """Build the capnp JointProbability message for `joint`."""
        joint_msg = spflow_capnp.JointProbability.new_message()
        joint_msg.model = self._serialize_model(joint.graph)
        joint_msg.supportMarginal = joint.supportsMarginal()
        return joint_msg

    def _serialize_model(self, model):
        """Build the capnp Model message: metadata plus all graph nodes."""
        msg = spflow_capnp.Model.new_message()
        assert is_valid(model.root), "SPN invalid before serialization"
        # Assign (new) IDs to the nodes. Keep track of already assigned
        # IDs, so the IDs are unique for the whole file.
        assign_ids(model.root, self.assignedIDs)
        # Rebuild scopes bottom-up
        rebuild_scopes_bottom_up(model.root)
        msg.rootNode = model.root.id
        msg.numFeatures = len(model.root.scope)
        msg.featureType = model.featureType
        scope = msg.init("scope", len(model.root.scope))
        for i, v in enumerate(model.root.scope):
            scope[i] = self._unwrap_value(v)
        # An unnamed model is encoded with the empty string.
        name = ""
        if model.name is not None:
            name = model.name
        msg.name = name
        # Pre-allocate one message slot per node; the ListHandler hands
        # the slots out sequentially during the graph traversal.
        numNodes = get_number_of_nodes(model.root)
        nodes = msg.init("nodes", numNodes)
        nodeList = ListHandler(nodes)
        self._serialize_graph([model.root], nodeList)
        return msg

    def _serialize_graph(self, rootNodes, nodeList):
        """Serialize SPN graphs to binary format. SPN graphs are given by their root node."""
        numNodes = 0
        for spn in rootNodes:
            visited = set()
            self._binary_serialize(spn, True, visited, nodeList)
            numNodes += len(visited)
        print(f"INFO: Serialized {numNodes} nodes to {self.fileName}")

    def _binary_serialize(self, node, is_rootNode, visited_nodes, nodeList):
        """Dispatch on the node type; children are serialized before the node.

        Bug fix: the `_serialize_*` helpers return None, and the old code
        rebound `nodeList` to that None. Callers ignored the return value,
        so the rebinding is simply dropped and `nodeList` is returned intact.
        """
        if node.id not in visited_nodes:
            if isinstance(node, Product):
                self._serialize_product(node, is_rootNode, visited_nodes, nodeList)
            elif isinstance(node, Sum):
                self._serialize_sum(node, is_rootNode, visited_nodes, nodeList)
            elif isinstance(node, Histogram):
                self._serialize_histogram(node, is_rootNode, visited_nodes, nodeList)
            elif isinstance(node, Gaussian):
                self._serialize_gaussian(node, is_rootNode, visited_nodes, nodeList)
            elif isinstance(node, Categorical):
                self._serialize_categorical(node, is_rootNode, visited_nodes, nodeList)
            else:
                raise NotImplementedError(f"No serialization defined for node {node} of type {type(node)}")
            visited_nodes.add(node.id)
        return nodeList

    def _serialize_product(self, product, is_rootNode, visited_nodes, nodeList):
        """Serialize a product node and (recursively) its children."""
        # Serialize child nodes before node itself
        for c in product.children:
            self._binary_serialize(c, False, visited_nodes, nodeList)
        # Construct inner product node message.
        prod_msg = spflow_capnp.ProductNode.new_message()
        children = prod_msg.init("children", len(product.children))
        for i, child in enumerate(product.children):
            children[i] = child.id
        # Construct surrounding node message
        node = nodeList.getElement()
        node.id = product.id
        node.product = prod_msg
        node.rootNode = is_rootNode

    def _serialize_sum(self, sum, is_rootNode, visited_nodes, nodeList):
        """Serialize a weighted sum node and (recursively) its children."""
        # Serialize child nodes before node itself
        for c in sum.children:
            self._binary_serialize(c, False, visited_nodes, nodeList)
        # Construct inner sum node message
        sum_msg = spflow_capnp.SumNode.new_message()
        children = sum_msg.init("children", len(sum.children))
        for i, child in enumerate(sum.children):
            children[i] = child.id
        weights = sum_msg.init("weights", len(sum.weights))
        for i, w in enumerate(sum.weights):
            weights[i] = BinarySerializer._unwrap_value(w)
        # Construct surrounding node message
        node = nodeList.getElement()
        node.id = sum.id
        node.sum = sum_msg
        node.rootNode = is_rootNode

    def _serialize_histogram(self, hist, is_rootNode, visited_nodes, nodeList):
        """Serialize a univariate histogram leaf."""
        # Construct inner histogram leaf message.
        hist_msg = spflow_capnp.HistogramLeaf.new_message()
        breaks = hist_msg.init("breaks", len(hist.breaks))
        for i, b in enumerate(hist.breaks):
            breaks[i] = int(b)
        densities = hist_msg.init("densities", len(hist.densities))
        for i, d in enumerate(hist.densities):
            densities[i] = BinarySerializer._unwrap_value(d)
        reprPoints = hist_msg.init("binReprPoints", len(hist.bin_repr_points))
        for i, r in enumerate(hist.bin_repr_points):
            reprPoints[i] = BinarySerializer._unwrap_value(r)
        hist_msg.type = type2Enum.get(hist.type)
        hist_msg.metaType = metaType2Enum.get(hist.meta_type)
        # Check that scope is defined over a single variable
        assert len(hist.scope) == 1, "Expecting Histogram to be univariate"
        hist_msg.scope = BinarySerializer._unwrap_value(hist.scope[0])
        # Construct surrounding node message.
        node = nodeList.getElement()
        node.hist = hist_msg
        node.rootNode = is_rootNode
        node.id = hist.id

    def _serialize_gaussian(self, gauss, is_rootNode, visited_nodes, nodeList):
        """Serialize a univariate Gaussian leaf."""
        # Construct inner Gaussian leaf message
        gauss_msg = spflow_capnp.GaussianLeaf.new_message()
        gauss_msg.mean = BinarySerializer._unwrap_value(gauss.mean)
        # SPFlow spells the attribute `stdev`; the schema field is `stddev`.
        gauss_msg.stddev = BinarySerializer._unwrap_value(gauss.stdev)
        # Check that scope is defined over a single variable
        assert len(gauss.scope) == 1, "Expecting Gauss to be univariate"
        gauss_msg.scope = BinarySerializer._unwrap_value(gauss.scope[0])
        # Construct surrounding node message.
        node = nodeList.getElement()
        node.gaussian = gauss_msg
        node.rootNode = is_rootNode
        node.id = gauss.id

    def _serialize_categorical(self, categorical, is_rootNode, visited_nodes, nodeList):
        """Serialize a univariate categorical leaf."""
        # Construct inner categorical leaf message.
        cat_msg = spflow_capnp.CategoricalLeaf.new_message()
        probabilities = cat_msg.init("probabilities", len(categorical.p))
        for i, p in enumerate(categorical.p):
            probabilities[i] = BinarySerializer._unwrap_value(p)
        # Check that the scope is defined over a single variable
        assert len(categorical.scope) == 1, "Expecting Categorical leaf to be univariate"
        cat_msg.scope = BinarySerializer._unwrap_value(categorical.scope[0])
        node = nodeList.getElement()
        node.categorical = cat_msg
        node.rootNode = is_rootNode
        node.id = categorical.id

    @staticmethod
    def _unwrap_value(value):
        """Convert numpy scalars to Python primitives for serialization."""
        # If the value was defined in the module numpy, convert it to a
        # Python primitive type for serialization.
        if type(value).__module__ == np.__name__:
            return value.item()
        return value
class BinaryDeserializer:
"""Interface to de-serialize (read) SPNs from SPFlow from an efficient binary format."""
def __init__(self, fileName):
    """Initialize the de-serializer.

    fileName -- path of the binary file to read from.
    """
    self.fileName = fileName
def deserialize_from_file(self):
    """Deserialize the header message stored in the file.

    Returns the deserialized query (JointProbability) or SPNModel,
    depending on which union field the header carries.
    """
    with open(self.fileName, "rb") as inFile:
        # Read header message first. The capnp default traversal limit
        # (8M words) is too small for large SPNs, so lift it entirely.
        traversal_limit_in_words = (2**64) - 1  # default: 8*1024*1024 [words à (32|64) bit]
        print("deserialization traversal limit in words:", traversal_limit_in_words)
        header = spflow_capnp.Header.read(inFile, traversal_limit_in_words=traversal_limit_in_words)
        if header.which() == "query":
            return self._deserialize_query(header.query)
        elif header.which() == "model":
            return self._deserialize_model(header.model)
        else:
            # Bug fix: `header.content` does not exist on the capnp message
            # and would raise AttributeError; report the union tag instead.
            raise NotImplementedError(f"No deserialization defined for {header.which()}")
def _deserialize_query(self, query):
    """Reconstruct a JointProbability query from its capnp message."""
    if query.errorKind == "absolute":
        kind = ErrorKind.ABSOLUTE
    elif query.errorKind == "relative":
        kind = ErrorKind.RELATIVE
    else:
        raise NotImplementedError(f"Cannot deserialize error kind {query.errorKind}")
    error_model = ErrorModel(kind, query.maxError)
    graph = self._deserialize_model(query.joint.model)
    return JointProbability(graph, query.batchSize,
                            query.joint.supportMarginal, error_model)
def _deserialize_model(self, model):
    """Reconstruct an SPNModel from its capnp message."""
    root_id = model.rootNode
    feature_type = model.featureType
    # An empty string encodes an unnamed model.
    name = model.name or None
    roots = self._binary_deserialize_graph(model.nodes)
    for candidate in roots:
        rebuild_scopes_bottom_up(candidate)
        assert is_valid(candidate), "SPN invalid after deserialization"
    root = next((r for r in roots if r.id == root_id), None)
    if root is None:
        logger.error(f"Did not find serialized root node {root_id}")
    return SPNModel(root, feature_type, name)
def _binary_deserialize_graph(self, nodeList):
    """Deserialize every node message; return the nodes flagged as roots."""
    # Map the capnp union tag to the matching deserialization method.
    dispatch = {
        "product": self._deserialize_product,
        "sum": self._deserialize_sum,
        "hist": self._deserialize_histogram,
        "gaussian": self._deserialize_gaussian,
        "categorical": self._deserialize_categorical,
    }
    node_map = {}
    roots = []
    for node in nodeList:
        which = node.which()
        handler = dispatch.get(which)
        if handler is None:
            raise NotImplementedError(f"No deserialization defined for {which}")
        deserialized = handler(node, node_map)
        # Register by ID so later nodes can resolve their child references.
        node_map[node.id] = deserialized
        if node.rootNode:
            roots.append(deserialized)
    print(f"Deserialized {len(node_map)} nodes from {self.fileName}")
    return roots
def _deserialize_product(self, node, node_map):
child_ids = node.product.children
# Resolve references to child nodes by ID.
children = [node_map.get(id) for id in child_ids]
# Check all childs have been resolved.
assert None not in children, "Child node ID | |
445 446 380
1 535 3 0 419 420 377
1 536 3 0 418 419 376
1 537 3 0 417 418 375
1 538 3 0 444 417 374
1 539 3 0 443 444 373
1 540 3 0 414 415 370
1 541 3 0 413 414 369
1 542 3 0 412 413 368
1 543 3 0 442 412 367
1 544 3 0 441 442 366
1 545 3 0 440 451 363
1 546 3 0 439 452 360
1 547 3 0 428 455 388
1 548 3 0 427 456 385
1 549 3 0 426 425 382
1 550 3 0 425 424 381
1 551 3 0 424 423 380
1 552 3 0 423 422 379
1 553 3 0 422 446 378
1 554 3 0 421 420 375
1 555 3 0 420 419 374
1 556 3 0 419 418 373
1 557 3 0 418 417 372
1 558 3 0 417 444 371
1 559 3 0 416 415 368
1 560 3 0 415 414 367
1 561 3 0 414 413 366
1 562 3 0 413 412 365
1 563 3 0 412 442 364
1 564 3 0 411 451 361
1 565 3 0 410 452 358
1 566 0 0
1 567 3 0 416 421 384
1 568 3 0 415 420 383
1 569 3 0 414 419 382
1 570 3 0 413 418 381
1 571 3 0 412 417 380
1 572 3 0 442 444 379
1 573 3 0 441 443 378
1 574 3 0 427 414 375
1 575 3 0 456 413 374
1 576 3 0 447 412 373
1 577 3 0 428 427 368
1 578 3 0 455 456 367
1 579 3 0 448 447 366
1 580 3 0 419 424 363
1 581 3 0 418 423 362
1 582 3 0 417 422 361
1 583 3 0 424 411 360
1 584 3 0 423 451 359
1 585 3 0 422 440 358
1 586 3 0 414 427 390
1 587 3 0 413 456 389
1 588 3 0 412 447 388
1 589 3 0 419 414 387
1 590 3 0 418 413 386
1 591 3 0 417 412 385
1 592 3 0 410 411 382
1 593 3 0 452 451 381
1 594 3 0 439 440 380
1 595 3 0 411 424 375
1 596 3 0 451 423 374
1 597 3 0 440 422 373
1 598 3 0 426 421 370
1 599 3 0 425 420 369
1 600 3 0 424 419 368
1 601 3 0 423 418 367
1 602 3 0 422 417 366
1 603 3 0 446 444 365
1 604 3 0 445 443 364
1 605 0 0
1 1 2 1 594 87
1 1 2 1 593 86
1 1 2 1 592 85
1 1 2 1 597 84
1 1 2 1 596 83
1 1 2 1 595 82
1 1 1 0 81
1 1 1 0 80
1 1 2 1 588 79
1 1 2 1 587 78
1 1 2 1 586 77
1 1 1 0 76
1 1 1 0 75
1 1 1 0 74
1 1 1 0 73
1 1 2 1 591 72
1 1 2 1 590 71
1 1 2 1 589 70
1 1 1 0 69
1 1 1 0 68
1 1 2 1 604 67
1 1 2 1 603 66
1 1 2 1 602 65
1 1 2 1 601 64
1 1 2 1 600 63
1 1 2 1 599 62
1 1 2 1 598 61
1 1 1 0 60
1 1 1 0 59
1 1 1 0 58
1 1 1 0 57
1 1 1 0 56
1 1 1 0 55
1 1 1 0 120
1 1 1 0 119
1 1 1 0 118
1 1 1 0 117
1 1 1 0 116
1 1 1 0 115
1 1 2 1 573 114
1 1 2 1 572 113
1 1 2 1 571 112
1 1 2 1 570 111
1 1 2 1 569 110
1 1 2 1 568 109
1 1 2 1 567 108
1 1 1 0 107
1 1 1 0 106
1 1 2 1 582 105
1 1 2 1 581 104
1 1 2 1 580 103
1 1 1 0 102
1 1 1 0 101
1 1 1 0 100
1 1 1 0 99
1 1 2 1 585 98
1 1 2 1 584 97
1 1 2 1 583 96
1 1 1 0 95
1 1 1 0 94
1 1 2 1 576 93
1 1 2 1 575 92
1 1 2 1 574 91
1 1 2 1 579 90
1 1 2 1 578 89
1 1 2 1 577 88
1 1 1 0 153
1 1 1 0 152
1 1 2 1 565 151
1 1 1 0 150
1 1 1 0 149
1 1 2 1 564 148
1 1 1 0 147
1 1 1 0 146
1 1 2 1 563 145
1 1 2 1 562 144
1 1 2 1 561 143
1 1 2 1 560 142
1 1 2 1 559 141
1 1 1 0 140
1 1 1 0 139
1 1 2 1 558 138
1 1 2 1 557 137
1 1 2 1 556 136
1 1 2 1 555 135
1 1 2 1 554 134
1 1 1 0 133
1 1 1 0 132
1 1 2 1 553 131
1 1 2 1 552 130
1 1 2 1 551 129
1 1 2 1 550 128
1 1 2 1 549 127
1 1 1 0 126
1 1 1 0 125
1 1 2 1 548 124
1 1 1 0 123
1 1 1 0 122
1 1 2 1 547 121
1 1 2 1 546 186
1 1 1 0 185
1 1 1 0 184
1 1 2 1 545 183
1 1 1 0 182
1 1 1 0 181
1 1 2 1 544 180
1 1 2 1 543 179
1 1 2 1 542 178
1 1 2 1 541 177
1 1 2 1 540 176
1 1 1 0 175
1 1 1 0 174
1 1 2 1 539 173
1 1 2 1 538 172
1 1 2 1 537 171
1 1 2 1 536 170
1 1 2 1 535 169
1 1 1 0 168
1 1 1 0 167
1 1 2 1 534 166
1 1 2 1 533 165
1 1 2 1 532 164
1 1 2 1 531 163
1 1 2 1 530 162
1 1 1 0 161
1 1 1 0 160
1 1 2 1 529 159
1 1 1 0 158
1 1 1 0 157
1 1 2 1 528 156
1 1 1 0 155
1 1 1 0 154
1 1 1 0 222
1 1 1 0 221
1 1 1 0 220
1 1 1 0 219
1 1 1 0 217
1 1 1 0 216
1 1 1 0 215
1 1 1 0 214
1 1 1 0 213
1 1 1 0 212
1 1 1 0 211
1 1 1 0 210
1 1 1 0 209
1 1 1 0 208
1 1 1 0 207
1 1 1 0 206
1 1 1 0 205
1 1 1 0 204
1 1 1 0 203
1 1 1 0 202
1 1 1 0 201
1 1 1 0 200
1 1 1 0 199
1 1 1 0 198
1 1 1 0 197
1 1 1 0 196
1 1 1 0 195
1 1 1 0 194
1 1 1 0 193
1 1 1 0 192
1 1 1 0 191
1 1 1 0 190
1 1 1 0 255
1 1 1 0 254
1 1 1 0 253
1 1 1 0 252
1 1 1 0 251
1 1 1 0 250
1 1 1 0 249
1 1 1 0 248
1 1 1 0 247
1 1 1 0 246
1 1 1 0 245
1 1 1 0 244
1 1 1 0 243
1 1 1 0 242
1 1 1 0 241
1 1 1 0 240
1 1 1 0 239
1 1 1 0 238
1 1 1 0 237
1 1 1 0 236
1 1 1 0 235
1 1 1 0 234
1 1 1 0 233
1 1 1 0 232
1 1 1 0 231
1 1 1 0 230
1 1 1 0 229
1 1 1 0 228
1 1 1 0 227
1 1 1 0 226
1 1 1 0 225
1 1 | |
0)
BridgeType (str(bridges|providerBridges)): NOT DEFINED
CistRegRootCost (number): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) root path cost. The valid range is 0 to 4294967295. (default = 0)
CistRegRootMac (str): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) 6-byte root MAC address. (default = 00:00:00:00:00:00)
CistRegRootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) priority of the root. The valid range is 0 to 61,440, in increments of 4,096. (default = 32,768)
CistRemainingHop (number): (For use with MSTP only) The number of additional bridge-to-bridge hops that will be allowed for the MSTP BPDUs. The root sets the maximum hop count, and each subsequent bridge decrements this value by 1. The valid range is 1 to 255. (default = 20)
Enabled (bool): Enables or disables the bridge's simulation. (default = disabled)
ExternalRootCost (number): Common and Internal Spanning Tree (CIST) external root path cost. A 4-byte unsigned integer. The default is 0.
ExternalRootMac (str): Common and Internal Spanning Tree (CIST) external root MAC address. A 6-byte MAC address.The default is 00 00 00 00 00 00.
ExternalRootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): (For use with MSTP only) The priority value of the root bridge for the CIST/MSTP region (external). Part of the CIST External Root Identifier. The valid range is 0 to 61,440, in increments of 4096. (default = 32,768)
ForwardDelay (number): The delay used for a port's change to the Forwarding state. (in milliseconds) The valid range is 500 msec to 255 sec. (default = 15,000 msec (15 sec)
HelloInterval (number): The length of time between transmission of Hello messages from the root bridge (in milliseconds). The valid range is 500 msec to 255 sec. (default = 2,000 msec (2 sec)
MaxAge (number): The maximum Configuration message aging time. (in milliseconds) The valid range is 500 msec to 255 sec. (default = 20,000 msec (20 sec)
MessageAge (number): The message age time parameter in the BPDU (in milliseconds). (It should be less than the Max. Age.) The valid range is 500 msec to 255 sec. (default = 0)
Mode (str(stp|rstp|mstp|pvst|rpvst|pvstp)): The version of the STP protocol that is being used on the Bridge.
MstcName (str): (For use with MSTP only) The name of the Multiple Spanning Tree Configuration being used. Format = MSTC ID-n (editable by user).
MstcRevisionNumber (number): (For use with MSTP only) The Revision Number of the Multiple Spanning Tree Configuration being used. A 2-byte unsigned integer. (default = 0)
PortPriority (str(0|16|32|48|64|80|96|112|128|144|160|176|192|208|224|240)): The port priority. The valid range is to 240, in multiples of 16. (default = 0)
PvstpMode (str(stp|rstp)): The version of the pvSTP protocol that is being used on the Bridge.
RootCost (number): (For STP and RSTP) The administrative cost for the shortest path from this bridge to the Root Bridge. The valid range is 0 to 4294967295. (default = 0)
RootMac (str): (For STP and RSTP) The 6-byte MAC Address for the Root Bridge. (default = 00:00:00:00:00:00)
RootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): (For STP and RSTP) The Bridge Priority for the root bridge. The valid range is 0 to 61,440, in increments of 4096. (default = 32,768)
RootSystemId (number): (For STP and RSTP) The System ID for the root bridge. The valid range is 0 to 4,095. (default = 0)
UpdateRequired (number): Indicates that an updated is required.
VlanPortPriority (number): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) VLAN port priority. The valid range is 0 to 63. (default = 32)
VlanRootMac (str): Common and Internal Spanning Tree (CIST) Regional (external) MAC address. Part of the CIST External Root Identifier. A 6-byte MAC address.
VlanRootPathCost (number): Common and Internal Spanning Tree (CIST) regional (external) root path cost.
VlanRootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): The priority value of the root bridge for the Common Spanning Tree (CST).
Returns:
self: This instance with all currently retrieved bridge data using find and the newly added bridge data available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._create(locals())
def remove(self):
"""Deletes all the bridge data in this instance from server.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, AutoPickBridgeMac=None, BridgeMac=None, BridgePriority=None, BridgeSystemId=None, BridgeType=None, CistRegRootCost=None, CistRegRootMac=None, CistRegRootPriority=None, CistRemainingHop=None, Enabled=None, ExternalRootCost=None, ExternalRootMac=None, ExternalRootPriority=None, ForwardDelay=None, HelloInterval=None, IsRefreshComplete=None, MaxAge=None, MessageAge=None, Mode=None, MstcName=None, MstcRevisionNumber=None, PortPriority=None, PvstpMode=None, RootCost=None, RootMac=None, RootPriority=None, RootSystemId=None, UpdateRequired=None, VlanPortPriority=None, VlanRootMac=None, VlanRootPathCost=None, VlanRootPriority=None):
"""Finds and retrieves bridge data from the server.
All named parameters support regex and can be used to selectively retrieve bridge data from the server.
By default the find method takes no parameters and will retrieve all bridge data from the server.
Args:
AutoPickBridgeMac (bool): If enabled, the MAC address for one of the STP interfaces will be automatically assigned as the MAC address for this bridge.
BridgeMac (str): The 6-byte MAC address assigned to this bridge. Part of the bridge identifier (bridge ID).
BridgePriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): The Bridge Priority for this bridge.The valid range is 0 to 61,440, in multiples of 4,096. (default = 32,768)
BridgeSystemId (number): The System ID for the bridge. The valid range is 0 to 4,095. (default = 0)
BridgeType (str(bridges|providerBridges)): NOT DEFINED
CistRegRootCost (number): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) root path cost. The valid range is 0 to 4294967295. (default = 0)
CistRegRootMac (str): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) 6-byte root MAC address. (default = 00:00:00:00:00:00)
CistRegRootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): (For use with PVST+ and RPVST+ only) The Common Spanning Tree (CST) priority of the root. The valid range is 0 to 61,440, in increments of 4,096. (default = 32,768)
CistRemainingHop (number): (For use with MSTP only) The number of additional bridge-to-bridge hops that will be allowed for the MSTP BPDUs. The root sets the maximum hop count, and each subsequent bridge decrements this value by 1. The valid range is 1 to 255. (default = 20)
Enabled (bool): Enables or disables the bridge's simulation. (default = disabled)
ExternalRootCost (number): Common and Internal Spanning Tree (CIST) external root path cost. A 4-byte unsigned integer. The default is 0.
ExternalRootMac (str): Common and Internal Spanning Tree (CIST) external root MAC address. A 6-byte MAC address.The default is 00 00 00 00 00 00.
ExternalRootPriority (str(0|4096|8192|12288|16384|20480|24576|28672|32768|36864|40960|45056|49152|53248|57344|61440)): (For use with MSTP only) The priority value of the root bridge for the CIST/MSTP region (external). Part of the CIST External Root Identifier. The valid range is 0 to 61,440, in increments of 4096. (default = 32,768)
ForwardDelay (number): The delay used for a port's change to the Forwarding state. (in milliseconds) The valid range is 500 msec to 255 sec. (default = 15,000 msec (15 sec)
HelloInterval (number): The length of time between transmission of Hello messages from the root bridge (in milliseconds). The valid range is 500 msec to 255 sec. (default = 2,000 msec (2 sec)
IsRefreshComplete (bool): If true, this causes the STP bridge to update.
MaxAge (number): The maximum Configuration message aging time. (in milliseconds) The valid range is 500 msec to 255 sec. (default = 20,000 msec (20 sec)
MessageAge (number): The message age time parameter in the BPDU (in milliseconds). (It should be less than the Max. Age.) The valid range is 500 msec to 255 sec. (default = 0)
Mode (str(stp|rstp|mstp|pvst|rpvst|pvstp)): The version of the STP protocol that is being used on the Bridge.
MstcName (str): (For use with MSTP only) The name of the Multiple Spanning Tree Configuration being used. Format = MSTC ID-n (editable by user).
MstcRevisionNumber (number): (For use with MSTP only) The Revision Number of the Multiple Spanning Tree Configuration being used. A 2-byte unsigned integer. (default = 0)
PortPriority (str(0|16|32|48|64|80|96|112|128|144|160|176|192|208|224|240)): The port priority. The valid range is to 240, in multiples of 16. (default = 0)
PvstpMode (str(stp|rstp)): The version of the pvSTP protocol that is being used on the Bridge.
RootCost (number): (For STP and RSTP) The administrative cost for the shortest path from this bridge to the Root Bridge. The valid range is 0 to 4294967295. (default = 0)
RootMac (str): (For STP and RSTP) The 6-byte MAC Address for the Root | |
"""
Provides functionality related to Maya GUI controls.
"""
import re
import maya.cmds as mc
import maya.mel as mel
class Control(object):
"""
Represents a single UI control contained within a Gui. Provides a wrapper
for the MEL command associated with whatever control type, including
methods to edit and query the parameters of the control.
"""
def __init__(self, name, control_type, creation_flags, parent_name):
"""
Initializes a new control declaration with the given name and control
type. creation_flags is a string containing the MEL flags and arguments
used to create the control (excluding the command, the name, and the
-p[arent] flag). parent_name is the name of this control's parent, or
None if no parent is specified.
"""
self.name = name
self.control_type = control_type
self.creation_flags = creation_flags
self.parent_name = parent_name
def delete(self):
"""
Deletes this control and all of its children.
"""
mc.deleteUI(self.name)
def create(self):
"""
Executes the MEL command that creates this control based on its
creation parameters.
"""
# Construct the MEL command to create this control based on its
# parameters
parent_flag = (' -p %s' % self.parent_name) if self.parent_name else ''
command = '%s%s %s %s;' % (
self.control_type,
parent_flag,
self.creation_flags,
self.name)
# Attempt to execute the command as MEL. If unsuccessful, print the
# full command so we can diagnose the problem.
try:
mel.eval(command)
except RuntimeError, exc:
print '// %s //' % command
raise exc
def edit(self, **flags):
"""
Edits this control with the given new flag values. The provided
dictionary of flags need not contain the edit flag.
"""
def thunk_commands(flags):
"""
Modifies and returns the given dictionary so that all function
values associated with command flags are thunked into anonymous
functions that ignore the arguments passed to them by Maya.
"""
for flag, value in flags.iteritems():
if 'command' in flag.lower() and hasattr(value, '__call__'):
flags[flag] = lambda _: value()
return flags
flags['edit'] = True
self._call_command(thunk_commands(flags))
def query(self, flag):
"""
Returns the current value of the specified flag.
"""
return self._call_command({'query': True, flag: True})
def _call_command(self, flags):
"""
Private helper method that calls the MEL command associated with the
relevant type of control, passing in this control's name and the given
set of flag mappings.
"""
command = mc.__dict__[self.control_type]
return command(self.name, **flags)
@classmethod
def from_string(cls, name, command, parent_name):
"""
Instantiates a new Control object from the provided pieces of its
string declaration.
"""
# Capture an explicitly specified parent name in the declaration
parent_name_regex = re.search(r' -p(?:arent)? "?([A-Za-z0-9_]+)"? ?',
command)
# If a parent name has been specified, extract it from the command
if parent_name_regex:
parent_name = parent_name_regex.group(1)
command = command.replace(parent_name_regex.group(0), ' ')
# Split the MEL command used to create the control: the first word is
# the control type, and everything after that represents flags
command_tokens = command.split()
control_type = command_tokens[0]
creation_flags = ' '.join(command_tokens[1:])
# Instantiate a new control declaration from these parameters
return cls(name, control_type, creation_flags, parent_name)
class Gui(object):
"""
Represents a set of controls created from a string declaration via the
from_string classmethod. Once a Gui is created (by calling the create
method after a window has been created), individual controls from the
declaration can be accessed with square-bracket notation to be manipulated
individually. In addition, the edit method can be used to process a batch
of edits in a single call.
"""
    def __init__(self, controls):
        """
        Initializes a new Gui from the given list of Control objects.

        Each control is registered through add(), so creation order is
        preserved in _controls while _control_lookup enables name access.
        """
        # Ordered list of Control objects; order matters when create() runs.
        self._controls = []
        # Maps control name -> Control for square-bracket lookup.
        self._control_lookup = {}
        for control in controls:
            self.add(control)
    def __getitem__(self, key):
        """
        Allows individual controls to be accessed by name using array-style
        indexing into the Gui object.

        Raises KeyError if no control with the given name has been added.
        """
        return self._control_lookup[key]
def add(self, control):
"""
Adds the specified control object to the Gui.
"""
self._controls.append(control)
self._control_lookup[control.name] = control
def create(self):
"""
Creates the Gui by creating all of its controls.
"""
for control in self._controls:
control.create()
def extend(self, other):
"""
Extends this Gui by adding and creating the controls contained in
another Gui object.
"""
for control in other._controls:
self.add(control)
other.create()
def edit(self, per_control_edits):
"""
Processes an unordered batch of edits for a subset of this Gui's
controls. per_control_edits is a dictionary mapping each control name
with a dictionary containing the flags and values specifying the edits
to be made to that control.
"""
for control_name, edit_flags in per_control_edits.iteritems():
self[control_name].edit(**edit_flags)
@classmethod
def from_string(cls, s):
"""
Instantiates a new Gui object from a string declaration.
"""
def strip_comments(line):
"""
Given a line, returns the same line with any comments stripped away.
Comments begin with a hash character ("#") and continue to the end
of the line thereafter.
"""
# Establish some local state to use in scanning the string.
# quote_open indicates whether the characters over which we're
# currently iterating are contained within a quoted span, and
# quote_chars contains the set of characters currently considered
# valid opening or closing characters for a quoted span.
quote_open = False
quote_chars = ['"', "'"]
def open_quote(quote_char):
"""
Modifies local state to indicate that we're scanning over a
region of the string that's enclosed in quotes. quote_char is
the character that opens the quote.
"""
quote_open = True
quote_chars = [quote_char]
def close_quote():
"""
Modifies local state to indicate that we're no longer scanning
over a quoted region of the string.
"""
quote_open = False
quote_chars = ['"', "'"]
# Iterate over each character in the string. If we encounter an
# unquoted hash character, we can immediately strip it away and
# return the part of the string before it. Otherwise, we keep
# iterating, checking each character to determine if we need to
# open or close a quote.
for i, c in enumerate(line):
if c == '#' and not quote_open:
return line[:i]
elif c in quote_chars:
close_quote() if quote_open else open_quote(c)
# Return the entire line unmodified if we encounter no hashes.
return line
def parse_line(lines):
"""
Parses the given line, returning a triple containing the line's
indentation level, the name of the control declared on that line,
and the creation command associated with that control.
"""
def get_indentation_level(line):
"""
Returns the number of spaces at the beginning of the line.
Treats each tab character as four spaces.
"""
match = re.match(r'[ \t]*', line)
if not match:
return 0
return len(match.group(0).replace('\t', ' '))
def split_control(line):
"""
Splits the given line at the first colon, returning the pair of
the control name and the creation command associated with that
control.
"""
first_colon_index = line.find(':')
return (line[:first_colon_index].strip(),
line[first_colon_index+1:].strip())
declaration_triples = []
for line in lines:
indentation_level = get_indentation_level(line)
name, command = split_control(line)
declaration_triples.append((indentation_level, name, command))
return declaration_triples
class ControlStack(object):
"""
Data structure used to keep track of the controls encountered when
parsing the input string.
"""
def __init__(self):
"""
Initializes an empty control stack.
"""
self._controls = [(-1, None)]
def pop(self, indentation_level):
"""
Pops controls off the top of the stack until the topmost
control is below the given indentation level.
"""
while self._controls[-1][0] >= indentation_level:
self._controls.pop()
def push(self, indentation_level, control_name):
"""
Pushes a new control onto the stack at the given indentation
level.
"""
assert indentation_level > self._controls[-1][0]
self._controls.append((indentation_level, control_name))
@property
def top_control(self):
"""
Returns the topmost control name on the stack.
"""
return self._controls[-1][1]
# Strip comments and blank lines to give us only the meaningful lines
commentless_lines = [strip_comments(line) for line in s.splitlines()]
meaningful_lines = [line for line in commentless_lines if line.strip()]
# Iterate over each line to collect control declarations, using a stack
# to infer parent controls based on indentation
controls = []
control_stack = ControlStack()
for (indentation_level,
control_name,
control_command) in parse_line(meaningful_lines):
# Slice off the top of the stack so that we're back to the last-seen
# control that's below the indentation level of the current one
control_stack.pop(indentation_level)
# Create a new control declaration, using the new top of the stack
# as its parent control
controls.append(Control.from_string(control_name,
control_command,
| |
<filename>core/ability.py<gh_stars>0
from core.afflic import AFFLICT_LIST
class Ability:
    """Base class for passive abilities.

    An ability owns a list of modifier tuples (mtype, morder, value[, cond]).
    On oninit, each tuple is filtered by element/weapon condition, optionally
    converted into a combo-gated ("flurry") modifier, and registered on the
    adventurer as a Modifier (plus a permanent Buff for 'buff'-order mods).
    """
    # Tokens recognized as element / weapon-type condition classifiers.
    COND_ELE = ('flame', 'water', 'wind', 'light', 'shadow')
    COND_WT = ('axe', 'blade', 'bow', 'dagger', 'lance', 'staff', 'sword', 'wand')

    def __init__(self, name, mod=None):
        self.name = name
        self.mod = mod if mod else []

    def check_ele_wt(self, m, adv):
        """Evaluate an element/weapon classifier prefixed to m[3].

        Returns (applies, m'): m' has the classifier stripped when one was
        recognized; otherwise the original tuple is returned unchanged.
        """
        raw = m[3]
        if '_' in raw:
            classifier, tail = raw.split('_')
        else:
            classifier, tail = raw, None
        stripped = (m[0], m[1], m[2], tail)
        if classifier in self.COND_ELE:
            return adv.slots.c.ele == classifier, stripped
        if classifier in self.COND_WT:
            return adv.slots.c.wt == classifier, stripped
        # Not an element/weapon condition: leave the tuple untouched.
        return True, m

    def flurry_modifier(self, m, adv):
        """Turn a 'hitN' condition into a combo-count-gated modifier tuple."""
        cond = m[3]
        if not cond.startswith('hit'):
            return m
        threshold = int(cond[3:])
        def above_combo():
            return adv.hits > threshold
        adv.uses_combo = True
        # 5th element is the gate callable consumed by adv.Modifier.
        return (m[0], m[1], m[2], cond, above_combo)

    def oninit(self, adv, afrom=None):
        """Register all of this ability's modifiers on the adventurer."""
        prefix = '' if afrom is None else afrom + '_'
        for idx, m in enumerate(self.mod):
            if len(m) > 3 and m[3] is not None:
                applies, m = self.check_ele_wt(m, adv)
                if not applies:
                    continue
                if m[3] is not None:
                    m = self.flurry_modifier(m, adv)
            mod_name = f'{prefix}{self.name}_{idx}'
            # Note: kept as an attribute so subclasses (e.g. bleed killer)
            # can wrap the last-created modifier's getter.
            self.mod_object = adv.Modifier(mod_name, *m)
            if m[1] == 'buff':
                adv.Buff(f'{mod_name}_buff', duration=-1, modifier=self.mod_object, source='ability').on()
ability_dict = {}
class Strength(Ability):
    """Flat strength passive ('a' / 'au'): permanent 'att' modifier."""
    def __init__(self, name, value, cond=None):
        mods = [('att', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['a'] = Strength
ability_dict['au'] = Strength  # united strength
class Strength_Chain(Ability):
    """Chain co-ability strength ('achain')."""
    def __init__(self, name, value, cond=None):
        # is buff bracket for some hecking reason
        mods = [('att', 'buff', value, cond)]
        super().__init__(name, mods)
ability_dict['achain'] = Strength_Chain
class Resist(Ability):
    """Affliction resist ('res').

    Unconditional or HP-conditional resists sit in the passive bracket;
    any other condition puts the resist in the buff bracket.
    """
    def __init__(self, name, value, cond=None):
        bracket = 'passive' if not cond or cond.startswith('hp') else 'buff'
        super().__init__(name, [(name, bracket, value, cond)])
ability_dict['res'] = Resist
class Skill_Damage(Ability):
    """Skill damage passive ('s' / 'sd')."""
    def __init__(self, name, value, cond=None):
        mods = [('s', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['s'] = Skill_Damage
ability_dict['sd'] = Skill_Damage
class Force_Strike(Ability):
    """Force strike damage passive ('fs')."""
    def __init__(self, name, value, cond=None):
        mods = [('fs', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['fs'] = Force_Strike
class Health_Points(Ability):
    """Max HP passive ('hp')."""
    def __init__(self, name, value, cond=None):
        mods = [('maxhp', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['hp'] = Health_Points
class Buff_Time(Ability):
    """Buff duration passive ('bt')."""
    def __init__(self, name, value, cond=None):
        mods = [('buff', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['bt'] = Buff_Time
class Debuff_Time(Ability):
    """Debuff duration passive ('dbt')."""
    def __init__(self, name, value, cond=None):
        mods = [('debuff', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['dbt'] = Debuff_Time
class ConditionalModifierAbility(Ability):
    """Base for modifiers whose mod key may be affliction-scoped.

    A name such as 'k_poison' scopes the modifier key to 'poison_<mtype>';
    a plain name applies the modifier under mtype directly.
    """
    def __init__(self, name, mtype, morder, value, cond=None):
        if '_' in name:
            afflict = name.split('_', 1)[1]
            mod_key = f'{afflict}_{mtype}'
        else:
            mod_key = mtype
        super().__init__(name, [(mod_key, morder, value, cond)])
class Critical_Chance(ConditionalModifierAbility):
    """Critical rate up ('cc')."""
    def __init__(self, name, value, cond=None):
        super().__init__(name, 'crit', 'chance', value, cond)
ability_dict['cc'] = Critical_Chance
class Critical_Damage(ConditionalModifierAbility):
    """Critical damage up ('cd')."""
    def __init__(self, name, value, cond=None):
        super().__init__(name, 'crit', 'damage', value, cond)
ability_dict['cd'] = Critical_Damage
class Killer(ConditionalModifierAbility):
    """Punisher/killer passive ('k'), optionally affliction-scoped."""
    def __init__(self, name, value, cond=None):
        super().__init__(name, 'killer', 'passive', value, cond)
ability_dict['k'] = Killer
class Skill_Haste(Ability):
    """Skill haste passive ('sp' / 'spu')."""
    def __init__(self, name, value, cond=None):
        mods = [('sp', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['sp'] = Skill_Haste
ability_dict['spu'] = Skill_Haste  # united haste
class Striking_Haste(Ability):
    """Force strike skill haste passive ('spf')."""
    def __init__(self, name, value, cond=None):
        mods = [('spf', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['spf'] = Striking_Haste
class Broken_Punisher(Ability):
    """Broken-state punisher ('bk'), scaled by the shared bk efficiency."""
    EFFICIENCY = 0.15
    def __init__(self, name, value, cond=None):
        scaled = value * self.EFFICIENCY
        super().__init__(name, [('att', 'bk', scaled, cond)])
ability_dict['bk'] = Broken_Punisher
class Overdrive_Punisher(Ability):
    """Overdrive punisher ('od'): a killer passive scaled by efficiency."""
    EFFICIENCY = 0.45
    def __init__(self, name, value, cond=None):
        scaled = value * self.EFFICIENCY
        super().__init__(name, [('killer', 'passive', scaled, cond)])
ability_dict['od'] = Overdrive_Punisher
class Dragon_Damage(Ability):
    """Dragon damage passive ('da')."""
    def __init__(self, name, value, cond=None):
        mods = [('da', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['da'] = Dragon_Damage
class Dragon_Time(Ability):
    """Shapeshift duration passive ('dt')."""
    def __init__(self, name, value, cond=None):
        mods = [('dt', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['dt'] = Dragon_Time
class Dragon_Haste(Ability):
    """Dragon gauge haste passive ('dh')."""
    def __init__(self, name, value, cond=None):
        mods = [('dh', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['dh'] = Dragon_Haste
class Attack_Speed(Ability):
    """Attack speed passive ('spd')."""
    def __init__(self, name, value, cond=None):
        mods = [('spd', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['spd'] = Attack_Speed
class Charge_Speed(Ability):
    """Force strike charge speed passive ('cspd')."""
    def __init__(self, name, value, cond=None):
        mods = [('cspd', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['cspd'] = Charge_Speed
class Combo_Time(Ability):
    """Combo timer extension passive ('ctime')."""
    def __init__(self, name, value, cond=None):
        mods = [('ctime', 'passive', value, cond)]
        super().__init__(name, mods)
ability_dict['ctime'] = Combo_Time
class Bleed_Killer(Ability):
    """Killer passive ('bleed') that only applies while bleed is active."""
    def __init__(self, name, value, cond=None):
        super().__init__(name, [('killer', 'passive', value, cond)])
    def oninit(self, adv, afrom=None):
        super().oninit(adv, afrom=afrom)
        # Capture the static value before swapping in the gated getter.
        base_value = self.mod_object.get()
        def gated_get():
            try:
                return base_value if adv.bleed.get() > 0 else 0
            except AttributeError:
                # No bleed tracker on the adventurer: contributes nothing.
                return 0
        self.mod_object.get = gated_get
ability_dict['bleed'] = Bleed_Killer
class Co_Ability(Ability):
    """Weapon/character co-ability ('ex'); value keys into EX_MAP."""
    EX_MAP = {
        'blade': [('att','ex',0.10)],
        'dagger': [('crit','chance',0.10)],
        'bow': [('sp','passive',0.15)],
        'wand': [('s','ex',0.15)],
        'sword': [('dh','passive',0.15)],
        'axe2': [('crit','damage',0.30)],
        'dagger2': [('x','ex',0.20)],
        'geuden': [('da','passive',0.10),('dt','passive',0.20)],
        'megaman': [('killer','passive',0.15*Overdrive_Punisher.EFFICIENCY)],
        'tobias': [('buff','ex',0.20)],
        'grace': [('fs','ex',0.20)],
        'sharena': [('paralysis_killer', 'passive', 0.08)],
        'peony': [('light','ele',0.20)],
        'gleif': [('debuff_killer', 'passive', 0.08)]
    }
    def __init__(self, name, value):
        mods = self.EX_MAP[value]
        super().__init__(name, mods)
ability_dict['ex'] = Co_Ability
class Union_Ability(Ability):
    """Union ability ('union'): mods keyed by union id, then level."""
    UNION_MAP = {
        1: {4: [('s', 'passive', 0.10)]},
        2: {4: [('att','bk', 0.10*Broken_Punisher.EFFICIENCY)]},
        3: {4: [('att','passive', 0.08)]},
        4: {3: [('sp','passive', 0.06)], 4: [('sp','passive', 0.10)]},
        5: {2: [('da','passive', 0.10)], 3: [('da','passive', 0.18)], 4: [('da','passive', 0.30)]},
        6: {2: [('fs','passive', 0.05)], 3: [('fs','passive', 0.08)], 4: [('fs','passive', 0.15)]},
        # 7: burn res, 8: stun res, 9: para res, 10: curse res
        11: {2: [('buff','passive', 0.05)], 3: [('buff','passive', 0.08)], 4: [('buff','passive', 0.15)]},
    }
    def __init__(self, name, value, level):
        # Copy so per-instance mod lists never share state through the map.
        mods = self.UNION_MAP[value][level].copy()
        super().__init__(name, mods)
ability_dict['union'] = Union_Ability
class BuffingAbility(Ability):
    """Base for abilities whose effect is an adv.Buff built from buff_args.

    A name like 'lo_defense_buff' carries the buff mtype/morder after the
    first underscore; otherwise the buff defaults to ('att', 'buff').
    """
    def __init__(self, name, value, duration):
        if '_' in name:
            self.buff_args = (name, value, duration, *name.split('_')[1:])
        else:
            self.buff_args = (name, value, duration, 'att', 'buff')
        super().__init__(name)
class Last_Buff(BuffingAbility):
    """Last offense ('lo'): procs a buff when HP drops below 30%."""
    # HP% the simulated heal restores to after an auto-proc damage dip.
    HEAL_TO = 30
    def __init__(self, name, value, duration=15, chances=1):
        super().__init__(name, value, duration)
        # Remaining number of times the buff may proc this run.
        self.proc_chances = chances
        # Regen-style variants are not auto-procced by simulated damage.
        self.auto_proc = 'regen' not in self.buff_args
        # NOTE(review): resets the class attribute on every construction;
        # presumably undoes a HEAL_TO override from a previous run — confirm.
        Last_Buff.HEAL_TO = 30
    def oninit(self, adv, afrom=None):
        def l_lo_buff(e):
            # Proc only on a downward crossing of the 30% threshold.
            if self.proc_chances > 0 and e.hp <= 30 and (e.hp - e.delta) > 30:
                self.proc_chances -= 1
                adv.Buff(*self.buff_args).no_bufftime().on()
        adv.Event('hp').listener(l_lo_buff)
        # With no explicit hp timeline configured, simulate a damage dip and
        # a delayed heal so the 'last offense' condition can trigger.
        if self.auto_proc and 'hp' not in adv.conf and adv.condition('last offense'):
            def lo_damaged(t):
                if adv.hp > 30 and self.proc_chances > 0:
                    next_hp = adv.condition.hp_threshold_list()
                    if next_hp and next_hp[0] < 30:
                        # NOTE(review): passes the whole list to set_hp while
                        # lo_healed passes a scalar — confirm intended.
                        adv.set_hp(next_hp)
                    else:
                        adv.set_hp(30)
                    adv.Timer(lo_healed).on(10)
            def lo_healed(t):
                next_hp = adv.condition.hp_threshold_list(Last_Buff.HEAL_TO)
                try:
                    adv.set_hp(next_hp[0])
                except:
                    # No threshold available: heal back to full.
                    adv.set_hp(100)
            adv.Timer(lo_damaged).on(0.1)
ability_dict['lo'] = Last_Buff
class Doublebuff(BuffingAbility):
    """Defense-chain proc ('bc'): on every 'defchain' event, grants either
    energy (for 'bc_energy') or the configured buff, scaled by e.rate when
    the event carries one."""
    def __init__(self, name, value, duration=15):
        super().__init__(name, value, duration)
    def oninit(self, adv, afrom=None):
        grants_energy = self.name == 'bc_energy'
        def on_defchain(e):
            rate = e.rate if hasattr(e, 'rate') else 1
            if grants_energy:
                adv.energy.add(self.buff_args[1] * rate)
            else:
                adv.Buff(self.buff_args[0], self.buff_args[1] * rate,
                         *self.buff_args[2:], source=e.source).on()
        adv.Event('defchain').listener(on_defchain)
ability_dict['bc'] = Doublebuff
class Doublebuff_CD(Doublebuff):
    """Defense-chain proc with an internal cooldown ('bcc')."""
    DB_CD = 14.999  # inaccurate, but avoids a potential unintuitive race condition
    def oninit(self, adv, afrom=None):
        self.is_cd = False
        def cd_end(t):
            self.is_cd = False
        def start_cd():
            self.is_cd = True
            adv.Timer(cd_end).on(self.DB_CD)
        if self.name == 'bcc_energy':
            def defchain(e):
                if self.is_cd:
                    return
                adv.energy.add(self.buff_args[1])
                start_cd()
        else:
            def defchain(e):
                if self.is_cd:
                    return
                adv.Buff(*self.buff_args, source=e.source).on()
                start_cd()
        adv.Event('defchain').listener(defchain)
ability_dict['bcc'] = Doublebuff_CD
class Slayer_Strength(BuffingAbility):
    """Slayer's strength ('sts' / 'sls').

    Currently a no-op: the stacking buffs are intentionally disabled (see
    the commented-out block), leaving only the buff_args setup from
    BuffingAbility.
    """
    def __init__(self, name, value):
        # duration=-1: the stacks, when enabled, would be permanent.
        super().__init__(name, value, -1)
    def oninit(self, adv, afrom=None):
        pass
        # for _ in range(5):
        #     adv.Buff(*self.buff_args).on()
ability_dict['sts'] = Slayer_Strength
ability_dict['sls'] = Slayer_Strength
class Dragon_Buff(Ability):
    """Base for buffs that step up once per dragon transformation.

    Each 'dragon' event grants the next value from dc_values as a
    permanent buff, until the value list is exhausted.
    """
    def __init__(self, name, dc_values, buff_args=()):
        self.dc_values = dc_values
        self.buff_args = buff_args
        super().__init__(name)
    def oninit(self, adv, afrom=None):
        self.dc_level = 0
        def step_up(t):
            if self.dc_level >= len(self.dc_values):
                return
            adv.Buff(self.name, self.dc_values[self.dc_level], -1, *self.buff_args).on()
            self.dc_level += 1
        adv.Event('dragon').listener(step_up)
class Dragon_Claw(Dragon_Buff):
    """Dragon claw ('dc'): stepped 'dca' buffs keyed by ability level."""
    DC_LEVELS = {
        1: (0.04, 0.06, 0.10),
        2: (0.05, 0.08, 0.12),
        3: (0.06, 0.09, 0.15),
        4: (0.10, 0.15, 0.15)
    }
    def __init__(self, name, value):
        steps = self.DC_LEVELS[value]
        super().__init__('dca', steps)
ability_dict['dc'] = Dragon_Claw
class Dragon_Might(Dragon_Buff):
    """Dragon's might ('dm'): stepped 'dca' buffs keyed by ability level."""
    DM_LEVELS = {
        1: (0.10, 0.10)
    }
    def __init__(self, name, value):
        steps = self.DM_LEVELS[value]
        super().__init__('dca', steps)
ability_dict['dm'] = Dragon_Might
class Dragon_Claw_Chain(Dragon_Buff):
    """Chain co-ability dragon claw ('dcc')."""
    DCC_LEVELS = {
        3: (0.08, 0.09, 0.15),
        5: (0.09, 0.10, 0.15),
        6: (0.10, 0.10, 0.15)
    }
    def __init__(self, name, value):
        steps = self.DCC_LEVELS[value]
        super().__init__('dca', steps)
ability_dict['dcc'] = Dragon_Claw_Chain
class Dragon_Skill(Dragon_Buff):
    """Dragon claw skill variant ('dcs'): stepped skill-damage buffs."""
    DS_LEVELS = {
        3: (0.08, 0.08, 0.08)
    }
    def __init__(self, name, value):
        super().__init__('dcs', self.DS_LEVELS[value], buff_args=('s', 'buff'))
ability_dict['dcs'] = Dragon_Skill
class Dragon_Scale(Dragon_Buff):
    """Dragon scale ('dcd'): stepped defense buffs."""
    DD_LEVELS = {
        3: (0.10, 0.11, 0.12)
    }
    def __init__(self, name, value):
        super().__init__('dcd', self.DD_LEVELS[value], buff_args=('defense', 'buff'))
ability_dict['dcd'] = Dragon_Scale
class Resilient_Offense(BuffingAbility):
    """'ro' / 'uo': grants a permanent buff each time HP drops below a
    threshold, a limited number of times per run.

    'ro' (resilient offense): up to 3 procs, threshold 30.
    'uo' (united offense): up to 5 procs, threshold 70.
    """
    def __init__(self, name, value, interval=None):
        # duration=-1: the granted buff is permanent once procced.
        super().__init__(name, value, -1)
        self.interval = interval
        if name[0:2] == 'ro':
            self.proc_chances = 3
            self.hp_threshold = 30
        elif name[0:2] == 'uo':
            self.proc_chances = 5
            self.hp_threshold = 70
    def oninit(self, adv, afrom=None):
        def l_ro_buff(e):
            # NOTE(review): the threshold is hard-coded to 30 here, while
            # the auto-damage helpers below use self.hp_threshold (70 for
            # 'uo'); likely this should also use self.hp_threshold — confirm.
            if self.proc_chances > 0 and e.hp <= 30 and (e.hp - e.delta) > 30:
                self.proc_chances -= 1
                adv.Buff(*self.buff_args).on()
        adv.Event('hp').listener(l_ro_buff)
        # With no explicit hp timeline configured, simulate periodic drops
        # below the threshold so the ability can proc.
        if self.interval and 'hp' not in adv.conf:
            def ro_damaged(t):
                if adv.hp > self.hp_threshold:
                    next_hp = adv.condition.hp_threshold_list()
                    if next_hp and next_hp[0] < self.hp_threshold:
                        # NOTE(review): passes the whole list to set_hp while
                        # ro_healed passes a scalar — confirm intended.
                        adv.set_hp(next_hp)
                    else:
                        adv.set_hp(self.hp_threshold)
                    adv.Timer(ro_healed).on(10)
            def ro_healed(t):
                next_hp = adv.condition.hp_threshold_list(self.hp_threshold)
                try:
                    adv.set_hp(next_hp[0])
                except:
                    # No threshold available: heal back to full.
                    adv.set_hp(100)
            if self.interval < adv.duration and adv.condition(f'hp={self.hp_threshold}% every {self.interval}s'):
                for i in range(1, self.proc_chances):
                    adv.Timer(ro_damaged).on(self.interval*i)
                adv.Timer(ro_damaged).on(0.1)
ability_dict['ro'] = Resilient_Offense
ability_dict['uo'] = Resilient_Offense
class Skill_Prep(Ability):
    """Skill prep ('prep'): start the run with a fraction of SP charged.

    Accepts a float fraction, a percentage > 1, or a string like '50%'.
    """
    def __init__(self, name, value):
        if isinstance(value, str):
            value = float(value.replace('%', ''))
        if value > 1:
            # Percentage form: normalize to a fraction.
            value /= 100
        self.value = value
        super().__init__(name)
    def oninit(self, adv, afrom=None):
        adv.charge_p('skill_prep', self.value)
ability_dict['prep'] = Skill_Prep
class Primed(BuffingAbility):
    """Primed buff ('primed'): procs on s1 charge, gated by a cooldown."""
    PRIMED_CD = 14.999
    def __init__(self, name, value, duration=None):
        self.is_cd = False
        super().__init__(name, value, duration or 10)
    def oninit(self, adv, afrom=None):
        primed_buff = adv.Buff(*self.buff_args).no_bufftime()
        def cooldown_over(t):
            self.is_cd = False
        def on_s1_charged(e):
            if self.is_cd:
                return
            primed_buff.on()
            self.is_cd = True
            adv.Timer(cooldown_over).on(self.PRIMED_CD)
        adv.Event('s1_charged').listener(on_s1_charged)
ability_dict['primed'] = Primed
class Dragon_Prep(Ability):
    """Dragon prep ('dp'): pre-fills value*10 dragon gauge points."""
    def __init__(self, name, value):
        self.value = value
        super().__init__(name)
    def oninit(self, adv, afrom=None):
        # dhaste=False: the prep charge bypasses dragon haste scaling.
        adv.dragonform.charge_gauge(self.value * 10, dhaste=False)
ability_dict['dp'] = Dragon_Prep
class Affliction_Guard(Ability):
    """Affliction guard ('ag'): sets the adventurer's afflict_guard count."""
    def __init__(self, name, value):
        self.value = value
        super().__init__(name)
    def oninit(self, adv, afrom=None):
        adv.afflict_guard = self.value
        # NOTE(review): also force-enables dragonform; presumably undoes a
        # config that disables dragon when no guard is present — confirm.
        adv.dragonform.disabled = False
ability_dict['ag'] = Affliction_Guard
class Energy_Prep(Ability):
    """Energy prep ('eprep'): start the run with energy stacks."""
    def __init__(self, name, value):
        self.energy_count = value
        super().__init__(name)
    def oninit(self, adv, afrom=None):
        adv.energy.add(self.energy_count)
ability_dict['eprep'] = Energy_Prep
class Force_Charge(Ability):
def __init__(self, name, charge, value=0.25):
self.charge = charge
self.value = value
super().__init__(name)
def oninit(self, adv, afrom=None):
if hasattr(adv, 'fs_prep_c'):
adv.fs_prep_v += self.value
else:
def l_fs_charge(e):
if not e.is_hit:
| |
repos_by_tag:
result.append(rel_matcher.match_releases_by_tag(
repos_by_tag, time_from, time_to, release_settings))
if repos_by_branch:
result.append(rel_matcher.match_releases_by_branch(
repos_by_branch, branches, default_branches, time_from, time_to, release_settings))
result = await gather(*result)
result = pd.concat(result, ignore_index=True) if result else dummy_releases_df()
if index is not None:
result.set_index(index, inplace=True)
return result
@classmethod
@sentry_span
async def _fetch_precomputed_releases(cls,
match_groups: Dict[ReleaseMatch, Dict[str, List[str]]],
time_from: datetime,
time_to: datetime,
prefixer: Prefixer,
account: int,
pdb: Database,
index: Optional[Union[str, Sequence[str]]] = None,
) -> pd.DataFrame:
prel = PrecomputedRelease
or_items, _ = match_groups_to_sql(match_groups, prel)
if pdb.url.dialect == "sqlite":
query = (
select([prel])
.where(and_(or_(*or_items) if or_items else false(),
prel.published_at.between(time_from, time_to),
prel.acc_id == account))
.order_by(desc(prel.published_at))
)
else:
query = union_all(*(
select([prel])
.where(and_(item,
prel.published_at.between(time_from, time_to),
prel.acc_id == account))
.order_by(desc(prel.published_at))
for item in or_items))
df = await read_sql_query(query, pdb, prel)
df = set_matched_by_from_release_match(df, True, prel.repository_full_name.name)
if index is not None:
df.set_index(index, inplace=True)
else:
df.reset_index(drop=True, inplace=True)
user_node_to_login_get = prefixer.user_node_to_login.get
df[Release.author.name] = [
user_node_to_login_get(u) for u in df[Release.author_node_id.name].values
]
return df
@classmethod
@sentry_span
async def _fetch_release_events(cls,
repos: Mapping[str, List[str]],
account: int,
meta_ids: Tuple[int, ...],
time_from: datetime,
time_to: datetime,
logical_settings: LogicalRepositorySettings,
prefixer: Prefixer,
mdb: Database,
rdb: Database,
index: Optional[Union[str, Sequence[str]]] = None,
) -> pd.DataFrame:
"""Load pushed releases from persistentdata DB."""
if not repos:
return dummy_releases_df(index)
repo_name_to_node = prefixer.repo_name_to_node.get
repos_inverted = {}
for pattern, pattern_repos in repos.items():
pattern = re.compile(pattern)
for repo in pattern_repos:
repos_inverted[repo] = pattern
repos = repos_inverted
repo_ids = {repo_name_to_node(r): r for r in coerce_logical_repos(repos)}
release_rows = await rdb.fetch_all(
select([ReleaseNotification])
.where(and_(
ReleaseNotification.account_id == account,
ReleaseNotification.published_at.between(time_from, time_to),
ReleaseNotification.repository_node_id.in_(repo_ids),
))
.order_by(desc(ReleaseNotification.published_at)))
unresolved_commits_short = defaultdict(list)
unresolved_commits_long = defaultdict(list)
for row in release_rows:
if row[ReleaseNotification.resolved_commit_node_id.name] is None:
repo = row[ReleaseNotification.repository_node_id.name]
commit = row[ReleaseNotification.commit_hash_prefix.name]
if len(commit) == 7:
unresolved_commits_short[repo].append(commit)
else:
unresolved_commits_long[repo].append(commit)
author_node_ids = {r[ReleaseNotification.author_node_id.name]
for r in release_rows} - {None}
queries = []
queries.extend(
select([NodeCommit.repository_id, NodeCommit.node_id, NodeCommit.sha])
.where(and_(NodeCommit.acc_id.in_(meta_ids),
NodeCommit.repository_id == repo,
func.substr(NodeCommit.sha, 1, 7).in_(commits)))
for repo, commits in unresolved_commits_short.items()
)
queries.extend(
select([NodeCommit.repository_id, NodeCommit.node_id, NodeCommit.sha])
.where(and_(NodeCommit.acc_id.in_(meta_ids),
NodeCommit.repository_id == repo,
NodeCommit.sha.in_(commits)))
for repo, commits in unresolved_commits_long.items()
)
if len(queries) == 1:
sql = queries[0]
elif len(queries) > 1:
sql = union_all(*queries)
else:
sql = None
resolved_commits = {}
user_map = {}
tasks = []
if sql is not None:
async def resolve_commits():
commit_rows = await mdb.fetch_all(sql)
for row in commit_rows:
repo = row[NodeCommit.repository_id.name]
node_id = row[NodeCommit.node_id.name]
sha = row[NodeCommit.sha.name]
resolved_commits[(repo, sha)] = node_id, sha
resolved_commits[(repo, sha[:7])] = node_id, sha
tasks.append(resolve_commits())
if author_node_ids:
async def resolve_users():
user_rows = await mdb.fetch_all(select([User.node_id, User.login])
.where(and_(User.acc_id.in_(meta_ids),
User.node_id.in_(author_node_ids))))
nonlocal user_map
user_map = {r[User.node_id.name]: r[User.login.name] for r in user_rows}
tasks.append(resolve_users())
await gather(*tasks)
releases = []
updated = []
for row in release_rows:
repo = row[ReleaseNotification.repository_node_id.name]
repo_name = repo_ids[repo]
if not repos[repo_name].match(repo_name):
continue
if (commit_node_id := row[ReleaseNotification.resolved_commit_node_id.name]) is None:
commit_node_id, commit_hash = resolved_commits.get(
(repo, commit_prefix := row[ReleaseNotification.commit_hash_prefix.name]),
(None, None))
if commit_node_id is not None:
updated.append((repo, commit_prefix, commit_node_id, commit_hash))
else:
continue
else:
commit_hash = row[ReleaseNotification.resolved_commit_hash.name]
author = row[ReleaseNotification.author_node_id.name]
name = row[ReleaseNotification.name.name]
try:
logical_repos = logical_settings.prs(repo_name).logical_repositories
except KeyError:
logical_repos = [repo_name]
for logical_repo in logical_repos:
releases.append({
Release.author.name: user_map.get(author, author),
Release.author_node_id.name: author,
Release.commit_id.name: commit_node_id,
Release.node_id.name: commit_node_id,
Release.name.name: name,
Release.published_at.name:
row[ReleaseNotification.published_at.name].replace(tzinfo=timezone.utc),
Release.repository_full_name.name: logical_repo,
Release.repository_node_id.name: repo,
Release.sha.name: commit_hash,
Release.tag.name: None,
Release.url.name: row[ReleaseNotification.url.name],
matched_by_column: ReleaseMatch.event.value,
})
if updated:
async def update_pushed_release_commits():
for repo, prefix, node_id, full_hash in updated:
await rdb.execute(
update(ReleaseNotification)
.where(and_(ReleaseNotification.account_id == account,
ReleaseNotification.repository_node_id == repo,
ReleaseNotification.commit_hash_prefix == prefix))
.values({
ReleaseNotification.updated_at: datetime.now(timezone.utc),
ReleaseNotification.resolved_commit_node_id: node_id,
ReleaseNotification.resolved_commit_hash: full_hash,
}))
await defer(update_pushed_release_commits(),
"update_pushed_release_commits(%d)" % len(updated))
if not releases:
return dummy_releases_df(index)
df = _adjust_release_dtypes(pd.DataFrame(releases))
if index:
df.set_index(index, inplace=True)
return df
    @classmethod
    @sentry_span
    async def _store_precomputed_release_match_spans(
            cls,
            match_groups: Dict[ReleaseMatch, Dict[str, List[str]]],
            matched_bys: Dict[str, ReleaseMatch],
            time_from: datetime,
            time_to: datetime,
            account: int,
            pdb: morcilla.core.Connection) -> None:
        """Persist the [time_from, time_to] interval that was scanned for releases.

        One row per (repository, release match value) pair is upserted so that
        future queries know which time spans are already precomputed.

        :param match_groups: Map ReleaseMatch -> match value -> repository names.
        :param matched_bys: Map repository name -> the ReleaseMatch actually applied.
        :param time_from: Beginning of the scanned time interval.
        :param time_to: End of the scanned time interval; clipped to "now" below.
        :param account: State DB account ID.
        :param pdb: Precomputed DB connection (must be a Connection, not a Database).
        """
        assert isinstance(pdb, morcilla.core.Connection)
        inserted = []
        # Never record spans reaching into the future.
        time_to = min(time_to, datetime.now(timezone.utc))
        for rm, pair in match_groups.items():
            if rm == ReleaseMatch.tag:
                prefix = "tag|"
            elif rm == ReleaseMatch.branch:
                prefix = "branch|"
            elif rm == ReleaseMatch.event:
                # Event-based releases have no precomputed match spans.
                continue
            else:
                raise AssertionError("Impossible release match: %s" % rm)
            for val, repos in pair.items():
                rms = prefix + val
                for repo in repos:
                    # Avoid inserting the span with branch releases if we release by tag
                    # and the release settings are ambiguous. See DEV-1137.
                    if rm == matched_bys[repo] or rm == ReleaseMatch.tag:
                        inserted.append(GitHubReleaseMatchTimespan(
                            acc_id=account,
                            repository_full_name=repo,
                            release_match=rms,
                            time_from=time_from,
                            time_to=time_to,
                        ).explode(with_primary_keys=True))
        if not inserted:
            return
        async with pdb.raw_connection() as raw_connection:
            postgres = isinstance(raw_connection, asyncpg.Connection)
            if postgres:
                # On conflict, widen the existing span in both directions.
                sql = postgres_insert(GitHubReleaseMatchTimespan)
                sql = sql.on_conflict_do_update(
                    constraint=GitHubReleaseMatchTimespan.__table__.primary_key,
                    set_={
                        GitHubReleaseMatchTimespan.time_from.name: least(
                            sql.excluded.time_from, GitHubReleaseMatchTimespan.time_from),
                        GitHubReleaseMatchTimespan.time_to.name: greatest(
                            sql.excluded.time_to, GitHubReleaseMatchTimespan.time_to),
                    },
                )
            else:
                # SQLite fallback: plain upsert, the freshly written span wins.
                sql = insert(GitHubReleaseMatchTimespan).prefix_with("OR REPLACE")
            with sentry_sdk.start_span(op="_store_precomputed_release_match_spans/execute_many"):
                await pdb.execute_many(sql, inserted)
@classmethod
@sentry_span
async def _store_precomputed_releases(cls, releases: pd.DataFrame,
default_branches: Dict[str, str],
settings: ReleaseSettings,
account: int,
pdb: morcilla.core.Connection) -> None:
assert isinstance(pdb, morcilla.core.Connection)
if not isinstance(releases.index, pd.RangeIndex):
releases = releases.reset_index()
inserted = []
columns = [Release.node_id.name,
Release.repository_full_name.name,
Release.repository_node_id.name,
Release.author_node_id.name,
Release.name.name,
Release.tag.name,
Release.url.name,
Release.sha.name,
Release.commit_id.name,
matched_by_column,
Release.published_at.name]
for row in zip(*(releases[c].values for c in columns[:-1]),
releases[Release.published_at.name]):
obj = {columns[i]: v for i, v in enumerate(row)}
obj[Release.acc_id.name] = account
repo = row[1]
if obj[matched_by_column] == ReleaseMatch.branch:
obj[PrecomputedRelease.release_match.name] = "branch|" + \
settings.native[repo].branches.replace(
default_branch_alias, default_branches[repo])
elif obj[matched_by_column] == ReleaseMatch.tag:
obj[PrecomputedRelease.release_match.name] = \
"tag|" + settings.native[row[1]].tags
else:
raise AssertionError("Impossible release match: %s" % obj)
del obj[matched_by_column]
inserted.append(obj)
if inserted:
async with pdb.raw_connection() as raw_connection:
if isinstance(raw_connection, asyncpg.Connection):
sql = postgres_insert(PrecomputedRelease)
sql = sql.on_conflict_do_nothing()
else:
sql = insert(PrecomputedRelease).prefix_with("OR IGNORE")
with sentry_sdk.start_span(op="_store_precomputed_releases/execute_many"):
await pdb.execute_many(sql, inserted)
def dummy_releases_df(index: Optional[Union[str, Sequence[str]]] = None) -> pd.DataFrame:
    """Create an empty releases DataFrame with the canonical columns and dtypes."""
    column_names = [
        col.name for col in Release.__table__.columns if col.name != Release.acc_id.name
    ]
    column_names.append(matched_by_column)
    empty = _adjust_release_dtypes(pd.DataFrame(columns=column_names))
    if index:
        empty.set_index(index, inplace=True)
    return empty
# numpy dtype of a pandas timestamp (datetime64[ns]); computed once at import time.
_tsdt = pd.Timestamp(2000, 1, 1).to_numpy().dtype
def _adjust_release_dtypes(df: pd.DataFrame) -> pd.DataFrame:
    """Force integer dtypes on the ID columns and normalize `published_at`."""
    int_columns = ((Release.node_id.name, False),
                   (Release.author_node_id.name, True),
                   (Release.commit_id.name, False),
                   (matched_by_column, False))
    for column, fill_missing in int_columns:
        if fill_missing:
            # Missing authors become 0 so that the cast to int succeeds.
            df[column] = df[column].fillna(0)
        try:
            df[column] = df[column].astype(int, copy=False)
        except KeyError:
            # Only the node ID column may be absent (e.g. moved to the index).
            assert column == Release.node_id.name
    return postprocess_datetime(df, [Release.published_at.name])
def group_repos_by_release_match(repos: Iterable[str],
                                 default_branches: Dict[str, str],
                                 release_settings: ReleaseSettings,
                                 ) -> Tuple[Dict[ReleaseMatch, Dict[str, List[str]]],
                                            int]:
    """
    Aggregate repository lists by specific release matches.

    :return: 1. map ReleaseMatch => map Required match regexp => list of repositories. \
             2. number of processed repositories.
    """
    match_groups = {
        ReleaseMatch.tag: {},
        ReleaseMatch.branch: {},
        ReleaseMatch.event: {},
    }
    count = 0
    for repo in repos:
        count += 1
        setting = release_settings.native[repo]
        match = setting.match
        # tag_or_branch contributes to both the tag and the branch groups
        if match in (ReleaseMatch.tag, ReleaseMatch.tag_or_branch):
            match_groups[ReleaseMatch.tag].setdefault(setting.tags, []).append(repo)
        if match in (ReleaseMatch.branch, ReleaseMatch.tag_or_branch):
            branch = setting.branches.replace(default_branch_alias, default_branches[repo])
            match_groups[ReleaseMatch.branch].setdefault(branch, []).append(repo)
        if match == ReleaseMatch.event:
            match_groups[ReleaseMatch.event].setdefault(setting.events, []).append(repo)
    return match_groups, count
def match_groups_to_sql(match_groups: Dict[ReleaseMatch, Dict[str, Iterable[str]]],
                        model) -> Tuple[List[ClauseElement], List[Iterable[str]]]:
    """
    Convert the grouped release matches to a list of SQL conditions.

    :return: 1. List of the alternative SQL filters. \
             2. List of involved repository names for each SQL filter.
    """
    or_conditions, repos = match_groups_to_conditions(match_groups, model)
    filters = []
    for cond in or_conditions:
        filters.append(and_(
            model.release_match == cond[model.release_match.name],
            model.repository_full_name.in_(cond[model.repository_full_name.name]),
        ))
    return filters, repos
def match_groups_to_conditions(
        match_groups: Dict[ReleaseMatch, Dict[str, Iterable[str]]],
        model,
) -> Tuple[List[List[dict]], List[Iterable[str]]]:
    """
    Convert the grouped release matches to a list of conditions.

    :return: 1. List of the filters to OR/UNION later. \
             2. List of involved repository names for each filter.
    """
    or_conditions = []
    repos = []
    # tag/branch/event values carry a "|" separator; rejected and
    # force_push_drop are stored bare.
    specs = ((ReleaseMatch.tag, "|"),
             (ReleaseMatch.branch, "|"),
             (ReleaseMatch.rejected, ""),
             (ReleaseMatch.force_push_drop, ""),
             (ReleaseMatch.event, "|"))
    for match, suffix in specs:
        match_group = match_groups.get(match)
        if not match_group:
            continue
        for value, group_repos in match_group.items():
            or_conditions.append({
                model.release_match.name: match.name + suffix + value,
                model.repository_full_name.name: group_repos,
            })
            repos.append(group_repos)
    return or_conditions, repos
def set_matched_by_from_release_match(df: pd.DataFrame,
                                      remove_ambiguous_tag_or_branch: bool,
                                      repo_column: Optional[str] = None,
                                      ) -> pd.DataFrame:
    """
    Set `matched_by_column` from `PrecomputedRelease.release_match` column. Drop the latter.

    :param df: DataFrame of Release-compatible models.
    :param remove_ambiguous_tag_or_branch: Indicates whether to remove ambiguous \
                                           "tag_or_branch" precomputed releases.
    :param repo_column: Required if `remove_ambiguous_tag_or_branch` is True.
    """
    # Work on fixed-width bytes so the numpy char operations run vectorized.
    release_matches = df[PrecomputedRelease.release_match.name].values.astype("S")
    matched_by_tag_mask = np.char.startswith(release_matches, b"tag|")
    matched_by_branch_mask = np.char.startswith(release_matches, b"branch|")
    # NOTE(review): exact equality here, unlike the startswith checks above — this
    # only matches the literal b"event|" value; confirm that event matches are
    # stored without a trailing value.
    matched_by_event_mask = release_matches == b"event|"
    if remove_ambiguous_tag_or_branch:
        assert repo_column is not None
        repos = df[repo_column].values
        # Repositories matched by both tag and branch are ambiguous; prefer the tags.
        ambiguous_repos = np.intersect1d(repos[matched_by_tag_mask], repos[matched_by_branch_mask])
        if len(ambiguous_repos):
            matched_by_branch_mask[np.in1d(repos, ambiguous_repos)] = False
    matched_values = np.full(len(df), ReleaseMatch.rejected)
    matched_values[matched_by_tag_mask] = ReleaseMatch.tag
    matched_values[matched_by_branch_mask] = ReleaseMatch.branch
    matched_values[matched_by_event_mask] = ReleaseMatch.event
    df[matched_by_column] = matched_values
    df.drop(PrecomputedRelease.release_match.name, inplace=True, axis=1)
    # Drop the rows that stayed "rejected" (matched neither tag, branch, nor event).
    df = df.take(np.flatnonzero(df[matched_by_column].values != ReleaseMatch.rejected))
    return df
@cached_methods
class ReleaseMatcher:
"""Release matcher for tag and branch."""
def __init__(self, account: int, meta_ids: Tuple[int, ...],
mdb: Database, pdb: Database,
cache: Optional[aiomcache.Client]):
"""Create a `ReleaseMatcher`."""
self._account = account
self._meta_ids = meta_ids
self._mdb = mdb
self._pdb = pdb
self._cache = | |
# MIT License
#
# Copyright (c) 2019 SSL-Roots
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# coding: UTF-8
import rospy
import math
import copy
from python_qt_binding.QtCore import Qt, QPointF, QRectF
from python_qt_binding.QtGui import QPainter, QPen ,QColor, QPolygonF
from python_qt_binding.QtGui import QMouseEvent
from python_qt_binding.QtWidgets import QWidget
from geometry_msgs.msg import Pose2D
from consai2_msgs.msg import VisionGeometry, BallInfo, RobotInfo
from consai2_msgs.msg import Replacements, ReplaceBall, ReplaceRobot
from consai2_msgs.msg import ControlTarget
from consai2_msgs.msg import DecodedReferee
import tool
class PaintWidget(QWidget):
def __init__(self, parent=None):
super(PaintWidget, self).__init__(parent)
self._WHITE_LINE_THICKNESS = 2 # 白線の太さ
self._ZOOM_RATE = 0.1 # 拡大縮小率
self._SCALE_LIMIT = 0.2 # 縮小率の限界値
self._ALPHA_DETECTED = 255 # 検出できたロボット・ボールの透明度
self._ALPHA_NOT_DETECTED = 127 # 検出できなかったロボット・ボールの透明度
self._COLOR_BALL = QColor(Qt.red)
self._COLOR_ROBOT = {'blue':QColor(Qt.cyan), 'yellow':QColor(Qt.yellow)}
self._ID_POS = (0.15, 0.15) # IDを描画するロボット中心からの位置
self._FLAG_POS = (0.15, 0) # control_targetのフラグを描画するロボット中心からの位置
# Replace
self._REPLACE_CLICK_POS_THRESHOLD = 0.1
self._REPLACE_CLICK_VEL_ANGLE_THRESHOLD = self._REPLACE_CLICK_POS_THRESHOLD + 0.1
self._REPLACE_BALL_VELOCITY_GAIN = 3.0
self._REPLACE_MAX_BALL_VELOCITY = 8.0
self._BALL_RADIUS = rospy.get_param('consai2_description/ball_radius', 0.0215)
self._ROBOT_RADIUS = rospy.get_param('consai2_description/robot_radius', 0.09)
self._MAX_ID = rospy.get_param('consai2_description/max_id', 15)
self._SIDE = rospy.get_param('consai2_description/our_side', 'left')
# チームサイドの反転
self._invert_side = False
if self._SIDE != 'left':
self._invert_side = True
# GUIパラメータ
self._trans = QPointF(0.0, 0.0) # x, y方向の移動
self._mouse_trans = QPointF(0.0, 0.0) # マウス操作による移動
self._scale = QPointF(1.0, 1.0) # 拡大, 縮小
self._do_rotate_view = False # 90度回転判定
self._view_height = self.height() # 描画サイズ(縦)
self._view_width = self.width() # 描画サイズ(横)
self._scale_field_to_view = 1.0 # フィールドから描画領域に縮小するスケール
self._click_point = QPointF(0.0, 0.0) # マウスでクリックした描画座標
self._current_mouse_pos = QPointF(0.0, 0.0) # マウスカーソル位置
self._replace_func = None
self._replace_id = 0
self._replace_is_yellow = False
self._do_replacement = False
self._replacement_target = {'ball_pos':False, 'ball_vel':False,
'robot_pos':False, 'robot_angle':False}
# フィールド形状
# raw_vision_geometryの値で更新される
self._field_length = 9.0
self._field_width = 6.0
self._field_goal_width = 1.0
self._field_goal_depth = 0.2
self._field_boundary_width = 0.3
self._field_lines = []
self._field_arcs = []
# ジョイスティック情報
self._joy_target = ControlTarget()
# ロボット・ボール情報
self._ball_info = BallInfo()
self._robot_info = {'blue':[],'yellow':[]}
# レフェリー情報
self._decoded_referee = None
# Publisher
self._pub_replace = rospy.Publisher('sim_sender/replacements',
Replacements, queue_size=1)
# Subscribers
self._sub_decoded_referee = rospy.Subscriber(
'referee_wrapper/decoded_referee', DecodedReferee,
self._callback_referee, queue_size=1)
self._sub_geometry = rospy.Subscriber(
'vision_receiver/raw_vision_geometry', VisionGeometry,
self._callback_geometry, queue_size=1)
self._sub_ball_info = rospy.Subscriber(
'vision_wrapper/ball_info', BallInfo,
self._callback_ball_info, queue_size=1)
self._sub_joy_target = rospy.Subscriber(
'consai2_examples/joy_target', ControlTarget,
self._callback_joy_target, queue_size=1)
self._subs_robot_info = {'blue':[], 'yellow':[]}
self._control_targets = {'blue':[], 'yellow':[]}
self._subs_control_target = {'blue':[], 'yellow':[]}
for robot_id in range(self._MAX_ID +1):
self._robot_info['blue'].append(RobotInfo())
self._robot_info['yellow'].append(RobotInfo())
self._control_targets['blue'].append(ControlTarget())
self._control_targets['yellow'].append(ControlTarget())
# 末尾に16進数の文字列をつける
topic_id = hex(robot_id)[2:]
topic_name = 'vision_wrapper/robot_info_blue_' + topic_id
self._subs_robot_info['blue'].append(
rospy.Subscriber(topic_name, RobotInfo,
self._callback_blue_info, callback_args=robot_id))
topic_name = 'vision_wrapper/robot_info_yellow_' + topic_id
self._subs_robot_info['yellow'].append(
rospy.Subscriber(topic_name, RobotInfo,
self._callback_yellow_info, callback_args=robot_id))
topic_name = 'consai2_game/control_target_blue_' + topic_id
self._subs_control_target['blue'].append(
rospy.Subscriber(topic_name, ControlTarget,
self._callback_blue_target, callback_args=robot_id))
topic_name = 'consai2_game/control_target_blue_' + topic_id
self._subs_control_target['blue'].append(
rospy.Subscriber(topic_name, ControlTarget,
self._callback_blue_target, callback_args=robot_id))
topic_name = 'consai2_game/control_target_yellow_' + topic_id
self._subs_control_target['yellow'].append(
rospy.Subscriber(topic_name, ControlTarget,
self._callback_yellow_target, callback_args=robot_id))
# Configs
# This function enables mouse tracking without pressing mouse button
self.setMouseTracking(True)
def _callback_geometry(self, msg):
# フィールド形状を更新
if msg.field_length:
self._field_length = msg.field_length
if msg.field_width:
self._field_width = msg.field_width
if msg.goal_width:
self._field_goal_width = msg.goal_width
if msg.goal_depth:
self._field_goal_depth = msg.goal_depth
if msg.boundary_width:
self._field_boundary_width = msg.boundary_width
if msg.field_lines:
self._field_lines = []
for line in msg.field_lines:
self._field_lines.append(
{"name":line.name, "p1_x":line.p1_x, "p1_y":line.p1_y,
"p2_x":line.p2_x, "p2_y":line.p2_y, "thickness":line.thickness})
if msg.field_arcs:
self._field_arcs = []
for arc in msg.field_arcs:
self._field_arcs.append(
{"name":arc.name, "center_x":arc.center_x, "center_y":arc.center_y,
"radius":arc.radius, "a1":arc.a1, "a2":arc.a2, "thickness":arc.thickness})
self._resize_draw_world()
    def _callback_referee(self, msg):
        """Store the latest DecodedReferee message."""
        self._decoded_referee = msg
    def _callback_ball_info(self, msg):
        """Store the latest BallInfo message."""
        self._ball_info = msg
    def _callback_blue_info(self, msg, robot_id):
        """Store the latest RobotInfo of blue robot `robot_id`."""
        self._robot_info['blue'][robot_id] = msg
    def _callback_yellow_info(self, msg, robot_id):
        """Store the latest RobotInfo of yellow robot `robot_id`."""
        self._robot_info['yellow'][robot_id] = msg
    def _callback_blue_target(self, msg, robot_id):
        """Store the latest ControlTarget of blue robot `robot_id`."""
        self._control_targets['blue'][robot_id] = msg
    def _callback_yellow_target(self, msg, robot_id):
        """Store the latest ControlTarget of yellow robot `robot_id`."""
        self._control_targets['yellow'][robot_id] = msg
    def _callback_joy_target(self, msg):
        """Store the latest joystick ControlTarget."""
        self._joy_target = msg
def mousePressEvent(self, event):
# マウスのドラッグ操作で描画領域を移動する
# 右クリックで移動と拡大縮小をリセットする
if event.buttons() == Qt.LeftButton:
self._click_point = event.localPos()
self._do_replacement = self._is_replacement_click(self._click_point)
elif event.buttons() == Qt.RightButton:
self._reset_painter_status()
self.update()
def mouseMoveEvent(self, event):
# マウスのドラッグ操作で描画領域を移動する
# Replacementを行うときは描画領域を移動しない
self._current_mouse_pos = event.localPos()
if self._do_replacement:
pass
elif event.buttons() == Qt.LeftButton:
self._mouse_trans = (
self._current_mouse_pos - self._click_point) / self._scale.x()
self.update()
def mouseReleaseEvent(self, event):
# マウスのドラッグ操作で描画領域を移動する
# Replacementを行うときは描画領域を移動しない
if self._do_replacement:
self._do_replacement = False
self._replace_func(event.localPos())
else:
self._trans += self._mouse_trans
self._mouse_trans = QPointF(0.0, 0.0)
self.update()
def wheelEvent(self, event):
# マウスのホイール操作で描画領域を拡大縮小する
s = self._scale.x()
if event.angleDelta().y() > 0:
self._scale.setX(s + self._ZOOM_RATE)
self._scale.setY(s + self._ZOOM_RATE)
else:
if s > self._SCALE_LIMIT:
self._scale.setX(s - self._ZOOM_RATE)
self._scale.setY(s - self._ZOOM_RATE)
self.update()
    def paintEvent(self, event):
        """Redraw everything: field, referee info, ball, robots and replacement aids."""
        painter = QPainter(self)

        # Move the drawing origin to the center of the widget
        cx = float(self.width()) * 0.5
        cy = float(self.height()) * 0.5
        painter.translate(cx,cy)

        painter.scale(self._scale.x(), self._scale.y())
        painter.translate(self._trans + self._mouse_trans)

        if self._do_rotate_view is True:
            painter.rotate(-90)

        # Everything below is layered on top of the field
        self._draw_field(painter)

        # Referee information
        self._draw_referee(painter)

        self._draw_ball(painter)
        self._draw_ball_velocity(painter)

        # Joystick target path, when present
        if len(self._joy_target.path) > 0:
            self._draw_joy_target(painter)

        self._draw_robots(painter)

        # grSim replacement overlays
        if self._replacement_target['ball_pos'] or self._replacement_target['robot_pos']:
            self._draw_pos_replacement(painter)
            self._draw_cursor_coordinate(painter)
        elif self._replacement_target['ball_vel']:
            self._draw_vel_replacement(painter)
        elif self._replacement_target['robot_angle']:
            self._draw_angle_replacement(painter)
        else:
            self._draw_cursor_coordinate(painter)
    def resizeEvent(self, event):
        """Recompute the field-to-view scaling whenever the widget is resized."""
        self._resize_draw_world()
    def _resize_draw_world(self):
        """Fit the drawn field to the current widget size.

        Also decides whether the field should be drawn rotated by 90 degrees,
        with hysteresis so the rotation does not flicker near the threshold.
        """
        # Aspect ratio of the widget
        widget_height = float(self.height())
        widget_width = float(self.width())
        widget_w_per_h = widget_width / widget_height
        # Aspect ratio of the field (including the boundary margin)
        field_width = self._field_length + self._field_boundary_width * 2.0
        field_height = self._field_width + self._field_boundary_width * 2.0
        field_w_per_h = field_width / field_height
        field_h_per_w = 1.0 / field_w_per_h

        if widget_w_per_h >= field_w_per_h:
            # Widget is wider than the field: fit by height, no rotation
            self._view_height = widget_height
            self._view_width = widget_height * field_w_per_h
            self._do_rotate_view = False
        elif widget_w_per_h <= field_h_per_w:
            # Widget is taller than the rotated field: fit by width, rotate 90 degrees
            self._view_height = widget_width
            self._view_width = widget_width * field_w_per_h
            self._do_rotate_view = True
        else:
            # In-between region: keep the previous rotation (hysteresis)
            if self._do_rotate_view is True:
                self._view_height = widget_height * field_h_per_w
                self._view_width = widget_height
            else:
                self._view_height = widget_width * field_h_per_w
                self._view_width = widget_width
        self._scale_field_to_view = self._view_width / field_width
def _convert_to_view(self, x, y):
# フィールド座標系を描画座標系に変換する
view_x = x * self._scale_field_to_view
view_y = -y * self._scale_field_to_view
point = QPointF(view_x, view_y)
return point
    def _convert_to_field(self, x, y):
        """Convert a view (drawing) coordinate back to a field coordinate.

        Applies the inverse of the transforms used in paintEvent: zoom,
        panning, centering, and the optional 90-degree rotation.
        """
        # Undo zoom
        x /= self._scale.x()
        y /= self._scale.y()
        # Undo panning (including the drag currently in progress)
        x -= (self._trans.x() + self._mouse_trans.x())
        y -= (self._trans.y() + self._mouse_trans.y())
        # Undo the translation to the widget center
        x -= self.width() * 0.5 / self._scale.x()
        y -= self.height() * 0.5 / self._scale.y()

        # Undo the 90-degree rotation if active
        if self._do_rotate_view:
            x, y = -y, x

        field_x = x / self._scale_field_to_view
        field_y = -y / self._scale_field_to_view
        point = QPointF(field_x, field_y)
        return point
    def _reset_painter_status(self):
        """Reset the view translation and zoom to their defaults."""
        self._trans = QPointF(0.0, 0.0)
        self._mouse_trans = QPointF(0.0, 0.0)
        self._scale = QPointF(1.0, 1.0)
def _is_replacement_click(self, mouse_pos):
# クリックした描画位置にオブジェクトがあればReplacementと判定する
# ボールとロボットが近い場合、ボールのReplacementを優先する
field_point = self._convert_to_field(mouse_pos.x(), mouse_pos.y())
is_clicked = True
result = self._is_ball_clicked(field_point)
if result == 'pos':
self._replacement_target['ball_pos'] = True
self._replace_func = self._replace_ball_pos
elif result == 'vel_angle':
self._replacement_target['ball_vel'] = True
self._replace_func = self._replace_ball_vel
else:
result, robot_id, is_yellow = self._is_robot_clicked(field_point)
self._replace_id = robot_id
self._replace_is_yellow = is_yellow
if result == 'pos':
self._replacement_target['robot_pos'] = True
self._replace_func = self._replace_robot_pos
elif result == 'vel_angle':
self._replacement_target['robot_angle'] = True
self._replace_func = self._replace_robot_angle
else:
is_clicked = False
return is_clicked
def _is_clicked(self, field_point1, field_point2):
# フィールド上のオブジェクトをクリックしたかどうか判定する
diff_point = field_point1 - field_point2
diff_norm = math.hypot(diff_point.x(), diff_point.y())
if diff_norm < self._REPLACE_CLICK_POS_THRESHOLD:
return 'pos'
elif diff_norm < self._REPLACE_CLICK_VEL_ANGLE_THRESHOLD:
return 'vel_angle'
return False
def _is_ball_clicked(self, field_point):
# ボールをクリックしたか判定する
# ボールが消えていれば判定しない
if self._ball_info.disappeared:
return False
pos_x = self._ball_info.pose.x
pos_y = self._ball_info.pose.y
ball_pos = QPointF(pos_x, pos_y)
return self._is_clicked(field_point, ball_pos)
def _is_robot_clicked(self, field_point):
# ロボットをクリックしたか判定する
# 消えたロボットは対照外
is_clicked = False
replace_id = 0
is_yellow = False
for robot in self._robot_info['blue']:
if robot.disappeared:
continue
robot_point = QPointF(robot.pose.x, robot.pose.y)
is_clicked = self._is_clicked(field_point, robot_point)
if is_clicked:
is_yellow = False
return is_clicked, robot.robot_id, is_yellow
for robot in self._robot_info['yellow']:
if robot.disappeared:
continue
robot_point = QPointF(robot.pose.x, robot.pose.y)
is_clicked = self._is_clicked(field_point, robot_point)
if is_clicked:
is_yellow = True
return is_clicked, robot.robot_id, is_yellow
return is_clicked, replace_id, is_yellow
def _replace_ball_pos(self, mouse_pos):
# ボール位置のReplacement
field_point = self._convert_to_field(mouse_pos.x(), mouse_pos.y())
ball = ReplaceBall()
ball.x = field_point.x()
ball.y = field_point.y()
ball.is_enabled = True
if self._invert_side:
ball.x *= -1.0
ball.y *= -1.0
replacements = Replacements()
replacements.ball = ball
self._pub_replace.publish(replacements)
self._replacement_target['ball_pos'] = False
def _replace_ball_vel(self, mouse_pos):
# ボール速度のReplacement
ball_point = QPointF(self._ball_info.pose.x, | |
shuffle != 0:
if words[0] not in interactomeNodes:
interactomeNodes.append(words[0])
if words[1] not in interactomeNodes:
interactomeNodes.append(words[1])
line = e.readline()
e.close()
# dirEndpoints stores edge endpoints of all directed edges together in one set
# dirEdges is dict<str, dict<str, str>>
dirEndpoints = set()
for k, v in dirEdges.iteritems():
dirEndpoints.add(k)
for k2 in v.iterkeys():
dirEndpoints.add(k2)
self.interactomeNodes = interactomeNodes
if above1 > 0:
print 'WARNING!! All edgeweights should be a probability of protein '\
'interaction. '+str(above1)+' of your edge weights include a number greater than 0.99.'\
' These were changed to 0.99...\n'
if below_0 > 0:
print 'WARNING!! All edgeweights should be a probability of protein '\
'interaction. '+str(below_0)+' of your edge weights include a number below than 0. '\
'These were changed to 0...\n'
print 'Reading text file containing prizes: %s...\n' %prizeFile
origPrizes = {}
terminalTypes = {}
try:
p = open(prizeFile, 'rb')
except IOError:
sys.exit('ERROR: No such file %s, aborting program.\n' %prizeFile)
#Count how many of these proteins are not in the interactome
count = 0
#Add each node in prizeFile to origPrizes dictionary
line = p.readline()
try:
words = line.strip().split()
words[1] = float(words[1])
except ValueError:
#Skipping header line
line=p.readline()
while line:
words = line.strip().split()
if len(words) != 2:
print 'current line:', line
sys.exit('ERROR: File containing prizes should have exactly two columns: '\
'ProteinName\tPrizeValue. Protein names should not have spaces.')
#Increase count if this is not in the interactome
if words[0] not in undirEdges and words[0] not in dirEndpoints:
count += 1
else:
origPrizes[words[0]] = float(words[1])
terminalTypes[words[0]] = 'Proteomic'
line = p.readline()
p.close()
if garnet != None:
print 'Reading text file containing TF regression results: %s...\n' %garnet
if os.path.exists(garnet):
with open(garnet, 'rb') as g:
line = g.readline()
while line:
words = line.strip().split()
if len(words) != 2:
print 'current line:', line
sys.exit('ERROR: File containing TFs should have exactly two columns: '\
'TF_Name\tPrizeValue. TF names should not have spaces.')
#Increase count if this is not in the interactome
if words[0] not in undirEdges and words[0] not in dirEndpoints:
count += 1
else:
# Scale prize using garnetBeta
prize = float(words[1])*self.gb
#If the TF already has a prize value this will replace it.
origPrizes[words[0]] = prize
if words[0] in terminalTypes.keys():
terminalTypes[words[0]]+='_TF'
else:
terminalTypes[words[0]]='TF'
line = g.readline()
else:
sys.exit('ERROR: No such garnet file %s' %garnet)
#Warning if supplied proteins were not in the interactome
percentexcluded = (count/float(len(origPrizes.keys())+count)) * 100
if percentexcluded > 90:
sys.exit('ERROR: %i percent of your prize nodes are not included in the '\
'interactome! Make sure the protein names you are using are the same in your '\
'prize file as in your edge file. Aborting program.\n' %percentexcluded)
elif percentexcluded > 0:
print 'WARNING!! %.3f percent of your prize nodes are not included in the interactome!'\
' These nodes were ignored. Make sure the protein names you are using are the '\
'same in your prize file as in your edge file. Continuing program...\n' \
%percentexcluded
warnings += 1
#Warning for self-edges
if selfedges > 0:
print 'WARNING: There were %i self-edges in your interactome. We ignored these '\
'edges.\n' %selfedges
warnings += 1
#Notice for knockouts
if knockoutCount > 0:
print 'There were %i edges connected to your knockout protein(s). We ignored these '\
'edges.\n' %knockoutCount
print 'Input prize files and edge files have been successfully read.\n'
#Connect dummy node to nodes in interactome, depending on dummyMode
dummyNodeNeighbors = []
if dummyMode == 'terminals':
dummyNodeNeighbors = origPrizes.keys()
print 'Dummy node has been added, with edges to all %i nodes which were assigned '\
'prizes.\n' %len(origPrizes.keys())
elif dummyMode == 'all':
dummyNodeNeighbors = interactomeNodes
print 'Dummy node has been added, with edges to all %i nodes in the interactome.\n' \
%len(interactomeNodes)
elif dummyMode == 'others':
nonterminalNodes = []
for node1 in undirEdges:
for node2 in undirEdges[node1]:
if node2 not in origPrizes and node2 not in nonterminalNodes:
nonterminalNodes.append(node2)
if node1 not in origPrizes and node1 not in nonterminalNodes:
nonterminalNodes.append(node1)
for node1 in dirEdges:
for node2 in dirEdges[node1]:
if node2 not in nonterminalNodes and node2 not in origPrizes:
nonterminalNodes.append(node2)
if node1 not in nonterminalNodes and node1 not in origPrizes:
nonterminalNodes.append(node1)
dummyNodeNeighbors = nonterminalNodes
print 'Dummy node has been added, with edges to all %i nodes in the interactome '\
'which have not been assigned prizes.\n' %len(nonterminalNodes)
else:
#Keep track of how many genes on dummyNeighbors list are actually in interactome
countNeighbors = 0.00
numExcluded = 0
line = dummyFile.readline()
while line:
line = line.strip()
if line not in undirEdges and line not in dirEndpoints:
#protein not in interactome. Ignore edge but add to tally
numExcluded += 1
else:
dummyNodeNeighbors.append(line)
countNeighbors += 1
line = dummyFile.readline()
dummyFile.close()
if countNeighbors == 0:
sys.exit('The file you provided for dummyMode does not contain any proteins in' \
'the interactome. Each line in your text file should contain the name of'\
'one protein. Make sure the names are the same as in the interactome.')
percentexcluded = (numExcluded/countNeighbors) * 100
#Warning if too many proteins in the file are excluded from dummyNodeNeighbors
if percentexcluded > 0:
print 'WARNING!! %i percent of the proteins listed in dummyNeighbors.txt are not '\
'included in the interactome! Make sure the protein names you are using '\
'are the same in this file as in the interactome. Continuing program...\n' \
%percentexcluded
warnings += 1
print 'Dummy node has been added, with edges to all %i nodes in the interactome '\
'listed in your dummyMode file.\n' %int(countNeighbors)
self.terminalTypes = terminalTypes
self.origPrizes = origPrizes
self.dirEdges = dirEdges
self.undirEdges = undirEdges
self.dummyNodeNeighbors = dummyNodeNeighbors
self.musquared = musquared
self.assignNegPrizes(musquared, excludeT)
if warnings > 0:
print 'THERE WERE %s WARNING(S) WHEN READING THE INPUT FILES.\n' %warnings
    def assignNegPrizes(self, musquared, excludeT):
        """
        Scales original prizes by beta and adds negative prizes to penalize
        nodes with high degrees if mu > 0.
        Scales original prizes by beta if mu = 0.
        mu < 0 will cause score function to throw a ValueError.

        musquared -- if True, negative prizes are proportional to degree^2
        excludeT  -- if True, terminals keep their beta-scaled prizes and
                     receive no negative degree penalty
        Results are stored on self.negPrizes and self.totalPrizes.
        """
        negPrizes = {}
        totalPrizes = {}
        if self.mu != 0.0:
            print 'Adding negative prizes to nodes in interactome using mu parameter...'
            if musquared: print 'Negative prizes will be proportional to node degree^2.'
            if excludeT: print 'Terminals will retain their assigned prizes, no negative prizes.'
            # node -> degree over the combined directed+undirected edge sets
            DegreeDict = self.degreeNegPrize()
            for prot in self.origPrizes:
                if not excludeT:
                    try:
                        degree = DegreeDict[prot]
                        # score() is defined elsewhere in this file; per the
                        # docstring it raises ValueError when self.mu < 0
                        negprize = score(degree,self.mu,musquared)
                        prize = (self.b * float(self.origPrizes[prot])) +\
                                negprize
                        totalPrizes[prot] = prize
                        negPrizes[prot] = negprize
                    except KeyError:
                        # prize node absent from the interactome: skip it
                        continue
                else:
                    totalPrizes[prot] = self.b * float(self.origPrizes[prot])
                    negPrizes[prot] = 0
            # interactome nodes with no original prize get only the
            # (nonzero) degree-based negative prize
            for protein in DegreeDict:
                if protein not in self.origPrizes:
                    degree = DegreeDict[protein]
                    if degree > 0:
                        negprize = score(degree,self.mu,musquared)
                        if negprize != 0:
                            negPrizes[protein] = negprize
                            totalPrizes[protein] = negprize
        else:
            # mu == 0: just scale every original prize by beta
            for prot in self.origPrizes:
                prize = float(self.origPrizes[prot]) * self.b
                totalPrizes[prot] = prize
                negPrizes[prot] = 0
        self.negPrizes = negPrizes
        self.totalPrizes = totalPrizes
def degreeNegPrize(self):
"""
Helper function for use in assigning negative prizes (when mu != 0)
"""
G = nx.Graph()
degreeDict = {}
for protA in self.dirEdges:
for protB in self.dirEdges[protA]:
G.add_edge(protA, protB)
for protA in self.undirEdges:
for protB in self.undirEdges[protA]:
G.add_edge(protA, protB)
for node in G.nodes():
degreeDict[node] = G.degree(node)
return degreeDict
    def getInputInfo(self):
        """
        Prints the input information that this input object contains. Mostly used in debugging.

        NOTE(review): the parameter line below joins self.w, self.b, self.D,
        self.mu, self.r, self.g, self.gb and self.noise with '+', which
        assumes they are stored as strings -- confirm, since it would raise
        TypeError if they were numeric.
        """
        print 'The input prizes were', self.origPrizes
        print 'All undirected edges in the input interactome were', self.undirEdges
        print 'All directed edges in the input interactome were', self.dirEdges
        print 'The dummy node was connected to nodes: '+ str(self.dummyNodeNeighbors)
        print 'The parameters were: w= ' + self.w + ' b= ' +self.b+ ' D= ' + self.D + ' mu= '\
              + self.mu + ' r= ' + self.r + ' g= ' + self.g + ' garnetBeta= ' + self.gb\
              + ' noise= ' + self.noise
def runPCSF(self, msgpath, seed):
"""
Passes the information in this input object to msgsteiner, and returns | |
'2'), 2): 385.262403905,
(('0', '-1', '2', '2', '0', '-1', '1', '-1'), 1): -1000,
(('1', '2', '2', '-1', '1', '2', '-1', '-1'), 1): -76.76,
(('1', '1', '0', '1', '0', '1', '1', '1'), 3): -100,
(('0', '2', '1', '2', '2', '1', '0', '2'), 2): 256.668440335,
(('2', '-1', '1', '2', '1', '-1', '2', '-1'), 3): 738.022273229,
(('1', '-1', '1', '1', '0', '-1', '1', '-1'), 3): -100,
(('0', '2', '2', '2', '1', '2', '2', '2'), 3): -1000,
(('1', '-1', '1', '-1', '0', '-1', '-1', '-1'), 1): -96.02996927,
(('1', '-1', '1', '1', '1', '-1', '1', '-1'), 1): -521.041247578,
(('-1', '1', '0', '1', '-1', '0', '-1', '1'), 3): -100,
(('0', '-1', '0', '2', '0', '-1', '1', '-1'), 0): 919.812033623,
(('1', '2', '2', '-1', '2', '2', '-1', '-1'), 0): 592.325205901,
(('2', '2', '2', '2', '2', '2', '2', '2'), 0): -116.794544124,
(('1', '2', '0', '-1', '0', '0', '-1', '-1'), 2): 937.55233595,
(('1', '-1', '0', '-1', '1', '-1', '-1', '-1'), 2): 826.539713052,
(('1', '1', '-1', '1', '-1', '-1', '1', '1'), 3): 119.737404395,
(('1', '-1', '1', '2', '1', '-1', '0', '-1'), 0): 0,
(('0', '2', '2', '-1', '1', '1', '-1', '-1'), 0): 100,
(('1', '-1', '-1', '2', '-1', '-1', '2', '-1'), 3): -1000,
(('1', '2', '0', '2', '2', '0', '0', '2'), 0): 323.356086588,
(('1', '1', '1', '-1', '1', '0', '-1', '-1'), 3): -100,
(('1', '2', '2', '2', '1', '2', '1', '2'), 2): -1000,
(('0', '2', '1', '2', '1', '2', '2', '2'), 3): -1000,
(('1', '1', '0', '1', '1', '0', '1', '1'), 2): 751.191288325,
(('0', '-1', '1', '1', '1', '-1', '0', '-1'), 1): -1000,
(('0', '2', '2', '2', '2', '2', '2', '2'), 0): 513.855929252,
(('1', '2', '0', '-1', '2', '0', '-1', '-1'), 2): 676.360796916,
(('-1', '2', '0', '-1', '-1', '1', '-1', '-1'), 2): 802.441137467,
(('0', '2', '0', '2', '1', '2', '0', '2'), 1): -1000,
(('0', '2', '2', '-1', '1', '1', '-1', '-1'), 2): -1000,
(('1', '2', '0', '2', '2', '2', '0', '2'), 1): -1000,
(('-1', '2', '2', '2', '-1', '2', '-1', '2'), 0): 439.078941756,
(('-1', '2', '0', '-1', '-1', '2', '-1', '-1'), 3): 554.080472828,
(('2', '-1', '0', '2', '1', '-1', '2', '-1'), 1): -1000,
(('0', '-1', '-1', '2', '-1', '-1', '1', '-1'), 3): -1000,
(('1', '-1', '1', '2', '1', '-1', '1', '-1'), 1): 816.075421545,
(('0', '2', '0', '2', '1', '0', '2', '2'), 2): 288.229697825,
(('0', '-1', '0', '2', '2', '-1', '0', '-1'), 2): 685.223105313,
(('0', '2', '2', '-1', '2', '2', '-1', '-1'), 2): -1000,
(('1', '-1', '1', '1', '0', '-1', '1', '-1'), 1): -100,
(('1', '2', '1', '2', '1', '1', '1', '2'), 3): 828.798881096,
(('0', '-1', '-1', '1', '-1', '-1', '0', '-1'), 2): -100,
(('1', '-1', '0', '-1', '0', '-1', '-1', '-1'), 2): 935.720800591,
(('0', '-1', '0', '2', '0', '-1', '0', '-1'), 3): -120.0,
(('0', '2', '1', '2', '1', '1', '0', '2'), 3): 884.68996811,
(('1', '-1', '0', '2', '1', '-1', '2', '-1'), 0): 306.432492385,
(('1', '-1', '0', '2', '1', '-1', '0', '-1'), 3): 142.82168933,
(('2', '-1', '1', '2', '1', '-1', '1', '-1'), 1): 100,
(('0', '-1', '2', '-1', '2', '-1', '-1', '-1'), 0): 230.445294799,
(('1', '1', '1', '1', '0', '1', '1', '1'), 3): -89.4349931863,
(('1', '2', '1', '-1', '1', '2', '-1', '-1'), 3): 797.647152693,
(('-1', '1', '1', '1', '-1', '1', '-1', '1'), 1): 313.314419003,
(('-1', '2', '1', '-1', '-1', '2', '-1', '-1'), 0): 793.497741098,
(('1', '2', '-1', '2', '-1', '-1', '2', '2'), 1): 816.425020224,
(('0', '2', '1', '-1', '1', '1', '-1', '-1'), 2): 618.578874579,
(('0', '-1', '0', '-1', '0', '-1', '-1', '-1'), 2): 250.019165817,
(('1', '-1', '1', '1', '1', '-1', '1', '-1'), 0): -6.58162372467,
(('0', '1', '-1', '1', '-1', '-1', '0', '1'), 0): 938.226449603,
(('2', '2', '1', '-1', '1', '1', '-1', '-1'), 3): 223.578696627,
(('0', '-1', '1', '1', '0', '-1', '1', '-1'), 0): 464.588304998,
(('1', '2', '0', '2', '1', '0', '1', '2'), 2): 860.249250766,
(('-1', '2', '2', '-1', '-1', '2', '-1', '-1'), 0): 360.102450453,
(('0', '2', '0', '2', '2', '0', '0', '2'), 1): -1000,
(('1', '1', '1', '-1', '1', '1', '-1', '-1'), 3): 330.328787122,
(('2', '2', '0', '2', '2', '2', '2', '2'), 0): -1000,
(('1', '2', '0', '2', '2', '2', '1', '2'), 1): -1000,
(('0', '2', '1', '2', '1', '1', '1', '1'), 2): 117.412036127,
(('2', '-1', '2', '2', '2', '-1', '2', '-1'), 3): 627.437546191,
(('1', '1', '1', '1', '1', '0', '0', '1'), 1): -100,
(('1', '-1', '1', '1', '1', '-1', '0', '-1'), 0): 601.201914799,
(('0', '2', '1', '-1', '1', '0', '-1', '-1'), 2): 786.207108527,
(('1', '2', '2', '2', '2', '2', '1', '2'), 0): 371.640308823,
(('0', '2', '1', '2', '1', '1', '1', '2'), 0): 100,
(('0', '2', '1', '2', '1', '1', '2', '2'), 2): 44.5931180447,
(('-1', '2', '2', '2', '-1', '2', '-1', '1'), 3): 223.511533398,
(('0', '2', '0', '-1', '1', '0', '-1', '-1'), 3): 677.475056531,
(('-1', '1', '0', '-1', '-1', '0', '-1', '-1'), 1): -100,
(('0', '2', '1', '2', '0', '1', '0', '2'), 2): 0,
(('0', '2', '-1', '2', '-1', '-1', '1', '2'), 3): 0,
(('-1', '2', '1', '2', '-1', '1', '-1', '2'), 0): -256.702246625,
(('1', '-1', '-1', '2', '-1', '-1', '1', '-1'), 1): -120.0,
(('0', '2', '0', '-1', '1', '0', '-1', '-1'), 2): 240.686798799,
(('-1', '1', '0', '1', '-1', '1', '-1', '1'), 1): -100,
(('0', '-1', '2', '2', '1', '-1', '0', '-1'), 3): 191.712868353,
(('1', '-1', '0', '2', '0', '-1', '1', '-1'), 1): -17.2211150028,
(('0', '2', '0', '2', '2', '2', '0', '2'), 2): 790.196189336,
(('2', '-1', '2', '2', '0', '-1', '2', '-1'), 2): -1000,
(('2', '2', '1', '2', '2', '1', '2', '2'), 3): 130.848465161,
(('2', '2', '0', '2', '0', '2', '2', '2'), 3): -1000,
(('1', '-1', '2', '2', '0', '-1', '1', '-1'), 2): -85.0563627037,
(('2', '2', '0', '2', '2', '0', '2', '2'), 0): -1000,
(('0', '-1', '-1', '2', '-1', '-1', '2', '-1'), 0): 896.770596454,
(('0', '-1', '1', '-1', '1', '-1', '-1', '-1'), 2): -11.3878214013,
(('0', '2', '1', '2', '1', '1', '1', '1'), 3): 0,
(('0', '2', '0', '2', '2', '2', '2', '2'), 1): -1000,
(('1', '2', '1', '2', '1', '1', '2', '2'), 0): 151.921281227,
(('1', '2', '2', '2', '2', '2', '2', '2'), 1): -310.510783258,
(('0', '2', '0', '2', '0', '1', '1', '2'), 0): 680.893456358,
(('1', '-1', '2', '2', '1', '-1', '1', '-1'), 1): -128.783735913,
(('1', '-1', '-1', '1', '-1', '-1', '1', '-1'), 2): -190.006375426,
(('2', '2', '0', '-1', '1', '0', '-1', '-1'), 2): 219.781740747,
(('-1', '1', '-1', '1', '-1', '-1', '-1', '1'), 3): -276.71117503,
(('-1', '2', '0', '2', '-1', '0', '-1', '2'), 2): 887.03007369,
(('-1', '2', '0', '2', '-1', '1', '-1', '2'), 2): 858.440146993,
(('0', '-1', '2', '2', '0', '-1', '1', '-1'), 3): 245.301369043,
(('1', '2', '2', '-1', '1', '2', '-1', '-1'), 3): -120.0,
(('-1', '2', '0', '2', '-1', '2', '-1', '2'), 2): 789.147588783,
(('0', '-1', '1', '1', '1', '-1', '1', '-1'), 3): -100,
(('0', '2', '-1', '2', '-1', '-1', '0', '1'), 2): 789.795912529,
(('1', '-1', '1', '-1', '0', '-1', '-1', '-1'), 3): -50.3081303952,
(('2', '-1', '0', '2', '1', '-1', '2', '-1'), 3): 802.863671959,
(('2', '2', '2', '2', '2', '2', '2', '2'), 2): 74.2210691863,
(('1', '-1', '0', '-1', '1', '-1', '-1', '-1'), 0): 0,
(('1', '2', '0', '2', '1', '2', '0', '2'), 1): -353.069439195,
(('-1', '2', '1', '2', '-1', '0', '-1', '2'), 2): 252.276505449,
(('1', '1', '1', '1', '1', '0', '1', '0'), 3): 264.937447122,
(('0', '2', '-1', '2', '-1', '-1', '0', '2'), 2): -1000,
(('1', '-1', '2', '2', '2', '-1', '0', '-1'), 3): 900.581140029,
(('-1', '1', '-1', '1', '-1', '-1', '-1', '0'), 1): 637.09284633,
(('2', '2', '-1', '2', '-1', '-1', '2', '2'), 3): -338.970512232,
(('1', '2', '0', '2', '2', '0', '0', '2'), 2): 601.322570967,
(('1', '1', '1', '-1', '1', '0', '-1', '-1'), 1): 924.069758563,
(('2', '2', '0', '-1', '1', '2', '-1', '-1'), 2): 100,
(('2', '-1', '-1', '2', '-1', '-1', '2', '-1'), 2): -374.15594837,
(('0', '2', '0', '-1', '1', '2', '-1', '-1'), 0): 827.512521562,
(('0', '2', '0', '2', '1', '2', '0', '2'), 3): -1000,
(('1', '-1', '2', '-1', '1', '-1', '-1', '-1'), 3): 129.52,
(('-1', '2', '2', '2', '-1', '2', '-1', | |
import threadsafe_tkinter as tk
import tkinter.ttk as ttk
from copy import deepcopy
from traceback import format_exc
from binilla import editor_constants as e_c
from binilla.widgets.scroll_menu import ScrollMenu
from binilla.widgets.field_widgets import field_widget, container_frame,\
data_frame
class ArrayFrame(container_frame.ContainerFrame):
    '''Used for array nodes. Displays a single element in
    the ArrayBlock represented by it, and contains a combobox
    for selecting which array element is displayed.'''
    # index of the array entry currently displayed (-1 means none)
    sel_index = -1
    # ScrollMenu widget used to pick which entry is displayed
    sel_menu = None
    # whether child widgets have been built yet
    populated = False
    # cached {index: option_name} mapping backing the ScrollMenu
    option_cache = None
    # False when option_cache is stale and must be regenerated
    options_sane = False
    def __init__(self, *args, **kwargs):
        '''
        Builds the title bar (toggle button, name label, entry ScrollMenu)
        and the row of entry-manipulation buttons, then populates the frame
        with a widget for the selected array entry.

        Accepts the same arguments as FieldWidget/tk.Frame, plus the
        optional 'show_frame' kwarg (default: not self.blocks_start_hidden).
        '''
        kwargs.update(relief='flat', bd=0, highlightthickness=0,
                      bg=self.default_bg_color)
        field_widget.FieldWidget.__init__(self, *args, **kwargs)
        tk.Frame.__init__(self, *args, **e_c.fix_kwargs(**kwargs))
        show_frame = bool(kwargs.pop('show_frame', not self.blocks_start_hidden))
        if self.is_empty and self.hide_if_blank:
            show_frame = False
        self.show = tk.BooleanVar()
        self.show.set(show_frame)
        self.options_sane = False
        # length of the underlying array node (0 if it has no length)
        node_len = 0
        try: node_len = len(self.node)
        except Exception: pass
        # 0 when the array has entries, -1 when there is nothing to select
        self.sel_index = (node_len > 0) - 1
        # make the title, element menu, and all the buttons
        self.controls = tk.Frame(self, relief='raised', bd=self.frame_depth)
        self.title = title = tk.Frame(self.controls, relief='flat', bd=0)
        self.buttons = buttons = tk.Frame(self.controls, relief='flat', bd=0)
        toggle_text = '-' if show_frame else '+'
        self.title_label = tk.Label(
            title, text=self.gui_name, justify='left', anchor='w',
            width=self.title_size, font=self.get_font("frame_title"),
            disabledforeground=self.text_disabled_color)
        self.title_label.font_type = "frame_title"
        self.show_btn = ttk.Checkbutton(
            title, width=3, text=toggle_text, command=self.toggle_visible,
            style='ShowButton.TButton')
        self.sel_menu = ScrollMenu(
            title, f_widget_parent=self,
            sel_index=self.sel_index, max_index=node_len-1,
            option_getter=self.get_options, callback=self.select_option)
        self.shift_up_btn = ttk.Button(
            title, width=7, text='Shift ▲',
            command=self.shift_entry_up)
        self.shift_down_btn = ttk.Button(
            buttons, width=7, text='Shift ▼',
            command=self.shift_entry_down)
        self.add_btn = ttk.Button(
            buttons, width=4, text='Add',
            command=self.add_entry)
        self.insert_btn = ttk.Button(
            buttons, width=6, text='Insert',
            command=self.insert_entry)
        self.duplicate_btn = ttk.Button(
            buttons, width=9, text='Duplicate',
            command=self.duplicate_entry)
        self.delete_btn = ttk.Button(
            buttons, width=6, text='Delete',
            command=self.delete_entry)
        self.delete_all_btn = ttk.Button(
            buttons, width=10, text='Delete all',
            command=self.delete_all_entries)
        self.import_btn = ttk.Button(
            buttons, width=6, text='Import',
            command=self.import_node)
        self.export_btn = ttk.Button(
            buttons, width=6, text='Export',
            command=self.export_node)
        # pack the title, menu, and all the buttons
        for w in (self.shift_down_btn, self.export_btn, self.import_btn,
                  self.delete_all_btn, self.delete_btn, self.duplicate_btn,
                  self.insert_btn, self.add_btn):
            w.pack(side="right", padx=(0, 4), pady=(2, 2))
        self.show_btn.pack(side="left")
        if self.gui_name != '':
            self.title_label.pack(side="left", fill="x", expand=True)
        self.sel_menu.pack(side="left", fill="x", expand=True, padx=(0, 4))
        self.shift_up_btn.pack(side="right", padx=(0, 1), pady=(2, 2))
        self.title.pack(fill="x", expand=True, padx=0)
        self.buttons.pack(fill="x", expand=True, padx=0)
        self.controls.pack(fill="x", expand=True, padx=0)
        self.populate()
        self._initialized = True
@property
def is_empty(self):
if getattr(self, "node", None) is None:
return True
return len(self.node) == 0
    def load_node_data(self, parent, node, attr_index, desc=None):
        '''
        Binds this frame to a new array node and reloads the child widget
        with the entry at the current selection index (clamped to the new
        array's bounds).  Returns True if the child widget asks for a
        full rebuild, False otherwise.
        '''
        field_widget.FieldWidget.load_node_data(
            self, parent, node, attr_index, desc)
        # figure out which sub-node (array entry) the child widget should show
        sub_node = attr_index = None
        if self.node:
            attr_index = self.sel_index
            if attr_index in range(len(self.node)):
                sub_node = self.node[attr_index]
            else:
                # previous selection is out of range: clamp to the last entry
                attr_index = len(self.node) - 1
                if attr_index < 0:
                    attr_index = None
        if self.sel_menu is not None:
            # the option list no longer matches the new node's contents
            self.options_sane = self.sel_menu.options_menu_sane = False
        for wid in self.f_widgets:
            # there must be only one entry in self.f_widgets
            w = self.f_widgets[wid]
            if w.load_node_data(self.node, sub_node, attr_index):
                return True
        return False
    def unload_node_data(self):
        '''Blank the selection label, then let the base class drop node refs.'''
        self.sel_menu.update_label(" ")
        container_frame.ContainerFrame.unload_node_data(self)
def set_disabled(self, disable=True):
disable = disable or not self.editable
if self.node is None and not disable:
return
if getattr(self, "sel_menu", None):
self.sel_menu.set_disabled(disable)
if bool(disable) == self.disabled:
pass
elif not disable:
self.set_all_buttons_disabled(False)
self.disable_unusable_buttons()
else:
new_state = tk.DISABLED if disable else tk.NORMAL
for w in (self.shift_up_btn, self.shift_down_btn,
self.add_btn, self.insert_btn, self.duplicate_btn,
self.delete_btn, self.delete_all_btn):
if w:
w.config(state=new_state)
container_frame.ContainerFrame.set_disabled(self, disable)
def apply_style(self, seen=None):
container_frame.ContainerFrame.apply_style(self, seen)
self.controls.config(bd=self.frame_depth, bg=self.frame_bg_color)
self.title.config(bg=self.frame_bg_color)
self.title_label.config(bg=self.frame_bg_color)
self.buttons.config(bg=self.frame_bg_color)
#if self.show.get():
# self.pose_fields()
    def destroy(self):
        '''Drop cached references before tearing down the widget tree.'''
        # These will linger and take up RAM, even if the widget is destroyed.
        # Need to remove the references manually
        self.option_cache = None
        container_frame.ContainerFrame.destroy(self)
def export_node(self):
try:
# pass call to the export_node method of the array entry's widget
w = self.f_widgets[self.f_widget_ids[0]]
except Exception:
return
w.export_node()
def import_node(self):
try:
# pass call to the import_node method of the array entry's widget
w = self.f_widgets[self.f_widget_ids[0]]
except Exception:
return
w.import_node()
    def get_options(self, opt_index=None):
        '''
        Returns a list of the option strings sorted by option index.

        With opt_index=None the cached {index: name} mapping is returned,
        regenerating it first when the cache is stale.  With a non-None
        opt_index the freshly generated result for that index is returned.
        '''
        if (self.option_cache is None or not self.options_sane or
                opt_index is not None):
            result = self.generate_options(opt_index)
            if opt_index is not None:
                return result
        if opt_index is None:
            return self.option_cache
        elif opt_index == e_c.ACTIVE_ENUM_NAME:
            # NOTE(review): this branch looks unreachable -- any non-None
            # opt_index already returned above.  Kept as-is; confirm intent.
            opt_index = self.sel_index
        if opt_index < 0: opt_index = -1
        return self.option_cache.get(opt_index)
    def generate_options(self, opt_index=None):
        '''
        Builds the {index: option_name} mapping for the selection menu.
        With opt_index=None every entry is considered, the cache is updated
        and the whole mapping is returned; with a specific index only that
        entry's name (or None) is returned and the cache is left alone.
        '''
        # sort the options by value(values are integers)
        options = {i: n for n, i in self.desc.get('NAME_MAP', {}).items()}
        if self.node:
            node, desc = self.node, self.desc
            sub_desc = desc['SUB_STRUCT']
            # entries whose struct name equals the default one get no
            # custom option label
            def_struct_name = sub_desc['NAME']
            if self.use_gui_names and 'GUI_NAME' in sub_desc:
                def_struct_name = sub_desc['GUI_NAME']
            options_to_generate = range(len(node))
            if opt_index is not None:
                options_to_generate = (
                    (opt_index, ) if opt_index in options_to_generate else ())
            for i in options_to_generate:
                if i in options:
                    continue
                sub_node = node[i]
                if not hasattr(sub_node, 'desc'):
                    continue
                sub_desc = sub_node.desc
                sub_struct_name = sub_desc.get('GUI_NAME', sub_desc['NAME'])
                if sub_struct_name == def_struct_name:
                    continue
                options[i] = sub_struct_name
            if opt_index is None:
                # full regeneration: refresh the cache and the menu bounds
                self.options_sane = True
                self.option_cache = options
                if self.sel_menu is not None:
                    self.sel_menu.options_menu_sane = False
                    self.sel_menu.max_index = len(node) - 1
                return options
        return options.get(opt_index, None)
def set_shift_up_disabled(self, disable=True):
'''
Disables the move up button if disable is True. Enables it if not.
'''
if disable: self.shift_up_btn.config(state="disabled")
else: self.shift_up_btn.config(state="normal")
def set_shift_down_disabled(self, disable=True):
'''
Disables the move down button if disable is True. Enables it if not.
'''
if disable: self.shift_down_btn.config(state="disabled")
else: self.shift_down_btn.config(state="normal")
def set_add_disabled(self, disable=True):
'''Disables the add button if disable is True. Enables it if not.'''
if disable: self.add_btn.config(state="disabled")
else: self.add_btn.config(state="normal")
def set_insert_disabled(self, disable=True):
'''Disables the insert button if disable is True. Enables it if not.'''
if disable: self.insert_btn.config(state="disabled")
else: self.insert_btn.config(state="normal")
def set_duplicate_disabled(self, disable=True):
'''
Disables the duplicate button if disable is True. Enables it if not.
'''
if disable: self.duplicate_btn.config(state="disabled")
else: self.duplicate_btn.config(state="normal")
def set_delete_disabled(self, disable=True):
'''Disables the delete button if disable is True. Enables it if not.'''
if disable: self.delete_btn.config(state="disabled")
else: self.delete_btn.config(state="normal")
def set_delete_all_disabled(self, disable=True):
'''
Disables the delete_all button if disable is True. Enables it if not.
'''
if disable: self.delete_all_btn.config(state="disabled")
else: self.delete_all_btn.config(state="normal")
    def edit_apply(self=None, *, edit_state, undo=True):
        '''
        Applies (undo=False) or reverts (undo=True) an array edit described
        by edit_state, mutating the array node and resyncing the widget
        (if it still exists) afterwards.  Raises TypeError for an unknown
        edit_type.
        '''
        state = edit_state
        edit_type = state.edit_type
        i = state.attr_index
        undo_node = state.undo_node
        redo_node = state.redo_node
        edit_info = state.edit_info
        sel_index = edit_info.get('sel_index', 0)
        # locate the live widget (may be None) and the node being edited
        w, node = field_widget.FieldWidget.get_widget_and_node(
            nodepath=state.nodepath, tag_window=state.tag_window)
        # shifts are their own inverse, so undo/redo both just swap
        if edit_type == 'shift_up':
            node[i], node[i - 1] = node[i - 1], node[i]
        elif edit_type == 'shift_down':
            node[i], node[i + 1] = node[i + 1], node[i]
        elif edit_type in ('add', 'insert', 'duplicate'):
            if undo:
                sel_index = None
                node.pop(i)
            else:
                node.insert(i, redo_node)
        elif edit_type == 'delete':
            if undo:
                node.insert(i, undo_node)
            else:
                sel_index = None
                node.pop(i)
        elif edit_type == 'delete_all':
            if undo:
                node[:] = undo_node
            else:
                del node[:]
                sel_index = None
        else:
            raise TypeError('Unknown edit_state type')
        if w is not None:
            try:
                # ignore the widget if it no longer displays this very desc
                if w.desc is not state.desc:
                    return
                if sel_index is None:
                    pass
                elif edit_type in ('add', 'insert', 'duplicate', 'delete'):
                    w.sel_index = sel_index
                elif edit_type in ('shift_up', 'shift_down'):
                    w.sel_index = sel_index
                    # redo of a shift moves the selection with the entry
                    if undo:
                        pass
                    elif 'down' in edit_type:
                        w.sel_index += 1
                    else:
                        w.sel_index -= 1
                max_index = len(node) - 1
                w.sel_menu.max_index = max_index
                w.options_sane = w.sel_menu.options_menu_sane = False
                # clamp the selection into the new bounds and refresh
                if w.sel_index < 0:
                    w.select_option(0, force=True)
                elif w.sel_index > max_index:
                    w.select_option(max_index, force=True)
                else:
                    w.select_option(w.sel_index, force=True)
                w.needs_flushing = False
                w.set_edited()
            except Exception:
                print(format_exc())
def edit_create(self, **kwargs):
# add own stuff
kwargs.setdefault("sel_index", self.sel_index)
field_widget.FieldWidget.edit_create(self, **kwargs)
def shift_entry_up(self):
if not hasattr(self.node, '__len__') or len(self.node) < 2:
return
node = self.node
index = self.sel_index
if index <= 0:
return
self.set_edited() # do this first so the TagWindow detects that
# the title needs to be updated with an asterisk
self.edit_create(edit_type='shift_up', attr_index=index)
node[index], node[index - 1] = node[index - 1], node[index]
self.sel_index = self.sel_menu.sel_index = index - 1
self.options_sane = self.sel_menu.options_menu_sane = False
self.sel_menu.update_label()
def shift_entry_down(self):
if not hasattr(self.node, '__len__') or len(self.node) < 2:
return
node = self.node
index = self.sel_index
if index >= len(node) - 1:
return
self.set_edited() # do this first so the TagWindow detects that
# the title needs to be updated with an asterisk
self.edit_create(edit_type='shift_down', attr_index=index)
node[index], node[index + 1] = node[index + 1], node[index]
self.sel_index = self.sel_menu.sel_index = index + 1
self.options_sane = self.sel_menu.options_menu_sane = False
self.sel_menu.update_label()
def add_entry(self):
if not hasattr(self.node, '__len__'):
return
field_max = self.field_max
if field_max is not None and len(self.node) >= field_max:
if self.enforce_max:
return
attr_index = len(self.node)
self.set_edited() # do this first so the TagWindow detects that
# the title needs to be updated with an asterisk
self.node.append()
self.edit_create(edit_type='add', attr_index=attr_index,
redo_node=self.node[attr_index], sel_index=attr_index)
self.options_sane = self.sel_menu.options_menu_sane = False
self.set_all_buttons_disabled(self.disabled)
self.disable_unusable_buttons()
self.select_option(len(self.node) - 1, True)
def insert_entry(self):
if not hasattr(self.node, '__len__'):
return
field_max = | |
# repository: Exlsunshine/mlprodict
"""
@file
@brief Implements a class able to compute the predictions
from an :epkg:`ONNX` model.
"""
from collections import OrderedDict
from io import BytesIO
import json
from time import perf_counter
import warnings
import numpy
from onnx import load, load_model, checker, shape_inference
from onnx import onnx_pb as onnx_proto
from onnx import numpy_helper
from onnx.helper import make_model
from .onnx_inference_node import OnnxInferenceNode
from .onnx_inference_manipulations import select_model_inputs_outputs, enumerate_model_node_outputs
from .onnx2py_helper import _var_as_dict, _type_to_string
from .sklearn_helper import enumerate_fitted_arrays, pairwise_array_distances
class OnnxInference:
"""
Loads an :epkg:`ONNX` file or object or stream.
Computes the output of the :epkg:`ONNX` graph.
"""
def __init__(self, onnx_or_bytes_or_stream, runtime=None, skip_run=False):
"""
@param onnx_or_bytes_or_stream :epkg:`onnx` object,
bytes, or filename or stream
@param runtime runtime options
@param skip_run do not build the runtime
"""
if isinstance(onnx_or_bytes_or_stream, bytes):
self.obj = load_model(BytesIO(onnx_or_bytes_or_stream))
elif isinstance(onnx_or_bytes_or_stream, BytesIO):
self.obj = load_model(onnx_or_bytes_or_stream)
elif isinstance(onnx_or_bytes_or_stream, str):
self.obj = load(onnx_or_bytes_or_stream)
elif hasattr(onnx_or_bytes_or_stream, 'graph'):
self.obj = onnx_or_bytes_or_stream
elif isinstance(onnx_or_bytes_or_stream, onnx_proto.GraphProto):
self.obj = make_model(onnx_or_bytes_or_stream,
producer_name='mlprodict')
else:
raise TypeError("Unable to handle type {}.".format(
type(onnx_or_bytes_or_stream)))
self.runtime = runtime
self.skip_run = skip_run
self._init()
def __getstate__(self):
"""
To pickle the object.
"""
return {'onnx': self.obj.SerializeToString(),
'runtime': self.runtime,
'skip_run': self.skip_run}
def __setstate__(self, state):
"""
To unpickle the object.
"""
onx = state['onnx']
self.obj = load_model(BytesIO(onx))
self.runtime = state['runtime']
self.skip_run = state['skip_run']
self._init()
    def _init(self):
        """
        Prepares the instance to deliver predictions.

        Selects self._run depending on the runtime: a whole-model
        onnxruntime session ('onnxruntime1') or a node-by-node sequence
        runtime (everything else).
        """
        # to_sequence() is defined elsewhere on this class; it flattens the
        # graph into the dict used below (outputs, targets, sequence, inits)
        self.graph_ = self.to_sequence()
        self.outputs_ = self.graph_['outputs']
        self.target_opset_ = self.graph_['targets'].get('', None)
        if not self.skip_run:
            if self.runtime == 'onnxruntime1':
                # Loads the onnx with onnxruntime as a single file.
                del self.graph_
                from .ops_whole.session import OnnxWholeSession
                self._whole = OnnxWholeSession(self.obj, self.runtime)
                self._run = self._run_whole_runtime
            else:
                self.sequence_ = self.graph_['sequence']
                self.inits_ = self.graph_['inits']
                dtype = self._guess_input_dtype()
                variables = self.inits_.copy()
                for node in self.sequence_:
                    # 'onnxruntime2' additionally needs the guessed dtype
                    if self.runtime == 'onnxruntime2':
                        node.setup_runtime(self.runtime, variables, self.__class__,
                                           target_opset=self.target_opset_,
                                           dtype=dtype)
                    else:
                        node.setup_runtime(self.runtime, variables, self.__class__,
                                           target_opset=self.target_opset_)
                    # propagate typed outputs so later nodes can see them
                    if hasattr(node, 'ops_') and hasattr(node.ops_, 'typed_outputs_'):
                        for k, v in node.ops_.typed_outputs_:
                            variables[k] = v
                self._run = self._run_sequence_runtime
def _guess_input_dtype(self):
for _, v in self.graph_['inputs'].items():
if 'type' not in v:
continue
t = v['type']
if 'elem' not in t:
continue
if t['elem'] == 'double':
return numpy.float64
return numpy.float32
    def __str__(self):
        """
        Returns the string representation of the wrapped :epkg:`ONNX` object.
        """
        return str(self.obj)
    def __repr__(self):
        """
        Returns a short constant placeholder (the full model would be huge).
        """
        return "OnnxInference(...)"
    def check_model(self):
        """
        Checks the model follow :epkg:`ONNX` conventions.
        Delegates to ``onnx.checker``, which raises when the model is invalid.
        """
        checker.check_model(self.obj)
    def shape_inference(self):
        """
        Infers the shape of the outputs.
        @return A new :epkg:`ONNX` graph which defined outputs.
        """
        return shape_inference.infer_shapes(self.obj)
@property
def input_names(self):
"""
Returns the names of all inputs.
"""
return [_.name for _ in self.obj.graph.input]
@property
def output_names(self):
"""
Returns the names of all outputs.
"""
return [_.name for _ in self.obj.graph.output]
    def to_dot(self, recursive=False, prefix='', **params):
        """
        Produces a :epkg:`DOT` language string for the graph.

        @param params additional params to draw the graph
        @param recursive also show subgraphs inside operator like
            @see cl Scan
        @param prefix prefix for every node name
        @return string

        Default options for the graph are:

        ::

            options = {
                'orientation': 'portrait',
                'ranksep': '0.25',
                'nodesep': '0.05',
                'width': '0.5',
                'height': '0.1',
            }

        One example:

        .. exref::
            :title: Convert ONNX into DOT

            An example on how to convert an :epkg:`ONNX`
            graph into :epkg:`DOT`.

            .. runpython::
                :showcode:

                import numpy
                from skl2onnx.algebra.onnx_ops import OnnxLinearRegressor
                from skl2onnx.common.data_types import FloatTensorType
                from mlprodict.onnxrt import OnnxInference

                pars = dict(coefficients=numpy.array([1., 2.]),
                            intercepts=numpy.array([1.]),
                            post_transform='NONE')
                onx = OnnxLinearRegressor('X', output_names=['Y'], **pars)
                model_def = onx.to_onnx({'X': pars['coefficients'].astype(numpy.float32)},
                                        outputs=[('Y', FloatTensorType([1]))])
                oinf = OnnxInference(model_def)
                print(oinf.to_dot())

        See an example of representation in notebook
        :ref:`onnxvisualizationrst`.
        """
        options = {
            'orientation': 'portrait',
            'ranksep': '0.25',
            'nodesep': '0.05',
            'width': '0.5',
            'height': '0.1',
        }
        options.update(params)
        inter_vars = {}
        exp = ["digraph{"]
        # only these graph-level options are emitted into the DOT header
        for opt in {'orientation', 'pad', 'nodesep', 'ranksep'}:
            if opt in options:
                exp.append("  {}={};".format(opt, options[opt]))
        fontsize = 10
        # inputs
        exp.append("")
        for obj in self.obj.graph.input:
            dobj = _var_as_dict(obj)
            exp.append('  {3}{0} [shape=box color=red label="{0}\\n{1}" fontsize={2}];'.format(
                dobj['name'], _type_to_string(dobj['type']), fontsize, prefix))
            inter_vars[obj.name] = obj
        # outputs
        exp.append("")
        for obj in self.obj.graph.output:
            dobj = _var_as_dict(obj)
            exp.append('  {3}{0} [shape=box color=green label="{0}\\n{1}" fontsize={2}];'.format(
                dobj['name'], _type_to_string(dobj['type']), fontsize, prefix))
            inter_vars[obj.name] = obj
        # initializer
        exp.append("")
        for obj in self.obj.graph.initializer:
            dobj = _var_as_dict(obj)
            val = dobj['value']
            flat = val.flatten()
            # small tensors are printed fully; larger ones are truncated
            if flat.shape[0] < 9:
                st = str(val)
            else:
                st = str(val)
                if len(st) > 30:
                    st = st[:30] + '...'
            st = st.replace('\n', '\\n')
            kind = ""
            exp.append('  {6}{0} [shape=box label="{0}\\n{4}{1}({2})\\n{3}" fontsize={5}];'.format(
                dobj['name'], dobj['value'].dtype,
                dobj['value'].shape, st, kind, fontsize, prefix))
            inter_vars[obj.name] = obj
        # nodes
        for node in self.obj.graph.node:
            exp.append("")
            # declare intermediate results not yet seen as plain boxes
            for out in node.output:
                if out not in inter_vars:
                    inter_vars[out] = out
                    exp.append(
                        '  {2}{0} [shape=box label="{0}" fontsize={1}];'.format(out, fontsize, prefix))
            dobj = _var_as_dict(node)
            if dobj['name'].strip() == '':
                raise RuntimeError(
                    "Issue with a node\n{}\n----\n{}".format(dobj, node))
            # render the node attributes (truncated) into the label
            atts = []
            if 'atts' in dobj:
                for k, v in sorted(dobj['atts'].items()):
                    val = None
                    if 'value' in v:
                        val = str(v['value']).replace(
                            "\n", "\\n").replace('"', "'")
                        sl = max(30 - len(k), 10)
                        if len(val) > sl:
                            val = val[:sl] + "..."
                    if val is not None:
                        atts.append('{}={}'.format(k, val))
            satts = "" if len(atts) == 0 else ("\\n" + "\\n".join(atts))
            if recursive and node.op_type in {'Scan'}:
                # creates the subgraph
                body = dobj['atts']['body']['value']
                oinf = OnnxInference(
                    body, runtime=self.runtime, skip_run=self.skip_run)
                subprefix = prefix + "B_"
                subdot = oinf.to_dot(recursive=recursive, prefix=subprefix)
                lines = subdot.split("\n")
                # skip the sub-DOT header: keep everything from the first
                # node declaration onwards
                start = 0
                for i, line in enumerate(lines):
                    if '[' in line:
                        start = i
                        break
                subgraph = "\n".join(lines[start:])
                # connecting the subgraph
                exp.append("  subgraph cluster_{}{} {{".format(
                    node.op_type, id(node)))
                exp.append('    label="{0}\\n({1}){2}";'.format(
                    dobj['op_type'], dobj['name'], satts))
                exp.append('    fontsize={0};'.format(fontsize))
                exp.append('    color=black;')
                exp.append(
                    '\n'.join(map(lambda s: '  ' + s, subgraph.split('\n'))))
                for inp1, inp2 in zip(node.input, body.input):
                    exp.append(
                        "  {0}{1} -> {2}{3};".format(prefix, inp1, subprefix, inp2.name))
                for out1, out2 in zip(body.output, node.output):
                    exp.append(
                        "  {0}{1} -> {2}{3};".format(subprefix, out1.name, prefix, out2))
            else:
                exp.append('  {4}{1} [shape=box style="filled,rounded" color=orange label="{0}\\n({1}){2}" fontsize={3}];'.format(
                    dobj['op_type'], dobj['name'], satts, fontsize, prefix))
                for inp in node.input:
                    exp.append(
                        "  {0}{1} -> {0}{2};".format(prefix, inp, node.name))
                for out in node.output:
                    exp.append(
                        "  {0}{1} -> {0}{2};".format(prefix, node.name, out))
        exp.append('}')
        return "\n".join(exp)
def to_json(self, indent=2):
"""
Converts an :epkg:`ONNX` model into :epkg:`JSON`.
@param indent indentation
@return string
.. exref::
:title: Convert ONNX into JSON
An example on how to convert an :epkg:`ONNX`
graph into :epkg:`JSON`.
.. runpython::
:showcode:
import numpy
from skl2onnx.algebra.onnx_ops import OnnxLinearRegressor
from skl2onnx.common.data_types import FloatTensorType
from mlprodict.onnxrt import OnnxInference
pars = dict(coefficients=numpy.array([1., 2.]),
intercepts=numpy.array([1.]),
post_transform='NONE')
onx = OnnxLinearRegressor('X', output_names=['Y'], **pars)
model_def = onx.to_onnx({'X': pars['coefficients'].astype(numpy.float32)},
outputs=[('Y', FloatTensorType([1]))])
oinf = OnnxInference(model_def)
print(oinf.to_json())
"""
def _to_json(obj):
s = str(obj)
rows = ['{']
leave = None
for line in s.split('\n'):
if line.endswith("{"):
rows.append('"%s": {' % line.strip('{ '))
elif ':' in line:
spl = line.strip().split(':')
if len(spl) != 2:
raise RuntimeError(
"Unable to interpret line '{}'.".format(line))
if spl[0].strip() in ('type', ):
st = spl[1].strip()
if st in {'INT', 'INTS', 'FLOAT', 'FLOATS', 'STRING', 'STRINGS'}:
spl[1] = '"{}"'.format(st)
if spl[0] in ('floats', 'ints'):
if leave:
rows.append("{},".format(spl[1]))
else:
rows.append('"{}": [{},'.format(
spl[0], spl[1].strip()))
leave = spl[0]
elif leave:
rows[-1] = rows[-1].strip(',')
rows.append('],')
rows.append('"{}": {},'.format(
spl[0].strip(), spl[1].strip()))
leave = None
else:
rows.append('"{}": {},'.format(
spl[0].strip(), spl[1].strip()))
elif line.strip() == "}":
rows[-1] = rows[-1].rstrip(",")
rows.append(line + ",")
elif line:
raise RuntimeError(
"Unable to interpret line '{}'.".format(line))
rows[-1] = rows[-1].rstrip(',')
rows.append("}")
js = "\n".join(rows)
try:
content = json.loads(js)
except json.decoder.JSONDecodeError as e:
js2 = "\n".join("%04d %s" % (i + 1, line)
for i, line in enumerate(js.split("\n")))
raise RuntimeError(
"Unable to parse JSON\n{}".format(js2)) from e
return content
# meta data
final_obj = {}
for k in {'ir_version', 'producer_name', 'producer_version',
'domain', 'model_version', 'doc_string'}:
if hasattr(self.obj, k):
final_obj[k] = getattr(self.obj, k)
# inputs
inputs = []
for obj in self.obj.graph.input:
st = _to_json(obj)
inputs.append(st)
final_obj['inputs'] = inputs
# outputs
outputs = []
for obj in self.obj.graph.output:
st = _to_json(obj)
outputs.append(st)
final_obj['outputs'] = outputs
# init
inits = {}
for obj in self.obj.graph.initializer:
value = numpy_helper.to_array(obj).tolist()
inits[obj.name] = value
final_obj['initializers'] = inits
# nodes
nodes = []
for obj in self.obj.graph.node:
node = dict(name=obj.name, op_type=obj.op_type, domain=obj.domain,
inputs=[str(_) for _ in obj.input],
outputs=[str(_) for _ in obj.output],
attributes={})
for att in obj.attribute:
st = _to_json(att)
node['attributes'][st['name']] = st
del st['name']
nodes.append(node)
final_obj['nodes'] = nodes
return json.dumps(final_obj, indent=indent)
def to_sequence(self):
"""
Produces a graph to facilitate the execution.
One example:
.. exref::
:title: Convert ONNX into graph
An example on how to convert an :epkg:`ONNX`
graph into a graph.
.. runpython::
:showcode:
| |
WorkflowExecutionId, the AWS queue and state machine resources assiciated with
the workflow execution and the current execution status of the workflow.
.. code-block:: python
{
"Name": string,
"StartAt": "Preprocess",
"Stages": {
"stage-name": {
"Type": "NestedQueue",
"Resource": queueARN,
"StateMachine": stateMachineARN,
"Next": "Analysis"
},
...,
"stage-name: {
"Type": "NestedQueue",
"Resource": queueARN,
"StateMachine": stateMachineARN,
"End": true
}
}
}
Raises:
200: The workflow execution was created successfully.
400: Bad Request - the input workflow was not found or was invalid
500: ChaliceViewError - internal server error
"""
workflow_execution = json.loads(app.current_request.raw_body.decode())
return create_workflow_execution("api", workflow_execution)
def create_workflow_execution(trigger, workflow_execution):
    """Create a workflow execution record and queue it for the scheduler.

    :param trigger: What triggered the execution (e.g. "api"); stored on the
        execution record.
    :param workflow_execution: Request dict containing "Name", an optional
        "Configuration" override, and an "Input" with either "AssetId"
        (run on an existing asset) or "Media" (create a new asset first).
    :return: The initialized workflow execution dict.
    :raises BadRequestError: if Input contains neither "AssetId" nor "Media".
    :raises ConflictError: if another workflow execution is still active on
        the asset.
    :raises ChaliceViewError: on any other failure.
    """
    execution_table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)
    dynamo_status_queued = False
    create_asset = None
    logger.info('create_workflow_execution workflow config: ' + str(workflow_execution))

    if "Input" in workflow_execution and "AssetId" in workflow_execution["Input"]:
        create_asset = False
    elif "Input" in workflow_execution and "Media" in workflow_execution["Input"]:
        create_asset = True
    else:
        raise BadRequestError('Input must contain either "AssetId" or "Media"')

    # NOTE(review): ConflictError raised inside this try is re-wrapped as
    # ChaliceViewError (500) by the blanket handler below -- confirm that is
    # the intended HTTP status.
    try:
        Name = workflow_execution["Name"]
        Configuration = workflow_execution.get("Configuration", {})
        # BRANDON - make an asset
        dataplane = DataPlane()
        if create_asset is True:
            try:
                input = workflow_execution["Input"]["Media"]
                media_type = list(input.keys())[0]
                s3bucket = input[media_type]["S3Bucket"]
                s3key = input[media_type]["S3Key"]
            except KeyError as e:
                logger.error("Exception {}".format(e))
                raise ChaliceViewError("Exception '%s'" % e)
            else:
                asset_creation = dataplane.create_asset(media_type, s3bucket, s3key)
                # If create_asset fails, then asset_creation will contain the error
                # string instead of the expected dict. So, we'll raise that error
                # if we get a KeyError in the following try block:
                try:
                    asset_input = {
                        "Media": {
                            media_type: {
                                "S3Bucket": asset_creation["S3Bucket"],
                                "S3Key": asset_creation["S3Key"]
                            }
                        }
                    }
                    asset_id = asset_creation["AssetId"]
                except KeyError:
                    logger.error("Error creating asset {}".format(asset_creation))
                    raise ChaliceViewError("Error creating asset '%s'" % asset_creation)
        else:
            try:
                input = workflow_execution["Input"]["AssetId"]
            except KeyError as e:
                logger.error("Exception {}".format(e))
                raise ChaliceViewError("Exception '%s'" % e)
            else:
                asset_id = input
            # BUGFIX: the original loop reused the name "workflow_execution",
            # clobbering this function's parameter; use a distinct name.
            for existing_execution in list_workflow_executions_by_assetid(asset_id):
                # Only one non-terminal execution may run per asset at a time.
                if existing_execution["Status"] not in [awsmie.WORKFLOW_STATUS_COMPLETE, awsmie.WORKFLOW_STATUS_ERROR]:
                    raise ConflictError("There is currently another workflow execution(Id = {}) active on AssetId {}.".format(
                        existing_execution["Id"], asset_id))
            retrieve_asset = dataplane.retrieve_asset_metadata(asset_id)
            if "results" in retrieve_asset:
                s3bucket = retrieve_asset["results"]["S3Bucket"]
                s3key = retrieve_asset["results"]["S3Key"]
                media_type = retrieve_asset["results"]["MediaType"]
                asset_input = {
                    "Media": {
                        media_type: {
                            "S3Bucket": s3bucket,
                            "S3Key": s3key
                        }
                    }
                }
            else:
                raise ChaliceViewError("Unable to retrieve asset: {e}".format(e=asset_id))

        workflow_execution = initialize_workflow_execution(trigger, Name, asset_input, Configuration, asset_id)
        execution_table.put_item(Item=workflow_execution)
        dynamo_status_queued = True
        # FIXME - must set workflow status to error if this fails since we marked it as QUeued . we had to do that to avoid
        # race condition on status with the execution itself. Once we hand it off to the state machine, we can't touch the status again.
        response = SQS_CLIENT.send_message(QueueUrl=STAGE_EXECUTION_QUEUE_URL, MessageBody=json.dumps(workflow_execution))
        # the response contains MD5 of the body, a message Id, MD5 of message attributes, and a sequence number (for FIFO queues)
        logger.info('Message ID : {}'.format(response['MessageId']))
        # Trigger the workflow_scheduler
        response = LAMBDA_CLIENT.invoke(
            FunctionName=WORKFLOW_SCHEDULER_LAMBDA_ARN,
            InvocationType='Event'
        )
    except Exception as e:
        logger.error("Exception {}".format(e))
        if dynamo_status_queued:
            # We already marked the record Queued; flip it to Error so it does
            # not sit in the queue state forever.
            update_workflow_execution_status(workflow_execution["Id"], awsmie.WORKFLOW_STATUS_ERROR, "Exception {}".format(e))
        raise ChaliceViewError("Exception '%s'" % e)

    return workflow_execution
def initialize_workflow_execution(trigger, Name, input, Configuration, asset_id):
    """Build a workflow execution record from the stored workflow definition.

    Looks up the workflow named *Name* in DynamoDB, overlays *Configuration*
    onto the per-stage operator defaults, initializes per-stage bookkeeping
    and marks the start stage as started.

    :param trigger: what triggered the execution; stored on the record
    :param Name: name of the workflow definition to execute
    :param input: dict merged into the execution's Globals (e.g. {"Media": ...})
    :param Configuration: per-stage, per-operation configuration overrides
    :param asset_id: asset the workflow operates on
    :return: the initialized workflow execution dict
    :raises ChaliceViewError: if the workflow name, a stage, or an operation
        in *Configuration* is unknown.
    """
    workflow_table = DYNAMO_RESOURCE.Table(WORKFLOW_TABLE_NAME)

    workflow_execution = {}
    workflow_execution["Id"] = str(uuid.uuid4())
    workflow_execution["Trigger"] = trigger
    workflow_execution["CurrentStage"] = None
    workflow_execution["Globals"] = {"Media": {}, "MetaData": {}}
    workflow_execution["Globals"].update(input)
    workflow_execution["Configuration"] = Configuration
    workflow_execution["AssetId"] = asset_id
    workflow_execution["Version"] = "v0"
    workflow_execution["Created"] = str(datetime.now().timestamp())
    workflow_execution["ResourceType"] = "WORKFLOW_EXECUTION"
    workflow_execution["ApiVersion"] = API_VERSION

    # lookup base workflow
    response = workflow_table.get_item(
        Key={
            'Name': Name
        },
        ConsistentRead=True)

    if "Item" in response:
        workflow = response["Item"]
    else:
        raise ChaliceViewError(
            "Exception: workflow name '%s' not found" % Name)

    # BUGFIX: use the module logger instead of a stray debug print().
    logger.info(workflow)

    # Override the default configuration with Configuration key-value pairs that are input to the
    # /workflow/execution API. Update only keys that are passed in, leaving the
    # defaults for any key that is not specified
    for stage, sconfig in Configuration.items():
        if stage in workflow["Stages"]:
            for operation, oconfig in sconfig.items():
                if operation in workflow["Stages"][stage]["Configuration"]:
                    for key, value in oconfig.items():
                        workflow["Stages"][stage]["Configuration"][operation][key] = value
                else:
                    workflow_execution["Workflow"] = None
                    raise ChaliceViewError("Exception: Invalid operation '%s'" % operation)
        else:
            workflow_execution["Workflow"] = None
            raise ChaliceViewError("Exception: Invalid stage found in Configuration '%s'" % stage)

    # Per-stage bookkeeping used while the state machine runs the stages.
    for stage in workflow["Stages"]:
        workflow["Stages"][stage]["Status"] = awsmie.STAGE_STATUS_NOT_STARTED
        workflow["Stages"][stage]["Metrics"] = {}
        workflow["Stages"][stage]["Name"] = stage
        workflow["Stages"][stage]["AssetId"] = asset_id
        workflow["Stages"][stage]["WorkflowExecutionId"] = workflow_execution["Id"]
        if "MetaData" not in workflow["Stages"][stage]:
            workflow["Stages"][stage]["MetaData"] = {}

    workflow_execution["Workflow"] = workflow

    # initialize top level workflow_execution state from the workflow
    workflow_execution["Status"] = awsmie.WORKFLOW_STATUS_QUEUED
    workflow_execution["CurrentStage"] = current_stage = workflow["StartAt"]

    # setup the current stage for execution
    workflow_execution["Workflow"]["Stages"][current_stage]["Input"] = workflow_execution["Globals"]
    workflow_execution["Workflow"]["Stages"][current_stage]["Status"] = awsmie.STAGE_STATUS_STARTED

    return workflow_execution
@app.route('/workflow/execution/{Id}', cors=True, methods=['PUT'], authorizer=authorizer)
def update_workflow_execution(Id):
    """ Update a workflow execution

    Options:
        Resume a workflow that is in a Waiting Status in a specific stage.

    Body:

    .. code-block:: python

        {
            "WaitingStageName":"<stage-name>"
        }

    Returns:
        A dict mapping keys to the corresponding workflow execution with its current status

    .. code-block:: python

        {
            "Id: string,
            "Status": "Resumed"
        }

    Raises:
        200: The workflow execution was updated successfully.
        400: Bad Request - the input stage was not found, the current stage did not match the WaitingStageName,
             or the Workflow Status was not "Waiting"
        500: ChaliceViewError - internal server error
    """
    response = {}
    params = json.loads(app.current_request.raw_body.decode())
    logger.info(json.dumps(params))
    # NOTE(review): when "WaitingStageName" is absent the handler silently
    # returns {} with a 200 rather than a 400 -- confirm this is intended.
    if "WaitingStageName" in params:
        response = resume_workflow_execution("api", Id, params["WaitingStageName"])

    return response
def resume_workflow_execution(trigger, id, waiting_stage_name):
    """
    Resume a workflow execution that is waiting in the given stage.

    Conditionally flips the execution's status from Waiting to Resumed in
    DynamoDB, then re-queues it for the scheduler.

    :param trigger: what triggered the resume (e.g. "api")
    :param id: The id of the workflow execution
    :param waiting_stage_name: name of the stage the workflow must currently
        be waiting in
    :return: dict with the execution "Id" and its new "Status"
    :raises BadRequestError: if the workflow status is not Waiting or the
        current stage does not match waiting_stage_name
    """
    # BUGFIX: use the module logger instead of print() for consistency with
    # the rest of this module.
    logger.info("Resume workflow execution {} waiting stage = {}".format(id, waiting_stage_name))
    execution_table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)

    workflow_execution = {}
    workflow_execution["Id"] = id
    workflow_execution["Status"] = awsmie.WORKFLOW_STATUS_RESUMED

    try:
        # The ConditionExpression makes the update atomic: it only succeeds
        # when the execution is Waiting in exactly the requested stage.
        response = execution_table.update_item(
            Key={
                'Id': id
            },
            UpdateExpression='SET #workflow_status = :workflow_status',
            ExpressionAttributeNames={
                '#workflow_status': "Status"
            },
            ConditionExpression="#workflow_status = :workflow_waiting_status AND CurrentStage = :waiting_stage_name",
            ExpressionAttributeValues={
                ':workflow_waiting_status': awsmie.WORKFLOW_STATUS_WAITING,
                ':workflow_status': awsmie.WORKFLOW_STATUS_RESUMED,
                ':waiting_stage_name': waiting_stage_name
            }
        )
    except ClientError as e:
        if e.response['Error']['Code'] == "ConditionalCheckFailedException":
            logger.error(e.response['Error']['Message'])
            raise BadRequestError("Workflow status is not 'Waiting' or Current stage doesn't match the request")
        else:
            raise

    # Queue the resumed workflow so it can run when resources are available
    # FIXME - must set workflow status to error if this fails since we marked it as QUeued . we had to do that to avoid
    # race condition on status with the execution itself. Once we hand it off to the state machine, we can't touch the status again.
    response = SQS_CLIENT.send_message(QueueUrl=STAGE_EXECUTION_QUEUE_URL, MessageBody=json.dumps(workflow_execution))
    # the response contains MD5 of the body, a message Id, MD5 of message attributes, and a sequence number (for FIFO queues)
    logger.info('Message ID : {}'.format(response['MessageId']))

    # We just queued a workflow so, Trigger the workflow_scheduler
    response = LAMBDA_CLIENT.invoke(
        FunctionName=WORKFLOW_SCHEDULER_LAMBDA_ARN,
        InvocationType='Event'
    )

    return workflow_execution
@app.route('/workflow/execution', cors=True, methods=['GET'], authorizer=authorizer)
def list_workflow_executions():
    """ List all workflow executions

    Returns:
        A list of workflow executions.

    Raises:
        200: All workflow executions returned sucessfully.
        500: ChaliceViewError - internal server error
    """
    table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)

    # Follow DynamoDB scan pagination until no LastEvaluatedKey is returned.
    executions = []
    scan_kwargs = {}
    while True:
        page = table.scan(**scan_kwargs)
        executions.extend(page['Items'])
        last_key = page.get('LastEvaluatedKey')
        if last_key is None:
            break
        scan_kwargs['ExclusiveStartKey'] = last_key

    return executions
@app.route('/workflow/execution/status/{Status}', cors=True, methods=['GET'], authorizer=authorizer)
def list_workflow_executions_by_status(Status):
    """ Get all workflow executions with the specified status

    Returns:
        A list of dictionaries containing the workflow executions with the requested status

    Raises:
        200: All workflows returned sucessfully.
        404: Not found
        500: Internal server error
    """
    table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)
    projection_expression = "Id, AssetId, CurrentStage, StateMachineExecutionArn, #workflow_status, Workflow.#workflow_name"

    # CONSISTENCY FIX: the first query and every pagination query used the same
    # duplicated argument list; build it once so the two calls cannot drift.
    # "Status" and "Name" are DynamoDB reserved words, hence the name aliases.
    query_kwargs = dict(
        IndexName='WorkflowExecutionStatus',
        ExpressionAttributeNames={
            '#workflow_status': "Status",
            '#workflow_name': "Name"
        },
        ExpressionAttributeValues={
            ':workflow_status': Status
        },
        KeyConditionExpression='#workflow_status = :workflow_status',
        ProjectionExpression=projection_expression
    )

    response = table.query(**query_kwargs)
    workflow_executions = response['Items']
    while 'LastEvaluatedKey' in response:
        response = table.query(
            ExclusiveStartKey=response['LastEvaluatedKey'], **query_kwargs)
        workflow_executions.extend(response['Items'])

    return workflow_executions
@app.route('/workflow/execution/asset/{AssetId}', cors=True, methods=['GET'], authorizer=authorizer)
def list_workflow_executions_by_assetid(AssetId):
""" Get workflow executions by AssetId
Returns:
A list of dictionaries containing the workflow executions matching the AssetId.
Raises:
200: Workflow executions returned sucessfully.
404: Not found
500: ChaliceViewError - internal server error
"""
table = DYNAMO_RESOURCE.Table(WORKFLOW_EXECUTION_TABLE_NAME)
projection_expression = "Id, AssetId, CurrentStage, Created, StateMachineExecutionArn, #workflow_status, Workflow.#workflow_name"
response = table.query(
IndexName='WorkflowExecutionAssetId',
ExpressionAttributeNames={
'#workflow_status': "Status",
'#workflow_name': "Name"
},
ExpressionAttributeValues={
':assetid': AssetId
},
KeyConditionExpression='AssetId = :assetid',
ProjectionExpression = projection_expression
)
workflow_executions = response['Items']
while 'LastEvaluatedKey' in response:
response = table.query(
ExclusiveStartKey=response['LastEvaluatedKey'],
IndexName='WorkflowExecutionAssetId',
ExpressionAttributeNames={
'#workflow_status': "Status",
'#workflow_name': "Name"
},
ExpressionAttributeValues={
':assetid': AssetId
},
KeyConditionExpression='AssetId = :assetid',
ProjectionExpression = projection_expression
)
workflow_executions.extend(response['Items'])
sorted_executions = sorted(workflow_executions, key=itemgetter('Created'), | |
<reponame>pierricklee/tensorflow<gh_stars>0
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
import pva
from tensorflow.python import ipu
from tensorflow.compiler.plugin.poplar.tests import test_utils as tu
from tensorflow.python.client import session as sl
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ipu.config import IPUConfig
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import googletest
from tensorflow.python.training import gradient_descent
from tensorflow.compiler.plugin.poplar.ops import gen_popops_ops
class EmbeddingLookupTest(test_util.TensorFlowTestCase):
def validate_output(self, input_tensor, indices, output_tensor):
for i, value in enumerate(indices):
if isinstance(value, np.int32):
self.assertEqual(tuple(output_tensor[i]), tuple(input_tensor[value]))
else:
self.validate_output(input_tensor, value, output_tensor[i])
def _validate_gradient_output(self, indices, grads, output_tensor, scale,
visited):
for i, value in enumerate(indices):
if isinstance(value, np.int32):
visited.append(value)
self.assertEqual(tuple(grads[i] * scale), tuple(output_tensor[value]))
else:
self._validate_gradient_output(value, grads[i], output_tensor, scale,
visited)
def validate_gradient_output(self, indices, grads, output_tensor, scale):
visited = []
# Check all the indices contain the corresponding gradient slice.
self._validate_gradient_output(indices, grads, output_tensor, scale,
visited)
# Check the other values are 0:
for i, output_slice in enumerate(output_tensor):
if i not in visited:
self.assertFalse(output_slice.any())
else:
self.assertTrue(output_slice.any())
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testGather(self):
    """Lookup of 8 rows from a [12000, 200] table matches np.take."""

    def my_net(w, i):
      out = ipu.ops.embedding_ops.embedding_lookup(w, i)
      self.assertEqual(out.shape, (8, 200))
      return [out]

    with ops.device('cpu'):
      i = array_ops.placeholder(np.int32, [8])
      w = array_ops.placeholder(np.float32, [12000, 200])

    with ipu.scopes.ipu_scope("/device:IPU:0"):
      r = ipu.ipu_compiler.compile(my_net, inputs=[w, i])

    cfg = IPUConfig()
    tu.add_hw_ci_connection_options(cfg)
    cfg.configure_ipu_system()

    with sl.Session() as sess:
      i_h = np.arange(0, 8)
      w_h = np.arange(2400000).reshape([12000, 200])
      result = sess.run(r, {i: i_h, w: w_h})
      # Compare against the NumPy reference gather.
      self.assertAllClose(result[0], np.take(w_h, i_h, axis=0))
      self.assertEqual(result[0].shape, (8, 200))
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testAutoFlatten(self):
    """Multi-dimensional [3, 4, 2] indices are flattened automatically."""
    with self.session() as sess:
      with ops.device('cpu'):
        x1 = array_ops.placeholder(np.int32, shape=[3, 4, 2])

      def network(x1):
        with variable_scope.variable_scope("vs", use_resource=True):
          x = variable_scope.get_variable(
              "x",
              shape=[100, 16],
              dtype=np.float32,
              initializer=init_ops.random_normal_initializer(stddev=0.1))
          out = ipu.ops.embedding_ops.embedding_lookup(x, x1)
          # The index shape is preserved in front of the embedding dim.
          self.assertEqual(out.shape, (3, 4, 2, 16))
          return out, x, x1

      with ops.device("/device:IPU:0"):
        r = ipu.ipu_compiler.compile(network, inputs=[x1])

      cfg = IPUConfig()
      tu.add_hw_ci_connection_options(cfg)
      cfg.configure_ipu_system()

      sess.run(variables.global_variables_initializer())
      out, input_tensor, indices = sess.run(
          r, {
              x1: [[[10, 11], [12, 13], [14, 15], [16, 17]],
                   [[20, 21], [22, 23], [24, 25], [26, 27]],
                   [[30, 31], [32, 33], [34, 35], [36, 37]]]
          })
      self.assertEqual(out.shape, (3, 4, 2, 16))
      self.validate_output(input_tensor, indices, out)
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testWithResourceVariable(self):
    """Lookup from a resource variable combined with one SGD training step."""
    with self.session() as sess:
      with ops.device('cpu'):
        x1 = array_ops.placeholder(np.int32, shape=[10])
        lr = array_ops.placeholder(np.float32, shape=[])

      def network(x1, lr):
        with variable_scope.variable_scope("vs", use_resource=True):
          x = variable_scope.get_variable(
              "x",
              shape=[100, 16],
              dtype=np.float32,
              initializer=init_ops.random_normal_initializer(stddev=0.1))
          g1 = ipu.ops.embedding_ops.embedding_lookup(x, x1)
          self.assertEqual(g1.shape, (10, 16))
          optimizer = gradient_descent.GradientDescentOptimizer(lr)
          train = optimizer.minimize(g1)
          return g1, x, x1, train

      with ops.device("/device:IPU:0"):
        r = ipu.ipu_compiler.compile(network, inputs=[x1, lr])

      cfg = IPUConfig()
      tu.add_hw_ci_connection_options(cfg)
      cfg.configure_ipu_system()

      sess.run(variables.global_variables_initializer())
      # Duplicate indices (4, 8, 15, 16 appear twice) exercise gradient
      # accumulation for repeated rows.
      out, input_tensor, indices = sess.run(
          r, {
              x1: [4, 8, 15, 16, 23, 42, 8, 4, 15, 16],
              lr: 0.1,
          })
      self.assertEqual(out.shape, (10, 16))
      self.validate_output(input_tensor, indices, out)
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testWithResourceVariableAutoFlatten(self):
    """Auto-flattened [3, 4, 2] lookup combined with one SGD training step."""
    with self.session() as sess:

      def network(x1, lr):
        with variable_scope.variable_scope("vs", use_resource=True):
          x = variable_scope.get_variable(
              "x",
              shape=[100, 16],
              dtype=np.float32,
              initializer=init_ops.random_normal_initializer(stddev=0.1))
          g1 = ipu.ops.embedding_ops.embedding_lookup(x, x1)
          self.assertEqual(g1.shape, (3, 4, 2, 16))
          optimizer = gradient_descent.GradientDescentOptimizer(lr)
          train = optimizer.minimize(g1)
          return g1, x, x1, train

      with ops.device('cpu'):
        x1 = array_ops.placeholder(np.int32, shape=[3, 4, 2])
        lr = array_ops.placeholder(np.float32, shape=[])

      with ops.device("/device:IPU:0"):
        r = ipu.ipu_compiler.compile(network, inputs=[x1, lr])

      cfg = IPUConfig()
      tu.add_hw_ci_connection_options(cfg)
      cfg.configure_ipu_system()

      sess.run(variables.global_variables_initializer())
      out, input_tensor, indices = sess.run(
          r, {
              x1: [[[10, 11], [12, 13], [14, 15], [16, 17]],
                   [[20, 21], [22, 23], [24, 25], [26, 27]],
                   [[30, 31], [32, 33], [34, 35], [36, 37]]],
              lr:
              0.1,
          })
      self.assertEqual(out.shape, (3, 4, 2, 16))
      self.validate_output(input_tensor, indices, out)
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testGradient(self):
    """ipu_multi_update_add scatters grads * scale into a zero tensor."""
    with self.session() as sess:
      with ops.device('cpu'):
        x1 = array_ops.placeholder(np.int32, shape=[3, 4, 2])
        grads = array_ops.placeholder(np.float32, shape=[3, 4, 2, 16])
        lr = array_ops.placeholder(np.float32, shape=[])

      def network(x1, grads, lr):
        with variable_scope.variable_scope("vs", use_resource=True):
          x = variable_scope.get_variable(
              "x",
              shape=[100, 16],
              dtype=np.float32,
              initializer=init_ops.random_normal_initializer(stddev=0.1))
          # Scatter-add the gradient slices (scaled by lr) into zeros so the
          # result contains exactly the scattered contributions.
          out = gen_popops_ops.ipu_multi_update_add(array_ops.zeros_like(x),
                                                    updates=grads,
                                                    indices=x1,
                                                    scale=lr)
          self.assertEqual(out.shape, x.shape)
          return out, x1, grads

      with ops.device("/device:IPU:0"):
        r = ipu.ipu_compiler.compile(network, inputs=[x1, grads, lr])

      cfg = IPUConfig()
      tu.add_hw_ci_connection_options(cfg)
      cfg.configure_ipu_system()

      sess.run(variables.global_variables_initializer())
      out, indices, gradient = sess.run(
          r, {
              x1: [[[10, 11], [12, 13], [14, 15], [16, 17]],
                   [[20, 21], [22, 23], [24, 25], [26, 27]],
                   [[30, 31], [32, 33], [34, 35], [36, 37]]],
              grads:
              np.random.rand(*grads.shape),
              lr:
              0.1,
          })
      self.validate_gradient_output(indices, gradient, out, 0.1)
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def test4D(self):
    """Lookup with 2-D indices into a 3-D table yields a 4-D result."""

    def my_net(w, i):
      out = ipu.ops.embedding_ops.embedding_lookup(w, i)
      self.assertEqual(out.shape, (8, 2, 200, 4))
      return [out]

    with ops.device('cpu'):
      i = array_ops.placeholder(np.int32, [8, 2])
      w = array_ops.placeholder(np.float32, [32, 200, 4])

    with ipu.scopes.ipu_scope("/device:IPU:0"):
      r = ipu.ipu_compiler.compile(my_net, inputs=[w, i])

    cfg = IPUConfig()
    tu.add_hw_ci_connection_options(cfg)
    cfg.configure_ipu_system()

    with sl.Session() as sess:
      i_h = np.arange(0, 16).reshape([8, 2])
      w_h = np.arange(25600).reshape([32, 200, 4])
      result = sess.run(r, {i: i_h, w: w_h})
      # Compare against the NumPy reference gather.
      self.assertAllClose(result[0], np.take(w_h, i_h, axis=0))
      self.assertEqual(result[0].shape, (8, 2, 200, 4))
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testEmbeddingUpdateWithMatMul(self):
    """Embedding lookup feeding a matmul and an SGD step compiles and runs."""

    def my_net(i):
      w = variable_scope.get_variable(
          "w",
          shape=[1000, 200],
          dtype=np.float32,
          initializer=init_ops.random_normal_initializer(stddev=0.1))
      out = ipu.ops.embedding_ops.embedding_lookup(w, i)
      # The lookup result is consumed by a matmul against the same table,
      # so the table is allocated for both ops.
      out = math_ops.matmul(w, out, transpose_b=True)
      optimizer = gradient_descent.GradientDescentOptimizer(1e-3)
      training_op = optimizer.minimize(out)
      return training_op

    with ops.device('cpu'):
      i = array_ops.placeholder(np.int32, [8])

    with ipu.scopes.ipu_scope("/device:IPU:0"):
      r = ipu.ipu_compiler.compile(my_net, inputs=[i])

    cfg = IPUConfig()
    tu.add_hw_ci_connection_options(cfg)
    cfg.configure_ipu_system()

    with sl.Session() as sess:
      i_h = np.arange(0, 8)
      sess.run(variables.global_variables_initializer())
      # Just checking that graph construction succeeds and the program runs
      sess.run(r, {i: i_h})
@tu.test_may_use_ipus_or_model(num_ipus=1)
@test_util.deprecated_graph_mode_only
def testSerializedEmbeddingLookup(self):
with sl.Session() as sess:
def body(table, indices):
return ipu.ops.embedding_ops.embedding_lookup(table,
indices,
serialization_factor=4)
with ops.device('cpu'):
table = array_ops.placeholder(np.float16, [2000, 4, 4, 8])
indices = array_ops.placeholder(np.int32, [4, 4, 8])
cfg = IPUConfig()
tu.add_hw_ci_connection_options(cfg)
cfg.configure_ipu_system()
with ipu.scopes.ipu_scope("/device:IPU:0"):
res = ipu.ipu_compiler.compile(body, inputs=[table, indices])
table_h = np.arange(128, dtype=np.float16).reshape([4, 4, 8]) * np.ones(
[2000, 4, 4, 8], dtype=np.float16)
indices_h = np.random.random_integers(0, 2000, [4, 4, 8])
result = sess.run(res, {table: table_h, indices: indices_h})
self.assertAllClose(result[0], np.take(table_h, indices_h, axis=0))
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testSerializedEmbeddingLookupDoesntDivide(self):
    """A serialization_factor that does not divide dim 0 raises ValueError."""
    with sl.Session():

      def body(table, indices):
        # 7 does not divide 2000, which must be rejected at compile time.
        return ipu.ops.embedding_ops.embedding_lookup(table,
                                                      indices,
                                                      serialization_factor=7)

      with ops.device('cpu'):
        table = array_ops.placeholder(np.float16, [2000, 4, 4, 8])
        indices = array_ops.placeholder(np.int32, [4, 4, 8])

      with ipu.scopes.ipu_scope("/device:IPU:0"):
        with self.assertRaisesRegex(
            ValueError,
            r"The serialization_factor \(7\) must divide the size of the 0th "
            r"dimension of params \(2000\)."):
          ipu.ipu_compiler.compile(body, inputs=[table, indices])
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testReallocationInGradientWhenInputAllocatedForMatmul(self):
    # Tests the behaviour when the input is allocated for a different op.
    # The input should be cloned into a correctly mapped tensor, to avoid
    # memory spikes.
    cfg = IPUConfig()
    report_helper = tu.ReportHelper()
    report_helper.set_autoreport_options(cfg)
    cfg.ipu_model.compile_ipu_code = False
    cfg.configure_ipu_system()

    with self.session() as sess:
      with ops.device('cpu'):
        x = array_ops.placeholder(np.float32, shape=[16, 16])
        updates = array_ops.placeholder(np.float32, shape=[1024, 16])
        indices = array_ops.placeholder(np.int32, shape=[1024])

      def model(x, updates, indices):
        with variable_scope.variable_scope("vs", use_resource=True):
          lhs = variable_scope.get_variable(
              "lhs",
              shape=[1024, 16],
              dtype=np.float32,
              initializer=init_ops.random_normal_initializer(stddev=0.1))
          # x is first allocated for the matmul, forcing the multi-update-add
          # below to take the reallocation code path under test.
          x = math_ops.matmul(lhs, x)
          return gen_popops_ops.ipu_multi_update_add(x,
                                                     updates=updates,
                                                     indices=indices,
                                                     scale=1)

      with ops.device("/device:IPU:0"):
        result = ipu.ipu_compiler.compile(model, inputs=[x, updates, indices])

      fd = {
          x: np.random.rand(*x.shape),
          updates: np.random.rand(*updates.shape),
          indices: np.random.randint(1024, size=indices.shape),
      }
      sess.run(variables.global_variables_initializer())
      report_helper.clear_reports()
      sess.run(result, feed_dict=fd)

    # Large memory spikes are generated when the input for a MultiUpdateAdd
    # is not mapped in the scheme expected by poplibs.
    # NOTE(review): 2520 is the empirically observed per-tile budget for this
    # graph -- update if the allocation strategy changes.
    report = pva.openReport(report_helper.find_report())
    self.assert_max_tile_memory(report, 2520, tolerance=0.1)
  @tu.skip_on_hw
  @test_util.deprecated_graph_mode_only
  def testOutputWhenInputAllocatedForAnotherOp(self):
    # Tests the output values for the code path where the input is allocated
    # for a different op. In this path the input is cloned into a correctly
    # mapped tensor.
    with self.session() as sess:
      with ops.device('cpu'):
        x = array_ops.placeholder(np.float32, shape=[4, 4])
        indices = array_ops.placeholder(np.int32, shape=[8])

      def model(x, indices):
        with variable_scope.variable_scope("vs", use_resource=True):
          lhs = variable_scope.get_variable(
              "lhs",
              shape=[8, 4],
              dtype=np.float32,
              initializer=init_ops.ones_initializer())
          x = math_ops.matmul(lhs, x)
          # Result intentionally discarded: the multi-slice only exists to
          # force the slice allocation path on a tensor allocated for matmul.
          gen_popops_ops.ipu_multi_slice(x, indices)
          return x

      with ops.device("/device:IPU:0"):
        result, = ipu.ipu_compiler.compile(model, inputs=[x, indices])

      fd = {
          x: [[0, 1, 2, 3], [2, 3, 4, 5], [4, 5, 6, 7], [6, 7, 8, 9]],
          indices: list(range(8)),
      }
      sess.run(variables.global_variables_initializer())
      out = sess.run(result, feed_dict=fd)
      # ones([8, 4]) @ x == column sums of x, broadcast over the 8 rows.
      self.assertAllClose(out, [[12, 16, 20, 24]] * 8)
@tu.skip_on_hw
@test_util.deprecated_graph_mode_only
def testGradientOutputWhenInputAllocatedForAnotherOp(self):
# Tests the output values for the code path where the input is allocated
# for a different op. In this path the input is cloned into a correctly
# mapped tensor, and output is copied back to the original input tensor.
with self.session() as sess:
with ops.device('cpu'):
x = array_ops.placeholder(np.float32, shape=[4, 4])
updates = array_ops.placeholder(np.float32, shape=[8, | |
<gh_stars>1-10
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Dict, List, Optional, Union
import msrest.serialization
from ._container_instance_management_client_enums import *
class AzureFileVolume(msrest.serialization.Model):
    """The properties of the Azure File volume. Azure File shares are mounted as volumes.

    All required parameters must be populated in order to send to Azure.

    :param share_name: Required. The name of the Azure File share to be mounted as a volume.
    :type share_name: str
    :param read_only: The flag indicating whether the Azure File shared mounted as a volume is
     read-only.
    :type read_only: bool
    :param storage_account_name: Required. The name of the storage account that contains the Azure
     File share.
    :type storage_account_name: str
    :param storage_account_key: The storage account access key used to access the Azure File share.
    :type storage_account_key: str
    """

    # msrest validation rules: these fields must be set before serialization.
    _validation = {
        'share_name': {'required': True},
        'storage_account_name': {'required': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'share_name': {'key': 'shareName', 'type': 'str'},
        'read_only': {'key': 'readOnly', 'type': 'bool'},
        'storage_account_name': {'key': 'storageAccountName', 'type': 'str'},
        'storage_account_key': {'key': 'storageAccountKey', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        share_name: str,
        storage_account_name: str,
        read_only: Optional[bool] = None,
        storage_account_key: Optional[str] = None,
        **kwargs
    ):
        # NOTE: AutoRest-generated model -- manual edits here are lost on
        # regeneration.
        super(AzureFileVolume, self).__init__(**kwargs)
        self.share_name = share_name
        self.read_only = read_only
        self.storage_account_name = storage_account_name
        self.storage_account_key = storage_account_key
class CachedImages(msrest.serialization.Model):
    """The cached image and OS type.
    All required parameters must be populated in order to send to Azure.
    :param os_type: Required. The OS type of the cached image.
    :type os_type: str
    :param image: Required. The cached image name.
    :type image: str
    """
    # Validation rules consumed by the msrest base Model when serializing.
    _validation = {
        'os_type': {'required': True},
        'image': {'required': True},
    }
    # Python attribute -> (wire key, msrest type) mapping used by the
    # generated (de)serializer.
    _attribute_map = {
        'os_type': {'key': 'osType', 'type': 'str'},
        'image': {'key': 'image', 'type': 'str'},
    }
    def __init__(
        self,
        *,
        os_type: str,
        image: str,
        **kwargs
    ) -> None:
        super(CachedImages, self).__init__(**kwargs)
        self.os_type = os_type
        self.image = image
class CachedImagesListResult(msrest.serialization.Model):
    """The response containing cached images.
    :param value: The list of cached images.
    :type value: list[~azure.mgmt.containerinstance.models.CachedImages]
    :param next_link: The URI to fetch the next page of cached images.
    :type next_link: str
    """
    # Python attribute -> (wire key, msrest type) mapping; '[CachedImages]'
    # marks a list of CachedImages models.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[CachedImages]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }
    def __init__(
        self,
        *,
        value: Optional[List["CachedImages"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ) -> None:
        super(CachedImagesListResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
class Capabilities(msrest.serialization.Model):
    """The regional capabilities.
    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar resource_type: The resource type that this capability describes.
    :vartype resource_type: str
    :ivar os_type: The OS type that this capability describes.
    :vartype os_type: str
    :ivar location: The resource location.
    :vartype location: str
    :ivar ip_address_type: The ip address type that this capability describes.
    :vartype ip_address_type: str
    :ivar gpu: The GPU sku that this capability describes.
    :vartype gpu: str
    :ivar capabilities: The supported capabilities.
    :vartype capabilities: ~azure.mgmt.containerinstance.models.CapabilitiesAutoGenerated
    """
    # Every field is read-only: values are filled in by deserialization of
    # server responses and are never sent back to the service.
    _validation = {
        'resource_type': {'readonly': True},
        'os_type': {'readonly': True},
        'location': {'readonly': True},
        'ip_address_type': {'readonly': True},
        'gpu': {'readonly': True},
        'capabilities': {'readonly': True},
    }
    # Python attribute -> (wire key, msrest type) mapping used by the
    # generated (de)serializer.
    _attribute_map = {
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'os_type': {'key': 'osType', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'ip_address_type': {'key': 'ipAddressType', 'type': 'str'},
        'gpu': {'key': 'gpu', 'type': 'str'},
        'capabilities': {'key': 'capabilities', 'type': 'CapabilitiesAutoGenerated'},
    }
    def __init__(
        self,
        **kwargs
    ) -> None:
        super(Capabilities, self).__init__(**kwargs)
        # Read-only attributes start as None; the deserializer assigns them.
        self.resource_type = None
        self.os_type = None
        self.location = None
        self.ip_address_type = None
        self.gpu = None
        self.capabilities = None
class CapabilitiesAutoGenerated(msrest.serialization.Model):
    """The supported capabilities.
    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar max_memory_in_gb: The maximum allowed memory request in GB.
    :vartype max_memory_in_gb: float
    :ivar max_cpu: The maximum allowed CPU request in cores.
    :vartype max_cpu: float
    :ivar max_gpu_count: The maximum allowed GPU count.
    :vartype max_gpu_count: float
    """
    # Every field is read-only: filled in from server responses only.
    _validation = {
        'max_memory_in_gb': {'readonly': True},
        'max_cpu': {'readonly': True},
        'max_gpu_count': {'readonly': True},
    }
    # Python attribute -> (wire key, msrest type) mapping.
    _attribute_map = {
        'max_memory_in_gb': {'key': 'maxMemoryInGB', 'type': 'float'},
        'max_cpu': {'key': 'maxCpu', 'type': 'float'},
        'max_gpu_count': {'key': 'maxGpuCount', 'type': 'float'},
    }
    def __init__(
        self,
        **kwargs
    ) -> None:
        super(CapabilitiesAutoGenerated, self).__init__(**kwargs)
        # Read-only attributes start as None; the deserializer assigns them.
        self.max_memory_in_gb = None
        self.max_cpu = None
        self.max_gpu_count = None
class CapabilitiesListResult(msrest.serialization.Model):
    """The response containing list of capabilities.
    :param value: The list of capabilities.
    :type value: list[~azure.mgmt.containerinstance.models.Capabilities]
    :param next_link: The URI to fetch the next page of capabilities.
    :type next_link: str
    """
    # Python attribute -> (wire key, msrest type) mapping; '[Capabilities]'
    # marks a list of Capabilities models.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[Capabilities]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }
    def __init__(
        self,
        *,
        value: Optional[List["Capabilities"]] = None,
        next_link: Optional[str] = None,
        **kwargs
    ) -> None:
        super(CapabilitiesListResult, self).__init__(**kwargs)
        self.value = value
        self.next_link = next_link
class CloudErrorBody(msrest.serialization.Model):
    """An error response from the Container Instance service.
    :param code: An identifier for the error. Codes are invariant and are intended to be consumed
        programmatically.
    :type code: str
    :param message: A message describing the error, intended to be suitable for display in a user
        interface.
    :type message: str
    :param target: The target of the particular error. For example, the name of the property in
        error.
    :type target: str
    :param details: A list of additional details about the error.
    :type details: list[~azure.mgmt.containerinstance.models.CloudErrorBody]
    """
    # Python attribute -> (wire key, msrest type) mapping; note 'details'
    # nests further CloudErrorBody instances recursively.
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[CloudErrorBody]'},
    }
    def __init__(
        self,
        *,
        code: Optional[str] = None,
        message: Optional[str] = None,
        target: Optional[str] = None,
        details: Optional[List["CloudErrorBody"]] = None,
        **kwargs
    ) -> None:
        super(CloudErrorBody, self).__init__(**kwargs)
        self.code = code
        self.message = message
        self.target = target
        self.details = details
class Components10Wh5UdSchemasContainergroupidentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model):
    """A user-assigned identity entry (name auto-generated by AutoRest from
    the OpenAPI schema path).
    Variables are only populated by the server, and will be ignored when sending a request.
    :ivar principal_id: The principal id of user assigned identity.
    :vartype principal_id: str
    :ivar client_id: The client id of user assigned identity.
    :vartype client_id: str
    """
    # Every field is read-only: filled in from server responses only.
    _validation = {
        'principal_id': {'readonly': True},
        'client_id': {'readonly': True},
    }
    # Python attribute -> (wire key, msrest type) mapping.
    _attribute_map = {
        'principal_id': {'key': 'principalId', 'type': 'str'},
        'client_id': {'key': 'clientId', 'type': 'str'},
    }
    def __init__(
        self,
        **kwargs
    ) -> None:
        super(Components10Wh5UdSchemasContainergroupidentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs)
        # Read-only attributes start as None; the deserializer assigns them.
        self.principal_id = None
        self.client_id = None
class Container(msrest.serialization.Model):
    """A container instance.
    Variables are only populated by the server, and will be ignored when sending a request.
    All required parameters must be populated in order to send to Azure.
    :param name: Required. The user-provided name of the container instance.
    :type name: str
    :param image: Required. The name of the image used to create the container instance.
    :type image: str
    :param command: The commands to execute within the container instance in exec form.
    :type command: list[str]
    :param ports: The exposed ports on the container instance.
    :type ports: list[~azure.mgmt.containerinstance.models.ContainerPort]
    :param environment_variables: The environment variables to set in the container instance.
    :type environment_variables: list[~azure.mgmt.containerinstance.models.EnvironmentVariable]
    :ivar instance_view: The instance view of the container instance. Only valid in response.
    :vartype instance_view: ~azure.mgmt.containerinstance.models.ContainerPropertiesInstanceView
    :param resources: Required. The resource requirements of the container instance.
    :type resources: ~azure.mgmt.containerinstance.models.ResourceRequirements
    :param volume_mounts: The volume mounts available to the container instance.
    :type volume_mounts: list[~azure.mgmt.containerinstance.models.VolumeMount]
    :param liveness_probe: The liveness probe.
    :type liveness_probe: ~azure.mgmt.containerinstance.models.ContainerProbe
    :param readiness_probe: The readiness probe.
    :type readiness_probe: ~azure.mgmt.containerinstance.models.ContainerProbe
    """
    # Validation rules consumed by the msrest base Model; 'instance_view'
    # is response-only and never sent to the service.
    _validation = {
        'name': {'required': True},
        'image': {'required': True},
        'instance_view': {'readonly': True},
        'resources': {'required': True},
    }
    # Python attribute -> (wire key, msrest type) mapping.  The
    # 'properties.' prefix flattens the nested JSON 'properties' object
    # onto attributes of this model.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'image': {'key': 'properties.image', 'type': 'str'},
        'command': {'key': 'properties.command', 'type': '[str]'},
        'ports': {'key': 'properties.ports', 'type': '[ContainerPort]'},
        'environment_variables': {'key': 'properties.environmentVariables', 'type': '[EnvironmentVariable]'},
        'instance_view': {'key': 'properties.instanceView', 'type': 'ContainerPropertiesInstanceView'},
        'resources': {'key': 'properties.resources', 'type': 'ResourceRequirements'},
        'volume_mounts': {'key': 'properties.volumeMounts', 'type': '[VolumeMount]'},
        'liveness_probe': {'key': 'properties.livenessProbe', 'type': 'ContainerProbe'},
        'readiness_probe': {'key': 'properties.readinessProbe', 'type': 'ContainerProbe'},
    }
    def __init__(
        self,
        *,
        name: str,
        image: str,
        resources: "ResourceRequirements",
        command: Optional[List[str]] = None,
        ports: Optional[List["ContainerPort"]] = None,
        environment_variables: Optional[List["EnvironmentVariable"]] = None,
        volume_mounts: Optional[List["VolumeMount"]] = None,
        liveness_probe: Optional["ContainerProbe"] = None,
        readiness_probe: Optional["ContainerProbe"] = None,
        **kwargs
    ) -> None:
        super(Container, self).__init__(**kwargs)
        self.name = name
        self.image = image
        self.command = command
        self.ports = ports
        self.environment_variables = environment_variables
        # Read-only; populated by deserialization of server responses.
        self.instance_view = None
        self.resources = resources
        self.volume_mounts = volume_mounts
        self.liveness_probe = liveness_probe
        self.readiness_probe = readiness_probe
class ContainerExec(msrest.serialization.Model):
"""The container execution command, for liveness or readiness probe.
:param command: The commands to execute within the container.
:type command: list[str]
"""
_attribute_map = {
'command': {'key': 'command', 'type': '[str]'},
}
def __init__(
self,
*,
command: Optional[List[str]] = None,
**kwargs
):
super(ContainerExec, self).__init__(**kwargs)
self.command = | |
<gh_stars>0
#!/usr/bin/env python
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""Python MadMimi client library."""
__author__ = ('<EMAIL> (tav),'
'<EMAIL> (<NAME>)')
__maintainer__ = '<EMAIL> (<NAME>)'
import csv
import logging
try:
from cStringIO import StringIO
except ImportError:
#from StringIO import StringIO
from io import StringIO
try:
from urllib import quote, urlencode
except ImportError:
from urllib.parse import quote, urlencode
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
from yaml import dump, safe_dump
# Default CSV header row used by add_contacts uploads.
# NOTE(review): 'first name' uses a space while 'last_name' uses an
# underscore -- looks inconsistent; confirm against the Mad Mimi API.
DEFAULT_CONTACT_FIELDS = ('first name', 'last_name', 'email', 'tags')
def parse_lists(response):
    """Parse an audience-lists XML response into MailingList objects.

    Arguments:
        response: The raw XML payload returned by the API (str or bytes).

    Returns:
        A dict mapping list names to MailingList instances.
    """
    lists = {}
    # fromstring() accepts both str and bytes, so the raw urlopen() payload
    # (bytes on Python 3) can be parsed directly -- the old
    # StringIO(response) approach failed on bytes.  Using Element.iter()
    # also avoids getiterator(), which was removed in Python 3.9.
    root = ElementTree.fromstring(response)
    for elem in root.iter('list'):
        lists[elem.attrib['name']] = MailingList(elem.attrib['id'],
                                                 elem.attrib['name'],
                                                 elem.attrib['subscriber_count'])
    return lists
class MailingList(object):
    """A Mad Mimi audience (mailing) list.

    Attributes:
        id: The list identifier reported by the API.
        name: The human-readable list name.
        subscribers: The subscriber count reported by the API.
    """

    def __init__(self, list_id=0, list_name="", subscribers=0):
        self.subscribers = subscribers
        self.id = list_id
        self.name = list_name

    def __unicode__(self):
        # Python 2 text representation; Python 3 falls back to __repr__.
        return u"<MailingList: %s>" % self.name

    def __repr__(self):
        return "<MailingList: {0}>".format(self.name)
class MadMimi(object):
"""
The client is straightforward to use:
>>> mimi = MadMimi('<EMAIL>', 'account-api-key')
You can use it to list existing lists:
>>> mimi.lists()
{'test': <MailingList: test>}
>>> mimi.lists()["test"].subscribers
3
>>> mimi.lists()["test"].name
"test"
Delete any of them:
>>> mimi.delete_list('test')
Create new ones:
>>> mimi.add_list('ampify')
Add new contacts:
>>> mimi.add_contact(['Tav', 'Espian', '<EMAIL>'])
Subscribe contacts to a list:
>>> mimi.subscribe('<EMAIL>', 'ampify')
See what lists a contact is subscribed to:
>>> mimi.subscriptions('<EMAIL>')
<lists>
<list subscriber_count="1" name="ampify" id="77461"/>
</lists>
And, of course, unsubscribe a contact from a list:
>>> mimi.unsubscribe('<EMAIL>', 'ampify')
>>> mimi.subscriptions('<EMAIL>')
<lists>
</lists>
Send a transactional email:
>>> mimi.send_message('<NAME>','<EMAIL>','Promotion Name',
... 'Subject of the message','<EMAIL>',
... {'var1':'This will go to the template'})
'1146680279'
Send an email to a list:
>>> mimi.send_message_to_list('List Name', 'Promotion Name',
... {'var1':'This will go to the template'})
'1223645'
"""
base_url = 'http://api.madmimi.com/'
secure_base_url = 'https://api.madmimi.com/'
def __init__(self, username, api_key):
self.username = username
self.api_key = api_key
self.urlopen = urlopen
self.logger = logging.getLogger('madmimi')
self.logger.setLevel(logging.WARNING)
def _get(self, method, **params):
"""Issue a GET request to Madmimi.
Arguments:
method: The path to the API method you are accessing, relative
to the site root.
is_secure: If is_secure is True, the GET request will be issued
to MadMimi's secure server.
Returns:
The result of the HTTP request as a string.
"""
is_secure = params.get('is_secure')
if is_secure:
url = self.secure_base_url
else:
url = self.base_url
params['username'] = self.username
params['api_key'] = self.api_key
url = url + method + '?' + urlencode(params)
self.logger.debug('get url: %s' % url)
response = self.urlopen(url).read()
self.logger.debug('response: %s' % response)
return response
def _post(self, method, **params):
"""Issue a POST request to Madmimi.
Arguments:
method: The path to the API method you are accessing, relative
to the site root.
is_secure: If is_secure is True, the GET request will be issued
to MadMimi's secure server.
Returns:
The result of the HTTP request as a string.
"""
is_secure = params.get('is_secure')
if is_secure:
url = self.secure_base_url + method
else:
url = self.base_url + method
params['username'] = self.username
params['api_key'] = self.api_key
if params.get('sender'):
params['from'] = params['sender']
self.logger.debug('post url: %s' % url)
self.logger.debug('params: %s' % params)
response = self.urlopen(url, urlencode(params).encode('utf-8')).read()
self.logger.debug('response: %s' % response)
return response
def lists(self, as_xml=False):
"""Get a list of audience lists.
Arguments:
as_xml: If true, the result will be the raw XML response. If False
the result will be a python dictionary of lists.
Default is True. (Optional)
Returns:
The raw XML response or a dictionary of list names and objects.
{'list name': <list object>, 'list2 name': <list object>}
"""
response = self._get('audience_lists/lists.xml')
if as_xml:
return response
else:
return parse_lists(response)
def add_list(self, name):
"""Add a new audience list.
Arguments:
name: The name of the audience list to add.
Returns:
Nothing. The API doesn't provide a response.
"""
self._post('audience_lists', name=name)
def delete_list(self, name):
"""Delete an audience list.
Arguments:
name: The name of the audience list to delete.
Returns:
Nothing. The API doesn't provide a response.
"""
self._post('audience_lists/%s' % quote(name), _method='delete')
def add_contacts(self, contacts_data, fields=DEFAULT_CONTACT_FIELDS,
audience_list=None):
"""Add audience members to your database.
Arguments:
contacts_data: A list of tuples containting contact data.
fields: A tuple containing the fields that will be represented.
Returns:
Nothing. The API doesn't provide a response.
"""
contacts = []
contacts.append((fields))
contacts.extend(contacts_data)
csvdata = StringIO()
writer = csv.writer(csvdata)
[writer.writerow(row) for row in contacts]
self._post('audience_members', csv_file=csvdata.getvalue(),
audience_list=audience_list)
def subscribe(self, email, audience_list):
"""Add an audience member to an audience list.
Arguments:
email: The email address to add to a list.
audience_list: The audience list to add the email address to.
Return:
Nothing. The API doesn't provide a response.
"""
url = 'audience_lists/%s/add' % quote(audience_list)
self._post(url, email=email)
def unsubscribe(self, email, audience_list):
"""Remove an audience member from an audience list.
Arguments:
email: The email address to add to a list.
audience_list: The audience list to add the email address to.
Returns:
Nothing. The API doesn't provide a response.
"""
url = 'audience_lists/%s/remove' % quote(audience_list)
self._post(url, email=email)
def subscriptions(self, email, as_xml=False):
"""Get an audience member's current subscriptions.
Arguments:
email: The email address to look up.
as_xml: If true, the result will be the raw XML response. If False
the result will be a python dictionary of lists.
Default is True. (Optional)
Returns:
The raw XML response or a dictionary of list names and objects of which
the person is a member.
{'list name': <list object>, 'list2 name': <list object>}
"""
response = self._get('audience_members/%s/lists.xml' % quote(email))
if as_xml:
return response
else:
return parse_lists(response)
def send_message(self, name, email, promotion, subject, sender, body={},
raw_html=None, raw_plain_text=None):
"""Sends a message to a user.
Arguments:
name: Name of the person you are sending to.
email: Email address of the person you are sending to.
promotion: Name of the Mad Mimi promotion to send.
subject: Subject of the email.
sender: Email address the email should appear to be from.
Only one of body, raw_html or raw_plain_text should be provided.
Order of preference is html, plain text, body.
body: Optional. Dict holding variables for the promotion template.
{'variable': 'Replcement value'}
raw_html: Optional. If you want to send a message where the
promotion doesn't already exist. Make sure the promotion
name is unique.
raw_plain_text: Optional. Same as raw_html except it is plain
text.
Returns:
The transaction id of the message if successful.
The error if unsuccessful.
"""
recipients = "%s <%s>" % (name, email)
if raw_html:
post = self._post('mailer', promotion_name=promotion,
recipients=recipients, subject=subject, sender=sender,
raw_html=raw_html, is_secure=True)
elif raw_plain_text:
post = self._post('mailer', promotion_name=promotion,
recipients=recipients, subject=subject, sender=sender,
raw_plain_text=raw_plain_text, is_secure=True)
else:
# The YAML dump will fail if it encounters non-strings
for item, value in body.iteritems():
body[item] = str(value)
body = safe_dump(body) # to avoid !!python/str tags by dump(body)
post = self._post('mailer', promotion_name=promotion,
recipients=recipients, subject=subject, sender=sender,
body=body, is_secure=True)
return post
def send_message_to_list(self, list_name, promotion, body={}):
"""Send a promotion to a subscriber list.
Arguments:
list_name: Name of the subscriber list to send the promotion to.
promotion: Name of the Mad Mimi promotion to send.
body: Dict holding variables for the promotion template.
{'variable': 'Replcement value'}
Returns:
The transaction id of the message if successful.
The error if unsuccessful.
"""
# | |
<reponame>SeoFernando25/The-dawn-of-Otrozhny
# Just placed some draw functions here because
# otherwise it would become messy
import textDraw
import renderer
import colors
import entities
def render_tutorial_tab_1():
    """Render the 'Enemy Status' help page of the tutorial."""
    textDraw.message_display_MB(
        renderer.SCREEN,
        "Enemy Status",
        renderer.SCREEN_WIDTH//1.5, renderer.VIEWPORT_Y_OFFSET * 1.5,
        20, color=colors.RED)
    # (row, x multiplier, text, optional colour) -- drawn in order.
    # Status labels take their colour from the matching EnemyStatus member.
    body_lines = [
        (0, 1, "Enemy status refers to various states of alert that", None),
        (1, 1, "affect the behaviour of enemy soldiers.", None),
        (3, 1, "NORMAL", entities.EnemyStatus.Normal.value[1]),
        (3, 10, "While in Normal mode, enemy soldiers will ", None),
        (4, 10, "follow a set patrol route.", None),
        (6, 1, "Alert", entities.EnemyStatus.Alert.value[1]),
        (6, 10, "This is the state in which the player has", None),
        (7, 10, "been discovered by enemy soldiers. ", None),
        (8, 10, "enemy soldiers call for backup and attack.", None),
        (10, 1, "Evasion", entities.EnemyStatus.Evasion.value[1]),
        (10, 10, "Enemy soldiers will search the area", None),
        (11, 10, "they last found the player", None),
        (13, 1, "Caution", entities.EnemyStatus.Caution.value[1]),
        (13, 10, "Enemy soldiers will search the vicinity", None),
        (14, 10, "after losing sight of the player", None),
    ]
    for row, x_mult, text, color in body_lines:
        x = renderer.VIEWPORT_X_OFFSET * x_mult
        y = renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row
        if color is None:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12)
        else:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12, color)
def render_tutorial_tab_2():
    """Render the 'Map Editor' help page of the tutorial."""
    textDraw.message_display_MB(
        renderer.SCREEN,
        "Map Editor",
        renderer.SCREEN_WIDTH//1.5, renderer.VIEWPORT_Y_OFFSET * 1.5,
        20, color=colors.RED)
    # (row, x multiplier, text, optional colour) -- drawn in order.
    body_lines = [
        (0, 1, "You can create and edit you levels in the level", None),
        (1, 1, "editor. You can share maps through the maps folder.", None),
        (3, 1, "NAV", colors.YELLOW),
        (3, 10, "You can edit and change node conections.", None),
        (4, 10, "Making soldiers follow a set patrol route.", None),
        (6, 1, "Draw", colors.YELLOW),
        (6, 10, "In draw mode you can add and remove", None),
        (7, 10, "walls, as well as enemies and nodes", None),
        (9, 1, "Wall", colors.YELLOW),
        (9, 10, "You can paint walls and add keys to them", None),
        (11, 1, "Options", colors.YELLOW),
        (11, 10, "You can edit the settings of your map ", None),
        (12, 10, "such as width and height", None),
    ]
    for row, x_mult, text, color in body_lines:
        x = renderer.VIEWPORT_X_OFFSET * x_mult
        y = renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row
        if color is None:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12)
        else:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12, color)
def render_tutorial_tab_3():
    """Render the 'Items and Objects' help page of the tutorial."""
    textDraw.message_display_MB(
        renderer.SCREEN,
        "Items and Objects",
        renderer.SCREEN_WIDTH//1.5, renderer.VIEWPORT_Y_OFFSET * 1.5,
        20, color=colors.RED)
    # (row, x multiplier, text, optional colour) -- drawn in order.
    body_lines = [
        (0, 1, "While on missions, you may find a variety of ", None),
        (1, 1, "items and obstacles", None),
        (3, 1, "Keys", colors.YELLOW),
        (3, 10, "Keys can open gates and allow you to", None),
        (4, 10, "pass through before locked areas.", None),
        (6, 1, "Gates", colors.YELLOW),
        (6, 10, "Gates block your path and may restrict", None),
        (7, 10, "your way to an advantage point", None),
        (9, 1, "Stars", colors.YELLOW),
        (9, 10, "Are collectables scattered around the", None),
        (10, 10, "map. You need to collect all of them to", None),
        (11, 10, "complete a mission", None),
    ]
    for row, x_mult, text, color in body_lines:
        x = renderer.VIEWPORT_X_OFFSET * x_mult
        y = renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row
        if color is None:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12)
        else:
            textDraw.message_display_L(renderer.SCREEN, text, x, y, 12, color)
def render_tutorial_tab_4():
    """Render the 'Briefing' story page of the tutorial."""
    textDraw.message_display_MB(
        renderer.SCREEN,
        "Briefing",
        renderer.SCREEN_WIDTH//1.5, renderer.VIEWPORT_Y_OFFSET * 1.5,
        20, color=colors.RED)
    # (row, text) pairs, all drawn at the left margin -- order matters.
    briefing = [
        (0, "JULY 2019: The human species is on the edge of"),
        (1, "extinction. "),
        (3, "A geneticaly modified creature created by Kephart"),
        (4, "corporations of Keter class named by SPC-610 was"),
        (5, "recently denounced uncontained of level Red after"),
        (6, "members from Area-683 lost direct contact to"),
        (7, "I DATA EXPUNGED I . Locals from Otrozhny in the "),
        (8, "Russia Confederation also recently reported "),
        (9, "anomalous sounds comming from Area-683."),
        (11, "Fortunately, one of the officers was able to"),
        (12, "contact HQ via a 2000 MK VI Transreciever."),
        (13, "More details will be availible ASAP"),
        (14, "I End of transmission I"),
    ]
    for row, text in briefing:
        textDraw.message_display_L(
            renderer.SCREEN,
            text,
            renderer.VIEWPORT_X_OFFSET,
            renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
            12)
def render_tutorial_tab_5():
textDraw.message_display_MB(
renderer.SCREEN,
"Unauthorized",
renderer.SCREEN_WIDTH//1.5, renderer.VIEWPORT_Y_OFFSET * 1.5,
20, color=colors.RED)
row = 0
textDraw.message_display_L(
renderer.SCREEN,
"Error 401 - Unauthorized",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12, colors.RED)
row += 2
textDraw.message_display_L(
renderer.SCREEN,
"You do not have permission to view this directory",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12, color=colors.RED)
row += 2
textDraw.message_display_L(
renderer.SCREEN,
"olssv, ty ztpao",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12)
row += 1
textDraw.message_display_L(
renderer.SCREEN,
"pm fvb hyl zllpun aopz tlzzhnl jvunyhabshapvuz.",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12)
row += 1
textDraw.message_display_L(
renderer.SCREEN,
"bumvyabuhalsf, aol zavyf pz zapss pujvtwslal ",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12)
row += 1
textDraw.message_display_L(
renderer.SCREEN,
"huk pa thf ulcly dpss p zhk ltvqp p . wslhzl ",
renderer.VIEWPORT_X_OFFSET, renderer.VIEWPORT_Y_OFFSET * 2 + 20 * row,
12)
row += 1
textDraw.message_display_L(
renderer.SCREEN,
"jvtl ihjr | |
<gh_stars>10-100
"""Test for serializer's get_collection."""
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi import __version__
class GetCollection(testcases.SqlalchemyJsonapiTestCase):
"""Tests for serializer.get_collection."""
def test_get_collection_response_with_no_query_args(self):
"""Get collection with no query params returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is a comment', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {}, 'comments')
expected = {
'data': [{
'attributes': {
'content': 'This is a comment'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
    @testcases.fragile
    def test_get_collection_response_with_single_include_model(self):
        """Get collection with single included model returns 200.

        This test is fragile.
        """
        # Fixture: one user who authors one post and one comment on it.
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        blog_post = models.Post(
            title='This Is A Title', content='This is the content',
            author_id=user.id, author=user)
        self.session.add(blog_post)
        comment = models.Comment(
            content='This is a comment', author_id=user.id,
            post_id=blog_post.id, author=user, post=blog_post)
        self.session.add(comment)
        self.session.commit()
        # 'include=author' asks the serializer to embed the comment's author
        # as a compound document under 'included'.
        response = models.serializer.get_collection(
            self.session, {'include': 'author'}, 'comments')
        expected = {
            'data': [{
                'type': 'comments',
                'id': 1,
                'relationships': {
                    'author': {
                        # Included relationships also carry resource linkage.
                        'data': {
                            'type': 'users',
                            'id': 1
                        },
                        'links': {
                            'self': '/comments/1/relationships/author',
                            'related': '/comments/1/author'
                        }
                    },
                    'post': {
                        'links': {
                            'self': '/comments/1/relationships/post',
                            'related': '/comments/1/post'
                        }
                    }
                },
                'attributes': {
                    'content': u'This is a comment'
                }
            }],
            'included': [{
                'type': 'users',
                'id': 1,
                'relationships': {
                    'posts': {
                        'links': {
                            'self': '/users/1/relationships/posts',
                            'related': '/users/1/posts'
                        }
                    },
                    'comments': {
                        'links': {
                            'self': '/users/1/relationships/comments',
                            'related': '/users/1/comments'
                        }
                    },
                    'logs': {
                        'links': {
                            'self': '/users/1/relationships/logs',
                            'related': '/users/1/logs'
                        }
                    }
                },
                'attributes': {
                    'username': u'SallySmith1',
                    'last': u'Smith',
                    'first': u'Sally'
                }
            }],
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            },
            'jsonapi': {
                'version': '1.0'
            },
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(200, response.status_code)
def test_get_collection_asc_sorted_response(self):
"""Get collection with ascending sorted response returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in reversed(range(2)):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'content'}, 'comments')
expected = {
'data': [{
'relationships': {
'author': {
'links': {
'related': '/comments/2/author',
'self': '/comments/2/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/2/post',
'self': '/comments/2/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 1'
},
'id': 2
}, {
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 2'
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_desc_sorted_response(self):
"""Get collection with descending sorted response returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(2):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': '-content'}, 'comments')
expected = {
'data': [{
'relationships': {
'author': {
'links': {
'related': '/comments/2/author',
'self': '/comments/2/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/2/post',
'self': '/comments/2/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 2'
},
'id': 2
}, {
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 1'
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_response_with_relationship_for_sorting(self):
"""Get collection with relationship for sorting results in 409.
A NotSortableError is returned.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='Thfsessis Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'author'}, 'posts')
self.assertEquals(409, response.status_code)
def test_get_collection_response_given_invalid_sort_field(self):
"""Get collection given an invalid sort field results in 409.
A NotSortableError is returned.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'invalid_field'}, 'posts')
expected = 'The requested field posts on type invalid_field is not a sortable field.'
self.assertEquals(expected, response.detail)
self.assertEquals(409, response.status_code)
def test_get_collection_access_denied(self):
"""Get collection with access denied results in 200.
The response data should be empty list.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
log = models.Log(user=user, user_id=user.id)
self.session.add(log)
self.session.commit()
response = models.serializer.get_collection(self.session, {}, 'logs')
expected = {
'data': [],
'included': [],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_paginated_response_by_page(self):
"""Get collection with pagination by page returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(20):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session,
{'page[number]': u'1', 'page[size]': u'2'}, 'comments')
expected = {
'data': [{
'id': 3,
'attributes': {
'content': u'This is comment 3'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'self': '/comments/3/relationships/author',
'related': '/comments/3/author'
}
},
'post': {
'links': {
'self': '/comments/3/relationships/post',
'related': '/comments/3/post'
}
}
}
}, {
'id': 4,
'attributes': {
'content': u'This is comment 4'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'self': '/comments/4/relationships/author',
'related': '/comments/4/author'
}
},
'post': {
'links': {
'self': '/comments/4/relationships/post',
'related': '/comments/4/post'
}
}
}
}],
'included': [],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_with_single_field(self):
"""Get collection with specific field returns 200.
The response will only contain attributes specific in field dictionary.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
log = models.Log(user_id=user.id, user=user)
self.session.add(log)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'fields[users]': 'first'}, 'users')
expected = {
'data': [{
'relationships': {},
'id': 1,
'type': 'users',
'attributes': {
'first': u'Sally'
}
}],
'included': [],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
    @testcases.fragile
    def test_get_collection_when_including_model_and_its_attribute(self):
        """Get collection when including the model and its attribute returns 200."""
        # Fixture: one user who authors one post and one comment on it.
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        blog_post = models.Post(
            title='This Is A Title', content='This is the content',
            author_id=user.id, author=user)
        self.session.add(blog_post)
        comment = models.Comment(
            content='This is comment 1', author_id=user.id,
            post_id=blog_post.id, author=user, post=blog_post)
        self.session.add(comment)
        self.session.commit()
        # 'include=post.author' requests the comment's post *and* that post's
        # author as compound documents.  The test is marked fragile because
        # the order of entries in 'included' depends on serializer iteration
        # order.
        response = models.serializer.get_collection(
            self.session, {'include': 'post.author'}, 'comments')
        expected = {
            'included': [{
                'id': 1,
                'type': 'users',
                'relationships': {
                    'posts': {
                        'links': {
                            'self': '/users/1/relationships/posts',
                            'related': '/users/1/posts'
                        }
                    },
                    'comments': {
                        'links': {
                            'self': '/users/1/relationships/comments',
                            'related': '/users/1/comments'
                        }
                    },
                    'logs': {
                        'links': {
                            'self': '/users/1/relationships/logs',
                            'related': '/users/1/logs'
                        }
                    }
                },
                'attributes': {
                    'username': u'SallySmith1',
                    'first': u'Sally',
                    'last': u'Smith'
                }
            }, {
                'id': 1,
                'type': 'posts',
                'relationships': {
                    'author': {
                        # Included relationships also carry resource linkage.
                        'data': {
                            'id': 1,
                            'type': 'users'
                        },
                        'links': {
                            'self': '/posts/1/relationships/author',
                            'related': '/posts/1/author'
                        }
                    },
                    'comments': {
                        'links': {
                            'self': '/posts/1/relationships/comments',
                            'related': '/posts/1/comments'
                        }
                    }
                },
                'attributes': {
                    'content': u'This is the content',
                    'title': u'This Is A Title'
                }
            }],
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            },
            'data': [{
                'id': 1,
                'type': 'comments',
                'relationships': {
                    'post': {
                        'data': {
                            'id': 1,
                            'type': 'posts'
                        },
                        'links': {
                            'self': '/comments/1/relationships/post',
                            'related': '/comments/1/post'
                        }
                    },
                    'author': {
                        'links': {
                            'self': '/comments/1/relationships/author',
                            'related': '/comments/1/author'
                        }
                    }
                },
                'attributes': {
                    'content': u'This is comment 1'
                }
            }],
            'jsonapi': {
                'version': '1.0'
            }
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(200, response.status_code)
@testcases.fragile
def test_get_collection_given_an_included_model_that_is_null(self):
"""Get collection when given a included model that is null returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content')
self.session.add(blog_post)
self.session.commit()
response = | |
<gh_stars>0
"""
Functions to visualize matrices of data.
It is a custom version of a Heatmap allowing
cells size's customization.
It is based on matrix.py in https://github.com/mwaskom/seaborn
by <NAME>
( commit id: https://github.com/mwaskom/seaborn/pull/1830 )
"""
from __future__ import division
import itertools
import datetime
import matplotlib as mpl
from matplotlib.collections import LineCollection
import matplotlib.pyplot as plt
from matplotlib import gridspec
import matplotlib.patheffects as patheffects
import numpy as np
import pandas as pd
from scipy.cluster import hierarchy
import seaborn as sns
from seaborn import cm
from seaborn.axisgrid import Grid
from seaborn.utils import (
despine, axis_ticklabels_overlap, relative_luminance, to_utf8)
from seaborn.external.six import string_types
__all__ = ['afficher_heatmap', 'afficher_hetmap_avec_cellules_variables']
def _index_to_label(index):
"""
(Unchanged funtions)
Convert a pandas index or multiindex to an axis label.
"""
if isinstance(index, pd.MultiIndex):
return "-".join(map(to_utf8, index.names))
else:
return index.name
def _index_to_ticklabels(index):
"""
(Unchanged funtions)
Convert a pandas index or multiindex into ticklabels.
"""
if isinstance(index, pd.MultiIndex):
return ["-".join(map(to_utf8, i)) for i in index.values]
else:
return index.values
def _convert_colors(colors):
    """Convert a flat or nested collection of colors to RGB tuples.

    (Unchanged from seaborn's matrix.py.)
    """
    to_rgb = mpl.colors.colorConverter.to_rgb

    if isinstance(colors, pd.DataFrame):
        # Convert each column of the frame independently.
        return pd.DataFrame({name: colors[name].map(to_rgb)
                             for name in colors})
    if isinstance(colors, pd.Series):
        return colors.map(to_rgb)
    try:
        # Probe the first entry: success means one flat level of colors.
        to_rgb(colors[0])
        return list(map(to_rgb, colors))
    except ValueError:
        # Otherwise we have nested lists of colors.
        return [list(map(to_rgb, sub)) for sub in colors]
def _matrix_mask(data, mask):
"""
(Unchanged funtions)
Ensure that data and mask are compatible and add missing values.
Values will be plotted for cells where ``mask`` is ``False``.
``data`` is expected to be a DataFrame; ``mask`` can be an array or
a DataFrame.
"""
if mask is None:
mask = np.zeros(data.shape, np.bool)
if isinstance(mask, np.ndarray):
# For array masks, ensure that shape matches data then convert
if mask.shape != data.shape:
raise ValueError("Mask must have the same shape as data.")
mask = pd.DataFrame(mask,
index=data.index,
columns=data.columns,
dtype=np.bool)
elif isinstance(mask, pd.DataFrame):
# For DataFrame masks, ensure that semantic labels match data
if not mask.index.equals(data.index) \
and mask.columns.equals(data.columns):
err = "Mask must have the same index and columns as data."
raise ValueError(err)
# Add any cells with missing data to the mask
# This works around an issue where `plt.pcolormesh` doesn't represent
# missing data properly
mask = mask | pd.isnull(data)
return mask
def _normalize_cell_size(size, size_min, size_max, size_true, size_false, size_nan):
""" """
if isinstance(size, bool):
return size_true if size else size_false
elif np.isnan(size):
return size_nan
elif size <= size_min:
return size_min
elif size >= size_max:
return size_max
else:
return size
class _CustomisedCellHeatMapper(object):
"""Custom version of _HeatMapper adding the control of the cell size."""
DEFAULT_VMIN_CELLS = .1
DEFAULT_VMAX_CELLS = 1
    def __init__(self, data, vmin, vmax, cmap, center, robust, annot, fmt,
                 annot_kws, cbar, cbar_kws, shape_kws,
                 data_cells, vmin_cells, vmax_cells, robust_cells,
                 xticklabels=True, yticklabels=True, mask=None, normalize_cells=True,
                 square_shaped_cells=True):
        """
        Initialize the plotting object.

        ``data`` holds the values mapped to colors; ``data_cells`` holds the
        per-cell relative sizes (defaults to all ones, i.e. full-size cells).
        The remaining arguments mirror seaborn's heatmap options.
        """
        # We always want to have a DataFrame with semantic information
        # and an ndarray to pass to matplotlib
        if isinstance(data, pd.DataFrame):
            plot_data = data.values
        else:
            plot_data = np.asarray(data)
            data = pd.DataFrame(plot_data)

        # Same dual representation for the cell-size matrix; when absent,
        # every cell is drawn at full size (1.0).
        if data_cells is None:
            data_cells = pd.DataFrame(data=np.ones(data.shape, dtype=float),
                                      columns=data.columns,
                                      index=data.index)
        if isinstance(data_cells, pd.DataFrame):
            plot_cells = data_cells.values
        else:
            plot_cells = np.asarray(data_cells)
            data_cells = pd.DataFrame(plot_cells)

        # Validate the mask and convert to DataFrame
        mask = _matrix_mask(data, mask)

        # Masked cells are excluded from both color and size mapping.
        plot_data = np.ma.masked_where(np.asarray(mask), plot_data)
        plot_cells = np.ma.masked_where(np.asarray(mask), plot_cells)

        # Get good names for the rows and columns
        # An int means "label every Nth tick"; True/False toggle labels.
        xtickevery = 1
        if isinstance(xticklabels, int):
            xtickevery = xticklabels
            xticklabels = _index_to_ticklabels(data.columns)
        elif xticklabels is True:
            xticklabels = _index_to_ticklabels(data.columns)
        elif xticklabels is False:
            xticklabels = []

        ytickevery = 1
        if isinstance(yticklabels, int):
            ytickevery = yticklabels
            yticklabels = _index_to_ticklabels(data.index)
        elif yticklabels is True:
            yticklabels = _index_to_ticklabels(data.index)
        elif yticklabels is False:
            yticklabels = []

        # Get the positions and used label for the ticks
        nx, ny = data.T.shape

        if not len(xticklabels):
            self.xticks = []
            self.xticklabels = []
        elif isinstance(xticklabels, string_types) and xticklabels == "auto":
            # "auto" defers tick thinning to draw time.
            self.xticks = "auto"
            self.xticklabels = _index_to_ticklabels(data.columns)
        else:
            self.xticks, self.xticklabels = self._skip_ticks(xticklabels,
                                                             xtickevery)

        if not len(yticklabels):
            self.yticks = []
            self.yticklabels = []
        elif isinstance(yticklabels, string_types) and yticklabels == "auto":
            self.yticks = "auto"
            self.yticklabels = _index_to_ticklabels(data.index)
        else:
            self.yticks, self.yticklabels = self._skip_ticks(yticklabels,
                                                             ytickevery)

        # Get good names for the axis labels
        xlabel = _index_to_label(data.columns)
        ylabel = _index_to_label(data.index)
        self.xlabel = xlabel if xlabel is not None else ""
        self.ylabel = ylabel if ylabel is not None else ""

        # Determine good default values for the colormapping
        self._determine_cmap_params(plot_data, vmin, vmax,
                                    cmap, center, robust)

        # Determine good default values for the sizemapping
        # NOTE(review): this call passes six arguments, but
        # _determine_cells_params as defined below accepts only five --
        # TypeError at runtime; confirm whether square_shaped_cells belongs
        # in its signature.
        self._determine_cells_params(plot_cells, vmin_cells,
                                     vmax_cells, robust_cells,
                                     normalize_cells, square_shaped_cells)

        # Sort out the annotations
        if annot is None:
            annot = False
            annot_data = None
        elif isinstance(annot, bool):
            if annot:
                annot_data = plot_data
            else:
                annot_data = None
        else:
            try:
                annot_data = annot.values
            except AttributeError:
                annot_data = annot
            # NOTE(review): annot.shape raises AttributeError for plain
            # (non-ndarray) sequences -- presumably annot is expected to be
            # array-like here; confirm.
            if annot.shape != plot_data.shape:
                raise ValueError('Data supplied to "annot" must be the same '
                                 'shape as the data to plot.')
            annot = True

        # Save other attributes to the object
        self.data = data
        self.plot_data = plot_data
        self.data_cells = data_cells
        self.plot_cells = plot_cells

        self.annot = annot
        self.annot_data = annot_data

        self.fmt = fmt
        self.annot_kws = {} if annot_kws is None else annot_kws
        self.cbar = cbar
        self.cbar_kws = {} if cbar_kws is None else cbar_kws
        self.cbar_kws.setdefault('ticks', mpl.ticker.MaxNLocator(6))
        self.shape_kws = {} if shape_kws is None else shape_kws
def _determine_cmap_params(self, plot_data, vmin, vmax,
cmap, center, robust):
"""Use some heuristics to set good defaults for colorbar and range."""
calc_data = plot_data.data[~np.isnan(plot_data.data)]
if vmin is None:
vmin = np.percentile(calc_data, 2) if robust else calc_data.min()
if vmax is None:
vmax = np.percentile(calc_data, 98) if robust else calc_data.max()
self.vmin, self.vmax = vmin, vmax
# Choose default colormaps if not provided
if cmap is None:
if center is None:
self.cmap = cm.rocket
else:
self.cmap = cm.icefire
elif isinstance(cmap, string_types):
self.cmap = mpl.cm.get_cmap(cmap)
elif isinstance(cmap, list):
self.cmap = mpl.colors.ListedColormap(cmap)
else:
self.cmap = cmap
# Recenter a divergent colormap
if center is not None:
vrange = max(vmax - center, center - vmin)
normlize = mpl.colors.Normalize(center - vrange, center + vrange)
cmin, cmax = normlize([vmin, vmax])
cc = np.linspace(cmin, cmax, 256)
self.cmap = mpl.colors.ListedColormap(self.cmap(cc))
def _determine_cells_params(self, plot_cells, vmin_cells, vmax_cells, robust_cells, normalize_cells):
"""Use some heuristics to set good defaults for colorbar and range."""
# ( NEW )
if plot_cells is None:
self.plot_cells = np.ones(plot_cells.shape)
self.vmax_cells, self.vmin_cells = self.DEFAULT_VMAX_CELLS, self.DEFAULT_VMIN_CELLS
else:
# Handle incorrect types (only accepted or np.bool and np.numeric)
type_cells = plot_cells.applymap(type)
available_types = set(type_cells.values.flatten())
invalid_types = [
ctype for ctype in available_types if not isinstance(ctype, (bool, float))]
if invalid_types:
raise TypeError(f"Incorrect types: {invalid_types} ")
# Format into a unique type with the right imputation
plot_cells = plot_cells.replace({True: 1.0, False: 0})
# Normalize the the range of values
calc_cells = plot_cells.data[~np.isnan(plot_cells.data)]
if vmin_cells is None:
vmin_cells = 0
if vmax_cells is None:
vmax_cells = 1.0
robust_vmin_cells = np.percentile(
calc_cells, 5) if robust else calc_cells.min()
robust_vmax_cells = np.percentile(
calc_cells, 95) if robust else calc_cells.max()
if robust_vmin_cells == 0:
robust_vmin_cells = self.DEFAULT_VMIN_CELLS
# Normalize the values
plot_cells = plot_cells.applymap(_normalize_cell_size,
vmin=robust_vmin_cells,
vmax=robust_vmax_cells,
true_value=robust_vmax_cells,
false_value=robust_vmin_cells,
nan_value=0.0
)
# Store the values
self.plot_cells = plot_cells
self.vmax_cells = robust_vmax_cells
self. vmin_cells = robust_vmin_cells
    def _annotate_and_size_cells(self, ax, mesh, square_shaped_cells):
        """Add textual labels with the value in each cell.

        Draws one patch (square or circle) per unmasked cell, scaled by the
        normalized cell size, then optionally writes the annotation text on
        top of it.
        """
        # ( MODIFY: former _annotate_heatmap )
        mesh.update_scalarmappable()
        height, width = self.annot_data.shape
        # Cell centers sit at half-integer grid coordinates.
        xpos, ypos = np.meshgrid(np.arange(width) + .5, np.arange(height) + .5)
        # NOTE(review): self.plot_cells.flat assumes an ndarray-like; if
        # _determine_cells_params stores a DataFrame this attribute does not
        # exist -- confirm which type reaches this point.
        for x, y, m, color, val, cell_size in zip(xpos.flat, ypos.flat,
                                                  mesh.get_array(), mesh.get_facecolors(),
                                                  self.annot_data.flat, self.plot_cells.flat):
            if m is not np.ma.masked:
                # vv = (val - self.vmin) / (self.vmax - self.vmin)# done
                # size = np.clip(s / self.cellsize_vmax, 0.1, 1.0)
                shape = None
                if square_shaped_cells:
                    # Rectangle is anchored at its lower-left corner, so the
                    # half-size offset centers it on (x, y).
                    shape = plt.Rectangle((x - cell_size / 2, y - cell_size / 2),
                                          cell_size,
                                          cell_size,
                                          facecolor=color,
                                          **self.shape_kws)
                else:
                    # NOTE(review): Circle takes a *center* point; offsetting
                    # by half the size (as for Rectangle) shifts the circle
                    # off-center and cell_size is used as the radius, not the
                    # diameter -- looks like a bug, confirm intended geometry.
                    shape = plt.Circle((x - cell_size / 2, y - cell_size / 2),
                                       cell_size,
                                       facecolor=color,
                                       fill=True,
                                       **self.shape_kws)
                ax.add_patch(shape)

                if self.annot:
                    # Pick a text color that contrasts with the cell color.
                    lum = relative_luminance(color)
                    text_color = ".15" if lum > .408 else "w"
                    annotation = ("{:" + self.fmt + "}").format(val)
                    text_kwargs = dict(
                        color=text_color, ha="center", va="center")
                    text_kwargs.update(self.annot_kws)
                    ax.text(x, y, annotation, **text_kwargs)
def | |
in use by
issuing the PLNSOL,TEMP command (with PowerGraphics and
/ESHAPE active).
The /ESHAPE,1 and /ESHAPE,FAC commands are incompatible with
the /CYCEXPAND command used in cyclic symmetry analyses.
This command is valid in any processor.
"""
warnings.warn(
"pymapdl does not support /ESHAPE when plotting in "
"Python using ``mapdl.eplot()``. "
"Use ``mapdl.eplot(vtk=False)`` "
)
command = f"/ESHAPE,{scale},{key}"
return self.run(command, **kwargs)
def facet(self, lab="", **kwargs):
"""Specifies the facet representation used to form solid model displays.
APDL Command: /FACET
Parameters
----------
lab
Valid labels:
FINE - Use finer tessellation to increase the number of facets for the display.
Provides the best representation (but decreases speed of
operation).
NORML - Use the basic number of facets for the display (default).
COAR - Use a limited number of facets for the display. This option will increase the
speed of the operations, but may produce poor
representations for some imported models.
WIRE - Display model with a wireframe representation (fast, but surfaces will not be
shown).
Notes
-----
Specifies the facet (or polygon) representation used to form solid
model displays. Used only with the APLOT, ASUM, VPLOT, and VSUM
commands.
This command is valid in any processor.
"""
command = f"/FACET,{lab}"
return self.run(command, **kwargs)
def gline(self, wn="", style="", **kwargs):
"""Specifies the element outline style.
APDL Command: /GLINE
Parameters
----------
wn
Window number (or ALL) to which command applies (defaults to 1).
style
Outline key:
0 - Solid element outlines (default)
1 - Dashed element outlines
-1 - No element outlines
Notes
-----
Determines the element outline style. Often used when node numbers are
displayed to prevent element lines from overwriting node numbers.
Unless you are using an OpenGL or Starbase driver, the dashed element
outline option (/GLINE,WN,1) is not available in the following
situations:
Z-buffered displays (/TYPE,WN,6).
Capped Z-buffered displays (/TYPE,WN,7).
Qslice Z-buffered displays (/TYPE,WN,8).
This command is valid in any processor.
"""
command = f"/GLINE,{wn},{style}"
return self.run(command, **kwargs)
def gmarker(self, curve="", key="", incr="", **kwargs):
"""Specifies the curve marking style.
APDL Command: /GMARKER
Parameters
----------
curve
Curve number markers will be applied on (integer value between 1
and 10).
key
Marker key:
0 - No markers will be applied (default).
1 - TRIANGLES will be applied.
2 - SQUARES will be applied.
3 - DIAMONDS will be applied.
4 - CROSSES will be applied.
incr
Determines the curve marking frequency. (a whole number value
between 1 and 255). If INCR = 1, markers are displayed at every
data point on the curve. If INCR = 2 then markers are displayed at
every second data point. If INCR = 3 then they are displayed at
every third data point.
Notes
-----
The user-specified markers will not be drawn when the area under the
curve is color-filled (/GROPT, FILL).
"""
command = f"/GMARKER,{curve},{key},{incr}"
return self.run(command, **kwargs)
def gmface(self, lab="", n="", **kwargs):
"""Specifies the facet representation used to form solid models.
APDL Command: GMFACE
Parameters
----------
lab
Valid Labels:
FINE - Value that determines how coarse the facets will be.
n
An integer value between one (small) and ten (large) that
determines the tolerances that will be applied to the creation of
arcs and surfaces. Ten will create many facets, which may in turn
cause ANSYS to run very slowly. One will create fewer facets, which
may in turn cause larger tolerance errors.
"""
command = f"GMFACE,{lab},{n}"
return self.run(command, **kwargs)
def light(self, wn="", num="", int_="", xv="", yv="", zv="", refl="", **kwargs):
"""Specifies the light direction for the display window.
APDL Command: /LIGHT
Parameters
----------
wn
Window number (or ALL) to which command applies (defaults to 1).
num
Ambient or directional light key:
0 - Ambient light (default).
1 - Directional light.
int\_
Light intensity factor (defaults to 0.3 for ambient, 1.0 for
directional). This option is valid only for 3-D devices).
xv, yv, zv
Light direction (valid only for NUM = 1). The directional light
source is parallel to the line from point XV, YV, ZV to the origin,
in the global Cartesian system origin. Defaults to the viewing
direction [/VIEW].
refl
Light reflectance factor (valid only for NUM = 1 and 3-D devices).
Notes
-----
Defines the light direction for the window. Use this command only with
3-D graphics devices or 2-D devices when Z-buffering is used [/TYPE,,(6
or 7)]. The ambient light has no direction, only an intensity. You
can position the directional light source by defining a point (in the
global Cartesian coordinate system) representing a point along the
light directional line. This point, and the global Cartesian
coordinate system origin, define the line along which the light is
positioned looking toward the origin. You can use any point along the
light line; for example, both (1.,1.,1.) and (2.,2.,2.) give the same
light effect. For 3-D graphics devices only, the directional light
source also has intensity and reflectance factors.
By choosing the highest intensity ambient light for 3-D graphics
devices (via the command /LIGHT,WN,0,1), you can nullify color shading
and other effects of directional lighting.
This command is valid in any processor.
"""
command = f"/LIGHT,{wn},{num},{int_},{xv},{yv},{zv},{refl}"
return self.run(command, **kwargs)
def normal(self, wn="", key="", **kwargs):
"""Allows displaying area elements by top or bottom faces.
APDL Command: /NORMAL
Parameters
----------
wn
Window number (or ALL) to which command applies (defaults to 1).
key
Display key:
0 - No face distinction.
1 - Show only area elements having their positive normals directed toward the
viewing point.
-1 - Show only area elements having their positive normals directed away from the
viewing point.
Notes
-----
/NORMAL allows you to select area elements and area plots by the top or
bottom faces. It is useful for checking the normal directions on shell
elements. The positive normal (element Z direction) is defined by the
right-hand rule following the node I, J, K, L input direction. This
command is available only with raster or hidden-line displays, for
WIN32 or X11 2-D displays only.
This command is valid in any processor.
"""
command = f"/NORMAL,{wn},{key}"
return self.run(command, **kwargs)
def shade(self, wn="", type_="", **kwargs):
"""Defines the type of surface shading used with Z-buffering.
APDL Command: /SHADE
Parameters
----------
wn
Window number (or ALL) to which command applies (defaults to 1).
type\_
Shading type:
FACET or 0 - Facet shading (one color per area face) (default).
GOURAUD or 1 - Gouraud smooth shading (smooth variation of color based on interpolated vertex
colors).
PHONG or 2 - Phong smooth shading (smooth variation of color based on interpolated vertex
normals).
Notes
-----
Defines the type of surface shading used on area, volume, and
PowerGraphics [/GRAPHICS,POWER] displays when software Z-buffering is
enabled [/TYPE]. This command is only functional for 2-D display
devices.
This command is valid in any processor.
"""
command = f"/SHADE,{wn},{type_}"
return self.run(command, **kwargs)
def trlcy(self, lab="", tlevel="", n1="", n2="", ninc="", **kwargs):
"""Specifies the level of translucency.
APDL Command: /TRLCY
Parameters
----------
lab
Apply translucency level to the items specified by the following
labels:
ELEM - Elements. Use N1, N2, NINC fields for element numbers.
AREA - Solid model areas. Use N1, N2, NINC fields for area numbers.
VOLU - Solid model volumes. Use N1, N2, NINC fields for volume numbers.
ISURF - Isosurfaces (surfaces of constant stress, etc., value). Translucency varies
with result value, to a maximum of the specified
translucency level.
CM - Component group. Use N1 for component name, ignore N2 and NINC.
CURVE - Filled areas under curves of line graphs. Use N1, N2, NINC fields for curve
numbers.
ZCAP - If /TYPE,WN,ZCAP is the current display type, then /TRLCY,ZCAP,TLEVEL will
display the model in window WN with the portion of the model
in front of the section plane displayed at the translucency
level TLEVEL.
ON, OFF - Sets the specified translucency display on or off. All other fields are
ignored.
tlevel
Translucency | |
<https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ parameter.
:type headline_count: int
:param market: The market where the results come from. Typically, mkt is the country where the
user is making the request from. However, it could be a different country if the user is not
located in a country where Bing delivers results. The market must be in the form
:code:`<language code>`-:code:`<country code>`. For example, en-US. The string is case
insensitive. For a list of possible market values, see `Market Codes
<https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_. NOTE: If known, you are
encouraged to always specify the market. Specifying the market helps Bing route the request and
return an appropriate and optimal response. If you specify a market that is not listed in
`Market Codes <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_\ , Bing uses
a best fit market code based on an internal mapping that is subject to change. This parameter
and the `cc <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ query parameter
are mutually exclusive—do not specify both.
:type market: str
:param offset: The zero-based offset that indicates the number of news to skip before returning
news. The default is 0. The offset should be less than (\ `totalEstimatedMatches
<https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ - count). Use this
parameter along with the count parameter to page results. For example, if your user interface
displays 20 news per page, set count to 20 and offset to 0 to get the first page of results.
For each subsequent page, increment offset by 20 (for example, 0, 20, 40). It is possible for
multiple pages to include some overlap in results. If you do not specify the `category
<https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ parameter, Bing ignores
this parameter.
:type offset: int
:param original_image: A Boolean value that determines whether the image's contentUrl contains
a URL that points to a thumbnail of the original article's image or the image itself. If the
article includes an image, and this parameter is set to true, the image's contentUrl property
contains a URL that you may use to download the original image from the publisher's website.
Otherwise, if this parameter is false, the image's contentUrl and thumbnailUrl URLs both point
to the same thumbnail image. Use this parameter only with the News Search API or News Category
API. Trending Topics API ignore this parameter.
:type original_image: bool
:param safe_search: Filter news for adult content. The following are the possible filter
values. Off: Return news articles with adult text, images, or videos. Moderate: Return news
articles with adult text but not adult images or videos. Strict: Do not return news articles
with adult text, images, or videos. If the request comes from a market that Bing's adult policy
requires that safeSearch is set to Strict, Bing ignores the safeSearch value and uses Strict.
If you use the site: query operator, there is the chance that the response may contain adult
content regardless of what the safeSearch query parameter is set to. Use site: only if you are
aware of the content on the site and your scenario supports the possibility of adult content.
:type safe_search: str or ~news_search_client.models.SafeSearch
:param set_lang: The language to use for user interface strings. Specify the language using the
ISO 639-1 2-letter language code. For example, the language code for English is EN. The default
is EN (English). Although optional, you should always specify the language. Typically, you set
setLang to the same language specified by mkt unless the user wants the user interface strings
displayed in a different language. This parameter and the `Accept-Language
<https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_ header are mutually
exclusive; do not specify both. A user interface string is a string that's used as a label in a
user interface. There are few user interface strings in the JSON response objects. Also, any
links to Bing.com properties in the response objects apply the specified language.
:type set_lang: str
:param text_decorations: A Boolean value that determines whether display strings contain
decoration markers such as hit highlighting characters. If true, the strings may include
markers. The default is false. To specify whether to use Unicode characters or HTML tags as the
markers, see the `textFormat <https://docs.microsoft.com/en-us/bing/bing-news-
search/overview>`_ query parameter. For information about hit highlighting, see `Hit
Highlighting <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.
:type text_decorations: bool
:param text_format: The type of markers to use for text decorations (see the textDecorations
query parameter). Possible values are Raw—Use Unicode characters to mark content that needs
special formatting. The Unicode characters are in the range E000 through E019. For example,
Bing uses E000 and E001 to mark the beginning and end of query terms for hit highlighting.
HTML—Use HTML tags to mark content that needs special formatting. For example, use :code:`<b>`
tags to highlight query terms in display strings. The default is Raw. For display strings that
contain escapable HTML characters such as <, >, and &, if textFormat is set to HTML, Bing
escapes the characters as appropriate (for example, < is escaped to <).
:type text_format: str or ~news_search_client.models.TextFormat
:keyword callable cls: A custom type or function that will be passed the direct response
:return: News, or the result of cls(response)
:rtype: ~news_search_client.models.News
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.News"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
# Construct URL
url = self.category.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if country_code is not None:
query_parameters['cc'] = self._serialize.query("country_code", country_code, 'str')
if category is not None:
query_parameters['category'] = self._serialize.query("category", category, 'str')
if count is not None:
query_parameters['count'] = self._serialize.query("count", count, 'int')
if headline_count is not None:
query_parameters['headlineCount'] = self._serialize.query("headline_count", headline_count, 'int')
if market is not None:
query_parameters['mkt'] = self._serialize.query("market", market, 'str')
if offset is not None:
query_parameters['offset'] = self._serialize.query("offset", offset, 'int')
if original_image is not None:
query_parameters['originalImg'] = self._serialize.query("original_image", original_image, 'bool')
if safe_search is not None:
query_parameters['safeSearch'] = self._serialize.query("safe_search", safe_search, 'str')
if set_lang is not None:
query_parameters['setLang'] = self._serialize.query("set_lang", set_lang, 'str')
if text_decorations is not None:
query_parameters['textDecorations'] = self._serialize.query("text_decorations", text_decorations, 'bool')
if text_format is not None:
query_parameters['textFormat'] = self._serialize.query("text_format", text_format, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['X-BingApis-SDK'] = self._serialize.header("x_bing_apis_sdk", x_bing_apis_sdk, 'str')
if accept is not None:
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if accept_language is not None:
header_parameters['Accept-Language'] = self._serialize.header("accept_language", accept_language, 'str')
if user_agent_parameter is not None:
header_parameters['User-Agent'] = self._serialize.header("user_agent_parameter", user_agent_parameter, 'str')
if client_id is not None:
header_parameters['X-MSEdge-ClientID'] = self._serialize.header("client_id", client_id, 'str')
if client_ip is not None:
header_parameters['X-MSEdge-ClientIP'] = self._serialize.header("client_ip", client_ip, 'str')
if location is not None:
header_parameters['X-Search-Location'] = self._serialize.header("location", location, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('News', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
category.metadata = {'url': '/news'} # type: ignore
def trending(
self,
x_bing_apis_sdk =True, # type: Union[str, "_models.XBingApisSDK"]
accept=None, # type: Optional[str]
accept_language=None, # type: Optional[str]
user_agent_parameter=None, # type: Optional[str]
client_id=None, # type: Optional[str]
client_ip=None, # type: Optional[str]
location=None, # type: Optional[str]
country_code=None, # type: Optional[str]
count=None, # type: Optional[int]
market=None, # type: Optional[str]
offset=None, # type: Optional[int]
safe_search=None, # type: Optional[Union[str, "_models.SafeSearch"]]
set_lang=None, # type: Optional[str]
since=None, # type: Optional[int]
sort_by=None, # type: Optional[str]
text_decorations=None, # type: Optional[bool]
text_format=None, # type: Optional[Union[str, "_models.TextFormat"]]
**kwargs # type: Any
):
# type: (...) -> "_models.TrendingTopics"
"""The News Trending Topics API lets you search on Bing and get back a list of trending news topics that are currently trending on Bing. This section provides technical details about the query parameters and headers that you use to request news and the JSON response objects that contain them. For examples that show how to make requests, see `Searching the web for news <https://docs.microsoft.com/en-us/bing/bing-news-search/overview>`_.
The News Trending Topics API lets you search on Bing and get back a list of trending news
topics that are currently trending on Bing. This section provides technical details about the
query parameters and | |
<reponame>Komanawa-Solutions-Ltd/SLMACC-2020-CSRA
"""
Author: <NAME>
Created: 3/11/2020 9:07 AM
"""
import numpy as np
import pandas as pd
import copy
def calc_smd(rain, pet, h2o_cap, h2o_start, a=0.0073,
             p=1, return_drn_aet=False):
    """
    calculate the soil moisture deficit from aet; assumes that if these are arrays axis 0 is time
    :param rain: array of rain fall amounts, mm, shape = (time, *other dimensions (if needed))
    :param pet: array of pet amounts, mm, shape = (time, *other dimensions (if needed))
    :param h2o_cap: maximum soil water capacity, mm, niwa uses 150mm as a standard
    :param h2o_start: fraction of the soil water capacity to start at, fraction 0-1
    :param a: "readily available" water coefficient (d mm-1)
             default value from woodward 2010
    :param p: proportion of readily available water (RAW) able to be extracted in one day (d-1)
              default value from woodward, 2010
    :param return_drn_aet: boolean, if True also return drainage and AET
    :return: (soil moisture deficit, mm, <=0) or (soil moisture deficit, mm), (drainage, mm), (aet, mm)
    """
    # make this work if float/ndarray passed; internally everything is 2d (time, space)
    if np.atleast_1d(pet).ndim == 1:
        array_d = 1  # 1d array or list, return 1d data
        pet = np.atleast_1d(pet[:, np.newaxis])
        rain = np.atleast_1d(rain[:, np.newaxis])
    else:
        array_d = 2  # 2 or more dimensions return without modification
    assert rain.shape == pet.shape, 'rain and PET must be same shape'
    assert h2o_start <= 1 and h2o_start >= 0, 'h2o start must be the fraction, between 0-1'
    smd = np.zeros(pet.shape, float)
    if return_drn_aet:
        drain = np.zeros(pet.shape, float)
        aet_out = np.zeros(pet.shape, float)
    iter_shp = pet.shape[1:]
    # soil moisture store begins at the requested fraction of capacity (mm)
    soil_mois = np.zeros((iter_shp)) + h2o_cap * h2o_start
    # daily water balance: add rain, remove AET, clamp the store to [0, h2o_cap];
    # anything above capacity leaves as drainage
    for i, (r, pe) in enumerate(zip(rain, pet)):
        # W (deficit, <=0) is the store expressed relative to capacity
        aet = calc_aet(pe, p=p, a=a, AWHC=h2o_cap, W=soil_mois - h2o_cap)
        soil_mois = soil_mois + r - aet
        soil_mois[soil_mois < 0] = 0  # the store cannot go negative
        d = np.zeros((iter_shp))
        idx = soil_mois > h2o_cap
        d[idx] = soil_mois[idx] - h2o_cap  # excess above capacity drains
        soil_mois[idx] = h2o_cap
        smd[i] = (soil_mois - h2o_cap)  # deficit is <= 0 by construction
        if return_drn_aet:
            drain[i] = d
            aet_out[i] = aet
    # manage shape and return data (squeeze back to 1d if 1d input was passed)
    if array_d == 1:
        smd = smd[:, 0]
        if return_drn_aet:
            drain = drain[:, 0]
            aet_out = aet_out[:, 0]
    if return_drn_aet:
        return smd, drain, aet_out
    else:
        return smd
def calc_aet(PET, AWHC, W, p=1, a=0.0073):
    """
    calculate AET for New Zealand pasture
    # from woodward 2010, https://www.tandfonline.com/doi/pdf/10.1080/00288233.2001.9513464
    :param PET: potential evapotranspiration (mm/day), scalar or np.ndarray
    :param AWHC: available water holding capacity of soil to rooting depth (mm)
    :param W: soil water deficit (usually negative) (mm)
    :param p: proportion of readily available water (RAW) able to be extracted in one day (d-1)
              default value from woodward 2010
    :param a: "readily available" water coefficient (d mm-1)
              default value from woodward 2010
    :return: AET (mm/day), same shape as the broadcast inputs
    """
    RAW = a * PET * (AWHC + W) * p
    # AET is PET capped at the readily available water. np.minimum is the
    # elementwise equivalent of the previous deepcopy + boolean-mask assignment,
    # but it also works for scalar PET (the old code raised TypeError there)
    # and never mutates the caller's array.
    return np.minimum(PET, RAW)
def calc_sma_smd_historical(rain, pet, date, h2o_cap, h2o_start, average_start_year=1981, average_stop_year=2010,
                            a=0.0073,
                            p=1):
    """
    calculate the soil moisture deficit and soil moisture anomaly from rain and pet
    :param rain: array of precip amounts, mm
    :param pet: array of pet amounts, mm
    :param date: the dates for the pet/precip data
    :param h2o_cap: maximum soil water capacity, mm, niwa uses 150mm as a standard
    :param h2o_start: fraction of the soil water capacity to start at, fraction 0-1
    :param average_start_year: start year for the day-of-year averaging period, inclusive
    :param average_stop_year: end year for the day-of-year averaging period, inclusive
    :param a: "readily available" water coefficient (d mm-1)
              default value from woodward 2010
    :param p: proportion of readily available water (RAW) able to be extracted in one day (d-1)
              default value from woodward, 2010
    :return: pd.DataFrame with columns date, doy, pet, rain, smd, drain, aet_out,
             mean_doy_smd (long-term mean smd for that doy) and sma (smd - mean_doy_smd)
    """
    date = np.atleast_1d(date)
    doy = pd.Series(date).dt.dayofyear
    pet = np.atleast_1d(pet)
    rain = np.atleast_1d(rain)
    assert date.shape == pet.shape == rain.shape, 'date, pet, rain must be same shape'
    smd, drain, aet_out = calc_smd(rain, pet, h2o_cap, h2o_start, a, p, return_drn_aet=True)
    outdata = pd.DataFrame(data={'date': date, 'doy': doy, 'pet': pet, 'rain': rain, 'smd': smd, 'drain': drain,
                                 'aet_out': aet_out},
                           )
    # calculate mean smd for each day of year over the averaging period
    idx = (outdata.date.dt.year >= average_start_year) & (outdata.date.dt.year <= average_stop_year)
    temp = outdata.loc[idx, ['doy', 'smd']]
    average_smd = temp.groupby(doy).mean().set_index('doy')
    # seed mean_doy_smd with the doy, then map doy -> long-term mean smd via replace
    outdata.loc[:, 'mean_doy_smd'] = outdata.loc[:, 'doy']
    outdata.replace({'mean_doy_smd': average_smd.loc[:, 'smd'].to_dict()}, inplace=True)
    # anomaly = actual deficit minus the long-term mean deficit for that day of year
    outdata.loc[:, 'sma'] = outdata.loc[:, 'smd'] - outdata.loc[:, 'mean_doy_smd']
    return outdata
def calc_monthly_based_smd_sma_150mm(rain, pet, date):
    """Calculate monthly-reset SMD for a 150 mm water-holding-capacity soil.

    Each calendar month of each year is run independently through calc_smd,
    starting from a fixed long-term average deficit for that month
    (month_start, mm; negative values are deficits).

    :param rain: array of rain fall amounts (mm), same shape as date
    :param pet: array of pet amounts (mm), same shape as date
    :param date: datetime-like array for the rain/pet data
    :return: pd.DataFrame indexed by date with columns doy, month, year, day,
             rain, pet, smd
    """
    # long-term average starting SMD (mm) for the first day of each month
    month_start = {1: -79.0, 2: -92.0, 3: -84.0, 4: -71.0, 5: -46.0, 6: -21.0, 7: -9.0, 8: -7.0, 9: -12.0, 10: -30.0,
                   11: -47.0, 12: -67.0}
    assert date.shape == pet.shape == rain.shape, 'date, pet, rain must be same shape'
    dt = pd.Series(date).dt
    outdata = pd.DataFrame(index=date)
    # explicit assignments instead of the previous eval(k) loop
    outdata.loc[:, 'doy'] = dt.dayofyear.values
    outdata.loc[:, 'month'] = dt.month.values
    outdata.loc[:, 'year'] = dt.year.values
    outdata.loc[:, 'day'] = dt.day.values
    outdata.loc[:, 'rain'] = rain
    outdata.loc[:, 'pet'] = pet
    for m in range(1, 13):
        # iterate only the years actually present in the data; the previous
        # range(year.max()) started at year 0 and excluded the final year,
        # so the last year's smd was never computed
        for y in np.unique(outdata.year):
            idx = (outdata.month == m) & (outdata.year == y)
            if not idx.any():
                continue  # this (month, year) combination has no data
            smd = calc_smd(outdata.loc[idx, 'rain'].values, outdata.loc[idx, 'pet'].values,
                           h2o_cap=150, h2o_start=(150 + month_start[m]) / 150, return_drn_aet=False)
            outdata.loc[idx, 'smd'] = smd
    return outdata
def calc_penman_pet(rad, temp, rh, wind_10=None, wind_2=None, psurf=None, mslp=None,
                    elevation=None):
    """
    calculate penman-monteith pet, works with either numeric values or with an np.ndarray.
    :param rad: radiation mJ/m2/day
    :param temp: mean temperature degrees C
    :param rh: relative humidity (percent)
    :param wind_10: 10 m wind speed (m/s) or None, one of (wind_10, wind_2) must be passed,
                    this is converted to 2m windspeed internally
    :param wind_2: 2 m wind speed (m/s) or None, one of (wind_10, wind_2) must be passed
    :param psurf: surface pressure (kpa) or None, one of psurf, mslp must be passed
    :param mslp: mean sea level pressure (kpa) or None, one of psurf, mslp must be passed
    :param elevation: elevation (m) of the point or None, needed only if mslp passed
    :return: pet (mm/day)
    """
    # check inputs: exactly one wind measure and exactly one pressure measure
    assert (wind_10 is not None) or (wind_2 is not None), 'either wind_10 or wind_2 must not be None'
    assert (wind_10 is None) or (wind_2 is None), 'only one of wind_10 and wind_2 may not be None'
    assert (psurf is not None) or (mslp is not None), 'either psurf or mslp must not be None'
    assert (psurf is None) or (mslp is None), 'only one of psurf and mslp may not be None'
    # calc psurf from mean sea level pressure if necessary
    if psurf is None:
        assert elevation is not None, 'if mslp is passed instead of psurf elevation must also be passed'
        # barometric formula, from https://keisan.casio.com/keisan/image/Convertpressure.pdf
        psurf = mslp * (1 - 0.0065 * elevation / (temp + 273 + 0.0065 * elevation)) ** 5.257
    # get the 2 m wind speed; the asserts above guarantee exactly one source,
    # so the previous unreachable 'should not get here' branch is gone
    if wind_2 is not None:
        wind = wind_2
    else:
        # FAO-56 logarithmic wind profile conversion from 10 m to 2 m
        wind = wind_10 * (4.87 / (np.log(67.8 * 10 - 5.42)))
    # assure all inputs are the same shape (explicit dict instead of eval())
    err_mess = ('non-matching shapes, [rad, temp, rh, wind_10 | wind_2=None, psurf| mslp] must be the same shape, '
                'problem with temp and {}')
    for v, val in {'rad': rad, 'wind': wind, 'rh': rh, 'psurf': psurf}.items():
        assert np.atleast_1d(temp).shape == np.atleast_1d(val).shape, err_mess.format(v)
    h_vap = 2.501 - 0.00236 * temp  # latent heat of vaporisation (MJ/kg); literal was '02.501'
    tmp = 4098 * (0.6108 * np.e ** ((17.27 * temp) / (temp + 237.3)))
    delt = tmp / (temp + 237.3) ** 2  # gradient of the vapour pressure curve, Allen et al (1998) eq. 13
    soil = 0  # soil heat flux (set to zero by niwa)
    # psychrometric constant; NOTE(review): the 1.1013 coefficient looks like
    # cp = 1.013e-3 MJ/kg/degC with an extra leading digit -- confirm against the
    # NIWA source before changing, so it is left as-is here
    y = (1.1013 * psurf) / (0.622 * h_vap * 1000)
    es = 0.61094 * np.e ** (17.625 * temp / (243.04 + temp))  # saturation vapour pressure (kpa)
    ed = rh * es / 100  # actual vapour pressure from relative humidity (kpa)
    pet = ((h_vap ** -1 * delt * (rad - soil) + y * (900 / (temp + 273)) * wind * (es - ed)) /
           (delt + y * (1 + 0.34 * wind)))
    return pet
def calc_smd_sma_wah_monthly(months, days, rain, radn, tmax, tmin, rh_min, rh_max, wind_10, mslp, elv):
"""
calculate soil moisture deficit, soil moisture anomaly, and pet for weather at home data. this is a convenience
function for Bodeker Scientific. the expected inputs which are nd arrays are expected to be 2d arrays of
shape (one of (360,365,366), num | |
ключа
if key:
key = key
else:
# присваивание ключую значения введенного пользователем
key = message.text
# подключение к БД
sqlither = SQLighter(message.from_user.id)
# поиск класса по ключу введенному пользователем
name = sqlither.search_class(key)
# в случае, если такого класса не существует вывод Exception
if not name:
raise Exception
# проверка, является ли пользователь админом
if sqlither.user_is_admin(key):
# в случае если пользователь админ, предоставление функций администратора
sqlither.add_user_to_class(key)
ACTIVE_CLASS = key
markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
# создание списка необходимых кнопок
buttons = ['📓Расписание', '📒ДЗ', '⚙Настройки',
'📝Объявление', '✅Назад в главную']
# вывод кнопок на экран
markup.add(*buttons)
bot.send_message(message.chat.id, f'Вы успешно перешли в "{name}"',
reply_markup=markup)
else:
# если пользователь не является админом, предоставление ему функционала ученика
sqlither.add_user_to_class(key)
ACTIVE_CLASS = key
markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
# создание списка необходимых кнопок
buttons = ['📓Расписание', '📒ДЗ', '✅Назад в главную']
# вывод кнопок на экран
markup.add(*buttons)
bot.send_message(message.chat.id, f'Вы успешно перешли в "{name}".',
reply_markup=markup)
# вывод сообщения об ошибке в случае некоррекного ввода данных / неверного вызова
except Exception as e:
bot.send_message(message.chat.id, '❌Ошибка! Не удалось найти класс.')
# broadcast an admin announcement to the students of the class
def make_ad(message):
    """Send the admin's message as an announcement to every member of the active class."""
    global ACTIVE_CLASS
    # return to the previous screen if the admin pressed "back"
    if message.text == '❌Назад':
        return back(message)
    db = SQLighter(message.from_user.id)
    sender_id = message.from_user.id
    # deliver the announcement to every class member; the sender gets a confirmation instead
    for row in db.search_users_in_class(ACTIVE_CLASS):
        user_id = row[0]
        if user_id == sender_id:
            bot.send_message(message.chat.id, text='Объявление успешно отправлено.')
        else:
            bot.send_message(user_id, f'Объявление! {message.text}')
    # once the broadcast is done, go back to the previous screen
    back(message)
# add a new administrator to the class
def new_admin(message):
    """Register the user id typed by the current admin as a new class admin."""
    global ACTIVE_CLASS
    keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
    keyboard.add('✅Назад в главную')
    # navigation buttons take priority over treating the text as an id
    if message.text == '✅Назад в главную':
        return start_message(message)
    if message.text == '🚫Назад':
        return search_class(message, ACTIVE_CLASS)
    try:
        # anything else is interpreted as the id of the admin-to-be
        SQLighter(message.from_user.id).create_new_admin(message.text)
        bot.send_message(message.chat.id, 'Админ успешно добавлен')
    except Exception:
        # malformed input or a database failure
        bot.send_message(message.chat.id, '❌Ошибка! Не удалось добавить админа.')
# administrator settings screen
def settings(message):
    """Open the admin settings keyboard."""
    try:
        keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
        # actions available to a class admin; a single add(*...) call keeps
        # the original keyboard row layout
        admin_actions = ['👨🏻🏫Добавить админа', '🔑Получить ключ', '🚫Назад', '✅Назад в главную']
        keyboard.add(*admin_actions)
        bot.send_message(message.chat.id, 'Вы перешли в настройки.', reply_markup=keyboard)
    except Exception:
        # anything unexpected: tell the user instead of crashing the handler
        bot.send_message(message.chat.id, 'Не удалось перейти в настройки.')
def send_class_id(mesage):
    """Send the active class key to the requesting admin, then reopen the settings screen.

    NOTE(review): the parameter is spelled 'mesage'; kept as-is to avoid changing
    the callable's interface.
    """
    try:
        bot.send_message(mesage.chat.id, f'Ключ вашего класса - {ACTIVE_CLASS}')
        return settings(mesage)
    except Exception:
        # lookup/send failed -- report it and still return to settings
        bot.send_message(mesage.chat.id, 'Не удалось получить ключ вашего класса.')
        return settings(mesage)
# show the classes the user is a member of
def list_of_classes(message):
    """List every class the user belongs to, one keyboard button per class."""
    keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
    try:
        rows = SQLighter(message.from_user.id).search_user_classes()
        if not rows:
            # no memberships -> fall through to the "no classes" message below
            raise Exception
        # one button per class ("key/name") plus a plain list of keys for the text
        buttons = [f'{row[0]}/{row[1]}' for row in rows]
        names = [f'{row[0]}' for row in rows]
        keyboard.add(*buttons)
        keyboard.add('✅Назад в главную')
        bot.send_message(message.chat.id, f'На данный момент вы состоите в классах: {", ".join(names)}.',
                         reply_markup=keyboard)
    except Exception:
        keyboard.add('✅Назад в главную')
        bot.send_message(message.chat.id, 'К сожалению, вы не состоите не в одном классе.',
                         reply_markup=keyboard)
# timetable screen for the active class
def shedule(message):
    """Show the day-picker for the class timetable, or offer to create one."""
    global ACTIVE_CLASS, SHEDULE_ID
    try:
        keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
        db = SQLighter(message.from_user.id)
        # look up the stored timetable for the active class
        timetable = db.search_shedule(ACTIVE_CLASS)
        if timetable:
            # admins get an extra edit button on its own row, added first
            if db.user_is_admin(ACTIVE_CLASS):
                keyboard.add('✍🏻Изменить расписание')
            day_buttons = ['Понедельник', 'Вторник', 'Среда', 'Четверг',
                           'Пятница', 'Суббота', '🚫Назад', '✅Назад в главную']
            keyboard.add(*day_buttons)
            # let the user pick a day; the answer is handled by send_shedule
            sent = bot.send_message(message.chat.id, 'Выберете день:', reply_markup=keyboard)
            SHEDULE_ID = timetable[0][0]
            bot.register_next_step_handler(sent, send_shedule)
        else:
            # nothing stored yet -- offer to create a timetable
            keyboard.add(*['📖Добавить расписание', '🚫Назад'])
            bot.send_message(message.chat.id, 'На данный момент расписание не добавлено.',
                             reply_markup=keyboard)
    except Exception as e:
        print(e)
        # tell the user instead of crashing the handler
        bot.send_message(message.chat.id, 'Не удалось найти расписание.')
# maps the Russian day-button captions to the timetable DB column names
_DAY_COLUMNS = {
    'Понедельник': 'monday',
    'Вторник': 'tuesday',
    'Среда': 'wednesday',
    'Четверг': 'thursday',
    'Пятница': 'friday',
    'Суббота': 'saturday',
}


def _format_diary(lessons):
    """Render a space-separated subject list as numbered lines ('\\n1. ...')."""
    # keeps the original output byte-for-byte, including the leading newline
    return ''.join(f'\n{num}. {subject}' for num, subject in enumerate(lessons.split(), 1))


# send the timetable for the chosen day
def send_shedule(message):
    """Reply with the timetable for the day the user picked (or handle navigation).

    Previously this was six byte-identical branches differing only in the day
    name/column; they are collapsed into one lookup via _DAY_COLUMNS.
    """
    global ACTIVE_CLASS, SHEDULE_ID
    markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
    # admin chose to edit: ask which day to change, then collect the new subjects
    if message.text == '✍🏻Изменить расписание':
        buttons = ['Понедельник', 'Вторник', 'Среда', 'Четверг',
                   'Пятница', 'Суббота', '🚫Назад']
        markup.add(*buttons)
        markup.add('✅Назад в главную')
        sent = bot.send_message(message.chat.id, 'Выберете день:', reply_markup=markup)
        return bot.register_next_step_handler(sent, add_shedule)
    # navigation buttons
    if message.text == '🚫Назад':
        return search_class(message, ACTIVE_CLASS)
    elif message.text == '✅Назад в главную':
        return start_message(message)
    markup.add('❌Назад')
    markup.add('✅Назад в главную')
    sqlighter = SQLighter(message.from_user.id)
    column = _DAY_COLUMNS.get(message.text)
    if column is None:
        # not a day button at all (sent without a keyboard, as before)
        bot.send_message(message.chat.id, 'Такого дня нет в вашем расписании!')
        return
    day = sqlighter.search_shedule_for_day(SHEDULE_ID, column)
    if day[0][0] is None:
        bot.send_message(message.chat.id, 'На этот день расписание не добавлено.'
                                          ' Добавьте его с помощью кнопки "✍🏻Изменить расписание".',
                         reply_markup=markup)
    else:
        # numbered list of subjects, one per line
        bot.send_message(message.chat.id, _format_diary(day[0][0]), reply_markup=markup)
# добавление расписания админом
def add_shedule(message):
markup = types.ReplyKeyboardMarkup(resize_keyboard=True)
markup.add('❌Назад')
markup.add('✅Назад в главную')
# в случае необходимости возвращение на главную
if message.text == '✅Назад в главную':
start_message(message)
# в случае небходимости возвращение к функциям админов
elif message.text == '❌Назад':
return back(message)
# в случае необходимости возвращение к расписанию
elif message.text == '🚫Назад':
return shedule(message)
# после того, как мы получаем день, на который нужно составить расписание, вызываем функцию соответствующую
# каждому учебному дню. Далее пользователь вводит данные о расписании на выбранный день недели
elif message.text == 'Понедельник':
sent = bot.send_message(message.chat.id, 'Введите предметы через пробел:', reply_markup=markup)
bot.register_next_step_handler(sent, add_shedule_on_monday)
elif message.text == 'Вторник':
sent = bot.send_message(message.chat.id, 'Введите предметы через пробел:', reply_markup=markup)
bot.register_next_step_handler(sent, add_shedule_on_tuesday)
elif message.text == 'Среда':
sent = bot.send_message(message.chat.id, 'Введите предметы через пробел:', reply_markup=markup)
bot.register_next_step_handler(sent, add_shedule_on_wednesday)
elif message.text == 'Четверг':
sent = bot.send_message(message.chat.id, 'Введите предметы через пробел:', reply_markup=markup)
bot.register_next_step_handler(sent, add_shedule_on_thursday)
elif | |
updated',
'Relatives': 'Relatives',
'Relief': 'Relief',
'Relief Team': 'Relief Team',
'Religion': 'Religion',
'Religious': 'Religious',
'Religious Leader': 'Religious Leader',
'Relocate as instructed in the <instruction>': 'Relocate as instructed in the <instruction>',
'Remote Error': 'Remote Error',
'Remove': 'Remove',
'Remove Activity from this event': 'Remove Activity from this event',
'Remove Asset from this event': 'Remove Asset from this event',
'Remove Asset from this scenario': 'Remove Asset from this scenario',
'Remove Document from this request': 'Remove Document from this request',
'Remove Facility from this event': 'Remove Facility from this event',
'Remove Facility from this scenario': 'Remove Facility from this scenario',
'Remove Human Resource from this event': 'Remove Human Resource from this event',
'Remove Human Resource from this scenario': 'Remove Human Resource from this scenario',
'Remove Incident from this event': 'Remove Incident from this event',
'Remove Item from Inventory': 'Remove Item from Inventory',
'Remove Item from Order': 'Remove Item from Order',
'Remove Item from Shipment': 'Remove Item from Shipment',
'Remove Map Configuration from this event': 'Remove Map Configuration from this event',
'Remove Map Configuration from this scenario': 'Remove Map Configuration from this scenario',
'Remove Organization from Project': 'Remove Organisation from Project',
'Remove Person from Commitment': 'Remove Person from Commitment',
'Remove Skill': 'Remove Skill',
'Remove Skill from Request': 'Remove Skill from Request',
'Remove Task from this event': 'Remove Task from this event',
'Remove Task from this scenario': 'Remove Task from this scenario',
'Remove this asset from this event': 'Remove this asset from this event',
'Remove this asset from this scenario': 'Remove this asset from this scenario',
'Remove this facility from this event': 'Remove this facility from this event',
'Remove this facility from this scenario': 'Remove this facility from this scenario',
'Remove this human resource from this event': 'Remove this human resource from this event',
'Remove this human resource from this scenario': 'Remove this human resource from this scenario',
'Remove this task from this event': 'Remove this task from this event',
'Remove this task from this scenario': 'Remove this task from this scenario',
'Repair': 'Repair',
'Repaired': 'Repaired',
'Repeat your password': 'Repeat your password',
'Report': 'Report',
'Report Another Assessment...': 'Report Another Assessment...',
'Report Details': 'Report Details',
'Report Resource': 'Report Resource',
'Report To': 'Report To',
'Report Types Include': 'Report Types Include',
'Report added': 'Report added',
'Report deleted': 'Report deleted',
'Report my location': 'Report my location',
'Report the contributing factors for the current EMS status.': 'Report the contributing factors for the current EMS status.',
'Report the contributing factors for the current OR status.': 'Report the contributing factors for the current OR status.',
'Report them as found': 'Report them as found',
'Report them missing': 'Report them missing',
'Report updated': 'Report updated',
'ReportLab module not available within the running Python - this needs installing for PDF output!': 'ReportLab module not available within the running Python - this needs installing for PDF output!',
'Reported To': 'Reported To',
'Reporter': 'Reporter',
'Reporter Name': 'Reporter Name',
'Reporting on the projects in the region': 'Reporting on the projects in the region',
'Reports': 'Reports',
'Repositories': 'Repositories',
'Repository': 'Repository',
'Repository Base URL': 'Repository Base URL',
'Repository Configuration': 'Repository Configuration',
'Repository Name': 'Repository Name',
'Repository UUID': 'Repository UUID',
'Repository configuration deleted': 'Repository configuration deleted',
'Repository configuration updated': 'Repository configuration updated',
'Repository configured': 'Repository configured',
'Request': 'Request',
'Request Added': 'Request Added',
'Request Canceled': 'Request Canceled',
'Request Details': 'Request Details',
'Request From': 'Request From',
'Request Item': 'Request Item',
'Request Item Details': 'Request Item Details',
'Request Item added': 'Request Item added',
'Request Item deleted': 'Request Item deleted',
'Request Item from Available Inventory': 'Request Item from Available Inventory',
'Request Item updated': 'Request Item updated',
'Request Items': 'Request Items',
'Request New People': 'Request New People',
'Request Number': 'Request Number',
'Request Status': 'Request Status',
'Request Type': 'Request Type',
'Request Updated': 'Request Updated',
'Request added': 'Request added',
'Request deleted': 'Request deleted',
'Request for Account': 'Request for Account',
'Request for Donations Added': 'Request for Donations Added',
'Request for Donations Canceled': 'Request for Donations Canceled',
'Request for Donations Details': 'Request for Donations Details',
'Request for Donations Updated': 'Request for Donations Updated',
'Request for Role Upgrade': 'Request for Role Upgrade',
'Request for Volunteers Added': 'Request for Volunteers Added',
'Request for Volunteers Canceled': 'Request for Volunteers Canceled',
'Request for Volunteers Details': 'Request for Volunteers Details',
'Request for Volunteers Updated': 'Request for Volunteers Updated',
'Request updated': 'Request updated',
'Request, Response & Session': 'Request, Response & Session',
'Requested': 'Requested',
'Requested By': 'Requested By',
'Requested By Facility': 'Requested By Facility',
'Requested For': 'Requested For',
'Requested For Facility': 'Requested For Facility',
'Requested From': 'Requested From',
'Requested Items': 'Requested Items',
'Requested Skill Details': 'Requested Skill Details',
'Requested Skill updated': 'Requested Skill updated',
'Requested Skills': 'Requested Skills',
'Requester': 'Requester',
'Requests': 'Requests',
'Requests Management': 'Requests Management',
'Requests for Donations': 'Requests for Donations',
'Requests for Volunteers': 'Requests for Volunteers',
'Required Skills': 'Required Skills',
'Requires Login': 'Requires Login',
'Requires Login!': 'Requires Login!',
'Rescue and recovery': 'Rescue and recovery',
'Reset': 'Reset',
'Reset Password': 'Reset Password',
'Resolve': 'Resolve',
'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.': 'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.',
'Resource': 'Resource',
'Resource Configuration': 'Resource Configuration',
'Resource Details': 'Resource Details',
'Resource Mapping System': 'Resource Mapping System',
'Resource Mapping System account has been activated': 'Resource Mapping System account has been activated',
'Resource Name': 'Resource Name',
'Resource added': 'Resource added',
'Resource configuration deleted': 'Resource configuration deleted',
'Resource configuration updated': 'Resource configuration updated',
'Resource configured': 'Resource configured',
'Resource deleted': 'Resource deleted',
'Resource updated': 'Resource updated',
'Resources': 'Resources',
'Respiratory Infections': 'Respiratory Infections',
'Response': 'Response',
'Restricted Access': 'Restricted Access',
'Restricted Use': 'Restricted Use',
'Results': 'Results',
'Retail Crime': 'Retail Crime',
'Retrieve Password': 'Retrieve Password',
'Return': 'Return',
'Return to Request': 'Return to Request',
'Returned': 'Returned',
'Returned From': 'Returned From',
'Review Incoming Shipment to Receive': 'Review Incoming Shipment to Receive',
'Rice': 'Rice',
'Riot': 'Riot',
'River': 'River',
'River Details': 'River Details',
'River added': 'River added',
'River deleted': 'River deleted',
'River updated': 'River updated',
'Rivers': 'Rivers',
'Road Accident': 'Road Accident',
'Road Closed': 'Road Closed',
'Road Conditions': 'Road Conditions',
'Road Delay': 'Road Delay',
'Road Hijacking': 'Road Hijacking',
'Road Usage Condition': 'Road Usage Condition',
'Roads Layer': 'Roads Layer',
'Role': 'Role',
'Role Details': 'Role Details',
'Role Required': 'Role Required',
'Role Updated': 'Role Updated',
'Role added': 'Role added',
'Role deleted': 'Role deleted',
'Role updated': 'Role updated',
'Roles': 'Roles',
'Roles Permitted': 'Roles Permitted',
'Roof tile': 'Roof tile',
'Roofs, floors (vertical load)': 'Roofs, floors (vertical load)',
'Room': 'Room',
'Room Details': 'Room Details',
'Room added': 'Room added',
'Room deleted': 'Room deleted',
'Room updated': 'Room updated',
'Rooms': 'Rooms',
'Rows in table': 'Rows in table',
'Rows selected': 'Rows selected',
'Running Cost': 'Running Cost',
'Russian': 'Russian',
'SMS': 'SMS',
'SMS Modems (Inbound & Outbound)': 'SMS Modems (Inbound & Outbound)',
'SMS Outbound': 'SMS Outbound',
'SMS Settings': 'SMS Settings',
'SMS settings updated': 'SMS settings updated',
'SMTP to SMS settings updated': 'SMTP to SMS settings updated',
'Safe environment for vulnerable groups': 'Safe environment for vulnerable groups',
'Safety Assessment Form': 'Safety Assessment Form',
'Safety of children and women affected by disaster?': 'Safety of children and women affected by disaster?',
'Sahana Eden': 'Sahana Eden',
'Sahana Eden Humanitarian Management Platform': 'Sahana Eden Humanitarian Management Platform',
'Sahana Eden portable application generator': 'Sahana Eden portable application generator',
'Salted Fish': 'Salted Fish',
'Sanitation problems': 'Sanitation problems',
'Satellite': 'Satellite',
'Satellite Layer': 'Satellite Layer',
'Saturday': 'Saturday',
'Save': 'Save',
'Save Search': 'Save Search',
'Save: Default Lat, Lon & Zoom for the Viewport': 'Save: Default Lat, Lon & Zoom for the Viewport',
'Saved Search Details': 'Saved Search Details',
'Saved Search added': 'Saved Search added',
'Saved Search deleted': 'Saved Search deleted',
'Saved Search updated': 'Saved Search updated',
'Saved Searches': 'Saved Searches',
'Saved.': 'Saved.',
'Saving...': 'Saving...',
'Scale of Results': 'Scale of Results',
'Scanned Copy': 'Scanned Copy',
'Scanned Forms Upload': 'Scanned Forms Upload',
'Scenario': 'Scenario',
'Scenario Details': 'Scenario Details',
'Scenario added': 'Scenario added',
'Scenario deleted': 'Scenario deleted',
'Scenario updated': 'Scenario updated',
'Scenarios': 'Scenarios',
'Schedule': 'Schedule',
'Schedule synchronization jobs': 'Schedule synchronisation jobs',
'Schema': 'Schema',
'School': 'School',
'School Closure': 'School Closure',
'School Lockdown': 'School Lockdown',
'School Teacher': 'School Teacher',
'School activities': 'School activities',
'School assistance': 'School assistance',
'School attendance': 'School attendance',
'School destroyed': 'School destroyed',
'School heavily damaged': 'School heavily damaged',
'School tents received': 'School tents received',
'School tents, source': 'School tents, source',
'School used for other purpose': 'School used for other purpose',
'School/studying': 'School/studying',
'Search': 'Search',
'Search Activities': 'Search Activities',
'Search Activity Report': 'Search Activity Report',
'Search Addresses': 'Search Addresses',
'Search Alternative Items': 'Search Alternative Items',
'Search Assessment Summaries': 'Search Assessment Summaries',
'Search Assessments': 'Search Assessments',
'Search Asset Log': 'Search Asset Log',
'Search Assets': 'Search Assets',
'Search Baseline Type': 'Search Baseline Type',
'Search Baselines': 'Search Baselines',
'Search Brands': 'Search Brands',
'Search Budgets': 'Search Budgets',
'Search Bundles': 'Search Bundles',
'Search Camp Services': 'Search Camp Services',
'Search Camp Types': 'Search Camp Types',
'Search Camps': 'Search Camps',
'Search Catalog Items': 'Search Catalogue Items',
'Search Catalogs': 'Search Catalogues',
'Search Certificates': 'Search Certificates',
'Search Certifications': 'Search Certifications',
'Search Checklists': 'Search Checklists',
'Search Cluster Subsectors': 'Search Cluster Subsectors',
'Search Clusters': 'Search Clusters',
'Search Commitment Items': 'Search Commitment Items',
'Search Commitments': 'Search Commitments',
'Search Committed People': 'Search Committed People',
'Search Competency Ratings': 'Search Competency Ratings',
'Search Contact Information': 'Search Contact Information',
'Search Contacts': 'Search Contacts',
'Search Course Certificates': 'Search Course Certificates',
'Search Courses': 'Search Courses',
'Search Credentials': 'Search Credentials',
'Search Criteria': 'Search Criteria',
'Search Documents': 'Search Documents',
'Search Donors': 'Search Donors',
'Search Entries': 'Search Entries',
'Search Events': 'Search Events',
'Search Facilities': 'Search Facilities',
'Search Feature Class': 'Search Feature Class',
'Search Feature Layers': 'Search Feature Layers',
'Search Flood Reports': 'Search Flood Reports',
'Search GPS data': 'Search GPS data',
'Search Geonames': 'Search Geonames',
'Search Groups': 'Search Groups',
'Search | |
# ABI for Curve's AddressProvider contract: an on-chain directory mapping
# integer ids to component addresses (``get_registry``/``get_address``), with
# admin-managed entries and a deadline-guarded two-step ownership transfer.
# NOTE(review): vendored/generated ABI data - keep in sync with the deployed
# contract rather than hand-editing entries.
curve_address_provider_abi = [
    {
        "name": "NewAddressIdentifier",
        "inputs": [
            {"type": "uint256", "name": "id", "indexed": True},
            {"type": "address", "name": "addr", "indexed": False},
            {"type": "string", "name": "description", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "AddressModified",
        "inputs": [
            {"type": "uint256", "name": "id", "indexed": True},
            {"type": "address", "name": "new_address", "indexed": False},
            {"type": "uint256", "name": "version", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "CommitNewAdmin",
        "inputs": [
            {"type": "uint256", "name": "deadline", "indexed": True},
            {"type": "address", "name": "admin", "indexed": True},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "NewAdmin",
        "inputs": [{"type": "address", "name": "admin", "indexed": True}],
        "anonymous": False,
        "type": "event",
    },
    {
        "outputs": [],
        "inputs": [{"type": "address", "name": "_admin"}],
        "stateMutability": "nonpayable",
        "type": "constructor",
    },
    # Read-only lookups into the directory.
    {
        "name": "get_registry",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1061,
    },
    {
        "name": "max_id",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1258,
    },
    {
        "name": "get_address",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "uint256", "name": "_id"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1308,
    },
    # Admin-only mutators for directory entries.
    {
        "name": "add_new_id",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_address"},
            {"type": "string", "name": "_description"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 291275,
    },
    {
        "name": "set_address",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [
            {"type": "uint256", "name": "_id"},
            {"type": "address", "name": "_address"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 182430,
    },
    {
        "name": "unset_address",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [{"type": "uint256", "name": "_id"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 101348,
    },
    # Two-step ownership transfer (commit / apply / revert).
    {
        "name": "commit_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [{"type": "address", "name": "_new_admin"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 74048,
    },
    {
        "name": "apply_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 60125,
    },
    {
        "name": "revert_transfer_ownership",
        "outputs": [{"type": "bool", "name": ""}],
        "inputs": [],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 21400,
    },
    {
        "name": "admin",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1331,
    },
    {
        "name": "transfer_ownership_deadline",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1361,
    },
    {
        "name": "future_admin",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 1391,
    },
    {
        "name": "get_id_info",
        "outputs": [
            {"type": "address", "name": "addr"},
            {"type": "bool", "name": "is_active"},
            {"type": "uint256", "name": "version"},
            {"type": "uint256", "name": "last_modified"},
            {"type": "string", "name": "description"},
        ],
        "inputs": [{"type": "uint256", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12168,
    },
]
# ABI for Curve's pool Registry contract: pool discovery
# (``find_pool_for_coins``, ``pool_list``), per-pool metadata (coins,
# decimals, balances, fees, A parameter), and admin-only pool registration.
# NOTE(review): vendored/generated ABI data - keep in sync with the deployed
# contract rather than hand-editing entries.
curve_registry_abi = [
    {
        "name": "PoolAdded",
        "inputs": [
            {"type": "address", "name": "pool", "indexed": True},
            {"type": "bytes", "name": "rate_method_id", "indexed": False},
        ],
        "anonymous": False,
        "type": "event",
    },
    {
        "name": "PoolRemoved",
        "inputs": [{"type": "address", "name": "pool", "indexed": True}],
        "anonymous": False,
        "type": "event",
    },
    {
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_address_provider"},
            {"type": "address", "name": "_gauge_controller"},
        ],
        "stateMutability": "nonpayable",
        "type": "constructor",
    },
    # Overloaded: second variant takes an index ``i`` to enumerate matches.
    {
        "name": "find_pool_for_coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
    },
    {
        "name": "find_pool_for_coins",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
            {"type": "uint256", "name": "i"},
        ],
        "stateMutability": "view",
        "type": "function",
    },
    # Per-pool metadata getters (fixed-size arrays padded with zero values).
    {
        "name": "get_n_coins",
        "outputs": [{"type": "uint256[2]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1704,
    },
    {
        "name": "get_coins",
        "outputs": [{"type": "address[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12285,
    },
    {
        "name": "get_underlying_coins",
        "outputs": [{"type": "address[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 12347,
    },
    {
        "name": "get_decimals",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 8199,
    },
    {
        "name": "get_underlying_decimals",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 8261,
    },
    {
        "name": "get_rates",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 34780,
    },
    {
        "name": "get_gauges",
        "outputs": [
            {"type": "address[10]", "name": ""},
            {"type": "int128[10]", "name": ""},
        ],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 20310,
    },
    {
        "name": "get_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 16818,
    },
    {
        "name": "get_underlying_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 158953,
    },
    {
        "name": "get_virtual_price_from_lp_token",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [{"type": "address", "name": "_token"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2080,
    },
    {
        "name": "get_A",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1198,
    },
    {
        "name": "get_parameters",
        "outputs": [
            {"type": "uint256", "name": "A"},
            {"type": "uint256", "name": "future_A"},
            {"type": "uint256", "name": "fee"},
            {"type": "uint256", "name": "admin_fee"},
            {"type": "uint256", "name": "future_fee"},
            {"type": "uint256", "name": "future_admin_fee"},
            {"type": "address", "name": "future_owner"},
            {"type": "uint256", "name": "initial_A"},
            {"type": "uint256", "name": "initial_A_time"},
            {"type": "uint256", "name": "future_A_time"},
        ],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 6458,
    },
    {
        "name": "get_fees",
        "outputs": [{"type": "uint256[2]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 1603,
    },
    {
        "name": "get_admin_balances",
        "outputs": [{"type": "uint256[8]", "name": ""}],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 36719,
    },
    {
        "name": "get_coin_indices",
        "outputs": [
            {"type": "int128", "name": ""},
            {"type": "int128", "name": ""},
            {"type": "bool", "name": ""},
        ],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
        "gas": 27456,
    },
    {
        "name": "estimate_gas_used",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_from"},
            {"type": "address", "name": "_to"},
        ],
        "stateMutability": "view",
        "type": "function",
        "gas": 32329,
    },
    # Admin-only pool registration / maintenance functions.
    {
        "name": "add_pool",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "bytes32", "name": "_rate_method_id"},
            {"type": "uint256", "name": "_decimals"},
            {"type": "uint256", "name": "_underlying_decimals"},
            {"type": "bool", "name": "_has_initial_A"},
            {"type": "bool", "name": "_is_v1"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 10196577,
    },
    {
        "name": "add_pool_without_underlying",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "bytes32", "name": "_rate_method_id"},
            {"type": "uint256", "name": "_decimals"},
            {"type": "uint256", "name": "_use_rates"},
            {"type": "bool", "name": "_has_initial_A"},
            {"type": "bool", "name": "_is_v1"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 5590664,
    },
    {
        "name": "add_metapool",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "uint256", "name": "_n_coins"},
            {"type": "address", "name": "_lp_token"},
            {"type": "uint256", "name": "_decimals"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 10226976,
    },
    {
        "name": "remove_pool",
        "outputs": [],
        "inputs": [{"type": "address", "name": "_pool"}],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 779646579509,
    },
    {
        "name": "set_pool_gas_estimates",
        "outputs": [],
        "inputs": [
            {"type": "address[5]", "name": "_addr"},
            {"type": "uint256[2][5]", "name": "_amount"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 355578,
    },
    {
        "name": "set_coin_gas_estimates",
        "outputs": [],
        "inputs": [
            {"type": "address[10]", "name": "_addr"},
            {"type": "uint256[10]", "name": "_amount"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 357165,
    },
    {
        "name": "set_gas_estimate_contract",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address", "name": "_estimator"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 37747,
    },
    {
        "name": "set_liquidity_gauges",
        "outputs": [],
        "inputs": [
            {"type": "address", "name": "_pool"},
            {"type": "address[10]", "name": "_liquidity_gauges"},
        ],
        "stateMutability": "nonpayable",
        "type": "function",
        "gas": 365793,
    },
    {
        "name": "address_provider",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2111,
    },
    {
        "name": "gauge_controller",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2141,
    },
    {
        "name": "pool_list",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "uint256", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2280,
    },
    {
        "name": "pool_count",
        "outputs": [{"type": "uint256", "name": ""}],
        "inputs": [],
        "stateMutability": "view",
        "type": "function",
        "gas": 2201,
    },
    {
        "name": "get_pool_from_lp_token",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "address", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2446,
    },
    {
        "name": "get_lp_token",
        "outputs": [{"type": "address", "name": ""}],
        "inputs": [{"type": "address", "name": "arg0"}],
        "stateMutability": "view",
        "type": "function",
        "gas": 2476,
    },
]
curve_pool_abi = [
{
"name": "TokenExchange",
"inputs": [
{"type": "address", "name": "buyer", "indexed": True},
{"type": "int128", "name": "sold_id", "indexed": False},
{"type": "uint256", "name": "tokens_sold", "indexed": False},
{"type": "int128", "name": "bought_id", "indexed": False},
{"type": "uint256", "name": "tokens_bought", "indexed": False},
],
"anonymous": False,
"type": "event",
},
{
"name": "TokenExchangeUnderlying",
"inputs": [
{"type": | |
<reponame>konradko/directory-api<filename>company/tests/test_views.py
import datetime
import http
from io import BytesIO
import uuid
from unittest import mock
from directory_constants import company_types, choices, sectors, user_roles
from elasticsearch_dsl import Index
from elasticsearch_dsl.connections import connections
import pytest
from freezegun import freeze_time
from rest_framework.test import APIClient
from rest_framework import status
from PIL import Image
from django.core.urlresolvers import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from company import helpers, models, serializers
from company.tests import (
MockInvalidSerializer,
MockValidSerializer,
VALID_REQUEST_DATA,
)
from company.tests import factories
from supplier.tests.factories import SupplierFactory
from supplier.models import Supplier
# Default field values for a public company profile used in this module.
# ('<EMAIL>' is an anonymisation placeholder from the fixture data.)
default_public_profile_data = dict(
    name='private company',
    website='http://example.com',
    description='Company description',
    has_exported_before=True,
    date_of_creation='2010-10-10',
    email_address='<EMAIL>',
    verified_with_code=True,
)
# Neutral search/ordering parameters: empty keyword and expertise filters,
# with both directory publication flags switched on.
default_ordering_values = dict(
    keywords='',
    sectors=[],
    expertise_industries=[],
    expertise_regions=[],
    expertise_languages=[],
    expertise_countries=[],
    expertise_products_services={},
    is_published_investment_support_directory=True,
    is_published_find_a_supplier=True,
)
@pytest.mark.django_db
def test_company_retrieve_no_company(authed_client, authed_supplier):
    """The company endpoint returns 404 when the supplier has no company."""
    # Detach the supplier from any company before hitting the endpoint.
    authed_supplier.company = None
    authed_supplier.save()

    url = reverse('company')
    response = authed_client.get(url)

    assert response.status_code == status.HTTP_404_NOT_FOUND
@freeze_time('2016-11-23T11:21:10.977518Z')
@pytest.mark.django_db
def test_company_retrieve_view(authed_client, authed_supplier):
    """GET on the company endpoint returns the supplier's serialized company.

    Time is frozen so the serialized ``modified`` timestamp is predictable.
    ('<NAME>' is an anonymisation placeholder; the 'test-company' slug
    presumably derives from the original fixture name - TODO confirm.)
    """
    company = factories.CompanyFactory(
        name='<NAME>', date_of_creation=datetime.date(2000, 10, 10)
    )
    # Attach the company to the authenticated supplier so the view resolves it.
    authed_supplier.company = company
    authed_supplier.save()
    response = authed_client.get(reverse('company'))
    # Full expected payload, field-for-field; most values echo the
    # factory-built company instance, the rest are serializer defaults.
    expected = {
        'address_line_1': company.address_line_1,
        'address_line_2': company.address_line_2,
        'company_type': company_types.COMPANIES_HOUSE,
        'country': company.country,
        'date_of_creation': '2000-10-10',
        'description': company.description,
        'email_address': company.email_address,
        'email_full_name': company.email_full_name,
        'employees': company.employees,
        'expertise_countries': company.expertise_countries,
        'expertise_industries': company.expertise_industries,
        'expertise_languages': company.expertise_languages,
        'expertise_products_services': company.expertise_products_services,
        'expertise_regions': company.expertise_regions,
        'export_destinations': [],
        'export_destinations_other': '',
        'facebook_url': company.facebook_url,
        'has_exported_before': company.has_exported_before,
        'has_valid_address': True,
        'id': str(company.id),
        'is_exporting_goods': False,
        'is_exporting_services': False,
        'is_identity_check_message_sent': False,
        'is_publishable': company.is_publishable,
        'is_published': False,
        'is_published_find_a_supplier': False,
        'is_published_investment_support_directory': False,
        'is_registration_letter_sent': False,
        'is_uk_isd_company': False,
        'is_verification_letter_sent': False,
        'is_verified': False,
        'keywords': company.keywords,
        'linkedin_url': company.linkedin_url,
        'locality': company.locality,
        'logo': None,
        'mobile_number': company.mobile_number,
        'modified': '2016-11-23T11:21:10.977518Z',
        'name': '<NAME>',
        'number': company.number,
        'po_box': company.po_box,
        'postal_code': company.postal_code,
        'postal_full_name': company.postal_full_name,
        'sectors': company.sectors,
        'slug': 'test-company',
        'summary': company.summary,
        'supplier_case_studies': [],
        'twitter_url': company.twitter_url,
        'verified_with_code': False,
        'verified_with_companies_house_oauth2': False,
        'verified_with_identity_check': False,
        'verified_with_preverified_enrolment': False,
        'website': company.website,
    }
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == expected
@freeze_time('2016-11-23T11:21:10.977518Z')
@pytest.mark.django_db
def test_company_update_with_put(authed_client, authed_supplier):
    """PUT replaces the company and echoes the merged serialized payload.

    Fields supplied in ``VALID_REQUEST_DATA`` override the expected baseline
    via ``expected.update(...)`` before comparison.
    """
    company = factories.CompanyFactory(
        number='01234567',
        has_exported_before=True,
    )
    authed_supplier.company = company
    authed_supplier.save()
    response = authed_client.put(
        reverse('company'), VALID_REQUEST_DATA, format='json'
    )
    # Baseline expected payload; request-supplied fields are merged in below.
    expected = {
        'company_type': company_types.COMPANIES_HOUSE,
        'email_address': company.email_address,
        'email_full_name': company.email_full_name,
        'employees': company.employees,
        'expertise_countries': company.expertise_countries,
        'expertise_industries': company.expertise_industries,
        'expertise_languages': company.expertise_languages,
        'expertise_products_services': company.expertise_products_services,
        'expertise_regions': company.expertise_regions,
        'export_destinations': ['DE'],
        'export_destinations_other': 'LY',
        'facebook_url': company.facebook_url,
        'has_valid_address': True,
        'id': str(company.id),
        'is_exporting_goods': False,
        'is_exporting_services': False,
        'is_identity_check_message_sent': False,
        'is_publishable': company.is_publishable,
        'is_published': False,
        'is_published_find_a_supplier': False,
        'is_published_investment_support_directory': False,
        'is_registration_letter_sent': False,
        'is_uk_isd_company': False,
        'is_verification_letter_sent': False,
        'is_verified': False,
        'keywords': company.keywords,
        'linkedin_url': company.linkedin_url,
        'logo': None,
        'modified': '2016-11-23T11:21:10.977518Z',
        'po_box': company.po_box,
        'sectors': company.sectors,
        'slug': 'test-company',
        'summary': company.summary,
        'supplier_case_studies': [],
        'twitter_url': company.twitter_url,
        'verified_with_code': False,
        'verified_with_companies_house_oauth2': False,
        'verified_with_identity_check': False,
        'verified_with_preverified_enrolment': False,
    }
    expected.update(VALID_REQUEST_DATA)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == expected
@freeze_time('2016-11-23T11:21:10.977518Z')
@pytest.mark.django_db
def test_company_update_with_mock_patch(authed_client, authed_supplier):
    """PATCH /company partially updates the company and echoes the full record.

    NOTE(review): the name suggests mock.patch but this exercises HTTP PATCH —
    consider renaming to test_company_update_with_patch.
    """
    company = factories.CompanyFactory(
        number='01234567',
    )
    # Attach the company to the authenticated supplier so the view operates on it.
    authed_supplier.company = company
    authed_supplier.save()
    response = authed_client.patch(
        reverse('company'), VALID_REQUEST_DATA, format='json'
    )
    # Full serialized company as the API is expected to return it;
    # 'modified' is pinned by the freeze_time decorator above.
    expected = {
        'company_type': company_types.COMPANIES_HOUSE,
        'email_address': company.email_address,
        'email_full_name': company.email_full_name,
        'employees': company.employees,
        'expertise_countries': company.expertise_countries,
        'expertise_industries': company.expertise_industries,
        'expertise_languages': company.expertise_languages,
        'expertise_products_services': company.expertise_products_services,
        'expertise_regions': company.expertise_regions,
        'export_destinations': ['DE'],
        'export_destinations_other': 'LY',
        'facebook_url': company.facebook_url,
        'has_valid_address': True,
        'id': str(company.id),
        'is_exporting_goods': False,
        'is_exporting_services': False,
        'is_identity_check_message_sent': False,
        'is_publishable': company.is_publishable,
        'is_published': False,
        'is_published_find_a_supplier': False,
        'is_published_investment_support_directory': False,
        'is_registration_letter_sent': False,
        'is_uk_isd_company': False,
        'is_verification_letter_sent': False,
        'is_verified': False,
        'keywords': company.keywords,
        'linkedin_url': company.linkedin_url,
        'logo': None,
        'modified': '2016-11-23T11:21:10.977518Z',
        'po_box': company.po_box,
        'sectors': company.sectors,
        'slug': 'test-company',
        'summary': company.summary,
        'supplier_case_studies': [],
        'twitter_url': company.twitter_url,
        'verified_with_code': False,
        'verified_with_companies_house_oauth2': False,
        'verified_with_identity_check': False,
        'verified_with_preverified_enrolment': False,
    }
    # Fields supplied in the request payload override the defaults above.
    expected.update(VALID_REQUEST_DATA)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == expected
@freeze_time('2016-11-23T11:21:10.977518Z')
@pytest.mark.django_db
def test_company_not_update_modified(authed_client, authed_supplier):
    """A client-supplied 'modified' timestamp is ignored on both PUT and PATCH."""
    company = factories.CompanyFactory(
        number='01234567',
        has_exported_before=True,
    )
    authed_supplier.company = company
    authed_supplier.save()
    payload = dict(VALID_REQUEST_DATA, modified='2013-03-09T23:28:53.977518Z')
    for request_method in (authed_client.put, authed_client.patch):
        response = request_method(reverse('company'), payload, format='json')
        assert response.status_code == status.HTTP_200_OK
        # 'modified' stays at the frozen time; the injected value was discarded.
        assert response.json()['modified'] == '2016-11-23T11:21:10.977518Z'
@pytest.mark.django_db
@mock.patch('company.views.CompanyNumberValidatorAPIView.get_serializer')
def test_company_number_validator_rejects_invalid_data(
    mock_get_serializer, client
):
    """When the serializer reports errors the view responds 400 with those errors."""
    invalid_serializer = MockInvalidSerializer(data={})
    mock_get_serializer.return_value = invalid_serializer
    response = client.get(reverse('validate-company-number'), {})
    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.json() == invalid_serializer.errors
@pytest.mark.django_db
@mock.patch('company.views.CompanyNumberValidatorAPIView.get_serializer')
def test_company_number_validator_accepts_valid_data(
    mock_get_serializer, client
):
    """A serializer that validates cleanly yields a 200 response."""
    valid_serializer = MockValidSerializer(data={})
    mock_get_serializer.return_value = valid_serializer
    response = client.get(reverse('validate-company-number'), {})
    assert response.status_code == status.HTTP_200_OK
def mock_save(self, name, content, max_length=None):
    """Stand-in for Storage.save(): skip disk I/O, return a mock whose url is the upload's name."""
    saved_file = mock.Mock(url=content.name)
    return saved_file
def get_test_image(extension='png'):
    """Render a small 300x50 RGB image in memory and wrap it as an uploaded file."""
    buffer = BytesIO()
    image = Image.new('RGB', (300, 50))
    image.save(buffer, extension)
    buffer.seek(0)
    return SimpleUploadedFile(f'test.{extension}', buffer.read())
@pytest.fixture(scope='session')
def image_one(tmpdir_factory):
    """Session-scoped in-memory test image (PNG by default)."""
    # NOTE(review): tmpdir_factory appears unused here — confirm it is needed.
    return get_test_image()
@pytest.fixture(scope='session')
def image_two(tmpdir_factory):
    """Second session-scoped in-memory test image."""
    # NOTE(review): tmpdir_factory appears unused here — confirm it is needed.
    return get_test_image()
@pytest.fixture(scope='session')
def image_three(tmpdir_factory):
    """Third session-scoped in-memory test image."""
    # NOTE(review): tmpdir_factory appears unused here — confirm it is needed.
    return get_test_image()
@pytest.fixture(scope='session')
def video(tmpdir_factory):
    """Session-scoped stand-in 'video' upload (just a small bytes buffer)."""
    # NOTE(review): tmpdir_factory appears unused here — confirm it is needed.
    return BytesIO(b'some text')
@pytest.fixture
def case_study_data(image_one, image_two, image_three, video, company):
    """Complete POST payload for creating a company case study, including media uploads."""
    return {
        'company': company.pk,
        'title': 'a title',
        'description': 'a description',
        'sector': choices.INDUSTRIES[1][0],
        'website': 'http://www.example.com',
        'keywords': 'good, great',
        'image_one': image_one,
        'image_two': image_two,
        'image_three': image_three,
        'video_one': video,
        'testimonial': 'very nice',
        'testimonial_name': '<NAME>',
        'testimonial_job_title': 'Evil overlord',
        'testimonial_company': 'Death Eaters',
    }
@pytest.fixture
def company_data():
    """Minimal valid payload for creating a company."""
    return {
        'number': '01234567',
        'name': 'Test Company',
        'website': 'http://example.com',
        'description': 'Company description',
        'has_exported_before': True,
        'date_of_creation': '2010-10-10',
    }
@pytest.fixture
def api_client():
    """Unauthenticated DRF test client."""
    return APIClient()
@pytest.fixture
def company():
    """Verified company persisted from VALID_REQUEST_DATA."""
    return models.Company.objects.create(
        verified_with_code=True,
        email_address='<EMAIL>',
        **VALID_REQUEST_DATA
    )
@pytest.fixture
def private_profile():
    """Saved company that is NOT verified (so its profile is private)."""
    company = models.Company(**default_public_profile_data)
    company.number = '0123456A'  # distinct number to avoid clashing with other fixtures
    company.verified_with_code = False
    company.save()
    return company
@pytest.fixture
def public_profile():
    """Saved company published in the find-a-supplier directory."""
    company = models.Company(**default_public_profile_data)
    company.number = '0123456B'  # distinct number to avoid clashing with other fixtures
    company.is_published_find_a_supplier = True
    company.save()
    return company
@pytest.fixture
def public_profile_with_case_study():
    """Published company with exactly one attached case study."""
    company = factories.CompanyFactory(
        is_published_find_a_supplier=True
    )
    factories.CompanyCaseStudyFactory(company=company)
    return company
@pytest.fixture
def public_profile_with_case_studies():
    """Published company with two attached case studies."""
    company = factories.CompanyFactory(
        is_published_find_a_supplier=True
    )
    factories.CompanyCaseStudyFactory(company=company)
    factories.CompanyCaseStudyFactory(company=company)
    return company
@pytest.fixture
def public_profile_software():
    """Published company in the software sector (for sector-filter tests)."""
    company = models.Company(**default_public_profile_data)
    company.number = '0123456C'
    company.is_published_find_a_supplier = True
    company.sectors = ['SOFTWARE_AND_COMPUTER_SERVICES']
    company.save()
    return company
@pytest.fixture
def public_profile_cars():
    """Published company in the automotive sector (for sector-filter tests)."""
    company = models.Company(**default_public_profile_data)
    company.number = '0123456D'
    company.is_published_find_a_supplier = True
    company.sectors = ['AUTOMOTIVE']
    company.save()
    return company
@pytest.fixture
def public_profile_smart_cars():
    """Published company spanning both software and automotive sectors."""
    company = models.Company(**default_public_profile_data)
    company.number = '0123456E'
    company.is_published_find_a_supplier = True
    company.sectors = ['SOFTWARE_AND_COMPUTER_SERVICES', 'AUTOMOTIVE']
    company.save()
    return company
@pytest.fixture
def supplier_case_study(case_study_data, company):
    """Persisted case study built from the text fields of case_study_data.

    Media fields (images/video) are deliberately omitted so no file handling
    is involved.
    """
    text_fields = (
        'title', 'description', 'sector', 'website', 'keywords',
        'testimonial', 'testimonial_name', 'testimonial_job_title',
        'testimonial_company',
    )
    field_values = {name: case_study_data[name] for name in text_fields}
    return models.CompanyCaseStudy.objects.create(company=company, **field_values)
@pytest.fixture
def supplier(company):
    """Supplier linked to the `company` fixture."""
    return Supplier.objects.create(
        sso_id=2,
        company_email='<EMAIL>',
        company=company,
    )
@pytest.fixture
def search_data(settings):
    """Seed three published companies (wolf/aardvark/grapeshot) plus two case
    studies, then refresh the Elasticsearch index so searches see them.

    The wolf and aardvark companies share expertise values so tests can assert
    overlapping-filter behaviour; grapeshot differs on most dimensions.
    """
    wolf_company = factories.CompanyFactory(
        name='Wolf limited',
        description='Providing the stealth and prowess of wolves.',
        summary='Hunts in packs common',
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        keywords='Packs, Hunting, Stark, Teeth',
        expertise_industries=[sectors.AEROSPACE, sectors.AIRPORTS],
        expertise_regions=[
            choices.EXPERTISE_REGION_CHOICES[4][0],
            choices.EXPERTISE_REGION_CHOICES[5][0]
        ],
        expertise_languages=[
            choices.EXPERTISE_LANGUAGES[0][0],
            choices.EXPERTISE_LANGUAGES[2][0]
        ],
        expertise_countries=[
            choices.COUNTRY_CHOICES[23][0],
            choices.COUNTRY_CHOICES[24][0]
        ],
        expertise_products_services={'other': ['Regulatory', 'Finance', 'IT']},
        id=1,
    )
    aardvark_company = factories.CompanyFactory(
        name='<NAME>',
        description='Providing the power and beauty of Aardvarks.',
        summary='Like an Aardvark common',
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        keywords='Ants, Tongue, Anteater',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=[choices.EXPERTISE_REGION_CHOICES[4][0]],
        expertise_languages=[choices.EXPERTISE_LANGUAGES[0][0]],
        expertise_countries=[choices.COUNTRY_CHOICES[23][0]],
        expertise_products_services={'other': ['Regulatory', 'Finance', 'IT']},
        id=2,
    )
    factories.CompanyFactory(
        name='Grapeshot limited',
        description='Providing the destructiveness of grapeshot.',
        summary='Like naval warfare common',
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        keywords='Pirates, Ocean, Ship',
        expertise_industries=[sectors.AIRPORTS, sectors.FOOD_AND_DRINK],
        expertise_regions=[choices.EXPERTISE_REGION_CHOICES[5][0],
                           choices.EXPERTISE_REGION_CHOICES[8][0]
                           ],
        expertise_languages=[choices.EXPERTISE_LANGUAGES[2][0],
                             choices.EXPERTISE_LANGUAGES[6][0]],
        expertise_countries=[choices.COUNTRY_CHOICES[24][0],
                             choices.COUNTRY_CHOICES[27][0]
                             ],
        expertise_products_services={'other': ['Regulatory', 'IT']},
        id=3,
    )
    factories.CompanyCaseStudyFactory(
        id=1,
        company=wolf_company,
        title='Thick case study',
        description='Gold is delicious.',
    )
    factories.CompanyCaseStudyFactory(
        id=2,
        company=aardvark_company,
        title='Thick case study',
        description='We determined lead sinks in water.',
    )
    # Make the freshly indexed documents visible to searches immediately.
    Index(settings.ELASTICSEARCH_COMPANY_INDEX_ALIAS).refresh()
@pytest.fixture
def search_companies_highlighting_data(settings):
    """Seed two published companies — one with a very long description — to
    exercise search-result highlighting/truncation, then refresh the index."""
    factories.CompanyFactory(
        name='Wolf limited',
        description=(
            'Providing the stealth and prowess of wolves. This is a very long '
            'thing about wolf stuff. Lets see what happens in the test when '
            'ES encounters a long description. Perhaps it will concatenate. '
        ) + ('It is known. ' * 30) + (
            'The wolf cries at night.'
        ),
        summary='Hunts in packs',
        is_published_find_a_supplier=True,
        keywords='Packs, Hunting, Stark, Teeth',
        sectors=[sectors.AEROSPACE, sectors.AIRPORTS],
        id=1,
    )
    factories.CompanyFactory(
        name='Aardvark limited',
        description='Providing the power and beauty of Aardvarks.',
        summary='Like an Aardvark',
        is_published_find_a_supplier=True,
        keywords='Ants, Tongue, Anteater',
        sectors=[sectors.AEROSPACE],
        id=2,
    )
    # Make the freshly indexed documents visible to searches immediately.
    Index(settings.ELASTICSEARCH_COMPANY_INDEX_ALIAS).refresh()
@pytest.fixture
def search_highlighting_data(settings):
    """Like search_companies_highlighting_data, but both companies are also
    published in the investment support directory."""
    factories.CompanyFactory(
        name='Wolf limited',
        description=(
            'Providing the stealth and prowess of wolves. This is a very long '
            'thing about wolf stuff. Lets see what happens in the test when '
            'ES encounters a long description. Perhaps it will concatenate. '
        ) + ('It is known. ' * 30) + (
            'The wolf cries at night.'
        ),
        summary='Hunts in packs',
        is_published_find_a_supplier=True,
        is_published_investment_support_directory=True,
        keywords='Packs, Hunting, Stark, Teeth',
        id=1,
    )
    factories.CompanyFactory(
        name='Aardvark limited',
        description='Providing the power and beauty of Aardvarks.',
        summary='Like an Aardvark',
        is_published_find_a_supplier=True,
        is_published_investment_support_directory=True,
        keywords='Ants, Tongue, Anteater',
        id=2,
    )
    # Make the freshly indexed documents visible to searches immediately.
    Index(settings.ELASTICSEARCH_COMPANY_INDEX_ALIAS).refresh()
@pytest.fixture
def search_data_and_or(settings):
    """Seed nine near-identical 'wolf' companies for AND/OR filter tests.

    Companies 1-2 differ on every expertise dimension; companies 3-7 each
    blank out exactly one dimension; company 8 lacks the 'wolf' name match;
    company 9 matches everything but is unpublished. Refreshes the index at
    the end so searches see the data.
    """
    factories.CompanyFactory(
        name='Wolf limited',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_EAST'],
        expertise_countries=['AF'],
        expertise_languages=['ab'],
        expertise_products_services={
            'financial': ['Accounting and tax']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=1,
    )
    factories.CompanyFactory(
        name='Wolf corp',
        expertise_industries=[sectors.AIRPORTS],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=2,
    )
    factories.CompanyFactory(
        name='Wolf are us',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={},  # no products/services
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=3
    )
    factories.CompanyFactory(
        name='Ultra Wolf',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=[],  # no countries
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=4,
    )
    factories.CompanyFactory(
        name='Wolf nation',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=[],  # no languages
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=5,
    )
    factories.CompanyFactory(
        name='company of the Wolf',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=[],  # no regions
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=6,
    )
    factories.CompanyFactory(
        name='year of the wolf',
        expertise_industries=[],  # no industries
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=7,
    )
    factories.CompanyFactory(
        name='Fish corp',  # missing 'wolf match'
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=True,
        is_published_find_a_supplier=True,
        id=8,
    )
    factories.CompanyFactory(
        name='Wolf wild corp',
        expertise_industries=[sectors.AEROSPACE],
        expertise_regions=['NORTH_WEST'],
        expertise_languages=['aa'],
        expertise_countries=['AL'],
        expertise_products_services={
            'management-consulting': ['Business development']
        },
        is_published_investment_support_directory=False,  # not published
        is_published_find_a_supplier=False,  # not published
        id=9,
    )
    # Make the freshly indexed documents visible to searches immediately.
    Index(settings.ELASTICSEARCH_COMPANY_INDEX_ALIAS).refresh()
@pytest.fixture
def search_companies_ordering_data(settings):
factories.CompanyFactory(
name='Wolf limited',
description='',
summary='Hunts in packs',
is_published_find_a_supplier=True,
keywords='Packs, Hunting, Stark, Wolf',
sectors=[sectors.AEROSPACE, sectors.AIRPORTS],
id=1,
)
wolf_three = factories.CompanyFactory(
name='Wolf from Gladiators limited',
description='',
summary='Hunters',
is_published_find_a_supplier=True,
keywords='Packs, Hunting, Stark, Teeth',
sectors=[sectors.FOOD_AND_DRINK, sectors.AIRPORTS],
id=2,
)
wolf_one_company = factories.CompanyFactory(
name='Wolf a kimbo Limited',
description='pack hunters',
summary='Hunts in packs',
is_published_find_a_supplier=True,
| |
at this stage?
vm.events_enabled = False
self._test_generic_bool_property(vm, 'autostart', False)
@vanir.tests.skipUnlessDom0
def test_281_autostart_systemd(self):
    """Setting/clearing/resetting autostart toggles the per-VM systemd unit symlink."""
    vm = self.get_vm()
    self.assertFalse(os.path.exists(
        '/etc/systemd/system/multi-user.target.wants/'
        'vanir-vm@{}.service'.format(vm.name)),
        "systemd service enabled before setting autostart")
    vm.autostart = True
    self.assertTrue(os.path.exists(
        '/etc/systemd/system/multi-user.target.wants/'
        'vanir-vm@{}.service'.format(vm.name)),
        "systemd service not enabled by autostart=True")
    vm.autostart = False
    self.assertFalse(os.path.exists(
        '/etc/systemd/system/multi-user.target.wants/'
        'vanir-vm@{}.service'.format(vm.name)),
        "systemd service not disabled by autostart=False")
    vm.autostart = True
    # Deleting the property resets it to default, which must also remove the unit.
    del vm.autostart
    self.assertFalse(os.path.exists(
        '/etc/systemd/system/multi-user.target.wants/'
        'vanir-vm@{}.service'.format(vm.name)),
        "systemd service not disabled by resetting autostart")
def test_290_management_dispvm(self):
    """management_dispvm defaults to the app-level value and accepts explicit VMs."""
    vm = self.get_vm()
    vm2 = self.get_vm('test2', qid=2)
    self.app.management_dispvm = None
    self.assertPropertyDefaultValue(vm, 'management_dispvm', None)
    self.app.management_dispvm = vm
    try:
        self.assertPropertyDefaultValue(vm, 'management_dispvm', vm)
        self.assertPropertyValue(vm, 'management_dispvm',
            'test-inst-test2', vm2)
    finally:
        # Restore app-wide state so later tests are unaffected.
        self.app.management_dispvm = None
def test_291_management_dispvm_template_based(self):
    """Template-based VMs take management_dispvm from their template before the app default."""
    tpl = self.get_vm(name='tpl', cls=vanir.vm.templatevm.TemplateVM)
    vm = self.get_vm(cls=vanir.vm.appvm.AppVM, template=tpl, qid=2)
    vm2 = self.get_vm('test2', qid=3)
    del vm.volumes
    self.app.management_dispvm = None
    try:
        self.assertPropertyDefaultValue(vm, 'management_dispvm', None)
        self.app.management_dispvm = vm
        self.assertPropertyDefaultValue(vm, 'management_dispvm', vm)
        # Once set on the template, the template's value wins over the app-wide one.
        tpl.management_dispvm = vm2
        self.assertPropertyDefaultValue(vm, 'management_dispvm', vm2)
        self.assertPropertyValue(vm, 'management_dispvm',
            'test-inst-test2', vm2)
    finally:
        # Restore app-wide state so later tests are unaffected.
        self.app.management_dispvm = None
@unittest.skip('TODO')
def test_320_seamless_gui_mode(self):
    """seamless_gui_mode behaves as a plain boolean property (currently skipped)."""
    vm = self.get_vm()
    self._test_generic_bool_property(vm, 'seamless_gui_mode')
    # TODO: reject setting to True when guiagent_installed is false
def test_330_mac(self):
    """mac has a non-None default, accepts explicit values, and resets on delete."""
    vm = self.get_vm()
    # TODO: calculate proper default here
    default_mac = vm.mac
    self.assertIsNotNone(default_mac)
    self.assertPropertyDefaultValue(vm, 'mac', default_mac)
    self.assertPropertyValue(vm, 'mac', '00:11:22:33:44:55',
        '00:11:22:33:44:55', '00:11:22:33:44:55')
    del vm.mac
    self.assertPropertyDefaultValue(vm, 'mac', default_mac)
def test_331_mac_invalid(self):
    """Non-string, malformed, and over-long MAC values are all rejected."""
    vm = self.get_vm()
    for bad_mac in (123, 'invalid', '00:11:22:33:44:55:66'):
        self.assertPropertyInvalidValue(vm, 'mac', bad_mac)
def test_340_default_user(self):
    """default_user defaults to 'user', accepts strings, and stringifies non-strings."""
    vm = self.get_vm()
    self.assertPropertyDefaultValue(vm, 'default_user', 'user')
    self.assertPropertyValue(vm, 'default_user', 'someuser', 'someuser',
        'someuser')
    del vm.default_user
    self.assertPropertyDefaultValue(vm, 'default_user', 'user')
    # Integers are coerced to their string form.
    self.assertPropertyValue(vm, 'default_user', 123, '123', '123')
    vm.default_user = 'user'
    # TODO: check propagation for template-based VMs
@unittest.skip('TODO')
def test_350_timezone(self):
    """timezone defaults to 'localtime' and accepts UTC offsets (currently skipped)."""
    vm = self.get_vm()
    self.assertPropertyDefaultValue(vm, 'timezone', 'localtime')
    self.assertPropertyValue(vm, 'timezone', 0, 0, '0')
    del vm.timezone
    self.assertPropertyDefaultValue(vm, 'timezone', 'localtime')
    self.assertPropertyValue(vm, 'timezone', '0', 0, '0')
    self.assertPropertyValue(vm, 'timezone', -3600, -3600, '-3600')
    self.assertPropertyValue(vm, 'timezone', 7200, 7200, '7200')
@unittest.skip('TODO')
def test_350_timezone_invalid(self):
    """Non-numeric timezone strings are rejected (currently skipped)."""
    vm = self.get_vm()
    self.assertPropertyInvalidValue(vm, 'timezone', 'xxx')
@unittest.skip('TODO')
def test_360_drive(self):
    """drive defaults to None (currently skipped).

    NOTE(review): the commented-out execute_tests block below sketches the
    intended value normalization — confirm before re-enabling.
    """
    vm = self.get_vm()
    self.assertPropertyDefaultValue(vm, 'drive', None)
    # self.execute_tests('drive', [
    #     ('hd:dom0:/tmp/drive.img', 'hd:dom0:/tmp/drive.img', True),
    #     ('hd:/tmp/drive.img', 'hd:dom0:/tmp/drive.img', True),
    #     ('cdrom:dom0:/tmp/drive.img', 'cdrom:dom0:/tmp/drive.img', True),
    #     ('cdrom:/tmp/drive.img', 'cdrom:dom0:/tmp/drive.img', True),
    #     ('/tmp/drive.img', 'cdrom:dom0:/tmp/drive.img', True),
    #     ('hd:drive.img', '', False),
    #     ('drive.img', '', False),
    # ])
def test_400_backup_timestamp(self):
    """backup_timestamp defaults to None and stores epoch seconds (int or numeric str)."""
    vm = self.get_vm()
    timestamp = datetime.datetime(2016, 1, 1, 12, 14, 2)
    # NOTE(review): strftime('%s') is a platform-specific (glibc) extension,
    # not portable — confirm this is acceptable for the supported platforms.
    timestamp_str = timestamp.strftime('%s')
    self.assertPropertyDefaultValue(vm, 'backup_timestamp', None)
    self.assertPropertyValue(vm, 'backup_timestamp', int(timestamp_str),
        int(timestamp_str), timestamp_str)
    del vm.backup_timestamp
    self.assertPropertyDefaultValue(vm, 'backup_timestamp', None)
    # Numeric strings are coerced to int.
    self.assertPropertyValue(vm, 'backup_timestamp', timestamp_str,
        int(timestamp_str))
def test_401_backup_timestamp_invalid(self):
    """Non-numeric strings and None are rejected for backup_timestamp."""
    vm = self.get_vm()
    for bad_value in ('xxx', None):
        self.assertPropertyInvalidValue(vm, 'backup_timestamp', bad_value)
def test_500_property_migrate_virt_mode(self):
    """Legacy 'hvm' XML property migrates to virt_mode ('hvm'/'pv') and is removed."""
    xml_template = '''
    <domain class="VanirVM" id="domain-1">
        <properties>
            <property name="qid">1</property>
            <property name="name">testvm</property>
            <property name="label" ref="label-1" />
            <property name="hvm">{hvm_value}</property>
        </properties>
    </domain>
    '''
    xml = lxml.etree.XML(xml_template.format(hvm_value='True'))
    vm = vanir.vm.vanirvm.VanirVM(self.app, xml)
    self.assertEqual(vm.virt_mode, 'hvm')
    # The legacy attribute must no longer exist after migration.
    with self.assertRaises(AttributeError):
        vm.hvm
    xml = lxml.etree.XML(xml_template.format(hvm_value='False'))
    vm = vanir.vm.vanirvm.VanirVM(self.app, xml)
    self.assertEqual(vm.virt_mode, 'pv')
    with self.assertRaises(AttributeError):
        vm.hvm
def test_600_libvirt_xml_pv(self):
    """PV domain: create_config_file() output matches the expected libvirt XML."""
    expected = '''<domain type="xen">
        <name>test-inst-test</name>
        <uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
        <memory unit="MiB">500</memory>
        <currentMemory unit="MiB">400</currentMemory>
        <vcpu placement="static">2</vcpu>
        <os>
            <type arch="x86_64" machine="xenpv">linux</type>
            <kernel>/tmp/kernel/vmlinuz</kernel>
            <initrd>/tmp/kernel/initramfs</initrd>
            <cmdline>root=/dev/mapper/dmroot ro nomodeset console=hvc0 rd_NO_PLYMOUTH rd.plymouth.enable=0 plymouth.enable=0 nopat</cmdline>
        </os>
        <features>
        </features>
        <clock offset='utc' adjustment='reset'>
            <timer name="tsc" mode="native"/>
        </clock>
        <on_poweroff>destroy</on_poweroff>
        <on_reboot>destroy</on_reboot>
        <on_crash>destroy</on_crash>
        <devices>
            <disk type="block" device="disk">
                <driver name="phy" />
                <source dev="/tmp/kernel/modules.img" />
                <target dev="xvdd" />
                <backenddomain name="dom0" />
            </disk>
            <console type="pty">
                <target type="xen" port="0"/>
            </console>
        </devices>
    </domain>
    '''
    my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
    vm = self.get_vm(uuid=my_uuid)
    vm.netvm = None
    vm.virt_mode = 'pv'
    # Create dummy kernel files under a temp base dir so the kernel property resolves.
    with unittest.mock.patch('vanir.config.vanir_base_dir',
            '/tmp/vanir-test'):
        kernel_dir = '/tmp/vanir-test/vm-kernels/dummy'
        os.makedirs(kernel_dir, exist_ok=True)
        open(os.path.join(kernel_dir, 'vmlinuz'), 'w').close()
        open(os.path.join(kernel_dir, 'initramfs'), 'w').close()
        self.addCleanup(shutil.rmtree, '/tmp/vanir-test')
        vm.kernel = 'dummy'
    # tests for storage are later
    vm.volumes['kernel'] = unittest.mock.Mock(**{
        'kernels_dir': '/tmp/kernel',
        'block_device.return_value.domain': 'dom0',
        'block_device.return_value.script': None,
        'block_device.return_value.path': '/tmp/kernel/modules.img',
        'block_device.return_value.devtype': 'disk',
        'block_device.return_value.name': 'kernel',
    })
    libvirt_xml = vm.create_config_file()
    self.assertXMLEqual(lxml.etree.XML(libvirt_xml),
        lxml.etree.XML(expected))
def test_600_libvirt_xml_hvm(self):
    """HVM domain (no kernel): create_config_file() output matches expected XML."""
    expected = '''<domain type="xen">
        <name>test-inst-test</name>
        <uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
        <memory unit="MiB">400</memory>
        <currentMemory unit="MiB">400</currentMemory>
        <vcpu placement="static">2</vcpu>
        <cpu mode='host-passthrough'>
            <!-- disable nested HVM -->
            <feature name='vmx' policy='disable'/>
            <feature name='svm' policy='disable'/>
            <!-- disable SMAP inside VM, because of Linux bug -->
            <feature name='smap' policy='disable'/>
        </cpu>
        <os>
            <type arch="x86_64" machine="xenfv">hvm</type>
            <!--
                For the libxl backend libvirt switches between OVMF (UEFI)
                and SeaBIOS based on the loader type. This has nothing to
                do with the hvmloader binary.
            -->
            <loader type="rom">hvmloader</loader>
            <boot dev="cdrom" />
            <boot dev="hd" />
        </os>
        <features>
            <pae/>
            <acpi/>
            <apic/>
            <viridian/>
        </features>
        <clock offset="variable" adjustment="0" basis="localtime" />
        <on_poweroff>destroy</on_poweroff>
        <on_reboot>destroy</on_reboot>
        <on_crash>destroy</on_crash>
        <devices>
            <!-- server_ip is the address of stubdomain. It hosts it's own DNS server. -->
            <emulator type="stubdom-linux" />
            <input type="tablet" bus="usb"/>
            <video>
                <model type="vga"/>
            </video>
            <graphics type="vanir"/>
        </devices>
    </domain>
    '''
    my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
    vm = self.get_vm(uuid=my_uuid)
    vm.netvm = None
    vm.virt_mode = 'hvm'
    libvirt_xml = vm.create_config_file()
    self.assertXMLEqual(lxml.etree.XML(libvirt_xml),
        lxml.etree.XML(expected))
def test_600_libvirt_xml_hvm_dom0_kernel(self):
    """HVM domain with a dom0-provided kernel: expects the default cmdline in XML."""
    expected = '''<domain type="xen">
        <name>test-inst-test</name>
        <uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
        <memory unit="MiB">500</memory>
        <currentMemory unit="MiB">400</currentMemory>
        <vcpu placement="static">2</vcpu>
        <cpu mode='host-passthrough'>
            <!-- disable nested HVM -->
            <feature name='vmx' policy='disable'/>
            <feature name='svm' policy='disable'/>
            <!-- disable SMAP inside VM, because of Linux bug -->
            <feature name='smap' policy='disable'/>
        </cpu>
        <os>
            <type arch="x86_64" machine="xenfv">hvm</type>
            <!--
                For the libxl backend libvirt switches between OVMF (UEFI)
                and SeaBIOS based on the loader type. This has nothing to
                do with the hvmloader binary.
            -->
            <loader type="rom">hvmloader</loader>
            <boot dev="cdrom" />
            <boot dev="hd" />
            <cmdline>root=/dev/mapper/dmroot ro nomodeset console=hvc0 rd_NO_PLYMOUTH rd.plymouth.enable=0 plymouth.enable=0 nopat</cmdline>
        </os>
        <features>
            <pae/>
            <acpi/>
            <apic/>
            <viridian/>
        </features>
        <clock offset="variable" adjustment="0" basis="localtime" />
        <on_poweroff>destroy</on_poweroff>
        <on_reboot>destroy</on_reboot>
        <on_crash>destroy</on_crash>
        <devices>
            <!-- server_ip is the address of stubdomain. It hosts it's own DNS server. -->
            <emulator type="stubdom-linux" />
            <input type="tablet" bus="usb"/>
            <video>
                <model type="vga"/>
            </video>
            <graphics type="vanir"/>
        </devices>
    </domain>
    '''
    my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
    vm = self.get_vm(uuid=my_uuid)
    vm.netvm = None
    vm.virt_mode = 'hvm'
    vm.features['qrexec'] = True
    # Create dummy kernel files under a temp base dir so the kernel property resolves.
    with unittest.mock.patch('vanir.config.vanir_base_dir',
            '/tmp/vanir-test'):
        kernel_dir = '/tmp/vanir-test/vm-kernels/dummy'
        os.makedirs(kernel_dir, exist_ok=True)
        open(os.path.join(kernel_dir, 'vmlinuz'), 'w').close()
        open(os.path.join(kernel_dir, 'initramfs'), 'w').close()
        self.addCleanup(shutil.rmtree, '/tmp/vanir-test')
        vm.kernel = 'dummy'
    libvirt_xml = vm.create_config_file()
    self.assertXMLEqual(lxml.etree.XML(libvirt_xml),
        lxml.etree.XML(expected))
def test_600_libvirt_xml_hvm_dom0_kernel_kernelopts(self):
    """HVM + dom0 kernel: default-kernelopts-common.txt overrides the cmdline."""
    expected = '''<domain type="xen">
        <name>test-inst-test</name>
        <uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
        <memory unit="MiB">500</memory>
        <currentMemory unit="MiB">400</currentMemory>
        <vcpu placement="static">2</vcpu>
        <cpu mode='host-passthrough'>
            <!-- disable nested HVM -->
            <feature name='vmx' policy='disable'/>
            <feature name='svm' policy='disable'/>
            <!-- disable SMAP inside VM, because of Linux bug -->
            <feature name='smap' policy='disable'/>
        </cpu>
        <os>
            <type arch="x86_64" machine="xenfv">hvm</type>
            <!--
                For the libxl backend libvirt switches between OVMF (UEFI)
                and SeaBIOS based on the loader type. This has nothing to
                do with the hvmloader binary.
            -->
            <loader type="rom">hvmloader</loader>
            <boot dev="cdrom" />
            <boot dev="hd" />
            <cmdline>kernel specific options nopat</cmdline>
        </os>
        <features>
            <pae/>
            <acpi/>
            <apic/>
            <viridian/>
        </features>
        <clock offset="variable" adjustment="0" basis="localtime" />
        <on_poweroff>destroy</on_poweroff>
        <on_reboot>destroy</on_reboot>
        <on_crash>destroy</on_crash>
        <devices>
            <!-- server_ip is the address of stubdomain. It hosts it's own DNS server. -->
            <emulator type="stubdom-linux" />
            <input type="tablet" bus="usb"/>
            <video>
                <model type="vga"/>
            </video>
            <graphics type="vanir"/>
        </devices>
    </domain>
    '''
    my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
    vm = self.get_vm(uuid=my_uuid)
    vm.netvm = None
    vm.virt_mode = 'hvm'
    vm.features['qrexec'] = True
    # Create dummy kernel files plus a per-kernel default opts file; the
    # trailing whitespace/newline in the file must be stripped in the output.
    with unittest.mock.patch('vanir.config.vanir_base_dir',
            '/tmp/vanir-test'):
        kernel_dir = '/tmp/vanir-test/vm-kernels/dummy'
        os.makedirs(kernel_dir, exist_ok=True)
        open(os.path.join(kernel_dir, 'vmlinuz'), 'w').close()
        open(os.path.join(kernel_dir, 'initramfs'), 'w').close()
        with open(os.path.join(kernel_dir,
                'default-kernelopts-common.txt'), 'w') as f:
            f.write('kernel specific options \n')
        self.addCleanup(shutil.rmtree, '/tmp/vanir-test')
        vm.kernel = 'dummy'
    libvirt_xml = vm.create_config_file()
    self.assertXMLEqual(lxml.etree.XML(libvirt_xml),
        lxml.etree.XML(expected))
def test_600_libvirt_xml_pvh(self):
    """PVH domain: create_config_file() output matches the expected libvirt XML."""
    expected = '''<domain type="xen">
        <name>test-inst-test</name>
        <uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
        <memory unit="MiB">500</memory>
        <currentMemory unit="MiB">400</currentMemory>
        <vcpu placement="static">2</vcpu>
        <cpu mode='host-passthrough'>
            <!-- disable nested HVM -->
            <feature name='vmx' policy='disable'/>
            <feature name='svm' policy='disable'/>
            <!-- disable SMAP inside VM, because of Linux bug -->
            <feature name='smap' policy='disable'/>
        </cpu>
        <os>
            <type arch="x86_64" machine="xenpvh">xenpvh</type>
            <kernel>/tmp/kernel/vmlinuz</kernel>
            <initrd>/tmp/kernel/initramfs</initrd>
            <cmdline>root=/dev/mapper/dmroot ro nomodeset console=hvc0 rd_NO_PLYMOUTH rd.plymouth.enable=0 plymouth.enable=0 nopat</cmdline>
        </os>
        <features>
            <pae/>
            <acpi/>
            <apic/>
            <viridian/>
        </features>
        <clock offset='utc' adjustment='reset'>
            <timer name="tsc" mode="native"/>
        </clock>
        <on_poweroff>destroy</on_poweroff>
        <on_reboot>destroy</on_reboot>
        <on_crash>destroy</on_crash>
        <devices>
            <disk type="block" device="disk">
                <driver name="phy" />
                <source dev="/tmp/kernel/modules.img" />
                <target dev="xvdd" />
                <backenddomain name="dom0" />
            </disk>
            <console type="pty">
                <target type="xen" port="0"/>
            </console>
        </devices>
    </domain>
    '''
    my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
    vm = self.get_vm(uuid=my_uuid)
    vm.netvm = None
    vm.virt_mode = 'pvh'
    # Create dummy kernel files under a temp base dir so the kernel property resolves.
    with unittest.mock.patch('vanir.config.vanir_base_dir',
            '/tmp/vanir-test'):
        kernel_dir = '/tmp/vanir-test/vm-kernels/dummy'
        os.makedirs(kernel_dir, exist_ok=True)
        open(os.path.join(kernel_dir, 'vmlinuz'), 'w').close()
        open(os.path.join(kernel_dir, 'initramfs'), 'w').close()
        self.addCleanup(shutil.rmtree, '/tmp/vanir-test')
        vm.kernel = 'dummy'
    # tests for storage are later
    vm.volumes['kernel'] = unittest.mock.Mock(**{
        'kernels_dir': '/tmp/kernel',
        'block_device.return_value.domain': 'dom0',
        'block_device.return_value.script': None,
        'block_device.return_value.path': '/tmp/kernel/modules.img',
        'block_device.return_value.devtype': 'disk',
        'block_device.return_value.name': 'kernel',
    })
    libvirt_xml = vm.create_config_file()
    self.assertXMLEqual(lxml.etree.XML(libvirt_xml),
        lxml.etree.XML(expected))
def test_600_libvirt_xml_pvh_no_membalance(self):
expected = '''<domain type="xen">
<name>test-inst-test</name>
<uuid>7db78950-c467-4863-94d1-af59806384ea</uuid>
<memory unit="MiB">400</memory>
<currentMemory unit="MiB">400</currentMemory>
<vcpu placement="static">2</vcpu>
<cpu mode='host-passthrough'>
<!-- disable nested HVM -->
<feature name='vmx' policy='disable'/>
<feature name='svm' policy='disable'/>
<!-- disable SMAP inside VM, because of Linux bug -->
<feature name='smap' policy='disable'/>
</cpu>
<os>
<type arch="x86_64" machine="xenpvh">xenpvh</type>
<kernel>/tmp/kernel/vmlinuz</kernel>
<initrd>/tmp/kernel/initramfs</initrd>
<cmdline>root=/dev/mapper/dmroot ro nomodeset console=hvc0 rd_NO_PLYMOUTH rd.plymouth.enable=0 plymouth.enable=0 nopat</cmdline>
</os>
<features>
<pae/>
<acpi/>
<apic/>
<viridian/>
</features>
<clock offset='utc' adjustment='reset'>
<timer name="tsc" mode="native"/>
</clock>
<on_poweroff>destroy</on_poweroff>
<on_reboot>destroy</on_reboot>
<on_crash>destroy</on_crash>
<devices>
<disk type="block" device="disk">
<driver name="phy" />
<source dev="/tmp/kernel/modules.img" />
<target dev="xvdd" />
<backenddomain name="dom0" />
</disk>
<console type="pty">
<target type="xen" port="0"/>
</console>
</devices>
</domain>
'''
my_uuid = '7db78950-c467-4863-94d1-af59806384ea'
vm | |
#!/usr/bin/env python2
import sys
from datetime import date, timedelta
from StringIO import StringIO
from copy import copy
import psycopg2
import codecs
import zipfile
#from settings.const import beltac_database_connect
#Can be run standalone as python beltacreader.py $path $filename
charset = 'cp1252' # yes, this isn't what the documentation suggests
def parse_time(time):
    """Convert a compact 'HHMM' time string into 'HH:MM:00'."""
    hours, minutes = time[0:2], time[2:4]
    return '%s:%s:00' % (hours, minutes)
def open_beltac(zip,filename, delivery):
    """Read *filename* from the zip archive, decode it with the module-wide
    charset and return its CRLF-separated lines (without the trailing empty
    element produced by the final line terminator). *delivery* is unused here.
    """
    raw = zip.read(filename).decode(charset)
    lines = raw.split('\r\n')
    return lines[:-1]
def simple_list_writer(conn,filename, arguments, data):
    """Serialize *data* (a list of dict-like rows) as TSV and COPY it into
    the table named *filename*. ``arguments`` supplies both the header line
    and the column order; None values become empty strings.
    """
    buf = StringIO()
    buf.write('\t'.join(arguments) + '\n')
    for row in data:
        fields = [unicode(row[col] or '') for col in arguments]
        buf.write('\t'.join(fields) + '\n')
    buf.seek(0)
    cur = conn.cursor()
    cur.copy_expert("COPY %s FROM STDIN USING DELIMITERS ' ' CSV HEADER" % (filename),buf)
    cur.close()
    buf.close()
def simple_dict_writer(conn,filename, arguments, data):
    """Serialize *data* (a dict keyed by the first column) as TSV and COPY it
    into the table named *filename*. The dict key provides the first column;
    remaining columns are looked up in each value via ``arguments[1:]``.
    """
    buf = StringIO()
    buf.write('\t'.join(arguments) + '\n')
    for key, row in data.items():
        fields = [unicode(key)] + [unicode(row[col] or '') for col in arguments[1:]]
        buf.write('\t'.join(fields) + '\n')
    buf.seek(0)
    cur = conn.cursor()
    cur.copy_expert("COPY %s FROM STDIN USING DELIMITERS ' ' CSV HEADER" % (filename),buf)
    cur.close()
    buf.close()
def simple_dict_list_writer(conn,filename, arguments, data):
    """Like simple_dict_writer, but each dict value is a *list* of rows:
    every row is emitted with the dict key as its first column.
    """
    buf = StringIO()
    buf.write('\t'.join(arguments) + '\n')
    for key, rows in data.items():
        for row in rows:
            fields = [unicode(key)] + [unicode(row[col] or '') for col in arguments[1:]]
            buf.write('\t'.join(fields) + '\n')
    buf.seek(0)
    cur = conn.cursor()
    cur.copy_expert("COPY %s FROM STDIN USING DELIMITERS ' ' CSV HEADER" % (filename),buf)
    cur.close()
    buf.close()
def add24hours(time):
    """Shift a time string past midnight by adding 24 to the hour field,
    e.g. '01:30:00' -> '25:30:00' (GTFS-style extended times).
    """
    shifted_hours = int(time[0:2]) + 24
    return str(shifted_hours) + time[2:]
def parse_timetables(zip,filename,validity):
    """Parse the timetable file into
    ``{trip_id: {'calendar': [...], 'stop': [...], 'note': [...]}}``.

    The file is a line-oriented state machine keyed on each line's first
    character:
      '#'  starts a new trip (flushing the previous one),
      '%'  range/cutoff header,
      '-'  calendar/block reference for the current trip,
      'n'  note reference (trip-level when seen before any stop),
      '>'  first stop (departure only),
      '.'  intermediate stop with a single arrival==departure time,
      '+'  intermediate stop with separate arrival and departure,
      '<'  final stop (arrival only).
    Times that wrap past midnight are detected by string comparison with the
    previous time (works because times are zero-padded) and shifted by 24h.
    """
    l_timetables = open_beltac(zip,filename, validity)
    timetables = {}
    current_id = None
    current_record = {}
    s_stationshort = None
    s_index = 0
    last_time = None
    for x in l_timetables:
        if x[0] == '#':
            # New trip header: flush the record collected so far.
            if current_id is not None:
                timetables[current_id] = current_record
            s_index = 0
            current_id = int(x[1:])
            current_record = {'calendar': [], 'stop': [], 'note': []}
        elif x[0] == '%':
            # NOTE(review): s_range/s_cutoff are parsed but never used.
            s_range,s_cutoff = [int(y) for y in x[1:].split('|')]
        elif x[0] == '-':
            v_calendarid,v_blockid = x[1:].split('|')
            current_record['calendar'].append({'calendar_id': int(v_calendarid), 'block_id': v_blockid})
        elif x[0] == 'n':
            v_noteid = x[1:].strip()
            if s_index == 0:
                # Note appears before any stop: attach it at trip level.
                current_record['note'].append({'idx': None, 'note_id': v_noteid})
            else:
                current_record['note'].append({'idx': s_index, 'note_id': v_noteid})
        elif x[0] == '>':
            # First stop of the trip: departure time only.
            s_index += 1
            s_stationshort, s_departuretime = x[1:].split('|')
            s_stationshort = s_stationshort.strip()
            s_departuretime = parse_time(s_departuretime)
            current_record['stop'].append({'stop_id': s_stationshort, 'index': s_index, 'arrivaltime': None, 'departuretime': s_departuretime})
            last_time = s_departuretime
        elif x[0] == '.':
            # Intermediate stop where arrival equals departure.
            s_index += 1
            s_stationshort, s_arrivaldeparturetime = x[1:].split('|')
            s_stationshort = s_stationshort.strip()
            both = parse_time(s_arrivaldeparturetime)
            if both < last_time:
                # Earlier than the previous time means we crossed midnight.
                both = add24hours(both)
            current_record['stop'].append({'stop_id': s_stationshort, 'index': s_index, 'arrivaltime': both, 'departuretime': both})
            last_time = both
        elif x[0] == '+':
            # Intermediate stop with distinct arrival/departure times.
            s_index += 1
            s_stationshort, s_arrivaltime, s_departuretime = x[1:].split('|')
            s_stationshort = s_stationshort.strip()
            s_arrivaltime = parse_time(s_arrivaltime)
            if s_arrivaltime < last_time:
                s_arrivaltime = add24hours(s_arrivaltime)
            s_departuretime = parse_time(s_departuretime)
            if s_departuretime < s_arrivaltime:
                s_departuretime = add24hours(s_departuretime)
            current_record['stop'].append({'stop_id': s_stationshort, 'index': s_index, 'arrivaltime': s_arrivaltime, 'departuretime': s_departuretime})
            last_time = s_departuretime
        elif x[0] == '<':
            # Final stop of the trip: arrival time only.
            s_index += 1
            s_stationshort, s_arrivaltime = x[1:].split('|')
            s_stationshort = s_stationshort.strip()
            s_arrivaltime = parse_time(s_arrivaltime)
            if s_arrivaltime < last_time:
                s_arrivaltime = add24hours(s_arrivaltime)
            current_record['stop'].append({'stop_id': s_stationshort, 'index': s_index, 'arrivaltime': s_arrivaltime, 'departuretime': None})
    # Flush the final trip (the loop only flushes on the next '#').
    if current_id is not None:
        timetables[current_id] = current_record
    return timetables
def sql_timetables(conn,data):
    """COPY the parsed timetable sub-records into the three
    ``timetable_note`` / ``timetable_stop`` / ``timetable_calendar`` tables.

    Each trip record (keyed by service id) carries a 'note', 'stop' and
    'calendar' list; every list entry becomes one TSV row prefixed with the
    service id.

    Fix: the original loop ``for filename,f in f.items():`` rebound ``f``
    (the dict of buffers) to each buffer while iterating it — it worked, but
    only by accident of the snapshot iterator; the names are now distinct.
    """
    columns = {
        'note': ['idx','note_id'],
        'stop': ['index', 'stop_id', 'arrivaltime', 'departuretime'],
        'calendar': ['calendar_id', 'block_id'],
    }
    buffers = {}
    for table in columns:
        buffers[table] = StringIO()
        # Header row: service id plus the per-table column names.
        buffers[table].write('\t'.join(['serviceid'] + columns[table]) + '\n')
    for service_id, record in data.items():
        for table in buffers:
            for row in record[table]:
                fields = [unicode(service_id)] + [unicode(row[c] or '') for c in columns[table]]
                buffers[table].write('\t'.join(fields) + '\n')
    cur = conn.cursor()
    for table, buf in buffers.items():
        buf.seek(0)
        cur.copy_expert("COPY timetable_%s FROM STDIN USING DELIMITERS ' ' CSV HEADER" % (table),buf)
        buf.close()
    cur.close()
def parse_notes(zip,filename,validity):
    """Parse the notes file into ``{note_id: {...}}``.

    A '#' line carries ``note_id|preferred_code`` and a following '.' line
    carries the note text.

    Fixes: ``note_preferred`` is now initialised so a file beginning with a
    '.' line cannot raise NameError; the unused ``note_shortname`` local was
    removed; the '.' test is an ``elif`` since a line cannot start with both
    markers.
    """
    l_notes = open_beltac(zip,filename, validity)
    note_id, note_preferred = None, None
    notes = {}
    for line in l_notes:
        if line[0] == '#':
            note_id,note_preferred = line[1:].split('|')
            if len(note_preferred) == 0:
                note_preferred = None
        elif line[0] == '.':
            notes[note_id] = {'note_text' : line[1:], 'note_id' : note_id, 'note_code_preferred' : note_preferred}
    return notes
def parse_blocks(zip,filename,validity):
    """Parse block records ('#'-prefixed lines) into a dict keyed by
    block_id. Each record carries a calendar id and an accessibility flag.
    """
    blocks = {}
    for line in open_beltac(zip, filename, validity):
        if line[0] != '#':
            continue
        block_id, calendar_id, accessible = line[1:].split('|')
        blocks[block_id] = {'block_id' : block_id,
                            'calendar_id' : calendar_id,
                            'accessible' : int(accessible) == 1}
    return blocks
def parse_version(zip,filename,validity):
    """Read the version file (a single 'version|release' line) and return a
    dict mapping the version number to a copy of *validity* extended with
    'version' and 'release' keys.
    """
    version = copy(validity)
    first_line = open_beltac(zip, filename, validity)[0]
    v_version, v_release = first_line.split('|')
    version['version'] = int(v_version)
    version['release'] = int(v_release)
    return {version['version']: version}
def parse_stops(zip,filename,validity):
    """Parse the stops file (14 '|'-separated fields per line) into a dict
    keyed by stop_id. Flags are converted to bool, coordinates to int.
    """
    stops = {}
    for line in open_beltac(zip, filename, validity):
        (stop_id, description_nl, description_fr, municipality_nl,
         municipality_fr, country, streetname_nl, streetname_fr, aricode,
         accessibile, lambert72_x, lambert72_y, ispublic, uic) = line.split('|')
        stops[stop_id] = {
            'stop_id' : stop_id,
            'description_nl' : description_nl,
            'description_fr' : description_fr,
            'municipality_nl' : municipality_nl,
            'municipality_fr' : municipality_fr,
            'country' : country,
            'streetname_nl' : streetname_nl,
            'streetname_fr' : streetname_fr,
            'aricode' : aricode,
            'accessible' : int(accessibile) == 1,
            'lambert72_x' : int(lambert72_x),
            'lambert72_y' : int(lambert72_y),
            'ispublic' : int(ispublic) == 1,
            'uic' : uic,
        }
    return stops
def parse_tripcharacteristics(zip,filename,validity):
    """Parse the trip-characteristics file and return ``(trips, routes)``.

    Lines starting with '@' define routes; all other lines define trips.
    The first two lines are a prefix-included flag and the prefix itself
    (parsed but currently unused).

    Bug fixed: when the route *reliability* field was empty, the original
    code cleared ``v_routerating`` a second time instead of
    ``v_routereliability`` (copy/paste error), so empty reliabilities were
    stored as empty strings.
    """
    l_car = open_beltac(zip,filename, validity)
    v_prefixincluded = int(l_car[0]) == 1  # NOTE(review): unused
    v_prefix = str(l_car[1]).strip()       # NOTE(review): unused
    trips = {}
    routes = {}
    for line in l_car[2:]:
        if line[0] == '@':
            v_routeid,v_routename,v_directionname1,v_directionname2,v_routepubliccode,v_routerating,v_routereliability = line[1:].split('|')
            if len(v_routerating) == 0:
                v_routerating = None
            if len(v_routereliability) == 0:
                v_routereliability = None
            routes[v_routeid] = {'route_id' : v_routeid,
                                 'route_name' : v_routename,
                                 'direction_name1' : v_directionname1,
                                 'direction_name2' : v_directionname2,
                                 'routepubliccode' : v_routepubliccode,
                                 'route_rating' : v_routerating,
                                 'route_reliability' : v_routereliability}
        else:
            v_tripid,v_routeid,v_trip_route_direction,v_routeservicemode,v_routeservicetype = line[1:].split('|')
            trips[v_tripid] = {'trip_id' : v_tripid,
                               'route_id' : v_routeid,
                               'trip_route_direction' : v_trip_route_direction,
                               'route_service_mode' : v_routeservicemode,
                               'route_service_type' : v_routeservicetype}
    return trips,routes
def parse_calendar(zip,filename,validity):
    """Parse the calendar file: '#' lines carry a calendar id, '-' lines a
    bitmap of day flags ('1' == active). Returns {calendar_id: [bool, ...]}.
    """
    calendar = {}
    current_id = None
    for line in open_beltac(zip, filename, validity):
        marker, payload = line[0], line[1:]
        if marker == '#':
            current_id = int(payload)
        elif marker == '-':
            calendar[current_id] = [flag == '1' for flag in payload]
    return calendar
def sql_calendar(conn,delivery, data):
    """Expand per-calendar day bitmaps into (service_id, servicedate) rows
    and COPY them into the calendar table. Day *i* maps to
    ``delivery['firstday'] + i`` days.
    """
    buf = StringIO()
    buf.write('\t'.join(['service_id', 'servicedate']) + '\n')
    for service_id, days in data.items():
        for offset, active in enumerate(days):
            if active == True:
                row = [unicode(service_id), unicode(delivery['firstday'] + timedelta(days=offset))]
                buf.write('\t'.join(row) + '\n')
    buf.seek(0)
    cur = conn.cursor()
    cur.copy_expert("COPY calendar FROM STDIN USING DELIMITERS ' ' CSV HEADER NULL AS '';",buf)
    cur.close()
    buf.close()
def create_schema(conn):
    """(Re-)create all target tables and the time helper functions.

    Fixes: the ``version`` table was dropped/created twice with two different
    layouts — the first DDL (``version(firstday date, lastdate date)``) was
    dead code, immediately superseded further down; it has been removed.
    The cursor is now closed after use.
    """
    cur = conn.cursor()
    cur.execute("""
drop table if exists blocks;
create table blocks(block_id varchar(10), calendar_id integer, accessible boolean);
drop table if exists calendar;
create table calendar(calendar_id integer, servicedate date);
drop table if exists notes;
create table notes (note_id varchar(8), note_code_preferred boolean, note_text varchar(1000));
drop table if exists routes;
create table routes (route_id varchar(8), route_name varchar(50), direction_name1 varchar(60), direction_name2 varchar(60), routepubliccode varchar(5), route_rating integer, route_reliability integer);
drop table if exists trips;
create table trips (trip_id integer, route_id varchar(8), trip_route_direction integer, route_service_mode integer, route_service_type integer);
drop table if exists stops;
create table stops ( stop_id varchar(10), description_nl varchar(50), description_fr varchar(50),municipality_nl varchar(50), municipality_fr varchar(50), country varchar(2), streetname_nl varchar(50),
streetname_fr varchar(50),aricode varchar(4), accessible boolean, lambert72_x integer,lambert72_y integer,ispublic boolean,uic varchar(9));
drop table if exists timetable_stop;
create table timetable_stop (trip_id integer, idx integer, stop_id varchar(10), arrivaltime char(8), departuretime char(8), primary key(trip_id, idx));
drop table if exists timetable_note;
create table timetable_note (trip_id integer, idx integer, note_id varchar(8));
drop table if exists timetable_calendar;
create table timetable_calendar (trip_id integer, calendar_id integer NOT NULL, block_id varchar(8));
drop table if exists version;
create table version (id integer, release integer NOT NULL, firstday date, lastday date);
CREATE OR REPLACE FUNCTION
toseconds(time24 text, shift24 integer) RETURNS integer AS $$
    SELECT total AS time
FROM
    (SELECT
        (cast(split_part($1, ':', 1) as int4) * 3600) -- hours
      + (cast(split_part($1, ':', 2) as int4) * 60) -- minutes
      + CASE WHEN $1 similar to '%:%:%' THEN (cast(split_part($1, ':', 3) as int4)) ELSE 0 END -- seconds when applicable
      + (shift24 * 86400) as total --Add 24 hours (in seconds) when shift occured
    ) as xtotal
$$ LANGUAGE SQL;
CREATE OR REPLACE FUNCTION
to32time(secondssincemidnight integer) RETURNS text AS $$
    SELECT lpad(floor((secondssincemidnight / 3600))::text, 2, '0')||':'||lpad(((secondssincemidnight % 3600) / 60)::text, 2,
'0')||':'||lpad((secondssincemidnight % 60)::text, 2, '0') AS time
$$ LANGUAGE SQL;
CREATE OR REPLACE FUNCTION add32time(departuretime text, seconds integer) RETURNS text AS $$ SELECT lpad(floor((total / 3600))::text, 2,
'0')||':'||lpad(((total % 3600) / 60)::text, 2, '0')||':'||lpad((total % 60)::text, 2, '0') AS arrival_time FROM (SELECT (cast(split_part($1, ':', 1)
as int4) * 60 + cast(split_part($1, ':', 2) as int4)) * 60 + cast(split_part($1, ':', 3) as int4) + coalesce($2, 0) as total) as xtotal $$ LANGUAGE
SQL;
""")
    cur.close()
def parse_day(day):
    """Parse a 'DD|MM|YYYY' string into a datetime.date."""
    day_num, month, year = day.split('|')
    return date(int(year), int(month), int(day_num))
def filedict(zip):
    """Map each archive member's file extension (text after the last '.')
    to its full member name.

    Note: when several members share an extension, the last one listed wins.
    Fix: the local variable no longer shadows the builtin ``dict``.
    """
    mapping = {}
    for name in zip.namelist():
        mapping[name.split('.')[-1]] = name
    return mapping
def sql_trips(conn,data):
    """COPY the parsed trips dict into the ``trips`` table."""
    simple_dict_writer(conn,'trips', ['trip_id','route_id','trip_route_direction','route_service_mode','route_service_type'], data)
def sql_routes(conn,data):
    """COPY the parsed routes dict into the ``routes`` table."""
    simple_dict_writer(conn,'routes',['route_id','route_name','direction_name1','direction_name2','routepubliccode','route_rating','route_reliability'], data)
def sql_notes(conn,data):
    """COPY the parsed notes dict into the ``notes`` table."""
    simple_dict_writer(conn,'notes', ['note_id','note_code_preferred','note_text'], data)
def sql_stops(conn,data):
    """COPY the parsed stops dict into the ``stops`` table."""
    simple_dict_writer(conn,'stops', ['stop_id','description_nl','description_fr','municipality_nl','municipality_fr','country','streetname_nl','streetname_fr','aricode','accessible','lambert72_x','lambert72_y','ispublic','uic'], data)
def sql_blocks(conn,data):
    """COPY the parsed blocks dict into the ``blocks`` table."""
    simple_dict_writer(conn,'blocks', ['block_id','calendar_id','accessible'], data)
def sql_version(conn,data):
    """COPY the parsed version dict into the ``version`` table."""
    simple_dict_writer(conn,'version', ['id','release','firstday','lastday'], data)
def set_journeypatterns(conn):
cur = conn.cursor()
cur.execute("""
create index on stops (stop_id);
drop table if exists journeypattern;
CREATE TABLE journeypattern as (
SELECT route_id,
trip_id as trip_id,
route_id||':'||rank() OVER (PARTITION BY route_id ORDER BY route_id,pattern) as journeypatterncode,stops.stop_id as last_stopid,stops.description_nl as last_stopname
FROM
(SELECT trip_id,route_id,ARRAY_AGG(stop_id ORDER BY idx) as pattern
FROM timetable_stop JOIN trips using (trip_id)
GROUP BY route_id,trip_id) as x LEFT JOIN stops ON (stops.stop_id | |
value; if omitted, an empty list is used.
:args**kwds: Optional additional keyword arguments, passed to base class.
"""
if default is None:
default = []
if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES:
raise ValueError('item_type %s not in %s' % (item_type, ALLOWED_PROPERTY_TYPES))
self.item_type = item_type
Property.__init__(self, verbose_name, default=default,
required=required, **kwds)
data_type = list
def validate(self, value, required=True):
value = super(ListProperty, self).validate(value, required=required)
if value and value is not None:
if not isinstance(value, list):
raise BadValueError('Property %s must be a list' % self.name)
value = self.validate_list_contents(value)
return value
def validate_list_contents(self, value):
value = validate_list_content(value, item_type=self.item_type)
try:
value = validate_list_content(value, item_type=self.item_type)
except BadValueError:
raise BadValueError(
'Items of %s list must all be in %s' %
(self.name, ALLOWED_PROPERTY_TYPES))
return value
def default_value(self):
"""Default value for list.
Because the property supplied to 'default' is a static value,
that value must be shallow copied to prevent all fields with
default values from sharing the same instance.
Returns:
Copy of the default value.
"""
value = super(ListProperty, self).default_value()
if value is None:
value = []
return list(value)
    def to_python(self, value):
        """Wrap the stored JSON list in a LazyList so edits sync back."""
        return LazyList(value, item_type=self.item_type)
    def to_json(self, value):
        """Convert the Python list to its JSON-serializable form."""
        return value_to_json(value, item_type=self.item_type)
class StringListProperty(ListProperty):
    """Shorthand for a ListProperty whose items must all be strings
    (``item_type=str``)."""
    def __init__(self, verbose_name=None, default=None,
            required=False, **kwds):
        super(StringListProperty, self).__init__(verbose_name=verbose_name,
            default=default, required=required, item_type=str, **kwds)
# dict proxy
class LazyDict(dict):
    """Dict subclass that mirrors every mutation into the underlying JSON
    document fragment ``doc`` (``doc[key]`` stays in sync).

    If ``init_vals`` is given, ``doc`` is overwritten with that dict;
    otherwise the values already present in ``doc`` are wrapped.

    Fix: ``popitem`` used to take a ``value`` argument and forward it to
    ``dict.popitem`` — which takes no arguments — so every call raised
    TypeError. It now implements the standard no-argument contract and keeps
    ``doc`` in sync.
    """
    def __init__(self, doc, item_type=None, init_vals=None):
        dict.__init__(self)
        self.item_type = item_type
        self.doc = doc
        if init_vals is None:
            self._wrap()
        else:
            for key, value in list(init_vals.items()):
                self[key] = value

    def _wrap(self):
        # Wrap existing JSON values without rewriting self.doc.
        for key, json_value in list(self.doc.items()):
            if isinstance(json_value, dict):
                value = LazyDict(json_value, item_type=self.item_type)
            elif isinstance(json_value, list):
                value = LazyList(json_value, item_type=self.item_type)
            else:
                value = value_to_python(json_value, self.item_type)
            dict.__setitem__(self, key, value)

    def __setitem__(self, key, value):
        # Containers are re-wrapped so nested edits also propagate to doc.
        if isinstance(value, dict):
            self.doc[key] = {}
            value = LazyDict(self.doc[key], item_type=self.item_type, init_vals=value)
        elif isinstance(value, list):
            self.doc[key] = []
            value = LazyList(self.doc[key], item_type=self.item_type, init_vals=value)
        else:
            self.doc.update({key: value_to_json(value, item_type=self.item_type) })
        super(LazyDict, self).__setitem__(key, value)

    def __delitem__(self, key):
        del self.doc[key]
        super(LazyDict, self).__delitem__(key)

    def pop(self, key, *args):
        """Standard dict.pop, mirrored into doc (optional default)."""
        default = len(args) == 1
        if default:
            self.doc.pop(key, args[-1])
            return super(LazyDict, self).pop(key, args[-1])
        self.doc.pop(key)
        return super(LazyDict, self).pop(key)

    def setdefault(self, key, default):
        if key in self:
            return self[key]
        self.doc.setdefault(key, value_to_json(default, item_type=self.item_type))
        super(LazyDict, self).setdefault(key, default)
        return default

    def update(self, value):
        for k, v in list(value.items()):
            self[k] = v

    def popitem(self):
        """Remove and return an arbitrary (key, value) pair, keeping doc in
        sync. dict.popitem takes no arguments."""
        key, value = super(LazyDict, self).popitem()
        self.doc.pop(key, None)
        return key, value

    def clear(self):
        self.doc.clear()
        super(LazyDict, self).clear()
class LazyList(list):
    """List subclass that mirrors every mutation into the underlying JSON
    document fragment ``doc`` (``doc[index]`` stays in sync).

    If ``init_vals`` is specified, ``doc`` is overwritten with the given
    list; otherwise the values already in ``doc`` are wrapped.

    NOTE(review): ``__getslice__`` / ``__setslice__`` / ``__delslice__`` are
    Python-2-only hooks — on Python 3 slicing goes through ``__getitem__``
    etc. and these methods are never invoked implicitly; confirm the
    intended runtime.
    """
    def __init__(self, doc, item_type=None, init_vals=None):
        list.__init__(self)
        self.item_type = item_type
        self.doc = doc
        if init_vals is None:
            # just wrap the current values
            self._wrap()
        else:
            # initialize this list and the underlying list
            # with the values given.
            del self.doc[:]
            for item in init_vals:
                self.append(item)

    def _wrap(self):
        # Wrap existing JSON values without rewriting self.doc.
        for json_value in self.doc:
            if isinstance(json_value, dict):
                value = LazyDict(json_value, item_type=self.item_type)
            elif isinstance(json_value, list):
                value = LazyList(json_value, item_type=self.item_type)
            else:
                value = value_to_python(json_value, self.item_type)
            list.append(self, value)

    def __delitem__(self, index):
        del self.doc[index]
        list.__delitem__(self, index)

    def __setitem__(self, index, value):
        # Containers are re-wrapped so nested edits also propagate to doc.
        if isinstance(value, dict):
            self.doc[index] = {}
            value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value)
        elif isinstance(value, list):
            self.doc[index] = []
            value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value)
        else:
            self.doc[index] = value_to_json(value, item_type=self.item_type)
        list.__setitem__(self, index, value)

    def __delslice__(self, i, j):
        # Python 2 only (see class note).
        del self.doc[i:j]
        list.__delslice__(self, i, j)

    def __getslice__(self, i, j):
        # Python 2 only (see class note).
        return LazyList(self.doc[i:j], self.item_type)

    def __setslice__(self, i, j, seq):
        # Python 2 only (see class note).
        self.doc[i:j] = (value_to_json(v, item_type=self.item_type) for v in seq)
        list.__setslice__(self, i, j, seq)

    def __contains__(self, value):
        # Membership is tested against the JSON form stored in doc.
        jvalue = value_to_json(value)
        for m in self.doc:
            if m == jvalue: return True
        return False

    def append(self, *args, **kwargs):
        # Accepts either one positional value or keyword args forming a dict.
        if args:
            assert len(args) == 1
            value = args[0]
        else:
            value = kwargs
        index = len(self)
        if isinstance(value, dict):
            self.doc.append({})
            value = LazyDict(self.doc[index], item_type=self.item_type, init_vals=value)
        elif isinstance(value, list):
            self.doc.append([])
            value = LazyList(self.doc[index], item_type=self.item_type, init_vals=value)
        else:
            self.doc.append(value_to_json(value, item_type=self.item_type))
        super(LazyList, self).append(value)

    def extend(self, x):
        self.doc.extend(
                [value_to_json(v, item_type=self.item_type) for v in x])
        super(LazyList, self).extend(x)

    def index(self, x, *args):
        # NOTE(review): extra start/stop args are accepted but ignored, and
        # the index is found in doc (JSON values), not in the wrapped list.
        x = value_to_json(x, item_type=self.item_type)
        return self.doc.index(x)

    def insert(self, i, x):
        # Delegates to the slice hook (Python 2 semantics; see class note).
        self.__setslice__(i, i, [x])

    def pop(self, i=-1):
        del self.doc[i]
        v = super(LazyList, self).pop(i)
        return value_to_python(v, item_type=self.item_type)

    def remove(self, x):
        del self[self.index(x)]

    def sort(self, cmp=None, key=None, reverse=False):
        # NOTE(review): positional cmp/key/reverse is the Python 2
        # list.sort signature; Python 3 requires keyword-only key/reverse.
        self.doc.sort(cmp, key, reverse)
        list.sort(self, cmp, key, reverse)

    def reverse(self):
        self.doc.reverse()
        list.reverse(self)
if support_setproperty:
class SetProperty(Property):
"""A property that stores a Python set as a list of unique
elements.
Note that Python set operations like union that return a set
object do not alter list that will be stored with the next save,
while operations like update that change a set object in-place do
keep the list in sync.
"""
def __init__(self, verbose_name=None, default=None, required=None,
item_type=None, **kwds):
"""Construct SetProperty.
:args verbose_name: Optional verbose name.
:args default: Optional default value; if omitted, an empty
set is used.
:args required: True if field is required, default is False.
:args item_type: Optional data type of items that set
contains. Used to assist with JSON
serialization/deserialization when data is
stored/retireved.
:args **kwds: Optional additional keyword arguments, passed to
base class.
"""
if default is None:
default = set()
if item_type is not None and item_type not in ALLOWED_PROPERTY_TYPES:
raise ValueError('item_type %s not in %s'
% (item_type, ALLOWED_PROPERTY_TYPES))
self.item_type = item_type
super(SetProperty, self).__init__(
verbose_name=verbose_name, default=default, required=required,
**kwds)
data_type = set
def validate(self, value, required=True):
value = super(SetProperty, self).validate(value, required=required)
if value and value is not None:
if not isinstance(value, MutableSet):
raise BadValueError('Property %s must be a set' % self.name)
value = self.validate_set_contents(value)
return value
def validate_set_contents(self, value):
try:
value = validate_set_content(value, item_type=self.item_type)
except BadValueError:
raise BadValueError(
'Items of %s set must all be in %s' %
(self.name, ALLOWED_PROPERTY_TYPES))
return value
def default_value(self):
"""Return default value for set.
Because the property supplied to 'default' is a static value,
that value must be shallow copied to prevent all fields with
default values from sharing the same instance.
Returns:
Copy of the default value.
"""
value = super(SetProperty, self).default_value()
if value is None:
return set()
return value.copy()
def to_python(self, value):
return LazySet(value, item_type=self.item_type)
def to_json(self, value):
return value_to_json(value, item_type=self.item_type)
class LazySet(MutableSet):
"""Object to make sure that we keep set and _doc synchronized.
We sub-class MutableSet and maintain changes in doc.
Note that methods like union that return a set object do not
alter _doc, while methods like update that change a set object
in-place do keep _doc in sync.
"""
        def _map_named_operation(opname):
            # Build a named set method (e.g. issubset) from the corresponding
            # MutableSet operator, coercing a plain iterable argument into a
            # set first — matching the built-in set API, which accepts any
            # iterable where the operators require a set.
            fn = getattr(MutableSet, opname)
            if hasattr(fn, 'im_func'):
                # Python 2 unbound methods carry the function in __func__.
                fn = fn.__func__
            def method(self, other, fn=fn):
                if not isinstance(other, MutableSet):
                    other = self._from_iterable(other)
                return fn(self, other)
            return method
        # Named aliases for the operator-based comparisons.
        issubset = _map_named_operation('__le__')
        issuperset = _map_named_operation('__ge__')
        symmetric_difference = _map_named_operation('__xor__')
def __init__(self, doc, item_type=None):
self.item_type = item_type
self.doc = doc
self.elements = set(value_to_python(value, self.item_type)
for value in self.doc)
def __repr__(self):
return '%s(%r)' % (type(self).__name__, list(self))
        @classmethod
        def _from_iterable(cls, it):
            # MutableSet hook used by the mixin operators to build results.
            # NOTE(review): this passes the iterable as ``doc``, so the new
            # LazySet's doc is not a JSON list from the document — confirm.
            return cls(it)
def __iand__(self, iterator):
for value in (self.elements - iterator):
self.elements.discard(value)
return self
def __iter__(self):
return iter(element for element in self.elements)
        def __len__(self):
            # Size of the in-memory set (assumed in sync with doc).
            return len(self.elements)
        def __contains__(self, item):
            # Membership is checked against the Python-value set, not doc.
            return item in self.elements
        def __xor__(self, other):
            # Symmetric difference; accepts any iterable like built-in set.
            if not isinstance(other, MutableSet):
                if not is_iterable(other):
                    return NotImplemented
                other = self._from_iterable(other)
            return (self.elements - other) | (other - self.elements)
        def __gt__(self, other):
            # Proper-superset test, delegated to the underlying set.
            if not isinstance(other, MutableSet):
                return NotImplemented
            return other < self.elements
        def __ge__(self, other):
            # Superset test, delegated to the underlying set.
            if not isinstance(other, MutableSet):
                return NotImplemented
            return other <= self.elements
def __ne__(self, other):
return not (self.elements == other)
        def add(self, value):
            # Add to the in-memory set and append to doc if absent.
            # NOTE(review): the membership test compares the *Python* value
            # against doc's JSON values; for item types whose JSON form
            # differs this may duplicate entries — confirm.
            self.elements.add(value)
            if value not in self.doc:
                self.doc.append(value_to_json(value, item_type=self.item_type))
        def copy(self):
            # Returns a plain ``set`` copy (detached from doc), not a LazySet.
            return self.elements.copy()
        def difference(self, other, *args):
            # Non-mutating: returns a plain set; doc is untouched.
            return self.elements.difference(other, *args)
def difference_update(self, other, *args):
for value in other:
self.discard(value)
for arg in args:
self.difference_update(arg)
        def discard(self, value):
            # Remove from the in-memory set, then from the JSON list.
            self.elements.discard(value)
            try:
                # NOTE(review): removes the raw Python value from doc; if the
                # JSON form differs from the Python value this is a no-op.
                self.doc.remove(value)
            except ValueError:
                # Absent from doc — mirror set.discard's silent behavior.
                pass
        def intersection(self, other, *args):
            # Non-mutating: returns a plain set; doc is untouched.
            return self.elements.intersection(other, *args)
def intersection_update(self, | |
<filename>secv_guis/bimask_app/main_window.py
# -*- coding:utf-8 -*-
"""
This module contains the logic and widgets pertaining to the main window
of the bimask app: An app that allows displaying an image, editing a mask
on it and also displaying/editing a preannotation mask.
It can be used to efficiently annotate large images with pixel precision.
Check instructions.txt for more details.
"""
import os
from PySide2 import QtCore, QtWidgets, QtGui
import numpy as np
from PIL import Image
import json
#
from skimage.filters import apply_hysteresis_threshold
#
from .dialogs import InstructionsDialog, AboutDialog, KeymapsDialog, \
SavedStateTracker
#
from ..masked_scene import MaskedImageScene, DisplayView
from ..base_widgets import FileList, MaskPaintForm, SaveForm
from ..utils import load_img_and_exif, unique_filename
from ..commands import DrawCommand, EraseCommand, DrawOverlappingCommand
from ..objects import PointList
# #############################################################################
# ## APPLICATION LOGIC FOR QUICK MASKING
# #############################################################################
def pmap_to_mask(pmap, upper_percentile, lower_percentile,
                 percentile_max=100):
    """Threshold a probability map into a boolean mask by percentile band.

    Values strictly above the ``upper_percentile`` value, or strictly below
    the ``lower_percentile`` value, are zeroed; the remaining non-zero
    entries become True.

    :param pmap: Array-like 2D map of scores (copied; caller's data is not
      mutated).
    :param upper_percentile: Upper bound, in ``percentile_max`` units.
    :param lower_percentile: Lower bound, in ``percentile_max`` units.
    :returns: Boolean numpy array with the same shape as ``pmap``.
    """
    pmap = np.array(pmap)  # fresh copy so we can zero entries in place
    # Sort all values ascending to pick the percentile cut points. (The
    # original reversed the rows before flattening, which is a no-op prior
    # to sorting and has been removed.)
    values = np.sort(pmap.flatten())
    hi_idx = int((len(values) - 1) * upper_percentile / percentile_max)
    lo_idx = int((len(values) - 1) * lower_percentile / percentile_max)
    pmap[pmap > values[hi_idx]] = 0
    pmap[pmap < values[lo_idx]] = 0
    return pmap > 0
# #############################################################################
# ## WIDGET EXTENSIONS AND COMPOSITIONS TO ADD SPECIFIC LOGIC+LAYOUT
# #############################################################################
class FileLists(QtWidgets.QWidget):
    """
    A cluster of 3 file lists: one for images, one for masks and one for
    preannotations.
    """

    def __init__(self, parent=None, img_extensions=None,
                 mask_extensions=None, preannot_extensions=None):
        """
        If given, the extensions are case-insensitive lists in the form
        ``[".png", ".jpg"]`` that filter the files that are shown in the list
        by allowing only the given terminations. ``img_extensions`` defaults
        to ``[".png", ".jpg", ".jpeg"]``.
        """
        super().__init__(parent)
        # Fix: avoid a mutable default argument; build a fresh list per call.
        if img_extensions is None:
            img_extensions = [".png", ".jpg", ".jpeg"]
        # create widgets
        self.img_list = FileList("Images\nfolder", extensions=img_extensions)
        # NOTE(review): mask_extensions and preannot_extensions are accepted
        # but never forwarded to their FileList widgets — confirm intent.
        self.mask_list = FileList("Masks\nfolder")
        self.preannot_list = FileList("Pre-annotations\nfolder")
        # add widgets to layout
        self.main_layout = QtWidgets.QHBoxLayout()
        self.main_layout.addWidget(self.img_list)
        self.main_layout.addWidget(self.mask_list)
        self.main_layout.addWidget(self.preannot_list)
        self.setLayout(self.main_layout)
class IntegratedSaveForm(SaveForm):
    """
    A ``SaveForm`` that implements this app's logic, namely, it features 2
    masks, one for annot and one for preannot, and saves them as B&W png,
    plus an optional JSON dump of the scene's point-list objects.
    """
    def __init__(self, main_window, default_path=None,
                 save_dialog_timeout_ms=1000):
        """
        :param main_window: A reference to the ``BimaskMainWindow``
        :param str default_path: If none given, 'home' is picked.
        :param save_dialog_timeout_ms: When successfully saving, a dialog
          will pop up, and disappear after this many milliseconds.
        """
        super().__init__(None, default_path)
        self.main_window = main_window
        # One checkbox per savable artifact; the suffix text is editable.
        self.add_checkbox("preannot.", initial_val=False,
                          initial_txt="_preannot.png")
        self.add_checkbox("annot.", initial_txt="_annot.png")
        self.add_checkbox("points", initial_txt="_points.json")
        # This reference is needed otherwise dialogs get garbage collected?
        self.dialog = None
        self.dialog_ms = save_dialog_timeout_ms

    def save_masks(self, states, suffixes, overwrite):
        """
        Overriden method that we don't call directly. See ``SaveForm`` for
        interface details. ``states``/``suffixes`` arrive in checkbox order:
        (preannot, annot, points).
        """
        save_preannot, save_annot, save_points = states
        suff_preannot, suff_annot, suff_points = suffixes
        img_name = self.main_window.current_img_basename
        #
        a_pmi = self.main_window.graphics_view.annot_pmi
        pa_pmi = self.main_window.graphics_view.preannot_pmi
        #
        scene = self.main_window.graphics_view.scene()
        saved = {}
        if save_preannot and pa_pmi is not None:
            pa_path = os.path.join(self.save_path, img_name + suff_preannot)
            if not overwrite:
                # Pick a non-clashing filename instead of overwriting.
                pa_path = unique_filename(pa_path)
            pa_msk_arr = scene.mask_as_bool_arr(pa_pmi)
            self.save_bool_arr_as_img(pa_msk_arr, pa_path, overwrite)
            saved["preannotation mask"] = pa_path
        if save_annot and a_pmi is not None:
            a_path = os.path.join(self.save_path, img_name + suff_annot)
            if not overwrite:
                a_path = unique_filename(a_path)
            msk_arr = scene.mask_as_bool_arr(a_pmi)
            self.save_bool_arr_as_img(msk_arr, a_path, overwrite)
            saved["annotation mask"] = a_path
        if save_points and scene.objects:
            # Serialize each object class's non-empty states under its name.
            state_dict = {k.__name__: [elt.state() for elt in v if elt.state()]
                          for k, v in scene.objects.items()}
            p_path = os.path.join(self.save_path, img_name + suff_points)
            if not overwrite:
                p_path = unique_filename(p_path)
            with open(p_path, "w") as f:
                # f.write(str(state_dict))
                json.dump(state_dict, f)
            saved["point lists"] = p_path
        #
        if saved:
            # Tell the tracker what was written so the "unsaved changes"
            # state is cleared and the confirmation dialog is shown.
            self.main_window.graphics_view.saved_state_tracker.save(
                saved, self.dialog_ms)

    def save_bool_arr_as_img(self, arr, outpath, overwrite_existing=False):
        """
        Output: RGB PNG image where false is black (0, 0, 0) and true is white
        (255, 255, 255).

        NOTE(review): ``arr`` is passed straight to ``PIL.Image.fromarray``;
        confirm the dtype PIL receives here is one it supports.
        """
        if not overwrite_existing:
            outpath = unique_filename(outpath)
        img = Image.fromarray(arr)
        img.save(outpath)
class IntegratedDisplayView(DisplayView):
"""
This class implements the main component of the main window: it features a
view of the image and the masks, together with a set of operations that can
be done on them (painting, updating...), and the callback mechanisms to
trigger those operations.
"""
    def __init__(self, main_window, scale_percent=15):
        """
        :param main_window: Back-reference to the app's main window (used for
          the undo stack and save tracking).
        :param scale_percent: Each zoom in/out operation will scale the view
          by this much (in percent).
        """
        super().__init__(scene=None, parent=None, scale_percent=scale_percent)
        self._scene = MaskedImageScene()
        self.main_window = main_window
        self.shape = None  # shape of the currently loaded image array
        self.setScene(self._scene)
        #
        self._preannot_pmap = None  # cached preannotation probability map
        self.preannot_pmi = None  # scene item for the preannotation mask
        self.annot_pmi = None  # scene item for the annotation mask
        #
        #
        self._current_clickdrag_action = None  # in-progress composite command
        #
        self.saved_state_tracker = None  # tracks unsaved edits per image
# MEMORY ACTIONS
def new_image(self, img_path, initial_mask_color=(219, 54, 148, 150),
initial_preannot_color=(102, 214, 123, 100)):
"""
If successful, removes all elements from the scene and the undo stack,
and loads a fresh image and masks. If there are unsaved changes, a
dialog asking for confirmation will pop up.
:returns: True if the action completed successfully, False if the user
decides to abort.
"""
if self.saved_state_tracker is not None:
is_delete_ok = self.saved_state_tracker.delete()
if not is_delete_ok:
# If user didn't want to delete unsaved changes
return False
# Go on with the update
img_arr = load_img_and_exif(img_path)[0]
self.shape = img_arr.shape
self._scene.update_image(img_arr)
dummy_preannot = np.zeros(img_arr.shape[:2], dtype=np.bool)
dummy_mask = np.zeros_like(dummy_preannot)
self.preannot_pmi = self._scene.add_mask(
dummy_preannot, initial_preannot_color)
self.annot_pmi = self._scene.add_mask(
dummy_mask, initial_mask_color)
self.fit_in_scene()
#
self.main_window.undo_stack.clear()
#
self.saved_state_tracker = SavedStateTracker()
return True
def preannot_from_path(self, preannot_path, rgba, upper_thresh=100,
lower_thresh=90, normalize=False):
"""
This method is prototype-ish: It loads an ``.npz`` file with and
'entropy' field, expected to have a numpy float matrix with same
shape as the image. Alternatively it takes a greyscale image file
suppoted by PIL.
"""
assert self.scene().img_pmi is not None, \
"You need to load an image first!"
if preannot_path.endswith(".npz") or preannot_path.endswith(".npy"):
self._preannot_pmap = np.load(preannot_path)["entropy"]
else:
img=np.asanyarray(Image.open(preannot_path))
if len(img.shape)>2:
img=img[:,:,0]
self._preannot_pmap = np.asarray(img)
normalize=True
if normalize:
try:
self._preannot_pmap = self._preannot_pmap/np.max(self._preannot_pmap)
except ZeroDivisionError:
pass
m = pmap_to_mask(self._preannot_pmap, upper_thresh, lower_thresh)
self.preannot_pmi = self.scene().replace_mask_pmi(
self.preannot_pmi, m)
#
self.saved_state_tracker.edit()
def mask_from_path(self, mask_path, rgba):
    """
    Loads a binary mask into the scene as an RGBA-colored mask.

    :param mask_path: Path to an image containing a binary mask, where
      zero pixels are considered false and non-zero true.
    :param rgba: Color of the loaded mask
    :raises RuntimeError: If the loaded array is not rank 2 or 3.
    """
    assert self.scene().img_pmi is not None, \
        "You need to load an image first!"
    arr = load_img_and_exif(mask_path)[0]
    if len(arr.shape) == 2:
        # Greyscale: any non-zero pixel belongs to the mask.
        mask = arr > 0
    elif len(arr.shape) == 3:
        # Multi-channel: a pixel is in the mask if ANY channel is non-zero.
        mask = arr.any(axis=-1)
    else:
        raise RuntimeError("Mask must be rank 2 or 3!")
    # Forward rgba so the documented mask color is actually applied (it
    # was previously accepted but ignored), matching change_annot_rgba.
    self.annot_pmi = self.scene().replace_mask_pmi(
        self.annot_pmi, mask, rgba)
    # Guard like change_preannot_pval does, in case no tracker exists yet.
    if self.saved_state_tracker is not None:
        self.saved_state_tracker.edit()
# MASK SINGLE-SHOT ACTIONS
def change_preannot_pval(self, upper_thresh, lower_thresh):
    """
    Re-threshold the cached preannotation probability map and refresh the
    preannotation mask shown in the scene. When no probability map has
    been loaded yet the mask is left untouched, but the edit is still
    recorded on the saved-state tracker (when one exists).

    :param upper_thresh: Upper threshold forwarded to ``pmap_to_mask``.
    :param lower_thresh: Lower threshold forwarded to ``pmap_to_mask``.
    """
    pmap = self._preannot_pmap
    if pmap is not None:
        refreshed = pmap_to_mask(pmap, upper_thresh, lower_thresh)
        self.preannot_pmi = self.scene().replace_mask_pmi(
            self.preannot_pmi, refreshed)
    # Tracker may not exist before the first image has been loaded.
    tracker = self.saved_state_tracker
    if tracker is not None:
        tracker.edit()
def change_preannot_rgba(self, rgba):
    """
    Recolor the preannotation mask, keeping its pixels unchanged.
    Does nothing when no preannotation mask item exists.

    :param rgba: New RGBA color for the preannotation mask.
    """
    pmi = self.preannot_pmi
    if pmi is None:
        return
    current_mask = self.scene().mask_as_bool_arr(pmi)
    self.preannot_pmi = self.scene().replace_mask_pmi(
        pmi, current_mask, rgba)
def change_annot_rgba(self, rgba):
    """
    Recolor the annotation mask, keeping its pixels unchanged.
    Does nothing when no annotation mask item exists.

    :param rgba: New RGBA color for the annotation mask.
    """
    pmi = self.annot_pmi
    if pmi is None:
        return
    current_mask = self.scene().mask_as_bool_arr(pmi)
    self.annot_pmi = self.scene().replace_mask_pmi(
        pmi, current_mask, rgba)
# MASK COMPOSITE ACTIONS
def _finish_clickdrag_action(self):
"""
finishes any click+drag action that may be active (does nothing if
none active).
"""
cmd = self._current_clickdrag_action
if cmd is not None:
cmd.finish(self.main_window.undo_stack)
self._current_clickdrag_action = None
def _perform_composite_action(self, action_class, action_args,
construction_args):
"""
This function is the recommended way to perform a composite
action for the following reasons:
1. If ``action_class`` is already running, it simply continues it.
2. If a different composite action was running, it closes it and starts
this one.
3. If no composite action was running, starts this one
And finally performs the action.
:param construction_args: If this action needs to be started, it will
be called via ``cmd = action_class(*construction_args)``
:param action_args: The command will be called via ``cmd(action_args)``
Usage example::
x, y = current_action_position...
pmi = ...
brush_size = ...
rgba = self.scene().mask_pmis[pmi]
self._perform_composite_action(DrawCommand, [x, y],
[pmi, rgba, brush_size])
"""
cmd = self._current_clickdrag_action
# if changed to this action without releasing the prior one, release it
action_changed = action_class is not cmd.__class__
cmd_finished = cmd is not None and cmd.finished
if action_changed:
self._finish_clickdrag_action() # sets current action to None
cmd = self._current_clickdrag_action
# if no open action of this class, create
if cmd is None or cmd_finished:
cmd = action_class(*construction_args)
self._current_clickdrag_action = cmd
cmd.action(*action_args)
def clickdrag_action(self, x, y):
"""
Paint to the currently selected mask, with the currently selected
brush type, at the given position.
The given ``x, y`` position is in 'scene coordinates', i.e. the
position from a mouse event has to be translated as follows::
xpos, ypos = self.mapToScene(event.pos()).toTuple()
self.clickdrag_action(xpos, ypos)
"""
# retrieve pmi info
# expected idx: 0 for preannot, 1 for annot
idx_map = {0: self.preannot_pmi, 1: self.annot_pmi}
mask_idx = self.main_window.paint_form.current_button_idx
pmi = idx_map[mask_idx]
# paint only |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.