input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
<filename>build_patch.py
#!/usr/bin/python3
import os
import subprocess
import sys
import csv
from ips_util import Patch
import text_util
import gfx_util
class StringPool:
    """A fixed-capacity byte region at a known ROM address into which encoded
    strings are packed sequentially."""

    def __init__(self, address, capacity):
        # address: ROM offset where the pool begins.
        # capacity: maximum number of bytes the pool may hold.
        self.address = address
        self.capacity = capacity
        self.pool = bytearray()

    def can_add(self, bytes):
        """Return True if `bytes` still fits in the remaining space.

        Uses <= so data that fills the pool exactly to capacity is accepted;
        the previous `<` comparison rejected an exact fit (off-by-one).
        """
        return len(self.pool) + len(bytes) <= self.capacity

    def add(self, bytes):
        """Append `bytes` to the pool and return the ROM address where they start."""
        start = len(self.pool) + self.address
        self.pool += bytes
        return start

    def free_space(self):
        """Return the number of unused bytes remaining in the pool."""
        return self.capacity - len(self.pool)

    def get_bytes(self):
        """Return the packed pool contents accumulated so far."""
        return self.pool
def num_8bit(num):
    """Encode *num* as a single little-endian byte."""
    return num.to_bytes(1, 'little')
def num_16bit(num):
    """Encode *num* as a little-endian 16-bit word (2 bytes)."""
    return num.to_bytes(2, 'little')
def num_24bit(num):
    """Encode *num* as a little-endian 24-bit value (3 bytes)."""
    return num.to_bytes(3, 'little')
def write_with_size_check(patch, address, available_length, data, fill_byte=b'\x00'):
    """Write `data` into the patch at `address`, padding the remainder of the
    allocated region with `fill_byte` so stale bytes never survive.

    Raises ValueError when `data` exceeds `available_length`. (Previously a
    bare Exception; ValueError is more precise and is still caught by any
    existing `except Exception` handlers.)
    """
    difference = available_length - len(data)
    if difference < 0:
        raise ValueError('Not enough space for data! Received {0} bytes, but only have space allocated for {1}.'.format(len(data), available_length))
    patch.add_record(address, data)
    if difference > 0:
        # Fill the leftover space with a run-length record.
        patch.add_rle_record(address + len(data), fill_byte, difference)
def write_strings_from_csv(patch, filename, reverse_font_map, pointer_table_address, pointer_table_length,
        string_pool_address, string_pool_length, overflow_pool_address = None, overflow_pool_length = None,
        column_to_encode=4, newline=b'\xfe', terminator=b'\xff', pad_to_line_count=1, pad_final_line=False, interleaved=False):
    """Encode each row of a translation CSV and pack the encoded strings into
    one or two string pools, emitting one 16-bit pointer-table entry per row.

    Identical encoded strings are deduplicated and share a single pool
    address. Rows that fit nowhere get a 0xffff pointer and a diagnostic
    printout. Finally the pointer table and pool contents are written into
    the patch, padding unused pool space with 0xff.
    """
    print('Writing strings from {0}...'.format(filename))
    pointer_table_out = bytearray()
    # encoded bytes -> pool address, for deduplication across rows.
    previously_encoded = {}
    pools = [StringPool(string_pool_address, string_pool_length)]
    if overflow_pool_address is not None and overflow_pool_length is not None:
        pools.append(StringPool(overflow_pool_address, overflow_pool_length))
    with open(filename, 'r', encoding='shift-jis') as in_file:
        reader = csv.reader(in_file, lineterminator='\n')
        for i, row in enumerate(reader):
            if interleaved:
                # This is only used for area names, which have some special flags that need to be set, except for index 15.
                flag_map = {7: 0x2, 9: 0x4, 10: 0x8, 16: 0x8}
                # NOTE(review): this path reads row[4] directly and ignores
                # column_to_encode — presumably intentional since only area
                # names use it; confirm before reusing elsewhere.
                encoded_string = text_util.encode_text_interleaved(row[4], reverse_font_map, i != 15, flag_map[i] if i in flag_map else 0x1)
            else:
                encoded_string = text_util.encode_text(row[column_to_encode], reverse_font_map,
                    pad_to_line_count=pad_to_line_count, pad_final_line=pad_final_line,
                    newline=newline, terminator=terminator)
            # Short strings try the overflow pool first (pools reversed) so
            # the main pool is kept free for larger strings.
            is_tiny = (len(encoded_string) < 100)
            string_address = None
            if encoded_string in previously_encoded:
                string_address = previously_encoded[encoded_string]
            else:
                for pool in (reversed(pools) if is_tiny else pools):
                    if pool.can_add(encoded_string):
                        # Pointers are 16-bit; only the low word of the pool
                        # address is stored.
                        string_address = (0xffff & pool.add(encoded_string))
                        break
                if string_address is not None:
                    previously_encoded[encoded_string] = string_address
            if string_address is None:
                # No pool had room; record a sentinel pointer and report.
                print('Text {0} didn\'t fit! Size was {1}'.format(row[4], len(row[4])))
                for poolIndex, pool in enumerate(pools):
                    print('Pool {0}: {1} free'.format(poolIndex, pool.free_space()))
                pointer_table_out += (0xffff).to_bytes(2, byteorder='little')
            else:
                pointer_table_out += string_address.to_bytes(2, byteorder='little')
    write_with_size_check(patch, pointer_table_address, pointer_table_length, pointer_table_out)
    for poolIndex, pool in enumerate(pools):
        # Pad unused pool space with 0xff so leftover bytes are inert.
        write_with_size_check(patch, pool.address, pool.capacity, pool.get_bytes(), fill_byte=b'\xff')
        print('Remaining in pool at {0:x}: {1}'.format(pool.address, pool.free_space()))
def write_gfx(patch, data, address, length):
    """Compress raw graphics bytes and write them into the allotted region."""
    compressed = gfx_util.compress(data)
    write_with_size_check(patch, address, length, compressed)
def write_gfx_from_file(patch, filename, address, length):
    """Read a raw graphics file and write its compressed form into the patch."""
    with open(filename, 'rb') as gfx_file:
        raw = gfx_file.read()
    write_gfx(patch, raw, address, length)
def write_code(patch, filename, address, length):
    """Assemble a source file with the `xa` assembler and write the binary
    into the patch, padding leftover space with 0xEA (NOP).

    Raises an Exception including the assembler's stderr on failure.
    """
    tmp_filename = 'build/_tmp.a65'
    # Pass the argument list directly (shell=False). Combining a list with
    # shell=True is broken on POSIX: only the first element ('xa') would be
    # run and the remaining arguments silently dropped.
    result = subprocess.run(['xa', '-o', tmp_filename, '-w', filename], stderr=subprocess.PIPE)
    if result.returncode == 0:
        with open(tmp_filename, 'rb') as tmp_file:
            write_with_size_check(patch, address, length, tmp_file.read(), fill_byte=b'\xea')
        os.remove(tmp_filename)
    else:
        raise Exception('Assembler failed on {0} with error code {1}:\n\nErrors:\n{2}'.format(filename, result.returncode, result.stderr.decode(sys.stderr.encoding)))
def write_dialog_choice_entry(patch, address, dialog_index=None, page_index=None, options=None, dest1=None, dest2=None, dest3=None, first_option=None):
    """Patch the 7-word dialog-choice record at `address`.

    Word layout:
      0: dialog index
      1: line index (stored as page_index * 6, the window height)
      2: number of options; 0 auto-redirects to the first destination
      3-5: destination lines for options 1-3, stored as (dest - 1) * 6
           because the line counter still advances by 6 after the redirect;
           0xffff is written through unchanged. Word 5 appears unused.
      6: index of the first option; 1 makes the first slot static,
         unselectable text.
    Any argument left as None leaves the corresponding word untouched.
    """
    if dialog_index is not None:
        patch.add_record(address, num_16bit(dialog_index))
    if page_index is not None:
        patch.add_record(address + 2, num_16bit(page_index * 6))
    if options is not None:
        patch.add_record(address + 4, num_16bit(options))
    for offset, dest in ((6, dest1), (8, dest2), (10, dest3)):
        if dest is None:
            continue
        encoded = b'\xff\xff' if dest == 0xffff else num_16bit((dest - 1) * 6)
        patch.add_record(address + offset, encoded)
    if first_option is not None:
        patch.add_record(address + 12, num_16bit(first_option))
def write_hdma_table_entry(patch, base_address, entry_index, row_count=None, left_value=None, right_value=None):
    """Patch one 3-byte entry of a window-shape HDMA table.

    Each entry is (row count, left window bound, right window bound); any
    argument left as None keeps the original byte.
    """
    entry_address = base_address + 3 * entry_index
    for offset, value in ((0, row_count), (1, left_value), (2, right_value)):
        if value is not None:
            patch.add_record(entry_address + offset, num_8bit(value))
if __name__ == '__main__':
os.makedirs('build', exist_ok=True)
reverse_font_map = text_util.load_map_reverse('assets/text/font.tbl')
patch = Patch()
# New tiles for digits in font.
patch.add_record(0x488a, b'\xB5\xB6\xB7\xB8')
# Some code that gets executed to get the evolution menu into a clean state after making a selection...
# it ensures that the "Yes" option is visible if it was in the middle of blinking. But because the "Yes"
# text is longer than the original, no easy way to expand it. Just NOP it out; it'll look fine.
patch.add_rle_record(0x4ade, b'\xea', 12)
# Evolution options...
# These instructions write blank to each possible location of the arrow.
# Nudge each one up by 0x40...
patch.add_record(0x626b, b'\x06')
patch.add_record(0x626f, b'\x86')
patch.add_record(0x6273, b'\x06')
patch.add_record(0x6277, b'\x86')
patch.add_record(0x627B, b'\x06')
# Do the same with a table of pointers used for writing the actual arrow.
patch.add_record(0x6325, b'\x06')
patch.add_record(0x6327, b'\x86')
patch.add_record(0x6329, b'\x06')
patch.add_record(0x632b, b'\x86')
patch.add_record(0x632d, b'\x06')
write_code(patch, 'assets/code/menu text.asm', 0x4f90, 309)
# Code for dialog choices starts at 0x1b6f8... the arrows all need to shift left and up.
# First, all three possible arrow spots are blanked out. Update those.
patch.add_record(0x1b752, num_24bit(0x7ee9ca))
patch.add_record(0x1b756, num_24bit(0x7eea4a))
patch.add_record(0x1b75a, num_24bit(0x7eeaca))
# Then, the base location to which the arrow actually gets written. (Gets offset by the current focus index.)
patch.add_record(0x1b76d, num_24bit(0x7ee9ca))
# The name entry window used by the fossil record...
# This is the check for the index of "End," which happens way before most of the name entry code for some reason.
patch.add_record(0x1a822, num_8bit(0x56))
# Remove a multiplication by 4 (two ASLs) when fetching the character to store.
patch.add_rle_record(0x1b9ff, b'\xea', 2)
# "Space" is now at index 0x55.
patch.add_record(0x1ba0d, num_8bit(0x55))
# We only write one row, and set the palette ourselves. This inserts "AND #$00ff: OR #$3000: NOP" in place of an extra write to the top row.
patch.add_record(0x1ba20, b'\x29\xff\x00\x09\x00\x30\xea')
# Code to read the characters in the name entry window is rewritten to use 1 byte per character instead of 4.
write_code(patch, 'assets/code/name entry grid.asm', 0x1ba69, 289)
# Scrolling arrows on name entry... goal is only one page of characters, so scrolling should never be supported. For this
# block that draws the arrows, just skip comparing to 0xb for the up arrow; this has the effect of doing the comparison
# against 0 instead.
patch.add_rle_record(0x1bb93, b'\xea', 3)
# Instructions to check a few bounds on the current index (after navigating down?).
patch.add_record(0x1bcff, num_16bit(85)) # Compare to index of "Space"
patch.add_record(0x1bd04, num_16bit(88)) # Compare to index of "End"
patch.add_record(0x1bd09, num_16bit(90)) # Compare to first overflow index
patch.add_record(0x1bd1b, num_16bit(85)) # Force to the index of "Space"
patch.add_record(0x1bd24, num_16bit(86)) # Force to the index of "End"
# When navigating up from "End," it needs to shift the current index forward a bit to align visually. This checks that value.
patch.add_record(0x1bd73, num_16bit(86 - 10))
# Handle wrapping when navigating right from "End."
patch.add_record(0x1bc51, num_16bit(87))
patch.add_record(0x1bc56, num_16bit(80))
# Handle wrapping to "End" when navigating left on the last row.
patch.add_record(0x1bca1, num_16bit(79))
patch.add_record(0x1bca6, num_16bit(86))
# At 0x1bdcc, there's a list of indices in the character grid that should be skipped over. We don't need most of them.
write_with_size_check(patch, 0x1bdcc, 14, num_8bit(28) + num_8bit(58), fill_byte=b'\xff')
# The characters used in the name entry. Control characters go immediately after them.
with open('assets/text/name entry grid.txt', 'r', encoding='shift-jis') as f:
grid_start = 0x1c6b3
data = text_util.encode_text(f.read(), reverse_font_map, newline=b'', terminator=b'')
ctrl_start = grid_start + len(data)
data += text_util.encode_text('Space End', reverse_font_map, newline=b'', terminator=b'')
write_with_size_check(patch, grid_start, 798, data)
# This assembly code sets the height of the area name window. Make it shorter.
patch.add_record(0x1c2af, num_16bit(4))
# Assembly code to render text for save slots. The text used comes from the area names, I think?
# NOP out some instructions that skip the first few bytes of the text.
patch.add_rle_record(0x1c441, b'\xea', 4)
# I rewrote one whole block for simplicity... I think the changes are still basically | |
<gh_stars>10-100
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from .log import PCG_ROOT_LOGGER
try:
from bokeh.plotting import figure
import bokeh.palettes
BOKEH_AVAILABLE = True
except ImportError as ex:
PCG_ROOT_LOGGER.warning(ex)
BOKEH_AVAILABLE = False
try:
from matplotlib import pyplot as plt
from matplotlib import cm
import descartes
MATPLOTLIB_AVAILABLE = True
except ImportError as ex:
PCG_ROOT_LOGGER.warning(ex)
MATPLOTLIB_AVAILABLE = False
import os
import numpy as np
import trimesh
from shapely.geometry import Polygon, MultiPolygon, \
LineString, MultiLineString, Point, MultiPoint
from multiprocessing.pool import ThreadPool
def _get_footprints(inputs):
fp = inputs[0].get_footprint(mesh_type=inputs[1], z_limits=inputs[2])
return fp
def get_axes(fig=None, engine='matplotlib', fig_width=20, fig_height=15):
    """Return a (figure, axes) pair, creating a new figure when none is given."""
    if fig is None:
        fig = get_figure(engine=engine, fig_width=fig_width,
                         fig_height=fig_height)
    return fig, fig.add_subplot(111)
def get_figure(engine='matplotlib', fig_width=20, fig_height=15):
    """Create a new figure with the requested plotting engine.

    engine must be 'matplotlib' or 'bokeh'; anything else raises ValueError.
    Note that matplotlib interprets the size in inches while bokeh uses
    pixels, so sensible values differ per engine.
    """
    if engine == 'matplotlib':
        assert MATPLOTLIB_AVAILABLE, 'matplotlib is not available!'
    elif engine == 'bokeh':
        assert BOKEH_AVAILABLE, 'bokeh is not available!'
    else:
        raise ValueError('Plotting engine <{}> is not available'.format(engine))
    use_matplotlib = engine == 'matplotlib' or not BOKEH_AVAILABLE
    if use_matplotlib:
        fig = plt.figure(figsize=(fig_width, fig_height))
    else:
        # Fixed: bokeh's figure() takes plain width/height keywords;
        # fig_width/fig_height are not valid arguments. This also matches
        # the figure() calls elsewhere in this module.
        fig = figure(width=fig_width, height=fig_height)
    return fig
def plot_shapely_geometry(polygon=None, fig=None, ax=None, alpha=0.5,
        line_width=2, legend=None, color=None, line_style='solid',
        use_matplotlib=True, fig_width=20, fig_height=15, marker_style='o',
        grid=True):
    """Plot a shapely geometry with either matplotlib or bokeh.

    Supports Polygon, MultiPolygon, LineString, MultiLineString, Point and
    MultiPoint (the multi-geometries recurse per member). Returns (fig, ax).

    NOTE(review): on the bokeh path (use_matplotlib=False) no axes object is
    created, so the returned ax is whatever was passed in (usually None) and
    only Polygon/MultiPolygon inputs are drawn — confirm callers expect this.
    """
    if not use_matplotlib:
        assert BOKEH_AVAILABLE, 'Bokeh is not available!'
        if fig is None:
            fig = get_figure(engine='bokeh', fig_width=fig_width, fig_height=fig_height)
        if isinstance(polygon, (Polygon, MultiPolygon)):
            # Flatten the exterior ring plus all interior rings into one
            # vertex array for bokeh's patch() renderer.
            # NOTE(review): MultiPolygon has no .exterior attribute — this
            # branch presumably only ever receives a Polygon; verify.
            vertices = np.concatenate([np.asarray(polygon.exterior)[:, :2]] +
                [np.asarray(r)[:, :2] for r in polygon.interiors])
            fig.patch(
                vertices[:, 0],
                vertices[:, 1],
                alpha=alpha,
                line_width=line_width,
                legend=legend,
                color=color,
                line_dash=line_style)
    else:
        if ax is None:
            fig, ax = get_axes(
                fig=fig,
                engine='matplotlib',
                fig_width=fig_width,
                fig_height=fig_height)
        if isinstance(polygon, Polygon):
            patch = descartes.PolygonPatch(
                polygon,
                facecolor=color,
                edgecolor='black',
                alpha=alpha,
                zorder=2,
                linestyle=line_style,
                label=legend)
            ax.add_patch(patch)
        elif isinstance(polygon, MultiPolygon):
            # Recurse on each member polygon, reusing the same axes.
            for geo in polygon.geoms:
                plot_shapely_geometry(
                    ax=ax,
                    polygon=geo,
                    alpha=alpha,
                    line_width=line_width,
                    legend=legend,
                    color=color,
                    line_style=line_style,
                    use_matplotlib=use_matplotlib)
        elif isinstance(polygon, LineString):
            # Plot coordinates
            x, y = polygon.xy
            ax.plot(x, y, marker=marker_style, color=color, zorder=2)
            # Plot lines
            ax.plot(x, y, color=color, alpha=alpha, linewidth=line_width, zorder=2)
        elif isinstance(polygon, MultiLineString):
            for geo in polygon.geoms:
                plot_shapely_geometry(
                    ax=ax,
                    polygon=geo,
                    alpha=alpha,
                    line_width=line_width,
                    marker_style=marker_style,
                    legend=legend,
                    color=color,
                    line_style=line_style,
                    use_matplotlib=use_matplotlib)
        elif isinstance(polygon, Point):
            # Plot coordinates
            x, y = polygon.xy
            ax.plot(x, y, marker=marker_style, color=color, zorder=2)
        elif isinstance(polygon, MultiPoint):
            for point in polygon.geoms:
                plot_shapely_geometry(
                    ax=ax,
                    polygon=point,
                    alpha=alpha,
                    line_width=line_width,
                    marker_style=marker_style,
                    legend=legend,
                    color=color,
                    use_matplotlib=use_matplotlib)
        ax.axis('equal')
        ax.grid(grid)
    return fig, ax
def plot_workspace(workspace, fig=None, ax=None, fig_width=800, fig_height=400,
        color=None, alpha=0.5, line_width=2, legend=None, line_style='dashed',
        engine='bokeh'):
    """Plot a workspace's polygonal geometry with bokeh or matplotlib.

    Returns (fig, ax); ax is only meaningful on the matplotlib path.
    """
    assert BOKEH_AVAILABLE or MATPLOTLIB_AVAILABLE, \
        'None of the plotting libraries matplotlib or bokeh could be imported'
    use_matplotlib = engine == 'matplotlib' or not BOKEH_AVAILABLE
    if fig is None and ax is None:
        if use_matplotlib:
            # NOTE(review): figsize is in inches; the pixel-oriented defaults
            # (800x400) are likely oversized for matplotlib — confirm.
            fig = plt.figure(figsize=(fig_width, fig_height))
            ax = fig.add_subplot(111)
        else:
            # bokeh's figure() takes plain width/height keywords.
            fig = figure(width=fig_width, height=fig_height)
    elif fig is not None and ax is None:
        if use_matplotlib:
            ax = fig.gca()
    geo = workspace.get_geometry()
    if isinstance(geo, Polygon):
        if use_matplotlib:
            patch = descartes.PolygonPatch(
                geo,
                facecolor=color,
                edgecolor='black',
                alpha=alpha,
                zorder=2,
                linestyle=line_style,
                label=legend)
            ax.add_patch(patch)
        else:
            # Fixed: the previous positional call passed `fig` as the
            # geometry argument (plot_shapely_geometry's first parameter is
            # `polygon`), shifting every subsequent argument. Use keywords
            # and route explicitly to the bokeh path.
            plot_shapely_geometry(polygon=geo, fig=fig, alpha=alpha,
                                  line_width=line_width, legend=legend,
                                  color=color, line_style=line_style,
                                  use_matplotlib=False)
    elif isinstance(geo, MultiPolygon):
        for g in geo.geoms:
            if use_matplotlib:
                patch = descartes.PolygonPatch(
                    g,
                    facecolor=color,
                    edgecolor='black',
                    alpha=alpha,
                    zorder=2,
                    linestyle=line_style,
                    label=legend)
                ax.add_patch(patch)
            else:
                # Fixed: same positional-argument bug as above.
                plot_shapely_geometry(polygon=g, fig=fig, alpha=alpha,
                                      line_width=line_width, legend=legend,
                                      color=color, line_style=line_style,
                                      use_matplotlib=False)
    return fig, ax
def plot_workspaces(workspaces, fig=None, ax=None, fig_width=800, fig_height=400,
        alpha=1, line_width=2, line_style='dashed', engine='bokeh', colormap='viridis'):
    """Plot a dict of workspaces (tag -> workspace), one color per entry.

    `colormap` may be the name of a matplotlib colormap / bokeh palette, or a
    fixed color applied to every workspace. Returns the figure.
    """
    assert BOKEH_AVAILABLE or MATPLOTLIB_AVAILABLE, \
        'None of the plotting libraries matplotlib or bokeh could be imported'
    use_matplotlib = engine == 'matplotlib' or not BOKEH_AVAILABLE
    if fig is None and ax is None:
        if use_matplotlib:
            fig = plt.figure(figsize=(fig_width, fig_height))
            ax = fig.add_subplot(111)
        else:
            # bokeh's figure() takes plain width/height keywords (matches the
            # call in plot_footprints).
            fig = figure(
                width=fig_width,
                height=fig_height,
                match_aspect=True)
    elif fig is not None and ax is None:
        if use_matplotlib:
            ax = fig.gca()
    if use_matplotlib:
        if isinstance(colormap, str):
            colors = cm.get_cmap(colormap)(np.linspace(0, 1, len(workspaces)))
        else:
            # Fixed: `colors` was previously undefined for a non-string
            # colormap, raising NameError below. Mirrors plot_footprints.
            colors = [colormap for _ in range(len(workspaces))]
    else:
        if isinstance(colormap, str):
            colors = getattr(bokeh.palettes, colormap)(len(workspaces))
        else:
            # Fixed: same missing else branch as above.
            colors = [colormap for _ in range(len(workspaces))]
    for tag, color in zip(workspaces.keys(), colors):
        plot_workspace(
            workspaces[tag],
            fig=fig,
            ax=ax,
            color=color,
            alpha=alpha,
            line_width=line_width,
            line_style=line_style,
            legend=tag,
            engine=engine)
    return fig
def plot_footprint(footprint, fig=None, ax=None, fig_width=800, fig_height=400,
        color=None, alpha=0.5, line_width=2, legend=None, line_style='solid', engine='bokeh'):
    """Plot one footprint: a dict of footprints (recursing per entry), a
    Polygon, or a MultiPolygon. Returns the figure.
    """
    assert BOKEH_AVAILABLE or MATPLOTLIB_AVAILABLE, \
        'None of the plotting libraries matplotlib or bokeh could be imported'
    use_matplotlib = engine == 'matplotlib' or not BOKEH_AVAILABLE
    if fig is None and ax is None:
        if use_matplotlib:
            # NOTE(review): figsize is in inches; the pixel-oriented defaults
            # are likely oversized for matplotlib — confirm.
            fig = plt.figure(figsize=(fig_width, fig_height))
            ax = fig.add_subplot(111)
        else:
            # Fixed: bokeh's figure() takes plain width/height keywords;
            # fig_width/fig_height are not valid arguments.
            fig = figure(
                width=fig_width,
                height=fig_height,
                match_aspect=True)
    elif fig is not None and ax is None:
        if use_matplotlib:
            ax = fig.gca()
    if isinstance(footprint, dict):
        for tag in footprint:
            plot_footprint(
                footprint=footprint[tag],
                fig=fig,
                ax=ax,
                fig_width=fig_width,
                fig_height=fig_height,
                color=color,
                alpha=alpha,
                line_width=line_width,
                legend=None,
                line_style=line_style,
                engine=engine)
    else:
        footprints = list()
        if isinstance(footprint, Polygon):
            footprints.append(footprint)
        elif isinstance(footprint, MultiPolygon):
            footprints = [t for t in footprint.geoms]
        if len(footprints) > 0:
            # Attach the legend label only to the first patch so a single
            # legend entry appears per footprint.
            use_legend_label = True
            for fp in footprints:
                if use_matplotlib:
                    patch = descartes.PolygonPatch(
                        fp,
                        facecolor=color,
                        edgecolor='black',
                        alpha=alpha,
                        zorder=2,
                        linestyle=line_style,
                        label=legend if use_legend_label else None)
                    ax.add_patch(patch)
                    use_legend_label = False
                else:
                    # Fixed: the previous positional call passed `fig` as the
                    # geometry argument; use keywords and route explicitly to
                    # the bokeh path.
                    plot_shapely_geometry(polygon=fp, fig=fig, alpha=alpha,
                                          line_width=line_width, legend=legend,
                                          color=color, line_style=line_style,
                                          use_matplotlib=False)
    return fig
def plot_footprints(models, fig=None, ax=None, mesh_type='visual', fig_width=800, fig_height=400,
        alpha=0, line_width=2, n_processes=4, line_style='solid', engine='bokeh', colormap='magma',
        grid=True, ignore_ground_plane=True, z_limits=None, dpi=200):
    """Compute footprints for all models on a thread pool and plot each one
    with a per-model color sampled from `colormap`. Returns the figure.
    """
    assert n_processes > 0, 'Number of footprint calculation processes '\
        'must be greater than zero'
    assert BOKEH_AVAILABLE or MATPLOTLIB_AVAILABLE, \
        'None of the plotting libraries matplotlib or bokeh could be imported'
    use_matplotlib = engine == 'matplotlib' or not BOKEH_AVAILABLE
    if fig is None and ax is None:
        if use_matplotlib:
            fig = plt.figure(figsize=(fig_width, fig_height), dpi=dpi)
            ax = fig.add_subplot(111)
        else:
            fig = figure(
                width=fig_width,
                height=fig_height,
                match_aspect=True)
    elif fig is not None and ax is None:
        if use_matplotlib:
            ax = fig.gca()
    # Footprint extraction is the expensive part; fan it out over threads.
    pool = ThreadPool(n_processes)
    if ignore_ground_plane:
        footprints = pool.map(
            _get_footprints,
            [(models[tag], mesh_type, z_limits) for tag in models if not models[tag].is_ground_plane])
        model_names = [tag for tag in models if not models[tag].is_ground_plane]
    else:
        footprints = pool.map(
            _get_footprints,
            [(models[tag], mesh_type, z_limits) for tag in models])
        model_names = [tag for tag in models]
    # One color per model: sampled from a named colormap/palette, or a fixed
    # color repeated for every model.
    if use_matplotlib:
        if isinstance(colormap, str):
            colors = cm.get_cmap(colormap)(np.linspace(0, 1, len(model_names)))
        else:
            colors = [colormap for _ in range(len(model_names))]
    else:
        if isinstance(colormap, str):
            colors = getattr(bokeh.palettes, colormap)(len(model_names))
        else:
            colors = [colormap for _ in range(len(model_names))]
    for fp, color, model_name in zip(footprints, colors, model_names):
        PCG_ROOT_LOGGER.info('Plotting footprint from model <{}>, polygons={}'.format(
            model_name, fp))
        # Each model's footprint is a dict of tag -> geometry.
        for tag in fp:
            plot_footprint(
                fp[tag],
                fig=fig,
                ax=ax,
                color=color,
                alpha=alpha,
                line_width=line_width,
                legend=model_name,
                line_style=line_style,
                engine=engine)
    if use_matplotlib:
        ax.axis('equal')
        ax.grid(grid)
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        ax.autoscale(enable=True, axis='both', tight=True)
        # Mark each model's origin.
        ax.scatter(
            [models[t].pose.x for t in model_names],
            [models[t].pose.y for t in model_names],
            marker='o')
    else:
        fig.circle_cross(
            [models[t].pose.x for t in model_names],
            [models[t].pose.y for t in model_names],
            # NOTE(review): `color` is the leftover loop variable — only the
            # last model's color is used here, and this raises NameError when
            # there are no models to plot; confirm intent.
            fill_color=color,
            line_color='black')
        fig.legend.click_policy = 'hide'
    return fig
def create_scene(models, mesh_type='collision', add_pseudo_color=False, alpha=0.5):
    """Build a trimesh scene containing the meshes of the given model or
    list of models, with an axis marker at the origin.

    When add_pseudo_color is set, each mesh gets a random face color with
    the requested alpha.
    """
    scene = trimesh.scene.Scene()
    scene.add_geometry(trimesh.creation.axis())
    if isinstance(models, list):
        meshes = []
        for model in models:
            try:
                meshes = meshes + model.get_meshes(mesh_type=mesh_type)
            except RuntimeWarning as ex:
                # Skip models whose meshes cannot be retrieved.
                PCG_ROOT_LOGGER.error('Cannot display {}, message={}'.format(model.name, ex))
    else:
        meshes = models.get_meshes(mesh_type=mesh_type)
    if len(meshes) > 0:
        if add_pseudo_color:
            for mesh in meshes:
                mesh.visual.face_colors = np.hstack((255. * np.random.random(3), [alpha * 255]))
        scene.add_geometry(meshes)
    return scene
def plot_occupancy_grid(models, occupied_thresh=0.65,
free_thresh=0.196, occupied_color=[0, 0, 0], free_color=[1, 1, 1],
unavailable_color=[0.5, 0.5, 0.5], output_folder='/tmp',
output_filename='map.pgm', static_models_only=True, with_ground_plane=True,
z_levels=None, x_limits=None, y_limits=None, z_limits=None, step_x=0.01,
step_y=0.01, n_processes=None, fig_size=(5, 5), fig_size_unit='cm', dpi=200,
axis_x_limits=None, axis_y_limits=None, exclude_contains=None,
mesh_type='collision'):
if not MATPLOTLIB_AVAILABLE:
PCG_ROOT_LOGGER.error('Matplotlib is not available')
return None
if mesh_type not in ['collision', 'visual']:
PCG_ROOT_LOGGER.error('Mesh type must be either collision or visual, provided={}'.format(
mesh_type))
return None
else:
PCG_ROOT_LOGGER.info('Generating grid map from {} | |
import uuid
from contextlib import contextmanager
from django.test import TestCase
from django.utils.dateparse import parse_datetime
from celery import states
from celery.exceptions import Ignore
from mock import patch
from casexml.apps.case.mock import CaseFactory, CaseStructure
from casexml.apps.case.tests.util import delete_all_cases
from corehq.apps.case_importer import exceptions
from corehq.apps.case_importer.do_import import do_import
from corehq.apps.case_importer.tasks import bulk_import_async
from corehq.apps.case_importer.tracking.models import CaseUploadRecord
from corehq.apps.case_importer.util import ImporterConfig, WorksheetWrapper, \
get_interned_exception
from corehq.apps.commtrack.tests.util import make_loc
from corehq.apps.data_dictionary.tests.utils import setup_data_dictionary
from corehq.apps.domain.shortcuts import create_domain
from corehq.apps.enterprise.tests.utils import create_enterprise_permissions
from corehq.apps.groups.models import Group
from corehq.apps.locations.models import LocationType
from corehq.apps.locations.tests.util import restrict_user_by_location
from corehq.apps.users.models import CommCareUser, WebUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.util.test_utils import flag_enabled, flag_disabled
from corehq.util.timezones.conversions import PhoneTime
from corehq.util.workbook_reading import make_worksheet
class ImporterTest(TestCase):
    def setUp(self):
        """Create the test domain, an admin web user, enterprise-linked
        subdomains, and a case factory; start from an empty case store."""
        super(ImporterTest, self).setUp()
        self.domain_obj = create_domain("importer-test")
        self.domain = self.domain_obj.name
        self.default_case_type = 'importer-test-casetype'
        # Admin web user for the test domain; saved after membership is added.
        self.couch_user = WebUser.create(None, "test", "foobar", None, None)
        self.couch_user.add_domain_membership(self.domain, is_admin=True)
        self.couch_user.save()
        # Extra domains to exercise enterprise permission scoping:
        # subdomain1/2 are granted access, ignored-domain is explicitly not.
        self.subdomain1 = create_domain('subdomain1')
        self.subdomain2 = create_domain('subdomain2')
        self.ignored_domain = create_domain('ignored-domain')
        create_enterprise_permissions(self.couch_user.username, self.domain,
                                      [self.subdomain1.name, self.subdomain2.name],
                                      [self.ignored_domain.name])
        self.accessor = CaseAccessors(self.domain)
        # Factory defaults every created case to the test case type.
        self.factory = CaseFactory(domain=self.domain, case_defaults={
            'case_type': self.default_case_type,
        })
        # Tests assume an empty case store.
        delete_all_cases()
    def tearDown(self):
        """Remove the web user and every domain created in setUp, then run
        the base-class teardown."""
        self.couch_user.delete(self.domain, deleted_by=None)
        self.domain_obj.delete()
        self.subdomain1.delete()
        self.subdomain2.delete()
        self.ignored_domain.delete()
        super(ImporterTest, self).tearDown()
def _config(self, col_names, search_column=None, case_type=None,
search_field='case_id', create_new_cases=True):
return ImporterConfig(
couch_user_id=self.couch_user._id,
case_type=case_type or self.default_case_type,
excel_fields=col_names,
case_fields=[''] * len(col_names),
custom_fields=col_names,
search_column=search_column or col_names[0],
search_field=search_field,
create_new_cases=create_new_cases,
)
    @patch('corehq.apps.case_importer.tasks.bulk_import_async.update_state')
    def testImportFileMissing(self, update_state):
        """Importing an upload whose file does not exist fails the task with
        the session-expired message and creates no cases."""
        # by using a made up upload_id, we ensure it's not referencing any real file
        case_upload = CaseUploadRecord(upload_id=str(uuid.uuid4()), task_id=str(uuid.uuid4()))
        case_upload.save()
        res = bulk_import_async.delay(self._config(['anything']).to_json(), self.domain, case_upload.upload_id)
        # The task raises Ignore so celery does not overwrite the failure
        # state set through the mocked update_state.
        self.assertIsInstance(res.result, Ignore)
        update_state.assert_called_with(
            state=states.FAILURE,
            meta=get_interned_exception('Sorry, your session has expired. Please start over and try again.'))
        self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
def testImportBasic(self):
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
['case_id-0', 'age-0', 'sex-0', 'location-0'],
['case_id-1', 'age-1', 'sex-1', 'location-1'],
['case_id-2', 'age-2', 'sex-2', 'location-2'],
['case_id-3', 'age-3', 'sex-3', 'location-3'],
['case_id-4', 'age-4', 'sex-4', 'location-4'],
)
res = do_import(file, config, self.domain)
self.assertEqual(5, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertFalse(res['errors'])
self.assertEqual(1, res['num_chunks'])
case_ids = self.accessor.get_case_ids_in_domain()
cases = list(self.accessor.get_cases(case_ids))
self.assertEqual(5, len(cases))
properties_seen = set()
for case in cases:
self.assertEqual(self.couch_user._id, case.user_id)
self.assertEqual(self.couch_user._id, case.owner_id)
self.assertEqual(self.default_case_type, case.type)
for prop in ['age', 'sex', 'location']:
self.assertTrue(prop in case.get_case_property(prop))
self.assertFalse(case.get_case_property(prop) in properties_seen)
properties_seen.add(case.get_case_property(prop))
def testCreateCasesWithDuplicateExternalIds(self):
config = self._config(['case_id', 'age', 'sex', 'location', 'external_id'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location', 'external_id'],
['case_id-0', 'age-0', 'sex-0', 'location-0', 'external_id-0'],
['case_id-1', 'age-1', 'sex-1', 'location-1', 'external_id-0'],
['case_id-2', 'age-2', 'sex-2', 'location-2', 'external_id-1'],
)
res = do_import(file, config, self.domain)
self.assertEqual(3, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertFalse(res['errors'])
case_ids = self.accessor.get_case_ids_in_domain()
self.assertItemsEqual(
[case.external_id for case in self.accessor.get_cases(case_ids)],
['external_id-0', 'external_id-0', 'external_id-1']
)
def testImportNamedColumns(self):
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
['case_id-0', 'age-0', 'sex-0', 'location-0'],
['case_id-1', 'age-1', 'sex-1', 'location-1'],
['case_id-2', 'age-2', 'sex-2', 'location-2'],
['case_id-3', 'age-3', 'sex-3', 'location-3'],
)
res = do_import(file, config, self.domain)
self.assertEqual(4, res['created_count'])
self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))
def testImportTrailingWhitespace(self):
cols = ['case_id', 'age', 'sex\xa0', 'location']
config = self._config(cols)
file = make_worksheet_wrapper(
['case_id', 'age', 'sex\xa0', 'location'],
['case_id-0', 'age-0', 'sex\xa0-0', 'location-0'],
)
res = do_import(file, config, self.domain)
self.assertEqual(1, res['created_count'])
case_ids = self.accessor.get_case_ids_in_domain()
self.assertEqual(1, len(case_ids))
case = self.accessor.get_case(case_ids[0])
self.assertTrue(bool(case.get_case_property('sex'))) # make sure the value also got properly set
def testCaseIdMatching(self):
# bootstrap a stub case
[case] = self.factory.create_or_update_case(CaseStructure(attrs={
'create': True,
'update': {'importer_test_prop': 'foo'},
}))
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
[case.case_id, 'age-0', 'sex-0', 'location-0'],
[case.case_id, 'age-1', 'sex-1', 'location-1'],
[case.case_id, 'age-2', 'sex-2', 'location-2'],
)
res = do_import(file, config, self.domain)
self.assertEqual(0, res['created_count'])
self.assertEqual(3, res['match_count'])
self.assertFalse(res['errors'])
# shouldn't create any more cases, just the one
case_ids = self.accessor.get_case_ids_in_domain()
self.assertEqual(1, len(case_ids))
[case] = self.accessor.get_cases(case_ids)
for prop in ['age', 'sex', 'location']:
self.assertTrue(prop in case.get_case_property(prop))
# shouldn't touch existing properties
self.assertEqual('foo', case.get_case_property('importer_test_prop'))
def testCaseLookupTypeCheck(self):
[case] = self.factory.create_or_update_case(CaseStructure(attrs={
'create': True,
'case_type': 'nonmatch-type',
}))
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
[case.case_id, 'age-0', 'sex-0', 'location-0'],
[case.case_id, 'age-1', 'sex-1', 'location-1'],
[case.case_id, 'age-2', 'sex-2', 'location-2'],
)
res = do_import(file, config, self.domain)
# because the type is wrong these shouldn't match
self.assertEqual(3, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertEqual(4, len(self.accessor.get_case_ids_in_domain()))
def testCaseLookupDomainCheck(self):
self.factory.domain = 'wrong-domain'
[case] = self.factory.create_or_update_case(CaseStructure(attrs={
'create': True,
}))
self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
[case.case_id, 'age-0', 'sex-0', 'location-0'],
[case.case_id, 'age-1', 'sex-1', 'location-1'],
[case.case_id, 'age-2', 'sex-2', 'location-2'],
)
res = do_import(file, config, self.domain)
# because the domain is wrong these shouldn't match
self.assertEqual(3, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertEqual(3, len(self.accessor.get_case_ids_in_domain()))
def testExternalIdMatching(self):
# bootstrap a stub case
external_id = 'importer-test-external-id'
[case] = self.factory.create_or_update_case(CaseStructure(
attrs={
'create': True,
'external_id': external_id,
}
))
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
headers = ['external_id', 'age', 'sex', 'location']
config = self._config(headers, search_field='external_id')
file = make_worksheet_wrapper(
['external_id', 'age', 'sex', 'location'],
['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
)
res = do_import(file, config, self.domain)
self.assertEqual(0, res['created_count'])
self.assertEqual(3, res['match_count'])
self.assertFalse(res['errors'])
# shouldn't create any more cases, just the one
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
def test_external_id_matching_on_create_with_custom_column_name(self):
headers = ['id_column', 'age', 'sex', 'location']
external_id = 'external-id-test'
config = self._config(headers[1:], search_column='id_column', search_field='external_id')
file = make_worksheet_wrapper(
['id_column', 'age', 'sex', 'location'],
['external-id-test', 'age-0', 'sex-0', 'location-0'],
['external-id-test', 'age-1', 'sex-1', 'location-1'],
)
res = do_import(file, config, self.domain)
self.assertFalse(res['errors'])
self.assertEqual(1, res['created_count'])
self.assertEqual(1, res['match_count'])
case_ids = self.accessor.get_case_ids_in_domain()
self.assertEqual(1, len(case_ids))
case = self.accessor.get_case(case_ids[0])
self.assertEqual(external_id, case.external_id)
def testNoCreateNew(self):
config = self._config(['case_id', 'age', 'sex', 'location'], create_new_cases=False)
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
['case_id-0', 'age-0', 'sex-0', 'location-0'],
['case_id-1', 'age-1', 'sex-1', 'location-1'],
['case_id-2', 'age-2', 'sex-2', 'location-2'],
['case_id-3', 'age-3', 'sex-3', 'location-3'],
['case_id-4', 'age-4', 'sex-4', 'location-4'],
)
res = do_import(file, config, self.domain)
# no matching and no create new set - should do nothing
self.assertEqual(0, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
def testBlankRows(self):
# don't create new cases for rows left blank
config = self._config(['case_id', 'age', 'sex', 'location'], create_new_cases=True)
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
[None, None, None, None],
['', '', '', ''],
)
res = do_import(file, config, self.domain)
# no matching and no create new set - should do nothing
self.assertEqual(0, res['created_count'])
self.assertEqual(0, res['match_count'])
self.assertEqual(0, len(self.accessor.get_case_ids_in_domain()))
@patch('corehq.apps.case_importer.do_import.CASEBLOCK_CHUNKSIZE', 2)
def testBasicChunking(self):
config = self._config(['case_id', 'age', 'sex', 'location'])
file = make_worksheet_wrapper(
['case_id', 'age', 'sex', 'location'],
['case_id-0', 'age-0', 'sex-0', 'location-0'],
['case_id-1', 'age-1', 'sex-1', 'location-1'],
['case_id-2', 'age-2', 'sex-2', 'location-2'],
['case_id-3', 'age-3', 'sex-3', 'location-3'],
['case_id-4', 'age-4', 'sex-4', 'location-4'],
)
res = do_import(file, config, self.domain)
# 5 cases in chunks of 2 = 3 chunks
self.assertEqual(3, res['num_chunks'])
self.assertEqual(5, res['created_count'])
self.assertEqual(5, len(self.accessor.get_case_ids_in_domain()))
def testExternalIdChunking(self):
# bootstrap a stub case
external_id = 'importer-test-external-id'
headers = ['external_id', 'age', 'sex', 'location']
config = self._config(headers, search_field='external_id')
file = make_worksheet_wrapper(
['external_id', 'age', 'sex', 'location'],
['importer-test-external-id', 'age-0', 'sex-0', 'location-0'],
['importer-test-external-id', 'age-1', 'sex-1', 'location-1'],
['importer-test-external-id', 'age-2', 'sex-2', 'location-2'],
)
# the first one should create the case, and the remaining two should update it
res = do_import(file, config, self.domain)
self.assertEqual(1, res['created_count'])
self.assertEqual(2, res['match_count'])
self.assertFalse(res['errors'])
self.assertEqual(2, res['num_chunks']) # the lookup causes an extra chunk
# should just create the one case
case_ids = self.accessor.get_case_ids_in_domain()
self.assertEqual(1, len(case_ids))
[case] = self.accessor.get_cases(case_ids)
self.assertEqual(external_id, case.external_id)
for prop in ['age', 'sex', 'location']:
self.assertTrue(prop in case.get_case_property(prop))
def testParentCase(self):
headers = ['parent_id', 'name', 'case_id']
config = self._config(headers, create_new_cases=True, search_column='case_id')
rows = 3
[parent_case] = self.factory.create_or_update_case(CaseStructure(attrs={'create': True}))
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
file = make_worksheet_wrapper(
['parent_id', 'name', 'case_id'],
[parent_case.case_id, 'name-0', 'case_id-0'],
[parent_case.case_id, 'name-1', 'case_id-1'],
[parent_case.case_id, 'name-2', 'case_id-2'],
)
# Should successfully match on `rows` cases
res = do_import(file, config, self.domain)
self.assertEqual(rows, res['created_count'])
# Should create child cases
self.assertEqual(len(self.accessor.get_reverse_indexed_cases([parent_case.case_id])), 3)
self.assertEqual(self.accessor.get_extension_case_ids([parent_case.case_id]), [])
file_missing = make_worksheet_wrapper(
['parent_id', 'name', 'case_id'],
['parent_id-0', 'name-0', 'case_id-0'],
['parent_id-1', 'name-1', 'case_id-1'],
['parent_id-2', 'name-2', 'case_id-2'],
)
# Should be unable to find parent case on `rows` cases
res = do_import(file_missing, config, self.domain)
error_column_name = 'parent_id'
self.assertEqual(rows,
len(res['errors'][exceptions.InvalidParentId.title][error_column_name]['rows']),
"All cases should have missing parent")
def testExtensionCase(self):
headers = ['parent_id', 'name', 'case_id', 'parent_relationship_type', 'parent_identifier']
config = self._config(headers, create_new_cases=True, search_column='case_id')
[parent_case] = self.factory.create_or_update_case(CaseStructure(attrs={'create': True}))
self.assertEqual(1, len(self.accessor.get_case_ids_in_domain()))
file = make_worksheet_wrapper(
headers,
[parent_case.case_id, 'name-0', 'case_id-0', 'extension', 'host'],
[parent_case.case_id, 'name-1', 'case_id-1', 'extension', 'mother'],
[parent_case.case_id, 'name-2', 'case_id-2', 'child', 'parent'],
)
# Should successfully match on `rows` cases
res = do_import(file, config, self.domain)
self.assertEqual(res['created_count'], 3)
# Of the 3, 2 should be extension cases
extension_case_ids = self.accessor.get_extension_case_ids([parent_case.case_id])
self.assertEqual(len(extension_case_ids), 2)
extension_cases = self.accessor.get_cases(extension_case_ids)
# Check that identifier is set correctly
self.assertEqual(
{'host', 'mother'},
{
c.indices[0].identifier
for c in extension_cases
}
)
@flag_enabled('DOMAIN_PERMISSIONS_MIRROR')
def test_multiple_domain_case_import(self):
headers_with_domain = ['case_id', 'name', 'artist', 'domain']
config_1 = self._config(headers_with_domain, create_new_cases=True, search_column='case_id')
case_with_domain_file = make_worksheet_wrapper(
['case_id', 'name', 'artist', 'domain'],
['', 'name-0', 'artist-0', self.domain],
['', 'name-1', 'artist-1', self.subdomain1.name],
['', 'name-2', 'artist-2', self.subdomain2.name],
['', 'name-3', 'artist-3', self.domain],
['', 'name-4', 'artist-4', self.domain],
['', 'name-5', 'artist-5', | |
"""
This module contains a class for discrete
1-dimensional exponential families. The main
uses for this class are exact (post-selection)
hypothesis tests and confidence intervals.
"""
import numpy as np
import warnings
from ..truncated import find_root
def crit_func(test_statistic, left_cut, right_cut):
    """
    Generic randomized critical function for an interval test.

    Evaluates

    ((test_statistic < CL) + (test_statistic > CR) +
     gammaL * (test_statistic == CL) +
     gammaR * (test_statistic == CR))

    where (CL, gammaL) = left_cut and (CR, gammaR) = right_cut.

    Parameters
    ----------
    test_statistic : np.float
        Observed value of test statistic.
    left_cut : (float, float)
        (CL, gammaL): left endpoint and randomization weight there
        (should be in [0,1]).
    right_cut : (float, float)
        (CR, gammaR): right endpoint and randomization weight there
        (should be in [0,1]).

    Returns
    -------
    decision : np.float
    """
    CL, gammaL = left_cut
    CR, gammaR = right_cut
    # Full weight outside the interval ...
    decision = ((test_statistic < CL) + (test_statistic > CR)) * 1.
    # ... plus partial (randomized) weight exactly on each boundary.
    if gammaL != 0:
        decision = decision + gammaL * (test_statistic == CL)
    if gammaR != 0:
        decision = decision + gammaR * (test_statistic == CR)
    return decision
class discrete_family(object):
    def __init__(self, sufficient_stat, weights):
        r"""
        A discrete 1-dimensional
        exponential family with reference measure $\sum_j w_j \delta_{X_j}$
        and sufficient statistic `sufficient_stat`. For any $\theta$, the distribution
        is
        .. math::
            P_{\theta} = \sum_{j} e^{\theta X_j - \Lambda(\theta)} w_j \delta_{X_j}
        where
        .. math::
            \Lambda(\theta) = \log \left(\sum_j w_j e^{\theta X_j} \right).
        Parameters
        ----------
        sufficient_stat : `np.float((n))`
        weights : `np.float(n)`
        Notes
        -----
        The weights are normalized to sum to 1.
        """
        # Pair (x_j, w_j) and sort by the sufficient statistic so that the
        # cdf/ccdf computations can rely on increasing order of the support.
        xw = np.array(sorted(zip(sufficient_stat, weights)))
        self._x = xw[:,0]
        self._w = xw[:,1]
        # NOTE: log-weights are taken *before* normalization; the partition
        # function computed in the `theta` setter absorbs the constant.
        self._lw = np.log(xw[:,1])
        self._w /= self._w.sum() # make sure they are a pmf
        self.n = len(xw)
        # np.nan sentinel: nan never compares equal, so the first assignment
        # to `theta` always triggers a pdf/partition computation.
        self._theta = np.nan
    @property
    def theta(self):
        """
        The natural parameter of the family.
        Assigning a new value triggers recomputation of the pdf and
        partition function (see the setter).
        """
        return self._theta
    @theta.setter
    def theta(self, _theta):
        # Recompute only when theta actually changes (the np.nan sentinel
        # set in __init__ never compares equal, so the first set always runs).
        if _theta != self._theta:
            _thetaX = _theta * self.sufficient_stat + self._lw
            _largest = _thetaX.max() - 5 # try to avoid over/under flow, 5 seems arbitrary
            # Exponentiate on a shifted scale for numerical stability; the
            # shift is undone on the partition value below.
            _exp_thetaX = np.exp(_thetaX - _largest)
            _prod = _exp_thetaX
            self._partition = np.sum(_prod)
            self._pdf = _prod / self._partition
            self._partition *= np.exp(_largest)
        self._theta = _theta
    @property
    def partition(self):
        r"""
        Partition function at `self.theta`:
        .. math::
            \sum_j e^{\theta X_j} w_j
        Implicitly returns None if `theta` has never been assigned
        (the cached value only exists after the first assignment).
        """
        if hasattr(self, "_partition"):
            return self._partition
    @property
    def sufficient_stat(self):
        """
        Sufficient statistics of the exponential family,
        sorted in increasing order (see `__init__`).
        """
        return self._x
    @property
    def weights(self):
        """
        Weights of the exponential family, normalized to sum to 1.
        """
        return self._w
    def pdf(self, theta):
        r"""
        Density of $P_{\theta}$ with respect to $P_0$.
        Parameters
        ----------
        theta : float
            Natural parameter.
        Returns
        -------
        pdf : np.float
            Probability vector aligned with `self.sufficient_stat`.
        """
        self.theta = theta # compute partition if necessary
        return self._pdf
def cdf(self, theta, x=None, gamma=1):
r"""
The cumulative distribution function of $P_{\theta}$ with
weight `gamma` at `x`
.. math::
P_{\theta}(X < x) + \gamma * P_{\theta}(X = x)
Parameters
----------
theta : float
Natural parameter.
x : float (optional)
Where to evaluate CDF.
gamma : float(optional)
Weight given at `x`.
Returns
-------
cdf : np.float
"""
pdf = self.pdf(theta)
if x is None:
return np.cumsum(pdf) - pdf * (1 - gamma)
else:
tr = np.sum(pdf * (self.sufficient_stat < x))
if x in self.sufficient_stat:
tr += gamma * np.sum(pdf[np.where(self.sufficient_stat == x)])
return tr
def ccdf(self, theta, x=None, gamma=0, return_unnorm=False):
r"""
The complementary cumulative distribution function
(i.e. survival function) of $P_{\theta}$ with
weight `gamma` at `x`
.. math::
P_{\theta}(X > x) + \gamma * P_{\theta}(X = x)
Parameters
----------
theta : float
Natural parameter.
x : float (optional)
Where to evaluate CCDF.
gamma : float(optional)
Weight given at `x`.
Returns
-------
ccdf : np.float
"""
pdf = self.pdf(theta)
if x is None:
return np.cumsum(pdf[::-1])[::-1] - pdf * (1 - gamma)
else:
tr = np.sum(pdf * (self.sufficient_stat > x))
if x in self.sufficient_stat:
tr += gamma * np.sum(pdf[np.where(self.sufficient_stat == x)])
return tr
    def E(self, theta, func):
        r"""
        Expectation of `func` under $P_{\theta}$.
        Parameters
        ----------
        theta : float
            Natural parameter.
        func : callable
            Assumed to be vectorized.
        Returns
        -------
        E : np.float
        """
        return (func(self.sufficient_stat) * self.pdf(theta)).sum()
def Var(self, theta, func):
r"""
Variance of `func` under $P_{\theta}$
Parameters
----------
theta : float
Natural parameter.
func : callable
Assumed to be vectorized.
Returns
-------
var : np.float
"""
mu = self.E(theta, func)
return self.E(theta, lambda x: (func(x)-mu)**2)
def Cov(self, theta, func1, func2):
r"""
Covariance of `func1` and `func2` under $P_{\theta}$
Parameters
----------
theta : float
Natural parameter.
func1, func2 : callable
Assumed to be vectorized.
Returns
-------
cov : np.float
"""
mu1 = self.E(theta, func1)
mu2 = self.E(theta, func2)
return self.E(theta, lambda x: (func1(x)-mu1)*(func2(x)-mu2))
    def two_sided_acceptance(self, theta, alpha=0.05, tol=1e-6):
        r"""
        Compute cutoffs of UMPU two-sided test.
        Parameters
        ----------
        theta : float
            Natural parameter.
        alpha : float (optional)
            Size of two-sided test.
        tol : float
            Tolerance for root-finding.
        Returns
        -------
        left_cut : (float, float)
            Boundary and randomization weight for left endpoint.
        right_cut : (float, float)
            Boundary and randomization weight for right endpoint.
        """
        # Cuts are cached: only recompute for a theta different from the
        # cached self._theta.
        # NOTE(review): if theta == self._theta before any cuts were ever
        # computed, the return below would raise AttributeError -- confirm
        # callers always pass a fresh theta first.
        if theta != self._theta:
            # Left cut: the largest support point whose criterion is still
            # non-negative, with a randomization weight found by root
            # search; the right cut is then derived from the left one.
            CL = np.max([x for x in self.sufficient_stat if self._critCovFromLeft(theta, (x, 0), alpha) >= 0])
            gammaL = find_root(lambda x: self._critCovFromLeft(theta, (CL, x), alpha), 0., 0., 1., tol)
            CR, gammaR = self._rightCutFromLeft(theta, (CL, gammaL), alpha)
            self._left_cut, self._right_cut = (CL, gammaL), (CR, gammaR)
        return self._left_cut, self._right_cut
def two_sided_test(self, theta0, observed, alpha=0.05, randomize=True, auxVar=None):
r"""
Perform UMPU two-sided test.
Parameters
----------
theta0 : float
Natural parameter under null hypothesis.
observed : float
Observed sufficient statistic.
alpha : float (optional)
Size of two-sided test.
randomize : bool
Perform the randomized test (or conservative test).
auxVar : [None, float]
If randomizing and not None, use this
as the random uniform variate.
Returns
-------
decision : np.bool
Is the null hypothesis $H_0:\theta=\theta_0$ rejected?
Notes
-----
We need an auxiliary uniform variable to carry out the randomized test.
Larger auxVar corresponds to x being slightly "larger." It can be passed in,
or chosen at random. If randomize=False, we get a conservative test.
"""
if randomize:
if auxVar is None:
auxVar = np.random.random()
rejLeft = self._test2RejectsLeft(theta0, observed, alpha, auxVar)
rejRight = self._test2RejectsRight(theta0, observed, alpha, auxVar)
else:
rejLeft = self._test2RejectsLeft(theta0, observed, alpha)
rejRight = self._test2RejectsRight(theta0, observed, alpha)
return rejLeft or rejRight
def one_sided_test(self, theta0, observed, alternative='greater', alpha=0.05, randomize=True, auxVar=None):
r"""
Perform UMPU one-sided test.
Parameters
----------
theta0 : float
Natural parameter under null hypothesis.
observed : float
Observed sufficient statistic.
alternative : str
One of ['greater', 'less']
alpha : float (optional)
Size of two-sided test.
randomize : bool
Perform the randomized test (or conservative test).
auxVar : [None, float]
If randomizing and not None, use this
as the random uniform variate.
Returns
-------
decision : np.bool
Is the null hypothesis $H_0:\theta=\theta_0$ rejected?
Notes
-----
We need an auxiliary uniform variable to carry out the randomized test.
Larger auxVar corresponds to x being slightly "larger." It can be passed in,
or chosen at random. If randomize=False, we get a conservative test.
"""
if alternative not in ['greater', 'less']:
raise ValueError('alternative must be one of ["greater", "less"]')
self.theta = theta0
if randomize:
if auxVar is None:
auxVar = np.random.random()
if alternative == 'greater':
return self.ccdf(theta0, observed, gamma=auxVar) < alpha
else:
return self.cdf(theta0, observed, gamma=auxVar) < alpha
else:
if alternative == 'greater':
return self.ccdf(theta0, observed) < alpha
else:
return self.cdf(theta0, observed) < alpha
def interval(self, observed, alpha=0.05, randomize=True, auxVar=None, tol=1e-6):
"""
Form UMAU confidence interval.
Parameters
----------
observed : float
Observed sufficient statistic.
alpha : float (optional)
Size of two-sided test.
randomize : bool
Perform the randomized test (or conservative test).
auxVar : [None, float]
If randomizing and not None, use this
as the random uniform variate.
Returns
-------
lower, upper : float
Limits of confidence interval.
"""
if randomize:
if auxVar is None:
auxVar = np.random.random()
upper = self._inter2Upper(observed, auxVar, alpha, tol)
lower = self._inter2Lower(observed, auxVar, alpha, tol)
else:
upper = self._inter2Upper(observed, 1., alpha, tol)
lower = self._inter2Lower(observed, 0., alpha, tol)
return lower, upper
def equal_tailed_interval(self, observed, alpha=0.05, randomize=True, auxVar=None, | |
# coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class ServicesApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is supplied.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
def create_services_device_groups_device_group_by_device_group_name(self, device_group_name, **kwargs): # noqa: E501
"""Start a device-group's services. # noqa: E501
Start services of a device group. Use this to start stopped services. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_services_device_groups_device_group_by_device_group_name(device_group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str device_group_name: Name of device group (required)
:param str authorization: authentication header object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, **kwargs) # noqa: E501
else:
(data) = self.create_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, **kwargs) # noqa: E501
return data
    def create_services_device_groups_device_group_by_device_group_name_with_http_info(self, device_group_name, **kwargs):  # noqa: E501
        """Start a device-group's services.  # noqa: E501
        Start services of a device group. Use this to start stopped services.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str device_group_name: Name of device group (required)
        :param str authorization: authentication header object
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['device_group_name', 'authorization']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the arguments; unknown keywords are rejected,
        # known ones are flattened into the params dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_services_device_groups_device_group_by_device_group_name" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'device_group_name' is set
        if ('device_group_name' not in params or
                params['device_group_name'] is None):
            raise ValueError("Missing the required parameter `device_group_name` when calling `create_services_device_groups_device_group_by_device_group_name`")  # noqa: E501
        collection_formats = {}
        # The device group name is interpolated into the URL path below.
        path_params = {}
        if 'device_group_name' in params:
            path_params['device_group_name'] = params['device_group_name']  # noqa: E501
        query_params = []
        # The optional authorization value travels as an HTTP header.
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/octet-stream'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'multipart/form-data'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        return self.api_client.call_api(
            '/services/device-group/{device_group_name}/', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def create_services_network_group_by_network_group_name(self, network_group_name, **kwargs): # noqa: E501
"""Start a network-group's services. # noqa: E501
Start services of a network group. Use this to start stopped services. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_services_network_group_by_network_group_name(network_group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str network_group_name: Name of network group (required)
:param str authorization: authentication header object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_services_network_group_by_network_group_name_with_http_info(network_group_name, **kwargs) # noqa: E501
else:
(data) = self.create_services_network_group_by_network_group_name_with_http_info(network_group_name, **kwargs) # noqa: E501
return data
    def create_services_network_group_by_network_group_name_with_http_info(self, network_group_name, **kwargs):  # noqa: E501
        """Start a network-group's services.  # noqa: E501
        Start services of a network group. Use this to start stopped services.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_services_network_group_by_network_group_name_with_http_info(network_group_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str network_group_name: Name of network group (required)
        :param str authorization: authentication header object
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['network_group_name', 'authorization']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the arguments; unknown keywords are rejected,
        # known ones are flattened into the params dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_services_network_group_by_network_group_name" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'network_group_name' is set
        if ('network_group_name' not in params or
                params['network_group_name'] is None):
            raise ValueError("Missing the required parameter `network_group_name` when calling `create_services_network_group_by_network_group_name`")  # noqa: E501
        collection_formats = {}
        # The network group name is interpolated into the URL path below.
        path_params = {}
        if 'network_group_name' in params:
            path_params['network_group_name'] = params['network_group_name']  # noqa: E501
        query_params = []
        # The optional authorization value travels as an HTTP header.
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/octet-stream'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'multipart/form-data'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        return self.api_client.call_api(
            '/services/network-group/{network_group_name}/', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_services_device_groups_device_group_by_device_group_name(self, device_group_name, **kwargs): # noqa: E501
"""Stop and remove a device-group's services. # noqa: E501
Stop and clean services of a device-group. This will remove all the services for a device-group, however, it will not clean up the collected data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_services_device_groups_device_group_by_device_group_name(device_group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str device_group_name: Name of device group (required)
:param str authorization: authentication header object
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, **kwargs) # noqa: E501
else:
(data) = self.delete_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, **kwargs) # noqa: E501
return data
    def delete_services_device_groups_device_group_by_device_group_name_with_http_info(self, device_group_name, **kwargs):  # noqa: E501
        """Stop and remove a device-group's services.  # noqa: E501
        Stop and clean services of a device-group. This will remove all the services for a device-group, however, it will not clean up the collected data.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_services_device_groups_device_group_by_device_group_name_with_http_info(device_group_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str device_group_name: Name of device group (required)
        :param str authorization: authentication header object
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['device_group_name', 'authorization']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # locals() snapshots the arguments; unknown keywords are rejected,
        # known ones are flattened into the params dict.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_services_device_groups_device_group_by_device_group_name" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'device_group_name' is set
        if ('device_group_name' not in params or
                params['device_group_name'] is None):
            raise ValueError("Missing the required parameter `device_group_name` when calling `delete_services_device_groups_device_group_by_device_group_name`")  # noqa: E501
        collection_formats = {}
        # The device group name is interpolated into the URL path below.
        path_params = {}
        if 'device_group_name' in params:
            path_params['device_group_name'] = params['device_group_name']  # noqa: E501
        query_params = []
        # The optional authorization value travels as an HTTP header.
        header_params = {}
        if 'authorization' in params:
            header_params['Authorization'] = params['authorization']  # noqa: E501
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json', 'application/octet-stream'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'multipart/form-data'])  # noqa: E501
        # Authentication setting
        auth_settings = []  # noqa: E501
        return self.api_client.call_api(
            '/services/device-group/{device_group_name}/', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_services_network_group_by_network_group_name(self, network_group_name, **kwargs): # noqa: E501
"""Stop and remove a network-group's services. # noqa: E501
Stop and clean the services of a network group. This will remove all the services for a network-group, however, it will not clean up the collected data. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_services_network_group_by_network_group_name(network_group_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str network_group_name: Name of network group | |
#!/usr/bin/env python
from __future__ import division
import sys
import copy
import rospy
import geometry_msgs.msg
## END_SUB_TUTORIAL
import tf2_ros
from actionlib import SimpleActionClient
from geometry_msgs.msg import PoseStamped, Quaternion, Point, WrenchStamped
from giskard_msgs.msg import ControllerListGoal, Controller, ControllerListAction
from sensor_msgs.msg import JointState
from numpy import pi
import numpy as np
from math import sqrt
from std_msgs.msg import String
from std_srvs.srv import Trigger, TriggerRequest
from tf.transformations import quaternion_about_axis, quaternion_from_euler
class MoveArm(object):
    # Parameters that directly influence the cut:
    offset_tip = 0.102  # offset in cm (thickness of the cutting board + distance between fingertip and blade underside)
    blade_len = 0.05  # length of the blade
    ft_limit = 50  # force limit
    ft_threshold = 25  # force threshold
    step_down = 0.01  # default cutting depth per step
def __init__(self, enabled=True):
self.enabled = enabled
self.client = SimpleActionClient('/qp_controller/command', ControllerListAction)
rospy.loginfo('connecting to giskard')
self.client.wait_for_server()
rospy.loginfo('connected to giskard')
self.tip = 'gripper_tool_frame'
self.root = 'base_link'
self.joint_names = ['shoulder_pan_joint',
'shoulder_lift_joint',
'elbow_joint',
'wrist_1_joint',
'wrist_2_joint',
'wrist_3_joint', ]
self.tfBuffer = tf2_ros.Buffer()
self.listener = tf2_ros.TransformListener(self.tfBuffer)
# Subcriber fuer das Topic"/kms40/wrench_zeroed". Wenn Nachrichten empfangen werden, wird die Funktion
# ft_callback aufgerufen.
self.ft_sub = rospy.Subscriber("/kms40/wrench_zeroed", WrenchStamped, self.ft_callback)
self.ft_list = [] #Liste fuer die gemessenen Kraftwerte.
# Service, um den Offset des Kraftmomentensensors zu aktualisieren.
# Der Service wartet auf ein Objekt vom Typ Trigger.
self.reset_ft = rospy.ServiceProxy("/ft_cleaner/update_offset",Trigger)
rospy.sleep(1)
self.reset_ft.call(TriggerRequest()) #Trigger Objekt
# Publisher fuer die Position des Endeffektors
self.endeffector_pub = rospy.Publisher('endeffector_position',PoseStamped)
def send_cart_goal(self, goal_pose, translation_weight=1, rotation_weight=1):
    """Send a Cartesian goal pose to giskard and wait up to 10 s for a result.

    Two controllers are attached to the goal: a 3D translation controller and
    a 3D rotation controller, both targeting the same pose. Does nothing when
    the instance was created with enabled=False.

    :param goal_pose: target pose for the tool frame
    :type goal_pose: PoseStamped
    :param translation_weight: weight of the translation controller
    :param rotation_weight: weight of the rotation controller
    """
    if not self.enabled:
        return
    goal = ControllerListGoal()
    goal.type = ControllerListGoal.STANDARD_CONTROLLER
    # (controller type, error threshold, weight) for each motion aspect.
    controller_specs = [
        (Controller.TRANSLATION_3D, 0.05, translation_weight),
        (Controller.ROTATION_3D, 0.2, rotation_weight),
    ]
    for ctrl_type, threshold, weight in controller_specs:
        ctrl = Controller()
        ctrl.type = ctrl_type
        ctrl.tip_link = self.tip
        ctrl.root_link = self.root
        ctrl.goal_pose = goal_pose
        ctrl.p_gain = 3
        ctrl.enable_error_threshold = True
        ctrl.threshold_value = threshold
        ctrl.weight = weight
        goal.controllers.append(ctrl)
    self.client.send_goal(goal)
    result = self.client.wait_for_result(rospy.Duration(10))
    print('finished in 10s?: {}'.format(result))
def relative_goal(self, position, orientation, translation_weight=1, rotation_weight=1):
    """Send a goal expressed relative to the current tool frame.

    :param position: (x, y, z) offset in the tool frame, metres
    :param orientation: (x, y, z, w) quaternion in the tool frame
    :param translation_weight: forwarded to send_cart_goal
    :param rotation_weight: forwarded to send_cart_goal
    """
    target = PoseStamped()
    # Anchoring the header at the tool frame makes the pose a relative move.
    target.header.frame_id = self.tip
    target.pose.position = Point(*position)
    target.pose.orientation = Quaternion(*orientation)
    self.send_cart_goal(target, translation_weight, rotation_weight)
def send_joint_goal(self, joint_state):
    """Send a joint-space goal to giskard and wait up to 10 s for a result.

    Does nothing when the instance was created with enabled=False.

    :param joint_state: target joint configuration
    :type joint_state: JointState
    """
    if self.enabled:
        goal = ControllerListGoal()
        goal.type = ControllerListGoal.STANDARD_CONTROLLER
        # joint-space controller (no error threshold; runs to the timeout)
        controller = Controller()
        controller.type = Controller.JOINT
        controller.tip_link = self.tip
        controller.root_link = self.root
        controller.goal_state = joint_state
        controller.p_gain = 3
        controller.enable_error_threshold = False
        controller.threshold_value = 0.01
        controller.weight = 1.0
        goal.controllers.append(controller)
        self.client.send_goal(goal)
        result = self.client.wait_for_result(rospy.Duration(10))
        print('finished in 10s?: {}'.format(result))
def ft_callback(self,data):
    """
    Callback for the force/torque sensor.
    :param data: sensor reading
    :type: WrenchStamped
    """
    # Abort the motion when the measured force exceeds the limit, pre-empting
    # the arm's own safety shutdown.
    if abs(data.wrench.force.z) > self.ft_limit:
        print("Stop")
        self.client.cancel_all_goals()
    # Locate the end effector and republish its pose.
    trans = self.tfBuffer.lookup_transform('arm_mounting_plate', self.tip, rospy.Time())
    p = PoseStamped()
    p.header = trans.header
    p.pose.position.x = trans.transform.translation.x
    p.pose.position.y = trans.transform.translation.y
    p.pose.position.z = trans.transform.translation.z
    p.pose.orientation.x = trans.transform.rotation.x
    p.pose.orientation.y = trans.transform.rotation.y
    p.pose.orientation.z = trans.transform.rotation.z
    p.pose.orientation.w = trans.transform.rotation.w
    self.endeffector_pub.publish(p)
    # Append the absolute force value to the running history; the cutting
    # strategy is derived from this list later on.
    ft = abs(data.wrench.force.z)
    self.ft_list.append(ft)
def move_tip_in_amp(self, x, y, z):
    """
    Move the gripper tool frame relative to the 'arm_mounting_plate' (amp) frame.
    :type x,y,z: distance in metres
    :param x: translation along the x axis of 'arm_mounting_plate'
    :param y: translation along the y axis of 'arm_mounting_plate'
    :param z: translation along the z axis of 'arm_mounting_plate'
    """
    # Current pose of the gripper tool frame relative to 'arm_mounting_plate'.
    trans = self.tfBuffer.lookup_transform('arm_mounting_plate', self.tip,
                                           rospy.Time())
    p = PoseStamped()
    p.header.frame_id = 'arm_mounting_plate'
    # Take over the current pose and add the requested offsets.
    p.pose.position = trans.transform.translation
    p.pose.position.x += x
    p.pose.position.y += y
    p.pose.position.z += z
    p.pose.orientation = trans.transform.rotation
    # Bug fix: the original called the module-level global 'cut' here, which
    # only works for an instance bound to that exact name.
    self.send_cart_goal(p)
def distance2table(self):
    """
    Distance from the lower blade edge to the surface of the cutting board.
    :rtype: distance in metres
    :return: clearance above the board
    """
    # 'arm_mounting_plate' coincides with the table top, so the z translation
    # of the tool frame minus the tool offset is the blade clearance.
    trans = self.tfBuffer.lookup_transform('arm_mounting_plate', self.tip, rospy.Time())
    return trans.transform.translation.z - self.offset_tip
def go_to_home(self):
    """Drive the arm into its predefined home configuration."""
    print("Approach Home Pose")
    # Fixed joint values of the home pose, in the order of self.joint_names.
    home_positions = [-2.417572323475973,
                      -1.530511204396383,
                      -1.6327641646014612,
                      -1.5507991949664515,
                      1.5708668231964111,
                      1.509663701057434]
    home_state = JointState()
    home_state.name = self.joint_names
    home_state.position = home_positions
    self.send_joint_goal(home_state)
    print("Home Pose Approached")
def align(self):
    """Align the knife before cutting.

    The knife is tipped forward because the blade is curved and would not sit
    flush on the cutting board otherwise; the lowest point of the blade then
    sits centrally above the object. It is also rotated slightly about the
    z axis because the provisional holder does not seat the knife perfectly.
    Adjust these values when a different knife or holder is used.
    """
    q = quaternion_from_euler(0, -0.15, 0.02, 'ryxz')
    # Bug fix: the original used the module-level global 'cut' instead of self.
    self.relative_goal([0, 0, 0], q)
    self.move_tip_in_amp(-0.01, 0, 0)
# Weitere Bewegungen des Endeffektors, die nicht beruecksichtigt wurden.
# Einfache Schnittbewegung entlang der y-Achse (in Bezug auf gripper_tool_frame) bei gleicher Orientierung des Grippers
# def straight_cut(self):
# d2t = test.distance2table()
# test.relative_goal([0,-d2t,0],[0,0,0,1])
#
# Hackende Bewegung
# def straight_chop(self):
# max_step = 6
# for i in range(max_step):
# test.relative_goal([0,-0.02,0],[0,0,0,1])
# test.relative_goal([0,0.01,0], [0, 0, 0, 1])
#
# Saegende Bewegung
# def saw(self):
# max_step = 6
# for i in range(max_step):
# test.relative_goal([0, -0.005, 0.05], [0, 0, 0, 1])
# test.relative_goal([0, -0.005, -0.05], [0, 0, 0, 1])
#
# Einfache rollende Schnittbewegung
# def roll_simple(self):
# q = quaternion_from_euler(0, 0.3, 0, 'ryxz')
# test.relative_goal([0,0,0],q,translation_weight=100) # Erhohung der Gewichtung der Translation, damit die Spitze genauer in Position bleibt
# test.move_tip_in_amp(0, 0, -0.08)
# q_1 = quaternion_from_euler(0, -0.3, 0, 'ryxz')
# test.relative_goal([0, 0, 0],q_1,translation_weight=100)
#
# Erweiterte rollende Schnittbewegung
# def roll_advanced(self):
# q = quaternion_from_euler(0, 0.3, 0, 'ryxz')
# test.relative_goal([0, 0, 0], q, translation_weight=100)
# test.move_tip_in_amp(0, 0, -0.08)
# test.move_tip_in_amp(-0.05, 0, 0)
# q_1 = quaternion_from_euler(0, -0.3, 0, 'ryxz')
# test.relative_goal([0, 0, 0], q_1, translation_weight=100)
#
#
# def cross_cut(self):
# max_step = 5
# for i in range(max_step):
# q = quaternion_from_euler(0, 0.1, 0, 'ryxz')
# test.relative_goal([0, 0, 0], q, translation_weight=100)
# test.relative_goal([0, -0.01, 0.05], [0, 0, 0, 1])
# q_1 = quaternion_from_euler(0, -0.1, 0, 'ryxz')
# test.relative_goal([0, 0, 0], q_1,translation_weight=100)
# test.relative_goal([0, 0, -0.05], [0, 0, 0, 1])
def master_cut(self):
    """Plan and execute the complete cutting motion.

    Repeatedly asks calc_move() for the next increment while the blade is
    above the cutting board, executes it, and finishes with clean-up strokes
    once calc_move() flags the final cut.
    """
    # Current clearance between blade edge and cutting board.
    d2t = self.distance2table()
    # Keep cutting while the blade is above the board.
    # (All self.* calls below were global 'cut.*' references in the original.)
    while d2t > 0:
        # Next increment: cutting depth, lateral move, and final-cut flag.
        down, side, final = self.calc_move()
        if side == 0:
            # Force threshold not exceeded: plain vertical cut along z.
            self.move_tip_in_amp(0, 0, -down)
        else:
            # Threshold exceeded: add a sawing component along x. First pull
            # the blade back to use its full length, then cut diagonally
            # across the whole blade, finally return to the starting x.
            self.move_tip_in_amp(-side, 0, -(1 / 4) * down)
            self.move_tip_in_amp(2 * side, 0, -(2 / 4) * down)
            self.move_tip_in_amp(-side, 0, -(1 / 4) * down)
        if final:
            # Last increment done: finish with horizontal strokes so the
            # object is fully severed, then move sideways along y to
            # separate the cut-off piece.
            print("Final")
            self.move_tip_in_amp(-self.blade_len, 0, 0)
            self.move_tip_in_amp(self.blade_len * 1.5, 0, 0)
            self.move_tip_in_amp(-self.blade_len / 2, 0, 0.005)
            self.move_tip_in_amp(0, 0.05, 0)
            print("Cut Finished")
            return
        # Bug fix: re-evaluate the clearance each iteration; the original
        # checked a stale value and could only ever exit via 'final'.
        d2t = self.distance2table()
# Funktion um die Schnittbewegung zu berechnen
def calc_move(self):
"""
Return of three values necessary for cut-move execution.
:return: 1.value:cutting depth; 2.value: lateral move; 3.value: final cut
:type: (float,float,bool)
"""
# Init
final = False
# Abfrage des maximalen F/T-Werts aus der letzten Bewegung
cur_ft = cut.max_ft()
# cur_ft = self.ft_threshold
# Abfrage des aktuellen Abstands von Klingenunterseite zu Schneidebrett
d2t = cut.distance2table()
print("Distance to Table %s" % d2t)
print("Current FT %s" %cur_ft)
# Wenn der | |
# -----------------------------------------------------------------------------
# Copyright * 2014, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The Crisis Mapping Toolkit (CMT) v1 platform is licensed under the Apache
# License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# -----------------------------------------------------------------------------
import os, sys
import xml.etree.ElementTree as ET
import ee
import json
import traceback
import util.miscUtilities
import util.imageRetrievalFunctions
# Default search path for domain xml files: [root]/config/domains/[sensor_name]/
DOMAIN_SOURCE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), \
".." + os.path.sep + "config" + os.path.sep + "domains")
# Default search path for sensors description xml files: [root]/config/sensors
SENSOR_SOURCE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), \
".." + os.path.sep + "config" + os.path.sep + "sensors")
class SensorObservation(object):
'''A class for accessing a sensor's observation at one time.'''
def __init__(self):
    '''Create an empty object; populate it with init_from_xml() or init_from_image().'''
    # Public class members
    self.sensor_name = 'Unnamed' # Name of the sensor!
    self.image = None # EE image object containing the selected sensor bands
    self.band_names = [] # The name assigned to each band
    self.log_scale = False # True if the sensor uses a log 10 scale
    self.minimum_value = None # Min and max sensor values (shared across bands)
    self.maximum_value = None
    self.band_resolutions = dict() # Specified resolution of each band in meters
    self.water_distributions = dict() # Info about water characteristics in each band
    # You can also access each band as a member variable, e.g. self.hv
    # gives access to the band named 'hv'
    # Private class members
    self._display_bands = None # Up to three band names to use for display
    self._display_gains = None # Display gains matching _display_bands
    self._mask_info = None # Source description of the mask, if any
    self._band_sources = dict() # Where to get each band from
def init_from_xml(self, xml_source=None, ee_bounds=None, is_domain_file=False, manual_ee_ID=None):
    '''Initialize the object from XML data and the desired bounding box.

    xml_source may be a path to an xml file or an already parsed xml element;
    when it cannot be parsed as a path it is assumed to be a parsed element.
    '''
    try:
        xml_root = ET.parse(xml_source).getroot()
    except (ET.ParseError, IOError, OSError, TypeError):
        # Narrowed from a bare 'except:'; these are the failures ET.parse
        # raises for a bad path / unreadable file / non-path input.
        xml_root = xml_source
    # Parse the xml data to fill in the class variables
    self._load_xml(xml_root, is_domain_file, manual_ee_ID)
    # Set up the EE image object using the band information
    self._load_image(ee_bounds)
def init_from_image(self, ee_image, sensor_name):
    '''Init from an already loaded Earth Engine Image.

    Raises an Exception when the image shares no bands with the sensor
    definition file; otherwise keeps only the shared bands.

    :param ee_image: already constructed ee.Image
    :param sensor_name: name used to locate the sensor definition xml file
    '''
    self.sensor_name = sensor_name
    self.image = ee_image
    # Fetch info from the sensor definition file
    self._load_sensor_xml_file(sensor_name)
    # Compare list of bands in ee_image with bands loaded from the definition file
    bands_in_image = self.image.bandNames().getInfo()
    shared_bands = list(set(bands_in_image) & set(self.band_names))
    if not shared_bands:
        #print self._band_sources
        raise Exception('For sensor '+sensor_name+' expected bands: '
                        +str(self.band_names)+' but found '+str(bands_in_image))
    # Set up band access in manner of self.red_channel.
    # - Also prune sensor bands that were not included in the provided image.
    for band_name in shared_bands:
        self.__dict__[band_name] = self.image.select(band_name)
    self.band_names = shared_bands
def __str__(self):
s = 'SensorObservation: ' + self.sensor_name + '\n'
s += ' - Bands'
for b in self.band_names:
s += ' : ' + b
return s
def __repr__(self):
    '''Represent the observation by its sensor name.'''
    return self.sensor_name
def get_date(self):
'''Returns the start date for the image if one was provided, None otherwise.'''
if ('start_date' in self._band_sources[0]):
return self._band_sources[0]['start_date']
else:
return None
def _loadPieceOfSourceInfo(self, source_band, info_name, dictionary):
'''Helper function - Look for and load source info about a band'''
result = source_band.find(info_name)
if result != None:
dictionary[info_name] = result.text
def _load_source(self, source_element):
'''load a data source for a band or mask, represented by the <source> tag.'''
# A source is stored like this: {'mosaic', 'source', 'eeid'}
d = dict()
source_band = source_element.find('source')
if source_band == None:
return d # Source not specified, leave the dictionary empty!
# if it's a mosaic, combine the images in an EE ImageCollection
mosaic = source_band.get('mosaic')
if mosaic != None:
if mosaic.lower() == 'true':
d['mosaic'] = True
elif mosaic.lower() == 'false':
d['mosaic'] = False
else:
raise Exception('Unexpected value of mosaic, %s.' % (source_band.get('mosaic')))
# The name of the band in the source data, maybe not what we will call it in the output image.
name = source_band.find('name')
if name != None:
# the name of the band in the original image
d['source'] = name.text
# Load more information about the band source
self._loadPieceOfSourceInfo(source_band, 'eeid', d) # The id of the image to load, if a single image.
self._loadPieceOfSourceInfo(source_band, 'collection', d) # The ImageCollection name of the data, if any.
self._loadPieceOfSourceInfo(source_band, 'start_date', d) # Start and end dates used to filter an ImageCollection.
self._loadPieceOfSourceInfo(source_band, 'end_date', d)
return d
def _load_distribution(self, root):
'''load a probability distribution into a python dictionary, which may, for
example, represent the expected distribution of water pixels'''
d = dict()
model = root.find('model')
if model != None:
d['model'] = model.text
mode = root.find('mode')
if mode != None:
d['mode'] = dict()
if mode.find('range') != None:
(d['mode']['min'], d['mode']['max']) = self._load_range(mode.find('range'))
r = root.find('range')
if r != None:
d['range'] = self._load_range(r)
b = root.find('buckets')
if b != None:
try:
d['buckets'] = int(b.text)
except:
raise Exception('Buckets in distribution must be integer.')
#print 'Created water distribution: '
#print d
return d
def _load_bands(self, root_element, manual_ee_ID=None):
'''Read the band specification and load it into _band_sources and _mask_source.
Does not load the bands'''
# Look for default water distribution info at the top level
default_water = dict()
for d in root_element.findall('distribution'):
if d.get('name').lower() == 'water':
default_water = self._load_distribution(d)
# Read bands, represented by <band> tag
bands = root_element.find('bands')
if bands == None:
return # Nothing to do if no bands tag!
# Look for display bands at the top band level
display_bands = bands.find('display_bands')
if display_bands != None:
display_band_list = display_bands.text.replace(' ','').split(',') # The band names are comma seperated
if len(display_band_list) > 3:
raise Exception('Cannot have more than three display bands!')
self._display_bands = display_band_list
# Looks for display band gains at the top level
display_gains = bands.find('display_gains')
if display_gains != None:
display_gain_list = display_gains.text.split(',') # The band names are comma seperated
if len(display_gain_list) > 3:
raise Exception('Cannot have more than three display band gains!')
self._display_gains = display_gain_list
# Shared information (e.g., all bands have same eeid) is loaded directly in <bands>
default_source = self._load_source(bands) # Located in <bands><source>
if manual_ee_ID: # Set manual EEID if it was passed in
default_source['eeid'] = manual_ee_ID
# If any bands are already loaded (meaning we are in the domain file), apply this source info to them.
for b in self.band_names:
self._band_sources[b].update(default_source)
if self._mask_info != None:
self._mask_info.update(default_source)
resolution = bands.find('resolution')
if resolution != None: # <bands><resolution>
default_resolution = float(resolution.text)
else:
default_resolution = 10 # Default resolution is 10 meters if not specified!
# load individual <band> tags
for b in bands.findall('band'):
try:
name = b.find('name').text
except:
raise Exception('Unnamed band.')
#print 'Getting info for band: ' + name
if name not in self.band_names: # Only append each band name once
self.band_names.append(name)
if name not in self._band_sources: # Only append each band source once
self._band_sources[name] = dict()
self._band_sources[name].update(default_source) # Start with the default source information
self._band_sources[name].update(self._load_source(b)) # Band source information is stored like: {'mosaic', 'source', 'eeid'}
#print 'Source for this band = '
#print str(self._band_sources[name])
# Look for water distribution information in this band
if name not in self.water_distributions:
self.water_distributions[name] = dict()
self.water_distributions[name].update(default_water)
for d in b.findall('distribution'):
if d.get('name').lower() == 'water':
self.water_distributions[name].update(self._load_distribution(d))
# Load resolution for this band
resolution = b.find('resolution')
if resolution != None:
self.band_resolutions[name] = float(resolution.text)
else:
self.band_resolutions[name] = default_resolution
#print 'For band name ' + name + ' found resolution = ' + str(self.band_resolutions[name])
# read mask, in <mask> tag
mask = bands.find('mask')
if mask != None:
if self._mask_info == None:
self._mask_info = dict()
if mask.get('self') == 'true': # Self mask means that zero-valued pixels in the source will be masked out.
self._mask_info['self'] = True
else: # Otherwise there must be an external source
| |
# Copyright 2017 Wind River
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
from cinderclient import client as cinderclient
from keystoneauth1 import exceptions as keystone_exceptions
from keystoneauth1 import loading
from keystoneauth1 import session
from keystoneclient import client as keystoneclient
from neutronclient.common import exceptions as neutronclient_exceptions
from neutronclient.neutron import client as neutronclient
from novaclient import client as novaclient
from novaclient import exceptions as novaclient_exceptions
from novaclient import utils as novaclient_utils
from requests_toolbelt import MultipartDecoder
from oslo_log import log as logging
from oslo_serialization import jsonutils
from dcmanager.common import consts as dcmanager_consts
from dcmanager.rpc import client as dcmanager_rpc_client
from dcorch.common import consts
from dcorch.common import context
from dcorch.common import exceptions
from dcorch.common import utils
from dcorch.drivers.openstack import sdk_platform as sdk
from dcorch.engine import quota_manager
from dcorch.objects import orchrequest
from dcorch.objects import resource
from dcorch.objects import subcloud_resource
from oslo_config import cfg
LOG = logging.getLogger(__name__)
# Lifecycle states of a SyncThread (stored in self.status, protected by the
# condition lock).
STATUS_NEW = 'new'
STATUS_PROCESSING = 'processing'
STATUS_TIMEDOUT = 'timedout'
STATUS_SLEEPING = 'sleeping'
STATUS_SHUTTING_DOWN = 'shutting_down'  # is this actually needed?
# sync request states, should be in SyncRequest class
STATE_QUEUED = 'queued'
STATE_IN_PROGRESS = 'in-progress'
STATE_TIMEDOUT = 'timedout'
STATE_ABORTED = 'aborted'
STATE_FAILED = 'failed'
STATE_COMPLETED = 'completed'
# Audit findings: a resource missing from the subcloud vs. one that exists
# only in the subcloud.
AUDIT_RESOURCE_MISSING = 'missing'
AUDIT_RESOURCE_EXTRA = 'extra_resource'
class SyncThread(object):
"""Manages tasks related to resource management."""
MAX_RETRY = 2
def __init__(self, subcloud_engine):
    """Set up state for syncing one endpoint type of one subcloud.

    :param subcloud_engine: engine that owns this sync thread
    """
    super(SyncThread, self).__init__()
    self.endpoint_type = None  # endpoint type in keystone
    self.subcloud_engine = subcloud_engine  # engine that owns this obj
    self.thread = None  # thread running sync()
    self.audit_thread = None  # thread running sync_audit(), see do_sync_audit()
    self.status = STATUS_NEW  # protected by condition lock
    self.audit_status = None  # todo: needed?
    self.condition = threading.Condition()  # used to wake up the thread
    self.ctxt = context.get_admin_context()
    self.sync_handler_map = {}  # resource type -> handler, filled by subclasses
    self.master_region_name = consts.CLOUD_0
    self.audit_resources = []  # resource types audited by sync_audit()
    self.log_extra = {
        "instance": self.subcloud_engine.subcloud.region_name + ": "}
    self.dcmanager_rpc_client = dcmanager_rpc_client.ManagerClient()
    # Last sync status reported to dcmanager; used to suppress duplicates.
    self.sync_status = dcmanager_consts.SYNC_STATUS_UNKNOWN
    self.subcloud_managed = False
def start(self):
    """Launch the sync worker thread; only valid from the 'new' state."""
    if self.status != STATUS_NEW:
        LOG.error("unable to start, not in new status",
                  extra=self.log_extra)
        return
    self.status = STATUS_PROCESSING
    self.thread = threading.Thread(target=self.sync)
    self.thread.start()
def shutdown(self):
    """Stop all work: flag the threads to exit and wake them up."""
    # Condition as a context manager replaces the manual acquire()/release()
    # pair and guarantees the lock is released even if notify() raises.
    with self.condition:
        self.status = STATUS_SHUTTING_DOWN
        self.condition.notify()  # Wake the threads so they exit.
def should_exit(self):
    """Return whether the sync/audit threads should exit.

    Caller must hold the condition lock.
    """
    return self.status == STATUS_SHUTTING_DOWN
def wake(self):
    """Wake the sync thread; called after work has been saved to the DB."""
    # Condition as a context manager replaces the manual acquire()/release()
    # pair and guarantees the lock is released even if notify() raises.
    with self.condition:
        self.status = STATUS_PROCESSING
        self.condition.notify()
def initialize(self):
    """Hook run when a subcloud goes enabled.

    To be overridden by endpoint implementations that need set-up work;
    the base implementation intentionally does nothing.
    """
    pass
def enable(self):
    """Called when DC manager thinks this subcloud is good to go.

    Runs the endpoint initialization hook, wakes the sync thread, and
    kicks off a background audit.
    """
    self.initialize()
    self.wake()
    self.run_sync_audit()
def get_db_subcloud_resource(self, rsrc_id):
    """Look up the subcloud_resource DB row for this subcloud.

    :param rsrc_id: the "id" field of the master resource in the DB
    :return: SubcloudResource object, or None when no row exists
    """
    try:
        subcloud_rsrc = \
            subcloud_resource.SubcloudResource. \
            get_by_resource_and_subcloud(
                self.ctxt, rsrc_id, self.subcloud_engine.subcloud.id)
        return subcloud_rsrc
    except exceptions.SubcloudResourceNotFound:
        # Missing rows are an expected condition, not an error.
        LOG.info("{} not found in subcloud {} resource table".format(
            rsrc_id, self.subcloud_engine.subcloud.id),
            extra=self.log_extra)
        return None
def persist_db_subcloud_resource(self, db_rsrc_id, subcloud_rsrc_id):
    """Create or refresh the subcloud_resource row for a subcloud resource.

    This function can be invoked after creating a subcloud resource, to
    persist the subcloud resource to the DB for later.

    :param db_rsrc_id: the "id" field of the resource in the DB
    :param subcloud_rsrc_id: the unique identifier of the subcloud resource
    :return: the (possibly updated) subcloud resource id
    """
    subcloud_rsrc = self.get_db_subcloud_resource(db_rsrc_id)
    if not subcloud_rsrc:
        subcloud_rsrc = subcloud_resource.SubcloudResource(
            self.ctxt, subcloud_resource_id=subcloud_rsrc_id,
            resource_id=db_rsrc_id,
            subcloud_id=self.subcloud_engine.subcloud.id)
        # There is no race condition for creation of
        # subcloud_resource as it is always done from the same thread.
        subcloud_rsrc.create()
    elif subcloud_rsrc.subcloud_resource_id != subcloud_rsrc_id:
        # May be the resource was manually deleted from the subcloud.
        # So, update the dcorch DB with the new resource id from subcloud.
        subcloud_rsrc.subcloud_resource_id = subcloud_rsrc_id
        # NOTE(review): this logs the new id twice -- the attribute was just
        # overwritten above; presumably meant to show old vs. new. Confirm.
        LOG.info("Updating {}:{} [{}]".format(db_rsrc_id,
                 subcloud_rsrc.subcloud_resource_id, subcloud_rsrc_id),
                 extra=self.log_extra)
        subcloud_rsrc.save()
    else:
        LOG.info("subcloud_rsrc {}:{} [{}] is up-to-date"
                 .format(db_rsrc_id, subcloud_rsrc.subcloud_resource_id,
                         subcloud_rsrc_id),
                 extra=self.log_extra)
    return subcloud_rsrc.subcloud_resource_id
def sync_resource(self, sync_request):
    """Dispatch one queued sync request to its resource-type handler."""
    rsrc = resource.Resource.get_by_id(self.ctxt,
                                       sync_request.orch_job.resource_id)
    handler = self.sync_handler_map[rsrc.resource_type]
    # NOTE(review): 'func_name' is the Python 2 spelling of this attribute;
    # on Python 3 it is '__name__' -- confirm the target interpreter.
    LOG.info("Invoking {} for {} [{}]".format(
        handler.func_name, rsrc.resource_type,
        sync_request.orch_job.operation_type), extra=self.log_extra)
    handler(sync_request, rsrc)
def set_sync_status(self, sync_status):
    """Report this endpoint's sync status to dcmanager.

    Only reports while the subcloud is managed; duplicate reports (same
    status and same managed state as last time) are suppressed.

    :param sync_status: one of the dcmanager_consts.SYNC_STATUS_* values
    """
    # Only report sync_status when managed.
    subcloud_managed = self.subcloud_engine.is_managed()
    if not subcloud_managed:
        LOG.debug("set_sync_status: skip update sync update for unmanaged "
                  "subcloud {}".format(
                      self.subcloud_engine.subcloud.region_name))
        self.sync_status = dcmanager_consts.SYNC_STATUS_UNKNOWN
        self.subcloud_managed = False
        return
    # Bug fix: skip the RPC only when NOTHING changed. The original compared
    # the managed flag with '!=', which suppressed the update exactly when
    # the managed state HAD changed and re-sent duplicates when it had not.
    if ((self.sync_status == sync_status) and
            (self.subcloud_managed == subcloud_managed)):
        return
    self.sync_status = sync_status
    self.subcloud_managed = subcloud_managed
    self.dcmanager_rpc_client.update_subcloud_endpoint_status(
        self.ctxt, self.subcloud_engine.subcloud.region_name,
        self.endpoint_type, sync_status)
def sync(self):
    """Main loop of the sync worker thread.

    Sleeps on the condition lock until woken (wake()/shutdown()), then
    drains queued/in-progress orchestration requests for this endpoint,
    retrying each up to MAX_RETRY times. Exits when shutdown() sets
    STATUS_SHUTTING_DOWN. The condition lock is held everywhere except
    while actually processing requests.
    """
    LOG.info("{}: starting sync routine".format(self.thread.name),
             extra=self.log_extra)
    self.condition.acquire()
    self.status = STATUS_PROCESSING
    region_name = self.subcloud_engine.subcloud.region_name
    while self.status != STATUS_SHUTTING_DOWN:
        sync_requests = []
        # We want to check for pending work even if subcloud is disabled.
        if self.status in (STATUS_PROCESSING, STATUS_TIMEDOUT):
            states = [
                consts.ORCH_REQUEST_QUEUED,
                consts.ORCH_REQUEST_IN_PROGRESS,
            ]
            sync_requests = orchrequest.OrchRequestList.get_by_attrs(
                self.ctxt, self.endpoint_type,
                target_region_name=region_name,
                states=states)
            LOG.info("Got " + str(len(sync_requests)) + " sync request(s)",
                     extra=self.log_extra)
            # todo: for each request look up sync handler based on
            # resource type (I'm assuming here we're not storing a python
            # object in the DB)
        # Update dcmanager with the current sync status.
        subcloud_enabled = self.subcloud_engine.is_enabled()
        if sync_requests:
            self.set_sync_status(dcmanager_consts.SYNC_STATUS_OUT_OF_SYNC)
        else:
            self.set_sync_status(dcmanager_consts.SYNC_STATUS_IN_SYNC)
        if (not sync_requests or not subcloud_enabled or
                self.status == STATUS_TIMEDOUT):
            # Either there are no sync requests, or subcloud is disabled,
            # or we timed out trying to talk to it.
            # We're not going to process any sync requests, just go
            # back to sleep.
            if not subcloud_enabled:
                LOG.info("subcloud is disabled", extra=self.log_extra)
            if self.status == STATUS_PROCESSING:
                self.status = STATUS_SLEEPING
            LOG.debug("calling condition.wait", extra=self.log_extra)
            # no work to do, sleep till someone wakes us
            self.condition.wait()
            LOG.debug("back from condition.wait", extra=self.log_extra)
        else:
            # Subcloud is enabled and there are pending sync requests, so
            # we have work to do. Drop the lock while processing.
            self.condition.release()
            try:
                for request in sync_requests:
                    if not self.subcloud_engine.is_enabled() or \
                            self.should_exit():
                        # Oops, someone disabled the endpoint while
                        # we were processing work for it.
                        raise exceptions.EndpointNotReachable()
                    request.state = consts.ORCH_REQUEST_STATE_IN_PROGRESS
                    request.save()  # save to DB
                    retry_count = 0
                    while retry_count < self.MAX_RETRY:
                        try:
                            self.sync_resource(request)
                            request.state = \
                                consts.ORCH_REQUEST_STATE_COMPLETED
                            request.save()  # save to DB
                            break
                        except exceptions.SyncRequestTimeout:
                            request.try_count += 1
                            request.save()
                            retry_count += 1
                            if retry_count >= self.MAX_RETRY:
                                # todo: raise "unable to sync this
                                # subcloud/endpoint" alarm with fmapi
                                self.condition.acquire()
                                self.status = STATUS_TIMEDOUT
                                self.condition.release()
                                raise exceptions.EndpointNotReachable()
                        except exceptions.SyncRequestFailedRetry:
                            # todo: raise "unable to sync this
                            # subcloud/endpoint" alarm with fmapi
                            request.try_count += 1
                            request.state = \
                                consts.ORCH_REQUEST_STATE_FAILED
                            request.save()
                            retry_count += 1
                            # we'll retry
                        except exceptions.SyncRequestFailed:
                            request.state = \
                                consts.ORCH_REQUEST_STATE_FAILED
                            request.save()
                            retry_count = self.MAX_RETRY
                    # If we fall out of the retry loop we either succeeded
                    # or failed multiple times and want to move to the next
                    # request.
            except exceptions.EndpointNotReachable:
                # Endpoint not reachable, throw away all the sync requests.
                LOG.info("EndpointNotReachable, {} sync requests pending"
                         .format(len(sync_requests)))
                # del sync_requests[:] #This fails due to:
                # 'OrchRequestList' object does not support item deletion
            # Re-acquire before re-checking the while condition.
            self.condition.acquire()
    # if we get here it's because we want this thread to exit
    self.condition.release()
    LOG.info("exiting thread for subcloud", extra=self.log_extra)
def run_sync_audit(self):
    """Kick off an audit in a background thread.

    Skipped when the subcloud is disabled, the thread is shutting down,
    or audits for this endpoint type are disabled by configuration.
    """
    if not self.subcloud_engine.is_enabled() or self.should_exit():
        return
    if self.endpoint_type in cfg.CONF.disable_audit_endpoints:
        LOG.warn("Audit disabled!", extra=self.log_extra)
        return
    # This runs periodically as well as when the subcloud is enabled;
    # a separate thread keeps the caller from blocking.
    audit_runner = threading.Thread(target=self.do_sync_audit)
    audit_runner.start()
    LOG.debug("{}: do_sync_audit started".format(audit_runner.name),
              extra=self.log_extra)
def do_sync_audit(self):
    """Wake the main sync thread if it timed out, then start a sync audit
    thread unless one is already running."""
    LOG.debug("In do sync audit", extra=self.log_extra)
    # Condition as a context manager replaces the manual acquire()/release()
    # pair and guarantees release even if thread creation raises.
    with self.condition:
        # This first part just checks to see if we want to wake up the main
        # sync thread. We want to run this unconditionally.
        if self.status == STATUS_TIMEDOUT:
            self.status = STATUS_PROCESSING
            self.condition.notify()
        # Now we want to look at the actual sync audit. If there's already a
        # sync audit thread running don't make a new one.
        if self.audit_thread is None or not self.audit_thread.is_alive():
            LOG.debug("Creating sync audit thread", extra=self.log_extra)
            self.audit_thread = threading.Thread(target=self.sync_audit)
            self.audit_thread.start()
        else:
            LOG.info("Skipping sync audit thread creation, already running",
                     extra=self.log_extra)
def sync_audit(self):
LOG.debug("{}: starting sync audit".format(self.audit_thread.name),
extra=self.log_extra)
total_num_of_audit_jobs = 0
for resource_type in self.audit_resources:
if not self.subcloud_engine.is_enabled() or self.should_exit():
LOG.info("{}: aborting sync audit, as subcloud is disabled"
.format(self.audit_thread.name),
extra=self.log_extra)
return
# Skip resources with outstanding sync requests
region_name = self.subcloud_engine.subcloud.region_name
sync_requests = []
states = [
consts.ORCH_REQUEST_QUEUED,
| |
share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
:type FilterAttributeRanges: list
:param FilterAttributeRanges: Provides range filters for multiple attributes. When providing ranges to typed link selection, any inexact ranges must be specified at the end. Any attributes that do not have a range specified are presumed to match the entire range.
(dict) --Identifies the range of attributes that are used by a specified filter.
AttributeName (string) --The unique name of the typed link attribute.
Range (dict) -- [REQUIRED]The range of attribute values that are being selected.
StartMode (string) -- [REQUIRED]The inclusive or exclusive range start.
StartValue (dict) --The value to start the range at.
StringValue (string) --A string data value.
BinaryValue (bytes) --A binary data value.
BooleanValue (boolean) --A Boolean data value.
NumberValue (string) --A number data value.
DatetimeValue (datetime) --A date and time value.
EndMode (string) -- [REQUIRED]The inclusive or exclusive range end.
EndValue (dict) --The attribute value to terminate the range at.
StringValue (string) --A string data value.
BinaryValue (bytes) --A binary data value.
BooleanValue (boolean) --A Boolean data value.
NumberValue (string) --A number data value.
DatetimeValue (datetime) --A date and time value.
:type FilterTypedLink: dict
:param FilterTypedLink: Filters are interpreted in the order of the attributes defined on the typed link facet, not the order they are supplied to any API calls.
SchemaArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) that is associated with the schema. For more information, see arns .
TypedLinkName (string) -- [REQUIRED]The unique name of the typed link facet.
:type NextToken: string
:param NextToken: The pagination token.
:type MaxResults: integer
:param MaxResults: The maximum number of results to retrieve.
:type ConsistencyLevel: string
:param ConsistencyLevel: The consistency level to execute the request at.
:rtype: dict
:return: {
'TypedLinkSpecifiers': [
{
'TypedLinkFacet': {
'SchemaArn': 'string',
'TypedLinkName': 'string'
},
'SourceObjectReference': {
'Selector': 'string'
},
'TargetObjectReference': {
'Selector': 'string'
},
'IdentityAttributeValues': [
{
'AttributeName': 'string',
'Value': {
'StringValue': 'string',
'BinaryValue': b'bytes',
'BooleanValue': True|False,
'NumberValue': 'string',
'DatetimeValue': datetime(2015, 1, 1)
}
},
]
},
],
'NextToken': 'string'
}
:returns:
$ObjectIdentifier - An object identifier is an opaque string provided by Amazon Cloud Directory. When creating objects, the system will provide you with the identifier of the created object. An objects identifier is immutable and no two objects will ever share the same object identifier
/some/path - Identifies the object based on path
#SomeBatchReference - Identifies the object in a batch call
"""
pass
def list_policy_attachments(DirectoryArn=None, PolicyReference=None, NextToken=None, MaxResults=None, ConsistencyLevel=None):
    """Return all of the ObjectIdentifiers to which a given policy is attached.

    See also: AWS API Documentation

    :type DirectoryArn: string
    :param DirectoryArn: [REQUIRED] The ARN of the Directory where the
        objects reside. For more information, see arns.
    :type PolicyReference: dict
    :param PolicyReference: [REQUIRED] The reference that identifies the
        policy object. A ``Selector`` key identifies the object by
        $ObjectIdentifier, /path, or #BatchReference.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of items to be retrieved in a
        single call. This is an approximate number.
    :type ConsistencyLevel: string
    :param ConsistencyLevel: Represents the manner and timing in which a
        successful write or update is reflected in a subsequent read.
    :rtype: dict
    :return: {'ObjectIdentifiers': ['string', ...], 'NextToken': 'string'}
    """
    pass
def list_published_schema_arns(NextToken=None, MaxResults=None):
    """Retrieve each published schema Amazon Resource Name (ARN).

    See also: AWS API Documentation

    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {'SchemaArns': ['string', ...], 'NextToken': 'string'}
    """
    pass
def list_tags_for_resource(ResourceArn=None, NextToken=None, MaxResults=None):
    """Return tags for a resource.

    Tagging is currently supported only for directories, with a limit of
    50 tags per directory; all 50 tags are returned in a single call.

    See also: AWS API Documentation

    :type ResourceArn: string
    :param ResourceArn: [REQUIRED] The ARN of the resource. Tagging is
        only supported for directories.
    :type NextToken: string
    :param NextToken: The pagination token. This is for future use;
        pagination is not currently supported for tagging.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results returned in a single
        page. This is for future use and is not supported currently.
    :rtype: dict
    :return: {'Tags': [{'Key': 'string', 'Value': 'string'}, ...],
              'NextToken': 'string'}
    """
    pass
def list_typed_link_facet_attributes(SchemaArn=None, Name=None, NextToken=None, MaxResults=None):
    """Return a paginated list of all attribute definitions for a
    particular TypedLinkFacet. For more information, see Typed link.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN that is associated with the
        schema. For more information, see arns.
    :type Name: string
    :param Name: [REQUIRED] The unique name of the typed link facet.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {'Attributes': [{'Name': 'string',
                              'Type': 'STRING'|'BINARY'|'BOOLEAN'|'NUMBER'|'DATETIME',
                              'DefaultValue': {...typed value...},
                              'IsImmutable': True|False,
                              'Rules': {'string': {'Type': ..., 'Parameters': {...}}},
                              'RequiredBehavior': 'REQUIRED_ALWAYS'|'NOT_REQUIRED'},
                             ...],
              'NextToken': 'string'}
    """
    pass
def list_typed_link_facet_names(SchemaArn=None, NextToken=None, MaxResults=None):
    """Return a paginated list of TypedLink facet names for a particular
    schema. For more information, see Typed link.

    See also: AWS API Documentation

    :type SchemaArn: string
    :param SchemaArn: [REQUIRED] The ARN that is associated with the
        schema. For more information, see arns.
    :type NextToken: string
    :param NextToken: The pagination token.
    :type MaxResults: integer
    :param MaxResults: The maximum number of results to retrieve.
    :rtype: dict
    :return: {'FacetNames': ['string', ...], 'NextToken': 'string'}
    """
    pass
def lookup_policy(DirectoryArn=None, ObjectReference=None, NextToken=None, MaxResults=None):
"""
Lists all policies from the root of the Directory to the object specified. If there are no policies present, an empty list is returned. If policies are present, and if some objects don't have the policies attached, it returns the ObjectIdentifier for such objects. If policies are present, it returns ObjectIdentifier , policyId , and policyType . Paths that don't lead to the root from the target object are ignored. For more information, see Policies .
See also: AWS API Documentation
:example: response = | |
\
str(self.router_db_source_identifier.source_uri) + \
'"' + ' not found or not accessible to this process' + os.linesep + \
'Exception detail: ' + str(fnfex.args)
self.logger.critical(error_string)
raise EmeraldEmailRouterDatabaseInitializationError(error_string)
except Exception as ex:
error_string = 'Exception reading router source DB JSON file "' + \
'"' + str(self.router_db_source_identifier.source_uri) + os.linesep + \
'Exception type: ' + str(type(ex)) + os.linesep + \
'Exception msg: ' + str(ex.args)
self.logger.critical(error_string)
raise EmeraldEmailRouterDatabaseInitializationError(error_string)
# now initialize from the dictionary
# We only want to use the entries for our instance type
###########
# REFACTOR NOTE
# TODO: this is a hurry up parser - can be refactored to be table driven
###########
# first parse top level parameters name and revision date - we must have these to create data
# store and do further work
required_top_level_attributes = ['name',
'revision_number',
'revision_datetime',
'instance_type',
'router_rules']
missing_but_required = []
for this_attribute in required_top_level_attributes:
if this_attribute not in json_data:
missing_but_required.append(this_attribute)
if len(missing_but_required) > 0:
raise \
EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - source JSON missing these required attribute(s): ' +
os.linesep + ','.join([x for x in sorted(missing_but_required)]) +
os.linesep + 'Keys are CASE SPECIFIC and should be LOWERCASE' +
os.linesep + 'JSON found = ' + os.linesep + str(json_data) + os.linesep)
# ok we know they are here. In the hurry up parser we aren't being fancy in the data read
router_db_name = json_data['name']
if type(router_db_name) is not str or len(router_db_name) < 3:
raise EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - source JSON' +
' must have a "name" attribute as string of at least 3 chars in length')
router_db_revision_number = json_data['revision_number']
if type(router_db_revision_number) is not int or router_db_revision_number < 0:
raise EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - source JSON must have a "revision_number" attribute as ' +
'non-negative integer (not a string)' + os.linesep + 'Value provided = ' +
str(router_db_revision_number) + ' (type=' + str(type(router_db_revision_number))
)
#
# timestamp will be parsed as best effort BUT only time offsets in ISO8601 format will be used
# In other words 2019-04-12T07:00:12 EST will be processed as a naive timestamp (ignore timezone)
# BUT, 2019-04-12T07:00:12-0300 WILL be processed as a timestamp with GMT offset -3
#
try:
router_db_revision_datetime = parse(json_data['revision_datetime'],
dayfirst=False,
yearfirst=False)
except ValueError as vex:
raise EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - source JSON has invalid' +
'revision_datetime parameter' +
' "' + str(json_data['revision_datetime']) + '"' +
os.linesep + 'Unable to parse' +
os.linesep + 'Exception data: ' + str(vex.args))
except Exception as ex:
raise EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - source JSON has invalid' +
'revision_datetime parameter' +
' "' + str(json_data['revision_datetime']) + '"' +
os.linesep + 'Exception type: ' + str(type(ex)) +
os.linesep + 'Exception data: ' + str(ex.args))
# and convert to UTC. If naive, localize it to LOCAL. Otherwise scale
try:
local_timezone_zone_string = get_localzone().zone
router_db_revision_datetime_as_utc = \
timezone(local_timezone_zone_string).localize(router_db_revision_datetime)
except ValueError:
# not naive so scale
self.logger.debug('Provided timestamp includes timezone so scaling to UTC')
router_db_revision_datetime_as_utc = router_db_revision_datetime.astimezone(timezone('UTC'))
else:
self.logger.info('Naive timezone')
self.logger.info('Router datastore timestamp = ' + str(router_db_revision_datetime))
self.logger.info('Router datastore timestamp (UTC) = ' + str(router_db_revision_datetime_as_utc))
# now parse the instance type and make sure it matches us - abort if not
try:
router_db_instance_type = RouterInstanceType[json_data['instance_type'].upper()]
except KeyError:
raise EmeraldEmailRouterDatabaseInitializationError(
'Unable to initialize - specified instance type "' + str(json_data['instance_type']) + '"' +
' is not a valid instance type' + os.linesep +
'Must be one of ' + ','.join([x.name.lower() for x in RouterInstanceType])
)
# is it our type?
if router_db_instance_type != self.router_instance_type:
raise EmeraldEmailRouterDatabaseInitializationError(
'Specified JSON is for a different router instance type "' + router_db_instance_type.name.lower() +
'"' + os.linesep + 'Program specified this instance to be ' + self.router_instance_type.name.lower()
)
# now create the initial datastore
self._router_rules_datastore = \
EmailRouterRulesDatastore(name=router_db_name,
revision_datetime=router_db_revision_datetime_as_utc,
revision_number=router_db_revision_number,
instance_type=router_db_instance_type)
# next we have to see if included JSON has records for our instance type. If not we will abort
target_or_client_count = len(json_data['router_rules'])
if target_or_client_count < 1:
raise EmeraldEmailRouterDatabaseInitializationError(
'Caller did not provide any target / client entries in JSON data - no rules to parse' +
os.linesep + 'Aborting')
# pass 1 - because of complex json we have to do two level loop to get names of the instance keys
# provided. Remember, we only read rules for our specified instance type (i.e. BLUE)
#
target_or_client_keys_found = []
for this_target_or_client in json_data['router_rules']:
self.logger.info('Reading data for target / client = ' + str(this_target_or_client))
for tc_name, tc_router_rules in this_target_or_client.items():
target_or_client_keys_found.append(tc_name)
# now we have a valid instance set - time to parse the rules
# If we fail here we will abort initialization
self.logger.info('Parsing data for target / client "' + tc_name + '"')
self.logger.info('Rules = ' + str(tc_router_rules))
# now make sure required elements for instance data are there
required_elements = [
'match_rules',
'destination',
'target_priority']
required_but_not_found = []
for this_required in required_elements:
if this_required not in tc_router_rules:
required_but_not_found.append(this_required)
if len(required_but_not_found) > 0:
raise EmeraldEmailRouterDatabaseInitializationError(
'Aborting as instance data for router instance type "' +
self.router_instance_type.value.instance_type_name.lower() +
'" did not contain required element(s): ' +
','.join([x for x in required_but_not_found])
)
self.logger.info('Required elements found - parsing rules')
# target priority specifies which target is examined and handled first, since one inbound email
# may be handled to multiple targets. The priority CANNOT BE THE SAME for multiple entries
# This will be enforced in the database initialization
target_priority_from_json = tc_router_rules['target_priority']
try:
target_priority = float(target_priority_from_json)
if target_priority <= 0:
raise ValueError('Negative number not allowed for target_priority')
except ValueError as vex:
raise EmeraldEmailRouterDatabaseInitializationError(
'Cannot initialize as target "' + tc_name + '" contains an invalid target_priority value' +
os.linesep + 'Must be a positive number' +
os.linesep + 'Value provided = ' + str(target_priority_from_json) + ' (input type = ' +
type(target_priority_from_json).__name__ + ')' +
os.linesep + 'Exception message: ' + str(vex.args[0])
)
# match_rules is an list of elements (at least one), each of which is a dict
rule_count = len(tc_router_rules['match_rules'])
if rule_count < 1:
raise EmeraldEmailRouterDatabaseInitializationError(
'Provided match_rules structure for instance type "' +
self.router_instance_type.value.instance_type_name.lower() +
'" contains no actual rules. Aborting'
)
rules_parse_error_log = dict()
# accumulate all the rules in the source datastore and then we will write into config if all valid
rules_for_target: List[EmailRouterRule] = list()
for rule_count, this_rule in enumerate(tc_router_rules['match_rules'], start=1):
self.logger.info('Checking rule "' + str(this_rule) + '"')
try:
rule_match_priority = float(this_rule['match_priority'])
except KeyError:
raise EmeraldEmailRouterDatabaseInitializationError(
'Parameter "match_priority" not found in rule #' +
str(rule_count) + ' - aborting')
# initialize our text based fields, noting we treat empty strings as nulls
sender_domain = this_rule['sender_domain'] \
if ('sender_domain' in this_rule and len(this_rule['sender_domain']) > 0) \
else None
sender_name = this_rule['sender_name'] \
if ('sender_name' in this_rule and len(this_rule['sender_name']) > 0) \
else None
recipient_name = this_rule['recipient_name'] \
if ('recipient_name' in this_rule and len(this_rule['recipient_name'])) \
else None
attachment_included = this_rule['attachment_included'] \
if 'attachment_included' in this_rule and len(this_rule['attachment_included']) > 0 \
else None
body_size_minimum = this_rule['body_size_minimum'] \
if 'body_size_minimum' in this_rule and len(this_rule['body_size_minimum']) > 0 \
else None
body_size_maximum = this_rule['body_size_maximum'] \
if 'body_size_maximum' in this_rule and len(this_rule['body_size_maximum']) > 0 \
else None
# initialize the ip whitelisting which will arrive as an (optional) comma separated list of CIDRs
sender_ip_whitelist_csv = this_rule['sender_ip_whitelist'] \
if 'sender_ip_whitelist' in this_rule and len(this_rule['sender_ip_whitelist']) > 0 \
else None
# now if present, split on comma and parse the values
sender_ip_whitelist_set = None
if sender_ip_whitelist_csv is not None:
sender_ip_whitelist_set = set()
for this_ip_count, this_ip_entry in enumerate(sender_ip_whitelist_csv.split(','), start=1):
self.logger.debug('Testing entry #' + str(this_ip_count) + ' IP whitelist - value = ' +
str(this_ip_entry))
# now attempt to convert this entry into an IP network (i.e. CIDR)
try:
this_entry_as_ip_network = IPNetwork(this_ip_entry)
except AddrFormatError as afex:
if rule_match_priority not in rules_parse_error_log:
rules_parse_error_log[rule_match_priority] = list()
rules_parse_error_log[rule_match_priority].append(
'Unable to parse entry #' + str(this_ip_count) + ' (value ' +
str(this_ip_entry) + ') as an IP network (for whitelist)' + os.linesep +
'Exception detail: ' + str(afex.args[0])
)
# loop through all so we parse every error | |
think this is not a common reason for this mode
if pol == -1:
return 0
elif pol == -0.5:
return 1
elif 90 < pol <= 180:
return 3
else:
return 2
def sample_pol(self, pol):
    """Return the effective polarization angle in the sample frame.

    Projects the requested polarization ``pol`` (degrees) through the
    current rotation-motor setpoint and converts back to degrees.
    """
    theta_deg = self.rotation_motor.user_setpoint.get()
    pol_rad = pol * np.pi / 180
    theta_rad = theta_deg * np.pi / 180
    projected = np.cos(pol_rad) * np.sin(theta_rad)
    return np.arccos(projected) * 180 / np.pi
def m3pitchcalc(self, energy, locked):
    """Compute the M3 mirror pitch for a given photon energy (eV).

    When ``locked`` is true the current pitch setpoint is returned
    unchanged. Otherwise an empirical double-exponential fit, selected
    by the currently installed grating, is evaluated and the result is
    rounded to two decimal places.
    """
    pitch = self.mir3Pitch.setpoint.get()
    if locked:
        return pitch
    if "1200" in self.monoen.gratingx.readback.get():
        # empirical fit for the 1200 l/mm grating
        pitch = (self.m3offset.get()
                 + 0.038807 * np.exp(-(energy - 100) / 91.942)
                 + 0.050123 * np.exp(-(energy - 100) / 1188.9))
    elif "250" in self.monoen.gratingx.readback.get():
        # empirical fit for the 250 l/mm grating
        pitch = (self.m3offset.get()
                 + 0.022665 * np.exp(-(energy - 90) / 37.746)
                 + 0.024897 * np.exp(-(energy - 90) / 450.9))
    return round(100 * pitch) / 100
def choose_harmonic(self, energy, pol, locked):
    """Select the undulator harmonic for the requested energy.

    Keeps the current harmonic when ``locked``; otherwise uses the
    first harmonic below 1200 eV and the third harmonic above.
    ``pol`` is accepted for signature compatibility but unused here.
    """
    if locked:
        return self.harmonic.get()
    return 1 if energy < 1200 else 3
def base_set_polarization(pol, en):
    """Bluesky plan: move the energy device's polarization axis to ``pol``.

    :param pol: target polarization value
    :param en: combined-energy device exposing a ``polarization`` component
    :returns: 0 (plan return value)
    """
    yield from bps.mv(en.polarization, pol)
    return 0
def base_grating_to_250(mono_en, en):
    """Bluesky plan: move the monochromator grating to 250 l/mm.

    Returns 0 immediately if the grating is already at 250 l/mm,
    otherwise closes photon shutter 4, moves the grating, restores the
    mirror/grating offsets, cff, and energy, reopens the shutter and
    returns 1.

    :param mono_en: monochromator energy device (provides gratingx/cff)
    :param en: combined-energy device to re-set after the move
    """
    # renamed from 'type' to avoid shadowing the builtin
    grating_state = mono_en.gratingx.readback.get()
    if '250' in grating_state:
        print("the grating is already at 250 l/mm")
        return 0  # the grating is already here
    print("Moving the grating to 250 l/mm. This will take a minute...")
    yield from psh4.close_plan()
    yield from bps.abs_set(mono_en.gratingx, 2, wait=True)
    # yield from bps.sleep(60)
    yield from bps.mv(mirror2.user_offset, 0.04)  # 0.0315)
    yield from bps.mv(grating.user_offset, -0.0874)  # -0.0959)
    yield from bps.mv(mono_en.cff, 1.385)
    yield from bps.mv(en, 270)
    yield from psh4.open_plan()
    print("the grating is now at 250 l/mm")
    return 1
def base_grating_to_1200(mono_en, en):
    """Bluesky plan: move the monochromator grating to 1200 l/mm.

    Returns 0 immediately if the grating is already at 1200 l/mm,
    otherwise closes photon shutter 4, moves the grating, restores the
    mirror/grating offsets, cff, and energy, reopens the shutter and
    returns 1.

    :param mono_en: monochromator energy device (provides gratingx/cff)
    :param en: combined-energy device to re-set after the move
    """
    # renamed from 'type' to avoid shadowing the builtin
    grating_state = mono_en.gratingx.readback.get()
    if '1200' in grating_state:
        print("the grating is already at 1200 l/mm")
        return 0  # the grating is already here
    print("Moving the grating to 1200 l/mm. This will take a minute...")
    yield from psh4.close_plan()
    yield from bps.abs_set(mono_en.gratingx, 9, wait=True)
    # yield from bps.sleep(60)
    yield from bps.mv(mirror2.user_offset, 0.2044)  # 0.1962) #0.2052) # 0.1745) # 8.1264)
    yield from bps.mv(grating.user_offset, 0.0769)  # 0.0687) # 0.0777) # 0.047) # 7.2964) # 7.2948)#7.2956
    yield from bps.mv(mono_en.cff, 1.7)
    yield from bps.mv(en, 270)
    yield from psh4.open_plan()
    print("the grating is now at 1200 l/mm")
    return 1
def epugap_from_en_pol(energy, polarization):
gap = None
if polarization == 190: # vertical polarization (29500 phase)
if 145.212 <= energy < 1100:
enoff = energy - 145.212
gap = (
(enoff ** 0) * 14012.9679723399
+ (enoff ** 1) * 50.90077784479197
+ (enoff ** 2) * -0.151128059295173
+ (enoff ** 3) * 0.0007380466942855418
+ (enoff ** 4) * -2.88796126025716e-06
+ (enoff ** 5) * 7.334088791503296e-09
+ (enoff ** 6) * -1.138174337292876e-11
+ (enoff ** 7) * 1.043317214147193e-14
+ (enoff ** 8) * -5.190019656736424e-18
+ (enoff ** 9) * 1.081963010325867e-21
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 145.212
gap = (
(enoff ** 0) * 14012.9679723399
+ (enoff ** 1) * 50.90077784479197
+ (enoff ** 2) * -0.151128059295173
+ (enoff ** 3) * 0.0007380466942855418
+ (enoff ** 4) * -2.88796126025716e-06
+ (enoff ** 5) * 7.334088791503296e-09
+ (enoff ** 6) * -1.138174337292876e-11
+ (enoff ** 7) * 1.043317214147193e-14
+ (enoff ** 8) * -5.190019656736424e-18
+ (enoff ** 9) * 1.081963010325867e-21
)
else:
gap = None
elif polarization == 126: # 26000 phase
if 159.381 <= energy < 1100:
enoff = energy - 159.381
gap = (
(enoff ** 0) * 14016.21086765142
+ (enoff ** 1) * 47.07181476458327
+ (enoff ** 2) * -0.1300551161025656
+ (enoff ** 3) * 0.0006150285348211382
+ (enoff ** 4) * -2.293881944658508e-06
+ (enoff ** 5) * 5.587375098889097e-09
+ (enoff ** 6) * -8.43630153398218e-12
+ (enoff ** 7) * 7.633856981759912e-15
+ (enoff ** 8) * -3.794296038862279e-18
+ (enoff ** 9) * 7.983637046811202e-22
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 159.381
gap = (
(enoff ** 0) * 14016.21086765142
+ (enoff ** 1) * 47.07181476458327
+ (enoff ** 2) * -0.1300551161025656
+ (enoff ** 3) * 0.0006150285348211382
+ (enoff ** 4) * -2.293881944658508e-06
+ (enoff ** 5) * 5.587375098889097e-09
+ (enoff ** 6) * -8.43630153398218e-12
+ (enoff ** 7) * 7.633856981759912e-15
+ (enoff ** 8) * -3.794296038862279e-18
+ (enoff ** 9) * 7.983637046811202e-22
)
else:
gap = None
elif polarization == 123: # 23000 phase
if 182.5 <= energy < 1100:
enoff = energy - 182.5
gap = (
(enoff ** 0) * 14003.31346237464
+ (enoff ** 1) * 40.94577604418467
+ (enoff ** 2) * -0.06267710555062726
+ (enoff ** 3) * 0.0001737842192174001
+ (enoff ** 4) * -7.357701847539232e-07
+ (enoff ** 5) * 2.558819479531793e-09
+ (enoff ** 6) * -5.240182651164082e-12
+ (enoff ** 7) * 6.024494955600835e-15
+ (enoff ** 8) * -3.616738308743303e-18
+ (enoff ** 9) * 8.848652101678885e-22
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 182.5
gap = (
(enoff ** 0) * 14003.31346237464
+ (enoff ** 1) * 40.94577604418467
+ (enoff ** 2) * -0.06267710555062726
+ (enoff ** 3) * 0.0001737842192174001
+ (enoff ** 4) * -7.357701847539232e-07
+ (enoff ** 5) * 2.558819479531793e-09
+ (enoff ** 6) * -5.240182651164082e-12
+ (enoff ** 7) * 6.024494955600835e-15
+ (enoff ** 8) * -3.616738308743303e-18
+ (enoff ** 9) * 8.848652101678885e-22
)
else:
gap = None
elif polarization == 121: # 21000 phase
if 198.751 <= energy < 1100:
enoff = energy - 198.751
gap = (
(enoff ** 0) * 14036.87876588605
+ (enoff ** 1) * 36.26534721487319
+ (enoff ** 2) * -0.02493769623114209
+ (enoff ** 3) * 7.394536103134409e-05
+ (enoff ** 4) * -7.431387500375352e-07
+ (enoff ** 5) * 3.111643242754014e-09
+ (enoff ** 6) * -6.397457929818655e-12
+ (enoff ** 7) * 7.103146460443289e-15
+ (enoff ** 8) * -4.1024632494443e-18
+ (enoff ** 9) * 9.715673261754361e-22
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 198.751
gap = (
(enoff ** 0) * 14036.87876588605
+ (enoff ** 1) * 36.26534721487319
+ (enoff ** 2) * -0.02493769623114209
+ (enoff ** 3) * 7.394536103134409e-05
+ (enoff ** 4) * -7.431387500375352e-07
+ (enoff ** 5) * 3.111643242754014e-09
+ (enoff ** 6) * -6.397457929818655e-12
+ (enoff ** 7) * 7.103146460443289e-15
+ (enoff ** 8) * -4.1024632494443e-18
+ (enoff ** 9) * 9.715673261754361e-22
)
else:
gap = None
elif polarization == 118: # 18000 phase
if 207.503 <= energy < 1100:
enoff = energy - 207.503
gap = (
(enoff ** 0) * 14026.99244058688
+ (enoff ** 1) * 41.45793369967348
+ (enoff ** 2) * -0.05393526187293287
+ (enoff ** 3) * 0.000143951535786684
+ (enoff ** 4) * -3.934262835746608e-07
+ (enoff ** 5) * 6.627045869131144e-10
+ (enoff ** 6) * -4.544338541442881e-13
+ (enoff ** 7) * -8.922084434570775e-17
+ (enoff ** 8) * 2.598052818031009e-19
+ (enoff ** 9) * -8.57226301371417e-23
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 207.503
gap = (
(enoff ** 0) * 14026.99244058688
+ (enoff ** 1) * 41.45793369967348
+ (enoff ** 2) * -0.05393526187293287
+ (enoff ** 3) * 0.000143951535786684
+ (enoff ** 4) * -3.934262835746608e-07
+ (enoff ** 5) * 6.627045869131144e-10
+ (enoff ** 6) * -4.544338541442881e-13
+ (enoff ** 7) * -8.922084434570775e-17
+ (enoff ** 8) * 2.598052818031009e-19
+ (enoff ** 9) * -8.57226301371417e-23
)
else:
gap = None
elif polarization == 115: # 15000 phase
if 182.504 <= energy < 1100:
enoff = energy - 182.504
gap = (
(enoff ** 0) * 13992.18828384784
+ (enoff ** 1) * 53.60817055119084
+ (enoff ** 2) * -0.1051753524422272
+ (enoff ** 3) * 0.0003593146854690839
+ (enoff ** 4) * -1.31756627781552e-06
+ (enoff ** 5) * 3.797812404620049e-09
+ (enoff ** 6) * -7.051992603620334e-12
+ (enoff ** 7) * 7.780656762625199e-15
+ (enoff ** 8) * -4.613775121707344e-18
+ (enoff ** 9) * 1.130384721733557e-21
)
elif 1100 <= energy < 2200: # third harmonic
enoff = (energy / 3) - 182.504
gap = (
(enoff ** 0) * 13992.18828384784
+ (enoff ** 1) * 53.60817055119084
+ (enoff ** 2) * -0.1051753524422272
+ (enoff ** 3) * 0.0003593146854690839
+ (enoff ** 4) * -1.31756627781552e-06
+ (enoff ** 5) * 3.797812404620049e-09
+ (enoff ** 6) * -7.051992603620334e-12
+ (enoff ** 7) * 7.780656762625199e-15
+ (enoff ** 8) * -4.613775121707344e-18
+ (enoff ** 9) * 1.130384721733557e-21
)
else:
gap = None
elif polarization == 112: # 12000 phase
if 144.997 <= energy < 1100:
enoff = energy - 144.997
gap = | |
<filename>lccserver/frontend/searchserver_handlers.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''searchserver_handlers.py - <NAME> (<EMAIL>) -
Apr 2018
These are Tornado handlers for the searchserver.
'''
####################
## SYSTEM IMPORTS ##
####################
import os
import os.path
import logging
import numpy as np
from datetime import datetime, timedelta
import re
import hashlib
from cryptography.fernet import Fernet
######################################
## CUSTOM JSON ENCODER FOR FRONTEND ##
######################################
# we need this to send objects with the following types to the frontend:
# - bytes
# - ndarray
import json
class FrontendEncoder(json.JSONEncoder):
    """JSON encoder for objects sent to the frontend.

    Handles types the stdlib encoder can't serialize: numpy arrays and
    scalars, datetimes (ISO format), bytes (decoded to str), complex
    numbers (as a [real, imag] pair), and non-finite numpy floats
    (mapped to null so the browser's JSON.parse doesn't choke on NaN).

    Note: ``np.float_`` (used originally) was removed in NumPy 2.0 and
    would raise AttributeError at class-definition time; ``np.floating``
    and ``np.integer`` cover all numpy scalar widths instead.
    """

    def default(self, obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        elif isinstance(obj, datetime):
            return obj.isoformat()
        elif isinstance(obj, bytes):
            return obj.decode()
        elif isinstance(obj, complex):
            return (obj.real, obj.imag)
        elif isinstance(obj, np.floating):
            # plain non-finite Python floats never reach default(), so
            # only numpy float scalars need the finiteness check here
            return None if not np.isfinite(obj) else float(obj)
        elif isinstance(obj, np.integer):
            return int(obj)
        else:
            return json.JSONEncoder.default(self, obj)
# this replaces the default encoder and makes it so Tornado will do the right
# thing when it converts dicts to JSON when a
# tornado.web.RequestHandler.write(dict) is called.
# NOTE(review): json._default_encoder is a private CPython attribute; this
# monkey-patch affects every default-options json.dumps() in the process.
json._default_encoder = FrontendEncoder()
#############
## LOGGING ##
#############
# get a logger
LOGGER = logging.getLogger(__name__)
#####################
## TORNADO IMPORTS ##
#####################
import tornado.ioloop
import tornado.httpserver
import tornado.web
from tornado.escape import xhtml_escape, squeeze
from tornado.httpclient import AsyncHTTPClient
from tornado import gen
###################
## LOCAL IMPORTS ##
###################
from ..backend import dbsearch
from ..backend import datasets
from .basehandler import BaseHandler
from astrobase.coordutils import (
hms_to_decimal, dms_to_decimal,
hms_str_to_tuple, dms_str_to_tuple
)
###########################
## SOME USEFUL CONSTANTS ##
###########################
# single object coordinate search
# ra dec radius
# decimal degrees: "123.45 -12.34 [radius-arcmin]" (radius optional)
COORD_DEGSEARCH_REGEX = re.compile(
    r'^(\d{1,3}\.{0,1}\d*) ([+\-]?\d{1,2}\.{0,1}\d*) ?(\d{1,2}\.{0,1}\d*)?$'
)
# sexagesimal: "HH:MM:SS.s [+-]DD:MM:SS.s [radius-arcmin]" (colon or space
# separated; radius optional)
COORD_HMSSEARCH_REGEX = re.compile(
    r'^(\d{1,2}[: ]\d{2}[: ]\d{2}\.{0,1}\d*) '
    r'([+\-]?\d{1,2}[: ]\d{2}[: ]\d{2}\.{0,1}\d*) ?'
    r'(\d{1,2}\.{0,1}\d*)?$'
)
# multiple object search
# objectid ra dec, objectid ra dec, objectid ra dec, etc.
COORD_DEGMULTI_REGEX = re.compile(
    r'^([a-zA-Z0-9_+\-\[\].]+)\s(\d{1,3}\.{0,1}\d*)\s([+\-]?\d{1,2}\.{0,1}\d*)$'
)
COORD_HMSMULTI_REGEX = re.compile(
    r'^([a-zA-Z0-9_+\-\[\].]+)\s(\d{1,2}[: ]\d{2}[: ]\d{2}\.{0,1}\d*)\s'
    r'([+\-]?\d{1,2}[: ]\d{2}[: ]\d{2}\.{0,1}\d*)$'
)
# plain-text body for the "your query finished" notification email;
# filled in with str.format by the dataset-completion handler
DATASET_READY_EMAIL_TEMPLATE = '''\
Hello,
This is an automated message from the LCC-Server at: {lccserver_baseurl}.
The result dataset generated from query {setid} is now ready.
Matched objects: {set_nobjects}
Dataset URL: {set_url}
Dataset CSV: {set_csv}
Thanks,
LCC-Server admins
{lccserver_baseurl}
'''
#############################
## SEARCH HELPER FUNCTIONS ##
#############################
def parse_coordstring(coordstring):
    '''Parse a single-object coordinate search string.

    Expected form: <ra> <dec> [<radius arcmin>], in either decimal degrees
    or sexagesimal notation. Returns a tuple
    (paramsok, radeg, decldeg, radiusdeg); the last three are None when the
    string cannot be parsed.
    '''
    searchstr = squeeze(coordstring).strip()

    # run both coordinate regexes over the cleaned-up input
    degcoordtry = COORD_DEGSEARCH_REGEX.match(searchstr)
    hmscoordtry = COORD_HMSSEARCH_REGEX.match(searchstr)

    # check the sexagesimal form first because some "HH MM SS" inputs also
    # produce false positives on the decimal-degree pattern
    if hmscoordtry:
        ra, dec, radius = hmscoordtry.groups()
        ra_hr, ra_min, ra_sec = hms_str_to_tuple(ra)
        dec_sign, dec_deg, dec_min, dec_sec = dms_str_to_tuple(dec)

        # make sure each sexagesimal component is in its legal range
        ra_ok = (0 <= ra_hr < 24) and (0 <= ra_min < 60) and (0 <= ra_sec < 60)
        dec_ok = (0 <= dec_deg < 90) and (0 <= dec_min < 60) and (0 <= dec_sec < 60)
        if not (ra_ok and dec_ok):
            return False, None, None, None

        # radius is given in arcmin; default to 5 arcmin, stored in degrees
        searchrad = float(radius)/60.0 if radius else 5.0/60.0
        return (True,
                hms_to_decimal(ra_hr, ra_min, ra_sec),
                dms_to_decimal(dec_sign, dec_deg, dec_min, dec_sec),
                searchrad)

    if degcoordtry:
        ra, dec, radius = degcoordtry.groups()
        try:
            ra, dec = float(ra), float(dec)
        except Exception:
            LOGGER.error('could not parse search string: %s' % coordstring)
            return False, None, None, None

        if (abs(ra) < 360.0) and (abs(dec) < 90.0):
            # wrap a negative RA into the [0, 360) range
            if ra < 0:
                ra = 360.0 + ra
            searchrad = float(radius)/60.0 if radius else 5.0/60.0
            return True, ra, dec, searchrad
        return False, None, None, None

    # neither regex matched the input
    return False, None, None, None
def parse_objectlist_item(objectline):
    '''Parse one object-list line of the form: <objectid> <ra> <decl>.

    This is used for the xmatch function. Returns a tuple
    (paramsok, objid, radeg, decldeg); the last three are None on failure.
    '''
    searchstr = squeeze(objectline).strip()

    # try all the regexes and see if one of them works
    degcoordtry = COORD_DEGMULTI_REGEX.match(searchstr)
    hmscoordtry = COORD_HMSMULTI_REGEX.match(searchstr)

    if hmscoordtry:
        try:
            objectid, ra, dec = hmscoordtry.groups()
            objectid, ra, dec = (
                xhtml_escape(objectid), xhtml_escape(ra), xhtml_escape(dec)
            )
            ra_tuple, dec_tuple = hms_str_to_tuple(ra), dms_str_to_tuple(dec)

            # get rid of quotes and semicolons in objectid
            objectid = objectid.replace('&','').replace(';','')
            objectid = objectid.replace('#','').replace("'",'')
            # FIX: this literal is the HTML entity for a single quote
            # ('&#39;', what xhtml_escape emits for "'"); the previous
            # version of this line contained a mangled, syntactically
            # invalid literal
            objectid = objectid.replace('&#39;','')

            ra_hr, ra_min, ra_sec = ra_tuple
            dec_sign, dec_deg, dec_min, dec_sec = dec_tuple

            # make sure the coordinates are all legit
            if ((0 <= ra_hr < 24) and
                (0 <= ra_min < 60) and
                (0 <= ra_sec < 60) and
                (0 <= dec_deg < 90) and
                (0 <= dec_min < 60) and
                (0 <= dec_sec < 60)):

                ra_decimal = hms_to_decimal(ra_hr, ra_min, ra_sec)
                dec_decimal = dms_to_decimal(dec_sign,
                                             dec_deg,
                                             dec_min,
                                             dec_sec)
                paramsok = True
                objid, radeg, decldeg = objectid, ra_decimal, dec_decimal
            else:
                paramsok = False
                objid, radeg, decldeg = None, None, None

        except Exception:
            LOGGER.error('could not parse object line: %s' % objectline)
            paramsok = False
            objid, radeg, decldeg = None, None, None

    elif degcoordtry:
        try:
            objectid, ra, dec = degcoordtry.groups()
            objectid, ra, dec = (
                xhtml_escape(objectid), xhtml_escape(ra), xhtml_escape(dec)
            )
            ra, dec = float(ra), float(dec)

            if ((abs(ra) < 360.0) and (abs(dec) < 90.0)):
                # wrap a negative RA into the [0, 360) range
                if ra < 0:
                    ra = 360.0 + ra
                paramsok = True
                objid, radeg, decldeg = objectid, ra, dec
            else:
                paramsok = False
                objid, radeg, decldeg = None, None, None

        except Exception:
            LOGGER.error('could not parse object line: %s' % objectline)
            paramsok = False
            objid, radeg, decldeg = None, None, None

    else:
        paramsok = False
        objid, radeg, decldeg = None, None, None

    return paramsok, objid, radeg, decldeg
def parse_xmatch_input(inputtext, matchradtext,
                       maxradius=30.0,
                       maxlines=5001,
                       maxlinelen=280):
    '''
    This tries to parse xmatch input.

    inputtext is one "<objectid> <ra> <decl>" spec per line; matchradtext is
    the match radius in arcsec. Returns (xmatchdict, xmatch_distarcsec) on
    success, or (None, None) when the input cannot be parsed.
    '''
    itext = inputtext

    # parse the xmatchradius text; fall back to 3 arcsec on anything invalid
    try:
        matchrad = float(xhtml_escape(matchradtext))
        if 0 < matchrad < maxradius:
            xmatch_distarcsec = matchrad
        else:
            xmatch_distarcsec = 3.0
    except Exception:
        xmatch_distarcsec = 3.0

    itextlines = itext.split('\n')

    if len(itextlines) > maxlines:
        LOGGER.error('too many lines to parse')
        # FIX: this previously returned a bare None while every other exit
        # returns a 2-tuple; callers unpacking two values would crash here
        return None, None

    # here, we'll truncate each line to maxlength and drop comment lines
    itextlines = [x[:maxlinelen] for x in itextlines if not x.startswith('#')]
    parsed_lines = [parse_objectlist_item(x) for x in itextlines]

    # keep only the lines where every parsed field is truthy
    oklines = [x for x in parsed_lines if all(x)]

    if 0 < len(oklines) < maxlines:
        objectid = [x[1] for x in oklines]
        ra = [x[2] for x in oklines]
        decl = [x[3] for x in oklines]

        # make sure to uniquify the objectids
        uniques, counts = np.unique(objectid, return_counts=True)
        duplicated_objectids = uniques[counts > 1]

        if duplicated_objectids.size > 0:
            objectid = np.array(objectid)
            # redo the objectid array so it has a bit larger dtype so the
            # extra "_<n>" dedup tag can fit into the field
            dt = objectid.dtype.str
            dt = '<U%s' % (
                int(dt.replace('<','').replace('U','').replace('S','')) + 4
            )
            objectid = np.array(
                objectid,
                dtype=dt
            )
            for dupe in duplicated_objectids:
                objectid_inds = np.where(
                    objectid == dupe
                )
                # mark the duplicates, assume the first instance is the
                # actual one
                for ncounter, nind in enumerate(objectid_inds[0][1:]):
                    objectid[nind] = '%s_%s' % (
                        objectid[nind],
                        ncounter+2
                    )
                    LOGGER.warning(
                        'xmatch input: tagging '
                        'duplicated instance %s of objectid: '
                        '%s as %s_%s' %
                        (ncounter+2, dupe, dupe, ncounter+2)
                    )
            objectid = objectid.tolist()

        xmatchdict = {
            'data':{'objectid':objectid,
                    'ra':ra,
                    'decl':decl},
            'columns':['objectid','ra','decl'],
            'types':['str','float','float'],
            'colobjectid':'objectid',
            'colra':'ra',
            'coldec':'decl'
        }

        return xmatchdict, xmatch_distarcsec
    else:
        LOGGER.error('could not parse input xmatch spec')
        return None, None
def parse_conditions(conditions, maxlength=1000):
    '''This parses filter conditions provided in the query args.

    Translates the web-form operator tokens (gt, lt, ge, le, eq, ne, ct)
    into their SQL equivalents and returns the resulting condition string,
    or None if parsing fails.
    '''
    # hard-limit the input length so we never process unbounded user input
    conditions = conditions[:maxlength]

    try:
        conditions = xhtml_escape(squeeze(conditions))
        # return the "'" character that got escaped
        # FIX: the entity literal here ('&#39;', what xhtml_escape emits
        # for "'") had been mangled into invalid syntax
        conditions = conditions.replace('&#39;',"'")

        # replace the operators with their SQL equivalents
        farr = conditions.split(' ')
        farr = ['>' if x == 'gt' else x for x in farr]
        farr = ['<' if x == 'lt' else x for x in farr]
        farr = ['>=' if x == 'ge' else x for x in farr]
        farr = ['<=' if x == 'le' else x for x in farr]
        farr = ['=' if x == 'eq' else x for x in farr]
        farr = ['!=' if x == 'ne' else x for x in farr]
        farr = ['like' if x == 'ct' else x for x in farr]
        LOGGER.info(farr)

        # deal with like operator: wrap the quoted operand in % wildcards
        # FIXME: this is ugly :(
        # (note: the second insert index stays correct because the first
        # insert shifts the closing quote one position to the right)
        for i, x in enumerate(farr):
            if x == 'like':
                LOGGER.info(farr[i+1])
                farrnext = farr[i+1]
                farrnext_left = farrnext.index("'")
                farrnext_right = farrnext.rindex("'")
                farrnext = list(farrnext)
                farrnext.insert(farrnext_left+1,'%')
                farrnext.insert(farrnext_right+1,'%')
                farr[i+1] = ''.join(farrnext)

        conditions = ' '.join(farr)
        LOGGER.info('conditions = %s' % conditions)
        return conditions

    except Exception:
        LOGGER.exception('could not parse the filter conditions')
        return None
def query_to_cachestr(name, args):
'''
This turns the query specification into a cache string.
'''
cacheable_dict = {}
arg_keys = sorted(list(args.keys()) + ['type'])
| |
difficultTwoButton)
window.blit(difficultTwo, difficultTwoRect)
difficultThreeButton = pg.Rect(5 * (widthCheck / 8), 300, widthCheck / 4, 50)
difficultThree = mediumFont.render("Difficulty 3 - Hard", True, WHITE)
difficultThreeRect = difficultThree.get_rect()
difficultThreeRect.center = difficultThreeButton.center
pg.draw.rect(window, BLACK, difficultThreeButton)
window.blit(difficultThree, difficultThreeRect)
# button that allows user to go back to the main menu
goBackButton = pg.Rect((widthCheck // 16), 100, widthCheck // 5, 50)
goBack = mediumFont.render("Go Back", True, WHITE)
goBackRect = goBack.get_rect()
goBackRect.center = goBackButton.center
pg.draw.rect(window, BLACK, goBackButton)
window.blit(goBack, goBackRect)
click, _, _ = pg.mouse.get_pressed()
if click == 1:
mouse = pg.mouse.get_pos()
if difficultOneButton.collidepoint(mouse):
time.sleep(0.6)
AIPlayer = True
AiDifficulty = False
AiDepth = 1
updateChessScreen()
pg.display.update()
elif difficultTwoButton.collidepoint(mouse):
AIPlayer = True
AiDifficulty = False
AiDepth = 2
updateChessScreen()
pg.display.update()
elif difficultThreeButton.collidepoint(mouse):
AIPlayer = True
AiDifficulty = False
AiDepth = 3
updateChessScreen()
pg.display.update()
elif goBackButton.collidepoint(mouse):
AiDifficulty = False
pg.display.update()
pg.display.update()
def mainMenu():
    """Draw the main menu and route button clicks to the sub-menus."""
    global changeColor, twoPlayer, AIPlayer, AiDifficulty, playerMode

    window.fill(BLACK)
    screenW = display[0]
    screenH = display[1]

    # Title of the program
    title = mediumFont.render("Chess With Me", True, RED)
    window.blit(title, (screenW // 3 + 40, 20))

    # button to select the colour scheme of the board
    colourButton = pg.Rect((screenW // 16), 50, screenW // 5, 50)
    colourLabel = mediumFont.render("Change Color", True, BLACK)
    colourRect = colourLabel.get_rect(center=colourButton.center)
    pg.draw.rect(window, TURQUOISE, colourButton)
    window.blit(colourLabel, colourRect)

    # two game-mode buttons: play against the AI, or against other people
    aiButton = pg.Rect((screenW / 8), (screenH / 2), screenW / 4, 50)
    aiLabel = mediumFont.render("Against AI", True, BLACK)
    aiRect = aiLabel.get_rect(center=aiButton.center)
    pg.draw.rect(window, WHITE, aiButton)
    window.blit(aiLabel, aiRect)

    othersButton = pg.Rect(5 * (screenW / 8), (screenH / 2), screenW / 4, 50)
    othersLabel = mediumFont.render("Against others", True, BLACK)
    othersRect = othersLabel.get_rect(center=othersButton.center)
    pg.draw.rect(window, WHITE, othersButton)
    window.blit(othersLabel, othersRect)

    pg.display.update()

    # if the mouse clicks a button, set the flag for the menu to show next
    leftClick, _, _ = pg.mouse.get_pressed()
    if leftClick != 1:
        return
    mousePos = pg.mouse.get_pos()
    if othersButton.collidepoint(mousePos):
        playerMode = True
        pg.display.update()
    elif aiButton.collidepoint(mousePos):
        AiDifficulty = True
        pg.display.update()
    elif colourButton.collidepoint(mousePos):
        changeColor = True
def AIMinimax(positionCheck, alpha, beta, depth, maximise):
    """Alpha-beta minimax over positionCheck; returns the best evaluation found.

    NOTE(review): despite the parameter name, when ``maximise`` is True (the
    AI's / black's turn) this branch starts at +9999 and takes min() — i.e.
    it MINIMISES the evaluation — while the False branch maximises.
    Presumably getEvaluation's sign convention makes lower values better for
    black; confirm against getEvaluation before changing anything here.
    """
    # maximise is True when it's AI's turn
    if maximise:
        maxValue = 9999
        evaluationCheck = getEvaluation(positionCheck, True, True)
    else:
        maxValue = -9999
        evaluationCheck = getEvaluation(positionCheck, False, True)
    # terminal cut-off: a decisive evaluation (|eval| >= 900) or depth exhausted
    if evaluationCheck >= 900 or evaluationCheck <= -900 or depth == 0:
        return evaluationCheck
    for i in range(8):
        for j in range(8):
            if positionCheck[i][j] != "":
                # only consider the side to move: black when maximise is True,
                # white when it is False
                if (positionCheck[i][j].colour == "b" and maximise) or (
                        maximise is False and positionCheck[i][j].colour == "w"):
                    possibleCopy = [["" for i in range(8)] for j in range(8)]
                    possibleCopy = pieceMoves(i, j, possibleCopy, positionCheck)
                    # "green" cells mark legal destination squares
                    if not any("green" in checkRow for checkRow in possibleCopy):
                        continue
                    for x in range(8):
                        for y in range(8):
                            if possibleCopy[x][y] == "green":
                                # play the candidate move on a deep copy so the
                                # real position is never mutated
                                positionCopy = copy.deepcopy(positionCheck)
                                positionCopy[x][y] = positionCheck[i][j]
                                positionCopy[i][j] = ""
                                if maximise:
                                    # skip moves that leave black's king in check
                                    if kingInCheck(positionCopy, False) is False:
                                        newEval = AIMinimax(positionCopy, alpha, beta, depth - 1, not maximise)
                                        maxValue = min(newEval, maxValue)
                                        beta = min(beta, maxValue)
                                        # alpha-beta prune
                                        if beta <= alpha:
                                            return maxValue
                                else:
                                    # skip moves that leave white's king in check
                                    if kingInCheck(positionCopy, True) is False:
                                        newEval = AIMinimax(positionCopy, alpha, beta, depth - 1, not maximise)
                                        maxValue = max(newEval, maxValue)
                                        alpha = max(alpha, maxValue)
                                        # alpha-beta prune
                                        if beta <= alpha:
                                            return maxValue
    return maxValue
def mainAIFunction(positionCheck):
    """Search all of black's legal moves with AIMinimax and play the best one.

    Sets the module-level source square (row, column) and then calls
    movePiece with the chosen destination. Returns 0 (and does nothing)
    when the game is over or it is not the AI's turn.
    """
    global row, column, newY, newX, AiDepth, checkmateCondition
    evaluation = 10000
    rowMove, columnMove = -1, -1
    previousRow, previousColumn = 0, 0
    if checkmateCondition or chessBoard.playerOneTurn is True:
        return 0
    for i in range(8):
        for j in range(8):
            if positionCheck[i][j] != "":
                # the AI always plays black
                if positionCheck[i][j].colour == "b":
                    possibleCopy = [["" for _ in range(8)] for _ in range(8)]
                    possibleCopy = pieceMoves(i, j, possibleCopy, positionCheck)
                    # "green" cells mark legal destination squares
                    if not any("green" in checkRow for checkRow in possibleCopy):
                        continue
                    for x in range(8):
                        for y in range(8):
                            if possibleCopy[x][y] == "green":
                                positionCopy = copy.deepcopy(positionCheck)
                                positionCopy[x][y] = positionCheck[i][j]
                                positionCopy[i][j] = ""
                                # never play a move that leaves black in check
                                if kingInCheck(positionCopy, False) is False:
                                    newEval = AIMinimax(positionCopy, -10000, 10000, AiDepth, False)
                                    # lower evaluations are better for black
                                    # (see AIMinimax's convention)
                                    if newEval <= evaluation:
                                        evaluation = newEval
                                        rowMove, columnMove = x, y
                                        previousRow, previousColumn = i, j
    # FIX: if no legal move was found (e.g. stalemate not caught by
    # checkmateCondition), bail out instead of calling movePiece with the
    # (-1, -1) sentinel destination
    if rowMove == -1:
        return 0
    row, column = previousRow, previousColumn
    movePiece(columnMove, rowMove)
def movePiece(moveX, moveY):
    """Execute the AI's chosen move on the shared board.

    (moveX, moveY) is the destination square; the source square is read from
    the module-level globals ``column`` and ``row`` set by mainAIFunction.
    """
    # updateChessScreen()- not sure if necessary
    print("Move Piece is being called")
    # never move for the AI while it is still the human player's turn
    if chessBoard.playerOneTurn:
        return 0
    chessBoard.movePiece(moveX, moveY, column, row)
    # hand the turn back to the human player and redraw
    chessBoard.playerOneTurn = True
    updateChessScreen()
def mainMoveFunction():
    """Process a human click on the board and, if the move lands, pass the turn."""
    global newPos, playerMove, newX, newY, row, column, AIPlayer
    mousePos = pg.mouse.get_pos()
    print(f"MOUSE POS {mousePos}")
    # pixel -> board coordinates: 70px squares, board offset 110px from the top
    pixelX, pixelY = mousePos
    newX = pixelX // 70
    newY = (pixelY - 110) // 70
    playerMove = chessBoard.movePossible(mousePos, column, row, playerMove)
    if playerMove is True:
        print("PLAYER MOVE")
        playerMove = False
        if AIPlayer:
            # the move succeeded, so give the AI its turn
            chessBoard.playerOneTurn = False
    # Call checkmate after game function so text can be blited onto screen
    updateChessScreen()
    checkmateCheck(chessBoard.playerOneTurn)
def againstOthersMenu():
    """Draw the "play against others" menu and handle its button clicks."""
    global onlinePlayer, twoPlayer, playerMode, networkClient, onlinePlayerOneTurn, onlineColourId, onlineBoardObject, onlinePreviousBoardPosition

    window.fill(WHITE)
    screenW = display[0]
    screenH = display[1]

    # Title of program
    title = mediumFont.render("Choose Mode", True, RED)
    window.blit(title, (screenW // 3 + 40, 20))

    # "Online Play" button
    onlineButton = pg.Rect((screenW / 8), (screenH / 3), screenW / 4, 50)
    onlineLabel = mediumFont.render("Online Play", True, WHITE)
    onlineRect = onlineLabel.get_rect(center=onlineButton.center)
    pg.draw.rect(window, BLACK, onlineButton)
    window.blit(onlineLabel, onlineRect)

    # "Local Play" button
    localButton = pg.Rect(5 * (screenW / 8), (screenH / 3), screenW / 4, 50)
    localLabel = mediumFont.render("Local Play", True, WHITE)
    localRect = localLabel.get_rect(center=localButton.center)
    pg.draw.rect(window, BLACK, localButton)
    window.blit(localLabel, localRect)

    # button that allows the user to go back to the main menu
    backButton = pg.Rect((screenW // 16), 100, screenW // 5, 50)
    backLabel = mediumFont.render("Go Back", True, WHITE)
    backRect = backLabel.get_rect(center=backButton.center)
    pg.draw.rect(window, BLACK, backButton)
    window.blit(backLabel, backRect)

    # route a left-click to the selected mode
    leftClick, _, _ = pg.mouse.get_pressed()
    if leftClick == 1:
        mousePos = pg.mouse.get_pos()
        if onlineButton.collidepoint(mousePos):
            onlinePlayer = True
            playerMode = False
            # connect to the server, which assigns this player a colour
            networkClient = Client()
            onlineColourId = networkClient.colourId
            onlinePreviousBoardPosition = networkClient.chessBoard.board
            print(f"Player has colour {onlineColourId}")
            if onlineColourId == "b":
                networkClient.chessBoard.otherPlayer = True
                onlinePlayerOneTurn = True
            updateChessScreen()
            pg.display.update()
        elif localButton.collidepoint(mousePos):
            twoPlayer = True
            playerMode = False
            updateChessScreen()
            pg.display.update()
        elif backButton.collidepoint(mousePos):
            playerMode = False
            pg.display.update()
    pg.display.update()
# Make sure that there is another player in the game
def onlineCheckForOtherPlayer():
    """Stub: intended to verify a second player has joined the online game.

    Currently unimplemented (no-op).
    """
    pass
def onlineCompareLists(currentBoardPosition):
    """Return True when currentBoardPosition matches the cached previous board.

    Two squares match when both are empty, or both hold pieces with the same
    type and colour; any other combination means the board changed.
    """
    global onlinePreviousBoardPosition
    for i in range(8):
        for j in range(8):
            previousSquare = onlinePreviousBoardPosition[i][j]
            currentSquare = currentBoardPosition[i][j]
            if previousSquare == "" and currentSquare == "":
                continue
            # exactly one of the two squares is empty -> boards differ
            if previousSquare == "" or currentSquare == "":
                return False
            # both occupied: the piece must be identical in type and colour
            if previousSquare.type != currentSquare.type:
                return False
            if previousSquare.colour != currentSquare.colour:
                return False
    return True
def onlinePieceMoves(pieceY, pieceX, possibleMoves, boardPosition):
    """Returns the possible moves for the piece"""
    global onlineBoardPosition
    # NOTE(review): the piece itself is looked up on the cached global
    # onlineBoardPosition, while the move computation receives the
    # boardPosition argument -- presumably both refer to the same board
    # state; confirm with callers before relying on the distinction
    return onlineBoardPosition[pieceY][pieceX].possibleMoves(pieceY, pieceX, possibleMoves, boardPosition)
def OnlineCheckPiece(mousePos):
    """Return True when the clicked board square holds a piece."""
    global onlineBoardPosition
    # pixel -> board coordinates (70px squares, board drawn 110px from the top)
    boardCol = mousePos[0] // 70
    boardRow = (mousePos[1] - 110) // 70
    # refresh the cached board from the server before inspecting it
    onlineBoardPosition = networkClient.getCurrentBoardPosition()
    return onlineBoardPosition[boardRow][boardCol] != ""
def OnlineCheckPlayerTurn(mousePos):
    """Get if the player can move piece"""
    global onlineBoardPosition, onlineColourId
    # pixel -> board coordinates (70px squares, board drawn 110px from the top)
    boardCol = mousePos[0] // 70
    boardRow = (mousePos[1] - 110) // 70
    colour = onlineBoardPosition[boardRow][boardCol].colour
    print(colour, onlinePlayerOneTurn, onlineColourId)
    # a player may only move pieces of their own colour, and only on their
    # turn: white moves while onlinePlayerOneTurn is True, black while False
    whiteCanMove = (colour == "w" and onlinePlayerOneTurn and onlineColourId == "w")
    blackCanMove = (colour == "b" and onlinePlayerOneTurn is False and onlineColourId == "b")
    if whiteCanMove or blackCanMove:
        return True
    return False
def createNetworkClient():
    """Create the global network client used for online play."""
    global networkClient
    networkClient = Client()
def onlineMoveFunction():
    """Send the player's attempted move to the server and update local state."""
    global newPos, playerMove, newX, newY, row, column, AIPlayer, onlinePlayerOneTurn, onlinePreviousBoardPosition, onlinePossible
    mousePos = pg.mouse.get_pos()
    # the server protocol expects: "Move <row> <column> <mouseX> <mouseY>"
    movePieceCommand = f"Move {row} {column} {mousePos[0]} {mousePos[1]}"
    playerMove = networkClient.sendMoveData(movePieceCommand)
    print(f"Player move = {playerMove}")
    if playerMove is True:
        # the server accepted the move: flip the turn and reset local caches
        playerMove = False
        onlinePlayerOneTurn = not onlinePlayerOneTurn
        onlinePreviousBoardPosition = networkClient.chessBoard.board
        onlinePossible = [["" for _ in range(8)] for _ in range(8)]
    # Call checkmate after game function so text can be blited onto screen
    updateChessScreen()
    # checkmateCheck(chessBoard.playerOneTurn)
def OnlineSendPossible(possibleMoves):
    """Send the possible moves to the server"""
    global networkClient
    networkClient.setPossible(possibleMoves)


def OnlineGetPossible():
    """Get the possible moves from the server"""
    global networkClient
    possible = networkClient.getCurrentPossible()
    return possible


def OnlineGetBoard():
    """Get the current board object"""
    global networkClient
    currentBoard = networkClient.receiveBoard()
    return currentBoard


def OnlineGetBoardPosition():
    """Get the current 8x8 board-position grid from the server client"""
    global networkClient
    currentBoardPosition = networkClient.getCurrentBoardPosition()
    return currentBoardPosition
def checkmateCheck(playerOneTurn):
global font, textPrint, text, checkmateCondition
if checkmate(chessBoard.board, playerOneTurn, False):
# print("CHECKMATE")
font = pg.font.SysFont("Helvetica", 75)
textPrint = "Player 1 Won!"
if chessBoard.playerOneTurn:
textPrint = "Player 2 Won!"
text = | |
self.api_sc.get_data("QS407SC", self.region, self.resolution, category_filters={"QS407SC_0_CODE": range(1,10)})
qs407.rename({"QS407SC_0_CODE": "C_ROOMS"}, axis=1, inplace=True)
qs407 = utils.cap_value(qs407, "C_ROOMS", 6, "OBS_VALUE")
#print(qs407.head())
assert qs407.OBS_VALUE.sum() == checksum
#print(self.api_sc.get_metadata("QS406SC", self.resolution))
qs406 = self.api_sc.get_data("QS406SC", self.region, self.resolution, category_filters={"QS406SC_0_CODE": range(1,9)})
qs406.rename({"QS406SC_0_CODE": "C_SIZHUK11"}, axis=1, inplace=True)
qs406 = utils.cap_value(qs406, "C_SIZHUK11", 4, "OBS_VALUE")
#print(qs406.head())
assert qs406.OBS_VALUE.sum() == checksum
nrooms = len(qs407.C_ROOMS.unique())
nsizes = len(qs406.C_SIZHUK11.unique())
m407 = utils.unlistify(qs407, ["GEOGRAPHY_CODE", "C_ROOMS"], [ngeogs, nrooms], "OBS_VALUE")
m406 = utils.unlistify(qs406, ["GEOGRAPHY_CODE", "C_SIZHUK11"], [ngeogs, nsizes], "OBS_VALUE")
a4404 = humanleague.qis([np.array([0,1]), np.array([0,2]), np.array([0,3])], [m4402, m407, m406])
utils.check_humanleague_result(a4404, [m4402, m407, m406])
self.lc4404 = utils.listify(a4404["result"], "OBS_VALUE", ["GEOGRAPHY_CODE", "C_TENHUK11", "C_ROOMS", "C_SIZHUK11"])
self.lc4404.GEOGRAPHY_CODE = utils.remap(self.lc4404.GEOGRAPHY_CODE, qs406.GEOGRAPHY_CODE.unique())
self.lc4404.C_TENHUK11 = utils.remap(self.lc4404.C_TENHUK11, tenure_table.C_TENHUK11.unique())
self.lc4404.C_ROOMS = utils.remap(self.lc4404.C_ROOMS, qs407.C_ROOMS.unique())
self.lc4404.C_SIZHUK11 = utils.remap(self.lc4404.C_SIZHUK11, qs406.C_SIZHUK11.unique())
#print(self.lc4404.head())
assert self.lc4404.OBS_VALUE.sum() == checksum
# no bedroom info is available
# for now randomly sample from survey on rooms
# TODO microsynth using tenure/occs also?
self.lc4405 = self.lc4404.copy()
# self.lc4405.rename({"C_ROOMS": "C_BEDROOMS"}, axis=1, inplace=True)
self.lc4405["C_BEDROOMS"] = Household.UNKNOWN
room_bed_dist = np.sum(seed.get_survey_TROBH(), axis=(0,2,4))
#print(room_bed_dist)
# c = [1,2,3,4]
# for i in range(0,6):
# p = room_bed_dist[i]/np.sum(room_bed_dist[i])
# n = len(self.lc4405[self.lc4405.C_ROOMS == i+1])
# #print(np.random.choice(c, n, p=p))
# self.lc4405.loc[self.lc4405.C_ROOMS == i+1, "C_BEDROOMS"] = np.random.choice(c, n, p=p)
#assert len(self.lc4405[self.lc4405.C_BEDROOMS == Household.UNKNOWN]) == 0
assert len(self.lc4405[self.lc4405.C_ROOMS < self.lc4405.C_BEDROOMS]) == 0
self.lc4405.drop("C_ROOMS", axis=1, inplace=True)
self.lc4405 = self.lc4405.groupby(["GEOGRAPHY_CODE", "C_TENHUK11", "C_SIZHUK11", "C_BEDROOMS"]).sum().reset_index()
#print(self.lc4405)
assert self.lc4405.OBS_VALUE.sum() == checksum
# synthesise LC4408
#print(self.api_sc.get_metadata("QS116SC", self.resolution))
# 1'One person household',
# 2'Married couple household: No dependent children',
# 3'Married couple household: With dependent children',
# 4'Same-sex civil partnership couple household',
# 5'Cohabiting couple household: No dependent children',
# 6'Cohabiting couple household: With dependent children',
# 7'Lone parent household: No dependent children',
# 8'Lone parent household: With dependent children',
# 9'Multi-person household: All full-time students',
# 10'Multi-person household: Other']}}
qs116 = self.api_sc.get_data("QS116SC", self.region, self.resolution, category_filters={"QS116SC_0_CODE": range(1,11)})
qs116.rename({"QS116SC_0_CODE": "C_AHTHUK11"}, axis=1, inplace=True)
# map to lower-resolution household types
# 1 -> 1 (single)
# (2,3,4) -> 2 (married/civil couple)
# (5,6) -> 3 (cohabiting couple)
# (7,8) -> 4 (single parent)
# (9,10) -> 5 (mixed)
qs116.loc[(qs116.C_AHTHUK11 == 2) | (qs116.C_AHTHUK11 == 3) | (qs116.C_AHTHUK11 == 4), "C_AHTHUK11"] = 2
qs116.loc[(qs116.C_AHTHUK11 == 5) | (qs116.C_AHTHUK11 == 6), "C_AHTHUK11"] = 3
qs116.loc[(qs116.C_AHTHUK11 == 7) | (qs116.C_AHTHUK11 == 8), "C_AHTHUK11"] = 4
qs116.loc[(qs116.C_AHTHUK11 == 9) | (qs116.C_AHTHUK11 == 10), "C_AHTHUK11"] = 5
# ...and consolidate
qs116 = qs116.groupby(["GEOGRAPHY_CODE", "C_AHTHUK11"]).sum().reset_index()
assert qs116.OBS_VALUE.sum() == checksum
nhhtypes = len(qs116.C_AHTHUK11.unique())
m116 = utils.unlistify(qs116, ["GEOGRAPHY_CODE", "C_AHTHUK11"], [ngeogs, nhhtypes], "OBS_VALUE")
a4408 = humanleague.qis([np.array([0,1]), np.array([0,2])], [m4402, m116])
utils.check_humanleague_result(a4408, [m4402, m116])
self.lc4408 = utils.listify(a4408["result"], "OBS_VALUE", ["GEOGRAPHY_CODE", "C_TENHUK11", "C_AHTHUK11"])
self.lc4408.GEOGRAPHY_CODE = utils.remap(self.lc4408.GEOGRAPHY_CODE, qs116.GEOGRAPHY_CODE.unique())
self.lc4408.C_TENHUK11 = utils.remap(self.lc4408.C_TENHUK11, self.lc4402.C_TENHUK11.unique())
self.lc4408.C_AHTHUK11 = utils.remap(self.lc4408.C_AHTHUK11, qs116.C_AHTHUK11.unique())
#print(self.lc4408.head())
assert self.lc4408.OBS_VALUE.sum() == checksum
# LC1105
#print(self.api_sc.get_metadata("KS101SC", self.resolution))
self.lc1105 = self.api_sc.get_data("KS101SC", self.region, self.resolution, category_filters={"KS101SC_0_CODE": [3,4]})
self.lc1105.rename({"KS101SC_0_CODE": "C_RESIDENCE_TYPE"}, axis=1, inplace=True)
# 3->1, 4->2
self.lc1105["C_RESIDENCE_TYPE"] = self.lc1105["C_RESIDENCE_TYPE"] - 2
#print(self.lc1105.OBS_VALUE.sum(), checksum)
# occupied vs unoccupied
#print(self.api_sc.get_metadata("KS401SC", self.resolution))
# 5'All household spaces: Occupied',
# 6'All household spaces: Unoccupied: Second residence/holiday accommodation',
# 7'All household spaces: Unoccupied: Vacant',
self.ks401 = self.api_sc.get_data("KS401SC", self.region, self.resolution, category_filters={"KS401SC_0_CODE": [5,6,7]})
self.ks401.rename({"KS401SC_0_CODE": "CELL"}, axis=1, inplace=True)
self.ks401 = utils.cap_value(self.ks401, "CELL", 6, "OBS_VALUE")
assert self.ks401[self.ks401.CELL == 5].OBS_VALUE.sum() == checksum
#print(self.api_sc.get_metadata("LC4202SC", self.resolution))
#{'table': 'LC4202SC', 'description': '', 'geography': 'OA11', 'fields': {'LC4202SC_1_CODE': [
# 'All households:',
# 'Owned:',
# 'Social rented:',
# 'Private rented or living rent free:'],
# 'LC4202SC_2_CODE': [
# 'Total',
# 'Number of cars or vans in household: No cars or vans',
# 'Number of cars or vans in household: One car or van',
# 'Number of cars or vans in household:Two or more cars or vans'],
# 'LC4202SC_0_CODE': [
# 'All households',
# 'White',
# 'Mixed or multiple ethnic groups',
# 'Asian Asian Scottish or Asian British',
# 'African',
# 'Caribbean or Black',
# 'Other ethnic groups']}}
self.lc4202 = self.api_sc.get_data("LC4202SC", self.region, self.resolution, category_filters={"LC4202SC_1_CODE": [1,2,3], "LC4202SC_2_CODE": [1,2,3], "LC4202SC_0_CODE": [1,2,3,4,5,6]})
self.lc4202.rename({"LC4202SC_2_CODE": "C_CARSNO", "LC4202SC_1_CODE": "C_TENHUK11", "LC4202SC_0_CODE": "C_ETHHUK11"}, axis=1, inplace=True)
# TODO how to map tenure 1->2/3?
self.lc4202.loc[self.lc4202.C_TENHUK11 == 3, "C_TENHUK11"] = 6
self.lc4202.loc[self.lc4202.C_TENHUK11 == 2, "C_TENHUK11"] = 5
self.lc4202.loc[self.lc4202.C_TENHUK11 == 1, "C_TENHUK11"] = 3 # OR 2?
assert self.lc4202.OBS_VALUE.sum() == checksum
#print(self.api_sc.get_metadata("LC4605SC", self.resolution))
#{'table': 'LC4605SC', 'description': '', 'geography': 'OA11', 'fields': {'LC4605SC_1_CODE': [
# 'All HRPs aged 16 to 74',
# 'Owned: Total',
# 'Owned: Owned outright',
# 'Owned: Owned witha mortgage or loan or shared ownership',
# 'Rented or living rent free: Total',
# 'Rented or living rent free: Social rented',
# 'Rented or living rent free: Private rented or living rent free'],
# 'LC4605SC_0_CODE': ['All HRPs aged 16 to 74',
# '1. Higher managerial administrative and professional occupations',
# '2. Lower managerial administrative and professional occupations',
# '3. Intermediate occupations',
# '4. Small employers and own account workers',
# '5. Lower supervisory and technical occupations',
# '6. Semi-routine occupations',
# '7. Routine occupations',
# '8. Never worked and long-term unemployed',
# 'L15 Full-time students']}}
self.lc4605 = self.api_sc.get_data("LC4605SC", self.region, self.resolution, category_filters={"LC4605SC_1_CODE": [2,3,5,6], "LC4605SC_0_CODE": range(1,10)})
self.lc4605.rename({"LC4605SC_1_CODE": "C_TENHUK11", "LC4605SC_0_CODE": "C_NSSEC"}, axis=1, inplace=True)
# TODO add retired?
print(self.lc4605.OBS_VALUE.sum(), checksum, "TODO add retired")
#print(self.api_sc.get_metadata("QS420SC", self.resolution))
cats = [2,6,11,14,22,23,24,25,26,27,28,29,30,31,32,33]
# merge the two communal tables (so we have establishment and people counts)
self.communal = self.api_sc.get_data("QS420SC", self.region, self.resolution, category_filters={"QS420SC_0_CODE": cats}).rename({"QS420SC_0_CODE": "CELL"}, axis=1)
qs421 = self.api_sc.get_data("QS421SC", self.region, self.resolution, category_filters={"QS421SC_0_CODE": cats}).rename({"OBS_VALUE": "CommunalSize"}, axis=1)
#print(qs421.head())
self.communal = self.communal.merge(qs421, left_on=["GEOGRAPHY_CODE", "CELL"], right_on=["GEOGRAPHY_CODE", "QS421SC_0_CODE"]).drop("QS421SC_0_CODE", axis=1)
#print(self.communal.CommunalSize.sum())
def __get_census_data_ew(self):
    """
    Retrieves census tables for the specified geography
    checks for locally cached data or calls nomisweb API
    """
    # convert input string to enum
    resolution = self.api_ew.GeoCodeLookup[self.resolution]

    # the region is either a named geography in the lookup, or an LAD name/code
    if self.region in self.api_ew.GeoCodeLookup.keys():
        region_codes = self.api_ew.GeoCodeLookup[self.region]
    else:
        region_codes = self.api_ew.get_lad_codes(self.region)
        if not region_codes:
            raise ValueError("no regions match the input: \"" + self.region + "\"")

    area_codes = self.api_ew.get_geo_codes(region_codes, resolution)

    # assignment does shallow copy, need to use .copy() to avoid this getting query_params fields
    common_params = {"MEASURES": "20100",
                     "date": "latest",
                     "geography": area_codes}

    # LC4402EW - Accommodation type by type of central heating in household by tenure
    query_params = common_params.copy()
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["C_CENHEATHUK11"] = "1,2"
    query_params["C_TYPACCOM"] = "2...5"
    query_params["select"] = "GEOGRAPHY_CODE,C_TENHUK11,C_CENHEATHUK11,C_TYPACCOM,OBS_VALUE"
    self.lc4402 = self.api_ew.get_data("LC4402EW", query_params)

    # LC4404EW - Tenure by household size by number of rooms
    query_params = common_params.copy()
    query_params["C_ROOMS"] = "1...6"
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["C_SIZHUK11"] = "1...4"
    query_params["select"] = "GEOGRAPHY_CODE,C_ROOMS,C_TENHUK11,C_SIZHUK11,OBS_VALUE"
    self.lc4404 = self.api_ew.get_data("LC4404EW", query_params)

    # LC4405EW - Tenure by household size by number of bedrooms
    query_params = common_params.copy()
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["C_BEDROOMS"] = "1...4"
    query_params["C_SIZHUK11"] = "1...4"
    query_params["select"] = "GEOGRAPHY_CODE,C_SIZHUK11,C_TENHUK11,C_BEDROOMS,OBS_VALUE"
    self.lc4405 = self.api_ew.get_data("LC4405EW", query_params)

    # LC4408EW - Tenure by number of persons per bedroom in household by household type
    query_params = common_params.copy()
    #query_params["C_PPBROOMHEW11"] = "1...4"
    query_params["C_PPBROOMHEW11"] = "0"
    query_params["C_AHTHUK11"] = "1...5"
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["select"] = "GEOGRAPHY_CODE,C_AHTHUK11,C_TENHUK11,OBS_VALUE"
    self.lc4408 = self.api_ew.get_data("LC4408EW", query_params)

    # LC1105EW - Residence type by sex by age
    query_params = common_params.copy()
    query_params["C_SEX"] = "0"
    query_params["C_AGE"] = "0"
    query_params["C_RESIDENCE_TYPE"] = "1,2"
    query_params["select"] = "GEOGRAPHY_CODE,C_RESIDENCE_TYPE,OBS_VALUE"
    self.lc1105 = self.api_ew.get_data("LC1105EW", query_params)

    # KS401EW - Dwellings, household spaces and accommodation type
    # Household spaces with at least one usual resident / Household spaces with no usual residents
    query_params = common_params.copy()
    query_params["RURAL_URBAN"] = "0"
    query_params["CELL"] = "5,6"
    query_params["select"] = "GEOGRAPHY_CODE,CELL,OBS_VALUE"
    self.ks401 = self.api_ew.get_data("KS401EW", query_params)

    # NOTE: common_params is passed by ref so take a copy
    self.communal = self.__get_communal_data_ew(common_params.copy())

    # LC4202EW - Tenure by car or van availability by ethnic group of Household Reference Person (HRP)
    query_params = common_params.copy()
    query_params["C_CARSNO"] = "1...3"
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["C_ETHHUK11"] = "2...8"
    query_params["select"] = "GEOGRAPHY_CODE,C_ETHHUK11,C_CARSNO,C_TENHUK11,OBS_VALUE"
    self.lc4202 = self.api_ew.get_data("LC4202EW", query_params)

    # LC4605EW - Tenure by NS-SeC - Household Reference Persons
    query_params = common_params.copy()
    query_params["C_TENHUK11"] = "2,3,5,6"
    query_params["C_NSSEC"] = "1...9"
    query_params["select"] = "GEOGRAPHY_CODE,C_TENHUK11,C_NSSEC,OBS_VALUE"
    self.lc4605 = self.api_ew.get_data("LC4605EW", query_params)
def __get_communal_data_ew(self, query_params):
# TODO merge the tables rather than relying on the order being the same in both
query_params["RURAL_URBAN"] = 0
query_params["CELL"] = "2,6,11,14,22...34"
query_params["select"] = "GEOGRAPHY_CODE,CELL,OBS_VALUE"
# communal is qs420 plus qs421
communal = self.api_ew.get_data("QS420EW", query_params) # establishments
qs421 = self.api_ew.get_data("QS421EW", query_params) # people
# | |
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import namedtuple
from copy import deepcopy
from functools import partial
from jax import random
import jax.numpy as jnp
from jax.tree_util import register_pytree_node, tree_flatten, tree_unflatten
import numpyro
import numpyro.distributions as dist
from numpyro.primitives import mutable as numpyro_mutable
__all__ = [
"flax_module",
"haiku_module",
"random_flax_module",
"random_haiku_module",
]
def flax_module(
    name, nn_module, *, input_shape=None, apply_rng=None, mutable=None, **kwargs
):
    """
    Declare a :mod:`~flax` style neural network inside a
    model so that its parameters are registered for optimization via
    :func:`~numpyro.primitives.param` statements.
    Given a flax ``nn_module``, in flax to evaluate the module with
    a given set of parameters, we use: ``nn_module.apply(params, x)``.
    In a NumPyro model, the pattern will be::
        net = flax_module("net", nn_module)
        y = net(x)
    or with dropout layers::
        net = flax_module("net", nn_module, apply_rng=["dropout"])
        rng_key = numpyro.prng_key()
        y = net(x, rngs={"dropout": rng_key})
    :param str name: name of the module to be registered.
    :param flax.linen.Module nn_module: a `flax` Module which has .init and .apply methods
    :param tuple input_shape: shape of the input taken by the
        neural network.
    :param list apply_rng: A list to indicate which extra rng _kinds_ are needed for
        ``nn_module``. For example, when ``nn_module`` includes dropout layers, we
        need to set ``apply_rng=["dropout"]``. Defaults to None, which means no extra
        rng key is needed. Please see
        `Flax Linen Intro <https://flax.readthedocs.io/en/latest/notebooks/linen_intro.html#Invoking-Modules>`_
        for more information in how Flax deals with stochastic layers like dropout.
    :param list mutable: A list to indicate mutable states of ``nn_module``. For example,
        if your module has BatchNorm layer, we will need to define ``mutable=["batch_stats"]``.
        See the above `Flax Linen Intro` tutorial for more information.
    :param kwargs: optional keyword arguments to initialize flax neural network
        as an alternative to `input_shape`
    :return: a callable with bound parameters that takes an array
        as an input and returns the neural network transformed output
        array.
    """
    try:
        # Deferred import: flax is an optional dependency of this feature.
        import flax  # noqa: F401
    except ImportError as e:
        raise ImportError(
            "Looking like you want to use flax to declare "
            "nn modules. This is an experimental feature. "
            "You need to install `flax` to be able to use this feature. "
            "It can be installed with `pip install flax`."
        ) from e
    # Parameters are registered in the param store under "<name>$params".
    module_key = name + "$params"
    nn_params = numpyro.param(module_key)
    if mutable:
        # Non-parameter collections (e.g. batch_stats) live in a separate
        # mutable slot "<name>$state"; params and state are created together.
        nn_state = numpyro_mutable(name + "$state")
        assert nn_state is None or isinstance(nn_state, dict)
        assert (nn_state is None) == (nn_params is None)
    if nn_params is None:
        # feed in dummy data to init params
        args = (jnp.ones(input_shape),) if input_shape is not None else ()
        rng_key = numpyro.prng_key()
        # split rng_key into a dict of rng_kind: rng_key
        rngs = {}
        if apply_rng:
            assert isinstance(apply_rng, list)
            for kind in apply_rng:
                rng_key, subkey = random.split(rng_key)
                rngs[kind] = subkey
        rngs["params"] = rng_key
        nn_vars = flax.core.unfreeze(nn_module.init(rngs, *args, **kwargs))
        if "params" not in nn_vars:
            raise ValueError(
                "Your nn_module does not have any parameter. Currently, it is not"
                " supported in NumPyro. Please make a github issue if you need"
                " that feature."
            )
        nn_params = nn_vars["params"]
        if mutable:
            # Everything init produced besides "params" is mutable state.
            nn_state = {k: v for k, v in nn_vars.items() if k != "params"}
            assert set(mutable) == set(nn_state)
            numpyro_mutable(name + "$state", nn_state)
        # make sure that nn_params keep the same order after unflatten
        params_flat, tree_def = tree_flatten(nn_params)
        nn_params = tree_unflatten(tree_def, params_flat)
        numpyro.param(module_key, nn_params)
    def apply_with_state(params, *args, **kwargs):
        # Closure over nn_state: mutable collections are refreshed in place
        # on every forward call.
        params = {"params": params, **nn_state}
        out, new_state = nn_module.apply(params, mutable=mutable, *args, **kwargs)
        nn_state.update(**new_state)
        return out
    def apply_without_state(params, *args, **kwargs):
        return nn_module.apply({"params": params}, *args, **kwargs)
    apply_fn = apply_with_state if mutable else apply_without_state
    # Bind the (possibly freshly initialized) parameters into the callable.
    return partial(apply_fn, nn_params)
def haiku_module(name, nn_module, *, input_shape=None, apply_rng=False, **kwargs):
    """
    Declare a :mod:`~haiku` style neural network inside a
    model so that its parameters are registered for optimization via
    :func:`~numpyro.primitives.param` statements.
    Given a haiku ``nn_module``, in haiku to evaluate the module with
    a given set of parameters, we use: ``nn_module.apply(params, None, x)``.
    In a NumPyro model, the pattern will be::
        net = haiku_module("net", nn_module)
        y = net(x)  # or y = net(rng_key, x)
    or with dropout layers::
        net = haiku_module("net", nn_module, apply_rng=True)
        rng_key = numpyro.prng_key()
        y = net(rng_key, x)
    :param str name: name of the module to be registered.
    :param nn_module: a `haiku` Module which has .init and .apply methods
    :type nn_module: haiku.Transformed or haiku.TransformedWithState
    :param tuple input_shape: shape of the input taken by the
        neural network.
    :param bool apply_rng: A flag to indicate if the returned callable requires
        an rng argument (e.g. when ``nn_module`` includes dropout layers). Defaults
        to False, which means no rng argument is needed. If this is True, the signature
        of the returned callable ``nn = haiku_module(..., apply_rng=True)`` will be
        ``nn(rng_key, x)`` (rather than ``nn(x)``).
    :param kwargs: optional keyword arguments to initialize flax neural network
        as an alternative to `input_shape`
    :return: a callable with bound parameters that takes an array
        as an input and returns the neural network transformed output
        array.
    """
    try:
        # Deferred import: haiku is an optional dependency of this feature.
        import haiku as hk  # noqa: F401
    except ImportError as e:
        raise ImportError(
            "Looking like you want to use haiku to declare "
            "nn modules. This is an experimental feature. "
            "You need to install `haiku` to be able to use this feature. "
            "It can be installed with `pip install dm-haiku`."
        ) from e
    if not apply_rng:
        # Strip the rng argument from apply() so callers can invoke nn(x).
        nn_module = hk.without_apply_rng(nn_module)
    # Parameters are registered in the param store under "<name>$params".
    module_key = name + "$params"
    nn_params = numpyro.param(module_key)
    # TransformedWithState modules additionally carry mutable state.
    with_state = isinstance(nn_module, hk.TransformedWithState)
    if with_state:
        nn_state = numpyro_mutable(name + "$state")
        assert nn_state is None or isinstance(nn_state, dict)
        assert (nn_state is None) == (nn_params is None)
    if nn_params is None:
        args = (jnp.ones(input_shape),) if input_shape is not None else ()
        # feed in dummy data to init params
        rng_key = numpyro.prng_key()
        if with_state:
            nn_params, nn_state = nn_module.init(rng_key, *args, **kwargs)
            nn_state = dict(nn_state)
            numpyro_mutable(name + "$state", nn_state)
        else:
            nn_params = nn_module.init(rng_key, *args, **kwargs)
        # haiku init returns an immutable dict
        nn_params = hk.data_structures.to_mutable_dict(nn_params)
        # we cast it to a mutable one to be able to set priors for parameters
        # make sure that nn_params keep the same order after unflatten
        params_flat, tree_def = tree_flatten(nn_params)
        nn_params = tree_unflatten(tree_def, params_flat)
        numpyro.param(module_key, nn_params)
    def apply_with_state(params, *args, **kwargs):
        # Closure over nn_state: mutable state is refreshed in place per call.
        out, new_state = nn_module.apply(params, nn_state, *args, **kwargs)
        nn_state.update(**new_state)
        return out
    apply_fn = apply_with_state if with_state else nn_module.apply
    return partial(apply_fn, nn_params)
# Register an "empty" parameter marker which stores only its shape, so the
# optimizer can skip optimizing the parameter while priors can still read
# shape information from it.
ParamShape = namedtuple("ParamShape", ["shape"])
def _flatten_param_shape(node):
    # Children: a single None placeholder (no array leaves); aux data: shape.
    return (None,), node.shape
def _unflatten_param_shape(shape, children):
    # Rebuild purely from the aux data; the placeholder child is discarded.
    return ParamShape(shape)
register_pytree_node(ParamShape, _flatten_param_shape, _unflatten_param_shape)
def _update_params(params, new_params, prior, prefix=""):
    """
    A helper to recursively set prior to new_params.

    Walks the nested ``params`` dict in lockstep with ``new_params``
    (same structure). Every leaf that receives a prior is replaced in
    ``params`` by a ParamShape marker (so the optimizer skips it) while a
    sample drawn from the prior is written into ``new_params``.
    ``prior`` may be a Distribution, a dict mapping dotted parameter paths
    to Distributions, or a callable ``(name, shape) -> Distribution``.
    """
    for name, item in params.items():
        # Dotted path of this leaf/subtree, e.g. "inner.dense.kernel".
        flatten_name = ".".join([prefix, name]) if prefix else name
        if isinstance(item, dict):
            # A dict-valued prior must address leaves, never whole subtrees.
            assert not isinstance(prior, dict) or flatten_name not in prior
            new_item = new_params[name]
            _update_params(item, new_item, prior, prefix=flatten_name)
        elif (not isinstance(prior, dict)) or flatten_name in prior:
            if isinstance(params[name], ParamShape):
                # Leaf was already swapped for its shape marker; reuse it.
                param_shape = params[name].shape
            else:
                param_shape = jnp.shape(params[name])
                params[name] = ParamShape(param_shape)
            if isinstance(prior, dict):
                d = prior[flatten_name]
            elif callable(prior) and not isinstance(prior, dist.Distribution):
                # Callable prior: maps (dotted name, shape) -> Distribution.
                d = prior(flatten_name, param_shape)
            else:
                d = prior
            param_batch_shape = param_shape[: len(param_shape) - d.event_dim]
            # XXX: here we set all dimensions of prior to event dimensions.
            new_params[name] = numpyro.sample(
                flatten_name, d.expand(param_batch_shape).to_event()
            )
def random_flax_module(
name, nn_module, prior, *, input_shape=None, apply_rng=None, mutable=None, **kwargs
):
"""
A primitive to place a prior over the parameters of the Flax module `nn_module`.
.. note::
Parameters of a Flax module are stored in a nested dict. For example,
the module `B` defined as follows::
class A(flax.linen.Module):
@flax.linen.compact
def __call__(self, x):
return nn.Dense(1, use_bias=False, name='dense')(x)
class B(flax.linen.Module):
@flax.linen.compact
def __call__(self, x):
return A(name='inner')(x)
has parameters `{'inner': {'dense': {'kernel': param_value}}}`. In the argument
`prior`, to specify `kernel` parameter, we join the path to it using dots:
`prior={"inner.dense.kernel": param_prior}`.
:param str name: name of NumPyro module
:param flax.linen.Module: the module to be registered with NumPyro
:param prior: a NumPyro distribution or a Python dict with parameter names as keys and
respective distributions as values. For example::
net = random_flax_module("net",
flax.linen.Dense(features=1),
prior={"bias": dist.Cauchy(), "kernel": dist.Normal()},
input_shape=(4,))
Alternatively, we can use a callable. For example the following are equivalent::
prior=(lambda name, shape: dist.Cauchy() if name == "bias" else dist.Normal())
| |
from collections import defaultdict
from copy import deepcopy
import csv
from datetime import datetime
import os
from pprint import pformat
import re
import string
import time
from mintamazontagger.algorithm_u import algorithm_u
from mintamazontagger import category
from mintamazontagger.currency import micro_usd_nearly_equal
from mintamazontagger.currency import micro_usd_to_usd_string
from mintamazontagger.currency import parse_usd_as_micro_usd
from mintamazontagger.currency import CENT_MICRO_USD, MICRO_USD_EPS
from mintamazontagger.mint import truncate_title
from mintamazontagger.my_progress import NoProgress, no_progress_factory
# Printable ASCII characters; used to scrub non-ASCII chars from item titles.
PRINTABLE = set(string.printable)
def rm_leading_qty(item_title):
    """Strip a leading quantity prefix (e.g. '2x ') from an item title."""
    qty_prefix = re.match(r'\d+x ', item_title)
    if qty_prefix:
        return item_title[qty_prefix.end():]
    return item_title
def get_title(amzn_obj, target_length):
    """Build a cleaned, length-limited title for an Amazon item/refund record."""
    # Prefix multi-quantity records with e.g. '3x'; single items get no prefix.
    # Also works for a Refund record.
    quantity = amzn_obj.quantity
    prefix = str(quantity) + 'x' if quantity > 1 else None
    # Remove non-ASCII characters from the title.
    ascii_title = ''.join(ch for ch in amzn_obj.title if ch in PRINTABLE)
    return truncate_title(ascii_title, target_length, prefix)
# CSV columns parsed as micro-USD currency amounts.
# (Set literals instead of set([...]) — avoids building a throwaway list.)
CURRENCY_FIELD_NAMES = {
    'Item Subtotal',
    'Item Subtotal Tax',
    'Item Total',
    'List Price Per Unit',
    'Purchase Price Per Unit',
    'Refund Amount',
    'Refund Tax Amount',
    'Shipping Charge',
    'Subtotal',
    'Tax Charged',
    'Tax Before Promotions',
    'Total Charged',
    'Total Promotions',
}
# CSV columns parsed as datetime.date.
DATE_FIELD_NAMES = {
    'Order Date',
    'Refund Date',
    'Shipment Date',
}
# Long/unpythonic CSV column names that get renamed on import.
RENAME_FIELD_NAMES = {
    'Carrier Name & Tracking Number': 'tracking',
}
def num_lines_csv(csv_file):
    """Count the data rows (header excluded) in the CSV at csv_file.name.

    :param csv_file: an open file-like object; only its .name is used here.
    :return: number of records DictReader yields.
    """
    # BUG FIX: the file was opened without ever being closed (handle leak);
    # a context manager guarantees it is closed. A generator expression also
    # avoids materializing a throwaway list.
    with open(csv_file.name, encoding='utf-8') as f:
        return sum(1 for _ in csv.DictReader(f))
def is_empty_csv(csv_file, num_records, key='Buyer Name'):
    """Return True if the CSV holds no usable data.

    Amazon likes to put "No data found for this time period" in the first
    row, and sometimes emits 0-sized CSVs outright.

    :param csv_file: file-like object; only its .name is used.
    :param num_records: pre-computed data-row count (see num_lines_csv).
    :param key: column whose absence/None marks a placeholder row.
    """
    if os.stat(csv_file.name).st_size == 0:
        return True
    if num_records > 1:
        return False
    # BUG FIX: the file handle was leaked, and a header-only CSV made
    # next(DictReader(...)) raise StopIteration; treat that case as empty.
    # .get() also tolerates a missing column instead of raising KeyError.
    with open(csv_file.name, encoding='utf-8') as f:
        first_row = next(csv.DictReader(f), None)
    return first_row is None or first_row.get(key) is None
def parse_from_csv_common(
        cls,
        csv_file,
        progress_label='Parse from csv',
        progress_factory=no_progress_factory):
    """Parse every row of csv_file into an instance of cls.

    Returns [] when the report is empty; otherwise reports per-row progress
    through the object built by progress_factory.
    """
    row_count = num_lines_csv(csv_file)
    if is_empty_csv(csv_file, row_count):
        return []
    progress = progress_factory(progress_label, row_count)
    parsed = []
    for row in csv.DictReader(csv_file):
        parsed.append(cls(row))
        progress.next()
    progress.finish()
    return parsed
def pythonify_amazon_dict(raw_dict):
    """Normalize a raw Amazon CSV row: typed values and pythonic key names."""
    keys = set(raw_dict.keys())
    # Convert to microdollar ints
    for currency_key in keys & CURRENCY_FIELD_NAMES:
        raw_dict[currency_key] = parse_usd_as_micro_usd(raw_dict[currency_key])
    # Convert to datetime.date
    for date_key in keys & DATE_FIELD_NAMES:
        raw_dict[date_key] = parse_amazon_date(raw_dict[date_key])
    # Rename long or unpythonic names:
    for old_key in keys & RENAME_FIELD_NAMES.keys():
        raw_dict[RENAME_FIELD_NAMES[old_key]] = raw_dict.pop(old_key)
    if 'Quantity' in keys:
        raw_dict['Quantity'] = int(raw_dict['Quantity'])
    # Lower-case the keys and replace separators with underscores.
    return {
        key.lower().replace(' ', '_').replace('/', '_'): value
        for key, value in raw_dict.items()
    }
def parse_amazon_date(date_str):
    """Parse an Amazon MM/DD/YYYY (or MM/DD/YY) date string into a date.

    Returns None for empty/missing input.
    """
    if not date_str:
        return None
    try:
        parsed = datetime.strptime(date_str, '%m/%d/%Y')
    except ValueError:
        # Fall back to the two-digit-year form.
        parsed = datetime.strptime(date_str, '%m/%d/%y')
    return parsed.date()
def get_invoice_url(order_id):
    """Return the Amazon invoice print-view URL for the given order id."""
    base = 'https://www.amazon.com/gp/css/summary/print.html'
    return base + '?ie=UTF8&orderID={}'.format(order_id)
def associate_items_with_orders(
        all_orders, all_items, item_progress=NoProgress()):
    """Attach Item records to the Order records they belong to.

    Orders and items are grouped by order id; when several Order rows share
    one id, items are split among them first by tracking number, then by
    brute-force subtotal matching (time-boxed to ~1 second per order id).
    NOTE(review): the NoProgress() default is a single shared instance across
    calls (mutable default argument) — presumably intentional since it is
    stateless; confirm before changing.
    """
    items_by_oid = defaultdict(list)
    for i in all_items:
        items_by_oid[i.order_id].append(i)
    orders_by_oid = defaultdict(list)
    for o in all_orders:
        orders_by_oid[o.order_id].append(o)
    for oid, orders in orders_by_oid.items():
        oid_items = items_by_oid[oid]
        # Skip ids whose order and item subtotals disagree entirely.
        if not micro_usd_nearly_equal(
                Order.sum_subtotals(orders),
                Item.sum_subtotals(oid_items)):
            # This is likely due to reports being pulled before all outstanding
            # orders have shipped. Just skip this order for now.
            continue
        if len(orders) == 1:
            orders[0].set_items(oid_items, assert_unmatched=True)
            item_progress.next(len(oid_items))
            continue
        # First try to divy up the items by tracking.
        items_by_tracking = defaultdict(list)
        for i in oid_items:
            items_by_tracking[i.tracking].append(i)
        # It is never the case that multiple orders with the same order id will
        # have the same tracking number. Try using tracking number to split up
        # the items between the orders.
        for order in orders:
            items = items_by_tracking[order.tracking]
            if micro_usd_nearly_equal(
                    Item.sum_subtotals(items),
                    order.subtotal):
                # A perfect fit.
                order.set_items(items, assert_unmatched=True)
                item_progress.next(len(items))
                # Remove the selected items.
                oid_items = [i for i in oid_items if i not in items]
        # Remove orders that have items.
        orders = [o for o in orders if not o.items]
        if not orders and not oid_items:
            continue
        # Sort so orders line up with the sorted subtotal groupings below.
        orders = sorted(orders, key=lambda o: o.subtotal)
        # Partition the remaining items into every possible arrangement and
        # validate against the remaining orders.
        # TODO: Make a custom algorithm with backtracking.
        # The number of combinations are factorial, so limit the number of
        # attempts (by a 1 sec timeout) before giving up.
        start_time = time.time()
        for item_groupings in algorithm_u(oid_items, len(orders)):
            if time.time() - start_time > 1:
                break
            subtotals_with_groupings = sorted(
                [(Item.sum_subtotals(itms), itms)
                 for itms in item_groupings],
                key=lambda g: g[0])
            # Accept the first grouping whose sorted subtotals all match.
            if all([micro_usd_nearly_equal(
                    subtotals_with_groupings[i][0],
                    orders[i].subtotal) for i in range(len(orders))]):
                for idx, order in enumerate(orders):
                    items = subtotals_with_groupings[idx][1]
                    order.set_items(items,
                                    assert_unmatched=True)
                    item_progress.next(len(items))
                break
    item_progress.finish()
# Currency attributes summed when merging multiple Order rows that share an
# order id (used by Order.merge).
ORDER_MERGE_FIELDS = {
    'shipping_charge',
    'subtotal',
    'tax_before_promotions',
    'tax_charged',
    'total_charged',
    'total_promotions',
}
class Order:
    """One Amazon order (a single row of the Orders report CSV)."""
    # Class-level defaults; flipped per instance as matching proceeds.
    matched = False
    items_matched = False
    trans_id = None
    items = []
    is_debit = True
    def __init__(self, raw_dict):
        """Populate attributes from a pythonified Amazon CSV row dict."""
        self.__dict__.update(pythonify_amazon_dict(raw_dict))
        # BUG FIX: 'items' was only a class attribute, so every order that
        # never called set_items() shared ONE mutable list — any append
        # (e.g. attribute_subtotal_diff_to_misc_charge) leaked across
        # instances. Give each instance its own list.
        self.items = []
    @classmethod
    def parse_from_csv(cls, csv_file, progress_factory=no_progress_factory):
        """Parse an Amazon Orders report CSV into a list of Order objects."""
        return parse_from_csv_common(
            cls, csv_file, 'Parsing Amazon Orders', progress_factory)
@staticmethod
def sum_subtotals(orders):
return sum([o.subtotal for o in orders])
    def total_by_items(self):
        """Order total recomputed from line items: item totals + shipping - promotions."""
        return (
            Item.sum_totals(self.items) +
            self.shipping_charge - self.total_promotions)
    def total_by_subtotals(self):
        """Order total recomputed from order-level fields: subtotal + tax + shipping - promotions."""
        return (
            self.subtotal + self.tax_charged +
            self.shipping_charge - self.total_promotions)
    def transact_date(self):
        """Return the shipment date as this order's transaction date."""
        return self.shipment_date
    def transact_amount(self):
        """Return the charged total as this order's transaction amount."""
        return self.total_charged
    def match(self, trans):
        """Record that this order was matched to the given transaction."""
        self.matched = True
        self.trans_id = trans.id
def set_items(self, items, assert_unmatched=False):
self.items = items
self.items_matched = True
for i in items:
if assert_unmatched:
assert not i.matched
i.matched = True
i.order = self
def get_note(self):
return (
'Amazon order id: {}\n'
'Buyer: {} ({})\n'
'Order date: {}\n'
'Ship date: {}\n'
'Tracking: {}\n'
'Invoice url: {}').format(
self.order_id,
self.buyer_name,
self.ordering_customer_email,
self.order_date,
self.shipment_date,
self.tracking,
get_invoice_url(self.order_id))
    def attribute_subtotal_diff_to_misc_charge(self):
        """Absorb an unexplained positive total difference as a synthetic item.

        When total_charged exceeds the subtotal-derived total (gift wrap and
        similar charges Amazon doesn't itemize), append a 'Misc Charge' line
        item covering the gap. Returns True if an adjustment was added.
        """
        diff = self.total_charged - self.total_by_subtotals()
        # Negative or sub-epsilon differences are left alone.
        if diff < MICRO_USD_EPS:
            return False
        self.subtotal += diff
        # Clone an existing item as a template for the synthetic line item.
        adjustment = deepcopy(self.items[0])
        adjustment.title = 'Misc Charge (Gift wrap, etc)'
        adjustment.category = 'Shopping'
        adjustment.quantity = 1
        adjustment.item_total = diff
        adjustment.item_subtotal = diff
        adjustment.item_subtotal_tax = 0
        self.items.append(adjustment)
        return True
    def attribute_itemized_diff_to_shipping_tax(self):
        """Fold a positive itemized total difference into the shipping charge.

        Returns True if the shipping/tax fields were adjusted.
        """
        # Shipping [sometimes] has tax. Include this in the shipping charge.
        # Unfortunately Amazon doesn't provide this anywhere; it must be
        # inferred as of now.
        if not self.shipping_charge:
            return False
        diff = self.total_charged - self.total_by_items()
        if diff < MICRO_USD_EPS:
            return False
        # Move the difference out of tax and into shipping.
        self.shipping_charge += diff
        self.tax_charged -= diff
        self.tax_before_promotions -= diff
        return True
    def attribute_itemized_diff_to_per_item_tax(self):
        """Spread an order-level tax rounding error across the line items.

        Only applies when the itemized total difference exactly matches the
        tax miscalculation. Returns True if item taxes were adjusted.
        """
        itemized_diff = self.total_charged - self.total_by_items()
        if abs(itemized_diff) < MICRO_USD_EPS:
            return False
        tax_diff = self.tax_charged - Item.sum_subtotals_tax(self.items)
        if abs(itemized_diff - tax_diff) > MICRO_USD_EPS:
            return False
        # The per-item tax was not computed correctly; the tax miscalculation
        # matches the itemized difference. Sometimes AMZN is bad at math (lol),
        # and most of the time it's simply a rounding error. To keep the line
        # items adding up correctly, spread the tax difference across the
        # items.
        tax_rate_per_item = [i.tax_rate() for i in self.items]
        # Distribute one cent at a time until the residual is negligible.
        while abs(tax_diff) > MICRO_USD_EPS:
            if abs(tax_diff) < CENT_MICRO_USD:
                # If the difference is under a penny, round that
                # partial cent to the first item.
                adjust_amount = tax_diff
                adjust_idx = 0
            elif tax_diff > 0:
                # The order has more tax than the sum of all items.
                # Find the lowest taxed item (by rate) and add a penny. Try to
                # ignore items that have no tax (a rate of zero) however
                # default to the first item if no items were taxed.
                adjust_amount = CENT_MICRO_USD
                adjust_idx = 0
                min_rate = None
                for (idx, rate) in enumerate(tax_rate_per_item):
                    if rate != 0 and (not min_rate or rate < min_rate):
                        adjust_idx = idx
                        min_rate = rate
            else:
                # The order has less tax than the sum of all items.
                # Find the highest taxed item (by rate) and discount it
                # a penny.
                (adjust_idx, _) = max(
                    enumerate(tax_rate_per_item), key=lambda x: x[1])
                adjust_amount = -CENT_MICRO_USD
            adjust_item = self.items[adjust_idx]
            adjust_item.item_subtotal_tax += adjust_amount
            adjust_item.item_total += adjust_amount
            tax_diff -= adjust_amount
            # Refresh the adjusted item's rate before the next pass.
            tax_rate_per_item[adjust_idx] = adjust_item.tax_rate()
        return True
    def to_mint_transactions(self,
                             t,
                             skip_free_shipping=False):
        """Split the Mint transaction t into itemized sub-transactions.

        Emits one split per line item (highest value first), plus optional
        shipping and promotion splits. When a promotion exactly cancels the
        shipping charge and skip_free_shipping is set, both are omitted.
        """
        new_transactions = []
        # More expensive items are always more interesting when it comes to
        # budgeting, so show those first (for both itemized and concatted).
        items = sorted(
            self.items, key=lambda item: item.item_total, reverse=True)
        # Itemize line-items:
        for i in items:
            new_cat = category.get_mint_category_from_unspsc(i.unspsc_code)
            item = t.split(
                amount=i.item_total,
                category=new_cat,
                desc=i.get_title(88),
                note=self.get_note())
            new_transactions.append(item)
        # Itemize the shipping cost, if any.
        is_free_shipping = (
            self.shipping_charge and
            self.total_promotions and
            micro_usd_nearly_equal(
                self.total_promotions, self.shipping_charge))
        if is_free_shipping and skip_free_shipping:
            return new_transactions
        if self.shipping_charge:
            ship = t.split(
                amount=self.shipping_charge,
                category='Shipping',
                desc='Shipping',
                note=self.get_note())
            new_transactions.append(ship)
        # All promotion(s) as one line-item.
        if self.total_promotions:
            # If there was a promo that matches the shipping cost, it's nearly
            # certainly a Free One-day/same-day/etc promo. In this case,
            # categorize the promo instead as 'Shipping', which will cancel out
            # in Mint trends.
            cat = ('Shipping' if is_free_shipping else
                   category.DEFAULT_MINT_CATEGORY)
            promo = t.split(
                amount=-self.total_promotions,
                category=cat,
                desc='Promotion(s)',
                note=self.get_note(),
                is_debit=False)
            new_transactions.append(promo)
        return new_transactions
@classmethod
def merge(cls, orders):
if len(orders) == 1:
result = orders[0]
result.set_items(Item.merge(result.items))
return result
result = deepcopy(orders[0])
result.set_items(Item.merge([i for o in orders | |
"""
A Trainable ResNet Class is defined in this file
Author: <NAME>
"""
import math
import numpy as np
import tensorflow as tf
from functools import reduce
from configs import configs
class ResNet:
    # some properties
    """
    Trainable ResNet-style encoder/decoder network built with TensorFlow 1.x.

    Can optionally restore pretrained weights from a .npy snapshot.
    """
    def __init__(self, ResNet_npy_path=None, trainable=True, open_tensorboard=False, dropout=0.8):
        """Load pretrained weights from ResNet_npy_path if given; otherwise
        start with fresh variables."""
        if ResNet_npy_path is not None:
            # .item() unwraps the dict stored inside the 0-d numpy object array.
            self.data_dict = np.load(ResNet_npy_path, encoding='latin1').item()
        else:
            self.data_dict = None
        # name -> variable map populated as layers are created.
        self.var_dict = {}
        self.trainable = trainable
        self.open_tensorboard = open_tensorboard
        self.dropout = dropout
        # Batch-norm/dropout mode flag; toggled via set_is_training().
        self.is_training = True
    def set_is_training(self, isTrain):
        """Toggle training mode (affects batch normalization behavior)."""
        self.is_training = isTrain
def build(self, rgb, label_num, train_mode=None, last_layer_type = "softmax"):
"""
load variable from npy to build the Resnet or Generate a new one
:param rgb: rgb image [batch, height, width, 3] values scaled [0, 1]
:param train_mode: a bool tensor, usually a placeholder: if True, dropout will be turned on
"""
red, green, blue = tf.split(axis=3, num_or_size_splits=3, value=rgb)
assert red.get_shape().as_list()[1:] == [224, 224, 1]
assert green.get_shape().as_list()[1:] == [224, 224, 1]
assert blue.get_shape().as_list()[1:] == [224, 224, 1]
bgr = tf.concat(axis=3, values=[
blue - configs['VGG_MEAN'][0],
green - configs['VGG_MEAN'][1],
red - configs['VGG_MEAN'][2],
])
print(bgr.get_shape().as_list())
assert bgr.get_shape().as_list()[1:] == [224, 224, 3]
self.bgr = bgr
self.conv1 = self.conv_layer(self.bgr, 7, 3, 64, 2, "conv1")# 112*112
self.pool1 = self.max_pool(self.conv1, 3, 2, "pool1")# 56*56 * 64
self.block1_1 = self.res_block_3_layers(self.pool1, [64, 64, 256], "res2a", True)# 56*56
self.block1_2 = self.res_block_3_layers(self.block1_1, [64, 64, 256], "res2b")# 56*56
self.block1_3 = self.res_block_3_layers(self.block1_2, [64, 64, 256], "res2c")# 56*56
self.pool2 = self.max_pool(self.block1_3, 2, 2, "pool2")# 56*56
self.block2_1 = self.res_block_3_layers(self.pool2, [128, 128, 512], "res3a", True)# 28*28
self.block2_2 = self.res_block_3_layers(self.block2_1, [128, 128, 512], "res3b")# 28*28
self.block2_3 = self.res_block_3_layers(self.block2_2, [128, 128, 512], "res3c")# 28*28
self.block2_4 = self.res_block_3_layers(self.block2_3, [128, 128, 512], "res3d")# 28*28
self.pool3 = self.max_pool(self.block2_4, 2, 2, "pool3")# 28*28
self.block3_1 = self.res_block_3_layers(self.pool3, [256, 256, 1024], "res4a", True)# 14*14
self.block3_2 = self.res_block_3_layers(self.block3_1, [256, 256, 1024], "res4b")# 14*14
self.block3_3 = self.res_block_3_layers(self.block3_2, [256, 256, 1024], "res4c")# 14*14
self.block3_4 = self.res_block_3_layers(self.block3_3, [256, 256, 1024], "res4d")# 14*14
self.block3_5 = self.res_block_3_layers(self.block3_4, [256, 256, 1024], "res4e")# 14*14
self.block3_6 = self.res_block_3_layers(self.block3_5, [256, 256, 1024], "res4f")# 14*14
#[None 7 7 512]
self.pool4 = self.max_pool(self.block3_6, 2, 2, "pool4")# 14*14
self.block4_1 = self.res_block_3_layers(self.pool4, [512, 512, 2048], "res5a", True)# 7*7
self.block4_2 = self.res_block_3_layers(self.block4_1, [512, 512, 2048], "res5b")# 7*7
self.block4_3 = self.res_block_3_layers(self.block4_2, [512, 512, 2048], "res5c")# 7*7
# upsample layer begins
self.deconv1_1 = self.deconv_bn_relu(self.block4_3, name = 'deconv_1',kernel_size = 3, output_channels = 1024,
initializer = tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 14*14
self.conv1_1d =
self.deconv2_2 = self.deconv_bn_relu(self.deconv_1, name = 'deconv_2',kernel_size = 3, output_channels = 512,
initializer = tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 28*28
self.deconv3_3 = self.deconv_bn_relu(self.deconv_2, name = 'deconv_3',kernel_size = 3, output_channels = 256,
initializer = tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 56*56
self.deconv4_4 = self.deconv_bn_relu(self.deconv_3, name = 'deconv_4',kernel_size = 3, output_channels = 128,
initializer =tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 112*112
self.deconv5_5 = self.deconv_bn_relu(self.deconv_4, name = 'deconv_5',kernel_size = 3, output_channels = 64,
initializer =tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 224*224
# self.final_layer = self.conv_la self.deconv_1 = self.deconv_bn_relu(self.block4_3, name = 'deconv_1',kernel_size = 3, output_channels = 1024,
initializer = tf.contrib.layers.variance_scaling_initializer(), stride=2, bn=True, training=self.is_training)# 14*14yer(bottom = self.deconv_5, kernal_size = 1, in_channels = 64, out_channels = 3, stride = 1, name = 'final_layer')
self.final_layer = self.conv_bn_relu(bottom = self.deconv_5, name = 'final_layer', kernel_size = 1, output_channels = 3, initializer =tf.contrib.layers.variance_scaling_initializer(), bn = False, training = self.is_training, relu=False)
# self.pool5 = self.avg_pool(self.block4_3, 7, 1, "pool5")
#self.fc0 = self.fc_layer(self.pool5, 2048, 1024, "fc0")
#self.relu1 = tf.nn.relu(self.fc0)
#if train_mode is not None:
# self.relu1 = tf.cond(train_mode, lambda: tf.nn.dropout(self.relu1, self.dropout), lambda: self.relu1)
#elif self.trainable:
# self.relu1 = tf.nn.dropout(self.relu1, self.dropout)
self.y_soft = tf.nn.softmax(self.final_layer)
self.logits = tf.reshape(self.final_layer, (-1, 3))
print(self.logits)
self.predicted = tf.argmax(self.final_layer, axis = 3)
print(self.predicted.get_shape().as_list())
# cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=self.labels, logits=logits, name=None)
# self.loss = tf.reduce_mean(cross_entropy, name = 'xcross_entropy')
# if(last_layer_type == "sigmoid"):
# self.prob = tf.nn.sigmoid(self.fc1, name="prob")
# elif(last_layer_type == "softmax"):
# self.prob = tf.nn.softmax(self.fc1, name="prob")
self.data_dict = None
return self.predicted
def conv_bn_relu(self, bottom, kernel_size, out_channels, stride, name, train_mode):
input_filter = bottom.get_shape().as_list()[-1]
conv = self.conv_layer(bottom = bottom, kernel_size = kernel_size, in_channels = input_filter,
out_channels = out_channels, stride = 1, name)
norm = tf.layers.batch_normalization(inputs=conv, axis = 3,
momentum=configs['_BATCH_NORM_DECAY'],epsilon=configs['_BATCH_NORM_EPSILON'],
center=True, scale=True, training=self.is_training, fused=True)
relu = tf.nn.relu(norm)
return relu
def res_block_3_layers(self, bottom, channel_list, name, change_dimension = False):
if (change_dimension):
block_conv_input = self.conv_layer(bottom = bottom, kernal_size = 1, in_channels = bottom.get_shape().as_list()[-1],
out_channels = channel_list[2], stride = 1, name = name + "_branch1")
else:
block_conv_input = bottom
input_filter = bottom.get_shape().as_list()[-1]
block_conv_1 = self.conv_layer(bottom, 1, input_filter, channel_list[0], 1, name + "_branch2a")
block_norm_1 = tf.layers.batch_normalization(inputs=block_conv_1, axis = 3, momentum=configs['_BATCH_NORM_DECAY'], epsilon=configs['_BATCH_NORM_EPSILON'], center=True, scale=True, training=self.is_training, fused=True)
block_relu_1 = tf.nn.relu(block_norm_1)
block_conv_2 = self.conv_layer(block_relu_1, 3, channel_list[0], channel_list[1], 1, name + "_branch2b")
block_norm_2 = tf.layers.batch_normalization(inputs=block_conv_2, axis = 3, momentum=configs['_BATCH_NORM_DECAY'], epsilon=configs['_BATCH_NORM_EPSILON'], center=True, scale=True, training=self.is_training, fused=True)
block_relu_2 = tf.nn.relu(block_norm_2)
block_conv_3 = self.conv_layer(block_relu_2, 1, channel_list[1], channel_list[2], 1, name + "_branch2c")
block_res = tf.add(block_conv_input, block_conv_3)
relu = tf.nn.relu(block_res)
return relu
    def ResNet_Block(self, bottom, channel_list, name):
        """Identity residual block: 1x1 -> 3x3 -> 1x1 convs with a plain skip.

        Unlike res_block_3_layers, the skip path is never projected, so the
        input channel count must equal channel_list[2].
        """
        # NOTE(review): input_filter is computed but never used.
        input_filter = bottom.get_shape().as_list()[-1]
        conv_bn_relu1 = self.Conv_Bn_Relu(name = name + '_branch2a', bottom = bottom, output_channels = channel_list[0], kernel_size = 1, stride = 1, relu = True, bn = True)
        conv_bn_relu2 = self.Conv_Bn_Relu(name = name + '_branch2b', bottom = conv_bn_relu1, output_channels = channel_list[1], kernel_size = 3, stride = 1, relu = True, bn = True)
        block_conv_3 = self.conv_layer(conv_bn_relu2, 1, channel_list[1], channel_list[2], 1, name + '_branch2c')
        # Residual add followed by the final activation.
        block_res = tf.add(bottom, block_conv_3)
        relu = tf.nn.relu(block_res)
        return relu
    def Dense_Block(self, bottom, name, stride = 1):
        """
        dense block composed with a down channel convlution with fiter_size =1
        and a up channel convolution with fiter_size = 3
        """
        input_channels = bottom.get_shape().as_list()[-1]
        # NOTE(review): K (the DenseNet growth rate) is not defined anywhere
        # in this file's visible scope — presumably a module-level constant
        # or config value; confirm it exists before calling this method.
        dense_block_1 = self.BN_Relu_Conv(name + '_x1', bottom, input_channels = input_channels,
                                          output_channels = K*4, kernel_size = 1, stride = 1)
        dense_block_2 = self.BN_Relu_Conv(name + '_x2', dense_block_1, input_channels = K*4,
                                          output_channels = K, kernel_size = 3, stride = 1)
        # Concatenate the new features onto the input (DenseNet-style growth).
        dense_block = tf.concat([bottom, dense_block_2], axis = 3)
        print('Dense_Block layer {0} -> {1}'.format(bottom.get_shape().as_list(),dense_block.get_shape().as_list()))
        return dense_block
    def BN_Relu_Conv(self, name, bottom, input_channels, output_channels, kernel_size, stride = 1):
        """Pre-activation unit: batch norm -> ReLU -> convolution."""
        # NOTE(review): relies on self.train_mode, which __init__ does not
        # set (only self.is_training exists) — confirm it is assigned
        # elsewhere before this method is called.
        batch_norm_scale = self.batch_norm_layer(name, bottom,phase_train = self.train_mode)
        relu = tf.nn.relu(batch_norm_scale)
        conv = self.conv_layer(bottom = relu, kernel_size = kernel_size, in_channels = input_channels,
                               out_channels = output_channels, stride = stride, name = name)
        return conv
def Conv_Bn_Relu(self, name, bottom, output_channels, kernel_size, stride = 1, relu = True, bn = True):
input_channels = bottom.get_shape().as_list()[-1]
conv_layer = self.conv_layer(bottom = bottom, kernel_size = kernel_size, in_channels = input_channels,
out_channels = output_channels, stride = stride, regularizer=tf.contrib.layers.l2_regularizer(0.0005) ,name = name)
if bn == True:
batch_norm_scale = self.batch_norm_layer(name = name, bottom = conv_layer, phase_train = self.train_mode)
else:
batch_norm_scale = conv_layer
if relu == True:
relu_layer = tf.nn.relu(batch_norm_scale)
else:
relu_layer = batch_norm_scale
return relu_layer
def avg_pool(self,bottom, kernel_size = 2, stride = 2, name = "avg"):
avg_pool = tf.nn.avg_pool(bottom, ksize=[1, kernel_size, kernel_size, 1], strides=[1, stride, stride, 1], padding='SAME', name=name)
print('avg_pool layer {0} -> {1}'.format(bottom.get_shape().as_list(),avg_pool.get_shape().as_list()))
return avg_pool
def max_pool(self,bottom, kernel_size = 3, stride = 2, name = "max"):
max_pool = tf.nn.max_pool(bottom, ksize=[1, kernel_size, kernel_size, 1], strides=[1, stride, stride, 1], padding='SAME', name=name)
print('max_pool layer {0} -> {1}'.format(bottom.get_shape().as_list(),max_pool.get_shape().as_list()))
return max_pool
def conv_layer(self, bottom, kernel_size, in_channels, out_channels, stride, name, regularizer = None):
with tf.variable_scope(name):
filt, conv_biases = self.get_conv_var(kernel_size, in_channels, out_channels, name, regularizer = regularizer)
conv = tf.nn.conv2d(bottom, filt, [1,stride,stride,1], padding='SAME')
bias = tf.nn.bias_add(conv, conv_biases)
tf.summary.histogram('weight', filt)
tf.summary.histogram('bias', conv_biases)
return bias
    def batch_norm_layer(self, name, bottom, phase_train, decay=0.5):
        """
        Global batch normalization over an input of shape
        [batch_size, height, width, channel].

        :param name: scope prefix; BN variables live under name + '_bn'.
        :param bottom: NHWC input tensor.
        :param phase_train: boolean tensor. True -> use batch statistics and
            update the moving averages; False -> use the stored averages.
        :param decay: moving-average decay for assign_moving_average.
            NOTE(review): 0.5 is unusually low for BN (typically ~0.9+) --
            confirm this is intentional.
        """
        n_out = bottom.get_shape().as_list()[-1]
        #restore the stored moving_mean moving_variance, beta, gamma if use pretrained model
        moving_mean,moving_variance,gamma,beta = self.get_batchnorm_var(n_out, name + '_bn')
        def mean_var_with_update():
            # Training branch: compute batch moments, and make returning them
            # depend on the moving-average update ops so the averages advance.
            mean, variance = tf.nn.moments(bottom, [0,1,2], name='moments')
            with tf.control_dependencies([assign_moving_average(moving_mean, mean, decay),
                                          assign_moving_average(moving_variance, variance, decay)]):
                return tf.identity(mean), tf.identity(variance)
        # Eval branch: use the restored moving mean and moving variance.
        mean, variance = tf.cond(phase_train, mean_var_with_update, lambda: (moving_mean, moving_variance))
        return tf.nn.batch_normalization(bottom, mean, variance, beta, gamma, configs['_BATCH_NORM_EPSILON'])
def deconv_bn_relu(self, bottom, name, kernel_size, output_channels, stride = 2, bn=False, training=False, relu=True):
deconv_layer = self.deconv_layer(bottom, name, output_channels, kernel_size, stride, regularizer=None)
if bn:
deconv_layer = self.batch_norm_layer(name, bottom = deconv_layer, phase_train = self.train_mode)
if relu:
deconv_layer = tf.nn.relu(deconv_layer, name=name)
print('Deconv layer {0} -> {1}'.format(bottom.get_shape().as_list(), deconv_layer.get_shape().as_list()))
return deconv_layer
def deconv_layer(self, bottom, name, output_channels, kernel_size, stride, regularizer=None):
input_shape = bottom.get_shape().as_list()
output_shape = [input_shape[0], input_shape[1]*stride, input_shape[2]*stride, output_channels]
kernel_shape = | |
return inp[::factor,:,:,:,:]
elif (axis + 8) % 8 == 1:
return inp[:,::factor,:,:,:]
elif (axis + 8) % 8 == 2:
return inp[:,:,::factor,:,:]
elif (axis + 8) % 8 == 3:
return inp[:,:,:,::factor,:]
elif (axis + 8) % 8 == 4:
return inp[:,:,:,:,::factor]
elif inp.ndim == 6:
if (axis + 8) % 8 == 0:
return inp[::factor,:,:,:,:,:]
elif (axis + 8) % 8 == 1:
return inp[:,::factor,:,:,:,:]
elif (axis + 8) % 8 == 2:
return inp[:,:,::factor,:,:,:]
elif (axis + 8) % 8 == 3:
return inp[:,:,:,::factor,:,:]
elif (axis + 8) % 8 == 4:
return inp[:,:,:,:,::factor,:]
elif (axis + 8) % 8 == 5:
return inp[:,:,:,:,:,::factor]
elif inp.ndim == 7:
if (axis + 8) % 8 == 0:
return inp[::factor,:,:,:,:,:,:]
elif (axis + 8) % 8 == 1:
return inp[:,::factor,:,:,:,:,:]
elif (axis + 8) % 8 == 2:
return inp[:,:,::factor,:,:,:,:]
elif (axis + 8) % 8 == 3:
return inp[:,:,:,::factor,:,:,:]
elif (axis + 8) % 8 == 4:
return inp[:,:,:,:,::factor,:,:]
elif (axis + 8) % 8 == 5:
return inp[:,:,:,:,:,::factor,:]
elif (axis + 8) % 8 == 6:
return inp[:,:,:,:,:,:,::factor]
elif inp.ndim == 8:
if (axis + 8) % 8 == 0:
return inp[::factor,:,:,:,:,:,:,:]
elif (axis + 8) % 8 == 1:
return inp[:,::factor,:,:,:,:,:,:]
elif (axis + 8) % 8 == 2:
return inp[:,:,::factor,:,:,:,:,:]
elif (axis + 8) % 8 == 3:
return inp[:,:,:,::factor,:,:,:,:]
elif (axis + 8) % 8 == 4:
return inp[:,:,:,:,::factor,:,:,:]
elif (axis + 8) % 8 == 5:
return inp[:,:,:,:,:,::factor,:,:]
elif (axis + 8) % 8 == 6:
return inp[:,:,:,:,:,:,::factor,:]
elif (axis + 8) % 8 == 7:
return inp[:,:,:,:,:,:,:,::factor]
else:
if method == 'interp':
if verbose:
print 'Determining the interpolating function for downsampling.'
tol = 1e-10
reqd_inds = NP.arange(0, inp.shape[axis]-1+tol, factor)
# intpfunc = interpolate.interp1d(NP.arange(inp.shape[axis]), inp,
# kind=kind, fill_value=fill_value,
# axis=axis)
# result = intpfunc(reqd_inds)
result = OPS.interpolate_array(inp, NP.arange(inp.shape[axis]), reqd_inds, axis=axis, kind=kind)
elif method in ['FFT', 'fft']:
nout = NP.round(inp.shape[axis] / factor).astype(int)
result = signal.resample(inp, nout, t=None, axis=axis, window=None)
else:
raise ValueError('Invalid method specified for downsampling')
if verbose:
print 'Returning the downsampled data.'
return result
#################################################################################
def upsampler(inp, factor, axis=-1, verbose=True, kind='linear',
              fill_value=NP.nan):

    """
    -----------------------------------------------------------------------------
    Routine to upsample a given input sequence along a specific dimension
    where the input could be multi-dimensional (up to 8 dimensions)

    Inputs:

    inp         [Numpy array] array to be upsampled. Cannot have more than
                8 dimensions. 1-D inputs are reshaped to (1, N) first.

    factor      [scalar] upsampling factor; positive int or float >= 1.
                Upsampling is obtained by interpolation.

    Keyword Inputs:

    axis        [scalar] axis along which to upsample. Default = -1 (last axis).

    verbose     [Boolean] print progress/diagnostic messages. Default = True

    kind        [string] interpolation kind: 'linear', 'quadratic' or 'cubic'.
                Default = 'linear'

    fill_value  [scalar] value for locations outside the input index range.
                Default = NaN. NOTE(review): with a numeric fill_value,
                scipy's interp1d keeps bounds_error=True, so out-of-range
                sample points raise ValueError instead of being filled --
                confirm intended usage.
    -----------------------------------------------------------------------------
    """

    try:
        inp
    except NameError:
        raise NameError('No input specified. Aborting upsampler().')

    try:
        factor
    except NameError:
        if verbose:
            # Parenthesized print works under both Python 2 and 3.
            print('No upsampling factor specified. No upsampling performed on input.')
        # BUG FIX: previously returned the *builtin* `input` instead of the
        # argument `inp`. (The branch is unreachable for normal calls since
        # `factor` is a bound parameter, but the fix keeps it correct.)
        return inp

    if not isinstance(inp, NP.ndarray):
        raise TypeError('Input should be a numpy array. Aborting upsampler().')

    if not isinstance(factor, (int, float)):
        raise TypeError('Upsampling factor must be a scalar value.')

    if factor < 1.0:
        raise ValueError('Upsampling factor must be greater than 1.')

    if len(inp.shape) < 2:
        inp = inp.reshape(1,-1)

    # BUG FIX: valid axes are -ndim .. ndim-1. The old check rejected
    # axis == -ndim (valid) and accepted axis == ndim (invalid).
    if (axis < -len(inp.shape)) or (axis >= len(inp.shape)):
        raise IndexError('The axis specified does not exist in the input. Aborting upsampler().')

    if len(inp.shape) > 8:
        raise ValueError('The routine cannot handle inputs with more than 8 dimensions. Aborting upsampler().')

    if factor == 1:
        if verbose:
            print('Upsampling factor is 1. No upsampling performed. Returning the original array.')
        return inp

    if verbose:
        print('Determing the interpolating function for upsampling.')
    intpfunc = interpolate.interp1d(NP.arange(inp.shape[axis]), inp,
                                    kind=kind, fill_value=fill_value,
                                    axis=axis)
    # BUG FIX: true (float) division; `1/factor` is 0 for integer factors
    # under Python 2, which would make arange() fail.
    reqd_inds = NP.arange(0, inp.shape[axis], 1.0/factor)
    if verbose:
        print('Returning the upsampled data.')
    return intpfunc(reqd_inds)
#################################################################################
def XC(inp1, inp2=None, pow2=False, shift=True):

    """
    -----------------------------------------------------------------------------
    Cross-correlate two sequences.

    Inputs:

    inp1:   [list or numpy array] First sequence.

    inp2:   [list or numpy array] If not given, auto-correlation of inp1 is
            returned.

    pow2    [boolean] If True, zero-pad the correlation so its length is the
            next power of 2; if False, pad with a single zero. Default = False

    shift:  [Boolean] If True, shift the correlated product into FFT order:
            zero lag first, positive lags following, negative lags in the
            second half. Default = True

    Output: The correlation of inp1 and inp2, of length
            len(inp1)+len(inp2)-1 zero padded to the nearest power of 2
            (pow2=True) or by one sample (pow2=False), shifted to match a
            Fourier-transform-based estimate.
    -----------------------------------------------------------------------------
    """

    try:
        inp1
    except NameError:
        raise NameError('inp1 not defined. Aborting XC().')

    if not isinstance(inp1, (list, tuple, NP.ndarray, int, float, complex)):
        raise TypeError('inp1 is of the wrong data type. Check inputs again. Aborting XC().')
    inp1 = NP.asarray(inp1)

    if inp2 is None:
        inp2 = NP.copy(inp1)
    elif not isinstance(inp2, (list, tuple, int, float, complex, NP.ndarray)):
        raise TypeError('inp2 has incompatible data type. Verify inputs. Aborting XC().')
    inp2 = NP.asarray(inp2)

    full_length = len(inp1) + len(inp2) - 1
    if pow2:
        # BUG FIX: 2**NP.ceil(...) is a numpy float and NP.pad rejects
        # non-integer pad widths; cast the pad length to int.
        zero_pad_length = int(2**NP.ceil(NP.log2(full_length))) - full_length
    else:
        zero_pad_length = 1
    xc = NP.pad(NP.correlate(inp1, inp2, mode='full'), (zero_pad_length,0), mode='constant', constant_values=(0.0,0.0))
    # Split the zero padding roughly evenly around the lag axis.
    xc = NP.roll(xc, -int(NP.floor(0.5*zero_pad_length)))
    if shift:
        xc = NP.fft.ifftshift(xc)
    return xc
#################################################################################
def spectax(length, resolution=1.0, shift=True, use_real=False):

    """
    -----------------------------------------------------------------------------
    Determine the spectral axis after a Fourier Transform.

    Inputs:

    length       [Scalar] Positive integer length of the sequence to be
                 Fourier transformed.

    resolution   [Scalar] Positive resolution of the sequence before the
                 Fourier Transform.

    Keyword Inputs:

    use_real     [Boolean] Treat the input sequence as real-valued and
                 compute the spectral axis accordingly. Default = False

    shift        [Boolean] fftshift the spectral axis values. Default = True

    Output:      Spectral axis for the given length and resolution.
    -----------------------------------------------------------------------------
    """

    # Vestigial guard kept from the original: `length` is a bound parameter,
    # so this NameError can never fire for a normal call.
    try:
        length
    except NameError:
        raise NameError('Input length not defined. Aborting spectax().')

    if not isinstance(resolution, (int, float)):
        raise TypeError('Input resolution must be a positive scalar integer or floating point number. Aborting spectax().')
    if resolution < 0.0:
        raise ValueError('Input resolution must be positive. Aborting spectax().')

    # Delegate the actual axis construction to the module-level helper.
    return spectral_axis(length, resolution, shift, use_real)
#################################################################################
def smooth(inp, wts=None, width=None, stat='mean', verbose=True):
"""
-----------------------------------------------------------------------------
Smoothen the input data using a moving average or median window along an
axis
Inputs:
inp [Numpy vector or array] M x N numpy array which has to be
smoothed across columns.
Keyword Inputs:
wts [Numpy vector] 1 x P array which will be used as the window of
weights in case of a moving average. Will not be used if a
median is used in place of mean. P <= N. Sum of the weights
should equal unity, otherwise the weights will be accordingly
scaled. Default = None. If not set, then it will be set to a
rectangular window of width specified in width (see below)
width [scalar] Width of the moving window. Has to be positive. Default
is None. If width is None, wts should be set. One and only one
among wts and width should be set.
stat [string scalar] String specifying the statistic ('mean' or
'median') to be used. Default = 'mean'
verbose [boolean] If set to True (default), print messages indicating
progress
| |
import re
import csv
import logging
import math
import glob
# import argparse
import numpy as np
import os
import pandas as pd
import time
import datetime
import drms
import urllib
# import json
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import astropy.units as u
import telegram_handler
# import warnings
import sunpy.wcs
import sunpy.map
import pickle
import telepot
from colorlog import ColoredFormatter
from astropy.coordinates import SkyCoord
# from astropy.io import fits
# from astropy.time import Time
# from datetime import timedelta
# from sunpy.coordinates import frames
# from astropy.coordinates import SkyCoord
from tg_tqdm import tg_tqdm
# from tqdm import tqdm
# warnings.filterwarnings("ignore")
# define constants
# NOTE(review): EMAIL / tg_bot_token / chat ids below are scrubbed
# placeholders -- fill in real credentials before running.
EMAIL = '<EMAIL>'
SAVE_PATH = 'dataset'
# Telegram bot credentials and chat ids used for progress reporting.
tg_bot_token = 'TOKEN'
tm_chat_id = 1234
ik_chat_id = 1234
sun_group_id = -1234
# Date boundary between instruments (presumably MDI vs HMI -- TODO confirm
# against the code that consumes it).
DATE_DELIMIT = '2010-06-28'
TG_LOGGER = False     # when True, mirror log records to the Telegram group
FILE_DELETE = False
LOGGER_LEVEL = logging.WARNING
# LOGGER_LEVEL = logging.DEBUG
VERBOSE = True
PERIOD = 300
START_DATE = '1996-04-01'
CROP_DATE = '2017-11-01'
SLEEP = 0.1           # delay (s) before Telegram-logged warnings (see region_coord_list)
PROGRESS = 10
def set_logger(level=logging.WARNING, name='logger', telegram=False):
    """Return a logger with a default ColoredFormatter.

    :param level: logging level for the returned logger.
    :param name: logger name (loggers are process-wide singletons per name).
    :param telegram: if True, also mirror records to the Telegram group.
    :return: the configured ``logging.Logger``.
    """
    file_formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(name)s: %(funcName)s - %(message)s")
    stream_formatter = ColoredFormatter(
        "%(asctime)s [%(log_color)s%(levelname)-8s%(reset)s: %(funcName)s] %(white)s%(message)s",
        datefmt=None,
        reset=True,
        log_colors={
            'DEBUG': 'cyan',
            'INFO': 'green',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'red',
        }
    )
    logger = logging.getLogger(name)
    # BUG FIX: handlers used to be appended unconditionally, so calling
    # set_logger() twice for the same name duplicated every log record.
    if not logger.handlers:
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(stream_formatter)
        log_handler = logging.FileHandler("fits_parse.log")
        log_handler.setFormatter(file_formatter)
        logger.addHandler(stream_handler)
        logger.addHandler(log_handler)
        if telegram:
            tg_handler = telegram_handler.TelegramHandler(tg_bot_token, sun_group_id)
            tg_formatter = telegram_handler.HtmlFormatter()
            tg_handler.setFormatter(tg_formatter)
            logger.addHandler(tg_handler)
    logger.setLevel(level)
    return logger
# Module-wide logger shared by all helpers below; configured once at import.
logger = set_logger(level=LOGGER_LEVEL, name='sun_logger', telegram=TG_LOGGER)
def check_dataset_directory():
    """Ensure the fragment output folders for both instruments exist.

    Creates 'HMIdataset/fragments' and 'MDIdataset/fragments' when missing
    (logging a warning). Always returns True.
    """
    # Deduplicated: the two copy-pasted check/create stanzas become one loop.
    for fragments_dir in ('HMIdataset/fragments', 'MDIdataset/fragments'):
        if not os.path.exists(fragments_dir):
            logger.warning('{} folders not exist, create them'.format(fragments_dir.split('/')[0]))
            os.makedirs(fragments_dir)
    return True
def clean_folder(path):
    """Delete every regular file directly inside *path* (subdirectories kept)."""
    entries = (os.path.join(path, entry) for entry in os.listdir(path))
    for full_path in filter(os.path.isfile, entries):
        os.remove(full_path)
    return True
def message_of_start(token=tg_bot_token, id=sun_group_id):
    """Send a Telegram message announcing that fits parsing has started."""
    # NOTE(review): `id` shadows the builtin; kept for interface stability.
    bot = telepot.Bot(token)
    bot.sendMessage(id, 'Start parsing fits on remote server')
def message_of_start_cropping(token=tg_bot_token, id=sun_group_id):
    """Send a visually separated Telegram banner announcing region cropping."""
    bot = telepot.Bot(token)
    bot.sendMessage(id, '-' * 30)
    bot.sendMessage(id, 'Start cropping regions')
    bot.sendMessage(id, '-' * 30)
def hook_for_download_fits(t):
    """Build a urllib ``reporthook`` that forwards download progress to *t*.

    *t* is a tqdm-like object (a ``total`` attribute and an ``update(n)``
    method). Don't forget to close() or __exit__() the tqdm instance once
    you're done with it (easiest using `with` syntax).

    Example
    -------
    >>> with tqdm(...) as t:
    ...     reporthook = hook_for_download_fits(t)
    ...     urllib.urlretrieve(..., reporthook=reporthook)
    """
    last_block = 0

    def update_to(b=1, bsize=1, tsize=None):
        """
        b : int, optional
            Number of blocks transferred so far [default: 1].
        bsize : int, optional
            Size of each block (in tqdm units) [default: 1].
        tsize : int, optional
            Total size (in tqdm units). If [default: None] remains unchanged.
        """
        nonlocal last_block
        if tsize is not None:
            t.total = tsize
        # Report only the delta since the previous callback.
        t.update((b - last_block) * bsize)
        last_block = b

    return update_to
def request_mfits_by_date_MDI(moment, email=EMAIL, path_to_save='MDIdataset', verbose=False):
    """
    Request a single MDI magnetogram fits from the JSOC database.

    moment: pd.datetime object
    return: filepath to the magnetogram (whether or not it was just downloaded)
    """
    filename = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
    filepath = os.path.join(path_to_save, filename)
    # Skip the export round-trip entirely when the file is already on disk.
    if not os.path.exists(filepath):
        client = drms.Client(email=email, verbose=verbose)
        query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]')
        logger.info('Magnetogram: {} will be downloaded ... '.format(query))
        export_req = client.export(query, method='url', protocol='fits')
        logger.debug(export_req)
        try:
            export_req.wait()
            logger.info(export_req.request_url)
        except Exception as e:
            logger.warning('Can not wait anymore, skip this. Get Exception: {}'.format(e))
        try:
            logger.info("Download data and save to path {}".format(filepath))
            export_req.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Get error while trying download: {}'.format(e))
            logger.warning('Skip this date')
    return filepath
def request_batch_mfits_by_date(moment,
                                period_of_days=30, email=EMAIL,
                                path_to_save='dataset',
                                verbose=False,
                                type_mag='MDI',
                                token=tg_bot_token,
                                chat_id=sun_group_id):
    '''Request a batch of magnetogram fits covering a period of days from JSOC.

    BUG FIX: the ``token`` default previously read ``tg_<PASSWORD>_token``
    -- a credential-scrubbing artifact and a syntax error. Restored to the
    module-level ``tg_bot_token``.

    :param moment: pd.datetime-like start of the batch.
    :param period_of_days: number of days requested; automatically shrunk
        when the JSOC export request fails.
    :param type_mag: 'MDI' or 'HMI' (selects the series and output folder).
    :return: tuple of (request url, period of days actually applied,
        first date of batch, last date of batch, number of files in batch).
    '''
    c = drms.Client(email=email, verbose=verbose)

    def set_str_for_query(period_of_days=period_of_days):
        # Build the JSOC query string, the output folder, and one expected
        # filename (used below to detect an already-downloaded batch).
        if type_mag == 'MDI':
            str_for_query = 'mdi.fd_M_96m_lev182' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            filename_to_check = 'mdi.fd_m_96m_lev182.' + moment.strftime('%Y%m%d_%H%M%S_TAI.data.fits')
            path_to_save = 'MDIdataset'
        if type_mag == 'HMI':
            str_for_query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI/{}d@24h]'.format(period_of_days))
            path_to_save = 'HMIdataset'
            filename_to_check = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
        # NOTE(review): any other type_mag raises UnboundLocalError here --
        # confirm callers only ever pass 'MDI' or 'HMI'.
        return str_for_query, path_to_save, filename_to_check

    str_for_query, path_to_save, filename_to_check = set_str_for_query()
    logger.debug('{}\n{}\n{}'.format(str_for_query, path_to_save, filename_to_check))
    if os.path.exists(os.path.join(path_to_save, filename_to_check)):
        period_of_days = 10
        logger.info('Files already exists. Skip downloads this batch size of {}'.format(period_of_days))
        return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
    logger.info('Magnetogram: {} will be downloaded ... '.format(str_for_query))
    r = c.export(str_for_query, protocol='fits')
    logger.debug(r)
    logger.debug(r.has_failed())
    # Shrink the requested window step by step until JSOC accepts it.
    treshold = round(math.log(period_of_days) ** 2 / 2)
    while r.has_failed():
        period_of_days -= round(treshold)
        if period_of_days < round(treshold / 2):
            logger.warning('Period of days is too small, skip this request to 10 days')
            logger.warning('Export request was {}: '.format(str_for_query))
            period_of_days = 10
            return None, period_of_days, moment, moment + datetime.timedelta(days=period_of_days), period_of_days
        time.sleep(1)
        logger.info('Export request has failed. Reduce number of days in it on {}. Now days in request {}'.format(int(treshold), period_of_days))
        str_for_query, _, _ = set_str_for_query(period_of_days=period_of_days)
        logger.debug('Request string: {}'.format(str_for_query))
        r = c.export(str_for_query, protocol='fits')
        logger.debug(r)
        logger.debug(len(r.data))
    try:
        r.wait(sleep=10, retries_notfound=10)
    except Exception as e:
        logger.error('Can not wait anymore, skip this. Get Exception: {}'.format(e))
    logger.info("Download data and save to path {}".format(path_to_save))
    # Extract first/last record dates, e.g. '...[2001.04.30_...]' -> '2001-04-30'.
    first_date_batch = r.urls[0:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    last_date_batch = r.urls[-1:]['record'].values[0].replace('[', ' ').split()[1].split('_')[0].replace('.', '-')
    with tg_tqdm(r.urls.index, token=token, chat_id=chat_id, desc='DOWNLOAD BATCH',
                 postfix='start_date = {}, end_date = {}'.format(first_date_batch, last_date_batch)) as batch_d:
        for ind in batch_d:
            try:
                urllib.request.urlretrieve(r.urls.url[ind], os.path.join(path_to_save, r.urls.filename[ind]))
            except Exception as e:
                logger.error('Get error while trying download {}: {}'.format(r.urls.url[ind], repr(e)))
                logger.warning('Skip this file')
    len_batch = len(r.urls)
    return r.request_url, period_of_days, first_date_batch, last_date_batch, len_batch
def request_mfits_by_date_HMI(moment, email=EMAIL, path_to_save='HMIdataset', verbose=False):
    """
    Request a single HMI magnetogram fits from the JSOC database.

    moment: pd.datetime object
    return: filepath to the magnetogram (whether or not it was just downloaded)
    """
    filename = 'hmi.m_720s.' + moment.strftime('%Y%m%d_%H%M%S_TAI.magnetogram.fits')
    filepath = os.path.join(path_to_save, filename)
    # Skip the export round-trip entirely when the file is already on disk.
    if not os.path.exists(filepath):
        client = drms.Client(email=email, verbose=verbose)
        query = 'hmi.m_720s' + moment.strftime('[%Y.%m.%d_%H:%M:%S_TAI]{magnetogram}')
        logger.info('Magnetogram: {} will be downloaded ... '.format(query))
        export_req = client.export(query, method='url', protocol='fits')
        logger.debug(export_req)
        try:
            export_req.wait()
            logger.info(export_req.request_url)
        except Exception as e:
            logger.warning('Can not wait anymore, skip this. Get Exception: {}'.format(e))
        try:
            logger.info("Download data and save to path {}".format(filepath))
            export_req.download(path_to_save, verbose=verbose)
        except Exception as e:
            logger.error('Get error while trying download: {}'.format(e))
            logger.warning('Skip this date')
    return filepath
def read_fits_to_map(filepath, plot_show=False, ln=False):
    """
    Read a fits file into a sunpy map object.

    :param filepath: path to the fits file.
    :param plot_show: if True, display the magnetogram with matplotlib.
    :param ln: if True (and plot_show), display with a symmetric log scale,
        sign(x) * log(1 + |x|), instead of the raw values.
    :return: mymap: sunpy Map object
    """
    mymap = sunpy.map.Map(filepath)
    if plot_show:
        plt.figure(figsize=(12, 12))
        if ln:
            data = np.sign(mymap.data) * np.log1p(np.abs(mymap.data))
        else:
            # BUG FIX: this assignment previously ran unconditionally, which
            # silently discarded the log-scaled data computed when ln=True.
            data = mymap.data
        plt.imshow(data, cmap='gray')
    return mymap
def region_coord_list(datestr, sunspots_df, limit_deg=45):
    """
    Collect the active regions observed on a given date, restricted to
    locations within +/- limit_deg degrees of disk centre.

    Works with the sunspot_1996_2017.pkl dataframe; used in cropping.

    args:
        datestr: date string in the dataframe's format, e.g. '2001-04-30'
        sunspots_df: dataframe from file sunspot_1996_2017.pkl
        limit_deg: maximum |latitude| and |longitude| accepted, in degrees
    return: list of tuples (date as pd.Timestamp, NOAA number, location string)
    """
    date_df = sunspots_df.loc[datestr]
    # Drop the leading index level so iteration yields the NOAA number.
    date_df.index = date_df.index.droplevel()
    rc_list = []
    for index, row in date_df.iterrows():
        try:
            # BUG FIX: abs() was applied to the *comparison* (a boolean)
            # instead of the parsed latitude value; apply it to the value.
            restriction_degree = (abs(float(row.location[1:3])) <= limit_deg) and (abs(float(row.location[4:])) <= limit_deg)
            if restriction_degree:
                rc_list.append((pd.to_datetime(datestr, format='%Y-%m-%d'), index, row.location))
        except ValueError as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.warning('Some error with read location {} in degree for date {}: {}'.format(row.location, datestr, e))
        except Exception as e:
            if TG_LOGGER:
                time.sleep(SLEEP)
            logger.error('Some error with read location {} in degree for date {}: {}'.format(row.location, datestr, e))
    return rc_list
def return_pixel_from_map(mag_map, record, limit_deg=45):
    '''
    Convert a heliographic location string (e.g. 'N20E18') into pixel
    coordinates on the given sun map.

    :param mag_map: sunpy Map onto which the location is projected.
    :param record: location string matching [NS]dd[EW]dd.
    :param limit_deg: maximum |latitude| / |longitude| accepted, in degrees.
    :return: pixel position (as returned by world_to_pixel, times u.pixel).
    '''
    # Raw string avoids the invalid-escape-sequence warning for \d.
    pattern = re.compile(r"[NS]\d{2}[EW]\d{2}")
    assert bool(pattern.match(record)), 'Pattern should be in the same format as N20E18'
    # BUG FIX: abs() was wrapped around the comparison for the latitude part,
    # and the failure message had two placeholders but only one format
    # argument (turning a failed assert into an IndexError).
    assert (abs(float(record[1:3])) <= limit_deg) and (abs(float(record[4:])) <= limit_deg), \
        'Consider only regions between -{}, +{} degree'.format(limit_deg, limit_deg)
    if record[0] == 'N':
        lat = float(record[1:3])
    else:
        lat = -float(record[1:3])
    if record[3] == 'W':
        lon = float(record[4:])
    else:
        lon = -float(record[4:])
    # Heliographic -> helioprojective-cartesian, then to pixel coordinates.
    hpc_coord = sunpy.wcs.convert_hg_hpc(lon, lat, b0_deg=mag_map.meta['crlt_obs'])
    coord = SkyCoord(hpc_coord[0] * u.arcsec, hpc_coord[1] * u.arcsec, frame=mag_map.coordinate_frame)
    # NOTE(review): the extra `* u.pixel` matches the original behaviour --
    # confirm the units expected downstream.
    pixel_pos = mag_map.world_to_pixel(coord) * u.pixel
    return pixel_pos
def crop_regions(mag_map, rc_list, type_mag, delta=100, plot_rec=False, plot_crop=False, limit_deg=45, ln=False):
'''
Crop region by size delta and save it to disk,
if plot_rec, plot rectangle of regions on disk,
if plot_crop, plot only crop regions
'''
if ln:
data = np.sign(mag_map.data) * np.log1p(np.abs(mag_map.data))
data = mag_map.data
if type_mag == 'MDI':
delta = 100
if type_mag == 'HMI':
delta = 200
if plot_rec:
fig, ax = plt.subplots(1, figsize=(12, 12))
ax.matshow(data)
plt.gray()
ax.set_title('{} magnetogram at '.format(type_mag) + rc_list[0][0].strftime('%Y-%m-%d %H:%M'))
for record in rc_list:
try:
pxs = return_pixel_from_map(mag_map, record[2], limit_deg).to_value()
except Exception as e:
logger.error('Some error with get pixel coordinates from map: {}. Skip it'.format(e))
continue
rect = patches.Rectangle((pxs[0] - 1.25 * delta, pxs[1] - delta), 2.5 * delta, 2 * delta, linewidth=3, edgecolor='r', facecolor='none')
ax.add_patch(rect)
ax.annotate('{}.AR'.format(type_mag) + str(record[1]), xy=(pxs[0], pxs[1]), xytext=(pxs[0], pxs[1] - 50), color='yellow', fontsize='xx-large')
plt.show()
submaps = []
for record in rc_list:
filename = '{}.{}.AR{}.fits'.format(type_mag, record[0].strftime('%Y-%m-%d_%H%M%S'), record[1])
filepath = os.path.join('{}dataset/fragments'.format(type_mag), filename)
try:
pxs = return_pixel_from_map(mag_map, record[2], limit_deg)
except Exception as e:
logger.error('Some error with get pixel coordinates from map: {}. Skip it'.format(e))
continue
bot_l = [pxs[0] - delta * 1.25 * u.pixel, pxs[1] - delta * u.pixel]
top_r = [pxs[0] + | |
self.df.repartition(num_partitions).write.csv(path=path, mode=mode, header=header)
else:
self.df.write.csv(path=path, mode=mode, header=header)
def _concat(self, join="outer"):
def concat_inner(self, df2):
col_names_1 = set(self.schema.names)
col_names_2 = set(df2.schema.names)
for col in list(col_names_1.difference(col_names_2)):
self = self.drop(col)
for col in list(col_names_2.difference(col_names_1)):
df2 = df2.drop(col)
return self.unionByName(df2)
def concat_outer(self, df2):
col_names_1 = set(self.schema.names)
col_names_2 = set(df2.schema.names)
for col in col_names_1.difference(col_names_2):
df2 = df2.withColumn(col, lit(None).cast(self.schema[col].dataType))
for col in col_names_2.difference(col_names_1):
self = self.withColumn(col, lit(None).cast(df2.schema[col].dataType))
return self.unionByName(df2)
if join == "outer":
return concat_outer
else:
return concat_inner
    def concat(self, tables, mode="inner", distinct=False):
        """
        Concatenate a list of Tables into one Table in the dimension of row.

        :param tables: a Table or a list of Tables.
        :param mode: str, either inner or outer. For inner mode, the new Table would only
               contain columns that are shared by all Tables. For outer mode, the resulting
               Table would contain all the columns that appear in all Tables (missing
               columns are filled with nulls).
        :param distinct: boolean. If True, the result Table would only contain distinct rows.
               Default is False.

        :return: A single concatenated Table.
        """
        if mode not in ["outer", "inner"]:
            raise ValueError("concat mode should be either outer or inner,\
                but got {}.".format(mode))
        if not isinstance(tables, list):
            tables = [tables]
        # Fold all DataFrames (including our own) with the chosen merge mode.
        dfs = [table.df for table in tables] + [self.df]
        df = reduce(self._concat(mode), dfs)
        if distinct:
            df = df.distinct()
        return self._clone(df)
    def drop_duplicates(self, subset=None, sort_cols=None, keep="min"):
        """
        Return a new Table with duplicate rows removed.

        :param subset: str or a list of str, specifies which column(s) to be considered when
               referring to duplication. If subset is None, all the columns will be considered.
        :param sort_cols: str or a list of str, specifies the column(s) to determine which
               item to keep when duplicated. If sort_cols is None, duplicate rows will be
               dropped randomly.
        :param keep: str, the strategy to keep the duplicate, either min and max. Default is min.
               It will only take effect when sort_cols is not None.
               If keep is min, rows with the smallest values in sort_cols will be kept.
               If keep is max, rows with the largest values in sort_cols will be kept.

        :return: A new Table with duplicate rows removed.
        """
        if subset is not None:
            if not isinstance(subset, list):
                subset = [subset]
            check_col_exists(self.df, subset)
        else:
            subset = self.columns
        if sort_cols is None:
            # No ordering requested: delegate to Spark's dropDuplicates.
            return self._clone(self.df.dropDuplicates(subset=subset))
        if not isinstance(sort_cols, list):
            sort_cols = [sort_cols]
        check_col_exists(self.df, sort_cols)
        # The synthetic monotonically-increasing 'id' column is used as a final
        # tie-breaker so ranking within each partition is deterministic.
        if keep == "min":
            window = Window.partitionBy(subset).orderBy(*sort_cols, 'id')
        elif keep == "max":
            window = Window.partitionBy(subset).orderBy(*[self.df[sort_col].desc()
                                                          for sort_col in sort_cols], 'id')
        else:
            raise ValueError("keep should be either min or max, but got {}.".format(keep))
        # Rank rows inside each duplicate group and keep only the top-ranked one.
        df = self.df.withColumn('id', monotonically_increasing_id()) \
            .withColumn('rank', rank().over(window))
        df = df.filter(pyspark_col('rank') == 1).drop('rank', 'id')
        return self._clone(df)
def append_column(self, name, value):
"""
Append a column with a constant value to the Table.
:param name: str, the name of the new column.
:param value: The constant column value for the new column.
:return: A new Table with the appended column.
"""
return self._clone(self.df.withColumn(name, lit(value)))
    def __getattr__(self, name):
        """
        Delegate unknown attribute access to the wrapped Spark DataFrame,
        so e.g. `table.some_col` resolves on `self.df`. Only invoked when
        `name` is not found on the Table instance itself.
        """
        return self.df.__getattr__(name)
    def col(self, name):
        """
        Return a pyspark Column expression for the given column name.
        """
        return pyspark_col(name)
def sort(self, *cols, **kwargs):
"""
Sort table by the specified col(s).
:param cols: list of :class:`Column` or column names to sort by.
:param ascending: boolean or list of boolean (default ``True``).
Sort ascending vs. descending. Specify list for multiple sort orders.
If a list is specified, length of the list must equal length of the `cols`.
"""
if not cols:
raise ValueError("cols should be str or a list of str, but got None.")
return self._clone(self.df.sort(*cols, **kwargs))
order_by = sort
    def to_pandas(self):
        """Return the contents of this Table as a pandas DataFrame."""
        return self.df.toPandas()
    @staticmethod
    def from_pandas(pandas_df):
        """
        Create a Table from a pandas DataFrame.

        :param pandas_df: pandas dataframe

        :return: a Table backed by a Spark DataFrame with the same contents.
        """
        spark = OrcaContext.get_spark_session()
        sparkDF = spark.createDataFrame(pandas_df)
        return Table(sparkDF)
    def cache(self):
        """
        Persist this table in memory

        :return: this Table
        """
        # Delegates to DataFrame.cache() on the wrapped Spark DataFrame.
        self.df.cache()
        return self
    def uncache(self):
        """
        Make this table non-persistent, and remove all blocks for it from memory

        :return: this Table
        """
        if self.df.is_cached:
            try:
                self.df.unpersist()
            except Py4JError:
                # unpersist() failed on the JVM side -- presumably the table
                # was no longer cached (TODO confirm); treated as best-effort.
                print("Try to unpersist an uncached table")
        return self
class FeatureTable(Table):
    @classmethod
    def read_parquet(cls, paths):
        """
        Loads Parquet files as a FeatureTable.

        :param paths: str or a list of str. The path(s) to Parquet file(s).

        :return: A FeatureTable for recommendation data.
        """
        return cls(Table._read_parquet(paths))
    @classmethod
    def read_json(cls, paths, cols=None):
        """
        Loads json files as a FeatureTable.

        :param paths: str or a list of str. The path(s) to json file(s).
        :param cols: str or a list of str, or None (default). Columns to pass
               through to Table._read_json (presumably the columns to select;
               confirm against that helper).

        :return: A FeatureTable for recommendation data.
        """
        return cls(Table._read_json(paths, cols))
    @classmethod
    def read_csv(cls, paths, delimiter=",", header=False, names=None, dtype=None):
        """
        Loads csv files as a FeatureTable.

        :param paths: str or a list of str. The path(s) to csv file(s).
        :param delimiter: str, delimiter to use for parsing the csv file(s). Default is ",".
        :param header: boolean, whether the first line of the csv file(s) will be treated
               as the header for column names. Default is False.
        :param names: str or a list of str, the column names for the csv file(s). You need to
               provide this if the header cannot be inferred. If specified, names should
               have the same length as the number of columns.
        :param dtype: str or a list of str or dict, the column data type(s) for the csv file(s).
               You may need to provide this if you want to change the default inferred types
               of specified columns.
               If dtype is a str, then all the columns will be cast to the target dtype.
               If dtype is a list of str, then it should have the same length as the number of
               columns and each column will be cast to the corresponding str dtype.
               If dtype is a dict, then the key should be the column name and the value should be
               the str dtype to cast the column to.

        :return: A FeatureTable for recommendation data.
        """
        return cls(Table._read_csv(paths, delimiter, header, names, dtype))
def encode_string(self, columns, indices,
                  do_split=False, sep='\t', sort_for_array=False, keep_most_frequent=False,
                  broadcast=True):
    """
    Encode columns with provided list of StringIndex.

    :param columns: str or a list of str, the target columns to be encoded.
    :param indices: StringIndex or a list of StringIndex, StringIndexes of target columns.
           The StringIndex should at least have two columns: id and the corresponding
           categorical column.
           Or it can be a dict or a list of dicts. In this case,
           the keys of the dict should be within the categorical column
           and the values are the target ids to be encoded.
    :param do_split: bool, whether need to split column value to array to encode string.
           Default is False.
    :param sep: str, a string representing a regular expression to split a column value.
           Default is '\t'
    :param sort_for_array: bool, whether need to sort array columns. Default is False.
    :param keep_most_frequent: bool, whether need to keep most frequent value as the
           column value. Default is False.
    :param broadcast: bool, whether need to broadcast index when encode string.
           Default is True.

    :return: A new FeatureTable which transforms categorical features into unique integer
             values with provided StringIndexes.
    """
    # Normalize both arguments to parallel lists paired by position.
    if not isinstance(columns, list):
        columns = [columns]
    if not isinstance(indices, list):
        indices = [indices]
    assert len(columns) == len(indices)
    # value -> id dicts are converted into StringIndex tables, each paired
    # with its target column by position.
    if isinstance(indices[0], dict):
        indices = list(map(lambda x: StringIndex.from_dict(x[1], columns[x[0]]),
                           enumerate(indices)))
    data_df = self.df
    for i in range(len(columns)):
        index_tbl = indices[i]
        col_name = columns[i]
        if broadcast:
            # Broadcast the (presumably small) index table so the joins
            # below avoid a shuffle.
            index_tbl.broadcast()
        if not do_split:
            # Simple case: left-join on the categorical column, then swap
            # the original column for the joined "id" column.
            data_df = data_df.join(index_tbl.df, col_name, how="left") \
                .drop(col_name).withColumnRenamed("id", col_name)
        else:
            # Split case: explode the sep-delimited values, encode each
            # piece, then regroup the ids per original row via row_id.
            data_df = data_df.withColumn('row_id', F.monotonically_increasing_id())
            tmp_df = data_df.select('row_id', col_name) \
                .withColumn(col_name, F.explode(F.split(F.col(col_name), sep)))
            # Values absent from the index yield null ids; drop them.
            tmp_df = tmp_df.join(index_tbl.df, col_name, how="left") \
                .filter(F.col("id").isNotNull())
            tmp_df = tmp_df.select('row_id', F.col("id"))
            if keep_most_frequent:
                # Keep a single id per row: the smallest id after sorting.
                # NOTE(review): this assumes smaller ids map to more
                # frequent values — confirm against StringIndex generation.
                tmp_df = tmp_df.groupby('row_id') \
                    .agg(F.array_sort(F.collect_list(F.col("id")))
                         .getItem(0).alias("id"))
            elif sort_for_array:
                # Keep all ids, sorted ascending.
                tmp_df = tmp_df.groupby('row_id') \
                    .agg(F.array_sort(F.collect_list(F.col("id"))).alias("id"))
            else:
                # Keep all ids in collection order.
                tmp_df = tmp_df.groupby('row_id') \
                    .agg(F.collect_list(F.col("id")).alias("id"))
            # Reattach the grouped ids and drop the helper row_id column.
            data_df = data_df.join(tmp_df, 'row_id', 'left') \
                .drop('row_id').drop(col_name).withColumnRenamed("id", col_name)
    return FeatureTable(data_df)
def filter_by_frequency(self, columns, min_freq=2):
"""
Filter the FeatureTable by the given minimum frequency on the target columns.
:param columns: str or a list of str, column names which are considered for filtering.
:param min_freq: int, min frequency. Columns with occurrence below this value
would be filtered.
:return: A new FeatureTable with filtered records.
"""
freq_df = self.df
if not isinstance(columns, list):
columns = [columns]
name_string = ''
for | |
Layer: ", l)
# First step: set the value of 'in_shape' for current component.
ds_factor = 2 ** l # downsample factor, for l=0,1 will be 1, 2.
if c == 'a' and l > 0:
nb_channels = self.R_stack_sizes[l-1] # nb of input channels
elif c == 'a' and l == 0:
nb_channels = None
elif c == 'ahat':
nb_channels = self.R_stack_sizes[l] # nb of input channels
else: # 'c', 'f', 'i', 'o'
# add up all of channels in all of the inputs to the R_module for layer l.
# recurrent bottom-up error
nb_channels = self.R_stack_sizes[l] + 2*self.stack_sizes[l]# Remember E vs R numbering offset
# Above: new for RBP model (2 doubles output channels of error module)
if l < self.nb_layers - 1:
# original RBP model: nb_channels += 2*self.stack_sizes[l+1] # In RBP model, adjacent input from E is 2*nb_core channels in R
nb_channels += self.R_stack_sizes[l+1] # Top-down channel count
# Note: in old RBP version, cLSTM does not receive top-down input from next higher cLSTM.
print(" nb_inp_channels : ", nb_channels)
# Below: Now we have info to define in_shape, which will be input to self.conv_layers[c][l].build(in_shape).
# ds_factor is used to calculate dimensions for 2x2 pooling
# in_shape
in_shape = (input_shape[0], nb_channels, nb_row // ds_factor, nb_col // ds_factor) # '//' is floor division
if self.data_format == 'channels_last': in_shape = (in_shape[0], in_shape[2], in_shape[3], in_shape[1])
print(" in_shape : ", in_shape)
# Second step: build the wt set for the current component.
# what does name scope do? (context manager when defining a Python op)
# Need to make sure wt dimensions match input in step() method.
if self.conv_layers[c][l] != None:
print(" kernel_size : ", self.conv_layers[c][l].kernel_size)
print(" nb_out_channels : ", self.conv_layers[c][l].filters) # tells nb of output channels
# Build WEIGHTs
if self.conv_layers[c][l] != None:
self.conv_layers[c][l].build(in_shape) # What is side-effect?
# Above: Conv2D() instance understands its own build() method.
# The build() method is defined for class '_Conv', direct superclass of Conv2D.
# This adds the weights and bias (b/c Conv2D.use_bias is True)
# After this, self.conv_layers[c][l].trainable_weights has wts and understands call().
if self.conv_layers[c][l] != None:
print(" trainable wts length : ", len(self.conv_layers[c][l].trainable_weights))
print(" trainable wts shape : ", self.conv_layers[c][l].trainable_weights[0])
print(" trainable bias shape : ", self.conv_layers[c][l].trainable_weights[1])
self.trainable_weights += self.conv_layers[c][l].trainable_weights
# Above: For some reason add the newly created trainable wts to a list.
# Not used in this file.
# associated w/ the prednet instance.
self.states = [None] * self.nb_layers*3
# Above: creates [None, None, None, None, None, None]
# Used in step().
# Doesn't appear to be executed in current version b/c test eval's to False
if self.extrap_start_time is not None:
self.t_extrap = K.variable(self.extrap_start_time, int if K.backend() != 'tensorflow' else 'int32')
self.states += [None] * 2 # [previous frame prediction, timestep]
print("RETURNING from build()")
# end build()
# STEP
#=====
# Apparently used by Recurrent class
# Arguments:
# a : actual (target) current input frame
# states: For each layer, there are three state entries.
# 1. Outputs of the representation (cLSTM) at t-1.
# 2. Cell states of cLSTM at t-1.
# 3. Error states at t-1.
# Returns: output, states
def step(self, a, states):
print("\n------------------------------------------------------------------")
print("prednet_RBP_28June2019.py: 'step()' called")
print("\noutput_mode: ", self.output_mode)
print(" target a: ", a)
print("\nStates at time t minus 1 (tm1):")
# print("\nstates: ", states)
for i in range(len(states)):
print(" ", states[i])
print("States length: ", len(states)) # states is a tuple of length 6, i.e., 3*nb_layers.
# Below: components used to make up the inputs
# r_tm1: representation (cLSTM) states (stack of images) at prev time step
# c_tm1: cell states at prev time step (different from c-gate)
# e_tm1: error states at prev time step
# tm1: t - 1
r_tm1 = states[:self.nb_layers] # 1st l elements of states tuple. One state per layer. LSTM output state
c_tm1 = states[self.nb_layers:2*self.nb_layers] # Next l elements. LSTM cell state
e_tm1 = states[2*self.nb_layers:3*self.nb_layers] # Last l elements. Error. (Don't know how this is calculated.)
# # Below: temporary to get code running.
# e2_tm1 = states[3*self.nb_layers:4*self.nb_layers] # Last l elements. Error. (Don't know how this is calculated.)
# Test eval's to False: ignore
if self.extrap_start_time is not None:
t = states[-1]
a = K.switch(t >= self.t_extrap, states[-2], a) # if past self.extrap_start_time,
# the previous prediction will be treated as the actual
# initialize state variables for current time step. 'states' will be: r + c + e (list append)
r_cell_outputs = []; r_cell_states = []; e = []
# LOOP1.
# LOOP1. DOWNWARD UPDATE SWEEP.
# Update R (cLSTM) units starting from the top
print("\nstarting Downward Sweep (LOOP1)\n")
cell_output = None
for l in reversed(range(self.nb_layers)): # reversed() starts from the top layer
# Calculating inputs for R modules.
# NEW code for RBP model.
# inputs
# if l < self.nb_layers - 1: # not the top layer
if l < self.nb_layers - 1:
r_up = self.upsample.call(cell_output)
inputs = [r_tm1[l], e_tm1[l], r_up]
"""
To perform RPBcut experiment.
============================
Temorarily delete the three lines below to do an experiment to see
the effect of not using E^1 error.
Uncomment the three lines below to restore original model.
"""
#upsample_e_tm1 = self.upsample.call(e_tm1[l+1])
#print(" Layer:", l, ". Shape of upsample_e_tm1: ", upsample_e_tm1)
#inputs = [r_tm1[l], e_tm1[l], upsample_e_tm1] # recurrent, horizontal
"""
Modification to replace E^1 error with E^0 error.
Purpose is to see if E^1 error has an effect.
This experiment only works for a 2-layer network.
Delete the line below to restore to the original.
"""
#inputs = [r_tm1[l], e_tm1[l], e_tm1[l]] # recurrent, horizontal, horizontal
else:
inputs = [r_tm1[l], e_tm1[l]] # top layer. Only recurrent inputs.
print(" Layer:", l, " Shape of e_tm1[l]: ", e_tm1[l])
# The activation updates are performed by the call() method.
# Seems to append current inputs to inputs from prev time step.
print("\n Inputs for R to concat: ")
for i in range(len(inputs)):
print(" ", inputs[i])
print(" Inputs length:", len(inputs))
inputs = K.concatenate(inputs, axis=self.channel_axis) # creates a stack of images
print(" Inputs after concat: ", inputs)
# Above: current input concatentated w/ previous output
# COMPUTE GATE OUTPUTS WITHIN R MODULE
i = self.conv_layers['i'][l].call(inputs) # activations for input gate are calculated
print(" Finished i-gate")
f = self.conv_layers['f'][l].call(inputs) # forget
o = self.conv_layers['o'][l].call(inputs) # output
# Above: the gate activations have been updated
# Below: compute the output of the constant error carosel (output of + operation)
cell_state = f * c_tm1[l] + i * self.conv_layers['c'][l].call(inputs)
# Below: modulate 'cell_state' by the output gate activation
cell_output = o * self.LSTM_activation(cell_state)
print(" cell_output.shape:", cell_output.shape)
# update c and r state lists
# Inserting into front of list sorts entries according to layer
r_cell_states.insert(0, cell_state) # Insert stack of images into list 'c' at the beginning (different than c gate)
r_cell_outputs.insert(0, cell_output)
print("")
# end of top-down sweep
# FINISHED UPDATING R MODULES
print("LOOP1 is finished. Examine states created:")
# END LOOP1
print(" cell states:")
for i in range(len(r_cell_states)):
print(" ", r_cell_states[i])
print(" cell states length:", len(r_cell_states))
print(" r_cell outputs:")
for i in range(len(r_cell_outputs)):
print(" ", r_cell_outputs[i])
print(" r states length:", len(r_cell_outputs))
# LOOP2: Update FEEDFORWARD path starting from the bottom
# UPDATE E's
# New code: replace 'e_up' and 'e_down' w/ ppe and npe. See "Predictive Processing," Keller et al., Neuron, 2018.
print("\nstarting Upward Sweep (LOOP2)")
for l in range(self.nb_layers): # start from bottom layer
# New code for RBP.
print(" Layer:", l, " r_cell_outputs[l].shape: ", r_cell_outputs[l])
print(" Layer:", l-1, " r_cell_outputs[l-1].shape: ", r_cell_outputs[l-1])
ahat = self.conv_layers['ahat'][l].call(r_cell_outputs[l]) # 'ahat' is prediction
if l > 0:
# a_intermediate = self.pool.call(r_cell_outputs[l-1])
# a = self.conv_layers['a'][l-1].call(a_intermediate)
# Above: old
# Below: swapped order b/c Matin's suggestion
a_intermediate = self.conv_layers['a'][l].call(r_cell_outputs[l-1])
a = self.pool.call(a_intermediate)
print(" Layer:", l, " a.shape: ", a.shape)
if l == | |
enzyme forms to the model
self.enzyme_module_forms += enzyme_module_forms
# Context manager
context = get_context(self)
if context:
context(partial(self.enzyme_module_ligands.__isub__, ligands))
context(partial(self.enzyme_module_forms.__isub__,
enzyme_module_forms))
def remove_metabolites(self, metabolite_list, destructive=False):
    r"""Remove a list of metabolites and enzyme forms from the module.

    The species' initial conditions will also be removed from the model.

    The change is reverted upon exit when using the :class:`EnzymeModule`
    as a context.

    Notes
    -----
    Extends from :meth:`.MassModel.remove_metabolites`.

    Parameters
    ----------
    metabolite_list : list
        A list of :class:`~.MassMetabolite`\ s and
        :class:`~.EnzymeModuleForm` to remove from the
        :class:`EnzymeModule`.
    destructive : bool
        If ``False``, the species are removed from all associated
        :class:`~.EnzymeModuleReaction`\ s . If ``True``, also remove
        associated :class:`~.EnzymeModuleReaction`\ s from the
        :class:`EnzymeModule`.

    """
    metabolite_list = ensure_iterable(metabolite_list)
    # Ligands must be collected *before* the parent removal below, since
    # that removal also strips them from self.metabolites.
    ligands = [
        met for met in metabolite_list
        if not isinstance(met, EnzymeModuleForm)
        and met in self.metabolites
        and met in self.enzyme_module_ligands]
    # Let the parent class perform the actual removal.
    super(EnzymeModule, self).remove_metabolites(
        metabolite_list, destructive)
    # Drop tracked ligands from the enzyme_module_ligands DictList.
    if ligands:
        self.enzyme_module_ligands -= ligands
    # Collect the EnzymeModuleForm species currently tracked by this
    # module, ignoring anything unknown.
    forms = [
        form for form in metabolite_list
        if isinstance(form, EnzymeModuleForm)
        and form in self.enzyme_module_forms]
    if forms:
        self.enzyme_module_forms -= forms
    # Register the inverse operations so the change reverts on context exit.
    context = get_context(self)
    if context:
        context(partial(self.enzyme_module_ligands.__iadd__, ligands))
        context(partial(self.enzyme_module_forms.__iadd__, forms))
def add_reactions(self, reaction_list):
    r"""Add a list of reactions to the :class:`EnzymeModule`.

    :class:`~.MassReaction`\ s and :class:`~.EnzymeModuleReaction`\ s
    with identifiers identical to an existing reaction are ignored.

    The change is reverted upon exit when using the :class:`EnzymeModule`
    as a context.

    Notes
    -----
    Extends from :meth:`.MassModel.add_reactions`.

    Parameters
    ----------
    reaction_list : list
        A list of :class:`~.MassReaction` and
        :class:`~.EnzymeModuleReaction` to add.

    """
    reaction_list = ensure_iterable(reaction_list)
    # Let the parent class handle the actual addition first.
    super(EnzymeModule, self).add_reactions(reaction_list)
    # Collect EnzymeModuleReactions that made it into the model and are
    # not yet tracked by this module.
    new_enzyme_reactions = []
    for rxn in reaction_list:
        if not isinstance(rxn, EnzymeModuleReaction):
            continue
        if rxn in self.reactions and rxn not in self.enzyme_module_reactions:
            new_enzyme_reactions.append(rxn)
    if new_enzyme_reactions:
        self.enzyme_module_reactions += new_enzyme_reactions
    # Register the inverse operation so the change reverts on context exit.
    context = get_context(self)
    if context:
        context(partial(self.enzyme_module_reactions.__isub__,
                        new_enzyme_reactions))
def remove_reactions(self, reactions, remove_orphans=False):
    r"""Remove reactions from the :class:`EnzymeModule`.

    The change is reverted upon exit when using the :class:`EnzymeModule`
    as a context.

    Notes
    -----
    Extends from :meth:`.MassModel.remove_reactions`.

    Parameters
    ----------
    reactions : list
        A list of :class:`~.MassReaction` and
        :class:`~.EnzymeModuleReaction` to remove from the
        :class:`EnzymeModule`.
    remove_orphans : bool
        If ``True``, will also remove orphaned genes,
        :class:`~.MassMetabolite`\ s, and :class:`~.EnzymeModuleForm`
        from the :class:`EnzymeModule`.

    """
    # Ensure list is iterable.
    reactions = ensure_iterable(reactions)
    # Get the enzyme module reactions and then check whether reaction
    # exists, ignoring those that do not.
    enzyme_module_reactions = [
        r for r in reactions if isinstance(r, EnzymeModuleReaction)
        and r in self.reactions and r in self.enzyme_module_reactions]
    # Remove reactions using inherited method.
    # FIX: forward remove_orphans, which was previously accepted and
    # documented but silently dropped.
    super(EnzymeModule, self).remove_reactions(reactions, remove_orphans)
    # Remove enzyme module reactions from DictList.
    # FIX: guard on the filtered list (not the attribute) to mirror
    # add_reactions and skip a pointless subtraction of an empty list.
    if enzyme_module_reactions:
        self.enzyme_module_reactions -= enzyme_module_reactions
    # Register the inverse operation so the change reverts on context exit.
    context = get_context(self)
    if context:
        context(partial(self.enzyme_module_reactions.__iadd__,
                        enzyme_module_reactions))
def repair(self, rebuild_index=True, rebuild_relationships=True):
    """Update all indicies and pointers in the model.

    In addition to updating indicies and pointers, the
    :attr:`enzyme_module_reactions` attribute will be updated to
    ensure it contains all existing reactions involving
    :class:`~.EnzymeModuleForm`.

    Notes
    -----
    Extends from :meth:`.MassModel.repair`.

    Parameters
    ----------
    rebuild_index : bool
        If ``True``, then rebuild the indicies kept in the reactions,
        metabolites, and genes.
    rebuild_relationships: bool
        If ``True``, then reset all associations between the reactions,
        metabolites, genes, and the model, and rebuilds them.

    """
    # Repair using inherited method
    super(EnzymeModule, self).repair(rebuild_index, rebuild_relationships)
    # Refresh the enzyme_module_reactions DictList and the object
    # pointers held by this module.
    self._get_current_enzyme_module_objs("reactions", update_enzyme=True)
    self._update_object_pointers()
    # Rebuild the DictList indices of both the plain and the
    # "_categorized" enzyme-module attributes.
    if rebuild_index:
        for attr in ["enzyme_module_ligands", "enzyme_module_forms",
                     "enzyme_module_reactions"]:
            getattr(self, attr)._generate_index()
            getattr(self, attr + "_categorized")._generate_index()
    # Re-link each enzyme form's bound-object references.
    for enzyme_module_form in self.enzyme_module_forms:
        enzyme_module_form._repair_bound_obj_pointers()
def copy(self):
    r"""Create a partial "deepcopy" of the EnzymeModule.

    All of the :class:`~.MassMetabolite`\ s, :class:`~.MassReaction`\ s,
    :class:`~cobra.core.gene.Gene`\ s, :class:`~.EnzymeModuleForm`,
    :class:`~.EnzymeModuleReaction`\ s, and :class:`~.EnzymeModuleDict`\ s,
    the boundary conditions, custom rates, custom parameters, and the
    stoichiometric matrix are created anew, but in a faster fashion than
    ``deepcopy``.

    Notes
    -----
    * Overrides :meth:`.MassModel.copy` in order to exclude more items
      to not copy by ref.

    """
    # Define a new model
    new_model = self.__class__()
    # Define items that will not be copied by their references; these are
    # either rebuilt below or copied anew.
    do_not_copy_by_ref = [
        "metabolites", "reactions", "genes", "enzyme_modules", "groups",
        "_S", "enzyme_module_ligands", "enzyme_module_forms",
        "enzyme_module_reactions", "_enzyme_module_ligands_categorized",
        "_enzyme_module_forms_categorized",
        "_enzyme_module_reactions_categorized", "boundary_conditions",
        "custom_rates", "custom_parameters", "notes", "annotation"]
    # Shallow-copy every other attribute by reference.
    for attr in self.__dict__:
        if attr not in do_not_copy_by_ref:
            new_model.__dict__[attr] = self.__dict__[attr]
    new_model.notes = deepcopy(self.notes)
    new_model.annotation = deepcopy(self.annotation)
    # Copy the metabolites
    new_model.metabolites += self._copy_model_metabolites(new_model)
    # Copy the genes
    new_model.genes += self._copy_model_genes(new_model)
    # Copy the reactions and rates (including custom rates)
    new_model.reactions += self._copy_model_reactions(new_model)
    # Copy the custom rate for the reaction, re-keying by the *new*
    # reaction objects:
    if self.custom_rates:
        new_model.custom_rates.update({
            new_model.reactions.get_by_id(reaction.id): custom_rate
            for reaction, custom_rate in iteritems(self.custom_rates)})
    # Copy custom parameters
    if self.custom_parameters:
        new_model.custom_parameters.update(self.custom_parameters)
    # Copy any existing groups
    new_model.groups += self._copy_model_groups(new_model)
    # Copy any existing enzyme_modules
    new_model.enzyme_modules += self._copy_model_enzyme_modules(new_model)
    # Add the newly copied objects to their appropriate DictLists
    # in the enzyme_module_ligands, enzyme_module_forms and
    # enzyme_module_reactions attributes
    for attr in ["ligands", "forms", "reactions"]:
        new_model._get_current_enzyme_module_objs(attr, update_enzyme=True)
        # Update categorized dict attributes from the copied groups.
        attr = "enzyme_module_" + attr + "_categorized"
        new_model_categorized_attr = getattr(new_model, attr)
        new_model_categorized_attr += new_model.groups.get_by_any([
            g.id for g in getattr(self, attr)])
    # Create the new stoichiometric matrix for the model.
    new_model._S = self._mk_stoich_matrix(array_type=self._array_type,
                                          dtype=self._dtype,
                                          update_model=True)
    try:
        new_model._solver = deepcopy(self.solver)
    # Cplex has an issue with deep copies
    except Exception:
        new_model._solver = copy(self.solver)
    # Doesn't make sense to retain the context of a copied model so
    # assign a new empty context
    new_model._contexts = []
    return new_model
def merge(self, right, prefix_existing=None, inplace=True,
objective='left'):
"""Merge two models into one model with the objects from both.
The reactions, metabolites, genes, enzyme modules, boundary conditions,
custom rate expressions, rate parameters, compartments, units, notes,
and annotations from the right model are also copied to left model.
However, note that in cases where identifiers for objects are identical
or a dict item has an identical key(s), priority will be given to what
already exists in the left model.
Notes
-----
* When merging an :class:`.~EnzymeModule` into a :class:`.MassModel`,
the enzyme module is converted to an :class:`.~EnzymeModuleDict` and
stored in a :class:`~cobra.core.dictlist.DictList` accessible via the
:attr:`enzyme_modules` attribute. If an :class:`.~EnzymeModuleDict`
already exists in the model, it will be replaced.
* If an :class:`EnzymeModule` already exists in the model, it will
be replaced.
* When merging an :class:`EnzymeModule` with another
:class:`EnzymeModule`, a new :class:`EnzymeModule` will be returned,
where the EnzymeModule is a copy of the 'left' model (``self``)
with the ``'right'`` model is contained within.
* Overrides :meth:`.MassModel.merge`.
Parameters
----------
right : MassModel
The model to merge into the left model. If a :class:`.MassModel`
then the first model refers to the ``right`` model and the second
model refers to the ``left`` model. Otherwise the first model
refers to the ``left`` model and the second model refers to the
``right`` model.
prefix_existing : str
If provided, the string is used to prefix the reaction identifier
of a reaction in the second model if that reaction already exists
within the first model. Will also apply prefix to identifiers
of enzyme modules in the second model.
inplace : bool
If ``True`` then add reactions from second model directly to the
first model. Otherwise, create a new model leaving the first model
untouched. When done within the model as context, changes to the
models are reverted upon exit.
objective : str
One of ``"left"``, ``"right"`` or ``"sum"`` for setting the
objective of the resulting model to that of the corresponding
model or the sum of both. Default is ``"left"``. Note that when
merging a :class:`.MassModel` with an :class:`EnzymeModule`,
``"left"`` will refer to the :class:`.MassModel`.
Returns
-------
MassModel or EnzymeModule
A new :class:`~.MassModel` or :class:`EnzymeModule`
representing the merged model.
"""
if not isinstance(right, EnzymeModule):
# Always merge the EnzymeModule into the MassModel
return right.merge(self, prefix_existing, inplace, objective)
| |
import time
import inspect
import uuid
import atexit
import asyncio
import warnings
import requests
from flask import current_app, request, jsonify, abort
from nacl.exceptions import BadSignatureError
from nacl.signing import VerifyKey
from flask_discord_interactions.models.autocomplete import AutocompleteResult
try:
import aiohttp
except ImportError:
aiohttp = None
from flask_discord_interactions.command import Command, SlashCommandGroup
from flask_discord_interactions.context import Context, ApplicationCommandType
from flask_discord_interactions.models import Message, Modal, ResponseType
class InteractionType:
    # Interaction type constants, matching the "type" field of incoming
    # interaction payloads as defined by the Discord Interactions API.
    PING = 1  # URL-verification ping from Discord
    APPLICATION_COMMAND = 2  # slash/user/message command invocation
    MESSAGE_COMPONENT = 3  # component (e.g. button/select) interaction
    APPLICATION_COMMAND_AUTOCOMPLETE = 4  # autocomplete option request
    MODAL_SUBMIT = 5  # modal form submission
class DiscordInteractionsBlueprint:
    """
    Represents a collection of :class:`ApplicationCommand` s.

    Useful for splitting a bot across multiple files.
    """

    def __init__(self):
        # command name -> Command or SlashCommandGroup
        self.discord_commands = {}
        # message-component custom ID -> handler callable
        self.custom_id_handlers = {}
        # command name -> autocomplete handler callable
        self.autocomplete_handlers = {}

    def add_command(
        self,
        command,
        name=None,
        description=None,
        options=None,
        annotations=None,
        type=ApplicationCommandType.CHAT_INPUT,
        default_permission=None,
        default_member_permissions=None,
        dm_permission=None,
        permissions=None,
        name_localizations=None,
        description_localizations=None,
    ):
        """
        Create and add a new :class:`ApplicationCommand`.

        Parameters
        ----------
        command
            Function to execute when the command is run.
        name
            The name of the command, as displayed in the Discord client.
        name_localizations
            A dictionary of localizations for the name of the command.
        description
            The description of the command.
        description_localizations
            A dictionary of localizations for the description of the command.
        options
            A list of options for the command, overriding the function's
            keyword arguments.
        annotations
            If ``options`` is not provided, descriptions for each of the
            options defined in the function's keyword arguments.
        type
            The ``ApplicationCommandType`` of the command.
        default_permission
            Deprecated as of v1.5! Whether the command is enabled by default.
        default_member_permissions
            A permission integer defining the required permissions a user
            must have to run the command.
        dm_permission
            Indicates whether the command can be used in DMs.
        permissions
            List of permission overwrites.

        Returns
        -------
        Command
            The newly created command object.
        """
        command = Command(
            command,
            name,
            description,
            options,
            annotations,
            type,
            default_permission,
            default_member_permissions,
            dm_permission,
            permissions,
            name_localizations,
            description_localizations,
            self,
        )
        # Register the command under its resolved name.
        self.discord_commands[command.name] = command
        return command

    def add_slash_command(self, *args, **kwargs):
        """
        Deprecated! As of v1.1.0, ``add_slash_command`` has been renamed to
        :meth:`add_command`, as it can now add User and Message commands.
        """
        warnings.warn(
            "Deprecated! As of v1.1.0, add_slash_command has been renamed to "
            "add_command, as it can now add User and Message commands.",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.add_command(*args, **kwargs)

    def command(
        self,
        name=None,
        description=None,
        options=None,
        annotations=None,
        type=ApplicationCommandType.CHAT_INPUT,
        default_permission=None,
        default_member_permissions=None,
        dm_permission=None,
        permissions=None,
        name_localizations=None,
        description_localizations=None,
    ):
        """
        Decorator to create a new :class:`Command`.

        Parameters
        ----------
        name
            The name of the command, as displayed in the Discord client.
        name_localizations
            A dictionary of localizations for the name of the command.
        description
            The description of the command.
        description_localizations
            A dictionary of localizations for the description of the command.
        options
            A list of options for the command, overriding the function's
            keyword arguments.
        annotations
            If ``options`` is not provided, descriptions for each of the
            options defined in the function's keyword arguments.
        type
            The ``ApplicationCommandType`` of the command.
        default_permission
            Deprecated as of v1.5! Whether the command is enabled by default.
        default_member_permissions
            A permission integer defining the required permissions a user
            must have to run the command.
        dm_permission
            Indicates whether the command can be used in DMs.
        permissions
            List of permission overwrites.
        """

        def decorator(func):
            nonlocal name, description, type, options
            # The decorated function is replaced by the Command object.
            command = self.add_command(
                func,
                name,
                description,
                options,
                annotations,
                type,
                default_permission,
                default_member_permissions,
                dm_permission,
                permissions,
                name_localizations,
                description_localizations,
            )
            return command

        return decorator

    def command_group(
        self,
        name,
        description="No description",
        is_async=False,
        default_permission=None,
        default_member_permissions=None,
        dm_permission=None,
        permissions=None,
        name_localizations=None,
        description_localizations=None,
    ):
        """
        Create a new :class:`SlashCommandGroup`
        (which can contain multiple subcommands)

        Parameters
        ----------
        name
            The name of the command group, as displayed in the Discord client.
        name_localizations
            A dictionary of localizations for the name of the command group.
        description
            The description of the command group.
        description_localizations
            A dictionary of localizations for the description of the command group.
        is_async
            Whether the subgroup should be considered async (if subcommands
            get an :class:`.AsyncContext` instead of a :class:`Context`.)
        default_permission
            Deprecated as of v1.5! Whether the command is enabled by default.
        default_member_permissions
            A permission integer defining the required permissions a user
            must have to run the command.
        dm_permission
            Indicates whether the command can be used in DMs.
        permissions
            List of permission overwrites. These apply to the entire group.
        """
        group = SlashCommandGroup(
            name,
            description,
            is_async,
            default_permission,
            default_member_permissions,
            dm_permission,
            permissions,
            name_localizations,
            description_localizations,
        )
        self.discord_commands[name] = group
        return group

    def add_custom_handler(self, handler, custom_id=None):
        """
        Add a handler for an incoming interaction with the specified custom ID.

        Parameters
        ----------
        handler
            The function to call to handle the incoming interaction.
        custom_id
            The custom ID to respond to. If not specified, the ID will be
            generated randomly.

        Returns
        -------
        str
            The custom ID that the handler will respond to.
        """
        if custom_id is None:
            custom_id = str(uuid.uuid4())

        self.custom_id_handlers[custom_id] = handler
        return custom_id

    def custom_handler(self, custom_id=None):
        """
        Returns a decorator to register a handler for a custom ID.

        NOTE: the decorator replaces the decorated function with its custom
        ID *string* (not the function), so the name can be referenced when
        building components.

        Parameters
        ----------
        custom_id
            The custom ID to respond to. If not specified, the ID will be
            generated randomly.
        """

        def decorator(func):
            nonlocal custom_id
            custom_id = self.add_custom_handler(func, custom_id)
            return custom_id

        return decorator

    def add_autocomplete_handler(self, handler, command_name):
        """
        Add a handler for an incoming autocomplete request.

        Parameters
        ----------
        handler
            The function to call to handle the incoming autocomplete request.
        command_name
            The name of the command to autocomplete.
        """
        self.autocomplete_handlers[command_name] = handler
class DiscordInteractions(DiscordInteractionsBlueprint):
"""
Handles registering a collection of :class:`Command` s, receiving
incoming interaction data, and sending/editing/deleting messages via
webhook.
"""
def __init__(self, app=None):
    """Create the extension, optionally binding it to an app immediately.

    Parameters
    ----------
    app
        The Flask app to initialize. May be omitted and supplied later
        via :meth:`init_app` (standard Flask extension pattern).
    """
    super().__init__()
    self.app = app
    if app is not None:
        self.init_app(app)
def init_app(self, app):
    """
    Initialize a Flask app with Discord-specific configuration and
    attributes.

    Parameters
    ----------
    app
        The Flask app to initialize.
    """
    # Fill in any Discord config keys the app has not set explicitly.
    config_defaults = {
        "DISCORD_BASE_URL": "https://discord.com/api/v9",
        "DISCORD_CLIENT_ID": "",
        "DISCORD_PUBLIC_KEY": "",
        "DISCORD_CLIENT_SECRET": "",
        "DONT_VALIDATE_SIGNATURE": False,
        "DONT_REGISTER_WITH_DISCORD": False,
    }
    for key, value in config_defaults.items():
        app.config.setdefault(key, value)

    # Expose this extension's handler tables on the app object.
    app.discord_commands = self.discord_commands
    app.custom_id_handlers = self.custom_id_handlers
    app.autocomplete_handlers = self.autocomplete_handlers

    # OAuth2 token cache; populated lazily by fetch_token().
    app.discord_token = None
def fetch_token(self, app=None):
    """
    Fetch an OAuth2 token from Discord using the ``CLIENT_ID`` and
    ``CLIENT_SECRET`` with the ``applications.commands.update`` scope. This
    can be used to register new application commands.

    Parameters
    ----------
    app
        The Flask app with the relevant config (client ID and secret).
    """
    if app is None:
        app = self.app

    if app.config["DONT_REGISTER_WITH_DISCORD"]:
        # Registration disabled: install a dummy token so auth_headers()
        # works without any network call.
        # NOTE(review): "<PASSWORD>" looks like a redacted placeholder
        # value — confirm against upstream.
        app.discord_token = {
            "token_type": "Bearer",
            "scope": "applications.commands.update applications.commands.permissions.update",
            "expires_in": 604800,
            "access_token": "<PASSWORD>",
        }
        app.discord_token["expires_on"] = (
            time.time() + app.discord_token["expires_in"] / 2
        )
        return
    response = requests.post(
        app.config["DISCORD_BASE_URL"] + "/oauth2/token",
        data={
            "grant_type": "client_credentials",
            "scope": "applications.commands.update applications.commands.permissions.update",
        },
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        auth=(app.config["DISCORD_CLIENT_ID"], app.config["DISCORD_CLIENT_SECRET"]),
    )

    response.raise_for_status()
    app.discord_token = response.json()
    # Treat the token as expiring at half its advertised lifetime, to
    # refresh well before it actually expires.
    app.discord_token["expires_on"] = (
        time.time() + app.discord_token["expires_in"] / 2
    )
def auth_headers(self, app):
"""
Get the Authorization header required for HTTP requests to the
Discord API.
Parameters
----------
app
The Flask app with the relevant access token.
"""
if app.discord_token is None or time.time() > app.discord_token["expires_on"]:
self.fetch_token(app)
return {"Authorization": f"Bearer {app.discord_token['access_token']}"}
    def update_commands(self, app=None, guild_id=None):
        """
        Update the list of commands registered with Discord.
        This method will overwrite all existing commands.
        Make sure you aren't calling this every time a new worker starts! You
        will run into rate-limiting issues if multiple workers attempt to
        register commands simultaneously. Read :ref:`workers` for more
        info.

        Parameters
        ----------
        app
            The Flask app with the relevant Discord access token.
        guild_id
            The ID of the Discord guild to register commands to. If omitted,
            the commands are registered globally.
        """
        if app is None:
            app = self.app
        # Choose the guild-scoped or application-global commands endpoint.
        if guild_id:
            url = (
                f"{app.config['DISCORD_BASE_URL']}/applications/"
                f"{app.config['DISCORD_CLIENT_ID']}/"
                f"guilds/{guild_id}/commands"
            )
        else:
            url = (
                f"{app.config['DISCORD_BASE_URL']}/applications/"
                f"{app.config['DISCORD_CLIENT_ID']}/commands"
            )
        overwrite_data = [command.dump() for command in app.discord_commands.values()]
        if not app.config["DONT_REGISTER_WITH_DISCORD"]:
            # Bulk overwrite: PUT replaces the entire registered command list.
            response = requests.put(
                url, json=overwrite_data, headers=self.auth_headers(app)
            )
            try:
                response.raise_for_status()
            except requests.exceptions.HTTPError:
                raise ValueError(
                    f"Unable to register commands:"
                    f"{response.status_code} {response.text}"
                )
            self.throttle(response)
            # Record the IDs Discord assigned, keyed by command name.
            for command in response.json():
                if command["name"] in app.discord_commands:
                    app.discord_commands[command["name"]].id = command["id"]
        else:
            # Offline/testing mode: use each command's name as its ID.
            for command in app.discord_commands.values():
                command.id = command.name
        if guild_id:
            # Guild commands may carry per-command permission overwrites.
            for command in app.discord_commands.values():
                if (
                    not app.config["DONT_REGISTER_WITH_DISCORD"]
                    and command.permissions is not None
                ):
                    response = requests.put(
                        url + "/" + command.id + "/permissions",
                        json={"permissions": command.dump_permissions()},
                        headers=self.auth_headers(app),
                    )
                    try:
                        response.raise_for_status()
                    except requests.exceptions.HTTPError:
                        raise ValueError(
                            f"Unable to register permissions for {command.id}:"
                            f"{response.status_code} {response.text}"
                        )
                    self.throttle(response)
def update_slash_commands(self, *args, **kwargs):
"""
Deprecated! As of v1.1.0, ``update_slash_commands`` has been renamed to
``update_commands``, as it updates User and Message commands as well.
"""
warnings.warn(
"Deprecated! As of v1.1.0, update_slash_commands has been renamed "
"to update_commands, as it updates User and Message commands too.",
DeprecationWarning,
stacklevel=2,
)
return self.update_commands(*args, **kwargs)
def throttle(self, response):
"""
Throttle the number of | |
j += 1
while 1:
name = op4names[j]
if name == "loop_end" or name == "se_start":
# go on to next se or to residual
break
if name not in nas:
nas[name] = {}
if se == 0 and name == "lambda":
# count number of rigid body modes
nrb = sum(op4vars[j] < 0.005)[0]
nas["nrb"] = nrb
nas["lambda"][0] = abs(op4vars[j].ravel())
elif name == "lambda":
nas[name][se] = op4vars[j].ravel()
elif name == "rfmodes":
nas[name][se] = np.nonzero(op4vars[j])[0]
else:
nas[name][se] = op4vars[j]
j += 1
if name == "loop_end":
j += 1
break
while j < len(op4vars):
nas[op4names[j]] = op4vars[j]
j += 1
return nas
def nastran_dr_descriptions():
"""
Get dictionary of descriptions for Nastran data recovery items.
Normally called by :func:`procdrm12`.
Returns
-------
desc : dictionary
Has keys: 'acce', 'spcf', 'force', 'stress':
.. code-block:: none
desc['acce'] : numpy string array
['T1', 'T2', 'T3', 'R1', 'R2', 'R3']
desc['spcf'] : numpy string array
['Fx', 'Fy', 'Fz', 'Mx', 'My', 'Mz']
desc['force'] : dict
Dictionary with element numbers as keys to numpy
string arrays.
desc['stress'] : dict
Dictionary with element numbers as keys to numpy
string arrays.
Notes
-----
The force and stress dictionaries are indexed by the element
id. For example, for the CBAR (which is element 34)::
desc['force'][34] = ['CBAR Bending Moment 1 - End A',
'CBAR Bending Moment 2 - End A',
...]
desc['stress'][34] = ['CBAR Bending Stress 1 - End A',
'CBAR Bending Stress 2 - End A',
...]
"""
# Acceleration, Velocity, Displacement Recovery Items:
accedesc = ["T1", "T2", "T3", "R1", "R2", "R3"]
spcfdesc = ["Fx", "Fy", "Fz", "Mx", "My", "Mz"]
stress = {}
force = {}
# CBAR Recovery Items (element 34): Item code
stress[34] = [
"CBAR Bending Stress 1 - End A", # 2
"CBAR Bending Stress 2 - End A", # 3
"CBAR Bending Stress 3 - End A", # 4
"CBAR Bending Stress 4 - End A", # 5
"CBAR Axial Stress", # 6
"CBAR Max. Bend. Stress -End A", # 7
"CBAR Min. Bend. Stress -End A", # 8
"CBAR M.S. Tension", # 9
"CBAR Bending Stress 1 - End B", # 10
"CBAR Bending Stress 2 - End B", # 11
"CBAR Bending Stress 3 - End B", # 12
"CBAR Bending Stress 4 - End B", # 13
"CBAR Max. Bend. Stress -End B", # 14
"CBAR Min. Bend. Stress -End B", # 15
"CBAR M.S. Compression",
] # 16
force[34] = [
"CBAR Bending Moment 1 - End A", # 2
"CBAR Bending Moment 2 - End A", # 3
"CBAR Bending Moment 1 - End B", # 4
"CBAR Bending Moment 2 - End B", # 5
"CBAR Shear 1", # 6
"CBAR Shear 2", # 7
"CBAR Axial Force", # 8
"CBAR Torque",
] # 9
# CBEAM Recovery Items (element 2): Item code
stress2_main = [
"CBEAM External grid pt. ID", # 2
"CBEAM Station dist./length", # 3
"CBEAM Long. Stress at Pt. C", # 4
"CBEAM Long. Stress at Pt. D", # 5
"CBEAM Long. Stress at Pt. E", # 6
"CBEAM Long. Stress at Pt. F", # 7
"CBEAM Maximum stress", # 8
"CBEAM Minimum stress", # 9
"CBEAM M.S. Tension", # 10
"CBEAM M.S. Compression",
] # 11
# expand and append station id for all 11 stations:
stress2 = [i + " End-A" for i in stress2_main]
for K in range(2, 11):
id_string = f" K={K:2}"
stress2 += [i + id_string for i in stress2_main]
stress2 += [i + " End-B" for i in stress2_main]
stress[2] = stress2
force2_main = [
"CBEAM External grid pt. ID", # 2
"CBEAM Station dist./length", # 3
"CBEAM Bending moment plane 1", # 4
"CBEAM Bending moment plane 2", # 5
"CBEAM Web shear plane 1", # 6
"CBEAM Web shear plane 2", # 7
"CBEAM Axial force", # 8
"CBEAM Total torque", # 9
"CBEAM Warping torque",
] # 10
# expand and append station id for all 11 stations:
force2 = [i + " End-A" for i in force2_main]
for K in range(2, 11):
id_string = f" K={K:2}"
force2 += [i + id_string for i in force2_main]
force2 += [i + " End-B" for i in force2_main]
force[2] = force2
# CBUSH Recovery Items (element 102): Item code
stress[102] = [
"CBUSH Translation-x", # 2
"CBUSH Translation-y", # 3
"CBUSH Translation-z", # 4
"CBUSH Rotation-x", # 5
"CBUSH Rotation-y", # 6
"CBUSH Rotation-z",
] # 7
force[102] = [
"CBUSH Force-x", # 2
"CBUSH Force-y", # 3
"CBUSH Force-z", # 4
"CBUSH Moment-x", # 5
"CBUSH Moment-y", # 6
"CBUSH Moment-z",
] # 7
# CROD Recovery Items (element 10=CONROD, 1=CROD):
stress1 = [
"Axial Stress", # 2
"M.S. Axial Stress", # 3
"Torsional Stress", # 4
"M.S. Torsional Stress",
] # 5
force1 = ["Axial Force", "Torque"] # 2 # 3
stress[1] = ["CROD " + i + " " for i in stress1]
force[1] = ["CROD " + i + " " for i in force1]
stress[10] = ["CONROD " + i for i in stress1]
force[10] = ["CONROD " + i for i in force1]
# CELAS1, 2, 3 Recovery Items (elements 11, 12, 13):
stress[11] = "CELAS1 Stress"
stress[12] = "CELAS2 Stress"
stress[13] = "CELAS3 Stress"
force[11] = "CELAS1 Force"
force[12] = "CELAS2 Force"
force[13] = "CELAS3 Force"
# CQUAD4 Recovery Items (element 33):
stress[33] = [
"CQUAD4 Fiber distance Z1", # 2
"CQUAD4 Z1 Normal x", # 3
"CQUAD4 Z1 Normal y", # 4
"CQUAD4 Z1 Shear xy", # 5
"CQUAD4 Z1 Shear angle", # 6
"CQUAD4 Z1 Major principal", # 7
"CQUAD4 Z1 Minor principal", # 8
"CQUAD4 Z1 von Mises or max shear", # 9
"CQUAD4 Fiber distance Z2", # 10
"CQUAD4 Z2 Normal x", # 11
"CQUAD4 Z2 Normal y", # 12
"CQUAD4 Z2 Shear xy", # 13
"CQUAD4 Z2 Shear angle", # 14
"CQUAD4 Z2 Major principal", # 15
"CQUAD4 Z2 Minor principal", # 16
"CQUAD4 Z2 von Mises or max shear",
] # 17
force[33] = [
"CQUAD4 Membrane force x", # 2
"CQUAD4 Membrane force y", # 3
"CQUAD4 Membrane force xy", # 4
"CQUAD4 Bending moment x", # 5
"CQUAD4 Bending moment y", # 6
"CQUAD4 Bending moment xy", # 7
"CQUAD4 Shear x", # 8
"CQUAD4 Shear y",
] # 9
# CQUADR Recovery Items (element 82, and CQUAD8-64):
stress[82] = [
"CQUADR EID ", # 1
"CQUADR CEN/ ", # 2
"CQUADR 4 ", # 3
"CQUADR Fiber distance Z1 ", # 4
"CQUADR Z1 Normal x ", # 5
"CQUADR Z1 Normal y ", # 6
"CQUADR Z1 Shear xy ", # 7
"CQUADR Z1 Shear angle ", # 8
"CQUADR Z1 Major principal ", # 9
"CQUADR Z1 Minor principal ", # 10
"CQUADR Z1 von Mises or max shear ", # 11
"CQUADR Fiber distance Z2 ", # 12
"CQUADR Z2 Normal x ", # 13
"CQUADR Z2 Normal y ", # 14
"CQUADR Z2 Shear xy ", # 15
"CQUADR Z2 Shear angle ", # 16
"CQUADR Z2 Major principal ", # 17
"CQUADR Z2 Minor principal ", # 18
"CQUADR Z2 von Mises or max shear ", # 19
"CQUADR Grid 1 ", # 20
"CQUADR Fiber distance Z1 c1", # 21
"CQUADR Z1 Normal x c1", # 22
"CQUADR Z1 Normal y c1", # 23
"CQUADR Z1 Shear xy c1", # 24
"CQUADR Z1 Shear angle c1", # 25
"CQUADR Z1 Major principal c1", # 26
"CQUADR Z1 Minor principal c1", # 27
"CQUADR Z1 von Mises or max shear c1", # 28
"CQUADR Fiber distance Z2 c1", # 29
"CQUADR Z2 Normal x c1", # 30
"CQUADR Z2 Normal y c1", # 31
"CQUADR Z2 Shear xy c1", # 32
"CQUADR Z2 Shear angle c1", # 33
"CQUADR Z2 | |
def OnParentBackgroundImageChanged(self,*args):
"""
OnParentBackgroundImageChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BackgroundImageChanged event when the
System.Windows.Forms.Control.BackgroundImage property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentBindingContextChanged(self,*args):
"""
OnParentBindingContextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.BindingContextChanged event when the
System.Windows.Forms.Control.BindingContext property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentChanged(self,*args):
"""
OnParentChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ParentChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentCursorChanged(self,*args):
"""
OnParentCursorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.CursorChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentEnabledChanged(self,*args):
"""
OnParentEnabledChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.EnabledChanged event when the
System.Windows.Forms.Control.Enabled property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentFontChanged(self,*args):
"""
OnParentFontChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.FontChanged event when the
System.Windows.Forms.Control.Font property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentForeColorChanged(self,*args):
"""
OnParentForeColorChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.ForeColorChanged event when the
System.Windows.Forms.Control.ForeColor property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentRightToLeftChanged(self,*args):
"""
OnParentRightToLeftChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RightToLeftChanged event when the
System.Windows.Forms.Control.RightToLeft property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnParentVisibleChanged(self,*args):
"""
OnParentVisibleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.VisibleChanged event when the
System.Windows.Forms.Control.Visible property value of the control's container changes.
e: An System.EventArgs that contains the event data.
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: Control,e: PreviewKeyDownEventArgs)
Raises the System.Windows.Forms.Control.PreviewKeyDown event.
e: A System.Windows.Forms.PreviewKeyDownEventArgs that contains the event data.
"""
pass
def OnPrint(self,*args):
"""
OnPrint(self: Control,e: PaintEventArgs)
Raises the System.Windows.Forms.Control.Paint event.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: Control,qcdevent: QueryContinueDragEventArgs)
Raises the System.Windows.Forms.Control.QueryContinueDrag event.
qcdevent: A System.Windows.Forms.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnRegionChanged(self,*args):
"""
OnRegionChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.RegionChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnResize(self,*args):
"""
OnResize(self: Panel,eventargs: EventArgs)
Fires the event indicating that the panel has been resized. Inheriting controls should use this
in favor of actually listening to the event,but should still call base.onResize to ensure that
the event is fired for external listeners.
eventargs: An System.EventArgs that contains the event data.
"""
pass
def OnRightToLeftChanged(self,*args):
"""
OnRightToLeftChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def OnScroll(self,*args):
"""
OnScroll(self: ScrollableControl,se: ScrollEventArgs)
Raises the System.Windows.Forms.ScrollableControl.Scroll event.
se: A System.Windows.Forms.ScrollEventArgs that contains the event data.
"""
pass
def OnSizeChanged(self,*args):
"""
OnSizeChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SizeChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.StyleChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnSystemColorsChanged(self,*args):
"""
OnSystemColorsChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.SystemColorsChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabIndexChanged(self,*args):
"""
OnTabIndexChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabIndexChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTabStopChanged(self,*args):
"""
OnTabStopChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TabStopChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnTextChanged(self,*args):
"""
OnTextChanged(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.TextChanged event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidated(self,*args):
"""
OnValidated(self: Control,e: EventArgs)
Raises the System.Windows.Forms.Control.Validated event.
e: An System.EventArgs that contains the event data.
"""
pass
def OnValidating(self,*args):
"""
OnValidating(self: Control,e: CancelEventArgs)
Raises the System.Windows.Forms.Control.Validating event.
e: A System.ComponentModel.CancelEventArgs that contains the event data.
"""
pass
def OnVisibleChanged(self,*args):
"""
OnVisibleChanged(self: ScrollableControl,e: EventArgs)
e: An System.EventArgs that contains the event data.
"""
pass
def ProcessCmdKey(self,*args):
"""
ProcessCmdKey(self: Control,msg: Message,keyData: Keys) -> (bool,Message)
Processes a command key.
msg: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the character was processed by the control; otherwise,false.
"""
pass
def ProcessDialogChar(self,*args):
"""
ProcessDialogChar(self: Control,charCode: Char) -> bool
Processes a dialog character.
charCode: The character to process.
Returns: true if the character was processed by the control; otherwise,false.
"""
pass
def ProcessDialogKey(self,*args):
"""
ProcessDialogKey(self: Control,keyData: Keys) -> bool
Processes a dialog key.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the key was processed by the control; otherwise,false.
"""
pass
def ProcessKeyEventArgs(self,*args):
"""
ProcessKeyEventArgs(self: Control,m: Message) -> (bool,Message)
Processes a key message and generates the appropriate control events.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyMessage(self,*args):
"""
ProcessKeyMessage(self: Control,m: Message) -> (bool,Message)
Processes a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessKeyPreview(self,*args):
"""
ProcessKeyPreview(self: Control,m: Message) -> (bool,Message)
Previews a keyboard message.
m: A System.Windows.Forms.Message,passed by reference,that represents the window message to
process.
Returns: true if the message was processed by the control; otherwise,false.
"""
pass
def ProcessMnemonic(self,*args):
"""
ProcessMnemonic(self: Control,charCode: Char) -> bool
Processes a mnemonic character.
charCode: The character to process.
Returns: true if the character was processed as a mnemonic by the control; otherwise,false.
"""
pass
def RaiseDragEvent(self,*args):
"""
RaiseDragEvent(self: Control,key: object,e: DragEventArgs)
Raises the appropriate drag event.
key: The event to raise.
e: A System.Windows.Forms.DragEventArgs that contains the event data.
"""
pass
def RaiseKeyEvent(self,*args):
"""
RaiseKeyEvent(self: Control,key: object,e: KeyEventArgs)
Raises the appropriate key event.
key: The event to raise.
e: A System.Windows.Forms.KeyEventArgs that contains the event data.
"""
pass
def RaiseMouseEvent(self,*args):
"""
RaiseMouseEvent(self: Control,key: object,e: MouseEventArgs)
Raises the appropriate mouse event.
key: The event to raise.
e: A System.Windows.Forms.MouseEventArgs that contains the event data.
"""
pass
def RaisePaintEvent(self,*args):
"""
RaisePaintEvent(self: Control,key: object,e: PaintEventArgs)
Raises the appropriate paint event.
key: The event to raise.
e: A System.Windows.Forms.PaintEventArgs that contains the event data.
"""
pass
def RecreateHandle(self,*args):
"""
RecreateHandle(self: Control)
Forces the re-creation of the handle for the control.
"""
pass
def RescaleConstantsForDpi(self,*args):
""" RescaleConstantsForDpi(self: Control,deviceDpiOld: int,deviceDpiNew: int) """
pass
def ResetMouseEventArgs(self,*args):
"""
ResetMouseEventArgs(self: Control)
Resets the control to handle the System.Windows.Forms.Control.MouseLeave event.
"""
pass
def RtlTranslateAlignment(self,*args):
"""
RtlTranslateAlignment(self: Control,align: ContentAlignment) -> ContentAlignment
Converts the specified System.Drawing.ContentAlignment to the appropriate
System.Drawing.ContentAlignment to support right-to-left text.
align: One of the System.Drawing.ContentAlignment values.
Returns: One of the System.Drawing.ContentAlignment values.
RtlTranslateAlignment(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
System.Windows.Forms.LeftRightAlignment to support right-to-left text.
align: One of the System.Windows.Forms.LeftRightAlignment values.
Returns: One of the System.Windows.Forms.LeftRightAlignment values.
RtlTranslateAlignment(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
System.Windows.Forms.HorizontalAlignment to support right-to-left text.
align: One of the System.Windows.Forms.HorizontalAlignment values.
Returns: One of the System.Windows.Forms.HorizontalAlignment values.
"""
pass
def RtlTranslateContent(self,*args):
"""
RtlTranslateContent(self: Control,align: ContentAlignment) -> ContentAlignment
Converts the specified System.Drawing.ContentAlignment to the appropriate
System.Drawing.ContentAlignment to support right-to-left text.
align: One of the System.Drawing.ContentAlignment values.
Returns: One of the System.Drawing.ContentAlignment values.
"""
pass
def RtlTranslateHorizontal(self,*args):
"""
RtlTranslateHorizontal(self: Control,align: HorizontalAlignment) -> HorizontalAlignment
Converts the specified System.Windows.Forms.HorizontalAlignment to the appropriate
System.Windows.Forms.HorizontalAlignment to support right-to-left text.
align: One of the System.Windows.Forms.HorizontalAlignment values.
Returns: One of the System.Windows.Forms.HorizontalAlignment values.
"""
pass
def RtlTranslateLeftRight(self,*args):
"""
RtlTranslateLeftRight(self: Control,align: LeftRightAlignment) -> LeftRightAlignment
Converts the specified System.Windows.Forms.LeftRightAlignment to the appropriate
System.Windows.Forms.LeftRightAlignment to support right-to-left text.
align: One of the | |
is a given column a primary key? foreign key? ...
Serves also as a pythonic 'property descriptor': when instance values are read and a given property is not present, returns None instead of exception"""
column_class = None
default = None
field_template = "%(column)s%(nullable)s%(default)s%(comment)s"
_field_counter = 0
_order = 0
def __get__(self, instance, instance_type=None):
if instance:
return instance.__dict__.get(self.name, None)
return self
def __init__(self, null=True, db_index=False, unique=False, verbose_name=None,
help_text=None, db_column=None, default=None, choices=None, doc=None, *args, **kwargs):
self.null = null
self.db_index = db_index
self.unique = unique
self.verbose_name = verbose_name
self.help_text = help_text
self.db_column = db_column
self.default = default
self.choices = choices
self.doc = doc # comment for the column, to be inserted into DB
self.attributes = kwargs
Field._field_counter += 1
self._order = Field._field_counter
def bind(self, klass, name):
"Record inside this Field to which Model class and property it is bounded. Needed for functionality of 'property descriptor' and for automatic inference of column names"
self.name = name
self.model = klass
self.verbose_name = self.verbose_name or re.sub('_+', ' ', name).title()
self.db_column = self.db_column or self.name
self.column = self.get_column()
# def add_to_class(self, klass, name):
# self.bind(klass, name)
# setattr(klass, name, self)
# #setattr(klass, name, FieldDescriptor(self))
def get_column(self):
return self.column_class(**self.attributes)
def render_field_template(self, quote_char=''):
params = {
'column': self.column.render(self.model._meta.database),
'nullable': ternary(self.null, '', ' NOT NULL'),
'qc': quote_char,
'default': ' DEFAULT %s' % quote(self.default) if self.default != None else '',
'comment': ' COMMENT %s' % quoteStr(self.doc) if self.doc else '',
}
params.update(self.column.attributes)
return self.field_template % params
def db_value(self, value):
if value is None:
return None
return self.column.db_value(value)
def python_value(self, value):
return self.column.python_value(value)
def lookup_value(self, lookup_type, value):
return self.db_value(value)
    def class_prepared(self):
        # Hook invoked after the owning Model class has been fully built.
        # Base implementation is a no-op; ForeignKeyField overrides it to
        # resolve the target table once all models exist.
        pass
    # Operator overloads delegate to the module-level qdict helper; the string
    # argument names the lookup associated with each operator (e.g. <= -> 'lte').
    __eq__ = qdict('eq')
    __ne__ = qdict('ne')
    __lt__ = qdict('lt')
    __le__ = qdict('lte')
    __gt__ = qdict('gt')
    __ge__ = qdict('gte')
    __lshift__ = qdict('in')        # field << sequence
    __rshift__ = qdict('isnull')    # field >> value
    __mul__ = qdict('contains')
    __pow__ = qdict('icontains')
    __xor__ = qdict('istartswith')
    def __neg__(self):
        # Unary minus marks a descending ordering term: (model, name, 'DESC').
        return (self.model, self.name, 'DESC')
class CharField(Field):
    """String field rendered as a VARCHAR column; ``len`` caps the length."""
    column_class = VarCharColumn

    def __init__(self, len=255, *args, **kwargs):
        # NOTE: the parameter is deliberately named `len` (shadowing the
        # builtin) because it feeds the column attribute of the same name;
        # renaming it would break keyword callers.
        kwargs['len'] = len
        super(CharField, self).__init__(*args, **kwargs)
class TextField(Field):
    # Arbitrary-length text column (TEXT).
    column_class = TextColumn
class DateTimeField(Field):
    # Date-and-time column backed by DateTimeColumn.
    column_class = DateTimeColumn
class TimestampField(Field):
    # Timestamp column; the CURRENT_TIMESTAMP sentinel is expanded to the
    # current client time at write time (see TODO below).
    column_class = TimestampColumn
    def db_value(self, value):
        if value == CURRENT_TIMESTAMP:
            return time.strftime('%Y-%m-%d %H:%M:%S') # TODO: must be turned into server-side default, not client-side (may lead to data inconsistency)
        return Field.db_value(self, value)
class DateField(Field):
    # Calendar-date column backed by DateColumn.
    column_class = DateColumn
class TimeField(Field):
    # Time-of-day column backed by TimeColumn.
    column_class = TimeColumn
class IntegerField(Field):
    # Plain integer column; base class for the sized/boolean variants below.
    column_class = IntegerColumn
class BigIntegerField(IntegerField):
    # Wide integer column (BIGINT).
    column_class = BigIntegerColumn
class BooleanField(IntegerField):
    # Boolean stored via an integer-derived column (BooleanColumn).
    column_class = BooleanColumn
class FloatField(Field):
    # Single-precision floating point column.
    column_class = FloatColumn
class DoubleField(Field):
    # Double-precision floating point column.
    column_class = DoubleColumn
class DecimalField(Field):
    # Fixed-precision decimal column.
    column_class = DecimalColumn
class PrimaryKeyField(IntegerField):
    """Integer primary-key field.

    The concrete column class may be overridden at construction time, and is
    upgraded to the sequence-aware column when the model declares a pk
    sequence and the database adapter supports sequences.
    """
    column_class = PrimaryKeyColumn
    field_template = "%(column)s NOT NULL PRIMARY KEY%(nextval)s%(comment)s"

    def __init__(self, column_class=None, *args, **kwargs):
        if kwargs.get('null'):
            raise ValueError('Primary keys cannot be nullable')
        if column_class:
            self.column_class = column_class
        # The template always references %(nextval)s, so ensure the key exists.
        if 'nextval' not in kwargs:
            kwargs['nextval'] = ''
        super(PrimaryKeyField, self).__init__(*args, **kwargs)

    def get_column_class(self):
        # Only auto-upgrade when the default pk column is in use; an explicit
        # column_class supplied by the caller is respected as-is.
        if self.column_class == PrimaryKeyColumn:
            # Use the sequence-aware column when the model declares a pk
            # sequence and the adapter supports sequences. Identity check
            # (`is not None`) rather than the original `!= None`.
            if self.model._meta.pk_sequence is not None and self.model._meta.database.adapter.sequence_support:
                self.column_class = PrimaryKeySequenceColumn
        return self.column_class

    def get_column(self):
        return self.get_column_class()(**self.attributes)
class ReverseForeignRelatedObject(object):
    """Descriptor installed on the *target* model of a ForeignKeyField.

    Reading it from an instance returns a query selecting every row of the
    related model whose foreign key points at that instance.
    """
    def __init__(self, related_model, name):
        self.field_name = name
        self.related_model = related_model

    def __get__(self, instance, instance_type=None):
        # Identity check instead of truthiness: a falsy model instance is
        # still a valid instance and must yield its reverse query.
        if instance is None:
            raise AttributeError('Reverse relations are only accessibly via instances of the class')
        query = {self.field_name: instance.get_pk()}
        qr = self.related_model.select().where(**query)
        return qr
class ForeignRelatedObject(object):
    """Descriptor for the forward side of a ForeignKeyField.

    Reading it lazily fetches (and caches on the instance) the related model
    object; writing accepts either a model instance or a raw primary-key value.
    """
    def __init__(self, to, field):
        self.to = to
        self.field = field
        self.field_name = self.field.name
        # Attribute holding the raw foreign-key id (e.g. "user_id").
        self.field_column = self.field.id_storage
        self.cache_name = '_cache_%s' % self.field_name
    def __get__(self, instance, instance_type=None):
        if not instance:
            # Accessed on the class: expose the Field itself.
            return self.field
        if not getattr(instance, self.cache_name, None):
            # Cache miss: look up the related row by its primary key.
            # NOTE(review): defaults to 0 when the id attribute is missing --
            # presumably produces an empty pk lookup; confirm intended.
            id = getattr(instance, self.field_column, 0)
            qr = self.to.select().where(**{self.to._meta.pk_name: id})
            try:
                setattr(instance, self.cache_name, qr.get())
            except self.to.DoesNotExist:
                # A nullable FK may legitimately point at nothing; otherwise
                # propagate the lookup failure.
                if not self.field.null:
                    raise
        return getattr(instance, self.cache_name, None)
    def __set__(self, instance, obj):
        if self.field.null and obj is None:
            # Clearing a nullable FK resets both the raw id and the cache.
            setattr(instance, self.field_column, None)
            setattr(instance, self.cache_name, None)
        else:
            if not isinstance(obj, Model):
                # Raw primary-key assignment; cache is refreshed on next read.
                setattr(instance, self.field_column, obj)
            else:
                assert isinstance(obj, self.to), "Cannot assign %s to %s, invalid type" % (obj, self.field.name)
                setattr(instance, self.field_column, obj.get_pk())
                setattr(instance, self.cache_name, obj)
class ForeignKeyField(IntegerField):
    """Foreign-key field: an integer column with a REFERENCES clause.

    Binding installs a ForeignRelatedObject descriptor on the owning model
    and a ReverseForeignRelatedObject on the target model.
    """
    field_template = '%(column)s%(nullable)s REFERENCES %(qc)s%(to_table)s%(qc)s (%(qc)s%(to_pk)s%(qc)s)%(cascade)s%(extra)s%(default)s%(comment)s'
    def __init__(self, to, null=True, related_name=None, cascade=False, extra=None, *args, **kwargs):
        self.to = to
        self._related_name = related_name
        self.cascade = cascade
        self.extra = extra
        # Template fragments for the DDL: cascade clause and free-form extra.
        kwargs.update({
            'cascade': ' ON DELETE CASCADE' if self.cascade else '',
            'extra': self.extra or '',
        })
        super(ForeignKeyField, self).__init__(null=null, *args, **kwargs)
    def bind(self, klass, name):
        # Overrides Field.bind: infers the "<name>_id" column, resolves
        # 'self'-references, and wires up both relation descriptors.
        self.name = name
        self.model = klass
        self.db_column = self.db_column or self.name + '_id'
        # id_storage is the attribute that holds the raw id; it must differ
        # from the descriptor attribute (self.name) to avoid a clash.
        if self.name == self.db_column:
            self.id_storage = self.db_column + '_id'
        else:
            self.id_storage = self.db_column
        if self.to == 'self':
            self.to = self.model
        self.verbose_name = self.verbose_name or re.sub('_', ' ', name).title()
        if self._related_name is not None:
            self.related_name = self._related_name
        else:
            self.related_name = klass._meta.db_table + '_set'
        klass._meta.rel_fields[name] = self.name
        # Forward accessor on the owning model, raw-id slot, and reverse
        # accessor on the target model.
        setattr(klass, self.name, ForeignRelatedObject(self.to, self))
        setattr(klass, self.id_storage, None)
        reverse_rel = ReverseForeignRelatedObject(klass, self.name)
        setattr(self.to, self.related_name, reverse_rel)
        self.to._meta.reverse_relations[self.related_name] = klass
    def lookup_value(self, lookup_type, value):
        if isinstance(value, Model):
            return value.get_pk()
        # NOTE(review): `value or None` maps any falsy pk (e.g. 0) to None --
        # confirm that 0 is never a legitimate primary key here.
        return value or None
    def db_value(self, value):
        if isinstance(value, Model):
            return value.get_pk()
        if self.null and value is None:
            return None
        return self.column.db_value(value)
    def get_column(self):
        # Match the referenced model's pk column type unless it is one of the
        # auto-increment pk columns, in which case a plain integer is used.
        to_pk = self.to._meta.get_field_by_name(self.to._meta.pk_name)
        to_col_class = to_pk.get_column_class()
        if to_col_class not in (PrimaryKeyColumn, PrimaryKeySequenceColumn):
            self.column_class = to_pk.get_column_class()
        return self.column_class(**self.attributes)
    def class_prepared(self):
        # unfortunately because we may not know the primary key field
        # at the time this field's add_to_class() method is called, we
        # need to update the attributes after the class has been built
        self.attributes.update({
            'to_table': self.to._meta.db_table,
            'to_pk': self.to._meta.pk_col,
        })
        self.column = self.get_column()
########################################################################################################################################################
### Model
###
class BaseModelOptions(object):
    """Per-model metadata container (exposed as ``Model._meta``): field and
    column registries, relation bookkeeping, and configurable options."""
    indexes = None
    ordering = None
    pk_sequence = None

    def __init__(self, model_class, options=None):
        # Configurable options become attributes. Guard against the
        # documented default of None, which previously crashed on .items().
        for k, v in (options or {}).items():
            self.__dict__[k] = v
        self.rel_fields = {}
        self.reverse_relations = {}
        self.fields = {}
        self.columns = {}
        self.model_class = model_class

    def prepared(self):
        """Called once _meta is fully initialized: snapshot field defaults."""
        self.defaults = {}
        for field in self.fields.values():
            if field.default is not None:
                self.defaults[field.name] = field.default

    def get_default_dict(self):
        """Return {field_name: default}, invoking any callable defaults."""
        dd = {}
        for field_name, default in self.defaults.items():
            dd[field_name] = default() if callable(default) else default
        return dd

    def get_sorted_fields(self):
        # Primary key first, then declaration order. Subscript form replaces
        # the py2-only tuple-unpacking lambda (removed by PEP 3113), so the
        # module also parses under Python 3.
        return sorted(self.fields.items(),
                      key=lambda kv: (kv[0] == self.pk_name and 1 or 2, kv[1]._order))

    def get_field_names(self):
        return [f[0] for f in self.get_sorted_fields()]

    def get_fields(self):
        return [f[1] for f in self.get_sorted_fields()]

    def get_field_by_name(self, name):
        if name in self.fields:
            return self.fields[name]
        raise AttributeError('Field named %s not found' % name)

    def get_column_names(self):
        return self.columns.keys()

    def get_column(self, field_or_col):
        """Map a field name to its db column; unknown names pass through."""
        if field_or_col in self.fields:
            return self.fields[field_or_col].db_column
        return field_or_col

    def get_related_field_by_name(self, name):
        if name in self.rel_fields:
            return self.fields[self.rel_fields[name]]

    def get_related_field_for_model(self, model, name=None):
        """This model's FK pointing at *model* (optionally matched by name)."""
        for field in self.fields.values():
            if isinstance(field, ForeignKeyField) and field.to == model:
                if name is None or name == field.name or name == field.db_column:
                    return field

    def get_reverse_related_field_for_model(self, model, name=None):
        """*model*'s FK pointing back at this model (optionally by name)."""
        for field in model._meta.fields.values():
            if isinstance(field, ForeignKeyField) and field.to == self.model_class:
                if name is None or name == field.name or name == field.db_column:
                    return field

    def get_field_for_related_name(self, model, related_name):
        for field in model._meta.fields.values():
            if isinstance(field, ForeignKeyField) and field.to == self.model_class:
                if field.related_name == related_name:
                    return field

    def rel_exists(self, model):
        return self.get_related_field_for_model(model) or \
               self.get_reverse_related_field_for_model(model)
class BaseModel(type):
inheritable_options = ['database', 'indexes', 'ordering', 'pk_sequence']
toplevel_options = [('__table__','db_table'), ('__database__','database'), ('__indexes__','indexes'), ('__ordering__','ordering'), ('__pk_sequence__','pk_sequence')]
def __new__(cls, name, bases, attrs):
cls = super(BaseModel, cls).__new__(cls, name, bases, attrs)
if not bases:
return cls
attr_dict = {}
# translate top-level options (Model.__xxx__) into Meta subclass options (Model.Meta.xxx)
for op1,op2 in cls.toplevel_options:
if hasattr(cls, op1):
attr_dict[op2] = getattr(cls, op1)
# is there explicit Meta subclass given? then append its options to top-level ones
meta = attrs.pop('Meta', None)
if meta:
attr_dict.update(meta.__dict__)
for b in bases:
base_meta = getattr(b, '_meta', None)
if not base_meta:
continue
for (k, v) in base_meta.__dict__.items():
if k in cls.inheritable_options and k not in attr_dict:
attr_dict[k] = v
elif k == 'fields':
for field_name, field_obj in v.items():
if isinstance(field_obj, PrimaryKeyField):
continue
if field_name in cls.__dict__:
continue
field_copy = copy.deepcopy(field_obj)
setattr(cls, field_name, field_copy)
_meta = BaseModelOptions(cls, attr_dict)
if not hasattr(_meta, 'db_table'):
_meta.db_table = re.sub('[^\w]+', '_', cls.__name__.lower())
if hasattr(_meta,'database') and _meta.database and _meta.db_table in _meta.database.adapter.reserved_tables:
warnings.warn('Table for %s ("%s") is reserved, please override using Meta.db_table' % (
cls, _meta.db_table,
))
setattr(cls, '_meta', _meta)
_meta.pk_name = None
for name, attr in cls.__dict__.items():
if isinstance(attr, Field):
attr.bind(cls, name)
_meta.fields[name] = attr
_meta.columns[attr.db_column] = attr
if isinstance(attr, PrimaryKeyField):
_meta.pk_name = | |
<filename>chainer/training/trainer.py
import collections
import os
import six
from chainer import reporter as reporter_module
from chainer.training import extension as extension_module
from chainer.training import trigger as trigger_module
class _ExtensionEntry(object):
def __init__(self, extension, priority, trigger, invoke_before_training):
self.extension = extension
self.trigger = trigger
self.invoke_before_training = invoke_before_training
self.priority = priority
class Trainer(object):
"""The standard training loop in Chainer.
Trainer is an implementation of a training loop. Users can invoke the
training by calling the :meth:`run` method.
Each iteration of the training loop proceeds as follows.
- Update of the parameters. It includes the mini-batch loading, forward
and backward computations, and an execution of the update formula.
These are all done by the update object held by the trainer.
- Invocation of trainer extensions in the descending order of their
priorities. A trigger object is attached to each extension, and it
decides at each iteration whether the extension should be executed.
Trigger objects are callable objects that take the trainer object as the
argument and return a boolean value indicating whether the extension
should be called or not.
Extensions are callable objects that take the trainer object as the
argument. There are two ways to define custom extensions: inheriting the
:class:`Extension` class, and decorating functions by
:func:`make_extension`. See :class:`Extension` for more details on custom
extensions.
Users can register extensions to the trainer by calling the :meth:`extend`
method, where some configurations can be added.
- Trigger object, which is also explained above. In most cases,
:class:`IntervalTrigger` is used, in which case users can simply specify
a tuple of the interval length and its unit, like
``(1000, 'iteration')`` or ``(1, 'epoch')``.
- The order of execution of extensions is determined by their priorities.
Extensions of higher priorities are invoked earlier. There are three
standard values for the priorities:
- ``PRIORITY_WRITER``. This is the priority for extensions that write
some records to the :attr:`observation` dictionary. It includes cases
that the extension directly adds values to the observation dictionary,
or the extension uses the :func:`chainer.report` function to report
values to the observation dictionary.
- ``PRIORITY_EDITOR``. This is the priority for extensions that edit the
:attr:`observation` dictionary based on already reported values.
- ``PRIORITY_READER``. This is the priority for extensions that only read
records from the :attr:`observation` dictionary. This is also suitable
for extensions that do not use the :attr:`observation` dictionary at
all.
- Extensions with ``invoke_before_training`` flag on are also invoked at
the beginning of the training loop. Extensions that update the training
status (e.g., changing learning rates) should have this flag to be
``True`` to ensure that resume of the training loop correctly recovers
the training status.
The current state of the trainer object and objects handled by the trainer
can be serialized through the standard serialization protocol of Chainer.
It enables us to easily suspend and resume the training loop.
.. note::
The serialization does not recover everything of the training loop. It
only recovers the states which change over the training (e.g.
parameters, optimizer states, the batch iterator state, extension
states, etc.). You must initialize the objects correctly before
deserializing the states.
On the other hand, it means that users can change the settings on
deserialization. For example, the exit condition can be changed on the
deserialization, so users can train the model for some iterations,
suspend it, and then resume it with larger number of total iterations.
During the training, it also creates a :class:`~chainer.Reporter` object to
store observed values on each update. For each iteration, it creates a
fresh observation dictionary and stores it in the :attr:`observation`
attribute.
Links of the target model of each optimizer are registered to the reporter
object as observers, where the name of each observer is constructed as the
format ``<optimizer name><link name>``. The link name is given by the
:meth:`chainer.Link.namedlink` method, which represents the path to each
link in the hierarchy. Other observers can be registered by accessing the
reporter object via the :attr:`reporter` attribute.
The default trainer is `plain`, i.e., it does not contain any extensions.
Args:
updater (~chainer.training.Updater): Updater object. It defines how to
update the models.
stop_trigger: Trigger that determines when to stop the training loop.
If it is not callable, it is passed to :class:`IntervalTrigger`.
Attributes:
updater: The updater object for this trainer.
stop_trigger: Trigger that determines when to stop the training loop.
The training loop stops at the iteration on which this trigger
returns ``True``.
observation: Observation of values made at the last update. See the
:class:`Reporter` class for details.
out: Output directory.
reporter: Reporter object to report observed values.
"""
def __init__(self, updater, stop_trigger=None, out='result'):
    """Set up the trainer around *updater*.

    Builds a Reporter, registers every optimizer target (and its named
    sub-links) as observers, and connects the updater back to this
    trainer.
    """
    self.updater = updater
    self.stop_trigger = trigger_module.get_trigger(stop_trigger)
    self.observation = {}
    self.out = out

    rep = reporter_module.Reporter()
    for opt_name, optimizer in six.iteritems(updater.get_all_optimizers()):
        target = optimizer.target
        rep.add_observer(opt_name, target)
        rep.add_observers(opt_name, target.namedlinks(skipself=True))
    self.reporter = rep

    self._done = False
    self._extensions = collections.OrderedDict()
    updater.connect_trainer(self)
def extend(self, extension, name=None, trigger=None, priority=None,
           invoke_before_training=None):
    """Registers an extension to the trainer.

    Extensions are callables invoked after each update whenever their
    trigger fires.  Higher-priority extensions run earlier; ties run in
    registration order.  Duplicate names are disambiguated by appending
    an ``_N`` ordinal suffix (the second registration of a name becomes
    ``name_1``, and so on).

    Args:
        extension: Extension to register.
        name (str): Name of the extension; falls back to
            ``extension.name`` and then ``extension.default_name``.
        trigger (tuple or Trigger): When to invoke the extension;
            defaults to ``extension.trigger``.  Non-callable values are
            converted to an interval trigger.
        priority (int): Invocation priority; defaults to
            ``extension.priority`` or ``PRIORITY_READER``.
        invoke_before_training (bool): Whether to also invoke the
            extension once before entering the training loop; defaults
            to ``extension.invoke_before_training`` or ``False``.  Used
            by extensions that alter training configuration so resuming
            from a snapshot restores it before any update.

    Raises:
        TypeError: If no name can be determined.
        ValueError: If the name is the reserved string ``'training'``.
    """
    # Resolve the extension name: explicit arg, then attribute fallbacks.
    resolved = name
    if resolved is None:
        resolved = getattr(extension, 'name', None)
    if resolved is None:
        resolved = getattr(extension, 'default_name', None)
    if resolved is None:
        raise TypeError('name is not given for the extension')
    if resolved == 'training':
        raise ValueError(
            'the name "training" is prohibited as an extension name')

    if trigger is None:
        trigger = getattr(extension, 'trigger', None)
    trigger = trigger_module.get_trigger(trigger)

    if priority is None:
        priority = getattr(
            extension, 'priority', extension_module.PRIORITY_READER)

    if invoke_before_training is None:
        invoke_before_training = getattr(
            extension, 'invoke_before_training', False)

    # Disambiguate duplicate names with an ordinal suffix.
    final_name = resolved
    ordinal = 0
    while final_name in self._extensions:
        ordinal += 1
        final_name = '%s_%d' % (resolved, ordinal)

    extension.name = final_name
    self._extensions[final_name] = _ExtensionEntry(
        extension, priority, trigger, invoke_before_training)
def get_extension(self, name):
    """Return the registered extension called *name*.

    Args:
        name (str): Name of the extension.

    Returns:
        The extension callable.

    Raises:
        ValueError: If no extension with that name is registered.
    """
    try:
        return self._extensions[name].extension
    except KeyError:
        raise ValueError('extension %s not found' % name)
def run(self):
"""Executes the training loop.
This method is the core of ``Trainer``. It executes the whole loop of
training the models.
Note that this method cannot run multiple times for one trainer object.
"""
if self._done:
raise RuntimeError('cannot run training loop multiple times')
try:
os.makedirs(self.out)
except OSError:
pass
# sort extensions by priorities
extension_order = sorted(
self._extensions.keys(),
key=lambda name: self._extensions[name].priority, reverse=True)
extensions = [(name, self._extensions[name])
for name in extension_order]
# invoke extensions before the loop
for _, entry in extensions:
if entry.invoke_before_training:
entry.extension(self)
update = self.updater.update
reporter = | |
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, Pomfort GmbH"
__license__ = "MIT"
__maintainer__ = "<NAME>, <NAME>"
__email__ = "<EMAIL>"
"""
import difflib
import filecmp
import glob
import os
import shutil
from importlib import reload
from typing import List
import pytest
from click.testing import CliRunner
from freezegun import freeze_time
from pyfakefs.fake_filesystem_unittest import Pause
import mhl.commands
# Real-filesystem location of the committed scenario reference outputs.
scenario_output_path = "examples/scenarios/Output"
# Mount point of those reference outputs inside the fake (pyfakefs) filesystem.
fake_ref_root_path = "/ref"
@pytest.fixture(autouse=True)
def version(monkeypatch):
    # Pin the tool name/version strings (which end up embedded in generated
    # MHL files) so output stays byte-stable across releases, then reload the
    # commands module so it picks up the patched values.
    monkeypatch.setattr("mhl.__version__.ascmhl_tool_version", "0.3 alpha")
    monkeypatch.setattr("mhl.__version__.ascmhl_tool_name", "ascmhl.py")
    reload(mhl.commands)
@pytest.fixture()
def reference(fs):
    # Make the on-disk reference outputs visible inside the fake filesystem
    # (mounted under /ref) for tests that diff against them.
    load_real_reference(fs)
@pytest.fixture
@freeze_time("2020-01-15 13:00:00")
def card_a002(fs):
    # Fake camera card A002R2EC: a sidecar file plus two tiny stand-in clips.
    # Creation time is frozen so timestamps recorded in MHL output are
    # reproducible.
    fs.create_file("/A002R2EC/Sidecar.txt", contents="BLOREM ipsum dolor sit amet, consetetur sadipscing elitr.\n")
    fs.create_file("/A002R2EC/Clips/A002C006_141024_R2EC.mov", contents="abcd\n")
    fs.create_file("/A002R2EC/Clips/A002C007_141024_R2EC.mov", contents="def\n")
@pytest.fixture
@freeze_time("2020-01-15 13:30:00")
def card_a003(fs):
    # Second fake camera card (A003R2EC), frozen 30 minutes after card A002.
    fs.create_file("/A003R2EC/Sidecar.txt", contents="Dolor sit amet, consetetur sadipscing elitr.\n")
    fs.create_file("/A003R2EC/Clips/A003C011_141024_R2EC.mov", contents="vbgh\n")
    fs.create_file("/A003R2EC/Clips/A003C012_141024_R2EC.mov", contents="zhgdr\n")
def load_real_reference(fake_fs):
    # Expose the real reference-output directory inside the fake filesystem
    # under /ref so tests can compare generated files against it.
    fake_fs.add_real_directory(scenario_output_path, target_path=fake_ref_root_path)
# custom dircmp to compare file contents as suggested in https://stackoverflow.com/a/24860799
class ContentDircmp(filecmp.dircmp):
    """A dircmp that compares files by content, not just os.stat signature.

    In contrast with :class:`filecmp.dircmp`, files with the same relative
    path are compared byte-for-byte (``shallow=False``).
    """

    # noinspection PyAttributeOutsideInit
    def phase3(self):
        """Classify common files using full content comparison."""
        self.same_files, self.diff_files, self.funny_files = filecmp.cmpfiles(
            self.left, self.right, self.common_files, shallow=False
        )
def dirs_are_equal(dir1, dir2):
    """Recursively compare two directory trees by file content.

    Returns True when the trees match, False otherwise.  Differences are
    reported on stdout as a side effect.
    """
    equal = True
    comparison = ContentDircmp(dir1, dir2, ignore=[".DS_Store"])
    mismatch = (comparison.left_only or comparison.right_only
                or comparison.diff_files or comparison.funny_files)
    if mismatch:
        comparison.report_partial_closure()
        for name in comparison.diff_files:
            print_diff_of_files(os.path.join(dir1, name), os.path.join(dir2, name))
        equal = False
    # Recurse into shared subdirectories; a mismatch anywhere fails the whole tree.
    for name in comparison.common_dirs:
        if not dirs_are_equal(os.path.join(dir1, name), os.path.join(dir2, name)):
            equal = False
    return equal
def compare_dir_content(reference: str, dir_path: str) -> bool:
    """Compare *dir_path* against the same relative path below the
    reference root for *reference*; False when either side is missing."""
    relative_path = dir_path.lstrip(os.sep) if os.path.isabs(dir_path) else dir_path
    ref_path = os.path.join(fake_ref_root_path, reference, relative_path)
    if not (os.path.isdir(dir_path) and os.path.isdir(ref_path)):
        return False
    return dirs_are_equal(ref_path, dir_path)
def compare_file_content(reference: str, path: str) -> bool:
    """Byte-compare *path* with its counterpart under the reference root.

    Prints a diff on mismatch; returns False when either file is missing.
    """
    relative_path = path.lstrip(os.sep) if os.path.isabs(path) else path
    ref_path = os.path.join(fake_ref_root_path, reference, relative_path)
    if not (os.path.isfile(path) and os.path.isfile(ref_path)):
        return False
    same = filecmp.cmp(ref_path, path, shallow=False)
    if same is not True:
        print("\ngot different files:\n")
        print_diff_of_files(ref_path, path)
    return same
def print_diff_of_files(path_a: str, path_b: str):
    """Print a unified diff (2 lines of context) between two text files."""
    with open(path_a, "r") as fh:
        lines_a = fh.readlines()
    with open(path_b, "r") as fh:
        lines_b = fh.readlines()
    for diff_line in difflib.unified_diff(
            lines_a, lines_b, fromfile=path_a, tofile=path_b, n=2, lineterm=""):
        print(diff_line.rstrip())
def compare_files_against_reference(scenario_reference: str, folder_paths: List[str], fake_fs) -> bool:
    """
    checks if the scenario reference folder exists in the output folder if it doesn't we copy the folders there and
    consider it as reference. This way we can easily recreate the reference files of a scenario by deleting it on disk
    and running the tests
    """
    real_ref_path = os.path.join(scenario_output_path, scenario_reference)
    # Pause the fake filesystem so we can probe the *real* disk for an
    # existing reference folder.
    with Pause(fake_fs):
        if os.path.isdir(real_ref_path):
            # in case the reference path exists we compare the content
            compare_mode = True
        else:
            # otherwise we just write the result to the output folder
            # to be used as new reference for the next run
            compare_mode = False
    result = True
    for folder_path in folder_paths:
        # Every generated .mhl file must validate against the XSD regardless
        # of compare/record mode.
        validate_all_mhl_files_against_xml_schema(folder_path)
        assert os.path.isabs(folder_path)
        if compare_mode:
            result &= compare_dir_content(scenario_reference, folder_path)
        else:
            copy_fake_directory_to_real_fs(folder_path, real_ref_path, fake_fs)
    # also copy the log file
    # NOTE(review): this copy to the real reference dir happens even in
    # compare mode — presumably intentional to refresh the stored log; the
    # comparison below reads the snapshot mounted at /ref. TODO confirm.
    with open("/log.txt", "rb") as file:
        data = file.read()
    with Pause(fake_fs):
        with open(os.path.join(real_ref_path, "log.txt"), "w+b") as dst_file:
            dst_file.write(data)
    if compare_mode:
        # we always assume a log.txt to exists for each scenario, compare it as well to check differences in tool output
        result &= compare_file_content(scenario_reference, "/log.txt")
    return result
def validate_all_mhl_files_against_xml_schema(folder_path: str):
    """Find every .mhl file below *folder_path* and validate it against
    the ASC-MHL XML schema via the CLI's xsd_schema_check command."""
    runner = CliRunner()
    for mhl_file in glob.glob(folder_path + r"/**/*.mhl", recursive=True):
        outcome = runner.invoke(mhl.commands.xsd_schema_check, mhl_file)
        assert outcome.exit_code == 0, outcome.output
def copy_fake_directory_to_real_fs(fake_dir: str, real_dir: str, fake_fs):
    """Recursively copy a directory from the fake filesystem to the real one.

    Reads happen inside the fake filesystem; every write is wrapped in
    ``Pause`` so it lands on the real disk.
    """
    for root, _, file_names in os.walk(fake_dir):
        destination = os.path.join(real_dir, root.lstrip(os.sep))
        with Pause(fake_fs):
            os.makedirs(destination)
        for file_name in file_names:
            with open(os.path.join(root, file_name), "rb") as src:
                payload = src.read()
            with Pause(fake_fs):
                with open(os.path.join(destination, file_name), "w+b") as dst:
                    dst.write(payload)
def log_message(message: str):
    """Append *message* plus a trailing newline to the shared /log.txt
    transcript used by the scenario tests."""
    with open("/log.txt", "a") as log_file:
        log_file.write(message + "\n")
def execute_command(click_cmd, args):
    """Invoke a click command via CliRunner, logging the shell-style
    invocation line and the command output to the test transcript."""
    runner = CliRunner()
    log_message("")
    log_message(f'$ ascmhl.py {click_cmd.name} {" ".join(args)}')
    outcome = runner.invoke(click_cmd, args)
    log_message(outcome.output)
    log_message("")
    return outcome
@freeze_time("2020-01-16 09:15:00")
def test_scenario_01(fs, reference, card_a002):
    # Scenario 01: single copy of a camera card to a travel drive, seal it to
    # create the first ASC-MHL generation, then diff against the committed
    # reference output.
    log_message("Scenario 01:")
    log_message("This is the most basic example. A camera card is copied to a travel drive and an ASC-MHL file is")
    log_message("created with hashes of all files on the card.")
    log_message("")
    log_message("Assume the source card /A002R2EC is copied to a travel drive /travel_01.")
    shutil.copytree("/A002R2EC", "/travel_01/A002R2EC")
    log_message("")
    log_message("Seal the copy on the travel drive /travel_01 to create the original mhl generation.")
    result = execute_command(mhl.commands.create, ["-v", "/travel_01/A002R2EC", "-h", "xxh64"])
    assert result.exit_code == 0
    assert compare_files_against_reference("scenario_01", ["/travel_01"], fs)
@freeze_time("2020-01-16 09:15:00")
def test_scenario_02(fs, reference, card_a002):
    # Scenario 02: card -> travel drive -> file server.  Each copy is sealed,
    # producing two MHL generations that document the chain of custody.
    log_message("Scenario 02:")
    log_message("In this scenario a copy is made, and then a copy of the copy. Two ASC-MHL are created during")
    log_message("this process, documenting the history of both copy processes.")
    log_message("")
    log_message("Assume the source card /A002R2EC is copied to a travel drive /travel_01.")
    shutil.copytree("/A002R2EC", "/travel_01/A002R2EC")
    log_message("")
    log_message("Seal the copy on the travel drive /travel_01 to create the original mhl generation.")
    result = execute_command(mhl.commands.create, ["-v", "/travel_01/A002R2EC", "-h", "xxh64"])
    assert result.exit_code == 0
    # Advance the (frozen) clock one day for the second copy at the facility.
    with freeze_time("2020-01-17 14:30:00"):
        log_message("")
        log_message("Assume the travel drive arrives at a facility on the next day.")
        log_message("And the folder A002R2EC is copied there from the travel drive to a file server at /file_server.")
        shutil.copytree("/travel_01/A002R2EC", "/file_server/A002R2EC")
        log_message("")
        log_message("Sealing the folder A002R2EC again on the file server")
        log_message("this will verify all hashes, check for completeness and create a second generation")
        result = execute_command(mhl.commands.create, ["-v", "/file_server/A002R2EC", "-h", "xxh64"])
        assert result.exit_code == 0
    assert compare_files_against_reference("scenario_02", ["/travel_01", "/file_server"], fs)
@freeze_time("2020-01-16 09:15:00")
def test_scenario_03(fs, reference, card_a002):
    # Scenario 03: first generation uses xxh64; the second seal verifies the
    # existing xxHashes and additionally records MD5 hashes on demand.
    log_message("Scenario 03:")
    log_message("In this scenario the first hashes are created using the xxhash format. Different hash formats")
    log_message("might be required by systems used further down the workflow, so the second copy is verified")
    log_message("against the existing xxhash hashes, and additional MD5 hashes can be created and stored during")
    log_message("that process on demand.")
    log_message("")
    log_message("Assume the source card /A002R2EC is copied to a travel drive /travel_01.")
    shutil.copytree("/A002R2EC", "/travel_01/A002R2EC")
    log_message("")
    log_message("Seal the copy on the travel drive /travel_01 to create the original mhl generation.")
    result = execute_command(mhl.commands.create, ["-v", "/travel_01/A002R2EC", "-h", "xxh64"])
    assert result.exit_code == 0
    # Second seal a day later, this time requesting MD5 hashes.
    with freeze_time("2020-01-17 14:30:00"):
        log_message("")
        log_message("Assume the travel drive arrives at a facility on the next day.")
        log_message("And the folder A002R2EC is copied there from the travel drive to a file server at /file_server.")
        shutil.copytree("/travel_01/A002R2EC", "/file_server/A002R2EC")
        log_message("")
        log_message("Sealing the folder A002R2EC again on the file server using MD5 hash format")
        log_message("this will verify all existing xxHashes, check for completeness,")
        log_message("and create a second generation with additional (new) MD5 hashes.")
        result = execute_command(mhl.commands.create, ["-v", "-h", "md5", "/file_server/A002R2EC"])
        assert result.exit_code == 0
    assert compare_files_against_reference("scenario_03", ["/travel_01", "/file_server"], fs)
@freeze_time("2020-01-16 09:15:00")
def test_scenario_04(fs, reference, card_a002):
    # Scenario 04: like scenario 02, but the second copy is corrupted before
    # sealing, so verification fails (exit code 12) and the failure is
    # documented in the new generation.
    log_message("Scenario 04:")
    log_message("Copying a folder to a travel drive and from there to a file server with a hash mismatch in")
    log_message("one file.")
    log_message("")
    log_message("Assume the source card /A002R2EC is copied to a travel drive /travel_01.")
    shutil.copytree("/A002R2EC", "/travel_01/A002R2EC")
    log_message("")
    log_message("Seal the copy on the travel drive /travel_01 to create the original mhl generation.")
    result = execute_command(mhl.commands.create, ["-v", "/travel_01/A002R2EC", "-h", "xxh64"])
    assert result.exit_code == 0
    with freeze_time("2020-01-17 14:30:00"):
        log_message("")
        log_message("Assume the travel drive arrives at a facility on the next day.")
        log_message("And the folder A002R2EC is copied there from the travel drive to a file server at /file_server.")
        shutil.copytree("/travel_01/A002R2EC", "/file_server/A002R2EC")
        log_message("")
        log_message("afterwards we simulate that during the copy the Sidecar.txt got corrupt (altered")
        log_message("by modifying the file content")
        # Simulate corruption by appending bytes to the copied sidecar file.
        with open("/file_server/A002R2EC/Sidecar.txt", "a") as file:
            file.write("!!")
        log_message("")
        log_message("Sealing the folder A002R2EC again on the file server.")
        log_message("This will verify all existing hashes and fail because Sidecar.txt was altered.")
        log_message("An error is shown and create a new generation that documents the failed verification")
        result = execute_command(mhl.commands.create, ["-v", "/file_server/A002R2EC", "-h", "xxh64"])
        # Exit code 12 signals the hash mismatch detected during sealing.
        assert result.exit_code == 12
    assert compare_files_against_reference("scenario_04", ["/travel_01", "/file_server"], fs)
    # the second generation will include the failed verification result
@freeze_time("2020-01-16 09:15:00")
def test_scenario_05(fs, reference, card_a002, card_a003):
log_message("Scenario 05:")
log_message("Copying two camera mags to a `Reels` folder on a travel drive, and the entire `Reels` folder")
log_message("folder to a server.")
log_message("")
os.makedirs("/travel_01/Reels")
os.makedirs("/file_server")
log_message("Assume the source card /A002R2EC is copied to a Reels folder on travel drive /travel_01.")
shutil.copytree("/A002R2EC", "/travel_01/Reels/A002R2EC")
log_message("")
log_message("Seal | |
International, Inc.",
"000436": "ELANsat Technologies, Inc.",
"000437": "Powin Information Technology, Inc.",
"000438": "Nortel Networks",
"000439": "Rosco Entertainment Technology, Inc.",
"00043A": "Intelligent Telecommunications, Inc.",
"00043B": "Lava Computer Mfg., Inc.",
"00043C": "SONOS Co., Ltd.",
"00043D": "INDEL AG",
"00043E": "Telencomm",
"00043F": "ESTeem Wireless Modems, Inc",
"000440": "cyberPIXIE, Inc.",
"000441": "Half Dome Systems, Inc.",
"000442": "NACT",
"000443": "Agilent Technologies, Inc.",
"000444": "Western Multiplex Corporation",
"000445": "LMS Skalar Instruments GmbH",
"000446": "CYZENTECH Co., Ltd.",
"000447": "Acrowave Systems Co., Ltd.",
"000448": "Polaroid Corporation",
"000449": "Mapletree Networks",
"00044A": "iPolicy Networks, Inc.",
"00044B": "NVIDIA",
"00044C": "JENOPTIK",
"00044D": "CISCO SYSTEMS, INC.",
"00044E": "CISCO SYSTEMS, INC.",
"00044F": "Leukhardt Systemelektronik GmbH",
"000450": "DMD Computers SRL",
"000451": "Medrad, Inc.",
"000452": "RocketLogix, Inc.",
"000453": "YottaYotta, Inc.",
"000454": "Quadriga UK",
"000455": "ANTARA.net",
"000456": "Cambium Networks Limited",
"000457": "Universal Access Technology, Inc.",
"000458": "Fusion X Co., Ltd.",
"000459": "Veristar Corporation",
"00045A": "The Linksys Group, Inc.",
"00045B": "Techsan Electronics Co., Ltd.",
"00045C": "Mobiwave Pte Ltd",
"00045D": "BEKA Elektronik",
"00045E": "PolyTrax Information Technology AG",
"00045F": "Avalue Technology, Inc.",
"000460": "Knilink Technology, Inc.",
"000461": "EPOX Computer Co., Ltd.",
"000462": "DAKOS Data & Communication Co., Ltd.",
"000463": "Bosch Security Systems",
"000464": "Pulse-Link Inc",
"000465": "i.s.t isdn-support technik GmbH",
"000466": "ARMITEL Co.",
"000467": "Wuhan Research Institute of MII",
"000468": "Vivity, Inc.",
"000469": "Innocom, Inc.",
"00046A": "Navini Networks",
"00046B": "Palm Wireless, Inc.",
"00046C": "Cyber Technology Co., Ltd.",
"00046D": "CISCO SYSTEMS, INC.",
"00046E": "CISCO SYSTEMS, INC.",
"00046F": "Digitel S/A Industria Eletronica",
"000470": "ipUnplugged AB",
"000471": "IPrad",
"000472": "Telelynx, Inc.",
"000473": "Photonex Corporation",
"000474": "LEGRAND",
"000475": "3 Com Corporation",
"000476": "3 Com Corporation",
"000477": "Scalant Systems, Inc.",
"000478": "G. Star Technology Corporation",
"000479": "Radius Co., Ltd.",
"00047A": "AXXESSIT ASA",
"00047B": "Schlumberger",
"00047C": "Skidata AG",
"00047D": "Pelco",
"00047E": "Siqura B.V.",
"00047F": "Chr. Mayr GmbH & Co. KG",
"000480": "Brocade Communications Systems, Inc",
"000481": "Econolite Control Products, Inc.",
"000482": "Medialogic Corp.",
"000483": "Deltron Technology, Inc.",
"000484": "Amann GmbH",
"000485": "PicoLight",
"000486": "ITTC, University of Kansas",
"000487": "Cogency Semiconductor, Inc.",
"000488": "Eurotherm Controls",
"000489": "YAFO Networks, Inc.",
"00048A": "Temia Vertriebs GmbH",
"00048B": "Poscon Corporation",
"00048C": "Nayna Networks, Inc.",
"00048D": "Tone Commander Systems, Inc.",
"00048E": "Ohm Tech Labs, Inc.",
"00048F": "TD Systems Corporation",
"000490": "Optical Access",
"000491": "Technovision, Inc.",
"000492": "Hive Internet, Ltd.",
"000493": "Tsinghua Unisplendour Co., Ltd.",
"000494": "Breezecom, Ltd.",
"000495": "Tejas Networks India Limited",
"000496": "Extreme Networks",
"000497": "MacroSystem Digital Video AG",
"000498": "Mahi Networks",
"000499": "Chino Corporation",
"00049A": "CISCO SYSTEMS, INC.",
"00049B": "CISCO SYSTEMS, INC.",
"00049C": "Surgient Networks, Inc.",
"00049D": "Ipanema Technologies",
"00049E": "Wirelink Co., Ltd.",
"00049F": "Freescale Semiconductor",
"0004A0": "Verity Instruments, Inc.",
"0004A1": "Pathway Connectivity",
"0004A2": "L.S.I. Japan Co., Ltd.",
"0004A3": "Microchip Technology, Inc.",
"0004A4": "NetEnabled, Inc.",
"0004A5": "Barco Projection Systems NV",
"0004A6": "SAF Tehnika Ltd.",
"0004A7": "FabiaTech Corporation",
"0004A8": "Broadmax Technologies, Inc.",
"0004A9": "SandStream Technologies, Inc.",
"0004AA": "Jetstream Communications",
"0004AB": "Comverse Network Systems, Inc.",
"0004AC": "IBM Corp",
"0004AD": "Malibu Networks",
"0004AE": "Sullair Corporation",
"0004AF": "Digital Fountain, Inc.",
"0004B0": "ELESIGN Co., Ltd.",
"0004B1": "Signal Technology, Inc.",
"0004B2": "ESSEGI SRL",
"0004B3": "Videotek, Inc.",
"0004B4": "CIAC",
"0004B5": "Equitrac Corporation",
"0004B6": "Stratex Networks, Inc.",
"0004B7": "AMB i.t. Holding",
"0004B8": "Kumahira Co., Ltd.",
"0004B9": "S.I. Soubou, Inc.",
"0004BA": "KDD Media Will Corporation",
"0004BB": "Bardac Corporation",
"0004BC": "Giantec, Inc.",
"0004BD": "ARRIS Group, Inc.",
"0004BE": "OptXCon, Inc.",
"0004BF": "VersaLogic Corp.",
"0004C0": "CISCO SYSTEMS, INC.",
"0004C1": "CISCO SYSTEMS, INC.",
"0004C2": "Magnipix, Inc.",
"0004C3": "CASTOR Informatique",
"0004C4": "Allen & Heath Limited",
"0004C5": "ASE Technologies, USA",
"0004C6": "Yamaha Motor Co., Ltd.",
"0004C7": "NetMount",
"0004C8": "LIBA Maschinenfabrik GmbH",
"0004C9": "Micro Electron Co., Ltd.",
"0004CA": "FreeMs Corp.",
"0004CB": "Tdsoft Communication, Ltd.",
"0004CC": "Peek Traffic B.V.",
"0004CD": "Extenway Solutions Inc",
"0004CE": "<NAME>",
"0004CF": "Seagate Technology",
"0004D0": "Softlink s.r.o.",
"0004D1": "Drew Technologies, Inc.",
"0004D2": "Adcon Telemetry GmbH",
"0004D3": "Toyokeiki Co., Ltd.",
"0004D4": "Proview Electronics Co., Ltd.",
"0004D5": "Hitachi Information & Communication Engineering, Ltd.",
"0004D6": "Takagi Industrial Co., Ltd.",
"0004D7": "Omitec Instrumentation Ltd.",
"0004D8": "IPWireless, Inc.",
"0004D9": "Titan Electronics, Inc.",
"0004DA": "Relax Technology, Inc.",
"0004DB": "Tellus Group Corp.",
"0004DC": "Nortel Networks",
"0004DD": "CISCO SYSTEMS, INC.",
"0004DE": "CISCO SYSTEMS, INC.",
"0004DF": "Teracom Telematica Ltda.",
"0004E0": "Procket Networks",
"0004E1": "Infinior Microsystems",
"0004E2": "SMC Networks, Inc.",
"0004E3": "Accton Technology Corp.",
"0004E4": "Daeryung Ind., Inc.",
"0004E5": "Glonet Systems, Inc.",
"0004E6": "Banyan Network Private Limited",
"0004E7": "Lightpointe Communications, Inc",
"0004E8": "IER, Inc.",
"0004E9": "Infiniswitch Corporation",
"0004EA": "Hewlett-Packard Company",
"0004EB": "Paxonet Communications, Inc.",
"0004EC": "Memobox SA",
"0004ED": "Billion Electric Co., Ltd.",
"0004EE": "Lincoln Electric Company",
"0004EF": "Polestar Corp.",
"0004F0": "International Computers, Ltd",
"0004F1": "WhereNet",
"0004F2": "Polycom",
"0004F3": "FS FORTH-SYSTEME GmbH",
"0004F4": "Infinite Electronics Inc.",
"0004F5": "SnowShore Networks, Inc.",
"0004F6": "Amphus",
"0004F7": "Omega Band, Inc.",
"0004F8": "QUALICABLE TV Industria E Com., Ltda",
"0004F9": "Xtera Communications, Inc.",
"0004FA": "NBS Technologies Inc.",
"0004FB": "Commtech, Inc.",
"0004FC": "Stratus Computer (DE), Inc.",
"0004FD": "Japan Control Engineering Co., Ltd.",
"0004FE": "Pelago Networks",
"0004FF": "Acronet Co., Ltd.",
"000500": "CISCO SYSTEMS, INC.",
"000501": "CISCO SYSTEMS, INC.",
"000502": "Apple",
"000503": "ICONAG",
"000504": "Naray Information & Communication Enterprise",
"000505": "Systems Integration Solutions, Inc.",
"000506": "Reddo Networks AB",
"000507": "Fine Appliance Corp.",
"000508": "Inetcam, Inc.",
"000509": "AVOC Nishimura Ltd.",
"00050A": "ICS Spa",
"00050B": "SICOM Systems, Inc.",
"00050C": "Network Photonics, Inc.",
"00050D": "Midstream Technologies, Inc.",
"00050E": "3ware, Inc.",
"00050F": "Tanaka S/S Ltd.",
"000510": "Infinite Shanghai Communication Terminals Ltd.",
"000511": "Complementary Technologies Ltd",
"000512": "MeshNetworks, Inc.",
"000513": "VTLinx Multimedia Systems, Inc.",
"000514": "KDT Systems Co., Ltd.",
"000515": "Nuark Co., Ltd.",
"000516": "SMART Modular Technologies",
"000517": "Shellcomm, Inc.",
"000518": "Jupiters Technology",
"000519": "Siemens Building Technologies AG,",
"00051A": "3Com Europe Ltd.",
"00051B": "Magic Control Technology Corporation",
"00051C": "Xnet Technology Corp.",
"00051D": "Airocon, Inc.",
"00051E": "Brocade Communications Systems, Inc.",
"00051F": "Taijin Media Co., Ltd.",
"000520": "Smartronix, Inc.",
"000521": "Control Microsystems",
"000522": "LEA*D Corporation, Inc.",
"000523": "AVL List GmbH",
"000524": "BTL System (HK) Limited",
"000525": "Puretek Industrial Co., Ltd.",
"000526": "IPAS GmbH",
"000527": "SJ Tek Co. Ltd",
"000528": "New Focus, Inc.",
"000529": "Shanghai Broadan Communication Technology Co., Ltd",
"00052A": "Ikegami Tsushinki Co., Ltd.",
"00052B": "HORIBA, Ltd.",
"00052C": "Supreme Magic Corporation",
"00052D": "Zoltrix International Limited",
"00052E": "Cinta Networks",
"00052F": "Leviton Network Solutions",
"000530": "Andiamo Systems, Inc.",
"000531": "CISCO SYSTEMS, INC.",
"000532": "CISCO SYSTEMS, INC.",
"000533": "Brocade Communications Systems, Inc.",
"000534": "Northstar Engineering Ltd.",
"000535": "Chip PC Ltd.",
"000536": "Danam Communications, Inc.",
"000537": "Nets Technology Co., Ltd.",
"000538": "Merilus, Inc.",
"000539": "A Brand New World in Sweden AB",
"00053A": "Willowglen Services Pte Ltd",
"00053B": "Harbour Networks Ltd., Co. Beijing",
| |
# Root of the local malaya cache directory; every artifact below is stored
# underneath this path after download.
from malaya import home
# Plain-text Malay corpora bundled for the dictionary-based modules.
MALAY_TEXT = home + '/dictionary/malay-text.txt'
MALAY_TEXT_200K = home + '/dictionary-200k/malay-text.txt'
# sorted based on modules, started from augmentation until toxic
# Convention: each PATH_* dict maps a model name to its local cache files,
# and the matching S3_PATH_* dict maps the same keys to remote locations
# (bucket-relative keys, or full URLs where shown).
PATH_AUGMENTATION = {
    'synonym': {
        'model': home + '/synonym/synonym0.json',
        'model2': home + '/synonym/synonym1.json',
        'version': 'v35',
    }
}
S3_PATH_AUGMENTATION = {
    'synonym': {
        'model': 'https://raw.githubusercontent.com/huseinzol05/Malaya-Dataset/master/dictionary/synonym/synonym0.json',
        'model2': 'https://raw.githubusercontent.com/huseinzol05/Malaya-Dataset/master/dictionary/synonym/synonym1.json',
    }
}
# Constituency-parsing checkpoints: frozen TF graph ('model'), its quantized
# variant, a label dictionary, and the shared sentencepiece vocab/tokenizer.
PATH_CONSTITUENCY = {
    'bert': {
        'model': home + '/constituency/bert/base/model.pb',
        'quantized': home + '/constituency/bert/base/quantized/model.pb',
        'dictionary': home + '/constituency/bert/base/vocab.json',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v38',
    },
    'tiny-bert': {
        'model': home + '/constituency/bert/tiny/model.pb',
        'quantized': home + '/constituency/bert/tiny/quantized/model.pb',
        'dictionary': home + '/constituency/bert/tiny/vocab.json',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v38',
    },
    'albert': {
        'model': home + '/constituency/albert/base/model.pb',
        'quantized': home + '/constituency/albert/base/quantized/model.pb',
        'dictionary': home + '/constituency/albert/base/vocab.json',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v38',
    },
    'tiny-albert': {
        'model': home + '/constituency/albert/tiny/model.pb',
        'quantized': home + '/constituency/albert/tiny/quantized/model.pb',
        'dictionary': home + '/constituency/albert/tiny/vocab.json',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v38',
    },
    'xlnet': {
        'model': home + '/constituency/xlnet/base/model.pb',
        'quantized': home + '/constituency/xlnet/base/quantized/model.pb',
        'dictionary': home + '/constituency/xlnet/base/vocab.json',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'version': 'v38',
    },
}
S3_PATH_CONSTITUENCY = {
    'bert': {
        'model': 'v38/constituency/bert-base.pb',
        'quantized': 'v40/constituency/bert-base.pb.quantized',
        'dictionary': 'v38/constituency/vocab-bert-base.json',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'tiny-bert': {
        'model': 'v38/constituency/tiny-bert.pb',
        'quantized': 'v40/constituency/tiny-bert.pb.quantized',
        'dictionary': 'v38/constituency/vocab-tiny-bert.json',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'albert': {
        'model': 'v38/constituency/albert-base.pb',
        'quantized': 'v40/constituency/albert-base.pb.quantized',
        'dictionary': 'v38/constituency/vocab-albert-base.json',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'tiny-albert': {
        'model': 'v38/constituency/albert-tiny.pb',
        'quantized': 'v40/constituency/albert-tiny.pb.quantized',
        'dictionary': 'v38/constituency/vocab-albert-tiny.json',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'xlnet': {
        'model': 'v40/constituency/xlnet-base.pb',
        'quantized': 'v40/constituency/xlnet-base.pb.quantized',
        'dictionary': 'v40/constituency/vocab-xlnet-base.json',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
    },
}
# Dependency-parsing checkpoints (local cache) and their remote counterparts.
PATH_DEPENDENCY = {
    'bert': {
        'model': home + '/dependency/bert/base/model.pb',
        'quantized': home + '/dependency/bert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v34',
    },
    'tiny-bert': {
        'model': home + '/dependency/bert/tiny/model.pb',
        'quantized': home + '/dependency/bert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v34',
    },
    'albert': {
        'model': home + '/dependency/albert/base/model.pb',
        'quantized': home + '/dependency/albert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v34',
    },
    'tiny-albert': {
        'model': home + '/dependency/albert/tiny/model.pb',
        'quantized': home + '/dependency/albert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v34',
    },
    'xlnet': {
        'model': home + '/dependency/xlnet/base/model.pb',
        'quantized': home + '/dependency/xlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'version': 'v34',
    },
    'alxlnet': {
        'model': home + '/dependency/alxlnet/base/model.pb',
        'quantized': home + '/dependency/alxlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'version': 'v34',
    },
}
S3_PATH_DEPENDENCY = {
    'bert': {
        'model': 'v34/dependency/bert-base-dependency.pb',
        'quantized': 'v40/dependency/bert-base-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'tiny-bert': {
        'model': 'v34/dependency/tiny-bert-dependency.pb',
        'quantized': 'v40/dependency/tiny-bert-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'albert': {
        'model': 'v34/dependency/albert-base-dependency.pb',
        'quantized': 'v40/dependency/albert-base-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'tiny-albert': {
        'model': 'v34/dependency/albert-tiny-dependency.pb',
        'quantized': 'v40/dependency/albert-tiny-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'xlnet': {
        'model': 'v34/dependency/xlnet-base-dependency.pb',
        'quantized': 'v40/dependency/xlnet-base-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
    },
    'alxlnet': {
        'model': 'v34/dependency/alxlnet-base-dependency.pb',
        # NOTE(review): every sibling points 'quantized' at v40 but this one
        # stays on v34 -- presumably intentional upstream; confirm before
        # bumping.
        'quantized': 'v34/dependency/alxlnet-base-dependency.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
    },
}
# Emotion-classification models: one classical multinomial NB (pickled model +
# tfidf vectorizer + BPE model) plus the transformer checkpoints.
PATH_EMOTION = {
    'multinomial': {
        'model': home + '/emotion/multinomial/multinomial.pkl',
        'vector': home + '/emotion/multinomial/tfidf.pkl',
        'bpe': home + '/emotion/multinomial/bpe.model',
        'version': 'v34',
    },
    'bert': {
        'model': home + '/emotion/bert/base/model.pb',
        'quantized': home + '/emotion/bert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v34',
    },
    'tiny-bert': {
        'model': home + '/emotion/bert/tiny/model.pb',
        'quantized': home + '/emotion/bert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'version': 'v34',
    },
    'albert': {
        'model': home + '/emotion/albert/base/model.pb',
        'quantized': home + '/emotion/albert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v34',
    },
    'tiny-albert': {
        'model': home + '/emotion/albert/tiny/model.pb',
        'quantized': home + '/emotion/albert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'version': 'v34',
    },
    'xlnet': {
        'model': home + '/emotion/xlnet/base/model.pb',
        'quantized': home + '/emotion/xlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'version': 'v34',
    },
    'alxlnet': {
        'model': home + '/emotion/alxlnet/base/model.pb',
        'quantized': home + '/emotion/alxlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'version': 'v34',
    },
}
S3_PATH_EMOTION = {
    'multinomial': {
        'model': 'v34/emotion/multinomial.pkl',
        'vector': 'v34/emotion/tfidf.pkl',
        'bpe': 'v34/emotion/bpe.model',
    },
    'bert': {
        'model': 'v34/emotion/bert-base-emotion.pb',
        'quantized': 'v40/emotion/bert-base-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'tiny-bert': {
        'model': 'v34/emotion/tiny-bert-emotion.pb',
        'quantized': 'v40/emotion/tiny-bert-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
    },
    'albert': {
        'model': 'v34/emotion/albert-base-emotion.pb',
        'quantized': 'v40/emotion/albert-base-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'tiny-albert': {
        'model': 'v34/emotion/albert-tiny-emotion.pb',
        'quantized': 'v40/emotion/albert-tiny-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
    },
    'xlnet': {
        'model': 'v34/emotion/xlnet-base-emotion.pb',
        'quantized': 'v40/emotion/xlnet-base-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
    },
    'alxlnet': {
        'model': 'v34/emotion/alxlnet-base-emotion.pb',
        'quantized': 'v40/emotion/alxlnet-base-emotion.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
    },
}
# Named-entity-recognition checkpoints; 'setting' is a shared label/config
# JSON used by every entity model.
PATH_ENTITIES = {
    'bert': {
        'model': home + '/entity/bert/base/model.pb',
        'quantized': home + '/entity/bert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
    'tiny-bert': {
        'model': home + '/entity/bert/tiny/model.pb',
        'quantized': home + '/entity/bert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.bert.vocab',
        'tokenizer': home + '/bert/sp10m.cased.bert.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
    'albert': {
        'model': home + '/entity/albert/base/model.pb',
        'quantized': home + '/entity/albert/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
    'tiny-albert': {
        'model': home + '/entity/albert/tiny/model.pb',
        'quantized': home + '/entity/albert/tiny/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v10.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v10.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
    'xlnet': {
        'model': home + '/entity/xlnet/base/model.pb',
        'quantized': home + '/entity/xlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
    'alxlnet': {
        'model': home + '/entity/alxlnet/base/model.pb',
        'quantized': home + '/entity/alxlnet/base/quantized/model.pb',
        'vocab': home + '/bert/sp10m.cased.v9.vocab',
        'tokenizer': home + '/bert/sp10m.cased.v9.model',
        'setting': home + '/entity/dictionary-entities.json',
        'version': 'v34',
    },
}
S3_PATH_ENTITIES = {
    'bert': {
        'model': 'v34/entity/bert-base-entity.pb',
        'quantized': 'v40/entity/bert-base-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
    'tiny-bert': {
        'model': 'v34/entity/tiny-bert-entity.pb',
        'quantized': 'v40/entity/tiny-bert-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.bert.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.bert.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
    'albert': {
        'model': 'v34/entity/albert-base-entity.pb',
        'quantized': 'v40/entity/albert-base-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
    'tiny-albert': {
        'model': 'v34/entity/albert-tiny-entity.pb',
        'quantized': 'v40/entity/albert-tiny-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v10.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v10.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
    'xlnet': {
        'model': 'v34/entity/xlnet-base-entity.pb',
        'quantized': 'v40/entity/xlnet-base-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
    'alxlnet': {
        'model': 'v34/entity/alxlnet-base-entity.pb',
        'quantized': 'v40/entity/alxlnet-base-entity.pb.quantized',
        'vocab': 'tokenizer/sp10m.cased.v9.vocab',
        'tokenizer': 'tokenizer/sp10m.cased.v9.model',
        'setting': 'bert-bahasa/dictionary-entities.json',
    },
}
# Text-generation models.  The 't5-compressed' entries download a tarball
# ('model') that is unpacked into 'directory'; the plain 't5' entries are
# single frozen graphs.
PATH_GENERATOR = {
    't5-compressed': {
        'base': {
            'path': home + '/generator/t5-compressed/base',
            'directory': home + '/generator/t5-compressed/base/model/',
            'model': {
                'model': home
                + '/generator/t5-compressed/base/generator-t5-base.tar.gz',
                'version': 'v35',
            },
        },
        'small': {
            'path': home + '/generator/t5-compressed/small',
            'directory': home + '/generator/t5-compressed/small/model/',
            'model': {
                'model': home
                + '/generator/t5-compressed/small/generator-t5-small.tar.gz',
                'version': 'v35',
            },
        },
    },
    't5': {
        'base': {
            'model': home + '/generator/t5/base/model.pb',
            'quantized': home + '/generator/t5/base/quantized/model.pb',
            'version': 'v38',
        },
        'small': {
            'model': home + '/generator/t5/small/model.pb',
            'quantized': home + '/generator/t5/small/quantized/model.pb',
            'version': 'v38',
        },
    },
}
S3_PATH_GENERATOR = {
    't5-compressed': {
        'base': {'model': 'v35/generator/sample-generator-t5-base.tar.gz'},
        'small': {'model': 'v35/generator/sample-generator-t5-small.tar.gz'},
    },
    't5': {
        'base': {
            'model': 'v38/generator/base.pb',
            'quantized': 'v40/generator/base.pb.quantized',
        },
        'small': {
            'model': 'v38/generator/small.pb',
            'quantized': 'v40/generator/small.pb.quantized',
        },
    },
}
# Language-detection models: two fasttext variants plus a TF checkpoint.
PATH_LANG_DETECTION = {
    'fasttext-original': {
        # NOTE(review): 'fasstext' filename (and '.tfz' below) are spelled
        # this way upstream; keep as-is, renaming would orphan existing
        # local caches.
        'model': home + '/language-detection/fasttext-original/fasstext.bin',
        'version': 'v34',
    },
    'fasttext-quantized': {
        'model': home + '/language-detection/fasttext-quantized/fasstext.tfz',
        'version': 'v34',
    },
    'deep': {
        'model': home
        + '/language-detection/deep/model.ckpt.data-00000-of-00001',
        'index': home + '/language-detection/deep/model.ckpt.index',
        'meta': home + '/language-detection/deep/model.ckpt.meta',
        'vector': home
        + '/language-detection/deep/vectorizer-language-detection.pkl',
        'bpe': home + '/language-detection/deep/bpe.model',
        'version': 'v34',
    },
}
S3_PATH_LANG_DETECTION = {
    'fasttext-original': {
        'model': 'v34/language-detection/fasttext-malaya.bin'
    },
    'fasttext-quantized': {
        'model': 'v34/language-detection/fasttext-malaya.ftz'
    },
    'deep': {
        'model': 'v34/language-detection/model.ckpt.data-00000-of-00001',
        'index': 'v34/language-detection/model.ckpt.index',
        'meta': 'v34/language-detection/model.ckpt.meta',
        'vector': 'v34/language-detection/bow-language-detection.pkl',
        'bpe': 'v34/language-detection/language-detection.model',
    },
}
# N-gram corpora and the shared sentencepiece model used in preprocessing.
# Keys 1 and 2 are deliberately integers (n-gram order), not strings.
PATH_NGRAM = {
    1: {
        'model': home + '/preprocessing/ngram1/bm_1grams.json',
        'version': 'v28',
    },
    2: {
        'model': home + '/preprocessing/ngram2/bm_2grams.json',
        'version': 'v23',
    },
    'symspell': {
        'model': home + '/preprocessing/symspell/bm_1grams.txt',
        'version': 'v28',
    },
    'sentencepiece': {
        'vocab': home + '/preprocessing/sentencepiece/sp10m.cased.v4.vocab',
        'model': home + '/preprocessing/sentencepiece/sp10m.cased.v4.model',
        'version': 'v31',
    },
}
S3_PATH_NGRAM = {
    1: {'model': 'v27/preprocessing/bm_1grams.json'},
    2: {'model': 'v23/preprocessing/bm_2grams.json'},
    'symspell': {'model': 'v27/preprocessing/bm_1grams.txt'},
    'sentencepiece': {
        'vocab': 'bert-bahasa/sp10m.cased.v4.vocab',
        'model': 'bert-bahasa/sp10m.cased.v4.model',
    },
}
# NSFW detection: a lexicon JSON and a multinomial NB pipeline.
PATH_NSFW = {
    'lexicon': {'model': home + '/nsfw/lexicon.json', 'version': 'v39'},
    'multinomial': {
        'model': home + '/nsfw/multinomial/multinomial.pkl',
        'vector': home + '/nsfw/multinomial/tfidf.pkl',
        'bpe': home + '/nsfw/multinomial/bpe.model',
        'version': 'v39',
    },
}
S3_PATH_NSFW = {
    'lexicon': {'model': 'v39/nsfw/nsfw-corpus.json'},
    'multinomial': {
        'model': 'v39/nsfw/multinomial-nsfw.pkl',
        'vector': 'v39/nsfw/tfidf-nsfw.pkl',
        'bpe': 'v39/nsfw/nsfw.model',
    },
}
# Paraphrase models; same layout conventions as PATH_GENERATOR above
# (tarball + unpack directory for 't5-compressed', frozen graphs otherwise).
PATH_PARAPHRASE = {
    't5-compressed': {
        'base': {
            'path': home + '/paraphrase/t5-compressed/base',
            'directory': home + '/paraphrase/t5-compressed/base/model/',
            'model': {
                'model': home
                + '/paraphrase/t5-compressed/base/paraphrase-t5-base.tar.gz',
                'version': 'v36',
            },
        },
        'small': {
            'path': home + '/paraphrase/t5-compressed/small',
            'directory': home + '/paraphrase/t5-compressed/small/model/',
            'model': {
                'model': home
                + '/paraphrase/t5-compressed/small/paraphrase-t5-small.tar.gz',
                'version': 'v36',
            },
        },
    },
    't5': {
        'base': {
            'model': home + '/paraphrase/t5/base/model.pb',
            'quantized': home + '/paraphrase/t5/base/quantized/model.pb',
            'version': 'v38',
        },
        'small': {
            'model': home + '/paraphrase/t5/small/model.pb',
            'quantized': home + '/paraphrase/t5/small/quantized/model.pb',
            'version': 'v38',
        },
    },
    'transformer': {
        'base': {
            'model': home + '/paraphrase/transformer/base/model.pb',
            'quantized': home
            + '/paraphrase/transformer/base/quantized/model.pb',
            'vocab': home + '/paraphrase/sp10m.cased.t5.model',
            'version': 'v39',
        },
        'small': {
            'model': home + '/paraphrase/transformer/small/model.pb',
            'quantized': home
            + '/paraphrase/transformer/small/quantized/model.pb',
            'vocab': home + '/paraphrase/sp10m.cased.t5.model',
            'version': 'v39',
        },
    },
}
S3_PATH_PARAPHRASE = {
't5-compressed': {
'base': {'model': 'v36/paraphrase/paraphrase-t5-base.tar.gz'},
'small': | |
'pp': ([],['run_card lpp1 1', 'run_card lpp2 1','run_card nb_proton1 1', 'run_card nb_neutron1 0', 'run_card mass_ion1 -1', 'run_card nb_proton2 1', 'run_card nb_neutron2 0', 'run_card mass_ion2 -1']),
})
self.special_shortcut_help.update({
'ebeam' : 'syntax: set ebeam VALUE:\n This parameter sets the energy to both beam to the value in GeV',
'lpp' : 'syntax: set ebeam VALUE:\n'+\
' Set the type of beam to a given value for both beam\n'+\
' 0 : means no PDF\n'+\
' 1 : means proton PDF\n'+\
' -1 : means antiproton PDF\n'+\
' 2 : means PDF for elastic photon emited from a proton\n'+\
' 3 : means PDF for elastic photon emited from an electron',
'lhc' : 'syntax: set lhc VALUE:\n Set for a proton-proton collision with that given center of mass energy (in TeV)',
'lep' : 'syntax: set lep VALUE:\n Set for a electron-positron collision with that given center of mass energy (in GeV)',
'fixed_scale' : 'syntax: set fixed_scale VALUE:\n Set all scales to the give value (in GeV)',
'no_parton_cut': 'remove all cut (but BW_cutoff)',
'cm_velocity': 'set sqrts to have the above velocity for the incoming particles',
'pbpb': 'setup heavy ion configuration for lead-lead collision',
'pbp': 'setup heavy ion configuration for lead-proton collision',
'pp': 'remove setup of heavy ion configuration to set proton-proton collision',
})
self.update_block += [b.name for b in self.run_card.blocks]
return self.run_set
def init_madweight(self, cards):
    """Detect and load the MadWeight card.

    Sets ``self.has_mw`` and, when a madweight card is present, binds the
    transfer-function commands from the mother interface and collects the
    editable MadWeight variable names.

    :param cards: collection of card paths handed to the init_* helpers.
    :return: list of MadWeight variable names ([] when no madweight card).
    """
    self.has_mw = False
    if not self.get_path('madweight', cards):
        return []
    # add special function associated to MW
    self.do_change_tf = self.mother_interface.do_define_transfer_fct
    self.complete_change_tf = self.mother_interface.complete_define_transfer_fct
    self.help_change_tf = self.mother_interface.help_define_transfer_fct
    if not os.path.exists(self.paths['transfer']):
        logger.warning('No transfer function currently define. Please use the change_tf command to define one.')
    self.has_mw = True
    # BUGFIX: the fallback import was guarded by a bare ``except:`` which
    # also swallowed unrelated errors (KeyboardInterrupt, syntax errors in
    # the madweight package, ...); only an ImportError should trigger the
    # standalone 'internal' fallback.
    try:
        import madgraph.madweight.Cards as mwcards
    except ImportError:
        import internal.madweight.Cards as mwcards
    self.mw_card = mwcards.Card(self.paths['MadWeight'])
    # NOTE(review): after this reassignment ``self.mw_card`` is the ``info``
    # object, yet the loop below reads ``self.mw_card.info[key]`` -- this
    # only works if ``info`` exposes an ``info`` attribute itself; confirm
    # against madweight.Cards before simplifying.
    self.mw_card = self.mw_card.info
    self.mw_vars = []
    for key in self.mw_card:
        if key == 'comment':
            continue
        for key2 in self.mw_card.info[key]:
            # Only named (non-positional) entries are user-editable variables.
            if isinstance(key2, str) and not key2.isdigit():
                self.mw_vars.append(key2)
    return self.mw_vars
def init_madloop(self, cards):
    """Detect and load the MadLoop card.

    If *cards* is a mapping, any of the aliases 'ML', 'madloop', 'MadLoop'
    overrides the stored card path (later aliases win).  When the resolved
    path exists, the card plus a pristine default are loaded and the list of
    lower-cased MadLoop parameter names is returned; otherwise [].
    """
    if isinstance(cards, dict):
        # Honour every alias in order so that 'MadLoop' overrides 'ML'.
        for alias in ('ML', 'madloop', 'MadLoop'):
            if alias in cards:
                self.paths['ML'] = cards[alias]
    self.has_ml = os.path.isfile(self.paths['ML'])
    if not self.has_ml:
        return []
    self.MLcard = banner_mod.MadLoopParam(self.paths['ML'])
    self.MLcardDefault = banner_mod.MadLoopParam()
    self.ml_vars = [name.lower() for name in self.MLcard.keys()]
    return self.ml_vars
def init_shower(self, cards):
    """Detect and load the shower card; return its variable names ([] if absent)."""
    self.has_shower = bool(self.get_path('shower', cards))
    if not self.has_shower:
        return []
    self.shower_card = shower_card_mod.ShowerCard(self.paths['shower'])
    self.shower_vars = list(self.shower_card.keys())
    return self.shower_vars
def init_pythia8(self, cards):
    """Detect and load the Pythia8 card.

    When a pythia8 card is found, load it together with a pristine default
    card (used to detect user modifications), expose its lower-cased keys via
    ``self.py8_vars`` and register the PY8-specific ``set`` shortcuts.
    Always returns [] (PY8 keys are not merged into the generic variable
    list here).
    """
    self.has_PY8 = False
    if not self.get_path('pythia8', cards):
        return []
    self.has_PY8 = True
    self.PY8Card = self.PY8Card_class(self.paths['pythia8'])
    self.PY8CardDefault = self.PY8Card_class()
    self.py8_vars = [k.lower() for k in self.PY8Card.keys()]
    # Shortcut table: name -> (expected argument types, commands to replay).
    # '%(0)s' is substituted with the first user-supplied argument.
    self.special_shortcut.update({
        'simplepy8':([],['pythia8_card hadronlevel:all False',
        'pythia8_card partonlevel:mpi False',
        'pythia8_card BeamRemnants:primordialKT False',
        'pythia8_card PartonLevel:Remnants False',
        'pythia8_card Check:event False',
        'pythia8_card TimeShower:QEDshowerByQ False',
        'pythia8_card TimeShower:QEDshowerByL False',
        'pythia8_card SpaceShower:QEDshowerByQ False',
        'pythia8_card SpaceShower:QEDshowerByL False',
        'pythia8_card PartonLevel:FSRinResonances False',
        'pythia8_card ProcessLevel:resonanceDecays False',
        ]),
        'mpi':([bool],['pythia8_card partonlevel:mpi %(0)s']),
    })
    self.special_shortcut_help.update({
        'simplepy8' : 'Turn off non-perturbative slow features of Pythia8.',
        'mpi' : 'syntax: set mpi value: allow to turn mpi in Pythia8 on/off',
    })
    return []
def init_madspin(self, cards):
    """Register the MadSpin-specific ``set`` shortcuts when a madspin card exists.

    Returns [] in all cases; only the shortcut tables are updated.
    """
    if not self.get_path('madspin', cards):
        return []
    # name -> (expected argument types, card-editing commands to replay);
    # '%(0)s' is substituted with the first user-supplied argument.
    self.special_shortcut.update({
        'spinmode':([str], ['add madspin_card --before_line="launch" set spinmode %(0)s']),
        'nodecay':([], ['edit madspin_card --comment_line="decay"'])
    })
    self.special_shortcut_help.update({
        'spinmode' : 'full|none|onshell. Choose the mode of madspin.\n - full: spin-correlation and off-shell effect\n - onshell: only spin-correlation,]\n - none: no spin-correlation and not offshell effects.',
        'nodecay': 'remove all decay previously defined in madspin',
    })
    return []
def init_delphes(self, cards):
    """Detect the Delphes card and set ``self.has_delphes`` accordingly.

    Returns [] in all cases (Delphes adds no editable variables here).
    """
    self.has_delphes = False
    # BUGFIX: previously probed the 'pythia8' card (copy-paste from
    # init_pythia8), so has_delphes tracked the presence of the Pythia8
    # card instead of the Delphes card.
    if not self.get_path('delphes', cards):
        return []
    self.has_delphes = True
    return []
def set_CM_velocity(self, line):
    """compute sqrts from the velocity in the center of mass frame

    Parses *line* as a float velocity (in units of c) and, for a symmetric
    single-particle initial state, sets ebeam1/ebeam2 and lpp accordingly
    via ``do_set``.
    """
    v = banner_mod.ConfigFile.format_variable(line, float, 'velocity')
    # Define self.proc_characteristics
    self.mother_interface.get_characteristics()
    proc_info = self.mother_interface.proc_characteristics
    if 'pdg_initial1' not in proc_info:
        logger.warning('command not supported')
        # BUGFIX: must stop here -- the code previously fell through and
        # immediately raised KeyError on proc_info['pdg_initial1'] below.
        return
    if len(proc_info['pdg_initial1']) == 1 == len(proc_info['pdg_initial2']) and\
       abs(proc_info['pdg_initial1'][0]) == abs(proc_info['pdg_initial2'][0]):
        m = self.param_card.get_value('mass', abs(proc_info['pdg_initial1'][0]))
        # Relativistic energy per beam: E = m / sqrt(1 - v^2), sqrts = 2E.
        sqrts = 2*m/ math.sqrt(1-v**2)
        self.do_set('run_card ebeam1 %s' % (sqrts/2.0))
        self.do_set('run_card ebeam2 %s' % (sqrts/2.0))
        # No PDF for fixed-energy particle beams.
        self.do_set('run_card lpp 0')
    else:
        logger.warning('This is only possible for a single particle in the initial state')
def do_help(self, line, conflict_raise=False, banner=True):
# TODO nicer factorization !
# try:
if banner:
logger.info('*** HELP MESSAGE ***', '$MG:BOLD')
args = self.split_arg(line)
# handle comand related help
if len(args)==0 or (len(args) == 1 and hasattr(self, 'do_%s' % args[0])):
out = cmd.BasicCmd.do_help(self, line)
if len(args)==0:
print('Allowed Argument')
print('================')
print('\t'.join(self.allow_arg))
print()
print('Special shortcut: (type help <name>)')
print('====================================')
print(' syntax: set <name> <value>')
print('\t'.join(self.special_shortcut))
print()
if banner:
logger.info('*** END HELP ***', '$MG:BOLD')
return out
# check for special shortcut.
# special shortcut:
if args[0] in self.special_shortcut:
if args[0] in self.special_shortcut_help:
print(self.special_shortcut_help[args[0]])
if banner:
logger.info('*** END HELP ***', '$MG:BOLD')
return
start = 0
card = ''
if args[0]+'_card' in self.all_card_name+ self.cards:
args[0] += '_card'
elif args[0]+'.dat' in self.all_card_name+ self.cards:
args[0] += '.dat'
elif args[0]+'_card.dat' in self.all_card_name+ self.cards:
args[0] += '_card.dat'
if args[0] in self.all_card_name + self.cards:
start += 1
card = args[0]
if len(args) == 1:
if args[0] == 'pythia8_card':
args[0] = 'PY8Card'
if args[0] == 'param_card':
logger.info("Param_card information: ", '$MG:color:BLUE')
print("File to define the various model parameter")
logger.info("List of the Block defined:",'$MG:color:BLUE')
print("\t".join(list(self.param_card.keys())))
elif args[0].startswith('madanalysis5'):
print('This card allow to make plot with the madanalysis5 package')
print('An example card is provided. For more information about the ')
print('syntax please refer to: https://madanalysis.irmp.ucl.ac.be/')
print('or to the user manual [arXiv:1206.1599]')
if args[0].startswith('madanalysis5_hadron'):
print()
print('This card also allow to make recasting analysis')
print('For more detail, see: arXiv:1407.3278')
elif hasattr(self, args[0]):
logger.info("%s information: " % args[0], '$MG:color:BLUE')
print((eval('self.%s' % args[0]).__doc__))
logger.info("List of parameter associated", '$MG:color:BLUE')
print("\t".join(list(eval('self.%s' % args[0]).keys())))
if banner:
logger.info('*** END HELP ***', '$MG:BOLD')
return card
#### RUN CARD
if args[start] in [l.lower() for l in self.run_card.keys()] and card in ['', 'run_card']:
if args[start] not in self.run_set:
args[start] = [l for l in self.run_set if l.lower() == args[start]][0]
if args[start] in self.conflict and not conflict_raise:
conflict_raise = True
logger.info('** AMBIGUOUS NAME: %s **', args[start], '$MG:BOLD')
if card == '':
logger.info('** If not explicitely speficy this parameter will modif the run_card file', '$MG:BOLD')
self.run_card.do_help(args[start])
### PARAM_CARD WITH BLOCK NAME -----------------------------------------
elif (args[start] in self.param_card or args[start] == 'width') \
and card in ['','param_card']:
if args[start] in self.conflict and not conflict_raise:
conflict_raise = True
logger.info('** AMBIGUOUS NAME: %s **', args[start], '$MG:BOLD')
if card == '':
logger.info('** If not explicitely speficy this parameter will modif the param_card file', '$MG:BOLD')
if args[start] == 'width':
args[start] = 'decay'
if len(args) == start+1:
self.param_card.do_help(args[start], tuple())
key = None
elif args[start+1] in self.pname2block:
all_var = self.pname2block[args[start+1]]
key = None
for bname, lhaid in all_var:
if bname == args[start]:
key = lhaid
break
else:
logger.warning('%s is not part of block "%s" but "%s". please correct.' %
(args[start+1], args[start], bname))
else:
try:
key = tuple([int(i) for i in args[start+1:]])
except ValueError:
logger.warning('Failed to identify LHA information')
return card
if key in self.param_card[args[start]].param_dict:
self.param_card.do_help(args[start], key, default=self.param_card_default)
elif key:
logger.warning('invalid information: %s not defined in the param_card' % (key,))
# PARAM_CARD NO BLOCK NAME ---------------------------------------------
elif args[start] in self.pname2block and card in ['','param_card']:
if args[start] in self.conflict and not conflict_raise:
conflict_raise = True
logger.info('** AMBIGUOUS NAME: %s **', args[start], '$MG:BOLD')
if card == '':
logger.info('** If not explicitely speficy this parameter will modif the param_card file', '$MG:BOLD')
all_var = self.pname2block[args[start]]
for bname, lhaid in all_var:
new_line = 'param_card %s %s %s' % (bname,
' '.join([ str(i) for i in lhaid]), ' '.join(args[start+1:]))
self.do_help(new_line, conflict_raise=True, banner=False)
# MadLoop Parameter ---------------------------------------------------
elif self.has_ml and args[start] in self.ml_vars \
and card in ['', 'MadLoop_card']:
if args[start] in self.conflict and not conflict_raise:
conflict_raise = True
logger.info('** AMBIGUOUS NAME: %s **', args[start], '$MG:BOLD')
if card == '':
logger.info('** If not explicitely speficy this parameter will modif the madloop_card file', '$MG:BOLD')
self.MLcard.do_help(args[start])
# Pythia8 Parameter ---------------------------------------------------
elif self.has_PY8 and args[start] in self.PY8Card:
if args[start] in self.conflict and not | |
<reponame>rsdoherty/azure-sdk-for-python<filename>sdk/machinelearning/azure-mgmt-machinelearningservices/azure/mgmt/machinelearningservices/models/_models_py3.py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._azure_machine_learning_workspaces_enums import *
class Compute(msrest.serialization.Model):
    """Machine Learning compute object.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, VirtualMachine.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    """

    # msrest validation metadata: required fields must be set before
    # serialization; readonly fields are server-populated only.
    _validation = {
        'compute_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'created_on': {'readonly': True},
        'modified_on': {'readonly': True},
        'provisioning_errors': {'readonly': True},
        'is_attached_compute': {'readonly': True},
    }

    # Maps Python attribute name -> wire (JSON) key and msrest type string.
    _attribute_map = {
        'compute_type': {'key': 'computeType', 'type': 'str'},
        'compute_location': {'key': 'computeLocation', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
    }

    # Polymorphic dispatch: the wire value of 'computeType' selects which
    # concrete model class msrest instantiates on deserialization.
    _subtype_map = {
        'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'VirtualMachine': 'VirtualMachine'}
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        **kwargs
    ):
        """Initialize client-settable fields; server-populated fields start as None."""
        super(Compute, self).__init__(**kwargs)
        # Filled in by the concrete subclass (via _subtype_map) / by the server.
        self.compute_type = None  # type: Optional[str]
        self.compute_location = compute_location
        self.provisioning_state = None
        self.description = description
        self.created_on = None
        self.modified_on = None
        self.resource_id = resource_id
        self.provisioning_errors = None
        self.is_attached_compute = None
class AKS(Compute):
    """An Azure Kubernetes Service (AKS) backed Machine Learning compute.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType
    :param compute_location: Location for the underlying compute.
    :type compute_location: str
    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
    :vartype provisioning_state: str or
     ~azure.mgmt.machinelearningservices.models.ProvisioningState
    :param description: The description of the Machine Learning compute.
    :type description: str
    :ivar created_on: The date and time when the compute was created.
    :vartype created_on: ~datetime.datetime
    :ivar modified_on: The date and time when the compute was last modified.
    :vartype modified_on: ~datetime.datetime
    :param resource_id: ARM resource id of the underlying compute.
    :type resource_id: str
    :ivar provisioning_errors: Errors during provisioning.
    :vartype provisioning_errors:
     list[~azure.mgmt.machinelearningservices.models.MachineLearningServiceError]
    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
     from outside if true, or machine learning service provisioned it if false.
    :vartype is_attached_compute: bool
    :param properties: AKS properties.
    :type properties: ~azure.mgmt.machinelearningservices.models.AKSProperties
    """

    # msrest validation rules: the discriminator is mandatory; the remaining
    # flagged fields are populated by the service and therefore read-only.
    _validation = {
        "compute_type": {"required": True},
        "provisioning_state": {"readonly": True},
        "created_on": {"readonly": True},
        "modified_on": {"readonly": True},
        "provisioning_errors": {"readonly": True},
        "is_attached_compute": {"readonly": True},
    }

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        "compute_type": {"key": "computeType", "type": "str"},
        "compute_location": {"key": "computeLocation", "type": "str"},
        "provisioning_state": {"key": "provisioningState", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "created_on": {"key": "createdOn", "type": "iso-8601"},
        "modified_on": {"key": "modifiedOn", "type": "iso-8601"},
        "resource_id": {"key": "resourceId", "type": "str"},
        "provisioning_errors": {"key": "provisioningErrors", "type": "[MachineLearningServiceError]"},
        "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"},
        "properties": {"key": "properties", "type": "AKSProperties"},
    }

    def __init__(
        self,
        *,
        compute_location: Optional[str] = None,
        description: Optional[str] = None,
        resource_id: Optional[str] = None,
        properties: Optional["AKSProperties"] = None,
        **kwargs
    ):
        # Delegate the shared Compute fields to the base class, then pin the
        # polymorphic discriminator and attach the AKS-specific payload.
        super(AKS, self).__init__(
            compute_location=compute_location,
            description=description,
            resource_id=resource_id,
            **kwargs
        )
        self.compute_type = 'AKS'  # type: str
        self.properties = properties
class ComputeSecrets(msrest.serialization.Model):
    """Secrets related to a Machine Learning compute. Might differ for every type of compute.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType
    """

    # Only the polymorphic discriminator is required on this base model.
    _validation = {"compute_type": {"required": True}}

    _attribute_map = {"compute_type": {"key": "computeType", "type": "str"}}

    # Tells msrest which subclass to instantiate for each discriminator value
    # during deserialization.
    _subtype_map = {
        "compute_type": {
            "AKS": "AksComputeSecrets",
            "Databricks": "DatabricksComputeSecrets",
            "VirtualMachine": "VirtualMachineSecrets",
        }
    }

    def __init__(self, **kwargs):
        super(ComputeSecrets, self).__init__(**kwargs)
        # Left unset here; concrete subclasses assign their discriminator value.
        self.compute_type = None  # type: Optional[str]
class AksComputeSecrets(ComputeSecrets):
    """Secrets related to a Machine Learning compute based on AKS.

    All required parameters must be populated in order to send to Azure.

    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
     "Databricks", "DataLakeAnalytics".
    :type compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType
    :param user_kube_config: Content of kubeconfig file that can be used to connect to the
     Kubernetes cluster.
    :type user_kube_config: str
    :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
     Kubernetes cluster.
    :type admin_kube_config: str
    :param image_pull_secret_name: Image registry pull secret.
    :type image_pull_secret_name: str
    """

    _validation = {"compute_type": {"required": True}}

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        "compute_type": {"key": "computeType", "type": "str"},
        "user_kube_config": {"key": "userKubeConfig", "type": "str"},
        "admin_kube_config": {"key": "adminKubeConfig", "type": "str"},
        "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"},
    }

    def __init__(
        self,
        *,
        user_kube_config: Optional[str] = None,
        admin_kube_config: Optional[str] = None,
        image_pull_secret_name: Optional[str] = None,
        **kwargs
    ):
        super(AksComputeSecrets, self).__init__(**kwargs)
        # Pin the polymorphic discriminator for this concrete secrets type.
        self.compute_type = 'AKS'  # type: str
        self.user_kube_config = user_kube_config
        self.admin_kube_config = admin_kube_config
        self.image_pull_secret_name = image_pull_secret_name
class AksNetworkingConfiguration(msrest.serialization.Model):
    """Advance configuration for AKS networking.

    :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
    :type subnet_id: str
    :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
     not overlap with any Subnet IP ranges.
    :type service_cidr: str
    :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
     the Kubernetes service address range specified in serviceCidr.
    :type dns_service_ip: str
    :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
     must not overlap with any Subnet IP ranges or the Kubernetes service address range.
    :type docker_bridge_cidr: str
    """

    # Server-side format validation: CIDR ranges for the two *_cidr fields and
    # a dotted-quad IPv4 address for the DNS service IP.
    _validation = {
        "service_cidr": {"pattern": r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
        "dns_service_ip": {"pattern": r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
        "docker_bridge_cidr": {"pattern": r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
    }

    # Maps Python attribute names to their wire (JSON) keys and msrest types.
    _attribute_map = {
        "subnet_id": {"key": "subnetId", "type": "str"},
        "service_cidr": {"key": "serviceCidr", "type": "str"},
        "dns_service_ip": {"key": "dnsServiceIP", "type": "str"},
        "docker_bridge_cidr": {"key": "dockerBridgeCidr", "type": "str"},
    }

    def __init__(
        self,
        *,
        subnet_id: Optional[str] = None,
        service_cidr: Optional[str] = None,
        dns_service_ip: Optional[str] = None,
        docker_bridge_cidr: Optional[str] = None,
        **kwargs
    ):
        super(AksNetworkingConfiguration, self).__init__(**kwargs)
        self.subnet_id = subnet_id
        self.service_cidr = service_cidr
        self.dns_service_ip = dns_service_ip
        self.docker_bridge_cidr = docker_bridge_cidr
class AKSProperties(msrest.serialization.Model):
"""AKS properties.
Variables are only populated by the server, and will be ignored when sending a request.
:param cluster_fqdn: Cluster full qualified domain name.
:type cluster_fqdn: str
:ivar system_services: System services.
:vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService]
:param agent_count: Number of agents.
:type agent_count: int
:param agent_vm_size: Agent virtual machine size.
:type agent_vm_size: str
:param ssl_configuration: SSL configuration.
:type ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration
:param aks_networking_configuration: AKS networking configuration for vnet.
:type aks_networking_configuration:
~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration
"""
_validation = {
'system_services': {'readonly': True},
'agent_count': {'minimum': 1},
}
_attribute_map = {
'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
'agent_count': {'key': 'agentCount', 'type': 'int'},
'agent_vm_size': {'key': 'agentVMSize', | |
0)
# Machine-generated linear coupling constraints (Pyomo translation of a
# solver model).  Each block of 15 constraints ties one group of variables
# m.x<b>..m.x<b+14> (three sub-groups of five) to the variables m.x22NN.
# The coefficient pairs are (tau, tau**2/2) for tau = 0.5,
# tau = 0.88729833462074 and tau = 0.11270166537926; the latter two equal
# (1 +/- sqrt(3/5))/2, which suggests these are collocation points of a
# time-discretization scheme -- TODO confirm against the generating model.
m.c1228 = Constraint(expr= - m.x718 - 0.11270166537926*m.x723 - 0.00635083268962935*m.x728 + m.x2228 == 0)
m.c1229 = Constraint(expr= - m.x719 - 0.11270166537926*m.x724 - 0.00635083268962935*m.x729 + m.x2229 == 0)
m.c1230 = Constraint(expr= - m.x720 - 0.11270166537926*m.x725 - 0.00635083268962935*m.x730 + m.x2230 == 0)
# group m.x731..m.x745 -> m.x2231..m.x2245
m.c1231 = Constraint(expr= - m.x731 - 0.5*m.x736 - 0.125*m.x741 + m.x2231 == 0)
m.c1232 = Constraint(expr= - m.x732 - 0.5*m.x737 - 0.125*m.x742 + m.x2232 == 0)
m.c1233 = Constraint(expr= - m.x733 - 0.5*m.x738 - 0.125*m.x743 + m.x2233 == 0)
m.c1234 = Constraint(expr= - m.x734 - 0.5*m.x739 - 0.125*m.x744 + m.x2234 == 0)
m.c1235 = Constraint(expr= - m.x735 - 0.5*m.x740 - 0.125*m.x745 + m.x2235 == 0)
m.c1236 = Constraint(expr= - m.x731 - 0.88729833462074*m.x736 - 0.393649167310369*m.x741 + m.x2236 == 0)
m.c1237 = Constraint(expr= - m.x732 - 0.88729833462074*m.x737 - 0.393649167310369*m.x742 + m.x2237 == 0)
m.c1238 = Constraint(expr= - m.x733 - 0.88729833462074*m.x738 - 0.393649167310369*m.x743 + m.x2238 == 0)
m.c1239 = Constraint(expr= - m.x734 - 0.88729833462074*m.x739 - 0.393649167310369*m.x744 + m.x2239 == 0)
m.c1240 = Constraint(expr= - m.x735 - 0.88729833462074*m.x740 - 0.393649167310369*m.x745 + m.x2240 == 0)
m.c1241 = Constraint(expr= - m.x731 - 0.11270166537926*m.x736 - 0.00635083268962935*m.x741 + m.x2241 == 0)
m.c1242 = Constraint(expr= - m.x732 - 0.11270166537926*m.x737 - 0.00635083268962935*m.x742 + m.x2242 == 0)
m.c1243 = Constraint(expr= - m.x733 - 0.11270166537926*m.x738 - 0.00635083268962935*m.x743 + m.x2243 == 0)
m.c1244 = Constraint(expr= - m.x734 - 0.11270166537926*m.x739 - 0.00635083268962935*m.x744 + m.x2244 == 0)
m.c1245 = Constraint(expr= - m.x735 - 0.11270166537926*m.x740 - 0.00635083268962935*m.x745 + m.x2245 == 0)
# group m.x746..m.x760 -> m.x2246..m.x2260
m.c1246 = Constraint(expr= - m.x746 - 0.5*m.x751 - 0.125*m.x756 + m.x2246 == 0)
m.c1247 = Constraint(expr= - m.x747 - 0.5*m.x752 - 0.125*m.x757 + m.x2247 == 0)
m.c1248 = Constraint(expr= - m.x748 - 0.5*m.x753 - 0.125*m.x758 + m.x2248 == 0)
m.c1249 = Constraint(expr= - m.x749 - 0.5*m.x754 - 0.125*m.x759 + m.x2249 == 0)
m.c1250 = Constraint(expr= - m.x750 - 0.5*m.x755 - 0.125*m.x760 + m.x2250 == 0)
m.c1251 = Constraint(expr= - m.x746 - 0.88729833462074*m.x751 - 0.393649167310369*m.x756 + m.x2251 == 0)
m.c1252 = Constraint(expr= - m.x747 - 0.88729833462074*m.x752 - 0.393649167310369*m.x757 + m.x2252 == 0)
m.c1253 = Constraint(expr= - m.x748 - 0.88729833462074*m.x753 - 0.393649167310369*m.x758 + m.x2253 == 0)
m.c1254 = Constraint(expr= - m.x749 - 0.88729833462074*m.x754 - 0.393649167310369*m.x759 + m.x2254 == 0)
m.c1255 = Constraint(expr= - m.x750 - 0.88729833462074*m.x755 - 0.393649167310369*m.x760 + m.x2255 == 0)
m.c1256 = Constraint(expr= - m.x746 - 0.11270166537926*m.x751 - 0.00635083268962935*m.x756 + m.x2256 == 0)
m.c1257 = Constraint(expr= - m.x747 - 0.11270166537926*m.x752 - 0.00635083268962935*m.x757 + m.x2257 == 0)
m.c1258 = Constraint(expr= - m.x748 - 0.11270166537926*m.x753 - 0.00635083268962935*m.x758 + m.x2258 == 0)
m.c1259 = Constraint(expr= - m.x749 - 0.11270166537926*m.x754 - 0.00635083268962935*m.x759 + m.x2259 == 0)
m.c1260 = Constraint(expr= - m.x750 - 0.11270166537926*m.x755 - 0.00635083268962935*m.x760 + m.x2260 == 0)
# group m.x761..m.x775 -> m.x2261..m.x2275
m.c1261 = Constraint(expr= - m.x761 - 0.5*m.x766 - 0.125*m.x771 + m.x2261 == 0)
m.c1262 = Constraint(expr= - m.x762 - 0.5*m.x767 - 0.125*m.x772 + m.x2262 == 0)
m.c1263 = Constraint(expr= - m.x763 - 0.5*m.x768 - 0.125*m.x773 + m.x2263 == 0)
m.c1264 = Constraint(expr= - m.x764 - 0.5*m.x769 - 0.125*m.x774 + m.x2264 == 0)
m.c1265 = Constraint(expr= - m.x765 - 0.5*m.x770 - 0.125*m.x775 + m.x2265 == 0)
m.c1266 = Constraint(expr= - m.x761 - 0.88729833462074*m.x766 - 0.393649167310369*m.x771 + m.x2266 == 0)
m.c1267 = Constraint(expr= - m.x762 - 0.88729833462074*m.x767 - 0.393649167310369*m.x772 + m.x2267 == 0)
m.c1268 = Constraint(expr= - m.x763 - 0.88729833462074*m.x768 - 0.393649167310369*m.x773 + m.x2268 == 0)
m.c1269 = Constraint(expr= - m.x764 - 0.88729833462074*m.x769 - 0.393649167310369*m.x774 + m.x2269 == 0)
m.c1270 = Constraint(expr= - m.x765 - 0.88729833462074*m.x770 - 0.393649167310369*m.x775 + m.x2270 == 0)
m.c1271 = Constraint(expr= - m.x761 - 0.11270166537926*m.x766 - 0.00635083268962935*m.x771 + m.x2271 == 0)
m.c1272 = Constraint(expr= - m.x762 - 0.11270166537926*m.x767 - 0.00635083268962935*m.x772 + m.x2272 == 0)
m.c1273 = Constraint(expr= - m.x763 - 0.11270166537926*m.x768 - 0.00635083268962935*m.x773 + m.x2273 == 0)
m.c1274 = Constraint(expr= - m.x764 - 0.11270166537926*m.x769 - 0.00635083268962935*m.x774 + m.x2274 == 0)
m.c1275 = Constraint(expr= - m.x765 - 0.11270166537926*m.x770 - 0.00635083268962935*m.x775 + m.x2275 == 0)
# group m.x776..m.x790 -> m.x2276..m.x2290
m.c1276 = Constraint(expr= - m.x776 - 0.5*m.x781 - 0.125*m.x786 + m.x2276 == 0)
m.c1277 = Constraint(expr= - m.x777 - 0.5*m.x782 - 0.125*m.x787 + m.x2277 == 0)
m.c1278 = Constraint(expr= - m.x778 - 0.5*m.x783 - 0.125*m.x788 + m.x2278 == 0)
m.c1279 = Constraint(expr= - m.x779 - 0.5*m.x784 - 0.125*m.x789 + m.x2279 == 0)
m.c1280 = Constraint(expr= - m.x780 - 0.5*m.x785 - 0.125*m.x790 + m.x2280 == 0)
m.c1281 = Constraint(expr= - m.x776 - 0.88729833462074*m.x781 - 0.393649167310369*m.x786 + m.x2281 == 0)
m.c1282 = Constraint(expr= - m.x777 - 0.88729833462074*m.x782 - 0.393649167310369*m.x787 + m.x2282 == 0)
m.c1283 = Constraint(expr= - m.x778 - 0.88729833462074*m.x783 - 0.393649167310369*m.x788 + m.x2283 == 0)
m.c1284 = Constraint(expr= - m.x779 - 0.88729833462074*m.x784 - 0.393649167310369*m.x789 + m.x2284 == 0)
m.c1285 = Constraint(expr= - m.x780 - 0.88729833462074*m.x785 - 0.393649167310369*m.x790 + m.x2285 == 0)
m.c1286 = Constraint(expr= - m.x776 - 0.11270166537926*m.x781 - 0.00635083268962935*m.x786 + m.x2286 == 0)
m.c1287 = Constraint(expr= - m.x777 - 0.11270166537926*m.x782 - 0.00635083268962935*m.x787 + m.x2287 == 0)
m.c1288 = Constraint(expr= - m.x778 - 0.11270166537926*m.x783 - 0.00635083268962935*m.x788 + m.x2288 == 0)
m.c1289 = Constraint(expr= - m.x779 - 0.11270166537926*m.x784 - 0.00635083268962935*m.x789 + m.x2289 == 0)
m.c1290 = Constraint(expr= - m.x780 - 0.11270166537926*m.x785 - 0.00635083268962935*m.x790 + m.x2290 == 0)
# group m.x791..m.x805 -> m.x2291..m.x2305
m.c1291 = Constraint(expr= - m.x791 - 0.5*m.x796 - 0.125*m.x801 + m.x2291 == 0)
m.c1292 = Constraint(expr= - m.x792 - 0.5*m.x797 - 0.125*m.x802 + m.x2292 == 0)
m.c1293 = Constraint(expr= - m.x793 - 0.5*m.x798 - 0.125*m.x803 + m.x2293 == 0)
m.c1294 = Constraint(expr= - m.x794 - 0.5*m.x799 - 0.125*m.x804 + m.x2294 == 0)
m.c1295 = Constraint(expr= - m.x795 - 0.5*m.x800 - 0.125*m.x805 + m.x2295 == 0)
m.c1296 = Constraint(expr= - m.x791 - 0.88729833462074*m.x796 - 0.393649167310369*m.x801 + m.x2296 == 0)
m.c1297 = Constraint(expr= - m.x792 - 0.88729833462074*m.x797 - 0.393649167310369*m.x802 + m.x2297 == 0)
m.c1298 = Constraint(expr= - m.x793 - 0.88729833462074*m.x798 - 0.393649167310369*m.x803 + m.x2298 == 0)
m.c1299 = Constraint(expr= - m.x794 - 0.88729833462074*m.x799 - 0.393649167310369*m.x804 + m.x2299 == 0)
m.c1300 = Constraint(expr= - m.x795 - 0.88729833462074*m.x800 - 0.393649167310369*m.x805 + m.x2300 == 0)
m.c1301 = Constraint(expr= - m.x791 - 0.11270166537926*m.x796 - 0.00635083268962935*m.x801 + m.x2301 == 0)
m.c1302 = Constraint(expr= - m.x792 - 0.11270166537926*m.x797 - 0.00635083268962935*m.x802 + m.x2302 == 0)
m.c1303 = Constraint(expr= - m.x793 - 0.11270166537926*m.x798 - 0.00635083268962935*m.x803 + m.x2303 == 0)
m.c1304 = Constraint(expr= - m.x794 - 0.11270166537926*m.x799 - 0.00635083268962935*m.x804 + m.x2304 == 0)
m.c1305 = Constraint(expr= - m.x795 - 0.11270166537926*m.x800 - 0.00635083268962935*m.x805 + m.x2305 == 0)
# group m.x806..m.x820 -> m.x2306..m.x2320
m.c1306 = Constraint(expr= - m.x806 - 0.5*m.x811 - 0.125*m.x816 + m.x2306 == 0)
m.c1307 = Constraint(expr= - m.x807 - 0.5*m.x812 - 0.125*m.x817 + m.x2307 == 0)
m.c1308 = Constraint(expr= - m.x808 - 0.5*m.x813 - 0.125*m.x818 + m.x2308 == 0)
m.c1309 = Constraint(expr= - m.x809 - 0.5*m.x814 - 0.125*m.x819 + m.x2309 == 0)
m.c1310 = Constraint(expr= - m.x810 - 0.5*m.x815 - 0.125*m.x820 + m.x2310 == 0)
m.c1311 = Constraint(expr= - m.x806 - 0.88729833462074*m.x811 - 0.393649167310369*m.x816 + m.x2311 == 0)
m.c1312 = Constraint(expr= - m.x807 - 0.88729833462074*m.x812 - 0.393649167310369*m.x817 + m.x2312 == 0)
m.c1313 = Constraint(expr= - m.x808 - 0.88729833462074*m.x813 - 0.393649167310369*m.x818 + m.x2313 == 0)
m.c1314 = Constraint(expr= - m.x809 - 0.88729833462074*m.x814 - 0.393649167310369*m.x819 + m.x2314 == 0)
m.c1315 = Constraint(expr= - m.x810 - 0.88729833462074*m.x815 - 0.393649167310369*m.x820 + m.x2315 == 0)
m.c1316 = Constraint(expr= - m.x806 - 0.11270166537926*m.x811 - 0.00635083268962935*m.x816 + m.x2316 == 0)
m.c1317 = Constraint(expr= - m.x807 - 0.11270166537926*m.x812 - 0.00635083268962935*m.x817 + m.x2317 == 0)
m.c1318 = Constraint(expr= - m.x808 - 0.11270166537926*m.x813 - 0.00635083268962935*m.x818 + m.x2318 == 0)
m.c1319 = Constraint(expr= - m.x809 - 0.11270166537926*m.x814 - 0.00635083268962935*m.x819 + m.x2319 == 0)
m.c1320 = Constraint(expr= - m.x810 - 0.11270166537926*m.x815 - 0.00635083268962935*m.x820 + m.x2320 == 0)
# group m.x821..m.x835 -> m.x2321..m.x2335 (continues past this chunk)
m.c1321 = Constraint(expr= - m.x821 - 0.5*m.x826 - 0.125*m.x831 + m.x2321 == 0)
m.c1322 = Constraint(expr= - m.x822 - 0.5*m.x827 - 0.125*m.x832 + m.x2322 == 0)
m.c1323 = Constraint(expr= - m.x823 - 0.5*m.x828 - 0.125*m.x833 + m.x2323 == 0)
m.c1324 = Constraint(expr= - m.x824 - 0.5*m.x829 - 0.125*m.x834 + m.x2324 == 0)
m.c1325 = Constraint(expr= - m.x825 - 0.5*m.x830 - 0.125*m.x835 + m.x2325 == 0)
m.c1326 = Constraint(expr= - m.x821 - 0.88729833462074*m.x826 - 0.393649167310369*m.x831 + m.x2326 == 0)
m.c1327 = Constraint(expr= - m.x822 - 0.88729833462074*m.x827 - 0.393649167310369*m.x832 + m.x2327 == 0)
m.c1328 = Constraint(expr= - m.x823 - 0.88729833462074*m.x828 - 0.393649167310369*m.x833 + m.x2328 == 0)
m.c1329 = Constraint(expr= - m.x824 - 0.88729833462074*m.x829 - 0.393649167310369*m.x834 + m.x2329 == 0)
m.c1330 = Constraint(expr= - m.x825 - 0.88729833462074*m.x830 - 0.393649167310369*m.x835 + m.x2330 == 0)
m.c1331 = Constraint(expr= - m.x821 - 0.11270166537926*m.x826 - 0.00635083268962935*m.x831 + m.x2331 == 0)
m.c1332 = Constraint(expr= - m.x822 - 0.11270166537926*m.x827 - 0.00635083268962935*m.x832 + m.x2332 == 0)
m.c1333 = Constraint(expr= - m.x823 - 0.11270166537926*m.x828 - 0.00635083268962935*m.x833 + m.x2333 == 0)
m.c1334 = Constraint(expr= - m.x824 - 0.11270166537926*m.x829 - | |
TODO: @montoyjh: what if it's a cubic system? don't need 6. -computron
# TODO: Can add population method but want to think about how it should
# be done. -montoyjh
order = self.get('order', 2)
if order > 2:
method = 'finite_difference'
else:
method = self.get('fitting_method', 'finite_difference')
if method == 'finite_difference':
result = ElasticTensorExpansion.from_diff_fit(
strains, pk_stresses, eq_stress=eq_stress, order=order)
if order == 2:
result = ElasticTensor(result[0])
elif method == 'pseudoinverse':
result = ElasticTensor.from_pseudoinverse(strains, pk_stresses)
elif method == 'independent':
result = ElasticTensor.from_independent_strains(strains, pk_stresses, eq_stress=eq_stress)
else:
raise ValueError("Unsupported method, method must be finite_difference, "
"pseudoinverse, or independent")
ieee = result.convert_to_ieee(ref_struct)
d.update({
"elastic_tensor": {
"raw": result.voigt,
"ieee_format": ieee.voigt
}
})
if order == 2:
d.update({"derived_properties": ieee.get_structure_property_dict(ref_struct)})
else:
soec = ElasticTensor(ieee[0])
d.update({"derived_properties": soec.get_structure_property_dict(ref_struct)})
d["formula_pretty"] = ref_struct.composition.reduced_formula
d["fitting_method"] = method
d["order"] = order
d = jsanitize(d)
# Save analysis results in json or db
db_file = env_chk(self.get('db_file'), fw_spec)
if not db_file:
with open("elasticity.json", "w") as f:
f.write(json.dumps(d, default=DATETIME_HANDLER))
else:
db = VaspCalcDb.from_db_file(db_file, admin=True)
db.collection = db.db["elasticity"]
db.collection.insert_one(d)
logger.info("Elastic analysis complete.")
return FWAction()
@explicit_serialize
class RamanTensorToDb(FiretaskBase):
    """
    Assemble the Raman susceptibility tensor for each normal mode as the finite-difference
    derivative of the dielectric tensor with respect to the displacement along that mode.

    See: 10.1103/PhysRevB.73.104304.

    The frequencies are in the units of cm^-1. To convert the frequency to THz: multiply by 0.1884.

    optional_params:
        db_file (str): path to the db file
    """

    optional_params = ["db_file"]

    def run_task(self, fw_spec):
        # Normal-mode data gathered by the preceding steps of the workflow.
        eigvecs = np.array(fw_spec["normalmodes"]["eigenvecs"])
        eigvals = np.array(fw_spec["normalmodes"]["eigenvals"])
        structure = fw_spec["normalmodes"]["structure"]

        # Mass-weight the eigenvector norms: eigenvectors in vasprun.xml are
        # not divided by sqrt(M_i).
        masses = np.array([site.specie.data['Atomic mass'] for site in structure])
        mode_norms = np.linalg.norm(eigvecs, axis=2) / np.sqrt(masses)

        # The eigenvalues read from vasprun.xml must be multiplied by -1, so
        # frequency_i = sqrt(-e_i).  Multiplying sqrt(-e_i) by 82.995 yields
        # cm^-1 (15.633 would yield THz).
        frequencies = np.sqrt(np.abs(eigvals)) * 82.995  # cm^-1

        summary = {
            "structure": structure.as_dict(),
            "formula_pretty": structure.composition.reduced_formula,
            "normalmodes": {
                "eigenvals": fw_spec["normalmodes"]["eigenvals"],
                "eigenvecs": fw_spec["normalmodes"]["eigenvecs"],
            },
            "frequencies": frequencies.tolist(),
        }

        # Collect the (displacement, epsilon) pairs of every calculation,
        # grouped by the mode they were displaced along.
        eps_by_mode = defaultdict(list)
        for entry in fw_spec["raman_epsilon"].values():
            eps_by_mode[entry["mode"]].append([entry["displacement"], entry["epsilon"]])

        # Raman tensor = finite-difference derivative of epsilon wrt displacement.
        raman_tensors = {}
        scale = np.sqrt(structure.volume / 2.0) / 4.0 / np.pi
        for mode, pairs in eps_by_mode.items():
            tensor = (np.array(pairs[0][1]) - np.array(pairs[1][1])) / (pairs[0][0] - pairs[1][0])
            omega = frequencies[mode]  # cm^-1
            if eigvals[mode] > 0:
                logger.warning("Mode: {} is UNSTABLE. Freq(cm^-1) = {}".format(mode, -omega))
            tensor = scale * tensor * np.sum(mode_norms[mode]) / np.sqrt(omega)
            raman_tensors[str(mode)] = tensor.tolist()
        summary["raman_tensor"] = raman_tensors
        summary["state"] = "successful"

        # Persist the results: to the configured db, or to a local json file.
        db_file = env_chk(self.get("db_file"), fw_spec)
        if not db_file:
            with open("raman.json", "w") as f:
                f.write(json.dumps(summary, default=DATETIME_HANDLER))
        else:
            db = VaspCalcDb.from_db_file(db_file, admin=True)
            db.collection = db.db["raman"]
            db.collection.insert_one(summary)
        logger.info("Raman tensor calculation complete.")
        return FWAction()
# TODO: @computron: this requires a "tasks" collection to proceed. Merits of changing to FW passing
# method? -computron
# TODO: @computron: even if you use the db-centric method, embed information in tags rather than
# task_label? This workflow likely requires review with its authors. -computron
@explicit_serialize
class GibbsAnalysisToDb(FiretaskBase):
    """
    Compute the quasi-harmonic gibbs free energy. There are 2 options available for the
    quasi-harmonic approximation (set via 'qha_type' parameter):
    1. use the phonopy package quasi-harmonic approximation interface or
    2. use the debye model.

    Note: Instead of relying on fw_spec, this task gets the required data directly from the
    tasks collection for processing. The summary dict is written to 'gibbs.json' file.

    required_params:
        tag (str): unique tag appended to the task labels in other fireworks so that all the
            required data can be queried directly from the database.
        db_file (str): path to the db file

    optional_params:
        qha_type(str): quasi-harmonic approximation type: "debye_model" or "phonopy",
            default is "debye_model"
        t_min (float): min temperature
        t_step (float): temperature step
        t_max (float): max temperature
        mesh (list/tuple): reciprocal space density
        eos (str): equation of state used for fitting the energies and the volumes.
            options supported by phonopy: "vinet", "murnaghan", "birch_murnaghan".
        pressure (float): in GPa, optional.
        poisson (float): poisson ratio. Defaults to 0.25.
        anharmonic_contribution (bool): consider anharmonic contributions to
            Gibbs energy from the Debye model. Defaults to False.
        metadata (dict): meta data
    """

    required_params = ["tag", "db_file"]
    optional_params = ["qha_type", "t_min", "t_step", "t_max", "mesh", "eos",
                       "pressure", "poisson", "anharmonic_contribution", "metadata"]

    def run_task(self, fw_spec):
        gibbs_dict = {}

        # Resolve all task parameters, falling back to the documented defaults.
        tag = self["tag"]
        t_step = self.get("t_step", 10)
        t_min = self.get("t_min", 0)
        t_max = self.get("t_max", 1000)
        mesh = self.get("mesh", [20, 20, 20])
        eos = self.get("eos", "vinet")
        qha_type = self.get("qha_type", "debye_model")
        pressure = self.get("pressure", 0.0)
        poisson = self.get("poisson", 0.25)
        anharmonic_contribution = self.get("anharmonic_contribution", False)
        gibbs_dict["metadata"] = self.get("metadata", {})

        db_file = env_chk(self.get("db_file"), fw_spec)
        mmdb = VaspCalcDb.from_db_file(db_file, admin=True)

        # get the optimized structure (queried by the tag embedded in the task label)
        d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)},
                                     {"calcs_reversed": 1})
        structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
        gibbs_dict["structure"] = structure.as_dict()
        gibbs_dict["formula_pretty"] = structure.composition.reduced_formula

        # get the data(energy, volume, force constant) from the deformation runs;
        # force constants are only needed for the phonopy route.
        docs = mmdb.collection.find({"task_label": {"$regex": "{} gibbs*".format(tag)},
                                     "formula_pretty": structure.composition.reduced_formula},
                                    {"calcs_reversed": 1})
        energies = []
        volumes = []
        force_constants = []
        for d in docs:
            s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
            energies.append(d["calcs_reversed"][-1]["output"]['energy'])
            if qha_type not in ["debye_model"]:
                force_constants.append(d["calcs_reversed"][-1]["output"]['force_constants'])
            volumes.append(s.volume)
        gibbs_dict["energies"] = energies
        gibbs_dict["volumes"] = volumes
        if qha_type not in ["debye_model"]:
            gibbs_dict["force_constants"] = force_constants

        try:
            # use quasi-harmonic debye approximation
            if qha_type in ["debye_model"]:

                from pymatgen.analysis.quasiharmonic import QuasiharmonicDebyeApprox

                qhda = QuasiharmonicDebyeApprox(energies, volumes, structure, t_min, t_step, t_max,
                                                eos, pressure=pressure, poisson=poisson,
                                                anharmonic_contribution=anharmonic_contribution)
                gibbs_dict.update(qhda.get_summary_dict())
                gibbs_dict["anharmonic_contribution"] = anharmonic_contribution
                gibbs_dict["success"] = True

            # use the phonopy interface
            else:

                from atomate.vasp.analysis.phonopy import get_phonopy_gibbs

                G, T = get_phonopy_gibbs(energies, volumes, force_constants, structure, t_min,
                                         t_step, t_max, mesh, eos, pressure)
                gibbs_dict["gibbs_free_energy"] = G
                gibbs_dict["temperatures"] = T
                gibbs_dict["success"] = True

        # quasi-harmonic analysis failed, set the flag to false.
        # NOTE: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; `except Exception` keeps the best-effort behavior
        # without masking interpreter-level signals.
        except Exception:
            import traceback

            logger.warning("Quasi-harmonic analysis failed!")
            gibbs_dict["success"] = False
            gibbs_dict["traceback"] = traceback.format_exc()

        gibbs_dict['metadata'].update({"task_label_tag": tag})
        gibbs_dict["created_at"] = datetime.utcnow()

        gibbs_dict = jsanitize(gibbs_dict)

        # TODO: @matk86: add a list of task_ids that were used to construct the analysis to DB?
        # -computron
        if not db_file:
            dump_file = "gibbs.json"
            logger.info("Dumping the analysis summary to {}".format(dump_file))
            with open(dump_file, "w") as f:
                f.write(json.dumps(gibbs_dict, default=DATETIME_HANDLER))
        else:
            coll = mmdb.db["gibbs_tasks"]
            coll.insert_one(gibbs_dict)

        logger.info("Gibbs free energy calculation complete.")

        # Defuse the downstream fireworks if the analysis failed.
        if not gibbs_dict["success"]:
            return FWAction(defuse_children=True)
# TODO: @computron: review method of data passing with the workflow authors. -computron
@explicit_serialize
class FitEOSToDb(FiretaskBase):
"""
Retrieve the energy and volume data and fit it to the given equation of state. The summary dict
is written to 'bulk_modulus.json' file.
Required parameters:
tag (str): unique tag appended to the task labels in other fireworks so that all the
required data can be queried directly from the database.
db_file (str): path to the db file
Optional parameters:
to_db (bool): if True, the data will be inserted to "eos" collection; otherwise, dumped to a .json file.
eos (str): equation of state used for fitting the energies and the volumes.
options supported by pymatgen: "quadratic", "murnaghan", "birch", "birch_murnaghan",
"pourier_tarantola", "vinet", "deltafactor". Default: "vinet"
"""
required_params = ["tag", "db_file"]
optional_params = ["to_db", "eos"]
def run_task(self, fw_spec):
from pymatgen.analysis.eos import EOS
eos = self.get("eos", "vinet")
tag = self["tag"]
db_file = env_chk(self.get("db_file"), fw_spec)
summary_dict = {"eos": eos}
to_db = self.get("to_db", True)
# collect and store task_id of all related tasks to make unique links with "tasks" collection
all_task_ids = []
mmdb = VaspCalcDb.from_db_file(db_file, admin=True)
# get the optimized structure
d = mmdb.collection.find_one({"task_label": "{} structure optimization".format(tag)})
all_task_ids.append(d["task_id"])
structure = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
summary_dict["structure"] = structure.as_dict()
summary_dict["formula_pretty"] = structure.composition.reduced_formula
# get the data(energy, volume, force constant) from the deformation runs
docs = mmdb.collection.find({"task_label": {"$regex": "{} bulk_modulus*".format(tag)},
"formula_pretty": structure.composition.reduced_formula})
energies = []
volumes = []
for d in docs:
s = Structure.from_dict(d["calcs_reversed"][-1]["output"]['structure'])
energies.append(d["calcs_reversed"][-1]["output"]['energy'])
volumes.append(s.volume)
all_task_ids.append(d["task_id"])
summary_dict["energies"] = energies
summary_dict["volumes"] = volumes
summary_dict["all_task_ids"] = all_task_ids
# fit the equation of state
eos = EOS(eos)
eos_fit = eos.fit(volumes, energies)
summary_dict["bulk_modulus"] = eos_fit.b0_GPa
# TODO: find a better way for passing tags of the entire workflow to db - albalu
if fw_spec.get("tags", None):
summary_dict["tags"] = fw_spec["tags"]
summary_dict["results"] = dict(eos_fit.results)
summary_dict["created_at"] = datetime.utcnow()
# db_file itself is required but the user can choose to pass the results to db or not
if to_db:
mmdb.collection | |
reset_help_menu_entries(self):
"Update the additional help entries on the Help menu"
help_list = idleConf.GetAllExtraHelpSourcesList()
helpmenu = self.menudict['help']
# first delete the extra help entries, if any
helpmenu_length = helpmenu.index(END)
if helpmenu_length > self.base_helpmenu_length:
helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
# then rebuild them
if help_list:
helpmenu.add_separator()
for entry in help_list:
cmd = self.__extra_help_callback(entry[1])
helpmenu.add_command(label=entry[0], command=cmd)
# and update the menu dictionary
self.menudict['help'] = helpmenu
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
if not helpfile.startswith(('www', 'http')):
helpfile = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(helpfile)
return display_extra_help
    def update_recent_files_list(self, new_file=None):
        "Load and update the recent files list and menus"
        # Entries on disk are newline-terminated absolute paths, most recent first.
        rf_list = []
        if os.path.exists(self.recent_files_path):
            with open(self.recent_files_path, 'r') as rf_list_file:
                rf_list = rf_list_file.readlines()
        if new_file:
            new_file = os.path.abspath(new_file) + '\n'
            if new_file in rf_list:
                rf_list.remove(new_file)  # move to top
            rf_list.insert(0, new_file)
        # clean and save the recent files list
        bad_paths = []
        for path in rf_list:
            # path[0:-1] strips the trailing newline before the existence check.
            if '\0' in path or not os.path.exists(path[0:-1]):
                bad_paths.append(path)
        rf_list = [path for path in rf_list if path not in bad_paths]
        # Cap the list at one entry per available menu-underline character.
        ulchars = "1234567890ABCDEFGHIJK"
        rf_list = rf_list[0:len(ulchars)]
        try:
            with open(self.recent_files_path, 'w') as rf_file:
                rf_file.writelines(rf_list)
        except IOError as err:
            # Warn only once per session so a read-only config dir isn't noisy.
            if not getattr(self.root, "recentfilelist_error_displayed", False):
                self.root.recentfilelist_error_displayed = True
                tkMessageBox.showwarning(title='IDLE Warning',
                        message="Cannot update File menu Recent Files list. "
                                "Your operating system says:\n%s\n"
                                "Select OK and IDLE will continue without updating."
                                % str(err),
                        parent=self.text)
        # for each edit window instance, construct the recent files menu
        for instance in self.top.instance_dict.keys():
            menu = instance.recent_files_menu
            menu.delete(0, END)  # clear, and rebuild:
            for i, file_name in enumerate(rf_list):
                file_name = file_name.rstrip()  # zap \n
                # make unicode string to display non-ASCII chars correctly
                ufile_name = self._filename_to_unicode(file_name)
                callback = instance.__recent_file_callback(file_name)
                menu.add_command(label=ulchars[i] + " " + ufile_name,
                                 command=callback,
                                 underline=0)
    def __recent_file_callback(self, file_name):
        "Return a menu callback that opens file_name, frozen via a default arg."
        def open_recent_file(fn_closure=file_name):
            self.io.open(editFile=fn_closure)
        return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long + _py_version
elif short:
title = short
elif long:
title = long
else:
title = "Untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
    def get_saved(self):
        # Saved-state bookkeeping lives in the undo manager.
        return self.undo.get_saved()
    def set_saved(self, flag):
        # Record the buffer as saved (True) or modified (False).
        self.undo.set_saved(flag)
    def reset_undo(self):
        # Discard the undo stack entirely.
        self.undo.reset_undo()
def short_title(self):
filename = self.io.filename
if filename:
filename = os.path.basename(filename)
else:
filename = "Untitled"
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(filename)
def long_title(self):
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(self.io.filename or "")
    def center_insert_event(self, event):
        "Event handler: scroll so the insert cursor is vertically centered."
        self.center()
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
def getwindowlines(self):
text = self.text
top = self.getlineno("@0,0")
bot = self.getlineno("@0,65535")
if top == bot and text.winfo_height() == 1:
# Geometry manager hasn't run yet
height = int(text['height'])
bot = top + height - 1
return top, bot
def getlineno(self, mark="insert"):
text = self.text
return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
tuple = (map(int, m.groups()))
return tuple
    def close_event(self, event):
        "Event handler: close this window (prompting to save if needed)."
        self.close()
def maybesave(self):
if self.io:
if not self.get_saved():
if self.top.state()!='normal':
self.top.deiconify()
self.top.lower()
self.top.lift()
return self.io.maybesave()
def close(self):
reply = self.maybesave()
if str(reply) != "cancel":
self._close()
return reply
    def _close(self):
        "Unconditionally tear the window down and release its resources."
        # Remember the file so it appears in the Recent Files menu.
        if self.io.filename:
            self.update_recent_files_list(new_file=self.io.filename)
        WindowList.unregister_callback(self.postwindowsmenu)
        self.unload_extensions()
        self.io.close()
        # Drop references to helper objects so nothing keeps the widget
        # tree alive after destroy().
        self.io = None
        self.undo = None
        if self.color:
            self.color.close(False)
            self.color = None
        self.text = None
        self.tkinter_vars = None
        self.per.close()
        self.per = None
        self.top.destroy()
        if self.close_hook:
            # unless override: unregister from flist, terminate if last window
            self.close_hook()
    def load_extensions(self):
        # Start from an empty registry, then pull in the configured extensions.
        self.extensions = {}
        self.load_standard_extensions()
def unload_extensions(self):
for ins in self.extensions.values():
if hasattr(ins, "close"):
ins.close()
self.extensions = {}
    def load_standard_extensions(self):
        "Load every configured extension; a broken one must not stop the rest."
        for name in self.get_standard_extension_names():
            try:
                self.load_extension(name)
            except:
                # Deliberately broad: report the failure and keep loading.
                print "Failed to load extension", repr(name)
                import traceback
                traceback.print_exc()
    def get_standard_extension_names(self):
        # Names come from the IDLE configuration, editor-only extensions.
        return idleConf.GetExtensions(editor_only=True)
    def load_extension(self, name):
        "Import, instantiate, and wire up a single extension by module name."
        try:
            mod = __import__(name, globals(), locals(), [])
        except ImportError:
            print "\nFailed to import extension: ", name
            return
        # Convention: the module defines a class of the same name.
        cls = getattr(mod, name)
        keydefs = idleConf.GetExtensionBindings(name)
        if hasattr(cls, "menudefs"):
            self.fill_menus(cls.menudefs, keydefs)
        ins = cls(self)
        self.extensions[name] = ins
        if keydefs:
            self.apply_bindings(keydefs)
            # Bind each virtual event to the matching *_event method, e.g.
            # '<<foo-bar>>' -> ins.foo_bar_event.
            for vevent in keydefs.keys():
                methodname = vevent.replace("-", "_")
                while methodname[:1] == '<':
                    methodname = methodname[1:]
                while methodname[-1:] == '>':
                    methodname = methodname[:-1]
                methodname = methodname + "_event"
                if hasattr(ins, methodname):
                    self.text.bind(vevent, getattr(ins, methodname))
def apply_bindings(self, keydefs=None):
if keydefs is None:
keydefs = self.Bindings.default_keydefs
text = self.text
text.keydefs = keydefs
for event, keylist in keydefs.items():
if keylist:
text.event_add(event, *keylist)
    def fill_menus(self, menudefs=None, keydefs=None):
        """Add appropriate entries to the menus and submenus
        Menus that are absent or None in self.menudict are ignored.
        """
        if menudefs is None:
            menudefs = self.Bindings.menudefs
        if keydefs is None:
            keydefs = self.Bindings.default_keydefs
        menudict = self.menudict
        text = self.text
        for mname, entrylist in menudefs:
            menu = menudict.get(mname)
            if not menu:
                continue
            for entry in entrylist:
                # A falsy entry marks a separator line.
                if not entry:
                    menu.add_separator()
                else:
                    label, eventname = entry
                    # A leading '!' marks a checkbutton entry.
                    checkbutton = (label[:1] == '!')
                    if checkbutton:
                        label = label[1:]
                    underline, label = prepstr(label)
                    accelerator = get_accelerator(keydefs, eventname)
                    # Default args freeze text/eventname per iteration,
                    # avoiding the late-binding closure pitfall.
                    def command(text=text, eventname=eventname):
                        text.event_generate(eventname)
                    if checkbutton:
                        var = self.get_var_obj(eventname, BooleanVar)
                        menu.add_checkbutton(label=label, underline=underline,
                                             command=command, accelerator=accelerator,
                                             variable=var)
                    else:
                        menu.add_command(label=label, underline=underline,
                                         command=command,
                                         accelerator=accelerator)
    def getvar(self, name):
        "Return the value of the Tkinter variable registered under name."
        var = self.get_var_obj(name)
        if var:
            value = var.get()
            return value
        else:
            raise NameError, name
    def setvar(self, name, value, vartype=None):
        "Set the Tkinter variable under name, creating it when vartype is given."
        var = self.get_var_obj(name, vartype)
        if var:
            var.set(value)
        else:
            raise NameError, name
def get_var_obj(self, name, vartype=None):
var = self.tkinter_vars.get(name)
if not var and vartype:
# create a Tkinter variable object with self.text as master:
self.tkinter_vars[name] = var = vartype(self.text)
return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
def is_char_in_string(self, text_index):
if self.color:
# Return true iff colorizer hasn't (re)gotten this far
# yet, or the character is tagged as being in a string
return self.text.tag_prevrange("TODO", text_index) or \
"STRING" in self.text.tag_names(text_index)
else:
# The colorizer is missing: assume the worst
return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
def get_selection_indices(self):
try:
first = self.text.index("sel.first")
last = self.text.index("sel.last")
return first, last
except TclError:
return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
def get_tabwidth(self):
current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
return int(current)
# Set the text widget's current view of what a tab stop means.
    def set_tabwidth(self, newtabwidth):
        "Tell the Text widget how wide a tab stop is, in character widths."
        text = self.text
        if self.get_tabwidth() != newtabwidth:
            # Tk measures tab stops in pixels: use the rendered width of
            # newtabwidth 'n' characters in the widget's font.
            pixels = text.tk.call("font", "measure", text["font"],
                                  "-displayof", text.master,
                                  "n" * newtabwidth)
            text.configure(tabs=pixels)
# If ispythonsource and guess are true, guess a good value for
# indentwidth based on file content (if possible), and if
# indentwidth != tabwidth set usetabs false.
# In any case, adjust the Text widget's view of what a tab
# character means.
def set_indentation_params(self, ispythonsource, guess=True):
if guess and ispythonsource:
i = self.guess_indent()
if 2 <= i <= 8:
self.indentwidth = i
if self.indentwidth != self.tabwidth:
self.usetabs = False
self.set_tabwidth(self.tabwidth)
def smart_backspace_event(self, event):
text = self.text
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == '':
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if chars[-1] not in " \t":
# easy: delete preceding real char
text.delete("insert-1c")
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
tabwidth = self.tabwidth
have = len(chars.expandtabs(tabwidth))
assert have > 0
want | |
GetMimeType(*args, **kwargs):
"""GetMimeType(self) -> PyObject"""
return _misc_.FileType_GetMimeType(*args, **kwargs)
    # SWIG-generated thin wrappers: no explicit 'self' -- the instance is
    # passed through *args to the C++ implementation in _misc_.
    def GetMimeTypes(*args, **kwargs):
        """GetMimeTypes(self) -> PyObject"""
        return _misc_.FileType_GetMimeTypes(*args, **kwargs)
    def GetExtensions(*args, **kwargs):
        """GetExtensions(self) -> PyObject"""
        return _misc_.FileType_GetExtensions(*args, **kwargs)
    def GetIcon(*args, **kwargs):
        """GetIcon(self) -> Icon"""
        return _misc_.FileType_GetIcon(*args, **kwargs)
    def GetIconInfo(*args, **kwargs):
        """GetIconInfo(self) -> PyObject"""
        return _misc_.FileType_GetIconInfo(*args, **kwargs)
    def GetDescription(*args, **kwargs):
        """GetDescription(self) -> PyObject"""
        return _misc_.FileType_GetDescription(*args, **kwargs)
    # Command-query/mutation wrappers, all forwarding to _misc_.
    def GetOpenCommand(*args, **kwargs):
        """GetOpenCommand(self, String filename, String mimetype=EmptyString) -> PyObject"""
        return _misc_.FileType_GetOpenCommand(*args, **kwargs)
    def GetPrintCommand(*args, **kwargs):
        """GetPrintCommand(self, String filename, String mimetype=EmptyString) -> PyObject"""
        return _misc_.FileType_GetPrintCommand(*args, **kwargs)
    def GetAllCommands(*args, **kwargs):
        """GetAllCommands(self, String filename, String mimetype=EmptyString) -> PyObject"""
        return _misc_.FileType_GetAllCommands(*args, **kwargs)
    def SetCommand(*args, **kwargs):
        """SetCommand(self, String cmd, String verb, bool overwriteprompt=True) -> bool"""
        return _misc_.FileType_SetCommand(*args, **kwargs)
    def SetDefaultIcon(*args, **kwargs):
        """SetDefaultIcon(self, String cmd=EmptyString, int index=0) -> bool"""
        return _misc_.FileType_SetDefaultIcon(*args, **kwargs)
    def Unassociate(*args, **kwargs):
        """Unassociate(self) -> bool"""
        return _misc_.FileType_Unassociate(*args, **kwargs)
    def ExpandCommand(*args, **kwargs):
        """ExpandCommand(String command, String filename, String mimetype=EmptyString) -> String"""
        return _misc_.FileType_ExpandCommand(*args, **kwargs)
    # Rebound as a staticmethod: ExpandCommand takes no instance argument.
    ExpandCommand = staticmethod(ExpandCommand)
AllCommands = property(GetAllCommands,doc="See `GetAllCommands`")
Description = property(GetDescription,doc="See `GetDescription`")
Extensions = property(GetExtensions,doc="See `GetExtensions`")
Icon = property(GetIcon,doc="See `GetIcon`")
IconInfo = property(GetIconInfo,doc="See `GetIconInfo`")
MimeType = property(GetMimeType,doc="See `GetMimeType`")
MimeTypes = property(GetMimeTypes,doc="See `GetMimeTypes`")
OpenCommand = property(GetOpenCommand,doc="See `GetOpenCommand`")
PrintCommand = property(GetPrintCommand,doc="See `GetPrintCommand`")
# Register the proxy class with SWIG; the module-level function mirrors the
# static method for callers that don't go through the class.
_misc_.FileType_swigregister(FileType)
def FileType_ExpandCommand(*args, **kwargs):
    """FileType_ExpandCommand(String command, String filename, String mimetype=EmptyString) -> String"""
    return _misc_.FileType_ExpandCommand(*args, **kwargs)
class MimeTypesManager(object):
    """Proxy of C++ MimeTypesManager class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def IsOfType(*args, **kwargs):
        """IsOfType(String mimeType, String wildcard) -> bool"""
        return _misc_.MimeTypesManager_IsOfType(*args, **kwargs)
    # IsOfType takes no instance; rebind it as a staticmethod.
    IsOfType = staticmethod(IsOfType)
    def __init__(self, *args, **kwargs):
        """__init__(self) -> MimeTypesManager"""
        _misc_.MimeTypesManager_swiginit(self,_misc_.new_MimeTypesManager(*args, **kwargs))
    def Initialize(*args, **kwargs):
        """Initialize(self, int mailcapStyle=MAILCAP_ALL, String extraDir=EmptyString)"""
        return _misc_.MimeTypesManager_Initialize(*args, **kwargs)
    def ClearData(*args, **kwargs):
        """ClearData(self)"""
        return _misc_.MimeTypesManager_ClearData(*args, **kwargs)
    def GetFileTypeFromExtension(*args, **kwargs):
        """GetFileTypeFromExtension(self, String ext) -> FileType"""
        return _misc_.MimeTypesManager_GetFileTypeFromExtension(*args, **kwargs)
    def GetFileTypeFromMimeType(*args, **kwargs):
        """GetFileTypeFromMimeType(self, String mimeType) -> FileType"""
        return _misc_.MimeTypesManager_GetFileTypeFromMimeType(*args, **kwargs)
    def ReadMailcap(*args, **kwargs):
        """ReadMailcap(self, String filename, bool fallback=False) -> bool"""
        return _misc_.MimeTypesManager_ReadMailcap(*args, **kwargs)
    def ReadMimeTypes(*args, **kwargs):
        """ReadMimeTypes(self, String filename) -> bool"""
        return _misc_.MimeTypesManager_ReadMimeTypes(*args, **kwargs)
    def EnumAllFileTypes(*args, **kwargs):
        """EnumAllFileTypes(self) -> PyObject"""
        return _misc_.MimeTypesManager_EnumAllFileTypes(*args, **kwargs)
    def AddFallback(*args, **kwargs):
        """AddFallback(self, FileTypeInfo ft)"""
        return _misc_.MimeTypesManager_AddFallback(*args, **kwargs)
    def Associate(*args, **kwargs):
        """Associate(self, FileTypeInfo ftInfo) -> FileType"""
        return _misc_.MimeTypesManager_Associate(*args, **kwargs)
    def Unassociate(*args, **kwargs):
        """Unassociate(self, FileType ft) -> bool"""
        return _misc_.MimeTypesManager_Unassociate(*args, **kwargs)
    # SWIG handles C++ destruction; __del__ is a no-op placeholder.
    __swig_destroy__ = _misc_.delete_MimeTypesManager
    __del__ = lambda self : None;
_misc_.MimeTypesManager_swigregister(MimeTypesManager)
# Global singleton exported by the C++ side.
TheMimeTypesManager = cvar.TheMimeTypesManager
# Module-level mirror of the static method.
def MimeTypesManager_IsOfType(*args, **kwargs):
    """MimeTypesManager_IsOfType(String mimeType, String wildcard) -> bool"""
    return _misc_.MimeTypesManager_IsOfType(*args, **kwargs)
#---------------------------------------------------------------------------
class ArtProvider(object):
    """
    The wx.ArtProvider class is used to customize the look of wxWidgets
    application. When wxWidgets needs to display an icon or a bitmap (e.g.
    in the standard file dialog), it does not use hard-coded resource but
    asks wx.ArtProvider for it instead. This way the users can plug in
    their own wx.ArtProvider class and easily replace standard art with
    his/her own version. It is an easy thing to do: all that is needed is
    to derive a class from wx.ArtProvider, override its CreateBitmap
    method and register the provider with `wx.ArtProvider.Push`::
        class MyArtProvider(wx.ArtProvider):
            def __init__(self):
                wx.ArtProvider.__init__(self)
            def CreateBitmap(self, artid, client, size):
                ...
                return bmp
    """
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self) -> ArtProvider
        The wx.ArtProvider class is used to customize the look of wxWidgets
        application. When wxWidgets needs to display an icon or a bitmap (e.g.
        in the standard file dialog), it does not use hard-coded resource but
        asks wx.ArtProvider for it instead. This way the users can plug in
        their own wx.ArtProvider class and easily replace standard art with
        his/her own version. It is an easy thing to do: all that is needed is
        to derive a class from wx.ArtProvider, override its CreateBitmap
        method and register the provider with `wx.ArtProvider.Push`::
            class MyArtProvider(wx.ArtProvider):
                def __init__(self):
                    wx.ArtProvider.__init__(self)
                def CreateBitmap(self, artid, client, size):
                    ...
                    return bmp
        """
        _misc_.ArtProvider_swiginit(self,_misc_.new_ArtProvider(*args, **kwargs))
        # Let the C++ side call back into Python overrides (CreateBitmap etc.).
        ArtProvider._setCallbackInfo(self, self, ArtProvider)
    __swig_destroy__ = _misc_.delete_ArtProvider
    __del__ = lambda self : None;
    def _setCallbackInfo(*args, **kwargs):
        """_setCallbackInfo(self, PyObject self, PyObject _class)"""
        return _misc_.ArtProvider__setCallbackInfo(*args, **kwargs)
    def Push(*args, **kwargs):
        """
        Push(ArtProvider provider)
        Add new provider to the top of providers stack.
        """
        return _misc_.ArtProvider_Push(*args, **kwargs)
    Push = staticmethod(Push)
    PushProvider = Push
    def Insert(*args, **kwargs):
        """
        Insert(ArtProvider provider)
        Add new provider to the bottom of providers stack.
        """
        return _misc_.ArtProvider_Insert(*args, **kwargs)
    Insert = staticmethod(Insert)
    InsertProvider = Insert
    def Pop(*args, **kwargs):
        """
        Pop() -> bool
        Remove latest added provider and delete it.
        """
        return _misc_.ArtProvider_Pop(*args, **kwargs)
    Pop = staticmethod(Pop)
    PopProvider = Pop
    def Delete(*args, **kwargs):
        """
        Delete(ArtProvider provider) -> bool
        Remove provider. The provider must have been added previously! The
        provider is _not_ deleted.
        """
        val = _misc_.ArtProvider_Delete(*args, **kwargs)
        # Ownership returns to Python since the C++ side no longer holds it.
        args[1].thisown = 1
        return val
    Delete = staticmethod(Delete)
    RemoveProvider = Delete
    def GetBitmap(*args, **kwargs):
        """
        GetBitmap(String id, String client=ART_OTHER, Size size=DefaultSize) -> Bitmap
        Query the providers for bitmap with given ID and return it. Return
        wx.NullBitmap if no provider provides it.
        """
        return _misc_.ArtProvider_GetBitmap(*args, **kwargs)
    GetBitmap = staticmethod(GetBitmap)
    def GetIcon(*args, **kwargs):
        """
        GetIcon(String id, String client=ART_OTHER, Size size=DefaultSize) -> Icon
        Query the providers for icon with given ID and return it. Return
        wx.NullIcon if no provider provides it.
        """
        return _misc_.ArtProvider_GetIcon(*args, **kwargs)
    GetIcon = staticmethod(GetIcon)
    def GetSizeHint(*args, **kwargs):
        """
        GetSizeHint(String client, bool platform_dependent=False) -> Size
        Get the size hint of an icon from a specific Art Client, queries the
        topmost provider if platform_dependent = false
        """
        return _misc_.ArtProvider_GetSizeHint(*args, **kwargs)
    GetSizeHint = staticmethod(GetSizeHint)
    def Destroy(*args, **kwargs):
        """Destroy(self)"""
        # Give up Python ownership before the C++ object is destroyed.
        args[0].this.own(False)
        return _misc_.ArtProvider_Destroy(*args, **kwargs)
_misc_.ArtProvider_swigregister(ArtProvider)
# Standard wx art identifiers, re-exported from the C++ side (cvar).
ART_TOOLBAR = cvar.ART_TOOLBAR
ART_MENU = cvar.ART_MENU
ART_FRAME_ICON = cvar.ART_FRAME_ICON
ART_CMN_DIALOG = cvar.ART_CMN_DIALOG
ART_HELP_BROWSER = cvar.ART_HELP_BROWSER
ART_MESSAGE_BOX = cvar.ART_MESSAGE_BOX
ART_BUTTON = cvar.ART_BUTTON
ART_OTHER = cvar.ART_OTHER
ART_ADD_BOOKMARK = cvar.ART_ADD_BOOKMARK
ART_DEL_BOOKMARK = cvar.ART_DEL_BOOKMARK
ART_HELP_SIDE_PANEL = cvar.ART_HELP_SIDE_PANEL
ART_HELP_SETTINGS = cvar.ART_HELP_SETTINGS
ART_HELP_BOOK = cvar.ART_HELP_BOOK
ART_HELP_FOLDER = cvar.ART_HELP_FOLDER
ART_HELP_PAGE = cvar.ART_HELP_PAGE
ART_GO_BACK = cvar.ART_GO_BACK
ART_GO_FORWARD = cvar.ART_GO_FORWARD
ART_GO_UP = cvar.ART_GO_UP
ART_GO_DOWN = cvar.ART_GO_DOWN
ART_GO_TO_PARENT = cvar.ART_GO_TO_PARENT
ART_GO_HOME = cvar.ART_GO_HOME
ART_FILE_OPEN = cvar.ART_FILE_OPEN
ART_FILE_SAVE = cvar.ART_FILE_SAVE
ART_FILE_SAVE_AS = cvar.ART_FILE_SAVE_AS
ART_PRINT = cvar.ART_PRINT
ART_HELP = cvar.ART_HELP
ART_TIP = cvar.ART_TIP
ART_REPORT_VIEW = cvar.ART_REPORT_VIEW
ART_LIST_VIEW = cvar.ART_LIST_VIEW
ART_NEW_DIR = cvar.ART_NEW_DIR
ART_HARDDISK = cvar.ART_HARDDISK
ART_FLOPPY = cvar.ART_FLOPPY
ART_CDROM = cvar.ART_CDROM
ART_REMOVABLE = cvar.ART_REMOVABLE
ART_FOLDER = cvar.ART_FOLDER
ART_FOLDER_OPEN = cvar.ART_FOLDER_OPEN
ART_GO_DIR_UP = cvar.ART_GO_DIR_UP
ART_EXECUTABLE_FILE = cvar.ART_EXECUTABLE_FILE
ART_NORMAL_FILE = cvar.ART_NORMAL_FILE
ART_TICK_MARK = cvar.ART_TICK_MARK
ART_CROSS_MARK = cvar.ART_CROSS_MARK
ART_ERROR = cvar.ART_ERROR
ART_QUESTION = cvar.ART_QUESTION
ART_WARNING = cvar.ART_WARNING
ART_INFORMATION = cvar.ART_INFORMATION
ART_MISSING_IMAGE = cvar.ART_MISSING_IMAGE
ART_COPY = cvar.ART_COPY
ART_CUT = cvar.ART_CUT
ART_PASTE = cvar.ART_PASTE
ART_DELETE = cvar.ART_DELETE
ART_NEW = cvar.ART_NEW
ART_UNDO = cvar.ART_UNDO
ART_REDO = cvar.ART_REDO
ART_QUIT = cvar.ART_QUIT
ART_FIND = cvar.ART_FIND
ART_FIND_AND_REPLACE = cvar.ART_FIND_AND_REPLACE
# Module-level mirrors of the ArtProvider static methods.
def ArtProvider_Push(*args, **kwargs):
    """
    ArtProvider_Push(ArtProvider provider)
    Add new provider to the top of providers stack.
    """
    return _misc_.ArtProvider_Push(*args, **kwargs)
def ArtProvider_Insert(*args, **kwargs):
    """
    ArtProvider_Insert(ArtProvider provider)
    Add new provider to the bottom of providers stack.
    """
    return _misc_.ArtProvider_Insert(*args, **kwargs)
def ArtProvider_Pop(*args):
    """
    ArtProvider_Pop() -> bool
    Remove latest added provider and delete it.
    """
    return _misc_.ArtProvider_Pop(*args)
def ArtProvider_Delete(*args, **kwargs):
    """
    ArtProvider_Delete(ArtProvider provider) -> bool
    Remove provider. The provider must have been added previously! The
    provider is _not_ deleted.
    """
    val = _misc_.ArtProvider_Delete(*args, **kwargs)
    # Ownership returns to Python since the C++ side no longer holds it.
    args[1].thisown = 1
    return val
def ArtProvider_GetBitmap(*args, **kwargs):
    """
    ArtProvider_GetBitmap(String id, String client=ART_OTHER, Size size=DefaultSize) -> Bitmap
    Query the providers for bitmap with given ID and return it. Return
    wx.NullBitmap if no provider provides it.
    """
    return _misc_.ArtProvider_GetBitmap(*args, **kwargs)
def ArtProvider_GetIcon(*args, **kwargs):
    """
    ArtProvider_GetIcon(String id, String client=ART_OTHER, Size size=DefaultSize) -> Icon
    Query the providers for icon with given ID and return it. Return
    wx.NullIcon if no provider provides it.
    """
    return _misc_.ArtProvider_GetIcon(*args, **kwargs)
def ArtProvider_GetSizeHint(*args, **kwargs):
    """
    ArtProvider_GetSizeHint(String client, bool platform_dependent=False) -> Size
    Get the size hint of an icon from a specific Art Client, queries the
    topmost provider if platform_dependent = false
    """
    return _misc_.ArtProvider_GetSizeHint(*args, **kwargs)
#---------------------------------------------------------------------------
# wx.Config style flags, re-exported from the extension module.
CONFIG_USE_LOCAL_FILE = _misc_.CONFIG_USE_LOCAL_FILE
CONFIG_USE_GLOBAL_FILE = _misc_.CONFIG_USE_GLOBAL_FILE
CONFIG_USE_RELATIVE_PATH = _misc_.CONFIG_USE_RELATIVE_PATH
CONFIG_USE_NO_ESCAPE_CHARACTERS = _misc_.CONFIG_USE_NO_ESCAPE_CHARACTERS
class ConfigBase(object):
"""
wx.ConfigBase class defines the basic interface of all config
classes. It can not be used by itself (it is an abstract base class)
and you will always use one of its derivations: wx.Config or
wx.FileConfig.
wx.ConfigBase organizes the items in a tree-like structure, modeled
after the Unix/Dos filesystem. There are groups that act like
directories and entries, key/value pairs that act like files. There
is always one current group given by the current path. As in the file
system case, to specify a key in the config class you must use a path
to it. Config classes also support the notion of the current group,
which makes it possible to use relative paths.
| |
<reponame>ipmb/salt
# -*- coding: utf-8 -*-
'''
State module to manage Elasticsearch.
.. versionadded:: 2017.7.0
'''
# Import python libs
from __future__ import absolute_import
import logging
# Import salt libs
import salt.utils.json
log = logging.getLogger(__name__)
def index_absent(name):
    '''
    Ensure that the named index is absent.
    name
        Name of the index to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        index = __salt__['elasticsearch.index_get'](index=name)
        # Nothing to do when the index is already gone.
        if not index or name not in index:
            ret['comment'] = 'Index {0} is already absent'.format(name)
            return ret
        if __opts__['test']:
            # Dry run: report the pending removal, result None per salt convention.
            ret['comment'] = 'Index {0} will be removed'.format(name)
            ret['changes']['old'] = index[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.index_delete'](index=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed index {0}'.format(name)
                ret['changes']['old'] = index[name]
            else:
                ret['comment'] = 'Failed to remove index {0} for unknown reasons'.format(name)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def index_present(name, definition=None):
    '''
    Ensure that the named index is present.
    name
        Name of the index to add
    definition
        Optional dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html
    **Example:**
    .. code-block:: yaml
        # Default settings
        mytestindex:
          elasticsearch_index.present
        # Extra settings
        mytestindex2:
          elasticsearch_index.present:
            - definition:
                settings:
                  index:
                    number_of_shards: 10
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        # Nothing to do when the index already exists.
        if __salt__['elasticsearch.index_exists'](index=name):
            ret['comment'] = 'Index {0} is already present'.format(name)
            return ret
        if __opts__['test']:
            # Dry run: describe the pending creation only.
            ret['comment'] = 'Index {0} does not exist and will be created'.format(name)
            ret['changes'] = {'new': definition}
            ret['result'] = None
            return ret
        output = __salt__['elasticsearch.index_create'](index=name, body=definition)
        if output:
            ret['comment'] = 'Successfully created index {0}'.format(name)
            # Report the index as the cluster actually created it.
            ret['changes'] = {'new': __salt__['elasticsearch.index_get'](index=name)[name]}
        else:
            ret['result'] = False
            ret['comment'] = 'Cannot create index {0}, {1}'.format(name, output)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def alias_absent(name, index):
    '''
    Ensure that the index alias is absent.
    name
        Name of the index alias to remove
    index
        Name of the index for the alias
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        alias = __salt__['elasticsearch.alias_get'](aliases=name, indices=index)
        # Only act when the alias really exists on this index.
        if alias and alias.get(index, {}).get("aliases", {}).get(name, None) is not None:
            if __opts__['test']:
                # Dry run: result None per salt convention.
                ret['comment'] = 'Alias {0} for index {1} will be removed'.format(name, index)
                ret['changes']['old'] = alias.get(index, {}).get("aliases", {}).get(name, {})
                ret['result'] = None
            else:
                ret['result'] = __salt__['elasticsearch.alias_delete'](aliases=name, indices=index)
                if ret['result']:
                    ret['comment'] = 'Successfully removed alias {0} for index {1}'.format(name, index)
                    ret['changes']['old'] = alias.get(index, {}).get("aliases", {}).get(name, {})
                else:
                    ret['comment'] = 'Failed to remove alias {0} for index {1} for unknown reasons'.format(name, index)
        else:
            ret['comment'] = 'Alias {0} for index {1} is already absent'.format(name, index)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def alias_present(name, index, definition=None):
    '''
    Ensure that the named index alias is present.
    name
        Name of the alias
    index
        Name of the index
    definition
        Optional dict for filters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html
    **Example:**
    .. code-block:: yaml
        mytestalias:
          elasticsearch.alias_present:
            - index: testindex
            - definition:
                filter:
                  term:
                    user: kimchy
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        alias = __salt__['elasticsearch.alias_get'](aliases=name, indices=index)
        old = {}
        if alias:
            old = alias.get(index, {}).get("aliases", {}).get(name, {})
        if not definition:
            definition = {}
        # deep_diff drives change detection between current and desired state.
        ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
        # A definition-less alias is always (re)created, since an empty diff
        # cannot distinguish "matches" from "no filter requested".
        if ret['changes'] or not definition:
            if __opts__['test']:
                if not old:
                    ret['comment'] = 'Alias {0} for index {1} does not exist and will be created'.format(name, index)
                else:
                    ret['comment'] = 'Alias {0} for index {1} exists with wrong configuration and will be overriden'.format(name, index)
                ret['result'] = None
            else:
                output = __salt__['elasticsearch.alias_create'](alias=name, indices=index, body=definition)
                if output:
                    if not old:
                        ret['comment'] = 'Successfully created alias {0} for index {1}'.format(name, index)
                    else:
                        ret['comment'] = 'Successfully replaced alias {0} for index {1}'.format(name, index)
                else:
                    ret['result'] = False
                    ret['comment'] = 'Cannot create alias {0} for index {1}, {2}'.format(name, index, output)
        else:
            ret['comment'] = 'Alias {0} for index {1} is already present'.format(name, index)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def index_template_absent(name):
    '''
    Ensure that the named index template is absent.
    name
        Name of the index to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        template = __salt__['elasticsearch.index_template_get'](name=name)
        # Nothing to do when the template is already gone.
        if not template or name not in template:
            ret['comment'] = 'Index template {0} is already absent'.format(name)
            return ret
        if __opts__['test']:
            # Dry run: describe the pending removal, result None per salt convention.
            ret['comment'] = 'Index template {0} will be removed'.format(name)
            ret['changes']['old'] = template[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.index_template_delete'](name=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed index template {0}'.format(name)
                ret['changes']['old'] = template[name]
            else:
                ret['comment'] = 'Failed to remove index template {0} for unknown reasons'.format(name)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def index_template_present(name, definition, check_definition=False):
    '''
    Ensure that the named index template is present.
    name
        Name of the index to add
    definition
        Required dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html
    check_definition
        If the template already exists and the definition is up to date
    **Example:**
    .. code-block:: yaml
        mytestindex2_template:
          elasticsearch_index_template.present:
            - definition:
                template: logstash-*
                order: 1
                settings:
                  number_of_shards: 1
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        index_template_exists = __salt__['elasticsearch.index_template_exists'](name=name)
        if not index_template_exists:
            if __opts__['test']:
                # Dry run: describe the pending creation only.
                ret['comment'] = 'Index template {0} does not exist and will be created'.format(name)
                ret['changes'] = {'new': definition}
                ret['result'] = None
            else:
                output = __salt__['elasticsearch.index_template_create'](name=name, body=definition)
                if output:
                    ret['comment'] = 'Successfully created index template {0}'.format(name)
                    ret['changes'] = {'new': __salt__['elasticsearch.index_template_get'](name=name)[name]}
                else:
                    ret['result'] = False
                    ret['comment'] = 'Cannot create index template {0}, {1}'.format(name, output)
        else:
            if check_definition:
                # Compare the live template against the requested definition
                # (the definition arrives JSON-encoded here).
                definition_parsed = salt.utils.json.loads(definition)
                current_template = __salt__['elasticsearch.index_template_get'](name=name)[name]
                diff = __utils__['dictdiffer.deep_diff'](current_template, definition_parsed)
                if len(diff) != 0:
                    if __opts__['test']:
                        ret['comment'] = 'Index template {0} exist but need to be updated'.format(name)
                        ret['changes'] = diff
                        ret['result'] = None
                    else:
                        # PUT of the same template name overwrites it in place.
                        output = __salt__['elasticsearch.index_template_create'](name=name, body=definition)
                        if output:
                            ret['comment'] = 'Successfully updated index template {0}'.format(name)
                            ret['changes'] = diff
                        else:
                            ret['result'] = False
                            ret['comment'] = 'Cannot update index template {0}, {1}'.format(name, output)
                else:
                    ret['comment'] = 'Index template {0} is already present and up to date'.format(name)
            else:
                ret['comment'] = 'Index template {0} is already present'.format(name)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def pipeline_absent(name):
    '''
    Ensure that the named pipeline is absent
    name
        Name of the pipeline to remove
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        existing = __salt__['elasticsearch.pipeline_get'](id=name)
        # The module returns a mapping keyed by pipeline id; absence of the
        # key means there is nothing to delete.
        found = bool(existing) and name in existing
        if not found:
            ret['comment'] = 'Pipeline {0} is already absent'.format(name)
        elif __opts__['test']:
            # Dry run: report the pending removal without touching the cluster.
            ret['comment'] = 'Pipeline {0} will be removed'.format(name)
            ret['changes']['old'] = existing[name]
            ret['result'] = None
        else:
            ret['result'] = __salt__['elasticsearch.pipeline_delete'](id=name)
            if ret['result']:
                ret['comment'] = 'Successfully removed pipeline {0}'.format(name)
                ret['changes']['old'] = existing[name]
            else:
                ret['comment'] = 'Failed to remove pipeline {0} for unknown reasons'.format(name)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def pipeline_present(name, definition):
    '''
    Ensure that the named pipeline is present.
    name
        Name of the pipeline to add
    definition
        Required dict for creation parameters as per https://www.elastic.co/guide/en/elasticsearch/reference/master/pipeline.html
    **Example:**
    .. code-block:: yaml
        test_pipeline:
          elasticsearch.pipeline_present:
            - definition:
                description: example pipeline
                processors:
                  - set:
                      field: collector_timestamp_millis
                      value: '{{ '{{' }}_ingest.timestamp{{ '}}' }}'
    '''
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    try:
        existing = __salt__['elasticsearch.pipeline_get'](id=name)
        # Current pipeline body, or an empty dict when it does not exist yet.
        old = existing[name] if existing and name in existing else {}
        ret['changes'] = __utils__['dictdiffer.deep_diff'](old, definition)
        if not ret['changes'] and definition:
            ret['comment'] = 'Pipeline {0} is already present'.format(name)
        elif __opts__['test']:
            # Dry run: report what would happen without calling the API.
            if not existing:
                ret['comment'] = 'Pipeline {0} does not exist and will be created'.format(name)
            else:
                ret['comment'] = 'Pipeline {0} exists with wrong configuration and will be overriden'.format(name)
            ret['result'] = None
        else:
            output = __salt__['elasticsearch.pipeline_create'](id=name, body=definition)
            if not output:
                ret['result'] = False
                ret['comment'] = 'Cannot create pipeline {0}, {1}'.format(name, output)
            elif not existing:
                ret['comment'] = 'Successfully created pipeline {0}'.format(name)
            else:
                ret['comment'] = 'Successfully replaced pipeline {0}'.format(name)
    except Exception as e:
        ret['result'] = False
        ret['comment'] = str(e)
    return ret
def search_template_absent(name):
'''
Ensure that the search template is absent
name
Name of the search template to remove
'''
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
try:
template = __salt__['elasticsearch.search_template_get'](id=name)
if template:
if __opts__['test']:
ret['comment'] = 'Search template {0} will be removed'.format(name)
ret['changes']['old'] = salt.utils.json.loads(template["template"])
ret['result'] = None
else:
ret['result'] = __salt__['elasticsearch.search_template_delete'](id=name)
if ret['result']:
ret['comment'] = 'Successfully removed search template {0}'.format(name)
ret['changes']['old'] = salt.utils.json.loads(template["template"])
else:
ret['comment'] = 'Failed to remove search template {0} for unknown reasons'.format(name)
else:
ret['comment'] = 'Search template {0} is already absent'.format(name)
except Exception as e:
ret['result'] | |
# type: ignore
# MIT License
#
# Copyright (c) 2018-2019 Red Hat, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
from typing import List, Optional, Dict, Set
import gitlab
from gitlab.v4.objects import Project as GitlabObjectsProject
from ogr.abstract import (
PullRequest,
Issue,
Release,
IssueComment,
PRComment,
GitTag,
IssueStatus,
CommitFlag,
PRStatus,
CommitComment,
)
from ogr.exceptions import GitlabAPIException
from ogr.services import gitlab as ogr_gitlab
from ogr.services.base import BaseGitProject
from ogr.services.gitlab.release import GitlabRelease
logger = logging.getLogger(__name__)
class GitlabProject(BaseGitProject):
service: "ogr_gitlab.GitlabService"
    def __init__(
        self,
        repo: str,
        service: "ogr_gitlab.GitlabService",
        namespace: str,
        gitlab_repo=None,
        **unprocess_kwargs,
    ) -> None:
        """Create a project handle; the API object itself is fetched lazily.

        :param repo: repository name
        :param service: GitlabService instance this project belongs to
        :param namespace: namespace (user or group) of the repository
        :param gitlab_repo: optional pre-fetched python-gitlab Project object
        """
        # Unknown kwargs are tolerated (and logged) rather than rejected.
        if unprocess_kwargs:
            logger.warning(
                f"GitlabProject will not process these kwargs: {unprocess_kwargs}"
            )
        super().__init__(repo, service, namespace)
        self._gitlab_repo = gitlab_repo
    @property
    def gitlab_repo(self) -> GitlabObjectsProject:
        """Lazily-fetched python-gitlab ``Project`` object, cached after first access."""
        if not self._gitlab_repo:
            self._gitlab_repo = self.service.gitlab_instance.projects.get(
                f"{self.namespace}/{self.repo}"
            )
        return self._gitlab_repo
@property
def is_fork(self) -> bool:
return bool("forked_from_project" in self.gitlab_repo.attributes)
    @property
    def parent(self) -> Optional["GitlabProject"]:
        """
        Return parent project if this project is a fork, otherwise return None
        """
        if self.is_fork:
            # The fork's attributes embed the upstream project's path/namespace.
            parent_dict = self.gitlab_repo.attributes["forked_from_project"]
            return GitlabProject(
                repo=parent_dict["path"],
                service=self.service,
                namespace=parent_dict["namespace"]["full_path"],
            )
        return None
def __str__(self) -> str:
return f'GitlabProject(namespace="{self.namespace}", repo="{self.repo}")'
def __eq__(self, o: object) -> bool:
if not isinstance(o, GitlabProject):
return False
return (
self.repo == o.repo
and self.namespace == o.namespace
and self.service == o.service
)
    def _construct_fork_project(self) -> Optional["GitlabProject"]:
        """Build a handle to the authenticated user's fork, or None if it does not exist."""
        user_login = self.service.user.get_username()
        try:
            project = GitlabProject(
                repo=self.repo, service=self.service, namespace=user_login
            )
            # Accessing gitlab_repo triggers the API fetch; failure means no fork.
            if project.gitlab_repo:
                return project
        # NOTE(review): deliberately broad - this is a best-effort existence
        # probe; any API failure is treated as "fork not found".
        except Exception as ex:
            logger.debug(f"Project {self.repo}/{user_login} does not exist: {ex}")
        return None
def is_forked(self) -> bool:
return bool(self._construct_fork_project())
def get_description(self) -> str:
return self.gitlab_repo.attributes["description"]
    def get_fork(self, create: bool = True) -> Optional["GitlabProject"]:
        """
        Provide GitlabProject instance of the authenticated user's fork
        of this project.
        Returns None when no fork exists and ``create`` is False.
        :param create: create a fork if it doesn't exist
        :return: instance of GitlabProject, or None
        """
        username = self.service.user.get_username()
        # First look through the project's registered forks for one owned
        # by the current user.
        for fork in self.get_forks():
            if fork.gitlab_repo.namespace["full_path"] == username:
                return fork
        if not self.is_forked():
            if create:
                return self.fork_create()
            else:
                logger.info(
                    f"Fork of {self.gitlab_repo.attributes['name']}"
                    " does not exist and we were asked not to create it."
                )
                return None
        return self._construct_fork_project()
def get_owners(self) -> List[str]:
return self._get_collaborators_with_given_access(
access_levels=[gitlab.OWNER_ACCESS]
)
def who_can_close_issue(self) -> Set[str]:
return set(
self._get_collaborators_with_given_access(
access_levels=[
gitlab.REPORTER_ACCESS,
gitlab.DEVELOPER_ACCESS,
gitlab.MAINTAINER_ACCESS,
gitlab.OWNER_ACCESS,
]
)
)
def who_can_merge_pr(self) -> Set[str]:
return set(
self._get_collaborators_with_given_access(
access_levels=[
gitlab.DEVELOPER_ACCESS,
gitlab.MAINTAINER_ACCESS,
gitlab.OWNER_ACCESS,
]
)
)
def can_close_issue(self, username: str, issue: Issue) -> bool:
allowed_users = self.who_can_close_issue()
if username in allowed_users or username == issue.author:
return True
return False
def can_merge_pr(self, username) -> bool:
allowed_users = self.who_can_close_issue()
if username in allowed_users:
return True
return False
    def _get_collaborators_with_given_access(
        self, access_levels: List[int]
    ) -> List[str]:
        """
        Get all project collaborators with one of the given access levels.
        Access levels:
            10 => Guest access
            20 => Reporter access
            30 => Developer access
            40 => Maintainer access
            50 => Owner access
        :return: List of usernames
        """
        # members.all(all=True) presumably pages through the complete member
        # list rather than only the first page - TODO confirm against the
        # python-gitlab version in use.
        return [
            member.username
            for member in self.gitlab_repo.members.all(all=True)
            if member.access_level in access_levels
        ]
    def _get_all_issue_comments(self, issue_id) -> List["IssueComment"]:
        """Return all comments (notes) on the given issue, oldest first."""
        issue = self.gitlab_repo.issues.get(issue_id)
        return [
            self._issuecomment_from_gitlab_object(raw_comment)
            for raw_comment in issue.notes.list(sort="asc")
        ]
def issue_close(self, issue_id: int) -> Issue:
issue = self.gitlab_repo.issues.get(issue_id)
issue.state_event = "close"
issue.save()
return self._issue_from_gitlab_object(issue)
    def get_issue_labels(self, issue_id: int) -> List[str]:
        """Return the labels of the given issue.

        :raises GitlabAPIException: when the issue does not exist
        """
        try:
            issue = self.gitlab_repo.issues.get(issue_id)
        except gitlab.exceptions.GitlabGetError as ex:
            logger.error(f"Issue {issue_id} was not found.")
            raise GitlabAPIException(f"Issue {issue_id} was not found. ", ex)
        return issue.labels
def add_issue_labels(self, issue_id, labels) -> None:
try:
issue = self.gitlab_repo.issues.get(issue_id)
except gitlab.exceptions.GitlabGetError as ex:
logger.error(f"Issue {issue_id} was not found.")
raise GitlabAPIException(f"Issue {issue_id} was not found. ", ex)
for label in labels:
issue.labels.append(label)
issue.save()
    def get_pr_list(self, status: PRStatus = PRStatus.open) -> List["PullRequest"]:
        """Return merge requests in the given state, most recently updated first."""
        # Gitlab API has status 'opened', not 'open'
        mrs = self.gitlab_repo.mergerequests.list(
            state=status.name if status != PRStatus.open else "opened",
            order_by="updated_at",
            sort="desc",
        )
        return [self._pr_from_gitlab_object(mr) for mr in mrs]
    def get_sha_from_tag(self, tag_name: str) -> str:
        """Return the commit SHA that the given tag points to.

        :raises GitlabAPIException: when the tag does not exist
        """
        try:
            tag = self.gitlab_repo.tags.get(tag_name)
            return tag.attributes["commit"]["id"]
        except gitlab.exceptions.GitlabGetError as ex:
            logger.error(f"Tag {tag_name} was not found.")
            raise GitlabAPIException(f"Tag {tag_name} was not found.", ex)
def pr_create(
self, title: str, body: str, target_branch: str, source_branch: str
) -> "PullRequest":
mr = self.gitlab_repo.mergerequests.create(
{
"source_branch": source_branch,
"target_branch": target_branch,
"title": title,
"description": body,
}
)
return self._pr_from_gitlab_object(mr)
    def commit_comment(
        self, commit: str, body: str, filename: str = None, row: int = None
    ) -> "CommitComment":
        """
        Create comment on a commit.
        :param commit: str The SHA of the commit needing a comment.
        :param body: str The text of the comment
        :param filename: str The relative path to the file that necessitates a comment
        :param row: int Line index in the diff to comment on.
        :return: CommitComment
        """
        try:
            commit_object = self.gitlab_repo.commits.get(commit)
        except gitlab.exceptions.GitlabGetError:
            logger.error(f"Commit {commit} was not found.")
            raise GitlabAPIException(f"Commit {commit} was not found.")
        # NOTE(review): row == 0 or an empty filename falls through to a
        # plain (non-inline) comment because of truthiness - confirm intended.
        if filename and row:
            raw_comment = commit_object.comments.create(
                {"note": body, "path": filename, "line": row, "line_type": "new"}
            )
        else:
            raw_comment = commit_object.comments.create({"note": body})
        return self._commit_comment_from_gitlab_object(raw_comment, commit)
    def set_commit_status(
        self, commit: str, state: str, target_url: str, description: str, context: str
    ) -> "CommitFlag":
        """
        Create a status on a commit
        :param commit: The SHA of the commit.
        :param state: The state of the status.
        :param target_url: The target URL to associate with this status.
        :param description: A short description of the status
        :param context: A label to differentiate this status from the status of other systems.
        :return: CommitFlag
        """
        try:
            commit_object = self.gitlab_repo.commits.get(commit)
        except gitlab.exceptions.GitlabGetError:
            logger.error(f"Commit {commit} was not found.")
            raise GitlabAPIException(f"Commit {commit} was not found.")
        # Payload for the commit-status create call.
        data_dict = {
            "state": state,
            "target_url": target_url,
            "context": context,
            "description": description,
        }
        raw_status = commit_object.statuses.create(data_dict)
        return self._commit_status_from_gitlab_object(raw_status)
def get_commit_statuses(self, commit: str) -> List[CommitFlag]:
"""
Get the statuses of a commit in a project.
:param commit: The SHA of the commit.
:return: [CommitFlag]
"""
try:
commit_object = self.gitlab_repo.commits.get(commit)
except gitlab.exceptions.GitlabGetError:
logger.error(f"Commit {commit} was not found.")
raise GitlabAPIException(f"Commit {commit} was not found.")
raw_statuses = commit_object.statuses.list()
return [
self._commit_status_from_gitlab_object(raw_status)
for raw_status in raw_statuses
]
def pr_close(self, pr_id: int) -> "PullRequest":
pr = self.gitlab_repo.mergerequests.get(pr_id)
pr.state_event = "close"
pr.save()
return self._pr_from_gitlab_object(pr)
def pr_merge(self, pr_id: int) -> "PullRequest":
pr = self.gitlab_repo.mergerequests.get(pr_id)
pr.merge()
return self._pr_from_gitlab_object(pr)
    def get_pr_labels(self, pr_id: int) -> List[str]:
        """Return the labels of the given merge request.

        :raises GitlabAPIException: when the merge request does not exist
        """
        try:
            pr = self.gitlab_repo.mergerequests.get(pr_id)
        except gitlab.exceptions.GitlabGetError as ex:
            logger.error(f"PR {pr_id} was not found.")
            raise GitlabAPIException(f"PR {pr_id} was not found. ", ex)
        return pr.labels
def add_pr_labels(self, pr_id, labels) -> None:
try:
pr = self.gitlab_repo.mergerequests.get(pr_id)
except gitlab.exceptions.GitlabGetError as ex:
logger.error(f"PR {pr_id} was not found.")
raise GitlabAPIException(f"PR {pr_id} was not found. ", ex)
for label in labels:
pr.labels.append(label)
pr.save()
def get_git_urls(self) -> Dict[str, str]:
return {
"git": self.gitlab_repo.attributes["http_url_to_repo"],
"ssh": self.gitlab_repo.attributes["ssh_url_to_repo"],
}
    def fork_create(self) -> "GitlabProject":
        """
        Fork this project using the authenticated user.
        This may raise an exception if the fork already exists.
        :return: fork GitlabProject instance
        """
        try:
            fork = self.gitlab_repo.forks.create({})
        # NOTE(review): the original GitlabCreateError is not chained into
        # the raised exception, so its details are lost to callers.
        except gitlab.GitlabCreateError:
            logger.error(f"Repo {self.gitlab_repo} cannot be forked")
            raise GitlabAPIException(f"Repo {self.gitlab_repo} cannot be forked")
        return GitlabProject(
            namespace=fork.namespace["full_path"], service=self.service, repo=fork.path
        )
    def change_token(self, new_token: str):
        """Propagate a new authentication token to the underlying service."""
        self.service.change_token(new_token)
def get_branches(self) -> List[str]:
return [branch.name for branch in self.gitlab_repo.branches.list()]
    def get_file_content(self, path, ref="master") -> str:
        """Return the decoded content of *path* at *ref* (default ``master``).

        :raises FileNotFoundError: when the file or ref does not exist
        """
        try:
            file = self.gitlab_repo.files.get(file_path=path, ref=ref)
            return str(file.decode())
        except gitlab.exceptions.GitlabGetError as ex:
            raise FileNotFoundError(f"File '{path}' on {ref} not found", ex)
    def get_issue_list(self, status: IssueStatus = IssueStatus.open) -> List[Issue]:
        """Return issues in the given state, most recently updated first."""
        # Gitlab API has status 'opened', not 'open'
        issues = self.gitlab_repo.issues.list(
            state=status.name if status != IssueStatus.open else "opened",
            order_by="updated_at",
            sort="desc",
        )
        return [self._issue_from_gitlab_object(issue) for issue in issues]
| |
<filename>csmserver/views/datatable.py
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from flask import Blueprint
from flask import request
from flask import jsonify
from flask import abort
from flask_login import login_required
from sqlalchemy import or_
from sqlalchemy import and_
from database import DBSession
from inventory import query_available_inventory
from inventory import query_in_use_inventory
from inventory import get_inventory_without_serial_number_query
from inventory import get_inventory_with_duplicate_serial_number_query
from models import Host
from models import HostInventory
from models import Inventory
from models import InventoryJob
from models import Region
from models import JumpHost
from models import ConnectionParam
from models import logger
from models import Satellite
from models import InstallJob
from models import InstallJobHistory
from models import DownloadJob
from models import DownloadJobHistory
from models import ConformanceReport
from models import ConformanceReportEntry
from common import get_host
from common import get_conformance_report_by_id
from common import get_software_profile_by_id
from common import get_last_successful_inventory_elapsed_time
from constants import UNKNOWN
from constants import JobStatus
from utils import is_empty
from install_dashboard import get_install_job_json_dict
from download_dashboard import get_download_job_json_dict
datatable = Blueprint('datatable', __name__, url_prefix='/datatable')
class DataTableParams(object):
    """Parses the standard jQuery DataTables server-side request arguments."""
    def __init__(self, request):
        # Draw counter echoed back so the client can pair request/response.
        self.draw = int(request.args.get('draw'))
        # Global free-text search box value.
        self.search_value = request.args.get('search[value]')
        # Paging window: first record index and page size.
        self.start_length = int(request.args.get('start'))
        self.display_length = int(request.args.get('length'))
        # Sort direction ('asc'/'desc') and the index of the sorted column.
        self.sort_order = request.args.get('order[0][dir]')
        self.column_order = int(request.args.get('order[0][column]'))
        # Optional comma-separated list of visible column names; None means
        # the caller uses its default column set.
        if request.args.get('column_names'):
            self.columns_on_display = set(request.args.get('column_names').split(','))
        else:
            self.columns_on_display = None
@datatable.route('/api/get_managed_hosts/region/<int:region_id>', defaults={'chassis': None, 'filter_failed': 0})
@datatable.route('/api/get_managed_hosts/region/<int:region_id>/chassis/<path:chassis>', defaults={'filter_failed': 0})
@datatable.route('/api/get_managed_hosts/region/<int:region_id>/filter_failed/<int:filter_failed>',
                 defaults={'chassis': None})
@login_required
def get_server_managed_hosts(region_id, chassis, filter_failed):
    """Serve the managed-hosts DataTable (server-side processing).

    region_id == 0 means all regions; chassis and filter_failed optionally
    narrow the result to one platform or to hosts whose inventory job failed.
    """
    dt_params = DataTableParams(request)
    rows = []
    db_session = DBSession()
    # Free-text search: OR the criteria across all searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(Region.name.like(criteria))
        clauses.append(Host.location.like(criteria))
        clauses.append(ConnectionParam.host_or_ip.like(criteria))
        clauses.append(Host.platform.like(criteria))
        clauses.append(Host.software_platform.like(criteria))
        clauses.append(Host.software_version.like(criteria))
    # FIX: removed the stray trailing backslash after the last .join(), which
    # continued the statement into the following blank line (fragile; becomes
    # a SyntaxError if that blank line is ever removed).
    query = db_session.query(Host)\
        .join(Region, Host.region_id == Region.id)\
        .join(ConnectionParam, Host.id == ConnectionParam.host_id)
    and_clauses = []
    if region_id != 0:
        and_clauses.append(Host.region_id == region_id)
    if chassis is not None:
        and_clauses.append(Host.platform == chassis)
    if filter_failed != 0:
        # Only hosts whose last inventory job failed.
        query = query.join(InventoryJob, Host.id == InventoryJob.host_id)
        and_clauses.append(InventoryJob.status == JobStatus.FAILED)
    if and_clauses:
        query = query.filter(and_(*and_clauses))
        total_count = query.count()
    else:
        total_count = db_session.query(Host).count()
    # FIX: apply the search filter only when clauses exist; or_() with no
    # arguments yields an empty conjunction (deprecated in SQLAlchemy).
    if clauses:
        query = query.filter(or_(*clauses))
    filtered_count = query.count()
    # Sort expressions must match the client-side column order.
    if dt_params.columns_on_display is None:
        columns = [getattr(Host.hostname, dt_params.sort_order)(),
                   getattr(Region.name, dt_params.sort_order)(),
                   getattr(Host.location, dt_params.sort_order)(),
                   getattr(ConnectionParam.host_or_ip, dt_params.sort_order)(),
                   getattr(Host.platform, dt_params.sort_order)(),
                   getattr(Host.software_platform, dt_params.sort_order)(),
                   getattr(Host.software_version, dt_params.sort_order)()]
    else:
        columns = []
        check_and_add_column(columns, 'hostname', Host.hostname, dt_params)
        check_and_add_column(columns, 'region', Region.name, dt_params)
        check_and_add_column(columns, 'location', Host.location, dt_params)
        check_and_add_column(columns, 'host_or_ip', ConnectionParam.host_or_ip, dt_params)
        check_and_add_column(columns, 'chassis', Host.platform, dt_params)
        check_and_add_column(columns, 'platform', Host.software_platform, dt_params)
        check_and_add_column(columns, 'software', Host.software_version, dt_params)
    hosts = query.order_by(columns[dt_params.column_order])\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    if hosts is not None:
        for host in hosts:
            row = dict()
            row['hostname'] = host.hostname
            row['region'] = '' if host.region is None else host.region.name
            row['location'] = host.location
            if len(host.connection_param) > 0:
                row['host_or_ip'] = host.connection_param[0].host_or_ip
                row['chassis'] = host.platform
                row['platform'] = UNKNOWN if host.software_platform is None else host.software_platform
                row['software'] = UNKNOWN if host.software_version is None else host.software_version
                inventory_job = host.inventory_job[0]
                if inventory_job is not None:
                    row['last_successful_retrieval'] = get_last_successful_inventory_elapsed_time(host)
                    row['inventory_retrieval_status'] = inventory_job.status
                else:
                    row['last_successful_retrieval'] = ''
                    row['inventory_retrieval_status'] = ''
                rows.append(row)
            else:
                # Host without connection info is skipped, not shown.
                logger.error('Host %s has no connection information.', host.hostname)
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response['data'] = rows
    return jsonify(**response)
def check_and_add_column(columns, check_column_name, db_field, dt_params):
    """Append db_field's sort expression to columns when the column is displayed."""
    if check_column_name not in dt_params.columns_on_display:
        return
    sort_expression = getattr(db_field, dt_params.sort_order)()
    columns.append(sort_expression)
@datatable.route('/api/get_managed_host_details/region/<int:region_id>')
@login_required
def get_managed_host_details(region_id):
    """Serve the managed-host details DataTable (server-side processing).

    region_id == 0 means hosts from all regions.
    """
    dt_params = DataTableParams(request)
    rows = []
    db_session = DBSession()
    # Free-text search: OR the criteria across all searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(Region.name.like(criteria))
        clauses.append(Host.location.like(criteria))
        clauses.append(Host.roles.like(criteria))
        clauses.append(Host.platform.like(criteria))
        clauses.append(Host.software_platform.like(criteria))
        clauses.append(Host.software_version.like(criteria))
        clauses.append(ConnectionParam.connection_type.like(criteria))
        clauses.append(ConnectionParam.host_or_ip.like(criteria))
        clauses.append(ConnectionParam.port_number.like(criteria))
        clauses.append(ConnectionParam.username.like(criteria))
        clauses.append(JumpHost.hostname.like(criteria))
    # FIX: removed the stray trailing backslash after .outerjoin(), which
    # continued the statement into the following blank line (fragile; becomes
    # a SyntaxError if that blank line is ever removed).
    query = db_session.query(Host)\
        .join(Region, Host.region_id == Region.id)\
        .join(ConnectionParam, Host.id == ConnectionParam.host_id)\
        .outerjoin(JumpHost, ConnectionParam.jump_host_id == JumpHost.id)
    # FIX: apply the search filter only when clauses exist; or_() with no
    # arguments yields an empty conjunction (deprecated in SQLAlchemy).
    if region_id == 0:
        if clauses:
            query = query.filter(or_(*clauses))
        total_count = db_session.query(Host).count()
    else:
        if clauses:
            query = query.filter(Host.region_id == region_id, or_(*clauses))
        else:
            query = query.filter(Host.region_id == region_id)
        total_count = db_session.query(Host).filter(Host.region_id == region_id).count()
    filtered_count = query.count()
    # Sort expressions must match the client-side column order.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(Region.name, dt_params.sort_order)(),
               getattr(Host.location, dt_params.sort_order)(),
               getattr(Host.roles, dt_params.sort_order)(),
               getattr(Host.platform, dt_params.sort_order)(),
               getattr(Host.software_platform, dt_params.sort_order)(),
               getattr(Host.software_version, dt_params.sort_order)(),
               getattr(ConnectionParam.connection_type, dt_params.sort_order)(),
               getattr(ConnectionParam.host_or_ip, dt_params.sort_order)(),
               getattr(ConnectionParam.port_number, dt_params.sort_order)(),
               getattr(ConnectionParam.username, dt_params.sort_order)(),
               getattr(JumpHost.hostname, dt_params.sort_order)()]
    hosts = query.order_by(columns[dt_params.column_order])\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    if hosts is not None:
        for host in hosts:
            row = dict()
            row['hostname'] = host.hostname
            row['region'] = '' if host.region is None else host.region.name
            row['location'] = host.location
            row['roles'] = host.roles
            row['chassis'] = host.platform
            row['platform'] = UNKNOWN if host.software_platform is None else host.software_platform
            row['software'] = UNKNOWN if host.software_version is None else host.software_version
            if len(host.connection_param) > 0:
                connection_param = host.connection_param[0]
                row['connection'] = connection_param.connection_type
                row['host_or_ip'] = connection_param.host_or_ip
                row['port_number'] = connection_param.port_number
                if not is_empty(connection_param.jump_host):
                    row['jump_host'] = connection_param.jump_host.hostname
                else:
                    row['jump_host'] = ''
                row['username'] = connection_param.username
                rows.append(row)
            else:
                # Host without connection info is skipped, not shown.
                logger.error('Host %s has no connection information.', host.hostname)
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response['data'] = rows
    return jsonify(**response)
@datatable.route('/api/get_scheduled_install_jobs/')
@login_required
def api_get_scheduled_install_jobs():
    """Serve the scheduled install jobs DataTable (server-side processing)."""
    dt_params = DataTableParams(request)
    db_session = DBSession()
    # Free-text search: OR the criteria across the searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(InstallJob.install_action.like(criteria))
        clauses.append(InstallJob.scheduled_time.like(criteria))
        clauses.append(InstallJob.packages.like(criteria))
        clauses.append(InstallJob.created_by.like(criteria))
    query = db_session.query(InstallJob)\
        .join(Host, Host.id == InstallJob.host_id)
    total_count = query.filter(InstallJob.status == JobStatus.SCHEDULED).count()
    # FIX: build the filter list once instead of repeating it for the count
    # and the page query, and skip or_() entirely when there is no search
    # value (or_() with no clauses is deprecated in SQLAlchemy).
    filters = [InstallJob.status == JobStatus.SCHEDULED]
    if clauses:
        filters.append(or_(*clauses))
    filtered_query = query.filter(*filters)
    filtered_count = filtered_query.count()
    # Sort expressions in client-side column order; '' entries are
    # non-sortable display columns.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(InstallJob.install_action, dt_params.sort_order)(),
               '',
               getattr(InstallJob.scheduled_time, dt_params.sort_order)(),
               getattr(InstallJob.packages, dt_params.sort_order)(),
               getattr(InstallJob.created_by, dt_params.sort_order)(),
               '']
    install_jobs = filtered_query.order_by(columns[dt_params.column_order])\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response.update(get_install_job_json_dict(install_jobs))
    return jsonify(**response)
@datatable.route('/api/get_in_progress_install_jobs/')
@login_required
def api_get_in_progress_install_jobs():
    """Serve the in-progress install jobs DataTable (server-side processing)."""
    dt_params = DataTableParams(request)
    db_session = DBSession()
    # Free-text search: OR the criteria across the searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(InstallJob.install_action.like(criteria))
        clauses.append(InstallJob.scheduled_time.like(criteria))
        clauses.append(InstallJob.start_time.like(criteria))
        clauses.append(InstallJob.packages.like(criteria))
        clauses.append(InstallJob.status.like(criteria))
        clauses.append(InstallJob.created_by.like(criteria))
    query = db_session.query(InstallJob)\
        .join(Host, Host.id == InstallJob.host_id)
    total_count = query.filter(InstallJob.status == JobStatus.IN_PROGRESS).count()
    # NOTE(review): or_(*clauses) is empty when there is no search value;
    # empty conjunctions are deprecated in SQLAlchemy - consider guarding.
    filtered_count = query.filter(and_(InstallJob.status == JobStatus.IN_PROGRESS), or_(*clauses)).count()
    # Sort expressions in client-side column order; '' is a non-sortable column.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(InstallJob.install_action, dt_params.sort_order)(),
               getattr(InstallJob.scheduled_time, dt_params.sort_order)(),
               getattr(InstallJob.start_time, dt_params.sort_order)(),
               getattr(InstallJob.packages, dt_params.sort_order)(),
               getattr(InstallJob.status, dt_params.sort_order)(),
               '',
               getattr(InstallJob.created_by, dt_params.sort_order)()]
    install_jobs = query.order_by(columns[dt_params.column_order])\
        .filter(and_(InstallJob.status == JobStatus.IN_PROGRESS), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response.update(get_install_job_json_dict(install_jobs))
    return jsonify(**response)
@datatable.route('/api/get_failed_install_jobs/')
@login_required
def api_get_failed_install_jobs():
    """Serve the failed install jobs DataTable (server-side processing)."""
    dt_params = DataTableParams(request)
    db_session = DBSession()
    # Free-text search: OR the criteria across the searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(InstallJob.install_action.like(criteria))
        clauses.append(InstallJob.scheduled_time.like(criteria))
        clauses.append(InstallJob.start_time.like(criteria))
        clauses.append(InstallJob.packages.like(criteria))
        clauses.append(InstallJob.status_time.like(criteria))
        clauses.append(InstallJob.created_by.like(criteria))
    query = db_session.query(InstallJob)\
        .join(Host, Host.id == InstallJob.host_id)
    total_count = query.filter(InstallJob.status == JobStatus.FAILED).count()
    # NOTE(review): or_(*clauses) is empty when there is no search value;
    # empty conjunctions are deprecated in SQLAlchemy - consider guarding.
    filtered_count = query.filter(and_(InstallJob.status == JobStatus.FAILED), or_(*clauses)).count()
    # Sort expressions in client-side column order; '' is a non-sortable column.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(InstallJob.install_action, dt_params.sort_order)(),
               getattr(InstallJob.scheduled_time, dt_params.sort_order)(),
               getattr(InstallJob.start_time, dt_params.sort_order)(),
               getattr(InstallJob.packages, dt_params.sort_order)(),
               getattr(InstallJob.status_time, dt_params.sort_order)(),
               '',
               getattr(InstallJob.created_by, dt_params.sort_order)()]
    install_jobs = query.order_by(columns[dt_params.column_order])\
        .filter(and_(InstallJob.status == JobStatus.FAILED), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response.update(get_install_job_json_dict(install_jobs))
    return jsonify(**response)
@datatable.route('/api/get_completed_install_jobs/')
@login_required
def api_get_completed_install_jobs():
    """Serve the completed install jobs DataTable (reads the history table)."""
    dt_params = DataTableParams(request)
    db_session = DBSession()
    # Free-text search: OR the criteria across the searchable columns.
    clauses = []
    if len(dt_params.search_value):
        criteria = '%' + dt_params.search_value + '%'
        clauses.append(Host.hostname.like(criteria))
        clauses.append(InstallJobHistory.install_action.like(criteria))
        clauses.append(InstallJobHistory.scheduled_time.like(criteria))
        clauses.append(InstallJobHistory.start_time.like(criteria))
        clauses.append(InstallJobHistory.packages.like(criteria))
        clauses.append(InstallJobHistory.status_time.like(criteria))
        clauses.append(InstallJobHistory.created_by.like(criteria))
    query = db_session.query(InstallJobHistory)\
        .join(Host, Host.id == InstallJobHistory.host_id)
    total_count = query.filter(InstallJobHistory.status == JobStatus.COMPLETED).count()
    # NOTE(review): or_(*clauses) is empty when there is no search value;
    # empty conjunctions are deprecated in SQLAlchemy - consider guarding.
    filtered_count = query.filter(and_(InstallJobHistory.status == JobStatus.COMPLETED), or_(*clauses)).count()
    # Sort expressions in client-side column order; '' is a non-sortable column.
    columns = [getattr(Host.hostname, dt_params.sort_order)(),
               getattr(InstallJobHistory.install_action, dt_params.sort_order)(),
               getattr(InstallJobHistory.scheduled_time, dt_params.sort_order)(),
               getattr(InstallJobHistory.start_time, dt_params.sort_order)(),
               getattr(InstallJobHistory.packages, dt_params.sort_order)(),
               getattr(InstallJobHistory.status_time, dt_params.sort_order)(),
               '',
               getattr(InstallJobHistory.created_by, dt_params.sort_order)()]
    install_jobs = query.order_by(columns[dt_params.column_order])\
        .filter(and_(InstallJobHistory.status == JobStatus.COMPLETED), or_(*clauses))\
        .slice(dt_params.start_length, dt_params.start_length + dt_params.display_length).all()
    response = dict()
    response['draw'] = dt_params.draw
    response['recordsTotal'] = total_count
    response['recordsFiltered'] = filtered_count
    response.update(get_install_job_json_dict(install_jobs))
    return jsonify(**response)
@datatable.route('/api/get_scheduled_download_jobs/')
@login_required
def api_get_scheduled_download_jobs():
    """Serve the DataTables AJAX payload for scheduled download jobs."""
    params = DataTableParams(request)
    db_session = DBSession()

    # OR-ed LIKE filters across the searchable columns.
    search_clauses = []
    if len(params.search_value):
        pattern = '%' + params.search_value + '%'
        search_clauses = [DownloadJob.cco_filename.like(pattern),
                          DownloadJob.scheduled_time.like(pattern),
                          DownloadJob.created_by.like(pattern)]

    base_query = db_session.query(DownloadJob)
    total_count = base_query.filter(DownloadJob.status == JobStatus.SCHEDULED).count()
    filtered_count = base_query.filter(and_(DownloadJob.status == JobStatus.SCHEDULED),
                                       or_(*search_clauses)).count()

    # Client column index -> ORDER BY expression ('' marks a non-sortable column).
    sort_columns = [getattr(DownloadJob.cco_filename, params.sort_order)(),
                    getattr(DownloadJob.scheduled_time, params.sort_order)(),
                    '',
                    getattr(DownloadJob.created_by, params.sort_order)()]

    download_jobs = base_query.order_by(sort_columns[params.column_order])\
        .filter(and_(DownloadJob.status == JobStatus.SCHEDULED), or_(*search_clauses))\
        .slice(params.start_length, params.start_length + params.display_length).all()

    response = {'draw': params.draw,
                'recordsTotal': total_count,
                'recordsFiltered': filtered_count}
    response.update(get_download_job_json_dict(db_session, download_jobs))
    return jsonify(**response)
@datatable.route('/api/get_in_progress_download_jobs/')
@login_required
def api_get_in_progress_download_jobs():
dt_params = DataTableParams(request)
db_session = DBSession()
clauses = []
if len(dt_params.search_value):
criteria = '%' + dt_params.search_value + '%'
clauses.append(DownloadJob.cco_filename.like(criteria))
clauses.append(DownloadJob.scheduled_time.like(criteria))
clauses.append(DownloadJob.status.like(criteria))
clauses.append(DownloadJob.status_time.like(criteria))
clauses.append(DownloadJob.created_by.like(criteria))
query = db_session.query(DownloadJob)
total_count = query.filter(and_(DownloadJob.status == JobStatus.IN_PROGRESS)).count()
filtered_count = query.filter(and_(DownloadJob.status == JobStatus.IN_PROGRESS),
or_(*clauses)).count()
columns = [getattr(DownloadJob.cco_filename, dt_params.sort_order)(),
getattr(DownloadJob.scheduled_time, dt_params.sort_order)(),
| |
= gray < threshold_level
# color the pixels in the mask
# crop_img[mask] = (204, 119, 0)
gray = cv2.cvtColor(cropped_y_axis, cv2.COLOR_BGR2GRAY)
# set threshold level
threshold_level = 120
# find coordinates of all pixels below threshold
ycoords = np.column_stack(np.where(gray < threshold_level))
# create mask of all pixels lower than threshold level
mask = gray < threshold_level
# color the pixels in the mask
# crop_img[mask] = (204, 119, 0)
return xcoords, ycoords
def store_coords(crop_img, xcoords, ycoords, x_pixels_width, y_pixels_height, x_axis_exists, y_axis_exists):
    """Locate the chart's x- and y-axis lines from thresholded pixel coordinates.

    Builds histograms of the dark-pixel coordinates from the threshold step:
    the row (or column) containing the most dark pixels is taken to be the
    x-axis (or y-axis).  If the longest candidate line spans less than half
    the image, the user is asked to mark the axes by hand via
    click_img_axes(), whose result arrives through the global `ref_points`.

    :param crop_img: cropped chart image (not used directly here).
    :param xcoords: (row, col) coordinates of dark pixels in the x-axis strip.
    :param ycoords: (row, col) coordinates of dark pixels in the y-axis strip.
    :param x_pixels_width: chart image width in pixels.
    :param y_pixels_height: chart image height in pixels.
    :param x_axis_exists: set True when the x-axis had to be hand-drawn.
    :param y_axis_exists: set True when the y-axis had to be hand-drawn.
    :return: (y_pixel_line, x_pixel_line, longest_yline_size,
              longest_xline_size, x_axis_exists, y_axis_exists, origin)
    """
    global ref_points
    # Histogram: row coordinate -> number of dark pixels found on that row.
    y_values = {}
    # Flat list of all coordinates (only used by the commented-out dump below).
    ylist = []
    for i in range(len(xcoords)):
        ylist.append(xcoords[i])
        if xcoords[i][0] not in y_values:
            y_values[xcoords[i][0]] = 1
        else:
            y_values[xcoords[i][0]] += 1
    # Sort rows by dark-pixel count, most populated row first.
    sorted_xdict = OrderedDict(
        sorted(y_values.items(), key=itemgetter(1), reverse=True))
    # The most populated row is the longest horizontal line candidate.
    longest_yline_size = list(sorted_xdict.values())[0]
    # NOTE(review): the 0.7 * height offset presumably compensates for the
    # crop origin of the x-axis strip -- confirm against the caller's crop.
    y_pixel_line = list(sorted_xdict.keys())[
        0] + round(y_pixels_height*0.7)
    # Histogram: column coordinate -> number of dark pixels in that column.
    x_values = {}
    xlist = []
    for i in range(len(ycoords)):
        xlist.append(ycoords[i])
        if ycoords[i][1] not in x_values:
            x_values[ycoords[i][1]] = 1
        else:
            x_values[ycoords[i][1]] += 1
    # Sort columns by dark-pixel count, most populated column first.
    sorted_ydict = OrderedDict(
        sorted(x_values.items(), key=itemgetter(1), reverse=True))
    # The most populated column is the longest vertical line candidate.
    longest_xline_size = list(sorted_ydict.values())[0]
    # print(list(sorted_ydict.values())[1])
    x_pixel_line = list(sorted_ydict.keys())[0]
    origin = (x_pixel_line, y_pixel_line)
    print(" info: origin: ", origin)
    # Accept the candidate as the x-axis only if it spans more than half
    # the image width; otherwise fall back to manual selection.
    if longest_yline_size > 0.5*x_pixels_width:
        print("The x-axis is at y pixel ", y_pixel_line)
        print("The x-axis is ", longest_yline_size, " pixels long")
    else:
        messagebox.showinfo(
            title="Get x-axis", message="Click at the top of the y-axis and drag to the right of the x-axis.")
        click_img_axes()
        # ref_points[0] is the drag start (top of the y-axis) and
        # ref_points[1] the drag end (right end of the x-axis).
        y_pixel_line = ref_points[1][1]
        longest_yline_size = ref_points[1][0] - ref_points[0][0]
        print("The x-axis is at y pixel ", y_pixel_line)
        print("The x-axis is ", longest_yline_size, " pixels long")
        x_axis_exists = True
    # Same acceptance test for the y-axis against half the image height.
    if longest_xline_size > 0.5*y_pixels_height:
        print("The y-axis is at x pixel ", x_pixel_line)
        print("The y-axis is ", longest_xline_size, " pixels long")
    else:
        # If the user already drew a rectangle for the x-axis above, reuse
        # it instead of prompting a second time.
        if len(ref_points) > 0:
            pass
        else:
            messagebox.showinfo(
                title="Get y-axis", message="Click at the top of the y-axis and drag to the right of the x-axis.")
            click_img_axes()
        x_pixel_line = ref_points[0][0]
        longest_xline_size = ref_points[1][1] - ref_points[0][1]
        print("The y-axis is at x pixel ", x_pixel_line)
        print("The y-axis is ", longest_xline_size, " pixels long")
        y_axis_exists = True
    # makes a text file with all the y and x coordinates of the pixels under the threshold
    # with open('listfile.txt', 'w') as filehandle:
    #     for listitem in ylist:
    #         filehandle.write('%s\n' % listitem)
    # print(x_axis_exists)
    return y_pixel_line, x_pixel_line, longest_yline_size, longest_xline_size, x_axis_exists, y_axis_exists, origin
def click_img_axes():
    """Open an OpenCV window so the user can drag a rectangle marking the axes.

    Works on a fresh copy of the pristine image (`draw_axes_img_redo`) so any
    previous rectangle is erased; the get_axes() mouse callback records the
    drag endpoints in the global `ref_points`.
    """
    global ref_points
    global draw_axes_img
    global draw_axes_img_redo
    # Start from the clean backup so earlier rectangles disappear.
    draw_axes_img = draw_axes_img_redo.copy()
    cv2.namedWindow('image')
    cv2.setMouseCallback('image', get_axes)
    cv2.imshow('image', draw_axes_img)
    cv2.waitKey(0)
def get_axes(event, x, y, flags, param):
    """OpenCV mouse callback recording a click-and-drag rectangle.

    Left-button-down resets the global `ref_points` to the drag start;
    left-button-up appends the end point, draws the rectangle on the image,
    and spawns a thread asking the user whether to redraw.
    """
    # grab references to the global variables
    global ref_points
    global draw_axes_img
    # if the left mouse button was clicked, record the starting
    # (x, y) coordinates
    if event == cv2.EVENT_LBUTTONDOWN:
        ref_points = [(x, y)]
        print(ref_points)
    # check to see if the left mouse button was released
    elif event == cv2.EVENT_LBUTTONUP:
        # record the ending (x, y) coordinates
        ref_points.append((x, y))
        # draw a rectangle around the region of interest
        cv2.rectangle(
            draw_axes_img, ref_points[0], ref_points[1], (255, 0, 0), 1)
        cv2.imshow("image", draw_axes_img)
        # Ask about redrawing on a separate thread so the tkinter dialog
        # does not block this OpenCV event callback.
        t3 = threading.Thread(target=redraw, args=())
        t3.start()
        cv2.waitKey(0)
def redraw():
    """Ask whether the user wants to redraw the axis-selection rectangle.

    Runs after a rectangle has been completed in get_axes(): if the user
    answers yes the selection window is reopened, otherwise all OpenCV
    windows are closed.
    """
    global ref_points
    # Only prompt once a complete rectangle (start + end point) exists.
    if len(ref_points) == 2:
        ans = messagebox.askyesno(
            title="Redraw?", message="Would you like to redraw the rectangle?")
        # Idiomatic truthiness test instead of "ans == True" (PEP 8 E712).
        if ans:
            click_img_axes()
        else:
            cv2.destroyAllWindows()
def get_xdata(crop_img, y_pixel_line, x_pixel_line, x_axis_exists, y_axis_values, longest_yline_size, longest_xline_size):
    """OCR the region below the x-axis and extract the axis labels.

    Crops the strip underneath the detected x-axis line, runs Tesseract on
    it, and splits the recognized tokens into x-axis tick values and the
    x-axis title.  Also computes the horizontal pixel midpoint of each tick
    label's bounding box for later pixel-to-value mapping.

    :param crop_img: BGR chart image.
    :param y_pixel_line: y pixel row of the detected x-axis.
    :param x_pixel_line: x pixel column of the detected y-axis (unused here).
    :param x_axis_exists: flag from axis detection (unused here).
    :param y_axis_values: y-axis labels (unused here; kept for interface).
    :param longest_yline_size: x-axis length in pixels (unused here).
    :param longest_xline_size: y-axis length in pixels (unused here).
    :return: (x_axis_values, x_axis_title, x_axis_value_medians)
    """
    y_pixels_height = crop_img.shape[0]
    x_pixels_width = crop_img.shape[1]
    # Strip just below the x-axis line; this is where the tick labels live.
    x_axis_img = crop_img[y_pixel_line +
                          5: y_pixels_height, 0: x_pixels_width]
    # OCR word boxes for the strip.
    d2 = pytesseract.image_to_data(x_axis_img, output_type=Output.DICT)
    text = d2['text']
    left = d2['left']
    width = d2['width']
    # Tick values, axis title words, and value-box horizontal midpoints.
    x_axis_values = []
    x_axis_title = []
    x_axis_value_medians = []
    # Find the first OCR token that is purely digits or purely letters.
    not_space = ''
    for token in text:
        if token.isdigit() or token.isalpha():
            not_space += token
        if not_space != '':
            break
    # The first index where an x-axis value appears in text.
    first_value = text.index(not_space)
    text = text[first_value:]
    # Tokens up to the next empty string are treated as tick values.
    if '' in text:
        last_value = text.index('')
    else:
        last_value = -1
    xvalues_text = text[:last_value]
    # Strip non-alphanumeric characters from each candidate value.
    # BUGFIX: the original per-character test used
    # "not x.isdigit() or not x.isalpha()", which is true for every
    # character, and it called remove()/insert() on the list while
    # iterating it.  One re.sub per token yields the same cleaned
    # result without either problem.
    for i in range(len(xvalues_text)):
        xvalues_text[i] = re.sub('[^a-zA-Z0-9_]', '', xvalues_text[i])
    # Keep only non-empty, purely numeric or purely alphabetic tokens.
    for token in xvalues_text:
        if token != '' and (token.isdigit() or token.isalpha()):
            x_axis_values.append(token)
    # Everything after the tick values is assumed to be the axis title.
    values_after_xvalues_text = text[last_value:]
    for token in values_after_xvalues_text:
        if token != '':
            x_axis_title.append(token)
    if len(x_axis_title) == 0:
        x_axis_title.append('None')
    # Align the box geometry with the sliced token list.
    left = left[first_value:]
    width = width[first_value:]
    print("x-axis title", x_axis_title)
    print("x-axis values ", x_axis_values)
    # Horizontal midpoint of each value's bounding box.
    for i in range(len(x_axis_values)):
        median = round(left[i] + round(width[i] / 2))
        x_axis_value_medians.append(median)
    # Draw the OCR boxes onto the strip (debug visualization).
    n_boxes2 = len(d2['level'])
    for i in range(n_boxes2):
        (x, y, w, h) = (d2['left'][i], d2['top']
                        [i], d2['width'][i], d2['height'][i])
        cv2.rectangle(x_axis_img, (x, y), (x + w, y + h), (0, 255, 0), 2)
    return x_axis_values, x_axis_title, x_axis_value_medians
def get_ydata(crop_img, x_pixel_line, y_pixel_line, y_axis_exists, longest_xline_size):
    """OCR the region left of the y-axis and extract the axis labels.

    Crops the strip to the left of the detected y-axis, runs Tesseract on
    it, and separates the recognized characters into numeric y-axis tick
    values and a y-axis title.  Common OCR confusions (o/O -> 0, s/S -> 5)
    are corrected before classification.

    :return: (y_axis_values, biggest_max, smallest_min, y_axis_title).
             NOTE(review): y_axis_values holds strings, so biggest_max is a
             string and the float check at the end always forces
             smallest_min to 0 -- confirm this is intended.
    """
    y_axis_img = crop_img[0: y_pixel_line + 10, 0: x_pixel_line-5]
    # OCR word boxes for the strip.
    d2 = pytesseract.image_to_data(y_axis_img, output_type=Output.DICT)
    text = d2['text']
    top = d2['top']
    width = d2['width']
    # Numeric tick labels found on the y-axis.
    y_axis_values = []
    # Characters assumed to belong to the axis title.
    y_axis_title = []
    # Vertical midpoints of the boxes around each y-axis value.
    y_axis_value_medians = []
    separated_text = []
    new_text = []
    # Drop the empty tokens Tesseract emits between words.
    for i in text:
        if i != '':
            new_text.append(i)
    for i in range(len(new_text)):
        separated_text.append(list(new_text[i]))
    # Fix frequent OCR misreads, then classify each character: digits mark
    # the token as a tick value, other characters go to the title.
    # NOTE(review): a token containing several digits is appended to
    # y_axis_values once per digit, producing duplicates -- confirm.
    for i in range(len(separated_text)):
        for j in range(len(separated_text[i])):
            if separated_text[i][j] == 'o' or separated_text[i][j] == 'O':
                separated_text[i][j] = '0'
            if separated_text[i][j] == 's' or separated_text[i][j] == 'S':
                separated_text[i][j] = '5'
            if separated_text[i][j].isdigit():
                y_axis_values.append("".join(separated_text[i]))
            else:
                y_axis_title.append(separated_text[i][j])
    # Whole alphabetic tokens are also treated as title words.
    for i in text:
        if i != ''and i.isalpha():
            y_axis_title.append(i)
    if len(y_axis_title) == 0:
        y_axis_title.append('None')
    print("y-axis values", y_axis_values)
    print("y-axis title", y_axis_title)
    # NOTE(review): the vertical midpoint is computed from the box *width*,
    # not its height -- looks like a bug; confirm before relying on these.
    for i in range(len(y_axis_values)):
        median = round(top[i] + round(width[i] / 2))
        y_axis_value_medians.append(median)
    # Draw the OCR boxes onto the strip (debug visualization).
    n_boxes2 = len(d2['level'])
    for i in range(n_boxes2):
        (x, y, w, h) = (d2['left'][i], d2['top']
                        [i], d2['width'][i], d2['height'][i])
        cv2.rectangle(y_axis_img, (x, y), (x + w, y + h), (0, 255, 0), 2)
    # Assumes labels were read top-to-bottom, so the first is the largest.
    # NOTE(review): raises IndexError when no values were recognized.
    biggest_max = y_axis_values[0]
    smallest_min = y_axis_values[-1]
    if type(smallest_min) != float:
        smallest_min = 0
    return y_axis_values, biggest_max, smallest_min, y_axis_title
def get_datapoints(crop_img, x_axis_exists, longest_xline_size, x_axis_values, x_axis_value_medians, | |
pc.pos )
return True
def trade_item( self, it, pc, redraw ):
"""Trade this item to another character."""
mymenu = charsheet.RightMenu( self.screen, predraw = redraw )
for opc in self.camp.party:
if opc != pc and opc.is_alright():
mymenu.add_item( str( opc ) , opc )
mymenu.add_item( "Cancel" , False )
mymenu.add_alpha_keys()
opc = mymenu.query()
if opc:
pc.contents.unequip( it )
if not it.equipped:
if opc.can_take_item( it ):
pc.contents.remove( it )
opc.contents.append( it )
else:
self.alert( "{0} can't carry any more.".format( str( opc ) ) )
return True
    def use_item( self, it, pc, myredraw ):
        # Delegate to the item's own use() hook; the explorer itself is
        # passed along so the item can trigger alerts/animations through it.
        it.use( pc, self )
def learn_spell_from_item( self, it, pc, myredraw ):
self.camp.known_spells.append( it.spell )
self.alert( "You have added {0} to your library.".format( it.spell ) )
if hasattr( it, "quantity" ):
it.quantity += -1
if it.quantity < 1:
pc.contents.remove( it )
def equip_or_whatevs( self, it, pc, myredraw ):
"""Equip, trade, drop, or whatever this item."""
mymenu = charsheet.RightMenu( self.screen, predraw = myredraw )
if it.equipped:
mymenu.add_item( "Unequip Item", self.unequip_item )
elif pc.can_equip( it ):
mymenu.add_item( "Equip Item", self.equip_item )
if hasattr( it, "use" ):
mymenu.add_item( "Use Item", self.use_item )
if hasattr( it, "spell" ) and not self.camp.library_has_spell( it.spell ):
mymenu.add_item( "Learn Spell", self.learn_spell_from_item )
mymenu.add_item( "Trade Item", self.trade_item )
mymenu.add_item( "Drop Item", self.drop_item )
mymenu.add_item( "Exit", False )
mymenu.add_alpha_keys()
n = mymenu.query()
if n:
result = n( it, pc, myredraw )
myredraw.csheet.regenerate_avatar()
self.view.regenerate_avatars( self.camp.party )
return result
else:
return True
    def do_level_training( self, student ):
        """Run the rank-advancement menu for one party member.

        Offers three kinds of choices: advancing the student's most recent
        level, switching to one of the student's other existing levels, or
        learning a new class that another party member already has (and the
        student qualifies for).
        """
        myredraw = charsheet.CharacterViewRedrawer( csheet=charsheet.CharacterSheet(student, screen=self.screen, camp=self.camp), screen=self.screen, predraw=self.view, caption="Advance Rank" )
        mymenu = charsheet.RightMenu( self.screen, predraw = myredraw )
        mymenu.add_item( "Advance {0}".format( student.mr_level.name ) , student.mr_level.__class__ )
        # The student may instead switch back to any other class they hold.
        for j in student.levels:
            if j is not student.mr_level:
                mymenu.add_item( "Change to {0}".format( j.name ) , j.__class__ )
        # Classes held anywhere in the party, minus those the student has.
        jobs = set()
        for pc in self.camp.party:
            for j in pc.levels:
                jobs.add( j.__class__ )
        for j in student.levels:
            jobs.remove( j.__class__ )
        for j in jobs:
            if j.can_take_level( student ):
                mymenu.add_item( "Learn {0}".format( j.name ) , j )
        mymenu.sort()
        mymenu.add_alpha_keys()
        mymenu.add_item( "Cancel", False )
        myredraw.menu = mymenu
        it = mymenu.query()
        if it:
            # advance() returns the name of a stat raised this rank, if any.
            improved_stat = student.advance( it )
            if improved_stat:
                self.alert( "{0} gains a rank in {1} \n and +1 {2}.".format( student, it.name, improved_stat ) )
            else:
                self.alert( "{0} gains a rank in {1}.".format( student, it.name ) )
def view_party( self, n, can_switch=True ):
if n >= len( self.camp.party ):
n = 0
pc = self.camp.party[ n ]
keep_going = True
myredraw = charsheet.CharacterViewRedrawer( csheet=charsheet.CharacterSheet(pc, screen=self.screen, camp=self.camp), screen=self.screen, predraw=self.view, caption="View Party" )
while keep_going:
mymenu = charsheet.RightMenu( self.screen, predraw = myredraw )
pc.contents.tidy()
for i in pc.contents:
if i.equipped:
mymenu.add_item( "*" + str( i ) , i )
elif i.slot != items.NOSLOT and not pc.can_equip( i ):
mymenu.add_item( "#" + str( i ) , i )
else:
mymenu.add_item( str( i ) , i )
if pc.xp > pc.xp_for_next_level():
mymenu.add_item( "!!!Advance Rank!!!", 999 )
mymenu.sort()
mymenu.add_alpha_keys()
mymenu.add_item( "Exit", False )
myredraw.menu = mymenu
if can_switch:
mymenu.quick_keys[ pygame.K_LEFT ] = -1
mymenu.quick_keys[ pygame.K_RIGHT ] = 1
it = mymenu.query()
if it is -1:
n = ( n + len( self.camp.party ) - 1 ) % len( self.camp.party )
pc = self.camp.party[n]
myredraw.csheet = charsheet.CharacterSheet(pc, screen=self.screen, camp=self.camp)
elif it is 1:
n = ( n + 1 ) % len( self.camp.party )
pc = self.camp.party[n]
myredraw.csheet = charsheet.CharacterSheet(pc, screen=self.screen, camp=self.camp)
elif it is 999:
self.do_level_training( pc )
keep_going = False
elif it:
# An item was selected. Deal with it.
if not self.equip_or_whatevs( it, pc, myredraw ):
keep_going = False
else:
keep_going = False
def reorder_party( self ):
new_party_order = list()
psheet = charsheet.PartySheet( new_party_order, screen=self.screen, camp=self.camp )
myredraw = charsheet.CharacterViewRedrawer( csheet=psheet, screen=self.screen, predraw=self.view, caption="Reorder Party" )
while self.camp.party:
mymenu = charsheet.RightMenu( self.screen, predraw = myredraw )
for pc in self.camp.party:
mymenu.add_item( str( pc ), pc )
mymenu.add_alpha_keys()
mymenu.add_item( "Exit", False )
myredraw.menu = mymenu
it = mymenu.query()
if it:
self.camp.party.remove( it )
new_party_order.append( it )
psheet.regenerate_avatars()
else:
break
if self.camp.party:
new_party_order += self.camp.party
self.camp.party = new_party_order
def monster_inactive( self, mon ):
return mon not in self.camp.party and (( not self.camp.fight ) or mon not in self.camp.fight.active)
    def update_monsters( self ):
        """Per-tick upkeep for non-party characters on the map.

        For every inactive monster this handles occasional random wandering
        (confined to the team's home rect when one is set), re-hiding of
        stealthy hostiles, and noticing the party -- which either pulls the
        monster into the fight or starts a threatening conversation.
        """
        for m in self.scene.contents:
            if isinstance( m, characters.Character ) and self.monster_inactive(m):
                # First handle movement.
                # The hash(m) offset staggers movement so monsters don't all
                # step on the same tick; during a fight they move every tick.
                if m.get_move() and ( ((self.time + hash(m)) % 35 == 1 ) or self.camp.fight ):
                    rdel = random.choice( self.scene.DELTA8 )
                    nupos = ( m.pos[0] + rdel[0], m.pos[1] + rdel[1] )
                    if self.scene.on_the_map(nupos[0],nupos[1]) and not self.scene.map[nupos[0]][nupos[1]].blocks_walking() and not self.scene.get_character_at_spot(nupos):
                        if m.team and m.team.home:
                            # Wandering stays inside the team's home area.
                            if m.team.home.collidepoint( nupos ):
                                m.pos = nupos
                        else:
                            m.pos = nupos
                # Monsters that can hide may hide.
                if m.can_use_stealth() and m.is_hostile( self.camp ) and random.randint(1,6) == 1:
                    m.hidden = True
                # Next, check visibility to PC.
                if m.team and m.team.on_guard() and m.pos in self.scene.in_sight:
                    pov = pfov.PointOfView( self.scene, m.pos[0], m.pos[1], 5 )
                    in_sight = False
                    for pc in self.camp.party:
                        if pc.pos in pov.tiles and pc in self.scene.contents:
                            in_sight = True
                            break
                    if in_sight:
                        react = m.get_reaction( self.camp )
                        if react < characters.FRIENDLY_THRESHOLD:
                            if react < characters.ENEMY_THRESHOLD:
                                # Hostile enough to attack: join the battle.
                                anims = [ animobs.SpeakAttack(m.pos,loop=16), ]
                                animobs.handle_anim_sequence( self.screen, self.view, anims )
                                self.camp.activate_monster( m )
                                break
                            else:
                                # Angry but not attacking: threaten instead.
                                anims = [ animobs.SpeakAngry(m.pos,loop=16), ]
                                animobs.handle_anim_sequence( self.screen, self.view, anims )
                                # Start by setting this team to hostile- just in case the player
                                # exits the dialogue w/o making a truce.
                                m.team.charm_roll = -999
                                self.converse_with_model( m, dialogue.CUE_THREATEN )
def check_trigger( self, trigger, thing=None ):
# Something is happened that plots may need to react to.
for p in self.camp.active_plots():
p.handle_trigger( self, trigger, thing )
def expand_puzzle_menu( self, thing, thingmenu ):
# Something is happened that plots may need to react to.
for p in self.camp.active_plots():
p.modify_puzzle_menu( thing, thingmenu )
if not thingmenu.items:
thingmenu.add_item( "[Continue]", None )
else:
thingmenu.sort()
thingmenu.add_alpha_keys()
    def keep_exploring( self ):
        # Truthy while exploration should continue: at least one PC alive,
        # no quit requested (locally or via pygwrap.GOT_QUIT), and no pending
        # travel destination.  Note: this returns the first falsy operand or
        # the last operand of the chain, not necessarily a bool.
        return self.camp.first_living_pc() and self.no_quit and not pygwrap.GOT_QUIT and not self.camp.destination
    def update_scene( self ):
        """If appropriate, move models back to their home zone and restock monsters.

        Runs at most once per in-game day: heals non-party characters, sends
        strays back to their team's home rect, and randomly refills empty
        monster zones with fresh encounters scaled between scene rank and
        party rank.
        """
        if self.scene.last_updated < self.camp.day:
            for m in self.scene.contents:
                if isinstance( m, characters.Character ) and m not in self.camp.party:
                    # Regenerate any damage suffered since last time.
                    m.hp_damage = 0
                    m.mp_damage = 0
                    if m.team and m.team.home and not m.team.home.collidepoint( m.pos ):
                        # This monster is lost. Send it back home.
                        m.pos = self.scene.find_entry_point_in_rect( m.team.home )
            # Check the monster zones. Restock random monsters.
            party_rank = self.camp.party_rank()
            restock_chance = 50
            if party_rank > self.scene.rank:
                # Over-leveled parties see fewer restocks; floor of 10%.
                restock_chance = max( 10, ( restock_chance * 2 ) // ( 2 + party_rank - self.scene.rank ) )
            for mz in self.scene.monster_zones:
                if self.scene.monster_zone_is_empty( mz ) and random.randint(1,100) <= restock_chance:
                    NewTeam = teams.Team( default_reaction=characters.SAFELY_ENEMY, home=mz,
                                          rank=max( self.scene.rank, ( self.scene.rank + party_rank ) // 2 ),
                                          strength=100, habitat=self.scene.get_encounter_request() )
                    mlist = NewTeam.build_encounter(self.scene)
                    poslist = self.scene.find_free_points_in_rect( mz )
                    # Drop each spawned monster on a free tile until we run out.
                    for m in mlist:
                        if poslist:
                            pos = random.choice( poslist )
                            m.place( self.scene, pos )
                            poslist.remove( pos )
                        else:
                            break
            self.scene.last_updated = self.camp.day
def add_spells_for_pc( self, pc, mymenu ):
"""Add all of this pc's castable exploration spells to the menu."""
techs = pc.get_invocations( False )
for t in techs:
mymenu.add_item( t.menu_str(), t )
# In addition to the prepared spells, the character can cast directly
# from the library of known spells.
for t in self.camp.known_spells:
if t.can_be_learned( pc, False ) and t.can_be_invoked( pc, False ) and t not in techs:
mymenu.add_item( t.menu_str(), t )
def cast_explo_spell( self, n, can_switch=True ):
if n >= len( self.camp.party ):
n = 0
pc = self.camp.party[ n ]
keep_going = True
myredraw = charsheet.CharacterViewRedrawer( csheet=charsheet.CharacterSheet(pc, screen=self.screen, camp=self.camp), screen=self.screen, predraw=self.view, caption="Spells & Techniques" )
while keep_going:
mymenu = charsheet.RightMenu( self.screen, predraw = myredraw )
self.add_spells_for_pc( pc, mymenu )
mymenu.sort()
mymenu.add_alpha_keys()
mymenu.add_item( "Exit", False )
myredraw.menu = mymenu
if can_switch:
mymenu.quick_keys[ pygame.K_LEFT ] = -1
mymenu.quick_keys[ pygame.K_RIGHT ] = 1
it = mymenu.query()
if it is -1:
n = ( n + len( self.camp.party ) - 1 ) % len( self.camp.party )
pc = self.camp.party[n]
myredraw.csheet = charsheet.CharacterSheet(pc, screen=self.screen, camp=self.camp)
elif it is | |
formatted_lines.append(line)
# Here we look for the top months we have in the recorded data.
if len(all_uniques) != 0 and len(all_pageviews) != 0:
top_uniques = max(all_uniques)
top_pageviews = max(all_pageviews)
for key, data in traffic_dictionary.items():
if top_uniques in data:
top_month_uniques = key
if top_pageviews in data:
top_month_pageviews = key
else:
top_uniques = top_pageviews = None
# Get the estimated CURRENT monthly average for this month.
# This is generated from the current daily data.
daily_data = subreddit_traffic_daily_estimator(subreddit_name)
if daily_data is not None:
# We have daily estimated data that we can parse.
# Get month data and the current month as a YYYY-MM string.
current_month = timekeeping.month_convert_to_string(time.time())
current_month_dt = datetime.datetime.strptime(current_month, "%Y-%m").date()
prev_month = (current_month_dt + datetime.timedelta(-15)).strftime("%Y-%m")
# Estimate the change.
estimated_uniques = daily_data["estimated_uniques"]
estimated_pageviews = daily_data["estimated_pageviews"]
# Get the previous month's data for comparison.
# This will fail if the keys are not included in the dictionary
# or if a variable for division is set to zero.
try:
previous_uniques = traffic_dictionary[prev_month][0]
previous_pageviews = traffic_dictionary[prev_month][1]
uniques_diff = estimated_uniques - previous_uniques
pageviews_diff = estimated_pageviews - previous_pageviews
est_uniques_change = round((uniques_diff / previous_uniques) * 100, 2)
est_pageviews_change = round((pageviews_diff / previous_pageviews) * 100, 2)
ratio_raw = round(estimated_pageviews / estimated_uniques, 0)
ratio_est_uniques_pageviews = "≈1:{}".format(int(ratio_raw))
x_ratio = 1 + (est_pageviews_change * 0.01) # Est. ratio
# Interpolate estimated number of posts and comments based
# on the Pushshift data and the ratio we have for pageviews.
now_posts = correlated_data["submission"].get(prev_month, "0").replace(",", "")
if now_posts != "N/A":
now_posts = int(now_posts)
est_posts = "{:,.0f}".format(now_posts * x_ratio)
else:
est_posts = "N/A"
now_comments = correlated_data["comment"].get(prev_month, "0").replace(",", "")
if now_comments != "N/A":
now_comments = int(now_comments)
est_comments = "{:,.0f}".format(now_comments * x_ratio)
else:
est_comments = "N/A"
except (KeyError, ZeroDivisionError):
est_uniques_change = est_pageviews_change = ratio_est_uniques_pageviews = "---"
est_posts = est_comments = "N/A"
estimated_line = basic_line.format(
"*{} (estimated)*".format(current_month),
"",
estimated_uniques,
est_uniques_change,
"",
estimated_pageviews,
est_pageviews_change,
ratio_est_uniques_pageviews,
est_posts,
est_comments,
)
# Insert at the start of the formatted lines list, position 0.
formatted_lines.insert(0, estimated_line)
# Get the averages of both the total amounts and the percentages.
# If there's no data, set the averages to zero.
try:
num_avg_uniques = round(sum(all_uniques) / len(all_uniques), 2)
num_avg_pageviews = round(sum(all_pageviews) / len(all_uniques), 2)
except ZeroDivisionError:
num_avg_uniques = num_avg_pageviews = 0
# Make sure we have month over month data, because if we don't have
# more than one month's worth of data, we can't calculate the
# average per month increase.
if len(all_uniques_changes) > 0 and len(all_pageviews_changes) > 0:
num_avg_uniques_change = round(sum(all_uniques_changes) / len(all_uniques_changes), 2)
num_pageviews_changes = round(sum(all_pageviews_changes) / len(all_pageviews_changes), 2)
else:
num_avg_uniques_change = num_pageviews_changes = 0
# Form the Markdown for the "Average" section.
average_section = (
"* *Average Monthly Uniques*: {:,}\n* *Average Monthly Pageviews*: {:,}\n"
"* *Average Monthly Uniques Change*: {:+}%"
"\n* *Average Monthly Pageviews Change*: {:+}%\n"
)
average_section = average_section.format(
num_avg_uniques, num_avg_pageviews, num_avg_uniques_change, num_pageviews_changes
)
# Get the difference of the top months from the average and
# form the Markdown for the "Top" section that follows.
# Get the percentage increase for uniques and pageviews.
if top_uniques is not None and top_pageviews is not None:
if num_avg_uniques != 0 and num_avg_pageviews != 0:
i_uniques = (top_uniques - num_avg_uniques) / num_avg_uniques
i_pageviews = (top_pageviews - num_avg_pageviews) / num_avg_pageviews
top_increase_uniques = ", {:+.2%} more than the average month".format(i_uniques)
top_increase_pageviews = ", {:+.2%} more than the average month".format(i_pageviews)
else:
top_increase_uniques = top_increase_pageviews = ""
top_section = (
"* *Top Month for Uniques*: {} ({:,} uniques{})\n"
"* *Top Month for Pageviews*: {} ({:,} pageviews{})\n\n"
)
top_section = top_section.format(
top_month_uniques,
top_uniques,
top_increase_uniques,
top_month_pageviews,
top_pageviews,
top_increase_pageviews,
)
else:
# Leave it blank if there's not enough data to derive a
# top section.
top_section = ""
# Form the overall Markdown table with the header and body text.
header = (
"\n| Month | 📈 | Uniques | Uniques % Change | 📉 | "
"Pageviews | Pageviews % Change | Uniques : Pageviews | "
"\n|-------|----|---------|------------------|----|------|"
"--------------------|---------------------|\n"
)
body = average_section + top_section + header + "\n".join(formatted_lines)
return body
"""SUBREDDIT STATISTICS RETRIEVAL"""
def subreddit_pushshift_probe(test_count=5):
    """Check whether Pushshift aggregations are currently usable.

    Artemis relies on aggregation queries for some statistics, so this
    probes a few random monitored subreddits over the last two weeks and
    records the outcome in the global `AGGS_ENABLED` flag.

    :param test_count: How many subreddit queries to test.
    :return: Nothing; the result is stored in `AGGS_ENABLED`.
    """
    global AGGS_ENABLED
    # With very few monitored subreddits, skip probing entirely.
    if len(MONITORED_SUBREDDITS) < test_count:
        AGGS_ENABLED = True
        return
    # Probe a random sample of subreddits, searching back two weeks.
    probe_targets = sample(MONITORED_SUBREDDITS, test_count)
    two_weeks_prior = int(time.time() - 1209600)
    start_search_at = timekeeping.month_convert_to_string(two_weeks_prior)
    # A plain query confirms the database itself is up; the follow-up
    # aggregation query only carries an `aggs` key when aggregations are
    # switched on server-side.
    aggs_valid_count = 0
    for subreddit in probe_targets:
        plain_query = (
            "https://api.pushshift.io/reddit/search/"
            "submission/?subreddit={}&after={}"
            "&size=25".format(subreddit, start_search_at)
        )
        plain_result = subreddit_pushshift_access(plain_query)
        aggs_result = subreddit_pushshift_access(plain_query + "&aggs=author")
        if plain_result and "aggs" in aggs_result:
            logger.info("Pushshift Probe: r/{} aggregations are valid.".format(subreddit))
            aggs_valid_count += 1
        else:
            logger.info("Pushshift Probe: r/{} aggregations are invalid.".format(subreddit))
    logger.info(
        "Pushshift Probe: Detected {} valid aggregation "
        "results out of {} tests.".format(aggs_valid_count, test_count)
    )
    # Aggregations count as enabled if any probe succeeded.
    AGGS_ENABLED = aggs_valid_count > 0
    if not AGGS_ENABLED:
        logger.info("Pushshift Probe: Aggregations are currently invalid.")
    return
def subreddit_pushshift_access(query_string, retries=3, stream_possible=False):
    """Fetch a Pushshift API endpoint and decode its JSON payload.

    Central access point for Pushshift queries: retries on decode and
    connection problems, and can fall back to the local stream database
    for aggregation queries while aggregations are disabled upstream.

    :param query_string: The exact API call we want to make.
    :param retries: How many connection attempts to make. Default is 3.
    :param stream_possible: Whether the stream database can satisfy this
                            query when aggregations are unavailable.
    :return: A dictionary of results, or an empty dictionary when every
             attempt failed.
    """
    # While aggregations are off upstream, aggregation queries can only be
    # answered by the stream database (when eligible) -- a live query would
    # not return real data anyway.
    if "&aggs" in query_string and not AGGS_ENABLED:
        if stream_possible:
            return stream_query_access(query_string)
        else:
            return {}
    # Try the live API, tolerating transient decode/connection errors.
    for _ in range(retries):
        try:
            reply = requests.get(query_string)
            # Return data as soon as it is decoded successfully.
            return reply.json()
        except (ValueError, ConnectionError, HTTPError, requests.exceptions.ChunkedEncodingError):
            continue
    return {}
def subreddit_subscribers_recorder(subreddit_name, check_pushshift=False):
"""A quick routine that gets the number of subscribers for a
specific subreddit and saves it to our database.
This is intended to be run daily at midnight UTC.
:param subreddit_name: The name of a Reddit subreddit.
:param check_pushshift: Whether we want to get the live count of
the subscribers from Reddit (normal mode)
or we want to try and get the more accurate
one from Pushshift. This is because Artemis
may have been added at the end of a UTC day
and its current subscriber count would not
be as accurate as an earlier one.
:return: Nothing.
"""
# Get the date by converting the time to YYYY-MM-DD in UTC.
current_time = time.time()
current_day = timekeeping.convert_to_string(current_time)
# `check_pushshift`: We want to get a more accurate count from the
# start of the day. Set `current_subs` to `None` if there is no
# information retrieved. If we can get data, it'll be in a dict
# format: {'2018-11-11': 9999}
if check_pushshift:
ps_subscribers = subreddit_subscribers_pushshift_historical_recorder(
subreddit_name, fetch_today=True
)
if len(ps_subscribers.keys()) == 0:
current_subs = None
else:
current_subs = ps_subscribers[current_day]
else:
current_subs = None
# Get the current state of subscribers. If an exception is thrown
# the subreddit is likely | |
self.right.append(copy.deepcopy(i))
for i in pair[1]:
self.left.append(copy.deepcopy(i))
# self.staffs.append(Staff(None, None, melodyVariable.getText()))
# self.music_stream.insert(0, right)
# if self.checkInst in self.grandInst:
# self.music_stream.insert(0, left)
def checkInListContext(self, ctx):
line = ctx.IDENTIFIER().getSymbol().line
col = ctx.IDENTIFIER().getSymbol().column
if ctx.IDENTIFIER().getText() not in self.variables:
raise Exception("Variable called but not declared", line, col)
return False
def checkInListNode(self, node):
    """Ensure the terminal *node* refers to a declared variable.

    Same contract as ``checkInListContext`` but for a bare terminal node:
    raises ``Exception("Variable called but not declared", line, col)``
    when the node's text is not a key of ``self.variables``; otherwise
    returns False.
    """
    symbol = node.getSymbol()
    declared = node.getText() in self.variables
    if not declared:
        raise Exception("Variable called but not declared",
                        symbol.line, symbol.column)
    return False
def evaluateDeclaredMelody(self,
                           ctx: MyGrammerParser.Declare_melodyContext, instru):
    """Evaluate each melody declaration in *ctx* and cache the result.

    For every declared melody this visits its staffs, validates the time
    signature (both numerator and denominator must be > 0), renders each
    staff into a pair of music21 ``stream.Part`` objects (upper/right and
    lower/left), applies any repeat endings recorded in ``self.ending_id``,
    and stores the list of ``(right, left)`` pairs in ``self.variables``
    keyed by the melody's identifier text.

    Raises ``Exception`` when a time-signature number is not positive, when
    the identifier is already bound (reassignment is forbidden), or when an
    identifier is otherwise invalid.

    NOTE(review): ``instru`` is not used anywhere in this body — presumably
    kept for signature parity with other evaluators; confirm with callers.
    NOTE(review): assumes ``ctx`` is directly iterable over Declare_melody
    subtrees — TODO confirm against the generated parser API.
    """
    # print("Declaring Melody", len(ctx.getChildren(), " found"))
    for i in ctx:
        # Gets a staff from music sheet
        melody = MyGrammerVisitor().visitDeclare_melody(i)
        identifier = melody.identifier
        staffs = melody.staffs
        melodyStaffs = []
        print(identifier)  # debug: echo the identifier token
        if melody.identifier.getText(
        ) not in self.variables:  # first binding of this identifier: build its staffs
            for staff in staffs:
                # Numerator of the time signature (beats per measure) must be positive.
                top = staff.beats_per_measure
                if int(top.getText()) <= 0:
                    line = top.getSymbol().line
                    col = top.getSymbol().column
                    raise Exception(
                        "Number of beats in staff must be greater than 0",
                        line, col)
                # Denominator (note value that gets one beat) must be positive.
                bottom = staff.note_value
                if int(bottom.getText()) <= 0:
                    line = bottom.getSymbol().line
                    col = bottom.getSymbol().column
                    raise Exception(
                        "Note value of whole beats in staff must be greater than 0",
                        line, col)
                # Two parallel Staff accumulators: upper (right hand) and
                # lower (left hand) voices of a grand staff.
                staffUp = Staff(top.getText(), bottom.getText(), None)
                staffDown = Staff(top.getText(), bottom.getText(), None)
                # Populate both accumulators; repeat-ending markers are
                # appended to self.ending_id as a side effect.
                for expr in staff.expressions:
                    self.evaluateStaffBlock(expr, top.getText(),
                                            bottom.getText(), staffUp,
                                            staffDown, False, False, self.ending_id)
                # for x in expr:
                #     newStaff.expressions.append(x)
                # Copy the accumulated measures into music21 Parts.
                right = stream.Part()
                left = stream.Part()
                for measure in staffUp.expressions:
                    right.append(measure)
                for measure in staffDown.expressions:
                    left.append(measure)
                up_idx = None
                down_idx = None
                # Wire repeat endings for the upper part. NOTE(review):
                # evaluateStaffBlock appends "UP_END"/"DOWN_END" entries as
                # 2-tuples, so reading id[2] here would raise IndexError when
                # such an entry is hit — confirm the intended tuple shape.
                # Also, if an *_END marker precedes its *_START, the start
                # index is still None — TODO confirm this cannot happen.
                for id in self.ending_id:
                    if id[0] == "UP_START":
                        up_idx = right.index(id[1])
                    elif id[0] == "UP_END":
                        repeat.insertRepeatEnding(right, up_idx, right.index(id[1]), endingNumber=id[2])
                # Same wiring for the lower part.
                for id in self.ending_id:
                    if id[0] == "DOWN_START":
                        down_idx = left.index(id[1])
                    elif id[0] == "DOWN_END":
                        repeat.insertRepeatEnding(left, down_idx, left.index(id[1]), endingNumber=id[2])
                melodyStaffs.append((right, left))
            self.variables[melody.identifier.getText()] = melodyStaffs
        else:  # reassignment of an existing identifier is an error
            line = melody.identifier.getSymbol().line
            col = melody.identifier.getSymbol().column
            raise Exception(
                "Reassignment is not allowed. Use a different identifier",
                line, col)
def evaluateStaffBlock(self, ctx: list, beats_per_measure, note_value,
staffUp, staffDown, first_staff,
last_staff, ending_id): # List of Expressions of a staff block
staff_accidentals = {}
first_measure = False
last_measure = False
cur_beats_up = 0
for idx, x in enumerate(ctx):
measureUp = stream.Measure()
measureDown = stream.Measure()
cur_beats = 0
if first_staff:
first_staff = False
first_measure = True
if last_staff:
if idx == len(ctx) - 1 or all(isinstance(y, AccidentalExpressionNode) for y in ctx[idx + 1:]):
last_staff = False
last_measure = True
if isinstance(x, DeclareMeasuresNode) or isinstance(
x, DeclareMeasuresGrandNode):
# measureUp = stream.Measure()
# measureDown = stream.Measure()
measureUp.insert(0, meter.TimeSignature(beats_per_measure + "/" + note_value))
measureDown.insert(0, meter.TimeSignature(beats_per_measure + "/" + note_value))
if x.ending_start is not None:
if isinstance(x, DeclareMeasuresNode) and len(self.ending_ctr) > 0 or isinstance(x, DeclareMeasuresGrandNode) and len(self.ending_ctr) > 1:
line = x.ending_start.ENDSTART().getSymbol().line
col = x.ending_start.ENDSTART().getSymbol().column
raise(Exception("Endings should be ended first before declaring another", line, col))
else:
if isinstance(
x,
DeclareMeasuresGrandNode):
if x.direction == "UP":
ending_id.append(("UP_START", measureUp, [int(ending.getText()) for ending in x.ending_start.INTEGER()]))
else:
ending_id.append(("DOWN_START", measureDown, [int(ending.getText()) for ending in x.ending_start.INTEGER()]))
self.ending_ctr.append(x.ending_start)
else:
ending_id.append(measureUp)
self.ending_ctr.append(x.ending_start)
# ending_id.append(measureUp)
# self.ending_ctr.append(x.ending_start)
if isinstance(
x,
DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.ending_start is None:
line = expDown.expressions[0].note_value.getSymbol(
).line - 1
col = expDown.expressions[0].note_value.getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have endingstart",
line, col)
else:
up_numbers = [int(ending.getText()) for ending in x.ending_start.INTEGER()]
down_numbers = [int(ending.getText()) for ending in expDown.ending_start.INTEGER()]
up_numbers.sort()
down_numbers.sort()
if up_numbers != down_numbers:
line = expDown.expressions[0].note_value.getSymbol(
).line - 1
col = expDown.expressions[0].note_value.getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have the same ending numbers",
line, col)
else:
for i in up_numbers:
self.ending_values.append((i, x.ending_start.INTEGER()))
if x.ending_end is not None:
if len(self.ending_ctr) == 0:
line = x.ending_end.ENDEND().getSymbol().line
col = x.ending_end.ENDEND().getSymbol().column
raise Exception("Invalid ending placement", line, col)
if isinstance(
x,
DeclareMeasuresGrandNode):
if x.direction == "UP":
ending_id.append(("UP_END", measureUp))
else:
ending_id.append(("DOWN_END", measureDown))
else:
ending_id.append(measureUp)
if isinstance(
x,
DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.ending_end is None:
line = expDown.expressions[0].note_value.getSymbol(
).line - 1
col = expDown.expressions[0].note_value.getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have endingend",
line, col)
del self.ending_ctr[-1]
if x.repeat_start is not None:
if isinstance(x, DeclareMeasuresNode):
measureUp.leftBarline = bar.Repeat(direction='start')
elif isinstance(x, DeclareMeasuresGrandNode) and x.direction == "UP":
measureUp.leftBarline = bar.Repeat(direction='start')
else:
measureDown.leftBarline = bar.Repeat(direction='start')
self.repeat_ctr.append(x.repeat_start)
if isinstance(
x,
DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.repeat_start is None:
line = expDown.expressions[0].getSymbol(
).line - 1
col = expDown.expressions[0].getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have repstart",
line, col)
if x.repeat_end is not None:
repeat_times = None
if x.repeat_end.INTEGER() is None:
repeat_times = 1
else:
repeat_times = int(x.repeat_end.INTEGER().getText())
if repeat_times < 0 or repeat_times > 10: # TODO: should we even count for this i think ok lang na wala restriction
line = x.repeat_end.INTEGER().getSymbol().line
col = x.repeat_end.INTEGER().getSymbol().column
raise Exception(
"Number of repeats must be less than or equal to 10",
line, col)
else:
if isinstance(x, DeclareMeasuresNode):
measureUp.rightBarline = bar.Repeat(
direction='end', times = repeat_times)
elif isinstance(x, DeclareMeasuresGrandNode) and x.direction == "UP":
measureUp.rightBarline = bar.Repeat(
direction='end', times = repeat_times)
else:
measureDown.rightBarline = bar.Repeat(
direction='end', times = repeat_times)
if len(self.repeat_ctr) > 0:
del self.repeat_ctr[-1]
if isinstance(
x,
DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.repeat_end is None:
line = expDown.expressions[0].getSymbol(
).line - 1
col = expDown.expressions[0].getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have repend",
line, col)
repeat_times_down = None
if expDown.repeat_end.INTEGER() is None:
repeat_times_down = 1
else:
repeat_times_down = int(expDown.repeat_end.INTEGER().getText())
if repeat_times_down != repeat_times:
line = expDown.repeat_end.REPEND().getSymbol(
).line
col = expDown.repeat_end.REPEND().getSymbol(
).column
raise Exception(
"measureUp and measureDown pairs must both have the same number of repeats",
line, col)
if x.repeat_start is None and isinstance(x, DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.repeat_start is not None:
if not isinstance(x.expressions[0], DeclarePatternNode):
line = x.expressions[0].note_value.getSymbol().line - 1
col = x.expressions[0].note_value.getSymbol().column
else:
print("error is ", type(x.expressions[0].expressions), len(x.expressions[0].expressions))
line = x.expressions[0].expressions[0].note_value.getSymbol().line - 1
col = x.expressions[0].expressions[0].note_value.getSymbol().column
raise Exception(
"measureUp and measureDown pairs must both have repstart",
line, col)
if x.repeat_end is None and isinstance(
x, DeclareMeasuresGrandNode) and x.direction == "UP":
expDown = ctx[idx + 1]
if expDown.repeat_end is not None:
line = x.expressions[0].note_value.getSymbol().line - 1
col = x.expressions[0].note_value.getSymbol().column
raise Exception(
"measureUp and measureDown pairs must both have repend",
line, col)
if isinstance(x, DeclareMeasuresGrandNode
) and self.checkInst not in self.grandInst:
line = x.expressions[0].note_value.getSymbol().line - 1
col = x.expressions[0].note_value.getSymbol().column
raise Exception(
"Grand staff directions are only allowed for keyboard instruments",
line, col)
elif isinstance(x, DeclareMeasuresNode
) and self.checkInst in self.grandInst:
line = x.expressions[0].note_value.getSymbol().line - 1
col = x.expressions[0].note_value.getSymbol().column
raise Exception(
"Grand staff directions are required for keyboard instruments",
line, col)
measure_accidentals = {}
print("--------- MEASURE -------")
for mIdx, m_expr in enumerate(x.expressions):
if isinstance(m_expr, ExprNoteNode):
cur_beats += valToBeat(str(m_expr.note_value),
float(note_value),
bool(m_expr.dotted))
if cur_beats > float(beats_per_measure):
line = m_expr.note_value.getSymbol().line
col = m_expr.note_value.getSymbol().column
raise Exception(
"Number of beats in measure has exceeded amount required within staff",
line, col)
else:
if isinstance(x, DeclareMeasuresGrandNode
) and x.direction == "DOWN":
print("down")
pitch = m_expr.pitch.getText()
octave = m_expr.num.getText()
if pitch in measure_accidentals:
updated_acc = measure_accidentals[(pitch)]
elif pitch in staff_accidentals:
updated_acc = staff_accidentals[(pitch)]
else:
updated_acc = m_expr.accidental
measureDown.append(
createNote(str(m_expr.num),
str(updated_acc),
str(m_expr.pitch),
str(m_expr.note_value),
bool(m_expr.dotted)))
else:
pitch = m_expr.pitch.getText()
octave = m_expr.num.getText()
if pitch in measure_accidentals:
updated_acc = measure_accidentals[(pitch)]
elif pitch in staff_accidentals:
updated_acc = staff_accidentals[(pitch)]
else:
updated_acc = m_expr.accidental
measureUp.append(
createNote(str(m_expr.num),
str(updated_acc),
str(m_expr.pitch),
str(m_expr.note_value),
bool(m_expr.dotted)))
# printExprNote(m_expr)
elif isinstance(m_expr, ExprChordNode):
expected_note_val, is_dotted = processExprChord(m_expr.notes, "EXPR")
cur_beats += valToBeat(expected_note_val,
float(note_value), is_dotted)
if cur_beats > float(beats_per_measure):
line = m_expr.notes[0].note_value.getSymbol().line
col = m_expr.notes[0].note_value.getSymbol().column
raise Exception(
"Number of beats in measure has exceeded amount required within staff",
line, col)
else:
new_notes = []
for n in m_expr.notes:
pitch = n.pitch.getText()
octave = n.num.getText()
if pitch in measure_accidentals:
n.accidental = measure_accidentals[(pitch)]
elif pitch in staff_accidentals:
n.accidental = staff_accidentals[(pitch)]
new_notes.append((str(n.num), str(n.pitch), str(n.accidental)))
if isinstance(x, DeclareMeasuresGrandNode) and x.direction == "DOWN":
measureDown.append(createChord(new_notes, expected_note_val, is_dotted))
else:
measureUp.append(createChord(new_notes, expected_note_val, is_dotted))
| |
from ..v2020_03_01.aio.operations_async import DdosCustomPoliciesOperations as OperationClass
elif api_version == '2020-04-01':
from ..v2020_04_01.aio.operations_async import DdosCustomPoliciesOperations as OperationClass
else:
raise NotImplementedError("APIVersion {} is not available".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def ddos_protection_plans(self):
    """DdosProtectionPlansOperations for the client's configured API version.

    Each supported API version maps to
    ``azure.mgmt.network.v<version>.aio.operations_async.DdosProtectionPlansOperations``.
    Supported versions: 2018-02-01, 2018-04-01, 2018-06-01, 2018-07-01,
    2018-08-01, 2018-10-01, 2018-11-01, 2018-12-01, 2019-02-01, 2019-04-01,
    2019-06-01, 2019-07-01, 2019-08-01, 2019-09-01, 2019-11-01, 2019-12-01,
    2020-03-01, 2020-04-01. Raises NotImplementedError for any other version.
    """
    api_version = self._get_api_version('ddos_protection_plans')
    # Lazily import the implementation for the resolved version
    # (newest first); unknown versions are rejected explicitly below.
    if api_version == '2020-04-01':
        from ..v2020_04_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2020-03-01':
        from ..v2020_03_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-12-01':
        from ..v2019_12_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-11-01':
        from ..v2019_11_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-09-01':
        from ..v2019_09_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-08-01':
        from ..v2019_08_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-07-01':
        from ..v2019_07_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-06-01':
        from ..v2019_06_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-04-01':
        from ..v2019_04_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2019-02-01':
        from ..v2019_02_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-12-01':
        from ..v2018_12_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-11-01':
        from ..v2018_11_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-10-01':
        from ..v2018_10_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-08-01':
        from ..v2018_08_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-07-01':
        from ..v2018_07_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-06-01':
        from ..v2018_06_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-04-01':
        from ..v2018_04_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    elif api_version == '2018-02-01':
        from ..v2018_02_01.aio.operations_async import DdosProtectionPlansOperations as _OpCls
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return _OpCls(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def default_security_rules(self):
    """DefaultSecurityRulesOperations for the client's configured API version.

    Each supported API version maps to
    ``azure.mgmt.network.v<version>.aio.operations_async.DefaultSecurityRulesOperations``.
    Supported versions: 2017-06-01, 2017-08-01, 2017-09-01, 2017-10-01,
    2017-11-01, 2018-01-01, 2018-02-01, 2018-04-01, 2018-06-01, 2018-07-01,
    2018-08-01, 2018-10-01, 2018-11-01, 2018-12-01, 2019-02-01, 2019-04-01,
    2019-06-01, 2019-07-01, 2019-08-01, 2019-09-01, 2019-11-01, 2019-12-01,
    2020-03-01, 2020-04-01. Raises NotImplementedError for any other version.
    """
    api_version = self._get_api_version('default_security_rules')
    # Lazily import the implementation for the resolved version
    # (newest first); unknown versions are rejected explicitly below.
    if api_version == '2020-04-01':
        from ..v2020_04_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2020-03-01':
        from ..v2020_03_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-12-01':
        from ..v2019_12_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-11-01':
        from ..v2019_11_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-09-01':
        from ..v2019_09_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-08-01':
        from ..v2019_08_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-07-01':
        from ..v2019_07_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-06-01':
        from ..v2019_06_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-04-01':
        from ..v2019_04_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2019-02-01':
        from ..v2019_02_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-12-01':
        from ..v2018_12_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-11-01':
        from ..v2018_11_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-10-01':
        from ..v2018_10_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-08-01':
        from ..v2018_08_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-07-01':
        from ..v2018_07_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-06-01':
        from ..v2018_06_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-04-01':
        from ..v2018_04_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-02-01':
        from ..v2018_02_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2018-01-01':
        from ..v2018_01_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2017-11-01':
        from ..v2017_11_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2017-10-01':
        from ..v2017_10_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2017-09-01':
        from ..v2017_09_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2017-08-01':
        from ..v2017_08_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    elif api_version == '2017-06-01':
        from ..v2017_06_01.aio.operations_async import DefaultSecurityRulesOperations as _OpCls
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return _OpCls(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def express_route_circuit_authorizations(self):
    """ExpressRouteCircuitAuthorizationsOperations for the configured API version.

    Each supported API version maps to
    ``azure.mgmt.network.v<version>.aio.operations_async.ExpressRouteCircuitAuthorizationsOperations``.
    Supported versions: 2015-06-15, 2016-09-01, 2016-12-01, 2017-03-01,
    2017-06-01, 2017-08-01, 2017-09-01, 2017-10-01, 2017-11-01, 2018-01-01,
    2018-02-01, 2018-04-01, 2018-06-01, 2018-07-01, 2018-08-01, 2018-10-01,
    2018-11-01, 2018-12-01, 2019-02-01, 2019-04-01, 2019-06-01, 2019-07-01,
    2019-08-01, 2019-09-01, 2019-11-01, 2019-12-01, 2020-03-01, 2020-04-01.
    Raises NotImplementedError for any other version.
    """
    api_version = self._get_api_version('express_route_circuit_authorizations')
    # Lazily import the implementation for the resolved version
    # (newest first); unknown versions are rejected explicitly below.
    if api_version == '2020-04-01':
        from ..v2020_04_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2020-03-01':
        from ..v2020_03_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-12-01':
        from ..v2019_12_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-11-01':
        from ..v2019_11_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-09-01':
        from ..v2019_09_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-08-01':
        from ..v2019_08_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-07-01':
        from ..v2019_07_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-06-01':
        from ..v2019_06_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-04-01':
        from ..v2019_04_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2019-02-01':
        from ..v2019_02_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-12-01':
        from ..v2018_12_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-11-01':
        from ..v2018_11_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-10-01':
        from ..v2018_10_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-08-01':
        from ..v2018_08_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-07-01':
        from ..v2018_07_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-06-01':
        from ..v2018_06_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-04-01':
        from ..v2018_04_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-02-01':
        from ..v2018_02_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2018-01-01':
        from ..v2018_01_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-11-01':
        from ..v2017_11_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-10-01':
        from ..v2017_10_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-09-01':
        from ..v2017_09_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-08-01':
        from ..v2017_08_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-06-01':
        from ..v2017_06_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2017-03-01':
        from ..v2017_03_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2016-12-01':
        from ..v2016_12_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2016-09-01':
        from ..v2016_09_01.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    elif api_version == '2015-06-15':
        from ..v2015_06_15.aio.operations_async import ExpressRouteCircuitAuthorizationsOperations as _OpCls
    else:
        raise NotImplementedError("APIVersion {} is not available".format(api_version))
    return _OpCls(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
    @property
    def express_route_circuit_connections(self):
        """Instance depends on the API version:

           * 2018-02-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_02_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-04-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_04_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-06-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_06_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-07-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_07_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-08-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_08_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-10-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_10_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-11-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_11_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2018-12-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2018_12_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-02-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_02_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-04-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_04_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-06-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_06_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-07-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_07_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-08-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_08_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-09-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_09_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-11-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_11_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2019-12-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2019_12_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2020-03-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2020_03_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
           * 2020-04-01: :class:`ExpressRouteCircuitConnectionsOperations<azure.mgmt.network.v2020_04_01.aio.operations_async.ExpressRouteCircuitConnectionsOperations>`
        """
        # Auto-generated multi-API dispatch: resolve the negotiated API
        # version for this operation group, then import the matching
        # versioned implementation module lazily (only the selected
        # version's module is ever imported).
        api_version = self._get_api_version('express_route_circuit_connections')
        if api_version == '2018-02-01':
            from ..v2018_02_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-04-01':
            from ..v2018_04_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-06-01':
            from ..v2018_06_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-07-01':
            from ..v2018_07_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-08-01':
            from ..v2018_08_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-10-01':
            from ..v2018_10_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-11-01':
            from ..v2018_11_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2018-12-01':
            from ..v2018_12_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-02-01':
            from ..v2019_02_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-04-01':
            from ..v2019_04_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-06-01':
            from ..v2019_06_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-07-01':
            from ..v2019_07_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-08-01':
            from ..v2019_08_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-09-01':
            from ..v2019_09_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-11-01':
            from ..v2019_11_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2019-12-01':
            from ..v2019_12_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2020-03-01':
            from ..v2020_03_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        elif api_version == '2020-04-01':
            from ..v2020_04_01.aio.operations_async import ExpressRouteCircuitConnectionsOperations as OperationClass
        else:
            raise NotImplementedError("APIVersion {} is not available".format(api_version))
        # The (de)serializers are bound to the chosen version's model classes.
        return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
@property
def express_route_circuit_peerings(self):
"""Instance depends on the API | |
= map(int, infile.readline().strip().split())
x += [0] * (37 - n)
x.sort()
cx = list(x)
sx = sum(x)
mx = x[-1]
return mx
def func_608aba9db10b46b3a333d3ed3a42a2c8(infile):
    """Read "b n" then up to *n* stack heights from *infile*; return the sum
    of the heights after padding the list to 37 entries with zeros.

    Fixed for Python 3: ``map`` returns an iterator there, so it must be
    materialized with ``list`` before the in-place ``+=`` extension
    (under Python 2 the old code happened to work because ``map`` returned
    a list).
    """
    b, n = map(int, infile.readline().strip().split())
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)   # kept for parity with the sibling generated variants
    sx = sum(x)
    mx = x[-1]     # kept for parity with the sibling generated variants
    return sx
def func_b5dd37f064d64026810eb571cd222ed7(infile):
    """Read "b n" then up to *n* stack heights from *infile*; return the
    heights as a sorted list padded to 37 entries with zeros.

    Fixed for Python 3: ``list(map(...))`` so the in-place ``+=`` extension
    works (Python 3's ``map`` returns an iterator).
    """
    b, n = map(int, infile.readline().strip().split())
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)   # kept for parity with the sibling generated variants
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    return x
def func_8739587816c74298b5d3813cf6870ed4(infile):
    """Read "b n" then a line of stack heights from *infile*; return the
    budget *b*.  The second line is still consumed (and processed) so the
    file position matches the sibling variants.

    Fixed for Python 3: ``list(map(...))`` so the in-place ``+=`` extension
    works (Python 3's ``map`` returns an iterator).
    """
    b, n = map(int, infile.readline().strip().split())
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)   # kept for parity with the sibling generated variants
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    return b
def func_69f1faa88bea4321a5a51b9d027de7c3(infile):
    """Read "b n" then a line of stack heights from *infile*; return the
    declared count *n*.  The second line is still consumed so the file
    position matches the sibling variants.

    Fixed for Python 3: ``list(map(...))`` so the in-place ``+=`` extension
    works (Python 3's ``map`` returns an iterator).
    """
    b, n = map(int, infile.readline().strip().split())
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)   # kept for parity with the sibling generated variants
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    return n
def func_b6b5065a5dee415693fc2d996d6e8d79(infile, b, n):
    """Read up to *n* stack heights from *infile*, pad to 37 entries, and
    return the last computed levelling cost ``crem`` (i.e. the cost of
    raising the 36 smallest stacks to the height of the 36th, the value
    left behind by the final loop iteration).

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        # Cost to raise the i smallest stacks to the height of stack i-1.
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    return crem
def func_95eaef459b1543179e15091b424360b8(infile, b, n):
    """Read up to *n* stack heights from *infile* and return them as a
    sorted list padded to 37 entries with zeros (the budget loop is run
    for parity with the sibling variants but does not affect ``x``).

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    return x
def func_5e6bae7aadfe4dac86adc361ae4541e6(infile, b, n):
    """Read up to *n* stack heights from *infile*, pad to 37 entries, and
    return the padded sorted heights followed by, for each affordable
    prefix of size ``i``, the equalized height reachable by spending
    budget *b* on the ``i`` smallest stacks.

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division in the appended heights.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        # Cost to level the i smallest stacks at height x[i-1].
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            # Remaining budget spread evenly over the i stacks.
            cx.append(x[i - 1] + (b - crem) // i)
    return cx
def func_d9e787e7934f43c9a0a02ac4540d6239(infile, b, n):
    """Read up to *n* stack heights from *infile*, run the budget loop, and
    return the final loop index (always 36, since the loop runs to
    completion; the for-loop variable legitimately survives the loop in
    both Python 2 and 3).

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    return i
def func_97ea4480d4db4b68b5aa087268288504(infile, b, n):
    """Read up to *n* stack heights from *infile*, pad to 37 entries, and
    return the maximum height (last element after the sort).

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    return mx
def func_94ba257382d2424ca0986b81faa6a350(infile, b, n):
    """Read up to *n* stack heights from *infile*, pad to 37 entries, and
    return the sum of the heights.

    Fixed for Python 3: ``list(map(...))`` so ``+=`` works, ``range``
    instead of the removed ``xrange``, and ``//`` to preserve Python 2's
    integer (floor) division.
    """
    x = list(map(int, infile.readline().strip().split()))
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    return sx
def func_2952648717d44dc4b1892d3b4224b289(x, b, n):
    """Pad list *x* (in place) to 37 entries, sort it, and return the sorted
    heights plus the budget-equalized candidate heights plus, for every
    truthy candidate, that candidate minus one.

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.  Note *x* is
    mutated (padded and sorted) as in the original.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    # Also offer every non-zero candidate reduced by one.
    cx = cx + [(y - 1) for y in cx if y]
    return cx
def func_3e076ecac0f74a6aa0768e5157d84894(x, b, n):
    """Pad list *x* (in place) to 37 entries, sort it, and return the sum of
    its elements (the candidate-building loop runs only for parity with
    the sibling generated variants).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    return sx
def func_94d86a4218814d7ba0e7210049d46136(x, b, n):
    """Pad list *x* (in place) to 37 entries, sort it, and return the
    maximum height (the candidate-building loop runs only for parity
    with the sibling generated variants).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    return mx
def func_8ea3283e34d641b1a6b7ccd21bcb4e0b(x, b, n):
    """Pad *x* (in place) to 37 entries, sort it, build the budget-equalized
    candidate list, and return ``y`` -- the variable leaked out of the
    final list comprehension, i.e. the last truthy element iterated there.

    NOTE(review): Python 2 ONLY.  This relies on (a) ``xrange``, (b) ``/``
    being integer division on ints, and (c) list-comprehension variables
    leaking into the enclosing scope -- Python 3 removes all three, so
    ``return y`` would raise NameError there.  If no element of ``cx`` is
    truthy, ``y`` is unbound and this raises NameError even on Python 2.
    Left byte-identical because a rewrite cannot safely reproduce the
    leaked-variable semantics.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)
    mx = x[-1]
    for i in xrange(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) / i)
    cx = cx + [(y - 1) for y in cx if y]
    return y
def func_23bcf0ade954423ea8faf8334234a8eb(x, b, n):
    """Pad list *x* (in place) to 37 entries, sort it, and return the last
    computed levelling cost ``crem`` (value left by the final loop
    iteration; for-loop variables survive the loop in both Python 2/3).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    return crem
def func_947d894e1c5e41d9b5cea2afa8f9f734(x, b, n):
    """Pad list *x* (in place) to 37 entries, sort it, run the budget loop,
    and return the final loop index (always 36; the for-loop variable
    survives the loop in both Python 2 and 3).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x += [0] * (37 - n)
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    return i
def func_72dc87936dbc48da9d28b4925e726ebf(x, b):
    """Sort list *x* (in place) and return the sum of its elements; the
    candidate lists are built only for parity with the sibling variants.

    Requires ``len(x) >= 36`` (no padding in this variant -- the loop
    indexes ``x[35]``).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return sx
def func_d3a08f356cf142ad85ebe4d2d3e36baa(x, b):
    """Sort list *x* (in place) and return the last computed levelling cost
    ``crem`` (value left by the final loop iteration).

    Requires ``len(x) >= 36`` (no padding in this variant).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return crem
def func_11849c249aad47509e0a1064e987adb2(x, b):
    """Sort list *x* (in place), run the budget loop, and return the final
    loop index (always 36; the for-loop variable survives the loop in
    both Python 2 and 3).

    Requires ``len(x) >= 36`` (no padding in this variant).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return i
def func_f71e39ed428a4455af9fbd52a5eb78ed(x, b):
    """Sort list *x* (in place) and return the full candidate list: the
    sorted heights, the budget-equalized heights, every truthy candidate
    minus one, and finally every candidate so far plus one.

    Requires ``len(x) >= 36`` (no padding in this variant).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]     # kept for parity with the sibling generated variants
    for i in range(1, 37):
        # Cost to level the i smallest stacks at height x[i-1].
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return cx
def func_6b0509362da64a74b9712bfee10d409f(x, b):
    """Sort list *x* (in place) and return its maximum element; the
    candidate lists are built only for parity with the sibling variants.

    Requires ``len(x) >= 36`` (no padding in this variant).

    Fixed for Python 3: ``range`` instead of the removed ``xrange`` and
    ``//`` to preserve Python 2's integer (floor) division.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)    # kept for parity with the sibling generated variants
    mx = x[-1]
    for i in range(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) // i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return mx
def func_17fe85c5cd4d4f6088e8e7f8eef96c1d(x, b):
    """Sort *x* (in place), build the candidate lists, and return ``y`` --
    the variable leaked out of the ``(y + 1)`` comprehension, i.e. the
    last element of ``cx`` as it stood before that final extension.

    NOTE(review): Python 2 ONLY.  This relies on (a) ``xrange``, (b) ``/``
    being integer division on ints, and (c) list-comprehension variables
    leaking into the enclosing scope -- Python 3 removes all three, so
    ``return y`` would raise NameError there.  Also requires
    ``len(x) >= 36`` since the loop indexes ``x[35]`` without padding.
    Left byte-identical because a rewrite cannot safely reproduce the
    leaked-variable semantics.
    """
    x.sort()
    cx = list(x)
    sx = sum(x)
    mx = x[-1]
    for i in xrange(1, 37):
        crem = x[i - 1] * i - sum(x[:i])
        if b >= crem:
            cx.append(x[i - 1] + (b - crem) / i)
    cx = cx + [(y - 1) for y in cx if y]
    cx = cx + [(y + 1) for y in cx]
    return y
def func_cfa7b7355ff3423da23a4761951c1e56(x, b):
cx = list(x)
sx = sum(x)
mx = x[-1]
for i in xrange(1, 37):
crem = x[i - 1] * i - sum(x[:i])
if b >= crem:
cx.append(x[i - 1] + (b - crem) / i)
cx = cx + [(y - 1) for y in cx if y]
cx = cx + [(y + 1) for y in cx]
| |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Layout Plugin.
"""
# Standard library imports
import configparser as cp
import os
# Third party imports
from qtpy.QtCore import Qt, QByteArray, QSize, QPoint, Slot
from qtpy.QtWidgets import QApplication, QDesktopWidget, QDockWidget
# Local imports
from spyder.api.exceptions import SpyderAPIError
from spyder.api.plugins import Plugins, SpyderPluginV2
from spyder.api.translations import get_translation
from spyder.api.utils import get_class_values
from spyder.plugins.mainmenu.api import ApplicationMenus, ViewMenuSections
from spyder.plugins.layout.container import LayoutContainer
from spyder.plugins.layout.layouts import (DefaultLayouts,
HorizontalSplitLayout,
MatlabLayout, RLayout,
SpyderLayout, VerticalSplitLayout)
from spyder.plugins.preferences.widgets.container import PreferencesActions
from spyder.plugins.toolbar.api import (
ApplicationToolbars, MainToolbarSections)
from spyder.py3compat import qbytearray_to_str # FIXME:
# Localization
_ = get_translation("spyder")
# Constants
# Number of default layouts available
DEFAULT_LAYOUTS = get_class_values(DefaultLayouts)
# ----------------------------------------------------------------------------
# ---- Window state version passed to saveState/restoreState.
# ----------------------------------------------------------------------------
# This defines the layout version used by different Spyder releases. In case
# there's a need to reset the layout when moving from one release to another,
# please increase the number below in integer steps, e.g. from 1 to 2, and
# leave a mention below explaining what prompted the change.
#
# The current versions are:
#
# * Spyder 4: Version 0 (it was the default).
# * Spyder 5.0.0 to 5.0.5: Version 1 (a bump was required due to the new API).
# * Spyder 5.1.0: Version 2 (a bump was required due to the migration of
# Projects to the new API).
WINDOW_STATE_VERSION = 2
class Layout(SpyderPluginV2):
"""
Layout manager plugin.
"""
NAME = "layout"
CONF_SECTION = "quick_layouts"
REQUIRES = [Plugins.All] # Uses wildcard to require all the plugins
CONF_FILE = False
CONTAINER_CLASS = LayoutContainer
# --- SpyderDockablePlugin API
# ------------------------------------------------------------------------
def get_name(self):
return _("Layout")
def get_description(self):
return _("Layout manager")
def get_icon(self):
return self.create_icon("history") # FIXME:
    def register(self):
        """
        Set up the plugin: initialize internal window-state flags, register
        the default layouts, and add the panes/layouts/fullscreen entries to
        the View menu and the main toolbar (when those plugins are present).
        """
        container = self.get_container()
        # Internal window-state bookkeeping used by the maximize/fullscreen
        # toggles elsewhere in this class.
        self._last_plugin = None
        self._first_spyder_run = False
        self._fullscreen_flag = None
        # The following flag remembers the maximized state even when
        # the window is in fullscreen mode:
        self._maximized_flag = None
        # The following flag is used to restore window's geometry when
        # toggling out of fullscreen mode in Windows.
        self._saved_normal_geometry = None
        self._state_before_maximizing = None
        self._interface_locked = self.get_conf('panes_locked', section='main')
        # Register default layouts (this plugin acts as their parent).
        self.register_layout(self, SpyderLayout)
        self.register_layout(self, RLayout)
        self.register_layout(self, MatlabLayout)
        self.register_layout(self, HorizontalSplitLayout)
        self.register_layout(self, VerticalSplitLayout)
        # Menu/toolbar wiring is optional: both plugins may be absent.
        mainmenu = self.get_plugin(Plugins.MainMenu)
        if mainmenu:
            # Add Panes related actions to View application menu
            panes_items = [
                container._plugins_menu,
                container._lock_interface_action,
                container._close_dockwidget_action,
                container._maximize_dockwidget_action]
            for panes_item in panes_items:
                mainmenu.add_item_to_application_menu(
                    panes_item,
                    menu_id=ApplicationMenus.View,
                    section=ViewMenuSections.Pane,
                    before_section=ViewMenuSections.Toolbar)
            # Add layouts menu to View application menu
            layout_items = [
                container._layouts_menu,
                container._toggle_next_layout_action,
                container._toggle_previous_layout_action]
            for layout_item in layout_items:
                mainmenu.add_item_to_application_menu(
                    layout_item,
                    menu_id=ApplicationMenus.View,
                    section=ViewMenuSections.Layout)
            # Add fullscreen action to View application menu
            mainmenu.add_item_to_application_menu(
                container._fullscreen_action,
                menu_id=ApplicationMenus.View,
                section=ViewMenuSections.Bottom)
        toolbars = self.get_plugin(Plugins.Toolbar)
        if toolbars:
            # Add actions to Main application toolbar, right before the
            # Preferences button.
            before_action = self.get_action(
                PreferencesActions.Show,
                plugin=Plugins.Preferences
            )
            toolbars.add_item_to_application_toolbar(
                container._maximize_dockwidget_action,
                toolbar_id=ApplicationToolbars.Main,
                section=MainToolbarSections.ApplicationSection,
                before=before_action
            )
        # Update actions icons and text
        self._update_fullscreen_action()
def before_mainwindow_visible(self):
# Update layout menu
self.update_layout_menu_actions()
# Setup layout
self.setup_layout(default=False)
def on_mainwindow_visible(self):
# Populate panes menu
self.create_plugins_menu()
# Update panes and toolbars lock status
self.toggle_lock(self._interface_locked)
    # --- Public API
# ------------------------------------------------------------------------
def get_last_plugin(self):
"""
Return the last focused dockable plugin.
Returns
-------
SpyderDockablePlugin
The last focused dockable plugin.
"""
return self._last_plugin
def get_fullscreen_flag(self):
"""
Give access to the fullscreen flag.
The flag shows if the mainwindow is in fullscreen mode or not.
Returns
-------
bool
True is the mainwindow is in fullscreen. False otherwise.
"""
return self._fullscreen_flag
def register_layout(self, parent_plugin, layout_type):
"""
Register a new layout type.
Parameters
----------
parent_plugin: spyder.api.plugins.SpyderPluginV2
Plugin registering the layout type.
layout_type: spyder.plugins.layout.api.BaseGridLayoutType
Layout to register.
"""
self.get_container().register_layout(parent_plugin, layout_type)
def get_layout(self, layout_id):
"""
Get a registered layout by his ID.
Parameters
----------
layout_id : string
The ID of the layout.
Returns
-------
Instance of a spyder.plugins.layout.api.BaseGridLayoutType subclass
Layout.
"""
return self.get_container().get_layout(layout_id)
def update_layout_menu_actions(self):
self.get_container().update_layout_menu_actions()
    def setup_layout(self, default=False):
        """
        Initialize the main window layout.

        On the first run (no saved window state), maximize the window, apply
        the default Spyder layout, and seed the quick-layout configuration
        sections; otherwise just restore the saved settings.

        Parameters
        ----------
        default: bool
            If True, load the default (factory) settings instead of the
            user's saved ones.
        """
        prefix = 'window' + '/'
        settings = self.load_window_settings(prefix, default)
        hexstate = settings[0]
        self._first_spyder_run = False
        if hexstate is None:
            # First Spyder execution:
            self.main.setWindowState(Qt.WindowMaximized)
            self._first_spyder_run = True
            self.setup_default_layouts(DefaultLayouts.SpyderLayout, settings)
            # Now that the initial setup is done, copy the window settings,
            # except for the hexstate in the quick layouts sections for the
            # default layouts.
            # Order and name of the default layouts is found in config.py
            section = 'quick_layouts'
            get_func = self.get_conf_default if default else self.get_conf
            order = get_func('order', section=section)
            # Restore the original defaults if reset layouts is called
            if default:
                self.set_conf('active', order, section)
                self.set_conf('order', order, section)
                self.set_conf('names', order, section)
                self.set_conf('ui_names', order, section)
            # Save one (state-less) settings slot per default layout.
            for index, _name, in enumerate(order):
                prefix = 'layout_{0}/'.format(index)
                self.save_current_window_settings(prefix, section,
                                                  none_state=True)
            # Store the initial layout as the default in spyder
            prefix = 'layout_default/'
            section = 'quick_layouts'
            self.save_current_window_settings(prefix, section, none_state=True)
            self._current_quick_layout = DefaultLayouts.SpyderLayout
        self.set_window_settings(*settings)
    def setup_default_layouts(self, layout_id, settings):
        """
        Apply the default layout identified by *layout_id*.

        Repaints are suspended while the layout is applied.  Outside the
        first run, the window width is temporarily fixed so the dock areas
        are distributed against a stable geometry, then the original
        min/max width constraints are restored.
        """
        main = self.main
        main.setUpdatesEnabled(False)
        first_spyder_run = bool(self._first_spyder_run)  # Store copy
        if first_spyder_run:
            self.set_window_settings(*settings)
        else:
            if self._last_plugin:
                if self._last_plugin._ismaximized:
                    # Undo a maximized pane before rearranging docks.
                    self.maximize_dockwidget(restore=True)
            if not (main.isMaximized() or self._maximized_flag):
                main.showMaximized()
            min_width = main.minimumWidth()
            max_width = main.maximumWidth()
            base_width = main.width()
            # Freeze the width so dock sizing is computed deterministically.
            main.setFixedWidth(base_width)
        # Layout selection
        layout = self.get_layout(layout_id)
        # Apply selected layout
        layout.set_main_window_layout(self.main, self.get_dockable_plugins())
        if first_spyder_run:
            self._first_spyder_run = False
        else:
            # Restore the original width constraints removed above.
            self.main.setMinimumWidth(min_width)
            self.main.setMaximumWidth(max_width)
            if not (self.main.isMaximized() or self._maximized_flag):
                self.main.showMaximized()
        self.main.setUpdatesEnabled(True)
        self.main.sig_layout_setup_ready.emit(layout)
        return layout
    def quick_layout_switch(self, index_or_layout_id):
        """
        Switch to quick layout.

        Using a number *index* or a registered layout id *layout_id*.

        Parameters
        ----------
        index_or_layout_id: int or str
            Numeric slot of a saved quick layout, or the ID of a
            registered layout.

        Returns
        -------
        int or str
            The argument, echoed back on success (None on early error
            return).
        """
        section = 'quick_layouts'
        container = self.get_container()
        try:
            settings = self.load_window_settings(
                'layout_{}/'.format(index_or_layout_id), section=section)
            (hexstate, window_size, prefs_dialog_size, pos, is_maximized,
             is_fullscreen) = settings
            # The defaults layouts will always be regenerated unless there was
            # an overwrite, either by rewriting with same name, or by deleting
            # and then creating a new one
            if hexstate is None:
                # The value for hexstate shouldn't be None for a custom saved
                # layout (ie, where the index is greater than the number of
                # defaults). See spyder-ide/spyder#6202.
                if index_or_layout_id not in DEFAULT_LAYOUTS:
                    container.critical_message(
                        _("Warning"),
                        _("Error opening the custom layout. Please close"
                          " Spyder and try again. If the issue persists,"
                          " then you must use 'Reset to Spyder default' "
                          "from the layout menu."))
                    return
                self.setup_default_layouts(index_or_layout_id, settings)
            else:
                self.set_window_settings(*settings)
        except cp.NoOptionError:
            # No saved slot with that name: fall back to a registered layout.
            try:
                layout = self.get_layout(index_or_layout_id)
                layout.set_main_window_layout(
                    self.main, self.get_dockable_plugins())
                self.main.sig_layout_setup_ready.emit(layout)
            except SpyderAPIError:
                container.critical_message(
                    _("Warning"),
                    _("Quick switch layout #%s has not yet "
                      "been defined.") % str(index_or_layout_id))
        # Make sure the flags are correctly set for visible panes
        for plugin in self.get_dockable_plugins():
            try:
                # New API
                action = plugin.toggle_view_action
            except AttributeError:
                # Old API
                action = plugin._toggle_view_action
            action.setChecked(plugin.dockwidget.isVisible())
        return index_or_layout_id
    def load_window_settings(self, prefix, default=False, section='main'):
        """
        Load window layout settings from userconfig-based configuration with
        *prefix*, under *section*.

        Parameters
        ----------
        prefix: str
            Key prefix, e.g. ``'window/'`` or ``'layout_0/'``.
        default: bool
            If True, read factory defaults and do not restore the inner
            layout (hexstate is forced to None).
        section: str
            Configuration section to read from.

        Returns
        -------
        tuple
            ``(hexstate, window_size, prefs_dialog_size, pos, is_maximized,
            is_fullscreen)``.
        """
        get_func = self.get_conf_default if default else self.get_conf
        window_size = get_func(prefix + 'size', section=section)
        prefs_dialog_size = get_func(
            prefix + 'prefs_dialog_size', section=section)
        if default:
            hexstate = None
        else:
            try:
                hexstate = get_func(prefix + 'state', section=section)
            except Exception:
                hexstate = None
        pos = get_func(prefix + 'position', section=section)
        # It's necessary to verify if the window/position value is valid
        # with the current screen. See spyder-ide/spyder#3748.
        # NOTE(review): the (x, y) position is compared against the screen's
        # width/height to detect an off-screen window -- confirm this is the
        # intended validity check.
        width = pos[0]
        height = pos[1]
        screen_shape = QApplication.desktop().geometry()
        current_width = screen_shape.width()
        current_height = screen_shape.height()
        if current_width < width or current_height < height:
            pos = self.get_conf_default(prefix + 'position', section)
        is_maximized = get_func(prefix + 'is_maximized', section=section)
        is_fullscreen = get_func(prefix + 'is_fullscreen', section=section)
        return (hexstate, window_size, prefs_dialog_size, pos, is_maximized,
                is_fullscreen)
    def get_window_settings(self):
        """
        Return current window settings.

        Symmetric to the 'set_window_settings' setter.

        Returns
        -------
        tuple
            ``(hexstate, window_size, prefs_dialog_size, pos, is_maximized,
            is_fullscreen)``, where ``hexstate`` is the Qt window state
            serialized to a hex string.
        """
        # FIXME: Window size in main window is updated on resize
        window_size = (self.window_size.width(), self.window_size.height())
        is_fullscreen = self.main.isFullScreen()
        if is_fullscreen:
            # While fullscreen, report the remembered maximized state
            # instead of Qt's (which would be False).
            is_maximized = self._maximized_flag
        else:
            is_maximized = self.main.isMaximized()
        pos = (self.window_position.x(), self.window_position.y())
        prefs_dialog_size = (self.prefs_dialog_size.width(),
                             self.prefs_dialog_size.height())
        hexstate = qbytearray_to_str(
            self.main.saveState(version=WINDOW_STATE_VERSION)
        )
        return (hexstate, window_size, prefs_dialog_size, pos, is_maximized,
                is_fullscreen)
def set_window_settings(self, hexstate, window_size, prefs_dialog_size,
pos, is_maximized, is_fullscreen):
"""
Set window settings Symetric to the 'get_window_settings' accessor.
"""
main = self.main
main.setUpdatesEnabled(False)
self.prefs_dialog_size = QSize(prefs_dialog_size[0],
prefs_dialog_size[1]) # width,height
main.set_prefs_size(self.prefs_dialog_size)
self.window_size = QSize(window_size[0],
window_size[1]) # width, height
self.window_position = QPoint(pos[0], pos[1]) # x,y
main.setWindowState(Qt.WindowNoState)
main.resize(self.window_size)
main.move(self.window_position)
# Window layout
if hexstate:
hexstate_valid = self.main.restoreState(
QByteArray().fromHex(str(hexstate).encode('utf-8')),
version=WINDOW_STATE_VERSION
)
# Check layout validity. Spyder 4 and below use the version 0
# state | |
<gh_stars>10-100
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_user
short_description: Manage user accounts and user attributes on a BIG-IP
description:
- Manage user accounts and user attributes on a BIG-IP. Typically this
module operates only on the REST API users and not the CLI users.
When specifying C(root), you may only change the password.
Your other parameters will be ignored in this case. Changing the C(root)
password is not an idempotent operation. Therefore, it will change it
every time this module attempts to change it.
version_added: 2.4
options:
full_name:
description:
- Full name of the user.
type: str
username_credential:
description:
- Name of the user to create, remove or modify.
- The C(root) user may not be removed.
type: str
required: True
aliases:
- name
password_credential:
description:
      - Set the user's password to this unencrypted value.
C(password_credential) is required when creating a new account.
type: str
shell:
description:
      - Optionally set the user's shell.
type: str
choices:
- bash
- none
- tmsh
partition_access:
description:
- Specifies the administrative partition to which the user has access.
C(partition_access) is required when creating a new account.
Should be in the form "partition:role".
- Valid roles include C(acceleration-policy-editor), C(admin), C(application-editor),
C(auditor), C(certificate-manager), C(guest), C(irule-manager), C(manager), C(no-access),
C(operator), C(resource-admin), C(user-manager), C(web-application-security-administrator),
and C(web-application-security-editor).
- Partition portion of tuple should be an existing partition or the value 'all'.
type: list
state:
description:
- Whether the account should exist or not, taking action if the state is
different from what is stated.
type: str
choices:
- present
- absent
default: present
update_password:
description:
      - C(always) will allow updating passwords if the user chooses to do so.
C(on_create) will only set the password for newly created users.
- When C(username_credential) is C(root), this value will be forced to C(always).
type: str
choices:
- always
- on_create
default: always
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
version_added: 2.5
notes:
- Requires BIG-IP versions >= 12.0.0
extends_documentation_fragment: f5
author:
- <NAME> (@caphrim007)
- <NAME> (@wojtek0806)
'''
EXAMPLES = r'''
- name: Add the user 'johnd' as an admin
bigip_user:
username_credential: johnd
password_credential: password
full_name: <NAME>
partition_access: all:admin
update_password: on_create
state: present
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
- name: Change the user "johnd's" role and shell
bigip_user:
username_credential: johnd
partition_access: NewPartition:manager
shell: tmsh
state: present
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
- name: Make the user 'johnd' an admin and set to advanced shell
bigip_user:
name: johnd
partition_access: all:admin
shell: bash
state: present
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
- name: Remove the user 'johnd'
bigip_user:
name: johnd
state: absent
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
- name: Update password
bigip_user:
state: present
username_credential: johnd
password_credential: <PASSWORD>
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
# Note that the second time this task runs, it would fail because
# The password has been changed. Therefore, it is recommended that
# you either,
#
# * Put this in its own playbook that you run when you need to
# * Put this task in a `block`
# * Include `ignore_errors` on this task
- name: Change the Admin password
bigip_user:
state: present
username_credential: admin
password_credential: <PASSWORD>
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
- name: Change the root user's password
bigip_user:
username_credential: root
password_credential: <PASSWORD>
state: present
provider:
server: lb.mydomain.com
user: admin
password: <PASSWORD>
delegate_to: localhost
'''
RETURN = r'''
full_name:
description: Full name of the user
returned: changed and success
type: str
sample: <NAME>
partition_access:
description:
- List of strings containing the user's roles and which partitions they
are applied to. They are specified in the form "partition:role".
returned: changed and success
type: list
sample: ['all:admin']
shell:
description: The shell assigned to the user account
returned: changed and success
type: str
sample: tmsh
'''
import os
import tempfile
from ansible.module_utils._text import to_bytes
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.six import string_types
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.icontrol import tmos_version
from library.module_utils.network.f5.icontrol import upload_file
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.icontrol import tmos_version
from ansible.module_utils.network.f5.icontrol import upload_file
try:
# Crypto is used specifically for changing the root password via
# tmsh over REST.
#
# We utilize the crypto library to encrypt the contents of a file
# before we upload it, and then decrypt it on-box to change the
# password.
#
# To accomplish such a process, we need to be able to encrypt the
# temporary file with the public key found on the box.
#
# These libraries are used to do the encryption.
#
# Note that, if these are not available, the ability to change the
# root password is disabled and the user will be notified as such
# by a failure of the module.
#
# These libraries *should* be available on most Ansible controllers
# by default though as crypto is a dependency of Ansible.
#
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
HAS_CRYPTO = True
except ImportError:
HAS_CRYPTO = False
class Parameters(AnsibleF5Parameters):
    """Shared parameter handling for the bigip_user module."""
    api_map = {
        'partitionAccess': 'partition_access',
        'description': 'full_name',
    }
    updatables = [
        'partition_access',
        'full_name',
        'shell',
        'password_credential',
    ]
    returnables = [
        'shell',
        'partition_access',
        'full_name',
        'username_credential',
        'password_credential',
    ]
    api_attributes = [
        'shell',
        'partitionAccess',
        'description',
        'name',
        'password',
    ]
    @property
    def partition_access(self):
        """Normalize partition access values from user input or the device.

        Device-returned entries are dictionaries; any ``nameReference`` key
        they carry is dropped in place before they are collected. User input
        entries are ``partition:role`` strings and are converted into
        dictionaries, with the pseudo-partition ``all`` mapped to the API's
        ``all-partitions`` name.

        Returns:
            List of dictionaries. Each item contains the ``name`` of the
            partition and the ``role`` to allow on that partition.
        """
        raw = self._values['partition_access']
        if raw is None:
            return
        normalized = []
        for entry in raw:
            if isinstance(entry, dict):
                entry.pop('nameReference', None)
                normalized.append(entry)
            if isinstance(entry, string_types):
                pieces = entry.split(':')
                if pieces[0].lower() == 'all':
                    pieces[0] = 'all-partitions'
                normalized.append(dict(name=pieces[0], role=pieces[1]))
        return normalized
    @property
    def temp_upload_file(self):
        # Lazily pick a unique basename to use for the file uploaded to the
        # device; only the name is kept, the temp file itself is discarded.
        if self._values['temp_upload_file'] is None:
            handle = tempfile.NamedTemporaryFile()
            self._values['temp_upload_file'] = os.path.basename(handle.name)
        return self._values['temp_upload_file']
class ApiParameters(Parameters):
    @property
    def shell(self):
        """Shell reported by the device; the sentinel 'none' maps to None."""
        value = self._values['shell']
        if value is None or value == 'none':
            return None
        return value
class ModuleParameters(Parameters):
    @property
    def shell(self):
        """Shell requested by the user; 'none' is normalized to None."""
        value = self._values['shell']
        return value if value not in (None, 'none') else None
class Changes(Parameters):
    def to_return(self):
        """Collect the returnable properties, skipping any that raise."""
        collected = {}
        for key in self.returnables:
            try:
                collected[key] = getattr(self, key)
            except Exception:
                continue
        return self._filter_params(collected)
class UsableChanges(Changes):
    @property
    def password(self):
        # The REST API expects the credential under the 'password' attribute;
        # expose password_credential (or None when unset) under that name.
        return self._values['password_credential']
class ReportableChanges(Changes):
    # No post-processing needed; reported values mirror the Changes base.
    pass
class Difference(object):
    def __init__(self, want, have=None):
        # 'want' holds the desired (module) parameters, 'have' the current
        # device state (None when the resource does not exist yet).
        self.want = want
        self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def password_credential(self):
if self.want.password_credential is None:
return None
if self.want.update_password in ['always']:
return self.want.password_credential
return None
@property
def shell(self):
if self.want.shell is None:
if self.have.shell is not None:
return 'none'
else:
return None
if self.want.shell == 'bash':
self._validate_shell_parameter()
if self.want.shell == self.have.shell:
return None
else:
return self.want.shell
if self.want.shell != self.have.shell:
return self.want.shell
def _validate_shell_parameter(self):
"""Method to validate shell parameters.
Raise when shell attribute is set to 'bash' with roles set to
either 'admin' or 'resource-admin'.
NOTE: Admin and Resource-Admin roles | |
64MB
self.realloc_vrom(max_vrom, moveable_vrom)
def realloc_vrom(self, max_vrom, moveable_vrom):
# find strands where vrom isn't taken by an "immoveable vrom file"
vrom_allocator = Allocator()
vrom_allocator.free(0, max_vrom)
for file in self.files:
if file not in moveable_vrom:
vrom_start = file.dma_entry.vrom_start
vrom_end = file.dma_entry.vrom_end
vrom_allocator.alloc_range(vrom_start, vrom_end)
# the vrom offsets of this file won't be updated,
# check now if the file fits the reserved vrom
assert len(file.data) == (vrom_end - vrom_start)
if self.log is not None:
self.log.debug(
"After allocating immovable files: vrom_allocator = {}", vrom_allocator
)
# sort files from largest to smallest
# If same size, sort by name. This makes the ordering consistent across
# different executions, but TODO it would be nice to find something
# else (like, index in the dma table), it may be faster, and would also
# be more portable (only debug versions have the file name, I think?).
moveable_vrom_sorted = list(moveable_vrom)
moveable_vrom_sorted.sort(
key=lambda file: (len(file.data), file.dma_entry.name),
reverse=True,
)
if self.log is not None:
self.log.trace("Reallocating vrom for vrom-moveable files")
# fit moveable vrom files in the space highlighted by dynamic_vrom_ranges,
# shrinking those ranges as we go
for file in moveable_vrom_sorted:
# TODO is 16-align needed for vrom?
start, end = vrom_allocator.alloc(len(file.data), 0x10)
file.dma_entry.vrom_start = start
file.dma_entry.vrom_end = end
if self.log is not None:
self.log.trace("VROM> {}", file.dma_entry)
def realloc_moveable_rom(self, move_rom):
if move_rom:
moveable_rom = set(file for file in self.files if file.moveable_rom)
else:
moveable_rom = set(
file
for file in self.files
if file.moveable_rom and file.dma_entry.rom_start is None
)
# TODO is max_rom limited by anything apart from max_vrom to prevent eg a 128MB rom?
max_rom = 0x4000000 # 64MB
self.realloc_rom(max_rom, moveable_rom)
def realloc_rom(self, max_rom, moveable_rom):
# find strands where rom isn't taken by an "immoveable rom file"
rom_allocator = Allocator()
rom_allocator.free(0, max_rom)
for file in self.files:
if file not in moveable_rom:
rom_allocator.alloc_range(
file.dma_entry.rom_start,
file.dma_entry.rom_start + len(file.data),
)
if self.log is not None:
self.log.debug(
"After allocating immovable files: rom_allocator = {}", rom_allocator
)
# sort files from largest to smallest
moveable_rom_sorted = list(moveable_rom)
# TODO see the vrom equivalent moveable_vrom_sorted
moveable_rom_sorted.sort(
key=lambda file: (len(file.data), file.dma_entry.name),
reverse=True,
)
if self.log is not None:
self.log.trace("Reallocating rom for rom-moveable files")
# fit moveable rom files in the space highlighted by dynamic_rom_ranges,
# shrinking those ranges as we go
for file in moveable_rom_sorted:
start, end = rom_allocator.alloc(len(file.data), 0x10)
file.dma_entry.rom_start = start
file.dma_entry.rom_end = 0 # TODO ?
if self.log is not None:
self.log.trace("ROM> {}", file.dma_entry)
class ROMReader:
    """Parses a ROM image: reads the DMA table and splits the image into files."""
    def __init__(
        self,
        version_info,
        logging_helper=None,  # type: Optional[LoggingHelper]
    ):
        self.logging_helper = logging_helper
        self.log = (
            None if logging_helper is None else logging_helper.get_logger("ROMReader")
        )
        self.version_info = version_info
    def read(
        self,
        data,  # type: memoryview
    ):
        """Build a ROM object from the raw image *data*.

        Also marks which files are allowed to move in ROM when repacking.
        """
        dma_entries, file_boot = self.parse_dma_table(data)
        files = []  # type: List[RomFile]
        for dma_entry in dma_entries:
            # rom_end is stored as 0 in the table; derive it from the vrom extent.
            rom_end = dma_entry.rom_start + (dma_entry.vrom_end - dma_entry.vrom_start)
            romfile = RomFile(
                data[dma_entry.rom_start : rom_end],
                dma_entry,
            )
            files.append(romfile)
        # Substitute the editable boot file built while parsing the table.
        files[self.version_info.dmaentry_index_boot] = file_boot
        rom = ROM(self.version_info, files, self.logging_helper)
        # TODO can all other files really move?
        for file in rom.files:
            file.moveable_rom = True
        # Files located by fixed ROM offset must stay put.
        rom.file_makerom.moveable_rom = False
        rom.file_boot.moveable_rom = False
        rom.file_dmadata.moveable_rom = False
        # TODO these audio files can move if the rom pointers in code are updated accordingly
        # https://github.com/zeldaret/oot/blob/bf0f26db9b9c2325cea249d6c8e0ec3b5152bcd6/src/code/audio_load.c#L1109
        rom.files[self.version_info.dmaentry_index_audiobank].moveable_rom = False
        rom.files[self.version_info.dmaentry_index_audioseq].moveable_rom = False
        rom.files[self.version_info.dmaentry_index_audiotable].moveable_rom = False
        return rom
    def parse_dma_table(self, data):
        # type: (memoryview) -> Tuple[List[DmaEntry], RomFileEditable]
        """Parse the DMA table; return its entries and an editable boot file."""
        # read dmadata entry early, to get dma table length
        (
            dmadata_vrom_start,
            dmadata_vrom_end,
            dmadata_rom_start,
            dmadata_rom_end,
        ) = dma_entry_struct.unpack_from(
            data,
            self.version_info.dmadata_rom_start
            + self.version_info.dmaentry_index_dmadata * dma_entry_struct.size,
        )
        assert dmadata_rom_start == self.version_info.dmadata_rom_start
        assert dmadata_vrom_start <= dmadata_vrom_end
        # Regular dma entries store rom_end as 0; compute the real end.
        assert dmadata_rom_end == 0
        dmadata_rom_end = dmadata_rom_start + (dmadata_vrom_end - dmadata_vrom_start)
        assert dmadata_rom_end <= len(data)
        # read boot entry early, to locate filenames
        (
            boot_vrom_start,
            boot_vrom_end,
            boot_rom_start,
            boot_rom_end,
        ) = dma_entry_struct.unpack_from(
            data,
            dmadata_rom_start
            + self.version_info.dmaentry_index_boot * dma_entry_struct.size,
        )
        assert boot_vrom_start <= boot_vrom_end
        assert boot_rom_end == 0
        # Temporary entry; replaced once the full table has been parsed below.
        boot_dma_entry_temp = DmaEntry(
            boot_vrom_start,
            boot_vrom_end,
            boot_rom_start,
            boot_rom_end,
        )
        boot_rom_end = boot_rom_start + (boot_vrom_end - boot_vrom_start)
        assert boot_rom_end <= len(data)
        file_boot = RomFileEditable(
            RomFile(data[boot_rom_start:boot_rom_end], boot_dma_entry_temp), False
        )
        # Byte ranges inside boot holding filename strings; freed afterwards
        # so a later repack can reuse that space.
        filename_boot_offset_ranges = []
        def get_filename(i):
            # Filenames are referenced by VRAM pointer from a table in boot;
            # translate to a boot-file offset and read a NUL-terminated
            # ASCII string (sanity-capped at 100 bytes).
            (filename_vram_start,) = u32_struct.unpack_from(
                file_boot.data,
                self.version_info.dma_table_filenames_boot_offset + i * u32_struct.size,
            )
            filename_boot_offset_start = (
                filename_vram_start - self.version_info.boot_vram_start
            )
            assert filename_boot_offset_start < len(file_boot.data)
            filename_boot_offset_end = filename_boot_offset_start
            while file_boot.data[filename_boot_offset_end] != 0:
                filename_boot_offset_end += 1
                assert filename_boot_offset_end < len(file_boot.data)
                assert (filename_boot_offset_end - filename_boot_offset_start) < 100
            filename_boot_offset_ranges.append(
                (filename_boot_offset_start, filename_boot_offset_end + 1)
            )
            return codecs.decode(
                file_boot.data[filename_boot_offset_start:filename_boot_offset_end],
                "ascii",
            )
        if self.log is not None:
            self.log.trace("Parsed DMA table:")
        dma_entries = []  # type: List[DmaEntry]
        dmaentry_rom_start = dmadata_rom_start
        dmaentry_index = 0
        while dmaentry_rom_start < dmadata_rom_end:
            vrom_start, vrom_end, rom_start, rom_end = dma_entry_struct.unpack_from(
                data, dmaentry_rom_start
            )
            # All-zero entries are unused table slots; skip them.
            if not (
                vrom_start == 0 and vrom_end == 0 and rom_start == 0 and rom_end == 0
            ):
                assert vrom_start <= vrom_end
                assert rom_end == 0
                assert rom_start + (vrom_end - vrom_start) <= len(data)
                dmaentry = DmaEntry(
                    vrom_start,
                    vrom_end,
                    rom_start,
                    rom_end,
                    get_filename(dmaentry_index),
                )
                dma_entries.append(dmaentry)
                if self.log is not None:
                    self.log.trace("{:04} {}", dmaentry_index, dmaentry)
            dmaentry_rom_start += dma_entry_struct.size
            dmaentry_index += 1
        # Swap in the real boot entry and release the filename string bytes.
        file_boot.dma_entry = dma_entries[self.version_info.dmaentry_index_boot]
        free_strings(file_boot.allocator, filename_boot_offset_ranges, file_boot.data)
        if self.log is not None:
            self.log.debug("file_boot.allocator = {}", file_boot.allocator)
        return dma_entries, file_boot
class ROMPacker:
    """Serializes a ROM object back into a ROM image."""
    def __init__(
        self,
        rom,  # type: ROM
        logging_helper=None,  # type: Optional[LoggingHelper]
    ):
        self.log = (
            None if logging_helper is None else logging_helper.get_logger("ROMPacker")
        )
        self.rom = rom
        # Maps a file name to the VRAM address of its string in the boot file.
        self.filenames_vram_start = dict()  # type: Dict[str, int]
    def pack_boot_alloc_filenames(self):
        """Allocate and write each file's name string into the boot file."""
        rom = self.rom
        for i, file in enumerate(rom.files):
            dma_entry = file.dma_entry
            name = dma_entry.name  # type: str
            if name is None:
                # TODO not sure about how/where to handle default name
                name = "#{}".format(i)
                dma_entry.name = name
            # A name shared by several files is only written once.
            if name in self.filenames_vram_start:
                continue
            (
                filename_boot_offset_start,
                filename_boot_offset_end,
            ) = rom.file_boot.allocator.alloc(len(name) + 1)
            # NUL-terminated ASCII, as expected by the filename table readers.
            rom.file_boot.data[filename_boot_offset_start:filename_boot_offset_end] = (
                name.encode("ascii") + b"\x00"
            )
            filename_vram_start = (
                rom.version_info.boot_vram_start + filename_boot_offset_start
            )
            self.filenames_vram_start[name] = filename_vram_start
    def pack_boot_filenames(self):
        """Point the boot filename table at each file's name string.

        Requires pack_boot_alloc_filenames to have populated
        filenames_vram_start first.
        """
        rom = self.rom
        for i, file in enumerate(rom.files):
            dma_entry = file.dma_entry
            # update filename in the boot file array
            filename_vram_start = self.filenames_vram_start[dma_entry.name]
            u32_struct.pack_into(
                rom.file_boot.data,
                rom.version_info.dma_table_filenames_boot_offset + i * u32_struct.size,
                filename_vram_start,
            )
    def pack_dma_table(self):
        """Rebuild the dmadata file's bytes from the current DMA entries."""
        rom = self.rom
        # Well-known files must still sit at their expected table indices.
        assert rom.file_makerom == rom.files[rom.version_info.dmaentry_index_makerom]
        assert rom.file_boot == rom.files[rom.version_info.dmaentry_index_boot]
        assert rom.file_dmadata == rom.files[rom.version_info.dmaentry_index_dmadata]
        assert rom.file_code == rom.files[rom.version_info.dmaentry_index_code]
        dmadata_data = bytearray(len(rom.file_dmadata.data))
        if self.log is not None:
            self.log.trace("Built DMA table:")
        for i, file in enumerate(rom.files):
            dma_entry = file.dma_entry
            if self.log is not None:
                self.log.trace(dma_entry)
            dma_entry_struct.pack_into(
                dmadata_data,
                i * dma_entry_struct.size,
                dma_entry.vrom_start,
                dma_entry.vrom_end,
                dma_entry.rom_start,
                dma_entry.rom_end,
            )
        rom.file_dmadata.data = dmadata_data
    def write(
        self,
        out,  # type: io.IOBase
    ):
        """Write the final ROM image to the *out* stream."""
        rom = self.rom
        # Size the image to the end of the last file.
        rom_data = bytearray(
            max(file.dma_entry.rom_start + len(file.data) for file in rom.files)
        )
        if self.log is not None:
            self.log.debug("Written ROM size: {:X}", len(rom_data))
        for file in rom.files:
            rom_data[
                file.dma_entry.rom_start : file.dma_entry.rom_start + len(file.data)
            ] = file.data
        out.write(rom_data)
class ModuleInfo:
    """Metadata describing a pyrt module: its task name, registration
    callback, the tasks it depends on, and a human-readable description."""
    def __init__(
        self,
        task,  # type: str
        register,  # type: Callable[[PyRTInterface],None]
        task_dependencies=frozenset(),  # type: Set[str]
        description="",  # type: str
    ):
        self.task = task
        self.register = register
        self.task_dependencies = task_dependencies
        # isinstance (rather than an exact type() match) also accepts
        # set/frozenset subclasses, and is the idiomatic check.
        assert isinstance(self.task_dependencies, (set, frozenset))
        self.description = description
    def __repr__(self) -> str:
        return (
            "ModuleInfo("
            + ", ".join(
                repr(v)
                for v in (
                    self.task,
                    self.register,
                    self.task_dependencies,
                    self.description,
                )
            )
            + ")"
        )
    def __str__(self) -> str:
        # "<task> - <description>(depends on a, b)" with the optional parts
        # omitted when empty.
        return (
            self.task
            + ((" - " + self.description) if self.description else "")
            + (
                ("(depends on " + ", ".join(self.task_dependencies) + ")")
                if self.task_dependencies
                else ""
            )
        )
class PyRTEvent:
    """A named pyrt event that modules can register callbacks for."""
    def __init__(
        self,
        id,  # type: str
        description,  # type: str
    ):
        self.id = id
        self.description = description
    def __repr__(self) -> str:
        return "PyRTEvent({!r}, {!r})".format(self.id, self.description)
    def __str__(self) -> str:
        return self.id + " - " + self.description
# Well-known pyrt events, fired in roughly this order during a run.
EVENT_PARSE_ROM = PyRTEvent(
    "Parse ROM",
    "Find which files are what (eg scene or overlay), "
    "after the dma table was parsed and the ROM split into files.",
)
EVENT_DUMP_FILES = PyRTEvent(
    "Dump files",
    "Write out files into an organized tree, outside of the ROM.",
)
EVENT_LOAD_FILES = PyRTEvent(
    "Load files",
    "Read files from a directory tree outside of the ROM.",
)
EVENT_PACK_ROM_BEFORE_FILE_ALLOC = PyRTEvent(
    "Pack ROM (before file alloc)",
    "Pack various data into files (before the file offsets in VROM/ROM get updated). "
    "At this point ROM/VROM offsets are invalid, and files may be resized.",
)
EVENT_PACK_ROM_AFTER_FILE_ALLOC = PyRTEvent(
"Pack | |
# From shadowsocksr: MySQL server error code constants.
# MySQL server error codes as defined in mysqld_error.h; the names mirror
# MySQLdb's MySQLdb.constants.ER module (ER_ prefix dropped).
ERROR_FIRST = 1000
HASHCHK = 1000
NISAMCHK = 1001
NO = 1002
YES = 1003
CANT_CREATE_FILE = 1004
CANT_CREATE_TABLE = 1005
CANT_CREATE_DB = 1006
DB_CREATE_EXISTS = 1007
DB_DROP_EXISTS = 1008
DB_DROP_DELETE = 1009
DB_DROP_RMDIR = 1010
CANT_DELETE_FILE = 1011
CANT_FIND_SYSTEM_REC = 1012
CANT_GET_STAT = 1013
CANT_GET_WD = 1014
CANT_LOCK = 1015
CANT_OPEN_FILE = 1016
FILE_NOT_FOUND = 1017
CANT_READ_DIR = 1018
CANT_SET_WD = 1019
CHECKREAD = 1020
DISK_FULL = 1021
DUP_KEY = 1022
ERROR_ON_CLOSE = 1023
ERROR_ON_READ = 1024
ERROR_ON_RENAME = 1025
ERROR_ON_WRITE = 1026
FILE_USED = 1027
FILSORT_ABORT = 1028
FORM_NOT_FOUND = 1029
GET_ERRNO = 1030
ILLEGAL_HA = 1031
KEY_NOT_FOUND = 1032
NOT_FORM_FILE = 1033
NOT_KEYFILE = 1034
OLD_KEYFILE = 1035
OPEN_AS_READONLY = 1036
OUTOFMEMORY = 1037
OUT_OF_SORTMEMORY = 1038
UNEXPECTED_EOF = 1039
CON_COUNT_ERROR = 1040
OUT_OF_RESOURCES = 1041
BAD_HOST_ERROR = 1042
HANDSHAKE_ERROR = 1043
DBACCESS_DENIED_ERROR = 1044
ACCESS_DENIED_ERROR = 1045
NO_DB_ERROR = 1046
UNKNOWN_COM_ERROR = 1047
BAD_NULL_ERROR = 1048
BAD_DB_ERROR = 1049
TABLE_EXISTS_ERROR = 1050
BAD_TABLE_ERROR = 1051
NON_UNIQ_ERROR = 1052
SERVER_SHUTDOWN = 1053
BAD_FIELD_ERROR = 1054
WRONG_FIELD_WITH_GROUP = 1055
WRONG_GROUP_FIELD = 1056
WRONG_SUM_SELECT = 1057
WRONG_VALUE_COUNT = 1058
TOO_LONG_IDENT = 1059
DUP_FIELDNAME = 1060
DUP_KEYNAME = 1061
DUP_ENTRY = 1062
WRONG_FIELD_SPEC = 1063
PARSE_ERROR = 1064
EMPTY_QUERY = 1065
NONUNIQ_TABLE = 1066
INVALID_DEFAULT = 1067
MULTIPLE_PRI_KEY = 1068
TOO_MANY_KEYS = 1069
TOO_MANY_KEY_PARTS = 1070
TOO_LONG_KEY = 1071
KEY_COLUMN_DOES_NOT_EXITS = 1072
BLOB_USED_AS_KEY = 1073
TOO_BIG_FIELDLENGTH = 1074
WRONG_AUTO_KEY = 1075
READY = 1076
NORMAL_SHUTDOWN = 1077
GOT_SIGNAL = 1078
SHUTDOWN_COMPLETE = 1079
FORCING_CLOSE = 1080
IPSOCK_ERROR = 1081
NO_SUCH_INDEX = 1082
WRONG_FIELD_TERMINATORS = 1083
BLOBS_AND_NO_TERMINATED = 1084
TEXTFILE_NOT_READABLE = 1085
FILE_EXISTS_ERROR = 1086
LOAD_INFO = 1087
ALTER_INFO = 1088
WRONG_SUB_KEY = 1089
CANT_REMOVE_ALL_FIELDS = 1090
CANT_DROP_FIELD_OR_KEY = 1091
INSERT_INFO = 1092
INSERT_TABLE_USED = 1093
UPDATE_TABLE_USED = 1093
NO_SUCH_THREAD = 1094
KILL_DENIED_ERROR = 1095
NO_TABLES_USED = 1096
TOO_BIG_SET = 1097
NO_UNIQUE_LOGFILE = 1098
TABLE_NOT_LOCKED_FOR_WRITE = 1099
TABLE_NOT_LOCKED = 1100
BLOB_CANT_HAVE_DEFAULT = 1101
WRONG_DB_NAME = 1102
WRONG_TABLE_NAME = 1103
TOO_BIG_SELECT = 1104
UNKNOWN_ERROR = 1105
UNKNOWN_PROCEDURE = 1106
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
WRONG_PARAMETERS_TO_PROCEDURE = 1108
UNKNOWN_TABLE = 1109
FIELD_SPECIFIED_TWICE = 1110
INVALID_GROUP_FUNC_USE = 1111
UNSUPPORTED_EXTENSION = 1112
TABLE_MUST_HAVE_COLUMNS = 1113
RECORD_FILE_FULL = 1114
UNKNOWN_CHARACTER_SET = 1115
TOO_MANY_TABLES = 1116
TOO_MANY_FIELDS = 1117
TOO_BIG_ROWSIZE = 1118
STACK_OVERRUN = 1119
WRONG_OUTER_JOIN = 1120
NULL_COLUMN_IN_INDEX = 1121
CANT_FIND_UDF = 1122
CANT_INITIALIZE_UDF = 1123
UDF_NO_PATHS = 1124
UDF_EXISTS = 1125
CANT_OPEN_LIBRARY = 1126
CANT_FIND_DL_ENTRY = 1127
FUNCTION_NOT_DEFINED = 1128
HOST_IS_BLOCKED = 1129
HOST_NOT_PRIVILEGED = 1130
# Restored value: the source contained a redaction artifact here;
# MySQL defines ER_PASSWORD_ANONYMOUS_USER = 1131 (between 1130 and 1132).
PASSWORD_ANONYMOUS_USER = 1131
PASSWORD_NOT_ALLOWED = 1132
PASSWORD_NO_MATCH = 1133
UPDATE_INFO = 1134
CANT_CREATE_THREAD = 1135
WRONG_VALUE_COUNT_ON_ROW = 1136
CANT_REOPEN_TABLE = 1137
INVALID_USE_OF_NULL = 1138
REGEXP_ERROR = 1139
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
NONEXISTING_GRANT = 1141
TABLEACCESS_DENIED_ERROR = 1142
COLUMNACCESS_DENIED_ERROR = 1143
ILLEGAL_GRANT_FOR_TABLE = 1144
GRANT_WRONG_HOST_OR_USER = 1145
NO_SUCH_TABLE = 1146
NONEXISTING_TABLE_GRANT = 1147
NOT_ALLOWED_COMMAND = 1148
SYNTAX_ERROR = 1149
DELAYED_CANT_CHANGE_LOCK = 1150
UNUSED1 = 1150
TOO_MANY_DELAYED_THREADS = 1151
UNUSED2 = 1151
ABORTING_CONNECTION = 1152
NET_PACKET_TOO_LARGE = 1153
NET_READ_ERROR_FROM_PIPE = 1154
NET_FCNTL_ERROR = 1155
NET_PACKETS_OUT_OF_ORDER = 1156
NET_UNCOMPRESS_ERROR = 1157
NET_READ_ERROR = 1158
NET_READ_INTERRUPTED = 1159
NET_ERROR_ON_WRITE = 1160
NET_WRITE_INTERRUPTED = 1161
TOO_LONG_STRING = 1162
TABLE_CANT_HANDLE_BLOB = 1163
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
DELAYED_INSERT_TABLE_LOCKED = 1165
UNUSED3 = 1165
WRONG_COLUMN_NAME = 1166
WRONG_KEY_COLUMN = 1167
WRONG_MRG_TABLE = 1168
DUP_UNIQUE = 1169
BLOB_KEY_WITHOUT_LENGTH = 1170
PRIMARY_CANT_HAVE_NULL = 1171
TOO_MANY_ROWS = 1172
REQUIRES_PRIMARY_KEY = 1173
NO_RAID_COMPILED = 1174
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
KEY_DOES_NOT_EXITS = 1176
CHECK_NO_SUCH_TABLE = 1177
CHECK_NOT_IMPLEMENTED = 1178
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
ERROR_DURING_COMMIT = 1180
ERROR_DURING_ROLLBACK = 1181
ERROR_DURING_FLUSH_LOGS = 1182
ERROR_DURING_CHECKPOINT = 1183
NEW_ABORTING_CONNECTION = 1184
DUMP_NOT_IMPLEMENTED = 1185
FLUSH_MASTER_BINLOG_CLOSED = 1186
INDEX_REBUILD = 1187
MASTER = 1188
MASTER_NET_READ = 1189
MASTER_NET_WRITE = 1190
FT_MATCHING_KEY_NOT_FOUND = 1191
LOCK_OR_ACTIVE_TRANSACTION = 1192
UNKNOWN_SYSTEM_VARIABLE = 1193
CRASHED_ON_USAGE = 1194
CRASHED_ON_REPAIR = 1195
WARNING_NOT_COMPLETE_ROLLBACK = 1196
TRANS_CACHE_FULL = 1197
SLAVE_MUST_STOP = 1198
SLAVE_NOT_RUNNING = 1199
BAD_SLAVE = 1200
MASTER_INFO = 1201
SLAVE_THREAD = 1202
TOO_MANY_USER_CONNECTIONS = 1203
SET_CONSTANTS_ONLY = 1204
LOCK_WAIT_TIMEOUT = 1205
LOCK_TABLE_FULL = 1206
READ_ONLY_TRANSACTION = 1207
DROP_DB_WITH_READ_LOCK = 1208
CREATE_DB_WITH_READ_LOCK = 1209
WRONG_ARGUMENTS = 1210
NO_PERMISSION_TO_CREATE_USER = 1211
UNION_TABLES_IN_DIFFERENT_DIR = 1212
LOCK_DEADLOCK = 1213
TABLE_CANT_HANDLE_FT = 1214
TABLE_CANT_HANDLE_FULLTEXT = 1214
CANNOT_ADD_FOREIGN = 1215
NO_REFERENCED_ROW = 1216
ROW_IS_REFERENCED = 1217
CONNECT_TO_MASTER = 1218
QUERY_ON_MASTER = 1219
ERROR_WHEN_EXECUTING_COMMAND = 1220
WRONG_USAGE = 1221
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
CANT_UPDATE_WITH_READLOCK = 1223
MIXING_NOT_ALLOWED = 1224
DUP_ARGUMENT = 1225
USER_LIMIT_REACHED = 1226
SPECIFIC_ACCESS_DENIED_ERROR = 1227
LOCAL_VARIABLE = 1228
GLOBAL_VARIABLE = 1229
NO_DEFAULT = 1230
WRONG_VALUE_FOR_VAR = 1231
WRONG_TYPE_FOR_VAR = 1232
VAR_CANT_BE_READ = 1233
CANT_USE_OPTION_HERE = 1234
NOT_SUPPORTED_YET = 1235
MASTER_FATAL_ERROR_READING_BINLOG = 1236
SLAVE_IGNORED_TABLE = 1237
INCORRECT_GLOBAL_LOCAL_VAR = 1238
WRONG_FK_DEF = 1239
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
OPERAND_COLUMNS = 1241
SUBQUERY_NO_1_ROW = 1242
UNKNOWN_STMT_HANDLER = 1243
CORRUPT_HELP_DB = 1244
CYCLIC_REFERENCE = 1245
AUTO_CONVERT = 1246
ILLEGAL_REFERENCE = 1247
DERIVED_MUST_HAVE_ALIAS = 1248
SELECT_REDUCED = 1249
TABLENAME_NOT_ALLOWED_HERE = 1250
NOT_SUPPORTED_AUTH_MODE = 1251
SPATIAL_CANT_HAVE_NULL = 1252
COLLATION_CHARSET_MISMATCH = 1253
SLAVE_WAS_RUNNING = 1254
SLAVE_WAS_NOT_RUNNING = 1255
TOO_BIG_FOR_UNCOMPRESS = 1256
ZLIB_Z_MEM_ERROR = 1257
ZLIB_Z_BUF_ERROR = 1258
ZLIB_Z_DATA_ERROR = 1259
CUT_VALUE_GROUP_CONCAT = 1260
WARN_TOO_FEW_RECORDS = 1261
WARN_TOO_MANY_RECORDS = 1262
WARN_NULL_TO_NOTNULL = 1263
WARN_DATA_OUT_OF_RANGE = 1264
WARN_DATA_TRUNCATED = 1265
WARN_USING_OTHER_HANDLER = 1266
CANT_AGGREGATE_2COLLATIONS = 1267
DROP_USER = 1268
REVOKE_GRANTS = 1269
CANT_AGGREGATE_3COLLATIONS = 1270
CANT_AGGREGATE_NCOLLATIONS = 1271
VARIABLE_IS_NOT_STRUCT = 1272
UNKNOWN_COLLATION = 1273
SLAVE_IGNORED_SSL_PARAMS = 1274
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
WARN_FIELD_RESOLVED = 1276
BAD_SLAVE_UNTIL_COND = 1277
MISSING_SKIP_SLAVE = 1278
UNTIL_COND_IGNORED = 1279
WRONG_NAME_FOR_INDEX = 1280
WRONG_NAME_FOR_CATALOG = 1281
WARN_QC_RESIZE = 1282
BAD_FT_COLUMN = 1283
UNKNOWN_KEY_CACHE = 1284
WARN_HOSTNAME_WONT_WORK = 1285
UNKNOWN_STORAGE_ENGINE = 1286
WARN_DEPRECATED_SYNTAX = 1287
NON_UPDATABLE_TABLE = 1288
FEATURE_DISABLED = 1289
OPTION_PREVENTS_STATEMENT = 1290
DUPLICATED_VALUE_IN_TYPE = 1291
TRUNCATED_WRONG_VALUE = 1292
TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
INVALID_ON_UPDATE = 1294
UNSUPPORTED_PS = 1295
GET_ERRMSG = 1296
GET_TEMPORARY_ERRMSG = 1297
UNKNOWN_TIME_ZONE = 1298
WARN_INVALID_TIMESTAMP = 1299
INVALID_CHARACTER_STRING = 1300
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
CONFLICTING_DECLARATIONS = 1302
SP_NO_RECURSIVE_CREATE = 1303
SP_ALREADY_EXISTS = 1304
SP_DOES_NOT_EXIST = 1305
SP_DROP_FAILED = 1306
SP_STORE_FAILED = 1307
SP_LILABEL_MISMATCH = 1308
SP_LABEL_REDEFINE = 1309
SP_LABEL_MISMATCH = 1310
SP_UNINIT_VAR = 1311
SP_BADSELECT = 1312
SP_BADRETURN = 1313
SP_BADSTATEMENT = 1314
UPDATE_LOG_DEPRECATED_IGNORED = 1315
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
QUERY_INTERRUPTED = 1317
SP_WRONG_NO_OF_ARGS = 1318
SP_COND_MISMATCH = 1319
SP_NORETURN = 1320
SP_NORETURNEND = 1321
SP_BAD_CURSOR_QUERY = 1322
SP_BAD_CURSOR_SELECT = 1323
SP_CURSOR_MISMATCH = 1324
SP_CURSOR_ALREADY_OPEN = 1325
SP_CURSOR_NOT_OPEN = 1326
SP_UNDECLARED_VAR = 1327
SP_WRONG_NO_OF_FETCH_ARGS = 1328
SP_FETCH_NO_DATA = 1329
SP_DUP_PARAM = 1330
SP_DUP_VAR = 1331
SP_DUP_COND = 1332
SP_DUP_CURS = 1333
SP_CANT_ALTER = 1334
SP_SUBSELECT_NYI = 1335
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
SP_VARCOND_AFTER_CURSHNDLR = 1337
SP_CURSOR_AFTER_HANDLER = 1338
SP_CASE_NOT_FOUND = 1339
FPARSER_TOO_BIG_FILE = 1340
FPARSER_BAD_HEADER = 1341
FPARSER_EOF_IN_COMMENT = 1342
FPARSER_ERROR_IN_PARAMETER = 1343
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
VIEW_NO_EXPLAIN = 1345
FRM_UNKNOWN_TYPE = 1346
WRONG_OBJECT = 1347
NONUPDATEABLE_COLUMN = 1348
VIEW_SELECT_DERIVED = 1349
VIEW_SELECT_CLAUSE = 1350
VIEW_SELECT_VARIABLE = 1351
VIEW_SELECT_TMPTABLE = 1352
VIEW_WRONG_LIST = 1353
WARN_VIEW_MERGE = 1354
WARN_VIEW_WITHOUT_KEY = 1355
VIEW_INVALID = 1356
SP_NO_DROP_SP = 1357
SP_GOTO_IN_HNDLR = 1358
TRG_ALREADY_EXISTS = 1359
TRG_DOES_NOT_EXIST = 1360
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
TRG_CANT_CHANGE_ROW = 1362
TRG_NO_SUCH_ROW_IN_TRG = 1363
NO_DEFAULT_FOR_FIELD = 1364
DIVISION_BY_ZERO = 1365
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
ILLEGAL_VALUE_FOR_TYPE = 1367
VIEW_NONUPD_CHECK = 1368
VIEW_CHECK_FAILED = 1369
PROCACCESS_DENIED_ERROR = 1370
RELAY_LOG_FAIL = 1371
PASSWD_LENGTH = 1372
UNKNOWN_TARGET_BINLOG = 1373
IO_ERR_LOG_INDEX_READ = 1374
BINLOG_PURGE_PROHIBITED = 1375
FSEEK_FAIL = 1376
BINLOG_PURGE_FATAL_ERR = 1377
LOG_IN_USE = 1378
LOG_PURGE_UNKNOWN_ERR = 1379
RELAY_LOG_INIT = 1380
NO_BINARY_LOGGING = 1381
RESERVED_SYNTAX = 1382
WSAS_FAILED = 1383
DIFF_GROUPS_PROC = 1384
NO_GROUP_FOR_PROC = 1385
ORDER_WITH_PROC = 1386
LOGGING_PROHIBIT_CHANGING_OF = 1387
NO_FILE_MAPPING = 1388
WRONG_MAGIC = 1389
PS_MANY_PARAM = 1390
KEY_PART_0 = 1391
VIEW_CHECKSUM = 1392
VIEW_MULTIUPDATE = 1393
VIEW_NO_INSERT_FIELD_LIST = 1394
VIEW_DELETE_MERGE_VIEW = 1395
CANNOT_USER = 1396
XAER_NOTA = 1397
XAER_INVAL = 1398
XAER_RMFAIL = 1399
XAER_OUTSIDE = 1400
XAER_RMERR = 1401
XA_RBROLLBACK = 1402
NONEXISTING_PROC_GRANT = 1403
PROC_AUTO_GRANT_FAIL = 1404
PROC_AUTO_REVOKE_FAIL = 1405
DATA_TOO_LONG = 1406
SP_BAD_SQLSTATE = 1407
STARTUP = 1408
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
CANT_CREATE_USER_WITH_GRANT = 1410
WRONG_VALUE_FOR_TYPE = 1411
TABLE_DEF_CHANGED = 1412
SP_DUP_HANDLER = 1413
SP_NOT_VAR_ARG = 1414
SP_NO_RETSET = 1415
CANT_CREATE_GEOMETRY_OBJECT = 1416
FAILED_ROUTINE_BREAK_BINLOG = 1417
BINLOG_UNSAFE_ROUTINE = 1418
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
EXEC_STMT_WITH_OPEN_CURSOR = 1420
STMT_HAS_NO_OPEN_CURSOR = 1421
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
NO_DEFAULT_FOR_VIEW_FIELD = 1423
SP_NO_RECURSION = 1424
TOO_BIG_SCALE = 1425
TOO_BIG_PRECISION = 1426
M_BIGGER_THAN_D = 1427
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
FOREIGN_DATA_STRING_INVALID = 1433
CANT_CREATE_FEDERATED_TABLE = 1434
TRG_IN_WRONG_SCHEMA = 1435
STACK_OVERRUN_NEED_MORE = 1436
TOO_LONG_BODY = 1437
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
TOO_BIG_DISPLAYWIDTH = 1439
XAER_DUPID = 1440
DATETIME_FUNCTION_OVERFLOW = 1441
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
VIEW_PREVENT_UPDATE = 1443
PS_NO_RECURSION = 1444
SP_CANT_SET_AUTOCOMMIT = 1445
MALFORMED_DEFINER = 1446
VIEW_FRM_NO_USER = 1447
VIEW_OTHER_USER = 1448
NO_SUCH_USER = 1449
FORBID_SCHEMA_CHANGE = 1450
ROW_IS_REFERENCED_2 = 1451
NO_REFERENCED_ROW_2 = 1452
SP_BAD_VAR_SHADOW = 1453
TRG_NO_DEFINER = 1454
OLD_FILE_FORMAT = 1455
SP_RECURSION_LIMIT = 1456
SP_PROC_TABLE_CORRUPT = 1457
SP_WRONG_NAME = 1458
TABLE_NEEDS_UPGRADE = 1459
SP_NO_AGGREGATE = 1460
MAX_PREPARED_STMT_COUNT_REACHED = 1461
VIEW_RECURSIVE = 1462
NON_GROUPING_FIELD_USED = 1463
TABLE_CANT_HANDLE_SPKEYS = 1464
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
REMOVED_SPACES = 1466
AUTOINC_READ_FAILED = 1467
USERNAME = 1468
HOSTNAME = 1469
WRONG_STRING_LENGTH = 1470
NON_INSERTABLE_TABLE = 1471
ADMIN_WRONG_MRG_TABLE = 1472
TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473
NAME_BECOMES_EMPTY = 1474
AMBIGUOUS_FIELD_TERM = 1475
FOREIGN_SERVER_EXISTS = 1476
FOREIGN_SERVER_DOESNT_EXIST = 1477
ILLEGAL_HA_CREATE_OPTION = 1478
PARTITION_REQUIRES_VALUES_ERROR = 1479
PARTITION_WRONG_VALUES_ERROR = 1480
PARTITION_MAXVALUE_ERROR = 1481
PARTITION_SUBPARTITION_ERROR = 1482
PARTITION_SUBPART_MIX_ERROR = 1483
PARTITION_WRONG_NO_PART_ERROR = 1484
PARTITION_WRONG_NO_SUBPART_ERROR = 1485
WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486
NO_CONST_EXPR_IN_RANGE_OR_LIST_ERROR = 1487
FIELD_NOT_FOUND_PART_ERROR = 1488
LIST_OF_FIELDS_ONLY_IN_HASH_ERROR = 1489
INCONSISTENT_PARTITION_INFO_ERROR = 1490
PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491
PARTITIONS_MUST_BE_DEFINED_ERROR = 1492
RANGE_NOT_INCREASING_ERROR = 1493
INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494
MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495
PARTITION_ENTRY_ERROR = 1496
MIX_HANDLER_ERROR = 1497
PARTITION_NOT_DEFINED_ERROR = 1498
TOO_MANY_PARTITIONS_ERROR = 1499
SUBPARTITION_ERROR = 1500
CANT_CREATE_HANDLER_FILE = 1501
BLOB_FIELD_IN_PART_FUNC_ERROR = 1502
UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503
NO_PARTS_ERROR = 1504
PARTITION_MGMT_ON_NONPARTITIONED = 1505
FOREIGN_KEY_ON_PARTITIONED = 1506
DROP_PARTITION_NON_EXISTENT = 1507
DROP_LAST_PARTITION = 1508
COALESCE_ONLY_ON_HASH_PARTITION = 1509
REORG_HASH_ONLY_ON_SAME_NO = 1510
REORG_NO_PARAM_ERROR = 1511
ONLY_ON_RANGE_LIST_PARTITION = 1512
ADD_PARTITION_SUBPART_ERROR = 1513
ADD_PARTITION_NO_NEW_PARTITION = 1514
COALESCE_PARTITION_NO_PARTITION = 1515
REORG_PARTITION_NOT_EXIST = 1516
SAME_NAME_PARTITION = 1517
NO_BINLOG_ERROR = 1518
CONSECUTIVE_REORG_PARTITIONS = 1519
REORG_OUTSIDE_RANGE = 1520
PARTITION_FUNCTION_FAILURE = 1521
PART_STATE_ERROR = 1522
LIMITED_PART_RANGE = 1523
PLUGIN_IS_NOT_LOADED = 1524
WRONG_VALUE = 1525
NO_PARTITION_FOR_GIVEN_VALUE = 1526
FILEGROUP_OPTION_ONLY_ONCE = 1527
CREATE_FILEGROUP_FAILED = 1528
DROP_FILEGROUP_FAILED = 1529
TABLESPACE_AUTO_EXTEND_ERROR = 1530
WRONG_SIZE_NUMBER = 1531
SIZE_OVERFLOW_ERROR = 1532
ALTER_FILEGROUP_FAILED = 1533
BINLOG_ROW_LOGGING_FAILED = 1534
BINLOG_ROW_WRONG_TABLE_DEF = 1535
BINLOG_ROW_RBR_TO_SBR = 1536
EVENT_ALREADY_EXISTS = 1537
EVENT_STORE_FAILED = 1538
EVENT_DOES_NOT_EXIST = 1539
EVENT_CANT_ALTER = 1540
EVENT_DROP_FAILED = 1541
EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542
EVENT_ENDS_BEFORE_STARTS = 1543
EVENT_EXEC_TIME_IN_THE_PAST = 1544
EVENT_OPEN_TABLE_FAILED = 1545
EVENT_NEITHER_M_EXPR_NOR_M_AT = 1546
COL_COUNT_DOESNT_MATCH_CORRUPTED = 1547
OBSOLETE_COL_COUNT_DOESNT_MATCH_CORRUPTED = 1547
CANNOT_LOAD_FROM_TABLE = 1548
OBSOLETE_CANNOT_LOAD_FROM_TABLE = 1548
EVENT_CANNOT_DELETE = 1549
EVENT_COMPILE_ERROR = 1550
EVENT_SAME_NAME = 1551
EVENT_DATA_TOO_LONG = 1552
DROP_INDEX_FK = 1553
WARN_DEPRECATED_SYNTAX_WITH_VER = 1554
CANT_WRITE_LOCK_LOG_TABLE = 1555
CANT_LOCK_LOG_TABLE = 1556
FOREIGN_DUPLICATE_KEY = 1557
FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557
COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558
TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559
STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560
NDB_CANT_SWITCH_BINLOG_FORMAT = 1561
PARTITION_NO_TEMPORARY = 1562
PARTITION_CONST_DOMAIN_ERROR = 1563
PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564
DDL_LOG_ERROR = 1565
NULL_IN_VALUES_LESS_THAN = 1566
WRONG_PARTITION_NAME = 1567
CANT_CHANGE_TX_CHARACTERISTICS = 1568
CANT_CHANGE_TX_ISOLATION = 1568
DUP_ENTRY_AUTOINCREMENT_CASE = 1569
EVENT_MODIFY_QUEUE_ERROR = 1570
EVENT_SET_VAR_ERROR = 1571
PARTITION_MERGE_ERROR = 1572
CANT_ACTIVATE_LOG = 1573
RBR_NOT_AVAILABLE = 1574
BASE64_DECODE_ERROR = 1575
EVENT_RECURSION_FORBIDDEN = 1576
EVENTS_DB_ERROR = 1577
ONLY_INTEGERS_ALLOWED = 1578
UNSUPORTED_LOG_ENGINE = 1579
BAD_LOG_STATEMENT = 1580
CANT_RENAME_LOG_TABLE = 1581
WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582
WRONG_PARAMETERS_TO_NATIVE_FCT = 1583
WRONG_PARAMETERS_TO_STORED_FCT = 1584
NATIVE_FCT_NAME_COLLISION = 1585
DUP_ENTRY_WITH_KEY_NAME = 1586
BINLOG_PURGE_EMFILE = 1587
EVENT_CANNOT_CREATE_IN_THE_PAST = 1588
EVENT_CANNOT_ALTER_IN_THE_PAST = 1589
SLAVE_INCIDENT = 1590
NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591
BINLOG_UNSAFE_STATEMENT = 1592
SLAVE_FATAL_ERROR = 1593
SLAVE_RELAY_LOG_READ_FAILURE = 1594
SLAVE_RELAY_LOG_WRITE_FAILURE = 1595
SLAVE_CREATE_EVENT_FAILURE = 1596
SLAVE_MASTER_COM_FAILURE = 1597
BINLOG_LOGGING_IMPOSSIBLE = 1598
VIEW_NO_CREATION_CTX = 1599
VIEW_INVALID_CREATION_CTX = 1600
SR_INVALID_CREATION_CTX = 1601
TRG_CORRUPTED_FILE = 1602
TRG_NO_CREATION_CTX = 1603
TRG_INVALID_CREATION_CTX = 1604
EVENT_INVALID_CREATION_CTX = 1605
TRG_CANT_OPEN_TABLE = 1606
CANT_CREATE_SROUTINE = 1607
NEVER_USED = 1608
NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609
SLAVE_CORRUPT_EVENT = 1610
LOAD_DATA_INVALID_COLUMN = 1611
LOG_PURGE_NO_FILE = 1612
XA_RBTIMEOUT = 1613
XA_RBDEADLOCK = 1614
NEED_REPREPARE = 1615
DELAYED_NOT_SUPPORTED = 1616
WARN_NO_MASTER_INFO = 1617
WARN_OPTION_IGNORED = 1618
PLUGIN_DELETE_BUILTIN = 1619
WARN_PLUGIN_DELETE_BUILTIN = 1619
WARN_PLUGIN_BUSY = 1620
VARIABLE_IS_READONLY = 1621
WARN_ENGINE_TRANSACTION_ROLLBACK = 1622
SLAVE_HEARTBEAT_FAILURE = 1623
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624
NDB_REPLICATION_SCHEMA_ERROR = 1625
CONFLICT_FN_PARSE_ERROR = 1626
EXCEPTIONS_WRITE_ERROR = 1627
TOO_LONG_TABLE_COMMENT = | |
# File: python/tvm/te/operation.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Operation class for computation declaration."""
# pylint: disable=invalid-name
from numbers import Integral as _Integral
import sys
import tvm._ffi
import tvm.tir
import tvm.tir._ffi_api
from tvm._ffi.base import string_types
from tvm.runtime import convert
# from tvm.arith import UninterpFun
from . import tag as _tag
from . import tensor as _tensor
from . import _ffi_api
from tvm.tir import Modes
from tvm.tir import LFunsWrapper
class Dimension(tvm.runtime.Object):
    """Base class for named tensor dimensions (see the registered
    RangeDimension / ScanDimension subclasses)."""
    pass
@tvm._ffi.register_object("te.Dimension")
class RangeDimension(Dimension):
    """A named dimension iterated over a (possibly ragged) range.

    Parameters
    ----------
    name : str
        The name of the dimension.
    """
    def __init__(self, name):
        super().__init__()
        self.name = name
        self.__init_handle_by_constructor__(_ffi_api.RangeDimension, name)

    def __str__(self):
        return 'Dimension('+self.name+')'
@tvm._ffi.register_object("te.Dimension")
class ScanDimension(Dimension):
    """A named dimension iterated by a scan (recurrent) axis.

    Parameters
    ----------
    name : str
        The name of the dimension.
    """
    def __init__(self, name):
        super().__init__()
        self.name = name
        self.__init_handle_by_constructor__(_ffi_api.ScanDimension, name)

    def __str__(self):
        return 'Dimension('+self.name+')'
def placeholder(shape, dtype=None, name="placeholder"):
    """Construct an empty (input) tensor object.

    Parameters
    ----------
    shape : tuple of Expr
        The shape of the tensor. A bare PrimExpr is treated as a 1-D shape.
    dtype : str, optional
        The data type of the tensor; defaults to "float32".
    name : str, optional
        The name hint of the tensor.

    Returns
    -------
    tensor : Tensor
        The created tensor.
    """
    if isinstance(shape, tvm.tir.PrimExpr):
        shape = (shape,)
    if dtype is None:
        dtype = "float32"
    return _ffi_api.Placeholder(shape, dtype, name)
def create_or_return_uf(expr):
    """Return `expr` unchanged if it already is an UninterpFun; otherwise wrap
    it in a constant UninterpFun whose range is (expr, expr)."""
    if isinstance(expr, tvm.tir.UninterpFun):
        return expr
    return tvm.tir.UninterpFun("uf", (expr, expr), [], lambda: expr)
def ragged_placeholder(dense_shape, dimensions, loop_extent_ufs, dtype=None,
                       name="placeholder", width_ufs=None, aggregate_ufs=None):
    """Construct a placeholder over a ragged iteration space.

    Parameters
    ----------
    dense_shape : tuple of Expr
        Dense (upper-bound) shape of the tensor.
    dimensions : list of Dimension
        The named dimensions of the tensor.
    loop_extent_ufs : list or LFunsWrapper
        Per-dimension loop-extent uninterpreted functions.
    dtype : str, optional
        Data type; defaults to "float32" downstream.
    name : str, optional
        Name hint of the tensor.
    width_ufs : list, optional
        Storage-width UFs; when given, a storage layout is built.
    aggregate_ufs : dict, optional
        Aggregate UFs for the storage layout. Defaults to an empty dict.

    Returns
    -------
    tensor : Tensor
        The created placeholder tensor.
    """
    # BUG FIX: the original used a mutable default argument (aggregate_ufs={});
    # a shared dict default can leak state between calls if ever mutated.
    if aggregate_ufs is None:
        aggregate_ufs = {}
    layout = None
    if width_ufs is not None:
        layout = Modes.storage_layout(dimensions, dense_shape, width_ufs, aggregate_ufs)
    if isinstance(loop_extent_ufs, LFunsWrapper):
        loop_extent_ufs = loop_extent_ufs.get_ufs()
    return indirect_placeholder_integrated(dense_shape, dimensions,
                                           list(zip(dimensions, loop_extent_ufs)),
                                           dtype, name, layout)
def indirect_placeholder(shape, self_dims, loop_extent_dims, idx_expr_dims, dtype=None, name="placeholder", layout=None):
    """Thin wrapper: concatenates loop-extent and index-expression dimension
    lists and delegates to indirect_placeholder_integrated."""
    combined_dim_ufs = loop_extent_dims + idx_expr_dims
    return indirect_placeholder_integrated(shape, self_dims, combined_dim_ufs, dtype, name, layout)
def indirect_placeholder_integrated(shape, self_dims, dim_ufs, dtype=None, name="placeholder", layout=None):
    """Create a placeholder whose loop bounds come from uninterpreted functions.

    Each entry of ``dim_ufs`` is either ``(dim, extent_uf)`` (domain
    ``[0, extent)``) or ``(dim, min_uf, extent_uf)``.
    """
    all_vars = []
    all_dims = []
    all_ufs = []
    for dim_uf in dim_ufs:
        dim = dim_uf[0]
        # NOTE(review): one None placeholder per dimension is appended and
        # forwarded to the FFI call below — confirm the C++ side expects a
        # None-filled list here.
        all_ufs.append(None)
        if len(dim_uf) == 2:
            _, max_val_uf_orig = dim_uf
            max_val_uf = create_or_return_uf(max_val_uf_orig)
            # The bound may depend on the already-created outer loop variables.
            max_val = tvm.tir.Call("int32", max_val_uf.fname, [v.var for v in all_vars],
                                   2, max_val_uf, 0, arg_dims = all_dims)
            iter_var = tvm.tir.IterVar((0, max_val), 'i' + name + str(len(all_vars)), 0)
            all_vars.append(iter_var)
            all_dims.append(dim)
        else:
            # Three-element form: explicit minimum and maximum bound UFs.
            _, min_uf_orig, max_val_uf_orig = dim_uf
            min_uf = create_or_return_uf(min_uf_orig)
            max_val_uf = create_or_return_uf(max_val_uf_orig)
            dom_min = tvm.tir.Call("int32", min_uf.fname, [v.var for v in all_vars],
                                   2, min_uf, 0, arg_dims = all_dims)
            dom_max_val = tvm.tir.Call("int32", max_val_uf.fname, [v.var for v in all_vars],
                                       2, max_val_uf, 0, arg_dims = all_dims)
            iter_var = tvm.tir.IterVar((dom_min, dom_max_val), 'i' + name + str(len(all_vars)), 0)
            all_vars.append(iter_var)
            all_dims.append(dim)
    shape = (shape,) if isinstance(shape, tvm.tir.PrimExpr) else shape
    dtype = "float32" if dtype is None else dtype
    return _ffi_api.IndirectPlaceholder(
        shape, layout, self_dims, all_dims, all_vars, all_ufs, dtype, name)
def compute(shape, fcompute, name="compute", tag="", attrs=None):
    """Construct a new tensor by computing over the shape domain.
    The compute rule is result[axis] = fcompute(axis)
    Parameters
    ----------
    shape: Tuple of Expr
        The shape of the tensor
    fcompute: lambda function of indices-> value
        Specifies the input source expression
    name: str, optional
        The name hint of the tensor
    tag: str, optional
        Additional tag information about the compute.
    attrs: dict, optional
        The additional auxiliary attributes about the compute.
    Returns
    -------
    tensor: Tensor
        The created tensor
    """
    # Inherit the tag from an enclosing TagScope; an explicit tag inside a
    # scope is disallowed.
    if _tag.TagScope.get_current() is not None:
        if tag != "":
            raise ValueError("nested tag is not allowed for now")
        tag = _tag.TagScope.get_current().tag
    shape = (shape,) if isinstance(shape, tvm.tir.PrimExpr) else shape
    # for python3
    shape = tuple([int(s) if isinstance(s, float) else s for s in shape])
    ndim = len(shape)
    code = fcompute.__code__
    out_ndim = ndim
    # Infer the output dimensionality from fcompute's arity: a zero-arg
    # lambda gets one synthesized index name per shape dimension.
    if code.co_argcount == 0:
        arg_names = ["i%d" % i for i in range(ndim)]
    else:
        arg_names = code.co_varnames[:code.co_argcount]
        out_ndim = code.co_argcount
    if out_ndim != len(arg_names):
        raise ValueError("fcompute do not match dimension, ndim=%d" % ndim)
    dim_var = [tvm.tir.IterVar((0, s), x, 0) for x, s in zip(arg_names, shape[:out_ndim])]
    body = fcompute(*[v.var for v in dim_var])
    if isinstance(body, _tensor.TensorIntrinCall):
        # Remaining (non-indexed) dimensions become tensorized axes (kind 4).
        for i, s in enumerate(shape[out_ndim:]):
            var_name = "ax" + str(i)
            dim_var.append(tvm.tir.IterVar((0, s), var_name, 4))
        op_node = _ffi_api.TensorComputeOp(name,
                                           tag,
                                           dim_var,
                                           body.reduce_axis,
                                           out_ndim,
                                           body.intrin,
                                           body.tensors,
                                           body.regions,
                                           body.scalar_inputs)
    else:
        if not isinstance(body, (list, tuple)):
            body = [body]
        body = convert(body)
        op_node = _ffi_api.ComputeOp(
            name, tag, attrs, dim_var, body)
    num = op_node.num_outputs
    outputs = tuple(op_node.output(i) for i in range(num))
    return outputs[0] if num == 1 else outputs
def ragged_compute(dense_shape, dimensions, loop_extent_ufs, fcompute, reduce_axis_ufs=None, fpred=None, name="compute",
                   tag="", attrs=None, loop_aggregate_ufs=None, width_uf_lists=None, aggregate_uf_lists=None, num_outputs=1):
    """Construct a tensor over a ragged iteration space described by
    per-dimension uninterpreted loop-bound functions."""
    storage_layouts = None
    if width_uf_lists is not None:
        # NOTE(review): the next line is dead code — this branch is only
        # reached when width_uf_lists is NOT None. The defaulting was likely
        # meant to sit outside the enclosing `if`; confirm intent.
        if width_uf_lists is None: width_uf_lists = [[]] * num_outputs
        if aggregate_uf_lists is None: aggregate_uf_lists = [{}] * num_outputs
        # storage_layouts = [Modes(dimensions, dense_shape, width_ufs, aggregate_ufs) for width_ufs,
        storage_layouts = [Modes.storage_layout(dimensions, dense_shape, width_ufs, aggregate_ufs) for width_ufs,
                           aggregate_ufs in zip(width_uf_lists, aggregate_uf_lists)]
    mode_loop_extent_ufs = []
    mode_loop_min_ufs = []
    if isinstance(loop_extent_ufs, LFunsWrapper): loop_extent_ufs = loop_extent_ufs.get_ufs()
    # Each loop bound is either a single extent UF (minimum fixed at 0) or a
    # (min_uf, extent_uf) pair.
    for uf in loop_extent_ufs:
        if isinstance(uf, tvm.tir.UninterpFun):
            mode_loop_min_ufs.append(tvm.tir.UninterpFun.from_constant('zero', 0, 'l'))
            mode_loop_extent_ufs.append(uf)
        else:
            mode_loop_min_ufs.append(uf[0])
            mode_loop_extent_ufs.append(uf[1])
    # loop_layout = Modes(dimensions, dense_shape, mode_loop_extent_ufs, loop_aggregate_ufs, loop_layout = True)
    loop_layout = Modes.loop_layout(dimensions, dense_shape, mode_loop_min_ufs, mode_loop_extent_ufs)
    output_shape = dense_shape
    dim_ufs = list()
    if _tag.TagScope.get_current() is not None:
        if tag != "":
            raise ValueError("nested tag is not allowed for now")
        tag = _tag.TagScope.get_current().tag
    output_shape = (output_shape,) if isinstance(output_shape, tvm.tir.PrimExpr) else output_shape
    # for python3
    output_shape = tuple([int(s) if isinstance(s, float) else s for s in output_shape])
    code = fcompute.__code__
    out_ndim = len(output_shape)
    if code.co_argcount > 1 and reduce_axis_ufs is None:
        raise ValueError("Ill-formed body lambda with more than one argument")
    if out_ndim != len(dimensions):
        raise ValueError("Dimensions of the output do not match the number of self dimensions given")
    all_dims = []
    axis = []
    dim_var_map = {}
    # Build one IterVar per output dimension; each bound may reference the
    # loop variables created so far (ragged nesting).
    for dim, ufs in zip(dimensions, loop_extent_ufs):
        min_uf, max_uf = None, None
        if isinstance(ufs, (list, tuple)):
            min_uf, max_uf = ufs
        else:
            max_uf = ufs
        dom_max = tvm.tir.Call("int32", max_uf.fname, [v.var for v in axis],
                               2, max_uf, 0, arg_dims = all_dims)
        if min_uf:
            dom_min = tvm.tir.Call("int32", min_uf.fname, [v.var for v in axis],
                                   2, min_uf, 0, arg_dims = all_dims)
        else:
            dom_min = 0
        iter_var = tvm.tir.IterVar((dom_min, dom_max), 'i' + name + str(len(axis)), 0)
        axis.append(iter_var)
        all_dims.append(dim)
        dim_var_map[dim] = iter_var
    if reduce_axis_ufs is not None:
        reduce_ivs = {}
        for iv_name, uf in reduce_axis_ufs:
            dom_max = tvm.tir.Call("int32", uf.fname, [v.var for v in axis],
                                   2, uf, 0, arg_dims = all_dims)
            # NOTE(review): `reduce_axis` is not defined in this chunk —
            # presumably the te.reduce_axis helper; confirm it is in scope.
            iter_var = reduce_axis((0, dom_max), iv_name)
            dim_var_map[iv_name] = iter_var
            reduce_ivs[iv_name] = iter_var
        body = fcompute({k: v.var for k, v in dim_var_map.items()}, reduce_ivs)
    else:
        body = fcompute({k: v.var for k, v in dim_var_map.items()})
    # Default predicate: constant true (no masking).
    pred = fpred({k: v.var for k, v in dim_var_map.items()}) if fpred is not None else [tvm.tir.IntImm('uint1', 1)]
    if isinstance(body, _tensor.TensorIntrinCall):
        # NOTE(review): `loop_domains` and `loop_var` are undefined in this
        # scope — this branch raises NameError if ever taken.
        for i, s in enumerate(loop_domains[out_ndim:]):
            var_name = "ax" + str(i)
            loop_var.append(tvm.tir.IterVar((0, s), var_name, 4))
        op_node = _ffi_api.TensorComputeOp(name, tag, loop_var, body.reduce_axis, out_ndim,
                                           body.intrin, body.tensors, body.regions, body.scalar_inputs)
    else:
        if not isinstance(body, (list, tuple)):
            body = [body]
        body = convert(body)
        if not isinstance(pred, (list, tuple)):
            pred = [pred]
        pred = convert(pred)
        op_node = _ffi_api.ComputeOp(name, tag, attrs, axis, dimensions, output_shape,
                                     storage_layouts, loop_layout,
                                     body, pred)
    num = op_node.num_outputs
    outputs = tuple(op_node.output(i) for i in range(num))
    return outputs[0] if num == 1 else outputs
def indirect_compute(output_shape, self_dims, loop_domains, idx_expr_ufs, fcompute,
                     reduce_axis_ufs=None, fpred=None, name="compute", tag="", attrs=None):
    """Thin wrapper: concatenates loop domains with index-expression UFs and
    delegates to indirect_compute_integrated."""
    combined_dim_ufs = loop_domains + idx_expr_ufs
    return indirect_compute_integrated(output_shape, self_dims, combined_dim_ufs,
                                       fcompute, reduce_axis_ufs, fpred, name, tag, attrs)
def indirect_compute_integrated(output_shape, dimensions, dim_ufs, fcompute, reduce_axis_ufs=None, fpred = None,
name="compute", tag="", attrs=None, storage_layouts=None, loop_layout=None):
if _tag.TagScope.get_current() is not None:
if tag != "":
raise ValueError("nested tag is not allowed for now")
tag = _tag.TagScope.get_current().tag
output_shape = (output_shape,) if isinstance(output_shape, tvm.tir.PrimExpr) else output_shape
# for python3
output_shape = tuple([int(s) if isinstance(s, float) | |
seed : int
Seed value for the random number generator.
show_fig : bool
Whether to show the curve fitting results as a figure.
verbose : bool
Whether to display information (statistics of the loss in each
generation) on the console.
parallel : bool
Whether to use parallel computing across layers, i.e., calculate
multiple layers simultaneously.
n_cores : int
Number of CPU cores to use. If None, all cores are used. No effects
if the parallelization options are set to ``False``.
save_txt : bool
Whether to save the results as a "HH_x_STATION_NAME.txt" file.
txt_filename : str
File name of the text file to save HH parameters. If the object is
created via a "curve" text file, then `txt_filename` can be ``None``
and the output filename will be determined automatically.
sep : str
Delimiter to separate columns of data in the output file.
save_fig : bool
Whether to save damping fitting figures to hard drive.
fig_filename : str
Full file name of the figure. If the object is created via a
"curve" text file, then ``fig_filename`` can be None, and the
output figure name will be determined automatically.
dpi : float
Figure resolution
Return
------
H4_x_param : PySeismoSoil.class_parameters.H4_Param_Multi_Layer
The best parameters for each soil layer found in the optimization.
"""
from .class_parameters import MKZ_Param_Multi_Layer
if save_fig and fig_filename is None:
fig_filename = self._produce_output_file_name('H4', 'png')
if save_txt:
if txt_filename is None:
txt_filename = self._produce_output_file_name('H4', 'txt')
if sep is None:
sep = self._sep
list_of_np_array = [_.raw_data for _ in self.curves]
params = sr.fit_all_damping_curves(
list_of_np_array,
mkz.fit_H4_x_single_layer,
mkz.tau_MKZ,
use_scipy=use_scipy,
pop_size=pop_size,
n_gen=n_gen,
lower_bound_power=lower_bound_power,
upper_bound_power=upper_bound_power,
eta=eta,
seed=seed,
show_fig=show_fig,
verbose=verbose,
parallel=parallel,
n_cores=n_cores,
save_fig=save_fig,
fig_filename=fig_filename,
dpi=dpi,
save_txt=save_txt,
txt_filename=txt_filename,
sep=sep,
func_serialize=mkz.serialize_params_to_array,
)
return MKZ_Param_Multi_Layer(params)
def _produce_output_file_name(self, prefix, extension):
"""
Produce the output file name.
Parameters
----------
prefix : {'HH', 'H4'} or str
Prefix of file name.
extension : {'png', 'txt'} or str
File extension (without the dot).
Returns
-------
new_file_name : str
The new file name based on the input "curve" file name.
"""
if self._filename is None:
raise ValueError(
'Please make sure to create this object from '
'a text file, so that there is an original file '
'name to work with.'
)
path_name, file_name = os.path.split(self._filename)
file_name_, _ = os.path.splitext(file_name)
if 'curve_' in file_name_:
site_name = file_name_[6:]
else:
site_name = file_name_
new_file_name = '%s_x_%s.%s' % (prefix, site_name, extension)
return new_file_name
class Multiple_GGmax_Curves(Multiple_Curves):
    """
    Class implementation of multiple G/Gmax curves.
    Its behavior is similar to a list,
    but with a more stringent requirement: all elements are of the same data
    type, i.e., GGmax_Curve.
    The list-like behaviors available are:
        - indexing: foo[3]
        - slicing: foo[:4]
        - setting values: foo[2] = ...
        - length: len(foo)
        - deleting item: del foo[2]
        - checking existence: bar in foo
        - appending: foo.append(bar)
    Parameters
    ----------
    filename_or_list_of_curves : str or list<numpy.ndarray>
        A file name of a validly formatted "curve file", or a list of 2-column
        numpy arrays, which are in (strain [%], G/Gmax) format.
    sep : str
        Delimiter of the file to be imported. If ``filename_or_list_of_curves``
        is a list, this parameter has no effect.
    Attributes
    ----------
    curves : list<GGmax_Curve>
        A list of GGmax_Curve objects.
    n_layer : int
        The number of soil layers (i.e., the length of the list).
    """
    def __init__(self, filename_or_list_of_curves, *, sep='\t'):
        if isinstance(filename_or_list_of_curves, str):  # file name
            curves = np.genfromtxt(filename_or_list_of_curves, delimiter=sep)
            list_of_GGmax_curves, _ = hlp.extract_from_curve_format(curves)
            self._filename = filename_or_list_of_curves
        elif isinstance(filename_or_list_of_curves, list):
            list_of_GGmax_curves = filename_or_list_of_curves
            self._filename = None  # no original file to derive names from
        else:
            raise TypeError('Unrecognized type for `filename_or_list_of_curves`.')
        self._sep = sep
        super(Multiple_GGmax_Curves, self).__init__(
            list_of_GGmax_curves,
            element_class=GGmax_Curve,
        )

    def plot(
        self, plot_interpolated=True, fig=None, ax=None, title=None,
        xlabel='Strain [%]', ylabel='G/Gmax', figsize=(3, 3), dpi=100,
        **kwargs_to_matplotlib,
    ):
        """
        Plot multiple curves together on one figure.
        Parameters
        ----------
        plot_interpolated : bool
            Whether to plot the interpolated curve or the raw data.
        fig : matplotlib.figure.Figure or ``None``
            Figure object. If None, a new figure will be created.
        ax : matplotlib.axes._subplots.AxesSubplot or ``None``
            Axes object. If None, a new axes will be created.
        title : str
            Title of plot.
        xlabel : str
            X label of plot.
        ylabel : str
            Y label of plot.
        figsize: (float, float)
            Figure size in inches, as a tuple of two numbers. The figure
            size of ``fig`` (if not ``None``) will override this parameter.
        dpi : float
            Figure resolution. The dpi of ``fig`` (if not ``None``) will override
            this parameter.
        **kwargs_to_matplotlib :
            Keyword arguments to be passed to ``matplotlib.pyplot.plot()``.
        Returns
        -------
        fig : matplotlib.figure.Figure
            The figure object being created or being passed into this function.
        ax : matplotlib.axes._subplots.AxesSubplot
            The axes object being created or being passed into this function.
        """
        # Delegate entirely to the parent class; only the default labels differ.
        fig, ax = super(Multiple_GGmax_Curves, self).plot(
            plot_interpolated=plot_interpolated, fig=fig,
            ax=ax, title=title, xlabel=xlabel, ylabel=ylabel,
            figsize=figsize, dpi=dpi, **kwargs_to_matplotlib,
        )
        return fig, ax

    def get_curve_matrix(
        self, damping_filler_value=1.0, save_to_file=False, full_file_name=None,
    ):
        """
        Based on the G/Gmax data defined in objects of this class, produce a
        full "curve matrix" in the following format:
         +------------+--------+------------+-------------+-------------+--------+-----+
         | strain [%] | G/Gmax | strain [%] | damping [%] |  strain [%] | G/Gmax | ... |
         +============+========+============+=============+=============+========+=====+
         |    ...     |  ...   |    ...     |    ...      |    ...      |  ...   | ... |
         +------------+--------+------------+-------------+-------------+--------+-----+
        Since this class only defines G/Gmax curves, not damping curves,
        damping values will be filled with some dummy values.
        Parameters
        ----------
        damping_filler_value : float
            A dummy value to fill all the damping curves.
        save_to_file : bool
            Whether or not to save the "curve matrix" as a text file.
        full_file_name : str or ``None``
            Full file name to save to the hard drive. It can be ``None`` if
            ``save_to_file`` is set to ``False``.
        Returns
        -------
        curve_matrix : numpy.ndarray
            A matrix containing G/Gmax curves in the above-mentioned format.
        """
        lengths = []  # lengths of strain array of each layer
        for curve_ in self.curves:
            lengths.append(len(curve_.strain))
        max_length = np.max(lengths)
        curve_matrix = None
        for curve_ in self.curves:
            strain = curve_.strain
            if len(curve_.strain) == max_length:
                strain_ = strain  # we can use the original strain array
                GGmax_ = curve_.GGmax
            else:  # otherwise we need a new strain array to match `max_length`
                # Resample on a log-spaced grid and interpolate G/Gmax onto it.
                strain_ = np.geomspace(np.min(strain), np.max(strain), max_length)
                GGmax_ = np.interp(strain_, strain, curve_.GGmax)
            # END IF
            # Damping columns are placeholders (this class has no damping data).
            damping = np.ones_like(strain_) * damping_filler_value
            tmp_matrix = np.column_stack((strain_, GGmax_, strain_, damping))
            if curve_matrix is None:
                curve_matrix = tmp_matrix
            else:
                curve_matrix = np.column_stack((curve_matrix, tmp_matrix))
            # END IF
        # END FOR
        # NOTE(review): `save_to_file`/`full_file_name` are accepted but never
        # used in this chunk — confirm whether saving was meant to happen here.
        return curve_matrix
class Multiple_GGmax_Damping_Curves:
"""
A "parent" class that holds both G/Gmax curves and damping curves
information. The user can EITHER initialize this class by providing
instances of ``Multiple_GGmax_Curves`` and ``Multiple_Damping_Curves``
classes, OR by providing a numpy array containing the curves. (The user
can provide one and only one input parameter, and leave the other parameter
to ``None``.)
Parameters
----------
mgc_and_mdc : (Multiple_GGmax_Curves, Multiple_Damping_Curves) or ``None``
A tuple of two elements, which are the G/Gmax curve information and
the damping curve information, respectively. The two objects needs to
have the same ``n_layer`` attribute.
data : numpy.ndarray, str, or ``None``
A 2D numpy array of the following format:
+------------+--------+------------+-------------+-------------+--------+-----+
| strain [%] | G/Gmax | strain [%] | damping [%] | strain [%] | G/Gmax | ... |
+============+========+============+=============+=============+========+=====+
| ... | ... | ... | ... | ... | ... | ... |
+------------+--------+------------+-------------+-------------+--------+-----+
Or a full name of a text file containing the 2D array.
Attributes
----------
mgc : Multiple_GGmax_Curves
Object containing information of G/Gmax curves. It will be ``None`` if
``mgc_and_mdc`` is not provided.
mdc : Multiple_Damping_Curves
Object containing information of damping curves. It will be ``None`` if
``mgc_and_mdc`` is not provided.
data : numpy.ndarray
2D numpy array containing the curve information in the format shown
above. It will be ``None`` if the ``data`` is not provided.
n_layer : int
Number of soil layers.
"""
def __init__(self, *, mgc_and_mdc=None, data=None):
if mgc_and_mdc is None and data is None:
raise ValueError(
'Both parameters are `None`. Please provide '
'one and only one input parameter.'
)
if mgc_and_mdc is not None and data is not None:
raise ValueError(
'Both parameters are not `None`. Please | |
from mesa import Model
from mesa.time import BaseScheduler
from mesa.datacollection import DataCollector
from mesa.space import MultiGrid
from mesa.space import ContinuousSpace
import model.ramenScript
from collections import defaultdict
import random
from model.time import Time
from space.room import Room
from space.door import Door
from space.wall import Wall
from agents.Light import Light
import configuration.settings
import configuration.defineOccupancy
import configuration.defineMap
from agents.WorkerAgent import WorkerAgent
from agents.TimeAgent import TimeAgent
from agents.SensorAgent import SensorAgent
from datacollection.WorkerCollector import WorkerCollector
from datacollection.SensorCollector import SensorCollector
from datacollection.TimeCollector import TimeCollector
from classes.Task import Task
import configuration.workload_settings as workload_settings
import configuration.email_settings as email_settings
import numpy as np
import math
import random
# somen
# Smart Office Multiagent emotional Environments
class SOMENModel(Model):
def __init__(self, width, height):
    """Build the office model: map (rooms/doors/walls), agents (workers,
    timer, sensor, lights) and data collectors, on a width x height grid."""
    # Model attributes initialization
    self.workers_number = 10
    self.agents = []
    self.workers = []
    self.average_stress = 0
    self.running = True
    #SOBA
    configuration.settings.init()
    configuration.defineOccupancy.init()
    configuration.defineMap.init()
    self.clock = Time()
    #Vars of control
    self.num_occupants = 0
    self.day = self.clock.day
    self.NStep = 0
    self.placeByStateByTypeAgent = {}
    self.agentsWorkingByStep = []
    self.agentsIn = 0
    # Schedule
    self.schedule = BaseScheduler(self)
    self.grid = MultiGrid(width, height, False)
    #Create the map
    self.createRooms()
    self.setMap(width, height)
    self.createDoors()
    self.createWalls()
    #Create agents
    self.setAgents()
    # Create timer agent
    self.timer = TimeAgent(len(self.agents), self)
    self.schedule.add(self.timer)
    self.agents.append(self.timer)
    # Create sensor agent
    self.sensor = SensorAgent(len(self.agents), self)
    self.schedule.add(self.sensor)
    self.agents.append(self.sensor)
    '''
    # Create workers agents
    for i in range(self.workers_number):
        worker = WorkerAgent(i+len(self.agents), self)
        self.schedule.add(worker)
        self.workers.append(worker)
    '''
    # Create data collectors
    self.model_collector = DataCollector(model_reporters={"Average Stress": lambda a: a.average_stress})
    self.worker_collector = WorkerCollector(agent_reporters={"Stress": lambda a: a.stress,
        "Event Stress": lambda a: a.event_stress, "Time Pressure": lambda a: a.time_pressure,
        "Effective Fatigue": lambda a: a.effective_fatigue, "Productivity": lambda a: a.productivity,
        'Emails read': lambda a: a.emails_read, 'Pending tasks': lambda a: len(a.tasks),
        'Overtime hours': lambda a: a.overtime_hours, 'Rest at work hours': lambda a: a.rest_at_work_hours,
        'Tasks completed': lambda a: a.tasks_completed})
    self.sensor_collector = SensorCollector(agent_reporters={"Temperature": lambda a: a.wbgt,
        "Noise": lambda a: a.noise, "Luminosity": lambda a: a.luminosity})
    self.time_collector = TimeCollector(agent_reporters={"Day": lambda a: a.days,
        "Time": lambda a: a.clock})
#SOBA
def setAgents(self):
    """Create lights and occupant (worker) agents and place them on the grid."""
    self.lights = []
    id_light = 0
    # One Light per room; rooms sharing the same name prefix (corridor)
    # share a single light.
    for room in self.rooms:
        if room.typeRoom != 'out' and room.light == False:
            light = Light(id_light, self, room)
            self.lights.append(light)
            id_light = id_light + 1
            room.light = light
            for room2 in self.rooms:
                if room.name.split(r".")[0] == room2.name.split(r".")[0]:
                    room2.light = light
    # Height and Width
    height = self.grid.height
    width = self.grid.width
    # CREATE AGENTS
    self.agents = []
    # Create occupants, grouped by occupancy type defined in the JSON config.
    for n_type_occupants in configuration.defineOccupancy.occupancy_json:
        self.placeByStateByTypeAgent[n_type_occupants['type']] = n_type_occupants['states']
        n_agents = n_type_occupants['N']
        for i in range(0, n_agents):
            # Worker ids are offset by 1000 to avoid clashing with other agents.
            a = WorkerAgent(i+len(self.agents)+1000, self, n_type_occupants)
            self.workers.append(a)
            self.schedule.add(a)
            # All workers start outside the building.
            self.grid.place_agent(a, self.outBuilding.pos)
            self.pushAgentRoom(a, self.outBuilding.pos)
            self.num_occupants = self.num_occupants + 1
    self.schedule.add(self.clock)
    for light in self.lights:
        self.schedule.add(light)
def isConected(self, pos):
    # NOTE(review): this method appears unfinished/broken. After the early
    # `return False`, the remaining code references `width`, `height` and
    # `self.pos_out_of_map`, none of which are defined in this scope
    # (NameError if reached), and `append(x, y)` passes two arguments to
    # list.append (TypeError). Confirm intent before relying on it.
    nextRoom = False
    for room in self.rooms:
        if room.pos == pos:
            nextRoom = room
    if nextRoom == False:
        return False
    for x in range(0, width):
        for y in range(0, height):
            self.pos_out_of_map.append(x, y)
    for room in self.rooms:
        self.pos_out_of_map.remove(room.pos)
def createRooms(self):
    """Instantiate Room objects from the JSON map definition and make the
    room-connectivity graph symmetric."""
    rooms = configuration.defineMap.rooms_json
    self.rooms = []
    for room in rooms:
        newRoom = 0
        name = room['name']
        typeRoom = room['type']
        if typeRoom != 'out':
            conectedTo = room.get('conectedTo')
            entrance = room.get('entrance')
            measures = room['measures']
            dx = measures['dx']
            dy = measures['dy']
            newRoom = Room(name, typeRoom, conectedTo, dx, dy)
            newRoom.entrance = entrance
        else:
            # The 'out' room represents the outside of the building.
            newRoom = Room(name, typeRoom, None, 0, 0,)
            self.outBuilding = newRoom
        self.rooms.append(newRoom)
    # Make connectivity symmetric: if A lists B, both get each other.
    for room1 in self.rooms:
        if room1.conectedTo is not None:
            for otherRooms in list(room1.conectedTo.values()):
                for room2 in self.rooms:
                    if room2.name == otherRooms:
                        room1.roomsConected.append(room2)
                        room2.roomsConected.append(room1)
    # De-duplicate connection lists.
    for room in self.rooms:
        room.roomsConected = list(set(room.roomsConected))
    # Count rooms belonging to each corridor (shared name prefix before '.').
    sameRoom = {}
    for room in self.rooms:
        if sameRoom.get(room.name.split(r".")[0]) is None:
            sameRoom[room.name.split(r".")[0]] = 1
        else:
            sameRoom[room.name.split(r".")[0]] = sameRoom[room.name.split(r".")[0]] + 1
def setMap(self, width, height):
    """Assign a grid position to every room, starting from the entrance room
    at (1, 1) and walking the connectivity graph via the R/U/D/L orientations."""
    rooms_noPos = self.rooms
    rooms_using = []
    rooms_used = []
    # Seed the layout with the entrance room.
    for room in self.rooms:
        if room.entrance is not None:
            room.pos = (int(1), 1)
            rooms_using.append(room)
            rooms_used.append(room)
            rooms_noPos.remove(room)
            break
    # NOTE(review): rooms_using is mutated (remove/append) while being
    # iterated — it acts as a de-facto work queue here, but the traversal
    # order is fragile; confirm before modifying.
    while len(rooms_noPos) > 0:
        for roomC in rooms_using:
            xc, yc = roomC.pos
            rooms_conected = roomC.conectedTo
            rooms_using.remove(roomC)
            if rooms_conected is not None:
                orientations = list(rooms_conected.keys())
                # Place each neighbor one cell away in its stated direction.
                for orientation in orientations:
                    if orientation == 'R':
                        for room in rooms_noPos:
                            if room.name == rooms_conected['R']:
                                room.pos = (int(xc + 1), yc)
                                rooms_noPos.remove(room)
                                rooms_used.append(room)
                                rooms_using.append(room)
                    elif orientation == 'U':
                        for room in rooms_noPos:
                            if room.name == rooms_conected['U']:
                                room.pos = (xc, int(yc + 1))
                                rooms_noPos.remove(room)
                                rooms_used.append(room)
                                rooms_using.append(room)
                    elif orientation == 'D':
                        for room in rooms_noPos:
                            if room.name == rooms_conected['D']:
                                room.pos = (xc, int(yc - 1))
                                rooms_noPos.remove(room)
                                rooms_used.append(room)
                                rooms_using.append(room)
                    elif orientation == 'L':
                        for room in rooms_noPos:
                            if room.name == rooms_conected['L']:
                                room.pos = (int(xc -1), yc)
                                rooms_noPos.remove(room)
                                rooms_used.append(room)
                                rooms_using.append(room)
                    else:
                        pass
    self.rooms = rooms_used
def createDoors(self):
    """Create one Door per pair of connected rooms that do not belong to the
    same corridor (shared name prefix), avoiding duplicate doors."""
    self.doors = []
    for first_room in self.rooms:
        for second_room in first_room.roomsConected:
            if second_room.name == first_room.name:
                continue
            already_exists = any(
                (existing.room1.name == first_room.name and existing.room2.name == second_room.name) or
                (existing.room2.name == first_room.name and existing.room1.name == second_room.name)
                for existing in self.doors
            )
            shares_corridor = (
                second_room.name.split(r".")[0] == first_room.name.split(r".")[0]
            )
            if not already_exists and not shares_corridor:
                new_door = Door(first_room, second_room)
                self.doors.append(new_door)
                second_room.doors.append(new_door)
                first_room.doors.append(new_door)
def createWalls(self):
    """Build the wall list of every indoor room.

    For each of the four neighboring cells (up, down, right, left — same
    order as the original code) the rule is:
      - neighbor room in the same corridor (same name prefix): no wall;
      - neighbor room in a different corridor: shared wall `Wall(room, other)`;
      - no room at that position: outer wall `Wall(room)`.
    The original repeated this logic four times verbatim; it is factored
    into a single loop over the neighbor offsets (behavior unchanged).
    """
    for room in self.rooms:
        if room.typeRoom != 'out':
            walls = []
            xr, yr = room.pos
            # Offsets in the original evaluation order: A(0,+1), B(0,-1),
            # C(+1,0), D(-1,0).
            for dx, dy in ((0, 1), (0, -1), (1, 0), (-1, 0)):
                neighbor = self.getRoom((xr + dx, yr + dy))
                if neighbor != False:
                    if neighbor.name.split(r".")[0] == room.name.split(r".")[0]:
                        # Same corridor: open passage, no wall.
                        pass
                    else:
                        walls.append(Wall(room, neighbor))
                else:
                    # Map edge / no room: outer wall.
                    walls.append(Wall(room))
            room.walls = walls
def getPosState(self, name, typeA):
    """Return the position associated with state `name` for agent type
    `typeA`; dict-valued positions carry a remaining capacity that is
    decremented when a slot is handed out."""
    placeByStateByTypeAgent = self.placeByStateByTypeAgent
    n = 0
    for state in self.placeByStateByTypeAgent[typeA]:
        if state.get('name') == name:
            pos1 = state.get('position')
            if isinstance(pos1, dict):
                # Take the first position with capacity left and consume one unit.
                for k,v in pos1.items():
                    if v > 0:
                        placeByStateByTypeAgent[typeA][n]['position'][k] = v - 1
                        self.placeByStateByTypeAgent = placeByStateByTypeAgent
                        return k
                # All capacities exhausted: fall back to the last position key.
                return list(pos1.keys())[-1]
            else:
                # Plain (non-dict) position: return it directly.
                return pos1
        n = n +1
def thereIsClosedDoor(self, beforePos, nextPos):
    """Return True if moving from `beforePos` to `nextPos` crosses a closed
    door (a door whose state == False is treated as closed here)."""
    oldRoom = False
    newRoom = False
    # BUG FIX: the original iterated over the undefined global name `rooms`,
    # which raises NameError at runtime; the model's rooms live in self.rooms.
    for room in self.rooms:
        if room.pos == beforePos:
            oldRoom = room
        if room.pos == nextPos:
            newRoom = room
    for door in self.doors:
        if (door.room1.name == oldRoom.name and door.room2.name == newRoom.name) or \
                (door.room2.name == oldRoom.name and door.room1.name == newRoom.name):
            if door.state == False:
                return True
    return False
def thereIsOccupant(self, pos):
    """Return True when at least one WorkerAgent stands on the given grid cell."""
    cell_agents = self.grid.get_cell_list_contents([pos])
    return any(isinstance(entity, WorkerAgent) for entity in cell_agents)
def ThereIsOtherOccupantInRoom(self, room, agent):
    """Return True if some WorkerAgent other than `agent` occupies any room
    of the same corridor (shared name prefix) as `room`."""
    corridor = room.name.split(r".")[0]
    for candidate_room in self.rooms:
        if candidate_room.name.split(r".")[0] != corridor:
            continue
        for entity in self.grid.get_cell_list_contents(candidate_room.pos):
            if isinstance(entity, WorkerAgent) and entity != agent:
                return True
    return False
def ThereIsSomeOccupantInRoom(self, room):
    """Return True if any WorkerAgent occupies any room of the same corridor
    (shared name prefix) as `room`."""
    corridor = room.name.split(r".")[0]
    for candidate_room in self.rooms:
        if candidate_room.name.split(r".")[0] != corridor:
            continue
        for entity in self.grid.get_cell_list_contents(candidate_room.pos):
            if isinstance(entity, WorkerAgent):
                return True
    return False
def thereIsOccupantInRoom(self, room, agent):
    """Return True if the specific WorkerAgent `agent` is inside any room of
    the same corridor (shared name prefix) as `room`."""
    corridor = room.name.split(r".")[0]
    for candidate_room in self.rooms:
        if candidate_room.name.split(r".")[0] != corridor:
            continue
        for entity in self.grid.get_cell_list_contents(candidate_room.pos):
            if isinstance(entity, WorkerAgent) and entity == agent:
                return True
    return False
def getRoom(self, pos):
    """Return the Room located at `pos`, or False when no room matches."""
    return next((candidate for candidate in self.rooms if candidate.pos == pos), False)
def pushAgentRoom(self, agent, pos):
    """Record that `agent` entered the room located at `pos`."""
    self.getRoom(pos).agentsInRoom.append(agent)
def popAgentRoom(self, agent, pos):
    """Record that `agent` left the room located at `pos`."""
    self.getRoom(pos).agentsInRoom.remove(agent)
def openDoor(self, agent, room1, room2):
    """Set state = False on every door joining room1 and room2.

    NOTE(review): thereIsClosedDoor treats state == False as *closed*, so the
    polarity here looks inverted — preserved as-is; verify intended meaning.
    """
    for door in self.doors:
        connects = (door.room1 == room1 and door.room2 == room2) or \
                   (door.room1 == room2 and door.room2 == room1)
        if connects:
            door.state = False
def closeDoor(self, agent, room1, room2):
    """Stochastically set the state of every door joining room1 and room2.

    A single random draw (0..10) decides the outcome for all matching doors:
    state becomes False when the draw is <= 7, True otherwise.
    """
    draw = random.randint(0, 10)
    for door in self.doors:
        connects = (door.room1 == room1 and door.room2 == room2) or \
                   (door.room1 == room2 and door.room2 == room1)
        if connects:
            door.state = False if draw <= 7 else True
def getMatrix(self,agent):
    """Refresh the agent's Markov transition matrix for the current clock time."""
    new_matrix = configuration.defineOccupancy.returnMatrix(agent, self.clock.clock)
    agent.markov_matrix = new_matrix
def getTimeInState(self, agent):
    """Return the time-in-state matrix for `agent` at the current clock time."""
    matrix_time_in_state = configuration.defineOccupancy.getTimeInState(agent, self.clock.clock)
    return matrix_time_in_state
def sobaStep(self):
    """Advance the underlying SOBA simulation by one scheduler step and keep
    per-step/day counters up to date."""
    # Count agents currently working at their workplace during this step.
    aw = 0
    for agent in self.agents:
        if agent.state == 'working in my workplace':
            aw = aw + 1
    self.agentsWorkingByStep.append(aw)
    self.schedule.step()
    if (self.clock.day > self.day):
        self.day = self.day + 1
    self.NStep = self.NStep + 1
    # NOTE(review): once the clock passes 17 the model dumps JSON and then
    # spins forever in `while(True): pass`, freezing the run — confirm this
    # busy-wait is the intended way to stop the simulation.
    if self.clock.clock > 17:
        model.ramenScript.generateJSON()
        while(True):
            pass
def step(self):
    """One model tick: run the SOBA step, then daily/hourly bookkeeping."""
    self.sobaStep()
    # Daily housekeeping: new tasks and email distribution.
    if self.timer.new_day:
        self.addTasks()
        self.createEmailsDistribution()
    # NOTE(review): raises ZeroDivisionError when self.workers is empty —
    # confirm workers are always created before stepping.
    self.average_stress = sum(worker.stress for worker in self.workers)/len(self.workers)
    # Collect metrics once per simulated hour.
    if self.timer.new_hour:
        self.worker_collector.collect(self)
        self.sensor_collector.collect(self)
        self.time_collector.collect(self)
        self.model_collector.collect(self)
def addTasks(self):
''' Add tasks to workers '''
# Get task distribution params
| |
<reponame>leboncoin/vault-manager<filename>vaultmanager/modules/VaultManagerLDAP.py
import os
import yaml
import logging
import re
from jinja2 import Template
from collections import namedtuple
try:
from lib.VaultClient import VaultClient
from lib.LDAPReader import LDAPReader
import lib.utils as utils
except ImportError:
from vaultmanager.lib.VaultClient import VaultClient
from vaultmanager.lib.LDAPReader import LDAPReader
import vaultmanager.lib.utils as utils
class VaultManagerLDAP:
"""
LDAP Module
"""
logger = None
subparser = None
kwargs = None
module_name = None
base_logger = None
conf = None
ldap_conf = None
vault_client = None
ldap_users = None
ldap_kubernetes_groups = None
policies_folder = None
user_policies_folder = None
group_policies_folder = None
kubernetes_policies_folder = None
group_policies_to_create = None
kubernetes_policies_to_create = None
user_policies_to_create = None
def __init__(self, base_logger=None):
"""
:param base_logger: main class name
:type base_logger: string
"""
self.base_logger = base_logger
if base_logger:
self.logger = logging.getLogger(
base_logger + "." + self.__class__.__name__)
else:
self.logger = logging.getLogger()
self.logger.debug("Initializing VaultManagerLDAP")
    def connect_to_vault(self, vault_addr, vault_token):
        """
        Connect to a Vault instance

        Builds a VaultClient using the module kwargs (dry_run / skip_tls)
        and authenticates it with the given token.

        :param vault_addr: Vault URL
        :type vault_addr: str
        :param vault_token: Vault token
        :type vault_token: str
        :return: VaultClient
        """
        self.logger.debug("Connecting to Vault instance '%s'" % vault_addr)
        vault_client = VaultClient(
            self.base_logger,
            dry=self.kwargs.dry_run,
            vault_addr=vault_addr,
            skip_tls=self.kwargs.skip_tls
        )
        vault_client.authenticate(vault_token)
        return vault_client
    def initialize_subparser(self, subparsers):
        """
        Add the subparser of this specific module to the list of all subparsers

        The module name is derived from the class name ("VaultManagerLDAP"
        -> "ldap") and stored via set_defaults so the dispatcher can route
        parsed args back to this module.

        :param subparsers: list of all subparsers
        :type subparsers: argparse.ArgumentParser.add_subparsers()
        :return:
        """
        self.logger.debug("Initializing subparser")
        self.module_name = \
            self.__class__.__name__.replace("VaultManager", "").lower()
        self.subparser = \
            subparsers.add_parser(self.module_name,
                                  help=self.module_name + ' management')
        self.subparser.add_argument(
            "--list-groups", action='store_true', help="List LDAP groups"
        )
        self.subparser.add_argument(
            "--create-policies", action='store_true',
            help="Create policies from LDAP groups and users"
        )
        self.subparser.add_argument(
            "--manage-ldap-groups", nargs='?', metavar="LDAP_mount_point",
            help="""Create LDAP groups in Vault with associated
            policies at specified mount point"""
        )
        self.subparser.add_argument(
            "--manage-ldap-users", nargs='?', metavar="LDAP_mount_point",
            help="""Create LDAP users in Vault with associated
            policies and groups at specified mount point"""
        )
        self.subparser.add_argument(
            "--create-groups-secrets", nargs='?',
            metavar="groups_secrets_folder",
            help="Create a folder for each group in <groups_secrets_folder>"
        )
        self.subparser.add_argument(
            "--create-users-secrets", nargs='?',
            metavar="users_secrets_folder",
            help="Create a folder for each user in <users_secrets_folder>"
        )
        self.subparser.set_defaults(module_name=self.module_name)
    def get_subparser(self):
        """
        Module subparser getter

        Only valid after initialize_subparser() has been called.

        :return: argparse.ArgumentParser.add_subparsers().add_parser()
        """
        return self.subparser
def check_args_integrity(self):
"""
Checking provided arguments integrity
"""
self.logger.debug("Checking arguments integrity")
args_false_count = [self.kwargs.create_policies,
self.kwargs.manage_ldap_groups,
self.kwargs.manage_ldap_users,
self.kwargs.list_groups,
self.kwargs.create_groups_secrets,
self.kwargs.create_users_secrets].count(False)
args_none_count = [self.kwargs.create_policies,
self.kwargs.manage_ldap_groups,
self.kwargs.manage_ldap_users,
self.kwargs.list_groups,
self.kwargs.create_groups_secrets,
self.kwargs.create_users_secrets].count(None)
no_args_count = args_false_count + args_none_count
if no_args_count in [6, 7]:
self.logger.critical("you must specify a command")
return False
return True
def read_configuration(self):
"""
Read the policies configuration file
"""
self.logger.debug("Reading configuration")
with open(os.path.join(self.policies_folder, "policies.yml"),
'r') as fd:
try:
self.conf = yaml.safe_load(fd)
except yaml.YAMLError as e:
self.logger.critical("Impossible to load conf file: " + str(e))
return False
self.logger.debug("Read conf: " + str(self.conf))
return True
def read_ldap_configuration(self):
"""
Read the LDAP configuration file
"""
self.logger.debug("Reading LDAP configuration file")
with open(os.path.join(self.kwargs.vault_config, "ldap.yml"),
'r') as fd:
try:
self.ldap_conf = yaml.safe_load(fd)
except yaml.YAMLError as e:
self.logger.critical("Impossible to load LDAP conf file: %s" %
str(e))
return False
self.logger.debug("Read LDAP conf: " + str(self.conf))
return True
def get_ldap_data(self):
"""
Fetch users and groups from LDAP
"""
self.logger.info("Reading LDAP data")
# base_logger, server, user, password, group_dn, user_dn
try:
if re.search("^VAULT{{.*}}$", self.ldap_conf["ldap"]["password"]):
ldap_password = self.vault_client.read_string_with_secret(
self.ldap_conf["ldap"]["password"]
)
elif re.search("^ENV{{.*}}$", self.ldap_conf["ldap"]["password"]):
ldap_password = self.vault_client.read_string_with_env(
self.ldap_conf["ldap"]["password"]
)
else:
ldap_password = self.ldap_conf["ldap"]["password"]
except TypeError as e:
raise Exception("LDAP password does not exists in env at %s" %
str(self.ldap_conf["ldap"]["password"]))
ldap_reader = LDAPReader(self.base_logger,
self.ldap_conf["ldap"]["server"],
self.ldap_conf["ldap"]["username"],
ldap_password,
self.ldap_conf["ldap"]["kubernetes_group_dn"],
self.ldap_conf["ldap"]["group_dn"],
self.ldap_conf["ldap"]["user_dn"])
if not ldap_reader.connect_to_ldap():
return False
self.ldap_users = ldap_reader.get_all_users(
ldap_reader.get_all_groups())
self.ldap_kubernetes_groups = ldap_reader.get_kubernetes_groups()
self.logger.debug("Users found: " + str(self.ldap_users))
ldap_reader.disconnect_from_ldap()
return True
def create_groups_policies(self):
"""
Create a policy for each group
"""
self.logger.info("Creating groups policies")
ldap_groups = list(sorted(set(
[group for user in self.ldap_users for group in
self.ldap_users[user]])))
for read_group in self.conf["groups"]["groups_to_add"]:
if read_group not in ldap_groups:
self.logger.warning("Group " + read_group +
" in conf file 't been found in LDAP "
"groups. Default conf file. "
"The default group policy will be created "
"anyway.")
with open(
os.path.join(
self.policies_folder,
self.conf["general"]["group"]["default_policy"]
), 'r') as fd:
default_policy = fd.read()
for group in self.conf["groups"]["groups_to_add"]:
policy_file = os.path.join(self.group_policies_folder,
group + ".hcl")
self.group_policies_to_create.append(policy_file)
if os.path.isfile(policy_file):
self.logger.info(
"Policy for group " + group +
" already exists and will not be overwritten"
)
else:
with open(policy_file, 'w+') as fd:
fd.write(default_policy.replace("{{GROUP_NAME}}", group))
self.logger.info("Default policy for " + group + " written")
    def create_users_policies(self):
        """
        Create policies for each LDAP user

        Only users belonging to at least one configured group get a policy.
        Existing policy files are preserved; new ones are rendered from the
        default user policy by substituting {{USER_NAME}}.
        """
        self.logger.info("Creating user policies")
        with open(os.path.join(self.policies_folder,
                               self.conf["general"]["user"]["default_policy"]),
                  'r') as fd:
            default_policy = fd.read()
        for user in self.ldap_users:
            # Skip users with no membership in any managed group.
            if len(set(self.conf["groups"]["groups_to_add"]).intersection(
                    self.ldap_users[user])):
                policy_file = os.path.join(self.user_policies_folder,
                                           user + ".hcl")
                self.user_policies_to_create.append(policy_file)
                if os.path.isfile(policy_file):
                    self.logger.info(
                        "Policy for user " + user +
                        " already exists and will not be overwritten")
                else:
                    with open(policy_file, 'w+') as fd:
                        fd.write(default_policy.replace("{{USER_NAME}}", user))
                    self.logger.info(
                        "Policy for user " + user + " created")
def create_kubernetes_policies(self):
"""
Create policies to allow kubernetes service-accounts to read secrets
"""
self.logger.debug("creating kubernetes policies for service_accounts")
with open(os.path.join(
self.policies_folder,
self.conf["general"]["kubernetes"]["default_policy"]),
'r') as fd:
default_policy = fd.read()
template = Template(default_policy)
for env in ["qa", "preprod", "prod"]:
for group in self.ldap_kubernetes_groups:
policy_file = os.path.join(self.kubernetes_policies_folder, env,
group + ".hcl")
self.kubernetes_policies_to_create.append(policy_file)
if os.path.isfile(policy_file):
self.logger.info(
"Policy for kubernetes group " + group + " in env " +
env + " already exists and will not be overwritten")
else:
with open(policy_file, 'w+') as fd:
fd.write(
template.render(GROUP=group, ENV=env))
self.logger.info(
"Policy for kubernetes group " +
group + "in env " + env + " created")
def deleting_previous_policies(self):
"""
Deleting policies of non existing LDAP users
"""
self.logger.debug("Deleting policies of previously existing LDAP users")
for file in os.listdir(self.group_policies_folder):
policy_path = os.path.join(self.group_policies_folder, file)
if policy_path not in self.group_policies_to_create:
self.logger.info("Deleting group policy: " + policy_path)
os.remove(policy_path)
for file in os.listdir(self.user_policies_folder):
policy_path = os.path.join(self.user_policies_folder, file)
if policy_path not in self.user_policies_to_create:
self.logger.info("Deleting user policy: " + policy_path)
os.remove(policy_path)
def ldap_list_groups(self):
"""
Method running the list-groups function of LDAP module
Display LDAP groups
"""
self.logger.debug("LDAP list-groups starting")
self.logger.debug("Displaying LDAP groups")
groups = []
for user in self.ldap_users:
for group in self.ldap_users[user]:
if group not in groups:
groups.append(group)
self.logger.info(str(sorted(groups)))
    def ldap_create_policies(self):
        """
        Method running the create-policies function of LDAP module

        Orchestrates the full policy refresh: group, user and kubernetes
        policy creation, then removal of policies for entities that no
        longer exist in LDAP.
        """
        self.logger.debug("LDAP create-policies starting")
        self.logger.info("Creating LDAP policies")
        self.create_groups_policies()
        self.create_users_policies()
        self.create_kubernetes_policies()
        self.deleting_previous_policies()
    def ldap_manage_ldap_groups(self):
        """
        Method running the manage-ldap-groups function of LDAP module
        Manage groups in Vault LDAP configuration

        Writes each configured group with its policy to Vault, then deletes
        any Vault LDAP group that is no longer configured.
        """
        self.logger.debug("LDAP manage-ldap-groups starting")
        self.logger.info("Managing groups in Vault LDAP '%s' config" %
                         self.kwargs.manage_ldap_groups)
        self.logger.debug("Managing groups to Vault LDAP configuration")
        raw_vault_ldap_groups = self.vault_client.list('/auth/ldap/groups')
        existing_groups = []
        if len(raw_vault_ldap_groups):
            existing_groups = raw_vault_ldap_groups["keys"]
        for group in self.conf["groups"]["groups_to_add"]:
            # Anything left in existing_groups after this loop is stale
            # and gets deleted below.
            if group in existing_groups:
                existing_groups.remove(group)
            policies = ["group_" + group + "_policy"]
            if "root" in self.conf["general"]["group"] and \
                    group in self.conf["general"]["group"]["root"]:
                policies.append("root")
            self.logger.info("Adding polices %s to group %s" %
                             (str(policies), group))
            self.vault_client.write(
                "/auth/ldap/groups/" + group,
                {
                    # NOTE(review): separator="" joins policy names with no
                    # delimiter — confirm utils.list_to_string handles this
                    # as intended for multi-policy groups.
                    "policies": utils.list_to_string(
                        self.logger, policies, separator=""
                    )
                }
            )
        self.logger.debug("Removing groups %s from Vault LDAP conf" %
                          str(existing_groups))
        for group in existing_groups:
            self.logger.info("Removing group %s from Vault LDAP conf" % group)
            self.vault_client.delete('/auth/ldap/groups/' + group)
    def ldap_manage_ldap_users(self):
        """
        Method running the manage-ldap-users function of LDAP module
        Manage users in Vault LDAP configuration

        Writes each LDAP user belonging to at least one managed group to
        Vault with their policy and groups, then deletes Vault LDAP users
        that no longer qualify.
        """
        self.logger.debug("LDAP manage-ldap-users starting")
        self.logger.info("Managing users in Vault LDAP '%s' config" %
                         self.kwargs.manage_ldap_users)
        self.logger.debug("Managing users to Vault LDAP configuration")
        raw_vault_ldap_users = self.vault_client.list('/auth/ldap/users')
        self.logger.debug("Users found: " + str(raw_vault_ldap_users))
        existing_users = []
        if len(raw_vault_ldap_users):
            existing_users = raw_vault_ldap_users["keys"]
        for user in self.ldap_users:
            groups_of_user = list(
                set(self.conf["groups"]["groups_to_add"]).intersection(
                    self.ldap_users[user]))
            if not len(groups_of_user):
                continue
            # Users still in existing_users after this loop are stale.
            if user in existing_users:
                existing_users.remove(user)
            policies = ["user_" + user + "_policy"]
            # NOTE(review): the membership test checks "root" under
            # conf["general"]["group"] but then indexes
            # conf["general"]["user"]["root"] — confirm this asymmetry is
            # intentional (compare ldap_manage_ldap_groups).
            if "root" in self.conf["general"]["group"] and \
                    user in self.conf["general"]["user"]["root"]:
                policies.append("root")
            self.logger.info("Adding polices %s to user %s" %
                             (str(policies), user))
            self.logger.info("Adding groups %s to user %s" %
                             (str(groups_of_user), user))
            self.vault_client.write(
                "/auth/ldap/users/" + user,
                {
                    "policies": utils.list_to_string(self.logger, policies,
                                                     separator=""),
                    "groups": utils.list_to_string(self.logger, groups_of_user,
                                                   separator="")
                }
            )
        self.logger.debug("Removing users %s from Vault LDAP conf" %
                          str(existing_users))
        for user in existing_users:
            self.logger.info("Removing user %s from Vault LDAP conf" % user)
            self.vault_client.delete('/auth/ldap/users/' + user)
        self.logger.info("Creating k8s secrets paths for each user")
        self.create_kubernetes_policies()
def find_ldap_group(self, user, group_regex):
"""
Find a group matching a regex
"""
ft = []
for group in self.ldap_users[user]:
match = re.match(group_regex, group)
if match:
ft.extend([g for g in match.groups() if g is not None])
if len(ft) == 0:
return ""
return ",".join(ft)
def ldap_create_groups_secrets(self):
"""
Method running the create-groups-secrets function of LDAP module
Create a secret folder for each LDAP group under specified path
"""
self.logger.debug("LDAP create-groups-secrets starting")
| |
Session Error", e)
return result, exceptions
    def stop_session(self, session_id: int = None) -> bool:
        """Stop a session (default: the current one) via gRPC.

        :param session_id: id of the session to stop; falls back to the
            joined session when falsy
        :return: True when the daemon reports success, False otherwise
            (including on RPC failure, which is shown in an error dialog)
        """
        if not session_id:
            session_id = self.session.id
        result = False
        try:
            response = self.client.stop_session(session_id)
            logging.info("stopped session(%s), result: %s", session_id, response)
            result = response.result
        except grpc.RpcError as e:
            self.app.show_grpc_exception("Stop Session Error", e)
        return result
def show_mobility_players(self) -> None:
for node in self.session.nodes.values():
if not nutils.is_mobility(node):
continue
if node.mobility_config:
mobility_player = MobilityPlayer(self.app, node)
self.mobility_players[node.id] = mobility_player
mobility_player.show()
    def set_metadata(self) -> None:
        """Serialize GUI-only state (canvas layout, drawn shapes, customized
        edges, hidden nodes) to JSON strings and store it as session
        metadata on the daemon."""
        # create canvas data
        canvas_config = self.app.manager.get_metadata()
        canvas_config = json.dumps(canvas_config)
        # create shapes data
        shapes = []
        for canvas in self.app.manager.all():
            for shape in canvas.shapes.values():
                shapes.append(shape.metadata())
        shapes = json.dumps(shapes)
        # create edges config; only edges the user customized are persisted
        edges_config = []
        for edge in self.links.values():
            if not edge.is_customized():
                continue
            edge_config = dict(token=edge.token, width=edge.width, color=edge.color)
            edges_config.append(edge_config)
        edges_config = json.dumps(edges_config)
        # create hidden metadata
        hidden = [x.core_node.id for x in self.canvas_nodes.values() if x.hidden]
        hidden = json.dumps(hidden)
        # save metadata
        metadata = dict(
            canvas=canvas_config, shapes=shapes, edges=edges_config, hidden=hidden
        )
        response = self.client.set_session_metadata(self.session.id, metadata)
        logging.debug("set session metadata %s, result: %s", metadata, response)
    def launch_terminal(self, node_id: int) -> None:
        """Launch the user's configured terminal emulator attached to the
        given node's shell, in the background.

        Shows an error dialog when no terminal is configured or the RPC
        fails.
        """
        try:
            terminal = self.app.guiconfig.preferences.terminal
            if not terminal:
                messagebox.showerror(
                    "Terminal Error",
                    "No terminal set, please set within the preferences menu",
                    parent=self.app,
                )
                return
            response = self.client.get_node_terminal(self.session.id, node_id)
            # NOTE(review): command is built by string interpolation and run
            # through os.system (shell). Inputs come from local preferences
            # and the daemon, but confirm they cannot contain shell
            # metacharacters.
            cmd = f"{terminal} {response.terminal} &"
            logging.info("launching terminal %s", cmd)
            os.system(cmd)
        except grpc.RpcError as e:
            self.app.show_grpc_exception("Node Terminal Error", e)
def get_xml_dir(self) -> str:
return str(self.session.file.parent) if self.session.file else str(XMLS_PATH)
    def save_xml(self, file_path: str = None) -> None:
        """
        Save core session as to an xml file

        :param file_path: destination path; defaults to the session's
            current file (logs an error and returns when neither is set)
        """
        if not file_path and not self.session.file:
            logging.error("trying to save xml for session with no file")
            return
        if not file_path:
            file_path = str(self.session.file)
        try:
            # Outside runtime the daemon does not yet know the GUI state,
            # so push it before asking for the XML snapshot.
            if not self.is_runtime():
                logging.debug("Send session data to the daemon")
                self.send_data()
            response = self.client.save_xml(self.session.id, file_path)
            logging.info("saved xml file %s, result: %s", file_path, response)
        except grpc.RpcError as e:
            self.app.show_grpc_exception("Save XML Error", e)
    def open_xml(self, file_path: str) -> None:
        """
        Open core xml

        Loads the file on the daemon and joins the session it creates.
        """
        try:
            # NOTE(review): uses self._client directly where sibling methods
            # use self.client — confirm this bypass is intentional (no
            # session is joined yet at this point).
            response = self._client.open_xml(file_path)
            logging.info("open xml file %s, response: %s", file_path, response)
            self.join_session(response.session_id)
        except grpc.RpcError as e:
            self.app.show_grpc_exception("Open XML Error", e)
    def get_node_service(self, node_id: int, service_name: str) -> NodeServiceData:
        """Fetch a node's service definition from the daemon and convert it
        from protobuf to NodeServiceData."""
        response = self.client.get_node_service(self.session.id, node_id, service_name)
        logging.debug(
            "get node(%s) %s service, response: %s", node_id, service_name, response
        )
        return NodeServiceData.from_proto(response.service)
    def set_node_service(
        self,
        node_id: int,
        service_name: str,
        dirs: List[str],
        files: List[str],
        startups: List[str],
        validations: List[str],
        shutdowns: List[str],
    ) -> NodeServiceData:
        """Configure a node's service on the daemon and return the service
        as the daemon now sees it.

        :param dirs: per-service directories
        :param files: config file names
        :param startups: startup commands
        :param validations: validation commands
        :param shutdowns: shutdown commands
        """
        response = self.client.set_node_service(
            self.session.id,
            node_id,
            service_name,
            directories=dirs,
            files=files,
            startup=startups,
            validate=validations,
            shutdown=shutdowns,
        )
        logging.info(
            "Set %s service for node(%s), files: %s, Startup: %s, "
            "Validation: %s, Shutdown: %s, Result: %s",
            service_name,
            node_id,
            files,
            startups,
            validations,
            shutdowns,
            response,
        )
        # Re-fetch so the caller receives the daemon's post-update view.
        response = self.client.get_node_service(self.session.id, node_id, service_name)
        return NodeServiceData.from_proto(response.service)
    def get_node_service_file(
        self, node_id: int, service_name: str, file_name: str
    ) -> str:
        """Return the contents of one service file for a node, as stored on
        the daemon."""
        response = self.client.get_node_service_file(
            self.session.id, node_id, service_name, file_name
        )
        logging.debug(
            "get service file for node(%s), service: %s, file: %s, result: %s",
            node_id,
            service_name,
            file_name,
            response,
        )
        return response.data
    def set_node_service_file(
        self, node_id: int, service_name: str, file_name: str, data: str
    ) -> None:
        """Upload custom contents for one service file of a node to the
        daemon."""
        response = self.client.set_node_service_file(
            self.session.id, node_id, service_name, file_name, data
        )
        logging.info(
            "set node(%s) service file, service: %s, file: %s, data: %s, result: %s",
            node_id,
            service_name,
            file_name,
            data,
            response,
        )
    def create_nodes_and_links(self) -> None:
        """
        create nodes and links that have not been created yet

        Puts the session into DEFINITION state, adds every node, then adds
        links — symmetric links first, asymmetric halves afterwards (they
        reference links that must already exist).
        """
        self.client.set_session_state(self.session.id, SessionState.DEFINITION.value)
        for node in self.session.nodes.values():
            response = self.client.add_node(
                self.session.id, node.to_proto(), source=GUI_SOURCE
            )
            logging.debug("created node: %s", response)
        asymmetric_links = []
        for edge in self.links.values():
            self.add_link(edge.link)
            if edge.asymmetric_link:
                asymmetric_links.append(edge.asymmetric_link)
        for link in asymmetric_links:
            self.add_link(link)
    def send_data(self) -> None:
        """
        Send to daemon all session info, but don't start the session

        Ordering matters: servers and nodes/links must exist before the
        per-node configurations (WLAN, mobility, services, EMANE) can be
        applied; metadata is saved last.
        """
        self.send_servers()
        self.create_nodes_and_links()
        for config_proto in self.get_wlan_configs_proto():
            self.client.set_wlan_config(
                self.session.id, config_proto.node_id, config_proto.config
            )
        for config_proto in self.get_mobility_configs_proto():
            self.client.set_mobility_config(
                self.session.id, config_proto.node_id, config_proto.config
            )
        for config_proto in self.get_service_configs_proto():
            self.client.set_node_service(
                self.session.id,
                config_proto.node_id,
                config_proto.service,
                config_proto.files,
                config_proto.directories,
                config_proto.startup,
                config_proto.validate,
                config_proto.shutdown,
            )
        for config_proto in self.get_service_file_configs_proto():
            self.client.set_node_service_file(
                self.session.id,
                config_proto.node_id,
                config_proto.service,
                config_proto.file,
                config_proto.data,
            )
        for hook in self.session.hooks.values():
            self.client.add_hook(
                self.session.id, hook.state.value, hook.file, hook.data
            )
        for config_proto in self.get_emane_model_configs_proto():
            self.client.set_emane_model_config(
                self.session.id,
                config_proto.node_id,
                config_proto.model,
                config_proto.config,
                config_proto.iface_id,
            )
        config = to_dict(self.session.emane_config)
        self.client.set_emane_config(self.session.id, config)
        location = self.session.location
        self.client.set_session_location(
            self.session.id,
            location.x,
            location.y,
            location.z,
            location.lat,
            location.lon,
            location.alt,
            location.scale,
        )
        self.set_metadata()
def close(self) -> None:
"""
Clean ups when done using grpc
"""
logging.debug("close grpc")
self.client.close()
def next_node_id(self) -> int:
"""
Get the next usable node id.
"""
i = 1
while True:
if i not in self.session.nodes:
break
i += 1
return i
    def create_node(
        self, x: float, y: float, node_type: NodeType, model: str
    ) -> Optional[Node]:
        """
        Add node, with information filled in, to grpc manager

        :param x: canvas x coordinate
        :param y: canvas y coordinate
        :param node_type: type of node to create
        :param model: node model (used for naming and default services)
        :return: the created Node, or None when EMANE is requested but no
            EMANE models are installed (an install dialog is shown instead)
        """
        node_id = self.next_node_id()
        position = Position(x=x, y=y)
        image = None
        if nutils.has_image(node_type):
            # Default container image for image-based node types.
            image = "ubuntu:latest"
        emane = None
        if node_type == NodeType.EMANE:
            if not self.session.emane_models:
                dialog = EmaneInstallDialog(self.app)
                dialog.show()
                return
            # First available EMANE model is used as the default.
            emane = self.session.emane_models[0]
            name = f"emane{node_id}"
        elif node_type == NodeType.WIRELESS_LAN:
            name = f"wlan{node_id}"
        elif node_type in [NodeType.RJ45, NodeType.TUNNEL]:
            name = "unassigned"
        else:
            name = f"n{node_id}"
        node = Node(
            id=node_id,
            type=node_type,
            name=name,
            model=model,
            position=position,
            image=image,
            emane=emane,
        )
        if nutils.is_custom(node):
            services = nutils.get_custom_services(self.app.guiconfig, model)
            node.services = set(services)
        # assign default services to CORE node
        else:
            services = self.session.default_services.get(model)
            if services:
                node.services = services.copy()
        logging.info(
            "add node(%s) to session(%s), coordinates(%s, %s)",
            node.name,
            self.session.id,
            x,
            y,
        )
        self.session.nodes[node.id] = node
        return node
def deleted_canvas_nodes(self, canvas_nodes: List[CanvasNode]) -> None:
"""
remove the nodes selected by the user and anything related to that node
such as link, configurations, interfaces
"""
for canvas_node in canvas_nodes:
node = canvas_node.core_node
del self.canvas_nodes[node.id]
del self.session.nodes[node.id]
def deleted_canvas_edges(self, edges: Iterable[CanvasEdge]) -> None:
links = []
for edge in edges:
del self.links[edge.token]
links.append(edge.link)
self.ifaces_manager.removed(links)
    def save_edge(self, edge: CanvasEdge) -> None:
        """Index a canvas edge by its token, and by (node id, interface id)
        for each container endpoint so interfaces map back to edges."""
        self.links[edge.token] = edge
        src_node = edge.src.core_node
        dst_node = edge.dst.core_node
        if nutils.is_container(src_node):
            src_iface_id = edge.link.iface1.id
            self.iface_to_edge[(src_node.id, src_iface_id)] = edge
        if nutils.is_container(dst_node):
            dst_iface_id = edge.link.iface2.id
            self.iface_to_edge[(dst_node.id, dst_iface_id)] = edge
def get_wlan_configs_proto(self) -> List[wlan_pb2.WlanConfig]:
configs = []
for node in self.session.nodes.values():
if node.type != NodeType.WIRELESS_LAN:
continue
if not node.wlan_config:
continue
config = ConfigOption.to_dict(node.wlan_config)
wlan_config = wlan_pb2.WlanConfig(node_id=node.id, config=config)
configs.append(wlan_config)
return configs
def get_mobility_configs_proto(self) -> List[mobility_pb2.MobilityConfig]:
configs = []
for node in self.session.nodes.values():
if not nutils.is_mobility(node):
continue
if not node.mobility_config:
continue
config = ConfigOption.to_dict(node.mobility_config)
mobility_config = mobility_pb2.MobilityConfig(
node_id=node.id, config=config
)
configs.append(mobility_config)
return configs
    def get_emane_model_configs_proto(self) -> List[emane_pb2.EmaneModelConfig]:
        """Build EmaneModelConfig protobufs from every node's EMANE model
        configurations (keyed by (model, iface_id))."""
        configs = []
        for node in self.session.nodes.values():
            for key, config in node.emane_model_configs.items():
                model, iface_id = key
                config = ConfigOption.to_dict(config)
                # -1 is the protobuf sentinel for "no specific interface".
                if iface_id is None:
                    iface_id = -1
                config_proto = emane_pb2.EmaneModelConfig(
                    node_id=node.id, iface_id=iface_id, model=model, config=config
                )
                configs.append(config_proto)
        return configs
    def get_service_configs_proto(self) -> List[services_pb2.ServiceConfig]:
        """Build ServiceConfig protobufs for every container node with
        customized service configurations."""
        configs = []
        for node in self.session.nodes.values():
            if not nutils.is_container(node):
                continue
            if not node.service_configs:
                continue
            for name, config in node.service_configs.items():
                config_proto = services_pb2.ServiceConfig(
                    node_id=node.id,
                    service=name,
                    directories=config.dirs,
                    files=config.configs,
                    startup=config.startup,
                    validate=config.validate,
                    shutdown=config.shutdown,
                )
                configs.append(config_proto)
        return configs
def get_service_file_configs_proto(self) -> List[services_pb2.ServiceFileConfig]:
configs = []
for node in self.session.nodes.values():
if not nutils.is_container(node):
continue
if not node.service_file_configs:
continue
for service, file_configs in node.service_file_configs.items():
for file, data in file_configs.items():
config_proto = services_pb2.ServiceFileConfig(
node_id=node.id, service=service, file=file, data=data
)
configs.append(config_proto)
return configs
    def get_config_service_configs_proto(
        self
    ) -> List[configservices_pb2.ConfigServiceConfig]:
        """Build ConfigServiceConfig protobufs for every container node with
        customized config-service templates/config."""
        config_service_protos = []
        for node in self.session.nodes.values():
            if not nutils.is_container(node):
                continue
            if not node.config_service_configs:
                continue
            for name, service_config in node.config_service_configs.items():
                config_proto = configservices_pb2.ConfigServiceConfig(
                    node_id=node.id,
                    name=name,
                    templates=service_config.templates,
                    config=service_config.config,
                )
                config_service_protos.append(config_proto)
        return config_service_protos
    def run(self, node_id: int) -> str:
        """Run the currently selected observer command on a node and return
        its output."""
        logging.info("running node(%s) cmd: %s", node_id, self.observer)
        return self.client.node_command(self.session.id, node_id, self.observer).output
    def get_wlan_config(self, node_id: int) -> Dict[str, ConfigOption]:
        """Fetch a node's WLAN configuration from the daemon as a dict of
        ConfigOption keyed by option name."""
        response = self.client.get_wlan_config(self.session.id, node_id)
        config = response.config
        logging.debug(
            "get wlan configuration from node %s, result configuration: %s",
            node_id,
            config,
        )
        return ConfigOption.from_dict(config)
    def get_mobility_config(self, node_id: int) -> Dict[str, ConfigOption]:
        """Fetch a node's mobility configuration from the daemon as a dict
        of ConfigOption keyed by option name."""
        response = self.client.get_mobility_config(self.session.id, node_id)
        config = response.config
        logging.debug(
            "get mobility config from node %s, result configuration: %s",
            node_id,
            config,
        )
        return ConfigOption.from_dict(config)
    def get_emane_model_config(
        self, node_id: int, model: str, iface_id: int = None
    ) -> Dict[str, ConfigOption]:
        """Fetch a node's EMANE model configuration from the daemon.

        :param iface_id: interface the config applies to; None maps to the
            protobuf sentinel -1 ("no specific interface")
        """
        if iface_id is None:
            iface_id = -1
        response = self.client.get_emane_model_config(
            self.session.id, node_id, model, iface_id
        )
        config = response.config
        logging.debug(
            "get emane model config: node id: %s, EMANE model: %s, "
            "interface: %s, config: %s",
            node_id,
            model,
            iface_id,
            config,
        )
        return ConfigOption.from_dict(config)
    def execute_script(self, script) -> None:
        """Run a python script on the daemon; join the session it creates,
        if any (-1 signals that no session was created)."""
        response = self.client.execute_script(script)
        logging.info("execute python script %s", response)
        if response.session_id != -1:
            self.join_session(response.session_id)
def add_link(self, link: | |
from types import FunctionType
import unittest
from unittest import mock
import uuid
from django.db.utils import ProgrammingError
from django.test import RequestFactory
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from tethys_apps.exceptions import TethysAppSettingDoesNotExist, TethysAppSettingNotAssigned
import tethys_apps.base.app_base as tethys_app_base
from tethys_apps.base.permissions import Permission, PermissionGroup
from ... import UserFactory
class TethysAppChild(tethys_app_base.TethysAppBase):
    """
    Tethys app class for Test App.

    Minimal concrete TethysAppBase subclass used as a fixture by the
    tests below.
    """
    # NOTE(review): '<NAME>' looks like redaction residue from the original
    # fixture — confirm the intended display name before relying on it.
    name = '<NAME>'
    index = 'home'
    icon = 'test_app/images/icon.gif'
    package = 'test_app'
    root_url = 'test-app'
    color = '#2c3e50'
    description = 'Place a brief description of your app here.'
class TestTethysBase(unittest.TestCase):
    def setUp(self):
        # No per-test fixtures required for this test case.
        pass
    def tearDown(self):
        # Nothing to clean up; setUp creates no state.
        pass
    def test_package_namespace(self):
        """The abstract base must raise NotImplementedError for package_namespace."""
        base = tethys_app_base.TethysBase()
        # assertRaises needs a callable, not a property
        def get_package_namespace():
            return base.package_namespace
        self.assertRaises(NotImplementedError, get_package_namespace)
    @mock.patch('tethys_cli.cli_colors.write_warning')
    def test_index_namespace_deprecation(self, mock_warning):
        """Using the deprecated 'namespace:name' index form should emit one
        deprecation warning at instantiation time."""
        class TethysAppSubChild(TethysAppChild):
            index = 'namespace:home'
        TethysAppSubChild()
        mock_warning.assert_called_once()
    @mock.patch('tethys_apps.base.controller.register_controllers')
    def test_register_url_maps(self, mock_rc):
        """register_url_maps should delegate to register_controllers with the
        app's root_url, default controller modules, and index."""
        app = tethys_app_base.TethysAppBase()
        app.package = 'package'
        app.root_url = 'root_url'
        app.index = 'index'
        app.register_url_maps()
        kwargs = mock_rc.call_args_list[0][1]
        modules = [f'tethysapp.package.{name}' for name in tethys_app_base.DEFAULT_CONTROLLER_MODULES]
        self.assertEqual(app.root_url, kwargs['root_url'])
        for m in kwargs['modules']:
            self.assertIn(m, modules)
        self.assertIn(app.index, kwargs['index'])
    @mock.patch('tethys_cli.cli_colors.write_warning')
    @mock.patch('tethys_apps.base.controller.register_controllers')
    def test_register_url_maps_deprecation(self, mock_rc, mock_warning):
        """Defining the deprecated url_maps() hook should still be honored
        (its value returned) while emitting a deprecation warning."""
        app = tethys_app_base.TethysAppBase()
        app.package = 'package'
        app.root_url = 'root_url'
        app.index = 'index'
        app.url_maps = mock.MagicMock(return_value=['test'])
        # registered_url_maps is accessed as an attribute/property here —
        # contrast with register_url_maps() above.
        result = app.registered_url_maps
        self.assertEqual(app.url_maps(), result)
        mock_rc.assert_called_once()
        mock_warning.assert_called_once()
    @mock.patch('tethys_apps.base.app_base.re_path')
    @mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
    def test_url_patterns(self, mock_tbm, mock_url):
        """url_patterns should produce named http and websocket patterns
        keyed by root_url; http controllers resolve to functions, websocket
        controllers to (consumer) classes."""
        app = tethys_app_base.TethysAppBase()
        app.root_url = 'foo'
        url_map = mock.MagicMock(controller='test_app.controllers.home', url='test-url', protocol='http')
        url_map.name = 'home'
        url_map_ws = mock.MagicMock(controller='test_app.controllers.TestWS', url='test-url-ws', protocol='websocket')
        url_map_ws.name = 'ws'
        app.register_url_maps = mock.MagicMock(return_value=[url_map, url_map_ws])
        mock_tbm.return_value = mock.MagicMock(url_maps='test-app')
        # Execute
        result = app.url_patterns
        # Check url call at django_url = url...
        rts_call_args = mock_url.call_args_list
        self.assertEqual('test-url', rts_call_args[0][0][0])
        self.assertEqual('test-url-ws', rts_call_args[1][0][0])
        self.assertIn('name', rts_call_args[0][1])
        self.assertIn('name', rts_call_args[1][1])
        self.assertEqual('home', rts_call_args[0][1]['name'])
        self.assertEqual('ws', rts_call_args[1][1]['name'])
        self.assertIn('foo', result['http'])
        self.assertIn('foo', result['websocket'])
        # http handler imported as a function, websocket consumer as a class
        self.assertIsInstance(rts_call_args[0][0][1], FunctionType)
        self.assertIsInstance(rts_call_args[1][0][1], type)
    @mock.patch('tethys_apps.base.app_base.re_path')
    @mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
    def test_url_patterns_no_str(self, mock_tbm, mock_url):
        """A controller given as a callable (not a dotted string) should be
        passed through to re_path unchanged."""
        app = tethys_app_base.TethysAppBase()
        def test_func():
            return ''
        url_map = mock.MagicMock(controller=test_func, url='test-app', protocol='http')
        url_map.name = 'home'
        app.register_url_maps = mock.MagicMock(return_value=[url_map])
        mock_tbm.return_value = mock.MagicMock(url_maps='test-app')
        # Execute
        app.url_patterns
        # Check url call at django_url = url...
        rts_call_args = mock_url.call_args_list
        self.assertEqual('test-app', rts_call_args[0][0][0])
        self.assertIn('name', rts_call_args[0][1])
        self.assertEqual('home', rts_call_args[0][1]['name'])
        # Same object, not a re-import.
        self.assertIs(rts_call_args[0][0][1], test_func)
    @mock.patch('tethys_apps.base.app_base.tethys_log')
    @mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
    def test_url_patterns_import_error(self, mock_tbm, mock_log):
        """A controller path whose module cannot be imported should raise
        ImportError and log a descriptive error message."""
        mock_error = mock_log.error
        app = tethys_app_base.TethysAppBase()
        url_map = mock.MagicMock(controller='1module.1function', url='test-app', protocol='http')
        url_map.name = 'home'
        app.register_url_maps = mock.MagicMock(return_value=[url_map])
        mock_tbm.return_value = mock.MagicMock(url_maps='test-app')
        # assertRaises needs a callable, not a property
        def test_url_patterns():
            return app.url_patterns
        # Check Error Message
        self.assertRaises(ImportError, test_url_patterns)
        rts_call_args = mock_error.call_args_list
        error_message = 'The following error occurred while trying to import' \
                        ' the controller function "1module.1function"'
        self.assertIn(error_message, rts_call_args[0][0][0])
@mock.patch('tethys_apps.base.app_base.tethys_log')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_url_patterns_attribute_error(self, mock_tbm, mock_log):
    """A controller path naming a missing attribute raises AttributeError and logs it."""
    app = tethys_app_base.TethysAppBase()
    url_map = mock.MagicMock(controller='test_app.controllers.home1', url='test-app', protocol='http')
    url_map.name = 'home'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps='test-app')

    # assertRaises needs a callable, not a property.
    self.assertRaises(AttributeError, lambda: app.url_patterns)

    # The logged message should identify the offending controller path.
    logged = mock_log.error.call_args_list[0][0][0]
    self.assertIn('The following error occurred while trying to access'
                  ' the controller function "test_app.controllers.home1"', logged)
@mock.patch('tethys_apps.base.app_base.re_path')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_handler_patterns(self, mock_tbm, mock_url):
    """A bokeh handler map yields an autoload.js http route and a ws websocket route."""
    app = tethys_app_base.TethysAppBase()
    app.root_url = 'test-url'
    url_map = mock.MagicMock(
        controller='test_app.controllers.home_controller',
        handler='test_app.controllers.home',
        handler_type='bokeh',
        url=''
    )
    url_map.name = 'home'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps=['test-app', ])
    # Execute
    result = app.handler_patterns
    # Verify format of return: dict keyed by protocol, then by namespace
    # (root_url 'test-url' shows up as key 'test_url').
    self.assertIn('http', result)
    self.assertIn('websocket', result)
    self.assertIn('test_url', result['http'])
    self.assertIn('test_url', result['websocket'])
    # Verify call of url for http endpoint
    http_url_call = mock_url.call_args_list[0]
    http_url_call_args = http_url_call[0]      # positional args passed to re_path()
    http_url_call_kwargs = http_url_call[1]    # keyword args passed to re_path()
    self.assertEqual(r'^autoload.js$', http_url_call_args[0])
    self.assertIsInstance(http_url_call_args[1], FunctionType)
    self.assertIn('AutoloadJsConsumer', str(http_url_call_args[1]))
    self.assertIn('name', http_url_call_kwargs)
    self.assertEqual('home_bokeh_autoload', http_url_call_kwargs['name'])
    # Verify call of url for websocket endpoint
    ws_url_call = mock_url.call_args_list[1]
    ws_url_call_args = ws_url_call[0]
    ws_url_call_kwargs = ws_url_call[1]
    self.assertEqual(r'^ws$', ws_url_call_args[0])
    self.assertIsInstance(ws_url_call_args[1], FunctionType)
    self.assertIn('WSConsumer', str(ws_url_call_args[1]))
    self.assertIn('name', ws_url_call_kwargs)
    self.assertEqual('home_bokeh_ws', ws_url_call_kwargs['name'])
@mock.patch('tethys_apps.base.app_base.WSConsumer')
@mock.patch('tethys_apps.base.app_base.AutoloadJsConsumer')
@mock.patch('tethys_apps.base.app_base.re_path')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_handler_patterns_from_function(self, mock_tbm, mock_url, mock_ajsc, mock_wsc):
    """When the handler is a callable, the consumers' as_asgi() apps are routed and
    the callable is wired into the Bokeh application's handler chain."""
    app = tethys_app_base.TethysAppBase()
    app._namespace = 'foo'
    app.root_url = 'test-url'

    def test_func(mock_doc):
        return ''

    url_map = mock.MagicMock(
        controller='test_app.controllers.home',
        handler=test_func,
        handler_type='bokeh',
        url=''
    )
    url_map.name = 'home'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps=['test-app', ])
    app.handler_patterns
    # Verify call of url for http endpoint
    http_url_call = mock_url.call_args_list[0]
    http_url_call_args = http_url_call[0]
    http_url_call_kwargs = http_url_call[1]
    self.assertEqual(r'^autoload.js$', http_url_call_args[0])
    self.assertEqual(mock_ajsc.as_asgi(), http_url_call_args[1])
    self.assertIn('name', http_url_call_kwargs)
    self.assertEqual('home_bokeh_autoload', http_url_call_kwargs['name'])
    mock_ajsc.as_asgi.assert_called()
    # Verify call of url for websocket endpoint
    ws_url_call = mock_url.call_args_list[1]
    ws_url_call_args = ws_url_call[0]
    ws_url_call_kwargs = ws_url_call[1]
    self.assertEqual(r'^ws$', ws_url_call_args[0])
    self.assertEqual(mock_wsc.as_asgi(), ws_url_call_args[1])
    self.assertIn('name', ws_url_call_kwargs)
    self.assertEqual('home_bokeh_ws', ws_url_call_kwargs['name'])
    mock_wsc.as_asgi.assert_called()
    # The original callable must end up as the handler function inside the
    # app_context passed to the websocket consumer.
    self.assertIs(
        test_func,
        mock_wsc.as_asgi.call_args_list[0][1]['app_context']._application._handlers[0]._func
    )
@mock.patch('tethys_apps.base.app_base.re_path')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_handler_patterns_url_basename(self, mock_tbm, mock_url):
    """A non-empty url_map.url ('basename/') prefixes both route patterns, and
    the map name prefixes the generated route names."""
    app = tethys_app_base.TethysAppBase()
    app._namespace = 'foo'
    app.root_url = 'test-url'

    def test_func(mock_doc):
        return ''

    url_map = mock.MagicMock(
        controller='test_app.controllers.home',
        handler=test_func,
        handler_type='bokeh'
    )
    url_map.name = 'basename'
    url_map.url = 'basename/'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps=['basename/', ])
    app.handler_patterns
    # Verify call of url for http endpoint
    http_url_call = mock_url.call_args_list[0]
    http_url_call_args = http_url_call[0]
    http_url_call_kwargs = http_url_call[1]
    self.assertEqual(r'^basename/autoload.js$', http_url_call_args[0])
    self.assertIn('name', http_url_call_kwargs)
    self.assertEqual('basename_bokeh_autoload', http_url_call_kwargs['name'])
    # Verify call of url for websocket endpoint
    ws_url_call = mock_url.call_args_list[1]
    ws_url_call_args = ws_url_call[0]
    ws_url_call_kwargs = ws_url_call[1]
    self.assertEqual(r'^basename/ws$', ws_url_call_args[0])
    self.assertIn('name', ws_url_call_kwargs)
    self.assertEqual('basename_bokeh_ws', ws_url_call_kwargs['name'])
@mock.patch('tethys_apps.base.app_base.tethys_log')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_handler_patterns_import_error(self, mock_tbm, mock_log):
    """A handler path naming a non-importable module raises ImportError and logs it."""
    app = tethys_app_base.TethysAppBase()
    url_map = mock.MagicMock(controller='test_app.controllers.home',
                             handler='1module.1function', handler_type='bokeh', url='')
    url_map.name = 'home'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps=['test-app', ])

    # assertRaises needs a callable, not a property.
    self.assertRaises(ImportError, lambda: app.handler_patterns)

    # The logged message should identify the offending handler path.
    logged = mock_log.error.call_args_list[0][0][0]
    self.assertIn('The following error occurred while trying to import '
                  'the handler function "1module.1function"', logged)
@mock.patch('tethys_apps.base.app_base.tethys_log')
@mock.patch('tethys_apps.base.app_base.TethysBaseMixin')
def test_handler_patterns_attribute_error(self, mock_tbm, mock_log):
    """A handler path naming a missing attribute raises AttributeError and logs it."""
    app = tethys_app_base.TethysAppBase()
    url_map = mock.MagicMock(controller='test_app.controllers.home',
                             handler='test_app.controllers.home_handler1', handler_type='bokeh', url='')
    url_map.name = 'home'
    app.register_url_maps = mock.MagicMock(return_value=[url_map])
    mock_tbm.return_value = mock.MagicMock(url_maps='test-app')

    # assertRaises needs a callable, not a property.
    self.assertRaises(AttributeError, lambda: app.handler_patterns)

    # The logged message should identify the offending handler path.
    logged = mock_log.error.call_args_list[0][0][0]
    self.assertIn('The following error occurred while trying to access '
                  'the handler function "test_app.controllers.home_handler1"', logged)
def test_sync_with_tethys_db(self):
    """The abstract base must not implement sync_with_tethys_db."""
    base = tethys_app_base.TethysBase()
    with self.assertRaises(NotImplementedError):
        base.sync_with_tethys_db()
def test_remove_from_db(self):
    """The abstract base must not implement remove_from_db."""
    base = tethys_app_base.TethysBase()
    with self.assertRaises(NotImplementedError):
        base.remove_from_db()
class TestTethysExtensionBase(unittest.TestCase):
    """Tests for TethysExtensionBase: str/repr rendering, url map registration,
    and syncing the extension with the database."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test__str__(self):
        # No name is set, so only the bare tag is rendered.
        result = tethys_app_base.TethysExtensionBase().__str__()
        self.assertEqual('<TethysApp: >', result)

    def test__repr__(self):
        # repr matches str for an unnamed extension.
        result = tethys_app_base.TethysExtensionBase().__repr__()
        self.assertEqual('<TethysApp: >', result)

    @mock.patch('tethys_apps.base.controller.register_controllers')
    def test_url_maps(self, mock_rc):
        """register_url_maps() forwards index/root_url and the default controller
        modules (prefixed with the extension package) to register_controllers."""
        ext = tethys_app_base.TethysExtensionBase()
        ext.package = 'package'
        ext.root_url = 'root_url'
        ext.index = 'index'
        ext.register_url_maps()
        kwargs = mock_rc.call_args_list[0][1]
        modules = [f'tethysext.package.{name}' for name in tethys_app_base.DEFAULT_CONTROLLER_MODULES]
        self.assertEqual(ext.root_url, kwargs['root_url'])
        for m in kwargs['modules']:
            self.assertIn(m, modules)
        self.assertIn(ext.index, kwargs['index'])

    @mock.patch('tethys_apps.models.TethysExtension')
    def test_sync_with_tethys_db(self, mock_te):
        """With no existing DB record, a new TethysExtension is created and saved."""
        mock_te.objects.filter().all.return_value = []
        tethys_app_base.TethysExtensionBase().sync_with_tethys_db()
        mock_te.assert_called_with(description='', name='', package='', root_url='')
        mock_te().save.assert_called()

    @mock.patch('django.conf.settings')
    @mock.patch('tethys_apps.models.TethysExtension')
    def test_sync_with_tethys_db_exists(self, mock_te, mock_ds):
        """With an existing record and DEBUG enabled, the record is saved twice."""
        mock_ds.DEBUG = True
        ext = tethys_app_base.TethysExtensionBase()
        ext.root_url = 'test_url'
        mock_te2 = mock.MagicMock()
        mock_te.objects.filter().all.return_value = [mock_te2]
        ext.sync_with_tethys_db()
        # Check_result
        self.assertTrue(mock_te2.save.call_count == 2)

    @mock.patch('tethys_apps.base.app_base.tethys_log')
    @mock.patch('tethys_apps.models.TethysExtension')
    def test_sync_with_tethys_db_exists_log_error(self, mock_te, mock_log):
        """Unexpected exceptions raised during sync are logged via tethys_log.error."""
        mock_error = mock_log.error
        ext = tethys_app_base.TethysExtensionBase()
        ext.root_url = 'test_url'
        mock_te.objects.filter().all.side_effect = Exception('test_error')
        ext.sync_with_tethys_db()
        # Check_result: the raised exception object itself is logged.
        rts_call_args = mock_error.call_args_list
        self.assertEqual('test_error', rts_call_args[0][0][0].args[0])

    @mock.patch('tethys_apps.base.app_base.tethys_log')
    @mock.patch('tethys_apps.models.TethysExtension')
    def test_sync_with_tethys_db_exists_progamming_error(self, mock_te, mock_log):
        """A ProgrammingError (extension table missing) results in a warning only."""
        mock_warning = mock_log.warning
        ext = tethys_app_base.TethysExtensionBase()
        ext.root_url = 'test_url'
        mock_te.objects.filter().all.side_effect = ProgrammingError('test_error')
        ext.sync_with_tethys_db()
        # Check_result
        mock_warning.assert_called_with("Unable to sync extension with database. "
                                        "tethys_apps_tethysextension table does not exist")
class TestTethysAppBase(unittest.TestCase):
def setUp(self):
    # Fresh app instance, test user, and request factory for every test.
    self.app = tethys_app_base.TethysAppBase()
    self.user = UserFactory()
    self.request_factory = RequestFactory()
    self.fake_name = 'fake_name'
def tearDown(self):
    # No per-test cleanup needed.
    pass
def test__str__(self):
    """__str__ renders '<TethysApp: >' when no name has been set."""
    self.assertEqual('<TethysApp: >', str(tethys_app_base.TethysAppBase()))
def test__repr__(self):
    """__repr__ matches __str__ for an unnamed app."""
    self.assertEqual('<TethysApp: >', repr(tethys_app_base.TethysAppBase()))
def test_custom_settings(self):
    """The base implementation of custom_settings() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.custom_settings())
def test_persistent_store_settings(self):
    """The base implementation of persistent_store_settings() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.persistent_store_settings())
def test_dataset_service_settings(self):
    """The base implementation of dataset_service_settings() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.dataset_service_settings())
def test_spatial_dataset_service_settings(self):
    """The base implementation of spatial_dataset_service_settings() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.spatial_dataset_service_settings())
def test_web_processing_service_settings(self):
    """The base implementation of web_processing_service_settings() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.web_processing_service_settings())
def test_handoff_handlers(self):
    """The base implementation of handoff_handlers() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.handoff_handlers())
def test_permissions(self):
    """The base implementation of permissions() returns None."""
    app = tethys_app_base.TethysAppBase()
    self.assertIsNone(app.permissions())
@mock.patch('guardian.shortcuts.get_perms')
@mock.patch('guardian.shortcuts.remove_perm')
@mock.patch('guardian.shortcuts.assign_perm')
@mock.patch('tethys_apps.models.TethysApp')
@mock.patch('django.contrib.auth.models.Group')
@mock.patch('django.contrib.auth.models.Permission')
def test_register_app_permissions(self, mock_dp, mock_dg, mock_ta, mock_asg, mock_rem, mock_get):
    """register_app_permissions() removes DB permissions/groups that are no longer
    declared and reconciles group permissions (remove held ones, assign missing ones)."""
    group_name = 'test_group'
    create_test_perm = Permission(name='create_test', description='test_create')
    delete_test_perm = Permission(name='delete_test', description='test_delete')
    group_perm = PermissionGroup(name=group_name, permissions=[create_test_perm, delete_test_perm])
    self.app.permissions = mock.MagicMock(return_value=[create_test_perm, group_perm])
    # Mock db_app_permissions: one DB permission whose codename is not declared.
    db_app_permission = mock.MagicMock(codename='test_code')
    mock_perm_query = mock_dp.objects.filter().filter().all
    mock_perm_query.return_value = [db_app_permission]
    # Mock Group.objects.filter: one existing DB group.
    db_group = mock.MagicMock()
    db_group.name = 'test_app_name:group'
    mock_group = mock_dg.objects.filter().all
    mock_group.return_value = [db_group]
    # Mock TethysApp.objects.all()
    db_app = mock.MagicMock(package='test_app_name')
    mock_toa = mock_ta.objects.all
    mock_toa.return_value = [db_app]
    # Mock TethysApp.objects.get()
    mock_ta_get = mock_ta.objects.get
    mock_ta_get.return_value = 'test_get'
    # Mock Group.objects.get()
    mock_group_get = mock_dg.objects.get
    mock_group_get.return_value = group_name
    # Mock get_perms(g, db_app): the group currently holds only 'create_test'.
    mock_get.return_value = ['create_test']
    # Execute
    self.app.register_app_permissions()
    # Check if db_app_permission.delete() is called (undeclared permission removed)
    db_app_permission.delete.assert_called_with()
    # Check if p.save() is called for the declared permission
    mock_dp.objects.get().save.assert_called_with()
    # Check if db_group.delete() is called
    db_group.delete.assert_called_with()
    # Check if remove_perm(p, g, db_app) is called for the currently-held perm
    mock_rem.assert_called_with('create_test', group_name, 'test_get')
    # Check if assign_perm(p, g, db_app) is called for the missing perm
    mock_asg.assert_called_with(':delete_test', group_name, 'test_get')
@mock.patch('guardian.shortcuts.get_perms')
@mock.patch('guardian.shortcuts.remove_perm')
@mock.patch('guardian.shortcuts.assign_perm')
@mock.patch('tethys_apps.models.TethysApp')
@mock.patch('django.contrib.auth.models.Group')
@mock.patch('django.contrib.auth.models.Permission')
def test_register_app_permissions_except_permission(self, mock_dp, mock_dg, mock_ta, mock_asg, mock_rem, mock_get):
group_name = 'test_group'
create_test_perm = Permission(name='create_test', description='test_create')
delete_test_perm = Permission(name='delete_test', description='test_delete')
group_perm = PermissionGroup(name=group_name, permissions=[create_test_perm, delete_test_perm])
self.app.permissions = mock.MagicMock(return_value=[create_test_perm, group_perm])
# Mock Permission.objects.filter
db_app_permission = mock.MagicMock(codename='test_code')
mock_perm_query = mock_dp.objects.filter().filter().all
mock_perm_query.return_value = [db_app_permission]
# Mock Permission.DoesNotExist
mock_dp.DoesNotExist = Exception
# Mock Permission.objects.get
mock_perm_get = mock_dp.objects.get
mock_perm_get.side_effect = Exception
# Mock Group.objects.filter
db_group = mock.MagicMock()
db_group.name = | |
# coding: utf-8
# In[46]:
import numpy as np
import pandas as pd
#import pyodbc
import pickle
import time
import itertools
from joblib import Parallel, delayed
from sklearn.metrics.pairwise import pairwise_distances
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error as MSE
from operator import itemgetter
import operator
from sklearn import linear_model
from sklearn.linear_model import Ridge
from sklearn.model_selection import cross_val_score
from sklearn.tree import DecisionTreeRegressor
from sqlalchemy import create_engine
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
# In[49]:
def data_generation_gradual_decrease_imbalance(num_control, num_treated, num_cov):
    """Generate a toy dataset whose covariate balance degrades across covariates
    (not used in this analysis). Returns (dataframe, outcome weights)."""
    control_cols = []
    treated_cols = []
    # Covariate drawn with probability p for controls and 1-p for treated,
    # so the two groups grow increasingly imbalanced.
    for p in np.linspace(0.1, 0.4, num_cov):
        control_cols.append(np.random.binomial(1, p, size=num_control))
        treated_cols.append(np.random.binomial(1, 1. - p, size=num_treated))
    xc = np.vstack(control_cols).T
    xt = np.vstack(treated_cols).T
    # Noise terms are drawn (keeping the RNG stream identical to the original)
    # but intentionally not added to the outcomes below.
    errors1 = np.random.normal(0, 1, size=num_control)
    errors2 = np.random.normal(0, 1, size=num_treated)
    # Geometrically decaying outcome weights: 1, 1/2, 1/4, ...
    dense_bs = [(1. / 2) ** j for j in range(num_cov)]
    weights = np.array(dense_bs)
    yc = np.dot(xc, weights)        # control outcomes (noise disabled)
    yt = np.dot(xt, weights) + 10   # treated outcomes: constant effect of 10
    df_control = pd.DataFrame(np.hstack([xc]), columns=range(num_cov))
    df_control['outcome'] = yc
    df_control['treated'] = 0
    df_treated = pd.DataFrame(np.hstack([xt]), columns=range(num_cov))
    df_treated['outcome'] = yt
    df_treated['treated'] = 1
    df = pd.concat([df_control, df_treated])
    df['matched'] = 0
    return df, dense_bs
# In[50]:
def construct_sec_order(arr):
    """Return all pairwise interaction features a[i]*a[j] (i < j) for each row of arr."""
    width = len(arr[0])
    # Ordered index pairs (i < j), matching the original nested-loop order.
    pairs = list(itertools.combinations(range(width), 2))
    return np.array([[row[i] * row[j] for i, j in pairs] for row in arr])
# In[51]:
def data_generation_dense_2(num_control, num_treated, num_cov_dense, num_covs_unimportant,
                            control_m=0.1, treated_m=0.9):
    """Main data-generating process: dense linear outcome plus second-order
    treatment effects, padded with unimportant covariates.
    Returns (dataframe, dense weights, treatment effect coefficients)."""
    xc = np.random.binomial(1, 0.5, size=(num_control, num_cov_dense))  # control covariates
    xt = np.random.binomial(1, 0.5, size=(num_treated, num_cov_dense))  # treated covariates
    # Noise drawn to keep the RNG stream stable, but not added to outcomes below.
    errors1 = np.random.normal(0, 0.1, size=num_control)
    errors2 = np.random.normal(0, 0.1, size=num_treated)
    # Random-signed weights of magnitude ~10 for the dense covariates.
    dense_bs_sign = np.random.choice([-1, 1], num_cov_dense)
    dense_bs = [np.random.normal(s * 10, 1) for s in dense_bs_sign]
    yc = np.dot(xc, np.array(dense_bs))  # control outcome (noise disabled)
    # Heterogeneous linear treatment effect ...
    treatment_eff_coef = np.random.normal(1.5, 0.15, size=num_cov_dense)
    treatment_effect = np.dot(xt, treatment_eff_coef)
    # ... plus second-order (pairwise interaction) effects over the first 5 covariates.
    treatment_eff_sec = np.sum(construct_sec_order(xt[:, :5]), axis=1)
    yt = np.dot(xt, np.array(dense_bs)) + treatment_effect + treatment_eff_sec
    # Unimportant covariates appear with different frequencies in each group.
    xc2 = np.random.binomial(1, control_m, size=(num_control, num_covs_unimportant))
    xt2 = np.random.binomial(1, treated_m, size=(num_treated, num_covs_unimportant))
    n_cols = num_cov_dense + num_covs_unimportant
    df_control = pd.DataFrame(np.hstack([xc, xc2]), columns=range(n_cols))
    df_control['outcome'] = yc
    df_control['treated'] = 0
    df_treated = pd.DataFrame(np.hstack([xt, xt2]), columns=range(n_cols))
    df_treated['outcome'] = yt
    df_treated['treated'] = 1
    df = pd.concat([df_control, df_treated])
    df['matched'] = 0
    return df, dense_bs, treatment_eff_coef
# In[52]:
def data_generation(num_control, num_treated, num_cov, control_m=0.3, treated_m=0.7):
    """Simple generator: binomial covariates, linear outcome, constant treatment
    effect of 1 (not used in this analysis). Returns the combined dataframe."""
    x1 = np.random.binomial(1, control_m, size=(num_control, num_cov))  # control covariates
    x2 = np.random.binomial(1, treated_m, size=(num_treated, num_cov))  # treated covariates
    errors1 = np.random.normal(0, 0.005, size=num_control)
    errors2 = np.random.normal(0, 0.005, size=num_treated)
    # Covariate weight i ~ Normal(i, 1/(i^2+1)): later covariates matter more
    # but their weights are drawn more tightly.
    mus = list(range(num_cov))
    sigmas = [1. / (i ** 2 + 1) for i in range(num_cov)]
    bs = [np.random.normal(m, s) for m, s in zip(mus, sigmas)]
    y1 = np.dot(x1, np.array(bs)) + errors1
    y2 = np.dot(x2, np.array(bs)) + 1 + errors2  # the +1 is the treatment effect
    df_control = pd.DataFrame(x1, columns=list(range(num_cov)))
    df_control['outcome'] = y1
    df_control['treated'] = 0
    df_treated = pd.DataFrame(x2, columns=list(range(num_cov)))
    df_treated['outcome'] = y2
    df_treated['treated'] = 1
    df = pd.concat([df_control, df_treated])
    df['matched'] = 0
    return df
# In[53]:
def match(df, covs, covs_max_list, treatment_indicator_col='treated', match_indicator_col='matched'):
    """Exact-match units on the covariates in `covs`.

    Returns (boolean match indicator per unit, group ids of the matched units).
    A unit is matched iff its exact covariate group contains both treated and
    control units.
    """
    vals = df[covs].values
    vals_with_t = df[covs + [treatment_indicator_col]].values
    # Mixed-radix encoding: collapse each covariate vector into one integer.
    radix = np.array([covs_max_list[i] ** (len(covs_max_list) - 1 - i)
                      for i in range(len(covs_max_list))])
    lidx_wo_t = np.dot(vals, radix)
    # Same encoding with the treatment bit as the least-significant digit.
    radix_with_t = np.array([covs_max_list[i] ** (len(covs_max_list) - i)
                             for i in range(len(covs_max_list))] + [1])
    lidx_w_t = np.dot(vals_with_t, radix_with_t)
    _, tags_wo_t, counts_wo_t = np.unique(lidx_wo_t, return_inverse=True, return_counts=True)
    _, tags_w_t, counts_w_t = np.unique(lidx_w_t, return_inverse=True, return_counts=True)
    # Counts (with vs without the treatment digit) disagree exactly when the
    # covariate group mixes treated and control units.
    match_indicator = ~(counts_w_t[tags_w_t] == counts_wo_t[tags_wo_t])
    return match_indicator, lidx_wo_t[match_indicator]
# In[54]:
# match_quality, the larger the better
def match_quality(df, holdout, covs_subset, match_indicator, ridge_reg=0.1, tradeoff=0.1):
    """Score a candidate covariate set; larger is better.

    Combines a balancing-factor term (fraction of still-unmatched control and
    treated units matched at this level) with cross-validated prediction quality
    on the holdout set. Returns (score, regression time, balance-counting time).
    """
    start = time.time()
    # Matched units are removed from df upstream, so these are unmatched counts.
    num_control = len(df[df['treated'] == 0])
    num_treated = len(df[df['treated'] == 1])
    num_control_matched = np.sum(match_indicator & (df['treated'] == 0))
    num_treated_matched = np.sum(match_indicator & (df['treated'] == 1))
    time_BF = time.time() - start
    # Prediction error (PE): ridge regressions fit separately per arm on holdout.
    start = time.time()
    ridge_c = Ridge(alpha=ridge_reg)
    ridge_t = Ridge(alpha=ridge_reg)
    holdout_t = holdout[holdout['treated'] == 1]
    holdout_c = holdout[holdout['treated'] == 0]
    n_mse_t = np.mean(cross_val_score(ridge_t, holdout_t[covs_subset],
                                      holdout_t['outcome'], scoring='neg_mean_squared_error'))
    n_mse_c = np.mean(cross_val_score(ridge_c, holdout_c[covs_subset],
                                      holdout_c['outcome'], scoring='neg_mean_squared_error'))
    time_PE = time.time() - start
    # Level-wise match quality: balance term (weighted by tradeoff) plus the
    # (negative) MSEs of both arms.
    balance = float(num_control_matched) / num_control + float(num_treated_matched) / num_treated
    return (tradeoff * balance + (n_mse_t + n_mse_c), time_PE, time_BF)
# In[55]:
def get_CATE_bit(df, match_indicator, index):
    """Per-group outcome statistics for the matched units.

    df: dataframe of units with 'treated' and 'outcome' columns.
    match_indicator: boolean mask of matched rows.
    index: group id per matched row, or None when nothing was matched.
    Returns a dataframe indexed by (grp_id, treated) with 'size' and 'mean'
    columns, or None when index is None.
    """
    if index is None:  # nothing matched at this level; nothing to slice or group
        return None
    # Explicit copy so the 'grp_id' assignment cannot hit pandas'
    # chained-assignment (SettingWithCopy) path on a view of df.
    d = df[match_indicator].copy()
    d.loc[:, 'grp_id'] = index
    # String aggregators replace the deprecated np.size/np.mean-in-agg usage and
    # yield the same 'size'/'mean' column names expected by recover_covs().
    res = d.groupby(['grp_id', 'treated'])['outcome'].aggregate(['size', 'mean'])
    return res
# In[56]:
def recover_covs(d, covs, covs_max_list, binary=True):
    """Decode a get_CATE_bit() result back into covariate values plus the
    estimated effect and total matched-group size for each matched group.

    d: dataframe indexed by (grp_id, treated) with 'size' and 'mean' columns,
       where each group contributes two consecutive rows (control, treated).
    covs / covs_max_list: covariate names and radices used by the encoding.
    """
    ind = d.index.get_level_values(0)
    # Decode the covariates once per group (every even row starts a group pair).
    ind = [num2vec(ind[i], covs_max_list) for i in range(len(ind)) if i % 2 == 0]
    df = pd.DataFrame(ind, columns=covs).astype(int)
    mean_list = list(d['mean'])
    size_list = list(d['size'])
    # BUG FIX: use integer division — len(...)/2 is a float under Python 3 and
    # range() would raise TypeError.
    effect_list = [mean_list[2 * i + 1] - mean_list[2 * i] for i in range(len(mean_list) // 2)]
    df.loc[:, 'effect'] = effect_list
    df.loc[:, 'size'] = [size_list[2 * i + 1] + size_list[2 * i] for i in range(len(size_list) // 2)]
    return df
def cleanup_result(res_all):
    """Decode every level with a non-None CATE table in res_all.

    Each element of res_all is ((covs, covs_max_list), cate_table_or_None);
    returns the list of decoded dataframes, skipping empty levels.
    """
    return [recover_covs(r[1], r[0][0], r[0][1]) for r in res_all if r[1] is not None]
def num2vec(num, covs_max_list):
    """Invert the mixed-radix encoding used by match(): turn a group id back
    into its list of covariate digits (most-significant first)."""
    res = []
    for i in range(len(covs_max_list)):
        base = covs_max_list[i] ** (len(covs_max_list) - 1 - i)
        # BUG FIX: integer division — '/' yields floats under Python 3, which
        # corrupts the decoded digits (and downstream .astype(int) frames).
        num_i = num // base
        res.append(num_i)
        # Once the remainder is zero, all remaining digits are zero.
        if (num_i == 0) & (num % base == 0):
            res = res + [0] * (len(covs_max_list) - 1 - i)
            break
        num = num - num_i * base
    return res
# In[57]:
def run_bit(df, holdout, covs, covs_max_list, | |
'directors', flag_total, False)
@app.metric('/total-architect-projects', parameters=[ORG.Person],
            id='architect-projects', title='Projects of Architect')
def get_total_architects_projects(uid, **kwargs):
    """Metric: number of projects on which the person acts as architect."""
    # A total (all-time) query is one with no begin/end window.
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    projects = get_position_projects(uid, params, 'architects', is_total, False)
    return params, [len(projects)]
@app.view('/architect-projects', target=ORG.Project, parameters=[ORG.Person],
          id='architect-projects', title='Projects of Architect')
def get_architect_projects(uid, **kwargs):
    """View: the projects on which the person acts as architect."""
    # A total (all-time) query is one with no begin/end window.
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_projects(uid, params, 'architects', is_total, False)
@app.metric('/total-pmanager-projects', parameters=[ORG.Person],
            id='pmanager-projects', title='Projects of Product Manager')
def get_total_manager_projects(uid, **kwargs):
    """Metric: number of projects on which the person acts as product manager."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    projects = get_position_projects(uid, params, 'productmanagers', is_total, False)
    return params, [len(projects)]
@app.view('/pmanager-projects', target=ORG.Project, parameters=[ORG.Person],
          id='pmanager-projects', title='Projects of Product Manager')
def get_manager_projects(uid, **kwargs):
    """View: the projects on which the person acts as product manager."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_projects(uid, params, 'productmanagers', is_total, False)
@app.metric('/total-director-products', parameters=[ORG.Person],
            id='director-products', title='Products of Director')
def get_total_director_products(uid, **kwargs):
    """Metric: number of products for which the person acts as director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    products = get_position_products(uid, params, 'directors', is_total)
    return params, [len(products)]
@app.view('/director-products', target=ORG.Product, parameters=[ORG.Person],
          id='director-products', title='Products of Director')
def get_director_products(uid, **kwargs):
    """View: the products for which the person acts as director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_products(uid, params, 'directors', is_total)
@app.metric('/total-architect-products', parameters=[ORG.Person],
            id='architects-products', title='Products of Architect')
def get_total_architect_products(uid, **kwargs):
    """Metric: number of products on which the person acts as architect."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    products = get_position_products(uid, params, 'architects', is_total)
    return params, [len(products)]
@app.view('/architect-products', target=ORG.Product, parameters=[ORG.Person],
          id='architects-products', title='Products of Architect')
def get_architect_products(uid, **kwargs):
    """View: the products on which the person acts as architect."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_products(uid, params, 'architects', is_total)
@app.metric('/total-pmanager-repositories', parameters=[ORG.Person],
            id='pmanager-repositories', title='Repositories of Product Manager')
def get_total_pmanager_repositories(uid, **kwargs):
    """Metric: number of repositories under the person's product-manager role."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    repos = get_position_repositories(uid, params, 'productmanagers', is_total, False)
    return params, [len(repos)]
@app.view('/pmanager-repositories', target=SCM.Repository, parameters=[ORG.Person],
          id='pmanager-repositories', title='Repositories of Product Manager')
def get_pmanager_repositories(uid, **kwargs):
    """View: the repositories under the person's product-manager role."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_repositories(uid, params, 'productmanagers', is_total, False)
@app.metric('/total-pmanager-products', parameters=[ORG.Person],
            id='pmanager-products', title='Products of Product Manager')
def get_total_manager_products(uid, **kwargs):
    """Metric: number of products on which the person acts as product manager."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    products = get_position_products(uid, params, 'productmanagers', is_total)
    return params, [len(products)]
@app.view('/pmanager-products', target=ORG.Product, parameters=[ORG.Person],
          id='pmanager-products', title='Products of Product Manager')
def get_manager_products(uid, **kwargs):
    """View: the products on which the person acts as product manager."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_position_products(uid, params, 'productmanagers', is_total)
@app.metric('/total-director-productmanagers', parameters=[ORG.Person],
            id='director-productmanagers', title='Product Managers of Director')
def get_total_director_pmanagers(uid, **kwargs):
    """Metric: number of product managers under the given director."""
    context, people = helper_get_director_pmanagers(uid, **kwargs)
    return context, [len(people)]
@app.view('/director-productmanagers', target=ORG.Person, parameters=[ORG.Person],
          id='director-productmanagers', title='Product Managers of Director')
def get_director_pmanagers(uid, **kwargs):
    # Thin view wrapper: all logic lives in the shared helper.
    return helper_get_director_pmanagers(uid, **kwargs)
@app.metric('/total-director-architects', parameters=[ORG.Person],
            id='director-architects', title='Architects of Director')
def get_total_director_architects(uid, **kwargs):
    """Metric: number of architects under the given director."""
    context, people = helper_get_director_architects(uid, **kwargs)
    return context, [len(people)]
@app.view('/director-architects', target=ORG.Person, parameters=[ORG.Person],
          id='director-architects', title='Architects of Director')
def get_director_architects(uid, **kwargs):
    # Thin view wrapper: all logic lives in the shared helper.
    return helper_get_director_architects(uid, **kwargs)
@app.metric('/total-director-developers', parameters=[ORG.Person],
            id='director-developers', title='Developers of Director')
def get_total_director_developers(uid, **kwargs):
    """Metric: number of developers under the given director."""
    context, people = helper_get_position_developers(uid, 'directors', **kwargs)
    return context, [len(people)]
@app.view('/director-developers', target=ORG.Person, parameters=[ORG.Person],
          id='director-developers', title='Developers of Director')
def get_director_developers(uid, **kwargs):
    # Thin view wrapper: all logic lives in the shared position helper.
    return helper_get_position_developers(uid, 'directors', **kwargs)
@app.metric('/total-director-stakeholders', parameters=[ORG.Person],
            id='director-stakeholders', title='Stakeholders of Director')
def get_total_director_stakeholders(uid, **kwargs):
    """Metric: number of stakeholders under the given director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    people = get_director_roles(uid, params, 'stakeholder', is_total)
    return params, [len(people)]
@app.view('/director-stakeholders', target=ORG.Person, parameters=[ORG.Person],
          id='director-stakeholders', title='Stakeholders of Director')
def get_director_stakeholders(uid, **kwargs):
    """View: the stakeholders under the given director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_director_roles(uid, params, 'stakeholder', is_total)
@app.metric('/total-director-swarchitects', parameters=[ORG.Person],
            id='director-swarchitects', title='Software Architects of Director')
def get_total_director_swarchitects(uid, **kwargs):
    """Metric: number of software architects under the given director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    people = get_director_roles(uid, params, 'softwarearchitect', is_total)
    return params, [len(people)]
@app.view('/director-swarchitects', target=ORG.Person, parameters=[ORG.Person],
          id='director-swarchitects', title='Software Architects of Director')
def get_director_swarchitects(uid, **kwargs):
    """View: the software architects under the given director."""
    is_total = kwargs.get('begin') is None and kwargs.get('end') is None
    params = get_correct_kwargs(kwargs)
    return params, get_director_roles(uid, params, 'softwarearchitect', is_total)
@app.metric('/total-director-swdevelopers', parameters=[ORG.Person],
            id='director-swdevelopers', title='Software Developers of Director')
def get_total_director_swdevelopers(uid, **kwargs):
    """Metric: count of software-developer roles attached to the director."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    developers = get_director_roles(uid, params, 'softwaredeveloper', is_total)
    return params, [len(developers)]
@app.view('/director-swdevelopers', target=ORG.Person, parameters=[ORG.Person],
            id='director-swdevelopers', title='Software Developers of Director')
def get_director_swdevelopers(uid, **kwargs):
    """View: software-developer roles attached to the director."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return params, get_director_roles(uid, params, 'softwaredeveloper', is_total)
@app.metric('/total-director-pjmanagers', parameters=[ORG.Person],
            id='director-pjmanagers', title='Project Managers of Director')
def get_total_director_pjmanagers(uid, **kwargs):
    """Metric: count of project-manager roles attached to the director."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    managers = get_director_roles(uid, params, 'projectmanager', is_total)
    return params, [len(managers)]
@app.view('/director-pjmanagers', target=ORG.Person, parameters=[ORG.Person],
            id='director-pjmanagers', title='Project Managers of Director')
def get_director_pjmanagers(uid, **kwargs):
    """View: project-manager roles attached to the director."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return params, get_director_roles(uid, params, 'projectmanager', is_total)
@app.metric('/total-director-members', parameters=[ORG.Person],
            id='director-members', title='Members below Director')
def get_total_director_members(uid, **kwargs):
    """Metric: number of distinct members (product managers, architects and
    developers) below the director.

    Deduplication happens via a dict keyed by member id, so a person holding
    several roles is counted once.
    """
    members = {}
    # Plain dict-comprehension updates instead of list comprehensions run
    # purely for their side effects (the original anti-pattern built and
    # discarded a throwaway list of Nones on every call).
    co, pm = helper_get_director_pmanagers(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in pm})
    co, ar = helper_get_director_architects(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in ar})
    co, dev = helper_get_position_developers(uid, 'directors', **kwargs)
    members.update({x.get('id'): x.get('uri') for x in dev})
    # `co` from the last helper call is returned, matching the original flow.
    return co, [len(members)]
@app.view('/director-members', target=ORG.Person, parameters=[ORG.Person],
            id='director-members', title='Members below Director')
def get_director_members(uid, **kwargs):
    """View: distinct members (product managers, architects and developers)
    below the director, each as {"id": ..., "uri": ...}.

    A dict keyed by member id deduplicates people holding several roles;
    insertion order of first appearance is preserved.
    """
    members = {}
    # Dict-comprehension updates instead of side-effect list comprehensions.
    co, pm = helper_get_director_pmanagers(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in pm})
    co, ar = helper_get_director_architects(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in ar})
    co, dev = helper_get_position_developers(uid, 'directors', **kwargs)
    members.update({x.get('id'): x.get('uri') for x in dev})
    # Materialize the dedup dict back into the list-of-dicts payload shape.
    res_mem = [{"id": member_id, "uri": member_uri}
               for member_id, member_uri in members.items()]
    return co, res_mem
@app.metric('/director-productmembers', aggr='avg', parameters=[ORG.Person],
            id='director-productmembers', title='Product Members AVG of Director')
def get_avg_director_productmembers(uid, **kwargs):
    """Metric: average number of distinct members per product of the director.

    Returns [0] when the director has no products, to avoid division by zero.
    """
    members = {}
    # Dict-comprehension updates instead of side-effect list comprehensions.
    co, pm = helper_get_director_pmanagers(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in pm})
    co, ar = helper_get_director_architects(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in ar})
    co, dev = helper_get_position_developers(uid, 'directors', **kwargs)
    members.update({x.get('id'): x.get('uri') for x in dev})
    res_mem = len(members)
    # No begin/end bounds means the caller wants the all-time total.
    flag_total = kwargs.get('begin') is None and kwargs.get('end') is None
    args = get_correct_kwargs(kwargs)
    res_pr = len(get_position_products(uid, args, 'directors', flag_total))
    if res_pr == 0:
        return co, [0]
    return co, [float(res_mem) / float(res_pr)]
@app.metric('/director-productrepositories', aggr='avg', parameters=[ORG.Person],
            id='director-productrepositories', title='Product Repositories AVG of Director')
def get_avg_director_productrepositories(uid, **kwargs):
    """Metric: average number of repositories per product of the director.

    Returns [0] when the director has no products, to avoid division by zero.
    """
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    repo_count = len(get_position_repositories(uid, params, 'directors', is_total, True))
    product_count = len(get_position_products(uid, params, 'directors', is_total))
    if product_count == 0:
        return params, [0]
    return params, [float(repo_count) / float(product_count)]
@app.metric('/director-projectmembers', aggr='avg', parameters=[ORG.Person],
            id='director-projectmembers', title='Project Members AVG of Director')
def get_avg_director_projectmembers(uid, **kwargs):
    """Metric: average number of distinct members per project of the director.

    Returns [0] when the director has no projects, to avoid division by zero.
    """
    members = {}
    # Dict-comprehension updates instead of side-effect list comprehensions.
    co, pm = helper_get_director_pmanagers(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in pm})
    co, ar = helper_get_director_architects(uid, **kwargs)
    members.update({x.get('id'): x.get('uri') for x in ar})
    co, dev = helper_get_position_developers(uid, 'directors', **kwargs)
    members.update({x.get('id'): x.get('uri') for x in dev})
    res_mem = len(members)
    # No begin/end bounds means the caller wants the all-time total.
    flag_total = kwargs.get('begin') is None and kwargs.get('end') is None
    args = get_correct_kwargs(kwargs)
    res_pr = len(get_position_projects(uid, args, 'directors', flag_total, True))
    if res_pr == 0:
        return co, [0]
    return co, [float(res_mem) / float(res_pr)]
@app.metric('/director-projectrepositories', aggr='avg', parameters=[ORG.Person],
            id='director-projectrepositories', title='Project Repositories AVG of Director')
def get_avg_director_projectrepositories(uid, **kwargs):
    """Metric: average number of repositories per project of the director.

    Returns [0] when the director has no projects, to avoid division by zero.
    """
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    repo_count = len(get_position_repositories(uid, params, 'directors', is_total, True))
    project_count = len(get_position_projects(uid, params, 'directors', is_total, True))
    if project_count == 0:
        return params, [0]
    return params, [float(repo_count) / float(project_count)]
@app.metric('/director-activity', parameters=[ORG.Person],
            id='director-activity', title='Activity of Director')
def get_director_activity(uid, **kwargs):
    """Metric: product activity of the director, normalized against the
    maximum value so results lie in [0, 1] (empty list if no data)."""
    flag_total = kwargs.get('begin') is None and kwargs.get('end') is None
    args = get_correct_kwargs(kwargs)
    co, res = get_external_position_metric(uid, 'sum-product-activity', 'directors', 'sum', args, flag_total)
    # Proper list comprehension instead of a comprehension over append()
    # executed only for its side effects.
    res_makeup = []
    if res:
        res_max = max(res)
        res_makeup = [float(x) / res_max for x in res]
    return co, res_makeup
@app.metric('/director-quality', aggr='avg', parameters=[ORG.Person],
            id='director-quality', title='Quality of Director')
def get_director_quality(uid, **kwargs):
    """Metric: average quality over the director's products."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return get_external_position_metric(uid, 'sum-product-quality', 'directors', 'avg', params, is_total)
@app.metric('/director-health', aggr='avg', parameters=[ORG.Person],
            id='director-health', title='Health of Director')
def get_director_health(uid, **kwargs):
    """Metric: average health over the director's products."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return get_external_position_metric(uid, 'sum-product-health', 'directors', 'avg', params, is_total)
@app.metric('/director-costs', parameters=[ORG.Person],
            id='director-costs', title='Costs of Director')
def get_director_costs(uid, **kwargs):
    """Metric: summed costs over the director's products."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return get_external_position_metric(uid, 'sum-product-cost', 'directors', 'sum', params, is_total)
@app.metric('/director-externals', parameters=[ORG.Person],
            id='director-externals', title='External Committers from Products of Director')
def get_director_externals(uid, **kwargs):
    """Metric: summed external committers over the director's products."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return get_external_position_metric(uid, 'sum-product-externals', 'directors', 'sum', params, is_total)
@app.metric('/director-timetomarket', aggr='avg', parameters=[ORG.Person],
            id='director-timetomarket', title='Time To Market from Products of Director')
def get_director_timetomarket(uid, **kwargs):
    """Metric: average time-to-market over the director's products."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return get_external_position_metric(uid, 'sum-product-timetomarket', 'directors', 'avg', params, is_total)
@app.metric('/total-pmanager-architects', parameters=[ORG.Person],
            id='pmanager-architects', title='Architects of Product Manager')
def get_total_pmanager_architects(uid, **kwargs):
    """Metric: count of architects under the product manager."""
    context, architects = helper_get_pmanager_architects(uid, **kwargs)
    return context, [len(architects)]
@app.view('/pmanager-architects', target=ORG.Person, parameters=[ORG.Person],
            id='pmanager-architects', title='Architects of Product Manager')
def get_pmanager_architects(uid, **kwargs):
    """View: delegate to the helper that lists the product manager's architects."""
    result = helper_get_pmanager_architects(uid, **kwargs)
    return result
@app.metric('/total-pmanager-developers', parameters=[ORG.Person],
            id='pmanager-developers', title='Developers of Product Manager')
def get_total_pmanager_developers(uid, **kwargs):
    """Metric: count of developers under the product manager."""
    context, developers = helper_get_position_developers(uid, 'productmanagers', **kwargs)
    return context, [len(developers)]
@app.view('/pmanager-developers', target=ORG.Person, parameters=[ORG.Person],
            id='pmanager-developers', title='Developers of Product Manager')
def get_pmanager_developers(uid, **kwargs):
    """View: delegate to the helper that lists the product manager's developers."""
    result = helper_get_position_developers(uid, 'productmanagers', **kwargs)
    return result
@app.metric('/total-pmanager-stakeholders', parameters=[ORG.Person],
            id='pmanager-stakeholders', title='Stakeholders of Product Manager')
def get_total_pmanager_stakeholders(uid, **kwargs):
    """Metric: count of stakeholder roles attached to the product manager."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    stakeholders = get_pmanager_roles(uid, params, 'stakeholder', is_total)
    return params, [len(stakeholders)]
@app.view('/pmanager-stakeholders', target=ORG.Person, parameters=[ORG.Person],
            id='pmanager-stakeholders', title='Stakeholders of Product Manager')
def get_pmanager_stakeholders(uid, **kwargs):
    """View: stakeholder roles attached to the product manager."""
    is_total = all(kwargs.get(key) is None for key in ('begin', 'end'))
    params = get_correct_kwargs(kwargs)
    return params, get_pmanager_roles(uid, params, 'stakeholder', is_total)
@app.metric('/total-pmanager-swarchitects', parameters=[ORG.Person],
id='pmanager-swarchitects', title='Software Architects of Product Manager')
def get_total_pmanager_swarchitects(uid, | |
= Var(within=Reals,bounds=(0,None),initialize=0)
# Generated Pyomo variable declarations, collapsed into loops.
# Continuous decision variables x597..x1000: nonnegative reals, initialized to 0.
# setattr(m, name, Var(...)) goes through the same Block attribute-assignment
# path as `m.name = Var(...)`, and the declaration order is preserved.
for _i in range(597, 1001):
    setattr(m, 'x' + str(_i), Var(within=Reals, bounds=(0, None), initialize=0))
# Binary decision variables b1001..b1020, initialized to 0.
for _i in range(1001, 1021):
    setattr(m, 'b' + str(_i), Var(within=Binary, bounds=(0, 1), initialize=0))
m.obj = Objective(expr=48.8554584915799*m.x1*m.x1 + 53.9866118946038*m.x2*m.x2 + 32.2474858966649*m.x3*m.x3 +
25.7320045522626*m.x4*m.x4 + 44.8041748971961*m.x5*m.x5 + 39.2874586313035*m.x6*m.x6 +
46.5335812857622*m.x7*m.x7 + 41.6267030962109*m.x8*m.x8 + 44.7352479103365*m.x9*m.x9 +
13.7922846635507*m.x10*m.x10 + 29.7236346286992*m.x11*m.x11 + 37.7953456306208*m.x12*m.x12 +
2.1694344083028*m.x13*m.x13 + 27.8622079378829*m.x14*m.x14 + 46.9024947590224*m.x15*m.x15 +
28.7851477365269*m.x16*m.x16 + 45.4201600399348*m.x17*m.x17 + 41.0169667711524*m.x18*m.x18 +
38.5887919298439*m.x19*m.x19 + 37.392204096138*m.x20*m.x20 + 41.1741984665897*m.x21*m.x21 +
39.3376253421046*m.x22*m.x22 + 6.19594894014753*m.x23*m.x23 + 43.7829760275622*m.x24*m.x24 +
39.6189399416393*m.x25*m.x25 + 17.9122017523285*m.x26*m.x26 + 32.1022690820397*m.x27*m.x27 +
15.577579659501*m.x28*m.x28 + 48.0958110165587*m.x29*m.x29 + 44.242610019511*m.x30*m.x30 +
12.9078629017768*m.x31*m.x31 + 29.6237146046815*m.x32*m.x32 + 40.2127228205451*m.x33*m.x33 +
27.4839971896194*m.x34*m.x34 + 47.3974691677771*m.x35*m.x35 + 47.4791664541259*m.x36*m.x36 +
56.7917367845697*m.x37*m.x37 + 20.9254052188314*m.x38*m.x38 + 14.4109268203302*m.x39*m.x39 +
43.2009728797349*m.x40*m.x40 + 46.0286285679978*m.x41*m.x41 + 13.6029869768736*m.x42*m.x42 +
51.5465349357845*m.x43*m.x43 + 25.6997780626924*m.x44*m.x44 + 2.4620548405504*m.x45*m.x45 +
27.9568619443691*m.x46*m.x46 + 40.396445663609*m.x47*m.x47 + 11.2663848037081*m.x48*m.x48 +
31.5210489875165*m.x49*m.x49 + 25.4576063089556*m.x50*m.x50 + 15.8516003339703*m.x51*m.x51 +
30.0846775730623*m.x52*m.x52 + 35.3611101637529*m.x53*m.x53 + 47.9613905183025*m.x54*m.x54 +
33.1951313194605*m.x55*m.x55 + 18.6512031945736*m.x56*m.x56 + 17.7880187830563*m.x57*m.x57 +
49.860767401581*m.x58*m.x58 + 16.5586610587482*m.x59*m.x59 + 32.0610602907049*m.x60*m.x60 +
14.5686048248382*m.x61*m.x61 + 16.3739140550006*m.x62*m.x62 + 42.0925643810083*m.x63*m.x63 +
35.5248318215409*m.x64*m.x64 + 12.8137994628541*m.x65*m.x65 + 20.3070738215987*m.x66*m.x66 +
4.78600070516521*m.x67*m.x67 + 15.2775158615347*m.x68*m.x68 + 14.4456238787108*m.x69*m.x69 +
8.2452521993196*m.x70*m.x70 + 41.5193919157908*m.x71*m.x71 + 16.1530025233654*m.x72*m.x72 +
40.5968930080003*m.x73*m.x73 + 20.6209551856633*m.x74*m.x74 + 15.6829694385359*m.x75*m.x75 +
27.3165102680304*m.x76*m.x76 + 12.7873080761611*m.x77*m.x77 + 30.61021703164*m.x78*m.x78 +
17.7940056112844*m.x79*m.x79 + 3.45558376016331*m.x80*m.x80 + 44.215263731464*m.x81*m.x81 +
22.4721650499974*m.x82*m.x82 + 5.87741265005936*m.x83*m.x83 + 20.440898654042*m.x84*m.x84 +
6.01668263765759*m.x85*m.x85 + 9.76377065215142*m.x86*m.x86 + 31.4920320363795*m.x87*m.x87 +
32.5795582029762*m.x88*m.x88 + 29.9498707723918*m.x89*m.x89 + 22.2528495134374*m.x90*m.x90 +
33.5215310060452*m.x91*m.x91 + 31.1379536249797*m.x92*m.x92 + 26.4191025637888*m.x93*m.x93 +
37.6706918870347*m.x94*m.x94 + 42.5947843160881*m.x95*m.x95 + 17.7081325674203*m.x96*m.x96 +
6.54712075315253*m.x97*m.x97 + 32.9558470339385*m.x98*m.x98 + 29.3664910214195*m.x99*m.x99 +
19.150561974833*m.x100*m.x100 + 28.1182743393074*m.x101*m.x101 + 39.0497965414683*m.x102*m.x102
+ 30.8526405373756*m.x103*m.x103 + 37.9162092187562*m.x104*m.x104 + 35.972489960497*m.x105*
m.x105 + 22.8808340120743*m.x106*m.x106 + 27.5853636816224*m.x107*m.x107 + 45.8116191808109*
m.x108*m.x108 + 25.6506447280254*m.x109*m.x109 + 17.3307103279063*m.x110*m.x110 +
6.42143096728142*m.x111*m.x111 + 20.0951398777073*m.x112*m.x112 + 25.1143138698949*m.x113*m.x113
+ 28.7449567619975*m.x114*m.x114 + 20.1377939791076*m.x115*m.x115 + 14.9970586744739*m.x116*
m.x116 + 20.6030527503038*m.x117*m.x117 + 15.1818681307509*m.x118*m.x118 + 12.5064090876373*
m.x119*m.x119 + 10.3999514165193*m.x120*m.x120 + 39.7838431308382*m.x121*m.x121 +
13.8869927034986*m.x122*m.x122 + 22.8461199269574*m.x123*m.x123 + 27.3601785440067*m.x124*m.x124
+ 21.1821453218448*m.x125*m.x125 + 13.171400119456*m.x126*m.x126 + 5.26824846253116*m.x127*
m.x127 + 12.6141649600847*m.x128*m.x128 + 28.632901257336*m.x129*m.x129 + 19.1033115748186*m.x130
*m.x130 + 30.2933328008593*m.x131*m.x131 + 18.1290104864422*m.x132*m.x132 + 13.2561112379863*
m.x133*m.x133 + 4.65638084962282*m.x134*m.x134 + 22.5919955180534*m.x135*m.x135 +
24.2362361298242*m.x136*m.x136 + 41.3697175788562*m.x137*m.x137 + 22.3642861445109*m.x138*m.x138
+ 13.8871519995335*m.x139*m.x139 + 27.9779003581246*m.x140*m.x140 + 36.8456362998035*m.x141*
m.x141 + 13.5885973852764*m.x142*m.x142 + 35.5621751531126*m.x143*m.x143 + 29.5106281783608*
m.x144*m.x144 + 25.8800384494317*m.x145*m.x145 + 10.0956997189855*m.x146*m.x146 +
13.3327085566087*m.x147*m.x147 + 16.1526130062527*m.x148*m.x148 + 25.6331396246099*m.x149*m.x149
+ 7.57646236487529*m.x150*m.x150 + 14.6612663683577*m.x151*m.x151 + 4.24866295839075*m.x152*
m.x152 + 21.2648910502754*m.x153*m.x153 + 37.2641585193082*m.x154*m.x154 + 8.48856954478134*
m.x155*m.x155 + 13.6759334611371*m.x156*m.x156 + 11.7625743826901*m.x157*m.x157 + 28.880031412227
*m.x158*m.x158 + 12.9524101954386*m.x159*m.x159 + 36.732528404677*m.x160*m.x160 +
30.1623111545154*m.x161*m.x161 + 16.5051491661663*m.x162*m.x162 + 48.1430869906303*m.x163*m.x163
+ 25.0135166335402*m.x164*m.x164 + 42.2896281877994*m.x165*m.x165 + 23.3547718162713*m.x166*
m.x166 + 24.9931803060261*m.x167*m.x167 + 43.4882699648209*m.x168*m.x168 + 41.7340705380843*
m.x169*m.x169 + 33.6950861947128*m.x170*m.x170 + 19.7037715609977*m.x171*m.x171 + 43.726908364334
*m.x172*m.x172 + 50.1174410512644*m.x173*m.x173 + 9.47893403275559*m.x174*m.x174 +
15.7236672520494*m.x175*m.x175 + 33.9484056452419*m.x176*m.x176 + 33.8880778573357*m.x177*m.x177
+ 44.2719326432059*m.x178*m.x178 + 12.1684116677416*m.x179*m.x179 + 26.0467168835431*m.x180*
m.x180 + 42.1354136669581*m.x181*m.x181 + 21.2483495255647*m.x182*m.x182 + 33.370444458487*m.x183
*m.x183 + 41.1190113682979*m.x184*m.x184 + 24.8386204200289*m.x185*m.x185 + 20.6320783268308*
m.x186*m.x186 + | |
= min(1.0, (group_target-group_cum)/(D[prev_node_i,i]))
cone_wt = cone_fraction*D[prev_node_i,i]
group_cum+=cone_wt
group_nodes.append( (prev_node_rho,prev_node_i,
d[prev_node_i] if C else D[prev_node_i,i]) )
if __debug__:
if C:
log(DEBUG-3,"Node %d, added %.2f %% of demand (%.2f)" %\
(prev_node_i, cone_fraction*100, d[prev_node_i]))
else:
log(DEBUG-3,"Node %d, added %.2f %% of cost (%.2f)" %\
(prev_node_i, cone_fraction*100, 0.5*D[prev_node_i,i]))
log(DEBUG-2,"Group %.2f %% full"%\
(group_cum/group_target*100.0))
if (group_target-group_cum)<EPS:
group_end_ray = bisect_angle(prev_ray, ray, cone_fraction)
# group is full, store it
grouped_cones.append( _Cone(group_start_ray,group_end_ray,
group_cum, group_nodes) )
if __debug__:
log(DEBUG-2,"Node %d cone sets group_end_ray=%.2f"%\
(prev_node_i,group_end_ray))
log(DEBUG-2,"Group completed!\n")
# next group
group_start_ray = group_end_ray
group_nodes = []
group_cum = 0
if cone_fraction<1.0:
if C:
rmdr_wt = (1.0-cone_fraction)*d[prev_node_i]
else:
rmdr_wt = (1.0-cone_fraction)*D[prev_node_i,i]
group_cum += rmdr_wt
group_nodes.append((prev_node_rho,prev_node_i,
d[prev_node_i] if C else D[prev_node_i,i]))
if __debug__:
if len(grouped_cones)<K:
log(DEBUG-2,"Node %d cone sets group_start_ray=%.2f"%\
(prev_node_i,group_start_ray))
# the group now spans upto this
group_end_ray = ray
if __debug__:
if len(grouped_cones)<K:
log(DEBUG-2,"Node %d cone grows group to ray=%.2f"%\
(prev_node_i,group_end_ray))
prev_ray = ray
prev_node_i = i
prev_node_rho = node_rho
prev_node_phi = node_phi
## get seed form the resulting K merged cones
seed_points = np.zeros((K,2), dtype=np.float64)
depot_x = points[0][0]
depot_y = points[0][1]
for k, grouped_cone in enumerate(grouped_cones):
if __debug__:
log(DEBUG-3," ===========================================")
log(DEBUG-3," #%d %s"%(k, str(grouped_cone)))
log(DEBUG-3," ===========================================\n")
# Find an arc that splits the k-cone in a way that the linear demand
# under the arc is "around" 0.75 (the exact definition is in the
# Fisher & Jaikumar (1981) paper. Begin by sorting by distance from
# the depot and grow arc as long as weight sum is under the limit.
seed_rho = 0
grow_arc_wt = 0
weight_target = 0.75*group_target # 0.75{\labmda}b
for cr,ci,cwt in sorted(grouped_cone.nodes):
if grow_arc_wt+cwt>weight_target:
# take a fraction of the weight just outside the arc
seed_rho+=((weight_target-grow_arc_wt)/cwt)*(cr-seed_rho)
break
else:
grow_arc_wt+=cwt
seed_rho=cr
# Calculate the actual seed point position
seed_phi = bisect_angle(grouped_cone.phi1,grouped_cone.phi2)
seed_points[k,0] = depot_x+seed_rho*np.cos(seed_phi)
seed_points[k,1] = depot_y+seed_rho*np.sin(seed_phi)
return seed_points.tolist()
def _kmeans_seed_points(points, D, d, C, K, trial=0):
    """Place the K seed points at k-Means cluster centers of the customers.

    The depot (points[0]) is excluded from the clustering. ``trial`` is
    forwarded as the k-Means random seed so that repeated trials can
    produce different clusterings.
    """
    from sklearn.cluster import KMeans
    customer_points = points[1:]
    clustering = KMeans(n_clusters=K, random_state=trial)
    clustering.fit(customer_points)
    return clustering.cluster_centers_.tolist()
def _end_of_thoroughfares_seed_points(points, D, d, C, K, trial=0):
    """A seed point generation function that automates the human assisted
    idea presented in Fisher and Jaikumar (1981) involving placing the seed
    points to the end of thoroughfares leaving from the depot. A DBSCAN
    clustering is made and the seeds are selected among non-core points. Non-
    core points should be, due to the operating principle of DBSCAN, at the
    ends of long cluster "arms". By selecting the non-core points farthest from
    the depot and previously selected seeds, we should get a set of seed points
    closely following the Fisher and Jaikumar (1981) idea: "customers
    often lie along radial corridors corresponding to major thoroughfares, and
    the most distant ... along these corridors are natural seed customers".
    Fisher and Jaikumar (1981) presented the idea with interactive computer
    systems in mind, whereas this implementation is automatic.

    TODO: in practice, the results are underwhelming. Instead, one should do
     1d clustering for phis and then choose the farthest point of each
     "Sweep cluster".

    parameters:
    - points, D, d, C, K as before
    - trial can be used to get different clusterings from the DBSCAN algorithm.
       the DBSCAN min_size is 2,2,3,3,4,4,... for trial 0,1,2,3,4,5... .
       The initial eps is determined by getting the median distance of the
       nn=2., 3., 2., 3., 3., 4., 3,... nearest neighbour of all nodes
       depending if the trial is 0,1,2,3,4,5,6,7.. following the formula
       nn=2+trial%2+int(trial/4))
       The seed points are selected among the non-core points S_nc by
       maximizing the (square-rooted) distances to the depot and to the
       already selected seeds. If it happens that |S_nc|<K, all non-core
       points are included and the rest of the seed points are chosen among
       the cluster core points and, failing that, among the outlier points.

    WARNING: This seed heuristic may return None seeds as the existence of non-
    core points cannot be guaranteed.
    """
    from sklearn.cluster import DBSCAN
    from util import produce_nn_list

    # use a heuristic to get eps that finds all 2. closest nodes and
    # uses the median distance of those as the eps
    N = len(d)
    nnD = produce_nn_list(D)
    nn = 2+trial%2+int(trial/4)
    nn2l = [nnS[nn][0] for nnS in nnD]
    nn2l.sort()
    min_size = 3#+int(trial/2)
    eps = nn2l[int(N/2)]

    ## Get non-core DBSCAN points
    if __debug__:
        log(DEBUG-2,"Doing DBSCAN with eps =", eps, " min_size =",min_size)
    db = DBSCAN(eps=eps, min_samples=min_size).fit(points)
    outliers_mask = db.labels_ == -1
    clustered_mask = db.labels_ != -1
    core_samples_mask = np.zeros(N, dtype=bool)
    core_samples_mask[db.core_sample_indices_] = True
    # we are interested of the nodes at the fringes of the clusters
    candidate_mask = clustered_mask^core_samples_mask
    candidate_idxs = np.where(candidate_mask)[0].tolist()
    candidates_type = "cluster non-core"

    if __debug__:
        log(DEBUG-3,"DBSCAN labels = %s"%str(list(zip(range(N),db.labels_))))
        # BUGFIX: sklearn's DBSCAN exposes `core_sample_indices_`;
        # the previous `core_sample_idxs_` raised AttributeError whenever
        # this verbosity level was active.
        log(DEBUG-3,"DBSCAN core = %s"%str(db.core_sample_indices_))
        log(DEBUG-2,"Select %d seed nodes from non-core nodes %s."%
            (min(len(candidate_idxs),K), str(candidate_idxs)))

    seeds = []
    selected_seeds_mask = np.zeros(N, dtype=bool)
    # make depot like a seed -> maximize distance from it
    selected_seeds_mask[0] = True
    if len(candidate_idxs)<=K:
        # if all candidates are needed, add them without checking the distances
        for seed_idx in candidate_idxs:
            seeds.append( points[seed_idx] )
            if __debug__:
                log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                    (seed_idx,points[seed_idx][0],points[seed_idx][1],candidates_type))
            selected_seeds_mask[seed_idx] = True
        candidate_idxs = []

    used_core_points = False
    while len(seeds)<K:
        if not candidate_idxs:
            if not used_core_points:
                # ran out of non-core candidates. Use clustered as candidates
                candidate_mask = core_samples_mask
                candidate_idxs = np.where(core_samples_mask)[0].tolist()
                candidates_type = "cluster core"
                used_core_points = True
                if __debug__:
                    log(DEBUG-3,"Ran out of non-core nodes, select %d seed nodes from core nodes %s"%
                        (min(len(candidate_idxs), K-len(seeds)), str(candidate_idxs)))
            else:
                candidate_mask = outliers_mask
                candidate_idxs = np.where(outliers_mask)[0].tolist()
                candidates_type = "outliers"
                if __debug__:
                    log(DEBUG-3, "Ran out of core and non-core nodes, select %d seed nodes from outlier nodes %s"%
                        (K-len(seeds), str(candidate_idxs)))

        # maximize the distance to other seeds and depot
        if not seeds:
            D_to_seeds = D[selected_seeds_mask,candidate_mask]
        else:
            D_to_seeds = np.sum( np.sqrt((D[selected_seeds_mask,:])[:,candidate_mask]), axis=0)
        seed_idx = candidate_idxs[np.argmax( D_to_seeds )]
        selected_seeds_mask[seed_idx] = True
        seeds.append( points[seed_idx] )
        if __debug__:
            log(DEBUG-2, "Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                (seed_idx,points[seed_idx][0],points[seed_idx][1], candidates_type))
        # prevent selecting it again
        candidate_mask[seed_idx] = False
        candidate_idxs.remove(seed_idx)
    return seeds
def _large_demand_seed_points(points, D, d, C, K, trial=0):
"""A seed point generation function that automates the human assisted
idea presented in Fisher and Jaikumar (1981)
"""
# make sure we are dealing with np arrays here
np_d = np.array(d)
N = len(d)
# we are look mainly the large d nodes where only 1 fits on a route
can_fit_only_1_mask = np_d > (0.5*C)
candidate_d_mask = can_fit_only_1_mask.copy()
candidate_d_idxs = np.where(can_fit_only_1_mask)[0].tolist()
if trial:
# in addition, add as many OTHER largest d ones as trial is
not_over_half_idxs = np.where( ~candidate_d_mask )[0].tolist()
sorted_d = [(d[i], i) for i in not_over_half_idxs]
sorted_d.sort(reverse=True)
sorted_d_idxs = list(zip(*sorted_d)[1])
additional_large_d_idxs = sorted_d_idxs[max(0, trial-N):min(N,trial)]
candidate_d_idxs+=additional_large_d_idxs
candidate_d_mask[additional_large_d_idxs] = True
large_d_mask = np.copy(candidate_d_mask)
if __debug__:
log(DEBUG-2, "Select %d seed nodes from large demand nodes %s"%
(min(len(candidate_d_idxs),K), str(candidate_d_idxs)))
seeds = []
selected_seeds_mask = np.zeros(len(d), dtype=bool)
# make depot like a seed -> maximize distance from it
selected_seeds_mask[0] = True
if len(candidate_d_idxs)<=K:
# if all candidates are needed, add them without checking the distances
for seed_idx in candidate_d_idxs:
seeds.append( points[seed_idx] )
selected_seeds_mask[seed_idx] = True
if __debug__:
log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that %s to be a seed"%\
(seed_idx,points[seed_idx][0],points[seed_idx][1],
"fills over the half of the capacity" if can_fit_only_1_mask[seed_idx]
else "is within "+str(trial)+" largest demands"))
candidate_d_idxs = []
select_from_non_large = False
while len(seeds)<K:
if not candidate_d_idxs:
candidate_d_mask = ~large_d_mask
candidate_d_mask[0]=False
candidate_d_idxs = np.where(candidate_d_mask)[0].tolist()
select_from_non_large = True
if __debug__:
log(DEBUG-2,"Ran out of nodes with large demand, select %d seed nodes from rest of the nodes %s using inter seed distances weighted by the node demand"%
(min(len(candidate_d_idxs), K-len(seeds)), str(candidate_d_idxs)))
# maximize the distance to other seeds and depot
if not | |
__doc__ = """ Rotation kernels Numpy implementation"""
import functools
from itertools import combinations
import numpy as np
from elastica._linalg import _batch_matmul
@functools.lru_cache(maxsize=1)
def _generate_skew_map(dim: int):
# TODO Documentation
# Preallocate
mapping_list = [None] * ((dim ** 2 - dim) // 2)
# Indexing (i,j), j is the fastest changing
# r = 2, r here is rank, we deal with only matrices
for index, (i, j) in enumerate(combinations(range(dim), r=2)):
# matrix indices
tgt_idx = dim * i + j
# Sign-bit to check order of entries
sign = (-1) ** tgt_idx
# idx in v
# TODO Wrong formulae, but works for two and three dimensions
src_idx = dim - (i + j)
# Check order to fill in the list
if sign < 0:
entry_t = (src_idx, j, i)
else:
entry_t = (src_idx, i, j)
mapping_list[index] = entry_t
return mapping_list
@functools.lru_cache(maxsize=1)
def _get_skew_map(dim):
    """Generates mapping from src to target skew-symmetric operator

    For input vector V and output Matrix M (represented in lexicographical index),
    we calculate mapping from

        |x|            |0 -z y|
    V = |y| to  M =    |z 0 -x|
        |z|            |-y x 0|

    in a dimension agnostic way.

    Returns a tuple of (vec_src, mat_i, mat_j) triplets sorted by vec_src
    for non-strided access in the source dimension (potentially faster copies).
    """
    mapping_list = _generate_skew_map(dim)
    # BUGFIX: sort a *copy*. `_generate_skew_map` is lru_cached and returns
    # its cached list object, so the previous in-place `.sort()` silently
    # mutated that shared cached value for every other caller.
    return tuple(sorted(mapping_list, key=lambda tup: tup[0]))
@functools.lru_cache(maxsize=1)
def _get_inv_skew_map(dim):
    """Mapping for reading a vector back out of a skew-symmetric matrix.

    Reorders the (vec_src, mat_i, mat_j) triplets of `_generate_skew_map`
    into (mat_i, mat_j, vec_tgt) form.
    """
    inverted = []
    for vec_src, mat_i, mat_j in _generate_skew_map(dim):
        inverted.append((mat_i, mat_j, vec_src))
    return tuple(inverted)
@functools.lru_cache(maxsize=1)
def _get_diag_map(dim):
"""Generates lexicographic mapping to diagonal in a serialized matrix-type
For input dimension dim we calculate mapping to * in Matrix M below
|* 0 0|
M = |0 * 0|
|0 0 *|
in a dimension agnostic way.
"""
# Preallocate
mapping_list = [None] * dim
# Store linear indices
for dim_iter in range(dim):
mapping_list[dim_iter] = dim_iter * (dim + 1)
return tuple(mapping_list)
def _skew_symmetrize(vector):
    """Build skew-symmetric matrices from a block of vectors.

    Parameters
    ----------
    vector : numpy.ndarray of shape (dim, blocksize)

    Returns
    -------
    output : numpy.ndarray of shape (dim, dim, blocksize) corresponding to
        [0, -z, y, z, 0, -x, -y , x, 0]

    Note
    ----
    Gets close to the hard-coded implementation in time but with slightly
    high memory requirement for iteration.

    For blocksize=128,
    hardcoded  : 5.9 µs ± 186 ns per loop
    this       : 6.19 µs ± 79.2 ns per loop
    """
    dim, blocksize = vector.shape
    out = np.zeros((dim, dim, blocksize))
    # Fill the upper entries from the vector, mirror with flipped sign.
    for src, row, col in _get_skew_map(dim):
        entry = vector[src]
        out[row, col] = entry
        out[col, row] = -entry
    return out
# This is purely for testing and optimization sake
# While calculating u^2, use u with einsum instead, as it is tad bit faster
def _skew_symmetrize_sq(vector):
"""
Generate the square of an orthogonal matrix from vector elements
Parameters
----------
vector : numpy.ndarray of shape (dim, blocksize)
Returns
-------
output : numpy.ndarray of shape (dim*dim, blocksize) corresponding to
[-(y^2+z^2), xy, xz, yx, -(x^2+z^2), yz, zx, zy, -(x^2+y^2)]
Note
----
Faster than hard-coded implementation in time with slightly high
memory requirement for einsum calculation.
For blocksize=128,
hardcoded : 23.1 µs ± 481 ns per loop
this version: 14.1 µs ± 96.9 ns per loop
"""
dim, _ = vector.shape
# First generate array of [x^2, xy, xz, yx, y^2, yz, zx, zy, z^2]
# across blocksize
# This is slightly faster than doing v[np.newaxis,:,:] * v[:,np.newaxis,:]
products_xy = np.einsum("ik,jk->ijk", vector, vector)
# No copy made here, as we do not change memory layout
# products_xy = products_xy.reshape((dim * dim, -1))
# Now calculate (x^2 + y^2 + z^2) across blocksize
# Interpret this as a contraction ji,ij->j with v.T, v
mag = np.einsum("ij,ij->j", vector, vector)
# Iterate over only the diagonal and subtract mag
# Somewhat faster (5us for 128 blocksize) but more memory efficient than doing :
# > eye_arr = np.ravel(np.eye(dim, dim))
# > eye_arr = eye_arr[:, np.newaxis] * mag[np.newaxis, :]
# > products_xy - mag
# This version is faster for smaller blocksizes <= 128
# Efficiently extracts only diagonal elements
# reshape returns a view in this case
np.einsum("iij->ij", products_xy)[...] -= mag
# # This version is faster for larger blocksizes > 256
# for diag_idx in _get_diag_map(dim):
# products_xy[diag_idx, :] -= mag
# We expect this version to be superior, but due to numpy's advanced
# indexing always returning a copy, rather than a view, it turns out
# to be more expensive.
# products_xy[_get_diag_map(dim, :] -= mag
return products_xy
def _get_skew_symmetric_pair(vector_collection):
    """Return the skew matrix U and its square U @ U for a vector block.

    Parameters
    ----------
    vector_collection : numpy.ndarray of shape (dim, blocksize)

    Returns
    -------
    (U, U_sq) : pair of numpy.ndarray of shape (dim, dim, blocksize)
    """
    skew = _skew_symmetrize(vector_collection)
    # Batched matrix product over the block dimension k.
    skew_sq = np.einsum("ijk,jlk->ilk", skew, skew)
    return skew, skew_sq
def _inv_skew_symmetrize(matrix):
    """
    Return the vector elements from a skew-symmetric matrix M

    Parameters
    ----------
    matrix : np.ndarray of dimension (dim, dim, blocksize)

    Returns
    -------
    vector : np.ndarray of dimension (dim, blocksize)

    Note
    ----
    Hardcoded : 2.28 µs ± 63.3 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each)
    This      : 2.91 µs ± 58.3 ns per loop (mean ± std. dev. of 7 runs, 100000 loops each)
    """
    dim, _, blocksize = matrix.shape
    vector = np.zeros((dim, blocksize))
    # The forward skew map writes consecutive vector slots into scattered
    # matrix positions, so here we read them back via the inverse mapping.
    for row, col, slot in _get_inv_skew_map(dim):
        vector[slot] = matrix[row, col]
    return vector
def _get_rotation_matrix(scale: float, axis_collection):
    """Build Rodrigues rotation matrices for a block of axis vectors.

    Parameters
    ----------
    scale : float
        Multiplier applied to each axis magnitude to obtain the rotation
        angle.
    axis_collection : numpy.ndarray of shape (dim, blocksize)
        Unnormalized rotation axes; the angle is scale * |axis|.

    Returns
    -------
    rot_mat : numpy.ndarray of shape (dim, dim, blocksize)
    """
    # Per-column |axis| via einsum: approx 2x faster than
    # np.linalg.norm(axis_collection, ord=2, axis=0) for blocksize 128.
    angle = np.sqrt(np.einsum("ij,ij->j", axis_collection, axis_collection))
    # Normalize *before* the angle is scaled, and without mutating the
    # caller's array (an in-place axis_collection /= angle would leak the
    # change back to user scope). The epsilon guards against division by
    # zero for (near-)zero axes — TODO confirm this tolerance suffices.
    u, u_sq = _get_skew_symmetric_pair(axis_collection / (angle + 1e-14))
    # Scale the angle in place (cheap, 1D) and form the Rodrigues prefixes.
    angle *= scale
    sin_prefix = np.sin(angle)
    versine_prefix = 1.0 - np.cos(angle)
    # Rotation matrix minus the identity term delta_ij.
    rot_mat = -sin_prefix * u + versine_prefix * u_sq
    # Add 1 along each diagonal; "iij->ij" yields a writable diagonal view,
    # equivalent in time/memory to an explicit loop over the diagonal.
    np.einsum("iij->ij", rot_mat)[...] += 1.0
    return rot_mat
def _rotate(director_collection, scale: float, axis_collection):
    """
    Does alibi rotations
    https://en.wikipedia.org/wiki/Rotation_matrix#Ambiguities

    Parameters
    ----------
    director_collection : numpy.ndarray, frames/directors to rotate
    scale : float, angle multiplier for the rotation axes
    axis_collection : numpy.ndarray, rotation axes per element

    Returns
    -------
    Rotated director collection (rotation matrix applied from the left).
    """
    rotation = _get_rotation_matrix(scale, axis_collection)
    # Rotation acts from the left on the directors (alibi convention).
    return _batch_matmul(rotation, director_collection)
def _inv_rotate(director_collection):
"""
Calculated rate of change using Rodrigues' formula
Parameters
----------
director_collection : The collection of frames/directors at every element,
numpy.ndarray of shape (dim, dim, n)
Returns
-------
vector_collection : The collection of axes around which the body rotates
numpy.ndarray of shape (dim, n)
Note
----
Routine bogged down by 6ms for index checking, gets
37.4 µs ± 1.04 µs per loop
"""
# Q_{i+i}Q^T_{i} collection
rotmat_collection = np.einsum(
"ijk, ljk->ilk", director_collection[:, :, 1:], director_collection[:, :, :-1]
)
# Q^T_{i+i}Q_{i} collection
# rotmat_collection = np.einsum(
# "jik, jlk->ilk", director_collection[:, :, 1:], director_collection[:, :, :-1]
# )
# Returns rate-of-change direction as a collection unit vectors
# unit vector skew-symmetrize the collection
# | | collection transpose
# | | |
vector_collection = _inv_skew_symmetrize(
rotmat_collection - np.einsum("ijk->jik", rotmat_collection)
)
# Returns magnitude of rotation along the above vector_collection
# theta vector Rodrigues formula from trace invariance Tr(R) = 1 + 2cos(\theta)
# | angle from trace trace calculation
# | | |
# theta_collection = np.arccos(0.5 * | |
kwargs dict and
# cares about presence/absence. So we build a dict to send.
kwargs = {}
if onlySucceeded:
# Check only successful jobs.
# Note that for selectors it is "successful" while for the
# actual object field it is "succeeded".
kwargs['field_selector'] = 'status.successful==1'
if token is not None:
kwargs['_continue'] = token
results = self.batchApi.list_namespaced_job(self.namespace, **kwargs)
for job in results.items:
if self._isJobOurs(job):
# This job belongs to us
yield job
# Don't go over the limit
seen += 1
if limit is not None and seen >= limit:
return
# Remember the continuation token, if any
token = getattr(results.metadata, 'continue', None)
if token is None:
# There isn't one. We got everything.
break
def _getPodForJob(self, jobObject):
"""
Get the pod that belongs to the given job, or None if the job's pod is
missing. The pod knows about things like the job's exit code.
:param kubernetes.client.V1Job jobObject: a Kubernetes job to look up
pods for.
:return: The pod for the job, or None if no pod is found.
:rtype: kubernetes.client.V1Pod
"""
token = None
# Work out what the return code was (which we need to get from the
# pods) We get the associated pods by querying on the label selector
# `job-name=JOBNAME`
query = 'job-name={}'.format(jobObject.metadata.name)
while True:
# We can't just pass e.g. a None continue token when there isn't
# one, because the Kubernetes module reads its kwargs dict and
# cares about presence/absence. So we build a dict to send.
kwargs = {'label_selector': query}
if token is not None:
kwargs['_continue'] = token
results = self.coreApi.list_namespaced_pod(self.namespace, **kwargs)
for pod in results.items:
# Return the first pod we find
return pod
# Remember the continuation token, if any
token = getattr(results.metadata, 'continue', None)
if token is None:
# There isn't one. We got everything.
break
# If we get here, no pages had any pods.
return None
def _getLogForPod(self, podObject):
"""
Get the log for a pod.
:param kubernetes.client.V1Pod podObject: a Kubernetes pod with one
container to get the log from.
:return: The log for the only container in the pod.
:rtype: str
"""
return self.coreApi.read_namespaced_pod_log(podObject.metadata.name,
namespace=self.namespace)
def _getIDForOurJob(self, jobObject):
"""
Get the JobID number that belongs to the given job that we own.
:param kubernetes.client.V1Job jobObject: a Kubernetes job object that is a job we issued.
:return: The JobID for the job.
:rtype: int
"""
return int(jobObject.metadata.name[len(self.jobPrefix):])
def getUpdatedBatchJob(self, maxWait):
entry = datetime.datetime.now()
result = self._getUpdatedBatchJobImmediately()
if result is not None or maxWait == 0:
# We got something on the first try, or we only get one try
return result
# Otherwise we need to maybe wait.
if self.enableWatching:
# Try watching for something to happen and use that.
w = kubernetes.watch.Watch()
if self.enableWatching:
for j in self._ourJobObjects():
logger.debug(j.spec.template.metadata.labels[u'job-name'], type(j.spec.template.metadata.labels[u'job-name']))
for event in w.stream(self.coreApi.list_namespaced_pod, self.namespace, timeout_seconds=maxWait):
pod = event['object']
if pod.metadata.name.startswith(self.jobPrefix):
logger.info("Event: %s %s %s" % (event['type'],event['object'].kind, event['object'].metadata.name))
if pod.status.phase == 'Failed' or pod.status.phase == 'Succeeded':
containerStatuses = pod.status.container_statuses
logger.info("FINISHED")
if containerStatuses is None or len(containerStatuses) == 0:
logger.debug("No job container statuses for job %s" % (pod.metadata.owner_references[0].name))
return (int(pod.metadata.owner_references[0].name[len(self.jobPrefix):]), -1, 0)
logger.info("REASON: %s Eixt Code: %s" % (pod.status.container_statuses[0].state.terminated.reason,
pod.status.container_statuses[0].state.terminated.exit_code))
jobID = int(pod.metadata.owner_references[0].name[len(self.jobPrefix):])
terminated = pod.status.container_statuses[0].state.terminated
runtime = (terminated.finished_at - terminated.started_at).total_seconds()
result = (jobID, terminated.exit_code, runtime)
self.batchApi.delete_namespaced_job(pod.metadata.owner_references[0].name,
self.namespace,
propagation_policy='Foreground')
self._waitForJobDeath(pod.metadata.owner_references[0].name)
return result
else:
continue
else:
# Try polling instead
while result is None and (datetime.datetime.now() - entry).total_seconds() < maxWait:
# We still have nothing and we haven't hit the timeout.
# Poll
result = self._getUpdatedBatchJobImmediately()
if result is None:
# Still nothing. Wait a second, or some fraction of our max wait time.
time.sleep(min(maxWait/2, 1.0))
# When we get here, either we found something or we ran out of time
return result
def _getUpdatedBatchJobImmediately(self):
"""
Return None if no updated (completed or failed) batch job is currently
        available, and jobID, exitCode, runtime if such a job can be found.
"""
# See if a local batch job has updated and is available immediately
local_tuple = self.getUpdatedLocalJob(0)
if local_tuple:
# If so, use it
return local_tuple
# Otherwise we didn't get a local job.
# Go looking for other jobs
# Everybody else does this with a queue and some other thread that
# is responsible for populating it.
# But we can just ask kubernetes now.
# Find a job that is done, failed, or stuck
jobObject = None
# Put 'done', 'failed', or 'stuck' here
chosenFor = ''
for j in self._ourJobObjects(onlySucceeded=True, limit=1):
# Look for succeeded jobs because that's the only filter Kubernetes has
jobObject = j
chosenFor = 'done'
if jobObject is None:
for j in self._ourJobObjects():
# If there aren't any succeeded jobs, scan all jobs
# See how many times each failed
failCount = getattr(j.status, 'failed', 0)
if failCount is None:
# Make sure it is an int
failCount = 0
if failCount > 0:
# Take the first failed one you find
jobObject = j
chosenFor = 'failed'
break
if jobObject is None:
# If no jobs are failed, look for jobs with pods with
# containers stuck in Waiting with reason ImagePullBackOff
for j in self._ourJobObjects():
pod = self._getPodForJob(j)
if pod is None:
# Skip jobs with no pod
continue
# Get the statuses of the pod's containers
containerStatuses = pod.status.container_statuses
if containerStatuses is None or len(containerStatuses) == 0:
# Pod exists but has no container statuses
# This happens when the pod is just "Scheduled"
# ("PodScheduled" status event) and isn't actually starting
# to run yet.
# Can't be stuck in ImagePullBackOff
continue
waitingInfo = getattr(getattr(pod.status.container_statuses[0], 'state', None), 'waiting', None)
if waitingInfo is not None and waitingInfo.reason == 'ImagePullBackOff':
# Assume it will never finish, even if the registry comes back or whatever.
# We can get into this state when we send in a non-existent image.
# See https://github.com/kubernetes/kubernetes/issues/58384
jobObject = j
chosenFor = 'stuck'
logger.warning('Failing stuck job; did you try to run a non-existent Docker image?'
' Check TOIL_APPLIANCE_SELF.')
break
if jobObject is None:
# Say we couldn't find anything
return None
# Otherwise we got something.
# Work out what the job's ID was (whatever came after our name prefix)
jobID = int(jobObject.metadata.name[len(self.jobPrefix):])
# Grab the pod
pod = self._getPodForJob(jobObject)
if pod is not None:
if chosenFor == 'done' or chosenFor == 'failed':
# The job actually finished or failed
# Get the statuses of the pod's containers
containerStatuses = pod.status.container_statuses
if containerStatuses is None or len(containerStatuses) == 0:
# No statuses available.
# This happens when a pod is "Scheduled". But how could a
# 'done' or 'failed' pod be merely "Scheduled"?
# Complain so we can find out.
logger.warning('Exit code and runtime unavailable; pod has no container statuses')
logger.warning('Pod: %s', str(pod))
exitCode = -1
runtime = 0
else:
# Get the termination info from the pod's main (only) container
terminatedInfo = getattr(getattr(containerStatuses[0], 'state', None), 'terminated', None)
if terminatedInfo is None:
logger.warning('Exit code and runtime unavailable; pod stopped without container terminating')
logger.warning('Pod: %s', str(pod))
exitCode = -1
runtime = 0
else:
# Extract the exit code
exitCode = terminatedInfo.exit_code
# Compute how long the job ran for (subtract datetimes)
# We need to look at the pod's start time because the job's
# start time is just when the job is created.
# And we need to look at the pod's end time because the
# job only gets a completion time if successful.
runtime = (terminatedInfo.finished_at -
pod.status.start_time).total_seconds()
if chosenFor == 'failed':
# Warn the user with the failed pod's log
# TODO: cut this down somehow?
logger.warning('Log from failed pod: %s', self._getLogForPod(pod))
else:
# The job has gotten stuck
assert chosenFor == 'stuck'
# Synthesize an | |
:] <= self.ymaxgoal):
goalIndices.append(self.nodeList.index(node))
# Select a random node from the goal area
goalNodeIndex = random.choice(goalIndices)
return goalNodeIndex
###########################################################################
def GenerateSamplePath(self, goalIndex):
'''
Generate a list of RRT nodes from the root to a node with index goalIndex
Inputs:
goalIndex: index of RRT node which is set as the goal
Outputs:
pathNodesList: list of RRT nodes from root node to goal node (type: python list (element type: DR_RRTStar_Node)) # TODO: check this
'''
pathNodesList = [self.nodeList[goalIndex]]
# Loop until the root node (whose parent is None) is reached
while self.nodeList[goalIndex].parent is not None:
# Set the index to its parent
goalIndex = self.nodeList[goalIndex].parent
# Append the parent node to the pathNodeList
pathNodesList.append(self.nodeList[goalIndex])
# Finally append the path with root node
pathNodesList.append(self.nodeList[0])
return pathNodesList
###########################################################################
def PlotObstacles(self): # TODO: COME BACK AND EDIT THIS
"""
Plots the obstacles and the starting position.
"""
plot_sampled_nodes = False
plot_tree_node_centers = True
fig = plt.figure(figsize=[6, 6])
ax = fig.add_subplot(1, 1, 1) # create an axes object in the figure
# ax.axis('equal')
self.ax = ax
ax = plot_env(ax)
ax.scatter(self.start.means[-1, 0, :], self.start.means[-1, 1, :], s=200, c='tab:blue', marker='o', label='Start')
# plot sampled nodes
x_sampled = []
y_sampled = []
xFreePoints, yFreePoints, thetaFreePoints = self.freePoints
for i in range(len(xFreePoints)):
if not i in self.dropped_samples: # skip nodes that became infeasible after saturation
x_sampled.append(xFreePoints[i])
y_sampled.append(yFreePoints[i])
if plot_sampled_nodes:
# plt.plot(x_sampled, y_sampled,'o', color='red', markersize=3)
plt.plot(x_sampled, y_sampled, 'o', color='salmon', markersize=5)
# add crosses on sampled nodes that were added to the tree
x_added = []
y_added = []
for k, node in enumerate(self.nodeList):
x_added.append(node.means[-1, 0, :][0])
y_added.append(node.means[-1, 1, :][0])
if plot_tree_node_centers:
# plt.plot(x_added, y_added, 'x', color='black', markersize=5)
plt.plot(x_added, y_added, 'o', color='black', markersize=3)
    def DrawGraph(self, lastFlag): # TODO: COME BACK AND EDIT THIS
        """
        Updates the Plot with uncertainty ellipse and trajectory at each time step.
        Draws the environment via PlotObstacles, then for every non-root node plots
        its trajectory polyline and one (or more) ellipses chosen by the three
        local plot_* flags; the ellipses are rendered as one EllipseCollection.
        Input Parameters:
        lastFlag: Flag to denote if its the last iteration; when False, the
                  collection and line objects are removed after drawing/saving
                  so the next call starts from a clean plot.
        """
        # Local rendering switches: what kind of ellipse (if any) to draw per node.
        plot_covars = False
        plot_only_last_covar = True
        plot_dr_check_ellipse = True
        # Accumulators for the EllipseCollection (one entry per ellipse to draw)
        xValues = []
        yValues = []
        widthValues = []
        heightValues = []
        angleValues = []
        lineObjects = []
        # Plot the environment with the obstacles and the starting position
        self.PlotObstacles()
        ax = self.ax
        # Convert risk bounds ALFA into per-obstacle DR padding factors:
        # delta_i = sqrt((1 - alpha_i) / alpha_i)
        alpha = np.array(ALFA, float)
        delta = (1 - alpha) / alpha
        delta = delta ** (0.5)
        eps = 0.0001
        all_deltas_same = all(delta[0]-eps <= elt <= delta[0]+eps for elt in list(delta))
        if not all_deltas_same:
            # if not all risk bounds are the same, plotting the dr padding on the robot doesn't make sense
            # (different paddings for every obstacle)
            plot_dr_check_ellipse = False
        for ellipseNode in self.nodeList:
            # Skip the root (parent is None) and any empty slots
            if ellipseNode is not None and ellipseNode.parent is not None:
                ellNodeShape = ellipseNode.means.shape
                xPlotValues = []
                yPlotValues = []
                # Prepare the trajectory x and y vectors and plot them
                for k in range(ellNodeShape[0]):
                    xPlotValues.append(ellipseNode.means[k, 0, 0])
                    yPlotValues.append(ellipseNode.means[k, 1, 0])
                # Plotting the risk bounded trajectories
                lx, = plt.plot(xPlotValues,
                               yPlotValues,
                               color='#636D97',
                               linewidth=1.0)
                lineObjects.append(lx)
                if not plot_covars and not plot_dr_check_ellipse: # do not plot covars or dr_coll check
                    # Degenerate (zero-size) ellipse: records position only
                    alfa = math.atan2(ellipseNode.means[-1, 1, 0],
                                      ellipseNode.means[-1, 0, 0])
                    xValues.append(ellipseNode.means[-1, 0, 0])
                    yValues.append(ellipseNode.means[-1, 1, 0])
                    widthValues.append(0)
                    heightValues.append(0)
                    angleValues.append(alfa * 360)
                elif not plot_covars and plot_dr_check_ellipse: # do not plot covars but plot dr_coll check ellipse
                    # Plot only the last ellipse in the trajectory
                    alfa = math.atan2(ellipseNode.means[-1, 1, 0],
                                      ellipseNode.means[-1, 0, 0])
                    elcovar = np.asarray(ellipseNode.covars[-1, :, :])  # covariance
                    # plot node dr-check (Check this... It might not make sense) TODO: CHECK
                    xValues.append(ellipseNode.means[-1, 0, 0])
                    yValues.append(ellipseNode.means[-1, 1, 0])
                    xDir = np.array([1, 0, 0])
                    yDir = np.array([0, 1, 0])
                    Delta = delta[-1]  # use environment level of padding
                    major_ax_len = (Delta * math.sqrt(
                        xDir.T @ elcovar @ xDir)) * 2  # (.) * 2 <-- because we want width of ellipse
                    minor_ax_len = (Delta * math.sqrt(
                        yDir.T @ elcovar @ yDir)) * 2  # --> padding in right and left directions added
                    widthValues.append(major_ax_len)
                    heightValues.append(minor_ax_len)
                    angleValues.append(alfa * 360)
                elif plot_only_last_covar and not plot_dr_check_ellipse: # plot covars but only at final node
                    # Plot only the last ellipse in the trajectory
                    alfa = math.atan2(ellipseNode.means[-1, 1, 0],
                                      ellipseNode.means[-1, 0, 0])
                    elcovar = np.asarray(ellipseNode.covars[-1, :, :])  # covariance
                    # plot node covariance: axes lengths from the eigenvalues of the
                    # 2x2 position block of the covariance
                    elE, elV = LA.eig(elcovar[0:2, 0:2])
                    xValues.append(ellipseNode.means[-1, 0, 0])
                    yValues.append(ellipseNode.means[-1, 1, 0])
                    widthValues.append(math.sqrt(elE[0]))
                    heightValues.append(math.sqrt(elE[1]))
                    angleValues.append(alfa * 360)
                elif plot_only_last_covar and plot_dr_check_ellipse: # plot ellipse representing dr-check but only at final node
                    # Plot only the last ellipse in the trajectory
                    alfa = math.atan2(ellipseNode.means[-1, 1, 0],
                                      ellipseNode.means[-1, 0, 0])
                    elcovar = np.asarray(ellipseNode.covars[-1, :, :])  # covariance
                    # plot node dr-check (Check this... It might not make sense) TODO: CHECK
                    xValues.append(ellipseNode.means[-1, 0, 0])
                    yValues.append(ellipseNode.means[-1, 1, 0])
                    xDir = np.array([1, 0, 0])
                    yDir = np.array([0, 1, 0])
                    Delta = delta[-1]  # use environment level of padding
                    major_ax_len = (Delta * math.sqrt(xDir.T @ elcovar @ xDir)) * 2  # (.) * 2 <-- because we want width of ellipse
                    minor_ax_len = (Delta * math.sqrt(yDir.T @ elcovar @ yDir)) * 2  # --> padding in right and left directions added
                    widthValues.append(major_ax_len)
                    heightValues.append(minor_ax_len)
                    angleValues.append(alfa * 360)
                    # plot node covariance (a second ellipse at the same center)
                    elE, elV = LA.eig(elcovar[0:2, 0:2])
                    xValues.append(ellipseNode.means[-1, 0, 0])
                    yValues.append(ellipseNode.means[-1, 1, 0])
                    widthValues.append(math.sqrt(elE[0]))
                    heightValues.append(math.sqrt(elE[1]))
                    angleValues.append(alfa * 360)
                elif not plot_only_last_covar: # plot covars (plot_covars=True) at all nodes (plot_only_last_covar=False)
                    for k in range(ellNodeShape[0]):
                        # Plot only the last ellipse in the trajectory
                        # NOTE(review): centers use means[-1, ...] while the
                        # covariance/angle use index k — looks inconsistent; confirm.
                        alfa = math.atan2(ellipseNode.means[k, 1, 0],
                                          ellipseNode.means[k, 0, 0])
                        elcovar = np.asarray(ellipseNode.covars[k, :, :])
                        elE, elV = LA.eig(elcovar[0:2, 0:2])
                        xValues.append(ellipseNode.means[-1, 0, 0])
                        yValues.append(ellipseNode.means[-1, 1, 0])
                        widthValues.append(math.sqrt(elE[0]))
                        heightValues.append(math.sqrt(elE[1]))
                        angleValues.append(alfa * 360)
        # Plot the Safe Ellipses as a single collection for efficiency
        XY = np.column_stack((xValues, yValues))
        ec = EllipseCollection(widthValues,
                               heightValues,
                               angleValues,
                               units='x',
                               offsets=XY,
                               facecolors="#C59434",
                               # edgecolors="b",
                               # edgecolors="#C59434",
                               transOffset=ax.transData)
        ec.set_alpha(0.3)
        ax.add_collection(ec)
        plt.pause(1.0001)
        if SAVEDATA:
            plot_name = 'plot_tree_' + FILEVERSION + '_' + SAVETIME + '.png'
            plot_name = os.path.join(SAVEPATH, plot_name)
            plt.savefig(plot_name)
        if not lastFlag:
            # Not the final iteration: clear what we drew so the next call redraws
            ec.remove()
            for lx in lineObjects:
                lx.remove()
###########################################################################
###########################################################################
def SteerAndGenerateTrajAndCost(self, from_idx=None, from_node=None, to_idx=None, to_node=None):
"""
Apply steering function to navigate from a starting node in the tree to a given node
Perform a collision check
Return the trajectory and cost between the two nodes
Inputs:
from_idx : index of node in the tree to navigate from
to_node : node to be added (DR_RRTStar_Node)
Outputs:
- Steering success flag (Type: bool)
- Prepared trajectory (xTrajs) returned by PrepareTrajectory (type: # TODO: fill this)
- Trajectory cost (type: float # TODO: CHECK THIS)
The three outputs can have one of two options
- True, xTrajs, trajCost: if steering succeeds (True), a trajectory is prepared (xTrajs); its cost is trajCost
- return False, [], 0: if steering fails (False), the other parameters are set to bad values [] and 0 # TODO: consider replacing 0 with inf
"""
# Steer from nearestNode to the randomNode using LQG Control
# Returns a list of node points along the trajectory and cost
# Box the steer parameters
if from_idx == None: # from index not given
from_node_chosen = from_node
else: # from index given
from_node_chosen = self.nodeList[from_idx]
if to_idx == None: # to index not given
to_node_chosen = to_node
else: # to index given
to_node_chosen = self.nodeList[to_idx]
steerParams = {"fromNode": from_node_chosen,
"toNode": to_node_chosen,
"Params": self.SteerSetParams}
steerOutput = self.nonlinsteer(steerParams)
# Unbox the steer function output
meanValues = steerOutput["means"]
covarValues = steerOutput["covars"]
trajCost = steerOutput["cost"]
steerResult = steerOutput["steerResult"]
inputCommands = steerOutput["inputCommands"]
# If the steering law fails, force next iteration with different random sample
if steerResult == False:
# print('NLP Steering Failed XXXXXXXXX')
return False, [], 0
# Proceed only if the steering law succeeds
# Prepare the trajectory
xTrajs = self.PrepareTrajectory(meanValues, covarValues, inputCommands)
# Check for Distributionally Robust Feasibility of the whole trajectory
collisionFreeFlag = self.PerformCollisionCheck(xTrajs)
# If a collision was detected, stop and move on
if not collisionFreeFlag:
# print('DR Collision Detected @@@@@@@@@')
return False, [], 0
return True, xTrajs, trajCost
def SteerAndGenerateTrajAndCostWithFinalHeading(self, from_idx=None, from_node=None, to_idx=None, to_node=None):
"""
Same as SteerAndGenerateTrajAndCost but uses the steering params, and hence the steering law, with the heading enforced to match the set value
"""
| |
<filename>backend/machine_learning_main.py
import csv
import json
import numpy as np
import time
from data_class import DataClass
import scipy
# from sklearn.metrics import classification_report
from sklearn.metrics.pairwise import pairwise_distances_argmin
from sklearn.metrics import silhouette_samples, silhouette_score
from sklearn.cluster import KMeans
import numpy as np
import math
import random
from math import sqrt
from os import listdir
from os.path import isfile, join
import copy
import itertools
import matplotlib.pylab as plt
from modules.data.constants import Constants
from modules import aux_fn as ml
from modules.dynamic_clustering import ClusteringClass
# Module-level fallback clustering engines, used when use_scikit is False.
# NOTE(review): dcluster2 is not referenced in this excerpt — presumably a
# second independent clustering context used elsewhere; confirm before removing.
dcluster = ClusteringClass()
dcluster2 = ClusteringClass()
class MachineLearningMain:
    def __init__(self, use_scikit=True):
        """
        Build the clustering facade and discover the per-node sensor files.
        :param use_scikit: True -> use scikit-learn KMeans; False -> use the
                           dynamic ClusteringClass fallback (module-level dcluster)
        """
        self.dc = DataClass()
        self.data = []                 # per-node raw data (filled by read_data)
        self.node_data = []            # per-node info dicts (filled by read_data)
        self.assignments_series = []   # series -> cluster assignments
        self.min_final = None          # global display min (when available)
        self.max_final = None          # global display max (when available)
        # One data file per measurement node
        self.files = [f for f in listdir("data/sensors") if isfile(join("data/sensors", f))]
        print(self.files)
        self.n_nodes = len(self.files)
        self.n_series_disp = 10        # max number of series sent to the UI
        # self.i_run = int(self.n_nodes/2)
        self.i_run2 = 1
        # self.use_previous_cluster_data = False
        self.centroids = None
        self.final_centroids = None    # global centroids after clustering
        self.final_clusters = None     # fitted estimator (or labels, fallback mode)
        self.clusters = []
        self.node_centroids = []       # [{"id": node_id, "centroids": ...}, ...]
        self.partial_sample_index = 0  # cursor for the "partial sample" simulation
        self.use_scikit = use_scikit
def set_lib(self, use_scikit):
self.use_scikit = use_scikit
def init(self):
self.final_centroids = None
self.centroids = None
self.read_data()
# self.assign_class_to_nodes()
    def assign_class_to_nodes(self):
        """
        Label every node with the cluster ("class") that the majority of its
        time series were assigned to, and copy that cluster's demand/priority
        onto the node. Consumes self.assignments_series in node order.
        Returns the updated self.node_data.
        NOTE(review): if assignments_series is shorter than the total number of
        series, trailing entries of node_assignments stay None and may become
        the majority — node["class"] would then be None and the
        self.clusters[...] lookups below would fail. Confirm callers guarantee
        full coverage.
        """
        print("machine learning: assign class to nodes")
        # Global cursor into assignments_series, advanced across all nodes
        assignment_index = 0
        node_id = 0
        for node in self.node_data:
            cluster = 0
            # get average cluster index for node
            n_series_node = len(self.data[node_id]["series"])
            # get the assignments for the time series corresponding to the node
            node_assignments = [None] * n_series_node
            for i in range(n_series_node):
                # cluster += self.assignments_series[assignment_index]["cluster"]
                if assignment_index < len(self.assignments_series):
                    node_assignments[i] = self.assignments_series[assignment_index]["cluster"]
                assignment_index += 1
            # node["class"] = int(cluster/n_series_node)
            # get class with max number of occurences in list (majority vote)
            node["class"] = max(node_assignments, key=node_assignments.count)
            node["demand"] = int(self.clusters[node["class"]]["avg_demand"])
            node["priority"] = int(self.clusters[node["class"]]["priority"])
            # print(node)
            node_id += 1
        return self.node_data
def get_info(self, node_id=None):
if node_id is None:
info = {
"n_nodes": len(self.node_data),
"nodes": self.node_data
}
else:
info = self.node_data[node_id]
return info
def read_data(self):
"""
read data from files
each file has the data for a measurement node
over a time frame of n days, for every hour
:return:
"""
self.data = []
self.node_data = []
for i, f in enumerate(self.files[0:self.n_nodes]):
# print(str(i) + ". reading: " + f)
fdata = self.dc.read_data(join("data/sensors/", f))
data = copy.copy(fdata)
self.data.append(data)
node = Constants.NODE_MODEL
node["id"] = i
self.node_data.append(copy.deepcopy(node))
    def get_raw_data(self, node=0):
        """
        Return (rows, info) for one node's raw series: the first n_series_disp
        rows as plain lists, plus an info dict with counts, timing and min/max.
        :param node: index into self.data
        """
        t_start = time.time()
        # self.read_data()
        data = self.data[node]
        # Number of series (rows) for this node
        imax = data["series"].shape[0]
        imax_all = 0
        # NOTE(review): `data` is a dict, so len(data) is the number of KEYS,
        # not the number of series — this loop and the "n_nodes" field below
        # look wrong (probably meant range(imax) / len(self.data)); confirm
        # against the frontend before changing.
        for i in range(len(data)):
            data_array = data["series"][i]
            imax_all += data_array.shape[0]
        # print('imax: ' + str(imax))
        t_end = time.time()
        # NOTE: shadows the builtins min/max (local display bounds)
        min = int(np.min(data["series"]))
        max = int(np.max(data["series"]))
        dt = t_end - t_start
        info = {
            "description": "Raw data",
            "details": {
                "node": node,
                "n_series": imax,
                "n_nodes": len(data),
                "n_series_total": imax_all,
                "dt": int(dt*1000),
                "min": min,
                "max": max
            },
            "headers": np.ndarray.tolist(data["headers"]), "dt": dt, "lines": data["series"].shape[0],
            "columns": data["series"].shape[1]}
        return (np.ndarray.tolist(data["series"][:self.n_series_disp]), info)
def get_array_of_arrays(self, a):
array = []
for ag in a:
for ag1 in ag:
array.append(ag1)
return array
def get_display_data(self, d, global_scale=False):
if d is not None:
# centroids = d[0]
# info = d[1]
# return np.ndarray.tolist(centroids[:self.n_series_disp]), info
ddata = d[0]
info = d[1]
start = len(ddata) - self.n_series_disp - 1
if start < 0:
start = 0
end = len(ddata)
# start = 0
# end = len(ddata)
# if end > self.n_series_disp - 1:
# end = self.n_series_disp - 1
ddata = ddata[start:end]
if global_scale and self.min_final is not None:
# print("use global scale")
min = self.min_final
max = self.max_final
else:
min = int(np.min(ddata))
max = int(np.max(ddata))
info["details"]["min"] = min
info["details"]["max"] = max
return np.ndarray.tolist(ddata), info
else:
return None
    def get_centroids(self, data, n_clusters=8, init=None):
        """
        Cluster `data` and return (centroids, a).
        scikit branch: `a` is the fitted KMeans estimator; when n_clusters is
        None the best k in [2, 10) is chosen by maximum silhouette score.
        fallback branch: `a` is whatever k_means_clust_dynamic returns alongside
        the centroids (assignments — see get_assignments).
        NOTE(review): the two branches return different types for `a`; callers
        must go through get_assignments, which handles both.
        """
        if self.use_scikit:
            if n_clusters is not None:
                if init is not None:
                    kmeans = KMeans(n_clusters=n_clusters, init=init)
                else:
                    kmeans = KMeans(n_clusters=n_clusters)
            else:
                # Search for the best cluster count by silhouette score
                n_clusters_range = range(2, 10)
                max_silhouette_avg = [0] * len(n_clusters_range)
                # data = np.array(data)
                for (i, k) in enumerate(n_clusters_range):
                    kmeans = KMeans(n_clusters=k)
                    a = kmeans.fit_predict(data)
                    # print(data.shape)
                    # print(a)
                    # The silhouette_score gives the average value for all the samples.
                    # This gives a perspective into the density and separation of the formed
                    # clusters
                    silhouette_avg = silhouette_score(data, a)
                    # print("For n_clusters =", k,
                    #       "The average silhouette_score is :", silhouette_avg)
                    max_silhouette_avg[i] = silhouette_avg
                # Keep the k with the highest average silhouette
                n_clusters = n_clusters_range[max_silhouette_avg.index(max(max_silhouette_avg))]
                kmeans = KMeans(n_clusters=n_clusters)
            a = kmeans.fit(data)
            centroids = a.cluster_centers_
            return centroids, a
        else:
            if n_clusters is None:
                n_clusters = 3
            # Dynamic (module-level) clustering fallback
            dcluster.reinit(data, n_clusters)
            # dcluster.add_new_data(data, n_clusters)
            centroids, a = dcluster.k_means_clust_dynamic()
            # print(centroids)
            return centroids, a
def get_assignments(self, a, data):
if self.use_scikit:
return a.predict(data)
else:
return a
def assign_sample_to_cluster(self, node_id, sample_id):
data = self.data[node_id]["series"]
data1 = data[sample_id]
data1 = [data1]
assignments = self.get_assignments(self.final_clusters, data1)
return assignments[0]
def assign_partial_sample_to_cluster(self, node_id, sample_id, init=False):
data = list(self.data[node_id]["series"][sample_id])
if init:
self.partial_sample_index = 0
index = self.partial_sample_index
min_dist = 0
min_index = 0
for (i, c) in enumerate(self.final_centroids):
d = ml.euclid_dist(data[0: index], c[0: index])
if i == 0:
min_dist = d
else:
if d < min_dist:
min_dist = d
min_index = i
partial_time_series = [0] * len(data)
partial_time_series[0: index] = data[0: index]
assignment = min_index
if self.partial_sample_index < len(data) - 1:
self.partial_sample_index += 1
else:
self.partial_sample_index = 0
# # get assignments of time series to the final clusters
partial_time_series = np.array(partial_time_series)
return assignment, partial_time_series
def assign_partial_sample_to_cluster_default(self, node_id, sample_id, init=False):
data = list(self.data[node_id]["series"][sample_id])
if init:
self.partial_sample_index = 0
data1 = [0] * len(data)
partial_time_series = [0] * len(data)
# print(data1)
cluster_mean = list(np.mean(self.final_centroids, axis=0))
# print(cluster_mean)
# print(data)
for i in range(0, len(data)):
if i <= self.partial_sample_index:
data1[i] = data[i]
partial_time_series[i] = data[i]
elif i > self.partial_sample_index:
data1[i] = cluster_mean[i]
assignments = self.get_assignments(self.final_clusters, [data1])
if self.partial_sample_index < len(data1) - 1:
self.partial_sample_index += 1
else:
self.partial_sample_index = 0
# # get assignments of time series to the final clusters
partial_time_series = np.array(partial_time_series)
return assignments[0], partial_time_series
def run_clustering_on_partial_sample(self, node_id, sample_id, init=False):
assignment, partial_time_series = self.assign_partial_sample_to_cluster(node_id, sample_id, init)
min = int(np.min(partial_time_series))
max = int(np.max(partial_time_series))
info = {
"description": "Partial node data loading vs global clusters",
"headers": ["new sample"],
"dt": 0,
"details": {
"node_id": node_id,
"node_sample": sample_id,
"assignment": int(assignment),
"min": min,
"max": max
},
"assignments": None}
# print(partial_time_series)
partial_time_series = [list(partial_time_series)]
for (i, c) in enumerate(self.final_centroids):
partial_time_series.append(list(c))
info["headers"].append("cluster " + str(i))
partial_time_series = np.array(partial_time_series)
return partial_time_series, info
def update_node_clusters_with_partial_sample(self, node_id, sample_id, init=False):
data = self.node_centroids[node_id]["centroids"]
info = {
"description": "Node clusters loading vs global clusters",
"headers": ["data"],
"dt": 0,
"details": {
"node_id": node_id,
"node_sample": sample_id,
"min": 0,
"max": 0
},
"assignments": None}
# print(partial_time_series)
# partial_time_series = [list(partial_time_series)]
# for (i, c) in enumerate(self.final_centroids):
# partial_time_series.append(list(c))
# info["headers"].append("cluster " + str(i))
#
# partial_time_series = np.array(partial_time_series)
return data, info
    def run_clustering_on_node_id(self, node_id, nclusters, partial_sample_until_id=None, add_deviation_value=None):
        """
        Run clustering on specified node. The data from the node is an array of arrays
        (for each day there is an array of 24 values)
        The result is the consumer behaviour over the analyzed time frame
        :param node_id: index into self.data
        :param nclusters: requested cluster count (None lets get_centroids pick)
        :param partial_sample_until_id: if set, cluster only the first N samples
        :param add_deviation_value: if set, offset one sample by this amount
        :return: (centroids ndarray, info dict, data used) or ([], None, data)
                 when nclusters exceeds the number of samples
        """
        t_start = time.time()
        # print(self.data)
        # Deep copy so the deviation below never touches the stored data
        data = copy.deepcopy(self.data[node_id]["series"])
        if partial_sample_until_id is not None:
            data = data[0:partial_sample_until_id]
            if add_deviation_value is not None:
                # NOTE(review): after the slice above, data has exactly
                # partial_sample_until_id elements, so this index is out of
                # range — likely meant [partial_sample_until_id - 1] (the last
                # retained sample). Confirm intent before fixing.
                data[partial_sample_until_id]+=add_deviation_value
        if nclusters is not None and nclusters > len(data):
            # Cannot form more clusters than samples
            print("node " + str(node_id) + "nclusters > len(data): " + str(nclusters) + "," + str(len(data)))
            return [], None, data
        res = self.get_centroids(data, nclusters)
        centroids = res[0]
        nc = len(centroids)
        centroids_np = np.array(centroids)
        desc = "Clusters from all data (single clustering)"
        # assign each time series to a cluster
        # NOTE(review): assignments is never populated, so the loop below never
        # runs and assignments_series is always empty — dead code or an
        # unfinished feature; confirm.
        assignments = []
        headers = []
        for i in range(len(centroids_np)):
            headers.append("cluster " + str(i))
        # the assignments of the data series to the clusters
        assignments_series = [None] * len(assignments)
        for (i, a) in enumerate(assignments):
            assignments_series[i] = {
                "series": i,
                "cluster": int(assignments[i])
            }
        t_end = time.time()
        dt = t_end - t_start
        # NOTE: shadows the builtins min/max (display bounds)
        min = int(np.min(centroids_np))
        max = int(np.max(centroids_np))
        # Upsert the centroids into the per-node cache
        append = True
        for n in self.node_centroids:
            if n["id"] == node_id:
                n["centroids"] = centroids_np
                append = False
                break
        if append:
            self.node_centroids.append({
                "id": node_id,
                "centroids": centroids_np
            })
        info = {
            "description": desc, "headers": headers,
            "dt": t_end - t_start,
            "details": {
                "node": node_id,
                "new_node": node_id,
                "n_clusters": nc,
                "n_nodes": len(self.data),
                "dt": int(dt * 1000),
                "min": min,
                "max": max
            },
            "assignments": assignments_series}
        return centroids_np, info, data
def run_clustering_on_node_range(self, r, nclusters):
"""
Run clustering on specified node range. The data from a node is an array of arrays
(for each day there | |
0
APPLICANT = 1
INVITEE = 2
class GroupDateRange(Enum):
    """Creation-date age buckets used to filter group searches (see GroupQuery.creation_date)."""
    ALL = 0
    PAST_DAY = 1
    PAST_WEEK = 2
    PAST_MONTH = 3
    PAST_YEAR = 4
@dt.dataclass(frozen=True)
class GroupV2Card:
    """A small infocard of group information, usually used for when a list of
    groups are returned."""
    about: str
    avatar_path: str
    capabilities: "Capabilities"
    clan_info: "GroupV2ClanInfo"
    creation_date: str
    group_id: int
    group_type: "GroupType"
    locale: str
    member_count: int
    membership_option: "MembershipOption"
    motto: str
    name: str
    theme: str
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this card to a mapping keyed by the API's camelCase field names."""
        return {
            "groupId": to_json(self.group_id),
            "name": to_json(self.name),
            "groupType": to_json(self.group_type),
            "creationDate": to_json(self.creation_date),
            "about": to_json(self.about),
            "motto": to_json(self.motto),
            "memberCount": to_json(self.member_count),
            "locale": to_json(self.locale),
            "membershipOption": to_json(self.membership_option),
            "capabilities": to_json(self.capabilities),
            "clanInfo": to_json(self.clan_info),
            "avatarPath": to_json(self.avatar_path),
            "theme": to_json(self.theme),
        }
@dt.dataclass(frozen=True)
class GroupSearchResponse:
    """One page of group search results plus paging metadata (trust has_more,
    not total_results — see the use_total_results field description)."""
    has_more: bool
    query: "PagedQuery"
    replacement_continuation_token: str
    results: t.Sequence["GroupV2Card"]
    total_results: int
    use_total_results: bool = dt.field(
        metadata={
            "description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
        }
    )
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this response to a mapping keyed by the API's camelCase field names."""
        return {
            "results": to_json(self.results),
            "totalResults": to_json(self.total_results),
            "hasMore": to_json(self.has_more),
            "query": to_json(self.query),
            "replacementContinuationToken": to_json(
                self.replacement_continuation_token
            ),
            "useTotalResults": to_json(self.use_total_results),
        }
@dt.dataclass(frozen=True)
class GroupQuery:
    """NOTE: GroupQuery, as of Destiny 2, has essentially two totally different and incompatible "modes".
    If you are querying for a group, you can pass any of the properties below.
    If you are querying for a Clan, you MUST NOT pass any of the following properties (they must be null or undefined in your request, not just empty string/default values):
    - groupMemberCountFilter - localeFilter - tagText
    If you pass these, you will get a useless InvalidParameters error."""
    creation_date: "GroupDateRange"
    current_page: int
    group_type: "GroupType"
    items_per_page: int
    locale_filter: str
    name: str
    request_continuation_token: str
    sort_by: "GroupSortBy"
    tag_text: str
    group_member_count_filter: t.Optional[int] = None
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this query to a mapping keyed by the API's camelCase field names."""
        return {
            "name": to_json(self.name),
            "groupType": to_json(self.group_type),
            "creationDate": to_json(self.creation_date),
            "sortBy": to_json(self.sort_by),
            "groupMemberCountFilter": to_json(self.group_member_count_filter),
            "localeFilter": to_json(self.locale_filter),
            "tagText": to_json(self.tag_text),
            "itemsPerPage": to_json(self.items_per_page),
            "currentPage": to_json(self.current_page),
            "requestContinuationToken": to_json(self.request_continuation_token),
        }
class GroupSortBy(Enum):
    """Orderings available for group search results (see GroupQuery.sort_by)."""
    NAME = 0
    DATE = 1
    POPULARITY = 2
    ID = 3
class GroupMemberCountFilter(Enum):
    """Membership-size buckets for filtering group searches.
    NOTE(review): GroupQuery.group_member_count_filter is typed Optional[int];
    presumably it takes these enum values — confirm against the API docs."""
    ALL = 0
    ONE_TO_TEN = 1
    ELEVEN_TO_ONE_HUNDRED = 2
    GREATER_THAN_ONE_HUNDRED = 3
@dt.dataclass(frozen=True)
class GroupNameSearchRequest:
    """Request body for searching groups by exact name and group type."""
    group_name: str
    group_type: "GroupType"
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request to a mapping keyed by the API's camelCase field names."""
        return {
            "groupName": to_json(self.group_name),
            "groupType": to_json(self.group_type),
        }
@dt.dataclass(frozen=True)
class GroupOptionalConversation:
    """An optional (additional) chat channel belonging to a group."""
    chat_enabled: bool
    chat_name: str
    chat_security: "ChatSecuritySetting"
    conversation_id: int
    group_id: int
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this conversation to a mapping keyed by the API's camelCase field names."""
        return {
            "groupId": to_json(self.group_id),
            "conversationId": to_json(self.conversation_id),
            "chatEnabled": to_json(self.chat_enabled),
            "chatName": to_json(self.chat_name),
            "chatSecurity": to_json(self.chat_security),
        }
@dt.dataclass(frozen=True)
class GroupEditAction:
    """Editable group settings for a group-edit request.
    NOTE(review): the Optional fields presumably mean "leave unchanged" when
    None — confirm against the API documentation."""
    about: str
    callsign: str
    locale: str
    motto: str
    name: str
    tags: str
    theme: str
    allow_chat: t.Optional[bool] = None
    avatar_image_index: t.Optional[int] = None
    chat_security: t.Optional[int] = None
    default_publicity: t.Optional[int] = None
    enable_invitation_messaging_for_admins: t.Optional[bool] = None
    homepage: t.Optional[int] = None
    is_public: t.Optional[bool] = None
    is_public_topic_admin_only: t.Optional[bool] = None
    membership_option: t.Optional[int] = None
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this edit action to a mapping keyed by the API's camelCase field names."""
        return {
            "name": to_json(self.name),
            "about": to_json(self.about),
            "motto": to_json(self.motto),
            "theme": to_json(self.theme),
            "avatarImageIndex": to_json(self.avatar_image_index),
            "tags": to_json(self.tags),
            "isPublic": to_json(self.is_public),
            "membershipOption": to_json(self.membership_option),
            "isPublicTopicAdminOnly": to_json(self.is_public_topic_admin_only),
            "allowChat": to_json(self.allow_chat),
            "chatSecurity": to_json(self.chat_security),
            "callsign": to_json(self.callsign),
            "locale": to_json(self.locale),
            "homepage": to_json(self.homepage),
            "enableInvitationMessagingForAdmins": to_json(
                self.enable_invitation_messaging_for_admins
            ),
            "defaultPublicity": to_json(self.default_publicity),
        }
@dt.dataclass(frozen=True)
class GroupOptionsEditAction:
    """Permission/option overrides for a group edit; per-field defaults and
    allowed values are documented in each field's metadata description."""
    host_guided_game_permission_override: t.Optional[int] = dt.field(
        default=None,
        metadata={
            "description": """Minimum Member Level allowed to host guided games
Always Allowed: Founder, Acting Founder, Admin
Allowed Overrides: None, Member, Beginner
Default is Member for clans, None for groups, although this means nothing for groups."""
        },
    )
    invite_permission_override: t.Optional[bool] = dt.field(
        default=None,
        metadata={
            "description": """Minimum Member Level allowed to invite new members to group
Always Allowed: Founder, Acting Founder
True means admins have this power, false means they don't
Default is false for clans, true for groups."""
        },
    )
    join_level: t.Optional[int] = dt.field(
        default=None,
        metadata={
            "description": """Level to join a member at when accepting an invite, application, or joining an open clan
Default is Beginner."""
        },
    )
    update_banner_permission_override: t.Optional[bool] = dt.field(
        default=None,
        metadata={
            "description": """Minimum Member Level allowed to update banner
Always Allowed: Founder, Acting Founder
True means admins have this power, false means they don't
Default is false for clans, true for groups."""
        },
    )
    update_culture_permission_override: t.Optional[bool] = dt.field(
        default=None,
        metadata={
            "description": """Minimum Member Level allowed to update group culture
Always Allowed: Founder, Acting Founder
True means admins have this power, false means they don't
Default is false for clans, true for groups."""
        },
    )
    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize these overrides to a mapping keyed by the API's PascalCase field names."""
        return {
            "InvitePermissionOverride": to_json(self.invite_permission_override),
            "UpdateCulturePermissionOverride": to_json(
                self.update_culture_permission_override
            ),
            "HostGuidedGamePermissionOverride": to_json(
                self.host_guided_game_permission_override
            ),
            "UpdateBannerPermissionOverride": to_json(
                self.update_banner_permission_override
            ),
            "JoinLevel": to_json(self.join_level),
        }
@dt.dataclass(frozen=True)
class GroupOptionalConversationAddRequest:
    """Immutable request model; ``to_json`` emits camelCase API keys."""

    chat_name: str
    chat_security: "ChatSecuritySetting"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request for transport."""
        payload: t.Dict[str, t.Any] = {}
        payload["chatName"] = to_json(self.chat_name)
        payload["chatSecurity"] = to_json(self.chat_security)
        return payload
@dt.dataclass(frozen=True)
class GroupOptionalConversationEditRequest:
    """Immutable edit-request model; unset optionals serialize as None."""

    chat_name: str
    chat_enabled: t.Optional[bool] = None
    chat_security: t.Optional[int] = None

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["chatEnabled"] = to_json(self.chat_enabled)
        payload["chatName"] = to_json(self.chat_name)
        payload["chatSecurity"] = to_json(self.chat_security)
        return payload
@dt.dataclass(frozen=True)
class GroupMemberLeaveResult:
    """Immutable result model for a member leaving a group."""

    group: "GroupV2"
    group_deleted: bool

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this result with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["group"] = to_json(self.group)
        payload["groupDeleted"] = to_json(self.group_deleted)
        return payload
@dt.dataclass(frozen=True)
class GroupBanRequest:
    """Immutable ban-request model."""

    comment: str
    length: "IgnoreLength"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request for transport."""
        payload: t.Dict[str, t.Any] = {}
        payload["comment"] = to_json(self.comment)
        payload["length"] = to_json(self.length)
        return payload
@dt.dataclass(frozen=True)
class GroupBan:
    """Immutable record of a ban applied to a group member."""

    bungie_net_user_info: "UserInfoCard"
    comment: str
    created_by: "UserInfoCard"
    date_banned: str
    date_expires: str
    destiny_user_info: "GroupUserInfoCard"
    group_id: int
    last_modified_by: "UserInfoCard"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this record with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["groupId"] = to_json(self.group_id)
        payload["lastModifiedBy"] = to_json(self.last_modified_by)
        payload["createdBy"] = to_json(self.created_by)
        payload["dateBanned"] = to_json(self.date_banned)
        payload["dateExpires"] = to_json(self.date_expires)
        payload["comment"] = to_json(self.comment)
        payload["bungieNetUserInfo"] = to_json(self.bungie_net_user_info)
        payload["destinyUserInfo"] = to_json(self.destiny_user_info)
        return payload
@dt.dataclass(frozen=True)
class GroupMemberApplication:
    """Immutable record of a membership application and its resolution state."""

    bungie_net_user_info: "UserInfoCard"
    creation_date: str
    destiny_user_info: "GroupUserInfoCard"
    group_id: int
    request_message: str
    resolve_message: str
    resolve_state: "GroupApplicationResolveState"
    resolve_date: t.Optional[str] = None
    resolved_by_membership_id: t.Optional[int] = None

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this record with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["groupId"] = to_json(self.group_id)
        payload["creationDate"] = to_json(self.creation_date)
        payload["resolveState"] = to_json(self.resolve_state)
        payload["resolveDate"] = to_json(self.resolve_date)
        payload["resolvedByMembershipId"] = to_json(self.resolved_by_membership_id)
        payload["requestMessage"] = to_json(self.request_message)
        payload["resolveMessage"] = to_json(self.resolve_message)
        payload["destinyUserInfo"] = to_json(self.destiny_user_info)
        payload["bungieNetUserInfo"] = to_json(self.bungie_net_user_info)
        return payload
class GroupApplicationResolveState(Enum):
    """Resolution status codes for a group membership application."""

    UNRESOLVED = 0
    ACCEPTED = 1
    DENIED = 2
    RESCINDED = 3
@dt.dataclass(frozen=True)
class GroupApplicationRequest:
    """Immutable application-request model carrying only a message."""

    message: str

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request for transport."""
        payload: t.Dict[str, t.Any] = {}
        payload["message"] = to_json(self.message)
        return payload
@dt.dataclass(frozen=True)
class GroupApplicationListRequest:
    """Immutable batch application-request model."""

    memberships: t.Sequence["UserMembership"]
    message: str

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this request for transport."""
        payload: t.Dict[str, t.Any] = {}
        payload["memberships"] = to_json(self.memberships)
        payload["message"] = to_json(self.message)
        return payload
class GroupsForMemberFilter(Enum):
    """Filter codes for listing the groups a member belongs to."""

    ALL = 0
    FOUNDED = 1
    NON_FOUNDED = 2
@dt.dataclass(frozen=True)
class GroupMembershipBase:
    """Immutable base model wrapping a single group."""

    group: "GroupV2"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this model for transport."""
        payload: t.Dict[str, t.Any] = {}
        payload["group"] = to_json(self.group)
        return payload
@dt.dataclass(frozen=True)
class GroupMembership:
    """Immutable pairing of a group with one of its members."""

    group: "GroupV2"
    member: "GroupMember"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this pairing with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["member"] = to_json(self.member)
        payload["group"] = to_json(self.group)
        return payload
@dt.dataclass(frozen=True)
class GroupMembershipSearchResponse:
    """Immutable paged search response over group memberships."""

    has_more: bool
    query: "PagedQuery"
    replacement_continuation_token: str
    results: t.Sequence["GroupMembership"]
    total_results: int
    use_total_results: bool = dt.field(
        metadata={
            "description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
        }
    )

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this response with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["results"] = to_json(self.results)
        payload["totalResults"] = to_json(self.total_results)
        payload["hasMore"] = to_json(self.has_more)
        payload["query"] = to_json(self.query)
        payload["replacementContinuationToken"] = to_json(
            self.replacement_continuation_token
        )
        payload["useTotalResults"] = to_json(self.use_total_results)
        return payload
@dt.dataclass(frozen=True)
class GetGroupsForMemberResponse:
    """Immutable paged response listing a member's groups, with inactivity flags."""

    are_all_memberships_inactive: t.Mapping[str, bool] = dt.field(
        metadata={
            "description": """A convenience property that indicates if every membership this user has that is a part of this group are part of an account that is considered inactive - for example, overridden accounts in Cross Save.
The key is the Group ID for the group being checked, and the value is true if the users' memberships for that group are all inactive."""
        }
    )
    has_more: bool
    query: "PagedQuery"
    replacement_continuation_token: str
    results: t.Sequence["GroupMembership"]
    total_results: int
    use_total_results: bool = dt.field(
        metadata={
            "description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
        }
    )

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this response with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["areAllMembershipsInactive"] = to_json(self.are_all_memberships_inactive)
        payload["results"] = to_json(self.results)
        payload["totalResults"] = to_json(self.total_results)
        payload["hasMore"] = to_json(self.has_more)
        payload["query"] = to_json(self.query)
        payload["replacementContinuationToken"] = to_json(
            self.replacement_continuation_token
        )
        payload["useTotalResults"] = to_json(self.use_total_results)
        return payload
@dt.dataclass(frozen=True)
class GroupPotentialMembership:
    """Immutable pairing of a group with a potential (not-yet-joined) member."""

    group: "GroupV2"
    member: "GroupPotentialMember"

    def to_json(self) -> t.Mapping[str, t.Any]:
        """Serialize this pairing with camelCase API keys."""
        payload: t.Dict[str, t.Any] = {}
        payload["member"] = to_json(self.member)
        payload["group"] = to_json(self.group)
        return payload
@dt.dataclass(frozen=True)
class GroupPotentialMembershipSearchResponse:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: | |
<filename>src/rightClickHelper/tool/regTool.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json, winreg
from enum import Enum
from src.rightClickHelper.tool.systemTool import SystemTool
class CommandFlag(Enum):
    """Flag values used for a menu item's ``CommandFlags`` registry value."""

    NONE = 0x00000000
    HIDE = 0x00000008
class RegEnv(Enum):
    """Registry root hives a path can be resolved against.

    NOTE(review): in the original source the descriptive comments were shifted
    by one member (HKCU's text sat on HKEY_CLASSES_ROOT, HKU's on
    HKEY_CURRENT_USER, HKCR's on HKEY_USERS). They are re-matched below per the
    Microsoft article referenced here.
    """
    # https://docs.microsoft.com/en-us/troubleshoot/windows-server/performance/windows-registry-advanced-users
    HKEY_CLASSES_ROOT = winreg.HKEY_CLASSES_ROOT  # Is a subkey of HKEY_LOCAL_MACHINE\Software.
    # The information that is stored here makes sure that the correct program opens when you open a file by using Windows Explorer.
    # This key is sometimes abbreviated as HKCR.
    # Starting with Windows 2000, this information is stored under both the HKEY_LOCAL_MACHINE and HKEY_CURRENT_USER keys.
    # The HKEY_LOCAL_MACHINE\Software\Classes key contains default settings that can apply to all users on the local computer.
    # The HKEY_CURRENT_USER\Software\Classes key contains settings that override the default settings and apply only to the interactive user.
    # The HKEY_CLASSES_ROOT key provides a view of the registry that merges the information from these two sources.
    # HKEY_CLASSES_ROOT also provides this merged view for programs that are designed for earlier versions of Windows.
    # To change the settings for the interactive user, changes must be made under HKEY_CURRENT_USER\Software\Classes instead of under HKEY_CLASSES_ROOT.
    # To change the default settings, changes must be made under HKEY_LOCAL_MACHINE\Software\Classes.
    # If you write keys to a key under HKEY_CLASSES_ROOT, the system stores the information under HKEY_LOCAL_MACHINE\Software\Classes.
    # If you write values to a key under HKEY_CLASSES_ROOT, and the key already exists under HKEY_CURRENT_USER\Software\Classes,
    # the system will store the information there instead of under HKEY_LOCAL_MACHINE\Software\Classes.
    HKEY_CURRENT_USER = winreg.HKEY_CURRENT_USER  # Contains the root of the configuration information for the user who is currently logged on.
    # The user's folders, screen colors, and Control Panel settings are stored here.
    # This information is associated with the user's profile.
    # This key is sometimes abbreviated as HKCU.
    HKEY_LOCAL_MACHINE = winreg.HKEY_LOCAL_MACHINE  # Contains configuration information particular to the computer (for any user).
    # This key is sometimes abbreviated as HKLM.
    HKEY_USERS = winreg.HKEY_USERS  # Contains all the actively loaded user profiles on the computer.
    # HKEY_CURRENT_USER is a subkey of HKEY_USERS.
    # HKEY_USERS is sometimes abbreviated as HKU.
    HKEY_CURRENT_CONFIG = winreg.HKEY_CURRENT_CONFIG  # Contains information about the hardware profile that is used by the local computer at system startup.

    @staticmethod
    def find(val):
        """Return the RegEnv member whose raw winreg value equals ``val``.

        Raises ValueError when no member matches.
        """
        for item in RegEnv:
            if item.value == val:
                return item
        raise ValueError('RegEnv not found.')
class RegType(Enum):
    """Registry value types (raw ``winreg`` type codes)."""
    # https://docs.microsoft.com/en-us/windows/win32/sysinfo/registry-value-types
    REG_BINARY = winreg.REG_BINARY  # Binary data in any form.
    REG_DWORD = winreg.REG_DWORD  # A 32-bit number.
    REG_DWORD_LITTLE_ENDIAN = winreg.REG_DWORD_LITTLE_ENDIAN  # A 32-bit number in little-endian format. Windows is designed to run on
    # little-endian computer architectures, so this value is defined as REG_DWORD in the Windows header files.
    REG_DWORD_BIG_ENDIAN = winreg.REG_DWORD_BIG_ENDIAN  # A 32-bit number in big-endian format. Some UNIX systems support big-endian architectures.
    REG_EXPAND_SZ = winreg.REG_EXPAND_SZ  # A null-terminated string that contains unexpanded references to environment variables (for example, "%PATH%").
    # It will be a Unicode or ANSI string depending on whether you use the Unicode or ANSI functions.
    # To expand the environment-variable references, use the ExpandEnvironmentStrings function.
    REG_LINK = winreg.REG_LINK  # A null-terminated Unicode string that contains the target path of a symbolic link
    # created by calling the RegCreateKeyEx function with REG_OPTION_CREATE_LINK.
    REG_MULTI_SZ = winreg.REG_MULTI_SZ  # A sequence of null-terminated strings, terminated by an empty string (\0).
    REG_NONE = winreg.REG_NONE  # No defined value type.
    REG_QWORD = winreg.REG_QWORD  # A 64-bit number.
    REG_QWORD_LITTLE_ENDIAN = winreg.REG_QWORD_LITTLE_ENDIAN  # A 64-bit number in little-endian format.
    # Windows is designed to run on little-endian computer architectures, so this value is defined as REG_QWORD in the Windows header files.
    REG_SZ = winreg.REG_SZ  # A null-terminated string; Unicode or ANSI depending on whether you use the Unicode or ANSI functions.

    @staticmethod
    def find(val):
        """Return the RegType member whose raw winreg value equals ``val``.

        Raises ValueError when no member matches.
        """
        for item in RegType:
            if item.value == val:
                return item
        raise ValueError('RegType not found.')
class RegVal:
    """A registry value (``val``) paired with its ``RegType`` (``type``).

    Unknown raw type codes fall back to ``RegType.REG_NONE`` instead of raising,
    matching the original scan-with-default behavior.
    """

    def __init__(self, _type, val):
        # Enum lookup by value replaces the original hand-rolled full scan of
        # RegType (which had no early break). Aliased codes (e.g. REG_DWORD /
        # REG_DWORD_LITTLE_ENDIAN) resolve to the same canonical member the
        # scan would have found, since Enum iteration also skips aliases.
        try:
            self.type = RegType(_type)
        except ValueError:
            self.type = RegType.REG_NONE
        self.val = val
class RegTool:
    """Static helpers for reading, writing, copying and deleting Windows
    registry keys/values via ``winreg``.

    Keys are addressed as a ``(RegEnv, path)`` pair, where ``path`` is a
    backslash-separated subkey path under the hive. Mutating operations
    (``setVal``/``delVal``/``getVal``) require administrator rights per
    ``SystemTool.isAdmin``.
    """

    @staticmethod
    def pathExist(
        env: RegEnv, path: str
    ) -> bool:
        """Return True when key ``path`` exists under hive ``env``."""
        # Probe by opening the key and closing it immediately; any failure
        # (missing key, access denied, ...) is reported as "does not exist".
        try:
            winreg.CloseKey(
                winreg.OpenKey(env.value, path)
            )
            return True
        except: return False

    @staticmethod
    def keyExist(
        env: RegEnv, path: str, key: str
    ) -> bool:
        """Return True when subkey ``key`` exists directly under ``path``."""
        return RegTool.pathExist(env, path + '\\' + key)

    @staticmethod
    def createPath(
        env: RegEnv, path: str
    ):
        """Open ``path`` if it already exists, otherwise create it.

        Returns an open key handle; raises ValueError on an empty path.
        """
        if path == '': raise ValueError('Path cannot be empty.')
        if RegTool.pathExist(env, path):
            return winreg.OpenKey(env.value, path)
        return winreg.CreateKey(env.value, path)

    @staticmethod
    def createKey(
        env: RegEnv, path: str, key: str
    ):
        """Open or create subkey ``key`` under ``path``; returns a handle."""
        if path == '' or key == '':
            raise ValueError('Path cannot be empty.')
        if RegTool.keyExist(env, path, key):
            return winreg.OpenKey(env.value, path + '\\' + key)
        return RegTool.createPath(env, path + '\\' + key)

    @staticmethod
    def delKey(
        env: RegEnv, path: str, key: str = ''
    ):
        """Recursively delete the key tree rooted at ``path``.

        Raises ValueError when ``path`` (or, if given, ``key``) does not exist.
        NOTE(review): ``key`` is only validated for existence — the deletion
        always starts at ``path`` itself, not at ``path\\key``; confirm that
        is the intended behavior.
        """
        if path == '' or not RegTool.pathExist(env, path):
            raise ValueError('Path not found.')
        if key != '' and not RegTool.keyExist(env, path, key):
            raise ValueError('Key not found.')
        # winreg.DeleteKey cannot remove a key that still has subkeys,
        # so delete depth-first.
        def reduceDel(path):
            subKeys = RegTool.listKey(env, path)
            for subKey in subKeys:
                reduceDel(path + '\\' + subKey)
            winreg.DeleteKey(env.value, path)
        reduceDel(path)

    @staticmethod
    def listKey(
        env: RegEnv, path: str
    ) -> list:
        """Return the names of the immediate subkeys of ``path``."""
        with winreg.OpenKey(env.value, path) as regKey:
            # QueryInfoKey()[0] is the number of subkeys.
            regCount = winreg.QueryInfoKey(regKey)[0]
            regKeys = []
            for i in range(int(regCount)):
                name = winreg.EnumKey(regKey, i)
                regKeys.append(name)
            return regKeys

    @staticmethod
    def recursion(
        env: RegEnv, startPath: str, depth: int = 1000
    ) -> {}:
        """Snapshot the subtree rooted at ``startPath`` as a nested dict.

        Each node maps subkey name -> child dict, plus two reserved entries:
        '__path__' = (hive value, path) and '__val__' = {name: (data, type)}.
        ``depth`` bounds the recursion; 0 yields an empty dict.
        """
        if depth == 0: return {}
        if startPath == '' or not RegTool.pathExist(env, startPath):
            raise ValueError('Path not found.')
        originData = {}
        originList = RegTool.listKey(env, startPath)
        for item in originList:
            originData[item] = RegTool.recursion(env, startPath + '\\' + item, depth - 1)
        originData['__path__'] = (env.value, startPath)
        originData['__val__'] = RegTool.listVal(env, startPath)
        return originData

    @staticmethod
    def writeKeyByStr(regDataStr: str):
        """Write a subtree previously serialized to JSON (see ``writeKey``)."""
        return RegTool.writeKey(
            json.loads(regDataStr)
        )

    @staticmethod
    def writeKey(regData: dict):
        """Recreate the subtree described by ``regData`` (shape of ``recursion``)."""
        if regData == {}: return
        regEnvVal, path = regData['__path__']
        regEnv = RegEnv.find(regEnvVal)
        if not RegTool.pathExist(regEnv, path):
            RegTool.createPath(regEnv, path)
        # Entries whose key does not start with '__' are child key dicts.
        for key, regDataChild in regData.items():
            if key[:2] != '__':
                RegTool.writeKey(regDataChild)
        for valueName, (valueContent, valueTypeVal) in regData['__val__'].items():
            RegTool.setVal(
                regEnv, path, valueName, valueContent
                , RegType.find(valueTypeVal)
            )

    @staticmethod
    def listVal(
        env: RegEnv, path: str
    ) -> dict:
        """Return all values of ``path`` as {name: (data, raw type code)}."""
        with winreg.OpenKey(env.value, path) as regKey:
            i = 0
            regVals = {}
            try:
                # EnumValue raises OSError once the index runs past the last
                # value; the ``finally: return`` below deliberately swallows
                # that and yields whatever was collected.
                while True:
                    name, val, type = winreg.EnumValue(regKey, i)
                    regVals[name] = (val, type)
                    i += 1
            finally:
                return regVals

    @staticmethod
    def setVal(
        env: RegEnv, path: str,
        valueName: str, valueContent, regType: RegType = RegType.REG_SZ
    ):
        """Create or overwrite a value; requires administrator rights."""
        if not SystemTool.isAdmin():
            raise PermissionError('Not started with administrator rights.')
        with winreg.OpenKey(
            env.value, path, access=winreg.KEY_SET_VALUE
        ) as regKey:
            winreg.SetValueEx(regKey, valueName, 0, regType.value, valueContent)

    @staticmethod
    def delVal(
        env: RegEnv, path: str, valueName: str
    ):
        """Delete a value; missing keys/values are silently ignored."""
        if not SystemTool.isAdmin():
            raise PermissionError('Not started with administrator rights.')
        try:
            with winreg.OpenKey(
                env.value, path, access=winreg.KEY_SET_VALUE
            ) as regKey:
                winreg.DeleteValue(regKey, valueName)
        except: pass

    @staticmethod
    def getVal(
        env: RegEnv, path: str, valueName: str
    ) -> RegVal:
        """Read one value as a ``RegVal``; requires administrator rights.

        Raises FileNotFoundError when the value cannot be read.
        """
        if not SystemTool.isAdmin():
            raise PermissionError('Not started with administrator rights.')
        with winreg.OpenKey(
            env.value, path, access=winreg.KEY_QUERY_VALUE
        ) as regKey:
            try:
                val, _type = winreg.QueryValueEx(regKey, valueName)
                return RegVal(_type, val)
            except:
                raise FileNotFoundError('Reg val not found.')

    @staticmethod
    def replacePath(
        regData: dict,
        source: (RegEnv, str), target: (RegEnv, str)
    ):
        """Rewrite every '__path__' in ``regData`` from ``source`` to ``target`` in place."""
        regData['__path__'] = (
            target[0].value,
            regData['__path__'][1].replace(source[1], target[1])
        )
        for key, regDataChild in regData.items():
            if key[:2] != '__':
                RegTool.replacePath(
                    regDataChild, source, target
                )

    @staticmethod
    def cpKey(
        source: (RegEnv, str),
        target: (RegEnv, str)
    ):
        """Copy the subtree at ``source`` to ``target`` (snapshot, remap, write)."""
        try:
            sourceRegData = RegTool.recursion(*source)
            RegTool.replacePath(
                sourceRegData, source, target
            )
            RegTool.writeKey(sourceRegData)
        except Exception as e: raise e

    @staticmethod
    def mvKey(
        source: (RegEnv, str),
        target: (RegEnv, str)
    ):
        """Move the subtree at ``source`` to ``target`` (copy, then delete source)."""
        RegTool.cpKey(source, target)
        RegTool.delKey(*source)
# Well-known registry location of the current user's shell-folder paths.
CURRENT_USER_USER_SHELL_FOLDERS = (
    RegEnv.HKEY_CURRENT_USER, r'Software\Microsoft\Windows\CurrentVersion\Explorer\User Shell Folders'
)
# Common user folders resolved from the registry.
# NOTE(review): these lookups execute at import time, and RegTool.getVal raises
# PermissionError unless SystemTool.isAdmin() is true — importing this module
# therefore requires administrator rights; confirm that is intended.
systemDir = {
    'pictures': RegTool.getVal(
        *CURRENT_USER_USER_SHELL_FOLDERS, valueName='My Pictures'
    ).val,
    'desktop': RegTool.getVal(
        *CURRENT_USER_USER_SHELL_FOLDERS, valueName='Desktop'
    ).val,
    'documents': RegTool.getVal(
        *CURRENT_USER_USER_SHELL_FOLDERS, valueName='Personal'
    ).val
}
class MenuItem:
def __init__(self, name: str, regData: dict):
self.regData = regData
self._name = name
self.__name = name
regDataVal = regData.get('__val__', {})
commandVal = regData.get('command', {
'__val__': {}
}).get('__val__', {})
self.title = regDataVal.get('', [''])[0]
if self.title == '':
self.title = regDataVal.get('MUIVerb', [''])[0]
# type: str
self.icon = regDataVal.get('Icon', [''])[0]
# type: str
self.command = commandVal.get('', [''])[0]
# type: str
self.isPackage = regDataVal.get('SubCommands', [False])[0] == ''
# type: bool
# 二级菜单
self.isHide = regDataVal.get('CommandFlags', [CommandFlag.NONE.value])[0] == CommandFlag.HIDE.value
# type: bool
# 隐藏
self.isShift = regDataVal.get('Extended', [False])[0] == ''
# type: bool
# 按下shift时
self.isExplorer = regDataVal.get('OnlyInBrowserWindow', [False])[0] == ''
# type: bool
# 文件浏览器中
self.isNotWorkingDir = regDataVal.get('NoWorkingDirectory', [False])[0] == ''
# type: bool
# 不以当前目录为打开的工作目录
@property
def name(self):
return self._name
@name.setter
def name(self, newName):
if newName == '': raise ValueError('MenuItem\'s name can\'t be set to \'\'.')
self.__name = newName
def saveToReg(self, mv: bool = True):
if self.regData.get('__val__', {}) == {}:
self.regData['__val__'] = {}
valRegData = self.regData['__val__']
if self.isPackage:
valRegData[''] = ('', RegType.REG_SZ.value)
else:
valRegData[''] = (self.title, RegType.REG_SZ.value)
valRegData['MUIVerb'] = (self.title, RegType.REG_SZ.value)
valRegData['Icon'] = (self.icon, RegType.REG_SZ.value)
self.regData['__path__'] = (
self.regData['__path__'][0], '\\'.join([
*self.regData['__path__'][1].split('\\')[:-1],
self.name
])
)
path = self.regData['__path__']
def bool2Create(
boolValue,
valName, valContent, valType: RegType = RegType.REG_SZ,
valPath: (RegEnv, str) = (RegEnv.find(path[0]), path[1])
):
if boolValue:
RegTool.setVal(
valPath[0], valPath[1]
, valName, valContent, valType
)
else:
RegTool.delVal(
valPath[0], valPath[1]
, valName
)
if not self.isPackage:
if self.regData.get('command', {}) == {}:
self.regData['command'] = {
'__path__': (path[0], path[1] + r'\command'),
'__val__': {}
}
commandValRegData = self.regData['command']['__val__']
commandValRegData[''] = (self.command.replace('/', '\\'), RegType.REG_SZ.value)
else:
if | |
bernoulli
ind_test = bernoulli[:, 0:test_batch]
prob_hisps.ind_test = ind_test
prob_hisps.x_sync_test = x_sync_test
noise_var = 1 / np.sqrt(M) * math.pow(10., -SNR / 10.)
# bernoulli_ = tf.to_float(tf.random_uniform((N, L)) < pnz)
# xgen_ = bernoulli_ * tf.random_normal((N, L))
# noise_var = pnz * N / M * math.pow(10., -SNR / 10.)
# ygen_ = tf.matmul(A_, xgen_) + tf.random_normal((M, L), stddev=math.sqrt(noise_var))
user_delay = np.random.random_integers(0, Tg, size=(N, L))
ud_test = user_delay[:, 0:test_batch]
prob_hisps.ud_test = ud_test
prob_hisps.ud = user_delay
x_vir_channel = np.zeros([N * (Tg + 1), L]).astype(np.float32)
for iL in range(L):
for iu in range(N):
if bernoulli[iu][iL] == 1:
x_vir_channel[iu*(Tg+1) + user_delay[iu,iL],iL] = x_channel[iu,iL]
# print(x_vir_channel[iu][iL])
x_test = x_vir_channel[:, 0:test_batch]
noise = np.random.normal(size=(M+Tg, L), scale=noise_var).astype(np.float32)
SNR_ex = np.array([3, 6, 9, 12])
sigma_w = np.zeros([4]).astype(np.float32)
noise_ex = np.zeros([M+Tg, 4*test_batch]).astype(np.float32)
for iw in range(4):
sigma_w[iw] = 1 / np.sqrt(M) * math.pow(10., -SNR_ex[iw] / 10.)
noise_ex[:,iw*test_batch:(iw+1)*test_batch] = sigma_w[iw]/noise_var*noise[:,0:test_batch]
y_signal_noiseless = np.dot(A_v, x_vir_channel)
y_signal = y_signal_noiseless + noise
y_test = y_signal[:, 0:test_batch]
y_test_ex = np.zeros([M+Tg, 4*test_batch])
x_test_ex = np.zeros([N*(Tg+1), 4*test_batch]).astype(np.float32)
for iw in range(4):
x_test_ex[:,iw*test_batch:(iw+1)*test_batch] = x_test
y_test_ex[:,iw*test_batch:(iw+1)*test_batch] = y_signal_noiseless[:,0:test_batch] + noise_ex[:,iw*test_batch:(iw+1)*test_batch]
prob_hisps.y_test_ex = y_test_ex
prob_hisps.x_test_ex = x_test_ex
y_sync = np.dot(Ao, x_sync) + noise[0:M, :]
y_sync_test = y_sync[:, 0:test_batch]
prob_hisps.y_sync_test = y_sync_test
prob_hisps.Ao = Ao
prob_hisps.x_sync = x_sync
prob_hisps.y_sync = y_sync
xgen_ = tf.constant(x_vir_channel, name='x_vir_channel')
ygen_ = tf.constant(y_signal, name='y_signal')
prob_hisps.x_test = x_test
prob_hisps.y_test = y_test
prob_hisps.xval = x_vir_channel
prob_hisps.yval = y_signal
prob_hisps.xinit = x_vir_channel
prob_hisps.yinit = y_signal
prob_hisps.xgen_ = xgen_
prob_hisps.ygen_ = ygen_
prob_hisps.noise_var = noise_var
return prob_hisps
def bernoulli_gaussian_hierarchical_sparse_MMV_randomlocation_trial(M=10,N=200,E=2,L=200000,Tg=3,pnz=0.05,kappa=0,SNR=0):
    """Build a Bernoulli-Gaussian hierarchical-sparse MMV trial problem with
    random per-user delays.

    A random M x N sensing matrix (optionally conditioned via ``kappa``) is
    column-normalized and expanded into an (M+Tg) x N*(Tg+1) delay-virtualized
    matrix. Per-sample user activity is Bernoulli(pnz) (with at least one
    active user forced per sample), channels are Gaussian scaled by
    distance-based large-scale fading, and each active user gets a uniform
    random delay in [0, Tg]. Train/test arrays (including variants at the
    extra SNRs 3/6/9/12 dB) are attached as attributes of the returned
    TFGenerator — presumably consumed downstream for training; confirm against
    callers.

    Fix vs. original: ``np.random.random_integers`` (deprecated since NumPy
    1.11, removed in 1.25) is replaced by the equivalent
    ``np.random.randint(0, Tg + 1, ...)`` (same inclusive [0, Tg] range).
    """
    A = np.random.normal(size=(M, N), scale=1.0 / math.sqrt(M)).astype(np.float32)
    if kappa >= 1:
        # Create a random operator with a specific condition number kappa.
        U, _, V = la.svd(A, full_matrices=False)
        s = np.logspace(0, np.log10(1 / kappa), M)
        A = np.dot(U * (s * np.sqrt(N) / la.norm(s)), V).astype(np.float32)
    # Normalize columns to unit l2 norm.
    A_col_norm = np.linalg.norm(A, ord=2, axis=0, keepdims=True)
    A = A / A_col_norm
    Ao = A
    # Delay-virtualized matrix: each user contributes Tg+1 shifted copies of
    # its column, one per possible delay.
    A_v = np.zeros([M+Tg, N*(Tg+1)]).astype(np.float32)
    for i1 in range(N):
        for i2 in range(Tg+1):
            A_v[i2:(i2 + M), i1 * (Tg + 1) + i2] = A[:, i1]
    A = A_v
    A_ = tf.constant(A, name='A')
    iid = False
    prob_hisps_MMV = TFGenerator(A=A, A_=A_, pnz=pnz, kappa=kappa, SNR=SNR, Ao=Ao, M=M, N=N, Tg=Tg, E=E, iid=iid)
    prob_hisps_MMV.name = 'Bernoulli-Gaussian-Hierarchical-Sparse-MMV, random A'
    prob_hisps_MMV.iid = iid
    # Large-scale fading from a log-distance path-loss model, normalized by
    # the loss at 500 m.
    alpha = 15.3
    beta = 37.6
    lsf_sd = 0.1**((alpha+beta*np.log10(500))/10)
    distance = np.random.uniform(low=100, high=1000, size=N).astype(np.float32)
    lsf = np.zeros([N]).astype(np.float32)
    lsf_t = np.zeros([N*(Tg+1)]).astype(np.float32)
    for il in range(N):
        lsf[il] = 0.1**((alpha+beta*np.log10(distance[il]))/10)
        for iT in range(Tg+1):
            lsf_t[il*(Tg+1)+iT] = lsf[il]
    lsf = 1.0/lsf_sd*lsf
    lsf_t = 1.0/lsf_sd*lsf_t
    prob_hisps_MMV.lsf = lsf
    prob_hisps_MMV.lsf_t = lsf_t
    test_batch = 5000
    # Bernoulli activity pattern; force at least one active user per sample.
    bernoulli = np.random.uniform(0, 1, size=(L, N)).astype(np.float32)
    ind_channel = np.zeros(shape=(L, E, N)).astype(np.float32)
    for height in range(L):
        for row in range(N):
            if bernoulli[height,row] < pnz:
                bernoulli[height,row] = 1.0
                ind_channel[height,:,row] = np.ones(shape=(E)).astype(np.float32)
            else:
                bernoulli[height,row] = 0.0
                ind_channel[height,:,row] = np.zeros(shape=(E)).astype(np.float32)
        sum_ber = np.sum(bernoulli[height, :])
        if sum_ber == 0:
            bernoulli[height,0] = 1.0
            ind_channel[height,:,0] = np.ones(shape=(E)).astype(np.float32)
    prob_hisps_MMV.ind = np.transpose(bernoulli, (1,0))
    # Gaussian channels, faded, masked by activity.
    x_channel = np.random.normal(size=(L, E, N), scale=1.0).astype(np.float32)
    x_channel = x_channel*lsf.reshape([1,1,N])
    x_sync = np.multiply(x_channel, ind_channel)
    x_sync_test = x_sync[0:test_batch,:,:]
    x_sync_test_vec = x_sync_test.reshape([test_batch,N*E])
    prob_hisps_MMV.x_sync_test_mat = np.transpose(x_sync_test, (2,1,0))
    prob_hisps_MMV.x_sync_test = x_sync_test_vec
    ind_test = bernoulli[0:test_batch,:]
    prob_hisps_MMV.ind_test = np.transpose(ind_test, (1,0))
    # Uniform random delay in [0, Tg] per (sample, user).
    # np.random.random_integers(0, Tg, ...) was removed in NumPy 1.25;
    # randint's high bound is exclusive, hence Tg + 1.
    user_delay = np.random.randint(0, Tg + 1, size=(L, N))
    prob_hisps_MMV.ud = np.transpose(user_delay, (1,0))
    ud_test = user_delay[0:test_batch,:]
    prob_hisps_MMV.ud_test = np.transpose(ud_test, (1,0))
    # Place each active user's channel at its delayed virtual position.
    x_vir_channel = np.zeros([L,E,N*(Tg+1)]).astype(np.float32)
    for iL in range(L):
        for iu in range(N):
            if bernoulli[iL,iu] == 1.0:
                x_vir_channel[iL,:,iu*(Tg+1)+user_delay[iL,iu]] = x_channel[iL,:,iu]
    x_vir_channel_vec = x_vir_channel.reshape([L,N*(Tg+1)*E])
    prob_hisps_MMV.x_mat = np.transpose(x_vir_channel, (2,1,0))
    x_test = x_vir_channel[0:test_batch,:,:]
    prob_hisps_MMV.x_test_mat = np.transpose(x_test, (2,1,0))
    x_test_vec = x_test.reshape([test_batch,N*(Tg+1)*E])
    prob_hisps_MMV.x_test = x_test_vec
    # Noise at the nominal SNR, plus rescaled copies at the extra SNRs.
    noise_var = 1 / np.sqrt(M) * math.pow(10., -SNR / 10.)
    noise = np.random.normal(size=(L,E,M+Tg), scale=noise_var).astype(np.float32)
    prob_hisps_MMV.noise_mat = np.transpose(noise, (2,1,0))
    SNR_ex = np.array([3, 6, 9, 12])
    sigma_w = np.zeros([4]).astype(np.float32)
    noise_ex = np.zeros([4*test_batch,E,M+Tg]).astype(np.float32)
    for iw in range(4):
        sigma_w[iw] = 1 / np.sqrt(M) * math.pow(10., -SNR_ex[iw] / 10.)
        noise_ex[iw*test_batch:(iw+1)*test_batch,:,:] = sigma_w[iw] / noise_var * noise[0:test_batch,:,:]
    # Async measurements through the virtual matrix.
    y_signal_noiseless = np.dot(np.reshape(x_vir_channel, (E*L,N*(Tg+1))), np.transpose(A_v))
    y_signal_noiseless = y_signal_noiseless.reshape([L,E,M+Tg])
    y_signal = y_signal_noiseless + noise
    prob_hisps_MMV.y_mat = np.transpose(y_signal, (2,1,0))
    y_signal_vec = y_signal.reshape([L,(M+Tg)*E])
    y_test = y_signal[0:test_batch,:,:]
    y_test_vec = y_test.reshape([test_batch,(M+Tg)*E])
    prob_hisps_MMV.y_test_mat = np.transpose(y_test, (2,1,0))
    prob_hisps_MMV.y_test = y_test_vec
    # Extra-SNR test copies share the noiseless signal and the scaled noise.
    y_test_ex = np.zeros([4*test_batch,E,M+Tg])
    x_test_ex = np.zeros([4*test_batch,E,N*(Tg+1)]).astype(np.float32)
    for iw in range(4):
        x_test_ex[iw*test_batch:(iw+1)*test_batch,:,:] = x_test
        y_test_ex[iw*test_batch:(iw+1)*test_batch,:,:] = y_signal_noiseless[0:test_batch,:,:] + noise_ex[iw*test_batch:(iw+1)*test_batch,:,:]
    prob_hisps_MMV.y_test_ex_mat = np.transpose(y_test_ex, (2,1,0))
    prob_hisps_MMV.x_test_ex_mat = np.transpose(x_test_ex, (2,1,0))
    y_test_ex_vec = y_test_ex.reshape([4*test_batch,(M+Tg)*E])
    x_test_ex_vec = x_test_ex.reshape([4*test_batch,N*(Tg+1)*E])
    prob_hisps_MMV.y_test_ex = y_test_ex_vec
    prob_hisps_MMV.x_test_ex = x_test_ex_vec
    # Synchronous (delay-free) measurements through the original matrix.
    y_sync_noiseless = np.dot(np.reshape(x_sync, (L*E,N)), np.transpose(Ao))
    y_sync_noiseless = y_sync_noiseless.reshape([L,E,M])
    y_sync = y_sync_noiseless + noise[:,:,0:M]
    y_sync_test = y_sync[0:test_batch,:,:]
    y_sync_test_vec = y_sync_test.reshape([test_batch,M*E])
    prob_hisps_MMV.y_sync_test = y_sync_test_vec
    prob_hisps_MMV.y_sync_test_mat = np.transpose(y_sync_test, (2,1,0))
    prob_hisps_MMV.Ao = Ao
    prob_hisps_MMV.x_sync_mat = np.transpose(x_sync, (2,1,0))
    prob_hisps_MMV.y_sync_mat = np.transpose(y_sync, (2,1,0))
    prob_hisps_MMV.x_sync_D = x_sync.reshape([L*E,N])
    prob_hisps_MMV.y_sync_D = y_sync.reshape([L*E,M])
    prob_hisps_MMV.x_sync_test_D = x_sync_test.reshape([test_batch*E,N])
    prob_hisps_MMV.y_sync_test_D = y_sync_test.reshape([test_batch*E,M])
    prob_hisps_MMV.y_signal_D = y_signal.reshape([L*E,M+Tg])
    prob_hisps_MMV.x_D = x_vir_channel.reshape([L*E,N*(Tg+1)])
    prob_hisps_MMV.y_test_D = y_test.reshape([test_batch*E,M+Tg])
    prob_hisps_MMV.x_test_D = x_test.reshape([test_batch*E,N*(Tg+1)])
    prob_hisps_MMV.y_test_ex_D = y_test_ex.reshape([4*test_batch*E,M+Tg])
    prob_hisps_MMV.x_test_ex_D = x_test_ex.reshape([4*test_batch*E,N*(Tg+1)])
    prob_hisps_MMV.xval = x_vir_channel_vec
    prob_hisps_MMV.yval = y_signal_vec
    prob_hisps_MMV.xinit = x_vir_channel_vec
    prob_hisps_MMV.yinit = y_signal_vec
    prob_hisps_MMV.noise_var = noise_var
    return prob_hisps_MMV
def cbg_hisps_MMV_trial(A0r,A0i,M=40,N=200,E=1,L=105000,Tg=3,pnz=0.05,SNR=0,iid=True):
# complex-valued system
Ar = np.zeros([M+Tg,N*(Tg+1)],dtype=np.float32)
Ai = np.zeros([M+Tg,N*(Tg+1)],dtype=np.float32)
for i1 in range(N):
for i2 in range(Tg+1):
Ar[i2:(i2+M),i1*(Tg+1)+i2] = A0r[:,i1]
Ai[i2:(i2+M),i1*(Tg+1)+i2] = A0i[:,i1]
A = np.zeros([2*(M+Tg),2*N*(Tg+1)]).astype(np.float32)
A[0:(M+Tg),0:(N*(Tg+1))] = Ar
A[0:(M+Tg),(N*(Tg+1)):(2*N*(Tg+1))] = -Ai
A[(M+Tg):(2*(M+Tg)),0:(N*(Tg+1))] = Ai
A[(M+Tg):(2*(M+Tg)),(N*(Tg+1)):(2*N*(Tg+1))] = Ar
AT = np.transpose(A)
A0 = np.zeros([2*M,2*N]).astype(np.float32)
A0[0:M,0:N] = A0r
A0[0:M,N:(2*N)] = -A0i
A0[M:(2*M),0:N] = A0i
A0[M:(2*M),N:(2*N)] = A0r
A_ = tf.constant(A, name='A')
prob_hisps_MMV = TFGenerator(A=A, A_=A_, pnz=pnz, SNR=SNR, A0=A0, M=M, N=N, Tg=Tg, E=E, iid=iid)
prob_hisps_MMV.name = 'Complex-Bernoulli-Gaussian-Hierarchical-Sparse-MMV, random A'
prob_hisps_MMV.A = A
prob_hisps_MMV.iid = iid
prob_hisps_MMV.M = M
prob_hisps_MMV.N = N
prob_hisps_MMV.E = E
prob_hisps_MMV.Tg = Tg
prob_hisps_MMV.pnz = pnz
prob_hisps_MMV.Mt = M+Tg
prob_hisps_MMV.Nt = N*(Tg+1)
# large-scale fading
if iid == False:
alpha = 15.3
beta = 37.6
lsf_sd = 0.1 ** ((alpha + beta * np.log10(150)) / 10)
distance = np.random.uniform(low=50, high=250, size=N).astype(np.float32)
lsf = np.zeros([N]).astype(np.float32)
lsf_t = np.zeros([N*(Tg+1)]).astype(np.float32)
for il in range(N):
lsf[il] = 0.1**( (alpha + beta*np.log10(distance[il]))/10 )
for iT in range(Tg+1):
lsf_t[il*(Tg+1)+iT] = lsf[il]
lsf = np.sqrt(1.0/lsf_sd*lsf)
lsf_t = np.sqrt(1.0/lsf_sd*lsf_t)
else:
lsf = np.ones([N]).astype(np.float32)
lsf_t = np.ones([N*(Tg+1)]).astype(np.float32)
prob_hisps_MMV.lsf = lsf
prob_hisps_MMV.lsf_t = lsf_t
# channel
naep = np.floor(pnz*N).astype(np.int32)
test_batch = 5000
bernoulli = (np.random.uniform(0,1,size=(L,N))<pnz).astype(np.float32)
nau = np.sum(bernoulli,axis=1)
for iL in range(test_batch):
if nau[iL] == 0:
bernoulli[iL,0:naep] = np.ones([naep]).astype(np.float32)
oneE = np.ones([E],dtype=np.float32)
ind_channel = bernoulli.reshape([L,1,N]) * np.reshape(oneE,(1,E,1))
prob_hisps_MMV.ind = np.transpose(bernoulli, (1,0))
x_channel_r = np.sqrt(0.5)*np.random.normal(size=(L,E,N),scale=1.0).astype(np.float32)
x_channel_i = np.sqrt(0.5)*np.random.normal(size=(L,E,N),scale=1.0).astype(np.float32)
x_channel_r = x_channel_r*lsf.reshape([1,1,N])
x_channel_i = x_channel_i*lsf.reshape([1,1,N])
x_sync_r = np.multiply(x_channel_r, ind_channel)
x_sync_i = np.multiply(x_channel_i, ind_channel)
x_sync = np.zeros([L,E,2*N]).astype(np.float32)
x_sync[:,:,0:N] = x_sync_r
x_sync[:,:,N:(2*N)] = x_sync_i
x_sync_test = x_sync[0:test_batch,:,:]
#x_sync_test_vec = x_sync_test.reshape([test_batch,2*N*E])
prob_hisps_MMV.x_sync_test_mat = np.transpose(x_sync_test, (2,1,0))
prob_hisps_MMV.x_sync_test = x_sync_test
prob_hisps_MMV.x_sync_train = x_sync[test_batch:L,:,:]
ind_test = bernoulli[0:test_batch,:]
prob_hisps_MMV.ind_test = np.transpose(ind_test,(1,0))
user_delay = np.random.random_integers(0,Tg,size=(L,N))
prob_hisps_MMV.ud = np.transpose(user_delay,(1,0))
ud_test = user_delay[0:test_batch,:]
# NOTE(review): this chunk relies on globals defined earlier in the file
# (L, E, N, M, Tg, SNR, test_batch, ud_test, x_channel_r/_i, bernoulli,
# user_delay, x_sync, A0, AT, prob_hisps_MMV, ...).  Shapes below are
# inferred from the indexing here — confirm against their definitions.
prob_hisps_MMV.ud_test = np.transpose(ud_test,(1,0))
# Delay-expanded "virtual" channel: real and imaginary halves stacked
# along the last axis (2*N*(Tg+1) entries per example/antenna).
x_vir_channel = np.zeros([L,E,2*N*(Tg+1)]).astype(np.float32)
x_vir_channel_r = np.zeros([L,E,N*(Tg+1)],dtype=np.float32)
x_vir_channel_i = np.zeros([L,E,N*(Tg+1)],dtype=np.float32)
for iL in range(L):
    for iu in range(N):
        # Only active users (Bernoulli draw == 1) contribute.
        if bernoulli[iL,iu] == 1.0:
            # Place user iu's coefficient at its delay offset inside its
            # (Tg+1)-wide slot; index2 addresses the imaginary half.
            index1 = iu*(Tg+1)+user_delay[iL,iu]
            index2 = N*(Tg+1)+iu*(Tg+1)+user_delay[iL,iu]
            x_vir_channel[iL,:,index1] = x_channel_r[iL,:,iu]
            x_vir_channel[iL,:,index2] = x_channel_i[iL,:,iu]
            x_vir_channel_r[iL,:,index1] = x_channel_r[iL,:,iu]
            x_vir_channel_i[iL,:,index1] = x_channel_i[iL,:,iu]
#x_vir_channel_vec = x_vir_channel.reshape([L,N*(Tg+1)*E])
#prob_hisps_MMV.x_mat = np.transpose(x_vir_channel, (2,1,0))
# Train/test split of the sparse targets (first test_batch examples are
# the test set).
x_test = x_vir_channel[0:test_batch,:,:]
prob_hisps_MMV.x_test_mat = np.transpose(x_test, (2,1,0))
#x_test_vec = x_test.reshape([test_batch,N*(Tg+1)*E])
prob_hisps_MMV.x_test = x_test.reshape([test_batch,E*2*N*(Tg+1)])
x_train = x_vir_channel[test_batch:L,:,:]
prob_hisps_MMV.x_train = x_train.reshape([-1,E*2*N*(Tg+1)])
# noise and ex signal with various SNR
# Noise standard deviation for the training SNR; SNR is presumably in
# dB — TODO confirm.
noise_var = 1/np.sqrt(M)*np.sqrt(math.pow(10., -SNR/10.))
noise = noise_var*np.sqrt(0.5)*np.random.normal(size=(L,E,2*(M+Tg)), scale=1.0).astype(np.float32)
prob_hisps_MMV.noise_mat = np.transpose(noise, (2,1,0))
# Extra evaluation noise at four alternative SNR levels, reusing the
# test-set noise rescaled to each level.
SNR_ex = np.array([2,4,6,8])
sigma_w = np.zeros([4]).astype(np.float32)
noise_ex = np.zeros([4*test_batch,E,2*(M+Tg)]).astype(np.float32)
for iw in range(4):
    # NOTE(review): unlike noise_var above, no sqrt() wraps the
    # 10^(-SNR/10) factor here — confirm whether that is intentional.
    sigma_w[iw] = 1/np.sqrt(M)*math.pow(10., -SNR_ex[iw]/10.)
    noise_ex[(iw*test_batch):((iw+1)*test_batch),:,:] = sigma_w[iw]/noise_var*noise[0:test_batch,:,:]
# signal y in sync system (seems useless)
y_sync_noiseless = np.dot(np.reshape(x_sync,(L*E,2*N)), np.transpose(A0))
y_sync_noiseless = y_sync_noiseless.reshape([L,E,2*M])
# Build matching synchronous-length noise by dropping the guard-interval
# samples from each half of the async noise.
noise_sync = np.zeros([L,E,2*M])
noise_sync[:,:,0:M] = noise[:,:,0:M]
noise_sync[:,:,M:(2*M)] = noise[:,:,(M+Tg):(2*M+Tg)]
y_sync = y_sync_noiseless + noise_sync
y_sync_test = y_sync[0:test_batch,:,:]
y_sync_train = y_sync[test_batch:L,:,:]
#y_sync_test_vec = y_sync_test.reshape([test_batch,M*E])
prob_hisps_MMV.y_sync_test = y_sync_test.reshape([test_batch,E*2*M])
prob_hisps_MMV.y_sync_train = y_sync_train.reshape([-1,E*2*M])
prob_hisps_MMV.y_sync_test_mat = np.transpose(y_sync_test, (2,1,0))
prob_hisps_MMV.A0 = A0
#prob_hisps_MMV.x_sync_mat = np.transpose(x_sync, (2,1,0))
#prob_hisps_MMV.y_sync_mat = np.transpose(y_sync, (2,1,0))
# signal y in async system
y_signal_noiseless = np.dot(np.reshape(x_vir_channel, (E*L,2*N*(Tg+1))), AT)
y_signal_noiseless = y_signal_noiseless.reshape([L,E,2*(M+Tg)])
y_signal = y_signal_noiseless + noise
#prob_hisps_MMV.y_mat = np.transpose(y_signal, (2,1,0))
#y_signal_vec = y_signal.reshape([L,(M+Tg)*E])
y_test = y_signal[0:test_batch,:,:]
#y_test_vec = y_test.reshape([test_batch, (M+Tg)*E])
prob_hisps_MMV.y_test_mat = np.transpose(y_test, (2,1,0))
prob_hisps_MMV.y_test = y_test.reshape(test_batch,E*2*(M+Tg))
y_train = y_signal[test_batch:L,:,:]
prob_hisps_MMV.y_train = y_train.reshape([-1,E*2*(M+Tg)])
# Replicate the test observations once per extra SNR level and add the
# matching rescaled noise.
y_test_ex = np.zeros([4*test_batch,E,2*(M+Tg)])
x_test_ex = np.zeros([4*test_batch,E,2*N*(Tg+1)]).astype(np.float32)
for iw in range(4):
    x_test_ex[(iw*test_batch):((iw+1)*test_batch),:,:] = x_test
    y_test_ex[(iw*test_batch):((iw+1)*test_batch),:,:] = y_signal_noiseless[0:test_batch,:,:] + noise_ex[(iw*test_batch):((iw+1)*test_batch),:,:]
prob_hisps_MMV.y_test_ex_mat = np.transpose(y_test_ex, (2,1,0))
prob_hisps_MMV.x_test_ex_mat | |
import asyncio
import base64
import os
import signal
import time
from collections import defaultdict
import pytest
from cryptography.fernet import Fernet
from traitlets import Integer, Float
from traitlets.config import Config
import dask
from dask_gateway import GatewayClusterError, GatewayCluster
from dask_gateway_server.app import DaskGateway
from dask_gateway_server.backends import db_base
from dask_gateway_server.backends.base import ClusterConfig
from dask_gateway_server.backends.db_base import (
DBBackendBase,
timestamp,
JobStatus,
DataManager,
)
from dask_gateway_server.backends.inprocess import InProcessBackend
from dask_gateway_server.compat import get_running_loop
from dask_gateway_server.utils import random_port
from dask_gateway_server import options
from .utils_test import temp_gateway, LocalTestingBackend, wait_for_workers
@pytest.fixture(autouse=True)
def ensure_clusters_closed():
    # Runs before every test: counts GatewayCluster instances leaked by
    # earlier tests, closes any synchronous ones, then asserts nothing
    # had leaked.
    # NOTE(review): there is no `yield`, so this executes entirely
    # before the test body rather than as teardown — confirm that is
    # intentional (a post-test check would need `yield` first).
    instances = len(GatewayCluster._instances)
    for c in list(GatewayCluster._instances):
        if not c.asynchronous:
            c.close()
    assert instances == 0
class ClusterSlowToStart(DBBackendBase):
    """Test backend whose cluster startup yields three states with a
    configurable pause between them."""

    pause_time = Float(0.25, config=True)

    state_1 = {"state": 1}
    state_2 = {"state": 2}
    state_3 = {"state": 3}

    # Last cluster.state seen by do_stop_cluster, for assertions.
    stop_cluster_state = None
    running = False

    async def do_start_cluster(self, cluster):
        self.running = True
        # Sleep before every state except the first, matching the
        # yield/sleep/yield/sleep/yield cadence.
        for n, state in enumerate((self.state_1, self.state_2, self.state_3)):
            if n:
                await asyncio.sleep(self.pause_time)
            yield state

    async def do_check_clusters(self, clusters):
        return [self.running] * len(clusters)

    async def do_stop_cluster(self, cluster):
        self.stop_cluster_state = cluster.state
        self.running = False
class ClusterFailsDuringStart(DBBackendBase):
    """Test backend that raises partway through cluster startup."""

    fail_stage = Integer(1, config=True)

    # Last cluster.state seen by do_stop_cluster, for assertions.
    stop_cluster_state = None

    async def do_start_cluster(self, cluster):
        stage = 0
        while stage < 3:
            if stage == self.fail_stage:
                raise ValueError("Oh No")
            yield {"i": stage}
            stage += 1

    async def do_check_clusters(self, clusters):
        return [True for _ in clusters]

    async def do_stop_cluster(self, cluster):
        self.stop_cluster_state = cluster.state
class ClusterFailsBetweenStartAndConnect(InProcessBackend):
    """Test backend that 'fails' immediately after startup completes."""

    status = "starting"

    async def do_start_cluster(self, cluster):
        yield {"foo": "bar"}
        self.status = "failed"

    async def do_check_clusters(self, clusters):
        alive = self.status not in ("failed", "stopped")
        return [alive for _ in clusters]

    async def do_stop_cluster(self, cluster):
        self.status = "stopped"
class ClusterFailsAfterConnect(InProcessBackend):
    """Test backend recording when do_stop_cluster is first called."""

    async def do_setup(self):
        await super().do_setup()
        # Resolved on the first do_stop_cluster call.
        self.stop_cluster_called = get_running_loop().create_future()

    async def do_stop_cluster(self, cluster):
        if not self.stop_cluster_called.done():
            self.stop_cluster_called.set_result(True)
class TracksStopWorkerCalls(InProcessBackend):
    """Test backend recording the first do_stop_worker call and the
    worker state it observed."""

    async def do_setup(self):
        await super().do_setup()
        # Resolved on the first do_stop_worker call.
        self.stop_worker_called = get_running_loop().create_future()
        self.stop_worker_state = None

    async def do_stop_worker(self, worker):
        if not self.stop_worker_called.done():
            self.stop_worker_called.set_result(True)
        self.stop_worker_state = worker.state
class WorkerSlowToStart(TracksStopWorkerCalls):
    """Test backend whose worker startup pauses after every state."""

    pause_time = Float(0.2, config=True)

    async def do_start_worker(self, worker):
        stage = 0
        while stage < 3:
            yield {"i": stage}
            await asyncio.sleep(self.pause_time)
            stage += 1
class WorkerFailsDuringStart(TracksStopWorkerCalls):
    """Test backend whose worker startup raises partway through, and
    which also records when the cluster gets stopped."""

    fail_stage = Integer(1, config=True)

    async def do_setup(self):
        await super().do_setup()
        # Resolved on the first do_stop_cluster call.
        self.stop_cluster_called = get_running_loop().create_future()

    async def do_start_worker(self, worker):
        stage = 0
        while stage < 3:
            if stage == self.fail_stage:
                raise ValueError("Oh No")
            yield {"i": stage}
            stage += 1

    async def do_stop_cluster(self, cluster):
        if not self.stop_cluster_called.done():
            self.stop_cluster_called.set_result(True)
        await super().do_stop_cluster(cluster)
class WorkerFailsBetweenStartAndConnect(TracksStopWorkerCalls):
    """Test backend whose workers start but never report as running."""

    async def do_start_worker(self, worker):
        yield {"state": 1}

    async def do_check_workers(self, workers):
        return [False for _ in workers]
def test_shutdown_on_startup_error(tmpdir, capsys):
    """A config that fails at runtime (not at init time) exits with
    code 1 and names the offending setting on stderr."""
    config = Config()
    # Point at a cert file that does not exist.
    config.Proxy.tls_cert = str(tmpdir.join("tls_cert.pem"))
    gateway = DaskGateway(config=config)
    with pytest.raises(SystemExit) as exc_info:
        gateway.initialize([])
        gateway.start()
    assert exc_info.value.code == 1
    assert "tls_cert" in capsys.readouterr().err
def test_db_encrypt_keys_required(tmpdir, capsys):
    """A persistent database without encrypt keys aborts startup with an
    error mentioning DASK_GATEWAY_ENCRYPT_KEYS."""
    config = Config()
    config.DBBackendBase.db_url = "sqlite:///%s" % tmpdir.join("dask_gateway.sqlite")
    with pytest.raises(SystemExit) as exc_info:
        gateway = DaskGateway(config=config)
        gateway.initialize([])
        gateway.start()
    assert exc_info.value.code == 1
    assert "DASK_GATEWAY_ENCRYPT_KEYS" in capsys.readouterr().err
def test_db_encrypt_keys_invalid(tmpdir):
    """Malformed encrypt keys raise a ValueError naming the env var."""
    config = Config()
    config.DBBackendBase.db_url = "sqlite:///%s" % tmpdir.join("dask_gateway.sqlite")
    config.DBBackendBase.db_encrypt_keys = ["abc"]
    with pytest.raises(ValueError) as exc_info:
        gateway = DaskGateway(config=config)
        gateway.initialize([])
    assert "DASK_GATEWAY_ENCRYPT_KEYS" in str(exc_info.value)
def test_db_encrypt_keys_from_env(monkeypatch):
    """Encrypt keys are read from DASK_GATEWAY_ENCRYPT_KEYS as a
    ';'-separated list."""
    keys = [Fernet.generate_key(), Fernet.generate_key()]
    monkeypatch.setenv("DASK_GATEWAY_ENCRYPT_KEYS", b";".join(keys).decode())
    gateway = DaskGateway()
    gateway.initialize([])
    assert gateway.backend.db_encrypt_keys == keys
def test_resume_clusters_forbid_in_memory_db():
    """stop_clusters_on_shutdown=False is rejected for an in-memory
    database — nothing could be resumed from it."""
    config = Config()
    config.DBBackendBase.db_url = "sqlite://"
    config.DBBackendBase.stop_clusters_on_shutdown = False
    with pytest.raises(ValueError) as exc_info:
        gateway = DaskGateway(config=config)
        gateway.initialize([])
    assert "stop_clusters_on_shutdown" in str(exc_info.value)
@pytest.mark.asyncio
async def test_encryption(tmpdir):
    # Round-trips data through DataManager's Fernet encryption, checks
    # that credentials are stored encrypted at rest, and that a second
    # DataManager with the same keys can reload them.
    db_url = "sqlite:///%s" % tmpdir.join("dask_gateway.sqlite")
    encrypt_keys = [Fernet.generate_key() for i in range(3)]
    db = DataManager(url=db_url, encrypt_keys=encrypt_keys)
    assert db.fernet is not None
    # Plain encrypt/decrypt round trip.
    data = b"my secret data"
    encrypted = db.encrypt(data)
    assert encrypted != data
    data2 = db.decrypt(encrypted)
    assert data == data2
    c = db.create_cluster("alice", {}, {})
    assert c.tls_cert is not None
    assert c.tls_key is not None
    # Check database state is encrypted
    with db.db.begin() as conn:
        res = conn.execute(
            db_base.clusters.select(db_base.clusters.c.id == c.id)
        ).fetchone()
    # Stored credentials must not equal the plaintext "cert;key" form.
    assert res.tls_credentials != b";".join((c.tls_cert, c.tls_key))
    cert, key = db.decrypt(res.tls_credentials).split(b";")
    token = db.decrypt(res.token).decode()
    assert cert == c.tls_cert
    assert key == c.tls_key
    assert token == c.token
    # Check can reload database with keys
    db2 = DataManager(url=db_url, encrypt_keys=encrypt_keys)
    c2 = db2.id_to_cluster[c.id]
    assert c2.tls_cert == c.tls_cert
    assert c2.tls_key == c.tls_key
    assert c2.token == c.token
def test_normalize_encrypt_key():
    """_normalize_encrypt_key accepts b64 bytes, b64 str, and raw
    32-byte keys, always returning the b64 bytes form; anything else
    raises ValueError naming DASK_GATEWAY_ENCRYPT_KEYS."""
    key = Fernet.generate_key()
    raw = base64.urlsafe_b64decode(key)
    # All accepted spellings normalize to the b64 bytes form.
    for accepted in (key, key.decode(), raw):
        assert db_base._normalize_encrypt_key(accepted) == key
    # Rejected: too short, too short once decoded, invalid b64 encode.
    for bad in (b"abcde", b"\x00" * 43 + b"=", b"=" + b"a" * 43):
        with pytest.raises(ValueError) as exc_info:
            db_base._normalize_encrypt_key(bad)
        assert "DASK_GATEWAY_ENCRYPT_KEYS" in str(exc_info.value)
def check_db_consistency(db):
    """Assert the in-memory caches agree with the database tables."""
    # Users without clusters are flushed from the cache.
    assert all(db.username_to_clusters.values())
    cluster_rows = db.db.execute(db_base.clusters.select()).fetchall()
    worker_rows = db.db.execute(db_base.workers.select()).fetchall()
    # Every cluster row is reachable through all three cache indexes.
    for row in cluster_rows:
        cached = db.id_to_cluster[row.id]
        assert db.name_to_cluster[row.name] is cached
        assert db.username_to_clusters[row.username][row.name] is cached
    n_rows = len(cluster_rows)
    assert len(db.id_to_cluster) == n_rows
    assert len(db.name_to_cluster) == n_rows
    assert sum(len(v) for v in db.username_to_clusters.values()) == n_rows
    # Each cached cluster holds exactly the worker names stored for it.
    workers_by_cluster = defaultdict(set)
    for row in worker_rows:
        workers_by_cluster[db.id_to_cluster[row.cluster_id].name].add(row.name)
    for cached in db.id_to_cluster.values():
        assert set(cached.workers) == workers_by_cluster[cached.name]
@pytest.mark.asyncio
async def test_cleanup_expired_clusters(monkeypatch):
    # Verifies cleanup_expired removes stopped clusters older than the
    # cutoff, flushes users left with no clusters, and is idempotent.
    db = DataManager()
    current_time = time.time()
    # Monotonic fake clock: every time.time() call advances 0.5s, so
    # each cluster/worker gets a distinct, ordered timestamp.
    def mytime():
        nonlocal current_time
        current_time += 0.5
        return current_time
    monkeypatch.setattr(time, "time", mytime)
    def add_cluster(user, stop=True):
        # Create a cluster with 5 workers; optionally mark everything
        # STOPPED with a stop_time from the fake clock.
        c = db.create_cluster(user, {}, {})
        for _ in range(5):
            w = db.create_worker(c)
            if stop:
                db.update_worker(
                    w,
                    target=JobStatus.STOPPED,
                    status=JobStatus.STOPPED,
                    stop_time=timestamp(),
                )
        if stop:
            db.update_cluster(
                c,
                status=JobStatus.STOPPED,
                target=JobStatus.STOPPED,
                stop_time=timestamp(),
            )
        return c
    add_cluster("alice", stop=True)  # c1
    add_cluster("alice", stop=True)  # c2
    add_cluster("bob", stop=True)  # c3
    c4 = add_cluster("alice", stop=False)
    # Everything stopped before this point is considered expired below.
    cutoff = mytime()
    c5 = add_cluster("alice", stop=True)
    c6 = add_cluster("alice", stop=False)
    check_db_consistency(db)
    # Set time to always return same value
    now = mytime()
    monkeypatch.setattr(time, "time", lambda: now)
    # 3 clusters are expired
    max_age = now - cutoff
    n = db.cleanup_expired(max_age)
    assert n == 3
    check_db_consistency(db)
    # Only alice remains, bob is removed since they have no clusters
    assert "alice" in db.username_to_clusters
    assert "bob" not in db.username_to_clusters
    # c4, c5, c6 are all that remains
    assert set(db.id_to_cluster) == {c4.id, c5.id, c6.id}
    # Running again expires no clusters
    max_age = now - cutoff
    n = db.cleanup_expired(max_age)
    assert n == 0
    check_db_consistency(db)
@pytest.mark.asyncio
@pytest.mark.parametrize("start_timeout,state", [(0.1, 1), (0.4, 2)])
async def test_slow_cluster_start(start_timeout, state):
    """A cluster exceeding cluster_start_timeout is stopped, and the
    backend sees the last state the start generator had reported."""
    config = Config()
    config.DaskGateway.backend_class = ClusterSlowToStart
    config.ClusterSlowToStart.check_timeouts_period = 0.05
    config.ClusterSlowToStart.cluster_start_timeout = start_timeout
    async with temp_gateway(config=config) as env:
        async with env.gateway_client() as client:
            # Submission fails due to the start timeout.
            cluster_id = await client.submit()
            with pytest.raises(GatewayClusterError) as exc_info:
                async with client.connect(cluster_id):
                    pass
            assert cluster_id in str(exc_info.value)
            # Stop-cluster was called with the last reported state.
            assert env.gateway.backend.stop_cluster_state == {"state": state}
@pytest.mark.asyncio
async def test_slow_cluster_connect():
    """A cluster that starts instantly but never connects within the
    timeout is stopped with the final start state recorded."""
    config = Config()
    config.DaskGateway.backend_class = ClusterSlowToStart
    config.ClusterSlowToStart.check_timeouts_period = 0.05
    config.ClusterSlowToStart.cluster_start_timeout = 0.1
    config.ClusterSlowToStart.pause_time = 0
    config.DaskGateway.log_level = "DEBUG"
    async with temp_gateway(config=config) as env:
        async with env.gateway_client() as client:
            # Submission fails due to the connect timeout.
            cluster_id = await client.submit()
            with pytest.raises(GatewayClusterError) as exc_info:
                async with client.connect(cluster_id):
                    pass
            assert cluster_id in str(exc_info.value)
            # All three start states were reported before the timeout.
            assert env.gateway.backend.stop_cluster_state == {"state": 3}
@pytest.mark.asyncio
@pytest.mark.parametrize("fail_stage", [0, 1])
async def test_cluster_fails_during_start(fail_stage):
    """An error inside do_start_cluster fails the submission, and
    do_stop_cluster receives the last state yielded before the error."""
    config = Config()
    config.DaskGateway.backend_class = ClusterFailsDuringStart
    config.ClusterFailsDuringStart.fail_stage = fail_stage
    async with temp_gateway(config=config) as env:
        async with env.gateway_client() as client:
            # Submission fails due to the error during start.
            cluster_id = await client.submit()
            with pytest.raises(GatewayClusterError) as exc_info:
                async with client.connect(cluster_id):
                    pass
            assert cluster_id in str(exc_info.value)
            # Nothing was yielded when failing at stage 0.
            expected = {} if fail_stage == 0 else {"i": fail_stage - 1}
            assert env.gateway.backend.stop_cluster_state == expected
@pytest.mark.asyncio
async def test_cluster_fails_between_start_and_connect():
    """A cluster that dies after start but before connecting is noticed
    by the status poll and cleaned up."""
    config = Config()
    config.DaskGateway.backend_class = ClusterFailsBetweenStartAndConnect
    config.ClusterFailsBetweenStartAndConnect.cluster_status_period = 0.1
    async with temp_gateway(config=config) as env:
        async with env.gateway_client() as client:
            cluster_id = await client.submit()
            # Connecting surfaces the start failure within the timeout.
            with pytest.raises(GatewayClusterError) as exc_info:
                await asyncio.wait_for(client.connect(cluster_id), 5)
            message = str(exc_info.value)
            assert cluster_id in message
            assert "failed to start" in message
            assert env.gateway.backend.status == "stopped"
@pytest.mark.asyncio
async def test_cluster_fails_after_connect():
    """Killing the scheduler after connect triggers cluster cleanup via
    the heartbeat/timeout checks."""
    config = Config()
    config.DaskGateway.backend_class = ClusterFailsAfterConnect
    config.DaskGateway.log_level = "DEBUG"
    config.ClusterFailsAfterConnect.cluster_heartbeat_period = 1
    config.ClusterFailsAfterConnect.check_timeouts_period = 0.5
    async with temp_gateway(config=config) as env:
        async with env.gateway_client() as client:
            # The cluster starts successfully.
            async with client.new_cluster() as cluster:
                # Kill the scheduler out from under the gateway.
                scheduler = env.gateway.backend.schedulers[cluster.name]
                await scheduler.close(fast=True)
                scheduler.stop()
                # The gateway notices and cleans up the cluster in time.
                await asyncio.wait_for(env.gateway.backend.stop_cluster_called, 5)
@pytest.mark.asyncio
@pytest.mark.parametrize("start_timeout,state", [(0.1, 0), (0.25, 1)])
async def test_slow_worker_start(start_timeout, state):
config = Config()
config.DaskGateway.backend_class = WorkerSlowToStart
config.WorkerSlowToStart.worker_start_timeout = start_timeout
config.WorkerSlowToStart.check_timeouts_period = 0.05
async with temp_gateway(config=config) as g:
async with g.gateway_client() as gateway:
async with gateway.new_cluster() as cluster:
await cluster.scale(1)
# Wait for worker failure
await asyncio.wait_for(g.gateway.backend.stop_worker_called, 5)
# Stop worker called with last reported state
| |
# repository: srihari-nagaraj/anuvaad
from anuvaad_auditor.loghandler import log_info
from anuvaad_auditor.loghandler import log_exception
from anuvaad_auditor.loghandler import log_debug
from collections import namedtuple
from src.utilities.region_operations import collate_regions, get_polygon,sort_regions, remvoe_regions
from src.services.segment import horzontal_merging
import src.utilities.app_context as app_context
import copy
# Axis-aligned rectangle record; fields are xmin, ymin, xmax, ymax.
# NOTE(review): not referenced in the visible portion of this file.
Rectangle = namedtuple('Rectangle', 'xmin ymin xmax ymax')
class MapKeys:
    """Accessors for boxes stored as
    {'boundingBox': {'vertices': [tl, tr, br, bl]}} dicts, where each
    vertex is an {'x': ..., 'y': ...} mapping."""

    def __init__(self):
        # Scratch attributes kept for backward compatibility; the
        # getters below never read them.
        self.left = None
        self.right = None
        self.top = None
        self.bottom = None

    def get_left(self, box):
        """x of the top-left vertex."""
        return int(box['boundingBox']['vertices'][0]['x'])

    def get_right(self, box):
        """x of the top-right vertex."""
        return int(box['boundingBox']['vertices'][1]['x'])

    def get_top(self, box):
        """y of the top-left vertex."""
        return int(box['boundingBox']['vertices'][0]['y'])

    def get_bottom(self, box):
        """y of the bottom-left vertex."""
        return int(box['boundingBox']['vertices'][3]['y'])

    def get_height(self, box):
        """Absolute vertical extent of the box."""
        return int(abs(self.get_top(box) - self.get_bottom(box)))

    def get_width(self, box):
        """Absolute horizontal extent of the box."""
        return int(abs(self.get_left(box) - self.get_right(box)))
# Module-level shared accessor instance used by the classes below.
keys = MapKeys()
class Page_Config:
    """Page-level layout statistics (average gaps and sizes) used as
    thresholds when deciding whether to merge detected regions."""

    def avg_region_info(self,page):
        # Average horizontal gap between horizontally disjoint region
        # pairs on the page.
        # NOTE(review): when no disjoint pair exists the division by
        # total_region == 0 raises and the bare except swallows it, so
        # the accumulated value (0) is returned; any other error in the
        # loop is silently swallowed the same way.
        try:
            total_region = 0; avg_hor_dist = 0
            for idx, region in enumerate(page):
                if idx+1<len(page):
                    for idx2, region2 in enumerate(page[idx+1:]):
                        # Count the gap whichever side region2 lies on.
                        if keys.get_right(region)<keys.get_left(region2):
                            hor_dis = abs(keys.get_right(region) - keys.get_left(region2))
                            avg_hor_dist = avg_hor_dist + hor_dis
                            total_region = total_region +1
                        if keys.get_right(region2)<keys.get_left(region):
                            hor_dis = abs(keys.get_right(region2) - keys.get_left(region))
                            avg_hor_dist = avg_hor_dist + hor_dis
                            total_region = total_region +1
            avg_hor_dist = avg_hor_dist / total_region
        except:
            pass
        return avg_hor_dist

    def avg_line_info(self,page):
        # Per-line averages across all regions' children: line height,
        # vertical distance between consecutive line tops, and width.
        # Returns (avg_height, avg_ver_dist, avg_width); partial sums
        # may be returned if the bare except fires mid-computation.
        try:
            avg_height = 0; total_line = 0
            avg_ver_dist = 0; avg_width = 0
            ver_dist_mes_count = 0
            for region in page:
                if region['children'] !=None:
                    total_line = total_line+len(region['children'])
                    for idx, line in enumerate(region['children']):
                        height = keys.get_height(line)
                        avg_height = avg_height + height
                        avg_width = avg_width+ keys.get_width(line)
                        current_line_top = keys.get_top(line)
                        if idx<len(region['children'])-1:
                            next_line_top = keys.get_top(region['children'][idx+1])
                            max_height = max( keys.get_height(region['children'][idx+1]) ,keys.get_height(region['children'][idx]))
                            ver_dis = abs(next_line_top-current_line_top)
                            # Gaps smaller than half the larger line
                            # height are treated as same-row fragments
                            # and excluded from the average.
                            if ver_dis > max_height * 0.5 :
                                avg_ver_dist = avg_ver_dist + ver_dis
                                ver_dist_mes_count +=1
            avg_height = avg_height / total_line
            avg_width = avg_width / total_line
            if ver_dist_mes_count > 0 :
                avg_ver_dist = avg_ver_dist / ver_dist_mes_count
            else:
                # No measurable inter-line gap: fall back to the
                # average line height.
                avg_ver_dist = avg_height
        except:
            pass
        return avg_height, avg_ver_dist, avg_width

    def avg_word_sep(self, page):
        # Average horizontal spacing between consecutive words within
        # each line.
        # NOTE(review): the denominator (total_words - len(page)) is
        # zero when every line has exactly one word — that raises and is
        # absorbed by the bare except, returning the raw sum; a negative
        # denominator (empty children lists) silently yields a negative
        # value. Confirm callers tolerate both.
        try:
            avg_height = 0
            total_words = 0
            avg_spacing = 0
            avg_width = 0
            for line in page:
                if line['children'] != None:
                    total_words = total_words + len(line['children'])
                    for idx, word in enumerate(line['children']):
                        if idx < len(line['children']) - 1:
                            #print(len(line['children']))
                            next_line_left = keys.get_left(line['children'][idx + 1])
                            current_line_right = keys.get_right(line['children'][idx])
                            spacing = abs(next_line_left - current_line_right)
                            avg_spacing = avg_spacing + spacing
            avg_spacing = avg_spacing / (total_words - len(page))
        except:
            pass
        return avg_spacing
class Region_Unifier:
def check_horizon_region(self,box1,box2):
if keys.get_right(box1)<keys.get_left(box2):
return True
if keys.get_right(box2)<keys.get_left(box1):
return True
else:
return False
def get_text_tabel_region(self,regions):
text_region = []
tabel_region = []
image_region = []
n_text_table_regions = []
for region in regions:
if region['class'] in ['TEXT', "HEADER",'FOOTER']:
text_region.append(region)
else:
if region['class']=='TABLE':
tabel_region.append(region)
else:
if region['class']=='IMAGE':
image_region.append(region)
else :
n_text_table_regions.append(region)
return text_region,n_text_table_regions,tabel_region,image_region
    def check_double_column(self, boxes, avg_height):
        # Heuristic: the page is double-column when a large fraction
        # (>30%) of regions have a horizontally disjoint partner and
        # both partners are tall (>3 * avg line height).
        # Works on a deep copy so the caller's list is untouched.
        total_regions = len(boxes)
        count =0
        regions = copy.deepcopy(boxes)
        while len(regions)>2:
            flag = False
            reg1 = regions[0]
            for idx,reg2 in enumerate(regions[1:]):
                if self.check_horizon_region(reg1,reg2) and keys.get_height(reg1)>3*avg_height and keys.get_height(reg2)>3*avg_height :
                    flag = True
                    # Head region has a tall side-by-side partner:
                    # consume it and count the match.
                    del regions[0]
                    break
            if flag==True:
                count=count+1
            else:
                del regions[0]
        if count>0.3*total_regions:
            return True
        else:
            return False
    def merge_condition(self,reg1,reg2,avg_height, avg_ver_dist, avg_width,avg_word_sepc):
        # Decide whether reg2 should be merged into reg1; callers pass
        # reg1 as the upper region (ordered by top). Thresholds derive
        # from page-wide averages.
        # NOTE(review): some paths fall through with no explicit return,
        # so the result may be None (falsy) rather than False.
        box1_top = keys.get_top(reg1); box1_bottom = keys.get_bottom(reg1)
        box1_left = keys.get_left(reg1); box1_right = keys.get_right(reg1)
        box2_top = keys.get_top(reg2); box2_bottom = keys.get_bottom(reg2)
        box2_left = keys.get_left(reg2); box2_right = keys.get_right(reg2)
        box1_lines = reg1["children"]; box2_lines = reg2["children"]
        # Horizontal tolerance: two average word gaps; width tolerance:
        # 10% of the average line width.
        hor_diff_thresh = avg_word_sepc*2 ; line_width_diff = avg_word_sepc*0 + avg_width*0.1
        #print(hor_diff_thresh,'')
        #issue in order
        # Horizontal separation between the two boxes (currently only
        # used by the commented-out variants below).
        if box1_left > box2_left :
            sepration = abs(box1_left -box2_right)
        else :
            sepration = abs(box2_left -box1_right)
        # Both regions must have at least one line each.
        if box1_lines!= None and len(box1_lines)>0 and box2_lines!=None and len(box2_lines)>0:
            box1_last_line = box1_lines[-1]; box2_first_line = box2_lines[0]
            #Mergin lines which are detected as regions
            # if (keys.get_height(reg1)<= avg_height*2 and keys.get_height(reg2)<= avg_height+2) \
            # and sepration < hor_diff_thresh\
            # and abs(box2_top-box1_bottom)< 3 * avg_ver_dist:
            # return True
            # ########### conditions based on merging two horizon regions which are lines and horizontal spaing is less than threshold
            # Case 1: both regions are line-sized and horizontally
            # disjoint; merge only when the gap between them is small
            # and their verticals nearly touch.
            if self.check_horizon_region(reg1,reg2) \
                and (keys.get_height(reg1)<= avg_height*2 and keys.get_height(reg2)<= avg_height*2) :
                if (0<(keys.get_left(reg2)-keys.get_right(reg1))<hor_diff_thresh \
                    and abs(box2_top-box1_bottom)<avg_ver_dist) \
                    or (0<(keys.get_left(reg1)-keys.get_right(reg2))<hor_diff_thresh \
                    and abs(box2_top-box1_bottom)<avg_ver_dist):
                    return True
                else:
                    return False
            ############
            #based on box separation :
            # if abs(keys.get_width(reg1)-keys.get_width(reg2))<line_width_diff\
            # and abs(box2_top-box1_bottom)<avg_ver_dist *2 \
            # and keys.get_right(box2_first_line)-keys.get_right(box1_last_line)< line_width_diff \
            # and keys.get_left(box2_first_line)-keys.get_left(box1_last_line)< line_width_diff :
            # return True
            #IF a running paragraph is broken (1) :
            # Case 2: reg1 looks like a full paragraph whose last line
            # aligns with reg2's first line.
            if len(box1_lines) > 1 :
                box_1_second_last_line = box1_lines[-2]
                if (keys.get_left(box2_first_line)-keys.get_left(box1_last_line)< hor_diff_thresh)\
                    and (keys.get_right(box_1_second_last_line)-keys.get_right(box1_last_line)< hor_diff_thresh)\
                    and abs(box2_top-box1_bottom)<avg_ver_dist *2 :
                    return True
            # IF a running paragraph is broken (2)
            # Reject when reg2's first line extends well past reg1's
            # last line on the right.
            if keys.get_right(box2_first_line)-keys.get_right(box1_last_line) > hor_diff_thresh*0.5 :
                return False
            # based on box separation :
            # Case 3: boundary lines align on both sides and the boxes
            # are vertically close.
            if abs(box2_top-box1_bottom)<avg_ver_dist *2 \
                and keys.get_right(box2_first_line)-keys.get_right(box1_last_line)< hor_diff_thresh \
                and keys.get_left(box2_first_line)-keys.get_left(box1_last_line)< hor_diff_thresh :
                return True
            # if abs(box2_top-box1_bottom)<avg_ver_dist and abs(box1_left-box2_left)<50 and abs(box1_right-box2_right)<50:
            # return True
            # if (abs(box1_bottom-box2_top)<avg_ver_dist*0.5 \
            # and abs(box1_left-box2_left)<line_width_diff) \
            # or (abs(box1_bottom-box2_top)<avg_ver_dist*0.5\
            # and abs(box1_right-box2_right)<line_width_diff):
            # return True
            # else:
            # return False
        else:
            return False
def check_region_unification(self,reg1,reg2,avg_height, avg_ver_dist, avg_width,avg_word_sepc):
box1_top = keys.get_top(reg1)
box2_top = keys.get_top(reg2)
if box1_top < box2_top:
return self.merge_condition(reg1,reg2,avg_height, avg_ver_dist, avg_width,avg_word_sepc)
if box1_top > box2_top:
return self.merge_condition(reg2,reg1,avg_height, avg_ver_dist, avg_width,avg_word_sepc)
def update_children(self,reg1,reg2):
page_config = Page_Config()
if reg1['children']!=None and len(reg1['children']) > 0 :
if reg2['children']!=None and len(reg2['children']) > 0 :
agg_children = reg1['children'] + reg2['children']
agg_children.sort(key=lambda x: x['boundingBox']['vertices'][0]['y'])
children = sort_regions(agg_children , [])
if len(children) > 1 :
avg__region_height, avg__region_ver_dist, avg__region_width = page_config.avg_line_info([{'children': children}])
avrage_region_ver_ratio = avg__region_ver_dist / max(1,avg__region_height)
return horzontal_merging(children, avrage_region_ver_ratio)
#v_list[idx] =v_block
else:
return children
else :
return reg1['children']
else :
if reg2['children']!=None and len(reg2['children']) > 0 :
return reg2['children']
else :
return []
def update_coord(self,reg1,reg2):
#try:
box1_top = keys.get_top(reg1); box1_bottom = keys.get_bottom(reg1)
box1_left = keys.get_left(reg1); box1_right = keys.get_right(reg1)
box2_top = keys.get_top(reg2); box2_bottom = keys.get_bottom(reg2)
box2_left = keys.get_left(reg2); box2_right = keys.get_right(reg2)
reg1['children'] = self.update_children(reg1, reg2)
reg1["boundingBox"]["vertices"][0]['x']= min(box1_left,box2_left)
reg1["boundingBox"]["vertices"][0]['y']= min(box1_top,box2_top)
reg1["boundingBox"]["vertices"][1]['x']= max(box1_right,box2_right)
reg1["boundingBox"]["vertices"][1]['y']= min(box1_top,box2_top)
reg1["boundingBox"]["vertices"][2]['x']= max(box1_right,box2_right)
reg1["boundingBox"]["vertices"][2]['y']= max(box1_bottom,box2_bottom)
reg1["boundingBox"]["vertices"][3]['x']= min(box1_left,box2_left)
reg1["boundingBox"]["vertices"][3]['y']= max(box1_bottom,box2_bottom)
#reg1['class'] = 'TEXT'
# except:
# pass
return reg1
def is_connected(self,region1, region2,avg_height, avg_ver_dist, avg_width,avg_word_sepc):
region_poly = get_polygon(region2['boundingBox'])
base_poly = get_polygon(region1['boundingBox'])
area=0
check=False
if region_poly and base_poly:
area = base_poly.intersection(region_poly).area
check = self.check_region_unification(region1,region2,avg_height, avg_ver_dist, avg_width,avg_word_sepc)
return area>0 or check
    def merge_remove_overlap(self,text_regions,avg_height, avg_ver_dist, avg_width,avg_word_sepc):
        # One pass of pairwise merging: repeatedly take the head region,
        # absorb the first connected partner (then rescan with the grown
        # head), or move the head to the output when nothing connects.
        # Consumes text_regions in place; returns (merged_list, flag)
        # where flag is True if at least one merge happened.
        region_updated = []
        flag =False
        while len(text_regions)>0:
            check = False
            region_temp= text_regions[1:]
            for idx2,region2 in enumerate(region_temp):
                if self.is_connected(text_regions[0], region2, avg_height, avg_ver_dist, avg_width,avg_word_sepc):
                    region1 = self.update_coord(text_regions[0],region2)
                    text_regions[0] = copy.deepcopy(region1)
                    check =True ; flag = True
                    # Remove the absorbed partner; the loop restarts so
                    # the enlarged head is compared against the rest.
                    del text_regions[idx2+1]
                    break
            if check == False:
                region_updated.append(copy.deepcopy(text_regions[0]))
                del text_regions[0]
        return region_updated, flag
    # middle box merging kan_1_0
    # second point (2.) merging kan_1_1
    # horizontal block merging kan_1_7
    # b1 last line / b2 first line right comparison kan_1_9
    # kan_1_12
    # kan_1_16 line merging issue, merging above 7.1
    # kan_1_20 (check after h_merging is fixed)
    # kan_1_21
    # kan_1_22 (check with left/right diff) horizontal_diff threshold
    # kan_1_29 horizontal region merging if regions are very close (a separate condition)
    # kan_1_30
    # yolo_1_1
    # 20695_1_0 (reportable)
    # 20695_1_2 (merging of list with numbering)
    # 20695_1_3 sorting issue
    # segmenter kan_1_1
    # background removal (quick fix)
    # background removal integration with prima
    # segmenter kan_1_23
    # 36066_2008_3_1502_15489_Judgement_01-Aug-2019_ORI page [1,6] last line of paragraph
def region_unifier(self,page_words, page_lines,page_regions):
try:
text_region,n_text_table_regions,tabel_region,image_region = self.get_text_tabel_region(page_regions)
tabel_region = remvoe_regions(copy.deepcopy(image_region), copy.deepcopy(tabel_region))
filtered_words = remvoe_regions(copy.deepcopy(image_region), copy.deepcopy(page_words))
filtered_lines = remvoe_regions(copy.deepcopy(image_region), copy.deepcopy(page_lines))
for idx,table in enumerate(tabel_region):
filtered_words = remvoe_regions(copy.deepcopy(table['children']), copy.deepcopy(filtered_words))
filtered_lines = remvoe_regions(copy.deepcopy(table['children']), copy.deepcopy(filtered_lines))
tabel_region[idx]['children'] = collate_regions(copy.deepcopy( table['children']),copy.deepcopy(page_words),grand_children=False,region_flag = False)
page_words = filtered_words
page_lines = filtered_lines
text_region = remvoe_regions(copy.deepcopy(tabel_region) ,copy.deepcopy(text_region))
# filtered_words = remvoe_regions(copy.deepcopy(tabel_region), copy.deepcopy(page_words))
# filtered_lines = remvoe_regions(copy.deepcopy(tabel_region), copy.deepcopy(page_lines))
line_list = collate_regions(copy.deepcopy( filtered_lines), copy.deepcopy( filtered_words))
v_list = collate_regions( copy.deepcopy( text_region),copy.deepcopy( line_list ),grand_children=True )
#t_list = collate_regions(copy.deepcopy( tabel_region),copy.deepcopy(page_words),grand_children=True,region_flag = False)
t_list = tabel_region
i_list = collate_regions(copy.deepcopy( image_region),copy.deepcopy(page_words),grand_children=True,region_flag = False,skip_enpty_children=True)
for i in i_list :
if 'chiildren' in i.keys():
v_list.append(i)
# line_list = collate_regions(page_lines,page_words)
# v_list = collate_regions(page_regions,line_list,grand_children=True)
page_config = Page_Config()
# text_regions, n_text_regions = self.get_text_region(v_list)
avg_height, avg_ver_dist, avg_width = page_config.avg_line_info(v_list)
if avg_height == 0:
avg_height = 1
self.avg_ver_ratio = avg_ver_dist /avg_height
for idx,v_block in enumerate(v_list):
#if 'children' in v_block.keys()
if v_block['children'] != None and len(v_block['children']) > 1 :
#print(idx, 'region index')
#print('merging horrrrrrrrrrrrrrrrrrrr' , len(v_block['children']))
avg__region_height, avg__region_ver_dist, avg__region_width = page_config.avg_line_info([v_block])
v_block['avg_ver_dist'] = avg__region_ver_dist
avrage_region_ver_ratio= avg__region_ver_dist / max(1,avg__region_height)
#v_block['children'] = horzontal_merging(v_block['children'],avrage_region_ver_ratio)
v_list[idx] =v_block
for idx,t_block in enumerate(t_list):
if t_block['children'] != None and len(t_block['children']) > 1 :
#print(idx, 'region index')
#print('merging horrrrrrrrrrrrrrrrrrrr' , len(v_block['children']))
avg__region_height, avg__region_ver_dist, avg__region_width = page_config.avg_line_info([t_block])
t_block['avg_ver_dist'] = avg__region_ver_dist
avrage_region_ver_ratio= avg__region_ver_dist / max(1,avg__region_height)
#t_block['children'] = horzontal_merging(t_block['children'],avrage_region_ver_ratio)
t_list[idx] =t_block
################### page configs for region unifier
#avg_hor_dist = page_config.avg_region_info(text_regions)
avg_word_sepc = page_config.avg_word_sep(line_list)
# print("av height : ",avg_height)
# print("avg_ver_dist : ",avg_ver_dist)
# print("av avg_width : ",avg_width)
| |
# repository: CMPUT404F21-Very-Good-Team/CMPUT404-project-socialdistribution
import json
from functools import partial
import requests
from datetime import datetime, timezone
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth import login as django_login
from django.contrib.auth import logout as django_logout
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.paginator import (EmptyPage, InvalidPage, PageNotAnInteger,
Paginator)
from django.core.validators import URLValidator
from django.db.models import Subquery
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render
from django.utils import timezone
from django.views.decorators.http import require_GET, require_http_methods
from post.models import Like
from post.serializers import LikeSerializer
from rest_framework.authentication import (BasicAuthentication,
SessionAuthentication)
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from author import serializers
from .models import Author, Follow, Inbox
from post.models import Post, Like, Comment
from server.models import Setting, Node
from .serializers import AuthorSerializer
from post.serializers import LikeSerializer, CommentSerializer, PostSerializer
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from Social_Distribution import utils
class index(APIView):
    """List the authors local to this server (excludes remote nodes)."""

    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request):
        '''
        GET: retrieve all profiles on the server paginated. Does not return authors from other nodes.
        * If no page and size are given, returns all authors instead
        * If invalid parameters are given e.g. size = 0, negative page number, sends 400 Bad Request
        '''
        utils.update_authors()
        # node=None selects only authors owned by this server.
        author_query = Author.objects.filter(node=None).order_by("authorID")
        param_page = request.GET.get("page")
        param_size = request.GET.get("size")
        if param_page is not None and param_size is not None:
            author_paginator = Paginator(author_query, param_size)
            authors_data = []
            try:
                authors_data = AuthorSerializer(
                    author_paginator.page(param_page), many=True
                ).data
            except (PageNotAnInteger, ZeroDivisionError):
                # Non-numeric page or a size of 0 is a bad request.
                return Response(status=400)
            except EmptyPage:
                # Out-of-range page: respond with an empty item list.
                pass
        else:
            # No pagination parameters: return every local author.
            authors_data = AuthorSerializer(author_query, many=True).data
        return Response({"type": "authors", "items": authors_data})
class allAuthors(APIView):
    '''List every author profile known to this server, local and remote.'''
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request):
        '''
        GET: retrieve all profiles on the server paginated
        * If no page and size are given, returns all authors instead
        * If invalid parameters are given e.g. size = 0, negative page number, sends 400 Bad Request
        '''
        # Refresh the local author table first (may pull in remote updates).
        utils.update_authors()
        author_query = Author.objects.all().order_by("authorID")
        param_page = request.GET.get("page", None)
        param_size = request.GET.get("size", None)
        if param_page is not None and param_size is not None:
            authorPaginator = Paginator(author_query, param_size)
            authors_data = []
            try:
                authors_data = AuthorSerializer(authorPaginator.page(param_page), many=True).data
            except (PageNotAnInteger, ZeroDivisionError, ValueError):
                # Bad request: page is not a number, size is 0, or size is not numeric.
                # (ValueError added: a non-numeric "size" previously escaped as a 500.)
                return Response(status=400)
            except EmptyPage:
                # A page past the end returns an empty item list rather than an error.
                pass
        else:
            # No pagination parameters: return every known author.
            authors_data = AuthorSerializer(author_query, many=True).data
        response = {
            "type": "authors",
            "items": authors_data
        }
        return Response(response)
class profile(APIView):
    '''Retrieve or update a single author profile.'''
    # Fixed: this attribute was accidentally declared twice in the original.
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, author_id):
        '''GET: return the profile of the author identified by author_id, or 404.'''
        try:
            author_profile = Author.objects.get(authorID=author_id)
        except Author.DoesNotExist:
            return Response("This author does not exist", status=404)
        serializer = AuthorSerializer(author_profile)
        return Response(serializer.data)

    def post(self, request, author_id):
        '''POST: partially update the profile. Only the profile's owner may modify it.'''
        if not request.user.is_authenticated:
            return Response("The user is not authenticated.", status=401)
        try:
            user_author = request.user.author
        except Author.DoesNotExist:
            # Narrowed from a bare except: a User without a related Author raises
            # RelatedObjectDoesNotExist, a subclass of Author.DoesNotExist.
            return Response("The user does not have an author profile.", status=401)
        if str(user_author.authorID) != author_id:
            return Response("The user does not have permission to modify this profile.", status=401)
        try:
            author = Author.objects.get(authorID=author_id)
        except Author.DoesNotExist:
            return Response(status=404)
        serializer = AuthorSerializer(author, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse(serializer.data, status=201)
        print(serializer.errors)
        return Response(status=422)
class login(APIView):
    '''Authenticate a user and open a session; responds with the author profile.'''

    def post(self, request):
        '''POST: log in with {"username": ..., "password": ...}.'''
        try:
            username = request.data['username']
            password = request.data['password']
        except KeyError:
            return Response("Missing username or password.", status=400)
        # Keys present but explicitly null are also rejected.
        if username is None or password is None:
            return Response("Bad request. The expected keys 'username' and 'password' were not found.", status=400)
        user = authenticate(request=request, username=username, password=password)
        if user is None:
            return Response("Invalid login credentials.", status=401)
        try:
            author_serializer = AuthorSerializer(user.author)
        except Author.DoesNotExist:
            return Response("The user credentials are not associated with an author.", status=400)
        django_login(request, user)
        return Response(author_serializer.data, status=200)
class logout(APIView):
    '''Session logout endpoint.'''

    def post(self, request):
        '''POST: terminate the current session. Always succeeds.'''
        # Django's logout is a no-op for anonymous users, so no guard is needed.
        django_logout(request)
        return Response(status=200)
class register(APIView):
    '''Create a new user account with an associated local author profile.'''

    def post(self, request):
        '''
        POST: register with {"username": ..., "password": ...}.
        New accounts are active immediately only when open sign-up is enabled
        in the server settings; otherwise they await admin approval.
        '''
        try:
            username = request.data['username']
            password = request.data['password']
        except KeyError:
            # Narrowed from a bare except: only missing keys are a client error.
            return Response("Bad request. The expected keys 'username' and 'password' were not found.", status=400)
        if User.objects.filter(username=username).exists():
            # The user already exists
            return Response("The given username is already in use.", status=409)
        user = User.objects.create_user(username=username, password=password)
        # Replaces the redundant if/else that assigned True/False explicitly.
        user.is_active = bool(Setting.user_sign_up_enabled())
        user.save()
        author = Author(user=user, host=request.build_absolute_uri('/'), displayName=username, node=None)
        author.save()
        return Response("A new user was created.", status=201)
class followers(APIView):
    '''List the authors following a given author.'''
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, author_id):
        '''GET: return {"type": "followers", "items": [...]} for author_id, or 404.'''
        utils.update_authors()
        try:
            Author.objects.get(authorID=author_id)
        except (Author.DoesNotExist, ValidationError):
            # Narrowed from a bare except. ValidationError is kept because a
            # malformed author_id (not a valid key) raises it during lookup.
            return Response(status=404)
        follower_ids = Follow.objects.filter(toAuthor=author_id)
        follower_profiles = Author.objects.filter(authorID__in=follower_ids.values_list('fromAuthor', flat=True))
        serializer = AuthorSerializer(follower_profiles, many=True)
        response = {'type': 'followers', 'items': serializer.data}
        return Response(response)
class follower(APIView):
    '''Inspect, create, or remove a single follow relationship (foreign author -> author).'''
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, author_id, foreign_author_id):
        '''
        GET: check whether foreign_author_id follows author_id.
        Returns the follower's profile with 200 if so, 404 otherwise.
        For authors living on a remote node, the question is forwarded to that node.
        '''
        try:
            author = Author.objects.get(authorID=author_id)
        except Author.DoesNotExist:
            # The author does not exist
            return Response(status=404)
        # foreign_author_id may be either a bare id or a full author URL;
        # when it is a URL, keep only its trailing id segment.
        try:
            validate = URLValidator()
            validate(foreign_author_id)
            foreignID = foreign_author_id.split("/")[-1]
        except ValidationError as e:
            foreignID = foreign_author_id
        if author.node is not None:
            # Remote author: ask the owning node whether the follow exists.
            # print(foreignID)
            follower = Author.objects.get(authorID=foreignID)
            # NOTE(review): this specific remote server expects the follower's
            # full URL and no trailing slash, hence the special case.
            if author.node.host_url == "https://social-distribution-fall2021.herokuapp.com/api/":
                response = requests.get(author.node.host_url + "author/" + str(author.authorID) + "/followers/" + follower.get_url(), auth=(author.node.username, author.node.password))
                # print(author.node.host_url + "author/" + str(author.authorID) + "/followers/" + follower.get_url())
            else:
                response = requests.get(author.node.host_url + "author/" + str(author.authorID) + "/followers/" + foreign_author_id + "/", auth=(author.node.username, author.node.password))
                # print(author.node.host_url + "author/" + str(author.authorID) + "/followers/" + foreign_author_id + "/")
            if response.status_code >= 300:
                # Propagate the remote node's error verbatim.
                return Response(response.text, response.status_code)
            # Some nodes answer with a JSON-encoded boolean string instead of a profile.
            if response.text == '"true"':
                serializer = AuthorSerializer(follower)
                return Response(serializer.data, status=200)
            elif response.text == '"false"':
                return Response(status=404)
            return Response(response.json(), response.status_code)
        else:
            # Local author: resolve the relationship from our own Follow table.
            follow = Follow.objects.filter(toAuthor=author_id, fromAuthor=foreignID)
            if not follow:
                return Response(status=404)
            else:
                follower = Author.objects.get(authorID=foreignID)
                serializer = AuthorSerializer(follower)
                return Response(serializer.data, status=200)

    def put(self, request, author_id, foreign_author_id):
        '''
        PUT: make foreign_author_id a follower of author_id.
        Only the followed author themselves may add a follower (403 otherwise);
        409 if the relationship already exists.
        '''
        if request.user.is_authenticated:
            try:
                author = request.user.author
            except:
                # The user does not have an author profile
                return Response(status=403)
            if str(author.authorID) != author_id:
                # The request was made by a different author
                return Response(status=403)
            # Update the authors on the local node in case the author being put is on a different node
            utils.update_authors()
            try:
                fromAuthor = Author.objects.get(authorID=foreign_author_id)
            except:
                return Response(status=404)
            if Follow.objects.filter(fromAuthor=fromAuthor, toAuthor=author).exists():
                # The follower already exists
                return Response(status=409)
            # Add the follower
            follow = Follow.objects.create(fromAuthor=fromAuthor, toAuthor=author, date=timezone.now())
            follow.save()
            return Response(status=201)
        else:
            # Request was not authenticated
            return Response(status=401)

    def delete(self, request, author_id, foreign_author_id):
        '''DELETE: remove the follow relationship; 404 when it does not exist.'''
        try:
            Follow.objects.get(fromAuthor=foreign_author_id, toAuthor=author_id).delete()
        except:
            # Nothing to delete
            return Response(status=404)
        return Response(status=200)
class liked(APIView):
    '''List everything a given author has liked.'''
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, author_id):
        '''GET: return {"type": "liked", "items": [...]} for author_id, or 404.'''
        if not Author.objects.filter(authorID=author_id).exists():
            return Response(status=404)
        items = LikeSerializer(Like.objects.filter(fromAuthor=author_id), many=True).data
        return Response({"type": "liked", "items": items}, status=200)
class inbox(APIView):
authentication_classes = [SessionAuthentication, BasicAuthentication]
permission_classes = [IsAuthenticated]
    def get(self, request, author_id):
        '''
        GET: return the authenticated author's inbox, newest first, paginated
        via the "size" (default 5) and "page" (default 1) query parameters.
        Each inbox entry is re-hydrated into its full object (post / follow /
        like / comment); entries whose backing object no longer exists are
        silently skipped.
        '''
        # Return 404 if the inbox does not exist
        if not Author.objects.filter(authorID=author_id).exists():
            return Response("The requested author does not exist.", status=404)
        # Return 403 if somebody other than the author requests their inbox
        try:
            author = request.user.author
        except Author.DoesNotExist:
            return Response("You do not have permission to fetch this inbox.", status=403)
        if str(author.authorID) != author_id:
            return Response("You do not have permission to fetch this inbox.", status=403)
        # Return the inbox contents
        response = {"type": "inbox", "author": request.user.author.get_url(), "items": []}
        author_inbox = Inbox.objects.filter(authorID = author_id).order_by("-date")
        try:
            size = int(request.query_params.get("size", 5))
            page = int(request.query_params.get("page", 1))
            paginator = Paginator(author_inbox, size)
            inbox_page = paginator.get_page(page)
        except:
            # Non-numeric size/page, or size of 0.
            return Response("Bad request. Invalid size or page parameters.", status=400)
        for item in inbox_page:
            if item.inboxType.lower() == "post":
                try:
                    post = Post.objects.get(postID=item.objectID)
                except:
                    # The referenced post was deleted; skip the stale entry.
                    continue
                serializer = PostSerializer(post)
                response["items"].append(serializer.data)
            elif item.inboxType.lower() == "follow":
                # Follow entries are synthesized inline rather than serialized from a model.
                actor_serializer = AuthorSerializer(item.fromAuthor)
                object_serializer = AuthorSerializer(request.user.author)
                item = {"type": "Follow", "summary": item.summary, "actor": actor_serializer.data, "object": object_serializer.data}
                response["items"].append(item)
            elif item.inboxType.lower() == "like":
                try:
                    like = Like.objects.get(authorID=item.fromAuthor, objectID=item.objectID)
                except:
                    # The like no longer exists; skip the stale entry.
                    continue
                serializer = LikeSerializer(like)
                response["items"].append(serializer.data)
            elif item.inboxType.lower() == "comment":
                try:
                    comment = Comment.objects.get(commentID=item.objectID)
                except:
                    # The comment no longer exists; skip the stale entry.
                    continue
                serializer = CommentSerializer(comment)
                response["items"].append(serializer.data)
        return Response(response, status=200)
def post(self, request, author_id):
# print(request.data)
# Update authors in case this was sent by or to an author that our local node does not know about
utils.update_authors()
# return 404 if the author does not exist
try:
inbox_recipient = Author.objects.get(authorID=author_id)
except Author.DoesNotExist:
return Response("The author specified in the url does not exist.", status=404)
if inbox_recipient.node is not None:
# send the data to the correct host
try:
if inbox_recipient.node.host_url == "https://social-distribution-fall2021.herokuapp.com/api/":
destination = inbox_recipient.node.host_url + "author/" + author_id + "/inbox"
else:
destination = inbox_recipient.node.host_url + "author/" + author_id + "/inbox/"
response = requests.post(destination, auth=(inbox_recipient.node.username, inbox_recipient.node.password), json=request.data)
if response.status_code >= 300:
print("Could not connect to the host: " + inbox_recipient.host)
# | |
<reponame>VITA-Group/Audio-Lottery
# Copyright 2021, <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# PyTorch
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.tensorboard import SummaryWriter
# Sentencepiece
import sentencepiece as spm
# Schedulers
from models.schedules import *
# Other
from tqdm import tqdm
import jiwer
import os
import time
from utils.pruning_utils import remove_prune, prune_model_custom, prune_main, check_sparsity
def sample_synaptic_noise(m, distributed):
    """Trigger synaptic-noise resampling on modules that support it (for nn.Module.apply)."""
    sampler = getattr(m, "sample_synaptic_noise", None)
    if sampler is not None:
        sampler(distributed)
def init_vn(m, vn_std):
    """Initialise variational-noise std on modules that support it (for nn.Module.apply)."""
    initializer = getattr(m, "init_vn", None)
    if initializer is not None:
        initializer(vn_std)
class Model(nn.Module):
def __init__(self, tokenizer_params, training_params, decoding_params, name):
super(Model, self).__init__()
# Tokenizer
try:
self.tokenizer = spm.SentencePieceProcessor(tokenizer_params["tokenizer_path"])
except:
self.tokenizer = None
print("Tokenizer not found...")
# Training Params
self.encoder_frozen_steps = training_params.get("encoder_frozen_steps", None)
self.vn_start_step = training_params.get("vn_start_step", None)
# Decoding Params
self.beam_size = decoding_params.get("beam_size", 1)
self.tmp = decoding_params.get("tmp", 1)
# Ngram
self.ngram_path = decoding_params.get("ngram_path", None)
self.ngram_alpha = decoding_params.get("ngram_alpha", 0)
self.ngram_beta = decoding_params.get("ngram_beta", 0)
self.ngram_offset = decoding_params.get("ngram_offset", 100)
# LM
self.lm = None
self.lm_weight = decoding_params.get("lm_weight", 0)
self.lm_tmp = decoding_params.get("lm_tmp", 1)
# Distributed Computing
self.is_distributed = False
self.rank = 0
self.is_parallel = False
# Model Name
self.name = name
def compile(self, training_params):
# Optimizers
if training_params["optimizer"] == "Adam":
# Adam
self.optimizer = optim.Adam(
params=self.parameters(),
lr=0,
betas=(training_params["beta1"], training_params["beta2"]),
eps=training_params["eps"],
weight_decay=training_params["weight_decay"])
elif training_params["optimizer"] == "AdamW":
# Adam
self.optimizer = optim.AdamW(
params=self.parameters(),
lr=0,
betas=(training_params["beta1"], training_params["beta2"]),
eps=training_params["eps"],
weight_decay=training_params["weight_decay"])
elif training_params["optimizer"] == "SGD":
# SGD
self.optimizer = optim.SGD(
params=self.parameters,
lr=0,
momentum=training_params["momentum"],
weight_decay=training_params["weight_decay"])
# LR Schedulers
if training_params["lr_schedule"] == "Constant":
# Constant LR
self.scheduler = constant_learning_rate_scheduler(
optimizer=self.optimizer,
lr_value=training_params["lr_value"])
elif training_params["lr_schedule"] == "ConstantWithDecay":
# Constant With Decay LR
self.scheduler = constant_with_decay_learning_rate_scheduler(
optimizer=self.optimizer,
lr_values=training_params["lr_values"],
decay_steps=training_params["decay_steps"])
elif training_params["lr_schedule"] == "Transformer":
# Transformer LR
self.scheduler = transformer_learning_rate_scheduler(
optimizer=self.optimizer,
dim_model=training_params["schedule_dim"],
warmup_steps=training_params["warmup_steps"],
K=training_params["K"])
elif training_params["lr_schedule"] == "ExpDecayTransformer":
# Exp Decay Transformer LR
self.scheduler = exponential_decay_transformer_learning_rate_scheduler(
optimizer=self.optimizer,
warmup_steps=training_params["warmup_steps"],
lr_max=training_params["lr_max"] if training_params.get("lr_max", None) else training_params["K"] * training_params["schedule_dim"]**-0.5 * training_params["warmup_steps"]**-0.5,
alpha=training_params["alpha"],
end_step=training_params["end_step"])
elif training_params["lr_schedule"] == "Cosine":
# Cosine Annealing LR
self.scheduler = cosine_annealing_learning_rate_scheduler(
optimizer=self.optimizer,
warmup_steps=training_params["warmup_steps"],
lr_max=training_params["lr_max"] if training_params.get("lr_max", None) else training_params["K"] * training_params["schedule_dim"]**-0.5 * training_params["warmup_steps"]**-0.5,
lr_min= training_params["lr_min"],
end_step=training_params["end_step"])
# Init LR
self.scheduler.step()
# Pruning mask
self.mask_dict = None
def num_params(self):
return sum([p.numel() for p in self.parameters()])
def summary(self, show_dict=False):
print(self.name)
print("Model Parameters :", self.num_params())
if show_dict:
for key, value in self.state_dict().items():
print("{:<64} {:<16} mean {:<16.4f} std {:<16.4f}".format(key, str(tuple(value.size())), value.float().mean(), value.float().std()))
def distribute_strategy(self, rank):
self.rank = rank
self.is_distributed = True
def parallel_strategy(self):
self.is_parallel = True
def check_loss(self, loss, loss_value):
"""
Check that warp-rnnt loss is valid and will not break training
:return: Return if loss is valid, and the error in case it is not
"""
loss_valid = True
error = ''
if loss_value == float("inf") or loss_value == float("-inf"):
loss_valid = False
error = "WARNING: received an inf loss"
elif torch.isnan(loss).sum() > 0:
loss_valid = False
error = 'WARNING: received a nan loss, setting loss value to 0'
elif loss_value < 0:
loss_valid = False
error = "WARNING: received a negative loss"
return loss_valid, error
    def fit(self, dataset_train, epochs, dataset_val=None, val_steps=None, verbose_val=False, initial_epoch=0, callback_path=None, steps_per_epoch=None, mixed_precision=False, accumulated_steps=1, saving_period=1, val_period=1, max_grad_norm=5.0):
        """Train the model.

        Args:
            dataset_train: training DataLoader (with a .sampler when distributed).
            epochs: total number of epochs to reach (not to add).
            dataset_val: optional validation DataLoader, or a dict name -> DataLoader.
            val_steps: number of validation steps passed to evaluate().
            verbose_val: verbosity flag passed to evaluate().
            initial_epoch: epoch to resume from.
            callback_path: directory for TensorBoard logs and checkpoints (rank 0 only).
            steps_per_epoch: optional cap on optimizer updates per epoch.
            mixed_precision: enable torch.cuda.amp autocast + GradScaler.
            accumulated_steps: gradient accumulation factor.
            saving_period / val_period: checkpoint / validation frequency in epochs.
            max_grad_norm: gradient clipping threshold.
        """
        # Model Device
        device = next(self.parameters()).device
        # Mixed Precision Gradient Scaler
        scaler = torch.cuda.amp.GradScaler(enabled=mixed_precision)
        # Init Training
        acc_step = 0
        self.optimizer.zero_grad()
        wers = []
        best_wer = 1000.0
        # Callbacks (TensorBoard writer exists only on rank 0)
        if self.rank == 0 and callback_path is not None:
            # Create Callbacks
            if not os.path.isdir(callback_path):
                os.makedirs(callback_path)
            # Create Writer
            writer = SummaryWriter(os.path.join(callback_path, "logs"))
        else:
            writer = None
        # Sample Synaptic Noise (resume case: noise may already be active)
        if self.vn_start_step is not None:
            if self.scheduler.model_step >= self.vn_start_step:
                self.decoder.apply(lambda m: sample_synaptic_noise(m, self.is_distributed))
        # Try Catch
        try:
            # Training Loop
            for epoch in range(initial_epoch, epochs):
                # Sync sampler if distributed
                if self.is_distributed:
                    dataset_train.sampler.set_epoch(epoch)
                # Epoch Init
                if self.rank == 0:
                    print("Epoch {}/{}".format(epoch + 1, epochs))
                    epoch_iterator = tqdm(dataset_train, total=steps_per_epoch * accumulated_steps if steps_per_epoch else None)
                else:
                    epoch_iterator = dataset_train
                epoch_loss = 0.0
                # Training Mode
                self.train()
                # Epoch training
                for step, batch in enumerate(epoch_iterator):
                    # Load batch to model device
                    batch = [elt.to(device) for elt in batch]
                    # Encoder Frozen Steps
                    if self.encoder_frozen_steps:
                        if self.scheduler.model_step > self.encoder_frozen_steps:
                            self.encoder.requires_grad_(True)
                        else:
                            self.encoder.requires_grad_(False)
                    # Automatic Mixed Precision Casting (model prediction + loss computing)
                    with torch.cuda.amp.autocast(enabled=mixed_precision):
                        pred = self.forward(batch)
                        loss_mini = self.criterion(batch, pred)
                        loss = loss_mini / accumulated_steps
                    loss_value = loss.item()
                    valid_loss, error = self.check_loss(loss, loss_value)
                    if valid_loss:
                        self.optimizer.zero_grad()
                        # Accumulate gradients
                        scaler.scale(loss).backward()
                        # Update Epoch Variables
                        acc_step += 1
                        epoch_loss += loss_mini.detach()
                        # Continue Accumulating
                        if acc_step < accumulated_steps:
                            continue
                        # Update Parameters, Zero Gradients and Update Learning Rate
                        scaler.unscale_(self.optimizer)
                        torch.nn.utils.clip_grad_norm_(self.parameters(), max_grad_norm)
                        scaler.step(self.optimizer)
                        scaler.update()
                        self.scheduler.step()
                        acc_step = 0
                    else:
                        # Invalid (inf/nan/negative) loss: skip this update entirely.
                        print(error)
                        print('loss {} not valid, skip update.'.format(loss_value))
                    # Sample Synaptic Noise
                    if self.vn_start_step is not None:
                        if self.scheduler.model_step >= self.vn_start_step:
                            self.decoder.apply(lambda m: sample_synaptic_noise(m, self.is_distributed))
                    # Step Print
                    if self.rank == 0:
                        epoch_iterator.set_description("model step: {} - mean loss {:.4f} - batch loss: {:.4f} - learning rate: {:.6f}".format(self.scheduler.model_step, epoch_loss / (step + 1), loss_mini, self.optimizer.param_groups[0]['lr']))
                    # Logs Step
                    if self.rank == 0 and writer is not None and (step + 1) % 10 == 0:
                        writer.add_scalar('Training/Loss', loss_mini, self.scheduler.model_step)
                        writer.add_scalar('Training/LearningRate', self.optimizer.param_groups[0]['lr'], self.scheduler.model_step)
                    # Step per Epoch
                    if steps_per_epoch is not None:
                        if step + 1 >= steps_per_epoch * accumulated_steps:
                            break
                # Reduce Epoch Loss among devices
                if self.is_distributed:
                    torch.distributed.barrier()
                    torch.distributed.all_reduce(epoch_loss)
                    epoch_loss /= torch.distributed.get_world_size()
                # Logs Epoch
                if self.rank == 0 and writer is not None:
                    writer.add_scalar('Training/MeanLoss', epoch_loss / (steps_per_epoch * accumulated_steps if steps_per_epoch is not None else dataset_train.__len__()), epoch + 1)
                # Validation
                if (epoch + 1) % val_period == 0:
                    # Validation Dataset
                    if dataset_val:
                        # Multiple Validation Datasets
                        if isinstance(dataset_val, dict):
                            for dataset_name, dataset in dataset_val.items():
                                # Evaluate
                                wer, truths, preds, val_loss = self.evaluate(dataset, val_steps, verbose_val, eval_loss=True)
                                # Print wer
                                if self.rank == 0:
                                    print("{} wer : {:.2f}% - loss : {:.4f}".format(dataset_name, 100 * wer, val_loss))
                                # Logs Validation
                                if self.rank == 0 and writer is not None:
                                    writer.add_scalar('Validation/WER/{}'.format(dataset_name), 100 * wer, epoch + 1)
                                    writer.add_scalar('Validation/MeanLoss/{}'.format(dataset_name), val_loss, epoch + 1)
                                    writer.add_text('Validation/Predictions/{}'.format(dataset_name), "GroundTruth : " + truths[0] + " / Prediction : " + preds[0], epoch + 1)
                        else:
                            # Evaluate
                            wer, truths, preds, val_loss = self.evaluate(dataset_val, val_steps, verbose_val, eval_loss=True)
                            # Print wer
                            if self.rank == 0:
                                print("Val wer : {:.2f}% - Val loss : {:.4f}".format(100 * wer, val_loss))
                            # Logs Validation
                            if self.rank == 0 and writer is not None:
                                writer.add_scalar('Validation/WER', 100 * wer, epoch + 1)
                                writer.add_scalar('Validation/MeanLoss', val_loss, epoch + 1)
                                writer.add_text('Validation/Predictions', "GroundTruth : " + truths[0] + " / Prediction : " + preds[0], epoch + 1)
                        # Saving Checkpoint
                        # NOTE(review): with a dict of validation sets, `wer` here is
                        # the last dataset's WER — confirm this is intended.
                        wers.append(100 * wer)
                        if (epoch + 1) % saving_period == 0:
                            if callback_path and self.rank == 0:
                                # if current epoch has the best model, save it, otherwise skip
                                self.save(os.path.join(callback_path, "checkpoints_" + str(epoch + 1) + ".ckpt"))
                            if 100 * wer < best_wer:
                                best_wer = 100 * wer
                                self.save(os.path.join(callback_path, "checkpoints_best.ckpt"))
                # Always save the final-epoch checkpoint.
                if epoch == (epochs - 1):
                    if callback_path and self.rank == 0:
                        self.save(os.path.join(callback_path, "checkpoints_" + str(epoch + 1) + ".ckpt"))
        # Exception Handler
        except Exception as e:
            if self.is_distributed:
                torch.distributed.destroy_process_group()
            if self.rank == 0 and writer is not None:
                writer.add_text('Exceptions', str(e))
            raise e
def save(self, path, save_optimizer=True):
# Save Model Checkpoint
if self.mask_dict is not None:
remove_prune(self)
torch.save({
"model_state_dict": self.state_dict(),
"optimizer_state_dict": self.optimizer.state_dict() if save_optimizer else None,
"model_step": self.scheduler.model_step,
"tokenizer": self.tokenizer,
"is_distributed": self.is_distributed or self.is_parallel,
"mask_dict": self.mask_dict if self.mask_dict is not None else None,
}, path)
if self.mask_dict is not None:
prune_model_custom(self, self.mask_dict)
# Print Model state
if self.rank == 0:
print("model saved at step {} / lr {:.6f}".format(self.scheduler.model_step, self.optimizer.param_groups[0]['lr']))
def load(self, path):
# Load Model Checkpoint
checkpoint = torch.load(path, map_location=next(self.parameters()).device)
# Model State Dict
if checkpoint["is_distributed"] and not self.is_distributed:
self.load_state_dict({key.replace(".module.", | |
<reponame>vishalbelsare/DESlib<gh_stars>100-1000
# coding=utf-8
# Author: <NAME> <<EMAIL>>
#
# License: BSD 3 clause
import functools
import math
import warnings
from abc import abstractmethod, ABCMeta
import numpy as np
from scipy.stats import mode
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.ensemble import BaseEnsemble, BaggingClassifier
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import LabelEncoder
from sklearn.utils.validation import (check_X_y, check_is_fitted, check_array,
check_random_state)
from deslib.util import KNNE
from deslib.util import faiss_knn_wrapper
from deslib.util.dfp import frienemy_pruning_preprocessed
from deslib.util.instance_hardness import hardness_region_competence
class BaseDS(BaseEstimator, ClassifierMixin):
"""Base class for a dynamic classifier selection (dcs) and
dynamic ensemble selection (des) methods.
All dcs and des techniques should inherit from this class.
Warning: This class should not be used directly.
Use derived classes instead.
"""
__metaclass__ = ABCMeta
@abstractmethod
def __init__(self, pool_classifiers=None, k=7, DFP=False, with_IH=False,
safe_k=None, IH_rate=0.30, needs_proba=False,
random_state=None, knn_classifier='knn', DSEL_perc=0.5,
knne=False, n_jobs=-1, voting=None):
self.pool_classifiers = pool_classifiers
self.k = k
self.DFP = DFP
self.with_IH = with_IH
self.safe_k = safe_k
self.IH_rate = IH_rate
self.needs_proba = needs_proba
self.random_state = random_state
self.knn_classifier = knn_classifier
self.DSEL_perc = DSEL_perc
self.knne = knne
self.n_jobs = n_jobs
self.voting = voting
# Check optional dependency
if knn_classifier == 'faiss' and not faiss_knn_wrapper.is_available():
raise ImportError(
'Using knn_classifier="faiss" requires that the FAISS library '
'be installed.Please check the Installation Guide.')
    def fit(self, X, y):
        """Prepare the DS model by setting the KNN algorithm and
        pre-processing the information required to apply the DS
        methods

        Parameters
        ----------
        X : array of shape (n_samples, n_features)
            The input data.

        y : array of shape (n_samples)
            class labels of each example in X.

        Returns
        -------
        self
        """
        self.random_state_ = check_random_state(self.random_state)
        X, y = check_X_y(X, y)

        # Check if the pool of classifiers is None.
        # If yes, use a BaggingClassifier for the pool.
        if self.pool_classifiers is None:
            X_dsel, y_dsel = self._fit_pool_classifiers(X, y)
        else:
            # A user-supplied pool must already be fitted; the full training
            # data then doubles as the dynamic-selection dataset (DSEL).
            self._check_base_classifier_fitted()
            self.pool_classifiers_ = self.pool_classifiers
            X_dsel = X
            y_dsel = y
        self.n_classifiers_ = len(self.pool_classifiers_)

        # allow base models with feature subspaces.
        if hasattr(self.pool_classifiers_, "estimators_features_"):
            self.estimator_features_ = \
                np.array(self.pool_classifiers_.estimators_features_)
        else:
            # No subspace information: every base classifier sees all features.
            indices = np.arange(X.shape[1])
            self.estimator_features_ = np.tile(indices,
                                               (self.n_classifiers_, 1))

        # check if the input parameters are correct.
        self._setup_label_encoder(y)
        y_dsel = self.enc_.transform(y_dsel)
        self._set_dsel(X_dsel, y_dsel)
        self._set_region_of_competence_algorithm()
        self._validate_parameters()

        # Fit the region-of-competence estimator on DSEL.
        self.roc_algorithm_.fit(X_dsel, y_dsel)

        # Cache base-classifier predictions on DSEL and the boolean
        # hit/miss matrix used by the competence estimators.
        self.BKS_DSEL_ = self._predict_base(self.DSEL_data_)
        self.DSEL_processed_ = self.BKS_DSEL_ == y_dsel[:, np.newaxis]
        return self
def get_competence_region(self, query, k=None):
"""Compute the region of competence of the query sample
using the data belonging to DSEL.
Parameters
----------
query : array of shape (n_samples, n_features)
The test examples.
k : int (Default = self.k)
The number of neighbors used to in the region of competence.
Returns
-------
dists : array of shape (n_samples, k)
The distances between the query and each sample in the region
of competence. The vector is ordered in an ascending fashion.
idx : array of shape (n_samples, k)
Indices of the instances belonging to the region of competence of
the given query sample.
"""
if k is None:
k = self.k_
dists, idx = self.roc_algorithm_.kneighbors(query,
n_neighbors=k,
return_distance=True)
return np.atleast_2d(dists), np.atleast_2d(idx)
    @abstractmethod
    def estimate_competence(self, competence_region, distances=None,
                            predictions=None):
        """Estimate the competence of each base classifier :math:`c_{i}`
        for the classification of the query sample :math:`\\mathbf{x}`.

        Returns an array containing the level of competence estimated
        for each base classifier. The size of the vector is equal to
        the size of the generated_pool of classifiers.

        Parameters
        ----------
        competence_region : array of shape (n_samples, n_neighbors)
            Indices of the k nearest neighbors according for each
            test sample.

        distances : array of shape (n_samples, n_neighbors)
            Distances of the k nearest neighbors according for each
            test sample.

        predictions : array of shape (n_samples, n_classifiers)
            Predictions of the base classifiers for all test examples

        Returns
        -------
        competences : array (n_classifiers) containing the competence level
        estimated for each base classifier
        """
        pass
    @abstractmethod
    def select(self, competences):
        """Select the most competent classifier for
        the classification of the query sample x.

        The most competent classifier (dcs) or an ensemble
        with the most competent classifiers (des) is returned.

        Parameters
        ----------
        competences : array of shape (n_samples, n_classifiers)
            The estimated competence level of each base classifier
            for each test example.

        Returns
        -------
        selected_classifiers : array containing the selected base classifiers
        for each test sample
        """
        pass
    @abstractmethod
    def classify_with_ds(self, predictions, probabilities=None,
                         neighbors=None, distances=None, DFP_mask=None):
        """Predict the label of the corresponding query sample.

        Returns the predicted label.

        Parameters
        ----------
        predictions : array of shape (n_samples, n_classifiers)
            Predictions of the base classifiers for all test examples.

        probabilities : array of shape (n_samples, n_classifiers, n_classes)
            Probabilities estimates of each base classifier for all test
            examples (For methods that always require probabilities from the
            base classifiers).

        neighbors : array of shape (n_samples, n_neighbors)
            Indices of the k nearest neighbors.

        distances : array of shape (n_samples, n_neighbors)
            Distances from the k nearest neighbors to the query.

        DFP_mask : array of shape (n_samples, n_classifiers)
            Mask containing 1 for the selected base classifier and 0 otherwise.

        Returns
        -------
        predicted_label : array of shape (n_samples)
            The predicted label for each query.
        """
        pass
    @abstractmethod
    def predict_proba_with_ds(self, predictions, probabilities,
                              neighbors=None, distances=None, DFP_mask=None):
        """Predict the posterior probabilities of the corresponding
        query sample. Returns the probability estimates of each class.

        Parameters
        ----------
        predictions : array of shape (n_samples, n_classifiers)
            Predictions of the base classifiers for all test examples.

        probabilities : array of shape (n_samples, n_classifiers, n_classes)
            The predictions of each base classifier for all samples (For
            methods that always require probabilities from the base
            classifiers).

        neighbors : array of shape (n_samples, n_neighbors)
            Indices of the k nearest neighbors.

        distances : array of shape (n_samples, n_neighbors)
            Distances from the k nearest neighbors to the query.

        DFP_mask : array of shape (n_samples, n_classifiers)
            Mask containing 1 for the selected base classifier and 0 otherwise.

        Returns
        -------
        predicted_proba : array of shape (n_samples, n_classes)
            Posterior probabilities estimates for each test example.
        """
        pass
def predict(self, X):
"""Predict the class label for each sample in X.
Parameters
----------
X : array of shape (n_samples, n_features)
The input data.
Returns
-------
predicted_labels : array of shape (n_samples)
Predicted class label for each sample in X.
"""
X = self._check_predict(X)
preds = np.empty(X.shape[0], dtype=np.intp)
need_proba = self.needs_proba or self.voting == 'soft'
base_preds, base_probas = self._preprocess_predictions(X, need_proba)
# predict all agree
ind_disagreement, ind_all_agree = self._split_agreement(base_preds)
if ind_all_agree.size:
preds[ind_all_agree] = base_preds[ind_all_agree, 0]
# predict with IH
if ind_disagreement.size:
distances, ind_ds_classifier, neighbors = self._IH_prediction(
X, ind_disagreement, preds, is_proba=False
)
# Predict with DS - Check if there are still samples to be labeled.
if ind_ds_classifier.size:
DFP_mask = self._get_DFP_mask(neighbors)
inds, sel_preds, sel_probas = self._prepare_indices_DS(
base_preds, base_probas, ind_disagreement,
ind_ds_classifier)
preds_ds = self.classify_with_ds(sel_preds, sel_probas,
neighbors, distances,
DFP_mask)
preds[inds] = preds_ds
return self.classes_.take(preds)
def _check_predict(self, X):
check_is_fitted(self,
["DSEL_processed_", "DSEL_data_", "DSEL_target_"])
X = check_array(X)
if self.n_features_ != X.shape[1]:
raise ValueError("Number of features of the model must "
"match the input. Model n_features is {0} and "
"input n_features is {1}."
"".format(self.n_features_, X.shape[1]))
return X
def predict_proba(self, X):
"""Estimates the posterior probabilities for sample in X.
Parameters
----------
X : array of shape (n_samples, n_features)
The input data.
Returns
-------
predicted_proba : array of shape (n_samples, n_classes)
Probabilities estimates for each sample in X.
"""
X = self._check_predict(X)
self._check_predict_proba()
probas = np.zeros((X.shape[0], self.n_classes_))
base_preds, base_probas = self._preprocess_predictions(X, True)
# predict all agree
ind_disagreement, ind_all_agree = self._split_agreement(base_preds)
if ind_all_agree.size:
probas[ind_all_agree] = base_probas[ind_all_agree].mean(axis=1)
# predict with IH
if ind_disagreement.size:
distances, ind_ds_classifier, neighbors = self._IH_prediction(
X, ind_disagreement, probas, is_proba=True)
# Predict with DS - Check if there are still samples to be labeled.
if ind_ds_classifier.size:
DFP_mask = self._get_DFP_mask(neighbors)
inds, sel_preds, sel_probas = self._prepare_indices_DS(
base_preds, base_probas, ind_disagreement,
ind_ds_classifier)
probas_ds = self.predict_proba_with_ds(sel_preds,
sel_probas,
neighbors, distances,
DFP_mask)
probas[inds] = probas_ds
return probas
def _preprocess_predictions(self, X, req_proba):
if req_proba:
base_probabilities = self._predict_proba_base(X)
base_predictions = base_probabilities.argmax(axis=2)
else:
base_probabilities = None
base_predictions = self._predict_base(X)
return base_predictions, base_probabilities
def _split_agreement(self, base_predictions):
all_agree_vector = BaseDS._all_classifier_agree(base_predictions)
ind_all_agree = np.where(all_agree_vector)[0]
ind_disagreement = np.where(~all_agree_vector)[0]
return ind_disagreement, ind_all_agree
def _IH_prediction(self, X, ind_disagree, predicted_proba, is_proba=False):
X_DS = X[ind_disagree, :]
distances, region_competence = self.get_competence_region(X_DS)
if self.with_IH:
ind_hard, ind_easy = self._split_easy_samples(region_competence)
distances, region_competence = self._predict_easy_samples(
X_DS, distances, ind_disagree, ind_easy,
region_competence, predicted_proba, is_proba)
else:
# IH was not considered. So all samples go to predict with DS
ind_hard = np.arange(ind_disagree.size)
return distances, | |
<gh_stars>1-10
#!/usr/bin/env python
import numpy
import math
import logging
from scipy.stats import poisson
import networkx as nx
import NetworkX_Extension as nxe
# from GSA import Edge
class NJTree:
logger = logging.getLogger("NJTree")
def __init__(self, mrca, alpha, beta, gamma, gain, loss, synteny):
self.graph = nx.Graph()
self.bigNode = ""
self.alpha = float(alpha)
self.beta = float(beta)
self.gamma = float(gamma)
self.gain = float(gain)
self.loss = float(loss)
self.synteny = synteny
self.OK = "false"
self.rootEdge = None
self.mrca = mrca
self.rootedTree = None
self.hom_shortest_paths = None
self.syn_shortest_paths = None
self.paths = None
# self.gl_map = {} # node -> gain/loss tuple
    def readDistanceMatrix(self):
        # NOTE(review): self.distance_matrix is never assigned in __init__ or
        # anywhere visible in this class — presumably a caller or subclass sets
        # it before use; confirm, otherwise this raises AttributeError.
        return self.distance_matrix
    def buildGraphFromNewDistanceMatrix(self, hom_matrix, syn_matrix, leaves):
        """Build an unrooted neighbour-joining tree from pairwise distances.

        hom_matrix and syn_matrix are flat, condensed pairwise-distance
        arrays: the (k, l) entry with k < l lives at index k + l*(l-1)/2
        (this layout is what the indexing expressions below assume). Both
        arrays are modified in place as rows/columns are merged. ``leaves``
        is the list of leaf names and is mutated/consumed. Returns the name
        of the final merged node, also stored in self.bigNode.

        NOTE(review): relies on Python 2 semantics (xrange, integer '/')
        and the old NetworkX ``graph.node`` attribute API.
        """
        # A leaf's species is everything before the last '_' in its name.
        for l in leaves:
            my_species = "_".join(l.split("_")[:-1])
            self.graph.add_node(l, species=my_species)
        # TODO Verify order of the leaves names and of the data
        unadded_nodes = leaves
        unadded_count = len(unadded_nodes)
        while unadded_count > 2:
            uan = unadded_count
            uan_denom = float(uan) - 2.0  # NJ divisor (n - 2)
            matrix_size = unadded_count * (unadded_count - 1) / 2
            # Per-node sums over the condensed matrices, used by the NJ
            # selection criterion below.
            sum_of_hom = numpy.zeros(unadded_count, float)
            sum_of_syn = numpy.zeros(unadded_count, float)
            imax = 1
            pos = 0
            while(pos < matrix_size):
                for i in xrange(imax):
                    # Entry pos is the distance between nodes i and imax;
                    # credit it to both endpoints' sums.
                    sum_of_hom[i] += hom_matrix[pos]
                    sum_of_syn[i] += syn_matrix[pos]
                    sum_of_hom[imax] += hom_matrix[pos]
                    sum_of_syn[imax] += syn_matrix[pos]
                    pos += 1
                imax += 1
            # Pick the pair (k, l) minimising the NJ criterion, using the
            # combined homology + synteny distances.
            min_nm = float("Inf")
            minp = []
            k = 0
            l = 1
            for i in xrange(matrix_size):
                nm = uan_denom * (hom_matrix[i] + syn_matrix[i]) - (sum_of_hom[k] + sum_of_syn[k]) - (sum_of_hom[l] + sum_of_syn[l]) # need to look back for these indices
                if nm < min_nm:
                    min_nm = nm
                    minp = [k, l]
                # Walk (k, l) through condensed-matrix order.
                k += 1
                if not k < l:
                    k = 0
                    l += 1
            # Branch lengths from each merged node to the new internal node
            # (standard NJ limb-length formula), computed separately for the
            # homology and synteny distances.
            mp0_mp_dist = 0.5 * hom_matrix[minp[0] + ((minp[1] - 1) * minp[1] / 2)] + 0.5 * (sum_of_hom[minp[0]] - sum_of_hom[minp[1]]) / uan_denom
            syn0_mp_dist = 0.5 * syn_matrix[minp[0] + ((minp[1] - 1) * minp[1] / 2)] + 0.5 * (sum_of_syn[minp[0]] - sum_of_syn[minp[1]]) / uan_denom
            mp1_mp_dist = 0.5 * hom_matrix[minp[0] + ((minp[1] - 1) * minp[1] / 2)] + 0.5 * (sum_of_hom[minp[1]] - sum_of_hom[minp[0]]) / uan_denom
            syn1_mp_dist = 0.5 * syn_matrix[minp[0] + ((minp[1] - 1) * minp[1] / 2)] + 0.5 * (sum_of_syn[minp[1]] - sum_of_syn[minp[0]]) / uan_denom
            # New internal node is named after its children, ';'-joined.
            newNode = ";".join([unadded_nodes[minp[0]], unadded_nodes[minp[1]]])
            my_species = ""
            # The merged node keeps the children's species when they agree,
            # otherwise falls back to the configured MRCA label.
            if self.graph.node[unadded_nodes[minp[0]]]['species'] == self.graph.node[unadded_nodes[minp[1]]]['species']:
                my_species = self.graph.node[unadded_nodes[minp[0]]]['species']
            else:
                my_species = self.mrca
            self.graph.add_node(newNode, species=my_species)
            # replace first merged leave by newNode then shift everything after the 2nd merged leave
            self.graph.add_edge(unadded_nodes[minp[0]], newNode, homology_dist=mp0_mp_dist, synteny_dist=syn0_mp_dist)
            self.graph.add_edge(unadded_nodes[minp[1]], newNode, homology_dist=mp1_mp_dist, synteny_dist=syn1_mp_dist)
            unadded_nodes[minp[0]] = newNode
            for i in xrange(minp[1], unadded_count - 1):
                unadded_nodes[i] = unadded_nodes[i + 1]
            unadded_count -= 1 # replaced 2 nodes with 1
            # replace the first line/column of the merging with the merging and shift values after second line/column
            k = 0
            l = 1
            offset = 0
            # Distance between the two nodes being merged.
            dfg_hom = hom_matrix[(minp[1] * (minp[1] - 1) / 2) + minp[0]]
            dfg_syn = syn_matrix[(minp[1] * (minp[1] - 1) / 2) + minp[0]]
            for pos in xrange(matrix_size):
                if k == minp[1] or l == minp[1]:
                    # Entries involving the second merged node are dropped;
                    # later entries shift left by 'offset'.
                    offset += 1
                elif l == minp[0]:
                    # Row of the first merged node: overwrite with the
                    # distance from k to the new internal node.
                    dfk_hom = hom_matrix[pos]
                    dgk_hom = hom_matrix[(minp[1] * (minp[1] - 1) / 2) + k]
                    dfk_syn = syn_matrix[pos]
                    dgk_syn = syn_matrix[(minp[1] * (minp[1] - 1) / 2) + k]
                    hom_matrix[pos] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
                    syn_matrix[pos] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
                elif k == minp[0]:
                    # Column of the first merged node.
                    # NOTE(review): 'pos + minp[1] - minp[0]' assumes the
                    # second merged index still lies in the same condensed
                    # row — verify for minp[0] < l < minp[1].
                    dfk_hom = hom_matrix[pos]
                    dgk_hom = hom_matrix[pos + minp[1] - minp[0]]
                    dfk_syn = syn_matrix[pos]
                    dgk_syn = syn_matrix[pos + minp[1] - minp[0]]
                    hom_matrix[pos - offset] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
                    syn_matrix[pos - offset] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
                else:
                    # Untouched entry: just compact it leftwards.
                    hom_matrix[pos - offset] = hom_matrix[pos]
                    syn_matrix[pos - offset] = syn_matrix[pos]
                k += 1
                if not k < l:
                    k = 0
                    l += 1
        if unadded_count == 2:
            # Join the last two subtrees with a single edge.
            self.graph.add_edge(unadded_nodes[0], unadded_nodes[1], homology_dist=hom_matrix[0], synteny_dist=syn_matrix[0]) # check this
            unadded_nodes = [";".join(unadded_nodes[:2])]
        bigNode = unadded_nodes.pop()
        self.bigNode = bigNode
        return bigNode
    def getNewick(self):
        """Serialise self.rootedTree into Newick strings.

        Returns [homology_newick, synteny_newick]; the second entry is ""
        when synteny distances are not in use. When the tree has not been
        rooted yet, logs a critical message (and implicitly returns None).

        NOTE(review): internal-node names are expanded with str.replace,
        which assumes no node name appears as a substring of another name
        inside the growing Newick string — confirm that naming guarantees
        this.
        """
        if self.rootedTree:
            processed = ['root']
            # Start from the two children of the artificial 'root' node.
            current_leaves = list(self.rootedTree['root'])
            nwk = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['homology_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['homology_dist']) + ")"
            if self.synteny:
                # Parallel Newick string using synteny branch lengths.
                nwk2 = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['synteny_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['synteny_dist']) + ")"
            # Repeatedly replace an internal node's name in the string by the
            # parenthesised list of its two children.
            while current_leaves:
                n = current_leaves.pop()
                neighbors = list(self.rootedTree[n])
                if len(neighbors) > 1: # if not a leaf
                    # Drop the neighbour we arrived from (already processed).
                    for neighbor in neighbors:
                        if neighbor in processed:
                            neighbors.remove(neighbor)
                            break
                    processed.append(n)
                    new_nwk = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['homology_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['homology_dist'])
                    nwk = nwk.replace(n, "(" + new_nwk + ")")
                    if self.synteny:
                        new_nwk2 = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['synteny_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['synteny_dist'])
                        nwk2 = nwk2.replace(n, "(" + new_nwk2 + ")")
                    current_leaves.extend(neighbors)
            if self.synteny:
                return [nwk, nwk2]
            else:
                return [nwk, ""]
        else:
            NJTree.logger.critical("Tried to get Newick from a tree that has no rootTree: %s" % (self.bigNode))
    @staticmethod
    def toNewick(graph):
        """Serialise an unrooted tree graph to a Newick string.

        Internal nodes accumulate their serialised subtree in the
        'child_newick' node attribute; branch lengths come from the
        'homology_dist' edge attribute. The returned string has a newline
        after every '(', ',' and ')' and ends with ';'.

        NOTE(review): depends on NJTree.calcMostEdgesToLeaves (defined
        elsewhere in this file) and on the old NetworkX ``graph.node`` API.
        """
        up = [] # unprocessed (internal) nodes
        leaf = [] # (name, species) pairs still awaiting attachment
        for n in graph.nodes():
            if len(graph[n]) > 1:
                up.append(n)
            else:
                leaf.append((n, graph.node[n]['species']))
        curNode = None
        last_string = ""
        if len(graph.nodes()) == 2:
            # Degenerate two-node tree: a single edge, shared branch length.
            ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
            last_string = "(" + leaf[0][0] + ":" + ew + "," + leaf[1][0] + ":" + ew + ")"
        while len(up) > 0:
            # Process next the internal node with most edges to leaves.
            (curNode, e_count) = NJTree.calcMostEdgesToLeaves(up, leaf, graph)
            leaves = []
            for e in graph[curNode]:
                for l in leaf:
                    if l[0] == e:
                        e_i = leaf.index(l)
                        e_text = e
                        if 'child_newick' in graph.node[e]:
                            # Already-serialised subtree: defer it while other
                            # internal work remains, otherwise inline its text.
                            if e_count > 2 and len(up) > 1:
                                continue
                            e_text = graph.node[e]['child_newick']
                        leaf.pop(e_i)
                        ew = graph[curNode][e]['homology_dist']
                        text = e_text + ":" + str(ew)
                        leaves.append(text)
            # add newick text to curNode
            node_text = "(" + ",".join(leaves) + ")"
            last_string = node_text
            graph.node[curNode]['child_newick'] = node_text
            # change curNode to leaf
            cn_i = up.index(curNode)
            up.pop(cn_i)
            leaf.append((curNode, graph.node[curNode]['species']))
        if len(leaf) == 2 and len(up) == 0 and len(graph.nodes()) > 2:
            # Join the two remaining subtrees across the final edge.
            ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
            last_string = "(" + graph.node[leaf[0][0]]['child_newick'] + ":" + ew + "," + graph.node[leaf[1][0]]['child_newick'] + ":" + ew + ")"
        # Pretty-print: one token per line, then terminate with ';'.
        last_string = last_string.replace("(", "(\n")
        last_string = last_string.replace(",", ",\n")
        last_string = last_string.replace(")", ")\n")
        last_string = last_string.rstrip()
        return last_string + ";"
    def rootTree(self):
        """Return Score, root edge, number of losses.

        Scores every edge as a candidate root and keeps the best one in
        self.rootedTree. For graphs with more than 100 nodes no scoring is
        done: an edge that splits the tree roughly in half is returned with
        score -1.0 so the caller can split the cluster instead.

        NOTE(review): Python 2 / old NetworkX idioms (dict ``.keys()[0]``
        indexing, integer '/'). Also note the final return indexes
        roots[0], so an empty edge set would raise IndexError.
        """
        # for each edge in 'tree' graph, score the tree
        roots = []
        # Upper bound on gain+loss, used by scoreEdge to prune work.
        min_gl = len(self.graph.nodes()) * 2
        if self.rootEdge is not None: # check speed of this part in case of big cluster that's already been split once and is still big
            # A root edge was already chosen (tree split earlier):
            # (re)score just that edge.
            self.paths = nx.shortest_path(self.graph, None, None)
            (score, tree, gl_sum, loss) = self.scoreEdge(self.rootEdge, min_gl)
            self.rootedTree = tree
            return (score, self.rootEdge, loss)
        # Pre-compute all-pairs path lengths once; scoreEdge reuses them.
        if self.synteny:
            ([self.hom_shortest_paths, self.syn_shortest_paths], self.paths) = nxe.all_pairs_path_length(self.graph, ['homology_dist', 'synteny_dist'])
        else:
            ([self.hom_shortest_paths], self.paths) = nxe.all_pairs_path_length(self.graph, ['homology_dist'])
        if len(self.graph.nodes()) > 100:
            # Too large to score every edge: locate an edge with at least
            # half the leaves on one side, using an explicit iterative
            # traversal (manual stacks, no recursion) that counts the
            # leaves under each internal node.
            limit = len(self.graph.nodes()) / 2
            degrees = {} # node -> number of leaves under it
            right_stack = []
            left_stack = []
            to_degree_stack = [] # internal nodes awaiting their leaf count
            # Seed the walk from an arbitrary leaf's single neighbour.
            for n in self.graph.nodes():
                if len(self.graph[n]) == 1:
                    degrees[n] = 1
                    right_stack.append(self.graph[n].keys()[0])
                    break
            while True:
                if right_stack:
                    current_node = right_stack.pop()
                else:
                    current_node = left_stack.pop()
                right = False
                neighbors = self.graph[current_node].keys()
                if len(neighbors) == 1:
                    degrees[current_node] = 1
                else:
                    # Schedule unvisited neighbours: first goes right,
                    # the rest go left.
                    for neighbor in neighbors:
                        if neighbor in to_degree_stack or neighbor in degrees: # neighbor == to_degree_stack[-1]?
                            continue
                        if not right:
                            right_stack.append(neighbor)
                            right = True
                        else:
                            left_stack.append(neighbor)
                    to_degree_stack.append(current_node)
                if not right:
                    # Dead end reached: fold finished subtrees back up the
                    # pending stack, summing their leaf counts.
                    while True:
                        if not to_degree_stack:
                            break
                        to_degree = to_degree_stack[len(to_degree_stack) - 1]
                        neighbors = self.graph[to_degree].keys()
                        for neighbor in self.graph[to_degree].keys():
                            if neighbor not in degrees:
                                neighbors.remove(neighbor)
                        if len(neighbors) == 2:
                            degrees[to_degree] = degrees[neighbors[0]] + degrees[neighbors[1]]
                            if degrees[to_degree] >= limit:
                                # Found a balanced split point: return it as a
                                # pseudo root edge with sentinel score -1.0.
                                pair = neighbors[0] if degrees[neighbors[0]] >= degrees[neighbors[1]] else neighbors[1]
                                return (-1.0, [to_degree, pair], len(self.graph.nodes()))
                            to_degree_stack.pop()
                        else:
                            break
        else:
            # Small graph: score every edge as a candidate root.
            for e in self.graph.edges():
                (score, tree, gl_sum, loss) = self.scoreEdge(e, min_gl)
                if gl_sum < min_gl:
                    min_gl = gl_sum
                self.graph[e[0]][e[1]]['root_score'] = score
                roots.append((score, e, tree, loss))
        # Best-scoring edge first.
        roots = sorted(roots, key=lambda tup: tup[0], reverse=True)
        if len(roots) == 1 or roots[0][0] == roots[1][0]: # how does the second condition make the tree correct?
            self.OK = "true"
        self.rootedTree = roots[0][2]
        return (roots[0][0], roots[0][1], roots[0][3])
def scoreEdge(self, e, min_gl):
# get homology distances, calculate variance
# h_dists = self.getHomologyDistances(e)
h_dists = self.getDistances(e, 'homology_dist')
h_var = numpy.var(h_dists)
# s_dists = self.getSyntenyDistances(e)
if self.synteny:
s_dists = self.getDistances(e, 'synteny_dist')
s_var = numpy.var(s_dists)
else:
s_var = 0 # syn_factor becomes a constant at -2 so doesn't change ranking
# get gain/loss count
my_gl = 0
gain = 0
loss = 0
if len(self.graph.nodes()) > 100:
(gain, loss, tree) = self.getGainLossCount(e, -1)
# why not keep the results from getGainLossCount?
gain = min_gl / 2
loss = min_gl / 2
else:
(gain, loss, tree) = self.getGainLossCount(e, min_gl)
my_gl = gain + loss
# score root
my_poisson = 0.0
# my_gain_poisson = 0.0
my_gain_poisson = poisson.pmf((gain), | |
<reponame>rainzhop/ConvNetQuake<gh_stars>0
# coding: utf-8
# The Hazard Library
# Copyright (C) 2013-2016 GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.mfd.youngs_coppersmith_1985` defines the
Youngs and Coppersmith 1985 MFD.
"""
import numpy
from openquake.baselib.python3compat import range, round
from openquake.hazardlib.mfd.base import BaseMFD
# width of the boxcar function representing the characteristic
# distribution
DELTA_CHAR = 0.5
class YoungsCoppersmith1985MFD(BaseMFD):
"""
Class implementing the MFD for the 'Characteristic Earthquake Model' as
described in: "Implications of fault slip rates and earthquake recurrence
    models to probabilistic seismic hazard estimates", by R. R. Youngs and
    K. J. Coppersmith and published in Bulletin of the Seismological
    Society of America, Vol. 75, No. 4, pages 939-964, 1985.
The class implements the MFD under the following assumptions as reported
at page 954:
1) Δ_mc (width of the boxcar distribution representing characteristic
rate) is equal to 0.5 magnitude unit
    2) m' (maximum magnitude value for the Gutenberg-Richter part of the
distribution) is equal to the absolute maximum magnitude minus Δ_mc
(that is there is no gap between the Gutenberg-Richter distribution and
the boxcar distribution)
3) the rate of events at the characteristic magnitude is equal to the
rate of events for magnitude equal to m' - 1
:param min_mag:
The lowest possible magnitude for the MFD. The first bin in the
:meth:`result histogram <get_annual_occurrence_rates>` is aligned
to make its left border match this value.
:param a_val:
The Gutenberg-Richter ``a`` value -- the intercept of the loglinear
cumulative G-R relationship.
:param b_val:
The Gutenberg-Richter ``b`` value -- the gradient of the loglinear
G-R relationship.
:param char_mag:
The characteristic magnitude defining the middle point of the
characteristic distribution. That is the boxcar function representing
the characteristic distribution is defined in the range
[char_mag - 0.25, char_mag + 0.25].
:param char_rate:
The characteristic rate associated to the characteristic magnitude,
to be distributed over the domain of the boxcar function representing
the characteristic distribution (that is λ_char = char_rate / 0.5)
:param bin_width:
A positive float value -- the width of a single histogram bin.
Values for ``min_mag`` and the maximum magnitude (char_mag + 0.25) don't
have to be aligned with respect to ``bin_width``. They get rounded
accordingly anyway so that both are divisible by ``bin_width`` just before
converting a function to a histogram.
See :meth:`_get_min_mag_and_num_bins`.
"""
MODIFICATIONS = set()
def __init__(self, min_mag, a_val, b_val, char_mag, char_rate, bin_width):
self.min_mag = min_mag
self.a_val = a_val
self.b_val = b_val
self.char_mag = char_mag
self.char_rate = char_rate
self.bin_width = bin_width
self.check_constraints()
def get_min_max_mag(self):
"Return the minimum and maximum magnitudes"
mag, num_bins = self._get_min_mag_and_num_bins()
return mag, mag + self. bin_width * (num_bins - 1)
    def check_constraints(self):
        """
        Checks the following constraints:

        * minimum magnitude is positive.
        * ``b`` value is positive.
        * characteristic magnitude is positive
        * characteristic rate is positive
        * bin width is in the range (0, 0.5] to allow for at least one bin
          representing the characteristic distribution
        * characteristic magnitude minus 0.25 (that is the maximum magnitude
          of the G-R distribution) is greater than the minimum magnitude by at
          least one magnitude bin.
        * rate of events at the characteristic magnitude is equal to the
          rate of events for magnitude equal to m_prime - 1. This is done
          by asserting the equality (up to 7 digit precision) ::

            10 ** (a_incr - b * (m' - 1)) == char_rate / 0.5

          where ``a_incr`` is the incremental a value obtained from the
          cumulative a value using the following formula ::

            a_incr = a_val + log10(b_val * ln(10))

          and ``m' - 1 = char_mag - 1.25``
        """
        # The `not x > 0` form (rather than `x <= 0`) also rejects NaN.
        if not self.min_mag > 0:
            raise ValueError('minimum magnitude must be positive')
        if not self.b_val > 0:
            raise ValueError('b value must be positive')
        if not self.char_mag > 0:
            raise ValueError('characteristic magnitude must be positive')
        if not self.char_rate > 0:
            raise ValueError('characteristic rate must be positive')
        # DELTA_CHAR (0.5) is the boxcar width of the characteristic part; a
        # wider bin could not represent it with even a single bin.
        if not 0 < self.bin_width <= DELTA_CHAR:
            err_msg = 'bin width must be in the range (0, %s] to allow for ' \
                      'at least one magnitude bin representing the ' \
                      'characteristic distribution' % DELTA_CHAR
            raise ValueError(err_msg)
        # The G-R part must span at least one bin below the boxcar's lower edge.
        if not self.char_mag - DELTA_CHAR / 2 >= self.min_mag + self.bin_width:
            err_msg = 'Maximum magnitude of the G-R distribution (char_mag ' \
                      '- 0.25) must be greater than the minimum magnitude ' \
                      'by at least one magnitude bin.'
            raise ValueError(err_msg)
        # Convert the cumulative a value to its incremental form and verify
        # the model assumption: the characteristic rate equals the G-R rate
        # at m' - 1 (= char_mag - 1.25), up to 1e-7 absolute tolerance.
        a_incr = self.a_val + numpy.log10(self.b_val * numpy.log(10))
        actual = 10 ** (a_incr - self.b_val * (self.char_mag - 1.25))
        desired = self.char_rate / DELTA_CHAR
        if not numpy.allclose(actual, desired, rtol=0.0, atol=1e-07):
            err_msg = 'Rate of events at the characteristic magnitude is ' \
                      'not equal to the rate of events for magnitude equal ' \
                      'to char_mag - 1.25'
            raise ValueError(err_msg)
@classmethod
def from_total_moment_rate(cls, min_mag, b_val, char_mag,
total_moment_rate, bin_width):
"""
Define Youngs and Coppersmith 1985 MFD by constraing cumulative a
value and characteristic rate from total moment rate.
The cumulative a value and characteristic rate are obtained by
solving equations (16) and (17), page 954, for the cumulative rate of
events with magnitude greater than the minimum magnitude - N(min_mag)
- and the cumulative rate of characteristic earthquakes - N(char_mag).
The difference ``N(min_mag) - N(char_mag)`` represents the rate of
noncharacteristic, exponentially distributed earthquakes and is used
to derive the cumulative a value by solving the following equation ::
10 ** (a_val - b_val * min_mag) -
10 ** (a_val - b_val * (char_mag - 0.25))
= N(min_mag) - N(char_mag)
which can be written as ::
a_val =
log10(N(min_mag) - N(char_mag)) /
(10 ** (- b_val * min_mag) - 10 ** (- b_val * (char_mag - 0.25))
In the calculation of N(min_mag) and N(char_mag), the Hanks and
Kanamori (1979) formula ::
M0 = 10 ** (1.5 * Mw + 9.05)
is used to convert moment magnitude (Mw) to seismic moment (M0,
Newton × m)
:param min_mag:
The lowest magnitude for the MFD. The first bin in the
:meth:`result histogram <get_annual_occurrence_rates>` is aligned
to make its left border match this value.
:param b_val:
The Gutenberg-Richter ``b`` value -- the gradient of the loglinear
G-R relationship.
:param char_mag:
The characteristic magnitude defining the middle point of
characteristic distribution. That is the boxcar function
representing the characteristic distribution is defined in the
range [char_mag - 0.25, char_mag + 0.25].
:param total_moment_rate:
Total moment rate in N * m / year.
:param bin_width:
A positive float value -- the width of a single histogram bin.
:returns:
An instance of :class:`YoungsCoppersmith1985MFD`.
Values for ``min_mag`` and the maximum magnitude (char_mag + 0.25)
don't have to be aligned with respect to ``bin_width``. They get
rounded accordingly anyway so that both are divisible by ``bin_width``
just before converting a function to a histogram.
See :meth:`_get_min_mag_and_num_bins`.
"""
beta = b_val * numpy.log(10)
mu = char_mag + DELTA_CHAR / 2
m0 = min_mag
# seismic moment (in Nm) for the maximum magnitude
c = 1.5
d = 9.05
mo_u = 10 ** (c * mu + d)
# equations (16) and (17) solved for N(min_mag) and N(char_mag)
c1 = numpy.exp(-beta * (mu - m0 - 0.5))
c2 = numpy.exp(-beta * (mu - m0 - 1.5))
c3 = beta * c2 / (2 * (1 - c1) + beta * c2)
c4 = (b_val * (10 ** (-c / 2)) / (c - b_val)) + \
(b_val * numpy.exp(beta) * (1 - (10 ** (-c / 2))) / c)
n_min_mag = (1 | |
# Cannot determine a URL to this page - cobble one together based on
# whatever we find in ALLOWED_HOSTS
try:
hostname = settings.ALLOWED_HOSTS[0]
if hostname == '*':
# '*' is a valid value to find in ALLOWED_HOSTS[0], but it's not a valid domain name.
# So we pretend it isn't there.
raise IndexError
except IndexError:
hostname = 'localhost'
path = '/'
port = 80
scheme = 'http'
http_host = hostname
if port != (443 if scheme == 'https' else 80):
http_host = '%s:%s' % (http_host, port)
dummy_values = {
'REQUEST_METHOD': 'GET',
'PATH_INFO': path,
'SERVER_NAME': hostname,
'SERVER_PORT': port,
'SERVER_PROTOCOL': 'HTTP/1.1',
'HTTP_HOST': http_host,
'wsgi.version': (1, 0),
'wsgi.input': StringIO(),
'wsgi.errors': StringIO(),
'wsgi.url_scheme': scheme,
'wsgi.multithread': True,
'wsgi.multiprocess': True,
'wsgi.run_once': False,
}
# Add important values from the original request object, if it was provided.
HEADERS_FROM_ORIGINAL_REQUEST = [
'REMOTE_ADDR', 'HTTP_X_FORWARDED_FOR', 'HTTP_COOKIE', 'HTTP_USER_AGENT', 'HTTP_AUTHORIZATION',
'wsgi.version', 'wsgi.multithread', 'wsgi.multiprocess', 'wsgi.run_once',
]
if settings.SECURE_PROXY_SSL_HEADER:
HEADERS_FROM_ORIGINAL_REQUEST.append(settings.SECURE_PROXY_SSL_HEADER[0])
if original_request:
for header in HEADERS_FROM_ORIGINAL_REQUEST:
if header in original_request.META:
dummy_values[header] = original_request.META[header]
return dummy_values
def _get_dummy_header_url(self, original_request=None):
"""
Return the URL that _get_dummy_headers() should use to set META headers
for the faked HttpRequest.
"""
return self.full_url
DEFAULT_PREVIEW_MODES = [('', _('Default'))]
@property
def preview_modes(self):
"""
A list of (internal_name, display_name) tuples for the modes in which
this page can be displayed for preview/moderation purposes. Ordinarily a page
will only have one display mode, but subclasses of Page can override this -
for example, a page containing a form might have a default view of the form,
and a post-submission 'thank you' page
"""
return Page.DEFAULT_PREVIEW_MODES
@property
def default_preview_mode(self):
"""
The preview mode to use in workflows that do not give the user the option of selecting a
mode explicitly, e.g. moderator approval. Will raise IndexError if preview_modes is empty
"""
return self.preview_modes[0][0]
def is_previewable(self):
"""Returns True if at least one preview mode is specified"""
# It's possible that this will be called from a listing page using a plain Page queryset -
# if so, checking self.preview_modes would incorrectly give us the default set from
# Page.preview_modes. However, accessing self.specific.preview_modes would result in an N+1
# query problem. To avoid this (at least in the general case), we'll call .specific only if
# a check of the property at the class level indicates that preview_modes has been
# overridden from whatever type we're currently in.
page = self
if page.specific_class.preview_modes != type(page).preview_modes:
page = page.specific
return bool(page.preview_modes)
def serve_preview(self, request, mode_name):
"""
Return an HTTP response for use in page previews. Normally this would be equivalent
to self.serve(request), since we obviously want the preview to be indicative of how
it looks on the live site. However, there are a couple of cases where this is not
appropriate, and custom behaviour is required:
1) The page has custom routing logic that derives some additional required
args/kwargs to be passed to serve(). The routing mechanism is bypassed when
previewing, so there's no way to know what args we should pass. In such a case,
the page model needs to implement its own version of serve_preview.
2) The page has several different renderings that we would like to be able to see
when previewing - for example, a form page might have one rendering that displays
the form, and another rendering to display a landing page when the form is posted.
This can be done by setting a custom preview_modes list on the page model -
Wagtail will allow the user to specify one of those modes when previewing, and
pass the chosen mode_name to serve_preview so that the page model can decide how
to render it appropriately. (Page models that do not specify their own preview_modes
list will always receive an empty string as mode_name.)
Any templates rendered during this process should use the 'request' object passed
here - this ensures that request.user and other properties are set appropriately for
the wagtail user bar to be displayed. This request will always be a GET.
"""
request.is_preview = True
response = self.serve(request)
patch_cache_control(response, private=True)
return response
def get_cached_paths(self):
"""
This returns a list of paths to invalidate in a frontend cache
"""
return ['/']
def get_sitemap_urls(self, request=None):
return [
{
'location': self.get_full_url(request),
# fall back on latest_revision_created_at if last_published_at is null
# (for backwards compatibility from before last_published_at was added)
'lastmod': (self.last_published_at or self.latest_revision_created_at),
}
]
def get_static_site_paths(self):
"""
This is a generator of URL paths to feed into a static site generator
Override this if you would like to create static versions of subpages
"""
# Yield path for this page
yield '/'
# Yield paths for child pages
for child in self.get_children().live():
for path in child.specific.get_static_site_paths():
yield '/' + child.slug + path
def get_ancestors(self, inclusive=False):
"""
Returns a queryset of the current page's ancestors, starting at the root page
and descending to the parent, or to the current page itself if ``inclusive`` is true.
"""
return Page.objects.ancestor_of(self, inclusive)
def get_descendants(self, inclusive=False):
"""
Returns a queryset of all pages underneath the current page, any number of levels deep.
If ``inclusive`` is true, the current page itself is included in the queryset.
"""
return Page.objects.descendant_of(self, inclusive)
def get_siblings(self, inclusive=True):
"""
Returns a queryset of all other pages with the same parent as the current page.
If ``inclusive`` is true, the current page itself is included in the queryset.
"""
return Page.objects.sibling_of(self, inclusive)
def get_next_siblings(self, inclusive=False):
return self.get_siblings(inclusive).filter(path__gte=self.path).order_by('path')
def get_prev_siblings(self, inclusive=False):
return self.get_siblings(inclusive).filter(path__lte=self.path).order_by('-path')
def get_view_restrictions(self):
"""
Return a query set of all page view restrictions that apply to this page.
This checks the current page and all ancestor pages for page view restrictions.
If any of those pages are aliases, it will resolve them to their source pages
before querying PageViewRestrictions so alias pages use the same view restrictions
as their source page and they cannot have their own.
"""
page_ids_to_check = set()
def add_page_to_check_list(page):
# If the page is an alias, add the source page to the check list instead
if page.alias_of:
add_page_to_check_list(page.alias_of)
else:
page_ids_to_check.add(page.id)
# Check current page for view restrictions
add_page_to_check_list(self)
# Check each ancestor for view restrictions as well
for page in self.get_ancestors().only('alias_of'):
add_page_to_check_list(page)
return PageViewRestriction.objects.filter(page_id__in=page_ids_to_check)
password_required_template = getattr(settings, 'PASSWORD_REQUIRED_TEMPLATE', 'wagtailcore/password_required.html')
def serve_password_required_response(self, request, form, action_url):
    """
    Render the response telling the user this page is password-protected.

    form = Django form object with the password input (plus any hidden
        fields that must also be rendered by the template)
    action_url = URL the form should be POSTed to
    """
    context = self.get_context(request)
    context.update({
        'form': form,
        'action_url': action_url,
    })
    return TemplateResponse(request, self.password_required_template, context)
def with_content_json(self, content_json):
"""
Returns a new version of the page with field values updated to reflect changes
in the provided ``content_json`` (which usually comes from a previously-saved
page revision).
Certain field values are preserved in order to prevent errors if the returned
page is saved, such as ``id``, ``content_type`` and some tree-related values.
The following field values are also preserved, as they are considered to be
meaningful to the page as a whole, rather than to a specific revision:
* ``draft_title``
* ``live``
* ``has_unpublished_changes``
* ``owner``
* ``locked``
* ``locked_by``
* ``locked_at``
* ``latest_revision_created_at``
* ``first_published_at``
* ``alias_of``
* ``comments``
"""
obj = self.specific_class.from_json(content_json)
# These should definitely never change between revisions
obj.id = self.id
obj.pk = self.pk
obj.content_type = self.content_type
# Override possibly-outdated tree parameter fields
obj.path = self.path
obj.depth = self.depth
obj.numchild = self.numchild
# Update url_path to reflect potential slug changes, but maintaining the page's
# existing tree position
obj.set_url_path(self.get_parent())
# Ensure other values that are meaningful for the page as a whole (rather than
# to a specific revision) are preserved
obj.draft_title = self.draft_title
obj.live = self.live
obj.has_unpublished_changes = self.has_unpublished_changes
obj.owner = self.owner
obj.locked = self.locked
obj.locked_by = self.locked_by
obj.locked_at = | |
# Copyright 2016 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`zk_test` --- lib.zk.zk unit tests
=======================================
"""
# Stdlib
import logging
from unittest.mock import MagicMock, call, patch
# External packages
import nose
import nose.tools as ntools
from kazoo.client import KazooState
from kazoo.exceptions import (
ConnectionLoss,
LockTimeout,
NoNodeError,
SessionExpiredError,
)
from kazoo.handlers.threading import KazooTimeoutError
# SCION
from lib.thread import thread_safety_net
from lib.zk.errors import ZkNoConnection
from lib.zk.zk import ZkRetryLimit, Zookeeper
from test.testcommon import SCIONTestError, create_mock
class BaseZookeeper(object):
    """
    Shared fixtures for lib.zk.zk.Zookeeper unit tests.
    """
    default_args = ["1-ff00:0:301", "srvtype", b"srvid"]
    default_hosts = ["host1:9521", "host2:339"]

    def _init_basic_setup(self, **kwargs):
        # Construct a Zookeeper instance from the standard test arguments.
        return Zookeeper(*self.default_args, self.default_hosts, **kwargs)
class TestZookeeperInit(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.__init__
    """
    @patch("lib.zk.zk.Zookeeper._kazoo_start", autospec=True)
    @patch("lib.zk.zk.Zookeeper._setup_state_listener", autospec=True)
    @patch("lib.zk.zk.Zookeeper._kazoo_setup", autospec=True)
    @patch("lib.zk.zk.queue.Queue", autospec=True)
    @patch("lib.zk.zk.threading.Event", autospec=True)
    def test_full(self, event, queue, ksetup, listener, kstart):
        # All optional constructor arguments supplied explicitly.
        # Setup and call
        event.side_effect = ["event0", "event1"]  # consumed as _connected, then _lock
        inst = self._init_basic_setup(
            timeout=4.5, on_connect="on_conn", on_disconnect="on_dis")
        # Tests
        ntools.eq_(inst._isd_as, "1-ff00:0:301")
        # b"srvid" is expected to be stored base64-encoded ("c3J2aWQ=")
        ntools.eq_(inst._srv_id, 'c3J2aWQ=')
        ntools.eq_(inst._timeout, 4.5)
        ntools.eq_(inst._on_connect, "on_conn")
        ntools.eq_(inst._on_disconnect, "on_dis")
        ntools.eq_(inst.prefix, "/1-ff00:0:301/srvtype")
        ntools.eq_(inst._connected, "event0")
        ntools.eq_(inst._lock, "event1")
        queue.assert_called_once_with()
        ntools.eq_(inst._state_events, queue.return_value)
        ntools.eq_(inst.conn_epoch, 0)
        ntools.eq_(inst._parties, {})
        ntools.eq_(inst._zk_lock, None)
        ksetup.assert_called_once_with(inst, self.default_hosts)
        listener.assert_called_once_with(inst)
        kstart.assert_called_once_with(inst)

    @patch("lib.zk.zk.Zookeeper._kazoo_start", autospec=True)
    @patch("lib.zk.zk.Zookeeper._setup_state_listener", autospec=True)
    @patch("lib.zk.zk.Zookeeper._kazoo_setup", autospec=True)
    @patch("lib.zk.zk.threading.Semaphore", autospec=True)
    @patch("lib.zk.zk.threading.Event", autospec=True)
    def test_defaults(self, event, semaphore, ksetup, listener, kstart):
        # Omitted optional arguments must fall back to their defaults.
        # Setup and call
        inst = self._init_basic_setup()
        # Tests
        ntools.eq_(inst._timeout, 1.0)
        ntools.eq_(inst._on_connect, None)
        ntools.eq_(inst._on_disconnect, None)
class TestZookeeperKazooSetup(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._kazoo_setup
    """
    @patch("lib.zk.zk.KazooClient", autospec=True)
    @patch("lib.zk.zk.logging.getLogger", autospec=True)
    @patch("lib.zk.zk.KazooRetry", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test(self, init, kretry, getlogger, kclient):
        # Setup
        inst = self._init_basic_setup()
        inst._timeout = 7.9
        logger = create_mock(["setLevel"])
        getlogger.return_value = logger
        # Call
        inst._kazoo_setup(["host0", "host1"])
        # Tests
        # max_tries=-1: the connection is expected to be retried indefinitely.
        kretry.assert_called_once_with(max_tries=-1, max_delay=1)
        getlogger.assert_called_once_with("KazooClient")
        # kazoo's own logger is expected to be quieted to ERROR level.
        logger.setLevel.assert_called_once_with(logging.ERROR)
        # hosts must be joined into a single comma-separated string.
        kclient.assert_called_once_with(
            hosts="host0,host1", timeout=7.9,
            connection_retry=kretry.return_value, logger=getlogger.return_value)
        ntools.eq_(inst.kazoo, kclient.return_value)
class TestZookeeperKazooStart(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._kazoo_start
    """
    @patch("lib.zk.zk.logging", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test(self, init, logging_):
        # Setup
        inst = self._init_basic_setup()
        inst.kazoo = create_mock(["start"])
        # Call
        inst._kazoo_start()
        # Tests
        inst.kazoo.start.assert_called_once_with()

    @patch("lib.zk.zk.kill_self", autospec=True)
    @patch("lib.zk.zk.logging", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_timeout(self, init, logging_, kill_self):
        # A kazoo connection timeout is fatal: the process kills itself.
        # Setup
        inst = self._init_basic_setup()
        inst.kazoo = create_mock(["start"])
        inst.kazoo.start.side_effect = KazooTimeoutError
        # Call
        inst._kazoo_start()
        # Tests
        kill_self.assert_called_once_with()
class TestZookeeperSetupStateListener(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._setup_state_listener
    """
    @patch("lib.zk.zk.threading.Thread", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test(self, init, thread):
        # Setup
        inst = self._init_basic_setup()
        thread.return_value = create_mock(["start"])
        inst.kazoo = create_mock(["add_listener"])
        # Call
        inst._setup_state_listener()
        # Tests
        # The state handler must run in a named daemon thread, wrapped in
        # thread_safety_net, and be started immediately.
        thread.assert_called_once_with(target=thread_safety_net,
                                       args=(inst._state_handler,),
                                       name="libZK._state_handler", daemon=True)
        thread.return_value.start.assert_called_once_with()
        # FIX: the original line *called* the mock instead of asserting on it,
        # so the listener registration was never actually verified.
        inst.kazoo.add_listener.assert_called_once_with(inst._state_listener)
class TestZookeeperStateListener(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._state_listener
    """
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test(self, init):
        # Setup
        inst = self._init_basic_setup()
        inst._state_events = create_mock(["put"])
        inst.conn_epoch = 47
        # Call
        # Returning False keeps the listener registered with kazoo.
        ntools.eq_(inst._state_listener("statist"), False)
        # Tests
        # The new state is queued for the handler thread and the connection
        # epoch is incremented.
        inst._state_events.put.assert_called_once_with("statist")
        ntools.eq_(inst.conn_epoch, 48)
class TestZookeeperStateHandler(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._state_handler
    """
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_flapping(self, init):
        # When more events are already queued (connection flapping), the
        # stale state should be drained without invoking a state handler.
        # Setup
        inst = self._init_basic_setup()
        inst._state_events = create_mock(["get", "empty"])
        # Setup inst._state_events.get to allow a single iteration of the loop
        inst._state_events.get.side_effect = [KazooState.CONNECTED]
        inst._state_events.empty.return_value = False
        inst._state_connected = create_mock()
        # Call
        # The exhausted side_effect raises StopIteration on the second get(),
        # breaking the handler's otherwise infinite loop.
        ntools.assert_raises(StopIteration, inst._state_handler)
        # Tests
        inst._state_events.get.assert_has_calls([call()] * 2)
        ntools.assert_false(inst._state_connected.called)

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def _check(self, old_state, new_state, init):
        # Verify exactly one handler fires for a given old->new transition.
        # Setup
        inst = self._init_basic_setup()
        inst._state_events = create_mock(["get", "empty"])
        # Setup inst._state_events.get to allow a single iteration of the loop
        inst._state_events.get.side_effect = [new_state]
        inst._state_connected = create_mock()
        inst._state_suspended = create_mock()
        inst._state_lost = create_mock()
        # Call
        ntools.assert_raises(StopIteration, inst._state_handler,
                             initial_state=old_state)
        # Tests
        connected = suspended = lost = 0
        if old_state == new_state:
            # In this case none of the state change handlers should be called
            pass
        elif new_state == KazooState.CONNECTED:
            connected = 1
        elif new_state == KazooState.SUSPENDED:
            suspended = 1
        elif new_state == KazooState.LOST:
            lost = 1
        else:
            raise SCIONTestError("Invalid new state")
        ntools.eq_(inst._state_connected.call_count, connected)
        ntools.eq_(inst._state_suspended.call_count, suspended)
        ntools.eq_(inst._state_lost.call_count, lost)

    def test_basic(self):
        # Nose generator: one _check per (old, new) state pair.
        test_inputs = (
            ("startup", KazooState.CONNECTED),
            (KazooState.CONNECTED, KazooState.CONNECTED),
            (KazooState.CONNECTED, KazooState.SUSPENDED),
            (KazooState.CONNECTED, KazooState.LOST),
            (KazooState.SUSPENDED, KazooState.CONNECTED),
            (KazooState.SUSPENDED, KazooState.LOST),
            (KazooState.LOST, KazooState.CONNECTED),
        )
        for old_state, new_state, in test_inputs:
            yield self._check, old_state, new_state
class TestZookeeperStateConnected(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._state_connected
    """
    def _setup(self):
        # Common fixture: a live client id and two registered parties.
        inst = self._init_basic_setup()
        inst.kazoo = MagicMock(spec_set=["client_id"])
        inst.kazoo.client_id = MagicMock(spec_set=["__getitem__"])
        inst.ensure_path = create_mock()
        inst.prefix = "/prefix"
        inst._parties = {
            "/patha": create_mock(["autojoin"]),
            "/pathb": create_mock(["autojoin"]),
        }
        inst._connected = create_mock(["set"])
        inst._on_connect = None
        return inst

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_basic(self, init):
        inst = self._setup()
        # Call
        inst._state_connected()
        # Tests
        inst.ensure_path.assert_called_once_with(inst.prefix, abs=True)
        # Every registered party must be re-joined on (re)connect.
        inst._parties["/patha"].autojoin.assert_called_once_with()
        inst._parties["/pathb"].autojoin.assert_called_once_with()
        inst._connected.set.assert_called_once_with()

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_connloss(self, init):
        # Losing the connection mid-handler must not mark us connected.
        inst = self._setup()
        inst.ensure_path.side_effect = ZkNoConnection
        # Call
        inst._state_connected()
        # Tests
        ntools.assert_false(inst._connected.called)

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_on_connect(self, init):
        # A user-supplied on_connect callback must be invoked.
        inst = self._setup()
        inst._on_connect = create_mock()
        # Call
        inst._state_connected()
        # Tests
        inst._on_connect.assert_called_once_with()

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_conn_flap(self, init):
        # A null client_id indicates the session flapped; all work is skipped.
        inst = self._setup()
        inst.kazoo.client_id = None
        # Call
        inst._state_connected()
        # Tests
        ntools.assert_false(inst.ensure_path.called)
class TestZookeeperStateDisconnected(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper._state_suspended AND
    lib.zk.zk.Zookeeper._state_lost

    _state_suspended and _state_lost currently have almost identical code, so
    test them both in the same way.
    """
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def _check(self, f_name, init, test_callback=False):
        # NOTE(review): when the generator below yields a second positional
        # argument (True), it lands on `init` and the patch-provided mock
        # fills `test_callback` (which is truthy) — the test works, but
        # confirm this argument ordering is intended.
        inst = self._init_basic_setup()
        inst._connected = create_mock(["clear"])
        inst._on_disconnect = None
        if test_callback:
            inst._on_disconnect = create_mock()
        # Call
        getattr(inst, f_name)()
        # Tests
        inst._connected.clear.assert_called_once_with()
        if test_callback:
            inst._on_disconnect.assert_called_once_with()

    def test(self):
        """
        Test with and without a callback function defined
        """
        for f in "_state_suspended", "_state_lost":
            yield self._check, f, True
            yield self._check, f
class TestZookeeperIsConnected(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.is_connected
    """
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def _check(self, expected, init):
        # is_connected() must simply report the state of the _connected event.
        inst = self._init_basic_setup()
        inst._connected = create_mock(["is_set"])
        inst._connected.is_set.return_value = expected
        # Call
        ntools.eq_(inst.is_connected(), expected)
        # Tests
        inst._connected.is_set.assert_called_once_with()

    def test(self):
        # Exercise both the connected and the disconnected case.
        for expected in (True, False):
            yield self._check, expected
class TestZookeeperWaitConnected(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.wait_connected
    """
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_connected(self, init):
        # Already connected: return immediately without waiting.
        inst = self._init_basic_setup()
        inst.is_connected = create_mock()
        # Call
        inst.wait_connected()
        # Tests
        inst.is_connected.assert_called_once_with()

    @patch("lib.zk.zk.time.time", autospec=True)
    @patch("lib.zk.zk.logging.debug", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_no_timeout(self, init, _, time_):
        # With no deadline, each wait is expected to use timeout=10.0.
        inst = self._init_basic_setup()
        inst.is_connected = create_mock()
        inst.is_connected.return_value = False
        time_.side_effect = [0, 10, 20]
        inst._connected = create_mock(["wait"])
        inst._connected.wait.side_effect = [False, True]
        # Call
        inst.wait_connected(timeout=None)
        # Tests
        inst._connected.wait.assert_has_calls([call(timeout=10.0)] * 2)
        ntools.eq_(inst._connected.wait.call_count, 2)

    @patch("lib.zk.zk.time.time", autospec=True)
    @patch("lib.zk.zk.logging.debug", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_timeout_success(self, init, _, time_):
        # Second wait only gets the 5s remaining of the 15s deadline.
        inst = self._init_basic_setup()
        inst.is_connected = create_mock()
        inst.is_connected.return_value = False
        time_.side_effect = [0, 10, 20]
        inst._connected = create_mock(["wait"])
        inst._connected.wait.side_effect = [False, True]
        # Call
        inst.wait_connected(timeout=15)
        # Tests
        inst._connected.wait.assert_has_calls([call(timeout=10.0),
                                               call(timeout=5.0)])
        ntools.eq_(inst._connected.wait.call_count, 2)

    @patch("lib.zk.zk.time.time", autospec=True)
    @patch("lib.zk.zk.logging.debug", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_timeout_fail(self, init, _, time_):
        # Deadline expires before a connection: ZkNoConnection is raised.
        inst = self._init_basic_setup()
        inst.is_connected = create_mock()
        inst.is_connected.return_value = False
        time_.side_effect = [0, 10, 20]
        inst._connected = create_mock(["wait"])
        inst._connected.wait.side_effect = [False, False]
        # Call
        ntools.assert_raises(ZkNoConnection, inst.wait_connected, timeout=15)
        # Tests
        ntools.eq_(inst._connected.wait.call_count, 2)
class TestZookeeperEnsurePath(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.ensure_path
    """
    def _setup(self):
        inst = self._init_basic_setup()
        inst.prefix = "/prefix"
        inst.kazoo = create_mock(["ensure_path"])
        return inst

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_basic(self, init):
        # Relative paths are joined onto the instance prefix.
        # Setup
        inst = self._setup()
        # Call
        inst.ensure_path("pathness")
        # Tests
        inst.kazoo.ensure_path.assert_called_once_with("/prefix/pathness")

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_abs(self, init):
        # abs=True passes the path through untouched.
        # Setup
        inst = self._setup()
        # Call
        inst.ensure_path("/path/to/stuff", abs=True)
        # Tests
        inst.kazoo.ensure_path.assert_called_once_with("/path/to/stuff")

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def _check_error(self, excp, init):
        # kazoo connection errors must be wrapped in ZkNoConnection.
        # Setup
        inst = self._setup()
        inst.kazoo.ensure_path.side_effect = excp
        # Call
        ntools.assert_raises(ZkNoConnection, inst.ensure_path, "asdwaf")

    def test_errors(self):
        for excp in ConnectionLoss, SessionExpiredError:
            yield self._check_error, excp
class TestZookeeperPartySetup(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.party_setup
    """
    def _setup(self, connected=True):
        inst = self._init_basic_setup()
        inst.is_connected = create_mock()
        inst.is_connected.return_value = connected
        inst.prefix = "/prefix"
        inst.kazoo = create_mock()
        inst.ensure_path = create_mock()
        inst._srv_id = "srvid"
        inst._parties = {}
        return inst

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_not_connected(self, init):
        # Joining a party requires an active connection.
        inst = self._setup(connected=False)
        # Call
        ntools.assert_raises(ZkNoConnection, inst.party_setup)
        # Tests
        inst.is_connected.assert_called_once_with()

    @patch("lib.zk.zk.ZkParty", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_basic(self, init, zkparty):
        # Defaults: party node under the instance prefix, autojoin=True.
        inst = self._setup()
        # Call
        inst.party_setup()
        # Tests
        inst.ensure_path.assert_called_once_with("/prefix/party", abs=True)
        zkparty.assert_called_once_with(inst.kazoo, "/prefix/party",
                                        inst._srv_id, True)
        # The new party is recorded in _parties under its path.
        ntools.eq_(inst._parties, {"/prefix/party": zkparty.return_value})

    @patch("lib.zk.zk.ZkParty", autospec=True)
    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_full(self, init, zkparty):
        # Explicit prefix and autojoin=False are passed straight through.
        inst = self._setup()
        # Call
        inst.party_setup("/pref", False)
        # Tests
        zkparty.assert_called_once_with(inst.kazoo, "/pref/party", inst._srv_id,
                                        False)
class TestZookeeperGetLock(BaseZookeeper):
    """
    Unit tests for lib.zk.zk.Zookeeper.get_lock
    """
    def _setup(self, have_lock):
        inst = self._init_basic_setup()
        inst._zk_lock = create_mock(["acquire"])
        inst.kazoo = create_mock(["Lock"])
        inst.have_lock = create_mock()
        inst.have_lock.side_effect = have_lock
        inst.wait_connected = create_mock()
        inst._lock_epoch = 0
        inst.conn_epoch = 42
        inst._lock = create_mock(["set"])
        return inst

    @patch("lib.zk.zk.Zookeeper.__init__", autospec=True, return_value=None)
    def test_full(self, init):
        # With no existing lock object, one is created lazily, then acquired.
        inst = self._setup((True,))
        inst._zk_lock = None
        inst.prefix = "/prefix"
        inst._srv_id = "srvid"
        lock = create_mock(["acquire"])
        lock.return_value = True
        inst.kazoo.Lock.return_value = lock
        # Call
        ntools.assert_true(inst.get_lock(lock_timeout="lock t/o",
                                         conn_timeout="conn t/o"))
        # Tests
        inst.kazoo.Lock.assert_called_once_with("/prefix/lock", "srvid")
        inst.wait_connected.assert_called_once_with(timeout="conn t/o")
        # The lock epoch must be synced to the connection epoch on acquire.
        ntools.eq_(inst._lock_epoch, inst.conn_epoch)
        inst._zk_lock.acquire.assert_called_once_with(timeout="lock t/o")
        inst._lock.set.assert_called_once_with()
| |
import numpy as np
import os
import os.path as osp
import json
import torch
from rpn.db import SequenceDB, SAMPLE, SEQUENCE
from torch.utils.data import Dataset, Sampler
from torch.utils.data.dataloader import default_collate
from rpn.utils.torch_graph_utils import construct_full_graph, get_edge_features
import rpn.utils.torch_utils as tu
import rpn.utils.np_utils as npu
import deepdish
import h5py
from rpn.utils.timer import Timers
from rpn.utils.config import dict_to_namedtuple
def sample_actions(num_samples, num_objects, num_actions):
    """
    Draw random action assignments: each sample picks one object uniformly
    and assigns it a uniformly random non-noop action (action 0 is reserved
    for noop); every other object keeps action 0.
    """
    # TODO: merge this to make_action_graph_inputs
    out = np.zeros((num_samples, num_objects), dtype=np.int64)
    chosen_obj = np.random.randint(0, num_objects, size=num_samples)
    # action 0 is noop, so sample from [1, num_actions)
    chosen_act = np.random.randint(1, num_actions, size=num_samples)
    rows = np.arange(num_samples)
    out[rows, chosen_obj] = chosen_act
    return out
def make_action_graph_inputs(serialized_action, object_ids, num_nodes):
    """
    Build per-node action inputs from a serialized action.

    serialized_action[1] is the action index and serialized_action[3:] are
    the object-id arguments; each argument's node gets the value
    action_index + its argument position, all other nodes get 0.
    :return: (action_inputs of length num_nodes, node indices of the args)
    """
    arg_ids = serialized_action[3:]
    arg_node_index = np.array([object_ids.index(obj_id) for obj_id in arg_ids])
    node_actions = np.zeros(num_nodes, dtype=np.int64)
    if arg_node_index.size:
        # action is indexed by both action_index and argument position
        node_actions[arg_node_index] = serialized_action[1] + np.arange(arg_node_index.size)
    return node_actions, arg_node_index
def to_graph_dense(inputs, input_ids, input_len, id_to_index_map):
    """
    Scatter rows of ``inputs`` into a dense array of length ``input_len``,
    placing row i at the graph position id_to_index_map[input_ids[i]].
    Positions without a matching id stay zero.
    """
    dense = np.zeros((input_len,) + inputs.shape[1:], dtype=inputs.dtype)
    dest = np.array([id_to_index_map[iid] for iid in input_ids])
    dense[dest, ...] = inputs
    return dense
def collate_samples(samples, concatenate=False, exclude_keys=()):
    """
    Merge a list of per-sample dicts into a single batch dict.

    Keys in ``exclude_keys`` are kept as plain lists; multi-dimensional
    values are concatenated along dim 0 when ``concatenate`` is set;
    everything else goes through torch's default_collate.
    """
    batch = {}
    for key in samples[0]:
        values = [sample[key] for sample in samples]
        if key in exclude_keys:
            batch[key] = values
        elif concatenate and len(values[0].shape) > 1:
            batch[key] = torch.cat(values, dim=0)
        else:
            batch[key] = default_collate(values)
    return batch
def get_keyframes(symbol_sequence):
    """
    Find the time steps at which the (flattened) symbol state differs from
    the previous step.
    :param symbol_sequence: array of shape (T, ...)
    :return: 1-D array of step indices t (0 < t < T) where the state changed
    """
    flat = symbol_sequence.reshape((symbol_sequence.shape[0], -1))
    changed = np.any(flat[1:] != flat[:-1], axis=1)
    # +1: a difference between steps t-1 and t is attributed to step t
    keyframes = np.flatnonzero(changed) + 1
    assert(np.all(keyframes > 0) and np.all(keyframes < flat.shape[0]))
    return keyframes
def make_gt_inputs(state, current_unitary, node_index, edge_index, id_to_index_map):
    """
    Build ground-truth node and edge input features for a graph.

    Node features are the state concatenated with the densified current
    unitary symbols (column 0 of current_unitary holds the object id, the
    rest are symbol values); edge features concatenate both endpoint node
    feature vectors.
    """
    unitary_dense = to_graph_dense(
        current_unitary[:, 1:], current_unitary[:, 0], node_index.shape[0], id_to_index_map)
    node_inputs = np.concatenate([state, unitary_dense], axis=1)
    # Pair up the two endpoints' features for every edge.
    edge_inputs = get_edge_features(
        node_inputs, edge_index, lambda a, b: np.concatenate([a, b], axis=1))
    return node_inputs, edge_inputs
def make_graph_labels(sym_unitary, sym_binary, node_index, edge_index, id_to_index_map):
    """
    Densify symbolic labels onto graph nodes and edges.
    :param sym_unitary: rows of [object id, unary symbol values...]
    :param sym_binary: rows of [source id, target id, binary symbol values...]
    :return: (unitary_dense, binary_dense) aligned with node/edge order
    """
    # dense labels
    unitary_dense = to_graph_dense(
        sym_unitary[:, 1:], sym_unitary[:, 0], node_index.shape[0], id_to_index_map)
    binary_index, binary_val = sym_binary[:, :2], sym_binary[:, 2:]
    # NOTE(review): lookup keys are raw (source, target) int pairs — confirm
    # these match the pair keys stored in id_to_index_map.
    binary_index = [(s, t) for s, t in binary_index.astype(np.int64)]
    binary_dense = to_graph_dense(
        binary_val, binary_index, edge_index.shape[0], id_to_index_map)
    # Labels are expected to cover every node and every edge exactly once.
    assert (sym_unitary.shape[0] == node_index.shape[0])
    assert (sym_binary.shape[0] == edge_index.shape[0])
    return unitary_dense, binary_dense
def get_id_to_graph_index_map(object_ids, node_index, edge_index):
    """
    Get object id to graph index map
    :param object_ids: a list object ids
    :param node_index: per-node object index
    :param edge_index: iterable of (source, target) node index pairs
    :return: a dictionary mapping object id (or pairs) to graph index
    """
    mapping = {object_ids[ni]: pos for pos, ni in enumerate(node_index)}
    mapping.update({
        (object_ids[s], object_ids[t]): pos
        for pos, (s, t) in enumerate(edge_index)
    })
    return mapping
def get_edge_poses(object_poses, edge_index):
    """
    Build a 7-D relative pose feature per edge: the position difference
    (source - target) in the first 3 columns, and the source pose's
    remaining components (columns 3 onward) in the last 4.
    """
    src = edge_index[:, 0]
    dst = edge_index[:, 1]
    rel = np.zeros((edge_index.shape[0], 7), dtype=object_poses.dtype)
    rel[:, :3] = object_poses[src, :3] - object_poses[dst, :3]
    rel[:, 3:7] = object_poses[src, 3:]
    return rel
def get_sampler_without_eos(dataset, random=True):
    # Sampler over all dataset indices except end-of-sequence samples.
    index = dataset.db.index_without_eos()
    return SubsetSampler(index, random=random)
class SubsetSampler(Sampler):
    """Samples elements from a fixed list of indices, without replacement.

    :param indices: a sequence of indices
    :param random: shuffle the order on each iteration if True, otherwise
        yield the indices sequentially
    """
    def __init__(self, indices, random=True):
        self.indices = indices
        self.random = random

    def __iter__(self):
        if self.random:
            order = torch.randperm(len(self.indices))
        else:
            order = torch.arange(len(self.indices))
        return (self.indices[i] for i in order)

    def __len__(self):
        return len(self.indices)
class DDWrapper(object):
    """
    A dict-like DeepDish wrapper for partially loading an hdf5 file

    Keys listed in ``perm_load_keys`` are loaded once at construction and
    cached; other string keys yield a sub-wrapper one level deeper. Nested
    keys may be written as 'a/b' paths; they are resolved one level per
    wrapper (see __getitem__).
    """
    def __init__(self, filename, perm_load_keys=(), prefix='/'):
        self._fn = filename
        self._perm_dict = {}  # cache for eagerly-loaded keys
        self._prefix = prefix
        self._perm_load_keys = perm_load_keys
        for k in self._perm_load_keys:
            if '/' in k:
                # Nested keys are only loaded once the sub-wrapper is created.
                continue
            self._perm_dict[prefix + k] = deepdish.io.load(self._fn, prefix + k)

    def __getitem__(self, key):
        # Non-string keys are treated as row selections at the current prefix.
        if not isinstance(key, str):
            return deepdish.io.load(self._fn, self._prefix, sel=deepdish.aslice[key])
        elif self._prefix + key in self._perm_dict:
            return self._perm_dict[self._prefix + key]
        else:
            # Descend one level: strip the leading path component from the
            # nested perm-load keys and return a wrapper rooted at prefix/key/.
            new_keys = []
            for k in self._perm_load_keys:
                if '/' in k:
                    ks = k.split('/')
                    # NOTE(review): assumes every nested key starts with the
                    # requested key — confirm with callers.
                    assert(ks[0] == key)
                    new_keys.append('/'.join(ks[1:]))
            return self.__class__(self._fn, perm_load_keys=new_keys, prefix=self._prefix + key + '/')
class BasicDataset(Dataset):
    """Basic dataset for iterating task demonstration data."""

    def __init__(self, db, **kwargs):
        """
        :param db: a SequenceDB-compatible database
        :param kwargs: config options; recognized keys include 'seed' and
            'data_file' (the latter is set automatically by load())
        """
        super(BasicDataset, self).__init__()
        self.db = db
        self.config = dict_to_namedtuple(kwargs)
        self.timers = Timers()
        seed = kwargs.get('seed', 0)
        # Private RNG so per-dataset sampling is reproducible.
        self._npr = np.random.RandomState(seed)
        self._first = True

    def __len__(self):
        return self.db.num_samples(SAMPLE)  # number of samples

    def __str__(self):
        return 'num_sample=%i, avg_sequence_length=%f' % \
               (len(self), float(self.db.average_sequence_len()))

    def reload_data(self):
        # Re-read the backing file (e.g. after it has been regenerated).
        self.db = self.load(self.config.data_file).db

    @classmethod
    def load(cls, data_file, **kwargs):
        """
        Load a dataset from disk.

        Supported formats: '.h5' (fully loaded via deepdish), '.h5p'
        (lazy h5py handle), or a '.group' directory of datasets combined
        into a GroupDB.
        :raises NotImplementedError: for any other file extension.
        """
        print('loading data from %s' % data_file)
        if data_file.endswith('.h5'):
            rc = deepdish.io.load(data_file)
            db = SequenceDB.deserialize(rc)
        elif data_file.endswith('.h5p'):
            # NOTE: the handle is deliberately left open; the db keeps lazy
            # references into it.
            h5f = h5py.File(data_file, 'r')
            sample_len = json.loads(str(np.array(h5f['sample_len'])))
            rc = {
                'db': h5f['db'],
                'sample_len': sample_len
            }
            db = SequenceDB.deserialize(rc)
        elif data_file.endswith('.group'):
            db_list = []
            for f in sorted(os.listdir(data_file)):
                db_list.append(BasicDataset.load(osp.join(data_file, f)).db)
            db = GroupDB(db_list)
        else:
            raise NotImplementedError(data_file)
        kwargs['data_file'] = data_file
        return cls(db, **kwargs)

    def dump(self, data_file):
        """Serialize the database to '.h5' or '.h5p' on disk."""
        db = self.db.serialize()
        if data_file.endswith('.h5'):
            deepdish.io.save(data_file, db)
        elif data_file.endswith('.h5p'):
            # FIX: use a context manager so the file handle is closed and all
            # datasets are flushed to disk (the original leaked the handle).
            with h5py.File(data_file, 'w') as h5f:
                dbg = h5f.create_group('db')
                for k, v in db['db'].items():
                    dbg.create_dataset(k, data=v)
                h5f.create_dataset('sample_len', data=json.dumps(db['sample_len']))
        else:
            raise NotImplementedError
        print('data saved to %s' % data_file)
class GroupDB(object):
    """A wrapper for iterating a group of datasets. Takes a list of BasicDataset as input."""
    def __init__(self, db_list):
        self.db_list = db_list
        # Half-open global index ranges [begin, end), one per sub-db.
        self.index_range_to_db = []
        top = 0
        for db in db_list:
            dblen = db.num_samples(SAMPLE)
            self.index_range_to_db.append((top, top + dblen))
            top += dblen
        print(self.index_range_to_db)

    def num_samples(self, level):
        # NOTE(review): `level` is ignored here; the total is simply the end
        # of the last index range.
        return self.index_range_to_db[-1][1]

    def average_sequence_len(self):
        accum = []
        for db in self.db_list:
            accum.append(db.sample_len[SEQUENCE]['actions'])
        return np.mean(np.hstack(accum))

    def translate_index(self, index):
        # Map a global sample index to (sub-db position, range begin offset).
        for i, (begin, end) in enumerate(self.index_range_to_db):
            if begin <= index < end:
                return i, begin
        raise IndexError

    def get_db_item(self, key, index):
        db_idx, begin_idx = self.translate_index(index)
        return self.db_list[db_idx].get_db_item(key, index - begin_idx)

    def get_db_item_list(self, key, index, end_index):
        db_idx, begin_idx = self.translate_index(index)
        return self.db_list[db_idx].get_db_item_list(key, index - begin_idx, end_index - begin_idx)

    def sequence_index_from_sample_index(self, index):
        db_idx, begin_idx = self.translate_index(index)
        return self.db_list[db_idx].sequence_index_from_sample_index(index - begin_idx)

    def eos_index_from_sample_index(self, index):
        # Global index for the end of the sequence containing `index`.
        db_idx, begin_idx = self.translate_index(index)
        seq_i = self.db_list[db_idx].sequence_index_from_sample_index(index - begin_idx)
        bi = self.db_list[db_idx].sequence_begin_index(seq_i)
        # NOTE(review): bi + length points one past the sequence's last
        # sample — confirm whether callers expect the EoS sample itself.
        gi = bi + self.db_list[db_idx].sequence_length(seq_i)
        return gi + begin_idx

    def index_without_eos(self):
        """
        Compute global sample indices such that End of Sequence (EoS) samples
        are not included
        :return: indices in 1-D array
        """
        all_index = np.arange(self.num_samples(SAMPLE))
        # Any data key works for locating each sequence's last sample.
        k = list(self.db_list[0].data_keys)[0]
        sample_end_index = []
        for db, (begin_idx, end_idx) in zip(self.db_list, self.index_range_to_db):
            sample_end_index.append(
                begin_idx + db.sample_begin_index[SEQUENCE][k] + db.sample_len[SEQUENCE][k] - 1
            )
        return np.setdiff1d(all_index, np.hstack(sample_end_index))
class PreimageDataset(BasicDataset):
    """Dataset with precondition and dependency for training RPN."""
    def __init__(self, db, **kwargs):
        super(PreimageDataset, self).__init__(db, **kwargs)
        # Caches of edge-index arrays, keyed by graph size.
        self._graph_edges = {}
        self._gnn_edges = {}

    def graph_edges(self, num_nodes):
        # Fully-connected edges including self loops, cached per num_nodes.
        if num_nodes not in self._graph_edges:
            self._graph_edges[num_nodes] = construct_full_graph(num_nodes, self_connection=True)[1]
        return self._graph_edges[num_nodes]

    def gnn_edges(self, num_nodes):
        # Fully-connected edges without self loops, cached per num_nodes.
        if num_nodes not in self._gnn_edges:
            self._gnn_edges[num_nodes] = construct_full_graph(num_nodes, self_connection=False)[1]
        return self._gnn_edges[num_nodes]

    def get_plan_sample(self, index):
        """
        Assemble the planning targets for one sample: a randomly chosen step
        of the goal trace plus its preimage, reachability, subgoal, satisfied
        and dependency labels, converted to tensors.
        """
        debug = hasattr(self.config, 'debug') and self.config.debug
        if debug:
            print('WARNING!!!!!!!!!!!! DEBUG MODE!')
        with self.timers.timed('goal_trace'):
            # goal_trace rows are grouped per trace step; split them on the
            # cumulative entity counts.
            num_goal_entities = self.db.get_db_item('num_goal_entities', index)
            goal_split = np.cumsum(num_goal_entities)[:-1]
            goal_trace = self.db.get_db_item('goal_trace', index).astype(np.float32)
            goal_mask_trace = self.db.get_db_item('goal_mask_trace', index).astype(np.float32)
            goal_trace = np.split(goal_trace, goal_split, axis=0)
            goal_mask_trace = np.split(goal_mask_trace, goal_split, axis=0)
            assert(len(goal_trace) == num_goal_entities.shape[0])
            # pick a random step in the trace
            trace_len = len(goal_trace)
            trace_idx = int(self._npr.randint(0, trace_len))
            goal = goal_trace[trace_idx]
            goal_mask = goal_mask_trace[trace_idx]
            # find preimage, dummy if trace_idx is the last step
            if trace_idx < trace_len - 1:
                preimage = goal_trace[trace_idx + 1]
                preimage_mask = goal_mask_trace[trace_idx + 1]
                preimage_loss_mask = np.ones_like(preimage)
            else:
                # end of the trace
                preimage = np.zeros_like(goal_trace[trace_idx])
                preimage_mask = np.zeros_like(goal_mask_trace[trace_idx])
                preimage_loss_mask = np.zeros_like(preimage_mask)
            reachable = self.db.get_db_item('reachable_trace', index)[trace_idx].astype(np.float32)
            focus_trace = self.db.get_db_item('focus_trace', index).astype(np.float32)
            focus_trace = np.split(focus_trace, goal_split, axis=0)
            focus_mask = focus_trace[trace_idx]
            if debug:
                # Debug mode returns the full traces instead of a single step.
                reachable = self.db.get_db_item('reachable_trace', index).astype(np.float32)
                goal = goal_trace
                goal_mask = goal_mask_trace
                focus_mask = focus_trace
            # ground truth subgoal for the policy, use the last reachable goals as the goal mask
            subgoal = goal_trace[-1]
            subgoal_mask = focus_trace[-1]
        with self.timers.timed('sat_deps'):
            # satisfied and dependency have their own inputs
            sat_trace = self.db.get_db_item('satisfied_trace', index)
            ri = int(self._npr.randint(0, sat_trace.shape[0]))
            # Column 0 holds the step id; last_step flags the final step.
            last_step = np.array(sat_trace[ri, 0] == sat_trace[-1, 0]).astype(np.float32)
            satisfied = sat_trace[ri, 1:].astype(np.float32)
            if debug:
                satisfied = sat_trace[:, 1:].astype(np.float32)
            # dependencies
            deps_trace = self.db.get_db_item('dependency_trace', index)
            ri = int(self._npr.randint(0, deps_trace.shape[0]))
            deps = deps_trace[ri, 1:].astype(np.float32)
            # Only apply the dependency loss when any dependency is present.
            deps_loss_mask = np.array(np.any(deps > 0)).astype(np.float32)
            if debug:
                deps = deps_trace[:, 1:].astype(np.float32)
        sample = {
            'goal': goal,
            'goal_mask': goal_mask,
            'focus_mask': focus_mask,
            'satisfied': satisfied,
            'reachable': np.array(reachable),
            'preimage': preimage,
            'preimage_mask': preimage_mask,
            'preimage_loss_mask': preimage_loss_mask,
            'subgoal': subgoal,
            'subgoal_mask': subgoal_mask,
            'dependency': deps,
            'dependency_loss_mask': deps_loss_mask,
            'last_step': last_step,
        }
        return tu.batch_to_tensor(sample)
class GridPreimageDataset(PreimageDataset):
def __getitem__(self, index):
seq_i = self.db.sequence_index_from_sample_index(index)
with self.timers.timed('state'):
action = self.db.get_db_item('actions', index)
current_state = self.db.get_db_item('object_state_flat', index).astype(np.float32)
plan_sample = self.get_plan_sample(index)
with self.timers.timed('sample'):
sample = {
'states': tu.to_tensor(current_state),
'action_labels': tu.to_tensor(np.array(action[0])),
'num_entities': tu.to_tensor(np.array(current_state.shape[0])),
'seq_idx': | |
positive is the case where the detector predicts the patch's targeted
class (at a location overlapping the patch). A false positive is the case where the
detector predicts a non-targeted class at a location overlapping the patch. If the
detector predicts multiple instances of the target class (that overlap with the patch),
one of the predictions is considered a true positive and the others are ignored.
This metric is computed over all evaluation samples, rather than on a per-sample basis.
It returns a dictionary mapping each class to the average precision (AP) for the class.
The only classes with potentially nonzero AP's are the classes targeted by the patches
(see above paragraph).
Assumptions made for D-APRICOT dataset: each image has one ground truth box. This box corresponds
to the patch and is assigned a label of whatever the attack's target label is. There are no
ground-truth boxes of COCO objects.
From https://arxiv.org/abs/1912.08166: use a low IOU since "the patches will sometimes
generate many small, overlapping predictions in the region of the attack"
y_list (list): of length equal to the number of input examples. Each element in the list
should be a dict with "labels" and "boxes" keys mapping to a numpy array of
shape (N,) and (N, 4) respectively where N = number of boxes.
y_pred_list (list): of length equal to the number of input examples. Each element in the
list should be a dict with "labels", "boxes", and "scores" keys mapping to a numpy
array of shape (N,), (N, 4), and (N,) respectively where N = number of boxes.
"""
_check_object_detection_input(y_list, y_pred_list)
# Precision will be computed at recall points of 0, 0.1, 0.2, ..., 1
RECALL_POINTS = np.linspace(0, 1, 11)
# Converting boxes to a list of dicts (a list for predicted boxes that overlap with the patch,
# and a separate list for ground truth patch boxes), where each dict corresponds to a box and
# has the following keys "img_idx", "label", "box", as well as "score" for predicted boxes
patch_boxes_list = []
overlappping_pred_boxes_list = []
for img_idx, (y, y_pred) in enumerate(zip(y_list, y_pred_list)):
patch_box = y["boxes"].flatten()
patch_target_label = int(y["labels"])
patch_box_dict = {
"img_idx": img_idx,
"label": patch_target_label,
"box": patch_box,
}
patch_boxes_list.append(patch_box_dict)
for pred_box_idx in range(y_pred["labels"].size):
box = y_pred["boxes"][pred_box_idx]
if _intersection_over_union(box, patch_box) > iou_threshold:
label = y_pred["labels"][pred_box_idx]
score = y_pred["scores"][pred_box_idx]
pred_box_dict = {
"img_idx": img_idx,
"label": label,
"box": box,
"score": score,
}
overlappping_pred_boxes_list.append(pred_box_dict)
# Only compute AP of classes targeted by patches. The D-APRICOT dataset in some
# cases contains unlabeled COCO objects in the background
set_of_class_ids = set([i["label"] for i in patch_boxes_list])
# Initialize dict that will store AP for each class
average_precisions_by_class = {}
# Compute AP for each class
for class_id in set_of_class_ids:
# Build lists that contain all the predicted and patch boxes with a
# label of class_id
class_predicted_boxes = []
class_patch_boxes = []
for pred_box in overlappping_pred_boxes_list:
if pred_box["label"] == class_id:
class_predicted_boxes.append(pred_box)
for patch_box in patch_boxes_list:
if patch_box["label"] == class_id:
class_patch_boxes.append(patch_box)
# Determine how many patch boxes (of class_id) there are in each image
num_patch_boxes_per_img = Counter([gt["img_idx"] for gt in class_patch_boxes])
# Initialize dict where we'll keep track of whether a patch box has been matched to a
# prediction yet. This is necessary because if multiple predicted boxes of class_id
# overlap with a patch box, only one of the predicted boxes can be considered a
# true positive. The rest will be ignored
img_idx_to_patchboxismatched_array = {}
for img_idx, num_patch_boxes in num_patch_boxes_per_img.items():
img_idx_to_patchboxismatched_array[img_idx] = np.zeros(num_patch_boxes)
# Sort all predicted boxes (of class_id) by descending confidence
class_predicted_boxes.sort(key=lambda x: x["score"], reverse=True)
# Initialize list. Once filled in, true_positives[i] indicates (with a 1 or 0)
# whether the ith predicted box (of class_id) is a true positive or false positive
is_true_positive = []
# Iterating over all predicted boxes of class_id
for pred_idx, pred_box in enumerate(class_predicted_boxes):
# Only compare patch boxes from the same image as the predicted box
patch_boxes_from_same_img = [
patch_box
for patch_box in class_patch_boxes
if patch_box["img_idx"] == pred_box["img_idx"]
]
# If there are no patch boxes in the predicted box's image that target the predicted class
if len(patch_boxes_from_same_img) == 0:
is_true_positive.append(0)
continue
# Iterate over all patch boxes (of class_id) from the same image as the predicted box,
# determining which patch box has the highest iou with the predicted box.
highest_iou = 0
for patch_idx, patch_box in enumerate(patch_boxes_from_same_img):
iou = _intersection_over_union(pred_box["box"], patch_box["box"])
if iou >= highest_iou:
highest_iou = iou
highest_iou_patch_idx = patch_idx
# If the patch box has not yet been covered
if (
img_idx_to_patchboxismatched_array[pred_box["img_idx"]][
highest_iou_patch_idx
]
== 0
):
is_true_positive.append(1)
# Record that we've now covered this patch box. Any subsequent
# pred boxes that overlap with it are ignored
img_idx_to_patchboxismatched_array[pred_box["img_idx"]][
highest_iou_patch_idx
] = 1
else:
# This patch box was already covered previously (i.e a different predicted
# box was deemed a true positive after overlapping with this patch box).
# The predicted box is thus ignored.
continue
# Cumulative sums of false/true positives across all predictions which were sorted by
# descending confidence
tp_cumulative_sum = np.cumsum(is_true_positive)
fp_cumulative_sum = np.cumsum([not i for i in is_true_positive])
# Total number of patch boxes with a label of class_id
total_patch_boxes = len(class_patch_boxes)
if total_patch_boxes > 0:
recalls = tp_cumulative_sum / total_patch_boxes
else:
recalls = np.zeros_like(tp_cumulative_sum)
precisions = tp_cumulative_sum / (tp_cumulative_sum + fp_cumulative_sum + 1e-8)
interpolated_precisions = np.zeros(len(RECALL_POINTS))
# Interpolate the precision at each recall level by taking the max precision for which
# the corresponding recall exceeds the recall point
# See http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.157.5766&rep=rep1&type=pdf
for i, recall_point in enumerate(RECALL_POINTS):
precisions_points = precisions[np.where(recalls >= recall_point)]
# If there's no cutoff at which the recall > recall_point
if len(precisions_points) == 0:
interpolated_precisions[i] = 0
else:
interpolated_precisions[i] = max(precisions_points)
# Compute mean precision across the different recall levels
average_precision = interpolated_precisions.mean()
average_precisions_by_class[int(class_id)] = np.around(
average_precision, decimals=2
)
return average_precisions_by_class
def dapricot_patch_target_success(
    y_list, y_pred_list, iou_threshold=0.1, conf_threshold=0.5
):
    """Per-example binary metric for targeted patch attacks on D-APRICOT.

    For each (y, y_pred) pair, the result is 1 if the detector predicted the
    patch's targeted class at the patch location (IOU above ``iou_threshold``,
    default 0.1) with confidence above ``conf_threshold`` (default 0.5),
    and 0 otherwise.

    Assumptions made for the D-APRICOT dataset: each image has exactly one
    ground truth box, corresponding to the patch and labeled with the attack's
    target label. There are no ground-truth boxes of COCO objects.

    Note: from https://arxiv.org/abs/1912.08166: by default a low IOU threshold
    is used since "the patches will sometimes generate many small, overlapping
    predictions in the region of the attack".

    y_list (list): of length equal to the number of input examples. Each element
        should be a dict with "labels" and "boxes" keys mapping to numpy arrays
        of shape (N,) and (N, 4) respectively where N = number of boxes.
    y_pred_list (list): of length equal to the number of input examples. Each
        element should be a dict with "labels", "boxes", and "scores" keys
        mapping to numpy arrays of shape (N,), (N, 4), and (N,) respectively
        where N = number of boxes.
    """
    results = []
    for y, y_pred in zip(y_list, y_pred_list):
        success = _dapricot_patch_target_success(
            y, y_pred, iou_threshold=iou_threshold, conf_threshold=conf_threshold
        )
        results.append(success)
    return results
def _dapricot_patch_target_success(y, y_pred, iou_threshold=0.1, conf_threshold=0.5):
    """Return 1 if any sufficiently-confident prediction of the target class
    overlaps the ground-truth patch box (IOU > ``iou_threshold``), else 0."""
    target_label = int(y["labels"])
    target_box = y["boxes"].reshape((4,))
    for idx in range(y_pred["scores"].size):
        # Skip low-confidence predictions (strict > matches the score filter).
        if not y_pred["scores"][idx] > conf_threshold:
            continue
        # Only predictions of the attack's targeted class count.
        if y_pred["labels"][idx] != target_label:
            continue
        iou = _intersection_over_union(y_pred["boxes"][idx], target_box)
        if iou > iou_threshold:
            return 1
    return 0
# Registry mapping metric names (as referenced by configuration) to the metric
# implementations defined in this module.
SUPPORTED_METRICS = {
    "dapricot_patch_target_success": dapricot_patch_target_success,
    "dapricot_patch_targeted_AP_per_class": dapricot_patch_targeted_AP_per_class,
    "apricot_patch_targeted_AP_per_class": apricot_patch_targeted_AP_per_class,
    "categorical_accuracy": categorical_accuracy,
    "top_n_categorical_accuracy": top_n_categorical_accuracy,
    "top_5_categorical_accuracy": top_5_categorical_accuracy,
    "norm": norm,
    "l0": l0,
    "l1": l1,
    "l2": l2,
    "lp": lp,
    "linf": linf,
    "snr": snr,
    "snr_db": snr_db,
    "snr_spectrogram": snr_spectrogram,
    "snr_spectrogram_db": snr_spectrogram_db,
    "image_circle_patch_diameter": image_circle_patch_diameter,
    "mars_mean_l2": mars_mean_l2,
    "mars_mean_patch": mars_mean_patch,
    "word_error_rate": word_error_rate,
    "char_error_rate": char_error_rate,
    "object_detection_AP_per_class": object_detection_AP_per_class,
}
# Image-based metrics applied to video
def video_metric(metric, frame_average="mean"):
mapping = {
"mean": np.mean,
"max": np.max,
"min": np.min,
}
if frame_average not in mapping:
raise ValueError(f"frame_average {frame_average} not in {tuple(mapping)}")
frame_average_func = mapping[frame_average]
def func(x, | |
# Copyright 2016 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the :class:`MorphologiesController` and its auxiliary functions.
.. module:: morphologies
:synopsis: Contains the morphologies controller and its auxiliary functions.
"""
import logging
import simplejson as json
import os
import cPickle
from uuid import uuid4
import codecs
from paste.fileapp import FileApp
from pylons.controllers.util import forward
from pylons import request, response, session, config
from formencode.validators import Invalid
from onlinelinguisticdatabase.lib.base import BaseController
from onlinelinguisticdatabase.lib.schemata import MorphologySchema, MorphemeSequencesSchema
import onlinelinguisticdatabase.lib.helpers as h
from onlinelinguisticdatabase.lib.SQLAQueryBuilder import SQLAQueryBuilder, OLDSearchParseError
from onlinelinguisticdatabase.model.meta import Session
from onlinelinguisticdatabase.model import Morphology, MorphologyBackup
from onlinelinguisticdatabase.lib.foma_worker import foma_worker_q
log = logging.getLogger(__name__)
class MorphologiesController(BaseController):
"""Generate responses to requests on morphology resources.
A morphology, as here conceived, is an FST that is both a recognizer and a transducer, i.e.,
it recognizes only those sequences of morphemes that are form valid words and it maps sequences
of morphemes (in the general sense) to sequences of morpheme *forms*. By a morpheme in the general
sense, I mean to refer to ordered pairs of morpheme form and morpheme gloss. That is, an OLD
morphology is an FST that maps something like 'chien|dog-s|PL' to 'chien-s' (and vice versa) and
which does not recognize 's|PL-chien|dog'.
REST Controller styled on the Atom Publishing Protocol.
.. note::
The ``h.jsonify`` decorator converts the return value of the methods to
JSON.
TODO: consider generating values for ``lexicon_script`` and ``rules_script`` attributes
which, by default, are concatenated to produce a value for the ``script`` attribute but
where such default auto-generation can be overridden by the user so that, for example, the
auto-generated subscripts could be used to hand-write a more intelligent morphology FST script.
"""
query_builder = SQLAQueryBuilder('Morphology', config=config)
    @h.jsonify
    @h.restrict('SEARCH', 'POST')
    @h.authenticate
    def search(self):
        """Return the list of morphology resources matching the input JSON
        query.
        :URL: ``SEARCH /morphologies`` (or ``POST /morphologies/search``)
        :request body: A JSON object of the form::
            {"query": {"filter": [ ... ], "order_by": [ ... ]},
             "paginator": { ... }}
            where the ``order_by`` and ``paginator`` attributes are optional.
        :returns: paginated query results on success; an error dict with a 400
            status when the request body or search expression is invalid.
        """
        try:
            # Python 2: request.body is a bytestring; decode using the request charset.
            json_search_params = unicode(request.body, request.charset)
            python_search_params = json.loads(json_search_params)
            query = self.query_builder.get_SQLA_query(python_search_params.get('query'))
            return h.add_pagination(query, python_search_params.get('paginator'))
        except h.JSONDecodeError:
            response.status_int = 400
            return h.JSONDecodeErrorResponse
        except (OLDSearchParseError, Invalid), e:  # Python 2 except-comma syntax
            response.status_int = 400
            return {'errors': e.unpack_errors()}
        # NOTE(review): bare except deliberately maps any other query-construction
        # failure to a 400 rather than a 500; consider narrowing if the failure
        # modes become known.
        except:
            response.status_int = 400
            return {'error': u'The specified search parameters generated an invalid database query'}
@h.jsonify
@h.restrict('GET')
@h.authenticate
def new_search(self):
"""Return the data necessary to search the morphology resources.
:URL: ``GET /morphologies/new_search``
:returns: ``{"search_parameters": {"attributes": { ... }, "relations": { ... }}``
"""
return {'search_parameters': h.get_search_parameters(self.query_builder)}
    @h.jsonify
    @h.restrict('GET')
    @h.authenticate
    def index(self):
        """Get all morphology resources.
        :URL: ``GET /morphologies`` with optional query string parameters for
            ordering and pagination.
        :returns: a list of all morphology resources.
        .. note::
            See :func:`utils.add_order_by` and :func:`utils.add_pagination` for the
            query string parameters that effect ordering and pagination.
        """
        try:
            # Eager-load related rows to avoid N+1 queries when serializing.
            query = h.eagerload_morphology(Session.query(Morphology))
            query = h.add_order_by(query, dict(request.GET), self.query_builder)
            return h.add_pagination(query, dict(request.GET))
        except Invalid, e:  # Python 2 except-comma syntax
            response.status_int = 400
            return {'errors': e.unpack_errors()}
    @h.jsonify
    @h.restrict('POST')
    @h.authenticate
    @h.authorize(['administrator', 'contributor'])
    def create(self):
        """Create a new morphology resource and return it.
        :URL: ``POST /morphologies``
        :request body: JSON object representing the morphology to create.
        :returns: the newly created morphology; an error dict with a 400 status
            when the request body is not valid JSON or fails schema validation.
        """
        try:
            schema = MorphologySchema()
            # Python 2: decode the raw request body before JSON parsing.
            values = json.loads(unicode(request.body, request.charset))
            data = schema.to_python(values)
            morphology = create_new_morphology(data)
            Session.add(morphology)
            # Commit first so the morphology has an id before its directory is made.
            Session.commit()
            morphology.make_directory_safely(morphology.directory)
            return morphology
        except h.JSONDecodeError:
            response.status_int = 400
            return h.JSONDecodeErrorResponse
        except Invalid, e:  # Python 2 except-comma syntax
            response.status_int = 400
            return {'errors': e.unpack_errors()}
@h.jsonify
@h.restrict('GET')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def new(self):
"""Return the data necessary to create a new morphology.
:URL: ``GET /morphologies/new``.
:returns: a dictionary containing summarizing the corpora.
"""
return get_data_for_new_edit(dict(request.GET))
    @h.jsonify
    @h.restrict('PUT')
    @h.authenticate
    @h.authorize(['administrator', 'contributor'])
    def update(self, id):
        """Update a morphology and return it.
        :URL: ``PUT /morphologies/id``
        :Request body: JSON object representing the morphology with updated attribute values.
        :param str id: the ``id`` value of the morphology to be updated.
        :returns: the updated morphology model; 400 when the data are invalid or
            unchanged; 404 when no morphology has the given id.
        """
        morphology = h.eagerload_morphology(Session.query(Morphology)).get(int(id))
        if morphology:
            try:
                schema = MorphologySchema()
                # Python 2: decode the raw request body before JSON parsing.
                values = json.loads(unicode(request.body, request.charset))
                state = h.get_state_object(values)
                state.id = id
                data = schema.to_python(values, state)
                # Snapshot the pre-update state so it can be backed up below.
                morphology_dict = morphology.get_dict()
                morphology = update_morphology(morphology, data)
                # morphology will be False if there are no changes (cf. update_morphology).
                if morphology:
                    backup_morphology(morphology_dict)
                    Session.add(morphology)
                    Session.commit()
                    return morphology
                else:
                    response.status_int = 400
                    return {'error':
                        u'The update request failed because the submitted data were not new.'}
            except h.JSONDecodeError:
                response.status_int = 400
                return h.JSONDecodeErrorResponse
            except Invalid, e:  # Python 2 except-comma syntax
                response.status_int = 400
                return {'errors': e.unpack_errors()}
        else:
            response.status_int = 404
            return {'error': 'There is no morphology with id %s' % id}
@h.jsonify
@h.restrict('DELETE')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def delete(self, id):
"""Delete an existing morphology and return it.
:URL: ``DELETE /morphologies/id``
:param str id: the ``id`` value of the morphology to be deleted.
:returns: the deleted morphology model.
"""
morphology = h.eagerload_morphology(Session.query(Morphology)).get(id)
if morphology:
morphology_dict = morphology.get_dict()
backup_morphology(morphology_dict)
Session.delete(morphology)
Session.commit()
morphology.remove_directory()
return morphology
else:
response.status_int = 404
return {'error': 'There is no morphology with id %s' % id}
@h.jsonify
@h.restrict('GET')
@h.authenticate
def show(self, id):
"""Return a morphology.
:URL: ``GET /morphologies/id``
:param str id: the ``id`` value of the morphology to be returned.
:GET param str script: if set to '1', the script will be returned with the morphology
:GET param str lexicon: if set to '1', the lexicon (dict) will be returned with the morphology
:returns: a morphology model object.
"""
morphology = h.eagerload_morphology(Session.query(Morphology)).get(id)
if morphology:
morphology_dict = morphology.get_dict()
if request.GET.get('script') == u'1':
morphology_script_path = morphology.get_file_path('script')
if os.path.isfile(morphology_script_path):
morphology_dict['script'] = codecs.open(morphology_script_path, mode='r', encoding='utf8').read()
else:
morphology_dict['script'] = u''
if request.GET.get('lexicon') == u'1':
morphology_lexicon_path = morphology.get_file_path('lexicon')
if os.path.isfile(morphology_lexicon_path):
morphology_dict['lexicon'] = cPickle.load(open(morphology_lexicon_path, 'rb'))
else:
morphology_dict['lexicon'] = {}
return morphology_dict
else:
response.status_int = 404
return {'error': 'There is no morphology with id %s' % id}
@h.jsonify
@h.restrict('GET')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def edit(self, id):
"""Return a morphology and the data needed to update it.
:URL: ``GET /morphologies/id/edit``
:param str id: the ``id`` value of the morphology that will be updated.
:returns: a dictionary of the form::
{"morphology": {...}, "data": {...}}
where the value of the ``morphology`` key is a dictionary
representation of the morphology and the value of the ``data`` key
is a list of corpora in the database.
"""
morphology = h.eagerload_morphology(Session.query(Morphology)).get(id)
if morphology:
return {'data': get_data_for_new_edit(dict(request.GET)), 'morphology': morphology}
else:
response.status_int = 404
return {'error': 'There is no morphology with id %s' % id}
@h.jsonify
@h.restrict('GET')
@h.authenticate
def history(self, id):
"""Return the morphology with ``morphology.id==id`` and its previous versions.
:URL: ``GET /morphologies/history/id``
:param str id: a string matching the ``id`` or ``UUID`` value of the
morphology whose history is requested.
:returns: A dictionary of the form::
{"morphology": { ... }, "previous_versions": [ ... ]}
where the value of the ``morphology`` key is the morphology whose
history is requested and the value of the ``previous_versions`` key
is a list of dictionaries representing previous versions of the
morphology.
"""
morphology, previous_versions = h.get_model_and_previous_versions('Morphology', id)
if morphology or previous_versions:
return {'morphology': morphology,
'previous_versions': previous_versions}
else:
response.status_int = 404
return {'error': 'No morphologies or morphology backups match %s' % id}
@h.jsonify
@h.restrict('PUT')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def generate_and_compile(self, id):
"""Generate the morphology's script and compile it as a foma FST.
:URL: ``PUT /morphologies/compile/id``
:param str id: the ``id`` value of the morphology whose script will be compiled.
:returns: if the morphology exists and foma is installed, the morphology
model is returned; ``GET /morphologies/id`` must be polled to
determine when and how the compilation task has terminated.
.. note::
The script is compiled asynchronously in a worker thread. See
:mod:`onlinelinguisticdatabase.lib.foma_worker`.
"""
return generate_and_compile_morphology(id)
@h.jsonify
@h.restrict('PUT')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def generate(self, id):
"""Generate the morphology's script -- do not compile it.
:URL: ``PUT /morphologies/compile/id``
:param str id: the ``id`` value of the morphology whose script will be compiled.
:returns: if | |
import hail as hl
from hail.typecheck import typecheck, sequenceof
from hail.expr.expressions import expr_str, expr_call, expr_locus, expr_array
from typing import List
@typecheck(locus=expr_locus(),
           alleles=expr_array(expr_str),
           proband_call=expr_call,
           father_call=expr_call,
           mother_call=expr_call)
def phase_by_transmission(
        locus: hl.expr.LocusExpression,
        alleles: hl.expr.ArrayExpression,
        proband_call: hl.expr.CallExpression,
        father_call: hl.expr.CallExpression,
        mother_call: hl.expr.CallExpression
) -> hl.expr.ArrayExpression:
    """Phases genotype calls in a trio based allele transmission.

    Notes
    -----
    In the phased calls returned, the order is as follows:
    - Proband: father_allele | mother_allele
    - Parents: transmitted_allele | untransmitted_allele

    Phasing of sex chromosomes:
    - Sex chromosomes of male individuals should be haploid to be phased correctly.
    - If `proband_call` is diploid on non-par regions of the sex chromosomes, it is assumed to be female.

    Returns `NA` when genotype calls cannot be phased.
    The following genotype calls combinations cannot be phased by transmission:
    1. One of the calls in the trio is missing
    2. The proband genotype cannot be obtained from the parents alleles (Mendelian violation)
    3. All individuals of the trio are heterozygous for the same two alleles
    4. Father is diploid on non-PAR region of X or Y
    5. Proband is diploid on non-PAR region of Y

    In addition, individual phased genotype calls are returned as missing in the following situations:
    1. All mother genotype calls non-PAR region of Y
    2. Diploid father genotype calls on non-PAR region of X for a male proband (proband and mother are still phased as father doesn't participate in allele transmission)

    Note
    ----
    :func:`~.phase_trio_matrix_by_transmission` provides a convenience wrapper for phasing a trio matrix.

    Parameters
    ----------
    locus : :class:`.LocusExpression`
        Expression for the locus in the trio matrix
    alleles : :class:`.ArrayExpression`
        Expression for the alleles in the trio matrix
    proband_call : :class:`.CallExpression`
        Expression for the proband call in the trio matrix
    father_call : :class:`.CallExpression`
        Expression for the father call in the trio matrix
    mother_call : :class:`.CallExpression`
        Expression for the mother call in the trio matrix

    Returns
    -------
    :class:`.ArrayExpression`
        Array containing: [phased proband call, phased father call, phased mother call]"""

    def call_to_one_hot_alleles_array(call: hl.expr.CallExpression, alleles: hl.expr.ArrayExpression) -> hl.expr.ArrayExpression:
        """
        Get the set of all different one-hot-encoded allele-vectors in a genotype call.
        It is returned as an ordered array where the first vector corresponds to the first allele,
        and the second vector (only present if het) the second allele.
        :param CallExpression call: genotype
        :param ArrayExpression alleles: Alleles at the site
        :return: Array of one-hot-encoded alleles
        :rtype: ArrayExpression
        """
        # For a het call both alleles are candidates for transmission, so both
        # one-hot vectors are kept (in call order); otherwise one suffices.
        return hl.if_else(
            call.is_het(),
            hl.array([
                hl.call(call[0]).one_hot_alleles(alleles),
                hl.call(call[1]).one_hot_alleles(alleles),
            ]),
            hl.array([hl.call(call[0]).one_hot_alleles(alleles)])
        )

    def phase_parent_call(call: hl.expr.CallExpression, transmitted_allele_index: int):
        """
        Given a genotype and which allele was transmitted to the offspring, returns the parent phased genotype.
        :param CallExpression call: Parent genotype
        :param int transmitted_allele_index: index of transmitted allele (0 or 1)
        :return: Phased parent genotype
        :rtype: CallExpression
        """
        # Phased order is transmitted | untransmitted; the untransmitted index
        # is the "other" one (1 when transmitted is 0, and vice versa).
        return hl.call(
            call[transmitted_allele_index],
            call[hl.int(transmitted_allele_index == 0)],
            phased=True
        )

    def phase_diploid_proband(
            locus: hl.expr.LocusExpression,
            alleles: hl.expr.ArrayExpression,
            proband_call: hl.expr.CallExpression,
            father_call: hl.expr.CallExpression,
            mother_call: hl.expr.CallExpression
    ) -> hl.expr.ArrayExpression:
        """
        Returns phased genotype calls in the case of a diploid proband
        (autosomes, PAR regions of sex chromosomes or non-PAR regions of a female proband)
        :param LocusExpression locus: Locus in the trio MatrixTable
        :param ArrayExpression alleles: Alleles in the trio MatrixTable
        :param CallExpression proband_call: Input proband genotype call
        :param CallExpression father_call: Input father genotype call
        :param CallExpression mother_call: Input mother genotype call
        :return: Array containing: phased proband call, phased father call, phased mother call
        :rtype: ArrayExpression
        """
        proband_v = proband_call.one_hot_alleles(alleles)
        # On X/Y non-PAR the father must be haploid; a diploid father there
        # makes father_v missing, so the whole combination becomes unphasable.
        father_v = hl.if_else(
            locus.in_x_nonpar() | locus.in_y_nonpar(),
            hl.or_missing(father_call.is_haploid(), hl.array([father_call.one_hot_alleles(alleles)])),
            call_to_one_hot_alleles_array(father_call, alleles)
        )
        mother_v = call_to_one_hot_alleles_array(mother_call, alleles)

        # Find every (mother allele, father allele) pair whose one-hot vectors
        # sum to the proband's one-hot genotype vector.
        combinations = hl.flatmap(
            lambda f:
            hl.enumerate(mother_v)
            .filter(lambda m: m[1] + f[1] == proband_v)
            .map(lambda m: hl.struct(m=m[0], f=f[0])),
            hl.enumerate(father_v)
        )

        # Phase only when exactly one combination explains the proband
        # (zero => Mendelian violation; >1 => ambiguous, e.g. all trio het).
        return (
            hl.or_missing(
                hl.is_defined(combinations) & (hl.len(combinations) == 1),
                hl.array([
                    hl.call(father_call[combinations[0].f], mother_call[combinations[0].m], phased=True),
                    hl.if_else(father_call.is_haploid(), hl.call(father_call[0], phased=True), phase_parent_call(father_call, combinations[0].f)),
                    phase_parent_call(mother_call, combinations[0].m)
                ])
            )
        )

    def phase_haploid_proband_x_nonpar(
            proband_call: hl.expr.CallExpression,
            father_call: hl.expr.CallExpression,
            mother_call: hl.expr.CallExpression
    ) -> hl.expr.ArrayExpression:
        """
        Returns phased genotype calls in the case of a haploid proband in the non-PAR region of X
        :param CallExpression proband_call: Input proband genotype call
        :param CallExpression father_call: Input father genotype call
        :param CallExpression mother_call: Input mother genotype call
        :return: Array containing: phased proband call, phased father call, phased mother call
        :rtype: ArrayExpression
        """
        # The haploid (male) proband's X allele must come from the mother.
        transmitted_allele = hl.enumerate(hl.array([mother_call[0], mother_call[1]])).find(lambda m: m[1] == proband_call[0])
        return hl.or_missing(
            hl.is_defined(transmitted_allele),
            hl.array([
                hl.call(proband_call[0], phased=True),
                # Father does not transmit here; his call is kept only if haploid.
                hl.or_missing(father_call.is_haploid(), hl.call(father_call[0], phased=True)),
                phase_parent_call(mother_call, transmitted_allele[0])
            ])
        )

    def phase_y_nonpar(
            proband_call: hl.expr.CallExpression,
            father_call: hl.expr.CallExpression,
    ) -> hl.expr.ArrayExpression:
        """
        Returns phased genotype calls in the non-PAR region of Y (requires both father and proband to be haploid to return phase)
        :param CallExpression proband_call: Input proband genotype call
        :param CallExpression father_call: Input father genotype call
        :return: Array containing: phased proband call, phased father call, phased mother call
        :rtype: ArrayExpression
        """
        # Y non-PAR: father-to-son transmission only; mother's slot is missing.
        return hl.or_missing(
            proband_call.is_haploid() & father_call.is_haploid() & (father_call[0] == proband_call[0]),
            hl.array([
                hl.call(proband_call[0], phased=True),
                hl.call(father_call[0], phased=True),
                hl.missing(hl.tcall)
            ])
        )

    # Dispatch: haploid proband on X non-PAR, then Y non-PAR, then the general
    # diploid case; every other combination is unphasable and returns missing.
    return (
        hl.case()
        .when(locus.in_x_nonpar() & proband_call.is_haploid(), phase_haploid_proband_x_nonpar(proband_call, father_call, mother_call))
        .when(locus.in_y_nonpar(), phase_y_nonpar(proband_call, father_call))
        .when(proband_call.is_diploid(), phase_diploid_proband(locus, alleles, proband_call, father_call, mother_call))
        .or_missing()
    )
@typecheck(tm=hl.MatrixTable,
           call_field=str,
           phased_call_field=str)
def phase_trio_matrix_by_transmission(tm: hl.MatrixTable, call_field: str = 'GT', phased_call_field: str = 'PBT_GT') -> hl.MatrixTable:
    """Annotate each entry of a trio MatrixTable with a transmission-phased genotype.

    Example
    -------
    >>> # Create a trio matrix
    >>> pedigree = hl.Pedigree.read('data/case_control_study.fam')
    >>> trio_dataset = hl.trio_matrix(dataset, pedigree, complete_trios=True)
    >>> # Phase trios by transmission
    >>> phased_trio_dataset = phase_trio_matrix_by_transmission(trio_dataset)

    Notes
    -----
    Only the `Call` field named by `call_field` is used for phasing, and phasing
    is only attempted when all 3 trio members are present and have a call.

    In the phased genotypes, the order is as follows:
    - Proband: father_allele | mother_allele
    - Parents: transmitted_allele | untransmitted_allele

    Phasing of sex chromosomes:
    - Sex chromosomes of male individuals should be haploid to be phased correctly.
    - A proband that is diploid on non-par regions of the sex chromosomes is assumed to be female.

    Genotypes that cannot be phased are set to `NA`; see
    :func:`~.phase_by_transmission` for the exhaustive list of genotype
    combinations that cannot be phased by transmission and of the situations in
    which individual phased calls are returned as missing.

    Parameters
    ----------
    tm : :class:`.MatrixTable`
        Trio MatrixTable (entries have to be a Struct with `proband_entry`, `mother_entry` and `father_entry` present)
    call_field : str
        genotype field name in the matrix entries to use for phasing
    phased_call_field : str
        name for the phased genotype field in the matrix entries

    Returns
    -------
    :class:`.MatrixTable`
        Trio MatrixTable entry with additional phased genotype field for each individual"""
    phased_calls = phase_by_transmission(
        tm.locus,
        tm.alleles,
        tm.proband_entry[call_field],
        tm.father_entry[call_field],
        tm.mother_entry[call_field]
    )
    tm = tm.annotate_entries(__phased_GT=phased_calls)
    # phase_by_transmission returns [proband, father, mother]; fold each phased
    # call back into the corresponding member's entry struct under
    # `phased_call_field`, dropping the temporary annotation.
    members = ('proband_entry', 'father_entry', 'mother_entry')
    new_entries = {
        member: hl.struct(**tm[member], **{phased_call_field: tm.__phased_GT[i]})
        for i, member in enumerate(members)
    }
    return tm.select_entries(**new_entries)
@typecheck(tm=hl.MatrixTable,
col_keys=sequenceof(str),
keep_trio_cols=bool,
keep_trio_entries=bool)
def explode_trio_matrix(tm: hl.MatrixTable, col_keys: List[str] = ['s'], keep_trio_cols: bool = True, keep_trio_entries: bool = False) -> hl.MatrixTable:
"""Splits a trio MatrixTable back into a sample MatrixTable.
Example
-------
>>> # Create a trio matrix from a sample matrix
>>> pedigree = hl.Pedigree.read('data/case_control_study.fam')
>>> trio_dataset = hl.trio_matrix(dataset, pedigree, complete_trios=True)
>>> # Explode trio matrix back into a sample matrix
>>> exploded_trio_dataset = explode_trio_matrix(trio_dataset)
Notes
-----
The resulting MatrixTable column schema is the same as the proband/father/mother schema,
and the resulting entry schema is the same as the proband_entry/father_entry/mother_entry schema.
If the `keep_trio_cols` option is set, then an additional `source_trio` column is added with the trio column data.
If the `keep_trio_entries` option is set, then an additional `source_trio_entry` column is added with the trio entry data.
Note
----
This assumes that the input MatrixTable is a trio MatrixTable (similar to
the result of :func:`~.trio_matrix`) Its entry schema has to contain
| |
keypoints_equal(keypoints_aug, keypoints_lr):
nb_keypoints_if_branch += 1
elif keypoints_equal(keypoints_aug, keypoints):
nb_keypoints_else_branch += 1
else:
raise Exception("Received output doesnt match any expected output.")
assert (0.50 - 0.10) <= nb_images_if_branch / nb_iterations <= (0.50 + 0.10)
assert (0.50 - 0.10) <= nb_images_else_branch / nb_iterations <= (0.50 + 0.10)
assert (0.50 - 0.10) <= nb_keypoints_if_branch / nb_iterations <= (0.50 + 0.10)
assert (0.50 - 0.10) <= nb_keypoints_else_branch / nb_iterations <= (0.50 + 0.10)
assert (0.50 - 0.10) <= (1 - (nb_changed_aug / nb_iterations)) <= (0.50 + 0.10) # should be the same in roughly 50% of all cases
assert nb_changed_aug_det == 0
# p as stochastic parameter
image = np.zeros((1, 1), dtype=np.uint8) + 100
images = [image] * 10
aug = iaa.Sometimes(p=iap.Binomial(iap.Choice([0.0, 1.0])), then_list=iaa.Add(10))
seen = [0, 0]
for _ in sm.xrange(100):
observed = aug.augment_images(images)
uq = np.unique(np.uint8(observed))
assert len(uq) == 1
if uq[0] == 100:
seen[0] += 1
elif uq[0] == 110:
seen[1] += 1
else:
assert False
assert seen[0] > 20
assert seen[1] > 20
# bad datatype for p
got_exception = False
try:
aug = iaa.Sometimes(p=False)
except Exception as exc:
assert "Expected " in str(exc)
got_exception = True
assert got_exception
# both lists none
aug = iaa.Sometimes(0.2, then_list=None, else_list=None)
image = np.random.randint(0, 255, size=(16, 16), dtype=np.uint8)
observed = aug.augment_image(image)
assert np.array_equal(observed, image)
# then_list bad datatype
got_exception = False
try:
aug = iaa.Sometimes(p=0.2, then_list=False)
except Exception as exc:
assert "Expected " in str(exc)
got_exception = True
assert got_exception
# else_list bad datatype
got_exception = False
try:
aug = iaa.Sometimes(p=0.2, then_list=None, else_list=False)
except Exception as exc:
assert "Expected " in str(exc)
got_exception = True
assert got_exception
# deactivated propagation via hooks
image = np.random.randint(0, 255-10, size=(16, 16), dtype=np.uint8)
aug = iaa.Sometimes(1.0, iaa.Add(10))
observed1 = aug.augment_image(image)
observed2 = aug.augment_image(image, hooks=ia.HooksImages(propagator=lambda images, augmenter, parents, default: False if augmenter == aug else default))
assert np.array_equal(observed1, image + 10)
assert np.array_equal(observed2, image)
# get_parameters
aug = iaa.Sometimes(0.75)
params = aug.get_parameters()
assert isinstance(params[0], iap.Binomial)
assert isinstance(params[0].p, iap.Deterministic)
assert 0.75 - 1e-8 < params[0].p.value < 0.75 + 1e-8
# str/repr
then_list = iaa.Add(1)
else_list = iaa.Add(2)
aug = iaa.Sometimes(0.5, then_list=then_list, else_list=else_list, name="SometimesTest")
expected = "Sometimes(p=%s, name=%s, then_list=%s, else_list=%s, deterministic=%s)" % (
"Binomial(Deterministic(float 0.50000000))",
"SometimesTest",
"Sequential(name=SometimesTest-then, random_order=False, children=[%s], deterministic=False)" % (str(then_list),),
"Sequential(name=SometimesTest-else, random_order=False, children=[%s], deterministic=False)" % (str(else_list),),
"False"
)
assert aug.__repr__() == aug.__str__() == expected
aug = iaa.Sometimes(0.5, then_list=None, else_list=None, name="SometimesTest")
expected = "Sometimes(p=%s, name=%s, then_list=%s, else_list=%s, deterministic=%s)" % (
"Binomial(Deterministic(float 0.50000000))",
"SometimesTest",
"Sequential(name=SometimesTest-then, random_order=False, children=[], deterministic=False)",
"Sequential(name=SometimesTest-else, random_order=False, children=[], deterministic=False)",
"False"
)
assert aug.__repr__() == aug.__str__() == expected
# Test for https://github.com/aleju/imgaug/issues/143
# (shapes change in child augmenters, leading to problems if input arrays are assumed to
# stay input arrays)
image = np.zeros((8, 8, 3), dtype=np.uint8)
aug = iaa.Sometimes(
0.5,
iaa.Crop((2, 0, 2, 0), keep_size=False),
iaa.Crop((1, 0, 1, 0), keep_size=False)
)
for _ in sm.xrange(10):
observed = aug.augment_images(np.uint8([image, image, image, image]))
assert isinstance(observed, list) or (ia.is_np_array(observed) and len(set([img.shape for img in observed])) == 1)
assert all([img.shape in [(4, 8, 3), (6, 8, 3)] for img in observed])
observed = aug.augment_images([image, image, image, image])
assert isinstance(observed, list)
assert all([img.shape in [(4, 8, 3), (6, 8, 3)] for img in observed])
observed = aug.augment_images(np.uint8([image]))
assert isinstance(observed, list) or (ia.is_np_array(observed) and len(set([img.shape for img in observed])) == 1)
assert all([img.shape in [(4, 8, 3), (6, 8, 3)] for img in observed])
observed = aug.augment_images([image])
assert isinstance(observed, list)
assert all([img.shape in [(4, 8, 3), (6, 8, 3)] for img in observed])
observed = aug.augment_image(image)
assert ia.is_np_array(image)
assert observed.shape in [(4, 8, 3), (6, 8, 3)]
image = np.zeros((32, 32, 3), dtype=np.uint8)
aug = iaa.Sometimes(
0.5,
iaa.Crop(((1, 4), 0, (1, 4), 0), keep_size=False),
iaa.Crop(((4, 8), 0, (4, 8), 0), keep_size=False)
)
for _ in sm.xrange(10):
observed = aug.augment_images(np.uint8([image, image, image, image]))
assert isinstance(observed, list) or (ia.is_np_array(observed) and len(set([img.shape for img in observed])) == 1)
assert all([16 <= img.shape[0] <= 30 and img.shape[1:] == (32, 3) for img in observed])
observed = aug.augment_images([image, image, image, image])
assert isinstance(observed, list)
assert all([16 <= img.shape[0] <= 30 and img.shape[1:] == (32, 3) for img in observed])
observed = aug.augment_images(np.uint8([image]))
assert isinstance(observed, list) or (ia.is_np_array(observed) and len(set([img.shape for img in observed])) == 1)
assert all([16 <= img.shape[0] <= 30 and img.shape[1:] == (32, 3) for img in observed])
observed = aug.augment_images([image])
assert isinstance(observed, list)
assert all([16 <= img.shape[0] <= 30 and img.shape[1:] == (32, 3) for img in observed])
observed = aug.augment_image(image)
assert ia.is_np_array(image)
assert 16 <= observed.shape[0] <= 30 and observed.shape[1:] == (32, 3)
image = np.zeros((8, 8, 3), dtype=np.uint8)
aug = iaa.Sometimes(
0.5,
iaa.Crop((2, 0, 2, 0), keep_size=True),
iaa.Crop((1, 0, 1, 0), keep_size=True)
)
for _ in sm.xrange(10):
observed = aug.augment_images(np.uint8([image, image, image, image]))
assert ia.is_np_array(observed)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images([image, image, image, image])
assert isinstance(observed, list)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images(np.uint8([image]))
assert ia.is_np_array(observed)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images([image])
assert isinstance(observed, list)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_image(image)
assert ia.is_np_array(observed)
assert observed.shape in [(8, 8, 3)]
image = np.zeros((8, 8, 3), dtype=np.uint8)
aug = iaa.Sometimes(
0.5,
iaa.Crop(((1, 4), 0, (1, 4), 0), keep_size=True),
iaa.Crop(((4, 8), 0, (4, 8), 0), keep_size=True)
)
for _ in sm.xrange(10):
observed = aug.augment_images(np.uint8([image, image, image, image]))
assert ia.is_np_array(observed)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images([image, image, image, image])
assert isinstance(observed, list)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images(np.uint8([image]))
assert ia.is_np_array(observed)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_images([image])
assert isinstance(observed, list)
assert all([img.shape in [(8, 8, 3)] for img in observed])
observed = aug.augment_image(image)
assert ia.is_np_array(observed)
assert observed.shape in [(8, 8, 3)]
def test_WithChannels():
    """Test iaa.WithChannels: applying child augmenters to a subset of image channels.

    Covers: all channels (None), single channels, lists of children, array and
    list batches, empty channel/children lists, invalid argument types, and the
    get_parameters/get_children_lists/repr introspection API.
    """
    # two-channel test image with distinct per-channel values so that
    # channel-selective augmentation is observable
    base_img = np.zeros((3, 3, 2), dtype=np.uint8)
    base_img[..., 0] += 100
    base_img[..., 1] += 200
    # channels=None means "apply to all channels"
    aug = iaa.WithChannels(None, iaa.Add(10))
    observed = aug.augment_image(base_img)
    expected = base_img + 10
    assert np.allclose(observed, expected)
    # only channel 0 is augmented
    aug = iaa.WithChannels(0, iaa.Add(10))
    observed = aug.augment_image(base_img)
    expected = np.copy(base_img)
    expected[..., 0] += 10
    assert np.allclose(observed, expected)
    # only channel 1 is augmented
    aug = iaa.WithChannels(1, iaa.Add(10))
    observed = aug.augment_image(base_img)
    expected = np.copy(base_img)
    expected[..., 1] += 10
    assert np.allclose(observed, expected)
    # a list of children is applied in order to the selected channel
    base_img = np.zeros((3, 3, 2), dtype=np.uint8)
    base_img[..., 0] += 5
    base_img[..., 1] += 10
    aug = iaa.WithChannels(1, [iaa.Add(10), iaa.Multiply(2.0)])
    observed = aug.augment_image(base_img)
    expected = np.copy(base_img)
    expected[..., 1] += 10
    expected[..., 1] *= 2
    assert np.allclose(observed, expected)
    # multiple images, given as array
    images = np.concatenate([base_img[np.newaxis, ...], base_img[np.newaxis, ...]], axis=0)
    aug = iaa.WithChannels(1, iaa.Add(10))
    observed = aug.augment_images(images)
    expected = np.copy(images)
    expected[..., 1] += 10
    assert np.allclose(observed, expected)
    # multiple images, given as list
    images = [base_img, base_img]
    aug = iaa.WithChannels(1, iaa.Add(10))
    observed = aug.augment_images(images)
    expected = np.copy(base_img)
    expected[..., 1] += 10
    expected = [expected, expected]
    assert array_equal_lists(observed, expected)
    # children list is empty -> no-op
    aug = iaa.WithChannels(1, children=None)
    observed = aug.augment_image(base_img)
    expected = np.copy(base_img)
    assert np.array_equal(observed, expected)
    # channel list is empty -> no-op
    aug = iaa.WithChannels([], iaa.Add(10))
    observed = aug.augment_image(base_img)
    expected = np.copy(base_img)
    assert np.array_equal(observed, expected)
    # invalid datatype for channels
    got_exception = False
    try:
        aug = iaa.WithChannels(False, iaa.Add(10))
    except Exception as exc:
        assert "Expected " in str(exc)
        got_exception = True
    assert got_exception
    # invalid datatype for children
    got_exception = False
    try:
        aug = iaa.WithChannels(1, False)
    except Exception as exc:
        assert "Expected " in str(exc)
        got_exception = True
    assert got_exception
    # get_parameters
    aug = iaa.WithChannels([1], iaa.Add(10))
    params = aug.get_parameters()
    assert len(params) == 1
    assert params[0] == [1]
    # get_children_lists
    children = iaa.Sequential([iaa.Add(10)])
    aug = iaa.WithChannels(1, children)
    assert aug.get_children_lists() == [children]
    # repr/str
    children = iaa.Sequential([iaa.Noop()])
    aug = iaa.WithChannels(1, children, name="WithChannelsTest")
    expected = "WithChannels(channels=[1], name=WithChannelsTest, children=%s, deterministic=False)" % (str(children),)
    assert aug.__repr__() == aug.__str__() == expected
def test_2d_inputs():
"""Test whether inputs of 2D-images (i.e. (H, W) instead of (H, W, C)) work.
"""
reseed()
base_img1 = np.array([[0, 0, 1, 1],
[0, 0, 1, 1],
[0, 1, 1, 1]], dtype=np.uint8)
base_img2 = np.array([[0, | |
elif isinstance(inputstr, unicode):
instr = inputstr.encode("utf8")
else:
return -1
else:
if python.is_string(inputstr):
instr = inputstr
else:
return -1
h = 0x00000000
for i in range(0, len(instr)):
h = (h << 4) + ord(instr[i])
h ^= (h & 0xf0000000) >> 23
h &= 0x0fffffff
return h
def get_focus_widget():
    """
    Returns the widget that currently holds keyboard focus
    :return: variant, QWidget || None
    """
    focused = QApplication.focusWidget()
    return focused
def get_widget_at_mouse():
    """
    Returns the widget located under the current mouse cursor position
    :return: variant, QWidget || None
    """
    return QApplication.widgetAt(QtGui.QCursor().pos())
def is_valid_widget(widget):
    """
    Checks whether the given widget still wraps a live C++ object in the backend
    :param widget: QWidget
    :return: bool, True if the widget still has a C++ object, False otherwise
    """
    if widget is None:
        return False
    # Houdini does not ship the Shiboken library by default, so any failure
    # while querying shiboken is treated as "valid" rather than crashing.
    # TODO: When Houdini app class implemented, add cleaner way
    try:
        return bool(shiboken.isValid(widget))
    except Exception:
        return True
def close_and_cleanup(widget):
    """
    Safely closes a widget and schedules it for deletion
    NOTE: close() is only invoked when the widget is currently visible
    :param widget: QWidget, widget to delete and close
    """
    if not is_valid_widget(widget):
        return
    if widget.isVisible():
        widget.close()
    widget.deleteLater()
def get_string_input(message, title='Rename', old_name=None, parent=None):
    """
    Shows an input dialog that lets the user type a new string
    :param message: str, message to show in the dialog
    :param title: str, title of the input dialog
    :param old_name: str (optional): old name we are trying to rename
    :return: str, new name (None when the dialog is cancelled)
    """
    dialog = QInputDialog(parent)
    flags = dialog.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    if old_name:
        text, accepted = dialog.getText(None, title, message, text=old_name, flags=flags)
    else:
        text, accepted = dialog.getText(None, title, message, flags=flags)
    # backslashes are never valid in names handled by this dialog
    text = text.replace('\\', '_')
    if accepted:
        return str(text)
def get_comment(text_message='Add Comment', title='Save', comment_text='', parent=None):
    """
    Opens a dialog asking the user to type a comment
    :param parent: QWidget
    :param text_message: str, text to show before message input
    :param title: str, title of message dialog
    :param comment_text: str, default text for the comment
    :return: str, comment typed by the user (None when cancelled)
    """
    dialog = QInputDialog()
    flags = dialog.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    # Qt5 bindings provide a proper multi-line text input; older ones do not
    getter = dialog.getMultiLineText if (is_pyside2() or is_pyqt5()) else dialog.getText
    comment, accepted = getter(parent, title, text_message, flags=flags, text=comment_text)
    if accepted:
        return comment
def get_file(directory, parent=None):
    """
    Shows an open file dialog
    :param directory: str, root directory
    :param parent: QWidget
    :return: str, selected file or None if nothing is selected
    """
    dialog = QFileDialog(parent)
    if directory:
        dialog.setDirectory(directory)
    chosen = python.force_list(dialog.getOpenFileName())
    if chosen:
        return chosen
def get_folder(directory=None, title='Select Folder', show_files=False, parent=None):
    """
    Shows an open folder dialog
    :param directory: str, root directory
    :param title: str, select folder dialog title
    :param parent: QWidget
    :return: str, selected folder or None if no folder is selected
    """
    dialog = QFileDialog(parent)
    if show_files:
        # keep directory-selection mode but still display the files inside
        dialog.setFileMode(QFileDialog.DirectoryOnly)
        dialog.setOption(QFileDialog.ShowDirsOnly, False)
    if directory:
        dialog.setDirectory(directory)
    selected = dialog.getExistingDirectory(parent, title)
    if selected:
        return selected
def get_permission(message=None, cancel=True, title='Permission', parent=None):
    """
    Shows a permission message box and returns the user's answer
    :param message: str, message to show to the user
    :param cancel: bool, Whether the user can cancel the operation or not
    :param title: str, title of the window
    :param parent: QWidget
    :return: bool, True for Yes, False for No, None for Cancel/anything else
    """
    box = QMessageBox(parent=parent)
    box.setWindowTitle(title)
    flags = box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    if message:
        box.setText(message)
    buttons = QMessageBox.Yes | QMessageBox.No
    if cancel:
        buttons |= QMessageBox.Cancel
    box.setStandardButtons(buttons)
    box.setWindowFlags(flags)
    result = box.exec_()
    if result == QMessageBox.Yes:
        return True
    if result == QMessageBox.No:
        return False
    return None
def get_save_permission(message, file_path=None, title='Permission', parent=None):
    """
    Shows a save path message box.

    :param message: str, message to show to the user
    :param file_path: str, path you want to save
    :param title: str, title of the window
    :param parent: QWidget
    :return: bool, True to save, False to skip saving, None to cancel
    """
    message_box = QMessageBox(parent=parent)
    message_box.setWindowTitle(title)
    flags = message_box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    # BUG FIX: the message argument was previously never displayed
    message_box.setText(message)
    if file_path:
        path_message = 'Path: {}'.format(file_path)
        message_box.setInformativeText(path_message)
    message_box.setWindowFlags(flags)
    save = message_box.addButton('Save', QMessageBox.YesRole)
    no_save = message_box.addButton('Do not save', QMessageBox.NoRole)
    cancel = message_box.addButton('Cancel', QMessageBox.RejectRole)
    message_box.exec_()
    clicked = message_box.clickedButton()
    if clicked == save:
        return True
    elif clicked == no_save:
        return False
    elif clicked == cancel:
        return None
    return None
def get_line_layout(title, parent, *widgets):
    """
    Builds a horizontal layout containing an optional title label plus the given widgets
    :param parent: QWidget
    :param title: str
    :param widgets: list<QWidget>
    :return: QHBoxLayout
    """
    row = QHBoxLayout()
    row.setContentsMargins(1, 1, 1, 1)
    if title:
        row.addWidget(QLabel(title, parent))
    for item in widgets:
        if isinstance(item, QWidget):
            row.addWidget(item)
        elif isinstance(item, QLayout):
            row.addLayout(item)
    return row
def get_column_layout(*widgets):
    """
    Builds a vertical layout containing all given widgets
    :param widgets: list<QWidget>
    :return: QVBoxLayout
    """
    column = QVBoxLayout()
    for item in widgets:
        if isinstance(item, QWidget):
            column.addWidget(item)
        elif isinstance(item, QLayout):
            column.addLayout(item)
    return column
def get_top_level_widget(w):
    """
    Walks up the parent chain of the given widget and returns the top-most one
    :param w: QWidget
    :return: QWidget, ancestor with no parent (w itself if it has none)
    """
    current = w
    parent = current.parent()
    while parent:
        current = parent
        parent = current.parent()
    return current
def is_modifier():
    """
    Returns True if either the Alt key or Control key is down
    :return: bool
    """
    if is_alt_modifier():
        return True
    return is_control_modifier()
def is_alt_modifier():
    """
    Return True if the Alt key is down
    :return: bool
    """
    return QApplication.keyboardModifiers() == Qt.AltModifier
def is_control_modifier():
    """
    Returns True if the Control key is down
    :return: bool
    """
    return QApplication.keyboardModifiers() == Qt.ControlModifier
def is_shift_modifier():
    """
    Returns True if the Shift key is down
    :return: bool
    """
    return QApplication.keyboardModifiers() == Qt.ShiftModifier
def to_qt_object(long_ptr, qobj=None):
    """
    Returns an instance of the Maya UI element as a QWidget
    :param long_ptr: pointer to the UI element
    :param qobj: Qt class to wrap the pointer as (defaults to QWidget)
    """
    return wrapinstance(long_ptr, qobj or QWidget)
def critical_message(message, parent=None):
    """
    Shows a critical message
    :param message: str
    :param parent: QWidget
    """
    # NOTE(review): the given parent is deliberately discarded here -- confirm
    parent = None
    box = QMessageBox(parent)
    flags = box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    box.setWindowFlags(flags)
    box.critical(parent, 'Critical Error', message)
def warning_message(message, parent=None):
    """
    Shows a warning message
    :param message: str
    :param parent: QWidget
    """
    # NOTE(review): the given parent is deliberately discarded here -- confirm
    parent = None
    box = QMessageBox(parent)
    flags = box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    box.setWindowFlags(flags)
    box.warning(parent, 'Warning', message)
def info_message(message, parent=None):
    """
    Shows an informational message
    :param message: str
    :param parent: QWidget
    """
    # NOTE(review): the given parent is deliberately discarded here -- confirm
    parent = None
    box = QMessageBox(parent)
    flags = box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    box.setWindowFlags(flags)
    box.setText(message)
    box.exec_()
def about_message(message, parent=None):
    """
    Shows an about message
    :param message: str
    :param parent: QWidget
    """
    # NOTE(review): the given parent is deliberately discarded here -- confirm
    parent = None
    box = QMessageBox(parent)
    flags = box.windowFlags() ^ Qt.WindowContextHelpButtonHint | Qt.WindowStaysOnTopHint
    box.setWindowFlags(flags)
    box.about(parent, 'About', message)
def change_button_color(
        button,
        text_color=200, bg_color=68, hi_color=68,
        hi_text=255, hi_background=[97, 132, 167],
        ds_color=[255, 128, 128],
        mode='common',
        toggle=False, hover=True, destroy=False,
        ds_width=1):
    """
    Applies a style sheet to the given button based on the given colors.

    Colors may be given as a single channel value or as an RGB triplet
    (they are normalized through python.to_3_list).
    :param button: QPushButton to style
    :param text_color: text color
    :param bg_color: background color
    :param hi_color: highlight (pressed) color
    :param hi_text: highlighted text color
    :param hi_background: highlight background color (read-only default list)
    :param ds_color: border color (read-only default list)
    :param mode: str, 'common', 'button' or 'window' style sheet flavor
    :param toggle: bool, use hi_color as background when the button is checked
    :param hover: bool, brighten the background on hover
    :param destroy: bool, use the simplified "destroyed" button style sheet
    :param ds_width: int, border width in pixels
    """
    text_color = python.to_3_list(text_color)
    bg_color = python.to_3_list(bg_color)
    hi_color = python.to_3_list(hi_color)
    hi_text = python.to_3_list(hi_text)
    ds_color = python.to_3_list(ds_color)
    if toggle and button.isChecked():
        bg_color = hi_color
    if hover:
        # BUG FIX: map() returns a lazy iterator under Python 3, which breaks
        # color.convert_2_hex below; build an explicit list instead.
        hv_color = [channel + 20 for channel in bg_color]
    else:
        hv_color = bg_color
    text_hex = color.convert_2_hex(text_color)
    bg_hex = color.convert_2_hex(bg_color)
    hv_hex = color.convert_2_hex(hv_color)
    hi_hex = color.convert_2_hex(hi_color)
    ht_hex = color.convert_2_hex(hi_text)
    hb_hex = color.convert_2_hex(hi_background)
    ds_hex = color.convert_2_hex(ds_color)
    if mode == 'common':
        button.setStyleSheet('color: ' + text_hex + ' ; background-color: ' + bg_hex)
    elif mode == 'button':
        if not destroy:
            button.setStyleSheet(
                'QPushButton{'
                'background-color: ' + bg_hex + '; color: ' + text_hex + '; border-style:solid; border-width: ' + str(
                    ds_width) + 'px; border-color:' + ds_hex + '; border-radius: 0px;}' + 'QPushButton:hover{'
                'background-color: ' + hv_hex + '; color: ' + text_hex + '; border-style:solid; border-width: ' + str(
                    ds_width) + 'px; border-color:' + ds_hex + '; border-radius: 0px;}' + 'QPushButton:pressed{'
                'background-color: ' + hi_hex + '; color: ' + text_hex + '; border-style:solid; border-width: ' + str(
                    ds_width) + 'px; border-color:' + ds_hex + '; border-radius: 0px;}')
        else:
            button_style = 'QPushButton{background-color: ' + bg_hex + '; color: ' + text_hex + ' ; border: black 0px}'
            button_style += 'QPushButton:hover{background-color: ' + hv_hex + '; color: ' + text_hex
            button_style += ' ; border: black 0px}' + 'QPushButton:pressed{background-color: ' + hi_hex + '; color: '
            button_style += text_hex + '; border: black 2px}'
            button.setStyleSheet(button_style)
    elif mode == 'window':
        button_style = 'color: ' + text_hex + ';' + 'background-color: ' + bg_hex + ';' + 'selection-color: '
        button_style += ht_hex + ';' + 'selection-background-color: ' + hb_hex + ';'
        button.setStyleSheet(button_style)
def change_border_style(btn):
button_style = 'QPushButton{border-style:solid; border-width: 2px; border-color: red ; border-radius: 1px;}'
button_style | |
75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse6b0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse6c0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse6c0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse6d0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, u"Misc Conf0", wx.DefaultPosition, wx.Size( 75,20 ), wx.TE_PROCESS_ENTER )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse6d0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse6e0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, u"Misc Conf1", wx.DefaultPosition, wx.Size( 75,20 ), wx.TE_PROCESS_ENTER )
self.m_textCtrl_fuse6e0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse6e0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse6f0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse6f0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse700 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse700, 0, wx.ALL, 5 )
self.m_textCtrl_fuse710 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse710, 0, wx.ALL, 5 )
self.m_textCtrl_fuse720 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse720, 0, wx.ALL, 5 )
self.m_textCtrl_fuse730 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse730, 0, wx.ALL, 5 )
self.m_textCtrl_fuse740 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse740, 0, wx.ALL, 5 )
self.m_textCtrl_fuse750 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse750, 0, wx.ALL, 5 )
self.m_textCtrl_fuse760 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse760, 0, wx.ALL, 5 )
self.m_textCtrl_fuse770 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl3.Add( self.m_textCtrl_fuse770, 0, wx.ALL, 5 )
wSizer_fuseUtil.Add( bSizer_fuseGroupCtrl3, 1, wx.EXPAND, 5 )
bSizer_fuseGroupTxt4 = wx.BoxSizer( wx.VERTICAL )
self.m_staticText_fuse780 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x780:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse780.Wrap( -1 )
self.m_staticText_fuse780.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse780, 0, wx.ALL, 5 )
self.m_staticText_fuse790 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x790:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse790.Wrap( -1 )
self.m_staticText_fuse790.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse790, 0, wx.ALL, 5 )
self.m_staticText_fuse7a0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7a0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7a0.Wrap( -1 )
self.m_staticText_fuse7a0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7a0, 0, wx.ALL, 5 )
self.m_staticText_fuse7b0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7b0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7b0.Wrap( -1 )
self.m_staticText_fuse7b0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7b0, 0, wx.ALL, 5 )
self.m_staticText_fuse7c0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7c0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7c0.Wrap( -1 )
self.m_staticText_fuse7c0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7c0, 0, wx.ALL, 5 )
self.m_staticText_fuse7d0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7d0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7d0.Wrap( -1 )
self.m_staticText_fuse7d0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7d0, 0, wx.ALL, 5 )
self.m_staticText_fuse7e0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7e0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7e0.Wrap( -1 )
self.m_staticText_fuse7e0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7e0, 0, wx.ALL, 5 )
self.m_staticText_fuse7f0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x7f0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse7f0.Wrap( -1 )
self.m_staticText_fuse7f0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse7f0, 0, wx.ALL, 5 )
self.m_staticText_fuse800 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x800:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse800.Wrap( -1 )
self.m_staticText_fuse800.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse800, 0, wx.ALL, 5 )
self.m_staticText_fuse810 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x810:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse810.Wrap( -1 )
self.m_staticText_fuse810.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse810, 0, wx.ALL, 5 )
self.m_staticText_fuse820 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x820:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse820.Wrap( -1 )
self.m_staticText_fuse820.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse820, 0, wx.ALL, 5 )
self.m_staticText_fuse830 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x830:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse830.Wrap( -1 )
self.m_staticText_fuse830.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse830, 0, wx.ALL, 5 )
self.m_staticText_fuse840 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x840:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse840.Wrap( -1 )
self.m_staticText_fuse840.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse840, 0, wx.ALL, 5 )
self.m_staticText_fuse850 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x850:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse850.Wrap( -1 )
self.m_staticText_fuse850.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt4.Add( self.m_staticText_fuse850, 0, wx.ALL, 5 )
wSizer_fuseUtil.Add( bSizer_fuseGroupTxt4, 1, wx.EXPAND, 5 )
bSizer_fuseGroupCtrl4 = wx.BoxSizer( wx.VERTICAL )
self.m_textCtrl_fuse780 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse780, 0, wx.ALL, 5 )
self.m_textCtrl_fuse790 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse790, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7a0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7a0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7b0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7b0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7c0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7c0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7d0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7d0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7e0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7e0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse7f0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse7f0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse800 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse800, 0, wx.ALL, 5 )
self.m_textCtrl_fuse810 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse810, 0, wx.ALL, 5 )
self.m_textCtrl_fuse820 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse820, 0, wx.ALL, 5 )
self.m_textCtrl_fuse830 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse830, 0, wx.ALL, 5 )
self.m_textCtrl_fuse840 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse840, 0, wx.ALL, 5 )
self.m_textCtrl_fuse850 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl4.Add( self.m_textCtrl_fuse850, 0, wx.ALL, 5 )
wSizer_fuseUtil.Add( bSizer_fuseGroupCtrl4, 1, wx.EXPAND, 5 )
bSizer_fuseGroupTxt5 = wx.BoxSizer( wx.VERTICAL )
self.m_staticText_fuse860 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x860:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse860.Wrap( -1 )
self.m_staticText_fuse860.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse860, 0, wx.ALL, 5 )
self.m_staticText_fuse870 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x870:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse870.Wrap( -1 )
self.m_staticText_fuse870.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse870, 0, wx.ALL, 5 )
self.m_staticText_fuse880 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x880:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse880.Wrap( -1 )
self.m_staticText_fuse880.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse880, 0, wx.ALL, 5 )
self.m_staticText_fuse890 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x890:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse890.Wrap( -1 )
self.m_staticText_fuse890.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse890, 0, wx.ALL, 5 )
self.m_staticText_fuse8a0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x8a0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8a0.Wrap( -1 )
self.m_staticText_fuse8a0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8a0, 0, wx.ALL, 5 )
self.m_staticText_fuse8b0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"0x8b0:", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8b0.Wrap( -1 )
self.m_staticText_fuse8b0.SetFont( wx.Font( 9, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, "宋体" ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8b0, 0, wx.ALL, 5 )
self.m_staticText_fuse8c0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"Gp4", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8c0.Wrap( -1 )
self.m_staticText_fuse8c0.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, wx.EmptyString ) )
self.m_staticText_fuse8c0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8c0, 0, wx.ALL, 5 )
self.m_staticText_fuse8d0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"Gp4", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8d0.Wrap( -1 )
self.m_staticText_fuse8d0.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, wx.EmptyString ) )
self.m_staticText_fuse8d0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8d0, 0, wx.ALL, 5 )
self.m_staticText_fuse8e0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"Gp4", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8e0.Wrap( -1 )
self.m_staticText_fuse8e0.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, wx.EmptyString ) )
self.m_staticText_fuse8e0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8e0, 0, wx.ALL, 5 )
self.m_staticText_fuse8f0 = wx.StaticText( self.m_panel_fuseUtil, wx.ID_ANY, u"Gp4", wx.DefaultPosition, wx.Size( 31,20 ), 0 )
self.m_staticText_fuse8f0.Wrap( -1 )
self.m_staticText_fuse8f0.SetFont( wx.Font( wx.NORMAL_FONT.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False, wx.EmptyString ) )
self.m_staticText_fuse8f0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )
bSizer_fuseGroupTxt5.Add( self.m_staticText_fuse8f0, 0, wx.ALL, 5 )
wSizer_fuseUtil.Add( bSizer_fuseGroupTxt5, 1, wx.EXPAND, 5 )
bSizer_fuseGroupCtrl5 = wx.BoxSizer( wx.VERTICAL )
self.m_textCtrl_fuse860 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse860, 0, wx.ALL, 5 )
self.m_textCtrl_fuse870 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse870, 0, wx.ALL, 5 )
self.m_textCtrl_fuse880 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse880, 0, wx.ALL, 5 )
self.m_textCtrl_fuse890 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse890, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8a0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse8a0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8b0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse8b0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8c0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse8c0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8d0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse8d0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8e0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 75,20 ), 0 )
bSizer_fuseGroupCtrl5.Add( self.m_textCtrl_fuse8e0, 0, wx.ALL, 5 )
self.m_textCtrl_fuse8f0 = wx.TextCtrl( self.m_panel_fuseUtil, wx.ID_ANY, wx.EmptyString, | |
#!/usr/bin/python
# Particle filter localization.
# Input:  TXT map file whose rows are [X, Y, Z, W, DESCRIPTOR...]
# Output: visualization of the estimated robot position
# Author : <NAME>
# Contact : <EMAIL>
# Thesis source code, CVUT, Prague, Czech Republic

# Standard / numeric libraries
# ============================
import math
import itertools
from fractions import Fraction as frac

import numpy as np
from numpy.random import random
from numpy.random import seed

# ROS / vision libraries
# ======================
import rospy
import roslib; roslib.load_manifest('visualization_marker_tutorials')
import cv2
from cv_bridge import CvBridge, CvBridgeError
from std_msgs.msg import String, Header, ColorRGBA
from geometry_msgs.msg import Pose, PoseStamped, Quaternion, Point, Vector3
from nav_msgs.msg import Odometry
from sensor_msgs.msg import Image
from visualization_msgs.msg import Marker
from visualization_msgs.msg import MarkerArray
import tf.transformations as tf  # NOTE(review): rebinds the name `tf` usually used for the ROS tf package
from tf.transformations import euler_from_quaternion, quaternion_from_euler
from filterpy.monte_carlo import systematic_resample

# Plotting / clustering libraries
# ===============================
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import matplotlib.cm as cmx
from scipy.cluster.hierarchy import fcluster
from scipy.cluster.hierarchy import cophenet
from scipy.cluster.hierarchy import dendrogram, linkage
from scipy.spatial.distance import pdist
import sklearn.metrics as sm
from sklearn.cluster import AgglomerativeClustering
from sklearn.cluster import KMeans
from sklearn.datasets import make_blobs
from sklearn.metrics import silhouette_score
from sklearn.preprocessing import StandardScaler

# FIX: the original file contained two incomplete statements
# ("from geometry_msgs.msg import" with no names), which are
# SyntaxErrors.  The geometry_msgs types used below are already
# imported above, so the dangling lines were removed.  A duplicate
# "import matplotlib.pyplot as plt" was also collapsed to one import.
# Initialize scalar state shared between the ROS callbacks
# =========================================================
previous_orientation_x = 0
previous_orientation_y = 0
# FIX: previous_orientation_z was initialized twice in the original
# (first 0, then float(0)); only the final value is kept.
previous_orientation_z = float(0)
previous_orientation_w = 0
current_orientation_z = float(0)
previous_position_y = float(0)
previous_position_x = float(0)
gamma = float(0)          # direction of the last odometry displacement (rad)
desc = None               # latest camera feature descriptors (set by an image callback elsewhere)
image_width = float(1280)  # CAMERA IS 1.2 MP ... 1280 x 960
image_height = float(960)
camera_matrix = np.array([[1087.5798, 0, 652.0836], [0, 1089.213, 484.02], [0, 0, 1]])
num_of_particles = 2500
z_position_for_particle = 0.3
number_of_matches = 0

# LIST OF VARIABLES
# =================
Xsnew = list()
Ysnew = list()
Zsnew = list()
Wsnew = list()
desc_features = list()
distances_of_matches = list()
features_in_range_particles = list()
all_particles_positions = list()
matched1 = list()
particle_heading_list = list()

# Uniform initial particle weights.
# FIX: dropped the no-op module-level `global weights` declaration.
weights = np.ones(num_of_particles)

# OPEN THE MAP FILE
# =================
# FIX: use a context manager so the file handle is closed promptly
# (the original leaked the handle returned by open()).
with open('Triangulate_and_desc_Trajectory.txt', 'r') as map_file:
    f = map_file.readlines()
# ORGANISE THE DATA IN THE TXT FILE !
# =====================================
# Each map line is parsed into position columns plus descriptor bytes,
# then features are kept only when their Z lies in a fixed band.
for line in f:
    tmp = line.strip().split(",")
    values = [float(v) for v in tmp]
    #print("values", values)
    # One row of 65 comma-separated values: X, Y, Z, W, then descriptor.
    points4d = np.array(values).reshape((-1, 65))
    # NOTE(review): slicing [:, :65] keeps ALL 65 columns, so points3d
    # is identical to points4d — presumably [:, :4] was intended; confirm.
    points3d = points4d[:, :65] # 65 is number of elements per row
    # Normalization: homogeneous divide by the W column (points4d[:, 3]).
    Xs = points3d[:, 0]/points4d[:,3]
    Ys = points3d[:, 1]/points4d[:,3]
    Zs = points3d[:, 2]/points4d[:,3]
    # NOTE(review): Ws is column 3 divided by itself, so it is always 1.
    Ws = points3d[:, 3]/points4d[:,3]
    # Remaining values (index 4 onward) are the feature descriptor.
    desc_features.append(values[4:])
    # FILTERATION OF THE FEATURES !!!
    # =================================
    # Keep only features whose (normalized) Z lies in the 2.8..3.6 band.
    output_list = []
    for i in range(len(Xs)):
        if 2.8 < Zs[i] < 3.6:
            # NOTE(review): `desc` is still None at module-load time (it is
            # only set by a camera callback); the per-row descriptor lives
            # in values[4:] — confirm which was intended here.
            output_list.append([Xs[i], Ys[i], Zs[i], Ws[i] , desc])
    # Flatten the accepted features into the module-level accumulators.
    # (Note: this inner `values` shadows the outer parse result.)
    for values in output_list:
        Xsnew.append(values[0])
        Ysnew.append(values[1])
        Zsnew.append(values[2])
        Wsnew.append(values[3])
# VICON CALLBACK
# ===============
def callback_position_vicon(data):
    """Cache the ground-truth pose published by the Vicon system.

    Stores the position and orientation components, the derived yaw
    (``fi_robot``) and the 2-D position (``real_position_of_the_robot``)
    in module-level globals so they can be compared against the particle
    filter estimate.
    """
    global current_x_position_real, current_y_position_real, current_z_position_real
    global current_x_orientation_real, current_y_orientation_real
    global current_z_orientation_real, current_w_orientation_real
    global real_position_of_the_robot, fi_robot

    pos = data.pose.position
    ori = data.pose.orientation
    current_x_position_real = pos.x
    current_y_position_real = pos.y
    current_z_position_real = pos.z
    current_x_orientation_real = ori.x
    current_y_orientation_real = ori.y
    current_z_orientation_real = ori.z
    current_w_orientation_real = ori.w

    # Convert the quaternion to Euler angles; only the yaw is used.
    quat = [current_x_orientation_real, current_y_orientation_real,
            current_z_orientation_real, current_w_orientation_real]
    _roll, _pitch, yaw_robot = euler_from_quaternion(quat)
    fi_robot = yaw_robot
    real_position_of_the_robot = [current_x_position_real, current_y_position_real]
# ODOM CALLBACK
# ==============
def callback_position_odom(msg):
    """Process one wheel-odometry message.

    Computes the displacement since the previous message, derives the
    direction of motion ``gamma``, runs the motion model over the global
    particle set (when the jump is plausible), and finally stores the
    current pose as "previous" for the next invocation.

    Parameters
    ----------
    msg : nav_msgs.msg.Odometry
        Incoming odometry message.
    """
    global gamma
    global previous_position_y, previous_position_x
    global x_array_diff, y_array_diff
    global current_orientation_z
    global previous_orientation_x, previous_orientation_y
    global previous_orientation_z, previous_orientation_w
    global current_position_x, current_position_y
    global fi_previous, fi_current

    current_position_x = msg.pose.pose.position.x
    current_position_y = msg.pose.pose.position.y
    current_orientation_x = msg.pose.pose.orientation.x
    current_orientation_y = msg.pose.pose.orientation.y
    current_orientation_z = msg.pose.pose.orientation.z
    current_orientation_w = msg.pose.pose.orientation.w

    # Current yaw from the CURRENT quaternion.
    current_orientation_list_odom = [current_orientation_x, current_orientation_y,
                                     current_orientation_z, current_orientation_w]
    (roll_odom_current, pitch_odom_current, yaw_odom_current) = euler_from_quaternion(current_orientation_list_odom)
    fi_current = yaw_odom_current

    # Previous yaw from the PREVIOUS quaternion.
    # FIX: the original passed current_orientation_list_odom here, which
    # made fi_previous always equal fi_current and collapsed the second
    # rotation of the motion model to -delta_rotation_1.
    previous_orientation_list_odom = [previous_orientation_x, previous_orientation_y,
                                      previous_orientation_z, previous_orientation_w]
    (roll_odom_previous, pitch_odom_previous, yaw_odom_previous) = euler_from_quaternion(previous_orientation_list_odom)
    fi_previous = yaw_odom_previous

    # Displacement since the last message and its direction.
    y_array_diff = current_position_y - previous_position_y
    x_array_diff = current_position_x - previous_position_x
    gamma = np.arctan2(y_array_diff, x_array_diff)

    # MOTION MODEL IS HERE !!
    # Ignore implausibly large jumps (>= 0.5 m between messages).
    if math.sqrt(y_array_diff**2 + x_array_diff**2) < 0.5:
        motion_model(particles)

    # After calculating gamma SWAPPING OF VARIABLES !
    # ===================================================
    previous_position_x = current_position_x
    previous_position_y = current_position_y
    previous_orientation_x = current_orientation_x
    previous_orientation_y = current_orientation_y
    previous_orientation_z = current_orientation_z
    previous_orientation_w = current_orientation_w
def minmax(val_list):
    """Return ``(min, max)`` of *val_list*.

    Used to build the x/y extent of the mapped trajectory so that the
    initial particles can be spread uniformly over it.
    """
    return (min(val_list), max(val_list))
# PARTICLE FILTER STARTS HERE
# ================================
# UNIFORM PARTICLES
# ===================
def create_uniform_particles(x_range, y_range, hdg_range, num_of_particles):
    """Create uniformly distributed particles.

    Parameters
    ----------
    x_range, y_range : (low, high)
        Intervals for the particle x and y locations.
    hdg_range : (low, high)
        Interval for the particle headings, in radians.
    num_of_particles : int
        Number of particles to create.

    Returns
    -------
    numpy.ndarray of shape (num_of_particles, 3)
        Columns are x location, y location, heading.  The array is also
        stored in the module-level ``particles`` global used by the
        odometry callback and the motion model.
    """
    global particles
    # Uninitialized array: every cell is overwritten below.
    particles = np.empty((num_of_particles, 3))
    # FIX: the original called a bare `uniform(...)` that was never
    # imported (NameError at runtime); use np.random.uniform explicitly.
    particles[:, 0] = np.random.uniform(x_range[0], x_range[1], size=num_of_particles)
    particles[:, 1] = np.random.uniform(y_range[0], y_range[1], size=num_of_particles)
    particles[:, 2] = np.random.uniform(hdg_range[0], hdg_range[1], size=num_of_particles)
    #particles[:, 2] %= 2 * np.pi  # optional: wrap headings into [0, 2*pi)
    return particles
# MOTION MODEL
# ===============
def motion_model(particles):
    """Advance every particle with the noisy odometry motion model, in place.

    Reads the displacement and yaw values computed by the odometry
    callback from module globals, decomposes the motion into
    rotate-translate-rotate, perturbs each component with zero-mean
    Gaussian noise, and updates every particle's (x, y, heading).
    """
    global x_array_diff, y_array_diff
    global current_orientation_z, previous_orientation_z
    global fi_current, fi_previous

    # Noise gains of the odometry model.
    alpha1, alpha2, alpha3, alpha4 = 0.04, 0.04, 0.4, 0.04

    n = len(particles)

    # Decompose the odometry reading into rotation-1, translation, rotation-2.
    delta_translation = math.sqrt(x_array_diff ** 2 + y_array_diff ** 2)
    delta_rotation_1 = gamma - fi_previous
    delta_rotation_2 = fi_current - fi_previous - delta_rotation_1

    # Standard deviations of the perturbations for each component.
    std1 = alpha1 * abs(delta_rotation_1) + alpha2 * abs(delta_translation)
    std2 = alpha3 * abs(delta_translation) + alpha4 * (abs(delta_rotation_1) + abs(delta_rotation_2))
    std3 = alpha1 * abs(delta_rotation_2) + alpha2 * abs(delta_translation)

    # One independent noise sample per particle and component.
    noise_rot1 = np.random.normal(0, std1, n)
    noise_trans = np.random.normal(0, std2, n)
    noise_rot2 = np.random.normal(0, std3, n)

    rot1_hat = delta_rotation_1 + noise_rot1
    trans_hat = delta_translation + noise_trans
    rot2_hat = delta_rotation_2 + noise_rot2

    # Apply the perturbed motion to every particle.
    particles[:, 0] += trans_hat * np.cos(fi_previous + rot1_hat)
    particles[:, 1] += trans_hat * np.sin(fi_previous + rot1_hat)
    particles[:, 2] += fi_previous + rot1_hat + rot2_hat
#SENSOR MODEL - UPDATE WEIGHTS OF PARTICLES
#=============================================
def update(particles):
    """Sensor model: recompute the global particle ``weights``.

    For each particle, project the map features into a virtual camera at
    the particle's pose, match their descriptors (Hamming / BFMatcher)
    against the live camera descriptors in the global ``desc``, and weight
    the particle by its match count over the summed reprojection error.

    NOTE(review): assumes ``desc`` and ``kps`` (keypoints of the current
    camera frame) are set by an image callback not shown in this file —
    confirm; ``kps`` is otherwise undefined here.
    """
    global desc
    global number_of_matches
    global weights
    weights_of_current = []
    print ("num of particles beginning of sensor model", len(particles))
    for x, y, angle in zip(particles[:, 0], particles[:, 1], particles[:, 2]):
        #print("x in sensor model is ",x)
        # Pose of the virtual camera for this particle.
        d = np.sqrt(x*x+y*y)
        c = np.cos(angle)
        s = np.sin(angle)
        alpha = np.arctan2(y,x)
        tvec = np.array([d*np.cos(alpha-angle), d*np.sin(alpha-angle), 0], dtype = float).reshape(3,1)
        # Rotation about the z axis by the particle heading.
        R_matrix = np.array([
            [ c, -s, 0],
            [ s, c, 0],
            [ 0, 0, 1]
            ])
        # NOTE(review): the translation column is multiplied by 0 here, so
        # the projection effectively uses rotation only (features are
        # instead translated relative to the particle below) — confirm.
        R_t_matrix = np.concatenate((np.transpose(R_matrix), 0*-tvec), axis =1)
        projection_matrix = np.dot(camera_matrix, R_t_matrix)
        # Per-particle accumulators.
        sum_of_distances = 0
        good_matches = list()
        map_feature_count = 0
        sum_of_euclidean_distances = 0
        sum_of_manhattan_distances = 0
        for i in range(len(Xsnew)):
            # Map feature expressed relative to the particle (camera height 0.3 m).
            x_features = Xsnew[i] - x
            y_features = Ysnew[i] - y
            z_features = Zsnew[i] - 0.3
            feature_coordinates = np.array([[x_features],[y_features],[z_features],[1]], dtype=float)
            position_of_features = np.dot(projection_matrix, feature_coordinates)
            u, v, w = position_of_features[0], position_of_features[1], position_of_features[2]
            # Perspective divide to pixel coordinates.
            u_normalized = u / w
            v_normalized = v / w
            # Keep only features that land inside the virtual image.
            if 0 < u_normalized < image_height and 0 < v_normalized < image_width:
                map_feature_count += 1
                #print("U normalized is {} and v normailzed is {} from particles in position x is {} y is {} z is {}". format(u_normalized, v_normalized, x,y,angle)) #DEBUG:
                #print ("feature {}, {},{} and moved {},{},{}". format(x_features, y_features, z_features, Xsnew[i], Ysnew[i], Zsnew[i])) # DEBUG:
                virtual_camera_desc = desc_features[i]
                #print("X features that is seen is {} and virtual camera desc is {}". format(x_features, virtual_camera_desc)) # DEBUG:
                virtual_camera_desc_array = np.array([virtual_camera_desc], dtype = np.uint8)
                # Brute-force Hamming matcher (binary descriptors), cross-checked.
                bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck = True)
                matches = bf.match(desc, virtual_camera_desc_array)
                for m in matches:
                    # Accept matches with a small Hamming distance only.
                    if m.distance < 25:
                        img1_idx = m.queryIdx
                        (x1, y1) = kps[img1_idx].pt
                        (x2, y2) = (u_normalized, v_normalized)
                        # Reprojection error between observed and projected pixel.
                        euclidean_distance = math.sqrt(((x1 - x2)**2) + ((y1 - y2)**2))
                        sum_of_euclidean_distances += euclidean_distance
                        #print("euclidean_distance", euclidean_distance) # DEBUG:
                        #print("sum euclidean_distance", sum_of_euclidean_distances) # DEBUG:
                        # Manhattan distance # DEBUG:
                        #======================
                        # manhattan_distance = abs(x1 - x2) + abs(y1 - y2)
                        # sum_of_manhattan_distances += manhattan_distance
                        good_matches.append(m)
                        sum_of_distances += m.distance
        number_of_matches = len(good_matches)
        print ("num of matches ", number_of_matches)
        try: # TO AVOID DIVISION OF ZEROS IN CASE IF NO MATCHES ARE FOUND!
            #weights_of_each_particle = 0.001+number_of_matches
            #weights_of_each_particle = (0.001 + number_of_matches**2/sum_of_euclidean_distances)
            # Weight: match count over total reprojection error (+ floor 0.001).
            weights_of_each_particle = (0.001 + number_of_matches/sum_of_euclidean_distances)
            weights_of_current.append(weights_of_each_particle)
        except ZeroDivisionError:
            # No matches: fall back to the floor weight.
            weights_of_each_particle = (0.001 + number_of_matches)
            weights_of_current.append(weights_of_each_particle)
    # Normalize so the weights sum to 1.
    weights_of_current = np.array(weights_of_current)
    weights = (weights_of_current) / sum(weights_of_current)
#RESAMPLING OF Particles - LOW VARIANCE METHOD
#=================================================
def resample(weights):
N = len(weights)
#print("N is in the resample before start ", N) # DEBUG:
# | |
# Repository: ph4m/constrained-rl
# Copyright (c) IBM Corp. 2018. All Rights Reserved.
# Project name: Constrained Exploration and Recovery from Experience Shaping
# This project is licensed under the MIT License, see LICENSE
import pygame
import numpy as np
from enum import Enum
from .nav2d_pos import Nav2dPos
from .obstacles import ObstacleSquare, ObstacleCircle
class RenderingState(Enum):
    """User-selectable rendering modes for the Nav2d window."""
    disable = 0     # no rendering window
    continuous = 1  # render every step without pausing
    step = 2        # pause after every environment step
    episode = 3     # pause at the end of every episode
class Nav2dRendering(object):
    '''
    Implement rendering functions for Nav2d environments,
    with or without obstacles, with or without constraints

    Designed as a mixin: world-geometry attributes (min_x, max_x,
    world_width, agent_radius, max_step_x, ...) are expected to be
    provided by the host environment class.
    '''
    # --- Window geometry -----------------------------------------------
    rendering_window_name = 'Nav2d'
    rendering_width = 800
    rendering_height = 800
    rendering_wait = 1
    # --- Colors (RGB / RGBA tuples) ------------------------------------
    rendering_background_rgb = (0, 0, 0)
    rendering_background_transparent_rgba = (0, 0, 0, 0)
    rendering_agent_rgb = (0, 0, 255)
    rendering_agent_range_rgba = (255, 0, 0, 155)
    rendering_target_rgb = (0, 255, 0)
    rendering_agent_path_rgb = (255, 255, 255)
    # --- Constraint overlay --------------------------------------------
    rendering_constraint_line_rgb = (25, 0, 0)
    rendering_constraint_line_width = 1
    rendering_constraint_polygon_rgba = (50, 50, 50, 128)
    rendering_constraint_polygon_width = 0 # set to zero to fill
    # --- Border / obstacles --------------------------------------------
    rendering_border_polygon_rgb = (155, 155, 155, 0)
    rendering_border_polygon_width = 0 # set to zero to fill
    rendering_obstacle_polygon_rgb = (0, 0, 0)
    rendering_obstacle_polygon_width = 0 # set to zero to fill
    # --- Text overlays (note: "(30)" is just the int 30, not a tuple) --
    rendering_info_text_size = (30)
    rendering_info_text_rgb = (255, 255, 255)
    rendering_info_text_pos = (10, 10)
    rendering_cmd_text_size = (30)
    rendering_cmd_text_rgb = (255, 255, 255)
    rendering_cmd_text_pos = (10, 890)
    rendering_cmd_text_str = 'Commands: (n) next step, (e) next episode, (c) continuous, (ESC) disable, (h) hide help'
    def __init__(self):
        """Initialise constraint-rendering state and the border geometry.

        Assumes the host class has already defined the world-geometry
        attributes consumed by init_border().  Calls super().__init__()
        last for cooperative multiple inheritance.
        """
        self.constraint_lines = []
        self.constraint_polygons = []
        self.init_border()
        self.rendering_state = RenderingState.continuous
        super(Nav2dRendering, self).__init__()
def init_border(self):
self.border_width_x = 1.5 * self.max_step_x
self.border_width_y = 1.5 * self.max_step_y
self.min_x_display = self.min_x - self.border_width_x
self.max_x_display = self.max_x + self.border_width_x
self.min_y_display = self.min_y - self.border_width_y
self.max_y_display = self.max_y + self.border_width_y
self.corner_pos_top_left = (self.min_x_display, self.max_y_display)
self.corner_pos_top_right = (self.max_x_display, self.max_y_display)
self.corner_pos_bottom_left = (self.min_x_display, self.min_y_display)
self.corner_pos_bottom_right = (self.max_x_display, self.min_y_display)
self.world_width_with_border = self.world_width + 2*self.border_width_x
self.world_height_with_border = self.world_height + 2*self.border_width_y
self.rendering_width_with_border = int(self.rendering_width/self.world_width*self.world_width_with_border)
self.rendering_height_with_border = int(self.rendering_height/self.world_height*self.world_height_with_border)
self.border_polygon = []
self.border_polygon.append(self.convert_world_to_pixel(self.min_x, self.min_y))
self.border_polygon.append(self.convert_world_to_pixel(self.min_x, self.max_y))
self.border_polygon.append(self.convert_world_to_pixel(self.max_x, self.max_y))
self.border_polygon.append(self.convert_world_to_pixel(self.max_x, self.min_y))
    def init_rendering(self):
        """Initialise pygame, the display window, overlay surfaces and fonts.

        Must run before any draw_* method; sets ``is_init_rendering``.
        """
        pygame.init()
        pygame.font.init()
        self.rendering_window_size = (self.rendering_width_with_border, self.rendering_height_with_border)
        # Per-pixel-alpha surface used for the translucent constraint polygons.
        self.rendering_surface_constraints = pygame.Surface(self.rendering_window_size, pygame.SRCALPHA)
        # NOTE: the agent-range surface is an alias of the constraints surface,
        # so both overlays share one blit (see the identity checks in
        # reset_image() and draw_agent_range()).
        self.rendering_surface_agent_range = self.rendering_surface_constraints
        self.rendering_display = pygame.display.set_mode(self.rendering_window_size)
        # Radii converted from world units to pixels.
        self.rendering_agent_radius = int(self.agent_radius * self.rendering_width / self.world_width)
        self.rendering_agent_range_radius = int(self.rendering_width*self.max_step_x/self.world_width)
        self.rendering_target_radius = int(self.target_radius * self.rendering_width / self.world_width)
        self.reset_image()
        self.agent_path_closed = False
        self.agent_path_width = 1
        self.is_init_rendering = True
        self.info_font = pygame.font.SysFont('', self.rendering_info_text_size)
        self.cmd_font = pygame.font.SysFont('', self.rendering_cmd_text_size)
        self.do_display_cmd_help = True
    def redraw(self):
        """Draw one full frame, back-to-front (obstacles first, text last)."""
        self.draw_obstacles()
        self.draw_lidar()
        self.draw_agent_range()
        self.draw_constraints()
        self.draw_agent_path()
        self.draw_agent_target()
        self.draw_info()
    def draw_obstacles(self):
        """Draw every obstacle, if the environment defines any.

        Squares are drawn as filled polygons, circles as filled circles;
        any other obstacle type raises ValueError.
        """
        if not hasattr(self, 'obstacles'):
            # Obstacle-free environments simply skip this pass.
            return
        for o in self.obstacles:
            if isinstance(o, ObstacleSquare):
                polygon_lines_world = o.to_polygon()
                polygon_lines_pixel = [self.convert_world_to_pixel(*l) for l in polygon_lines_world]
                pygame.draw.polygon(self.rendering_display,
                                    self.rendering_obstacle_polygon_rgb,
                                    polygon_lines_pixel,
                                    self.rendering_obstacle_polygon_width)
            elif isinstance(o, ObstacleCircle):
                center_pixel = self.convert_world_to_pixel(o.center_x, o.center_y)
                radius_pixel = self.convert_world_length_to_pixel_length(o.radius)
                pygame.draw.circle(self.rendering_display,
                                   self.rendering_obstacle_polygon_rgb,
                                   center_pixel,
                                   radius_pixel,
                                   self.rendering_obstacle_polygon_width)
            else:
                raise ValueError('Invalid obstacle {0}'.format(type(o)))
    def draw_lidar(self):
        """Draw the lidar ray segments, if the environment provides them.

        Each segment is (start_world, end_world, distance); rays with a
        zero distance are skipped.
        """
        if not hasattr(self, 'lidar_segments'):
            return
        for segment in self.lidar_segments:
            p1, p2, dist = segment
            if dist == 0.:
                # Zero distance: nothing to draw for this ray.
                continue
            p1_pixel = self.convert_world_to_pixel(*p1)
            p2_pixel = self.convert_world_to_pixel(*p2)
            pygame.draw.line(self.rendering_display,
                             (255, 255, 0),
                             p1_pixel,
                             p2_pixel,
                             1)
    def render(self, mode='human', close=False):
        """Gym-style render entry point.

        Tears the window down when closing or disabled, lazily initialises
        pygame on first use, then clears, redraws, flips the display and
        processes keyboard commands.
        """
        if close or (self.rendering_state == RenderingState.disable):
            if self.is_init_rendering:
                pygame.display.quit()
            return
        elif not self.is_init_rendering:
            self.init_rendering()
        self.reset_image()
        self.redraw()
        pygame.display.update()
        self.user_interaction()
    def save_render(self, path_save, verbose=False):
        """Save the current window contents to *path_save* as an image file."""
        pygame.image.save(self.rendering_display, path_save)
        if verbose:
            print('Saved window: {0}'.format(path_save))
    def user_interaction(self):
        """Handle keyboard commands according to the current rendering state.

        Keys: (n) step mode, (e) episode mode, (c) continuous,
        (h) toggle the help text, (ESC) disable rendering.  In step mode
        (and in episode mode at episode end) this blocks until a key is
        pressed; in continuous mode it only drains the event queue.
        """
        if self.rendering_state == RenderingState.continuous:
            # Non-blocking: react to any queued key presses.
            event_list = pygame.event.get()
            for event in event_list:
                if event.type == pygame.KEYDOWN:
                    if (event.key == pygame.K_n):
                        self.rendering_state = RenderingState.step
                    elif (event.key == pygame.K_e):
                        self.rendering_state = RenderingState.episode
                    elif (event.key == pygame.K_h):
                        self.do_display_cmd_help = not self.do_display_cmd_help
                    elif (event.key == pygame.K_ESCAPE):
                        self.rendering_state = RenderingState.disable
        elif self.rendering_state == RenderingState.step:
            # Blocking: wait for a key before the next environment step.
            while True:
                event = pygame.event.wait()
                if event.type == pygame.KEYDOWN:
                    if (event.key == pygame.K_n) or (event.key == pygame.K_RETURN):
                        break
                    elif (event.key == pygame.K_c):
                        self.rendering_state = RenderingState.continuous
                        break
                    elif (event.key == pygame.K_e):
                        self.rendering_state = RenderingState.episode
                        break
                    elif (event.key == pygame.K_h):
                        self.do_display_cmd_help = not self.do_display_cmd_help
                        break
                    elif (event.key == pygame.K_ESCAPE):
                        self.rendering_state = RenderingState.disable
                        break
        elif self.rendering_state == RenderingState.episode:
            # Blocking only once the episode has ended.
            done = self.done_base or (self.i_step == self.max_episode_steps)
            if done:
                while True:
                    event = pygame.event.wait()
                    if event.type == pygame.KEYDOWN:
                        if (event.key == pygame.K_e) or (event.key == pygame.K_RETURN):
                            break
                        elif (event.key == pygame.K_c):
                            self.rendering_state = RenderingState.continuous
                            break
                        elif (event.key == pygame.K_n):
                            self.rendering_state = RenderingState.step
                            break
                        elif (event.key == pygame.K_h):
                            self.do_display_cmd_help = not self.do_display_cmd_help
                            break
                        elif (event.key == pygame.K_ESCAPE):
                            self.rendering_state = RenderingState.disable
                            break
    def reset_image(self):
        """Clear the display and the transparent overlay surface(s), then
        repaint the world border."""
        self.rendering_display.fill(self.rendering_background_rgb)
        self.rendering_surface_constraints.fill(self.rendering_background_transparent_rgba)
        if self.rendering_surface_agent_range != self.rendering_surface_constraints:
            # Only clear separately when the two overlays are distinct surfaces.
            self.rendering_surface_agent_range.fill(self.rendering_background_transparent_rgba)
        self.draw_border()
    def draw_border(self):
        """Fill the polygon that marks the playable world rectangle."""
        pygame.draw.polygon(self.rendering_display,
                            self.rendering_border_polygon_rgb,
                            self.border_polygon,
                            self.rendering_border_polygon_width)
    def draw_agent_range(self):
        """Draw the translucent circle showing the agent's reachable range."""
        # Draw agent range onto the alpha overlay surface.
        pygame.draw.circle(self.rendering_surface_agent_range,
                           self.rendering_agent_range_rgba,
                           (self.agent_pixel_col, self.agent_pixel_row),
                           self.rendering_agent_range_radius,
                           )
        if self.rendering_surface_agent_range != self.rendering_surface_constraints:
            # When the overlay is not aliased to the constraints surface,
            # composite it here (otherwise draw_constraint_polygons blits it).
            self.rendering_display.blit(self.rendering_surface_agent_range, (0, 0))
    def draw_agent_target(self):
        """Draw the agent and the target as filled circles."""
        # Draw agent
        pygame.draw.circle(self.rendering_display,
                           self.rendering_agent_rgb,
                           (self.agent_pixel_col, self.agent_pixel_row),
                           self.rendering_agent_radius,
                           )
        # Draw target
        pygame.draw.circle(self.rendering_display,
                           self.rendering_target_rgb,
                           (self.target_pixel_col, self.target_pixel_row),
                           self.rendering_target_radius,
                           )
    def draw_info(self):
        """Render the average-reward readout and the key-command help line.

        Both texts are drawn only while the help display is enabled.
        """
        if self.do_display_cmd_help:
            info_str = 'Avg. reward: {0:.2f}'.format(self.ep_reward_avg)
            info_surface = self.info_font.render(info_str, False, self.rendering_info_text_rgb, self.rendering_background_rgb)
            self.rendering_display.blit(info_surface, self.rendering_info_text_pos)
            cmd_surface = self.cmd_font.render(self.rendering_cmd_text_str, False, self.rendering_cmd_text_rgb, self.rendering_background_rgb)
            self.rendering_display.blit(cmd_surface, self.rendering_cmd_text_pos)
    def draw_agent_path(self):
        """Draw the polyline tracing the agent's pixel positions so far.

        pygame.draw.lines needs at least two points, hence the guard.
        """
        if len(self.agent_pixel_seq) >= 2:
            pygame.draw.lines(self.rendering_display,
                              self.rendering_agent_path_rgb,
                              self.agent_path_closed,
                              self.agent_pixel_seq,
                              self.agent_path_width,
                              )
    def draw_constraints(self):
        """Refresh the constraint geometry, then draw polygons and lines."""
        self.update_constraint_rendering()
        self.draw_constraint_polygons()
        self.draw_constraint_lines()
    def draw_constraint_polygons(self):
        """Fill the translucent constraint polygons on the overlay surface,
        then composite the overlay onto the display.

        NOTE(review): the blit is placed after the loop here (it does not
        use the loop variables); confirm against the original layout.
        """
        for i_constraint, polygon in enumerate(self.constraint_polygons):
            if len(polygon) < 3:
                # A drawable polygon needs at least three vertices.
                continue
            pygame.draw.polygon(self.rendering_surface_constraints,
                                self.rendering_constraint_polygon_rgba,
                                polygon,
                                self.rendering_constraint_polygon_width)
        self.rendering_display.blit(self.rendering_surface_constraints, (0, 0))
    def draw_constraint_lines(self):
        """Draw each constraint boundary as a line segment on the display."""
        for i_constraint, (pixel_start, pixel_end) in enumerate(self.constraint_lines):
            pygame.draw.line(self.rendering_display,
                             self.rendering_constraint_line_rgb,
                             pixel_start,
                             pixel_end,
                             self.rendering_constraint_line_width,
                             )
    def update_env(self):
        """Hook called by the environment: refresh cached agent pixels."""
        self.update_pixel_agent()
def convert_world_length_to_pixel_length(self, x):
x_pixel = int(self.rendering_width_with_border*x/self.world_width_with_border)
return x_pixel
def convert_world_to_pixel(self, x, y):
col = int(self.rendering_width_with_border*(0.5 + x / self.world_width_with_border))
row = int(self.rendering_height_with_border*(0.5 - y / self.world_height_with_border))
return col, row
def convert_pixel_to_world(self, col, row):
x = self.world_width_with_border * (col/self.rendering_width_with_border - 0.5)
y = -self.world_height_with_border * (row/self.rendering_height_with_border - 0.5)
return x, y
def update_pixel_agent(self):
    """Refresh the cached pixel position of the agent from its world position."""
    pixel = self.convert_world_to_pixel(self.agent_pos_x, self.agent_pos_y)
    self.agent_pixel_col, self.agent_pixel_row = pixel
def update_pixel_target(self):
    """Refresh the cached pixel position of the target from its world position."""
    pixel = self.convert_world_to_pixel(self.target_pos_x, self.target_pos_y)
    self.target_pixel_col, self.target_pixel_row = pixel
def reset_task_specific(self):
    """Reset the traced path: recompute agent/target pixel positions and
    seed the path with the agent's current pixel.

    Fix: removed the redundant initial `self.agent_pixel_seq = []` -- the
    list was unconditionally replaced two statements later (neither
    update_pixel_* call reads it), so the first assignment was a dead store.
    """
    self.update_pixel_agent()
    self.update_pixel_target()
    self.agent_pixel_seq = [(self.agent_pixel_col, self.agent_pixel_row)]
def _step_task_specific(self):
    """Per-step hook: append the agent's current pixel position to the traced path."""
    self.store_agent_pixel()
def store_agent_pixel(self):
    """Append the agent's current pixel position to the traced path."""
    pixel = (self.agent_pixel_col, self.agent_pixel_row)
    self.agent_pixel_seq.append(pixel)
def update_constraint_rendering(self):
if not self.is_init_rendering:
return
self.constraint_lines = []
self.constraint_polygons = []
if (not hasattr(self, 'ineq_mat')) or (not hasattr(self, 'ineq_vec')):
return
ineq_mat = self.ineq_mat
ineq_vec = self.ineq_vec
if (ineq_mat is None) or (ineq_vec is None):
assert (ineq_mat is None) and (ineq_vec is None)
return
if (len(ineq_mat) == 0) or (len(ineq_vec) == 0):
assert (len(ineq_mat) == 0) and (len(ineq_vec) == 0)
return
assert (ineq_mat.ndim == 2) and (ineq_vec.ndim == 2), 'Dimension mismatch: got shapes {0} and {1}'.format(ineq_mat.shape, ineq_vec.shape)
# ineq_mat X <= ineq_vec
# X: normalized action
# Y: real action (dx, dy)
# X = self.normalize_act_scale * Y + self.normalize_act_shift
# ineq_mat * self.normalize_act_scale * Y <= ineq_vec - ineq_mat * self.normalize_act_shift
ineq_mat = np.dot(ineq_mat, self.normalize_act_scale)
ineq_vec = ineq_vec - np.dot(ineq_mat, self.normalize_act_shift)
for i_constraint, (A, B) in enumerate(zip(ineq_mat, ineq_vec)):
# A X <= B, alpha_x * x + alpha_y * y <= b
alpha_x = A[0]
alpha_y = A[1]
b = B[0]
if abs(alpha_y) < self.alpha_xy_epsilon:
if abs(alpha_x) < self.alpha_xy_epsilon:
continue
action_x = b / alpha_x
line_start_pos_x = self.agent_pos_x + action_x
line_end_pos_x = self.agent_pos_x + action_x
line_start_pos_y = self.min_y_display
line_end_pos_y = self.max_y_display
line_start_pos = (line_start_pos_x, line_start_pos_y)
line_end_pos = (line_end_pos_x, line_end_pos_y)
if alpha_x > 0: # left of line: forbidden right
constraint_polygon = [line_end_pos, line_start_pos, self.corner_pos_bottom_right, self.corner_pos_top_right]
else: # valid right: forbidden left
constraint_polygon = [line_start_pos, line_end_pos, self.corner_pos_top_left, self.corner_pos_bottom_left]
else:
action_x_left = self.min_x_display - self.agent_pos_x
action_x_right = self.max_x_display - self.agent_pos_x
action_y_left, action_y_right = map(lambda x: (b - alpha_x*x)/alpha_y, [action_x_left, action_x_right])
line_start_pos_x = self.min_x_display
line_end_pos_x = self.max_x_display
line_start_pos_y = self.agent_pos_y + action_y_left
line_end_pos_y = self.agent_pos_y + action_y_right
line_start_pos = (line_start_pos_x, line_start_pos_y)
line_end_pos = (line_end_pos_x, line_end_pos_y)
# Check that we picked the right display range
# y = a_line * x + b_line
a_line = (line_end_pos_y - line_start_pos_y)/(line_end_pos_x - line_start_pos_x)
b_line = line_end_pos_y - a_line * line_end_pos_x
y_line_to_x = lambda y: (y - b_line)/a_line
if (line_start_pos_y < self.min_y_display) or (line_start_pos_y > self.max_y_display):
if line_start_pos_y < self.min_y_display:
line_start_pos_y_restricted = self.min_y_display
else:
line_start_pos_y_restricted = self.max_y_display
line_start_pos_x_restricted = y_line_to_x(line_start_pos_y_restricted)
line_start_pos_restricted = (line_start_pos_x_restricted, line_start_pos_y_restricted)
else:
line_start_pos_restricted = line_start_pos
if (line_end_pos_y < self.min_y_display) or (line_end_pos_y > self.max_y_display):
if line_end_pos_y < self.min_y_display:
line_end_pos_y_restricted = self.min_y_display
else:
line_end_pos_y_restricted = self.max_y_display
line_end_pos_x_restricted = y_line_to_x(line_end_pos_y_restricted)
line_end_pos_restricted = (line_end_pos_x_restricted, line_end_pos_y_restricted)
else:
line_end_pos_restricted = line_end_pos
if alpha_y > 0: # valid below line: forbidden above
constraint_polygon = [line_start_pos_restricted, line_end_pos_restricted]
for p in [self.corner_pos_bottom_right, self.corner_pos_top_right]:
if p[1] > line_end_pos[1]:
constraint_polygon.append(p)
for p in [self.corner_pos_top_left, self.corner_pos_bottom_left]:
if p[1] > line_start_pos[1]:
constraint_polygon.append(p)
else: # valid above line: forbidden below
constraint_polygon = [line_end_pos_restricted, line_start_pos_restricted]
for p in [self.corner_pos_top_left, self.corner_pos_bottom_left]:
if p[1] < line_start_pos[1]:
constraint_polygon.append(p)
for p in [self.corner_pos_bottom_right, self.corner_pos_top_right]:
if p[1] < line_end_pos[1]:
constraint_polygon.append(p)
line_start_pixel_col, line_start_pixel_row | |
StringIO()
rootObj.export(oStreamString, 0, name_="XSDataInputBioSaxsISPyB_HPLCv1_0")
oStreamString.close()
return rootObj
parseString = staticmethod(parseString)
# Static method for parsing a file
def parseFile(_inFilePath):
    """Parse an XML file into an XSDataInputBioSaxsISPyB_HPLCv1_0 instance."""
    document = minidom.parse(_inFilePath)
    instance = XSDataInputBioSaxsISPyB_HPLCv1_0()
    instance.build(document.documentElement)
    return instance

parseFile = staticmethod(parseFile)
# end class XSDataInputBioSaxsISPyB_HPLCv1_0
class XSDataInputBioSaxsISPyBv1_0(XSDataInput):
"""Input class for populating ISPyB"""
def __init__(
    self,
    configuration=None,
    densityPlot=None,
    kratkyPlot=None,
    guinierPlot=None,
    scatterPlot=None,
    averageSample=None,
    bestBuffer=None,
    subtractedFilePath=None,
    sampleFrames=None,
    bufferFrames=None,
    averageFilePath=None,
    discardedFrames=None,
    averagedFrames=None,
    curves=None,
    frameMerged=None,
    frameAverage=None,
    volume=None,
    gnom=None,
    autoRg=None,
    sample=None,
):
    """Build the ISPyB input object, type-checking every argument.

    Scalar arguments accept None or an instance of the expected XSData
    class (checked by class name, the generated-code convention); list
    arguments accept None (stored as []) or a list.

    Raises BaseException with the generated-style message on a type mismatch.

    Bug fix: the original error paths formatted the message with
    self._<attr>.__class__.__name__ BEFORE the attribute was ever assigned,
    so a bad argument raised AttributeError instead of the intended
    message; the offending argument itself is now used.
    """
    XSDataInput.__init__(self, configuration)
    # Assignments kept in the original order.
    self._sample = self._checkedCtorArg("sample", sample, "XSDataBioSaxsSample")
    self._autoRg = self._checkedCtorArg("autoRg", autoRg, "XSDataAutoRg")
    self._gnom = self._checkedCtorArg("gnom", gnom, "XSDataGnom")
    self._volume = self._checkedCtorArg("volume", volume, "XSDataDoubleWithUnit")
    self._frameAverage = self._checkedCtorArg("frameAverage", frameAverage, "XSDataInteger")
    self._frameMerged = self._checkedCtorArg("frameMerged", frameMerged, "XSDataInteger")
    self._curves = self._checkedCtorList("curves", curves)
    self._averagedFrames = self._checkedCtorList("averagedFrames", averagedFrames)
    self._discardedFrames = self._checkedCtorList("discardedFrames", discardedFrames)
    self._averageFilePath = self._checkedCtorArg("averageFilePath", averageFilePath, "XSDataFile")
    self._bufferFrames = self._checkedCtorList("bufferFrames", bufferFrames)
    self._sampleFrames = self._checkedCtorList("sampleFrames", sampleFrames)
    self._subtractedFilePath = self._checkedCtorArg("subtractedFilePath", subtractedFilePath, "XSDataFile")
    self._bestBuffer = self._checkedCtorArg("bestBuffer", bestBuffer, "XSDataFile")
    self._averageSample = self._checkedCtorArg("averageSample", averageSample, "XSDataFile")
    self._scatterPlot = self._checkedCtorArg("scatterPlot", scatterPlot, "XSDataFile")
    self._guinierPlot = self._checkedCtorArg("guinierPlot", guinierPlot, "XSDataFile")
    self._kratkyPlot = self._checkedCtorArg("kratkyPlot", kratkyPlot, "XSDataFile")
    self._densityPlot = self._checkedCtorArg("densityPlot", densityPlot, "XSDataFile")

def _checkedCtorArg(self, name, value, expected):
    """Return value if None or of the expected class (by name);
    raise BaseException with the generated-style constructor message otherwise."""
    if value is None or value.__class__.__name__ == expected:
        return value
    raise BaseException(
        "ERROR! XSDataInputBioSaxsISPyBv1_0 constructor argument '%s' is not %s but %s"
        % (name, expected, value.__class__.__name__)
    )

def _checkedCtorList(self, name, value):
    """Return value if a list ([] when None); raise BaseException otherwise."""
    if value is None:
        return []
    if value.__class__.__name__ == "list":
        return value
    raise BaseException(
        "ERROR! XSDataInputBioSaxsISPyBv1_0 constructor argument '%s' is not list but %s"
        % (name, value.__class__.__name__)
    )
# Methods and properties for the 'sample' attribute
def getSample(self):
    """Return the stored sample (XSDataBioSaxsSample or None)."""
    return self._sample

def setSample(self, sample):
    """Set the sample after a class-name type check."""
    if sample is not None and sample.__class__.__name__ != "XSDataBioSaxsSample":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setSample argument is not XSDataBioSaxsSample but %s"
            % sample.__class__.__name__
        )
    self._sample = sample

def delSample(self):
    """Clear the stored sample."""
    self._sample = None

sample = property(getSample, setSample, delSample, "Property for sample")
# Methods and properties for the 'autoRg' attribute
def getAutoRg(self):
    """Return the stored autoRg result (XSDataAutoRg or None)."""
    return self._autoRg

def setAutoRg(self, autoRg):
    """Set the autoRg result after a class-name type check."""
    if autoRg is not None and autoRg.__class__.__name__ != "XSDataAutoRg":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setAutoRg argument is not XSDataAutoRg but %s"
            % autoRg.__class__.__name__
        )
    self._autoRg = autoRg

def delAutoRg(self):
    """Clear the stored autoRg result."""
    self._autoRg = None

autoRg = property(getAutoRg, setAutoRg, delAutoRg, "Property for autoRg")
# Methods and properties for the 'gnom' attribute
def getGnom(self):
    """Return the stored gnom result (XSDataGnom or None)."""
    return self._gnom

def setGnom(self, gnom):
    """Set the gnom result after a class-name type check."""
    if gnom is not None and gnom.__class__.__name__ != "XSDataGnom":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setGnom argument is not XSDataGnom but %s"
            % gnom.__class__.__name__
        )
    self._gnom = gnom

def delGnom(self):
    """Clear the stored gnom result."""
    self._gnom = None

gnom = property(getGnom, setGnom, delGnom, "Property for gnom")
# Methods and properties for the 'volume' attribute
def getVolume(self):
    """Return the stored volume (XSDataDoubleWithUnit or None)."""
    return self._volume

def setVolume(self, volume):
    """Set the volume after a class-name type check."""
    if volume is not None and volume.__class__.__name__ != "XSDataDoubleWithUnit":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setVolume argument is not XSDataDoubleWithUnit but %s"
            % volume.__class__.__name__
        )
    self._volume = volume

def delVolume(self):
    """Clear the stored volume."""
    self._volume = None

volume = property(getVolume, setVolume, delVolume, "Property for volume")
# Methods and properties for the 'frameAverage' attribute
def getFrameAverage(self):
    """Return the stored frameAverage (XSDataInteger or None)."""
    return self._frameAverage

def setFrameAverage(self, frameAverage):
    """Set frameAverage after a class-name type check."""
    if frameAverage is not None and frameAverage.__class__.__name__ != "XSDataInteger":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setFrameAverage argument is not XSDataInteger but %s"
            % frameAverage.__class__.__name__
        )
    self._frameAverage = frameAverage

def delFrameAverage(self):
    """Clear the stored frameAverage."""
    self._frameAverage = None

frameAverage = property(
    getFrameAverage, setFrameAverage, delFrameAverage, "Property for frameAverage"
)
# Methods and properties for the 'frameMerged' attribute
def getFrameMerged(self):
    """Return the stored frameMerged (XSDataInteger or None)."""
    return self._frameMerged

def setFrameMerged(self, frameMerged):
    """Set frameMerged after a class-name type check."""
    if frameMerged is not None and frameMerged.__class__.__name__ != "XSDataInteger":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setFrameMerged argument is not XSDataInteger but %s"
            % frameMerged.__class__.__name__
        )
    self._frameMerged = frameMerged

def delFrameMerged(self):
    """Clear the stored frameMerged."""
    self._frameMerged = None

frameMerged = property(
    getFrameMerged, setFrameMerged, delFrameMerged, "Property for frameMerged"
)
# Methods and properties for the 'curves' attribute
def getCurves(self):
    """Return the list of curve files."""
    return self._curves

def setCurves(self, curves):
    """Replace the curve list; None resets to an empty list."""
    if curves is None:
        self._curves = []
        return
    if curves.__class__.__name__ != "list":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setCurves argument is not list but %s"
            % curves.__class__.__name__
        )
    self._curves = curves

def delCurves(self):
    """Drop the curve list entirely (stores None, not [] -- generated-code behavior)."""
    self._curves = None

curves = property(getCurves, setCurves, delCurves, "Property for curves")

def addCurves(self, value):
    """Append one XSDataFile to the curve list."""
    if value is None:
        raise BaseException("ERROR! XSDataInputBioSaxsISPyBv1_0.addCurves argument is None")
    if value.__class__.__name__ != "XSDataFile":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.addCurves argument is not XSDataFile but %s"
            % value.__class__.__name__
        )
    self._curves.append(value)

def insertCurves(self, index, value):
    """Store an XSDataFile at the given position.

    NOTE(review): despite the name this OVERWRITES self._curves[index]
    (generated-code quirk); it does not shift elements like list.insert.

    Fix: the type-mismatch error message previously blamed 'addCurves';
    it now correctly names 'insertCurves'.
    """
    if index is None:
        raise BaseException("ERROR! XSDataInputBioSaxsISPyBv1_0.insertCurves argument 'index' is None")
    if value is None:
        raise BaseException("ERROR! XSDataInputBioSaxsISPyBv1_0.insertCurves argument 'value' is None")
    if value.__class__.__name__ != "XSDataFile":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.insertCurves argument is not XSDataFile but %s"
            % value.__class__.__name__
        )
    self._curves[index] = value
# Methods and properties for the 'averagedFrames' attribute
def getAveragedFrames(self):
    """Return the list of averaged-frame files."""
    return self._averagedFrames

def setAveragedFrames(self, averagedFrames):
    """Replace the list; None resets to an empty list."""
    if averagedFrames is None:
        self._averagedFrames = []
        return
    if averagedFrames.__class__.__name__ != "list":
        raise BaseException(
            "ERROR! XSDataInputBioSaxsISPyBv1_0.setAveragedFrames argument is not list but %s"
            % averagedFrames.__class__.__name__
        )
    self._averagedFrames = averagedFrames

def delAveragedFrames(self):
    """Drop the list entirely (stores None, not [] -- generated-code behavior)."""
    self._averagedFrames = None

averagedFrames = property(
    getAveragedFrames,
    setAveragedFrames,
    delAveragedFrames,
    "Property for averagedFrames",
)
def addAveragedFrames(self, value):
if value is None:
strMessage = (
"ERROR! XSDataInputBioSaxsISPyBv1_0.addAveragedFrames | |
as
# NULL in the db.
cdata = cdata.replace(np.nan, '', regex=True)
cdata = cdata.applymap(lambda x: str(x).strip() if len(str(x).strip()) else None)
return cdata
def get_data_after_colon(row, header):
    """Return the text following the first ':' in row[header], stripped, or None."""
    text = str(row[header])
    if ':' not in text:
        return None
    return text[text.find(':') + 1:].strip()
def get_data_before_colon(row, header):
    """Return the text before the first ':' in row[header], or the whole
    value when there is no colon (split()[0] covers both cases)."""
    return str(row[header]).split(':')[0]
def map_legal_entity_state_code(row):
    """Return the vendor state code for US records only, else None."""
    country = row['vendorcountrycode']
    if country and str(country).upper() in ("USA", "UNITED STATES"):
        return str(row['vendor_state_code'])
    return None
def map_legal_entity_state_descrip(row):
    """For non-US (or missing) country codes, surface the state code as
    the state description; for US records return None."""
    country = row['vendorcountrycode']
    if country and str(country).upper() in ("USA", "UNITED STATES"):
        return None
    return str(row['vendor_state_code'])
def map_type(row, mappings):
    """Look up the contract action type in mappings; None when absent."""
    return mappings.get(str(row['contractactiontype']))
def map_type_description(row, mappings):
    """Return the first space-separated token of the contract action type
    when the full value is a known mapping key, else None."""
    action = str(row['contractactiontype'])
    if action in mappings:
        return action.split(' ')[0]
    return None
def map_type_manual(row, header, mappings):
    """Return the (pre-colon, stripped) value when it is a recognized
    mapping key, else None."""
    value = str(row[header])
    if ':' in value:
        value = value.split(':')[0].strip()
    return value if value in mappings else None
def map_description_manual(row, header, mappings):
    """Map the (pre-colon, stripped) value through mappings, defaulting to
    its uppercase form when unrecognized."""
    value = str(row[header])
    if ':' in value:
        value = value.split(':')[0].strip()
    return mappings.get(value, value.upper())
def map_agency_code(row, header, sub_tier_list):
    """Resolve a sub-tier agency code to its FREC or CGAC top-tier code;
    '999' when the code is unknown."""
    try:
        sub_tier = sub_tier_list[str(row[header])]
    except KeyError:
        return '999'
    if sub_tier.is_frec:
        return sub_tier.frec.frec_code
    return sub_tier.cgac.cgac_code
def map_agency_name(row, header, sub_tier_list):
    """Top-tier agency name (FREC or CGAC) for the sub-tier code, or None
    when the code is unknown."""
    try:
        sub_tier = sub_tier_list[str(row[header])]
    except KeyError:
        return None
    agency = sub_tier.frec if sub_tier.is_frec else sub_tier.cgac
    return agency.agency_name
def map_sub_tier_name(row, header, sub_tier_list):
    """Sub-tier agency name for the code, or None when unknown."""
    entry = sub_tier_list.get(str(row[header]))
    return entry.sub_tier_agency_name if entry is not None else None
def map_naics(row, header, naics_list):
    """NAICS description for the code, or None when unknown."""
    return naics_list.get(str(row[header]))
def map_pulled_from(row, award_contract, idv):
    """Classify the contract action type as 'award' or 'IDV' (None if neither).

    Award types take precedence when a value appears in both lists.
    """
    action = str(row['contractactiontype'])
    if action in award_contract:
        return 'award'
    return 'IDV' if action in idv else None
def format_date(row, header):
    """Convert 'mm/dd/yyyy' to 'yyyy-mm-dd 00:00:00'.

    Returns None for the 01/01/1900 placeholder or anything that does not
    split into exactly three slash-separated parts.
    """
    raw = str(row[header])
    parts = raw.split('/')
    if raw == '01/01/1900' or len(parts) != 3:
        return None
    month, day, year = parts
    return '{0}-{1}-{2} 00:00:00'.format(year, month, day)
def create_unique_key(row):
    """Build the detached-award unique transaction key by joining six
    identifier fields with underscores; missing/'nan' values (and, for
    IDV records, any field outside the IDV subset) become '-none-'.

    Bug fix: the key list contained a corrupted placeholder ('<KEY> '
    implicitly concatenated with 'transactionnumber'), which produced a
    bogus five-element key referencing a nonexistent column. Restored
    'idvpiid' as the fifth field, matching the field set used by
    create_unique_award_key.
    """
    key_list = ['agencyid', 'idvagencyid', 'piid', 'modnumber', 'idvpiid', 'transactionnumber']
    # For IDV records only these identifiers participate; the rest are '-none-'.
    idv_list = ['agencyid', 'piid', 'modnumber']
    parts = []
    for item in key_list:
        value = row[item]
        if value and str(value) != 'nan' and (row['pulled_from'] == 'award' or item in idv_list):
            parts.append(str(value))
        else:
            parts.append('-none-')
    return '_'.join(parts)
def create_unique_award_key(row):
    """Build the award-level unique key, uppercased:
    CONT_AWD_<piid>_<agencyid>_<idvpiid>_<idvagencyid> for awards,
    CONT_IDV_<piid>_<agencyid> for IDVs; missing/'nan' fields -> '-none-'."""
    if row['pulled_from'] == 'award':
        prefix, fields = 'CONT_AWD', ['piid', 'agencyid', 'idvpiid', 'idvagencyid']
    else:
        prefix, fields = 'CONT_IDV', ['piid', 'agencyid']
    pieces = [prefix]
    for field in fields:
        value = row[field]
        pieces.append(value if value and str(value) != 'nan' else '-none-')
    return '_'.join(pieces).upper()
def main():
sess = GlobalDB.db().session
now = datetime.datetime.now()
parser = argparse.ArgumentParser(description='Pull data from the FPDS Atom Feed.')
parser.add_argument('-a', '--all', help='Clear out the database and get historical data', action='store_true')
parser.add_argument('-l', '--latest', help='Get by last_mod_date stored in DB', action='store_true')
parser.add_argument('-d', '--delivery', help='Used in conjunction with -a to indicate delivery order feed',
action='store_true')
parser.add_argument('-o', '--other',
help='Used in conjunction with -a to indicate all feeds other than delivery order',
action='store_true')
parser.add_argument('-f', '--files', help='Load historical data from files', action='store_true')
parser.add_argument('-sf', '--subfolder',
help='Used in conjunction with -f to indicate which Subfolder to load files from',
nargs="+", type=str)
parser.add_argument('-da', '--dates', help='Used in conjunction with -l to specify dates to gather updates from.'
'Should have 2 arguments, first and last day, formatted YYYY/mm/dd',
nargs=2, type=str)
parser.add_argument('-del', '--delete', help='Used to only run the delete feed. First argument must be "both", '
'"idv", or "award". The second and third arguments must be the first '
'and last day to run the feeds for, formatted YYYY/mm/dd',
nargs=3, type=str)
args = parser.parse_args()
award_types_award = ["BPA Call", "Definitive Contract", "Purchase Order", "Delivery Order"]
award_types_idv = ["GWAC", "BOA", "BPA", "FSS", "IDC"]
metrics_json = {
'script_name': 'pull_fpds_data.py',
'start_time': str(now),
'records_received': 0,
'deletes_received': 0,
'records_deleted': 0,
'deleted_award_records_file': '',
'deleted_idv_records_file': '',
'start_date': '',
'end_date': ''
}
# get and create list of sub tier agencies
sub_tiers = sess.query(SubTierAgency).all()
sub_tier_list = {}
for sub_tier in sub_tiers:
sub_tier_list[sub_tier.sub_tier_agency_code] = sub_tier
# get and create list of country code -> country name mappings.
countries = sess.query(CountryCode).all()
country_list = {}
for country in countries:
country_list[country.country_code] = country.country_name
# get and create list of state code -> state name mappings. Prime the county lists with state codes
county_by_name = {}
county_by_code = {}
state_code_list = {}
state_codes = sess.query(States.state_code, func.upper(States.state_name).label('state_name')).all()
for state_code in state_codes:
county_by_name[state_code.state_code] = {}
county_by_code[state_code.state_code] = {}
state_code_list[state_code.state_code] = state_code.state_name
# Fill the county lists with data (code -> name mappings and name -> code mappings)
county_codes = sess.query(CountyCode.county_number, CountyCode.state_code,
func.upper(CountyCode.county_name).label('county_name')).all()
for county_code in county_codes:
# we don't want any "(CA)" endings, so strip those
county_name = county_code.county_name.replace(' (CA)', '').strip()
# we want all the counties in our by-code lookup because we'd be using this table anyway for derivations
county_by_code[county_code.state_code][county_code.county_number] = county_name
# if the county name has only letters/spaces then we want it in our by-name lookup, the rest have the potential
# to be different from the FPDS feed
if re.match('^[A-Z\s]+$', county_code.county_name):
county_by_name[county_code.state_code][county_name] = county_code.county_number
# get and create list of duns -> exec comp data mappings
exec_comp_dict = {}
duns_list = sess.query(DUNS).filter(DUNS.high_comp_officer1_full_na.isnot(None)).all()
for duns in duns_list:
exec_comp_dict[duns.awardee_or_recipient_uniqu] = \
{'officer1_name': duns.high_comp_officer1_full_na, 'officer1_amt': duns.high_comp_officer1_amount,
'officer2_name': duns.high_comp_officer2_full_na, 'officer2_amt': duns.high_comp_officer2_amount,
'officer3_name': duns.high_comp_officer3_full_na, 'officer3_amt': duns.high_comp_officer3_amount,
'officer4_name': duns.high_comp_officer4_full_na, 'officer4_amt': duns.high_comp_officer4_amount,
'officer5_name': duns.high_comp_officer5_full_na, 'officer5_amt': duns.high_comp_officer5_amount}
del duns_list
if args.all:
if (not args.delivery and not args.other) or (args.delivery and args.other):
logger.error("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
raise ValueError("When using the -a flag, please include either -d or -o "
"(but not both) to indicate which feeds to read in")
logger.info("Starting at: %s", str(datetime.datetime.now()))
if args.other:
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, metrics=metrics_json)
for award_type in award_types_award:
if award_type != "Delivery Order":
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
elif args.delivery:
get_data("award", "Delivery Order", now, sess, sub_tier_list, county_by_name, county_by_code,
state_code_list, country_list, exec_comp_dict, metrics=metrics_json)
last_update = sess.query(FPDSUpdate).one_or_none()
if last_update:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
else:
sess.add(FPDSUpdate(update_date=now))
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.latest:
logger.info("Starting at: %s", str(datetime.datetime.now()))
last_update_obj = sess.query(FPDSUpdate).one_or_none()
# update_date can't be null because it's being used as the PK for the table, so it can only exist or
# there are no rows in the table. If there are no rows, act like it's an "add all"
if not last_update_obj:
logger.error(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
raise ValueError(
"No last_update date present, please run the script with the -a flag to generate an initial dataset")
last_update = last_update_obj.update_date
start_date = None
end_date = None
if args.dates:
start_date = args.dates[0]
end_date = args.dates[1]
for award_type in award_types_idv:
get_data("IDV", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
for award_type in award_types_award:
get_data("award", award_type, now, sess, sub_tier_list, county_by_name, county_by_code, state_code_list,
country_list, exec_comp_dict, last_update, start_date=start_date, end_date=end_date,
metrics=metrics_json)
# We also need to process the delete feed
get_delete_data("IDV", now, sess, last_update, start_date, end_date, metrics=metrics_json)
get_delete_data("award", now, sess, last_update, start_date, end_date, metrics=metrics_json)
if not start_date and not end_date:
sess.query(FPDSUpdate).update({"update_date": now}, synchronize_session=False)
sess.commit()
logger.info("Ending at: %s", str(datetime.datetime.now()))
elif args.delete:
del_type = args.delete[0]
if del_type == 'award':
del_awards = True
del_idvs = False
elif del_type == 'idv':
del_awards = False
del_idvs = True
elif del_type == 'both':
del_awards = True
del_idvs = True
else:
logger.error("Delete argument must be \"idv\", \"award\", or \"both\"")
raise ValueError("Delete argument must be \"idv\", \"award\", or \"both\"")
if del_idvs:
get_delete_data("IDV", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
if del_awards:
get_delete_data("award", now, sess, now, args.delete[1], args.delete[2], metrics=metrics_json)
sess.commit()
elif args.files:
logger.info("Starting file loads at: %s", str(datetime.datetime.now()))
max_year = 2015
subfolder = None
if args.subfolder:
if len(args.subfolder) != 1:
logger.error("When using the -sf flag, please enter just one string for the folder name")
raise ValueError("When using the -sf flag, please enter just | |
# clever/src/simple_offboard.py
#!/usr/bin/env python
from __future__ import division
import rospy
from geometry_msgs.msg import TransformStamped, PoseStamped, Point, PointStamped, Vector3, \
Vector3Stamped, TwistStamped, QuaternionStamped
from sensor_msgs.msg import NavSatFix, BatteryState
import tf2_ros
import tf2_geometry_msgs
from mavros_msgs.msg import PositionTarget, AttitudeTarget, State
from mavros_msgs.srv import CommandBool, SetMode
from threading import Lock
import math
from global_local import global_to_local
from util import euler_from_orientation, vector3_from_point, orientation_from_euler
from std_srvs.srv import Trigger
from clever import srv
rospy.init_node('simple_offboard')
# TF2 stuff: broadcasters publish (static) transforms; the buffer + listener
# cache incoming transforms so poses can be converted between frames.
tf_broadcaster = tf2_ros.TransformBroadcaster()
static_tf_broadcaster = tf2_ros.StaticTransformBroadcaster()
tf_buffer = tf2_ros.Buffer()
tf_listener = tf2_ros.TransformListener(tf_buffer)
# Setpoint publishers consumed by MAVROS (queue_size=1: only the newest setpoint matters).
position_pub = rospy.Publisher('/mavros/setpoint_raw/local', PositionTarget, queue_size=1)
attitude_pub = rospy.Publisher('/mavros/setpoint_raw/attitude', AttitudeTarget, queue_size=1)
# Republishes the current navigation target for monitoring/debugging.
target_pub = rospy.Publisher('~target', PoseStamped, queue_size=1)
# Persistent proxies keep the service connection open between calls.
arming = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool, persistent=True)
set_mode = rospy.ServiceProxy('/mavros/set_mode', SetMode, persistent=True)
# Latest telemetry, filled by the subscriber callbacks below (None until first message).
pose = None
global_position = None
velocity = None
state = None
battery = None
def pose_update(msg):
    """Cache the latest local-position pose."""
    global pose
    pose = msg
def global_position_update(msg):
    """Cache the latest GPS fix."""
    global global_position
    global_position = msg
def velocity_update(msg):
    """Cache the latest velocity message."""
    global velocity
    velocity = msg
def state_update(msg):
    """Cache the latest FCU state (mode, armed flag, ...)."""
    global state
    state = msg
def battery_update(msg):
    """Cache the latest battery state."""
    global battery
    battery = msg
# Wire MAVROS telemetry topics to the caching callbacks above.
rospy.Subscriber('/mavros/state', State, state_update)
rospy.Subscriber('/mavros/local_position/pose', PoseStamped, pose_update)
rospy.Subscriber('/mavros/local_position/velocity', TwistStamped, velocity_update)
rospy.Subscriber('/mavros/global_position/global', NavSatFix, global_position_update)
rospy.Subscriber('/mavros/battery', BatteryState, battery_update)
# Short aliases for the setpoint message classes.
PT = PositionTarget
AT = AttitudeTarget
# Behaviour flags and timeouts (ROS private parameters with defaults).
AUTO_OFFBOARD = rospy.get_param('~auto_offboard', True)
AUTO_ARM = AUTO_OFFBOARD and rospy.get_param('~auto_arm', True)
OFFBOARD_TIMEOUT = rospy.Duration(rospy.get_param('~offboard_timeout', 3))
ARM_TIMEOUT = rospy.Duration(rospy.get_param('~arm_timeout', 5))
LOCAL_POSITION_TIMEOUT = rospy.Duration(rospy.get_param('~local_position_timeout', 0.5))
# NOTE(review): wrapping a boolean default in rospy.Duration looks unintended --
# the parameter reads as a flag, not a time span; confirm before relying on it.
NAVIGATE_AFTER_ARMED = rospy.Duration(rospy.get_param('~navigate_after_armed', True))
TRANSFORM_TIMEOUT = rospy.Duration(rospy.get_param('~transform_timeout', 3))
SETPOINT_RATE = rospy.get_param('~setpoint_rate', 30)
LOCAL_FRAME = rospy.get_param('mavros/local_position/frame_id', 'local_origin')
LAND_MODE = rospy.get_param('~land_mode', 'AUTO.LAND')
LAND_TIMEOUT = rospy.Duration(rospy.get_param('~land_timeout', 2))
DEFAULT_SPEED = rospy.get_param('~default_speed', 0.5)
def offboard_and_arm():
    """Switch the FCU to OFFBOARD and arm it, honoring the AUTO_* parameters.

    Polls the cached `state` until each step is confirmed; raises if a
    confirmation does not arrive within its timeout.
    """
    if AUTO_OFFBOARD and state.mode != 'OFFBOARD':
        rospy.sleep(.3)
        rospy.loginfo('Switch mode to OFFBOARD')
        res = set_mode(base_mode=0, custom_mode='OFFBOARD')
        deadline = rospy.get_rostime() + OFFBOARD_TIMEOUT
        while state.mode != 'OFFBOARD':
            if rospy.get_rostime() > deadline:
                raise Exception('OFFBOARD request timed out')
            rospy.sleep(0.1)
    if AUTO_ARM and not state.armed:
        rospy.loginfo('Arming')
        res = arming(True)
        deadline = rospy.get_rostime() + ARM_TIMEOUT
        while not state.armed:
            if rospy.get_rostime() > deadline:
                raise Exception('Arming timed out')
            rospy.sleep(0.1)
        return True
# Reusable message objects, mutated in place by get_publisher_and_message().
ps = PoseStamped()
vs = Vector3Stamped()
pt = PositionTarget()
at = AttitudeTarget()
# Duration of the braking phase at the end of a navigation; zero disables braking.
BRAKE_TIME = rospy.Duration(0)
def get_navigate_setpoint(stamp, start, finish, start_stamp, speed):
    """Return the intermediate position setpoint at time `stamp` for a
    constant-speed straight-line flight from `start` to `finish`.

    `k` is the progress fraction along the line; when BRAKE_TIME is non-zero
    the last stretch decelerates smoothly instead of stopping abruptly.
    """
    distance = math.sqrt((finish.z - start.z)**2 + (finish.x - start.x)**2 + (finish.y - start.y)**2)
    # BUG FIX: a zero-length flight made `time` a zero Duration and the
    # progress computation below divided by zero; return the target directly.
    if distance == 0:
        return Point(finish.x, finish.y, finish.z)
    time = rospy.Duration(distance / speed)
    k = (stamp - start_stamp) / time
    time_left = start_stamp + time - stamp
    if BRAKE_TIME and time_left < BRAKE_TIME:
        # time to brake
        time_before_braking = time - BRAKE_TIME
        brake_time_passed = (stamp - start_stamp - time_before_braking)
        if brake_time_passed > 2 * BRAKE_TIME:
            # finish
            k = 1
        else:
            # brake! (quadratic deceleration profile)
            k_before_braking = time_before_braking / time
            k_after_braking = (speed * brake_time_passed.to_sec() - brake_time_passed.to_sec() ** 2 * speed / 4 / BRAKE_TIME.to_sec()) / distance
            k = k_before_braking + k_after_braking
    # Clamp at the target once it is reached.
    k = min(k, 1)
    p = Point()
    p.x = start.x + (finish.x - start.x) * k
    p.y = start.y + (finish.y - start.y) * k
    p.z = start.z + (finish.z - start.z) * k
    return p
def get_publisher_and_message(req, stamp, continued=True, update_frame=True):
    """Build the setpoint message matching the type of the service request.

    Returns a (publisher, message) tuple.  The shared ps/vs/pt/at message
    objects are mutated in place.  `continued` is False on the first call for
    a request (it latches the navigation start point); `update_frame`
    re-resolves the target frame transform on every call.
    """
    ps.header.stamp = stamp
    vs.header.stamp = stamp
    if isinstance(req, (srv.NavigateRequest, srv.NavigateGlobalRequest)):
        global current_nav_start, current_nav_start_stamp, current_nav_finish
        if update_frame:
            # Transform the requested target into the local frame.
            ps.header.frame_id = req.frame_id or LOCAL_FRAME
            ps.pose.position = Point(getattr(req, 'x', 0), getattr(req, 'y', 0), req.z)
            ps.pose.orientation = orientation_from_euler(0, 0, req.yaw, axes='sxyz')
            current_nav_finish = tf_buffer.transform(ps, LOCAL_FRAME, TRANSFORM_TIMEOUT)
            if isinstance(req, srv.NavigateGlobalRequest):
                # Recalculate x and y from lat and lon
                current_nav_finish.pose.position.x, current_nav_finish.pose.position.y = \
                    global_to_local(req.lat, req.lon)
        if not continued:
            # First call for this request: latch the start of the line.
            current_nav_start = pose.pose.position
            current_nav_start_stamp = stamp
        if NAVIGATE_AFTER_ARMED and not state.armed:
            # Hold progress along the line until the copter is armed.
            current_nav_start_stamp = stamp
        setpoint = get_navigate_setpoint(stamp, current_nav_start, current_nav_finish.pose.position,
                                         current_nav_start_stamp, req.speed)
        # NaN yaw means "control yaw rate instead of yaw".
        yaw_rate_flag = math.isnan(req.yaw)
        msg = pt
        msg.coordinate_frame = PT.FRAME_LOCAL_NED
        msg.type_mask = PT.IGNORE_VX + PT.IGNORE_VY + PT.IGNORE_VZ + \
                        PT.IGNORE_AFX + PT.IGNORE_AFY + PT.IGNORE_AFZ + \
                        (PT.IGNORE_YAW if yaw_rate_flag else PT.IGNORE_YAW_RATE)
        msg.position = setpoint
        msg.yaw = euler_from_orientation(current_nav_finish.pose.orientation, 'sxyz')[2]
        msg.yaw_rate = req.yaw_rate
        return position_pub, msg
    elif isinstance(req, (srv.SetPositionRequest, srv.SetPositionGlobalRequest)):
        # Direct position setpoint (no interpolation along a line).
        ps.header.frame_id = req.frame_id or LOCAL_FRAME
        ps.pose.position = Point(getattr(req, 'x', 0), getattr(req, 'y', 0), req.z)
        ps.pose.orientation = orientation_from_euler(0, 0, req.yaw)
        pose_local = tf_buffer.transform(ps, LOCAL_FRAME, TRANSFORM_TIMEOUT)
        if isinstance(req, srv.SetPositionGlobalRequest):
            pose_local.pose.position.x, pose_local.pose.position.y = global_to_local(req.lat, req.lon)
        yaw_rate_flag = math.isnan(req.yaw)
        msg = pt
        msg.coordinate_frame = PT.FRAME_LOCAL_NED
        msg.type_mask = PT.IGNORE_VX + PT.IGNORE_VY + PT.IGNORE_VZ + \
                        PT.IGNORE_AFX + PT.IGNORE_AFY + PT.IGNORE_AFZ + \
                        (PT.IGNORE_YAW if yaw_rate_flag else PT.IGNORE_YAW_RATE)
        msg.position = pose_local.pose.position
        msg.yaw = euler_from_orientation(pose_local.pose.orientation, 'sxyz')[2]
        msg.yaw_rate = req.yaw_rate
        return position_pub, msg
    elif isinstance(req, srv.SetVelocityRequest):
        # Velocity setpoint: transform both the vector and the yaw pose.
        vs.vector = Vector3(req.vx, req.vy, req.vz)
        vs.header.frame_id = req.frame_id or LOCAL_FRAME
        ps.header.frame_id = req.frame_id or LOCAL_FRAME
        ps.pose.orientation = orientation_from_euler(0, 0, req.yaw)
        pose_local = tf_buffer.transform(ps, LOCAL_FRAME, TRANSFORM_TIMEOUT)
        vector_local = tf_buffer.transform(vs, LOCAL_FRAME, TRANSFORM_TIMEOUT)
        yaw_rate_flag = math.isnan(req.yaw)
        msg = pt
        msg.coordinate_frame = PT.FRAME_LOCAL_NED
        msg.type_mask = PT.IGNORE_PX + PT.IGNORE_PY + PT.IGNORE_PZ + \
                        PT.IGNORE_AFX + PT.IGNORE_AFY + PT.IGNORE_AFZ + \
                        (PT.IGNORE_YAW if yaw_rate_flag else PT.IGNORE_YAW_RATE)
        msg.velocity = vector_local.vector
        msg.yaw = euler_from_orientation(pose_local.pose.orientation, 'sxyz')[2]
        msg.yaw_rate = req.yaw_rate
        return position_pub, msg
    elif isinstance(req, srv.SetAttitudeRequest):
        # Attitude + thrust setpoint.
        ps.header.frame_id = req.frame_id or LOCAL_FRAME
        ps.pose.orientation = orientation_from_euler(req.roll, req.pitch, req.yaw)
        pose_local = tf_buffer.transform(ps, LOCAL_FRAME, TRANSFORM_TIMEOUT)
        msg = at
        msg.orientation = pose_local.pose.orientation
        msg.thrust = req.thrust
        msg.type_mask = AT.IGNORE_YAW_RATE + AT.IGNORE_PITCH_RATE + AT.IGNORE_ROLL_RATE
        return attitude_pub, msg
    elif isinstance(req, srv.SetRatesRequest):
        # Body-rates + thrust setpoint.
        msg = at
        msg.thrust = req.thrust
        msg.type_mask = AT.IGNORE_ATTITUDE
        msg.body_rate.x = req.roll_rate
        msg.body_rate.y = req.pitch_rate
        msg.body_rate.z = req.yaw_rate
        return attitude_pub, msg
# Current setpoint stream: publisher/message/request republished by the main
# loop; the nav_* variables hold the latched navigation line.
current_pub = None
current_msg = None
current_req = None
current_nav_start = None
current_nav_finish = None
current_nav_start_stamp = None
# Protects the current_* variables between the service handler and the loop.
handle_lock = Lock()
def handle(req):
    """Common handler for all setpoint services.

    Validates the request, publishes the first setpoint (the main loop keeps
    republishing it) and optionally switches to OFFBOARD/arms.  Returns a
    response dict with 'success'/'message' fields.
    """
    global current_pub, current_msg, current_req
    if not state or not state.connected:
        rospy.logwarn('No connection to the FCU')
        return {'message': 'No connection to the FCU'}
    if isinstance(req, (srv.NavigateRequest, srv.NavigateGlobalRequest)):
        if req.speed < 0:
            # BUG FIX: the speed value was missing from the log call, so the
            # warning was logged with a literal '%s'.
            rospy.logwarn('Navigate speed must be positive, %s passed', req.speed)
            return {'message': 'Navigate speed must be positive, %s passed' % req.speed}
        elif req.speed == 0:
            req.speed = DEFAULT_SPEED
    if isinstance(req, (srv.NavigateRequest, srv.NavigateGlobalRequest)) and \
            (pose is None or rospy.get_rostime() - pose.header.stamp > LOCAL_POSITION_TIMEOUT):
        rospy.logwarn('No local position')
        return {'message': 'No local position'}
    # BUG FIX: getattr(req, 'yaw') had no default and raised AttributeError for
    # requests without a yaw field (e.g. SetRates); NaN means "yaw not set".
    if getattr(req, 'yaw_rate', 0) != 0 and not math.isnan(getattr(req, 'yaw', float('nan'))):
        rospy.logwarn('Yaw value should be NaN for setting yaw rate')
        return {'message': 'Yaw value should be NaN for setting yaw rate'}
    if math.isnan(getattr(req, 'yaw', 0)) and math.isnan(getattr(req, 'yaw_rate', 0)):
        rospy.logwarn('Both yaw and yaw_rate cannot be NaN')
        return {'message': 'Both yaw and yaw_rate cannot be NaN'}
    try:
        with handle_lock:
            stamp = rospy.get_rostime()
            current_req = req
            current_pub, current_msg = get_publisher_and_message(req, stamp, False)
            rospy.loginfo('Topic: %s, message: %s', current_pub.name, current_msg)
            current_msg.header.stamp = stamp
            current_pub.publish(current_msg)
            if req.auto_arm:
                offboard_and_arm()
            else:
                # Without auto_arm the copter must already be armed in OFFBOARD.
                if state.mode != 'OFFBOARD':
                    return {'message': 'Copter is not in OFFBOARD mode, use auto_arm?'}
                if not state.armed:
                    return {'message': 'Copter is not armed, use auto_arm?'}
            return {'success': True}
    except Exception as e:
        rospy.logerr(str(e))
        return {'success': False, 'message': str(e)}
def land(req):
    """Trigger service: switch the FCU to the configured landing mode."""
    if not state or not state.connected:
        rospy.logwarn('No connection to the FCU')
        return {'message': 'No connection to the FCU'}
    rospy.loginfo('Set %s mode', LAND_MODE)
    res = set_mode(custom_mode=LAND_MODE)
    if not res.mode_sent:
        return {'message': 'Cannot send %s mode request' % LAND_MODE}
    deadline = rospy.get_rostime() + LAND_TIMEOUT
    while state.mode != LAND_MODE:
        if rospy.get_rostime() > deadline:
            return {'message': '%s mode request timed out' % LAND_MODE}
        rospy.sleep(0.1)
    return {'success': True}
def release(req):
    """Trigger service: stop republishing the current setpoint."""
    global current_pub
    current_pub = None
    rospy.loginfo('simple_offboard: release')
    return {'success': True}
# One service per setpoint type; all share the `handle` callback.
rospy.Service('navigate', srv.Navigate, handle)
rospy.Service('navigate_global', srv.NavigateGlobal, handle)
rospy.Service('set_position', srv.SetPosition, handle)
rospy.Service('set_position_global', srv.SetPositionGlobal, handle)
rospy.Service('set_velocity', srv.SetVelocity, handle)
rospy.Service('set_attitude', srv.SetAttitude, handle)
rospy.Service('set_rates', srv.SetRates, handle)
rospy.Service('land', Trigger, land)
rospy.Service('release', Trigger, release)
def get_telemetry(req):
    """Service handler: return the full telemetry snapshot in `req.frame_id`
    (default LOCAL_FRAME).  Fields with no data available stay NaN.
    """
    res = {
        'frame_id': req.frame_id or LOCAL_FRAME,
        'x': float('nan'),
        'y': float('nan'),
        'z': float('nan'),
        'lat': float('nan'),
        'lon': float('nan'),
        'alt': float('nan'),
        'vx': float('nan'),
        'vy': float('nan'),
        'vz': float('nan'),
        'pitch': float('nan'),
        'roll': float('nan'),
        'yaw': float('nan'),
        'pitch_rate': float('nan'),
        'roll_rate': float('nan'),
        'yaw_rate': float('nan'),
        'voltage': float('nan'),
        'cell_voltage': float('nan')
    }
    frame_id = req.frame_id or LOCAL_FRAME
    stamp = rospy.get_rostime()
    if pose:
        p = tf_buffer.transform(pose, frame_id, TRANSFORM_TIMEOUT)
        res['x'] = p.pose.position.x
        res['y'] = p.pose.position.y
        res['z'] = p.pose.position.z
        # Calculate roll pitch and yaw as Tait-Bryan angles, order z-y-x
        res['yaw'], res['pitch'], res['roll'] = euler_from_orientation(p.pose.orientation, axes='rzyx')
    if velocity:
        # Linear velocity must be rotated into the requested frame;
        # angular rates are reported in the body frame as-is.
        v = Vector3Stamped()
        v.header.stamp = velocity.header.stamp
        v.header.frame_id = velocity.header.frame_id
        v.vector = velocity.twist.linear
        linear = tf_buffer.transform(v, frame_id, TRANSFORM_TIMEOUT)
        res['vx'] = linear.vector.x
        res['vy'] = linear.vector.y
        res['vz'] = linear.vector.z
        res['yaw_rate'] = velocity.twist.angular.z
        res['pitch_rate'] = velocity.twist.angular.y
        res['roll_rate'] = velocity.twist.angular.x
    # Only report a global fix if it is fresh (less than 5 s old).
    if global_position and stamp - global_position.header.stamp < rospy.Duration(5):
        res['lat'] = global_position.latitude
        res['lon'] = global_position.longitude
        res['alt'] = global_position.altitude
    if state:
        res['connected'] = state.connected
        res['armed'] = state.armed
        res['mode'] = state.mode
    if battery:
        res['voltage'] = battery.voltage
        # BUG FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
        # only a missing/empty cell_voltage array is a best-effort failure here.
        try:
            res['cell_voltage'] = battery.cell_voltage[0]
        except (IndexError, TypeError):
            pass
    return res
# Telemetry query service.
rospy.Service('get_telemetry', srv.GetTelemetry, get_telemetry)
rospy.loginfo('simple_offboard inited')
def start_loop():
global current_pub, current_msg, current_req
r = rospy.Rate(SETPOINT_RATE)
while not rospy.is_shutdown():
with handle_lock:
if current_pub is not None:
try:
stamp = rospy.get_rostime()
if getattr(current_req, 'update_frame', False) or \
isinstance(current_req, (srv.NavigateRequest, srv.NavigateGlobalRequest)):
current_pub, current_msg = get_publisher_and_message(current_req, stamp, True,
getattr(current_req, 'update_frame', False))
current_msg.header.stamp = stamp
current_pub.publish(current_msg)
# For monitoring
if isinstance(current_msg, PositionTarget):
p = PoseStamped()
p.header.frame_id = LOCAL_FRAME
p.header.stamp = stamp
p.pose.position = current_msg.position
p.pose.orientation = orientation_from_euler(0, 0, current_msg.yaw + math.pi / 2)
target_pub.publish(p)
except Exception as | |
# coding: utf-8
"""This module is a wrapper to ``argparse`` module. It allow to generate a
command-line from a predefined directory (ie: a YAML, JSON, ... file)."""
import os
import re
import sys
import importlib
import copy
import pydoc
import argparse
from collections import OrderedDict
#
# Constants.
#
# Get current module.
_SELF = sys.modules[__name__]
# Map every builtin name to the builtin itself; used to resolve '__TYPE__'
# markers found in configuration values (see _set_builtin).
_BUILTINS = sys.modules['builtins']
TYPES = {builtin: getattr(_BUILTINS, builtin) for builtin in vars(_BUILTINS)}
TYPES['suppress'] = argparse.SUPPRESS
# Registry of custom argparse actions (name -> action).
ACTIONS = {}
# Registry of argcomplete completers (name -> completer).
COMPLETERS = {}
# Keywords (argparse and clg): for each configuration section, which keywords
# are forwarded to argparse, which are interpreted by clg itself, and which
# are applied after parsing ('post').
KEYWORDS = {
    'parsers': {'argparse': ['prog', 'usage', 'description', 'epilog', 'help',
                             'add_help', 'formatter_class', 'argument_default',
                             'conflict_handler', 'allow_abbrev', 'print_help'],
                'clg': ['anchors', 'subparsers', 'options', 'args', 'groups',
                        'exclusive_groups', 'execute', 'negative_value']},
    'subparsers': {'argparse': ['title', 'description', 'prog', 'help', 'metavar'],
                   'clg': ['required', 'parsers']},
    'groups': {'argparse': ['title', 'description'],
               'clg': ['options', 'args', 'exclusive_groups']},
    'exclusive_groups': {'argparse': ['required'],
                         'clg': ['options']},
    'options': {'argparse': ['action', 'nargs', 'const', 'default', 'choices',
                             'required', 'help', 'metavar', 'type', 'version'],
                'clg': ['short', 'completer'],
                'post': ['match', 'need', 'conflict']},
    'args': {'argparse': ['action', 'nargs', 'const', 'default', 'choices',
                          'required', 'help', 'metavar', 'type'],
             'clg': ['short', 'completer'],
             'post': ['match', 'need', 'conflict']},
    'execute': {'clg': ['module', 'file', 'function']}}
# Help command description: configuration of the built-in 'help' subcommand.
_HELP_PARSER = OrderedDict(
    {'help': {'help': "Print commands' tree with theirs descriptions.",
              'description': "Print commands' tree with theirs descriptions.",
              'options': {'page':{'short': 'p',
                                  'action': 'store_true',
                                  'help': 'page output'}}}})
# Error messages (grammar fixed: "need/theses/conflict" -> "needs/these/conflicts").
_INVALID_SECTION = "this section is not of type '{type}'"
_EMPTY_CONF = 'configuration is empty'
_INVALID_KEYWORD = "invalid keyword '{keyword}'"
_ONE_KEYWORDS = "this section needs (only) one of these keywords: '{keywords}'"
_MISSING_KEYWORD = "keyword '{keyword}' is missing"
_UNKNOWN_ARG = "unknown {type} '{arg}'"
_SHORT_ERR = 'this must be a single letter'
_NEED_ERR = "{type} '{arg}' needs {need_type} '{need_arg}'"
_NEED_VALUE_ERR = "{type} '{arg}' needs {need_type} '{need_arg}' with value '{need_value}'"
_CONFLICT_ERR = "{type} '{arg}' conflicts with {conflict_type} '{conflict_arg}'"
_CONFLICT_VALUE_ERR = ("{type} '{arg}' conflicts with value '{conflict_value}' "
                       "of {conflict_type} '{conflict_arg}'")
_MATCH_ERR = "value '{val}' of {type} '{arg}' does not match pattern '{pattern}'"
_FILE_ERR = "Unable to load file: {err}"
_LOAD_ERR = "Unable to load module: {err}"
# Argparse group-creation methods, keyed by configuration section name.
_GRP_METHODS = {'groups': 'add_argument_group',
                'exclusive_groups': 'add_mutually_exclusive_group'}
#
# Exceptions.
#
class CLGError(Exception):
    """Exception raised on configuration errors, carrying the path of the
    faulty section alongside the message."""

    def __init__(self, path, msg):
        Exception.__init__(self, msg)
        self.path = path
        self.msg = msg

    def __str__(self):
        return '/{0}: {1}'.format('/'.join(self.path), self.msg)
#
# Utils functions.
#
def _deepcopy(config):
    """Deep-copy *config*, additionally re-copying every nested dict.

    copy.deepcopy alone preserves aliasing created by YAML anchors (shared
    sub-dicts stay shared in the copy); since CLG deletes its own parameters
    from sections in place, aliased sections would lose them too.  Re-copying
    each nested dict separately breaks that sharing.
    """
    duplicate = copy.deepcopy(config)
    for key in duplicate:
        if isinstance(duplicate[key], dict):
            duplicate[key] = _deepcopy(duplicate[key])
    return duplicate
def _gen_parser(parser_conf, subparser=False):
    """Extract the keyword arguments for **argparse.ArgumentParser** from
    **parser_conf**; a subparser may additionally carry a 'help' keyword.
    'usage' is always forced to None here (handled separately)."""
    conf = {'usage': None,
            'formatter_class': getattr(
                argparse, parser_conf.get('formatter_class', 'HelpFormatter'))}
    for param, default in (('prog', None),
                           ('description', None),
                           ('epilog', None),
                           ('argument_default', None),
                           ('conflict_handler', 'error'),
                           ('add_help', True)):
        conf[param] = parser_conf.get(param, default)
    if subparser and 'help' in parser_conf:
        conf['help'] = parser_conf['help']
    return conf
def _get_args(parser_conf):
    """Collect all options and positional arguments of a parser
    configuration (groups included), mapped to (type, conf) tuples."""
    args = OrderedDict()
    for arg_type in ('options', 'args'):
        for arg, arg_conf in parser_conf.get(arg_type, {}).items():
            args[arg] = (arg_type, OrderedDict(arg_conf))
    for grp_type in ('groups', 'exclusive_groups'):
        for group in parser_conf.get(grp_type, {}):
            args.update(_get_args(group))
    return args
def _set_builtin(value):
    """Resolve '__NAME__' markers in configuration values to the matching
    builtin (or argparse.SUPPRESS); otherwise substitute '__FILE__' by the
    program's directory in strings.  Non-strings pass through unchanged."""
    if not isinstance(value, str):
        return value
    marker = re.search('^__([A-Z]*)__$', value)
    if marker is not None:
        return TYPES[marker.group(1).lower()]
    return value.replace('__FILE__', sys.path[0])
def _print_help(parser):
    """Handle the 'print_help' parameter of a (sub)command: monkey-patch the
    `_parse_known_args` method of the **parser** instance so that supplying
    no argument behaves as if --help had been passed.  Also manages the
    integration with ``argcomplete``, which monkey-patches the same method."""
    import types
    def _parse_known_args(self, arg_strings, namespace):
        # Manage argcomplete monkey patching: delegate to its own parser class.
        if self.__class__.__name__ == 'MonkeyPatchedIntrospectiveArgumentParser':
            from argcomplete.my_argparse import IntrospectiveArgumentParser
            return IntrospectiveArgumentParser._parse_known_args(self, arg_strings, namespace)
        # No arguments supplied: simulate --help.
        if not arg_strings:
            arg_strings = ['--help']
        return argparse.ArgumentParser._parse_known_args(self, arg_strings, namespace)
    parser._parse_known_args = types.MethodType(_parse_known_args, parser)
#
# Formatting functions.
#
def _format_usage(prog, usage):
    """Format a custom usage string, aligning continuation lines under the
    'usage: ' prefix (the trailing empty line of *usage* is dropped)."""
    indent = ' ' * len('usage: ')
    lines = [prog]
    for elt in usage.split('\n')[:-1]:
        lines.append('%s %s' % (indent, elt))
    return '\n'.join(lines)
def _format_optname(value):
    """Turn a configuration option name into its command-line form:
    underscores and spaces become dashes."""
    return re.sub('[_ ]', '-', value)
def _format_optdisplay(value, conf):
    """Display form of an option for error messages: '-s/--long-name' when a
    short option exists, '--long-name' otherwise."""
    long_opt = '--%s' % _format_optname(value)
    if 'short' in conf:
        return '-%s/%s' % (conf['short'], long_opt)
    return long_opt
def _format_arg(arg, arg_conf, arg_type):
    """Display form of an argument: dashed syntax for options, bare name
    for positional arguments."""
    if arg_type == 'options':
        return _format_optdisplay(arg, arg_conf)
    return arg
#
# Check functions.
#
def _check_empty(path, conf):
    """Raise a CLGError if *conf* is ``None`` or an empty iterable."""
    if conf is None:
        raise CLGError(path, _EMPTY_CONF)
    if hasattr(conf, '__iter__') and len(conf) == 0:
        raise CLGError(path, _EMPTY_CONF)
def _check_type(path, conf, conf_type=dict):
    """Raise a CLGError if *conf* is not an instance of *conf_type*.

    The type name in the message comes from ``__name__`` instead of the
    previous fragile parsing of ``str(conf_type)`` (same output for normal
    classes, but robust).
    """
    if not isinstance(conf, conf_type):
        raise CLGError(path, _INVALID_SECTION.format(type=conf_type.__name__))
def _check_keywords(path, conf, section, one=None, need=None):
    """Validate the keywords of *conf* against **KEYWORDS[section]**.

    *one* lists keywords of which exactly one must be present; *need* lists
    mandatory keywords.  Each present keyword must also be non-empty."""
    valid_keywords = []
    for keywords in KEYWORDS[section].values():
        valid_keywords.extend(keywords)
    for keyword in conf:
        if keyword not in valid_keywords:
            raise CLGError(path, _INVALID_KEYWORD.format(keyword=keyword))
        _check_empty(path + [keyword], conf[keyword])
    if one and sum(1 for arg in conf if arg in one) != 1:
        raise CLGError(path, _ONE_KEYWORDS.format(keywords="', '".join(one)))
    for keyword in need or ():
        if keyword not in conf:
            raise CLGError(path, _MISSING_KEYWORD.format(keyword=keyword))
def _check_section(path, conf, section, one=None, need=None):
    """Check section is not empty, is a dict and have not extra keywords.

    Convenience wrapper chaining _check_empty, _check_type and
    _check_keywords for a configuration section."""
    _check_empty(path, conf)
    _check_type(path, conf, dict)
    _check_keywords(path, conf, section, one=one, need=need)
#
# Post processing functions.
#
def _has_value(value, conf):
    """Return True if the argument was actually given a value.

    Arguments not passed on the command line have value ``None``, except with
    actions 'store_true'/'store_false' whose defaults are ``False``/``True``
    respectively.  The previous version leaked the raw value as its return;
    this one always returns a proper bool (callers only use truthiness, so
    the interface is unchanged).
    """
    if value is None:
        return False
    action = conf.get('action', None)
    if action == 'store_true':
        return bool(value)
    if action == 'store_false':
        return not value
    # Any other action never counts as "has a value"; no action at all
    # counts when the value is truthy.
    return not action and bool(value)
def _post_need(parser, parser_args, args_values, arg):
    """Post-processing check: every entry of the 'need' list of *arg* must
    have a value, optionally a specific one ('argname:value' syntax).

    Fixed: the required-value check was only effective for list/tuple values
    (multi-value 'nargs'); a scalar value different from the required one was
    silently accepted.
    """
    arg_type, arg_conf = parser_args[arg]
    for cur_arg in arg_conf['need']:
        cur_arg_split = cur_arg.split(':')
        cur_arg = cur_arg_split[0]
        need_value = cur_arg_split[1] if len(cur_arg_split) == 2 else None
        cur_arg_type, cur_arg_conf = parser_args[cur_arg]
        cur_value = args_values[cur_arg]
        strings = {'type': arg_type[:-1],
                   'arg': _format_arg(arg, arg_conf, arg_type),
                   'need_type': cur_arg_type[:-1],
                   'need_arg': _format_arg(cur_arg, cur_arg_conf, cur_arg_type)}
        if not _has_value(cur_value, cur_arg_conf):
            parser.error(_NEED_ERR.format(**strings))
        if need_value is not None:
            # Satisfied if the value matches (scalar) or contains (multi-value).
            satisfied = (need_value in cur_value
                         if isinstance(cur_value, (list, tuple))
                         else cur_value == need_value)
            if not satisfied:
                strings['need_value'] = need_value
                parser.error(_NEED_VALUE_ERR.format(**strings))
def _post_conflict(parser, parser_args, args_values, arg):
    """Post-processing check for conflicting options: each entry of the
    'conflict' list is 'argname' or 'argname:value'."""
    arg_type, arg_conf = parser_args[arg]
    for entry in arg_conf['conflict']:
        entry_split = entry.split(':')
        other = entry_split[0]
        conflict_value = entry_split[1] if len(entry_split) == 2 else None
        other_type, other_conf = parser_args[other]
        other_value = args_values[other]
        if not _has_value(other_value, other_conf):
            continue
        strings = {'type': arg_type[:-1],
                   'arg': _format_arg(arg, arg_conf, arg_type),
                   'conflict_type': other_type[:-1],
                   'conflict_arg': _format_arg(other, other_conf, other_type)}
        if conflict_value is None:
            parser.error(_CONFLICT_ERR.format(**strings))
            return
        found = (conflict_value in other_value
                 if isinstance(other_value, (list, tuple))
                 else other_value == conflict_value)
        if found:
            strings['conflict_value'] = conflict_value
            parser.error(_CONFLICT_VALUE_ERR.format(**strings))
def _post_match(parser, parser_args, args_values, arg):
    """Post-processing check that the value(s) of *arg* match the 'match'
    regular expression (every element for multi-value arguments)."""
    arg_type, arg_conf = parser_args[arg]
    pattern = arg_conf['match']
    msg_elts = {'type': arg_type, 'arg': arg, 'pattern': pattern}
    values = args_values[arg]
    if arg_conf.get('nargs', None) in ('*', '+'):
        for value in values or []:
            if re.match(pattern, value) is None:
                parser.error(_MATCH_ERR.format(val=value, **msg_elts))
    elif re.match(pattern, values) is None:
        parser.error(_MATCH_ERR.format(val=values, **msg_elts))
def _exec_module(path, exec_conf, args_values):
    """Import the module named by **exec_conf** and call its entry function
    (default: 'main') with the parsed arguments.

    Raises:
        CLGError: if the module cannot be imported.
    """
    mdl_func = exec_conf.get('function', 'main')
    mdl_tree = exec_conf['module']
    try:
        mdl = importlib.import_module(mdl_tree)
    except ImportError as err:  # ModuleNotFoundError is a subclass of ImportError.
        raise CLGError(path, _LOAD_ERR.format(err=err))
    getattr(mdl, mdl_func)(args_values)
def _exec_file(path, exec_conf, args_values):
    """Load and execute a function of a file according to **exec_conf**.

    The file path may contain the '__FILE__' marker (replaced by the
    program's directory); the entry function defaults to 'main' and receives
    the parsed arguments.  Loading failures are rewrapped as CLGError."""
    mdl_path = _set_builtin(exec_conf['file']) # Allow __FILE__ builtin.
    mdl_name = os.path.splitext(os.path.basename(mdl_path))[0]
    mdl_func = exec_conf.get('function', 'main')
    try:
        # Import the file as a one-off module and call its entry function.
        spec = importlib.util.spec_from_file_location(mdl_name, mdl_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        getattr(module, mdl_func)(args_values)
    except FileNotFoundError as err:
        raise CLGError(path, _FILE_ERR.format(err=err.filename))
    except (IOError, ImportError, AttributeError) as err:
        raise CLGError(path, _FILE_ERR.format(err=str(err)))
#
# Classes.
#
class NoAbbrevParser(argparse.ArgumentParser):
"""Child class of **ArgumentParser** allowing to disable abbravetions."""
def _get_option_tuples(self, option_string):
result = []
# option strings | |
# gh_stars: 1-10
import time
import queue
import PySpin
import numpy as np
import multiprocessing as mp
from scipy.ndimage import gaussian_filter as gaussian
# Bounds and initial values used to build the dummy camera's property nodes.
_PROPERTIES = {
    'FRAMERATE': {
        'minimum': 1,
        'maximum': 200,
        'initial': 30
    },
    'BINSIZE': {
        'initial': (2, 2)
    },
    'WIDTH': {
        'initial': 1440
    },
    'HEIGHT': {
        'initial': 1080
    },
    'OFFSET': {
        'initial': (0, 0)
    },
    # Exposure time in microseconds.
    'EXPOSURE': {
        'minimum': 1,
        'maximum': 999999,
        'initial': 3000
    },
    'STREAM_BUFFER_COUNT': {
        'minimum': 1,
        'maximum': 100,
        'initial': 10
    }
}
class DummyProperty():
    """
    Base class mimicking one property node of a real camera pointer.

    Holds a value with optional bounds plus a reference to the owning dummy
    camera, and mirrors the GetValue/GetMin/GetMax/SetValue/GetAccessMode
    interface of PySpin property nodes.
    """
    def __init__(self, parent, min, max, val):
        # NOTE: the min/max parameter names shadow builtins but are kept for
        # interface compatibility with the subclasses' constructors.
        self.parent = parent
        self.min = min
        self.max = max
        self.val = val

    def GetValue(self):
        """Current value of the property."""
        return self.val

    def GetMin(self):
        """Lower bound of the property."""
        return self.min

    def GetMax(self):
        """Upper bound of the property."""
        return self.max

    def SetValue(self):
        """Shared pre-check for subclasses: writing requires an initialized camera."""
        if self.parent._initialized is False:
            raise PySpin.SpinnakerException('Camera is not initialized')

    def GetAccessMode(self):
        """Dummy properties are always readable and writable."""
        return PySpin.RW
class DummyAcquisitionProcess(mp.Process):
    """
    Mimics image acquisition and buffering in a background process.

    Generates random-noise frames at `framerate` and keeps at most
    `buffersize` of them in a joinable queue, dropping the oldest frame
    when the buffer is full.
    """
    def __init__(self, buffersize=10, framerate=30, shape=(1280, 1440), color=False):
        """
        Keywords
        --------
        buffersize
            Maximum number of buffered frames
        framerate
            Frames per second
        shape
            Frame shape as (height, width) — the order used by
            DummyCameraPointer.Init
        color
            Generate RGB frames instead of grayscale
        """
        super().__init__()
        self.framerate = framerate
        self.buffersize = buffersize
        # BUG FIX: callers pass shape as (height, width); the previous
        # unpacking (self.width, self.height = shape) produced transposed
        # frames.
        self.height, self.width = shape
        self.color = color
        self.buffer = mp.JoinableQueue()
        self.started = mp.Value('i', 0)
        self.acquiring = mp.Value('i', 0)

    def start(self):
        """Start acquisition"""
        self.started.value = 1
        super().start()

    def run(self):
        """Main process loop: generate noise frames while `acquiring` is set."""
        while self.started.value:
            while self.acquiring.value:
                t0 = time.time()
                # Generate a noise frame of the configured shape.
                if self.color:
                    size = (self.height, self.width, 3)
                else:
                    size = (self.height, self.width)
                image = np.random.randint(low=0, high=255, size=size, dtype=np.uint8)
                # Wait for the appropriate inter-frame interval to lapse
                while time.time() - t0 < (1 / self.framerate):
                    continue
                # Queue image, discarding the oldest frame if the buffer is full
                if self.buffer.qsize() == self.buffersize:
                    discard = self.buffer.get()
                    self.buffer.task_done()
                self.buffer.put(image)
            # Avoid a hot spin while started but not acquiring.
            time.sleep(0.001)

    def stop(self):
        """Stop acquisition and join the dummy acquisition process"""
        # Leave the frame-generation loop.
        if self.acquiring.value == 1:
            self.acquiring.value = 0
        # Exit from the main acquisition loop
        if self.started.value == 1:
            self.started.value = 0
        # Flush the image buffer
        while self.buffer.qsize() != 0:
            discard = self.buffer.get()
            self.buffer.task_done()
        # Block until every queued frame has been accounted for.
        self.buffer.join()
class DummyCameraPointer():
"""
Mimics the camera pointer object (and some of its methods)
"""
def __init__(self):
"""
"""
self.Width = self.Width(self, val=_PROPERTIES['WIDTH']['initial'])
self.Height = self.Height(self, val=_PROPERTIES['HEIGHT']['initial'])
self.OffsetX = self.OffsetX(self, val=_PROPERTIES['OFFSET']['initial'][0])
self.OffsetY = self.OffsetY(self, val=_PROPERTIES['OFFSET']['initial'][1])
self.BinningVertical = self.BinningVertical(self, val=_PROPERTIES['OFFSET']['initial'][1])
self.BinningHorizontal = self.BinningHorizontal(self, val=_PROPERTIES['OFFSET']['initial'][0])
self.PixelFormat = self.PixelFormat(self)
self.AcquisitionFrameRateEnable = self.AcquisitionFrameRateEnable(self)
self.AcquisitionFrameRate = self.AcquisitionFrameRate(
self,
min=_PROPERTIES['FRAMERATE']['minimum'],
max=_PROPERTIES['FRAMERATE']['maximum'],
val=_PROPERTIES['FRAMERATE']['initial']
)
self.AcquisitionMode = self.AcquisitionMode(self)
self.ExposureAuto = self.ExposureAuto(self)
self.ExposureTime = self.ExposureTime(
self,
min=_PROPERTIES['EXPOSURE']['minimum'],
max=_PROPERTIES['EXPOSURE']['maximum'],
val=_PROPERTIES['EXPOSURE']['initial']
)
self.TLStream = self.TLStream(self)
self.LineSelector = self.LineSelector(self)
self.LineSource = self.LineSource(self)
self.V3_3Enable = self.V3_3Enable(self)
self.TriggerMode = self.TriggerMode(self)
self.TriggerSource = self.TriggerSource(self)
self.TriggerOverlap = self.TriggerOverlap(self)
self.TriggerActivation = self.TriggerActivation(self)
# private attributes
self._initialized = False
self._streaming = False
self._p = None
return
    def IsValid(self):
        """The dummy pointer is always valid."""
        return True
def Init(self):
"""
"""
# Despawn the process
if self._p is not None:
self._p.stop()
self._p.join(timeout=3)
if self._p.is_alive():
self._p.terminate()
self._p = None
# Spawn a new process
kwargs = {
'buffersize': self.TLStream.StreamBufferCountManual.GetValue(),
'framerate' : self.AcquisitionFrameRate.GetValue(),
'shape' : (self.Height.GetValue(), self.Width.GetValue()),
'color' : True if self.PixelFormat.GetValue() == PySpin.PixelFormat_RGB8 else False
}
self._p = DummyAcquisitionProcess(**kwargs)
self._p.start()
#
self._initialized = True
return
def DeInit(self):
"""
"""
# Despawn the process
if self._p is not None:
self._p.stop()
self._p.join(timeout=3)
if self._p.is_alive():
self._p.terminate()
self._p = None
#
self._initialized = False
return
    def IsInitialized(self):
        """Whether Init() has been called (and DeInit() has not)."""
        return self._initialized
    def IsStreaming(self):
        """Whether acquisition is currently running."""
        return self._streaming
def BeginAcquisition(self):
"""
"""
if self._initialized is False:
raise PySpin.SpinnakerException('Camera is not initialized')
#
self._p.acquiring.value = 1
#
self._streaming = True
return
def EndAcquisition(self):
"""
"""
if self._initialized is False:
raise PySpin.SpinnakerException('Camera is not initialized')
#
self._p.acquiring.value = 0
#
self._streaming = False
return
    def GetNextImage(self, timeout=100):
        """
        Pop the oldest buffered frame and wrap it in a PySpin image.

        Keywords
        --------
        timeout
            Timeout (in ms)
        """
        if self._streaming is False:
            raise PySpin.SpinnakerException('Camera is not streaming')
        # Wait up to `timeout` ms for a frame from the acquisition process.
        try:
            noise = self._p.buffer.get(timeout=timeout / 1000)
            self._p.buffer.task_done()
        except queue.Empty:
            raise PySpin.SpinnakerException('No buffered images available') from None
        # Offsets are 0, 0; the raw noise array becomes the image data.
        pointer = PySpin.Image_Create(self.Width.GetValue(), self.Height.GetValue(), 0, 0, self.PixelFormat.GetValue(), noise)
        return pointer
class TriggerMode(DummyProperty):
def __init__(self, parent, min=None, max=None, val=PySpin.TriggerMode_Off):
super().__init__(parent, min, max, val)
def SetValue(self, val):
super().SetValue()
if val == PySpin.TriggerMode_On:
if self.parent._initialized:
self.parent._p.acquiring.value = 0
self.val = val
elif val == PySpin.TriggerMode_Off:
if self.parent._initialized:
self.parent._p.acquiring.value = 1
self.val = val
else:
raise PySpin.SpinnakerException(f'{val} is an invalid value')
return
class Width(DummyProperty):
def __init__(self, parent, min=1, max=1440, val=1440):
super().__init__(parent, min, max, val)
self._ceiling = max
def SetValue(self, val):
super().SetValue()
if val < self.min:
raise PySpin.SpinnakerException(f'{val} is too small')
elif val > self.max:
raise PySpin.SpinnakerException(f'{val} is too big')
else:
self.val = val
return
class Height(DummyProperty):
def __init__(self, parent, min=1, max=1080, val=1440):
super().__init__(parent, min, max, val)
self._ceiling = max
def SetValue(self, val):
super().SetValue()
if val < self.min:
raise PySpin.SpinnakerException(f'{val} is too small')
elif val > self.max:
raise PySpin.SpinnakerException(f'{val} is too big')
else:
self.val = val
return
class OffsetX(DummyProperty):
def __init__(self, parent, min=0, max=None, val=0):
super().__init__(parent, min, max, val)
self.max = self.parent.Width.GetMax() - 1
return
def SetValue(self, val):
super().SetValue()
if val < self.min:
raise PySpin.SpinnakerException(f'{val} is too small')
elif val > self.max:
raise PySpin.SpinnakerException(f'{val} is too big')
else:
self.val = val
return
class OffsetY(DummyProperty):
def __init__(self, parent, min=0, max=None, val=0):
super().__init__(parent, min, max, val)
self.max = self.parent.Height.GetMax() - 1
return
def SetValue(self, val):
super().SetValue()
if val < self.min:
raise PySpin.SpinnakerException(f'{val} is too small')
elif val > self.max:
raise PySpin.SpinnakerException(f'{val} is too big')
else:
self.val = val
return
class BinningVertical(DummyProperty):
def __init__(self, parent, min=1, max=4, val=1):
super().__init__(parent, min, max, val)
def SetValue(self, val):
super().SetValue()
if self.min <= val <= self.max and val in [1, 2, 4]:
self.val = val
height = int(self.parent.Height._ceiling / val)
self.parent.Height.max = height
self.parent.Height.SetValue(height)
self.parent.OffsetY.max = self.parent.Height.GetMax() - 1
else:
raise PySpin.SpinnakerException(f'{val} is an invalid value')
class BinningHorizontal(DummyProperty):
    """Dummy horizontal binning factor; setting it rescales Width and OffsetX."""
    def __init__(self, parent, min=1, max=4, val=1):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Apply a binning factor of 1, 2 or 4 and shrink Width to match."""
        super().SetValue()
        if not (self.min <= val <= self.max) or val not in (1, 2, 4):
            raise PySpin.SpinnakerException(f'{val} is an invalid value')
        self.val = val
        # Binned width derives from the unbinned sensor ceiling.
        binned_width = int(self.parent.Width._ceiling / val)
        self.parent.Width.max = binned_width
        self.parent.Width.SetValue(binned_width)
        self.parent.OffsetX.max = self.parent.Width.GetMax() - 1
class AcquisitionFrameRateEnable(DummyProperty):
    """Dummy boolean switch gating manual frame-rate control."""
    def __init__(self, parent, min=None, max=None, val=False):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Accept only the literal booleans True or False."""
        super().SetValue()
        # Identity checks deliberately reject truthy/falsy non-booleans.
        if val is not True and val is not False:
            raise PySpin.SpinnakerException(f'{val} is not a valid value')
        self.val = val
class AcquisitionFrameRate(DummyProperty):
    """Dummy frames-per-second setting; raising it can cap ExposureTime."""
    def __init__(self, parent, min=1, max=200, val=30):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Set the frame rate; requires AcquisitionFrameRateEnable first."""
        super().SetValue()
        if self.parent.AcquisitionFrameRateEnable.GetValue() is False:
            raise PySpin.SpinnakerException('Framerate is not enabled')
        if not (self.min <= val <= self.max):
            raise PySpin.SpinnakerException(f'{val} is not a valid value')
        self.val = val
        # Longest exposure (µs) that still fits inside one frame period.
        exposure_cap = 1 / val * 1000000 - 1
        if self.parent.ExposureTime.GetValue() > exposure_cap:
            self.parent.ExposureTime.max = exposure_cap
            self.parent.ExposureTime.val = exposure_cap
class PixelFormat(DummyProperty):
    """Dummy pixel format; only Mono8 and RGB8 are emulated."""
    def __init__(self, parent, min=None, max=None, val=PySpin.PixelFormat_Mono8):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Accept one of the two supported PySpin pixel-format constants."""
        super().SetValue()
        if val in (PySpin.PixelFormat_Mono8, PySpin.PixelFormat_RGB8):
            self.val = val
        else:
            raise PySpin.SpinnakerException(f'{val} is not a valid pixel format')
class ExposureAuto(DummyProperty):
    """Dummy auto-exposure mode (Continuous, Once, or Off)."""
    def __init__(self, parent, min=None, max=None, val=PySpin.ExposureAuto_Once):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Accept one of the three PySpin ExposureAuto constants."""
        super().SetValue()
        if val in (PySpin.ExposureAuto_Continuous, PySpin.ExposureAuto_Once, PySpin.ExposureAuto_Off):
            self.val = val
        else:
            raise PySpin.SpinnakerException(f'{val} is not a valid value')
class ExposureTime(DummyProperty):
    """Dummy exposure time (µs), bounded by the current frame rate."""
    def __init__(self, parent, min=100, max=None, val=3000):
        super().__init__(parent, min, max, val)
        # Cap exposure to just under one frame period, in microseconds.
        frame_rate = self.parent.AcquisitionFrameRate.GetValue()
        self.max = 1 / frame_rate * 1000000 - 1
    def SetValue(self, val):
        """Store `val` when it lies within [min, max]; raise otherwise."""
        super().SetValue()
        if not (self.min <= val <= self.max):
            raise PySpin.SpinnakerException(f'{val} is not a valid value')
        self.val = val
class AcquisitionMode(DummyProperty):
    """Dummy acquisition mode (Continuous, SingleFrame, or MultiFrame)."""
    def __init__(self, parent, min=None, max=None, val=PySpin.AcquisitionMode_SingleFrame):
        super().__init__(parent, min, max, val)
    def SetValue(self, val):
        """Accept one of the three PySpin AcquisitionMode constants."""
        super().SetValue()
        if val in (PySpin.AcquisitionMode_Continuous, PySpin.AcquisitionMode_SingleFrame, PySpin.AcquisitionMode_MultiFrame):
            self.val = val
        else:
            raise PySpin.SpinnakerException(f'{val} is not a valid value')
class TLStream():
    """Dummy transport-layer stream node grouping the Stream* properties."""
    def __init__(self, parent):
        # Instantiate each nested property class, mirroring PySpin's layout.
        # (Attribute lookup on self resolves to the nested classes below.)
        self.StreamBufferHandlingMode = self.StreamBufferHandlingMode(parent)
        self.StreamBufferCountMode = self.StreamBufferCountMode(parent)
        self.StreamBufferCountManual = self.StreamBufferCountManual(parent)
    class StreamBufferHandlingMode(DummyProperty):
        """Strategy used to hand buffers to the consumer."""
        def __init__(self, parent, min=None, max=None, val=PySpin.StreamBufferHandlingMode_OldestFirst):
            super().__init__(parent, min, max, val)
        def SetValue(self, val):
            """Accept one of the four buffer-handling strategies."""
            super().SetValue()
            supported = (
                PySpin.StreamBufferHandlingMode_NewestOnly,
                PySpin.StreamBufferHandlingMode_OldestFirst,
                PySpin.StreamBufferHandlingMode_NewestFirst,
                PySpin.StreamBufferHandlingMode_OldestFirstOverwrite,
            )
            if val in supported:
                self.val = val
            else:
                raise PySpin.SpinnakerException(f'{val} is not a valid value')
    class StreamBufferCountMode(DummyProperty):
        """Whether the buffer count is chosen automatically or manually."""
        def __init__(self, parent, min=None, max=None, val=PySpin.StreamBufferCountMode_Auto):
            super().__init__(parent, min, max, val)
        def SetValue(self, val):
            """Accept Auto or Manual buffer-count mode."""
            super().SetValue()
            if val in (PySpin.StreamBufferCountMode_Auto, PySpin.StreamBufferCountMode_Manual):
                self.val = val
            else:
                raise PySpin.SpinnakerException(f'{val} is not a valid value')
    class StreamBufferCountManual(DummyProperty):
        """Explicit buffer count used when the count mode is Manual."""
        def __init__(self, parent, min=1, max=1000, val=10):
            super().__init__(parent, min, max, val)
        def SetValue(self, val):
            """Accept any buffer count within [min, max]; raise otherwise."""
            super().SetValue()
            if not (self.min <= val <= self.max):
                raise PySpin.SpinnakerException(f'{val} is not a valid value')
            self.val = val
class LineSelector(DummyProperty):
def __init__(self, parent, min=None, max=None, val=PySpin.LineSelector_Line1):
super().__init__(parent, min, max, | |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class OpenFlowChannel(Base):
"""The OpenFlowChannel class encapsulates a user managed openFlowChannel node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the OpenFlowChannel property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server using the find method.
The internal properties list can be managed by the user by using the add and remove methods.
"""
_SDM_NAME = 'openFlowChannel'
    def __init__(self, parent):
        # Delegate node wiring to Base; `parent` anchors this node in the
        # ixnetwork object hierarchy.
        super(OpenFlowChannel, self).__init__(parent)
    @property
    def Groups(self):
        """An instance of the Groups class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.groups.Groups)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.groups import Groups
        return Groups(self)
    @property
    def Meters(self):
        """An instance of the Meters class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.meters.Meters)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.meters import Meters
        return Meters(self)
    @property
    def Tables(self):
        """An instance of the Tables class.

        Returns:
            obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tables.Tables)
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid a circular import at module load time.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tables import Tables
        return Tables(self)
    @property
    def Active(self):
        """Activate/Deactivate Configuration.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('active')
    @property
    def CalcFlowRate(self):
        """If selected, the statistics on the rate of transmission of flows
        per second by the controller is published.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('calcFlowRate')
    @property
    def CalcFlowRateWithBarrier(self):
        """If selected, statistics on the rate of transmission of flows per
        second by the controller, along with Barrier Request messages, is
        published.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('calcFlowRateWithBarrier')
    @property
    def ConnectedVia(self):
        """List of layers this layer used to connect to the wire.

        Returns:
            list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])
        """
        return self._get_attribute('connectedVia')
    @ConnectedVia.setter
    def ConnectedVia(self, value):
        # Pushes the new value to the server-side 'connectedVia' attribute.
        self._set_attribute('connectedVia', value)
    @property
    def ControllerIndex(self):
        """Parent Controller Index (read-only).

        Returns:
            list(str)
        """
        return self._get_attribute('controllerIndex')
    @property
    def ControllerName(self):
        """Parent Controller Name (read-only).

        Returns:
            str
        """
        return self._get_attribute('controllerName')
    @property
    def Count(self):
        """Number of elements inside the associated multiplier-scaled
        container object, e.g. number of devices inside a Device Group.

        Returns:
            number
        """
        return self._get_attribute('count')
    @property
    def DatapathId(self):
        """The Datapath ID of the OF Channel.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('datapathId')
    @property
    def DatapathIdHex(self):
        """The Datapath ID in hexadecimal format.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('datapathIdHex')
    @property
    def DescriptiveName(self):
        """Longer, more descriptive name for the element. Not guaranteed to
        be unique like -name-, but may offer more context.

        Returns:
            str
        """
        return self._get_attribute('descriptiveName')
    @property
    def EnableHelloElement(self):
        """If selected, the Controller sends a hello message consisting of
        an OpenFlow header and a set of variable-size hello elements to
        inform the initial handshake of the connection.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('enableHelloElement')
    @property
    def Errors(self):
        """A list of errors that have occurred.

        Returns:
            list(dict(arg1:str[None|/api/v1/sessions/1/ixnetwork/?deepchild=*],arg2:list[str]))
        """
        return self._get_attribute('errors')
    @property
    def FlowTxBurstSize(self):
        """Number of flow-transmitting packets that can be sent in a single
        burst within the time frame given by the Inter Flow Burst Gap value.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('flowTxBurstSize')
    @property
    def GroupsPerChannel(self):
        """Number of Groups per Channel.

        Returns:
            number
        """
        return self._get_attribute('groupsPerChannel')
    @GroupsPerChannel.setter
    def GroupsPerChannel(self, value):
        # Pushes the new value to the server-side 'groupsPerChannel' attribute.
        self._set_attribute('groupsPerChannel', value)
    @property
    def InterFlowBurstGap(self):
        """Duration (in milliseconds) for which the controller waits between
        successive flow advertisements.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('interFlowBurstGap')
    @property
    def LocalIp(self):
        """The local IP address of the interface. Auto-populated and cannot
        be changed.

        Returns:
            list(str)
        """
        return self._get_attribute('localIp')
    @property
    def MaxFlowsAtATime(self):
        """Size of an internal buffer maintained by the Ixia controller,
        preventing it from sending more flows than the OpenFlow switch can
        consume at a time.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('maxFlowsAtATime')
    @property
    def MetersPerChannel(self):
        """Number of Meters per Channel.

        Returns:
            number
        """
        return self._get_attribute('metersPerChannel')
    @MetersPerChannel.setter
    def MetersPerChannel(self, value):
        # Pushes the new value to the server-side 'metersPerChannel' attribute.
        self._set_attribute('metersPerChannel', value)
    @property
    def Multiplier(self):
        """Number of layer instances per parent instance (multiplier).

        Returns:
            number
        """
        return self._get_attribute('multiplier')
    @Multiplier.setter
    def Multiplier(self, value):
        # Pushes the new value to the server-side 'multiplier' attribute.
        self._set_attribute('multiplier', value)
    @property
    def Name(self):
        """Name of NGPF element, guaranteed to be unique in the Scenario.

        Returns:
            str
        """
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        # Pushes the new value to the server-side 'name' attribute.
        self._set_attribute('name', value)
    @property
    def RemoteIp(self):
        """The IP address of the DUT at the other end of the OF Channel.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('remoteIp')
    @property
    def SendRoleRequest(self):
        """If selected, the controller sends a Role Request message after
        the connection is established, to change its role according to the
        Role Request option selected.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('sendRoleRequest')
    @property
    def SessionStatus(self):
        """Current state of the protocol session: Not Started - session
        negotiation not started, the session is not active yet. Down -
        actively trying to bring up a protocol session, but negotiation
        didn't successfully complete (yet). Up - session came up
        successfully.

        Returns:
            list(str[down|notStarted|up])
        """
        return self._get_attribute('sessionStatus')
    @property
    def StackedLayers(self):
        """List of secondary (many to one) child layer protocols.

        Returns:
            list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])
        """
        return self._get_attribute('stackedLayers')
    @StackedLayers.setter
    def StackedLayers(self, value):
        # Pushes the new value to the server-side 'stackedLayers' attribute.
        self._set_attribute('stackedLayers', value)
    @property
    def StartupGenerationId(self):
        """A 64-bit sequence number field that identifies a given mastership
        view.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('startupGenerationId')
    @property
    def StartupRoleRequest(self):
        """Defines the role of the controller. Options include: 1) No Change
        2) Equal 3) Master 4) Slave.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('startupRoleRequest')
    @property
    def StateCounts(self):
        """Totals of sessions by state: overall count, not started, down,
        and up.

        Returns:
            dict(total:number,notStarted:number,down:number,up:number)
        """
        return self._get_attribute('stateCounts')
    @property
    def Status(self):
        """Running status of the associated network element. Once in the
        Started state, protocol sessions will begin to negotiate.

        Returns:
            str(configured|error|mixed|notStarted|started|starting|stopping)
        """
        return self._get_attribute('status')
    @property
    def TablesPerChannel(self):
        """Number of Tables per Channel.

        Returns:
            number
        """
        return self._get_attribute('tablesPerChannel')
    @TablesPerChannel.setter
    def TablesPerChannel(self, value):
        # Pushes the new value to the server-side 'tablesPerChannel' attribute.
        self._set_attribute('tablesPerChannel', value)
    @property
    def UseDatapathID(self):
        """If selected, the Datapath ID and IP address are used as the OF
        Channel identifier.

        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('useDatapathID')
    def add(self, ConnectedVia=None, GroupsPerChannel=None, MetersPerChannel=None, Multiplier=None, Name=None, StackedLayers=None, TablesPerChannel=None):
        """Adds a new openFlowChannel node on the server and retrieves it in
        this instance.

        Args:
            ConnectedVia (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of layers this layer used to connect to the wire
            GroupsPerChannel (number): Number of Groups per Channel
            MetersPerChannel (number): Number of Meters per Channel
            Multiplier (number): Number of layer instances per parent instance (multiplier)
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            StackedLayers (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of secondary (many to one) child layer protocols
            TablesPerChannel (number): Number of Tables per Channel
        Returns:
            self: This instance with all currently retrieved openFlowChannel data using find and the newly added openFlowChannel data available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() deliberately snapshots the full keyword set for _create;
        # the signature must match the server-side SDM attributes exactly.
        return self._create(locals())
    def remove(self):
        """Deletes all the openFlowChannel data in this instance from the
        server.

        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()
    def find(self, ConnectedVia=None, ControllerIndex=None, ControllerName=None, Count=None, DescriptiveName=None, Errors=None, GroupsPerChannel=None, LocalIp=None, MetersPerChannel=None, Multiplier=None, Name=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None, TablesPerChannel=None):
        """Finds and retrieves openFlowChannel data from the server.

        All named parameters support regex and can be used to selectively
        retrieve openFlowChannel data from the server. By default the find
        method takes no parameters and will retrieve all openFlowChannel
        data from the server.

        Args:
            ConnectedVia (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of layers this layer used to connect to the wire
            ControllerIndex (list(str)): Parent Controller Index
            ControllerName (str): Parent Controller Name
            Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group
            DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but maybe offers more context
            Errors (list(dict(arg1:str[None|/api/v1/sessions/1/ixnetwork/?deepchild=*],arg2:list[str]))): A list of errors that have occurred
            GroupsPerChannel (number): Number of Groups per Channel
            LocalIp (list(str)): The local IP address of the interface. This field is auto-populated and cannot be changed.
            MetersPerChannel (number): Number of Meters per Channel
            Multiplier (number): Number of layer instances per parent instance (multiplier)
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            SessionStatus (list(str[down|notStarted|up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
            StackedLayers (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of secondary (many to one) child layer protocols
            StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
            Status (str(configured|error|mixed|notStarted|started|starting|stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
            TablesPerChannel (number): Number of Tables per Channel
        Returns:
            self: This instance with matching openFlowChannel data retrieved from the server available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() deliberately snapshots the full keyword set for _select;
        # the signature must match the server-side SDM attributes exactly.
        return self._select(locals())
def read(self, href):
"""Retrieves a single instance of openFlowChannel data from the server.
Args:
href (str): An href to the | |
= False
for obj in self.model.grid.get_cell_list_contents(next):
if obj.__class__.__name__ in ["Adult", "Child", "Obstacle", "Fire", "Heat", "Smoke"]:
log("Path blocked by " + obj.__class__.__name__)
# If path blocked by another agent, wait, and move randomly to avoid blockages
if obj.__class__.__name__ in ["Adult", "Child"]:
log("Waiting (" + str(self.waitingTime) + ")")
self.waiting = True
self.waitingTime += self.freq
# When waiting limit reached, consider the exit blocked
if self.waitingTime > self.patience:
self.path = self.considerRouteBlocked()
# Every 10 steps try to get the blocking agent to move out of the way
if self.waitingTime % (10 * self.freq) == 0 and obj.moved == False:
adjacentToNeighbor = self.model.grid.getObject(obj.pos, "Cell").neighbors
for cell in adjacentToNeighbor:
if self.model.grid.cellAvailable(adjacentToNeighbor[cell], ["Adult", "Child", "Exit", "Fire", "Heat", "Obstacle"]):
log("Asking agent " + obj.unique_id + " to move out of the way, to cell " + str(adjacentToNeighbor[cell]) + ".")
if obj.path != None:
obj.path = [obj.pos] + obj.path
self.model.grid.move_agent(obj, adjacentToNeighbor[cell])
obj.moved = True
break
#Move randomly from time to time
blocked = True
r = random.random()
if r < 0.2 and self.waitingTime > 20:
neighbors = self.model.grid.getObject(self.pos, "Cell").neighbors
for neighbor in neighbors:
free = True
for obj in self.model.grid.get_cell_list_contents(neighbors[neighbor]):
if obj.__class__.__name__ in ["Adult", "Child", "Obstacle", "Fire", "Heat", "Exit"]:
free = False
if free and self.target != None:
log("Moving to a free space to avoid potential blockage.")
log("MOVING " + str(neighbor) + ", TO " + str(neighbors[neighbor]))
self.model.grid.move_agent(self, neighbors[neighbor])
self.path = computePath(self.model.grid, self.pos, self.target,
self.knownFires, self.knownHeat, self.knownObstacles)
break
break
# If next cell blocked by a non-agent, attempt to update path
else:
self.path = computePath(self.model.grid, self.pos, self.target,
self.knownFires, self.knownHeat, self.knownObstacles)
oldNext = next
next = self.path[0]
# If no path possible anymore, end step
if next == "blocked":
log("No path possible.")
blocked = True
# If the new path is blocked by an agent, end step
elif next != "reached" and next != None:
for obj in self.model.grid.get_cell_list_contents(next):
if obj.__class__.__name__ in ["Adult", "Child"]:
log("Path blocked by " + obj.__class__.__name__ + ".")
blocked = True
# If possible, move to the next cell
if not blocked:
log("MOVING TO " + str(next))
del (self.path[0])
self.model.grid.move_agent(self, next)
# Decrease waiting time counter:
self.waiting = False
if self.waitingTime > 0:
self.waitingTime -= 2* self.freq
# Check if the agent has not reached exit
for el in self.model.grid.get_cell_list_contents(self.pos):
if el.__class__.__name__ == "Exit":
self.evacuated = True
self.model.schedule.remove(self)
self.model.grid.remove_agent(self)
self.model.removedAgents.append((self, "evacuated", self.model.schedule.steps + 1))
self.model.activeAgents.remove(self)
self.model.activeAgentsCount -= 1
if self.selected:
for x in range(self.model.grid.width):
for y in range(self.model.grid.height):
self.model.grid.getObject((x, y), "Tile").selected = False
return
# Update the lists of objects visible to the agent
self.updateVisibility()
log("\nChildren: " + str(self.children))
log("Visible children: " + str(self.visibleChildren))
log("\nVisible exits: " + str(self.visibleExits))
log("Visible signs: " + str(self.visibleSigns))
log("Visible obstacles: " + str(self.visibleObstacles))
log("Visible fires: " + str(self.visibleFires))
log("Visible heat: " + str(self.visibleHeat))
log("Visible agents: " + str(self.visibleAgents) + "\n")
# Update list of known exits
self.updateExits()
# Update list of known signs
self.updateSigns()
# Update list of known obstacles
self.updateObstacles()
# Update list of known fire location
self.updateFires()
# Print a log of known objects
log("\nKnown exits: " + str(self.knownExits))
log("Known signs: " + str(self.knownSigns))
log("Known obstacle locations: " + str(self.knownObstacles))
log("Known fire locations: " + str(self.knownFires))
log("Known heat locations: " + str(self.knownHeat))
# If at least one exit in sight
if len(self.visibleExits) > 0 and self.state not in ["FINDING_CHILDREN"]:
# Update the target if necessary, to aim for the nearest exit
oldTarget = self.target
tentativeTarget, tentativePath = self.pickExit(self.visibleExits, visible=True)
if tentativeTarget != None:
if tentativeTarget != oldTarget:
self.target = tentativeTarget
log("\nNew viable exit located. Switching state to 'EVACUATING'")
self.path = tentativePath
# Change state to 'EVACUATING' to attempt to pursue the new exit
self.previousState = self.state
self.state = "EVACUATING"
## CHILD
class Child(Adult):
def __init__(self, model, type, unique_id, guardians, startingLocation):
self.model = model
self.type = type
self.unique_id = unique_id
self.guardians = guardians
self.followedGuardian = None
self.startingLocation = startingLocation
self.knownExits = {}
self.knownFires = []
self.knownHeat = []
self.knownObstacles = []
self.moved = False
self.visibleGuardians = []
self.visibleExits = []
self.visibleSigns = []
self.visibleObstacles = []
self.visibleFires = []
self.visibleHeat = []
self.visibleAgents = []
self.visibleCells = []
self.selected = False
self.path = []
self.initDist = 0
self.optEvacTime = 0
self.target = None
self.intoxication = 0
self.unconscious = False
self.dead = False
self.evacuated = False
self.maxSpeed = 0.8
self.maxFreq = 3
self.freq = self.maxFreq
self.offset = round(random.random() * self.freq)
self.previousState = "AT_REST"
self.state = "LOST"
# Function that checks if a guardian is visible and picks the nearest one
def locateGuardian(self):
self.visibleGuardians = []
visibleGuardians = {}
for agentID in self.visibleAgents:
if agentID in self.guardians:
agent = self.model.getAgent(agentID)
visibleGuardians[agentID] = round(eucDist(self.pos, agent.pos), 2)
if len(visibleGuardians) == 0:
return
else:
for el in visibleGuardians.keys():
self.visibleGuardians.append(el)
return self.model.getAgent(min(visibleGuardians, key=visibleGuardians.get))
# THE MAIN FUNCTION FOR CHILD'S STEP
def step(self):
log("\n---\n\nAGENT " + str(self.unique_id) + " (CHILD) step beginning.")
log("State: " + self.state + "\n")
# Initial checks
for el in self.model.grid.get_cell_list_contents(self.pos):
# Apply effects of fire
if el.__class__.__name__ == "Fire":
if not self.unconscious:
self.model.activeAgents.remove(self)
self.model.activeAgentsCount -= 1
else:
self.model.removedAgents.remove((self, "unconscious"))
self.dead = True
self.state = "DEAD"
self.model.schedule.remove(self)
self.model.grid.remove_agent(self)
self.model.removedAgents.append((self, "dead"))
log(str(self.unique_id) + " died in the fire")
return
# Check if the agent is not already unconscious and skip if so
if self.unconscious:
log("Agent unconscious.")
return
# Apply effects of smoke
if el.__class__.__name__ == "Smoke":
self.intoxication += 1
log("Agent inhaled smoke. Intoxication level: " + str(self.intoxication))
if self.intoxication >= 60 and self.unconscious == False:
self.unconscious = True
self.state = "UNCONSCIOUS"
self.model.activeAgents.remove(self)
self.model.removedAgents.append((self, "unconscious"))
self.model.activeAgentsCount -= 1
log(str(self.unique_id) + " lost consciousness")
continue
# Check if it's the agent's turn to move
if (self.model.schedule.steps + self.offset) % self.freq != 0:
log("Skipping step...")
return
# If the agent has moved to accommodate other agent, reset the flag and skip step
if self.moved == True:
self.moved = False
log("Skipping step...")
return
# Update the lists of objects visible to the agent
self.updateVisibility()
self.locateGuardian()
log("\nGuardians: " + str(self.guardians))
log("Visible guardians: " + str(self.visibleGuardians))
log("\nVisible signs: " + str(self.visibleSigns))
log("Visible obstacles: " + str(self.visibleObstacles))
log("Visible fires: " + str(self.visibleFires))
log("Visible heat: " + str(self.visibleHeat))
log("Visible agents: " + str(self.visibleAgents) + "\n")
# Update list of known exits
self.updateExits()
# Update list of known obstacles
self.updateObstacles()
# Update list of known fire location
self.updateFires()
# Take action depending on current state
if self.state == "FOLLOWING":
log("Following guardian: " + self.followedGuardian.unique_id)
# If lost sight of guardian, switch state to lost
if self.followedGuardian.evacuated:
log("Guardian evacuated. Following to the same exit.")
self.previousState = self.state
self.state = "EXITING"
self.target = self.followedGuardian.target
return
if self.followedGuardian.unique_id not in self.visibleGuardians \
and manDist(self.followedGuardian.pos, self.pos) > len(self.followedGuardian.ledChildren) + 10:
log("Lost sight of the guardian. Switching state to 'LOST'")
if self.unique_id in self.followedGuardian.foundChildren:
self.followedGuardian.foundChildren.remove(self.unique_id)
self.previousState = self.state
self.state = "LOST"
else:
self.target = self.followedGuardian.pos
if self.target != None:
if self.path == []:
self.path = computePath(self.model.grid, self.pos,
(self.target, self.followedGuardian.unique_id),
self.knownFires, self.knownHeat, self.knownObstacles)
if len(self.path) > 12:
self.path = computePath(self.model.grid, self.pos,
(self.target, self.followedGuardian.unique_id),
self.knownFires, self.knownHeat, self.knownObstacles, ignoreAgents=True)
elif self.path[-1] != self.target and self.path != ["blocked"] and self.path != ["reached"]:
pathExtension = computePath(self.model.grid, self.path[-1], (self.target, self.followedGuardian.unique_id),
self.knownFires, self.knownHeat, self.knownObstacles, ignoreAgents=True)
if pathExtension != ["blocked"] and len(self.path) < 10:
for el in pathExtension:
self.path.append(el)
else:
self.path = computePath(self.model.grid, self.pos, (self.target, self.followedGuardian.unique_id),
self.knownFires, self.knownHeat, self.knownObstacles)
if len(self.path) > 12:
self.path = computePath(self.model.grid, self.pos,
(self.target, self.followedGuardian.unique_id),
self.knownFires, self.knownHeat, self.knownObstacles, ignoreAgents=True)
next = self.path[0]
else:
log("Lost sight of the guardian. Switching state to 'LOST'")
self.followedGuardian = None
self.previousState = self.state
self.state = "LOST"
return
# If too close to the followed adult move away to avoid blocking him
if manDist(self.pos, self.followedGuardian.pos) < 2:
neighbors = self.model.grid.getObject(self.pos, "Cell").neighbors
for neighbor in neighbors:
free = True
for obj in self.model.grid.get_cell_list_contents(neighbors[neighbor]):
if obj.__class__.__name__ in ["Adult", "Child", "Obstacle", "Fire", "Heat", "Exit"]:
free = False
if free and self.target != None:
log("Moving away from the followed guardian to keep reasonable distance.")
log("MOVING " + str(neighbor) + ", TO " + str(neighbors[neighbor]))
self.model.grid.move_agent(self, neighbors[neighbor])
self.path = []
return
else:
if | |
-0.86859548568613],
[-0.73152230491144, 0.62253568842167, -0.89101338351023],
[-1.00000000000000, 0.81459905395471, -0.90729952697735],
[-0.90729952697735, 0.81459905395471, -0.90729952697735],
[-1.00000000000000, 0.93400143040806, -0.93400143040806],
[-1.00000000000000, -1.00000000000000, -0.78448347366314],
[-0.89101338351023, -1.00000000000000, -0.73152230491144],
[-0.69900284547384, -1.00000000000000, -0.69900284547384],
[-0.44709790670022, -1.00000000000000, -0.68165204574864],
[-0.16188382733210, -1.00000000000000, -0.67623234533581],
[0.12874995244886, -1.00000000000000, -0.68165204574864],
[0.39800569094768, -1.00000000000000, -0.69900284547384],
[0.62253568842167, -1.00000000000000, -0.73152230491144],
[0.78448347366314, -1.00000000000000, -0.78448347366314],
[-1.00000000000000, -0.89101338351023, -0.73152230491144],
[-0.84680080912920, -0.84680080912920, -0.68921299955661],
[-0.65244925135376, -0.82768715531655, -0.65244925135376],
[-0.40689302311095, -0.82031213075495, -0.63668791072659],
[-0.13610693540751, -0.82031213075495, -0.63668791072659],
[0.13258565802407, -0.82768715531655, -0.65244925135376],
[0.38281461781500, -0.84680080912920, -0.68921299955661],
[0.62253568842167, -0.89101338351023, -0.73152230491144],
[-1.00000000000000, -0.69900284547384, -0.69900284547384],
[-0.82768715531655, -0.65244925135376, -0.65244925135376],
[-0.62495499771497, -0.62495499771497, -0.62495499771497],
[-0.38343990765303, -0.61656009234697, -0.61656009234697],
[-0.12513500685510, -0.62495499771497, -0.62495499771497],
[0.13258565802407, -0.65244925135376, -0.65244925135376],
[0.39800569094768, -0.69900284547384, -0.69900284547384],
[-1.00000000000000, -0.44709790670022, -0.68165204574864],
[-0.82031213075495, -0.40689302311095, -0.63668791072659],
[-0.61656009234697, -0.38343990765303, -0.61656009234697],
[-0.38343990765303, -0.38343990765303, -0.61656009234697],
[-0.13610693540751, -0.40689302311095, -0.63668791072659],
[0.12874995244886, -0.44709790670022, -0.68165204574864],
[-1.00000000000000, -0.16188382733210, -0.67623234533581],
[-0.82031213075495, -0.13610693540751, -0.63668791072659],
[-0.62495499771497, -0.12513500685510, -0.62495499771497],
[-0.40689302311094, -0.13610693540751, -0.63668791072659],
[-0.16188382733210, -0.16188382733210, -0.67623234533581],
[-1.00000000000000, 0.12874995244886, -0.68165204574864],
[-0.82768715531655, 0.13258565802407, -0.65244925135376],
[-0.65244925135376, 0.13258565802407, -0.65244925135376],
[-0.44709790670022, 0.12874995244886, -0.68165204574864],
[-1.00000000000000, 0.39800569094768, -0.69900284547384],
[-0.84680080912920, 0.38281461781500, -0.68921299955661],
[-0.69900284547384, 0.39800569094768, -0.69900284547384],
[-1.00000000000000, 0.62253568842167, -0.73152230491144],
[-0.89101338351023, 0.62253568842167, -0.73152230491144],
[-1.00000000000000, 0.78448347366314, -0.78448347366314],
[-1.00000000000000, -1.00000000000000, -0.56523532699621],
[-0.88196956377807, -1.00000000000000, -0.49119920657729],
[-0.68165204574864, -1.00000000000000, -0.44709790670022],
[-0.42664737762904, -1.00000000000000, -0.42664737762904],
[-0.14670524474192, -1.00000000000000, -0.42664737762904],
[0.12874995244886, -1.00000000000000, -0.44709790670022],
[0.37316877035536, -1.00000000000000, -0.49119920657729],
[0.56523532699621, -1.00000000000000, -0.56523532699621],
[-1.00000000000000, -0.88196956377807, -0.49119920657729],
[-0.83728581509126, -0.83728581509126, -0.44588737305619],
[-0.63668791072659, -0.82031213075495, -0.40689302311095],
[-0.39476381053366, -0.81570856839903, -0.39476381053366],
[-0.13610693540751, -0.82031213075495, -0.40689302311095],
[0.12045900323870, -0.83728581509126, -0.44588737305619],
[0.37316877035536, -0.88196956377807, -0.49119920657729],
[-1.00000000000000, -0.68165204574864, -0.44709790670022],
[-0.82031213075495, -0.63668791072659, -0.40689302311095],
[-0.61656009234697, -0.61656009234697, -0.38343990765303],
[-0.38343990765303, -0.61656009234697, -0.38343990765303],
[-0.13610693540751, -0.63668791072659, -0.40689302311095],
[0.12874995244886, -0.68165204574864, -0.44709790670022],
[-1.00000000000000, -0.42664737762904, -0.42664737762904],
[-0.81570856839903, -0.39476381053366, -0.39476381053366],
[-0.61656009234697, -0.38343990765303, -0.38343990765303],
[-0.39476381053366, -0.39476381053366, -0.39476381053366],
[-0.14670524474192, -0.42664737762904, -0.42664737762904],
[-1.00000000000000, -0.14670524474192, -0.42664737762904],
[-0.82031213075495, -0.13610693540751, -0.40689302311095],
[-0.63668791072659, -0.13610693540751, -0.40689302311095],
[-0.42664737762904, -0.14670524474192, -0.42664737762904],
[-1.00000000000000, 0.12874995244886, -0.44709790670022],
[-0.83728581509126, 0.12045900323870, -0.44588737305619],
[-0.68165204574864, 0.12874995244886, -0.44709790670022],
[-1.00000000000000, 0.37316877035536, -0.49119920657729],
[-0.88196956377807, 0.37316877035536, -0.49119920657729],
[-1.00000000000000, 0.56523532699620, -0.56523532699621],
[-1.00000000000000, -1.00000000000000, -0.29575813558694],
[-0.87799847850836, -1.00000000000000, -0.20950668229487],
[-0.67623234533581, -1.00000000000000, -0.16188382733210],
[-0.42664737762904, -1.00000000000000, -0.14670524474192],
[-0.16188382733210, -1.00000000000000, -0.16188382733210],
[0.08750516080323, -1.00000000000000, -0.20950668229487],
[0.29575813558694, -1.00000000000000, -0.29575813558694],
[-1.00000000000000, -0.87799847850836, -0.20950668229487],
[-0.83456513545924, -0.83456513545924, -0.16543486454076],
[-0.63668791072659, -0.82031213075495, -0.13610693540751],
[-0.40689302311095, -0.82031213075495, -0.13610693540751],
[-0.16543486454076, -0.83456513545924, -0.16543486454076],
[0.08750516080323, -0.87799847850836, -0.20950668229487],
[-1.00000000000000, -0.67623234533581, -0.16188382733210],
[-0.82031213075495, -0.63668791072659, -0.13610693540751],
[-0.62495499771497, -0.62495499771497, -0.12513500685510],
[-0.40689302311095, -0.63668791072659, -0.13610693540751],
[-0.16188382733210, -0.67623234533581, -0.16188382733210],
[-1.00000000000000, -0.42664737762904, -0.14670524474192],
[-0.82031213075495, -0.40689302311094, -0.13610693540751],
[-0.63668791072659, -0.40689302311094, -0.13610693540751],
[-0.42664737762904, -0.42664737762904, -0.14670524474192],
[-1.00000000000000, -0.16188382733210, -0.16188382733210],
[-0.83456513545924, -0.16543486454076, -0.16543486454076],
[-0.67623234533581, -0.16188382733210, -0.16188382733210],
[-1.00000000000000, 0.08750516080323, -0.20950668229487],
[-0.87799847850836, 0.08750516080323, -0.20950668229487],
[-1.00000000000000, 0.29575813558694, -0.29575813558694],
[-1.00000000000000, -1.00000000000000, 0.00000000000000],
[-0.87799847850836, -1.00000000000000, 0.08750516080323],
[-0.68165204574864, -1.00000000000000, 0.12874995244886],
[-0.44709790670022, -1.00000000000000, 0.12874995244886],
[-0.20950668229487, -1.00000000000000, 0.08750516080323],
[-0.00000000000000, -1.00000000000000, 0.00000000000000],
[-1.00000000000000, -0.87799847850836, 0.08750516080323],
[-0.83728581509126, -0.83728581509126, 0.12045900323870],
[-0.65244925135376, -0.82768715531655, 0.13258565802407],
[-0.44588737305619, -0.83728581509126, 0.12045900323870],
[-0.20950668229487, -0.87799847850836, 0.08750516080323],
[-1.00000000000000, -0.68165204574864, 0.12874995244886],
[-0.82768715531655, -0.65244925135376, 0.13258565802407],
[-0.65244925135376, -0.65244925135376, 0.13258565802407],
[-0.44709790670022, -0.68165204574864, 0.12874995244886],
[-1.00000000000000, -0.44709790670022, 0.12874995244886],
[-0.83728581509126, -0.44588737305619, 0.12045900323870],
[-0.68165204574864, -0.44709790670022, 0.12874995244886],
[-1.00000000000000, -0.20950668229487, 0.08750516080323],
[-0.87799847850836, -0.20950668229487, 0.08750516080323],
[-1.00000000000000, 0.00000000000000, -0.00000000000000],
[-1.00000000000000, -1.00000000000000, 0.29575813558694],
[-0.88196956377807, -1.00000000000000, 0.37316877035536],
[-0.69900284547384, -1.00000000000000, 0.39800569094768],
[-0.49119920657729, -1.00000000000000, 0.37316877035536],
[-0.29575813558694, -1.00000000000000, 0.29575813558694],
[-1.00000000000000, -0.88196956377807, 0.37316877035536],
[-0.84680080912920, -0.84680080912920, 0.38281461781500],
[-0.68921299955661, -0.84680080912920, 0.38281461781500],
[-0.49119920657729, -0.88196956377807, 0.37316877035536],
[-1.00000000000000, -0.69900284547384, 0.39800569094768],
[-0.84680080912920, -0.68921299955661, 0.38281461781500],
[-0.69900284547384, -0.69900284547384, 0.39800569094768],
[-1.00000000000000, -0.49119920657729, 0.37316877035536],
[-0.88196956377807, -0.49119920657729, 0.37316877035536],
[-1.00000000000000, -0.29575813558694, 0.29575813558694],
[-1.00000000000000, -1.00000000000000, 0.56523532699620],
[-0.89101338351023, -1.00000000000000, 0.62253568842167],
[-0.73152230491144, -1.00000000000000, 0.62253568842167],
[-0.56523532699621, -1.00000000000000, 0.56523532699620],
[-1.00000000000000, -0.89101338351023, 0.62253568842167],
[-0.86859548568613, -0.86859548568613, 0.60578645705840],
[-0.73152230491144, -0.89101338351023, 0.62253568842167],
[-1.00000000000000, -0.73152230491144, 0.62253568842167],
[-0.89101338351023, -0.73152230491144, 0.62253568842167],
[-1.00000000000000, -0.56523532699621, 0.56523532699621],
[-1.00000000000000, -1.00000000000000, 0.78448347366314],
[-0.90729952697735, -1.00000000000000, 0.81459905395471],
[-0.78448347366314, -1.00000000000000, 0.78448347366314],
[-1.00000000000000, -0.90729952697735, 0.81459905395471],
[-0.90729952697735, -0.90729952697735, 0.81459905395471],
[-1.00000000000000, -0.78448347366314, 0.78448347366314],
[-1.00000000000000, -1.00000000000000, 0.93400143040806],
[-0.93400143040806, -1.00000000000000, 0.93400143040806],
[-1.00000000000000, -0.93400143040806, 0.93400143040806],
])
elif C==10:
feketeNodes = np.array([
[-1.00000000000000, -1.00000000000000, -1.00000000000000],
[1.00000000000000, -1.00000000000000, -1.00000000000000],
[-1.00000000000000, 1.00000000000000, -1.00000000000000],
[-1.00000000000000, -1.00000000000000, 1.00000000000000],
[-0.94489927222288, -1.00000000000000, -1.00000000000000],
[-0.81927932164401, -1.00000000000000, -1.00000000000000],
[-0.63287615303186, -1.00000000000000, -1.00000000000000],
[-0.39953094096535, -1.00000000000000, -1.00000000000000],
[-0.13655293285493, -1.00000000000000, -1.00000000000000],
[0.13655293285493, -1.00000000000000, -1.00000000000000],
[0.39953094096535, -1.00000000000000, -1.00000000000000],
[0.63287615303186, -1.00000000000000, -1.00000000000000],
[0.81927932164401, -1.00000000000000, -1.00000000000000],
[0.94489927222288, -1.00000000000000, -1.00000000000000],
[-1.00000000000000, -0.94489927222288, -1.00000000000000],
[-0.92151786043038, -0.92151786043038, -1.00000000000000],
[-0.77429591383879, -0.90410902745121, -1.00000000000000],
[-0.57040226533050, -0.89209241318327, -1.00000000000000],
[-0.32565807981549, -0.88505087202618, -1.00000000000000],
[-0.05863404688654, -0.88273190622692, -1.00000000000000],
[0.21070895184167, -0.88505087202618, -1.00000000000000],
[0.46249467851378, -0.89209241318327, -1.00000000000000],
[0.67840494128999, -0.90410902745121, -1.00000000000000],
[0.84303572086077, -0.92151786043038, -1.00000000000000],
[0.94489927222288, -0.94489927222288, -1.00000000000000],
[-1.00000000000000, -0.81927932164401, -1.00000000000000],
[-0.90410902745121, -0.77429591383879, -1.00000000000000],
[-0.74166110875971, -0.74166110875971, -1.00000000000000],
[-0.52692548127315, -0.72043884349009, -1.00000000000000],
[-0.27740272398980, -0.70998997530133, -1.00000000000000],
[-0.01260730070887, -0.70998997530133, -1.00000000000000],
[0.24736432476324, -0.72043884349009, -1.00000000000000],
[0.48332221751941, -0.74166110875971, -1.00000000000000],
[0.67840494128999, -0.77429591383879, -1.00000000000000],
[0.81927932164401, -0.81927932164401, -1.00000000000000],
[-1.00000000000000, -0.63287615303186, -1.00000000000000],
[-0.89209241318327, -0.57040226533050, -1.00000000000000],
[-0.72043884349009, -0.52692548127315, -1.00000000000000],
[-0.50130644558671, -0.50130644558671, -1.00000000000000],
[-0.25357728582324, -0.49284542835353, -1.00000000000000],
[0.00261289117342, -0.50130644558671, -1.00000000000000],
[0.24736432476324, -0.52692548127315, -1.00000000000000],
[0.46249467851378, -0.57040226533050, -1.00000000000000],
[0.63287615303186, -0.63287615303186, -1.00000000000000],
[-1.00000000000000, -0.39953094096535, -1.00000000000000],
[-0.88505087202618, -0.32565807981549, -1.00000000000000],
[-0.70998997530133, -0.27740272398980, -1.00000000000000],
[-0.49284542835353, -0.25357728582324, -1.00000000000000],
[-0.25357728582324, -0.25357728582324, -1.00000000000000],
[-0.01260730070887, -0.27740272398980, -1.00000000000000],
[0.21070895184167, -0.32565807981549, -1.00000000000000],
[0.39953094096535, -0.39953094096535, -1.00000000000000],
[-1.00000000000000, -0.13655293285493, -1.00000000000000],
[-0.88273190622692, -0.05863404688654, -1.00000000000000],
[-0.70998997530133, -0.01260730070887, -1.00000000000000],
[-0.50130644558671, 0.00261289117342, -1.00000000000000],
[-0.27740272398980, -0.01260730070887, -1.00000000000000],
[-0.05863404688654, -0.05863404688654, -1.00000000000000],
[0.13655293285493, -0.13655293285493, -1.00000000000000],
[-1.00000000000000, 0.13655293285493, -1.00000000000000],
[-0.88505087202618, 0.21070895184167, -1.00000000000000],
[-0.72043884349009, 0.24736432476324, -1.00000000000000],
[-0.52692548127315, 0.24736432476324, -1.00000000000000],
[-0.32565807981549, 0.21070895184167, -1.00000000000000],
[-0.13655293285493, 0.13655293285493, -1.00000000000000],
[-1.00000000000000, 0.39953094096535, -1.00000000000000],
[-0.89209241318327, 0.46249467851378, -1.00000000000000],
[-0.74166110875971, 0.48332221751941, -1.00000000000000],
[-0.57040226533050, 0.46249467851378, -1.00000000000000],
[-0.39953094096535, 0.39953094096535, -1.00000000000000],
[-1.00000000000000, 0.63287615303186, -1.00000000000000],
[-0.90410902745121, 0.67840494128999, -1.00000000000000],
[-0.77429591383879, 0.67840494128999, -1.00000000000000],
[-0.63287615303186, 0.63287615303186, -1.00000000000000],
[-1.00000000000000, 0.81927932164401, -1.00000000000000],
[-0.92151786043038, 0.84303572086077, -1.00000000000000],
[-0.81927932164401, 0.81927932164401, -1.00000000000000],
[-1.00000000000000, 0.94489927222288, -1.00000000000000],
[-0.94489927222288, 0.94489927222288, -1.00000000000000],
[-1.00000000000000, -1.00000000000000, -0.94489927222288],
[-0.92151786043038, -1.00000000000000, -0.92151786043038],
[-0.77429591383879, -1.00000000000000, -0.90410902745121],
[-0.57040226533050, -1.00000000000000, -0.89209241318327],
[-0.32565807981549, -1.00000000000000, -0.88505087202618],
[-0.05863404688654, -1.00000000000000, -0.88273190622692],
[0.21070895184167, -1.00000000000000, -0.88505087202618],
[0.46249467851378, -1.00000000000000, -0.89209241318327],
[0.67840494128999, -1.00000000000000, -0.90410902745121],
[0.84303572086077, -1.00000000000000, -0.92151786043038],
[0.94489927222288, -1.00000000000000, -0.94489927222288],
[-1.00000000000000, -0.92151786043038, -0.92151786043038],
[-0.88661024062098, -0.88661024062098, -0.88661024062098],
[-0.73694173023916, -0.86491347593435, -0.86491347593435],
[-0.52974325723302, -0.85396178016912, -0.85396178016912],
[-0.28267608212559, -0.84919912434147, -0.84919912434147],
[-0.01892566919147, -0.84919912434147, -0.84919912434147],
[0.23766681757125, -0.85396178016912, -0.85396178016912],
[0.46676868210785, -0.86491347593435, -0.86491347593435],
[0.65983072186294, -0.88661024062098, -0.88661024062098],
[0.84303572086077, -0.92151786043038, -0.92151786043038],
[-1.00000000000000, -0.77429591383879, -0.90410902745121],
[-0.86491347593435, -0.73694173023916, -0.86491347593435],
[-0.69929094490663, -0.69929094490663, -0.84651122824852],
[-0.48717651513984, -0.67966769800228, -0.83802132950146],
[-0.24545730477403, -0.67357697905978, -0.83550841139217],
[0.00486554264358, -0.67966769800228, -0.83802132950146],
[0.24509311806177, -0.69929094490663, -0.84651122824852],
[0.46676868210785, -0.73694173023916, -0.86491347593435],
[0.67840494128999, -0.77429591383879, -0.90410902745121],
[-1.00000000000000, -0.57040226533050, -0.89209241318327],
[-0.85396178016912, -0.52974325723302, -0.85396178016912],
[-0.67966769800228, -0.48717651513984, -0.83802132950146],
[-0.46740379986922, -0.46740379986922, -0.83175894856653],
[-0.23343345169504, -0.46740379986922, -0.83175894856653],
[0.00486554264358, -0.48717651513984, -0.83802132950146],
[0.23766681757125, -0.52974325723302, -0.85396178016912],
[0.46249467851378, -0.57040226533050, -0.89209241318327],
[-1.00000000000000, -0.32565807981549, -0.88505087202618],
[-0.84919912434147, -0.28267608212560, -0.84919912434147],
[-0.67357697905978, -0.24545730477403, -0.83550841139217],
[-0.46740379986922, -0.23343345169504, -0.83175894856653],
[-0.24545730477403, -0.24545730477403, -0.83550841139217],
[-0.01892566919147, -0.28267608212559, -0.84919912434147],
[0.21070895184167, -0.32565807981549, -0.88505087202618],
[-1.00000000000000, -0.05863404688654, -0.88273190622692],
[-0.84919912434147, -0.01892566919147, -0.84919912434147],
[-0.67966769800228, 0.00486554264358, -0.83802132950146],
[-0.48717651513984, 0.00486554264358, -0.83802132950146],
[-0.28267608212559, -0.01892566919147, -0.84919912434147],
[-0.05863404688654, -0.05863404688654, -0.88273190622692],
[-1.00000000000000, 0.21070895184167, -0.88505087202618],
[-0.85396178016912, 0.23766681757125, -0.85396178016912],
[-0.69929094490663, 0.24509311806177, -0.84651122824852],
[-0.52974325723302, 0.23766681757125, -0.85396178016912],
[-0.32565807981549, 0.21070895184167, -0.88505087202618],
[-1.00000000000000, 0.46249467851378, -0.89209241318327],
[-0.86491347593435, 0.46676868210785, -0.86491347593435],
[-0.73694173023916, 0.46676868210785, -0.86491347593435],
[-0.57040226533050, 0.46249467851378, -0.89209241318327],
[-1.00000000000000, 0.67840494128999, -0.90410902745121],
[-0.88661024062098, 0.65983072186294, -0.88661024062098],
[-0.77429591383879, 0.67840494128999, -0.90410902745121],
[-1.00000000000000, 0.84303572086077, -0.92151786043038],
[-0.92151786043038, 0.84303572086077, -0.92151786043038],
[-1.00000000000000, 0.94489927222288, -0.94489927222288],
[-1.00000000000000, -1.00000000000000, -0.81927932164401],
[-0.90410902745121, -1.00000000000000, -0.77429591383879],
[-0.74166110875971, -1.00000000000000, -0.74166110875971],
[-0.52692548127315, -1.00000000000000, -0.72043884349009],
[-0.27740272398980, -1.00000000000000, -0.70998997530133],
[-0.01260730070887, -1.00000000000000, -0.70998997530133],
[0.24736432476324, -1.00000000000000, -0.72043884349009],
[0.48332221751941, -1.00000000000000, -0.74166110875971],
[0.67840494128999, -1.00000000000000, -0.77429591383879],
[0.81927932164401, -1.00000000000000, -0.81927932164401],
[-1.00000000000000, -0.90410902745121, -0.77429591383879],
[-0.86491347593435, -0.86491347593435, -0.73694173023916],
[-0.69929094490663, -0.84651122824852, -0.69929094490663],
[-0.48717651513984, -0.83802132950146, -0.67966769800228],
[-0.24545730477403, -0.83550841139217, -0.67357697905978],
[0.00486554264358, -0.83802132950146, -0.67966769800228],
[0.24509311806177, -0.84651122824852, -0.69929094490663],
[0.46676868210785, -0.86491347593435, -0.73694173023916],
[0.67840494128999, -0.90410902745121, -0.77429591383879],
[-1.00000000000000, -0.74166110875971, -0.74166110875971],
[-0.84651122824852, -0.69929094490663, -0.69929094490663],
[-0.67073447278009, -0.67073447278009, -0.67073447278009],
[-0.45884944222678, -0.65778667667481, -0.65778667667481],
[-0.22557720442361, -0.65778667667481, -0.65778667667481],
[0.01220341834026, -0.67073447278009, -0.67073447278009],
[0.24509311806177, -0.69929094490663, -0.69929094490663],
[0.48332221751941, -0.74166110875971, -0.74166110875971],
[-1.00000000000000, -0.52692548127315, -0.72043884349009],
[-0.83802132950146, -0.48717651513984, -0.67966769800228],
[-0.65778667667481, -0.45884944222678, -0.65778667667481],
[-0.44965012855595, -0.44965012855595, -0.65104961433215],
[-0.22557720442361, -0.45884944222678, -0.65778667667481],
[0.00486554264358, -0.48717651513984, -0.67966769800228],
[0.24736432476324, -0.52692548127315, -0.72043884349009],
[-1.00000000000000, -0.27740272398980, -0.70998997530133],
[-0.83550841139217, -0.24545730477403, -0.67357697905978],
[-0.65778667667481, -0.22557720442361, -0.65778667667481],
[-0.45884944222678, -0.22557720442361, -0.65778667667481],
[-0.24545730477403, -0.24545730477403, -0.67357697905978],
[-0.01260730070887, -0.27740272398980, -0.70998997530133],
[-1.00000000000000, -0.01260730070887, -0.70998997530133],
[-0.83802132950146, 0.00486554264358, -0.67966769800228],
[-0.67073447278009, 0.01220341834026, -0.67073447278009],
[-0.48717651513984, 0.00486554264358, -0.67966769800228],
[-0.27740272398980, -0.01260730070887, -0.70998997530133],
[-1.00000000000000, 0.24736432476324, -0.72043884349009],
[-0.84651122824852, 0.24509311806177, -0.69929094490663],
[-0.69929094490663, 0.24509311806177, -0.69929094490663],
[-0.52692548127315, 0.24736432476324, -0.72043884349009],
[-1.00000000000000, 0.48332221751941, -0.74166110875971],
[-0.86491347593435, 0.46676868210785, -0.73694173023916],
[-0.74166110875971, 0.48332221751941, -0.74166110875971],
[-1.00000000000000, 0.67840494128999, -0.77429591383879],
[-0.90410902745121, 0.67840494128999, -0.77429591383879],
[-1.00000000000000, 0.81927932164401, -0.81927932164401],
[-1.00000000000000, -1.00000000000000, -0.63287615303186],
[-0.89209241318327, -1.00000000000000, -0.57040226533050],
[-0.72043884349009, -1.00000000000000, -0.52692548127315],
[-0.50130644558671, -1.00000000000000, -0.50130644558671],
[-0.25357728582324, -1.00000000000000, -0.49284542835353],
[0.00261289117342, -1.00000000000000, -0.50130644558671],
[0.24736432476324, -1.00000000000000, -0.52692548127315],
[0.46249467851378, -1.00000000000000, -0.57040226533050],
[0.63287615303186, -1.00000000000000, -0.63287615303186],
[-1.00000000000000, -0.89209241318327, -0.57040226533050],
[-0.85396178016912, -0.85396178016912, -0.52974325723302],
[-0.67966769800228, -0.83802132950146, -0.48717651513984],
[-0.46740379986922, -0.83175894856653, -0.46740379986922],
[-0.23343345169504, -0.83175894856653, -0.46740379986922],
[0.00486554264358, -0.83802132950146, -0.48717651513984],
[0.23766681757125, -0.85396178016912, -0.52974325723302],
[0.46249467851378, -0.89209241318327, -0.57040226533050],
[-1.00000000000000, -0.72043884349009, -0.52692548127315],
[-0.83802132950146, -0.67966769800228, -0.48717651513984],
[-0.65778667667481, -0.65778667667481, -0.45884944222678],
[-0.44965012855595, -0.65104961433215, -0.44965012855595],
[-0.22557720442361, -0.65778667667481, -0.45884944222678],
[0.00486554264358, -0.67966769800228, -0.48717651513984],
[0.24736432476324, -0.72043884349009, -0.52692548127315],
[-1.00000000000000, -0.50130644558671, -0.50130644558671],
[-0.83175894856653, -0.46740379986922, -0.46740379986922],
[-0.65104961433215, -0.44965012855595, -0.44965012855595],
[-0.44965012855595, -0.44965012855595, -0.44965012855595],
[-0.23343345169504, -0.46740379986922, -0.46740379986922],
[0.00261289117342, -0.50130644558671, -0.50130644558671],
[-1.00000000000000, -0.25357728582324, -0.49284542835353],
[-0.83175894856653, -0.23343345169504, -0.46740379986922],
[-0.65778667667481, -0.22557720442361, -0.45884944222678],
[-0.46740379986922, -0.23343345169504, -0.46740379986922],
[-0.25357728582324, -0.25357728582324, -0.49284542835353],
[-1.00000000000000, 0.00261289117342, -0.50130644558671],
[-0.83802132950146, 0.00486554264358, -0.48717651513984],
[-0.67966769800228, 0.00486554264358, -0.48717651513984],
[-0.50130644558671, 0.00261289117342, -0.50130644558671],
[-1.00000000000000, 0.24736432476324, -0.52692548127315],
[-0.85396178016912, 0.23766681757125, -0.52974325723302],
[-0.72043884349009, 0.24736432476324, -0.52692548127315],
[-1.00000000000000, 0.46249467851378, -0.57040226533050],
[-0.89209241318327, 0.46249467851378, -0.57040226533050],
[-1.00000000000000, 0.63287615303186, -0.63287615303186],
[-1.00000000000000, -1.00000000000000, -0.39953094096535],
[-0.88505087202618, -1.00000000000000, -0.32565807981549],
[-0.70998997530133, -1.00000000000000, -0.27740272398980],
[-0.49284542835353, -1.00000000000000, -0.25357728582324],
[-0.25357728582324, -1.00000000000000, -0.25357728582324],
[-0.01260730070887, -1.00000000000000, -0.27740272398980],
[0.21070895184167, -1.00000000000000, -0.32565807981549],
[0.39953094096535, -1.00000000000000, -0.39953094096535],
[-1.00000000000000, -0.88505087202618, -0.32565807981549],
[-0.84919912434147, -0.84919912434147, -0.28267608212560],
[-0.67357697905978, -0.83550841139217, -0.24545730477403],
[-0.46740379986922, | |
# -*- coding: utf-8 -*-
import io
import requests
from lxml import etree, objectify
from xml.etree import ElementTree as ET
from uuid import uuid4
import pprint
import logging
from odoo.addons.payment.models.payment_acquirer import _partner_split_name
from odoo.exceptions import ValidationError, UserError
from odoo import _
_logger = logging.getLogger(__name__)
XMLNS = 'AnetApi/xml/v1/schema/AnetApiSchema.xsd'
def strip_ns(xml, ns):
    """Strip the provided namespace from tag names.

    :param bytes xml: xml document
    :param str ns: namespace to strip
    :rtype: etree._Element
    :return: the parsed xml string with the namespace prefix removed
    """
    it = ET.iterparse(io.BytesIO(xml))
    # Fix: build the prefix from the ``ns`` argument; the parameter used to
    # be ignored in favor of the hard-coded module-level XMLNS constant.
    ns_prefix = '{%s}' % ns
    # ``_event`` (not ``_``) so the module-level odoo gettext ``_`` is not shadowed.
    for _event, el in it:
        if el.tag.startswith(ns_prefix):
            el.tag = el.tag[len(ns_prefix):]  # strip all Auth.net namespaces
    return it.root
def error_check(elem):
"""Check if the response sent by Authorize.net contains an error.
Errors can be a failure to try the transaction (in that case, the transasctionResponse
is empty, and the meaningful error message will be in message/code) or a failure to process
the transaction (in that case, the message/code content will be generic and the actual error
message is in transactionResponse/errors/error/errorText).
:param etree._Element elem: the root element of the response that will be parsed
:rtype: tuple (bool, str)
:return: tuple containnig a boolean indicating if the response should be considered
as an error and the most meaningful error message found in it.
"""
result_code = elem.find('messages/resultCode')
msg = 'No meaningful error message found, please check logs or the Authorize.net backend'
has_error = result_code is not None and result_code.text == 'Error'
if has_error:
# accumulate the most meangingful error
error = elem.find('transactionResponse/errors/error')
error = error if error is not None else elem.find('messages/message')
if error is not None:
code = error[0].text
text = error[1].text
msg = '%s: %s' % (code, text)
return (has_error, msg)
class AuthorizeAPI():
"""Authorize.net Gateway API integration.
This class allows contacting the Authorize.net API with simple operation
requests. It implements a *very limited* subset of the complete API
(http://developer.authorize.net/api/reference); namely:
- Customer Profile/Payment Profile creation
- Transaction authorization/capture/voiding
"""
AUTH_ERROR_STATUS = 3
def __init__(self, acquirer):
"""Initiate the environment with the acquirer data.
:param record acquirer: payment.acquirer account that will be contacted
"""
if acquirer.environment == 'test':
self.url = 'https://apitest.authorize.net/xml/v1/request.api'
else:
self.url = 'https://api.authorize.net/xml/v1/request.api'
self.name = acquirer.authorize_login
self.transaction_key = acquirer.authorize_transaction_key
    def _authorize_request(self, data):
        """Encode, send and process the request to the Authorize.net API.

        Encodes the xml data and process the response. Note that only a basic
        processing is done at this level (namespace cleanup, basic error management).

        :param etree._Element data: etree data to process
        :return: parsed response element tree with the Auth.net namespace stripped
        """
        logged_data = data
        # Serialize the real payload FIRST: the credential/card scrubbing below
        # mutates the same element tree, so order matters here.
        data = etree.tostring(data, encoding='utf-8')
        # Remove sensitive nodes from the copy that goes to the logs only.
        for node_to_remove in ['//merchantAuthentication', '//creditCard']:
            for node in logged_data.xpath(node_to_remove):
                node.getparent().remove(node)
        logged_data = str(etree.tostring(logged_data, encoding='utf-8', pretty_print=True)).replace(r'\n', '\n')
        _logger.info('_authorize_request: Sending values to URL %s, values:\n%s', self.url, logged_data)
        r = requests.post(self.url, data=data, headers={'Content-Type': 'text/xml'})
        r.raise_for_status()
        # Strip the default namespace so callers can use short tag names
        # in find()/findall().
        response = strip_ns(r.content, XMLNS)
        logged_data = etree.XML(r.content)
        logged_data = str(etree.tostring(logged_data, encoding='utf-8', pretty_print=True)).replace(r'\n', '\n')
        _logger.info('_authorize_request: Values received\n%s', logged_data)
        return response
def _base_tree(self, requestType):
"""Create a basic tree containing authentication information.
Create a etree Element of type requestType and appends the Authorize.net
credentials (they are always required).
:param str requestType: the type of request to send to Authorize.net
See http://developer.authorize.net/api/reference
for available types.
:return: basic etree Element of the requested type
containing credentials information
:rtype: etree._Element
"""
root = etree.Element(requestType, xmlns=XMLNS)
auth = etree.SubElement(root, "merchantAuthentication")
etree.SubElement(auth, "name").text = self.name
etree.SubElement(auth, "transactionKey").text = self.transaction_key
return root
# Customer profiles
def create_customer_profile(self, partner, cardnumber, expiration_date, card_code):
"""Create a payment and customer profile in the Authorize.net backend.
Creates a customer profile for the partner/credit card combination and links
a corresponding payment profile to it. Note that a single partner in the Odoo
database can have multiple customer profiles in Authorize.net (i.e. a customer
profile is created for every res.partner/payment.token couple).
:param record partner: the res.partner record of the customer
:param str cardnumber: cardnumber in string format (numbers only, no separator)
:param str expiration_date: expiration date in 'YYYY-MM' string format
:param str card_code: three- or four-digit verification number
:return: a dict containing the profile_id and payment_profile_id of the
newly created customer profile and payment profile
:rtype: dict
"""
root = self._base_tree('createCustomerProfileRequest')
profile = etree.SubElement(root, "profile")
# merchantCustomerId is ODOO-{partner.id}-{random hex string} truncated to maximum 20 characters
etree.SubElement(profile, "merchantCustomerId").text = ('ODOO-%s-%s' % (partner.id, uuid4().hex[:8]))[:20]
etree.SubElement(profile, "email").text = partner.email or ''
payment_profile = etree.SubElement(profile, "paymentProfiles")
etree.SubElement(payment_profile, "customerType").text = 'business' if partner.is_company else 'individual'
billTo = etree.SubElement(payment_profile, "billTo")
if partner.is_company:
etree.SubElement(billTo, "firstName").text = ' '
etree.SubElement(billTo, "lastName").text = partner.name
else:
etree.SubElement(billTo, "firstName").text = _partner_split_name(partner.name)[0]
etree.SubElement(billTo, "lastName").text = _partner_split_name(partner.name)[1]
etree.SubElement(billTo, "address").text = (partner.street or '' + (partner.street2 if partner.street2 else '')) or None
missing_fields = [partner._fields[field].string for field in ['city', 'country_id'] if not partner[field]]
if missing_fields:
raise ValidationError({'missing_fields': missing_fields})
etree.SubElement(billTo, "city").text = partner.city
etree.SubElement(billTo, "state").text = partner.state_id.name or None
etree.SubElement(billTo, "zip").text = partner.zip or ''
etree.SubElement(billTo, "country").text = partner.country_id.name or None
payment = etree.SubElement(payment_profile, "payment")
creditCard = etree.SubElement(payment, "creditCard")
etree.SubElement(creditCard, "cardNumber").text = cardnumber
etree.SubElement(creditCard, "expirationDate").text = expiration_date
etree.SubElement(creditCard, "cardCode").text = card_code
etree.SubElement(root, "validationMode").text = 'liveMode'
response = self._authorize_request(root)
# If the user didn't set up authorize.net properly then the response
# won't contain stuff like customerProfileId and accessing text
# will raise a NoneType has no text attribute
msg = response.find('messages')
if msg is not None:
rc = msg.find('resultCode')
if rc is not None and rc.text == 'Error':
err = msg.find('message')
err_code = err.find('code').text
err_msg = err.find('text').text
raise UserError(
"Authorize.net Error:\nCode: %s\nMessage: %s"
% (err_code, err_msg)
)
res = dict()
res['profile_id'] = response.find('customerProfileId').text
res['payment_profile_id'] = response.find('customerPaymentProfileIdList/numericString').text
return res
    def create_customer_profile_from_tx(self, partner, transaction_id):
        """Create an Auth.net payment/customer profile from an existing transaction.

        Creates a customer profile for the partner/credit card combination and links
        a corresponding payment profile to it. Note that a single partner in the Odoo
        database can have multiple customer profiles in Authorize.net (i.e. a customer
        profile is created for every res.partner/payment.token couple).

        Note that this function makes 2 calls to the authorize api, since we need to
        obtain a partial cardnumber to generate a meaningful payment.token name.

        :param record partner: the res.partner record of the customer
        :param str transaction_id: id of the authorized transaction in the
                                   Authorize.net backend
        :return: a dict containing the profile_id and payment_profile_id of the
                 newly created customer profile and payment profile as well as the
                 last digits of the card number; empty dict if the profile could
                 not be created
        :rtype: dict
        """
        root = self._base_tree('createCustomerProfileFromTransactionRequest')
        etree.SubElement(root, "transId").text = transaction_id
        customer = etree.SubElement(root, "customer")
        # merchantCustomerId is ODOO-{partner.id}-{random hex string} truncated to maximum 20 characters
        etree.SubElement(customer, "merchantCustomerId").text = ('ODOO-%s-%s' % (partner.id, uuid4().hex[:8]))[:20]
        etree.SubElement(customer, "email").text = partner.email or ''
        response = self._authorize_request(root)
        res = dict()
        # Warning: do not use bool(etree) as the semantics is very misleading
        # (an element with no children is falsy); compare against None instead.
        if response.find('customerProfileId') is None:
            _logger.warning(
                'Unable to create customer payment profile, data missing from transaction. Transaction_id: %s - Partner_id: %s'
                % (transaction_id, partner)
            )
            # An empty dict signals failure to the caller.
            return res
        res['profile_id'] = response.find('customerProfileId').text
        res['payment_profile_id'] = response.find('customerPaymentProfileIdList/numericString').text
        # Second API call: fetch the stored payment profile so we can read the
        # (masked) card number for the payment.token display name.
        root_profile = self._base_tree('getCustomerPaymentProfileRequest')
        etree.SubElement(root_profile, "customerProfileId").text = res['profile_id']
        etree.SubElement(root_profile, "customerPaymentProfileId").text = res['payment_profile_id']
        response_profile = self._authorize_request(root_profile)
        res['name'] = response_profile.find('paymentProfile/payment/creditCard/cardNumber').text
        return res
    def credit(self, token, amount, transaction_id):
        """ Refund a payment for the given amount.

        :param record token: the payment.token record that must be refunded.
        :param str amount: transaction amount
        :param str transaction_id: the reference of the transacation that is going to be refunded.
        :return: a dict containing the response code, transaction id and transaction type
        :rtype: dict
        """
        root = self._base_tree('createTransactionRequest')
        tx = etree.SubElement(root, "transactionRequest")
        etree.SubElement(tx, "transactionType").text = "refundTransaction"
        etree.SubElement(tx, "amount").text = str(amount)
        payment = etree.SubElement(tx, "payment")
        credit_card = etree.SubElement(payment, "creditCard")
        # Recover the last 4 card digits from the token display name, which is
        # the only place they are stored. NOTE(review): assumes token.name
        # contains ' - ' with the 4 digits immediately before it; if ' - ' is
        # absent, find() returns -1 and the slice silently yields the wrong
        # characters -- TODO confirm the name format is guaranteed upstream.
        idx = token.name.find(' - ')
        etree.SubElement(credit_card, "cardNumber").text = token.name[idx-4:idx] # shitty hack, but that's the only way to get the 4 last digits
        # Placeholder expiration date; presumably not validated by the API for
        # refunds -- TODO confirm.
        etree.SubElement(credit_card, "expirationDate").text = "XXXX"
        etree.SubElement(tx, "refTransId").text = transaction_id
        response = self._authorize_request(root)
        res = dict()
        res['x_response_code'] = response.find('transactionResponse/responseCode').text
        res['x_trans_id'] = transaction_id
        res['x_type'] = 'refund'
        return res
# Transaction management
def auth_and_capture(self, token, amount, reference):
"""Authorize and capture a payment for the given amount.
Authorize and immediately capture a payment for the given payment.token
record for the specified amount with reference as communication.
:param record token: the payment.token record that must be charged
:param str amount: transaction amount | |
#recall that our 3d left and right are approximately cubes in python lists, so put them in zero padded numpy arrays to be actual cubes
left = get_numpy_cube(response_matrices[filter][t], spatial_info_l, 0)
right = get_numpy_cube(response_matrices[filter][t], spatial_info_r, lcount)
#we can lose any notion of 3d space now, these will just return flattened lists
left_pooled = pooling_3d(left, pooling_cube)
right_pooled = pooling_3d(right, pooling_cube)
timeseries_l.append(left_pooled)
timeseries_r.append(right_pooled)
pooled_final_l.append(timeseries_l)
pooled_final_r.append(timeseries_r)
return pooled_final_l, pooled_final_r
# starting index is either 0 or lcount, so that the values for the right side
# cube are taken starting at index lcount in the flattened array, since the
# flattened array is left + right
def get_numpy_cube(flat_values, spatial_info, starting_index):
    """Unflatten `flat_values` into a zero-padded 3d numpy cube.

    spatial_info holds (x_dim, y_dim, z_dim, y_lengths, z_lengths), where
    y_lengths[x] is the number of y entries present at each x, and z_lengths
    has one z extent per (x, y) pair in traversal order. Positions not covered
    by the ragged extents stay zero.
    """
    x_dim = spatial_info[0]
    y_dim = spatial_info[1]
    z_dim = spatial_info[2]
    y_lengths = spatial_info[3]
    z_lengths = spatial_info[4]
    cube = numpy.zeros((x_dim, y_dim, z_dim))
    flat_pos = starting_index   # next position to read in the flattened array
    column = 0                  # index into z_lengths, one entry per (x, y)
    for x in range(x_dim):
        # how far do we go in the y direction at this x value?
        for y in range(y_lengths[x]):
            # how far do we go in the z direction at this (x, y)?
            for z in range(z_lengths[column]):
                cube[x, y, z] = flat_values[flat_pos]
                flat_pos += 1
            column += 1
    return cube
def pooling_3d(cube, pooling_cube):
    """Max-pool `cube` with window/stride `pooling_cube`, returning a flat list.

    Windows that would run past the cube boundary are shrunk to fit. Each
    surviving window contributes tanh(max(window)); windows rejected by
    zero_check(...) are skipped entirely.
    """
    x_stride = pooling_cube[0]
    y_stride = pooling_cube[1]
    z_stride = pooling_cube[2]
    x_len = len(cube)
    y_len = len(cube[0])
    z_len = len(cube[0][0])
    pooled_values = []
    for x in range(0, x_len, x_stride):
        # clip the window extent to what remains of the cube in each axis
        c_x = min(x_stride, x_len - x)
        for y in range(0, y_len, y_stride):
            c_y = min(y_stride, y_len - y)
            for z in range(0, z_len, z_stride):
                c_z = min(z_stride, z_len - z)
                sub_cube = get_cube(cube, -1, x, y, z, c_x, c_y, c_z)
                if zero_check(sub_cube, c_x, c_y, c_z, "secondary"):
                    pooled_values.append(tanh(numpy.amax(sub_cube)))
    return pooled_values
def save_block(arg_dict, left, right, layer, log_file, iteration):
    """Pickle the flat pooled values for one side of the ROI to disk.

    NOTE(review): only `left` is written; `right`, `layer` and `iteration`
    are currently unused -- presumably the caller invokes this once per
    side. TODO confirm before relying on this signature.
    """
    subject = arg_dict["subject"]
    side = arg_dict["ROI"][-1]       # last char of ROI encodes the side
    ROI = arg_dict["ROI"][:-1]
    directory = ROI_PATH +"/"+str(subject)+"/"+str(ROI)+"/"
    log_file.write("Saving flat pooled values, these should be concatenated and then made into an mvpa dataset object\n")
    half = arg_dict["half"]
    if half != 0:
        filename = directory+"strat"+arg_dict["strategy"]+"_"+(arg_dict["half_dict"])[half]+"_primary_pooled_"+str(side)+".p"
    else:
        filename = directory+"strat"+arg_dict["strategy"]+"_primary_pooled_"+str(side)+".p"
    # BUG FIX: use a context manager so the handle is flushed and closed
    # (the original leaked the open file object).
    with open(filename, "wb") as flat_p:
        pickle.dump(left, flat_p)
# layer is either 'primary' or 'secondary'
def load_block(arg_dict, layer, version, log_file, iteration=""):
    """Load the pickled left/right blocks for `layer` from disk.

    :param arg_dict: dict with at least "subject" and "ROI" keys
    :param layer: 'primary' or 'secondary'
    :param version: python version the pickles were written by; 3 selects
        the unsuffixed files, anything else the "_py2" files
    :param log_file: open file-like object for progress messages
    :param iteration: optional iteration tag embedded in the filename.
        BUG FIX: the original body referenced an undefined `iteration`
        name (guaranteed NameError); it is now a parameter with a
        backward-compatible default.
    :return: (left, right) unpickled objects
    """
    subject = arg_dict["subject"]
    ROI = arg_dict["ROI"]
    directory = "/isi/music/auditoryimagery2/seanfiles/"+str(subject)+"/"+str(ROI)+"/"
    # if we are loading in python3, the filename is just left/right_primary/secondary,
    # so make version the empty string
    if int(version) == 3:
        version = ""
    else:
        version = "_py2"
    log_file.write("Loading "+layer+" block of left and right sides...\n")
    # Context managers close the handles (the original leaked both).
    with open(directory+"/left_"+layer+version+str(iteration)+".p", "rb") as left_p:
        left = pickle.load(left_p)
    with open(directory+"/right_"+layer+version+str(iteration)+".p", "rb") as right_p:
        right = pickle.load(right_p)
    return left, right
def load_subject(subject, ROI, log_file):
    """Unpickle and return the raw dataset for one subject/ROI.

    :param subject: subject identifier used in the directory layout
    :param ROI: region-of-interest name used in the directory layout
    :param log_file: unused here; kept for signature compatibility
    :return: the unpickled raw dataset object
    """
    directory = "/isi/music/auditoryimagery2/seanfiles/"+str(subject)+"/"+str(ROI)
    # BUG FIX: context manager closes the handle (the original leaked it).
    # (Dead commented-out samples/chunks/targets loading removed.)
    with open(directory + "/raw_ds.p", "rb") as ds_p:
        ds = pickle.load(ds_p)
    return ds
def convolve(ae, samples, num_features, num_filters, temporal_window, log_file, arg_dict):
    """Convolve `samples` along the time axis using filters copied from a
    trained autoencoder's 'encoder' layer.

    :param ae: trained Keras model containing a layer named 'encoder'
    :param samples: 2d array-like -- presumably (TIMESTEPS, num_features);
        TODO confirm against the caller
    :param num_features: number of features per timestep
    :param num_filters: number of convolution filters (encoder units)
    :param temporal_window: kernel height along the time axis
    :param log_file: open file-like object for progress messages
    :param arg_dict: dict holding at least "TIMESTEPS"
    :return: convolved output with the leading batch dimension removed
    """
    # numpy mumbo jumbo to get the right shape/format:
    # wrap twice so numpy.array yields shape (1, 1, TIMESTEPS, num_features)
    # -- a single sample with a single channel (channels_first).
    samples = [samples]
    samples = [samples]
    TIMESTEPS = arg_dict["TIMESTEPS"]
    samples = numpy.array(samples)
    log_file.write("Convolving along time axis with temporal window "+str(temporal_window)+"... \n\n")
    #### convolution architecture #####
    full_input = Input(shape=(1, TIMESTEPS, num_features))
    # convolve along the time dimension only: kernel is (temporal_window, 1),
    # so no mixing across the feature axis
    firstConv = Conv2D(num_filters, (temporal_window, 1), strides=1, padding='same',
                       data_format='channels_first', name='conv1')
    # sequential API
    conv1 = firstConv(full_input)
    firstBlock = Model(inputs=full_input, outputs=conv1)
    # these parameters don't matter since we're not training here. the compile call is just to build the graph.
    firstBlock.compile(optimizer='adamax', loss='mse',
                       metrics=['cosine_proximity'])
    # use loaded model's neurons as filters
    # required numpy mumbo jumbo to make it the right shape even though it already was
    trained_weights = ae.get_layer('encoder').get_weights()
    log_file.write("trained weights is "+str(trained_weights))
    new_weights = []
    # Re-nest each kernel row to the (rows, 1, 1, features) layout Conv2D
    # expects for its kernel tensor.
    for i in range(0, temporal_window):
        a = trained_weights[0][i]  # a is a list
        a = [[a]]
        new_weights.append(a)
    new_weights = numpy.array(new_weights)
    # new_weights is the first item in the final array.
    new_biases = numpy.array(trained_weights[1])
    # new_biases is the second item in the final array
    final_weights = [new_weights, new_biases]
    final_weights = numpy.array(final_weights)
    # print("final_weights has shape "+str(final_weights.shape))
    # finally set the weights to be our trained filters
    firstConv.set_weights(final_weights)
    out = firstBlock.predict(samples)
    out = out[0]  # out has an extra dimension for stupid numpy reasons
    return out
######### fills the 3d volume with the values from responses (which is flat)
# def get_3d_encoded(volume, flattened, responses, num_filters, voxel_indices, directory, log_file):
# #our map for placing the encoded values in "responses" into the 3d volume
#
# zerocount = 0
# left_encoded_responses = []
# right_encoded_responses = []
#
# #We need to know where to look to find the active voxels, then store those bounds for our loops to use
# l_x, l_x_min, l_y, l_y_min, l_z, l_z_min, r_x, r_x_min, r_y, r_y_min, r_z, r_z_min = get_voxel_bounds(voxel_indices, log_file)
#
# for response in range(0, num_filters):
# nonzerocount = 0
# # we need to create the encoded version of the original data for each filter/response matrix
# log_file.write("Overwriting old data with encoded data for response " + str(response)+"\n\n")
# for t in range(0, TIMESTEPS):
# count = 0
# #The list of voxel indices is in the same order as the flattened values, so we can count through "responses" with it
# for voxel in voxel_indices:
# i = voxel[0]
# j = voxel[1]
# k = voxel[2]
# # print("i, j, and k are "+str(i)+", "+str(j)+", "+str(k))
# # get the encoded value corresponding to that voxel at that point in time
# enc_value = responses[0][response][t][count]
# if (enc_value == 0):
# zerocount += 1
# # print("encoded value of zero: count is "+str(zerocount))
#
# # replace that voxel value with the encoded value
# volume[t][i][j][k] = enc_value
#
# count += 1
#
#
# log_file.write("slicing left hemi of response " + str(response) + "...")
# left_hemi = numpy.zeros((TIMESTEPS, l_x, l_y, l_z))
#
# for t in range(0, TIMESTEPS):
# for x in range(0, l_x):
# for y in range(0, l_y):
# for z in range(0, l_z):
# value = volume[t][l_x_min + x][l_y_min + y][l_z_min + z]
# if (value != 0):
# nonzerocount += 1
# left_hemi[t][x][y][z] = value
#
# log_file.write("l_hemi has shape " + str(left_hemi.shape)+"\n\n")
#
#
# left_encoded_responses.append(left_hemi)
#
# print("slicing right hemi of response " + str(response) + "...")
# r_hemi = numpy.zeros((TIMESTEPS, r_x, r_y, r_z))
#
# for t in range(0, TIMESTEPS):
# for x in range(0, r_x):
# for y in range(0, r_y):
# for z in range(0, r_z):
# r_hemi[t][x][y][z] = volume[t][r_x_min + x][r_y_min + y][r_z_min + z]
#
# right_encoded_responses.append(r_hemi)
#
# return [l_x, l_x_min, l_y, l_y_min, l_z, l_z_min, r_x, r_x_min, r_y, r_y_min, r_z, r_z_min]
def get_voxel_bounds(voxels, arg_dict, log_file):
subject = arg_dict["subject"]
roi = arg_dict["ROI"]
min_x = -1
max_x = -1
l_x_min = -1
l_x_max = -1
r_x_min = -1
r_x_max = -1
x_dict = {}
#### figure out the spread of x values
for voxel in voxels:
if min_x == -1:
min_x = voxel[0]
elif voxel[0] < min_x:
min_x = voxel[0]
elif voxel[0] > max_x:
max_x = voxel[0]
l_x_min = min_x
r_x_max = max_x
for m in range(0, max_x+1):
x_dict[m] = 0
#Flag all the x values with active voxels
for voxel in voxels:
x_val = voxel[0]
x_dict[x_val] += 1
for m in range(min_x, max_x+1):
#if that's an active voxel
if x_dict[m]!=0:
#if left hemisphere's max was already found and right hemisphere's min has not been found, then this is r_x_min
if l_x_max !=- 1 and r_x_min == -1:
r_x_min = m
if x_dict[m]==0:
# if we find a zero and haven't found l_x_max, then the previous one must be l_x_max
if l_x_max == -1:
l_x_max = m-1
##############
##############################
################################################
#the above code doesn't work on contiguous regions of the brain because there's no gap
#let's try loading | |
<gh_stars>0
#
# MIT License
#
# (C) Copyright 2020-2022 Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""
slurm-related test helper functions
"""
from common.helpers import debug, error, info, raise_test_error, \
raise_test_exception_error, run_cmd_list, \
sleep
from common.utils import run_command_on_xname_via_ssh, scp_to_xname
import datetime
import os.path
import re
import tempfile
import time
# Our nodes in slurm are named nid######
# Examples:
# - nid000001
# Ranges of nodes can be specified with nid#####[#-#], nid####[##-##], nid###[###-###], nid##[####-####], nid#[#####-#####], nid[######-######]
# Examples:
# - nid0000[02-12]
# - nid[000003-000007]
# Inside brackets, commas can also be used to specify multiple ranges or nids.
# Examples:
# - nid0000[02-12,17,20-29]
# Commas can also be used outside brackets, to join any of the above together
# Examples:
# - nid000005,nid[000007-000010],nid0000[12-22,27,30-39]
# So inside of brackets, there can be a list or a single entry
# Each entry is either a single number, or a range of numbers
# Inside the brackets, every number will have the same number of digits
# Examples of entries:
# 1
# 05
# 11-17
# This is a map for the relevant RE for entries with 1 digit numbers,
# 2 digit numbers, etc, up to the maximum of 6 digit numbers
BRACKET_LIST_ENTRY_RE = {
1: "[0-9](?:-[0-9])?",
2: "[0-9]{2}(?:-[0-9]{2})?",
3: "[0-9]{3}(?:-[0-9]{3})?",
4: "[0-9]{4}(?:-[0-9]{4})?",
5: "[0-9]{5}(?:-[0-9]{5})?",
6: "[0-9]{6}(?:-[0-9]{6})?" }
# This is a map for RE for bracket lists of 1 digit numbers,
# 2 digit numbers, etc
BRACKET_LIST_RE = {
n: "%s(?:,%s)*" % (BRACKET_LIST_ENTRY_RE[n], BRACKET_LIST_ENTRY_RE[n])
for n in range(1,7)
}
BRACKET_LIST_RE_PROG = { n: re.compile("^%s$" % BRACKET_LIST_RE[n]) for n in BRACKET_LIST_RE.keys() }
# So at the top level, we can have a single node specification, or a list of them
# Each node specification will follow one of the following patterns:
# nid[6-digit bracket list pattern]
# nid#[5-digit bracket list pattern]
# ...
# nid#####[1-digit bracket list pattern]
# nid######
# This is a map for each node specification list entry RE, mapped from the number
# of digits the numbers in the brackets contain (0 in the case where there are
# no brackets)
#
# We include capturing parenthesis around the numbers preceding the brackets (if any)
# and the contents of the brackets (if any), to help ourselves with later parsing
BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE = {
0: "nid([0-9]{6})",
1: "nid([0-9]{5})\[(%s)]" % BRACKET_LIST_RE[1],
2: "nid([0-9]{4})\[(%s)]" % BRACKET_LIST_RE[2],
3: "nid([0-9]{3})\[(%s)]" % BRACKET_LIST_RE[3],
4: "nid([0-9]{2})\[(%s)]" % BRACKET_LIST_RE[4],
5: "nid([0-9])\[(%s)]" % BRACKET_LIST_RE[5],
6: "nid\[(%s)]" % BRACKET_LIST_RE[6] }
BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE_PROG = {
n: re.compile("^%s$" % BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE[n]) for n in BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE.keys() }
# So the combined RE for all possible node specifications
NODE_SPEC_ENTRY_RE = "|".join( [ "(?:%s)" % BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE[n] for n in range(0,7) ])
# Finally, this RE is for a node specification list, where each entry must
# match one of our node spec entry patterns
# We capture the first entry of the list to help us with later parsing
NODE_SPEC_LIST_RE = "(%s)(?:,%s)*" % (NODE_SPEC_ENTRY_RE, NODE_SPEC_ENTRY_RE)
NODE_SPEC_LIST_RE_PROG = re.compile("^%s$" % NODE_SPEC_LIST_RE)
def slurm_bracket_list_entry_to_nidlist(bracket_list_entry):
    """
    Convert one bracket-list entry into the list of NID numbers it denotes.

    The entry is either a single nonnegative integer ("17") or a low-high
    range ("11-17"), which expands to every NID from the low end to the high
    end inclusive. Assumes the string already matched one of our REs.
    """
    dash_count = bracket_list_entry.count("-")
    if dash_count > 1:
        error("PROGRAMMING LOGIC ERROR: Our prior regular expression checking should have prevented us from hitting this")
        raise_test_error("Bracket list entry should contain 0-1 dashes, but this contains %d: %s" % (dash_count, bracket_list_entry))
    if dash_count == 0:
        # No dashes means it should just be a single nonnegative integer
        try:
            return [ int(bracket_list_entry) ]
        except ValueError as e:
            raise_test_exception_error(e, "to parse bracket list entry as integer (%s)" % bracket_list_entry)
    # Exactly one dash: a low-high range
    low_str, _, high_str = bracket_list_entry.partition('-')
    try:
        low = int(low_str)
    except ValueError as e:
        error("PROGRAMMING LOGIC ERROR: Our prior regular expression checking should have prevented us from hitting this")
        raise_test_exception_error(e, "to parse first integer (%s) in range (%s)" % (low_str, bracket_list_entry))
    try:
        high = int(high_str)
    except ValueError as e:
        error("PROGRAMMING LOGIC ERROR: Our prior regular expression checking should have prevented us from hitting this")
        raise_test_exception_error(e, "to parse integer (%s) in range (%s)" % (high_str, bracket_list_entry))
    if low > high:
        # Our RE doesn't check for this
        raise_test_error("First number in range must be <= second number. Invalid range: %s" % bracket_list_entry)
    return list(range(low, high + 1))
def slurm_bracket_contents_to_nidlist(bracket_contents):
    """
    Expand the comma-separated contents of a bracket expression into NIDs.

    Each comma-separated piece (possibly only one) is handed to
    slurm_bracket_list_entry_to_nidlist() and the results are concatenated
    in order. Assumes the string already matched one of our REs.
    """
    nids = []
    for entry in bracket_contents.split(','):
        nids += slurm_bracket_list_entry_to_nidlist(entry)
    return nids
def slurm_list_entry_to_nidlist(node_spec_list_entry):
"""
Takes the node_spec_list_entry string and returns the list of NID numbers it corresponds to.
This function assumes the string has already been validated to match one of our REs.
"""
# Let's examine the list entry, checking for the different possible number of digits the
# numbers inside the brackets have (0-6)
for n in range(0,7):
m = BRACKET_DIGITS_TO_NODE_SPEC_ENTRY_RE_PROG[n].match(node_spec_list_entry)
if not m:
continue
if n == 0:
# The numbers inside the brackets have 0 digits, meaning
# this list entry is just a single nid, without any brackets
# e.g. nid000020
# In this case, the matching group is just the nid number
debug("Node spec list entry \"%s\" appears to be a single node" % node_spec_list_entry)
nid_str = m.group(1)
debug("Node spec list entry yields NID string \"%s\"" % nid_str)
try:
return [ int(nid_str) ]
except ValueError as e:
error("PROGRAMMING LOGIC ERROR: Our prior regular expression checking should have prevented us from hitting this")
raise_test_exception_error(e, "to parse integer (%s) from node spec list entry (%s)" % (nid_str, node_spec_list_entry))
elif n == 6:
# This is an entry where the numbers inside the brackets have 6 digits, so there are no digits before the brackets.
# e.g. nid[000300-000310,000555]
# So we just parse the contents of the brackets, and that's our answer.
# In this case, the matching group is the bracket contents.
debug("Node spec list entry \"%s\" appears to have no digits before the brackets" % node_spec_list_entry)
bracket_contents = m.group(1)
debug("Node spec list entry yields bracket contents \"%s\"" % bracket_contents)
return slurm_bracket_contents_to_nidlist(bracket_contents)
# Finally, there are the cases where there are n digit numbers inside the brackets and 6-n digits outside the brackets,
# for 1 <= n <= 5
# In this case, the first matching group is the digits before the brackets, and the second matching group
# is the bracket contents
debug("Node spec list entry \"%s\" appears to have %d digits before the brackets" % (6-n, node_spec_list_entry))
prefix_digits_str = m.group(1)
bracket_contents = m.group(2)
debug("Node spec list entry yields prefix digits \"%s\" and bracket contents \"%s\"" % (
prefix_digits_str, bracket_contents))
try:
prefix_digits = int(prefix_digits_str)
except ValueError as e:
error("PROGRAMMING LOGIC ERROR: Our prior regular expression checking should have prevented us from hitting this")
raise_test_exception_error(e, "to parse prefix digits (%s) from node spec list entry (%s)" % (prefix_digits, node_spec_list_entry))
# If the entry is nid0001[09-11], then this specified nodes 109-111
# So to arrive at the node numbers, every number that we get from the | |
#!/usr/bin/env python
"""
Validator script for BOT-level Fe55 analysis.
"""
from __future__ import print_function
import os
import glob
from collections import OrderedDict
import pickle
import numpy as np
from astropy.io import fits
import lcatr.schema
import siteUtils
import eotestUtils
import lsst.eotest.sensor as sensorTest
from camera_components import camera_info
from tearing_detection import persist_tearing_png_files
from bot_eo_analyses import make_file_prefix
def report_missing_data(validator, missing_data, components='detectors',
                        total=189):
    """Summarize the missing data for the specified components."""
    if len(missing_data) == total:
        # Everything is missing: no point printing the full list.
        print("{}: missing data for all {} {}".format(validator, total,
                                                      components))
        return
    print("{}: missing data for {} {}".format(validator, len(missing_data),
                                              components))
    print(missing_data)
def validate_fe55(results, det_names):
    """Validate and persist fe55 gain and psf results.

    :param results: list of lcatr.schema entries; extended in place and
        also returned
    :param det_names: iterable of detector names of the form '<raft>_<slot>'
    :return: the (extended) results list
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        # The output files from producer script.
        # NOTE: the '%(file_prefix)s' templates below are filled from
        # `locals()`, so the local variable name `file_prefix` is load-bearing.
        gain_file = '%(file_prefix)s_eotest_results.fits' % locals()
        psf_results_files \
            = glob.glob('%(file_prefix)s_psf_results*.fits' % locals())
        if not os.path.isfile(gain_file) or not psf_results_files:
            # Results for this detector are not available so note
            # that and continue with the others.
            missing_det_names.append(det_name)
            continue
        psf_results = psf_results_files[0]
        rolloff_mask = '%(file_prefix)s_edge_rolloff_mask.fits' % locals()
        output_files = psf_results, rolloff_mask
        # Add/update the metadata to the primary HDU of these files.
        for fitsfile in output_files:
            eotestUtils.addHeaderData(fitsfile, TESTTYPE='FE55',
                                      DATE=eotestUtils.utc_now_isoformat())
        results.extend([lcatr.schema.fileref.make(x) for x in output_files])
        # Persist the median bias FITS file.
        # NOTE(review): [0] assumes the producer wrote at least one matching
        # file; raises IndexError otherwise -- TODO confirm that is intended.
        bias_frame \
            = glob.glob('%(file_prefix)s_median_bias.fits' % locals())[0]
        results.append(siteUtils.make_fileref(bias_frame))
        # Persist the png files listed by the producer in its manifest file.
        png_file_list = '{}_fe55_task_png_files.txt'.format(det_name)
        with open(png_file_list, 'r') as input_:
            png_files = [x.strip() for x in input_]
        metadata = dict(TESTTYPE='FE55', TEST_CATEGORY='EO',
                        DETECTOR=det_name, RUN=run)
        results.extend(siteUtils.persist_png_files('', file_prefix,
                                                   png_files=png_files,
                                                   metadata=metadata))
        # Persist the per-amp gain and psf-sigma measurements.
        data = sensorTest.EOTestResults(gain_file)
        amps = data['AMP']
        gain_data = data['GAIN']
        gain_errors = data['GAIN_ERROR']
        sigmas = data['PSF_SIGMA']
        for amp, gain_value, gain_error, sigma in zip(amps, gain_data,
                                                      gain_errors, sigmas):
            # Replace NaN/inf gain errors with -1; presumably the schema
            # cannot store non-finite values -- TODO confirm.
            if not np.isfinite(gain_error):
                gain_error = -1
            results.append(lcatr.schema.valid(
                lcatr.schema.get('fe55_BOT_analysis'), amp=amp, gain=gain_value,
                gain_error=gain_error, psf_sigma=sigma, slot=slot, raft=raft))
    report_missing_data('validate_fe55', missing_det_names)
    return results
def validate_read_noise(results, det_names):
    """Validate and persist read noise results.

    Parameters
    ----------
    results : list
        Accumulator of lcatr.schema entries; extended in place.
    det_names : list of str
        Detector names of the form '<raft>_<slot>'.

    Returns
    -------
    list
        The updated results list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        read_noise_file = '{}_eotest_results.fits'.format(file_prefix)
        if not os.path.isfile(read_noise_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        data = sensorTest.EOTestResults(read_noise_file)
        columns = zip(data['AMP'], data['READ_NOISE'], data['SYSTEM_NOISE'],
                      data['TOTAL_NOISE'])
        for amp, read_noise, system_noise, total_noise in columns:
            results.append(lcatr.schema.valid(
                lcatr.schema.get('read_noise_BOT'),
                amp=amp, read_noise=read_noise, system_noise=system_noise,
                total_noise=total_noise, slot=slot, raft=raft))
        # Stamp provenance metadata into the per-detector FITS files and
        # persist them.
        fits_files = glob.glob('{}_read_noise?*.fits'.format(file_prefix))
        for fits_file in fits_files:
            eotestUtils.addHeaderData(fits_file, TESTTYPE='FE55',
                                      DATE=eotestUtils.utc_now_isoformat())
        results.extend(siteUtils.make_fileref(item) for item in fits_files)
        # Persist the png files.
        metadata = dict(DETECTOR=det_name, TESTTYPE='FE55',
                        TEST_CATEGORY='EO', RUN=run)
        results.extend(siteUtils.persist_png_files(
            '{}_correlated_noise.png'.format(file_prefix), file_prefix,
            metadata=metadata))
    # Persist the raft-level overscan correlation plots.
    for raft_name in camera_info.get_raft_names():
        metadata = dict(TESTTYPE='FE55', TEST_CATEGORY='EO', RAFT=raft_name,
                        RUN=run)
        raft_prefix = make_file_prefix(run, raft_name)
        results.extend(siteUtils.persist_png_files(
            '{}_overscan_correlations.png'.format(raft_prefix), raft_prefix,
            metadata=metadata))
    report_missing_data("validate_read_noise", missing_det_names)
    return results
def validate_bright_defects(results, det_names):
    """Validate and persist bright defects results.

    Extends *results* in place with file references and per-amp
    bright-pixel/bright-column counts for each detector in *det_names*,
    and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        mask_file = '{}_bright_pixel_mask.fits'.format(file_prefix)
        if not os.path.isfile(mask_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        # Stamp provenance metadata into the mask file and persist it.
        eotestUtils.addHeaderData(mask_file, TESTTYPE='DARK',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(mask_file))
        # Persist the medianed dark used for bright-pixel detection.
        medianed_dark = '{}_median_dark_bp.fits'.format(file_prefix)
        eotestUtils.addHeaderData(medianed_dark,
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(medianed_dark))
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        for amp, npix, ncol in zip(data['AMP'], data['NUM_BRIGHT_PIXELS'],
                                   data['NUM_BRIGHT_COLUMNS']):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('bright_defects_BOT'),
                amp=amp, bright_pixels=npix, bright_columns=ncol,
                slot=slot, raft=raft))
        # Persist the png file.
        metadata = dict(TESTTYPE='DARK', TEST_CATEGORY='EO',
                        DETECTOR=det_name, RUN=run)
        results.extend(siteUtils.persist_png_files(
            '{}_medianed_dark.png'.format(file_prefix), file_prefix,
            metadata=metadata))
    report_missing_data("validate_bright_defects", missing_det_names)
    return results
def validate_dark_defects(results, det_names):
    """Validate and persist dark defects results.

    Extends *results* in place with file references and per-amp
    dark-pixel/dark-column counts for each detector in *det_names*,
    and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        mask_file = '{}_dark_pixel_mask.fits'.format(file_prefix)
        if not os.path.isfile(mask_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        # Stamp provenance metadata into the mask file and persist it.
        eotestUtils.addHeaderData(mask_file, TESTTYPE='SFLAT_500',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(mask_file))
        # Persist the median superflat used for dark-pixel detection.
        superflat = '{}_median_sflat.fits'.format(file_prefix)
        eotestUtils.addHeaderData(superflat,
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(superflat))
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        for amp, npix, ncol in zip(data['AMP'], data['NUM_DARK_PIXELS'],
                                   data['NUM_DARK_COLUMNS']):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('dark_defects_BOT'),
                amp=amp, dark_pixels=npix, dark_columns=ncol,
                slot=slot, raft=raft))
        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='SFLAT_500', TEST_CATEGORY='EO')
        results.extend(siteUtils.persist_png_files(
            '{}_superflat_dark_defects.png'.format(file_prefix), file_prefix,
            metadata=metadata))
    report_missing_data("validate_dark_defects", missing_det_names)
    return results
def validate_traps(results, det_names):
    """Validate and persist trap results.

    Extends *results* in place with file references and per-amp trap
    counts for each detector in *det_names*, and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        trap_file = '{}_traps.fits'.format(file_prefix)
        if not os.path.isfile(trap_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        eotestUtils.addHeaderData(trap_file, TESTTYPE='TRAP',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(trap_file))
        results.append(siteUtils.make_fileref(
            '{}_traps_mask.fits'.format(file_prefix)))
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        for amp, ntrap in zip(data['AMP'], data['NUM_TRAPS']):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('traps_BOT'), amp=amp, num_traps=ntrap,
                slot=slot, raft=raft))
    report_missing_data("validate_traps", missing_det_names)
    return results
def validate_dark_current(results, det_names):
    """Validate and persist dark current results.

    Extends *results* in place with per-amp 95th-percentile dark current
    values and associated png plots, and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        results_file = '{}_eotest_results.fits'.format(file_prefix)
        if not os.path.isfile(results_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        data = sensorTest.EOTestResults(results_file)
        for amp, dc95 in zip(data['AMP'], data['DARK_CURRENT_95']):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('dark_current_BOT'), amp=amp,
                dark_current_95CL=dc95, slot=slot, raft=raft))
        # Persist the png files.
        metadata = dict(TESTTYPE='DARK', TEST_CATEGORY='EO',
                        DETECTOR=det_name, RUN=run)
        for suffix in ('noise', 'total_noise_hists'):
            pattern = '{}_{}.png'.format(file_prefix, suffix)
            results.extend(siteUtils.persist_png_files(pattern, file_prefix,
                                                       metadata=metadata))
    report_missing_data("validate_dark_current", missing_det_names)
    return results
def validate_cte(results, det_names):
    """Validate the CTE task results.

    Extends *results* in place with superflat file references, per-amp
    high/low-flux serial and parallel CTI values (with errors), and the
    task's png plots, and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        superflats = sorted(
            glob.glob('{}_superflat_*.fits'.format(file_prefix)))
        if not superflats:
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        for item in superflats:
            eotestUtils.addHeaderData(item, FILENAME=item,
                                      DATE=eotestUtils.utc_now_isoformat())
        results.extend(siteUtils.make_fileref(item) for item in superflats)
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        columns = [data[name] for name in
                   ('AMP', 'CTI_HIGH_SERIAL', 'CTI_HIGH_SERIAL_ERROR',
                    'CTI_HIGH_PARALLEL', 'CTI_HIGH_PARALLEL_ERROR',
                    'CTI_LOW_SERIAL', 'CTI_LOW_SERIAL_ERROR',
                    'CTI_LOW_PARALLEL', 'CTI_LOW_PARALLEL_ERROR')]
        for (amp, cti_hs, cti_hs_err, cti_hp, cti_hp_err,
             cti_ls, cti_ls_err, cti_lp, cti_lp_err) in zip(*columns):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('cte_BOT'), amp=amp,
                cti_high_serial=cti_hs, cti_high_serial_error=cti_hs_err,
                cti_high_parallel=cti_hp,
                cti_high_parallel_error=cti_hp_err,
                cti_low_serial=cti_ls, cti_low_serial_error=cti_ls_err,
                cti_low_parallel=cti_lp, cti_low_parallel_error=cti_lp_err,
                slot=slot, raft=raft))
        # Persist the png files listed by the producer task.
        with open('{}_cte_task_png_files.txt'.format(det_name)) as input_:
            png_files = [line.strip() for line in input_]
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='SFLAT_500', TEST_CATEGORY='EO')
        results.extend(siteUtils.persist_png_files('', file_prefix,
                                                   png_files=png_files,
                                                   metadata=metadata))
    report_missing_data("validate_cte", missing_det_names)
    return results
def validate_flat_pairs(results, det_names):
    """Validate the flat pair analysis results.

    Extends *results* in place with the detector response file reference,
    per-amp full-well and max-fractional-deviation values, and linearity
    plots, and returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        det_resp_data = '{}_det_response.fits'.format(file_prefix)
        if not os.path.isfile(det_resp_data):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        eotestUtils.addHeaderData(det_resp_data, DETECTOR=det_name,
                                  TESTTYPE='FLAT',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(det_resp_data))
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        for amp, full_well, max_frac_dev in zip(
                data['AMP'], data['FULL_WELL'], data['MAX_FRAC_DEV']):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('flat_pairs_BOT'),
                amp=amp, full_well=full_well, max_frac_dev=max_frac_dev,
                slot=slot, raft=raft))
        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='FLAT', TEST_CATEGORY='EO')
        results.extend(siteUtils.persist_png_files(
            '{}_linearity*.png'.format(file_prefix), file_prefix,
            metadata=metadata))
    report_missing_data("validate_flat_pairs", missing_det_names)
    return results
def validate_ptc(results, det_names):
    """Validate the PTC results.

    Extends *results* in place with the PTC file reference and per-amp
    PTC gain/a00/noise/turnoff values (with errors), plus plots, and
    returns the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        ptc_results = '{}_ptc.fits'.format(file_prefix)
        if not os.path.isfile(ptc_results):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        eotestUtils.addHeaderData(ptc_results, TESTTYPE='FLAT',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(ptc_results))
        data = sensorTest.EOTestResults(
            '{}_eotest_results.fits'.format(file_prefix))
        columns = [data[name] for name in
                   ('AMP', 'PTC_GAIN', 'PTC_GAIN_ERROR', 'PTC_A00',
                    'PTC_A00_ERROR', 'PTC_NOISE', 'PTC_NOISE_ERROR',
                    'PTC_TURNOFF')]
        for amp, gain, gain_error, a00, a00_error, noise, noise_error, \
                turnoff in zip(*columns):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('ptc_BOT'), amp=amp, ptc_gain=gain,
                ptc_gain_error=gain_error, ptc_a00=a00,
                ptc_a00_error=a00_error, ptc_noise=noise,
                ptc_noise_error=noise_error, ptc_turnoff=turnoff,
                slot=slot, raft=raft))
        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='FLAT', TEST_CATEGORY='EO')
        results.extend(siteUtils.persist_png_files(
            '{}*ptcs.png'.format(file_prefix), file_prefix,
            metadata=metadata))
    report_missing_data("validate_ptc", missing_det_names)
    return results
def validate_qe(results, det_names):
    """Validate the QE results.

    Extends *results* in place with per-band, per-amp QE values read
    from the QE_BANDS table, QE file references, and plots, and returns
    the list.
    """
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        qe_results_file = '{}_QE.fits'.format(file_prefix)
        if not os.path.isfile(qe_results_file):
            # No data for this detector; note it and continue.
            missing_det_names.append(det_name)
            continue
        # Build a band -> [QE per amp] mapping from the QE_BANDS table.
        with fits.open(qe_results_file) as qe_results:
            qe_data = qe_results['QE_BANDS'].data
            QE = OrderedDict((band, []) for band in qe_data.field('BAND'))
            for amp in range(1, 17):
                for band, value in zip(QE, qe_data.field('AMP%02i' % amp)):
                    QE[band].append(value)
        for band in QE:
            for amp in range(1, 17):
                results.append(lcatr.schema.valid(
                    lcatr.schema.get('qe_BOT_analysis'),
                    band=band, QE=QE[band][amp-1],
                    amp=amp, slot=slot, raft=raft))
        qe_files = glob.glob('{}_*QE*.fits'.format(file_prefix))
        for item in qe_files:
            eotestUtils.addHeaderData(item, TESTTYPE='LAMBDA',
                                      DATE=eotestUtils.utc_now_isoformat())
        results.extend(siteUtils.make_fileref(item) for item in qe_files)
        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='LAMBDA', TEST_CATEGORY='EO')
        for pattern in ('{}*qe.png', '{}*flat.png'):
            results.extend(siteUtils.persist_png_files(
                pattern.format(file_prefix), file_prefix,
                metadata=metadata))
    report_missing_data("validate_qe", missing_det_names)
    return results
def validate_tearing(results, det_names):
"""Validate the tearing analysis results."""
run = siteUtils.getRunNumber()
schema = lcatr.schema.get('tearing_detection_BOT')
missing_det_names = []
for det_name in det_names:
raft, slot = det_name.split('_')
file_prefix = make_file_prefix(run, det_name)
tearing_results_file = '%s_tearing_stats.pkl' % file_prefix
if not os.path.isfile(tearing_results_file):
missing_det_names.append(det_name)
continue
with | |
<reponame>ybarancan/STSU
import os
from PIL import Image
import torch
from torch.utils.data import Dataset
from torchvision.transforms.functional import to_tensor
from src.utils import bezier
from .utils import IMAGE_WIDTH, IMAGE_HEIGHT, ARGOVERSE_CLASS_NAMES
from ..utils import decode_binary_labels
import numpy as np
import logging
import sys
import cv2
from scipy.ndimage import gaussian_filter
class ArgoverseMapDataset(Dataset):
    def __init__(self, config, loader, am,
                 log_names=None, train=True, pinet=False, work_objects=True):
        """Dataset over Argoverse ring-front-center frames with lane labels.

        Args:
            config: experiment config; fields read here are image_size,
                argo_obj_dict_path and map_resolution.
            loader: Argoverse log loader used for images/calibration.
            am: Argoverse map API object (stored for later use).
            log_names: log ids to preload examples from.
            train: stored nowhere in this method — presumably used by
                callers; TODO confirm.
            pinet: if True, load_image resizes to 512x256 and swaps to BGR.
            work_objects: if True, __getitem__ also builds object targets.
        """
        self.image_size = config.image_size
        self.config = config
        self.examples = []
        self.pinet = pinet
        self.am = am
        self.calibs = dict()
        # Preload training examples from Argoverse train and test sets
        self.loader = loader
        # Number of Bezier control points per lane segment.
        self.n_control = 3
        self.camera = "ring_front_center"
        # Precomputed per-frame object annotations, keyed '<log>_<timestamp>'.
        self.obj_dict = np.load(config.argo_obj_dict_path,allow_pickle=True)
        self.resolution = config.map_resolution
        # NOTE: preload() runs before work_objects is assigned; preload does
        # not read work_objects, so this ordering is safe.
        self.preload(loader, log_names)
        self.work_objects = work_objects
        logging.error('ARGO LOADED')
def preload(self, loader, log_names=None):
for my_scene_id in range(len(log_names)):
log = loader.get(log_names[my_scene_id])
#
n_frames_in_scene = log.num_lidar_frame
for k in range(n_frames_in_scene):
timestamp = str(np.copy(loader._image_timestamp_list_sync[log_names[my_scene_id]][self.camera][k]))
#
#
self.examples.append((timestamp,log_names[my_scene_id], k))
    def __len__(self):
        """Return the number of preloaded (timestamp, log, frame) examples."""
        return len(self.examples)
    def __getitem__(self, idx):
        """Return (image, target_dict, problem_flag) for example *idx*.

        On any failure the method logs and returns (None, {}, True) so the
        collate function can drop the sample.
        """
        # Get the split, log and camera ids corresponding to the given timestamp
        try:
            timestamp, logid, ind = self.examples[idx]
            image = self.load_image(logid, timestamp)
            calib = self.load_calib(logid)
            # Lane/centerline labels and (optionally) object annotations
            # produced by load_line_labels (defined elsewhere in this class).
            obj_to_return, center_width_orient,con_matrix,endpoints, orig_img_centers, origs, mask, bev_mask,\
                to_return_centers, labels,roads,coeffs,\
                outgoings, incomings, problem, obj_exists = self.load_line_labels(timestamp, logid, ind)
            if problem:
                logging.error('THERE WAS PROBLEM')
                return (None, dict(), True)
            if self.work_objects:
                #1.5 is camera height
                if len(center_width_orient) > 0:
                    my_calib = calib.cpu().numpy()
                    # Normalized BEV object centers -> metric map coords.
                    obj_center = center_width_orient[:,:2]
                    obj_x = obj_center[:,0]*(self.config.map_extents[2]-self.config.map_extents[0]) + self.config.map_extents[0]
                    obj_y = obj_center[:,1]*(self.config.map_extents[3]-self.config.map_extents[1]) + self.config.map_extents[1]
                    # Pinhole projection into the image plane (epsilon guards
                    # against division by zero at obj_y == 0).
                    img_x = (obj_x*my_calib[0,0] + obj_y*my_calib[0,-1])/(obj_y + 0.0001)
                    img_y = (1.5*my_calib[1,1] + obj_y*my_calib[1,-1])/(obj_y + 0.0001)
                    img_x = img_x / self.image_size[0]
                    img_y = img_y / self.image_size[1]
                    # Keep only objects that project inside the image.
                    to_keep = np.logical_not((img_x > 1) | (img_x < 0) | (img_y > 1) | (img_y < 0))
                    img_centers = np.stack([img_x,img_y],axis=-1)
                    if np.sum(to_keep) == 0:
                        # No visible objects: fall back to empty targets.
                        img_centers = []
                        center_width_orient = []
                        obj_to_return = []
                        obj_exists = False
                    else:
                        img_centers = img_centers[to_keep]
                        center_width_orient = center_width_orient[to_keep]
                        obj_to_return = obj_to_return[to_keep]
                else:
                    img_centers = []
            # Per-lane start points, sorted; used to seed the polyline head.
            init_points = np.reshape(endpoints,(-1,2,2))[:,0]
            sorted_init_points, sort_index = self.get_sorted_init_points(init_points)
            # One-hot heatmap per init point on a 2x polyrnn feature grid,
            # blurred and peak-normalized.
            temp_ar = np.zeros((len(sorted_init_points),2*self.config.polyrnn_feat_side,2*self.config.polyrnn_feat_side))
            for k in range(len(sorted_init_points)):
                temp_ar[k,int(np.clip(sorted_init_points[k,1]*2*self.config.polyrnn_feat_side,0,2*self.config.polyrnn_feat_side-1)),int(np.clip(sorted_init_points[k,0]*2*self.config.polyrnn_feat_side,0,2*self.config.polyrnn_feat_side-1))]=1
                temp_ar[k] = gaussian_filter(temp_ar[k], sigma=0.1)
                temp_ar[k] = temp_ar[k]/np.max(temp_ar[k])
            # sorted_points = np.copy(np.ascontiguousarray(coeffs[sort_index,:]))
            sorted_points = np.copy(coeffs)
            # Quantize control points onto the polyrnn feature grid.
            grid_sorted_points = np.reshape(sorted_points,(-1,self.n_control ,2))
            grid_sorted_points[...,0]= np.int32(grid_sorted_points[...,0]*(self.config.polyrnn_feat_side - 1))
            grid_sorted_points[...,1]= np.int32(grid_sorted_points[...,1]*(self.config.polyrnn_feat_side - 1))
            my_grid_points = np.copy(np.ascontiguousarray(grid_sorted_points))
            # Assemble the target dictionary consumed by the model/loss.
            target = dict()
            target['mask'] = torch.tensor(mask).float()
            target['bev_mask'] = bev_mask
            target['static_mask'] = torch.zeros(self.config.num_bev_classes,mask.shape[1],mask.shape[2])
            target['calib'] = calib.float()
            target['center_img'] = to_return_centers
            target['orig_center_img'] = orig_img_centers
            target['labels'] = labels.long()
            target['roads'] = torch.tensor(np.int64(roads)).long()
            target['control_points'] = torch.tensor(coeffs)
            target['con_matrix'] = torch.tensor(con_matrix)
            target['obj_exists'] = obj_exists
            if self.work_objects:
                target['obj_corners'] = torch.tensor(obj_to_return).float()
                target['obj_converted'] = torch.tensor(center_width_orient).float()
                target['obj_exists'] = torch.tensor(obj_exists)
            target['init_point_matrix'] = torch.tensor(np.copy(np.ascontiguousarray(temp_ar))).float()
            target['sorted_control_points'] = torch.tensor(sorted_points).float()
            target['grid_sorted_control_points'] = torch.tensor(my_grid_points).float()
            target['sort_index'] = torch.tensor(np.copy(np.ascontiguousarray(sort_index)))
            target['endpoints'] = torch.tensor(endpoints).float()
            target['origs'] = torch.tensor(origs)
            target['scene_token'] = logid
            target['sample_token'] = timestamp
            target['data_token'] = logid
            target['scene_name'] = logid
            target['outgoings'] = outgoings
            target['incomings'] = incomings
            # Argoverse (US) data is right-hand traffic.
            target['left_traffic'] = torch.tensor(False)
            return (image, target, False)
        except Exception as e:
            # Log the failure site and signal the problem to the caller.
            logging.error('ARGO DATALOADER ' + str(e))
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            logging.error(str((exc_type, fname, exc_tb.tb_lineno)))
            return (None, dict(), True)
    def load_image(self, log_id, timestamp):
        """Load, resize and normalize the front-camera frame as a tensor.

        Returns a float tensor normalized to [-1, 1]; in pinet mode the
        image is resized to 512x256 and the channel order is reversed
        (the [..., [2, 1, 0]] indexing swaps RGB<->BGR).
        """
        # Load image
        # loader = self.loader
        # image_file = loader._timestamp_image_dict[log_id][self.camera][timestamp]
        image_file = os.path.join(self.config.argo_log_root,log_id,'ring_front_center','ring_front_center_'+str(timestamp)+'.jpg')
        image = Image.open(image_file)
        image = np.array(image,np.float32)
        if self.pinet:
            image = cv2.resize(image, (512,256), cv2.INTER_LINEAR)[...,[2,1,0]]
        else:
            image = cv2.resize(image, (self.config.patch_size[0], self.config.patch_size[1]), cv2.INTER_LINEAR)
        image = np.float32(image)
        # Map pixel values from [0, 255] to [-1, 1].
        image = self.minmax_normalize(image, norm_range=(-1, 1))
        return to_tensor(image).float()
def minmax_normalize(self,img, norm_range=(0, 1), orig_range=(0, 255)):
# range(0, 1)
norm_img = (img - orig_range[0]) / (orig_range[1] - orig_range[0])
# range(min_value, max_value)
norm_img = norm_img * (norm_range[1] - norm_range[0]) + norm_range[0]
return norm_img
def load_calib(self, log):
# Get the loader for the current split
loader = self.loader
# Get intrinsics matrix and rescale to account for downsampling
calib = np.copy(loader.get_calibration(self.camera, log).K[:,:3])
calib[0] *= self.image_size[0] / IMAGE_WIDTH
calib[1] *= self.image_size[1] / IMAGE_HEIGHT
# Convert to a torch tensor
return torch.from_numpy(calib)
    def load_labels(self, split, log, camera, timestamp):
        """Load binary class labels and a visibility mask for one frame.

        NOTE(review): reads self.label_root, which is not assigned in the
        visible __init__ — presumably set elsewhere or legacy dead code;
        confirm before relying on this method.
        """
        # Construct label path from example data
        label_path = os.path.join(self.label_root, split, log, camera,
                                  timestamp, f'{camera}_{timestamp}.png')
        # Load encoded label image as a torch tensor
        encoded_labels = to_tensor(Image.open(label_path)).long()
        # Decode to binary labels
        num_class = len(ARGOVERSE_CLASS_NAMES)
        labels = decode_binary_labels(encoded_labels, num_class+ 1)
        # Last channel encodes invalid pixels; invert it to get the mask.
        labels, mask = labels[:-1], ~labels[-1]
        return labels, mask
    def get_object_params(self, log_id, timestamp, vis_mask):
        """Extract visible object boxes for one frame from self.obj_dict.

        Returns (corners, center_width_orient, obj_exists) where corners is
        an (N, 9) array of 4 normalized BEV corner coords plus a class id,
        and center_width_orient is an (N, 5) array laid out as
        [cx, cy, long_side, short_side, angle] (see assignments below).
        """
        resolution=self.resolution
        token = log_id +'_' + str(timestamp)
        # Precomputed annotations; each entry is 8 corner coords + class id.
        objs = self.obj_dict.item().get(token)
        to_return=[]
        center_width_orient=[]
        obj_exists = False
        for obj in objs:
            # Class ids above 7 are ignored.
            if obj[-1] > 7:
                continue
            # Normalize the 4 BEV corners into [0, 1] map-extent coords.
            reshaped = np.reshape(np.copy(obj)[:8],(4,2))
            reshaped[:,0] = (reshaped[:,0] - self.config.map_extents[0])/(self.config.map_extents[2]-self.config.map_extents[0])
            reshaped[:,1] = (reshaped[:,1] - self.config.map_extents[1])/(self.config.map_extents[3]-self.config.map_extents[1])
            reshaped[:,1] = 1 - reshaped[:,1]
            # Grid indices into vis_mask (clipped to the mask bounds).
            coords = (np.clip(np.int64(reshaped[:,1]*(self.config.map_extents[3]-self.config.map_extents[1])/resolution),0,195),
                      np.clip(np.int64(reshaped[:,0]*(self.config.map_extents[2]-self.config.map_extents[0])/resolution),0,199))
            # Keep the box if any corner is visible and inside [0, 1]^2.
            inside = False
            for k in range(4):
                inside = inside | ((vis_mask[coords[0][k], coords[1][k]] > 0.5) &
                                   ((reshaped[k,1] >= 0) & (reshaped[k,1] <= 1)) &
                                   ((reshaped[k,0] >= 0) & (reshaped[k,0] <= 1)))
            if inside:
                # logging.error('INSIDE')
                res_ar = np.zeros(5)
                # Pack normalized corners + class id into a 9-vector.
                temp=np.squeeze(np.zeros((9,1),np.float32))
                temp[:8] = reshaped.flatten()
                temp[-1] = obj[-1]
                to_return.append(np.copy(temp))
                # Undo the y-flip before computing geometry.
                reshaped[:,1] = 1 - reshaped[:,1]
                # Edge vectors of the quadrilateral.
                all_edges = np.zeros((4,2))
                for k in range(4):
                    first_corner = reshaped[k%4]
                    second_corner = reshaped[(k+1)%4]
                    all_edges[k,:]=np.copy(second_corner - first_corner)
                all_lengths = np.sqrt(np.square(all_edges[:,0]) + np.square(all_edges[:,1]))
                # Orientation is taken from the longest edge.
                long_side = np.argmax(all_lengths)
                # egim = np.sign(all_edges[long_side][1]/(all_edges[long_side][0] + 0.00001))*\
                #     np.abs(all_edges[long_side][1])/(all_lengths[long_side] + 0.00001)
                my_abs_cos = np.abs(all_edges[long_side][0])/(all_lengths[long_side] + 0.00001)
                my_sign = np.sign(all_edges[long_side][1]/(all_edges[long_side][0] + 0.00001))
                angle = np.arccos(my_abs_cos*my_sign)
                center = np.mean(reshaped,axis=0)
                long_len = np.max(all_lengths)
                short_len = np.min(all_lengths)
                res_ar[:2] = center
                # res_ar[4] = my_abs_cos
                # res_ar[5] = my_sign
                res_ar[4] = angle
                res_ar[2] = long_len
                res_ar[3] = short_len
                center_width_orient.append(np.copy(res_ar))
                obj_exists = True
        return np.array(to_return), np.array(center_width_orient), obj_exists
def load_seg_labels(self, log, timestamp):
camera = self.camera
label_path = os.path.join(self.config.argo_seg_label_root, log, camera,
f'{camera}_{timestamp}.png')
encoded_labels = np.array(Image.open(label_path))
bev_label = torch.tensor(np.flipud(encoded_labels).copy()).long()
bev_label = decode_binary_labels(bev_label, len(ARGOVERSE_CLASS_NAMES)+1)
return bev_label
    def line_endpoints(self, coeffs, inc, out, roads):
        """Reorient lane control-point sequences so connected lanes meet
        end-to-start.

        For each lane k, compares the distances between its first/last
        control points and those of its first incoming and first outgoing
        neighbor, and flips whichever sequence(s) minimize the junction
        distance. Returns coeffs unchanged if anything fails.
        """
        try:
            roads = list(roads)
            new_coeffs = np.copy(np.array(coeffs))
            for k in range(len(coeffs)):
                if len(inc[k]) > 0:
                    # Align with the first incoming neighbor.
                    other = roads.index(inc[k][0])
                    other_coef = coeffs[other]
                    dist1 = np.sum(np.abs(new_coeffs[k,0] - other_coef[0]))
                    dist2 = np.sum(np.abs(new_coeffs[k,-1] - other_coef[0]))
                    dist3 = np.sum(np.abs(new_coeffs[k,0] - other_coef[-1]))
                    dist4 = np.sum(np.abs(new_coeffs[k,-1] - other_coef[-1]))
                    min_one = np.squeeze(np.argmin(np.stack([dist1,dist2,dist3,dist4])))
                    # Cases: 0 -> flip neighbor; 1 -> flip both; 2 -> already
                    # aligned (no-op); 3 -> flip self.
                    if min_one == 0:
                        temp = np.copy(new_coeffs[other,0])
                        new_coeffs[other,0] = new_coeffs[other,-1]
                        new_coeffs[other,-1] = temp
                    elif min_one == 1:
                        temp = np.copy(new_coeffs[other,0])
                        new_coeffs[other,0] = new_coeffs[other,-1]
                        new_coeffs[other,-1] = temp
                        temp = np.copy(new_coeffs[k,0])
                        new_coeffs[k,0] = new_coeffs[k,-1]
                        new_coeffs[k,-1] = temp
                    elif min_one == 3:
                        temp = np.copy(new_coeffs[k,0])
                        new_coeffs[k,0] = new_coeffs[k,-1]
                        new_coeffs[k,-1] = temp
                if len(out[k]) > 0:
                    # Align with the first outgoing neighbor.
                    other = roads.index(out[k][0])
                    other_coef = coeffs[other]
                    dist1 = np.sum(np.abs(new_coeffs[k,0] - other_coef[0]))
                    dist2 = np.sum(np.abs(new_coeffs[k,-1] - other_coef[0]))
                    dist3 = np.sum(np.abs(new_coeffs[k,0] - other_coef[-1]))
                    dist4 = np.sum(np.abs(new_coeffs[k,-1] - other_coef[-1]))
                    min_one = np.squeeze(np.argmin(np.stack([dist1,dist2,dist3,dist4])))
                    # Cases: 0 -> flip self; 1 -> already aligned (no-op);
                    # 2 -> flip both; 3 -> flip neighbor.
                    if min_one == 0:
                        temp = np.copy(new_coeffs[k,0])
                        new_coeffs[k,0] = new_coeffs[k,-1]
                        new_coeffs[k,-1] = temp
                    elif min_one == 2:
                        temp = np.copy(new_coeffs[other,0])
                        new_coeffs[other,0] = new_coeffs[other,-1]
                        new_coeffs[other,-1] = temp
                        temp = np.copy(new_coeffs[k,0])
                        new_coeffs[k,0] = new_coeffs[k,-1]
                        new_coeffs[k,-1] = temp
                    elif min_one == 3:
                        temp = np.copy(new_coeffs[other,0])
                        new_coeffs[other,0] = new_coeffs[other,-1]
                        new_coeffs[other,-1] = temp
            return new_coeffs
        except Exception as e:
            # Fall back to the unmodified coefficients on any error.
            logging.error('ENDPOINTS ' + str(e))
            exc_type, exc_obj, exc_tb = sys.exc_info()
            fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
            logging.error(str((exc_type, fname, exc_tb.tb_lineno)))
            return coeffs
# inc, out = self.get_line_orientation(k, roads, all_selected,selected_pred[k],selected_suc[k],selected_id)
#
def get_line_orientation(self, road, all_roads, all_selected,selected_pred,selected_suc,selected_id):
try:
# my_gt_id = selected_id[road]
# road_id=all_roads[road]
outgoing_id = []
for tok in selected_suc:
# logging.error('OUTGOING ' + tok)
if tok in selected_id:
outgoing_id.append(all_selected[selected_id.index(tok)])
incoming_id = []
for tok in selected_pred:
# logging.error('INCOMING ' + tok)
if tok in selected_id:
incoming_id.append(all_selected[selected_id.index(tok)])
return incoming_id, outgoing_id
except Exception as e:
logging.error('ORIENT ' + str(e))
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
logging.error(str((exc_type, fname, exc_tb.tb_lineno)))
return [],[]
def get_connectivity(self,roads,outgoings, incomings):
try:
con_matrix = np.zeros((len(roads),len(roads)))
# logging.error('CON ROAD ' + str(roads))
for k in range(len(roads)):
con_matrix[k,k] = 0
outs = outgoings[k]
# logging.error('CON OUTS ' + str(outs))
for ou in outs:
sel = ou
if sel in roads:
ind = roads.index(sel)
# logging.error('INCOM ' + str(incomings[ind]))
# if not (ou | |
raise TypeError(msg)
# 2.1.5
class WrappingMethod(Enumeration):
    """KMIP Wrapping Method enumeration (tagged WRAPPING_METHOD)."""

    def __init__(self, value=None):
        # value: an enums.WrappingMethod member, or None when decoding.
        super(WrappingMethod, self).__init__(
            enums.WrappingMethod, value, Tags.WRAPPING_METHOD)
class EncodingOption(Enumeration):
    """KMIP Encoding Option enumeration (tagged ENCODING_OPTION)."""

    def __init__(self, value=None):
        # value: an enums.EncodingOption member, or None when decoding.
        super(EncodingOption, self).__init__(
            enums.EncodingOption, value, Tags.ENCODING_OPTION)
class KeyInformation(Struct):
    """Base struct pairing a UniqueIdentifier with optional
    CryptographicParameters, used by the key-wrapping structures.

    The default tag is ENCRYPTION_KEY_INFORMATION; subclasses override it.
    """

    def __init__(self,
                 unique_identifier=None,
                 cryptographic_parameters=None,
                 tag=Tags.ENCRYPTION_KEY_INFORMATION):
        super(KeyInformation, self).__init__(tag=tag)
        self.unique_identifier = unique_identifier
        self.cryptographic_parameters = cryptographic_parameters
        self.validate()

    def read(self, istream):
        """Decode the struct's fields from *istream*."""
        super(KeyInformation, self).read(istream)
        payload = BytearrayStream(istream.read(self.length))

        # The unique identifier is required; cryptographic parameters
        # are optional.
        self.unique_identifier = attributes.UniqueIdentifier()
        self.unique_identifier.read(payload)

        if self.is_tag_next(Tags.CRYPTOGRAPHIC_PARAMETERS, payload):
            self.cryptographic_parameters = CryptographicParameters()
            self.cryptographic_parameters.read(payload)

        self.is_oversized(payload)
        self.validate()

    def write(self, ostream):
        """Encode the struct's fields to *ostream*."""
        payload = BytearrayStream()

        self.unique_identifier.write(payload)
        if self.cryptographic_parameters is not None:
            self.cryptographic_parameters.write(payload)

        # Write the length and value of the template attribute
        self.length = payload.length()
        super(KeyInformation, self).write(ostream)
        ostream.write(payload.buffer)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation.
        pass
class EncryptionKeyInformation(KeyInformation):
    """KeyInformation identifying the encryption key used for wrapping
    (tagged ENCRYPTION_KEY_INFORMATION)."""

    def __init__(self,
                 unique_identifier=None,
                 cryptographic_parameters=None,
                 tag=Tags.ENCRYPTION_KEY_INFORMATION):
        super(EncryptionKeyInformation, self).__init__(
            unique_identifier, cryptographic_parameters, tag)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation.
        pass
class MACSignatureKeyInformation(KeyInformation):
    """KeyInformation identifying the MAC/signature key used for wrapping
    (tagged MAC_SIGNATURE_KEY_INFORMATION)."""

    def __init__(self,
                 unique_identifier=None,
                 cryptographic_parameters=None,
                 tag=Tags.MAC_SIGNATURE_KEY_INFORMATION):
        super(MACSignatureKeyInformation, self).__init__(
            unique_identifier, cryptographic_parameters, tag)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation.
        pass
class KeyWrappingData(Struct):
    """KMIP KeyWrappingData struct: describes how a key value has been
    wrapped (method, key info, MAC/signature, IV/nonce, encoding option).

    Only wrapping_method is required; all other fields are optional and
    are read/written in TTLV tag order.
    """

    class MACSignature(ByteString):
        # Raw MAC or signature bytes (tagged MAC_SIGNATURE).
        def __init__(self, value=None):
            super(KeyWrappingData.MACSignature, self).__init__(
                value, Tags.MAC_SIGNATURE)

    class IVCounterNonce(ByteString):
        # Raw IV/counter/nonce bytes (tagged IV_COUNTER_NONCE).
        def __init__(self, value=None):
            super(KeyWrappingData.IVCounterNonce, self).__init__(
                value, Tags.IV_COUNTER_NONCE)

    def __init__(self,
                 wrapping_method=None,
                 encryption_key_information=None,
                 mac_signature_key_information=None,
                 mac_signature=None,
                 iv_counter_nonce=None,
                 encoding_option=None):
        super(KeyWrappingData, self).__init__(Tags.KEY_WRAPPING_DATA)
        self.wrapping_method = wrapping_method
        self.encryption_key_information = encryption_key_information
        self.mac_signature_key_information = mac_signature_key_information
        self.mac_signature = mac_signature
        self.iv_counter_nonce = iv_counter_nonce
        self.encoding_option = encoding_option
        self.validate()

    def read(self, istream):
        """Decode the struct from *istream*; optional fields are detected
        by peeking at the next tag."""
        super(KeyWrappingData, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        self.wrapping_method = WrappingMethod()
        self.wrapping_method.read(tstream)
        if self.is_tag_next(Tags.ENCRYPTION_KEY_INFORMATION, tstream):
            self.encryption_key_information = EncryptionKeyInformation()
            self.encryption_key_information.read(tstream)
        if self.is_tag_next(Tags.MAC_SIGNATURE_KEY_INFORMATION, tstream):
            self.mac_signature_key_information = MACSignatureKeyInformation()
            self.mac_signature_key_information.read(tstream)
        if self.is_tag_next(Tags.MAC_SIGNATURE, tstream):
            self.mac_signature = KeyWrappingData.MACSignature()
            self.mac_signature.read(tstream)
        if self.is_tag_next(Tags.IV_COUNTER_NONCE, tstream):
            self.iv_counter_nonce = KeyWrappingData.IVCounterNonce()
            self.iv_counter_nonce.read(tstream)
        if self.is_tag_next(Tags.ENCODING_OPTION, tstream):
            self.encoding_option = EncodingOption()
            self.encoding_option.read(tstream)
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """Encode the struct to *ostream*; None fields are omitted."""
        tstream = BytearrayStream()
        # Write the contents of the key wrapping data
        self.wrapping_method.write(tstream)
        if self.encryption_key_information is not None:
            self.encryption_key_information.write(tstream)
        if self.mac_signature_key_information is not None:
            self.mac_signature_key_information.write(tstream)
        if self.mac_signature is not None:
            self.mac_signature.write(tstream)
        if self.iv_counter_nonce is not None:
            self.iv_counter_nonce.write(tstream)
        if self.encoding_option is not None:
            self.encoding_option.write(tstream)
        # Write the length and value of the key wrapping data
        self.length = tstream.length()
        super(KeyWrappingData, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation
        pass
# 2.1.6
class KeyWrappingSpecification(Struct):
    """KMIP KeyWrappingSpecification struct: tells the server how a key
    should be wrapped on retrieval (method, key info, attribute names,
    encoding option). Only wrapping_method is required.
    """

    class AttributeName(TextString):
        # Name of an attribute to be wrapped along with the key material.
        def __init__(self, value=None):
            super(KeyWrappingSpecification.AttributeName, self).__init__(
                value, Tags.ATTRIBUTE_NAME)

    def __init__(self,
                 wrapping_method=None,
                 encryption_key_information=None,
                 mac_signature_key_information=None,
                 attribute_name=None,
                 encoding_option=None):
        super(KeyWrappingSpecification, self).__init__(
            tag=Tags.KEY_WRAPPING_SPECIFICATION)
        self.wrapping_method = wrapping_method
        self.encryption_key_information = encryption_key_information
        self.mac_signature_key_information = mac_signature_key_information
        self.attribute_name = attribute_name
        self.encoding_option = encoding_option
        # Consistency fix: sibling structs (KeyInformation, KeyWrappingData)
        # validate on construction; this class previously skipped the call.
        # validate() is currently a no-op stub, so behavior is unchanged.
        self.validate()

    def read(self, istream):
        """Decode the struct from *istream*; optional fields are detected
        by peeking at the next tag."""
        super(KeyWrappingSpecification, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        self.wrapping_method = WrappingMethod()
        self.wrapping_method.read(tstream)
        if self.is_tag_next(Tags.ENCRYPTION_KEY_INFORMATION, tstream):
            self.encryption_key_information = EncryptionKeyInformation()
            self.encryption_key_information.read(tstream)
        if self.is_tag_next(Tags.MAC_SIGNATURE_KEY_INFORMATION, tstream):
            self.mac_signature_key_information = MACSignatureKeyInformation()
            self.mac_signature_key_information.read(tstream)
        if self.is_tag_next(Tags.ATTRIBUTE_NAME, tstream):
            self.attribute_name = KeyWrappingSpecification.AttributeName()
            self.attribute_name.read(tstream)
        if self.is_tag_next(Tags.ENCODING_OPTION, tstream):
            self.encoding_option = EncodingOption()
            self.encoding_option.read(tstream)
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """Encode the struct to *ostream*; None fields are omitted."""
        tstream = BytearrayStream()
        # Write the contents of the key wrapping specification.
        self.wrapping_method.write(tstream)
        if self.encryption_key_information is not None:
            self.encryption_key_information.write(tstream)
        if self.mac_signature_key_information is not None:
            self.mac_signature_key_information.write(tstream)
        if self.attribute_name is not None:
            self.attribute_name.write(tstream)
        if self.encoding_option is not None:
            self.encoding_option.write(tstream)
        # Write the length and value of the key wrapping specification.
        self.length = tstream.length()
        super(KeyWrappingSpecification, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation.
        pass
# 2.1.8
class TemplateAttribute(Struct):
    """
    A KMIP TemplateAttribute structure, holding zero or more Name objects
    and zero or more Attribute objects. See Section 2.1.8 of the KMIP 1.1
    specification for more information.
    """

    def __init__(self,
                 names=None,
                 attributes=None,
                 tag=Tags.TEMPLATE_ATTRIBUTE):
        """
        Construct a TemplateAttribute object.

        Args:
            names (list): A list of Name objects. Optional, defaults to an
                empty list.
            attributes (list): A list of Attribute objects. Optional,
                defaults to an empty list.
            tag (Tags): The tag identifying the structure when encoded.
                Optional, defaults to Tags.TEMPLATE_ATTRIBUTE.
        """
        super(TemplateAttribute, self).__init__(tag)
        if names is None:
            self.names = list()
        else:
            self.names = names
        if attributes is None:
            self.attributes = list()
        else:
            self.attributes = attributes
        self.validate()

    def read(self, istream):
        """
        Read the data encoding the TemplateAttribute object and decode it
        into its constituent parts.

        Args:
            istream (Stream): A data stream containing encoded object data,
                supporting a read method; usually a BytearrayStream object.
        """
        super(TemplateAttribute, self).read(istream)
        tstream = BytearrayStream(istream.read(self.length))
        self.names = list()
        self.attributes = list()
        # Read the names of the template attribute, 0 or more.
        while self.is_tag_next(Tags.NAME, tstream):
            # NOTE: 'attributes' here refers to the kmip.core.attributes
            # module, not the constructor parameter of the same name.
            name = attributes.Name()
            name.read(tstream)
            self.names.append(name)
        # Read the attributes of the template attribute, 0 or more.
        while self.is_tag_next(Tags.ATTRIBUTE, tstream):
            attribute = Attribute()
            attribute.read(tstream)
            self.attributes.append(attribute)
        self.is_oversized(tstream)
        self.validate()

    def write(self, ostream):
        """
        Write the data encoding the TemplateAttribute object to a stream.

        Args:
            ostream (Stream): A data stream in which to encode object data,
                supporting a write method; usually a BytearrayStream object.
        """
        tstream = BytearrayStream()
        # Write the names and attributes of the template attribute.
        for name in self.names:
            name.write(tstream)
        for attribute in self.attributes:
            attribute.write(tstream)
        # Write the length and value of the template attribute.
        self.length = tstream.length()
        super(TemplateAttribute, self).write(ostream)
        ostream.write(tstream.buffer)

    def validate(self):
        """
        Error check the attributes of the TemplateAttribute object.
        """
        self.__validate()

    def __validate(self):
        # TODO (peter-hamilton) Finish implementation.
        pass

    def __eq__(self, other):
        if isinstance(other, TemplateAttribute):
            # Bug fix: the original looped with Python 2-only xrange(),
            # which raises NameError on Python 3. Element-wise list
            # equality is equivalent (unequal lengths compare unequal).
            if self.names != other.names:
                return False
            elif self.attributes != other.attributes:
                return False
            else:
                return True
        else:
            return NotImplemented

    def __ne__(self, other):
        # Needed on Python 2, where __ne__ is not derived from __eq__;
        # matches the convention used by ExtensionInformation below.
        if isinstance(other, TemplateAttribute):
            return not (self == other)
        else:
            return NotImplemented
class CommonTemplateAttribute(TemplateAttribute):
    """A TemplateAttribute encoded with the COMMON_TEMPLATE_ATTRIBUTE tag."""

    def __init__(self, names=None, attributes=None):
        super(CommonTemplateAttribute, self).__init__(
            names, attributes, Tags.COMMON_TEMPLATE_ATTRIBUTE)
class PrivateKeyTemplateAttribute(TemplateAttribute):
    """A TemplateAttribute encoded with the PRIVATE_KEY_TEMPLATE_ATTRIBUTE tag."""

    def __init__(self, names=None, attributes=None):
        super(PrivateKeyTemplateAttribute, self).__init__(
            names, attributes, Tags.PRIVATE_KEY_TEMPLATE_ATTRIBUTE)
class PublicKeyTemplateAttribute(TemplateAttribute):
    """A TemplateAttribute encoded with the PUBLIC_KEY_TEMPLATE_ATTRIBUTE tag."""

    def __init__(self, names=None, attributes=None):
        super(PublicKeyTemplateAttribute, self).__init__(
            names, attributes, Tags.PUBLIC_KEY_TEMPLATE_ATTRIBUTE)
# 2.1.9
class ExtensionName(TextString):
    """
    The name of an extended Object.

    ExtensionName is a component of ExtensionInformation and identifies an
    Object that a vendor has added beyond the base KMIP specification. See
    Section 2.1.9 of the KMIP 1.1 specification for more information.

    Attributes:
        value: The string data representing the extension name.
    """

    def __init__(self, value=''):
        """
        Construct an ExtensionName object.

        Args:
            value (str): The string data representing the extension name.
                Optional, defaults to the empty string.
        """
        super(ExtensionName, self).__init__(value, Tags.EXTENSION_NAME)
class ExtensionTag(Integer):
    """
    The tag of an extended Object.

    ExtensionTag is a component of ExtensionInformation. See Section 2.1.9
    of the KMIP 1.1 specification for more information.

    Attributes:
        value: The tag number identifying the extended object.
    """

    def __init__(self, value=0):
        """
        Construct an ExtensionTag object.

        Args:
            value (int): A number representing the extension tag, commonly
                displayed in hex. Optional, defaults to 0.
        """
        super(ExtensionTag, self).__init__(value, Tags.EXTENSION_TAG)
class ExtensionType(Integer):
    """
    The type of an extended Object.

    ExtensionType is a component of ExtensionInformation and identifies the
    type of the Object in the specification extension. See Section 2.1.9 of
    the KMIP 1.1 specification for more information.

    Attributes:
        value: The type enumeration for the extended object.
    """

    def __init__(self, value=None):
        """
        Construct an ExtensionType object.

        Args:
            value (Types): A number representing a Types enumeration value,
                indicating the type of the extended Object. Optional,
                defaults to None.
        """
        super(ExtensionType, self).__init__(value, Tags.EXTENSION_TYPE)
class ExtensionInformation(Struct):
    """
    A structure describing Objects defined in KMIP specification extensions.
    It is used specifically for Objects with Item Tag values in the Extensions
    range and appears in responses to Query requests for server extension
    information. See Sections 2.1.9 and 4.25 of the KMIP 1.1 specification for
    more information.

    Attributes:
        extension_name: The name of the extended Object.
        extension_tag: The tag of the extended Object.
        extension_type: The type of the extended Object.
    """
def __init__(self, extension_name=None, extension_tag=None,
extension_type=None):
"""
Construct an ExtensionInformation object.
Args:
extension_name (ExtensionName): The name of the extended Object.
extension_tag (ExtensionTag): The tag of the extended Object.
extension_type (ExtensionType): The type of the extended Object.
"""
super(ExtensionInformation, self).__init__(Tags.EXTENSION_INFORMATION)
if extension_name is None:
self.extension_name = ExtensionName()
else:
self.extension_name = extension_name
self.extension_tag = extension_tag
self.extension_type = extension_type
self.validate()
def read(self, istream):
"""
Read the data encoding the ExtensionInformation object and decode it
into its constituent parts.
Args:
istream (Stream): A data stream containing encoded object data,
supporting a read method; usually a BytearrayStream object.
"""
super(ExtensionInformation, self).read(istream)
tstream = BytearrayStream(istream.read(self.length))
self.extension_name.read(tstream)
if self.is_tag_next(Tags.EXTENSION_TAG, tstream):
self.extension_tag = ExtensionTag()
self.extension_tag.read(tstream)
if self.is_tag_next(Tags.EXTENSION_TYPE, tstream):
self.extension_type = ExtensionType()
self.extension_type.read(tstream)
self.is_oversized(tstream)
self.validate()
def write(self, ostream):
"""
Write the data encoding the ExtensionInformation object to a stream.
Args:
ostream (Stream): A data stream in which to encode object data,
supporting a write method; usually a BytearrayStream object.
"""
tstream = BytearrayStream()
self.extension_name.write(tstream)
if self.extension_tag is not None:
self.extension_tag.write(tstream)
if self.extension_type is not None:
self.extension_type.write(tstream)
self.length = tstream.length()
super(ExtensionInformation, self).write(ostream)
ostream.write(tstream.buffer)
def validate(self):
"""
Error check the attributes of the ExtensionInformation object.
"""
self.__validate()
def __validate(self):
if not isinstance(self.extension_name, ExtensionName):
msg = "invalid extension name"
msg += "; expected {0}, received {1}".format(
ExtensionName, self.extension_name)
raise TypeError(msg)
if self.extension_tag is not None:
if not isinstance(self.extension_tag, ExtensionTag):
msg = "invalid extension tag"
msg += "; expected {0}, received {1}".format(
ExtensionTag, self.extension_tag)
raise TypeError(msg)
if self.extension_type is not None:
if not isinstance(self.extension_type, ExtensionType):
msg = "invalid extension type"
msg += "; expected {0}, received {1}".format(
ExtensionType, self.extension_type)
raise TypeError(msg)
def __eq__(self, other):
if isinstance(other, ExtensionInformation):
if self.extension_name != other.extension_name:
return False
elif self.extension_tag != other.extension_tag:
return False
elif self.extension_type != other.extension_type:
return False
else:
return True
else:
return NotImplemented
def __ne__(self, other):
if isinstance(other, ExtensionInformation):
return not (self == other)
else:
return NotImplemented
def __repr__(self):
name = "extension_name={0}".format(repr(self.extension_name))
tag = "extension_tag={0}".format(repr(self.extension_tag))
typ | |
import socket
import asyncio
import os
import re
import http
import gzip
import select
import signal
import json
import traceback
from .timer import Timer
from urllib.parse import unquote
from .logger import info, error, warning
from typing import Any, Union, Tuple, Dict, Callable, Coroutine, List, Iterable, Optional
# Map of numeric HTTP status codes to their reason phrases (e.g. 200 -> "OK").
STATUS_CODE = {c.value: c.phrase for c in http.HTTPStatus}
# Sadly no windows support.
# The server depends on POSIX-only socket/signal behavior, so fail loudly at
# import time on Windows rather than in obscure ways later.
if os.name == "nt":
    raise OSError("You can't use this package on windows machine!")
class Glob:
    """Module-wide mutable settings shared across the package."""
    # Optional custom JSON serializer (a callable like json.dumps);
    # None means fall back to the stdlib json module.
    json = None
    # Whether request logging is enabled.
    logging = False
# Singleton settings instance used throughout this module.
glob = Glob()
class CaseInsensitiveDict:
    """A Python dictionary equivalent with case insensitive keys.

    String keys are lower-cased on every access, so `d["Host"]` and
    `d["host"]` refer to the same entry. Non-string keys are stored as-is.
    """
    __slots__ = ("_dict",)
    def __init__(self, d: Optional[dict] = None) -> None:
        """Creates an instance of `CaseInsensitiveDict`. If `d` is set, the
        data in it will be converted into case insensitive."""
        self._dict: dict = {}
        # Dict convertion.
        if d:
            if not isinstance(d, dict):
                raise ValueError("Only conversion of dict is supported.")
            self.__conv_dict(d)
    def __repr__(self) -> str:
        """String representation of the CaseInsensitiveDict."""
        return f"<CaseInsensitiveDict {self._dict!r}>"
    # Dictionary Functionality.
    def __setitem__(self, key, val) -> None:
        """Sets an item to the CaseInsensitiveDict."""
        if key.__class__ is str: key = key.lower()
        self._dict[key] = val
    def __getitem__(self, key):
        """Retrieves an item from the dictionary, raising a `KeyError` if not
        found."""
        if key.__class__ is str: key = key.lower()
        return self._dict[key]
    def __delitem__(self, key):
        """Deletes an item from the dictionary, raising a `KeyError` if not found."""
        if key.__class__ is str: key = key.lower()
        del self._dict[key]
    def __iter__(self):
        """Simple iteration support, iterating over the keys."""
        for k in self._dict: yield k
    def __bool__(self) -> bool:
        """Returns whether the dict is non-empty.

        Bug fix: the class previously defined only `__not__`, which is not a
        special method Python recognises, so empty instances were truthy.
        """
        return bool(self._dict)
    def __not__(self) -> bool:
        """Returns bool corresponding to whether the dict is empty.

        Kept for backward compatibility with direct callers; truthiness is
        now correctly provided by `__bool__`.
        """
        return not self._dict
    def __concat__(self, d: Union[dict, 'CaseInsensitiveDict']) -> None:
        """Expands the current dict."""
        self.__conv_dict(d)
    def __contains__(self, key) -> bool:
        """Checks if the dict contains the key `key`."""
        if key.__class__ is str: key = key.lower()
        return key in self._dict
    def __conv_dict(self, d: Union[dict, 'CaseInsensitiveDict']) -> None:
        """Converts data from the dictionary `d` to our storage format.

        Note:
            This does NOT clear the data of the CaseInsensitiveDict.
        """
        for k, v in d.items():
            if k.__class__ is str: k = k.lower()
            self._dict[k] = v
    def items(self):
        """Iterates over all items and keys of the dict."""
        return self._dict.items()
    def keys(self) -> tuple:
        """Displays all keys of the dictionary as a `tuple`."""
        # Decided to do the conversion here as we dont need their fancy stuff.
        return tuple(self._dict.keys())
    def get(self, key, default = None):
        """Returns the value of a `key` in the dict, returning `default` if
        key does not exist."""
        if key.__class__ is str: key = key.lower()
        return self._dict.get(key, default)
class Request:
    """A class for parsing an incoming web request.

    One instance is created per accepted connection; it parses the request
    line, headers, query args and body, and provides helpers for building
    the response back to the client.
    """
    def __init__(
        self,
        client: socket.socket,
        loop: asyncio.AbstractEventLoop
    ) -> None:
        # Connected client socket and the event loop used for async I/O.
        self.__client: socket.socket = client
        self.__loop: asyncio.AbstractEventLoop = loop
        # Request-line fields, overwritten by _parse_headers().
        self.type: str = "GET"
        self.http_ver: str = "1.1"
        self.path: str = "/"
        # Raw request body bytes (filled by perform_parse()).
        self.body: bytearray = bytearray()
        self.elapsed: str = "0ms" # Logging purposes.
        self.conns_served: int = 0
        # Request headers (case-insensitive) and parsed argument maps.
        self.headers: CaseInsensitiveDict = CaseInsensitiveDict()
        self.get_args: Dict[str, Any] = {}
        self.post_args: Dict[str, Any] = {}
        self.files: Dict[str, Any] = {}
        # Arguments passed to the matched handler (self plus regex groups).
        self.handle_args: list = [self]
        # Response state mutated by handlers before send().
        self.resp_code: int = 200
        self.resp_headers: Dict[str, Any] = {}
    def add_header(self, key: str, value: Any) -> None:
        """Adds header to response back headers."""
        self.resp_headers[key] = value
    def _parse_headers(self, data: str) -> None:
        """Parses the request line, query string and headers from the raw
        header section of the client data.

        Params:
            - data: str = the header section (text before \r\n\r\n).

        Returns:
            Nothing; populates self.type/path/version, get_args and headers.
        """
        self.type, self.path, version = data.splitlines()[0].split(" ")
        # NOTE(review): this sets self.version while __init__ declares
        # self.http_ver — the two are never reconciled; confirm which one
        # downstream code reads.
        self.version = version.split("/")[1]
        # Parsing get args.
        if "?" in self.path:
            self.path, args = self.path.split("?")
            for arg in args.split("&"):
                key, value = arg.split("=", 1)
                self.get_args[unquote(key)] = unquote(value).strip()
        # Now headers.
        for key, value in [header.split(":", 1) for header in data.splitlines()[1:]]:
            self.headers[key] = value.strip()
    def _www_form_parser(self) -> None:
        """Optional parser for application/x-www-form-urlencoded bodies.

        Returns:
            Nothing; updates self.post_args with the decoded form fields.
        """
        body_str = self.body.decode()
        for args in body_str.split("&"):
            k, v = args.split("=", 1)
            self.post_args[unquote(k).strip()] = unquote(v).strip()
    def return_json(self, code: int, content: Union[dict, str, Any]):
        """Serializes `content` to JSON, sets the response code and
        Content-Type, and returns the serialized payload."""
        self.resp_code = code
        # Use the custom serializer from glob when configured, else stdlib.
        json_parser = glob.json or json.dumps
        resp_back = json_parser(content)
        self.resp_headers["Content-Type"] = "application/json"
        return resp_back
    async def send(self, code: int, data: bytes) -> None:
        """Sends data back to the client.

        Params:
            - code: int = Status code to send back.
            - data: bytes = Bytes to send back.

        Returns:
            Sends all data to client.
        """
        resp = bytearray()
        temp = [f"HTTP/1.1 {code} {STATUS_CODE.get(code)}"]
        # Add content len
        if data:
            temp.append(f"Content-Length: {len(data)}")
        # Join headers.
        # NOTE(review): ': '.join((k, v)) assumes every header value is
        # already a str — non-str values raise TypeError; verify callers.
        temp.extend(map(': '.join, self.resp_headers.items()))
        resp += ('\r\n'.join(temp) + '\r\n\r\n').encode()
        # Add body.
        if data:
            resp += data
        try: # Send all data to client.
            await self.__loop.sock_sendall(self.__client, resp)
        except Exception:
            # Best-effort send: the client may have disconnected already.
            pass
    def _parse_multipart(self) -> None:
        """Parses a multipart/form-data body into self.files (file parts)
        and self.post_args (plain fields).

        Returns:
            Nothing; populates self.files and self.post_args.
        """
        # Create an boundary.
        boundary = "--" + self.headers['Content-Type'].split('boundary=', 1)[1]
        # First split element is the preamble, last is the closing "--".
        parts = self.body.split(boundary.encode())[1:]
        for part in parts[:-1]:
            # We get headers & body.
            headers, body = part.split(b"\r\n\r\n", 1)
            temp_headers = CaseInsensitiveDict()
            for key, val in [p.split(":", 1) for p in [h for h in headers.decode().split("\r\n")[1:]]]:
                temp_headers[key] = val.strip()
            content = temp_headers.get("Content-Disposition")
            if not content:
                # Main header don't exist, we can't continue.
                continue
            temp_args = {}
            # Parse 'name="x"; filename="y"' style parameters; val[1:-1]
            # strips the surrounding quotes.
            for key, val in [args.split("=", 1) for args in content.split(";")[1:]]:
                temp_args[key.strip()] = val[1:-1]
            # body[:-2] drops the trailing \r\n before the next boundary.
            if "filename" in temp_args: self.files[temp_args['filename']] = body[:-2] # It is a file.
            else: self.post_args[temp_args['name']] = body[:-2].decode() # It's a post arg.
    async def perform_parse(self) -> None:
        """Performs full parsing on headers and body bytes."""
        buffer = bytearray() # Bytearray is faster than bytes.
        # Read until the end of the header section is seen.
        while (offset := buffer.find(b"\r\n\r\n")) == -1:
            buffer += await self.__loop.sock_recv(self.__client, 1024)
        self._parse_headers(buffer[:offset].decode())
        # Headers are parsed so now we put rest to body.
        self.body += buffer[offset + 4:]
        try: content_len = int(self.headers["Content-Length"])
        except KeyError: return # Get args request only.
        # NOTE(review): if the initial recv already read MORE than
        # Content-Length bytes (e.g. a pipelined request), to_read goes
        # negative and this branch still runs — confirm intended behavior.
        if (to_read := ((offset + 4) + content_len) - len(buffer)): # Find how much to read.
            buffer += b"\x00" * to_read # Allocate space.
            # Fill the pre-allocated tail of the buffer in place.
            with memoryview(buffer)[-to_read:] as view:
                while to_read:
                    read_bytes = await self.__loop.sock_recv_into(self.__client, view)
                    view = view[read_bytes:]
                    to_read -= read_bytes
            # Add to body.
            # Append only the bytes received after the initial chunk.
            self.body += memoryview(buffer)[offset + 4 + len(self.body):].tobytes()
        if self.type == "POST":
            # Dispatch to the right body parser based on Content-Type.
            if (ctx_type := self.headers.get("Content-Type")):
                if ctx_type.startswith("multipart/form-data") or \
                "form-data" in ctx_type or "multipart/form-data" in ctx_type:
                    self._parse_multipart()
                elif ctx_type in ("x-www-form", "application/x-www-form-urlencoded"):
                    self._www_form_parser()
class Endpoint:
    """A route definition: a path matcher plus its handler coroutine.

    The path may be a literal string, a pre-compiled regex, or an iterable
    mixing both. A string containing ``<name>`` placeholders is compiled
    into a named-group regex (e.g. ``"/u/<id>"`` -> ``/u/(?P<id>.+)``).
    """
    def __init__(
        self,
        path: Union[str, re.Pattern, Iterable],
        handler: Coroutine,
        methods: Optional[List[str]] = None
    ) -> None:
        """
        Args:
            path: The path (or paths) this endpoint responds to.
            handler: The coroutine invoked when the path matches.
            methods: Allowed HTTP methods; defaults to ["GET"].
        """
        self.path: Union[str, re.Pattern, Iterable] = path
        # Bug fix: the previous default argument (["GET"]) was one shared
        # mutable list across every Endpoint; build a fresh list instead.
        self.methods: List[str] = list(methods) if methods is not None else ["GET"]
        self.handler: Coroutine = handler
        self.condition: object = None
        # Compile "<name>" placeholder paths into a named-group regex.
        if not isinstance(self.path, re.Pattern) and all(char in self.path for char in ("<", ">")):
            self.path = re.compile(rf"{self.path.replace('<', '(?P<').replace('>', '>.+)')}")
    def parse_regex(self, path: str, regex_path: re.Pattern):
        """Matches `path` against `regex_path`.

        Returns:
            False when the regex does not match, True when it matches but
            captures nothing, otherwise the list of unquoted group values.
        """
        if not (args := regex_path.match(path)):
            return False
        if not (adict := args.groupdict()):
            return True
        return [unquote(adict[key]) for key in adict]
    def match(self, path: str) -> Union[bool, List[Any]]:
        """Compares the path with the current endpoint path."""
        if isinstance(self.path, re.Pattern):
            # Parse regex :D
            return self.parse_regex(path, self.path)
        elif isinstance(self.path, str):
            # This is simple one
            return self.path == path
        elif isinstance(self.path, Iterable):
            if path in self.path: return True
            # Bug fix: the first regex's result used to be returned
            # unconditionally, so later patterns in the iterable were
            # never tried. Keep scanning until one matches.
            for p in self.path:
                if isinstance(p, re.Pattern):
                    if (result := self.parse_regex(path, p)):
                        return result
        return False
class Router:
    """A class for a single app router, grouping endpoints under a domain."""
    def __init__(self, domain: Union[str, set, re.Pattern]) -> None:
        """
        Args:
            domain: A literal host string, an iterable of host strings
                and/or compiled regexes, or a single compiled regex that
                the request Host header is matched against.
        """
        self.domain: Union[str, set, re.Pattern] = domain
        self.endpoints: set = set()
        self.before_serve: set = set()
        self.after_serve: set = set()
    def match(self, host: str) -> bool:
        """Performs some checks to match domain with host."""
        if isinstance(self.domain, str):
            return host == self.domain
        elif isinstance(self.domain, Iterable):
            if host in self.domain: return True
            # Bug fix: the first regex's result used to be returned
            # immediately, so later patterns in the iterable were never
            # consulted. Keep scanning until one matches.
            for domain in self.domain:
                if isinstance(domain, re.Pattern) and domain.match(host) is not None:
                    return True
            return False
        elif isinstance(self.domain, re.Pattern):
            return self.domain.match(host) is not None
        # Unsupported domain specification: previously fell through and
        # returned None implicitly; make the falsy result explicit.
        return False
    def before_request(self) -> Callable:
        """Decorator registering a coroutine to run before each request."""
        def wrapper(handler: Coroutine) -> Coroutine:
            self.before_serve.add(handler)
            return handler
        return wrapper
    def after_request(self) -> Callable:
        """Decorator registering a coroutine to run after each request."""
        def wrapper(handler: Coroutine) -> Coroutine:
            self.after_serve.add(handler)
            return handler
        return wrapper
    def add_endpoint(self, path: Union[str, re.Pattern, Iterable], methods: Optional[List[str]] = None) -> Callable:
        """Decorator registering `handler` for `path` on this router.

        Bug fix: the previous default (["GET"]) was a shared mutable list;
        a fresh list is now created per registration.
        """
        def wrapper(handler: Coroutine) -> Coroutine:
            self.endpoints.add(Endpoint(path, handler, methods if methods is not None else ["GET"]))
            return handler
        return wrapper
class LenHTTP:
    """An asyncio HTTP server dispatching requests to registered Routers."""
def __init__(
self,
address: Union[Tuple[str, int], str],
loop = asyncio.get_event_loop(),
**kwargs
) -> None:
self.address: Union[Tuple[str, int], str] = address
self.loop: asyncio.AbstractEventLoop = loop
self.socket_fam: Union[socket.AF_INET, socket.AF_UNIX] = None
self.gzip = kwargs.get("gzip", 0)
self.max_conns = kwargs.get("max_conns", 5)
self.routers: set = set()
self.middleware_request: dict = {}
self._conns_served: int = 0
self.before_serving_coros: set = set()
self.after_serving_coros: set = set()
self.coro_tasks: set = set()
self.tasks: set = set()
self.app: bool = kwargs.get("app", False)
if "logging" in kwargs: glob.logging = kwargs.pop("logging")
if "json_serialize" in kwargs: glob.json = kwargs.pop("json_serialize")
def add_router(self, router: Router) -> None:
"""Adds router to server."""
self.routers.add(router)
def add_routers(self, routers: set[Router]) -> None:
"""Adds routers to server."""
self.routers |= routers
def add_task(self, task: Coroutine, *args) -> None:
"""Adds task to server."""
if args:
self.coro_tasks.add((task, args))
else:
self.coro_tasks.add(task)
def add_tasks(self, tasks: set[Coroutine]) -> None:
"""Adds tasks to server."""
self.coro_tasks |= tasks
def add_middleware(self, code: int) -> Callable:
"""Adds an custom middleware for handling codes."""
def wrapper(handler: Coroutine) -> Coroutine:
self.middleware_request[code] = handler
return handler
return wrapper
def find_router(self, host: str) -> Optional[Router]:
"""Finds the right router."""
for router in self.routers:
if router.match(host):
return router
def find_endpoint(self, router: Router, path: str) -> Optional[Tuple[Union[List[Any], bool], Endpoint]]:
"""Match an endpoint with given path."""
for endpoint in router.endpoints:
if (check := endpoint.match(path)):
return (check, endpoint)
def before_serving(self) -> Callable:
"""Adds the coroutines to be started before server permanently starts."""
def wrapper(handler: Coroutine) -> Coroutine:
self.before_serving_coros.add(handler)
return handler
return wrapper
def after_serving(self) -> Callable:
"""Adds the coroutines to be started after server close."""
def wrapper(handler: Coroutine) -> Coroutine:
self.after_serving_coros.add(handler)
return handler
return wrapper
async def handle_route(self, request: Request) -> None:
"""Handle a request route."""
host = request.headers['Host']
path = request.path
request.resp_code = 404
resp = b"Request not found!"
try:
# Check if there is custom middleware handler.
if (handler := self.middleware_request.get(request.resp_code)):
resp = await handler(request)
if isinstance(resp, str): resp = resp.encode()
if not (router := self.find_router(host)):
request.elapsed = request.elapsed.time_str()
if glob.logging:
info(f"{request.resp_code} | Handled {request.type} {host}{path} in {request.elapsed}")
return await request.send(request.resp_code, resp)
for coro in router.before_serve: await coro(request)
if (found | |
super(IKTaskSet, self).adoptAndAppend(aIKTask)
__swig_destroy__ = _tools.delete_IKTaskSet
__del__ = lambda self: None
# Register the proxy class with the native extension so C++ objects of this
# type are wrapped as IKTaskSet instances (SWIG-generated boilerplate).
IKTaskSet_swigregister = _tools.IKTaskSet_swigregister
IKTaskSet_swigregister(IKTaskSet)
def IKTaskSet_safeDownCast(obj):
    """
    IKTaskSet_safeDownCast(OpenSimObject obj) -> IKTaskSet
    Parameters
    ----------
    obj: OpenSim::Object *
    """
    # SWIG-generated module-level alias delegating to the native extension.
    return _tools.IKTaskSet_safeDownCast(obj)
def IKTaskSet_getClassName():
    """IKTaskSet_getClassName() -> std::string const &"""
    # SWIG-generated module-level alias delegating to the native extension.
    return _tools.IKTaskSet_getClassName()
class MarkerPair(opensim.common.OpenSimObject):
    """Proxy of C++ OpenSim::MarkerPair class."""
    # SWIG-generated proxy: attribute access is routed through the native
    # _tools extension via the merged setter/getter tables below.
    __swig_setmethods__ = {}
    for _s in [opensim.common.OpenSimObject]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, MarkerPair, name, value)
    __swig_getmethods__ = {}
    for _s in [opensim.common.OpenSimObject]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, MarkerPair, name)
    __repr__ = _swig_repr
    def safeDownCast(obj):
        """
        safeDownCast(OpenSimObject obj) -> MarkerPair
        Parameters
        ----------
        obj: OpenSim::Object *
        """
        return _tools.MarkerPair_safeDownCast(obj)
    safeDownCast = staticmethod(safeDownCast)
    def assign(self, aObject):
        """
        assign(MarkerPair self, OpenSimObject aObject)
        Parameters
        ----------
        aObject: OpenSim::Object &
        """
        return _tools.MarkerPair_assign(self, aObject)
    def getClassName():
        """getClassName() -> std::string const &"""
        return _tools.MarkerPair_getClassName()
    getClassName = staticmethod(getClassName)
    def clone(self):
        """
        clone(MarkerPair self) -> MarkerPair
        Parameters
        ----------
        self: OpenSim::MarkerPair const *
        """
        return _tools.MarkerPair_clone(self)
    def getConcreteClassName(self):
        """
        getConcreteClassName(MarkerPair self) -> std::string const &
        Parameters
        ----------
        self: OpenSim::MarkerPair const *
        """
        return _tools.MarkerPair_getConcreteClassName(self)
    def __init__(self, *args):
        """
        __init__(OpenSim::MarkerPair self) -> MarkerPair
        __init__(OpenSim::MarkerPair self, MarkerPair aMarkerPair) -> MarkerPair
        Parameters
        ----------
        aMarkerPair: OpenSim::MarkerPair const &
        __init__(OpenSim::MarkerPair self, std::string const & aName1, std::string const & aName2) -> MarkerPair
        Parameters
        ----------
        aName1: std::string const &
        aName2: std::string const &
        """
        # Allocate the underlying C++ object and attach it to this proxy.
        this = _tools.new_MarkerPair(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    # SWIG-managed destructor wiring for the underlying C++ object.
    __swig_destroy__ = _tools.delete_MarkerPair
    __del__ = lambda self: None
    def copyData(self, aMarkerPair):
        """
        copyData(MarkerPair self, MarkerPair aMarkerPair)
        Parameters
        ----------
        aMarkerPair: OpenSim::MarkerPair const &
        """
        return _tools.MarkerPair_copyData(self, aMarkerPair)
    def getMarkerNames(self, aName1, aName2):
        """
        getMarkerNames(MarkerPair self, std::string & aName1, std::string & aName2)
        Parameters
        ----------
        aName1: std::string &
        aName2: std::string &
        """
        return _tools.MarkerPair_getMarkerNames(self, aName1, aName2)
    def getMarkerName(self, i):
        """
        getMarkerName(MarkerPair self, int i) -> std::string const &
        Parameters
        ----------
        i: int
        """
        return _tools.MarkerPair_getMarkerName(self, i)
    def setMarkerName(self, i, aName):
        """
        setMarkerName(MarkerPair self, int i, std::string const & aName)
        Parameters
        ----------
        i: int
        aName: std::string const &
        """
        return _tools.MarkerPair_setMarkerName(self, i, aName)
# Register the proxy class with the native extension (SWIG boilerplate).
MarkerPair_swigregister = _tools.MarkerPair_swigregister
MarkerPair_swigregister(MarkerPair)
def MarkerPair_safeDownCast(obj):
    """
    MarkerPair_safeDownCast(OpenSimObject obj) -> MarkerPair
    Parameters
    ----------
    obj: OpenSim::Object *
    """
    # SWIG-generated module-level alias delegating to the native extension.
    return _tools.MarkerPair_safeDownCast(obj)
def MarkerPair_getClassName():
    """MarkerPair_getClassName() -> std::string const &"""
    # SWIG-generated module-level alias delegating to the native extension.
    return _tools.MarkerPair_getClassName()
class SetMarkerPairs(opensim.common.OpenSimObject):
    """Proxy of C++ OpenSim::Set<(OpenSim::MarkerPair,OpenSim::Object)> class."""
    # SWIG-generated dispatch tables: merge the setter/getter maps of every
    # base class so _swig_setattr/_swig_getattr can route attribute access
    # through the native extension.
    __swig_setmethods__ = {}
    for _s in [opensim.common.OpenSimObject]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SetMarkerPairs, name, value)
    __swig_getmethods__ = {}
    for _s in [opensim.common.OpenSimObject]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SetMarkerPairs, name)
    __repr__ = _swig_repr
    def safeDownCast(obj):
        """
        safeDownCast(OpenSimObject obj) -> SetMarkerPairs
        Parameters
        ----------
        obj: OpenSim::Object *
        """
        # Thin SWIG wrapper over the native downcast helper.
        return _tools.SetMarkerPairs_safeDownCast(obj)
    safeDownCast = staticmethod(safeDownCast)
    def assign(self, aObject):
        """
        assign(SetMarkerPairs self, OpenSimObject aObject)
        Parameters
        ----------
        aObject: OpenSim::Object &
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_assign(self, aObject)
    def getClassName():
        """getClassName() -> std::string const &"""
        # Static helper exposed by the native extension.
        return _tools.SetMarkerPairs_getClassName()
    getClassName = staticmethod(getClassName)
    def clone(self):
        """
        clone(SetMarkerPairs self) -> SetMarkerPairs
        Parameters
        ----------
        self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > const *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_clone(self)
    def getConcreteClassName(self):
        """
        getConcreteClassName(SetMarkerPairs self) -> std::string const &
        Parameters
        ----------
        self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > const *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_getConcreteClassName(self)
    # SWIG-managed destructor wiring for the underlying C++ object.
    __swig_destroy__ = _tools.delete_SetMarkerPairs
    __del__ = lambda self: None
    def __init__(self, *args):
        """
        __init__(OpenSim::Set<(OpenSim::MarkerPair,OpenSim::Object)> self) -> SetMarkerPairs
        __init__(OpenSim::Set<(OpenSim::MarkerPair,OpenSim::Object)> self, std::string const & aFileName, bool aUpdateFromXMLNode=True) -> SetMarkerPairs
        Parameters
        ----------
        aFileName: std::string const &
        aUpdateFromXMLNode: bool
        __init__(OpenSim::Set<(OpenSim::MarkerPair,OpenSim::Object)> self, std::string const & aFileName) -> SetMarkerPairs
        Parameters
        ----------
        aFileName: std::string const &
        __init__(OpenSim::Set<(OpenSim::MarkerPair,OpenSim::Object)> self, SetMarkerPairs aSet) -> SetMarkerPairs
        Parameters
        ----------
        aSet: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > const &
        """
        # Allocate the underlying C++ object and attach it to this proxy.
        this = _tools.new_SetMarkerPairs(*args)
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this
    def setupGroups(self):
        """
        setupGroups(SetMarkerPairs self)
        Parameters
        ----------
        self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_setupGroups(self)
    def setMemoryOwner(self, aTrueFalse):
        """
        setMemoryOwner(SetMarkerPairs self, bool aTrueFalse)
        Parameters
        ----------
        aTrueFalse: bool
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_setMemoryOwner(self, aTrueFalse)
    def setSize(self, aSize):
        """
        setSize(SetMarkerPairs self, int aSize) -> bool
        Parameters
        ----------
        aSize: int
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_setSize(self, aSize)
    def getSize(self):
        """
        getSize(SetMarkerPairs self) -> int
        Parameters
        ----------
        self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > const *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_getSize(self)
    def getIndex(self, *args):
        """
        getIndex(SetMarkerPairs self, MarkerPair aObject, int aStartIndex=0) -> int
        Parameters
        ----------
        aObject: OpenSim::MarkerPair const *
        aStartIndex: int
        getIndex(SetMarkerPairs self, MarkerPair aObject) -> int
        Parameters
        ----------
        aObject: OpenSim::MarkerPair const *
        getIndex(SetMarkerPairs self, std::string const & aName, int aStartIndex=0) -> int
        Parameters
        ----------
        aName: std::string const &
        aStartIndex: int
        getIndex(SetMarkerPairs self, std::string const & aName) -> int
        Parameters
        ----------
        aName: std::string const &
        """
        # Overload dispatch is handled by the native extension.
        return _tools.SetMarkerPairs_getIndex(self, *args)
    def getGroupNamesContaining(self, aObjectName, rGroupNames):
        """
        getGroupNamesContaining(SetMarkerPairs self, std::string const & aObjectName, ArrayStr rGroupNames)
        Parameters
        ----------
        aObjectName: std::string const &
        rGroupNames: OpenSim::Array< std::string > &
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_getGroupNamesContaining(self, aObjectName, rGroupNames)
    def adoptAndAppend(self, aObject):
        """
        adoptAndAppend(SetMarkerPairs self, MarkerPair aObject) -> bool
        Parameters
        ----------
        aObject: OpenSim::MarkerPair *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_adoptAndAppend(self, aObject)
    def cloneAndAppend(self, aObject):
        """
        cloneAndAppend(SetMarkerPairs self, MarkerPair aObject) -> bool
        Parameters
        ----------
        aObject: OpenSim::MarkerPair const &
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_cloneAndAppend(self, aObject)
    def insert(self, aIndex, aObject):
        """
        insert(SetMarkerPairs self, int aIndex, MarkerPair aObject) -> bool
        Parameters
        ----------
        aIndex: int
        aObject: OpenSim::MarkerPair *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_insert(self, aIndex, aObject)
    def remove(self, *args):
        """
        remove(SetMarkerPairs self, int aIndex) -> bool
        Parameters
        ----------
        aIndex: int
        remove(SetMarkerPairs self, MarkerPair aObject) -> bool
        Parameters
        ----------
        aObject: OpenSim::MarkerPair const *
        """
        # Overload dispatch is handled by the native extension.
        return _tools.SetMarkerPairs_remove(self, *args)
    def clearAndDestroy(self):
        """
        clearAndDestroy(SetMarkerPairs self)
        Parameters
        ----------
        self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_clearAndDestroy(self)
    def set(self, aIndex, aObject, preserveGroups=False):
        """
        set(SetMarkerPairs self, int aIndex, MarkerPair aObject, bool preserveGroups=False) -> bool
        Parameters
        ----------
        aIndex: int
        aObject: OpenSim::MarkerPair *
        preserveGroups: bool
        set(SetMarkerPairs self, int aIndex, MarkerPair aObject) -> bool
        Parameters
        ----------
        aIndex: int
        aObject: OpenSim::MarkerPair *
        """
        # Thin SWIG wrapper over the native implementation.
        return _tools.SetMarkerPairs_set(self, aIndex, aObject, preserveGroups)
def get(self, *args):
    """
    Overloaded lookup by index or by name; dispatch is resolved by SWIG.

    get(SetMarkerPairs self, int aIndex) -> MarkerPair

    Parameters
    ----------
    aIndex: int

    get(SetMarkerPairs self, std::string const & aName) -> MarkerPair

    Parameters
    ----------
    aName: std::string const &
    """
    # Forward all arguments unchanged; the extension picks the overload.
    found = _tools.SetMarkerPairs_get(self, *args)
    return found
def contains(self, aName):
    """
    contains(SetMarkerPairs self, std::string const & aName) -> bool

    Return True when an object named `aName` is present in the set.

    Parameters
    ----------
    aName: std::string const &
    """
    # Delegate to the native implementation in the _tools extension.
    present = _tools.SetMarkerPairs_contains(self, aName)
    return present
def getNames(self, rNames):
    """
    getNames(SetMarkerPairs self, ArrayStr rNames)

    Fill `rNames` with the names of all objects in the set.

    Parameters
    ----------
    rNames: OpenSim::Array< std::string > &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_getNames(self, rNames)
    return outcome
def getNumGroups(self):
    """
    getNumGroups(SetMarkerPairs self) -> int

    Return the number of object groups defined on this set.

    Parameters
    ----------
    self: OpenSim::Set< OpenSim::MarkerPair,OpenSim::Object > const *
    """
    # Delegate to the native implementation in the _tools extension.
    count = _tools.SetMarkerPairs_getNumGroups(self)
    return count
def addGroup(self, aGroupName):
    """
    addGroup(SetMarkerPairs self, std::string const & aGroupName)

    Create a new, empty object group named `aGroupName`.

    Parameters
    ----------
    aGroupName: std::string const &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_addGroup(self, aGroupName)
    return outcome
def removeGroup(self, aGroupName):
    """
    removeGroup(SetMarkerPairs self, std::string const & aGroupName)

    Remove the object group named `aGroupName`.

    Parameters
    ----------
    aGroupName: std::string const &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_removeGroup(self, aGroupName)
    return outcome
def renameGroup(self, oldGroupName, newGroupName):
    """
    renameGroup(SetMarkerPairs self, std::string const & oldGroupName, std::string const & newGroupName)

    Rename the group `oldGroupName` to `newGroupName`.

    Parameters
    ----------
    oldGroupName: std::string const &
    newGroupName: std::string const &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_renameGroup(self, oldGroupName, newGroupName)
    return outcome
def addObjectToGroup(self, aGroupName, aObjectName):
    """
    addObjectToGroup(SetMarkerPairs self, std::string const & aGroupName, std::string const & aObjectName)

    Add the object named `aObjectName` to the group `aGroupName`.

    Parameters
    ----------
    aGroupName: std::string const &
    aObjectName: std::string const &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_addObjectToGroup(self, aGroupName, aObjectName)
    return outcome
def getGroupNames(self, rGroupNames):
    """
    getGroupNames(SetMarkerPairs self, ArrayStr rGroupNames)

    Fill `rGroupNames` with the names of all groups defined on this set.

    Parameters
    ----------
    rGroupNames: OpenSim::Array< std::string > &
    """
    # Delegate to the native implementation in the _tools extension.
    outcome = _tools.SetMarkerPairs_getGroupNames(self, rGroupNames)
    return outcome
def getGroup(self, *args):
    """
    Overloaded group lookup by name or by index; dispatch is resolved by SWIG.

    getGroup(SetMarkerPairs self, std::string const & aGroupName) -> ObjectGroup

    Parameters
    ----------
    aGroupName: std::string const &

    getGroup(SetMarkerPairs self, int aIndex) -> ObjectGroup

    Parameters
    ----------
    aIndex: int
    """
    # Forward all arguments unchanged; the extension picks the overload.
    group = _tools.SetMarkerPairs_getGroup(self, *args)
    return group
class SetIterator(object):
    """
    Iterator over a Set. Instances are normally obtained by calling
    Set.__iter__() rather than constructed directly.
    """

    def __init__(self, set_obj, index):
        """Create an iterator over `set_obj`, starting at position `index`."""
        self._source = set_obj
        self._cursor = index

    def __iter__(self):
        """An iterator is its own iterable."""
        return self

    def __next__(self):
        # Stop as soon as the cursor runs past the underlying Set's size.
        if self._cursor >= self._source.getSize():
            raise StopIteration()
        item = self._source.get(self._cursor)
        self._cursor += 1
        return item

    next = __next__  # Python 2 compatibility alias.
def __iter__(self):
    """Return a SetIterator over this Set, positioned at index 0."""
    iterator = self.SetIterator(self, 0)
    return iterator
def items(self):
    """
    A generator function that allows you to iterate over the key-value
    pairs of this Set. You can use this in a for-loop as such::

        for key, val in my_function_set.items():
            # `val` is an item in the Set, and `key` is its name.
            print(key, val)
    """
    # NOTE: getSize() is re-checked every iteration and get() is called
    # twice per pair, matching the original generated behavior.
    index = 0
    while index < self.getSize():
        yield self.get(index).getName(), self.get(index)
        index += 1
# Register the proxy class with the SWIG runtime so that objects of this
# C++ type returned from the extension are wrapped as SetMarkerPairs.
# The module-level alias is part of the generated public interface.
SetMarkerPairs_swigregister = _tools.SetMarkerPairs_swigregister
SetMarkerPairs_swigregister(SetMarkerPairs)
def SetMarkerPairs_safeDownCast(obj):
    """
    SetMarkerPairs_safeDownCast(OpenSimObject obj) -> SetMarkerPairs

    Attempt to downcast a generic OpenSim::Object to SetMarkerPairs.

    Parameters
    ----------
    obj: OpenSim::Object *
    """
    # Delegate to the native implementation in the _tools extension.
    downcast = _tools.SetMarkerPairs_safeDownCast(obj)
    return downcast
def SetMarkerPairs_getClassName():
"""SetMarkerPairs_getClassName() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.