input stringlengths 2.65k 237k | output stringclasses 1 value |
|---|---|
val
del local_var_params['kwargs']
# verify the required parameter 'data_view_name' is set
if ('data_view_name' not in local_var_params or
local_var_params['data_view_name'] is None):
raise ApiValueError("Missing the required parameter `data_view_name` when calling `users_get_user_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
if 'data_view_name' in local_var_params:
path_params['dataViewName'] = local_var_params['data_view_name'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'text/json', 'application/xml', 'text/xml']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/{dataViewName}/Users/Configuration', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserConfigurationDetails', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def users_get_user_dashboard(self, data_view_name, username, dashboard_id, **kwargs):  # noqa: E501
    """EXPERIMENTAL: Gets a dashboard in the DataView. # noqa: E501
    EXPERIMENTAL # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_get_user_dashboard(data_view_name, username, dashboard_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str data_view_name: The name of the DataView to act on (required)
    :param str username: (required)
    :param int dashboard_id: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: DashboardDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant to strip the
    # (status code, headers) parts and hand back only the response body.
    kwargs['_return_http_data_only'] = True
    return self.users_get_user_dashboard_with_http_info(
        data_view_name, username, dashboard_id, **kwargs)  # noqa: E501
def users_get_user_dashboard_with_http_info(self, data_view_name, username, dashboard_id, **kwargs):  # noqa: E501
    """EXPERIMENTAL: Gets a dashboard in the DataView. # noqa: E501
    EXPERIMENTAL # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_get_user_dashboard_with_http_info(data_view_name, username, dashboard_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str data_view_name: The name of the DataView to act on (required)
    :param str username: (required)
    :param int dashboard_id: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(DashboardDetail, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the call arguments; ``locals()`` here captures self,
    # the positional parameters and the ``kwargs`` dict itself.
    local_var_params = locals()

    # Whitelist of argument names this endpoint accepts.
    all_params = ['data_view_name', 'username', 'dashboard_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into local_var_params so everything is looked up the same way below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_get_user_dashboard" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'data_view_name' is set
    if ('data_view_name' not in local_var_params or
            local_var_params['data_view_name'] is None):
        raise ApiValueError("Missing the required parameter `data_view_name` when calling `users_get_user_dashboard`")  # noqa: E501
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `users_get_user_dashboard`")  # noqa: E501
    # verify the required parameter 'dashboard_id' is set
    if ('dashboard_id' not in local_var_params or
            local_var_params['dashboard_id'] is None):
        raise ApiValueError("Missing the required parameter `dashboard_id` when calling `users_get_user_dashboard`")  # noqa: E501

    collection_formats = {}

    # Substitutions for the templated URL path below.
    path_params = {}
    if 'data_view_name' in local_var_params:
        path_params['dataViewName'] = local_var_params['data_view_name']  # noqa: E501
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'dashboard_id' in local_var_params:
        path_params['dashboardId'] = local_var_params['dashboard_id']  # noqa: E501

    # This GET sends no query string, extra headers, form fields or body.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/json', 'application/xml', 'text/xml'])  # noqa: E501

    # Authentication setting: this endpoint requires faststats_auth.
    auth_settings = ['faststats_auth']  # noqa: E501

    return self.api_client.call_api(
        '/{dataViewName}/Users/{username}/Dashboards/{dashboardId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DashboardDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def users_get_user_details(self, data_view_name, username, **kwargs):  # noqa: E501
    """Returns details for the given username # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_get_user_details(data_view_name, username, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str data_view_name: The name of the DataView to act on (required)
    :param str username: The username to view the details for (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: UserDetail
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: ask the *_with_http_info variant to strip the
    # (status code, headers) parts and hand back only the response body.
    kwargs['_return_http_data_only'] = True
    return self.users_get_user_details_with_http_info(
        data_view_name, username, **kwargs)  # noqa: E501
def users_get_user_details_with_http_info(self, data_view_name, username, **kwargs):  # noqa: E501
    """Returns details for the given username # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.users_get_user_details_with_http_info(data_view_name, username, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str data_view_name: The name of the DataView to act on (required)
    :param str username: The username to view the details for (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(UserDetail, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of the call arguments; ``locals()`` here captures self,
    # the positional parameters and the ``kwargs`` dict itself.
    local_var_params = locals()

    # Whitelist of argument names this endpoint accepts.
    all_params = ['data_view_name', 'username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten the accepted ones
    # into local_var_params so everything is looked up the same way below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method users_get_user_details" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'data_view_name' is set
    if ('data_view_name' not in local_var_params or
            local_var_params['data_view_name'] is None):
        raise ApiValueError("Missing the required parameter `data_view_name` when calling `users_get_user_details`")  # noqa: E501
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `users_get_user_details`")  # noqa: E501

    collection_formats = {}

    # Substitutions for the templated URL path below.
    path_params = {}
    if 'data_view_name' in local_var_params:
        path_params['dataViewName'] = local_var_params['data_view_name']  # noqa: E501
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    # This GET sends no query string, extra headers, form fields or body.
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/json', 'application/xml', 'text/xml'])  # noqa: E501

    # Authentication setting: this endpoint requires faststats_auth.
    auth_settings = ['faststats_auth']  # noqa: E501

    return self.api_client.call_api(
        '/{dataViewName}/Users/{username}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def users_get_user_details_list(self, data_view_name, **kwargs): # noqa: E501
"""Returns all users in the system. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.users_get_user_details_list(data_view_name, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str data_view_name: The name of the DataView to act on (required)
:param str filter: Filter the list of items using a simple expression language. The available list of fields are Username, EmailAddress, Firstname, Surname, UserDisabledDate
:param str order_by: Order the items by a given field (in ascending order unless the field is preceeded by a \"-\" character). The available list of fields are Username, EmailAddress, Firstname, Surname, UserDisabledDate
:param int offset: The number of items to skip in the (potentially filtered) result set before returning subsequent items.
:param int count: The maximum number of items to show from the (potentially filtered) result set.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is | |
if clusters[j] - 1 in [v[1] for v in j_vecs]:
clusters[j] = j_vecs[0][1] + 1
# Calcluate the new scale and the new Vector direction.
vec_scale = largest_distances[j_vecs[0][1]] / __vector_length(
[merged_barycenters[i][0] - x_new_bary, merged_barycenters[i][1] - y_new_bary])
vectors[j_vecs[0][1]] = [vec_scale * (merged_barycenters[i][0] - x_new_bary),
vec_scale * (merged_barycenters[i][1] - y_new_bary)]
if plot:
plot_vectors[j_vecs[0][1]] = (x_new_bary, y_new_bary,
vectors[j_vecs[0][1]][0], vectors[j_vecs[0][1]][1])
# Set merged vectors to None.
for j in range(1, len(j_vecs)):
vectors[j_vecs[j][1]] = None
if plot:
plot_vectors[j_vecs[j][1]] = None
# --- Dimension Calculation ---
dimensions = []
if dimensionfactor:
for i in range(max(clusters)):
# Some clusters might be empty, because of the merging process.
empty = True
pixels = np.zeros(len(array))
for j, pxl in enumerate(stick_out_pixels):
if i == clusters[j] - 1:
pixels[pxl[1] * slicesize + pxl[0]] = 2
empty = False
if empty:
continue
# Calculate the width of the cluster in dependence of the vector rotation.
theta = __vector_shift_in_radians([0, 1], vectors[i])
pixels_x = []
pixels_y = []
for y in range(int(len(pixels) / slicesize)):
for x in range(slicesize):
if pixels[y * slicesize + x]:
px = x * cos(theta) - y * sin(theta)
# Rotation of y is not necessary, but is needed for debugging.
py = x * sin(theta) + y * cos(theta)
pixels_x.append(px)
pixels_y.append(py)
# Calculate the variance in the x direction
sum_x = 0
sum_xx = 0
for j in range(len(pixels_x)):
sum_x += pixels_x[j]
sum_xx += pixels_x[j] * pixels_x[j]
x_bary_rotation = sum_x / len(pixels_x)
var_x = sum_xx / len(pixels_x) - x_bary_rotation * x_bary_rotation
discriminant = sqrt(4 * var_x * var_x)
width = dimensionfactor * 2 * sqrt(((var_x + var_x) + discriminant) / 2.0)
dimensions.append([__vector_length(vectors[i]), width])
if merge:
# Remove objects in vector lists, which are equal to None.
# Objects which are None got deleted in the vector and cluster merging process above.
# The deletion must be down here, because it changes the indices of the vectors.
# The correct vector indices are needed for the dimension calculations
vectors = [x for x in vectors if x is not None]
if plot:
plot_vectors = [x for x in plot_vectors if x is not None]
# Calculate the mean and the variance of vector lengths.
mean = sum([__vector_length(vec) for vec in vectors]) / float(len(vectors))
variance = sum([pow(__vector_length(vec) - mean, 2) for vec in vectors]) \
/ float(len(vectors))
# Find the number of vectors which are nearly in a straight line.
# In other words: Vectors which are nearly parallel to each other.
angles = []
nearly_straight_angles = 0
for i, vec0 in enumerate(vectors):
for j, vec1 in enumerate(vectors):
if i == j:
break
angle = round(__degree(vec0, vec1), 2)
if angle >= mindegree:
nearly_straight_angles += 1
angles.append(angle)
if plot:
if plot[0]:
if plot[0] == 1:
plt.scatter(stick_out_pixels[:, 0], stick_out_pixels[:, 1], c=clusters, cmap="jet")
elif plot[0] == 2:
plt.scatter(stick_out_pixels[:, 0], stick_out_pixels[:, 1], c=old_clusters, cmap="jet")
else:
plt.scatter(stick_out_pixels[:, 0], stick_out_pixels[:, 1], c="gray")
plt.scatter(remaining_pixels[:, 0], remaining_pixels[:, 1], c="gray")
ax = plt.gca()
ax.invert_yaxis()
ax.set_aspect(1)
if plot[1] == 1:
a, b, c, d = zip(*plot_vectors)
plt.quiver(a, b, c, d, angles='xy', scale_units='xy', scale=1, width=0.01)
elif plot[1] == 2 and dimensionfactor:
for i, v in enumerate(vectors):
theta = __vector_shift_in_radians([0, 1], v)
trans = Affine2D().rotate_around(x_new_bary, y_new_bary, -theta)
hexagon_1 = Rectangle((x_new_bary, y_new_bary),
dimensions[i][1] / 2.0, dimensions[i][0],
fill=True,
edgecolor="black",
alpha=0.3,
linewidth=1)
hexagon_2 = Rectangle((x_new_bary, y_new_bary),
-dimensions[i][1] / 2.0, dimensions[i][0],
fill=True,
edgecolor="black",
alpha=0.3,
linewidth=1)
hexagon_1.set_transform(trans + ax.transData)
hexagon_2.set_transform(trans + ax.transData)
ax.add_patch(hexagon_1)
ax.add_patch(hexagon_2)
if plot[2]:
cluster_barycenters = np.array(cluster_barycenters)
plt.scatter(cluster_barycenters[:, 0], cluster_barycenters[:, 1], c="black")
plt.scatter(x_bary, y_bary, c="red")
plt.scatter(x_new_bary, y_new_bary, c="black")
plt.show()
return number_of_stick_outs, \
nearly_straight_angles, \
angles, \
round(variance, 3), \
dimensions
def __mse(array_a, array_b):
    """
    Mean Squared Error between two arrays of identical shape.

    Both inputs are cast to float first so that unsigned-integer image
    data cannot wrap around when subtracted.

    Returns:
        The mean of the squared element-wise differences.  The lower the
        error, the more similar the two images are.
    """
    diff = array_a.astype("float") - array_b.astype("float")
    error = np.sum(diff ** 2)
    # Divide by the number of image pixels.  ``size`` counts every element,
    # which equals ``len`` for the flat 1-d arrays used in this module but
    # also stays correct should a 2-d image array ever be passed in
    # (``len`` would only count rows there).
    return error / float(array_a.size)
def __polygon_intersection(verts, x, y):
    """
    Point-in-polygon test by ray casting (crossing-number parity).

    Algorithm from:
    <NAME> (WRF)
    https://wrf.ecse.rpi.edu//Research/Short_Notes/pnpoly.html#The Method
    """
    inside = False
    count = len(verts)
    for i in range(count):
        # Previous vertex, wrapping around so the polygon is closed.
        j = (i + count - 1) % count
        straddles = (verts[i][1] > y) != (verts[j][1] > y)
        if straddles:
            # x-coordinate where edge (j -> i) crosses the horizontal at y.
            crossing = (verts[j][0] - verts[i][0]) * (y - verts[i][1]) \
                / (verts[j][1] - verts[i][1]) + verts[i][0]
            if x < crossing:
                inside = not inside
    return inside
def __polygon_rotation(verts, alpha, point):
    """
    Rotates vertices by *alpha* about the origin and translates them by
    *point* (this rotates around *point* when the vertices are given
    relative to that point, as the callers in this module do).

    Parameter: List of 2d vertices represented as tuple (x,y)
               The angle in radians
               A 2d point (x,y) as Tuple, List or Array
    Returns:   The rotated object as a list of (x, y) tuples
    """
    # Evaluate the trigonometry once instead of per vertex.
    cos_alpha, sin_alpha = cos(alpha), sin(alpha)
    offset_x, offset_y = point[0], point[1]
    return [(v[0] * cos_alpha - sin_alpha * v[1] + offset_x,
             v[0] * sin_alpha + cos_alpha * v[1] + offset_y)
            for v in verts]
def principal_components(array, dimensionfactor=1.0, plot=None, slicesize=SLICE_SIZE):
"""
Calculates the Principal Components of an Optical Array.
"""
sum_x = 0.0
sum_y = 0.0
sum_xx = 0.0
sum_yy = 0.0
sum_xy = 0.0
number_pix = 0.0
for y in range(int(len(array) / slicesize)):
for x in range(slicesize):
if array[y * slicesize + x]:
sum_x += x
sum_y += y
sum_xx += x * x
sum_yy += y * y
sum_xy += x * y
number_pix += 1
if number_pix == 0:
return
x_bary = sum_x / number_pix
y_bary = sum_y / number_pix
# Calculating the variance and the covariance.
var_x = sum_xx / number_pix - x_bary * x_bary
var_y = sum_yy / number_pix - y_bary * y_bary
cov_xy = sum_xy / number_pix - x_bary * y_bary
discriminant = (var_x - var_y) * (var_x - var_y) + 4 * cov_xy * cov_xy
sqrt_discr = sqrt(discriminant)
lambda_plus = ((var_x + var_y) + sqrt_discr) / 2.0
lambda_minus = ((var_x + var_y) - sqrt_discr) / 2.0
# --- Eigenvectors ---
x_plus = var_x + cov_xy - lambda_minus
y_plus = var_y + cov_xy - lambda_minus
x_minus = var_x + cov_xy - lambda_plus
y_minus = var_y + cov_xy - lambda_plus
# Normalizing the vectors.
denom_plus = sqrt(x_plus * x_plus + y_plus * y_plus)
denom_minus = sqrt(x_minus * x_minus + y_minus * y_minus)
# Computing the normalized vector, which is parallel to the
# longest axis of a particle image.
if denom_plus:
x_parallel = x_plus / denom_plus
y_parallel = y_plus / denom_plus
else:
x_parallel = x_plus
y_parallel = y_plus
# Computing the normalized vector, which is corresponding the
# Normal of a particle image.
if denom_minus:
x_normal = x_minus / denom_minus
y_normal = y_minus / denom_minus
else:
x_normal = x_minus
y_normal = y_minus
if lambda_plus < 0:
lambda_plus = 0
if lambda_minus < 0:
lambda_minus = 0
major_axis = dimensionfactor * 2 * sqrt(lambda_plus)
minor_axis = dimensionfactor * 2 * sqrt(lambda_minus)
# Computing the rotation of the principal components.
if x_parallel:
alpha = atan(y_parallel / x_parallel)
else:
alpha = radians(90.0)
# --- Polygon ------------------------------------------------------------------------------------------------------
"""
scale_1 = 0.75
scale_2 = 1.0
scale_3 = 1.0
scale_4 = 0.5
vert_1 = (-major_axis * scale_1, minor_axis * scale_2)
vert_2 = ( major_axis * scale_1, minor_axis * scale_2)
vert_3 = ( major_axis * scale_1, -minor_axis * scale_2)
vert_4 = (-major_axis * scale_1, -minor_axis * scale_2)
vert_5 = (-major_axis * scale_3, minor_axis * scale_4)
vert_6 = ( major_axis * scale_3, minor_axis * scale_4)
vert_7 = ( major_axis * scale_3, -minor_axis * scale_4)
vert_8 = (-major_axis * scale_3, -minor_axis * scale_4)
polygon = [vert_5, vert_1, vert_2, vert_6,
vert_7, vert_3, vert_4, vert_8]
polygon = __polygon_rotation(object, alpha, (x_bary, y_bary))
"""
vert_1 = (-major_axis, minor_axis)
vert_2 = (major_axis, minor_axis)
vert_3 = (major_axis, -minor_axis)
vert_4 = (-major_axis, -minor_axis)
polygon = [vert_1, vert_2, vert_3, vert_4]
polygon = __polygon_rotation(polygon, alpha, (x_bary, y_bary))
cos_alpha = cos(alpha)
sin_alpha = sin(alpha)
# ToDo: perhaps a return would be better here
if minor_axis == 0:
minor_axis = 1e-8
if major_axis == 0:
major_axis = 1e-8
b = minor_axis * minor_axis
a = major_axis * major_axis
polygon_array = np.zeros(len(array), "int")
ellipse_array = np.zeros(len(array), "int")
one_color_array = np.zeros(len(array), "int")
polygon_hits = 0
ellipse_hits = 0
polygon_misses = 0
ellipse_misses | |
############################################################################
# Copyright (C) by gempa GmbH, <NAME> #
# #
# You can redistribute and/or modify this program under the #
# terms of the SeisComP Public License. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# SeisComP Public License for more details. #
############################################################################
from __future__ import division, print_function
import os
import sys
import glob
import getpass
try:
# Python 2.5
from xml.etree import ElementTree
from xml.parsers.expat import ExpatError as ParseError
except ImportError:
from elementtree import ElementTree
from xml.parsers.expat import ExpatError as ParseError
# Python version depended string conversion
if sys.version_info[0] < 3:
py3input = raw_input #pylint: disable=E0602
else:
py3input = input
from seiscomp3 import Config
def tagname(element):
    """Return the element's tag with any leading '{namespace}' prefix removed."""
    # rpartition yields ('', '', tag) when no '}' is present, so the last
    # component is always the bare tag name (and '' for an empty tag).
    return element.tag.rpartition("}")[2]
def oneliner(txt):
    """Collapse *txt* onto a single line: trim it and drop embedded newlines."""
    trimmed = txt.strip()
    return trimmed.replace("\n", "")
def block(txt, width=80):
    """
    Re-flow *txt* into a list of stripped lines at most *width* chars wide.

    Existing line breaks that fall within *width* are kept; longer runs
    are broken at the last space that still fits, and a word longer than
    *width* is emitted unbroken.
    """
    lines = [l.strip() for l in txt.strip().replace("\r", "").split('\n')]
    line = "\n".join(lines)
    current = 0
    lines = []
    while current < len(line):
        # Honour an existing line break if it occurs within the width.
        end = line.find('\n', current)
        if (end == -1) or (end - current > width):
            if len(line) - current > width:
                # Break at the last space inside the window ...
                end = line.rfind(' ', current, current+width)
                if end == -1:
                    # ... or, for an over-long word, at the next space at all ...
                    end = line.find(' ', current)
                if end == -1:
                    # ... or take everything that is left.
                    end = len(line)
            else:
                # The remainder fits on a single line.
                end = len(line)
        lines.append(line[current:end].strip())
        # Skip over the break character itself.
        current = end + 1
    return lines
class SetupNode:
    """
    One node of the linked setup-question tree.

    ``parent``/``child`` link tree levels, ``prev``/``next`` link
    siblings, and ``input`` carries the associated Input (None for the
    root node).  The remaining attributes are filled in while the tree
    is built and while the user answers the prompts.
    """

    def __init__(self, parent, left, right, inp):
        # Tree links.
        self.parent = parent
        self.child = None
        self.activeChild = None
        # Sibling links.
        self.prev = left
        self.next = right
        # Payload and bookkeeping.
        self.input = inp
        self.modname = ""
        self.groupname = ""
        self.path = ""
        self.value = ""
        self.lastInGroup = False
class Option:
    """
    Setup input option wrapper.
    """

    def __init__(self, value):
        self.desc = None        # wrapped description lines, set later if present
        self.inputs = []        # nested Input objects attached to this option
        self.value = value      # option value as given in the description XML
class Input:
    """
    Setup input wrapper.
    """

    def __init__(self, name, t, default_value=None):
        self.name = name                    # input name from the XML
        self.type = t                       # declared type (e.g. "boolean") or None
        self.default_value = default_value  # preset value, if any
        # Filled in later from the XML children/attributes.
        self.text = None                    # one-line prompt text
        self.desc = None                    # wrapped help text lines
        self.echo = None                    # e.g. "password" to hide input
        self.options = []                   # list of Option choices
def dumpTree(cfg, node):
    """
    Recursively write the collected setup values into *cfg*.

    A node that carries an input stores its value under
    "<modname>.<path>".  Traversal descends into the chosen child first,
    then continues with the following sibling unless this node closes
    its group.
    """
    if node.input:
        key = node.modname + "." + node.path
        cfg.setString(key, node.value)
    chosen = node.activeChild
    if chosen:
        dumpTree(cfg, chosen)
    sibling = node.next
    if sibling is not None and not node.lastInGroup:
        dumpTree(cfg, sibling)
class Simple:
"""
Simple console setup handler that parses all description xml files
and extracts the setup part. It asks for all available setting line
by line and passes the resulting configuration back which is then
passed to all init modules that have a setup method.
"""
def __init__(self):
    """Prepare an empty setup tree and the interactive navigation state."""
    # Root of the question tree; populated by addGroups()/addInputs().
    self.setupTree = SetupNode(None, None, None, None)
    # Stack of previously answered nodes, used by the ".back" command.
    self.paths = []
    # Node whose input is currently being asked for.
    self.currentNode = None
def run(self, env):
    """
    Entry point of the console setup.

    Scans all description XMLs under $SEISCOMP_ROOT/etc/descriptions,
    collects the <setup><group> sections of modules and plugins, asks
    the user for every input and returns the resulting Config object.
    Raises Exception when the user aborts or a description is invalid.
    """
    # Collect every module description XML shipped with the installation.
    desc_pattern = os.path.join(
        env.SEISCOMP_ROOT, "etc", "descriptions", "*.xml")
    xmls = glob.glob(desc_pattern)
    # Maps module name -> list of <group> elements gathered for it.
    setup_groups = {}
    for f in xmls:
        try:
            tree = ElementTree.parse(f)
        except ParseError as e:
            # A broken file is reported but does not abort the setup.
            sys.stderr.write("%s: parsing XML failed: %s\n" % (f, e))
            continue
        root = tree.getroot()
        if tagname(root) != "seiscomp":
            sys.stderr.write(
                "%s: wrong root tag, expected 'seiscomp'\n" % f)
            continue
        # Read all modules
        mods = tree.findall("module")
        for mod in mods:
            modname = mod.get('name')
            if not modname:
                sys.stderr.write("%s: skipping module without name\n" % f)
                continue
            # Two files must not define the same module.
            if modname in setup_groups:
                raise Exception(
                    "%s: duplicate module name: %s" % (f, modname))
            setup = mod.find("setup")
            if setup is None:
                continue
            groups = setup.findall("group")
            if len(groups) == 0:
                continue
            setup_groups[modname] = groups
        # Read all plugin's
        plugins = tree.findall("plugin")
        for plugin in plugins:
            try:
                modname = plugin.find('extends').text.strip()
            except:
                raise Exception("%s: plugin does not define 'extends'" % f)
            if modname.find('\n') >= 0:
                raise Exception(
                    "%s: wrong module name in plugin.extends: no newlines allowed" % f)
            if not modname:
                sys.stderr.write("%s: skipping module without name\n" % f)
                continue
            setup = plugin.find("setup")
            if setup is None:
                continue
            groups = setup.findall("group")
            if len(groups) == 0:
                continue
            # Plugin groups extend the module they declare in <extends>.
            if modname in setup_groups:
                setup_groups[modname] += groups
            else:
                setup_groups[modname] = groups
    # Build the question tree, module by module in sorted order.
    for name, groups in sorted(setup_groups.items()):
        self.addGroups(self.setupTree, name, groups)
    # Always descend to the first child (if available)
    self.setupTree.activeChild = self.setupTree.child
    self.currentNode = self.setupTree.activeChild
    sys.stdout.write('''
====================================================================
SeisComP setup
====================================================================
This initializes the configuration of your installation.
If you already made adjustments to the configuration files
be warned that this setup will overwrite existing parameters
with default values. This is not a configurator for all
options of your setup but helps to setup initial standard values.
--------------------------------------------------------------------
Hint: Entered values starting with a dot (.) are handled
as commands. Available commands are:
quit: Quit setup without modification to your configuration.
back: Go back to the previous parameter.
help: Show help about the current parameter (if available).
If you need to enter a value with a leading dot, escape it
with backslash, e.g. "\.value".
--------------------------------------------------------------------
''')
    # Interactive question loop; StopIteration signals a user abort.
    try:
        self.fillTree()
    except StopIteration:
        raise Exception("aborted by user")
    # Serialize the answered tree into a Config object for the caller.
    cfg = Config.Config()
    dumpTree(cfg, self.setupTree)
    return cfg
def addGroups(self, node, modname, groups):
    """Attach the inputs of every <group> element of *modname* below *node*."""
    for grp in groups:
        # The group name doubles as the configuration-key prefix.
        self.addInputs(None, node, modname, grp.get('name'), grp,
                       grp.get('name', "") + ".")
def addInputs(self, obj, parent, modname, group, xml, prefix):
    """
    Append every <input> child of *xml* as a SetupNode below *parent*.

    *obj* is the enclosing Option (or None at group level) whose
    ``inputs`` list is also filled; *prefix* is the dotted key prefix
    accumulated so far.  Recurses into each input's <option> children.
    """
    last = parent.child
    # find the last child and add the current list to it
    while not last is None:
        if last.next is None:
            break
        last = last.next
    inputs = xml.findall("input")
    for inp in inputs:
        name = inp.get('name')
        if not name:
            raise Exception("%s: no name defined" % prefix)
        input_ = Input(name, inp.get('type'), inp.get('default'))
        # Prompt text falls back to the input name when <text> is absent.
        try:
            input_.text = oneliner(inp.find('text').text)
        except:
            input_.text = input_.name
        # Optional wrapped help text.
        try:
            input_.desc = block(inp.find('description').text)
        except:
            pass
        input_.echo = inp.get('echo')
        if obj:
            obj.inputs.append(input_)
        # Link the new node as the last sibling under parent.
        node = SetupNode(parent, last, None, input_)
        node.path = prefix + input_.name
        node.value = input_.default_value
        node.modname = modname
        node.groupname = group
        if not last is None:
            last.next = node
        last = node
        if parent.child is None:
            parent.child = last
        # Recurse into the choices of this input, if any.
        opts = inp.findall("option")
        for opt in opts:
            value = opt.get('value')
            if not value:
                raise Exception("%s: option without value" % prefix)
            option = Option(value)
            try:
                option.desc = block(opt.find('description').text, 74)
            except:
                pass
            input_.options.append(option)
            self.addInputs(option, node, modname,
                           group, opt, node.path + ".")
    # Mark the end of an option's input list so dumpTree() does not
    # continue into the next group's siblings.
    if not obj is None and not last is None:
        last.lastInGroup = True
def fillTree(self):
    """
    Ask the user for every setup parameter, walking the question tree.

    Runs until the user finishes (returns) or quits (raises
    StopIteration).  Handles the ".help"/".back"/".quit" commands,
    numbered choice lists, hidden password input and yes/no booleans.
    """
    while True:
        # End of the tree: offer the proceed/back/quit menu.
        if not self.currentNode:
            sys.stdout.write("\nFinished setup\n--------------\n\n")
            sys.stdout.write("P) Proceed to apply configuration\n")
            sys.stdout.write("B) Back to last parameter\n")
            sys.stdout.write("Q) Quit without changes\n")
            value = py3input('Command? [P]: ').upper()
            if value == "Q":
                raise StopIteration()
            if value == "P" or not value:
                sys.stdout.write("\nRunning setup\n-------------\n\n")
                return
            if value == "B":
                self.prevStep()
                continue
            sys.stdout.write("\nEnter either p, b or q\n")
            continue
        # Structural nodes without an input are skipped silently.
        if not self.currentNode.input:
            self.nextStep()
            continue
        default_value = self.valueToString(self.currentNode)
        isChoice = False
        isPassword = False
        if self.currentNode.input.echo == "password":
            isPassword = True
        node_text = default_value
        prompt = self.currentNode.input.text
        if isPassword:
            # Never echo the stored password in the prompt.
            node_text = '*' * len(node_text)
            prompt += " (input not echoed)"
        # Non-boolean inputs with options become a numbered choice list.
        if (not self.currentNode.input.type or self.currentNode.input.type != "boolean") \
                and len(self.currentNode.input.options) > 0:
            idx = 0
            def_idx = 0
            for opt in self.currentNode.input.options:
                sys.stdout.write("%2d) %s\n" % (idx, opt.value))
                # NOTE(review): opt.desc may be None when the option has no
                # <description>; this loop assumes it is always a list.
                for l in opt.desc:
                    sys.stdout.write(" %s\n" % l)
                if default_value == opt.value:
                    def_idx = idx
                idx += 1
            isChoice = True
            prompt += " [%d]: " % def_idx
        else:
            prompt += " [%s]: " % node_text
        if self.currentNode.input.echo == "password":
            value = getpass.getpass(prompt)
        else:
            value = py3input(prompt)
        if not value:
            # Empty input keeps the current/default value.
            value = default_value
        elif value == ".help":
            if self.currentNode.input.desc:
                sys.stdout.write("\n%s\n\n" %
                                 "\n".join(self.currentNode.input.desc))
            else:
                sys.stdout.write("\nSorry, no help available.\n\n")
            continue
        elif value == ".back":
            self.prevStep()
            continue
        elif value == ".quit":
            raise StopIteration()
        elif value.startswith("."):
            sys.stdout.write("Unknown command. Values starting with '.' are handled has commands such as\n"
                             "'.help', '.quit' or '.back'. To use a leading dot in a value, escape it with '\'\n"
                             "e.g. '\.color'\n")
            continue
        else:
            # Replace leading \. with .
            if value.startswith('\\.'):
                value = value[1:]
        if isChoice:
            # A choice answer is the option index, validated against range.
            try:
                idx = int(value)
            except:
                idx = -1
            if idx < 0 or idx >= len(self.currentNode.input.options):
                sys.stdout.write("\nEnter a number between 0 and %d\n\n" % (
                    len(self.currentNode.input.options)-1))
                continue
            value = self.currentNode.input.options[idx].value
        # Booleans are entered as yes/no but stored as "true"/"false".
        if self.currentNode.input.type and self.currentNode.input.type == "boolean":
            if not value in ["yes", "no"]:
                sys.stdout.write("Please enter 'yes' or 'no'\n")
                continue
            if value == "yes":
                value = "true"
            else:
                value = "false"
        self.currentNode.value = value
        self.nextStep()
@staticmethod
def valueToString(node):
if not node.input.type:
if node.value is None:
return ""
return node.value
if node.input.type == "boolean":
if node.value == "true":
return "yes"
if node.value == "false":
return "no"
return "yes"
if node.value is None:
return ""
return node.value
def prevStep(self):
if len(self.paths) == 0:
sys.stdout.write("No previous step available\n")
return
self.currentNode = self.paths.pop()
def nextStep(self):
self.currentNode.activeChild = None
step = | |
Ensure that an attribute that maps to None that is not explicitly
# ignored in configuration is implicitly ignored without triggering
# an error.
conf = self.get_config(CONF.identity.default_domain_id)
conf.ldap.user_attribute_ignore = ['enabled', 'email',
'tenants', 'tenantId']
self.reload_backends(CONF.identity.default_domain_id)
user = {'id': u'fäké1',
'name': u'fäké1',
'password': u'<PASSWORD>',
'domain_id': CONF.identity.default_domain_id,
}
user_ref = self.identity_api.create_user(u'fäké1', user)
# If this doesn't raise, then the test is successful.
self.identity_api.get_user(user_ref['id'])
def test_update_user_name(self):
    """A user's name cannot be changed through the LDAP driver."""
    # The shared base-class test expects the rename to succeed; under
    # LDAP it must raise Conflict instead.
    base = super(BaseLDAPIdentity, self)
    self.assertRaises(exception.Conflict, base.test_update_user_name)
    def test_arbitrary_attributes_are_returned_from_create_user(self):
        # The arbitrary-attribute tests inherited from the base class do
        # not apply to the LDAP backend, so each is explicitly skipped.
        self.skipTest("Using arbitrary attributes doesn't work under LDAP")
    def test_arbitrary_attributes_are_returned_from_get_user(self):
        self.skipTest("Using arbitrary attributes doesn't work under LDAP")
    def test_new_arbitrary_attributes_are_returned_from_update_user(self):
        self.skipTest("Using arbitrary attributes doesn't work under LDAP")
    def test_updated_arbitrary_attributes_are_returned_from_update_user(self):
        self.skipTest("Using arbitrary attributes doesn't work under LDAP")
def test_user_id_comma_grants(self):
"""Even if the user has a , in their ID, can get user and group grants.
"""
# Create a user with a , in their ID
# NOTE(blk-u): the DN for this user is hard-coded in fakeldap!
user_id = u'<NAME>'
user = {
'id': user_id,
'name': self.getUniqueString(),
'password': <PASSWORD>(),
'domain_id': CONF.identity.default_domain_id,
}
self.identity_api.create_user(user_id, user)
# Grant the user a role on a project.
role_id = 'member'
project_id = self.tenant_baz['id']
self.assignment_api.create_grant(role_id, user_id=user_id,
project_id=project_id)
role_ref = self.assignment_api.get_grant(role_id, user_id=user_id,
project_id=project_id)
self.assertEqual(role_id, role_ref['id'])
def test_utf8_conversion(self):
value_unicode = u'fäké1'
value_utf8 = value_unicode.encode('utf-8')
result_utf8 = common_ldap_core.utf8_encode(value_unicode)
self.assertEqual(value_utf8, result_utf8)
result_utf8 = common_ldap_core.utf8_encode(value_utf8)
self.assertEqual(value_utf8, result_utf8)
result_unicode = common_ldap_core.utf8_decode(value_utf8)
self.assertEqual(value_unicode, result_unicode)
result_unicode = common_ldap_core.utf8_decode(value_unicode)
self.assertEqual(value_unicode, result_unicode)
self.assertRaises(TypeError,
common_ldap_core.utf8_encode,
100)
result_unicode = common_ldap_core.utf8_decode(100)
self.assertEqual(u'100', result_unicode)
class LDAPIdentity(BaseLDAPIdentity, tests.TestCase):
    def setUp(self):
        """Prepare a clean fake-LDAP backend plus mox stubs for each test."""
        super(LDAPIdentity, self).setUp()
        # Start from an empty fake LDAP store so the fixtures load cleanly.
        self.clear_database()
        common_ldap.register_handler('fake://', fakeldap.FakeLdap)
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # mox/stubs are used by tests that assert on FakeLdap construction
        # (e.g. the chase_referrals and no-bind connection tests below).
        fixture = self.useFixture(moxstubout.MoxStubout())
        self.mox = fixture.mox
        self.stubs = fixture.stubs
def test_configurable_allowed_project_actions(self):
tenant = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True}
self.assignment_api.create_project(u'fäké1', tenant)
tenant_ref = self.assignment_api.get_project(u'fäké1')
self.assertEqual(u'fäké1', tenant_ref['id'])
tenant['enabled'] = False
self.assignment_api.update_project(u'fäké1', tenant)
self.assignment_api.delete_project(u'fäké1')
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
u'fäké1')
def test_configurable_subtree_delete(self):
self.config_fixture.config(group='ldap', allow_subtree_delete=True)
self.load_backends()
project1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id}
self.assignment_api.create_project(project1['id'], project1)
role1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
self.assignment_api.create_role(role1['id'], role1)
user1 = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex,
'domain_id': CONF.identity.default_domain_id,
'password': <PASSWORD>,
'enabled': True}
self.identity_api.create_user(user1['id'], user1)
self.assignment_api.add_role_to_user_and_project(
user_id=user1['id'],
tenant_id=project1['id'],
role_id=role1['id'])
self.assignment_api.delete_project(project1['id'])
self.assertRaises(exception.ProjectNotFound,
self.assignment_api.get_project,
project1['id'])
self.assignment_api.create_project(project1['id'], project1)
list = self.assignment_api.get_roles_for_user_and_project(
user1['id'],
project1['id'])
self.assertEqual(0, len(list))
    def test_configurable_forbidden_project_actions(self):
        """Project create/update/delete raise when disabled in config."""
        CONF.ldap.tenant_allow_create = False
        CONF.ldap.tenant_allow_update = False
        CONF.ldap.tenant_allow_delete = False
        self.load_backends()
        tenant = {'id': u'fäké1', 'name': u'fäké1'}
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.create_project,
                          u'fäké1',
                          tenant)
        # Updating an existing project is equally forbidden.
        self.tenant_bar['enabled'] = False
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.update_project,
                          self.tenant_bar['id'],
                          self.tenant_bar)
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.delete_project,
                          self.tenant_bar['id'])
def test_configurable_allowed_role_actions(self):
role = {'id': u'fäké1', 'name': u'fäké1'}
self.assignment_api.create_role(u'fäké1', role)
role_ref = self.assignment_api.get_role(u'fäké1')
self.assertEqual(u'fäké1', role_ref['id'])
role['name'] = u'fäké2'
self.assignment_api.update_role(u'fäké1', role)
self.assignment_api.delete_role(u'fäké1')
self.assertRaises(exception.RoleNotFound,
self.assignment_api.get_role,
u'fäké1')
    def test_configurable_forbidden_role_actions(self):
        """Role create/update/delete raise when disabled in config."""
        CONF.ldap.role_allow_create = False
        CONF.ldap.role_allow_update = False
        CONF.ldap.role_allow_delete = False
        self.load_backends()
        role = {'id': uuid.uuid4().hex, 'name': uuid.uuid4().hex}
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.create_role,
                          role['id'],
                          role)
        # Updating an existing role is equally forbidden.
        self.role_member['name'] = uuid.uuid4().hex
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.update_role,
                          self.role_member['id'],
                          self.role_member)
        self.assertRaises(exception.ForbiddenAction,
                          self.assignment_api.delete_role,
                          self.role_member['id'])
    def test_project_filter(self):
        """A non-matching tenant_filter hides projects from lookups."""
        tenant_ref = self.assignment_api.get_project(self.tenant_bar['id'])
        self.assertDictEqual(tenant_ref, self.tenant_bar)
        CONF.ldap.tenant_filter = '(CN=DOES_NOT_MATCH)'
        self.load_backends()
        # NOTE(morganfainberg): CONF.ldap.tenant_filter will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_role.invalidate(self.assignment_api,
                                                self.role_member['id'])
        # Roles are unaffected by the tenant filter, so this still succeeds.
        self.assignment_api.get_role(self.role_member['id'])
        self.assignment_api.get_project.invalidate(self.assignment_api,
                                                   self.tenant_bar['id'])
        self.assertRaises(exception.ProjectNotFound,
                          self.assignment_api.get_project,
                          self.tenant_bar['id'])
    def test_role_filter(self):
        """A non-matching role_filter hides roles from lookups."""
        role_ref = self.assignment_api.get_role(self.role_member['id'])
        self.assertDictEqual(role_ref, self.role_member)
        CONF.ldap.role_filter = '(CN=DOES_NOT_MATCH)'
        self.load_backends()
        # NOTE(morganfainberg): CONF.ldap.role_filter will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_role.invalidate(self.assignment_api,
                                                self.role_member['id'])
        self.assertRaises(exception.RoleNotFound,
                          self.assignment_api.get_role,
                          self.role_member['id'])
    def test_dumb_member(self):
        """The configured dumb member must not be exposed as a real user."""
        self.config_fixture.config(group='ldap', use_dumb_member=True)
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # Derive the dumb member's user id from its configured DN and make
        # sure the identity API refuses to return it.
        dumb_id = common_ldap.BaseLdap._dn_to_id(CONF.ldap.dumb_member)
        self.assertRaises(exception.UserNotFound,
                          self.identity_api.get_user,
                          dumb_id)
    def test_project_attribute_mapping(self):
        """Project fields follow the configured LDAP attribute mapping,
        including when name/description are swapped."""
        CONF.ldap.tenant_name_attribute = 'ou'
        CONF.ldap.tenant_desc_attribute = 'description'
        CONF.ldap.tenant_enabled_attribute = 'enabled'
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # NOTE(morganfainberg): CONF.ldap.tenant_name_attribute,
        # CONF.ldap.tenant_desc_attribute, and
        # CONF.ldap.tenant_enabled_attribute will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_project.invalidate(self.assignment_api,
                                                   self.tenant_baz['id'])
        tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
        self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
        self.assertEqual(self.tenant_baz['name'], tenant_ref['name'])
        self.assertEqual(
            self.tenant_baz['description'],
            tenant_ref['description'])
        self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
        # Now swap the attributes backing name and description and verify
        # the returned fields swap accordingly.
        CONF.ldap.tenant_name_attribute = 'description'
        CONF.ldap.tenant_desc_attribute = 'ou'
        self.load_backends()
        # NOTE(morganfainberg): CONF.ldap.tenant_name_attribute,
        # CONF.ldap.tenant_desc_attribute, and
        # CONF.ldap.tenant_enabled_attribute will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_project.invalidate(self.assignment_api,
                                                   self.tenant_baz['id'])
        tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
        self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
        self.assertEqual(self.tenant_baz['description'], tenant_ref['name'])
        self.assertEqual(self.tenant_baz['name'], tenant_ref['description'])
        self.assertEqual(self.tenant_baz['enabled'], tenant_ref['enabled'])
    def test_project_attribute_ignore(self):
        """Attributes listed in tenant_attribute_ignore are omitted."""
        CONF.ldap.tenant_attribute_ignore = ['name',
                                             'description',
                                             'enabled']
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # NOTE(morganfainberg): CONF.ldap.tenant_attribute_ignore will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change configs values in tests
        # that could affect what the drivers would return up to the manager.
        # This solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_project.invalidate(self.assignment_api,
                                                   self.tenant_baz['id'])
        tenant_ref = self.assignment_api.get_project(self.tenant_baz['id'])
        self.assertEqual(self.tenant_baz['id'], tenant_ref['id'])
        self.assertNotIn('name', tenant_ref)
        self.assertNotIn('description', tenant_ref)
        self.assertNotIn('enabled', tenant_ref)
    def test_role_attribute_mapping(self):
        """Role name follows role_name_attribute; an unmapped attribute
        leaves 'name' absent from the returned role."""
        CONF.ldap.role_name_attribute = 'ou'
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_role.invalidate(self.assignment_api,
                                                self.role_member['id'])
        role_ref = self.assignment_api.get_role(self.role_member['id'])
        self.assertEqual(self.role_member['id'], role_ref['id'])
        self.assertEqual(self.role_member['name'], role_ref['name'])
        # Point the name mapping at an attribute the fixture never set.
        CONF.ldap.role_name_attribute = 'sn'
        self.load_backends()
        # NOTE(morganfainberg): CONF.ldap.role_name_attribute will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_role.invalidate(self.assignment_api,
                                                self.role_member['id'])
        role_ref = self.assignment_api.get_role(self.role_member['id'])
        self.assertEqual(self.role_member['id'], role_ref['id'])
        self.assertNotIn('name', role_ref)
    def test_role_attribute_ignore(self):
        """Attributes listed in role_attribute_ignore are omitted."""
        CONF.ldap.role_attribute_ignore = ['name']
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        # NOTE(morganfainberg): CONF.ldap.role_attribute_ignore will not be
        # dynamically changed at runtime. This invalidate is a work-around for
        # the expectation that it is safe to change config values in tests that
        # could affect what the drivers would return up to the manager. This
        # solves this assumption when working with aggressive (on-create)
        # cache population.
        self.assignment_api.get_role.invalidate(self.assignment_api,
                                                self.role_member['id'])
        role_ref = self.assignment_api.get_role(self.role_member['id'])
        self.assertEqual(self.role_member['id'], role_ref['id'])
        self.assertNotIn('name', role_ref)
    def test_user_enable_attribute_mask(self):
        """With user_enabled_mask=2, the enabled bit is OR-ed into the raw
        LDAP attribute (512 when enabled, 514 when disabled) while the API
        keeps reporting a plain boolean and no 'enabled_nomask' key."""
        CONF.ldap.user_enabled_mask = 2
        CONF.ldap.user_enabled_default = '512'
        self.clear_database()
        self.load_backends()
        self.load_fixtures(default_fixtures)
        ldap_ = self.identity_api.driver.user.get_connection()
        def get_enabled_vals():
            # Read the raw enabled attribute straight from the fake LDAP
            # server, bypassing the driver's mask handling.
            user_dn = self.identity_api.driver.user._id_to_dn_string(u'fäké1')
            enabled_attr_name = CONF.ldap.user_enabled_attribute
            res = ldap_.search_s(user_dn,
                                 ldap.SCOPE_BASE,
                                 query=u'(sn=fäké1)')
            return res[0][1][enabled_attr_name]
        user = {'id': u'fäké1', 'name': u'fäké1', 'enabled': True,
                'domain_id': CONF.identity.default_domain_id}
        user_ref = self.identity_api.create_user(u'fäké1', user)
        # Use assertIs rather than assertTrue because assertIs will assert the
        # value is a Boolean as expected.
        self.assertIs(user_ref['enabled'], True)
        self.assertNotIn('enabled_nomask', user_ref)
        enabled_vals = get_enabled_vals()
        self.assertEqual([512], enabled_vals)
        user_ref = self.identity_api.get_user(u'fäké1')
        self.assertIs(user_ref['enabled'], True)
        self.assertNotIn('enabled_nomask', user_ref)
        # Disabling the user sets the mask bit: 512 | 2 == 514.
        user['enabled'] = False
        user_ref = self.identity_api.update_user(u'fäké1', user)
        self.assertIs(user_ref['enabled'], False)
        self.assertNotIn('enabled_nomask', user_ref)
        enabled_vals = get_enabled_vals()
        self.assertEqual([514], enabled_vals)
        user_ref = self.identity_api.get_user(u'fäké1')
        self.assertIs(user_ref['enabled'], False)
        self.assertNotIn('enabled_nomask', user_ref)
        # Re-enabling clears the mask bit again.
        user['enabled'] = True
        user_ref = self.identity_api.update_user(u'fäké1', user)
        self.assertIs(user_ref['enabled'], True)
        self.assertNotIn('enabled_nomask', user_ref)
        enabled_vals = get_enabled_vals()
        self.assertEqual([512], enabled_vals)
        user_ref = self.identity_api.get_user(u'fäké1')
        self.assertIs(user_ref['enabled'], True)
        self.assertNotIn('enabled_nomask', user_ref)
def test_user_api_get_connection_no_user_password(self):
"""Don't bind in case the user and password are blank."""
# Ensure the username/password are in-fact blank
self.config_fixture.config(group='ldap', user=None, password=<PASSWORD>)
user_api = identity.backends.ldap.UserApi(CONF)
self.stubs.Set(fakeldap, 'FakeLdap',
self.mox.CreateMock(fakeldap.FakeLdap))
common_ldap.register_handler('fake://', fakeldap.FakeLdap)
# we have to track all calls on 'conn' to make sure that
# conn.simple_bind_s is not called
conn = self.mox.CreateMockAnything()
conn = fakeldap.FakeLdap(CONF.ldap.url,
0,
alias_dereferencing=None,
tls_cacertdir=None,
tls_cacertfile=None,
tls_req_cert=2,
use_tls=False,
chase_referrals=None).AndReturn(conn)
self.mox.ReplayAll()
user_api.get_connection(user=None, password=None)
def test_chase_referrals_off(self):
self.config_fixture.config(
group='ldap',
url='fake://memory',
chase_referrals=False)
user_api = identity.backends.ldap.UserApi(CONF)
self.stubs.Set(fakeldap, 'FakeLdap',
self.mox.CreateMock(fakeldap.FakeLdap))
common_ldap.register_handler('fake://', fakeldap.FakeLdap)
user = uuid.uuid4().hex
password = <PASSWORD>
conn = self.mox.CreateMockAnything()
conn = fakeldap.FakeLdap(CONF.ldap.url,
0,
alias_dereferencing=None,
tls_cacertdir=None,
tls_cacertfile=None,
tls_req_cert=2,
use_tls=False,
chase_referrals=False).AndReturn(conn)
conn.simple_bind_s(user, password).AndReturn(None)
self.mox.ReplayAll()
user_api.get_connection(user=user, password=password)
def test_chase_referrals_on(self):
self.config_fixture.config(
group='ldap',
url='fake://memory',
chase_referrals=True)
user_api = identity.backends.ldap.UserApi(CONF)
self.stubs.Set(fakeldap, 'FakeLdap',
self.mox.CreateMock(fakeldap.FakeLdap))
common_ldap.register_handler('fake://', fakeldap.FakeLdap)
| |
# Source repository: XiaoSong9905/tvm
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, no-member, too-many-locals, too-many-arguments, too-many-statements, singleton-comparison
# pylint: disable=bad-continuation, unused-argument
"""Non-maximum suppression operator"""
import tvm
from tvm import te
from tvm.contrib import nvcc
from tvm.contrib.thrust import can_use_thrust, can_use_rocthrust
from tvm.ir import register_intrin_lowering
from tvm.tir import if_then_else
from .sort import argsort, argsort_thrust
from .scan import exclusive_scan
from ..utils import ceil_div
from ..math import cast
from ..transform import reshape
from ..vision.nms_util import (
calculate_overlap,
binary_search,
collect_selected_indices,
collect_selected_indices_and_scores,
run_all_class_nms,
)
def cuda_atomic_add_rule(op):
    """Lower a tir.atomic_add intrinsic to CUDA's atomicAdd.

    Supports float32, float64 and int32 operands; any other dtype raises
    RuntimeError.
    """
    if op.dtype not in ("float32", "float64", "int32"):
        raise RuntimeError("only support int32, float32 and float64")
    # The extern call keeps the operand dtype: atomicAdd(address, value).
    return tvm.tir.call_pure_extern(op.dtype, "atomicAdd", op.args[0], op.args[1])
def opencl_atomic_add_rule(op):
    """Lower a tir.atomic_add intrinsic to OpenCL's atomic_add (int32 only)."""
    if op.dtype != "int32":
        raise RuntimeError("only support int32")
    return tvm.tir.call_pure_extern("int32", "atomic_add", op.args[0], op.args[1])
# Register the per-target lowering rules for the tir.atomic_add intrinsic.
register_intrin_lowering("tir.atomic_add", target="cuda", f=cuda_atomic_add_rule, level=99)
register_intrin_lowering("tir.atomic_add", target="opencl", f=opencl_atomic_add_rule, level=99)
def atomic_add(x, y):
    """Build a tir.atomic_add intrinsic call adding ``y`` at location ``x``.

    The call's result dtype follows ``y.dtype``; the intrinsic is lowered
    per-target by the rules registered above.
    """
    return tvm.tir.call_intrin(y.dtype, "tir.atomic_add", x, y)
def get_valid_boxes_ir(data, valid_boxes, score_threshold, id_index, score_index):
    """Low level IR to identify bounding boxes given a score threshold.
    Parameters
    ----------
    data : Buffer
        Input data. 3-D Buffer with shape [batch_size, num_anchors, elem_length].
    valid_boxes : Buffer
        Output buffer written by this IR: 1 for boxes passing the checks,
        0 otherwise. 2-D with shape [batch_size, num_anchors].
    score_threshold : Buffer or float32
        Lower limit of score for valid bounding boxes.
    id_index : optional, int
        index of the class categories, -1 to disable.
    score_index: optional, int
        Index of the scores/confidence of boxes.
    Returns
    -------
    valid_boxes: Buffer
        2D Buffer indicating valid boxes with shape [batch_size, num_anchors].
    """
    batch_size = data.shape[0]
    num_anchors = data.shape[1]
    elem_length = data.shape[2]
    ib = tvm.tir.ir_builder.create()
    data = ib.buffer_ptr(data)
    valid_boxes = ib.buffer_ptr(valid_boxes)
    # Promote plain Python scalars to TIR immediates so they can appear in
    # the generated expressions.
    if isinstance(score_threshold, float):
        score_threshold = tvm.tir.FloatImm("float32", score_threshold)
    id_index = tvm.tir.IntImm("int32", id_index)
    score_index = tvm.tir.IntImm("int32", score_index)
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    with ib.new_scope():
        # One thread per anchor; blockIdx.y indexes the batch row.
        nthread_tx = max_threads
        nthread_bx = ceil_div(num_anchors, max_threads)
        nthread_by = batch_size
        tx = te.thread_axis("threadIdx.x")
        bx = te.thread_axis("blockIdx.x")
        by = te.thread_axis("blockIdx.y")
        ib.scope_attr(tx, "thread_extent", nthread_tx)
        ib.scope_attr(bx, "thread_extent", nthread_bx)
        ib.scope_attr(by, "thread_extent", nthread_by)
        tid = bx * max_threads + tx
        with ib.if_scope(tid < num_anchors):
            i = by
            j = tid
            score = data[(i * num_anchors + j) * elem_length + score_index]
            # A box is valid when its score beats the threshold and, if a
            # class-id slot is configured (id_index >= 0), its id is
            # non-negative.
            with ib.if_scope(
                tvm.tir.all(
                    score > score_threshold,
                    tvm.tir.any(
                        id_index < 0, data[(i * num_anchors + j) * elem_length + id_index] >= 0
                    ),
                )
            ):
                valid_boxes[i * num_anchors + j] = 1
            with ib.else_scope():
                valid_boxes[i * num_anchors + j] = 0
    return ib.get()
def get_valid_counts_ir(data, valid_indices, valid_boxes, out, out_indices):
    """Low level IR to get valid count of bounding boxes
    given a score threshold. Also prepares to move valid boxes to the
    top of input data.
    Parameters
    ----------
    data : Buffer
        Input data. 3-D Buffer with shape [batch_size, num_anchors, elem_length].
    valid_indices: Buffer
        2D Buffer with shape [batch_size, num_anchors]; destination position
        of each valid box (exclusive scan of the valid_boxes flags).
    valid_boxes: Buffer
        2D Buffer of 0/1 flags marking valid boxes, shape
        [batch_size, num_anchors].
    Returns
    -------
    out : Buffer
        Sorted valid boxes
    out_indices : Buffer
        Indices of valid boxes in original data
    """
    batch_size = data.shape[0]
    num_anchors = data.shape[1]
    elem_length = data.shape[2]
    ib = tvm.tir.ir_builder.create()
    data = ib.buffer_ptr(data)
    valid_indices = ib.buffer_ptr(valid_indices)
    valid_boxes = ib.buffer_ptr(valid_boxes)
    out = ib.buffer_ptr(out)
    out_indices = ib.buffer_ptr(out_indices)
    one = tvm.tir.const(1, dtype=out.dtype)
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    nthread_tx = max_threads
    nthread_bx = num_anchors // max_threads + 1
    nthread_by = batch_size
    # First kernel: initialize every output slot to -1 ("no box").
    with ib.new_scope():
        tx = te.thread_axis("threadIdx.x")
        bx = te.thread_axis("blockIdx.x")
        by = te.thread_axis("blockIdx.y")
        ib.scope_attr(tx, "thread_extent", nthread_tx)
        ib.scope_attr(bx, "thread_extent", nthread_bx)
        ib.scope_attr(by, "thread_extent", nthread_by)
        tid = bx * max_threads + tx
        with ib.if_scope(tid < num_anchors):
            i = by
            j = tid
            with ib.for_range(0, elem_length) as k:
                out[(i * num_anchors + j) * elem_length + k] = -one
            out_indices[i * num_anchors + j] = -1
    # Second kernel: scatter each valid box (and its original index) to its
    # scanned destination, compacting valid boxes to the front of the row.
    with ib.new_scope():
        tx = te.thread_axis("threadIdx.x")
        bx = te.thread_axis("blockIdx.x")
        by = te.thread_axis("blockIdx.y")
        ib.scope_attr(tx, "thread_extent", nthread_tx)
        ib.scope_attr(bx, "thread_extent", nthread_bx)
        ib.scope_attr(by, "thread_extent", nthread_by)
        tid = bx * max_threads + tx
        with ib.if_scope(tid < num_anchors):
            i = by
            j = tid
            with ib.if_scope(valid_boxes[i, tid] > 0):
                with ib.for_range(0, elem_length) as k:
                    out[(i * num_anchors + valid_indices[i, tid]) * elem_length + k] = data[
                        (i * num_anchors + j) * elem_length + k
                    ]
                out_indices[i * num_anchors + valid_indices[i, tid]] = j
    return ib.get()
def get_valid_counts(data, score_threshold=0, id_index=0, score_index=1):
    """Get valid count of bounding boxes given a score threshold.
    Also moves valid boxes to the top of input data.
    Parameters
    ----------
    data : tvm.te.Tensor
        Input data. 3-D tensor with shape [batch_size, num_anchors, elem_length].
    score_threshold : optional, tvm.te.Tensor or float
        Lower limit of score for valid bounding boxes.
    id_index : optional, int
        index of the class categories, -1 to disable.
    score_index: optional, int
        Index of the scores/confidence of boxes.
    Returns
    -------
    valid_count : tvm.te.Tensor
        1-D tensor for valid number of boxes.
    out_tensor : tvm.te.Tensor
        Rearranged data tensor.
    out_indices : tvm.te.Tensor
        Indices of the valid boxes in the original data.
    """
    batch_size = data.shape[0]
    num_anchors = data.shape[1]
    data_buf = tvm.tir.decl_buffer(data.shape, data.dtype, "data_buf", data_alignment=8)
    valid_boxes_buf = tvm.tir.decl_buffer(
        (batch_size, num_anchors), "int32", "valid_boxes_buf", data_alignment=8
    )
    # Step 1: flag each box whose score passes the threshold (and id check).
    valid_boxes = te.extern(
        [(batch_size, num_anchors)],
        [data],
        lambda ins, outs: get_valid_boxes_ir(
            ins[0], outs[0], score_threshold, id_index, score_index
        ),
        dtype=["int32"],
        in_buffers=[data_buf],
        out_buffers=[valid_boxes_buf],
        name="get_valid_boxes",
        tag="get_valid_boxes_gpu",
    )
    valid_indices_buf = tvm.tir.decl_buffer(
        (batch_size, num_anchors), "int32", "valid_indices_buf", data_alignment=8
    )
    # Step 2: the exclusive scan turns the 0/1 flags into destination
    # positions; the reduction gives the per-batch count of valid boxes.
    valid_indices, valid_count = exclusive_scan(valid_boxes, axis=1, return_reduction=True)
    out_buf = tvm.tir.decl_buffer(data.shape, data.dtype, "out_buf", data_alignment=8)
    out_indices_buf = tvm.tir.decl_buffer(
        # Fixed: this buffer was previously also named "out_buf", colliding
        # with the data output buffer declared above.
        (batch_size, num_anchors), "int32", "out_indices_buf", data_alignment=8
    )
    # Step 3: compact the valid boxes (and their original indices) to the
    # front of each batch row.
    out, out_indices = te.extern(
        [data.shape, (batch_size, num_anchors)],
        [data, valid_indices, valid_boxes],
        lambda ins, outs: get_valid_counts_ir(ins[0], ins[1], ins[2], outs[0], outs[1]),
        dtype=["int32", data.dtype],
        in_buffers=[data_buf, valid_indices_buf, valid_boxes_buf],
        out_buffers=[out_buf, out_indices_buf],
        name="get_valid_counts",
        tag="get_valid_counts_gpu",
    )
    return [valid_count, out, out_indices]
def _nms_loop(
    ib,
    batch_size,
    top_k,
    iou_threshold,
    max_output_size,
    valid_count,
    on_new_valid_box_func,
    on_new_invalidated_box_func,
    needs_bbox_check_func,
    calc_overlap_func,
    out_scores,
    num_valid_boxes,
):
    """Emit the sequential outer NMS loop shared by the NMS IR builders.

    One GPU block (blockIdx.y) handles one batch row.  The loop walks box
    candidates in order; each accepted box is reported through
    ``on_new_valid_box_func``, then all threads of the block cooperatively
    scan the remaining boxes and invalidate those whose overlap (from
    ``calc_overlap_func``) reaches ``iou_threshold`` by setting their score
    in ``out_scores`` to -1.0 and calling ``on_new_invalidated_box_func``.
    The per-row number of accepted boxes is written to ``num_valid_boxes``.
    Returns the finished IR statement from ``ib.get()``.
    """
    max_threads = int(tvm.target.Target.current(allow_none=False).max_num_threads)
    with ib.new_scope():
        nthread_by = batch_size
        nthread_tx = max_threads
        # Some cuda architectures have smaller limit of 32K for cudaDevAttrMaxRegistersPerBlock
        # vs 64K for most GPUs. Since this kernel uses many registers (around 35), the limit will
        # be exceeded with 1024 threads.
        target = tvm.target.Target.current(allow_none=False)
        if target.kind.name == "cuda":
            if nvcc.get_target_compute_version(target) in ["3.2", "5.3", "6.2"]:
                nthread_tx = 512
        by = te.thread_axis("blockIdx.y")
        tx = te.thread_axis("threadIdx.x")
        ib.scope_attr(by, "thread_extent", nthread_by)
        ib.scope_attr(tx, "thread_extent", nthread_tx)
        num_valid_boxes_local = ib.allocate(
            "int32", (1,), name="num_valid_boxes_local", scope="local"
        )
        num_valid_boxes_local[0] = 0
        def nms_inner_loop(ib, i, j, nkeep):
            # The box j is valid, invalidate other boxes that overlap with j above iou_threshold
            on_new_valid_box_func(ib, tx, num_valid_boxes_local[0], i, j)
            num_valid_boxes_local[0] += 1
            # Boxes after j are partitioned across the block's threads in a
            # strided fashion (stride nthread_tx).
            num_iter_per_thread = ceil_div(nkeep - (j + 1), nthread_tx)
            with ib.for_range(0, num_iter_per_thread, name="_k") as _k:
                k = j + 1 + _k * nthread_tx + tx
                with ib.if_scope(
                    tvm.tir.all(
                        k < nkeep,
                        out_scores[i, k] > 0,  # is the box k still valid?
                        needs_bbox_check_func(i, j, k),
                    )
                ):
                    iou = calc_overlap_func(i, j, k)
                    with ib.if_scope(iou >= iou_threshold):
                        # invalidate the box k
                        out_scores[i, k] = -1.0
                        on_new_invalidated_box_func(i, k)
            # Synchronize the block so all threads observe the
            # invalidations before the next candidate is examined.
            ib.emit(tvm.tir.Call(None, "tir.tvm_storage_sync", tvm.runtime.convert(["shared"])))
        i = by
        # Consider at most top_k boxes (when top_k > 0), capped by the
        # number of valid boxes in this row.
        nkeep = if_then_else(tvm.tir.all(top_k > 0, top_k < valid_count[i]), top_k, valid_count[i])
        max_output_size = if_then_else(max_output_size > 0, max_output_size, nkeep)
        with ib.if_scope(tvm.tir.all(iou_threshold > 0, valid_count[i] > 0)):
            # Apply nms
            # No need to do more iteration if we have already reached max_output_size boxes
            box_idx = ib.allocate("int32", (1,), name="box_idx", scope="local")
            box_idx[0] = 0
            with ib.while_loop(
                tvm.tir.all(box_idx[0] < nkeep, num_valid_boxes_local[0] < max_output_size)
            ):
                # Proceed to the inner loop if the box with id box_idx is still valid
                with ib.if_scope(out_scores[i, box_idx[0]] > -1.0):
                    nms_inner_loop(ib, i, box_idx[0], nkeep)
                box_idx[0] += 1
            # Only one thread writes the final per-row count.
            with ib.if_scope(tx + 0 == 0):
                num_valid_boxes[i] = num_valid_boxes_local[0]
        with ib.else_scope():
            num_valid_boxes[i] = 0
    return ib.get()
def nms_ir(
data,
sorted_index,
valid_count,
indices,
out_bboxes,
out_scores,
out_class_ids,
out_features,
box_indices,
num_valid_boxes,
max_output_size,
iou_threshold,
force_suppress,
top_k,
coord_start,
id_index,
score_index,
return_indices,
):
"""Low level IR routing for transform location in multibox_detection operator.
Parameters
----------
data : Buffer
Buffer of output boxes with class and score.
sorted_index : Buffer
Buffer of output box indexes sorted by score.
valid_count : Buffer
Buffer of number of valid output boxes.
indices : Buffer
indices in original tensor, with shape [batch_size, num_anchors],
represents the index of | |
import enum
class block(enum.Enum):
"""
block
* acacia_button
* acacia_door
* acacia_fence
* acacia_fence_gate
* acacia_leaves
* acacia_log
* acacia_planks
* acacia_pressure_plate
* acacia_sapling
* acacia_sign
* acacia_slab
* acacia_stairs
* acacia_trapdoor
* acacia_wall_sign
* acacia_wood
* activator_rail
* air
* allium
* amethyst_block
* amethyst_cluster
* ancient_debris
* andesite
* andesite_slab
* andesite_stairs
* andesite_wall
* anvil
* attached_melon_stem
* attached_pumpkin_stem
* azalea
* azalea_leaves
* azure_bluet
* bamboo
* bamboo_sapling
* barrel
* barrier
* basalt
* beacon
* bedrock
* bee_nest
* beehive
* beetroots
* bell
* big_dripleaf
* big_dripleaf_stem
* birch_button
* birch_door
* birch_fence
* birch_fence_gate
* birch_leaves
* birch_log
* birch_planks
* birch_pressure_plate
* birch_sapling
* birch_sign
* birch_slab
* birch_stairs
* birch_trapdoor
* birch_wall_sign
* birch_wood
* black_banner
* black_bed
* black_candle
* black_candle_cake
* black_carpet
* black_concrete
* black_concrete_powder
* black_glazed_terracotta
* black_shulker_box
* black_stained_glass
* black_stained_glass_pane
* black_terracotta
* black_wall_banner
* black_wool
* blackstone
* blackstone_slab
* blackstone_stairs
* blackstone_wall
* blast_furnace
* blue_banner
* blue_bed
* blue_candle
* blue_candle_cake
* blue_carpet
* blue_concrete
* blue_concrete_powder
* blue_glazed_terracotta
* blue_ice
* blue_orchid
* blue_shulker_box
* blue_stained_glass
* blue_stained_glass_pane
* blue_terracotta
* blue_wall_banner
* blue_wool
* bone_block
* bookshelf
* brain_coral
* brain_coral_block
* brain_coral_fan
* brain_coral_wall_fan
* brewing_stand
* brick_slab
* brick_stairs
* brick_wall
* bricks
* brown_banner
* brown_bed
* brown_candle
* brown_candle_cake
* brown_carpet
* brown_concrete
* brown_concrete_powder
* brown_glazed_terracotta
* brown_mushroom
* brown_mushroom_block
* brown_shulker_box
* brown_stained_glass
* brown_stained_glass_pane
* brown_terracotta
* brown_wall_banner
* brown_wool
* bubble_column
* bubble_coral
* bubble_coral_block
* bubble_coral_fan
* bubble_coral_wall_fan
* budding_amethyst
* cactus
* cake
* calcite
* campfire
* candle
* candle_cake
* carrots
* cartography_table
* carved_pumpkin
* cauldron
* cave_air
* cave_vines
* cave_vines_plant
* chain
* chain_command_block
* chest
* chipped_anvil
* chiseled_deepslate
* chiseled_nether_bricks
* chiseled_polished_blackstone
* chiseled_quartz_block
* chiseled_red_sandstone
* chiseled_sandstone
* chiseled_stone_bricks
* chorus_flower
* chorus_plant
* clay
* coal_block
* coal_ore
* coarse_dirt
* cobbled_deepslate
* cobbled_deepslate_slab
* cobbled_deepslate_stairs
* cobbled_deepslate_wall
* cobblestone
* cobblestone_slab
* cobblestone_stairs
* cobblestone_wall
* cobweb
* cocoa
* command_block
* comparator
* composter
* conduit
* copper_block
* copper_ore
* cornflower
* cracked_deepslate_bricks
* cracked_deepslate_tiles
* cracked_nether_bricks
* cracked_polished_blackstone_bricks
* cracked_stone_bricks
* crafting_table
* creeper_head
* creeper_wall_head
* crimson_button
* crimson_door
* crimson_fence
* crimson_fence_gate
* crimson_fungus
* crimson_hyphae
* crimson_nylium
* crimson_planks
* crimson_pressure_plate
* crimson_roots
* crimson_sign
* crimson_slab
* crimson_stairs
* crimson_stem
* crimson_trapdoor
* crimson_wall_sign
* crying_obsidian
* cut_copper
* cut_copper_slab
* cut_copper_stairs
* cut_red_sandstone
* cut_red_sandstone_slab
* cut_sandstone
* cut_sandstone_slab
* cyan_banner
* cyan_bed
* cyan_candle
* cyan_candle_cake
* cyan_carpet
* cyan_concrete
* cyan_concrete_powder
* cyan_glazed_terracotta
* cyan_shulker_box
* cyan_stained_glass
* cyan_stained_glass_pane
* cyan_terracotta
* cyan_wall_banner
* cyan_wool
* damaged_anvil
* dandelion
* dark_oak_button
* dark_oak_door
* dark_oak_fence
* dark_oak_fence_gate
* dark_oak_leaves
* dark_oak_log
* dark_oak_planks
* dark_oak_pressure_plate
* dark_oak_sapling
* dark_oak_sign
* dark_oak_slab
* dark_oak_stairs
* dark_oak_trapdoor
* dark_oak_wall_sign
* dark_oak_wood
* dark_prismarine
* dark_prismarine_slab
* dark_prismarine_stairs
* daylight_detector
* dead_brain_coral
* dead_brain_coral_block
* dead_brain_coral_fan
* dead_brain_coral_wall_fan
* dead_bubble_coral
* dead_bubble_coral_block
* dead_bubble_coral_fan
* dead_bubble_coral_wall_fan
* dead_bush
* dead_fire_coral
* dead_fire_coral_block
* dead_fire_coral_fan
* dead_fire_coral_wall_fan
* dead_horn_coral
* dead_horn_coral_block
* dead_horn_coral_fan
* dead_horn_coral_wall_fan
* dead_tube_coral
* dead_tube_coral_block
* dead_tube_coral_fan
* dead_tube_coral_wall_fan
* deepslate
* deepslate_brick_slab
* deepslate_brick_stairs
* deepslate_brick_wall
* deepslate_bricks
* deepslate_coal_ore
* deepslate_copper_ore
* deepslate_diamond_ore
* deepslate_emerald_ore
* deepslate_gold_ore
* deepslate_iron_ore
* deepslate_lapis_ore
* deepslate_redstone_ore
* deepslate_tile_slab
* deepslate_tile_stairs
* deepslate_tile_wall
* deepslate_tiles
* detector_rail
* diamond_block
* diamond_ore
* diorite
* diorite_slab
* diorite_stairs
* diorite_wall
* dirt
* dirt_path
* dispenser
* dragon_egg
* dragon_head
* dragon_wall_head
* dried_kelp_block
* dripstone_block
* dropper
* emerald_block
* emerald_ore
* enchanting_table
* end_gateway
* end_portal
* end_portal_frame
* end_rod
* end_stone
* end_stone_brick_slab
* end_stone_brick_stairs
* end_stone_brick_wall
* end_stone_bricks
* ender_chest
* exposed_copper
* exposed_cut_copper
* exposed_cut_copper_slab
* exposed_cut_copper_stairs
* farmland
* fern
* fire
* fire_coral
* fire_coral_block
* fire_coral_fan
* fire_coral_wall_fan
* fletching_table
* flower_pot
* flowering_azalea
* flowering_azalea_leaves
* frosted_ice
* furnace
* gilded_blackstone
* glass
* glass_pane
* glow_lichen
* glowstone
* gold_block
* gold_ore
* granite
* granite_slab
* granite_stairs
* granite_wall
* grass
* grass_block
* gravel
* gray_banner
* gray_bed
* gray_candle
* gray_candle_cake
* gray_carpet
* gray_concrete
* gray_concrete_powder
* gray_glazed_terracotta
* gray_shulker_box
* gray_stained_glass
* gray_stained_glass_pane
* gray_terracotta
* gray_wall_banner
* gray_wool
* green_banner
* green_bed
* green_candle
* green_candle_cake
* green_carpet
* green_concrete
* green_concrete_powder
* green_glazed_terracotta
* green_shulker_box
* green_stained_glass
* green_stained_glass_pane
* green_terracotta
* green_wall_banner
* green_wool
* grindstone
* hanging_roots
* hay_block
* heavy_weighted_pressure_plate
* honey_block
* honeycomb_block
* hopper
* horn_coral
* horn_coral_block
* horn_coral_fan
* horn_coral_wall_fan
* ice
* infested_chiseled_stone_bricks
* infested_cobblestone
* infested_cracked_stone_bricks
* infested_deepslate
* infested_mossy_stone_bricks
* infested_stone
* infested_stone_bricks
* iron_bars
* iron_block
* iron_door
* iron_ore
* iron_trapdoor
* jack_o_lantern
* jigsaw
* jukebox
* jungle_button
* jungle_door
* jungle_fence
* jungle_fence_gate
* jungle_leaves
* jungle_log
* jungle_planks
* jungle_pressure_plate
* jungle_sapling
* jungle_sign
* jungle_slab
* jungle_stairs
* jungle_trapdoor
* jungle_wall_sign
* jungle_wood
* kelp
* kelp_plant
* ladder
* lantern
* lapis_block
* lapis_ore
* large_amethyst_bud
* large_fern
* lava
* lava_cauldron
* lectern
* lever
* light
* light_blue_banner
* light_blue_bed
* light_blue_candle
* light_blue_candle_cake
* light_blue_carpet
* light_blue_concrete
* light_blue_concrete_powder
* light_blue_glazed_terracotta
* light_blue_shulker_box
* light_blue_stained_glass
* light_blue_stained_glass_pane
* light_blue_terracotta
* light_blue_wall_banner
* light_blue_wool
* light_gray_banner
* light_gray_bed
* light_gray_candle
* light_gray_candle_cake
* light_gray_carpet
* light_gray_concrete
* light_gray_concrete_powder
* light_gray_glazed_terracotta
* light_gray_shulker_box
* light_gray_stained_glass
* light_gray_stained_glass_pane
* light_gray_terracotta
* light_gray_wall_banner
* light_gray_wool
* light_weighted_pressure_plate
* lightning_rod
* lilac
* lily_of_the_valley
* lily_pad
* lime_banner
* lime_bed
* lime_candle
* lime_candle_cake
* lime_carpet
* lime_concrete
* lime_concrete_powder
* lime_glazed_terracotta
* lime_shulker_box
* lime_stained_glass
* lime_stained_glass_pane
* lime_terracotta
* lime_wall_banner
* lime_wool
* lodestone
* loom
* magenta_banner
* magenta_bed
* magenta_candle
* magenta_candle_cake
* magenta_carpet
* magenta_concrete
* magenta_concrete_powder
* magenta_glazed_terracotta
* magenta_shulker_box
* magenta_stained_glass
* magenta_stained_glass_pane
* magenta_terracotta
* magenta_wall_banner
* magenta_wool
* magma_block
* medium_amethyst_bud
* melon
* melon_stem
* moss_block
* moss_carpet
* mossy_cobblestone
* mossy_cobblestone_slab
* mossy_cobblestone_stairs
* mossy_cobblestone_wall
* mossy_stone_brick_slab
* mossy_stone_brick_stairs
* mossy_stone_brick_wall
* mossy_stone_bricks
* moving_piston
* mushroom_stem
* mycelium
* nether_brick_fence
* nether_brick_slab
* nether_brick_stairs
* nether_brick_wall
* nether_bricks
* nether_gold_ore
* nether_portal
* nether_quartz_ore
* nether_sprouts
* nether_wart
* nether_wart_block
* netherite_block
* netherrack
* note_block
* oak_button
* oak_door
* oak_fence
* oak_fence_gate
* oak_leaves
* oak_log
* oak_planks
* oak_pressure_plate
* oak_sapling
* oak_sign
* oak_slab
* oak_stairs
* oak_trapdoor
* oak_wall_sign
* oak_wood
* observer
* obsidian
* orange_banner
* orange_bed
* orange_candle
* orange_candle_cake
* orange_carpet
* orange_concrete
* orange_concrete_powder
* orange_glazed_terracotta
* orange_shulker_box
* orange_stained_glass
* orange_stained_glass_pane
* orange_terracotta
* orange_tulip
* orange_wall_banner
* orange_wool
* oxeye_daisy
* oxidized_copper
* oxidized_cut_copper
* oxidized_cut_copper_slab
* oxidized_cut_copper_stairs
* packed_ice
* peony
* petrified_oak_slab
* pink_banner
* pink_bed
* pink_candle
* pink_candle_cake
* pink_carpet
* pink_concrete
* pink_concrete_powder
* pink_glazed_terracotta
* pink_shulker_box
* pink_stained_glass
* pink_stained_glass_pane
* pink_terracotta
* pink_tulip
* pink_wall_banner
* pink_wool
* piston
* piston_head
* player_head
* player_wall_head
* podzol
* pointed_dripstone
* polished_andesite
* polished_andesite_slab
* polished_andesite_stairs
* polished_basalt
* polished_blackstone
* polished_blackstone_brick_slab
* polished_blackstone_brick_stairs
* polished_blackstone_brick_wall
* polished_blackstone_bricks
* polished_blackstone_button
* polished_blackstone_pressure_plate
* polished_blackstone_slab
* polished_blackstone_stairs
* polished_blackstone_wall
* polished_deepslate
* polished_deepslate_slab
* polished_deepslate_stairs
* polished_deepslate_wall
* polished_diorite
* polished_diorite_slab
* polished_diorite_stairs
* polished_granite
* polished_granite_slab
* polished_granite_stairs
* poppy
* potatoes
* potted_acacia_sapling
* potted_allium
* potted_azalea_bush
* potted_azure_bluet
* potted_bamboo
* potted_birch_sapling
* potted_blue_orchid
* potted_brown_mushroom
* potted_cactus
* potted_cornflower
* potted_crimson_fungus
* potted_crimson_roots
* potted_dandelion
* potted_dark_oak_sapling
* potted_dead_bush
* potted_fern
* potted_flowering_azalea_bush
* potted_jungle_sapling
* potted_lily_of_the_valley
* potted_oak_sapling
* potted_orange_tulip
* potted_oxeye_daisy
* potted_pink_tulip
* potted_poppy
* potted_red_mushroom
* potted_red_tulip
* potted_spruce_sapling
* potted_warped_fungus
* potted_warped_roots
* potted_white_tulip
* potted_wither_rose
* powder_snow
* powder_snow_cauldron
* powered_rail
* prismarine
* prismarine_brick_slab
* prismarine_brick_stairs
* prismarine_bricks
* prismarine_slab
* prismarine_stairs
* prismarine_wall
* pumpkin
* pumpkin_stem
* purple_banner
* purple_bed
* purple_candle
* purple_candle_cake
* purple_carpet
* purple_concrete
* purple_concrete_powder
* purple_glazed_terracotta
* purple_shulker_box
* purple_stained_glass
* purple_stained_glass_pane
* purple_terracotta
* purple_wall_banner
* purple_wool
* purpur_block
* purpur_pillar
* purpur_slab
* purpur_stairs
* quartz_block
* quartz_bricks
* quartz_pillar
* quartz_slab
* quartz_stairs
* rail
* raw_copper_block
* raw_gold_block
* raw_iron_block
* red_banner
* red_bed
* red_candle
* red_candle_cake
* red_carpet
* red_concrete
* red_concrete_powder
* red_glazed_terracotta
* red_mushroom
* red_mushroom_block
* red_nether_brick_slab
* red_nether_brick_stairs
* red_nether_brick_wall
* red_nether_bricks
* red_sand
* red_sandstone
* red_sandstone_slab
* red_sandstone_stairs
* red_sandstone_wall
* red_shulker_box
* red_stained_glass
* red_stained_glass_pane
* red_terracotta
* red_tulip
* red_wall_banner
* red_wool
* redstone_block
* redstone_lamp
* redstone_ore
* redstone_torch
* redstone_wall_torch
* redstone_wire
* repeater
* repeating_command_block
* respawn_anchor
* rooted_dirt
* rose_bush
* sand
* sandstone
* sandstone_slab
* sandstone_stairs
* sandstone_wall
* scaffolding
* sculk_sensor
* sea_lantern
* sea_pickle
* seagrass
* shroomlight
* shulker_box
* skeleton_skull
* skeleton_wall_skull
* slime_block
* small_amethyst_bud
* small_dripleaf
* smithing_table
* smoker
* smooth_basalt
* smooth_quartz
* smooth_quartz_slab
* smooth_quartz_stairs
* smooth_red_sandstone
* smooth_red_sandstone_slab
* smooth_red_sandstone_stairs
* smooth_sandstone
* smooth_sandstone_slab
* smooth_sandstone_stairs
* smooth_stone
* smooth_stone_slab
* snow
* snow_block
* soul_campfire
* soul_fire
* soul_lantern
* soul_sand
* soul_soil
* soul_torch
* soul_wall_torch
* spawner
* sponge
* spore_blossom
* spruce_button
* spruce_door
* spruce_fence
* spruce_fence_gate
* spruce_leaves
* spruce_log
* spruce_planks
* spruce_pressure_plate
* spruce_sapling
* spruce_sign
* spruce_slab
* spruce_stairs
* spruce_trapdoor
* spruce_wall_sign
* spruce_wood
* sticky_piston
* stone
* stone_brick_slab
* stone_brick_stairs
* stone_brick_wall
* stone_bricks
* stone_button
* stone_pressure_plate
* stone_slab
* stone_stairs
* stonecutter
* stripped_acacia_log
* stripped_acacia_wood
* stripped_birch_log
* stripped_birch_wood
* stripped_crimson_hyphae
* stripped_crimson_stem
* stripped_dark_oak_log
* stripped_dark_oak_wood
* stripped_jungle_log
* stripped_jungle_wood
* stripped_oak_log
* stripped_oak_wood
* stripped_spruce_log
* stripped_spruce_wood
* stripped_warped_hyphae
* stripped_warped_stem
* structure_block
* structure_void
* sugar_cane
* sunflower
* sweet_berry_bush
* tall_grass
* tall_seagrass
* target
* terracotta
* tinted_glass
* tnt
* torch
* trapped_chest
* tripwire
* tripwire_hook
* tube_coral
* tube_coral_block
* tube_coral_fan
* tube_coral_wall_fan
* tuff
* turtle_egg
* twisting_vines
* twisting_vines_plant
* vine
* void_air
* wall_torch
* warped_button
* warped_door
* warped_fence
* warped_fence_gate
* warped_fungus
* warped_hyphae
* warped_nylium
* warped_planks
* warped_pressure_plate
* warped_roots
* warped_sign
* warped_slab
* warped_stairs
* warped_stem
* warped_trapdoor
* warped_wall_sign
* warped_wart_block
* water
* water_cauldron
* waxed_copper_block
* waxed_cut_copper
* waxed_cut_copper_slab
* waxed_cut_copper_stairs
* waxed_exposed_copper
* waxed_exposed_cut_copper
* waxed_exposed_cut_copper_slab
* waxed_exposed_cut_copper_stairs
* waxed_oxidized_copper
* waxed_oxidized_cut_copper
* waxed_oxidized_cut_copper_slab
* waxed_oxidized_cut_copper_stairs
* waxed_weathered_copper
* waxed_weathered_cut_copper
* waxed_weathered_cut_copper_slab
* waxed_weathered_cut_copper_stairs
* weathered_copper
* weathered_cut_copper
* weathered_cut_copper_slab
* weathered_cut_copper_stairs
* weeping_vines
* weeping_vines_plant
* wet_sponge
* wheat
* white_banner
* white_bed
* white_candle
* white_candle_cake
* white_carpet
* white_concrete
* white_concrete_powder
* white_glazed_terracotta
* white_shulker_box
* white_stained_glass
* white_stained_glass_pane
* white_terracotta
* white_tulip
* white_wall_banner
* white_wool
* wither_rose
* wither_skeleton_skull
* wither_skeleton_wall_skull
* yellow_banner
* yellow_bed
* yellow_candle
* yellow_candle_cake
* yellow_carpet
* yellow_concrete
* yellow_concrete_powder
* yellow_glazed_terracotta
* yellow_shulker_box
* yellow_stained_glass
* yellow_stained_glass_pane
* yellow_terracotta
* yellow_wall_banner
* yellow_wool
* zombie_head
* zombie_wall_head
"""
acacia_button = "minecraft:acacia_button"
acacia_door = "minecraft:acacia_door"
acacia_fence = "minecraft:acacia_fence"
acacia_fence_gate = "minecraft:acacia_fence_gate"
acacia_leaves = "minecraft:acacia_leaves"
acacia_log = "minecraft:acacia_log"
acacia_planks = "minecraft:acacia_planks"
acacia_pressure_plate = "minecraft:acacia_pressure_plate"
acacia_sapling = "minecraft:acacia_sapling"
acacia_sign = "minecraft:acacia_sign"
acacia_slab = "minecraft:acacia_slab"
acacia_stairs = "minecraft:acacia_stairs"
acacia_trapdoor = "minecraft:acacia_trapdoor"
acacia_wall_sign = "minecraft:acacia_wall_sign"
acacia_wood = "minecraft:acacia_wood"
activator_rail = "minecraft:activator_rail"
air = "minecraft:air"
allium = "minecraft:allium"
amethyst_block = "minecraft:amethyst_block"
amethyst_cluster = "minecraft:amethyst_cluster"
ancient_debris = "minecraft:ancient_debris"
andesite = "minecraft:andesite"
andesite_slab = "minecraft:andesite_slab"
andesite_stairs = "minecraft:andesite_stairs"
andesite_wall = "minecraft:andesite_wall"
anvil = "minecraft:anvil"
attached_melon_stem = "minecraft:attached_melon_stem"
attached_pumpkin_stem = "minecraft:attached_pumpkin_stem"
azalea = "minecraft:azalea"
azalea_leaves = "minecraft:azalea_leaves"
azure_bluet = "minecraft:azure_bluet"
bamboo = "minecraft:bamboo"
bamboo_sapling = "minecraft:bamboo_sapling"
barrel = "minecraft:barrel"
barrier = "minecraft:barrier"
basalt = "minecraft:basalt"
beacon = "minecraft:beacon"
bedrock = "minecraft:bedrock"
bee_nest = "minecraft:bee_nest"
beehive = "minecraft:beehive"
beetroots = "minecraft:beetroots"
bell = "minecraft:bell"
big_dripleaf = "minecraft:big_dripleaf"
big_dripleaf_stem = "minecraft:big_dripleaf_stem"
birch_button = "minecraft:birch_button"
birch_door = "minecraft:birch_door"
birch_fence = "minecraft:birch_fence"
birch_fence_gate = "minecraft:birch_fence_gate"
birch_leaves = "minecraft:birch_leaves"
birch_log = "minecraft:birch_log"
birch_planks = "minecraft:birch_planks"
birch_pressure_plate = "minecraft:birch_pressure_plate"
birch_sapling = "minecraft:birch_sapling"
birch_sign = "minecraft:birch_sign"
birch_slab = "minecraft:birch_slab"
birch_stairs = "minecraft:birch_stairs"
birch_trapdoor = "minecraft:birch_trapdoor"
birch_wall_sign = "minecraft:birch_wall_sign"
birch_wood = "minecraft:birch_wood"
black_banner = "minecraft:black_banner"
black_bed = "minecraft:black_bed"
black_candle = "minecraft:black_candle"
black_candle_cake = "minecraft:black_candle_cake"
black_carpet = "minecraft:black_carpet"
black_concrete = "minecraft:black_concrete"
black_concrete_powder = "minecraft:black_concrete_powder"
black_glazed_terracotta = "minecraft:black_glazed_terracotta"
black_shulker_box = "minecraft:black_shulker_box"
black_stained_glass = "minecraft:black_stained_glass"
black_stained_glass_pane = "minecraft:black_stained_glass_pane"
black_terracotta = "minecraft:black_terracotta"
black_wall_banner = "minecraft:black_wall_banner"
black_wool = "minecraft:black_wool"
blackstone = "minecraft:blackstone"
blackstone_slab = "minecraft:blackstone_slab"
blackstone_stairs = "minecraft:blackstone_stairs"
blackstone_wall = "minecraft:blackstone_wall"
blast_furnace = "minecraft:blast_furnace"
blue_banner = "minecraft:blue_banner"
blue_bed = "minecraft:blue_bed"
blue_candle = "minecraft:blue_candle"
blue_candle_cake = "minecraft:blue_candle_cake"
blue_carpet = "minecraft:blue_carpet"
blue_concrete = "minecraft:blue_concrete"
blue_concrete_powder = "minecraft:blue_concrete_powder"
blue_glazed_terracotta = "minecraft:blue_glazed_terracotta"
blue_ice = "minecraft:blue_ice"
blue_orchid = "minecraft:blue_orchid"
blue_shulker_box = "minecraft:blue_shulker_box"
blue_stained_glass = "minecraft:blue_stained_glass"
blue_stained_glass_pane = "minecraft:blue_stained_glass_pane"
blue_terracotta = "minecraft:blue_terracotta"
blue_wall_banner = "minecraft:blue_wall_banner"
blue_wool = "minecraft:blue_wool"
bone_block = "minecraft:bone_block"
bookshelf = "minecraft:bookshelf"
brain_coral = "minecraft:brain_coral"
brain_coral_block = "minecraft:brain_coral_block"
brain_coral_fan = "minecraft:brain_coral_fan"
brain_coral_wall_fan = "minecraft:brain_coral_wall_fan"
brewing_stand = "minecraft:brewing_stand"
brick_slab = "minecraft:brick_slab"
brick_stairs = "minecraft:brick_stairs"
brick_wall = "minecraft:brick_wall"
bricks = "minecraft:bricks"
brown_banner = "minecraft:brown_banner"
brown_bed = "minecraft:brown_bed"
brown_candle = "minecraft:brown_candle"
brown_candle_cake = "minecraft:brown_candle_cake"
brown_carpet = "minecraft:brown_carpet"
brown_concrete = "minecraft:brown_concrete"
brown_concrete_powder = "minecraft:brown_concrete_powder"
brown_glazed_terracotta = "minecraft:brown_glazed_terracotta"
brown_mushroom = "minecraft:brown_mushroom"
brown_mushroom_block = "minecraft:brown_mushroom_block"
brown_shulker_box = "minecraft:brown_shulker_box"
brown_stained_glass = "minecraft:brown_stained_glass"
brown_stained_glass_pane = "minecraft:brown_stained_glass_pane"
brown_terracotta = "minecraft:brown_terracotta"
brown_wall_banner = "minecraft:brown_wall_banner"
brown_wool = "minecraft:brown_wool"
bubble_column = "minecraft:bubble_column"
bubble_coral = "minecraft:bubble_coral"
bubble_coral_block = "minecraft:bubble_coral_block"
bubble_coral_fan = "minecraft:bubble_coral_fan"
bubble_coral_wall_fan = "minecraft:bubble_coral_wall_fan"
budding_amethyst = "minecraft:budding_amethyst"
cactus = "minecraft:cactus"
cake = "minecraft:cake"
calcite = "minecraft:calcite"
campfire = "minecraft:campfire"
candle = "minecraft:candle"
candle_cake = "minecraft:candle_cake"
carrots = "minecraft:carrots"
cartography_table = "minecraft:cartography_table"
carved_pumpkin = "minecraft:carved_pumpkin"
cauldron = "minecraft:cauldron"
cave_air = "minecraft:cave_air"
cave_vines = "minecraft:cave_vines"
cave_vines_plant = "minecraft:cave_vines_plant"
chain = "minecraft:chain"
chain_command_block = "minecraft:chain_command_block"
chest = "minecraft:chest"
chipped_anvil = "minecraft:chipped_anvil"
chiseled_deepslate = "minecraft:chiseled_deepslate"
chiseled_nether_bricks = "minecraft:chiseled_nether_bricks"
chiseled_polished_blackstone = "minecraft:chiseled_polished_blackstone"
chiseled_quartz_block = "minecraft:chiseled_quartz_block"
chiseled_red_sandstone = "minecraft:chiseled_red_sandstone"
chiseled_sandstone = "minecraft:chiseled_sandstone"
chiseled_stone_bricks = "minecraft:chiseled_stone_bricks"
chorus_flower = "minecraft:chorus_flower"
chorus_plant = "minecraft:chorus_plant"
clay = "minecraft:clay"
coal_block = "minecraft:coal_block"
coal_ore = "minecraft:coal_ore"
coarse_dirt = "minecraft:coarse_dirt"
cobbled_deepslate = "minecraft:cobbled_deepslate"
cobbled_deepslate_slab = "minecraft:cobbled_deepslate_slab"
cobbled_deepslate_stairs = "minecraft:cobbled_deepslate_stairs"
cobbled_deepslate_wall = "minecraft:cobbled_deepslate_wall"
cobblestone = "minecraft:cobblestone"
cobblestone_slab = "minecraft:cobblestone_slab"
cobblestone_stairs = "minecraft:cobblestone_stairs"
cobblestone_wall = "minecraft:cobblestone_wall"
cobweb = "minecraft:cobweb"
cocoa = "minecraft:cocoa"
command_block = "minecraft:command_block"
comparator = "minecraft:comparator"
composter = "minecraft:composter"
conduit = "minecraft:conduit"
copper_block = "minecraft:copper_block"
copper_ore = "minecraft:copper_ore"
cornflower = "minecraft:cornflower"
cracked_deepslate_bricks = "minecraft:cracked_deepslate_bricks"
cracked_deepslate_tiles = "minecraft:cracked_deepslate_tiles"
cracked_nether_bricks = "minecraft:cracked_nether_bricks"
cracked_polished_blackstone_bricks = "minecraft:cracked_polished_blackstone_bricks"
cracked_stone_bricks = "minecraft:cracked_stone_bricks"
crafting_table = "minecraft:crafting_table"
creeper_head = "minecraft:creeper_head"
creeper_wall_head = "minecraft:creeper_wall_head"
crimson_button = "minecraft:crimson_button"
crimson_door = "minecraft:crimson_door"
crimson_fence = "minecraft:crimson_fence"
crimson_fence_gate = "minecraft:crimson_fence_gate"
crimson_fungus = "minecraft:crimson_fungus"
crimson_hyphae = "minecraft:crimson_hyphae"
crimson_nylium = "minecraft:crimson_nylium"
crimson_planks = | |
# Repository: stickyparticles/lumberyard
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# $Revision: #1 $
import argparse
import os
import traceback
import sys
import json
import imp
import project
import deployment
import mappings
import resource_group
import player_identity
import profile
import importer
import gem
import constant
import security
from config import ConfigContext
from errors import HandledError
from context import Context
from metrics import MetricsContext
from util import Args
from botocore.exceptions import NoCredentialsError
from botocore.exceptions import EndpointConnectionError
from botocore.exceptions import IncompleteReadError
from botocore.exceptions import ConnectionError
from botocore.exceptions import UnknownEndpointError
DEPRECATED_MSG = "DEPRECATED"
def main():
'''Main entry point for the lmbr_aws command line interface'''
args = None
try:
metricsInterface = MetricsContext('cli')
context = Context(metricsInterface)
__bootstrap_context(context)
# Deprecated in 1.9. TODO: remove.
context.hooks.call_module_handlers('cli-plugin-code/resource_commands.py', 'add_cli_view_commands',
args=[context.view],
deprecated=True)
context.hooks.call_module_handlers('resource-manager-code/command.py', 'add_cli_view_commands',
kwargs={
'view_context': context.view
}
)
parser = argparse.ArgumentParser(
prog = 'lmbr-aws',
description='Manage AWS resources used by a Lumberyard project.'
)
parser.register('action', 'parsers', AliasedSubParsersAction)
subparsers = parser.add_subparsers();
__add_built_in_commands(context, subparsers)
__add_hook_module_commands(context, subparsers)
try:
args = Args(**parser.parse_args().__dict__)
except:
metricsInterface.submit_command_error()
return constant.CLI_RETURN_ERROR_HANDLED_CODE
command_name = ''
for arg in sys.argv[1:]:
if arg.startswith('-'):
break
if command_name:
command_name = command_name + '-'
command_name = command_name + arg
metricsInterface.set_command_name(command_name)
metricsInterface.submit_attempt()
try:
context.initialize(args)
args.func(context, args)
metricsInterface.submit_success()
except KeyboardInterrupt:
metricsInterface.submit_interrupted()
raise
except:
metricsInterface.submit_failure()
raise
return constant.CLI_RETURN_OK_CODE
except KeyboardInterrupt:
return constant.CLI_RETURN_ERROR_HANDLED_CODE
except HandledError as e:
print '\nERROR: {0}'.format(e)
if '--verbose' in sys.argv:
traceback.print_exc()
return constant.CLI_RETURN_ERROR_HANDLED_CODE
except NoCredentialsError:
print '\nERROR: No AWS credentials were provided.'
if '--verbose' in sys.argv:
traceback.print_exc()
return constant.CLI_RETURN_ERROR_HANDLED_CODE
except (EndpointConnectionError, IncompleteReadError, ConnectionError, UnknownEndpointError) as e:
print '\nERROR: We were unable to contact your AWS endpoint.\n {0}'.format(e.message)
if '--verbose' in sys.argv:
traceback.print_exc()
return constant.CLI_RETURN_ERROR_HANDLED_CODE
except Exception as e:
print '\nERROR: An unexpected error has occured:\n'
traceback.print_exc()
return constant.CLI_RETURN_ERROR_UNHANDLED_CODE
def __bootstrap_context(context):
    '''Pre-initialize the context from a select subset of the command line.

    Only the bootstrap arguments (directory overrides, --verbose, --no-prompt)
    are parsed here, so that command line plugins can be located in file
    system locations that can be overridden by command line arguments before
    the full parser is built. Unknown arguments are ignored at this stage;
    the main parser handles them later.
    '''
    bootstrap_parser = argparse.ArgumentParser(add_help=False)
    __add_bootstrap_args(bootstrap_parser)
    known_args, _unknown = bootstrap_parser.parse_known_args()
    context.bootstrap(Args(**vars(known_args)))
def __add_common_args(parser, no_assume_role = False):
    '''Add the arguments accepted by most sub-commands to the given parser.

    Args:
        parser: the argparse parser (or sub-parser) to extend.
        no_assume_role: when True, omit the --assume-role option (used by
            commands that must run with the caller's own credentials, such
            as project stack creation).
    '''
    parser.add_argument('--aws-access-key', help='The AWS access key to use. The default value is taken from the ~/.aws/credentials file.')
    parser.add_argument('--aws-secret-key', help='The AWS secret key to use. The default value is taken from the ~/.aws/credentials file.')
    parser.add_argument('--profile', '-P', help='The AWS configuration profile in the ~/.aws/credentials and ~/.aws/config files to use.')
    if not no_assume_role:
        # Fixed typo in the help text: "asssume" -> "assume".
        parser.add_argument('--assume-role', '-R', metavar='ROLE-NAME', help='Specifies an IAM role that will be assumed when performing the requested actions. The credentials taken from the ~/.aws/credentials file must be able to assume this role.')
    __add_bootstrap_args(parser)
def __add_bootstrap_args(parser):
    '''Add the arguments used to pre-initialize the context.

    These options control where the CLI looks for the Lumberyard project
    files and plugins, so they must be parsable before the full command
    parser is constructed.
    '''
    directory_options = [
        ('--root-directory', dict(default=os.getcwd(), help='Lumberyard install directory and location of bootstrap.cfg file. Default is the current working directory.')),
        ('--game-directory', dict(help='Location of the game project directory. The default is {root-directory}\{game} where {game} is determined by the sys_game_folder setting in the {root-directory}\bootstrap.cfg file.')),
        ('--aws-directory', dict(help='Location of AWS configuration directory for the game. The default is {game-directory}\AWS.')),
        ('--user-directory', dict(help='Location of user cache directory. The default is {root-directory}\Cache\{game}\AWS where {game} is determined by the sys_game_folder setting in the {root-directory}\bootstrap.cfg file.')),
    ]
    for option_name, option_kwargs in directory_options:
        parser.add_argument(option_name, **option_kwargs)
    parser.add_argument('--verbose', action='store_true', help='Show additional output when executing commands.')
    parser.add_argument('--no-prompt', action='store_true', help='Special flag set automatically when entering from tests - calls which would raise an option for user input will instead raise an error')
def __add_project_stack_commands(stack_subparser):
    '''Register the project stack sub-commands (create, upload-resources,
    delete, list-resources, create-extension-template) on the given parser.
    '''
    # Help text shared verbatim by more than one sub-command.
    confirm_aws_usage_help = 'Confirms that you know this command will create AWS resources for which you may be charged and that it may perform actions that can affect permissions in your AWS account.'
    confirm_resource_deletion_help = 'Confirms that you know this command will permanently delete resources.'
    enable_capability_help = 'A list of capabilities that you must specify before AWS CloudFormation can create or update certain stacks. Some stack templates might include resources that can affect permissions in your AWS account. For those stacks, you must explicitly acknowledge their capabilities by specifying this parameter. Possible values include: CAPABILITY_IAM.'

    stack_subparser.register('action', 'parsers', AliasedSubParsersAction)
    commands = stack_subparser.add_subparsers(dest='subparser_name')

    # project create
    command = commands.add_parser('create', help='Creates the AWS resources needed for a Lumberyard project. If the {game}\AWS directory contains no resource definitions, default ones will be created.')
    command.add_argument('--stack-name', help='The name used for the project stack. The default is the name of the {game} directory.')
    command.add_argument('--confirm-aws-usage', '-C', action='store_true', help=confirm_aws_usage_help)
    command.add_argument('--confirm-security-change', action='store_true', help='Confirms that you know this command will make security changes.')
    command.add_argument('--files-only', action='store_true', help='Initializes the {game}\AWS directory and exit. If this option is given the project stack is not created. If the directory already exists and contains any files, no new files are created.')
    command.add_argument('--region', required=True, help='The AWS region where the project stack will be located.')
    __add_common_args(command, no_assume_role = True)
    command.set_defaults(func=project.create_stack)

    # project upload-resources (aliases: upload, update)
    command = commands.add_parser('upload-resources', aliases=['upload', 'update'], help='Updates the AWS resources used by Lumberyard project.')
    command.add_argument('--confirm-aws-usage', '-C', action='store_true', help=confirm_aws_usage_help)
    command.add_argument('--confirm-resource-deletion', action='store_true', help=confirm_resource_deletion_help)
    command.add_argument('--confirm-security-change', action='store_true', help='Confirms that you know this command will make security related changes.')
    command.add_argument('--enable-capability', nargs='+', metavar='CAPABILITY', help=enable_capability_help)
    __add_common_args(command)
    command.set_defaults(func=project.update_stack)

    # project delete
    command = commands.add_parser('delete', help='Deletes the AWS resources used by Lumberyard project. This command will not delete projects with deployments.')
    command.add_argument('--confirm-resource-deletion', action='store_true', help=confirm_resource_deletion_help)
    command.add_argument('--enable-capability', nargs='+', metavar='CAPABILITY', help=enable_capability_help)
    __add_common_args(command)
    command.set_defaults(func=project.delete_stack)

    # project list-resources
    command = commands.add_parser('list-resources')
    command.add_argument('--show-id', action='store_true', help='Include the AWS resource id in the output.')
    __add_common_args(command)
    command.set_defaults(func=project.list_project_resources)

    # project create-extension-template
    command = commands.add_parser('create-extension-template')
    command.add_argument('--project', action='store_true', help='Create a project-template-extensions.json file in the project directory.')
    command.add_argument('--deployment', action='store_true', help='Create a deployment-template-extensions.json file in the project directory.')
    command.add_argument('--deployment-access', action='store_true', help='Create a deployment-access-template-extensions.json file in the project directory.')
    __add_common_args(command)
    command.set_defaults(func=project.create_extension_template)
def __add_resource_group_commands(group_subparser):
group_subparser.register('action', 'parsers', AliasedSubParsersAction)
subparsers = group_subparser.add_subparsers(dest='subparser_name')
subparser = subparsers.add_parser('add', help='Add a set of related resources to the project.')
subparser.add_argument('--resource-group', '-r', required=True, metavar='GROUP', help='The name of the resource group.')
subparser.add_argument('--include-example-resources', action='store_true', help='Include "Hello World" example resources.')
subparser.add_argument('--gem', const='', default=None, action='store', nargs='?', metavar='GEM-PATH', help='Looks for resource group definition at Gems\GROUP\AWS or GEM-PATH\AWS if the optional GEM-PATH value is provided.')
__add_common_args(subparser)
subparser.set_defaults(func=resource_group.add)
subparser = subparsers.add_parser('remove', help='Remove a resource group from the project.')
subparser.add_argument('--resource-group', '-r', required=True, metavar='GROUP', help='The name of the resource group to remove.')
__add_common_args(subparser)
subparser.set_defaults(func=resource_group.remove)
subparser = subparsers.add_parser('list-resources', help='list all the resources for the specified resource group')
subparser.add_argument('--deployment', '-d', metavar='DEPLOYMENT', help='The name of the deployment that contains the resource group. If not specified, the default deployment is used.')
subparser.add_argument('--resource-group', '-r', required=True, help='The name of the resource group to describe')
subparser.add_argument('--show-id', action='store_true', help='Include the AWS resource id in the output.')
__add_common_args(subparser)
subparser.set_defaults(func=resource_group.list_resource_group_resources)
subparser = subparsers.add_parser('list', help='List the project''s resource groups.')
subparser.add_argument('--deployment', '-d', metavar='DEPLOYMENT', help='The name of the deployment used when determining the resource group status. If not specified, the default deployment is used.')
subparser.add_argument('--show-id', action='store_true', help='Include the AWS resource id in the output.')
__add_common_args(subparser)
subparser.set_defaults(func=resource_group.list)
subparser = subparsers.add_parser('upload-resources', aliases=['upload', 'update'], help='Uploads and applies changes made to local resource-template.json files.')
subparser.add_argument('--deployment', '-d', metavar='DEPLOYMENT', help='The deployment to update. If not specified the default deployment is updated.')
subparser.add_argument('--resource-group', '-r', metavar='GROUP', help='The name of the resource group to update. If not specified, all the resource groups in the deployment are updated.')
subparser.add_argument('--confirm-aws-usage', '-C', action='store_true', help='Confirms that you know this command will create AWS resources for which you may be charged and that it may perform actions that can affect permissions in your AWS account.')
subparser.add_argument('--confirm-resource-deletion', action='store_true', help='Confirms that you know this command will permanently delete resources.')
subparser.add_argument('--confirm-security-change', action='store_true', help='Confirms that you know this command will make security related changes.')
__add_common_args(subparser)
subparser.set_defaults(func=deployment.upload_resources)
subparser = | |
<reponame>obo/loev3go<gh_stars>1-10
"""
interpreter for pylogo
<NAME> <<EMAIL>>
A Logo interpreter.
"""
# python2-3 compatibility
from future.utils import raise_
from past.builtins import basestring # pip install future
from types import *
from pylogo import reader
import inspect, os, sys
from pylogo.common import *
from pylogo.objectintrospect import getlogoattr, update_logo_attrs
import imp
import threading
class Interpreter(object):
"""
The interpreter gets tokens (from a reader.TrackingStream) and
runs them. It holds the namespace, which is dynamically scoped.
You execute one expression by calling interpreter.expr(tokenizer),
where tokenizer may be reader.TrackingStream or other tokenizer
instance. It returns the value of the expression.
The RootFrame and Frame subclasses implement the namespace
operations (this class is abstract).
"""
special_forms = {}
"Methods register themselves using this dictionary"
def special(names, special_forms=special_forms):
def decorator(func):
if isinstance(names, basestring):
all_names = [names]
else:
all_names = names
for name in all_names:
special_forms[name] = func
return func
return decorator
def __init__(self, tokenizer=None):
self.tokenizers = []
if tokenizer is not None:
self.tokenizers.append(tokenizer)
self.actors = []
    def tokenizer__get(self):
        """
        Gets the current (innermost) tokenizer.

        Tokenizers are stacked by ``push_tokenizer``; the active one is
        always the most recently pushed.  Raises ``IndexError`` if the
        stack is empty.
        """
        return self.tokenizers[-1]
    tokenizer = property(tokenizer__get)
    def push_tokenizer(self, tokenizer):
        """
        Make *tokenizer* the active tokenizer.

        You can stack up multiple tokenizers, as the interpreter goes
        from evaluating a file to a list to a sublist, etc.  New
        interpreters are created for a new scope.
        """
        #print "Pushing %r onto %r" % (tokenizer, self)
        self.tokenizers.append(tokenizer)
    def pop_tokenizer(self):
        """Discard the active tokenizer, reactivating the previous one."""
        #print "Popping %r from %r" % (self.tokenizers[-1], self)
        self.tokenizers.pop()
def expr(self):
"""
Top level expression-getter/evaluator (see also expr_top).
"""
try:
val = self.expr_without_error()
except LogoError as e:
# This is used for creating the traceback
e.set_frame(self)
raise
except (LogoControl, SystemExit, KeyboardInterrupt,
StopIteration):
# These exceptions are mostly harmless
raise
except Exception as e:
# Here we wrap other exceptions... this needs some work
#import traceback
#traceback.print_exc()
exc_info = sys.exc_info()
# @@: should add the exception traceback to this somehow
newExc = LogoError(str(e), description=str(e))
newExc.set_frame(self)
# raise_(LogoError, newExc, exc_info[2])
raise LogoError(str(e))
return val
def expr_top(self):
"""
Unlike expr(), this ignores empty lines; should only be used
in top-level expressions (including expressions taken from
lists).
"""
try:
p = self.tokenizer.peek()
except StopIteration:
p = None
if p == '\n':
self.tokenizer.next()
return None
elif p == ';':
while 1:
p = self.tokenizer.next()
if p == '\n':
break
return None
elif p is EOF:
return EOF
return self.expr()
    def expr_without_error(self):
        """
        Get a full expression from the tokenizer, execute it, and
        return the value.
        expr ::= exprInner <operator> exprInner
             ::= exprInner
        Infix operators are applied strictly left-to-right; there is
        no operator precedence (see the @@ note below).
        """
        while 1:
            # Strip out any comments:
            # (typically the reader would do this, but we do it more
            # lazily so we can get the comments if we want them)
            p = self.tokenizer.peek()
            if p == ';':
                while 1:
                    p = self.tokenizer.next()
                    if p == '\n':
                        break
            else:
                break
        val = self.expr_inner()
        while 1:
            # Check if there's any infix operators:
            p = self.tokenizer.peek()
            if p not in ['/', '*', '+', '-', '>', '<', '=',
                         '>=', '=>', '<=', '=<', '<>']:
                break
            self.tokenizer.next()
            e = self.expr_inner()
            # @@: no order of precedence
            if p == '/':
                # Division is always true (float) division.
                val = float(val) / e
            elif p == '*':
                val *= e
            elif p == '-':
                val -= e
            elif p == '+':
                # NOTE(review): += extends a list in place; presumably
                # safe because val is a fresh expression result — confirm.
                val += e
            elif p == '<':
                val = val < e
            elif p == '>':
                val = val > e
            elif p == '=':
                # Logo '=' is equality comparison, not assignment.
                val = val == e
            elif p == '>=' or p == '=>':
                val = val >= e
            elif p == '<=' or p == '=<':
                val = val <= e
            elif p == '<>':
                val = val != e
            else:
                assert 0, "Unknown symbol: %s" % p
        return val
def expr_inner(self, apply=None, get_function=None,
get_variable=None):
"""
An 'inner' expression, an expression that does not include
infix operators.
::
exprInner ::= <literal int or float>
::= '-' expr
::= '+' expr
::= ('\"' or 'QUOTE') <word>
::= ':' <word>
::= MAKE (':' or '\"') <word> expr
::= MAKE <word> expr
::= TO <to expression>
::= '[' <list expression> ']'
::= '(' <word> <expr> ... <expr> ')'
::= <word> <expr> ... <expr>
Things to note:
* ``MAKE :x 10``, ``MAKE \"x 10``, and ``MAKE x 10`` all work
equivalently (make is a special form, unlike in UCBLogo).
* <list expression> is a nested list of tokens.
* <to expression> is TO func_name var1 var2 <int>, where <int>
is the default arity (number of variables). Variables, like
with make, can be prefixed with : or \", but need not be.
* () is not used to force precedence, but to force execution
with a specific arity. In other words, () works like Lisp.
"""
tok = self.tokenizer.next()
if apply is None:
apply = self.apply
if get_function is None:
get_function = self.get_function
if get_variable is None:
get_variable = self.get_variable
if tok == '\n':
raise LogoEndOfLine("The end of the line was not expected")
return self.expr_inner()
elif tok is EOF:
raise LogoEndOfCode("The end of the code block was not expected")
elif not isinstance(tok, basestring):
# Some other fundamental type (usually int or float)
return tok
elif tok == '-':
# This works really poorly in practice, because "-" usually
# gets interpreted as an infix operator.
return -self.expr()
elif tok == '+':
return self.expr()
elif tok in ('/', '*'):
raise LogoError("Operator not expected: %s" % tok)
elif tok == '"' or tok.lower() == 'quote':
tok = self.tokenizer.next()
return tok
elif tok == ':':
tok = self.tokenizer.next()
return get_variable(tok)
elif tok == '[':
self.tokenizer.push_context('[')
result = self.expr_list()
self.tokenizer.pop_context()
return result
elif tok == ';':
while 1:
tok = self.tokenizer.next()
if tok == '\n' or tok is EOF:
break
elif tok == '(':
self.tokenizer.push_context('(')
try:
func = self.tokenizer.peek()
if not reader.is_word(func):
# We don't actually have a function call then, but
# just a sub-expression.
val = self.expr()
if not self.tokenizer.next() == ')':
raise LogoSyntaxError("')' expected")
return val
else:
self.tokenizer.next()
if func.lower() in self.special_forms:
special_form = self.special_forms[func.lower()]
val = special_form(self, greedy=True)
next_tok = self.tokenizer.next()
if next_tok != ')':
raise LogoSyntaxError("')' expected")
return val
else:
args = []
while 1:
tok = self.tokenizer.peek()
if tok == ')':
break
elif tok == '\n':
self.tokenizer.next()
continue
elif tok is EOF:
raise LogoEndOfCode("Unexpected end of code (')' expected)")
args.append(self.expr())
val = apply(get_function(func), args)
if not self.tokenizer.next() == ')':
raise LogoSyntaxError("')' was expected.")
finally:
self.tokenizer.pop_context()
return val
else:
if not reader.is_word(tok):
raise LogoSyntaxError("Unknown token: %r" % tok)
if tok.lower() in self.special_forms:
special_form = self.special_forms[tok.lower()]
val = special_form(self, greedy=False)
return val
else:
func_name = tok
func = get_function(func_name)
n = arity(func)
self.tokenizer.push_context('func')
try:
args = []
# -1 arity means the function is greedy
if n == -1:
while 1:
tok = self.tokenizer.peek()
if tok == '\n' or tok is EOF:
self.tokenizer.next()
break
args.append(self.expr())
else:
for i in range(n):
try:
args.append(self.expr())
except (LogoEndOfCode, LogoEndOfLine):
raise LogoEndOfCode(
"Not enough arguments provided to %s: got %i and need %i" % (func_name, i, n))
finally:
self.tokenizer.pop_context()
return apply(func, args)
@special('make')
def special_make(self, greedy):
"""
The special MAKE form (special because a variable in the
first argument isn't evaluated).
"""
tok = self.tokenizer.next()
if tok in ('"', ':'):
tok = self.tokenizer.next()
self.set_variable(tok, self.expr())
@special('localmake')
def special_localmake(self, greedy):
"""
The special LOCALMAKE form
"""
tok = self.tokenizer.next()
if tok in ('"', ':'):
tok = self.tokenizer.next()
self.set_variable_local(tok, self.expr())
    @special('to')
    def special_to(self, greedy):
        """
        The special TO form: reads ``TO name var ... [default-arity]``,
        collects the body tokens until an END that starts a line, and
        binds the resulting UserFunction under the lowercased name.
        """
        # NOTE(review): a 'to' context is pushed but never popped in
        # this method; presumably the tokenizer resets it — confirm.
        self.tokenizer.push_context('to')
        vars = []
        default = None
        name = self.tokenizer.next()
        while 1:
            tok = self.tokenizer.next()
            if tok == '\n':
                break
            elif tok == '"' or tok == ':':
                # Optional prefix on a parameter name; ignored.
                continue
            elif type(tok) is int:
                # A bare integer sets the default arity.
                default = tok
                continue
            vars.append(tok)
        body = []
        # END can only occur immediately after a newline, so we keep track
        lastNewline = False
        while 1:
            tok = self.tokenizer.next()
            if (lastNewline and isinstance(tok, str)
                    and tok.lower() == 'end'):
                break
            lastNewline = (tok == '\n')
            if tok is EOF:
                raise LogoEndOfCode("The end of the file was not expected in a TO; use END")
            body.append(tok)
        func = UserFunction(name, vars, default, body)
        self.set_function(name.lower(), func)
| |
they will return the same hash code
"""
return self.handle.hashCode()
    @property
    def isdefault(self):
        """bool: True if the node is created automatically by Maya.
        Wraps ``MFnDependencyNode.isDefaultNode``.
        """
        return self.fn.isDefaultNode
    @property
    def isreferenced(self):
        """bool: True if the node comes from a referenced file.
        Wraps ``MFnDependencyNode.isFromReferencedFile``.
        """
        return self.fn.isFromReferencedFile
# Read write properties ---
    @property
    def name(self):
        """str: The name of the node."""
        return self.fn.name()
    @name.setter
    def name(self, value):
        # Rename via the maya.cmds command rather than the API
        # (presumably so the rename is undoable — confirm).
        cmds.rename(self.name, value)
    @property
    def lock(self):
        """bool: The lock state of the node.
        A locked node means that it cannot be deleted, repaired or renamed.
        It is also not possible to create, edit or delete their attributes.
        """
        return self.fn.isLocked
    @lock.setter
    def lock(self, value):
        # Delegates to the maya.cmds command (presumably so the change
        # is undoable — confirm).
        cmds.lockNode(self.name, lock=value)
# Public methods ---
def duplicate(self, name=None):
"""Duplicate the node.
Examples:
>>> newscene()
>>> a = create("transform", name="A")
>>> b = a.duplicate("B")
>>> b
<DagNode 'B' type::transform>
>>> a != b
True
Arguments:
name (str): The name to give to the duplicate node.
Returns:
DependencyNode: The instance of the duplicate node.
"""
return encode(cmds.duplicate(self.name, name=name)[0])
    def delete(self):
        """Delete the node.
        Warning:
            Even if the node is deleted, its instance still exists in memory.
            Attempting to access a deleted node may cause a crash.
        Examples:
            >>> newscene()
            >>> node = create("transform")
            >>> exists(node)
            True
            >>> node.delete()
            >>> exists(node)
            False
        """
        # Delegates to the maya.cmds command; the wrapper itself is not
        # invalidated (see the warning above).
        cmds.delete(self.name)
def findplug(self, attribute):
"""Find a plug from an attribute name.
Examples:
>>> newscene()
>>> node = create("transform", name="A")
>>> node.findplug("message")
<Plug 'A.message' type::message>
>>> node.findplug("unknown")
Traceback (most recent call last):
...
ValueError
Arguments:
attribute (str): The name of the attribute to search for.
Returns:
Plug: The instance of the plug.
Raises:
ValueError: The attribute does not exists on the node.
"""
LOG.debug("Acess '%s.%s'", self.name, attribute)
try:
return Plug(self.fn.findPlug(attribute, False))
except RuntimeError:
message = "The plug '{}.{}' does not exists."
raise ValueError(message.format(self, attribute))
    def history(self, filter=None):
        """Search in the node history.
        Walks the dependency graph upstream (see ``_related``).
        Arguments:
            filter (str, tuple): Optional node type(s) to keep.
        Returns:
            generator: Yields each upstream node.
        """
        return self._related(OpenMaya.MItDependencyGraph.kUpstream, filter)
    def future(self, filter=None):
        """Search in the future of the node.
        Walks the dependency graph downstream (see ``_related``).
        Arguments:
            filter (str, tuple): Optional node type(s) to keep.
        Returns:
            generator: Yields each downstream node.
        """
        return self._related(OpenMaya.MItDependencyGraph.kDownstream, filter)
def istype(self, filter, strict=False):
"""Check the type of the node.
Arguments:
filter (str, tuple): The node(s) that should match with self.
strict (bool): If `True`, does not check for inherited types and
return `True` only if self has the exact same type as the on of
the specified filter.
Returns:
bool: `True` if self match the filter otherwise `False`.
"""
if strict:
return self.type in filter
if isinstance(filter, _STRING_TYPES):
filter = [filter]
return any(x in self.inherited for x in filter)
# Private methods ---
    def _related(self, direction, filter=None):
        """Retrieve nodes through the dependency graph.
        Arguments:
            direction: ``MItDependencyGraph.kUpstream`` or ``kDownstream``.
            filter (str, tuple): Optional node type(s) to keep.
        Yield:
            DependencyNode: The next related node.
        """
        iterator = OpenMaya.MItDependencyGraph(
            self.object,
            direction,
            traversal=OpenMaya.MItDependencyGraph.kDepthFirst,
            level=OpenMaya.MItDependencyGraph.kNodeLevel,
        )
        # Skip self (the iterator starts on the node itself).
        iterator.next()
        while not iterator.isDone():
            node = encode(iterator.currentNode())
            if filter is None or node.type in filter:
                yield node
            iterator.next()
class DagNode(DependencyNode):
"""A Directed Acyclic Graph (DAG) node."""
_class = OpenMaya.MFnDagNode
_identifier = OpenMaya.MFn.kDagNode
    def __len__(self):
        """int: The number of children of the node (see ``childcount``)."""
        return self.childcount
    def __iter__(self):
        """Iterate over the direct children of the node."""
        return self.children()
    def __init__(self, mobject):
        super(DagNode, self).__init__(mobject)
        # Cache a DAG path to the node; getAPathTo returns one path
        # (presumably the first when the node is instanced — confirm).
        self._dagpath = OpenMaya.MDagPath.getAPathTo(self.object)
# Read properties ---
    @property
    def dagpath(self):
        """MDagPath: The dag path instance associated to the node.
        Cached once in ``__init__``.
        """
        return self._dagpath
    @property
    def path(self):
        """str: The path of the attached object from the root of the DAG.
        Wraps ``MFnDagNode.fullPathName``.
        """
        return self.fn.fullPathName()
    @property
    def childcount(self):
        """int: The number of children of the node."""
        return self.fn.childCount()
# Public methods ---
def root(self):
"""The root node of the first path leading to this node.
Examples:
>>> newscene()
>>> a = create("transform", name="A")
>>> b = create("transform", name="B")
>>> c = create("transform", name="C")
>>> a.addchild(b)
>>> b.addchild(c)
>>> c.root()
<DagNode 'A' type::transform>
Returns:
DagNode: The root node.
"""
parents = list(self.parents())
if len(parents) > 0:
return parents[-1]
return None
    def parents(self, filter=None, strict=False):
        """Find the parents nodes.
        Examples:
            >>> newscene()
            >>> a = create("transform", name="A")
            >>> b = create("transform", name="B")
            >>> c = create("transform", name="C")
            >>> a.addchild(b)
            >>> b.addchild(c)
            >>> list(c.parents())
            [<DagNode 'B' type::transform>, <DagNode 'A' type::transform>]
        Arguments:
            filter (str, tuple): Filter the returned node types.
            strict (bool): If `True`, does not check for inherited types and
                return `True` only if self has the exact same type as one of
                the specified filter.
        Yield:
            DagNode: The next parent node.
        """
        # The `parentCount` and `parent` (with an index other than 0)
        # methods do not appear to work reliably, so walk up the DAG
        # via parent(0) until the world node is reached.
        mobject = self.fn.parent(0)
        while mobject.apiType() != OpenMaya.MFn.kWorld:
            parent = encode(mobject)
            if _match_filter(parent, filter, strict):
                yield parent
            mobject = parent.fn.parent(0)
def parent(self, index=None):
"""Find a parent node.
Examples:
>>> newscene()
>>> a = create("transform", name="A")
>>> b = create("transform", name="B")
>>> a.addchild(b)
>>> b.parent()
<DagNode 'A' type::transform>
Arguments:
index (int): The index of the parent to find.
Returns:
DagNode: The parent node.
Raises:
DagError: The parent at the speicified index is inaccessible.
"""
try:
parents = list(self.parents())
return parents[index or 0]
except IndexError:
if index is None:
return None
msg = "The parent node at the index '{}' is inaccessible."
raise DagError(msg.format(index))
    def siblings(self, filter=None, strict=False):
        """Find the sibling nodes.
        Examples:
            >>> newscene()
            >>> a = create("transform", name="A")
            >>> b = create("transform", name="B")
            >>> c = create("transform", name="C")
            >>> d = create("transform", name="D")
            >>> a.addchildren(b, c, d)
            >>> list(b.siblings())
            [<DagNode 'C' type::transform>, <DagNode 'D' type::transform>]
        Arguments:
            filter (str, tuple): Filter the returned node types.
            strict (bool): If `True`, does not check for inherited types and
                return `True` only if self has the exact same type as one of
                the specified filter.
        Yield:
            DagNode: The next sibling node.
        """
        parent = self.parent()
        if parent is None:
            # Top-level node: the siblings are the other scene assemblies.
            nodes = ls(assemblies=True)
        else:
            nodes = parent.children()
        for node in nodes:
            if node != self and _match_filter(node, filter, strict):
                yield node
def sibling(self, index=None):
"""Find a sibling node.
Examples:
>>> newscene()
>>> a = create("transform", name="A")
>>> b = create("transform", name="B")
>>> c = create("transform", name="C")
>>> a.addchildren(b, c)
>>> b.sibling()
<DagNode 'C' type::transform>
Arguments:
index (int): The index of the sibling to find.
Returns:
DagNode: The sibling node.
Raises:
DagError: The sibling at the speicified index is inaccessible.
"""
try:
siblings = list(self.siblings())
return siblings[index or 0]
except IndexError:
if index is None:
return None
msg = "The sibling node at the index '{}' is inaccessible."
raise DagError(msg.format(index))
def shapes(self, filter=None, strict=False):
"""Find the shape nodes.
Arguments:
filter (str, tuple): Filter the returned node types.
strict (bool): If `True`, does not check for inherited types and
return `True` only if self has the exact same type as the on of
the specified filter.
Yield:
Shape: The next shape node.
"""
for index in range(self.fn.childCount()):
obj = self.fn.child(index)
if obj.hasFn(OpenMaya.MFn.kShape):
child = encode(obj)
if _match_filter(child, filter, strict):
yield child
def shape(self, index=None):
"""Find a shape node.
Arguments:
index (int): The index of the shape to find.
Returns:
Shape: The shape node.
Raises:
DagError: The shape at the speicified index is inaccessible.
"""
try:
shapes = list(self.shapes())
return shapes[index or 0]
except IndexError:
if index is None:
return None
msg = "The shape node at the index '{}' is inaccessible."
raise DagError(msg.format(index))
def children(self, recurse=False, shape=False, filter=None, strict=False):
"""Find the child nodes.
Arguments:
recurse (bool): Include all descendants in the yielded nodes
instead of the just the children.
shape (bool): Include the shapes in the yielded nodes.
filter (str, tuple): Filter the returned node types.
strict (bool): If `True`, does not check for inherited types and
return `True` only if self has the exact same type as the on of
the specified filter.
Yield:
DagNode: The next child node.
"""
for index in range(self.fn.childCount()):
child = encode(self.fn.child(index))
if _match_filter(child, filter, strict):
if not (child.object.hasFn(OpenMaya.MFn.kShape) and not shape):
yield child
if recurse:
for each in child.children(recurse=True, filter=filter):
yield each
def child(self, index=None):
"""Find a child node.
Arguments:
index (int): The index of the child to find.
Returns:
DagNode: The child node.
Raises:
DagError: The child at the speicified index is inaccessible.
"""
try:
children = list(self.children())
return children[index or 0]
except IndexError:
if index is None:
return None
msg = "The sibling node at the index '{}' is inaccessible."
raise DagError(msg.format(index))
def | |
"Type of Set Aside",
"Evaluated Preference Code",
"Evaluated Preference",
"Research Code",
"Research",
"Fair Opportunity Limited Sources Code",
"Fair Opportunity Limited Sources",
"Other than Full and Open Competition Code",
"Other than Full and Open Competition",
"Number of Offers Received",
"Commercial Item Acquisition Procedures Code",
"Commercial Item Acquisition Procedures",
"Small Business Competitiveness Demonstration Program",
"Commercial Item Test Program Code",
"Commercial Item Test Program",
"A-76 FAIR Act Action Code",
"A-76 FAIR Act Action",
"FedBizOpps Code",
"FedBizOpps",
"Local Area Set Aside Code",
"Local Area Set Aside",
"Price Evaluation Adjustment Preference Percent Difference",
"Clinger-Cohen Act Planning Compliance Code",
"Clinger-Cohen Act Planning Compliance",
"Materials Supplies Articles Equipment Code",
"Materials Supplies Articles Equipment",
"Labor Standards Code",
"Labor Standards",
"Construction Wage Rate Requirements Code",
"Construction Wage Rate Requirements",
"Interagency Contracting Authority Code",
"Interagency Contracting Authority",
"Other Statutory Authority",
"Program Acronym",
"Parent Award Type Code",
"Parent Award Type",
"Parent Award Single or Multiple Code",
"Parent Award Single or Multiple",
"Major Program",
"National Interest Action Code",
"National Interest Action",
"Cost or Pricing Data Code",
"Cost or Pricing Data",
"Cost Accounting Standards Clause Code",
"Cost Accounting Standards Clause",
"GFE and GFP Code",
"GFE and GFP",
"Sea Transportation Code",
"Sea Transportation",
"Undefinitized Action Code",
"Undefinitized Action",
"Consolidated Contract Code",
"Consolidated Contract",
"Performance-Based Service Acquisition Code",
"Performance-Based Service Acquisition",
"Multi Year Contract Code",
"Multi Year Contract",
"Contract Financing Code",
"Contract Financing",
"Purchase Card as Payment Method Code",
"Purchase Card as Payment Method",
"Contingency Humanitarian or Peacekeeping Operation Code",
"Contingency Humanitarian or Peacekeeping Operation",
"Alaskan Native Owned Corporation or Firm",
"American Indian Owned Business",
"Indian Tribe Federally Recognized",
"Native Hawaiian Owned Business",
"Tribally Owned Business",
"Veteran Owned Business",
"Service Disabled Veteran Owned Business",
"Woman Owned Business",
"Women Owned Small Business",
"Economically Disadvantaged Women Owned Small Business",
"Joint Venture Women Owned Small Business",
"Joint Venture Economically Disadvantaged Women Owned Small Business",
"Minority Owned Business",
"Subcontinent Asian Asian - Indian American Owned Business",
"Asian Pacific American Owned Business",
"Black American Owned Business",
"Hispanic American Owned Business",
"Native American Owned Business",
"Other Minority Owned Business",
"Contracting Officer's Determination of Business Size",
"Contracting Officer's Determination of Business Size Code",
"Emerging Small Business",
"Community Developed Corporation Owned Firm",
"Labor Surplus Area Firm",
"U.S. Federal Government",
"Federally Funded Research and Development Corp",
"Federal Agency",
"U.S. State Government",
"U.S. Local Government",
"City Local Government",
"County Local Government",
"Inter-Municipal Local Government",
"Local Government Owned",
"Municipality Local Government",
"School District Local Government",
"Township Local Government",
"U.S. Tribal Government",
"Foreign Government",
"Corporate Entity Not Tax Exempt",
"Corporate Entity Tax Exempt",
"Partnership or Limited Liability Partnership",
"Sole Proprietorship",
"Small Agricultural Cooperative",
"International Organization",
"U.S. Government Entity",
"Community Development Corporation",
"Domestic Shelter",
"Educational Institution",
"Foundation",
"Hospital Flag",
"Manufacturer of Goods",
"Veterinary Hospital",
"Hispanic Servicing Institution",
"Receives Contracts",
"Receives Grants",
"Receives Contracts and Grants",
"Airport Authority",
"Council of Governments",
"Housing Authorities Public/Tribal",
"Interstate Entity",
"Planning Commission",
"Port Authority",
"Transit Authority",
"Subchapter S Corporation",
"Limited Liability Corporation",
"Foreign Owned and Located",
"For Profit Organization",
"Nonprofit Organization",
"Other Not For Profit Organization",
"The AbilityOne Program",
"Private University or College ",
"State Controlled Institution of Higher Learning",
"1862 Land Grant College",
"1890 Land Grant College",
"1994 Land Grant College",
"Minority Institution",
"Historically Black College or University",
"Tribal College",
"Alaskan Native Servicing Institution",
"Native Hawaiian Servicing Institution",
"School of Forestry",
"Veterinary College",
"DoT Certified Disadvantaged Business Enterprise",
"Self-Certified Small Disadvantaged Business",
"Small Disadvantaged Business",
"8a Program Participant",
"Historically Underutilized Business Zone HUBZone Firm",
"SBA Certified 8a Joint Venture",
"Last Modified Date",
"URI",
"SAI Number",
"Federal Action Obligation",
"Non Federal Funding Amount",
"Total Funding Amount",
"Face Value of Loan",
"Original Subsidy Cost",
"Action Date",
"Period of Performance Start Date",
"Period of Performance Current End Date",
"Awarding Agency Code",
"Awarding Agency Name",
"Awarding Sub Agency Code",
"Awarding Sub Agency Name",
"Awarding Office Code",
"Awarding Office Name",
"Funding Agency Code",
"Funding Agency Name",
"Funding Sub Agency Code",
"Funding Sub Agency Name",
"Funding Office Code",
"Funding Office Name",
"Recipient DUNS",
"Recipient Name",
"Recipient Country Code",
"Recipient Country Name",
"Recipient Address Line 1",
"Recipient Address Line 2",
"Recipient Address Line 3",
"Recipient City Code",
"Recipient City Name",
"Recipient Country Code",
"Recipient Country Name",
"Recipient State Code",
"Recipient State Name",
"Recipient Zip Code",
"Recipient Zip Last 4 Code",
"Recipient Congressional District",
"Recipient Foreign City Name",
"Recipient Foreign Province Name",
"Recipient Foreign Postal Code",
"Primary Place of Performance Country Code",
"Primary Place of Performance Country Name",
"Primary Place of Performance Code",
"Primary Place of Performance City Name",
"Primary Place of Performance County Code",
"Primary Place of Performance County Name",
"Primary Place of Performance State Name",
"Primary Place of Performance Zip+4",
"Primary Place of Performance Congressional District",
"Primary Place of Performance Foreign Location",
"CFDA Number",
"CFDA Title",
"Assistance Type Code",
"Assistance Type",
"Award Description",
"Business Funds Indicator Code",
"Business Funds Indicator",
"Business Types Code",
"Business Types",
"Action Type Code",
"Action Type",
"Record Type Code",
"Record Type",
"Submitted Type",
"Fiscal Year and Quarter Correction",
"Last Modified Date"
]
award_unique_columns = {
"1862 Land Grant College",
"1890 Land Grant College",
"1994 Land Grant College",
"8a Program Participant",
"A-76 FAIR Act Action Code",
"A-76 FAIR Act Action",
"Action Date",
"Action Type Code",
"Action Type",
"Airport Authority",
"Alaskan Native Owned Corporation or Firm",
"Alaskan Native Servicing Institution",
"American Indian Owned Business",
"Asian Pacific American Owned Business",
"Assistance Type Code",
"Assistance Type",
"Award Description",
"Award ID",
"Award Type Code",
"Award Type",
"Award or Parent Award Flag",
"Awarding Agency Code",
"Awarding Agency Name",
"Awarding Office Code",
"Awarding Office Name",
"Awarding Sub Agency Code",
"Awarding Sub Agency Name",
"Black American Owned Business",
"Business Funds Indicator Code",
"Business Funds Indicator",
"Business Types Code",
"Business Types",
"CFDA Number",
"CFDA Title",
"Change in Current Award Amount",
"Change in Potential Award Amount",
"City Local Government",
"Clinger-Cohen Act Planning Compliance Code",
"Clinger-Cohen Act Planning Compliance",
"Commercial Item Acquisition Procedures Code",
"Commercial Item Acquisition Procedures",
"Commercial Item Test Program Code",
"Commercial Item Test Program",
"Community Developed Corporation Owned Firm",
"Community Development Corporation",
"Consolidated Contract Code",
"Consolidated Contract",
"Contingency Humanitarian or Peacekeeping Operation Code",
"Contingency Humanitarian or Peacekeeping Operation",
"Contract Bundling Code",
"Contract Bundling",
"Contract Financing Code",
"Contract Financing",
"Contracting Officer's Determination of Business Size Code",
"Contracting Officer's Determination of Business Size",
"Corporate Entity Not Tax Exempt",
"Corporate Entity Tax Exempt",
"Cost Accounting Standards Clause Code",
"Cost Accounting Standards Clause",
"Cost or Pricing Data Code",
"Cost or Pricing Data",
"Council of Governments",
"Country of Product or Service Origin Code",
"Country of Product or Service Origin",
"County Local Government",
"Construction Wage Rate Requirements Code",
"Construction Wage Rate Requirements",
"DoD Acquisition Program Code",
"DoD Acquisition Program Description",
"DoD Claimant Program Code",
"DoD Claimant Program Description",
"DoT Certified Disadvantaged Business Enterprise",
"Domestic Shelter",
"Domestic or Foreign Entity Code",
"Domestic or Foreign Entity",
"EPA-Designated Product Code",
"EPA-Designated Product",
"Economically Disadvantaged Women Owned Small Business",
"Educational Institution",
"Emerging Small Business",
"Evaluated Preference Code",
"Evaluated Preference",
"Extent Competed Code",
"Extent Competed",
"Face Value of Loan",
"Fair Opportunity Limited Sources Code",
"Fair Opportunity Limited Sources",
"FedBizOpps Code",
"FedBizOpps",
"Federal Action Obligation",
"Federal Agency",
"Federally Funded Research and Development Corp",
"Fiscal Year and Quarter Correction",
"For Profit Organization",
"Foreign Funding",
"Foreign Government",
"Foreign Owned and Located",
"Foundation",
"Funding Agency Code",
"Funding Agency Name",
"Funding Office Code",
"Funding Office Name",
"Funding Sub Agency Code",
"Funding Sub Agency Name",
"GFE and GFP Code",
"GFE and GFP",
"Hispanic American Owned Business",
"Hispanic Servicing Institution",
"Historically Black College or University",
"Historically Underutilized Business Zone HUBZone Firm",
"Hospital Flag",
"Housing Authorities Public/Tribal",
"IDV Type Code",
"IDV Type",
"Indian Tribe Federally Recognized",
"Information Technology Commercial Item Category Code",
"Information Technology Commercial Item Category",
"Inter-Municipal Local Government",
"Interagency Contracting Authority Code",
"Interagency Contracting Authority",
"International Organization",
"Interstate Entity",
"Joint Venture Economically Disadvantaged Women Owned Small Business",
"Joint Venture Women Owned Small Business",
"Labor Surplus Area Firm",
"Last Modified Date",
"Limited Liability Corporation",
"Local Area Set Aside Code",
"Local Area Set Aside",
"Local Government Owned",
"Major Program",
"Manufacturer of Goods",
"Minority Institution",
| |
<gh_stars>0
"""
This module contains the `Mesh` class.
"""
from __future__ import absolute_import
from pyaedt.generic.general_methods import aedt_exception_handler, generate_unique_name, MethodNotSupportedError
from pyaedt.generic.DataHandlers import _dict2arg
from collections import OrderedDict
# Map each AEDT design type to the name of the design module that exposes its
# mesh functions; Mesh.__init__ resolves it via oDesign.GetModule(...).
meshers = {
    "HFSS": "MeshSetup",
    "Icepak": "MeshRegion",
    "HFSS3DLayout": "MeshSetup",
    "Maxwell 2D": "MeshSetup",
    "Maxwell 3D": "MeshSetup",
    "Q3D Extractor": "MeshSetup",
    "Mechanical": "MeshSetup",
    "2D Extractor": "MeshSetup",
}
class MeshOperation(object):
    """Wrapper over a single AEDT mesh operation.

    Parameters
    ----------
    meshicepak : :class:`pyaedt.modules.MeshIcepak.MeshIcepak`
        Parent mesh object; provides access to the AEDT mesh module.
    name : str
        Name of the mesh operation.
    props : dict
        Properties of the mesh operation, passed verbatim to AEDT.
    meshoptype : str
        Operation type, for example ``"SurfApproxBased"`` or ``"LengthBased"``.
    """

    def __init__(self, meshicepak, name, props, meshoptype):
        self._meshicepak = meshicepak
        self.name = name
        self.props = props
        self.type = meshoptype

    @aedt_exception_handler
    def _get_args(self):
        """Build the AEDT argument list ``["NAME:<name>", <flattened props>]``."""
        arg = ["NAME:" + self.name]
        _dict2arg(self.props, arg)
        return arg

    @aedt_exception_handler
    def create(self):
        """Create the mesh operation in AEDT.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when the operation type is
            not recognized.
        """
        if self.type == "SurfApproxBased":
            self._meshicepak.omeshmodule.AssignTrueSurfOp(self._get_args())
        elif self.type == "DefeatureBased":
            self._meshicepak.omeshmodule.AssignModelResolutionOp(self._get_args())
        elif self.type == "SurfaceRepPriority":
            self._meshicepak.omeshmodule.AssignSurfPriorityForTauOp(self._get_args())
        elif self.type == "LengthBased":
            self._meshicepak.omeshmodule.AssignLengthOp(self._get_args())
        elif self.type == "SkinDepthBased":
            self._meshicepak.omeshmodule.AssignSkinDepthOp(self._get_args())
        elif self.type == "Curvilinear":
            self._meshicepak.omeshmodule.AssignApplyCurvlinearElementsOp(self._get_args())
        elif self.type == "RotationalLayerMesh":
            self._meshicepak.omeshmodule.AssignRotationalLayerOp(self._get_args())
        elif self.type == "DensityControlBased":
            self._meshicepak.omeshmodule.AssignDensityControlOp(self._get_args())
        elif self.type == "Icepak":
            self._meshicepak.omeshmodule.AssignMeshOperation(self._get_args())
        elif self.type == "CurvatureExtraction":
            self._meshicepak.omeshmodule.AssignCurvatureExtractionOp(self._get_args())
        else:
            return False
        # Fix: previously the success path fell off the end and returned None;
        # return True for consistency with update() and delete().
        return True

    @aedt_exception_handler
    def update(self):
        """Push the current ``props`` to the existing AEDT mesh operation.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when the operation type is
            not recognized.

        References
        ----------
        >>> oModule.EditTrueSurfOp
        >>> oModule.EditModelResolutionOp
        >>> oModule.EditSurfPriorityForTauOp
        >>> oModule.EditLengthOp
        >>> oModule.EditApplyCurvlinearElementsOp
        >>> oModule.EditRotationalLayerOp
        >>> oModule.EditDensityControlOp
        >>> oModule.EditMeshOperation
        >>> oModule.EditSBRCurvatureExtractionOp
        """
        if self.type == "SurfApproxBased":
            self._meshicepak.omeshmodule.EditTrueSurfOp(self.name, self._get_args())
        elif self.type == "DefeatureBased":
            self._meshicepak.omeshmodule.EditModelResolutionOp(self.name, self._get_args())
        elif self.type == "SurfaceRepPriority":
            self._meshicepak.omeshmodule.EditSurfPriorityForTauOp(self.name, self._get_args())
        elif self.type == "LengthBased":
            self._meshicepak.omeshmodule.EditLengthOp(self.name, self._get_args())
        elif self.type == "SkinDepthBased":
            self._meshicepak.omeshmodule.EditSkinDepthOp(self.name, self._get_args())
        elif self.type == "Curvilinear":
            self._meshicepak.omeshmodule.EditApplyCurvlinearElementsOp(self.name, self._get_args())
        elif self.type == "RotationalLayerMesh":
            self._meshicepak.omeshmodule.EditRotationalLayerOp(self.name, self._get_args())
        elif self.type == "DensityControlBased":
            self._meshicepak.omeshmodule.EditDensityControlOp(self.name, self._get_args())
        elif self.type == "Icepak":
            self._meshicepak.omeshmodule.EditMeshOperation(self.name, self._get_args())
        elif self.type == "CurvatureExtraction":
            self._meshicepak.omeshmodule.EditSBRCurvatureExtractionOp(self.name, self._get_args())
        else:
            return False
        return True

    @aedt_exception_handler
    def delete(self):
        """Delete the mesh operation in AEDT and deregister it from the parent.

        Returns
        -------
        bool
            ``True`` when successful.

        References
        ----------
        >>> oModule.DeleteOp
        """
        self._meshicepak.omeshmodule.DeleteOp([self.name])
        # Fix: iterate over a copy — the original removed elements from the
        # list while iterating it, which skips entries.
        for el in list(self._meshicepak.meshoperations):
            if el.name == self.name:
                self._meshicepak.meshoperations.remove(el)
        return True
class Mesh(object):
"""Mesh class.
This class manages AEDT mesh functions.
Parameters
----------
app : :class:`pyaedt.application.Analysis3D.FieldAnalysis3D`
"""
def __init__(self, app):
self._app = app
self._odesign = self._app.odesign
self.modeler = self._app.modeler
design_type = self._odesign.GetDesignType()
self.logger = self._app.logger
self._omeshmodule = self._odesign.GetModule(meshers[design_type])
self.id = 0
self.meshoperations = self._get_design_mesh_operations()
self.globalmesh = self._get_design_global_mesh()
pass
@property
    def omeshmodule(self):
        """AEDT mesh module for the active design (read-only).

        References
        ----------
        >>> oDesign.GetModule("MeshSetup")
        """
        return self._omeshmodule
@aedt_exception_handler
def _get_design_global_mesh(self):
""" """
try:
return self._app.design_properties["MeshSetup"]["MeshSettings"]
except:
return OrderedDict()
@aedt_exception_handler
def _get_design_mesh_operations(self):
""" """
meshops = []
try:
for ds in self._app.design_properties["MeshSetup"]["MeshOperations"]:
if isinstance(self._app.design_properties["MeshSetup"]["MeshOperations"][ds], (OrderedDict, dict)):
meshops.append(
MeshOperation(
self,
ds,
self._app.design_properties["MeshSetup"]["MeshOperations"][ds],
self._app.design_properties["MeshSetup"]["MeshOperations"][ds]["Type"],
)
)
except:
pass
return meshops
@aedt_exception_handler
def assign_surface_mesh(self, names, level, meshop_name=None):
"""Assign a surface mesh level to one or more objects.
Parameters
----------
names : list
One or more names of the objects.
level : int
Level of the surface mesh. Options are ``1`` through ``10``
meshop_name : str, optional
Name of the mesh operation. The default is ``None``.
Returns
-------
:class:`pyaedt.modules.Mesh.MeshOperation`
Mesh operation object.
References
----------
>>> oModule.AssignTrueSurfOp
"""
names = self.modeler.convert_to_selections(names, True)
if meshop_name:
for m in self.meshoperations:
if meshop_name == m.name:
meshop_name = generate_unique_name(meshop_name)
else:
meshop_name = generate_unique_name("SurfApprox")
self.logger.info("Assigning Mesh Level " + str(level) + " to " + str(names))
names = self._app._modeler._convert_list_to_ids(names)
if isinstance(names[0], int):
seltype = "Faces"
else:
seltype = "Objects"
props = OrderedDict(
{
"Type": "SurfApproxBased",
"CurvedSurfaceApproxChoice": "UseSlider",
seltype: names,
"SliderMeshSettings": level,
}
)
mop = MeshOperation(self, meshop_name, props, "SurfApproxBased")
mop.create()
self.meshoperations.append(mop)
return mop
@aedt_exception_handler
    def assign_surface_mesh_manual(self, names, surf_dev=None, normal_dev=None, aspect_ratio=None, meshop_name=None):
        """Assign a surface mesh with manual approximation settings to a list of faces.

        Parameters
        ----------
        names : list
            List of faces to apply the surface mesh to.
        surf_dev : float, optional
            Surface deviation. The default is ``None``.
        normal_dev : float, optional
            Normal deviation. The default is ``None``.
        aspect_ratio : int, optional
            Aspect ratio. The default is ``None``.
        meshop_name : str, optional
            Name of the mesh operation. The default is ``None``.

        Returns
        -------
        :class:`pyaedt.modules.Mesh.MeshOperation`
            Mesh operation object.

        References
        ----------
        >>> oModule.AssignTrueSurfOp
        """
        names = self.modeler.convert_to_selections(names, True)
        if meshop_name:
            # Avoid clashing with an already registered operation name.
            for m in self.meshoperations:
                if meshop_name == m.name:
                    meshop_name = generate_unique_name(meshop_name)
        else:
            # NOTE(review): prefix "ModelResolution" looks copy-pasted from
            # assign_model_resolution; "SurfApprox" would be expected here — confirm.
            meshop_name = generate_unique_name("ModelResolution")
        # Choice flags: presumably 2 = use the caller-supplied value and
        # 1 = fall back to the default shown below — TODO confirm against the
        # AEDT AssignTrueSurfOp documentation.
        surf_dev_enable = 2
        normal_dev_enable = 2
        aspect_ratio_enable = 2
        if not surf_dev:
            surf_dev_enable = 1
            surf_dev = "0.001"
        if not normal_dev:
            normal_dev_enable = 1
            normal_dev = "1"
        if not aspect_ratio:
            aspect_ratio_enable = 1
            aspect_ratio = "10"
        props = OrderedDict(
            {
                "Type": "SurfApproxBased",
                "Objects": names,
                "CurvedSurfaceApproxChoice": "ManualSettings",
                "SurfDevChoice": surf_dev_enable,
                "SurfDev": surf_dev,
                "NormalDevChoice": normal_dev_enable,
                "NormalDev": normal_dev,
                "AspectRatioChoice": aspect_ratio_enable,
                "AspectRatio": aspect_ratio,
            }
        )
        mop = MeshOperation(self, meshop_name, props, "SurfApproxBased")
        mop.create()
        self.meshoperations.append(mop)
        return mop
@aedt_exception_handler
    def assign_model_resolution(self, names, defeature_length=None, meshop_name=None):
        """Assign a model-resolution (defeaturing) mesh operation to objects.

        Parameters
        ----------
        names : list
            List of objects to defeature.
        defeature_length : float, optional
            Defeaturing length in millimeters. The default is ``None``, in which case
            automatic defeaturing is used.
        meshop_name : str, optional
            Name of the mesh operation. The default is ``None``.

        Returns
        -------
        :class:`pyaedt.modules.Mesh.MeshOperation`
            Mesh operation object.

        References
        ----------
        >>> oModule.AssignModelResolutionOp
        """
        names = self.modeler.convert_to_selections(names, True)
        if meshop_name:
            # Avoid clashing with an already registered operation name.
            for m in self.meshoperations:
                if meshop_name == m.name:
                    meshop_name = generate_unique_name(meshop_name)
        else:
            meshop_name = generate_unique_name("ModelResolution")
        # Integer selections are face ids; this operation only accepts objects.
        for name in names:
            if isinstance(name, int):
                self.logger.error("Mesh Operation Applies to Objects only")
                return False
        if defeature_length is None:
            # NOTE(review): unlike the manual branch below, these props carry
            # no "Type" key — confirm whether AEDT tolerates the omission.
            props = OrderedDict({"Objects": names, "UseAutoLength": True})
        else:
            props = OrderedDict(
                {
                    "Type": "DefeatureBased",
                    "Objects": names,
                    "UseAutoLength": False,
                    "DefeatureLength": str(defeature_length) + "mm",
                }
            )
        mop = MeshOperation(self, meshop_name, props, "DefeatureBased")
        mop.create()
        self.meshoperations.append(mop)
        return mop
@aedt_exception_handler
    def assign_initial_mesh_from_slider(
        self,
        level=5,
        method="Auto",
        usedynamicsurface=True,
        useflexmesh=False,
        applycurvilinear=False,
        usefallback=True,
        usephi=True,
        automodelresolution=True,
        modelresolutionlength="0.0001mm",
    ):
        """Assign the design's initial (global) mesh settings from a slider level.

        Parameters
        ----------
        level : int, optional
            Level of the surface mesh. Options are ``1`` through ``10``. The default is ``5.``
        method : str, optional
            Meshing method. Options are ``"Auto"``, ``"AnsoftTAU"``, and ``"AnsoftClassic"``
            The default is ``"Auto"``.
        usedynamicsurface : bool, optional
            Whether to use a dynamic surface. The default is ``True``.
        useflexmesh : bool, optional
            Whether to use a flexible mesh. The default is ``False``.
        applycurvilinear : bool, optional
            Whether to apply curvilinear elements. The default is ``False``.
        usefallback : bool, optional
            Whether to retain as a fallback. The default is ``True``.
        usephi : bool, optional
            Whether to use the Phi mesher for layered geometry.
            The default is ``True``.
        automodelresolution : bool, optional
            Whether to automatically calculate the resolution length
            based on each object's effective thickness. The default is ``True``.
        modelresolutionlength : float, optional
            Resolution thickness with units if ``automodelresolution=False``.
            The default ``"0.0001mm"``.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.

        References
        ----------
        >>> oModule.InitialMeshSettings
        """
        # 2D solvers only offer Auto and AnsoftClassic (no TAU mesher).
        if self._app.design_type == "2D Extractor" or self._app.design_type == "Maxwell 2D":
            mesh_methods = ["Auto", "AnsoftClassic"]
        else:
            mesh_methods = ["Auto", "AnsoftTAU", "AnsoftClassic"]
        # NOTE(review): assert is stripped under ``python -O``; an explicit
        # check-and-raise would be more robust for input validation.
        assert method in mesh_methods
        modelres = ["NAME:GlobalModelRes", "UseAutoLength:=", automodelresolution]
        if not automodelresolution:
            modelres.append("DefeatureLength:=")
            modelres.append(modelresolutionlength)
        surface_appr = [
            "NAME:GlobalSurfApproximation",
            "CurvedSurfaceApproxChoice:=",
            "UseSlider",
            "SliderMeshSettings:=",
            level,
        ]
        # AEDT expects these argument lists in this exact shape/order.
        if self._app.design_type == "2D Extractor" or self._app.design_type == "Maxwell 2D":
            args = ["NAME:MeshSettings", surface_appr, modelres, "MeshMethod:=", method]
        else:
            args = [
                "NAME:MeshSettings",
                surface_appr,
                ["NAME:GlobalCurvilinear", "Apply:=", applycurvilinear],
                modelres,
                "MeshMethod:=",
                method,
                "UseLegacyFaceterForTauVolumeMesh:=",
                False,
                "DynamicSurfaceResolution:=",
                usedynamicsurface,
                "UseFlexMeshingForTAUvolumeMesh:=",
                useflexmesh,
            ]
        # Fallback and Phi-mesher flags only apply to HFSS designs.
        if self._app.design_type == "HFSS":
            args.append("UseAlternativeMeshMethodsAsFallBack:=")
            args.append(usefallback)
            args.append("AllowPhiForLayeredGeometry:=")
            args.append(usephi)
        self.omeshmodule.InitialMeshSettings(args)
        return True
@aedt_exception_handler
def assign_surf_priority_for_tau(self, object_lists, surfpriority=0):
"""Assign a surface representation priority for the TAU mesh.
Parameters
----------
object_lists : list
List of objects to apply a surface representation
priority to.
surfpriority : int, optional
Surface representation priority. The default is ``0``.
Returns
-------
:class:`pyaedt.modules.Mesh.MeshOperation`
Mesh operation object.
References
----------
>>> oModule.AssignSurfPriorityForTauOp
"""
meshop_name = generate_unique_name("SurfaceRepPriority")
props = OrderedDict({"Type": "SurfaceRepPriority", "Objects": object_lists, "SurfaceRepPriority": surfpriority})
mop = MeshOperation(self, meshop_name, props, "SurfaceRepPriority")
mop.create()
self.meshoperations.append(mop)
return mop
@aedt_exception_handler
def generate_mesh(self, name):
"""Generate the mesh for a design.
Parameters
----------
name : str
Name of the design.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oDesign.GenerateMesh
"""
return self._odesign.GenerateMesh(name) == 0
@aedt_exception_handler
def delete_mesh_operations(self, mesh_type=None):
"""Remove mesh operations from a design.
Parameters
----------
mesh_type : optional
Type of the mesh operation to delete. The default is ``None``, in which
case all mesh operations are deleted.
Returns
-------
bool
``True`` when successful, ``False`` when failed.
References
----------
>>> oModule.DeleteOp
"""
# Type "Area Based" not included since the delete command causes
# desktop to crash
# https://tfs.ansys.com:8443/tfs/ANSYS_Development/Portfolio/ACE%20Team/_queries?id=150923
mesh_op_types = ["Length Based", "Surface | |
# Category delimiter used to address a specific attribute of a word instead of
# its surface text (e.g. f"lemma{cd}predict" matches on the lemma). Do not use
# ":" because depparse already uses it, e.g. for passive/active deprel
# differentiation and such.
cd = "#"
# Placeholders (prefix "ph_") to replace in preprocessing.
ph_outcome = "<outcome>"
ph_key = "<key>"
ph_value = "<value>"
ph_dvalue = "<dupvalue>"
ph_subject = "<subject>"
# All placeholder tokens defined above, collected in one list.
reserved_placeholder_words = [ph_outcome, ph_key, ph_value, ph_dvalue, ph_subject]
# Dependency relations (prefix "dep_"), named as variables to avoid
# misspelling them somewhere in the program.
dep_acl = "acl"  # adjectival clause
dep_acl_recl = "acl:relcl"  # relative clause modifier (note: variable name says "recl")
dep_advcl = "advcl"  # adverbial clause
dep_advmod = "advmod"  # adverbial modifier
dep_amod = "amod"  # adjectival modifier
dep_appos = "appos"  # appositional modifier
dep_aux = "aux"  # auxiliary
dep_case = "case"  # case marking
dep_ccomp = "ccomp"  # clausal complement (original comment said "causal" — UD defines ccomp as clausal)
dep_compound = "compound"  # compound
dep_cop = "cop"  # copula
dep_csubj = "csubj"  # clausal subject
dep_det = "det"  # determiner
dep_discourse = "discourse"  # discourse element (interjections, emoticons, discourse particles)
dep_mark = "mark"  # marker (marking a clause as subordinate to another clause)
dep_nmod = "nmod"  # nominal modifier
dep_nsubj = "nsubj"  # nominal subject
dep_nsubj_pass = "nsubj:pass"  # nominal subject in passive
dep_nummod = "nummod"  # numeric modifier
dep_obj = "obj"  # object
dep_obl = "obl"  # oblique nominal (non-core argument or adjunct)
dep_obl_tmod = "obl:tmod"  # temporal oblique
dep_parataxis = "parataxis"  # relation between a word and other elements, e.g. a sentential parenthetical or a clause after ":" or ";"
dep_punct = "punct"  # punctuation
dep_root = "root"  # root dependency; only once per sentence
dep_vocative = "vocative"  # used to mark an addressed dialogue participant
dep_xcomp = "xcomp"  # open clausal complement
# Command registry for the menu-driven interface: each entry maps a command id
# to the matching keywords, the strings shown/written in the UI, the handler
# name to execute, and a user-facing help description.
# Fixes: typos/grammar in the user-facing description strings only
# ("data intance", "pertubates", "Afterwards will ask", "how ... look like").
# Ids, keywords, display/write/execute values are unchanged.
dictionary = [
    {
        "id": "predict",
        "keywords": "predict predictions classify classifications",
        "display": "Make me a prediction.",
        "write": "Can you please make me a prediction?",
        "execute": "predict",
        "description": "The 'predict' command will allow you to infer a prediction from your data instance. In case you did not provide a data instance yet, ERIC will ask you to provide a value for each feature."
    },
    {
        "id": "whatif",
        "keywords": "what if change",
        "display": "What if X equals Z?",
        "write": "What if X equals Z?",
        "execute": "whatif",
        "description": "The 'what-if' command gives you the opportunity to alter the data instance that ERIC is talking about. There will be a new entry on the clipboard."
    },
    {
        "id": "whatif-gl",
        "keywords": "what if greater less change",
        "display": "What if X was greater/less?",
        "write": "What if X was greater/less?",
        "execute": "whatif-gl",
        "description": "The 'what-if-greater-less' command fixes the values of all but one feature and perturbs the values of this one feature. A graph will show you how the prediction changes."
    },
    {
        "id": "why",
        "keywords": "why",
        "display": "Why did you predict X?",
        "write": "Why did you predict X?",
        "execute": "why",
        "description": "The 'why' command provides information about why ERIC predicted a specific output. It will present you an explanation. Afterwards it will ask you to provide feedback."
    },
    {
        "id": "why-not",
        "keywords": "why not",
        "display": "Why didn't you predict Z?",
        "write": "Why didn't you predict Z?",
        "execute": "why-not",
        "description": "The 'why-not' command provides information on why an alternative outcome was not predicted. It will present you an explanation. Afterwards it will ask you to provide feedback."
    },
    {
        "id": "how-to",
        "keywords": "how",
        "display": "How do I get Y?",
        "write": "How do I get Y?",
        "execute": "how-to",
        "description": "The 'how-to' command tells about the changes that must be done to get an alternative prediction outcome."
    },
    {
        "id": "when",
        "keywords": "when",
        "display": "When do you predict Y?",
        "write": "When do you predict Y?",
        "execute": "when",
        "description": "The 'when' command tells you for what feature values the model most likely produces a certain outcome."
    },
    {
        "id": "certainty",
        "keywords": "how certain uncertain are you sure",
        "display": "How certain are you?",
        "write": "How certain are you?",
        "execute": "certainty",
        "description": "The 'certainty' command will reveal the certainty of a previously presented claim."
    },
    {
        "id": "featureNames",
        "keywords": "features names attributes input",
        "display": "What is your input?",
        "write": "What do you use as an input?",
        "execute": "featureNames",
        "description": "The 'input' command will tell about the input features the AI uses to make a prediction."
    },
    {
        "id": "preview",
        "keywords": "features preview data sample",
        "display": "Show me some sample data.",
        "write": "Can you show me some sample data?",
        "execute": "preview",
        "description": "The 'preview' command will give you a small preview of what training data instances look like."
    },
    {
        "id": "targetvalues",
        "keywords": "what else target outcome outcome predict output",
        "display": "What is your output?",
        "write": "What else can you predict?",
        "execute": "targetvalues",
        "description": "The 'output' command will tell about the output the AI can generate."
    },
    {
        "id": "init",
        "keywords": "start hello welcome hi",
        "display": "Hi BOT.",
        "write": "Hi BOT.",
        "execute": "init",
        "description": ""
    }
]
#Punctuation in the key_sentences usually gets removed in preprocessing
nlp_dictionary = [
{
"id" : "predict",
"key_sentences": [
"predict",
f"would {ph_subject} presumably {ph_outcome}?",
f"is {ph_subject} likely to {ph_outcome}?",
f"is <subject> {ph_outcome}?",
f"is it foreseeable that {ph_subject} is {ph_outcome}?",
f"would {ph_subject} {ph_outcome}?",
"Make a prediction for the current instance",
"Make a prediction for the current data",
"Can you please make me a prediction?",
"Make me a prediction",
"Make prediction",
"prediction",
"Can you predict something for me?",
"Can you project something for me?",
f"Can you project if {ph_subject} is {ph_outcome}?",
f"Can you predict if {ph_subject} is {ph_outcome}?",
f"Can you calculate if {ph_subject} is {ph_outcome}?",
f"Can you anticipate if {ph_subject} is {ph_outcome}?",
f"Can you guess if {ph_subject} is {ph_outcome}?",
f"Can you foresee if {ph_subject} is {ph_outcome}?",
"predict for current instance",
"predict for current data",
"Tell me what happens",
"Tell us what happens",
"Show us what happens",
"Show me what happens"
],
"depparse": [
[#0
("root", dep_root, "what"),
("what", dep_nsubj, "outcome"),
("outcome", dep_amod, ["predicted", "current"])
],
[#1
("root", dep_root, "predict"),
("predict", dep_obj, "outcome")
],
[#2
("root", dep_root, "predict"),
("predict", dep_obl, ["data", "person"])
],
[#3
("root", dep_root, "predicted"),
("predicted", dep_nsubj_pass, f"upos{cd}NOUN")
],
[#4
("root", dep_root, "predict"),
("predict", dep_obj, "something"),
("something", dep_nmod, f"upos{cd}PRON")#,
#(f"upos{category_delimiter}PRON", dep_case, "for")
],
[#5
("root", dep_root, "make"),
("make", dep_obj, "prediction")
],
[#6
("root", dep_root, "happen"),
("happen", dep_nsubj, "what")
],
[#7
("root", dep_root, "predict")
],
[#8
("root", dep_root, "foreseeable"),
("foreseeable", dep_nsubj, ph_subject),
(ph_subject, dep_ccomp, ph_outcome)
],
[#9
("root", dep_root, [f"lemma{cd}forecast", f"lemma{cd}predict", f"lemma{cd}project", f"lemma{cd}anticipate"]),
([f"lemma{cd}forecast", f"lemma{cd}predict", f"lemma{cd}project", f"lemma{cd}anticipate"], dep_advcl, ph_outcome),
(ph_outcome, dep_nsubj, ph_subject)
]
]
},
{
"id": "whatif",
"key_sentences": [
"whatif",
# f"What if {subject} have {value} of {key}",
f"What if {ph_key} was {ph_value}?",
f"What if {ph_key} = {ph_value}?",
f"What if {ph_key} equals {ph_value}?",
f"What if {ph_key} is {ph_value} and {ph_key} is {ph_value}?",
f"What happens if {ph_key} equals {ph_value}?",
f"What if you change {ph_key} to {ph_value}?",
f"What happens if you change {ph_key} to {ph_value}?",
f"What if you set {ph_key} to {ph_value}?",
f"What happens if you set {ph_key} to {ph_value}?",
f"What if {ph_key} is changed to {ph_value}?",
f"What happens if {ph_key} is changed to {ph_value}?",
f"What happens if {ph_key} is {ph_value}",
f"Change {ph_key} to {ph_value}",
f"Set {ph_key} to {ph_value}.",
f"Set {ph_key} = {ph_value}",
f"{ph_key} = {ph_value}",
"set x = y"
],
"depparse": [
[#0
("root", dep_root, [f"lemma{cd}change", f"lemma{cd}set"]),
([f"lemma{cd}change", f"lemma{cd}set"], [dep_obj, dep_advcl, dep_acl], ph_key),
(ph_key, [dep_appos, dep_nummod, dep_obl, dep_obj, dep_nmod], ph_value)
],
[#1
("root", dep_root, f"lemma{cd}happen"),
(f"lemma{cd}happen", [dep_advcl, dep_parataxis], ph_value),
(f"lemma{cd}happen", [dep_advcl, dep_nsubj], ph_key)
],
[#2
("root", dep_root, "what"),
("what", dep_advcl, [f"lemma{cd}change", f"lemma{cd}set"]),
([f"lemma{cd}change", f"lemma{cd}set"], [dep_obj, dep_nsubj_pass], ph_key),
([f"lemma{cd}change", f"lemma{cd}set"], dep_obl, ph_value)
],
[#3
("root", dep_root, "what"),
("what", [dep_advcl, dep_nsubj], ph_value),
(ph_value, dep_nsubj, ph_key)
],
[#4
("root", dep_root, f"lemma{cd}happen"),
(f"lemma{cd}happen", dep_advcl, ph_key),
(f"lemma{cd}happen", [dep_advcl, dep_parataxis], ph_value)
],
[#5
("root", dep_root, ph_key),
(ph_key, dep_nsubj, [f"lemma{cd}change", f"lemma{cd}set"]),
(ph_key, dep_obl, ph_value)
],
[#6
("root", dep_root, [f"lemma{cd}change", f"lemma{cd}set"]),
([f"lemma{cd}change", f"lemma{cd}set"], dep_obj, ph_value),
(ph_value, [dep_amod, dep_compound], ph_key)
],
[#7
("root", dep_root, f"lemma{cd}happen"),
(f"lemma{cd}happen", dep_advcl, ph_key),
(ph_key, [dep_appos, dep_nummod], ph_value)
],
[#8
("root", dep_root, ph_key),
(ph_key, dep_punct, f"lemma{cd}="),
(ph_key, [dep_parataxis, dep_appos], ph_value)
],
| |
# coding: utf-8
import json
class ObservedData:
    def __init__(self, opencti):
        """Bind the OpenCTI API client and declare the default attribute set.

        :param opencti: configured OpenCTI API client used for all queries
        """
        self.opencti = opencti
        # Default GraphQL selection returned for ObservedData reads/lists when
        # the caller does not pass customAttributes.
        self.properties = """
            id
            standard_id
            entity_type
            parent_types
            spec_version
            created_at
            updated_at
            createdBy {
                ... on Identity {
                    id
                    standard_id
                    entity_type
                    parent_types
                    spec_version
                    identity_class
                    name
                    description
                    roles
                    contact_information
                    x_opencti_aliases
                    created
                    modified
                    objectLabel {
                        edges {
                            node {
                                id
                                value
                                color
                            }
                        }
                    }
                }
                ... on Organization {
                    x_opencti_organization_type
                    x_opencti_reliability
                }
                ... on Individual {
                    x_opencti_firstname
                    x_opencti_lastname
                }
            }
            objectMarking {
                edges {
                    node {
                        id
                        standard_id
                        entity_type
                        definition_type
                        definition
                        created
                        modified
                        x_opencti_order
                        x_opencti_color
                    }
                }
            }
            objectLabel {
                edges {
                    node {
                        id
                        value
                        color
                    }
                }
            }
            externalReferences {
                edges {
                    node {
                        id
                        standard_id
                        entity_type
                        source_name
                        description
                        url
                        hash
                        external_id
                        created
                        modified
                    }
                }
            }
            revoked
            confidence
            created
            modified
            first_observed
            last_observed
            number_observed
            objects {
                edges {
                    node {
                        ... on BasicObject {
                            id
                            entity_type
                            parent_types
                        }
                        ... on BasicRelationship {
                            id
                            entity_type
                            parent_types
                        }
                        ... on StixObject {
                            standard_id
                            spec_version
                            created_at
                            updated_at
                        }
                        ... on AttackPattern {
                            name
                        }
                        ... on Campaign {
                            name
                        }
                        ... on CourseOfAction {
                            name
                        }
                        ... on Individual {
                            name
                        }
                        ... on Organization {
                            name
                        }
                        ... on Sector {
                            name
                        }
                        ... on Indicator {
                            name
                        }
                        ... on Infrastructure {
                            name
                        }
                        ... on IntrusionSet {
                            name
                        }
                        ... on Position {
                            name
                        }
                        ... on City {
                            name
                        }
                        ... on Country {
                            name
                        }
                        ... on Region {
                            name
                        }
                        ... on Malware {
                            name
                        }
                        ... on ThreatActor {
                            name
                        }
                        ... on Tool {
                            name
                        }
                        ... on Vulnerability {
                            name
                        }
                        ... on Incident {
                            name
                        }
                        ... on StixCoreRelationship {
                            standard_id
                            spec_version
                            created_at
                            updated_at
                        }
                    }
                }
            }
        """
"""
List ObservedData objects
:param filters: the filters to apply
:param search: the search keyword
:param first: return the first n rows from the after ID (or the beginning if not set)
:param after: ID of the first row for pagination
:return List of ObservedData objects
"""
    def list(self, **kwargs):
        """List ObservedData objects.

        :param filters: the filters to apply
        :param search: the search keyword
        :param first: return the first n rows from the after ID (or the beginning if not set)
        :param after: ID of the first row for pagination
        :param orderBy: attribute to order by
        :param orderMode: ordering direction
        :param customAttributes: custom GraphQL attributes to return instead of the defaults
        :param getAll: request a large page (see note below)
        :param withPagination: also return pagination info with the rows
        :return: List of ObservedData objects
        """
        filters = kwargs.get("filters", None)
        search = kwargs.get("search", None)
        first = kwargs.get("first", 500)
        after = kwargs.get("after", None)
        order_by = kwargs.get("orderBy", None)
        order_mode = kwargs.get("orderMode", None)
        custom_attributes = kwargs.get("customAttributes", None)
        get_all = kwargs.get("getAll", False)
        with_pagination = kwargs.get("withPagination", False)
        if get_all:
            # NOTE(review): "getAll" only bumps the page size to 500; it does
            # not loop over pages, so rows beyond 500 are not fetched.
            first = 500
        self.opencti.log(
            "info", "Listing ObservedDatas with filters " + json.dumps(filters) + "."
        )
        # Splice either the caller's attribute selection or the default one
        # into the GraphQL node body.
        query = (
            """
            query ObservedDatas($filters: [ObservedDatasFiltering], $search: String, $first: Int, $after: ID, $orderBy: ObservedDatasOrdering, $orderMode: OrderingMode) {
                observedDatas(filters: $filters, search: $search, first: $first, after: $after, orderBy: $orderBy, orderMode: $orderMode) {
                    edges {
                        node {
                            """
            + (custom_attributes if custom_attributes is not None else self.properties)
            + """
                        }
                    }
                    pageInfo {
                        startCursor
                        endCursor
                        hasNextPage
                        hasPreviousPage
                        globalCount
                    }
                }
            }
        """
        )
        result = self.opencti.query(
            query,
            {
                "filters": filters,
                "search": search,
                "first": first,
                "after": after,
                "orderBy": order_by,
                "orderMode": order_mode,
            },
        )
        return self.opencti.process_multiple(
            result["data"]["observedDatas"], with_pagination
        )
"""
Read a ObservedData object
:param id: the id of the ObservedData
:param filters: the filters to apply if no id provided
:return ObservedData object
"""
def read(self, **kwargs):
id = kwargs.get("id", None)
filters = kwargs.get("filters", None)
custom_attributes = kwargs.get("customAttributes", None)
if id is not None:
self.opencti.log("info", "Reading ObservedData {" + id + "}.")
query = (
"""
query ObservedData($id: String!) {
observedData(id: $id) {
"""
+ (
custom_attributes
if custom_attributes is not None
else self.properties
)
+ """
}
}
"""
)
result = self.opencti.query(query, {"id": id})
return self.opencti.process_multiple_fields(result["data"]["observedData"])
elif filters is not None:
result = self.list(filters=filters)
if len(result) > 0:
return result[0]
else:
return None
"""
Check if a observedData already contains a STIX entity
:return Boolean
"""
def contains_stix_object_or_stix_relationship(self, **kwargs):
id = kwargs.get("id", None)
stix_object_or_stix_relationship_id = kwargs.get(
"stixObjectOrStixRelationshipId", None
)
if id is not None and stix_object_or_stix_relationship_id is not None:
self.opencti.log(
"info",
"Checking StixObjectOrStixRelationship {"
+ stix_object_or_stix_relationship_id
+ "} in ObservedData {"
+ id
+ "}",
)
query = """
query ObservedDataContainsStixObjectOrStixRelationship($id: String!, $stixObjectOrStixRelationshipId: String!) {
observedDataContainsStixObjectOrStixRelationship(id: $id, stixObjectOrStixRelationshipId: $stixObjectOrStixRelationshipId)
}
"""
result = self.opencti.query(
query,
{
"id": id,
"stixObjectOrStixRelationshipId": stix_object_or_stix_relationship_id,
},
)
return result["data"]["observedDataContainsStixObjectOrStixRelationship"]
else:
self.opencti.log(
"error",
"[opencti_observedData] Missing parameters: id or entity_id",
)
"""
Create a ObservedData object
:param name: the name of the ObservedData
:return ObservedData object
"""
    def create(self, **kwargs):
        """Create an ObservedData object.

        ``first_observed``, ``last_observed`` and ``objects`` are required;
        when any is missing an error is logged and None is returned.

        :param stix_id: STIX id for the new object
        :param first_observed: start of the observation window (required)
        :param last_observed: end of the observation window (required)
        :param objects: ids of the contained objects (required)
        :param update: whether to upsert an existing object
        :return: created ObservedData object, or None on a bad call
        """
        stix_id = kwargs.get("stix_id", None)
        created_by = kwargs.get("createdBy", None)
        object_marking = kwargs.get("objectMarking", None)
        object_label = kwargs.get("objectLabel", None)
        external_references = kwargs.get("externalReferences", None)
        revoked = kwargs.get("revoked", None)
        confidence = kwargs.get("confidence", None)
        lang = kwargs.get("lang", None)
        created = kwargs.get("created", None)
        modified = kwargs.get("modified", None)
        first_observed = kwargs.get("first_observed", None)
        last_observed = kwargs.get("last_observed", None)
        number_observed = kwargs.get("number_observed", None)
        objects = kwargs.get("objects", None)
        x_opencti_stix_ids = kwargs.get("x_opencti_stix_ids", None)
        update = kwargs.get("update", False)
        if (
            first_observed is not None
            and last_observed is not None
            and objects is not None
        ):
            self.opencti.log("info", "Creating ObservedData.")
            query = """
                mutation ObservedDataAdd($input: ObservedDataAddInput) {
                    observedDataAdd(input: $input) {
                        id
                        standard_id
                        entity_type
                        parent_types
                    }
                }
            """
            result = self.opencti.query(
                query,
                {
                    "input": {
                        "stix_id": stix_id,
                        "createdBy": created_by,
                        "objectMarking": object_marking,
                        "objectLabel": object_label,
                        "externalReferences": external_references,
                        "revoked": revoked,
                        "confidence": confidence,
                        "lang": lang,
                        "created": created,
                        "modified": modified,
                        "first_observed": first_observed,
                        "last_observed": last_observed,
                        "number_observed": number_observed,
                        "objects": objects,
                        "x_opencti_stix_ids": x_opencti_stix_ids,
                        "update": update,
                    }
                },
            )
            return self.opencti.process_multiple_fields(
                result["data"]["observedDataAdd"]
            )
        else:
            # NOTE(review): this branch implicitly returns None after logging.
            self.opencti.log(
                "error",
                "[opencti_observedData] Missing parameters: first_observed, last_observed or objects",
            )
"""
Add a Stix-Core-Object or stix_relationship to ObservedData object (object)
:param id: the id of the ObservedData
:param entity_id: the id of the Stix-Core-Object or stix_relationship
:return Boolean
"""
def add_stix_object_or_stix_relationship(self, **kwargs):
id = kwargs.get("id", None)
stix_object_or_stix_relationship_id = kwargs.get(
"stixObjectOrStixRelationshipId", None
)
if id is not None and stix_object_or_stix_relationship_id is not None:
if self.contains_stix_object_or_stix_relationship(
id=id,
stixObjectOrStixRelationshipId=stix_object_or_stix_relationship_id,
):
return True
self.opencti.log(
"info",
"Adding StixObjectOrStixRelationship {"
+ stix_object_or_stix_relationship_id
+ "} to ObservedData {"
+ id
+ "}",
)
query = """
mutation ObservedDataEdit($id: ID!, $input: StixMetaRelationshipAddInput) {
observedDataEdit(id: $id) {
relationAdd(input: $input) {
id
}
}
}
"""
self.opencti.query(
query,
{
"id": id,
"input": {
"toId": stix_object_or_stix_relationship_id,
"relationship_type": "object",
},
},
)
return True
else:
self.opencti.log(
"error",
"[opencti_observedData] Missing parameters: id and stix_object_or_stix_relationship_id",
)
return False
"""
Remove a Stix-Core-Object or stix_relationship to Observed-Data object (object_refs)
:param id: the id of the Observed-Data
:param entity_id: the id of the Stix-Core-Object or stix_relationship
:return Boolean
"""
def remove_stix_object_or_stix_relationship(self, **kwargs):
id = kwargs.get("id", None)
stix_object_or_stix_relationship_id = kwargs.get(
"stixObjectOrStixRelationshipId", None
)
if id is not None and stix_object_or_stix_relationship_id is not None:
self.opencti.log(
"info",
"Removing StixObjectOrStixRelationship {"
+ stix_object_or_stix_relationship_id
+ "} to Observed-Data {"
+ id
+ "}",
)
query = """
mutation ObservedDataEditRelationDelete($id: ID!, $toId: String!, $relationship_type: String!) {
observedDataEdit(id: $id) {
relationDelete(toId: $toId, relationship_type: $relationship_type) {
id
}
}
}
"""
self.opencti.query(
query,
{
"id": id,
"toId": stix_object_or_stix_relationship_id,
"relationship_type": "object",
},
)
return True
else:
self.opencti.log(
"error", "[opencti_observed_data] Missing parameters: id and entity_id"
)
return False
"""
Import a ObservedData object from a STIX2 object
:param stixObject: the Stix-Object ObservedData
:return ObservedData object
"""
def import_from_stix2(self, **kwargs):
stix_object = kwargs.get("stixObject", None)
extras = kwargs.get("extras", {})
update = kwargs.get("update", False)
if stix_object is not None:
observed_data_result = self.create(
stix_id=stix_object["id"],
createdBy=extras["created_by_id"]
if "created_by_id" in extras
else None,
objectMarking=extras["object_marking_ids"]
if "object_marking_ids" in extras
else None,
objectLabel=extras["object_label_ids"]
if "object_label_ids" in extras
else [],
externalReferences=extras["external_references_ids"]
if "external_references_ids" in extras
else [],
revoked=stix_object["revoked"] if "revoked" in stix_object else None,
confidence=stix_object["confidence"]
if "confidence" in stix_object
else None,
lang=stix_object["lang"] if "lang" in stix_object else None,
created=stix_object["created"] if "created" in stix_object else None,
modified=stix_object["modified"] if "modified" in stix_object else None,
first_observed=stix_object["first_observed"]
if "first_observed" in stix_object
else None,
last_observed=stix_object["last_observed"]
if "last_observed" in stix_object
else None,
number_observed=stix_object["number_observed"]
if "number_observed" in stix_object
else None,
objects=stix_object["object_refs"]
if "object_refs" in stix_object
else None,
x_opencti_stix_ids=stix_object["x_opencti_stix_ids"]
if "x_opencti_stix_ids" in stix_object
else None,
update=update,
)
if "objects" in stix_object:
for key, observable_item in stix_object["objects"].items():
stix_observable_result = self.opencti.stix_cyber_observable.create(
observableData=observable_item,
createdBy=extras["created_by_id"]
if "created_by_id" in extras
else None,
objectMarking=extras["object_marking_ids"]
if "object_marking_ids" in extras
else None,
objectLabel=extras["object_label_ids"]
if "object_label_ids" in extras
else [],
)
if stix_observable_result is not None:
self.add_stix_object_or_stix_relationship(
id=observed_data_result["id"],
stixObjectOrStixRelationshipId=stix_observable_result["id"],
)
self.opencti.stix2.mapping_cache[
stix_observable_result["id"]
] = {
"id": stix_observable_result["id"],
"type": | |
# repository: kmkurn/ptst-semeval2021
#!/usr/bin/env python
# Copyright (c) 2021 <NAME>
from collections import defaultdict
from pathlib import Path
from statistics import median
import math
import os
import pickle
import tempfile
from anafora import AnaforaData
from rnnr import Event, Runner
from rnnr.attachments import EpochTimer, MeanReducer, ProgressBar
from sacred import Experiment
from sacred.observers import MongoObserver
from sacred.utils import apply_backspaces_and_linefeeds
from text2array import BucketIterator, ShuffleIterator
from tqdm import tqdm
from transformers import AutoConfig, AutoModelForTokenClassification
import numpy as np
import torch
from aatrn import compute_ambiguous_tag_pairs_mask
from callbacks import (
log_grads,
log_stats,
save_state_dict,
update_params,
)
from crf import LinearCRF
from evaluation import score_time
from ingredients.corpus import ing as corpus_ing, read_samples
from models import RoBERTagger
from utils import make_anafora, print_accs
# Sacred experiment object; the corpus ingredient contributes its own config/commands.
ex = Experiment("sest10-ptst-testrun", ingredients=[corpus_ing])
ex.captured_out_filter = apply_backspaces_and_linefeeds
# Setup mongodb observer
# Only attached when BOTH env vars are set; otherwise the run is unobserved.
mongo_url = os.getenv("SACRED_MONGO_URL")
db_name = os.getenv("SACRED_DB_NAME")
if None not in (mongo_url, db_name):
    ex.observers.append(MongoObserver.create(url=mongo_url, db_name=db_name))
@ex.config
def default():
    """Default Sacred configuration.

    NOTE: Sacred turns each local variable below into a config entry keyed by
    the variable name, so these names are part of the experiment's interface
    and must not be renamed.
    """
    # directory to save finetuning artifacts
    artifacts_dir = "timex_artifacts"
    # whether to overwrite existing artifacts directory
    overwrite = False
    # temperature to regulate confidence (>1 means less confident)
    temperature = 1.0
    # whether to freeze the embedding layers
    freeze_embeddings = True
    # freeze encoder earlier layers up to this layer
    freeze_encoder_up_to = 5
    # device to run on [cpu, cuda]
    device = "cuda" if torch.cuda.is_available() else "cpu"
    # cumulative prob threshold
    thresh = 0.95
    # batch size
    batch_size = 50
    # learning rate
    lr = 1e-5
    # max number of epochs
    max_epoch = 5
    # path to directory containing the gold annotations
    gold_path = ""
    # whether to write predictions when finetuning is finished
    predict_on_finished = False
    # load model parameters from here (evaluate)
    load_params = "model.pth"
    # whether to save confusion matrix (evaluate)
    save_confusion_matrix = False
@ex.named_config
def best():
    """Named configuration overriding the defaults with tuned hyperparameter values."""
    lr = 9e-6
    temperature = 2.56
@ex.capture
def run_eval(
    model,
    id2label,
    samples,
    corpus,
    _log,
    device="cpu",
    batch_size=32,
    gold_path="",
    compute_loss=False,
    confusion=False,
):
    """Run prediction (and optionally PTST loss computation) over ``samples``.

    Returns a ``(score, mean_ptst_loss)`` tuple. ``score`` is the result of
    ``score_time`` when ``gold_path`` is set, otherwise ``None``;
    ``mean_ptst_loss`` is present only when ``compute_loss`` is True.
    The three BATCH callbacks below share the runner ``state`` dict and rely
    on running in registration order (prediction fills state["arr"]/["scores"]
    that the loss callback reuses).
    """
    if not gold_path and not compute_loss:
        _log.info("Skipping evaluation since gold data isn't provided and loss isn't required")
        return None, None
    runner = Runner()
    runner.state.update({"preds": [], "_ids": []})
    @runner.on(Event.BATCH)
    def maybe_compute_prediction(state):
        # Only decode predictions when we have gold data to score against.
        if not gold_path:
            return
        arr = state["batch"].to_array()
        state["arr"] = arr
        assert arr["mask"].all()
        words = torch.from_numpy(arr["word_ids"]).long().to(device)
        model.eval()
        scores = model(words)
        # Viterbi decoding over the CRF tag-pair scores.
        preds = LinearCRF(scores).argmax()
        state["preds"].extend(preds.tolist())
        state["_ids"].extend(arr["_id"].tolist())
        if compute_loss:
            # Cache the scores so the loss callback doesn't re-run the model.
            state["scores"] = scores
    @runner.on(Event.BATCH)
    def maybe_compute_loss(state):
        if not compute_loss:
            return
        # Reuse the array/scores computed by the prediction callback when available.
        arr = state["arr"] if "arr" in state else state["batch"].to_array()
        state["arr"] = arr
        if "scores" in state:
            scores = state["scores"]
        else:
            assert arr["mask"].all()
            words = torch.from_numpy(arr["word_ids"]).long().to(device)
            model.eval()
            scores = model(words)
        mask = torch.from_numpy(arr["mask"]).bool().to(device)
        ptst_mask = torch.from_numpy(arr["ptst_mask"]).bool().to(device)
        # PTST loss: -log Z(ambiguous chart) + log Z(full chart).
        masked_scores = scores.masked_fill(~ptst_mask, -1e9)
        crf = LinearCRF(masked_scores)
        crf_z = LinearCRF(scores)
        ptst_loss = -crf.log_partitions().sum() + crf_z.log_partitions().sum()
        state["ptst_loss"] = ptst_loss.item()
        state["size"] = mask.size(0)
    @runner.on(Event.BATCH)
    def set_n_items(state):
        # Token count for the progress bar update.
        state["n_items"] = int(state["arr"]["mask"].sum())
    n_tokens = sum(len(s["word_ids"]) for s in samples)
    ProgressBar(leave=False, total=n_tokens, unit="tok").attach_on(runner)
    if compute_loss:
        MeanReducer("mean_ptst_loss", value="ptst_loss").attach_on(runner)
    with torch.no_grad():
        runner.run(BucketIterator(samples, lambda s: len(s["word_ids"]), batch_size))
    if runner.state["preds"]:
        assert len(runner.state["preds"]) == len(samples)
        assert len(runner.state["_ids"]) == len(samples)
        # BucketIterator shuffles batch order; map predictions back via _id.
        for i, preds in zip(runner.state["_ids"], runner.state["preds"]):
            samples[i]["preds"] = preds
    if gold_path:
        # Write Anafora XML predictions per document into a temp dir, then score it.
        group = defaultdict(list)
        for s in samples:
            group[str(s["path"])].append(s)
        with tempfile.TemporaryDirectory() as dirname:
            dirname = Path(dirname)
            for doc_path, doc_samples in group.items():
                spans = [x for s in doc_samples for x in s["spans"]]
                labels = [id2label[x] for s in doc_samples for x in s["preds"]]
                # Strip the corpus root prefix so output mirrors the corpus layout.
                doc_path = Path(doc_path[len(f"{corpus['path']}/") :])
                data = make_anafora(spans, labels, doc_path.name)
                (dirname / doc_path.parent).mkdir(parents=True, exist_ok=True)
                data.to_file(f"{str(dirname / doc_path)}.xml")
            return (
                score_time(gold_path, str(dirname), confusion),
                runner.state.get("mean_ptst_loss"),
            )
    return None, runner.state.get("mean_ptst_loss")
@ex.capture
def read_samples_(_log, **kwargs):
    """Read corpus samples, tagging each with its positional ``_id``.

    Keyword arguments are forwarded to ``read_samples``. Logs the sample and
    token counts and returns the list of sample dicts.
    """
    loaded = list(read_samples(**kwargs))
    for idx, sample in enumerate(loaded):
        sample["_id"] = idx
    token_count = sum(len(sample["word_ids"]) for sample in loaded)
    _log.info("Read %d samples and %d tokens", len(loaded), token_count)
    return loaded
@ex.command(unobserved=True)
def evaluate_src_model(_log, _run, device="cpu"):
    """Evaluate the source model."""
    class Wrapper(torch.nn.Module):
        # Adapts a plain token classifier to the CRF-style tag-pair score
        # interface expected by run_eval (LinearCRF).
        def __init__(self, model):
            super().__init__()
            self.model = model
        def forward(self, *args, **kwargs):
            # HF token classifiers return logits as the first output element.
            emissions = self.model(*args, **kwargs)[0]
            bsz, slen, nl = emissions.shape
            # Build (batch, slen-1, prev_tag, tag) scores: each position's
            # emission is broadcast over the previous-tag axis (no learned
            # transitions, so scores are independent of the previous tag).
            scores = emissions[:, :-1].unsqueeze(2)
            assert scores.shape == (bsz, slen - 1, 1, nl)
            scores = scores.expand(bsz, slen - 1, nl, nl)
            # clone() because expand() returns a non-writable view.
            scores = scores.clone()
            # Fold the last token's emissions into the final pair scores.
            scores[:, -1] += emissions[:, -1].unsqueeze(2)
            assert scores.shape == (bsz, slen - 1, nl, nl)
            return scores
    model_name = "clulab/roberta-timex-semeval"
    _log.info("Loading %s", model_name)
    config = AutoConfig.from_pretrained(model_name)
    model = Wrapper(AutoModelForTokenClassification.from_pretrained(model_name, config=config))
    model.to(device)
    _log.info("Evaluating")
    # NOTE(review): run_eval returns (None, None) when gold_path is unset, which
    # would raise on eval_score["f1"] below — presumably this command is only run
    # with gold_path configured; confirm.
    eval_score, _ = run_eval(model, config.id2label, read_samples_())
    print_accs(eval_score)
    return eval_score["f1"]
@ex.command
def evaluate(
    _log,
    _run,
    temperature=1.0,
    artifacts_dir="artifacts",
    load_params="model.pth",
    device="cpu",
    save_confusion_matrix=False,
):
    """Evaluate a trained target model."""
    model_name = "clulab/roberta-timex-semeval"
    _log.info("Loading %s", model_name)
    config = AutoConfig.from_pretrained(model_name)
    token_clf = AutoModelForTokenClassification.from_pretrained(model_name, config=config)
    model = RoBERTagger(token_clf, config.num_labels, temperature)
    artifacts_dir = Path(artifacts_dir)
    _log.info("Loading model parameters from %s", artifacts_dir / load_params)
    # Load to CPU first; moved to the target device below.
    model.load_state_dict(torch.load(artifacts_dir / load_params, "cpu"))
    model.to(device)
    _log.info("Evaluating")
    # NOTE(review): run_eval returns None for eval_score when gold_path is empty,
    # which would raise on .pop below — presumably this command is only run with
    # gold_path configured; confirm.
    eval_score, _ = run_eval(model, config.id2label, read_samples_(), confusion=save_confusion_matrix)
    c = eval_score.pop("confusion", None)
    print_accs(eval_score, on="test", run=_run)
    if c is not None:
        # c maps (gold_label, predicted_label) pairs to counts; build a dense
        # matrix with "O" forced to index 0.
        labels = set()
        for k in c.keys():
            labels.update(k)
        if "O" in labels:
            labels.remove("O")
        labels = sorted(labels)
        labels.insert(0, "O")
        label2id = {l: i for i, l in enumerate(labels)}
        m = np.zeros((len(labels), len(labels)))
        for k, cnt in c.items():
            m[label2id[k[0]], label2id[k[1]]] = cnt
        _log.info("Saving labels list in %s", artifacts_dir / "labels.pkl")
        with open(artifacts_dir / "labels.pkl", "wb") as f:
            pickle.dump(labels, f)
        _log.info("Saving confusion matrix in %s", artifacts_dir / "confusion.npy")
        np.save(artifacts_dir / "confusion.npy", m)
    return eval_score["f1"]
@ex.command(unobserved=True)
def report_coverage(
    corpus, _log, temperature=1.0, device="cpu", batch_size=16, thresh=0.95, gold_path=""
):
    """Report coverage of gold tags in the chart.

    Computes the ambiguous PTST tag-pair mask from the source model and reports
    what fraction of gold tag pairs / tag sequences (read from the Anafora
    annotations under ``gold_path``) fall inside the mask.
    """
    samples = read_samples_()
    model_name = "clulab/roberta-timex-semeval"
    _log.info("Loading %s", model_name)
    config = AutoConfig.from_pretrained(model_name)
    token_clf = AutoModelForTokenClassification.from_pretrained(model_name, config=config)
    model = RoBERTagger(token_clf, config.num_labels, temperature)
    _log.info("Initializing transitions")
    torch.nn.init.zeros_(model.start_transition)
    torch.nn.init.zeros_(model.transition)
    # Forbid invalid BIO transitions: I-X may not start a sequence, and may only
    # follow B-X or I-X of the same type X.
    for lid, label in config.id2label.items():
        if not label.startswith("I-"):
            continue
        with torch.no_grad():
            model.start_transition[lid] = -1e9
        for plid, plabel in config.id2label.items():
            if plabel == "O" or plabel[2:] != label[2:]:
                with torch.no_grad():
                    model.transition[plid, lid] = -1e9
    model.to(device)
    _log.info("Computing ambiguous PTST tag pairs mask")
    model.eval()
    ptst_masks, _ids = [], []
    pbar = tqdm(total=sum(len(s["word_ids"]) for s in samples), unit="tok")
    for batch in BucketIterator(samples, lambda s: len(s["word_ids"]), batch_size):
        arr = batch.to_array()
        assert arr["mask"].all()
        words = torch.from_numpy(arr["word_ids"]).long().to(device)
        with torch.no_grad():
            ptst_mask = compute_ambiguous_tag_pairs_mask(model(words), thresh)
        ptst_masks.extend(ptst_mask.tolist())
        _ids.extend(arr["_id"].tolist())
        pbar.update(int(arr["mask"].sum()))
    pbar.close()
    assert len(ptst_masks) == len(samples)
    assert len(_ids) == len(samples)
    # BucketIterator shuffles batch order; map masks back via _id.
    for i, ptst_mask in zip(_ids, ptst_masks):
        samples[i]["ptst_mask"] = ptst_mask
    _log.info("Reporting coverage of gold labels")
    # Group samples by document path relative to the corpus root.
    group = defaultdict(list)
    for s in samples:
        k = str(s["path"])[len(f"{corpus['path']}/") :]
        group[k].append(s)
    n_cov_tp, n_total_tp, n_cov_ts, n_total_ts = 0, 0, 0, 0
    for dirpath, _, filenames in os.walk(gold_path):
        if not filenames:
            continue
        # Gold layout: exactly one annotation file per document directory.
        if len(filenames) > 1:
            raise ValueError(f"more than 1 file is found in {dirpath}")
        if not filenames[0].endswith(".TimeNorm.gold.completed.xml"):
            raise ValueError(f"{filenames[0]} doesn't have the expected suffix")
        doc_path = os.path.join(dirpath, filenames[0])
        data = AnaforaData.from_file(doc_path)
        prefix, suffix = f"{gold_path}/", ".TimeNorm.gold.completed.xml"
        doc_path = doc_path[len(prefix) : -len(suffix)]
        tok_spans = [p for s in group[doc_path] for p in s["spans"]]
        tok_spans.sort()
        # Project gold annotations onto token positions as BIO labels; only
        # annotations that align exactly with token boundaries are kept.
        labeling = {}
        for ann in data.annotations:
            if len(ann.spans) != 1:
                raise ValueError("found annotation with >1 span")
            span = ann.spans[0]
            beg = 0
            while beg < len(tok_spans) and tok_spans[beg][0] < span[0]:
                beg += 1
            end = beg
            while end < len(tok_spans) and tok_spans[end][1] < span[1]:
                end += 1
            if (
                beg < len(tok_spans)
                and end < len(tok_spans)
                and tok_spans[beg][0] == span[0]
                and tok_spans[end][1] == span[1]
                and beg not in labeling
            ):
                labeling[beg] = f"B-{ann.type}"
                for i in range(beg + 1, end + 1):
                    if i not in labeling:
                        labeling[i] = f"I-{ann.type}"
        labels = ["O"] * len(tok_spans)
        for k, v in labeling.items():
            labels[k] = v
        # Count gold tag pairs (and whole sequences) that the PTST mask covers;
        # offset tracks this sample's slice of the document's token list.
        offset = 0
        for s in group[doc_path]:
            ts_covd = True
            for i in range(1, len(s["spans"])):
                plab = labels[offset + i - 1]
                lab = labels[offset + i]
                if s["ptst_mask"][i - 1][config.label2id[plab]][config.label2id[lab]]:
                    n_cov_tp += 1
                else:
                    ts_covd = False
                n_total_tp += 1
            if ts_covd:
                n_cov_ts += 1
            n_total_ts += 1
            offset += len(s["spans"])
    # NOTE(review): raises ZeroDivisionError if gold_path yields no tag pairs —
    # presumably gold_path is always set for this command; confirm.
    _log.info(
        "Number of covered tag pairs: %d out of %d (%.1f%%)",
        n_cov_tp,
        n_total_tp,
        100.0 * n_cov_tp / n_total_tp,
    )
    _log.info(
        "Number of covered tag sequences: %d out of %d (%.1f%%)",
        n_cov_ts,
        n_total_ts,
        100.0 * n_cov_ts / n_total_ts,
    )
@ex.automain
def finetune(
_log,
_run,
_rnd,
corpus,
artifacts_dir="artifacts",
overwrite=False,
temperature=1.0,
freeze_embeddings=True,
freeze_encoder_up_to=1,
device="cpu",
thresh=0.95,
batch_size=16,
lr=1e-5,
max_epoch=5,
predict_on_finished=False,
):
"""Finetune/train the source model on unlabeled target data."""
artifacts_dir = Path(artifacts_dir)
artifacts_dir.mkdir(exist_ok=overwrite)
samples = read_samples_()
eval_samples = read_samples_(max_length=None)
model_name = "clulab/roberta-timex-semeval"
_log.info("Loading %s", model_name)
config = AutoConfig.from_pretrained(model_name)
token_clf = AutoModelForTokenClassification.from_pretrained(model_name, config=config)
model = RoBERTagger(token_clf, config.num_labels, temperature)
_log.info("Initializing transitions")
torch.nn.init.zeros_(model.start_transition)
torch.nn.init.zeros_(model.transition)
for lid, label in config.id2label.items():
if not label.startswith("I-"):
continue
with torch.no_grad():
model.start_transition[lid] = -1e9
for plid, plabel in config.id2label.items():
if plabel == "O" or plabel[2:] != label[2:]:
with torch.no_grad():
model.transition[plid, lid] = -1e9
for name, p in model.named_parameters():
freeze = False
if freeze_embeddings and ".embeddings." in | |
# Number of character features consumed by the decision-tree classifier below.
NUM_FEATURES = 10
# Alphabet (Ukrainian letters plus space, apostrophe and hyphen) used to map
# characters to the integer feature values the tree tests against.
ALPH = " абвгґдеєжзиіїйклмнопрстуфхцчшщьюя'-"
import sys  # NOTE(review): sys appears unused in the visible portion — confirm before removing
def decision_tree(f1, f2, f3, f4, f5, f6, f7, f8, f9, f10):
# DecisionTreeClassifier(class_weight=None, criterion='entropy', max_depth=None,
# max_features=None, max_leaf_nodes=3000,
# min_impurity_decrease=0.0, min_impurity_split=None,
# min_samples_leaf=1, min_samples_split=2,
# min_weight_fraction_leaf=0.0, presort=False,
# random_state=42, splitter='best')
if f2 <= 21:
if f1 <= 12:
if f2 <= 17:
if f2 <= 16:
if f2 <= 3:
if f1 <= 11:
if f3 <= 3:
if f4 <= 23:
if f4 <= 1:
if f3 <= 2:
return 0
else:
return 3
else:
return 1
else:
if f1 <= 3:
if f3 <= 2:
return 1
else:
if f5 <= 27:
if f5 <= 9:
return 3
else:
if f4 <= 28:
if f5 <= 12:
if f6 <= 10:
return 3
else:
if f6 <= 20:
return 4
else:
return 3
else:
return 3
else:
return 3
else:
return 3
else:
return 1
else:
if f2 <= 2:
if f1 <= 3:
if f1 <= 2:
if f2 <= 1:
return 0
else:
if f3 <= 12:
return 0
else:
return 1
else:
if f3 <= 27:
if f3 <= 21:
if f3 <= 15:
return 1
else:
if f3 <= 16:
if f4 <= 19:
return 1
else:
return 0
else:
return 1
else:
if f4 <= 20:
if f4 <= 12:
return 1
else:
return 3
else:
return 1
else:
return 1
else:
if f1 <= 6:
if f1 <= 5:
return 1
else:
return 0
else:
if f1 <= 8:
if f1 <= 7:
if f3 <= 9:
return 3
else:
return 1
else:
return 1
else:
if f1 <= 10:
return 0
else:
if f3 <= 8:
return 3
else:
return 1
else:
if f10 <= 0:
if f3 <= 8:
return 1
else:
if f3 <= 16:
if f1 <= 6:
if f3 <= 11:
return 1
else:
if f3 <= 12:
return 0
else:
return 1
else:
if f3 <= 11:
if f4 <= 9:
return 2
else:
return 1
else:
return 1
else:
if f3 <= 20:
if f1 <= 8:
return 1
else:
if f1 <= 10:
return 0
else:
return 1
else:
return 1
else:
if f8 <= 0:
return 9
else:
if f3 <= 20:
if f7 <= 0:
return 9
else:
return 1
else:
return 1
else:
if f3 <= 20:
if f3 <= 18:
if f3 <= 8:
if f4 <= 27:
if f4 <= 26:
if f3 <= 6:
return 1
else:
if f4 <= 16:
return 3
else:
if f4 <= 19:
if f5 <= 1:
return 3
else:
if f5 <= 6:
return 4
else:
if f5 <= 8:
return 3
else:
return 4
else:
if f4 <= 22:
return 3
else:
return 1
else:
if f5 <= 30:
if f5 <= 14:
if f5 <= 10:
return 4
else:
if f5 <= 11:
return 1
else:
return 5
else:
return 4
else:
return 5
else:
if f3 <= 6:
return 1
else:
return 3
else:
return 1
else:
if f4 <= 15:
if f5 <= 13:
if f5 <= 10:
if f4 <= 11:
return 3
else:
if f5 <= 1:
return 3
else:
if f5 <= 6:
return 4
else:
if f5 <= 8:
return 3
else:
return 4
else:
return 3
else:
if f4 <= 12:
if f4 <= 5:
return 3
else:
if f5 <= 20:
if f4 <= 9:
if f5 <= 18:
return 3
else:
return 5
else:
return 3
else:
return 3
else:
if f5 <= 30:
if f5 <= 27:
if f6 <= 10:
if f6 <= 1:
return 4
else:
return 3
else:
if f5 <= 23:
if f5 <= 22:
if f5 <= 18:
return 4
else:
if f5 <= 19:
return 3
else:
return 4
else:
return 4
else:
return 3
else:
return 4
else:
return 3
else:
if f4 <= 27:
if f5 <= 19:
if f5 <= 10:
return 3
else:
if f2 <= 2:
return 1
else:
if f6 <= 30:
if f7 <= 32:
if f5 <= 18:
if f7 <= 1:
if f8 <= 27:
return 3
else:
return 5
else:
return 3
else:
return 3
else:
if f6 <= 19:
if f6 <= 17:
return 3
else:
return 5
else:
return 3
else:
return 3
else:
if f4 <= 16:
if f5 <= 23:
return 1
else:
return 3
else:
return 3
else:
return 1
else:
return 1
else:
if f2 <= 15:
if f3 <= 30:
if f2 <= 6:
if f2 <= 5:
if f8 <= 0:
if f1 <= 10:
return 2
else:
if f1 <= 11:
return 2
else:
return 1
else:
if f3 <= 17:
return 1
else:
if f3 <= 20:
return 1
else:
return 2
else:
if f3 <= 18:
return 1
else:
if f3 <= 19:
if f4 <= 24:
if f4 <= 3:
if f5 <= 1:
return 1
else:
return 3
else:
return 1
else:
return 3
else:
return 1
else:
if f2 <= 8:
if f1 <= 7:
return 0
else:
if f1 <= 8:
return 2
else:
return 0
else:
if f1 <= 3:
if f1 <= 2:
if f4 <= 13:
if f2 <= 11:
return 1
else:
if f4 <= 10:
if f4 <= 1:
return 2
else:
if f3 <= 19:
return 1
else:
return 2
else:
if f3 <= 3:
return 2
else:
if f3 <= 15:
if f6 <= 21:
return 2
else:
return 1
else:
return 2
else:
if f4 <= 18:
if f3 <= 11:
if f3 <= 9:
if f2 <= 10:
return 1
else:
return 2
else:
return 1
else:
if f2 <= 11:
return 1
else:
return 2
else:
if f2 <= 11:
return 1
else:
if f3 <= 13:
if f3 <= 10:
if f3 <= 1:
return 1
else:
return 2
else:
if f4 <= 26:
if f10 <= 0:
if f3 <= 11:
return 1
else:
return 0
else:
return 2
else:
return 1
else:
if f3 <= 27:
if f3 <= 23:
if f4 <= 32:
if f3 <= 22:
if f3 <= 18:
return 2
else:
if f3 <= 19:
return 1
else:
return 2
else:
return 2
else:
return 2
else:
return 1
else:
return 2
else:
if f4 <= 13:
if f4 <= 10:
if f4 <= 1:
if f2 <= 11:
if f3 <= 21:
return 2
else:
if f3 <= 24:
return 3
else:
return 2
else:
return 2
else:
if f4 <= 6:
if f3 <= 26:
if f3 <= 13:
return 2
else:
if f3 <= 15:
return 3
else:
if f3 <= 20:
if f2 <= 11:
return 2
else:
return 3
else:
return 2
else:
if f5 <= 11:
return 3
else:
if f5 <= 15:
return 2
else:
return 3
else:
if f4 <= 8:
return 2
else:
if f3 <= 13:
return 2
else:
if f3 <= 15:
return 3
else:
return 2
else:
if f2 <= 11:
if f3 <= 21:
return 2
else:
if f3 <= 24:
return 3
else:
return 2
else:
return 2
else:
if f3 <= 9:
if f2 <= 11:
return 2
else:
if f4 <= 19:
if f4 <= 18:
return 2
else:
if f3 <= 5:
if f10 <= 25:
return 2
else:
return 11
else:
return 4
else:
return 2
else:
if f3 <= 15:
if f2 <= 12:
if f5 <= 10:
return 2
else:
if f4 <= 30:
if f4 <= 27:
if f4 <= 23:
if f4 <= 22:
if f4 <= 18:
return 3
else:
if f4 <= 19:
if f2 <= 11:
return 3
else:
return 2
else:
return 2
else:
return 3
else:
return 2
else:
| |
cause page reading to stop.
# This must be done before auto-pass-through occurs, as we want to stop page reading even if pass-through will be automatically enabled by this focus change.
speech.cancelSpeech()
self.passThrough=passThrough
if not self.passThrough:
# We read the info from the browseMode document instead of the control itself.
speech.speakTextInfo(focusInfo, reason=OutputReason.FOCUS)
# However, we still want to update the speech property cache so that property changes will be spoken properly.
speech.speakObject(obj, controlTypes.OutputReason.ONLYCACHE)
# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
# we need to call it manually here.
vision.handler.handleGainFocus(obj)
else:
# Although we are going to speak the object rather than textInfo content, we still need to silently speak the textInfo content so that the textInfo speech cache is updated correctly.
# Not doing this would cause later browseMode speaking to either not speak controlFields it had entered, or speak controlField exits after having already exited.
# See #7435 for a discussion on this.
speech.speakTextInfo(focusInfo, reason=OutputReason.ONLYCACHE)
self._replayFocusEnteredEvents()
nextHandler()
focusInfo.collapse()
self._set_selection(focusInfo, reason=OutputReason.FOCUS)
else:
# The virtual caret was already at the focused node.
if not self.passThrough:
# This focus change was caused by a virtual caret movement, so don't speak the focused node to avoid double speaking.
# However, we still want to update the speech property cache so that property changes will be spoken properly.
speech.speakObject(obj, OutputReason.ONLYCACHE)
if config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
# we need to call it manually here.
# Note: this is usually called after the caret movement.
vision.handler.handleGainFocus(obj)
elif (
self._objPendingFocusBeforeActivate
and obj == self._objPendingFocusBeforeActivate
and obj is not self._objPendingFocusBeforeActivate
):
# With auto focus focusable elements disabled, when the user activates
# an element (e.g. by pressing enter) or presses a key which we pass
# through (e.g. control+enter), we call _focusLastFocusableObject.
# However, the activation/key press might cause a property change
# before we get the focus event, so NVDA's normal reporting of
# changes to the focus won't pick it up.
# The speech property cache on _objPendingFocusBeforeActivate reflects
# the properties before the activation/key, so use that to speak any
# changes.
speech.speakObject(
self._objPendingFocusBeforeActivate,
OutputReason.CHANGE
)
self._objPendingFocusBeforeActivate = None
else:
self._replayFocusEnteredEvents()
return nextHandler()
self._postGainFocus(obj)
event_gainFocus.ignoreIsReady=True
def _handleScrollTo(
self,
obj: Union[NVDAObject, textInfos.TextInfo],
) -> bool:
"""Handle scrolling the browseMode document to a given object in response to an event.
Subclasses should call this from an event which indicates that the document has scrolled.
@postcondition: The virtual caret is moved to L{obj} and the buffer content for L{obj} is reported.
@param obj: The object to which the document should scroll.
@return: C{True} if the document was scrolled, C{False} if not.
@note: If C{False} is returned, calling events should probably call their nextHandler.
"""
if self.programmaticScrollMayFireEvent and self._lastProgrammaticScrollTime and time.time() - self._lastProgrammaticScrollTime < 0.4:
# This event was probably caused by this browseMode document's call to scrollIntoView().
# Therefore, ignore it. Otherwise, the cursor may bounce back to the scroll point.
# However, pretend we handled it, as we don't want it to be passed on to the object either.
return True
if isinstance(obj, NVDAObject):
try:
scrollInfo = self.makeTextInfo(obj)
except (NotImplementedError, RuntimeError):
return False
elif isinstance(obj, textInfos.TextInfo):
scrollInfo = obj.copy()
else:
raise ValueError(f"{obj} is not a supported type")
#We only want to update the caret and speak the field if we're not in the same one as before
caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
# Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping.
caretInfo.expand(textInfos.UNIT_CHARACTER)
if not scrollInfo.isOverlapping(caretInfo):
if scrollInfo.isCollapsed:
scrollInfo.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(scrollInfo, reason=OutputReason.CARET)
scrollInfo.collapse()
self.selection = scrollInfo
return True
return False
def _isNVDAObjectInApplication_noWalk(self, obj):
"""Determine whether a given object is within an application without walking ancestors.
The base implementation simply checks whether the object has an application role.
Subclasses can override this if they can provide a definite answer without needing to walk.
For example, for virtual buffers, if the object is in the buffer,
it definitely isn't in an application.
L{_isNVDAObjectInApplication} calls this and walks to the next ancestor if C{None} is returned.
@return: C{True} if definitely in an application,
C{False} if definitely not in an application,
C{None} if this can't be determined without walking ancestors.
"""
if (
# roles such as application and dialog should be treated as being within a "application" and therefore outside of the browseMode document.
obj.role in self.APPLICATION_ROLES
# Anything other than an editable text box inside a combo box should be
# treated as being outside a browseMode document.
or (
obj.role != controlTypes.Role.EDITABLETEXT and obj.container
and obj.container.role == controlTypes.Role.COMBOBOX
)
):
return True
return None
	def _isNVDAObjectInApplication(self, obj):
		"""Determine whether a given object is within an application.
		The object is considered to be within an application if it or one of its ancestors has an application role.
		This should only be called on objects beneath the treeInterceptor's root NVDAObject.
		@param obj: The object in question.
		@type obj: L{NVDAObjects.NVDAObject}
		@return: C{True} if L{obj} is within an application, C{False} otherwise.
		@rtype: bool
		"""
		# We cache the result for each object we walk.
		# There can be browse mode documents within other documents and the result might be different between these,
		# so the cache must be maintained on the TreeInterceptor rather than the object itself.
		try:
			cache = self._isInAppCache
		except AttributeError:
			# Create this lazily, as this method isn't used by all browse mode implementations.
			cache = self._isInAppCache = weakref.WeakKeyDictionary()
		objs = []
		def doResult(result):
			# Cache this on descendants we've walked over.
			for obj in objs:
				cache[obj] = result
			return result
		while obj and obj != self.rootNVDAObject:
			inApp = cache.get(obj)
			if inApp is not None:
				# We found a cached result.
				return doResult(inApp)
			objs.append(obj)
			inApp = self._isNVDAObjectInApplication_noWalk(obj)
			if inApp is not None:
				return doResult(inApp)
			# We must walk ancestors.
			# Cache container.
			# NOTE(review): assigning the fetched container back to obj.container
			# presumably pins the computed property value on the object so later
			# accesses don't re-fetch it — confirm against NVDAObject's property caching.
			container = obj.container
			obj.container = container
			obj = container
		# Reached the root (or ran out of ancestors) without an application: not in an app.
		return doResult(False)
	def _get_documentConstantIdentifier(self):
		"""Get the constant identifier for this document.
		This identifier should uniquely identify all instances (not just one instance) of a document for at least the current session of the hosting application.
		Generally, the document URL should be used.
		The base implementation has no identifier; subclasses override this.
		@return: The constant identifier for this document, C{None} if there is none.
		"""
		return None
def _get_shouldRememberCaretPositionAcrossLoads(self):
"""Specifies whether the position of the caret should be remembered when this document is loaded again.
This is useful when the browser remembers the scroll position for the document,
but does not communicate this information via APIs.
The remembered caret position is associated with this document using L{documentConstantIdentifier}.
@return: C{True} if the caret position should be remembered, C{False} if not.
@rtype: bool
"""
docConstId = self.documentConstantIdentifier
# Return True if the URL indicates that this is probably a web browser document.
# We do this check because we don't want to remember caret positions for email messages, etc.
if isinstance(docConstId, str):
protocols=("http", "https", "ftp", "ftps", "file")
protocol=docConstId.split("://", 1)[0]
return protocol in protocols
return False
def _getInitialCaretPos(self):
"""Retrieve the initial position of the caret after the buffer has been loaded.
This position, if any, will be passed to L{makeTextInfo}.
Subclasses should extend this method.
@return: The initial position of the caret, C{None} if there isn't one.
@rtype: TextInfo position
"""
if self.shouldRememberCaretPositionAcrossLoads:
try:
return self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier]
except KeyError:
pass
return None
def getEnclosingContainerRange(self, textRange):
textRange = textRange.copy()
textRange.collapse()
try:
item = next(self._iterNodesByType("container", "up", textRange))
except (NotImplementedError,StopIteration):
try:
item = next(self._iterNodesByType("landmark", "up", textRange))
except (NotImplementedError,StopIteration):
return
return item.textInfo
def script_moveToStartOfContainer(self,gesture):
info=self.makeTextInfo(textInfos.POSITION_CARET)
info.expand(textInfos.UNIT_CHARACTER)
container=self.getEnclosingContainerRange(info)
if not container:
# Translators: Reported when the user attempts to move to the start or end of a container
# (list, table, etc.) but there is no container.
ui.message(_("Not in a container"))
return
container.collapse()
self._set_selection(container, reason=OutputReason.QUICKNAV)
if not willSayAllResume(gesture):
container.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(container, reason=OutputReason.FOCUS)
script_moveToStartOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
# Translators: Description for the Move to start of container command in browse mode.
script_moveToStartOfContainer.__doc__=_("Moves to the start of the container element, such as a list or table")
def script_movePastEndOfContainer(self,gesture):
info=self.makeTextInfo(textInfos.POSITION_CARET)
info.expand(textInfos.UNIT_CHARACTER)
container=self.getEnclosingContainerRange(info)
if not container:
# Translators: Reported when the user attempts to move to the start or end of a container
# (list, table, etc.) but there is no container.
ui.message(_("Not in a container"))
return
container.collapse(end=True)
docEnd=container.obj.makeTextInfo(textInfos.POSITION_LAST)
if container.compareEndPoints(docEnd,"endToEnd")>=0:
container=docEnd
# Translators: a message reported when:
# Review cursor is at the bottom line of the current navigator object.
# Landing at the end of a browse mode document when trying to jump to the end of the current container.
ui.message(_("Bottom"))
self._set_selection(container, reason=OutputReason.QUICKNAV)
if not willSayAllResume(gesture):
container.expand(textInfos.UNIT_LINE)
speech.speakTextInfo(container, reason=OutputReason.FOCUS)
script_movePastEndOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
# Translators: Description for the Move past end of container command in browse mode.
script_movePastEndOfContainer.__doc__=_("Moves past the end of the container element, such as a list or table")
	# Minimum length, in characters, for a text range between links to count as a
	# block of non-link text for quick navigation.
	NOT_LINK_BLOCK_MIN_LEN = 30
	def _isSuitableNotLinkBlock(self, textRange):
		# A range qualifies as a "not link block" if it is long enough.
		return len(textRange.text) >= self.NOT_LINK_BLOCK_MIN_LEN
def _iterNotLinkBlock(self, direction="next", pos=None):
links = self._iterNodesByType("link", direction=direction, pos=pos)
# We want to compare each link against the next link.
item1 = next(links, None)
if item1 is None:
return
for item2 in links:
# If the distance between the links is small, this is probably just a piece of non-link text within a block of links; e.g. an inactive link of a nav bar.
if direction=="previous":
textRange=item1.textInfo.copy()
textRange.collapse()
textRange.setEndPoint(item2.textInfo,"startToEnd")
else:
textRange=item2.textInfo.copy()
textRange.collapse()
textRange.setEndPoint(item1.textInfo,"startToEnd")
if self._isSuitableNotLinkBlock(textRange):
yield TextInfoQuickNavItem("notLinkBlock", self, textRange)
item1=item2
__gestures={
"kb:NVDA+d": "activateLongDesc",
"kb:alt+upArrow": "collapseOrExpandControl",
"kb:alt+downArrow": "collapseOrExpandControl",
"kb:tab": "tab",
"kb:shift+tab": "shiftTab",
"kb:shift+,": "moveToStartOfContainer",
"kb:,": "movePastEndOfContainer",
}
@script(
description=_(
# Translators: the description for the toggleScreenLayout script.
"Toggles on and off if the screen | |
import collections
import itertools
import random
import numpy as np
import torch
import torch.nn.functional as F
import pfrl
from pfrl import agent
from pfrl.utils.batch_states import batch_states
from pfrl.utils.mode_of_distribution import mode_of_distribution
from pfrl.utils.recurrent import (
concatenate_recurrent_states,
flatten_sequences_time_first,
get_recurrent_state_at,
mask_recurrent_state_at,
one_step_forward,
pack_and_forward,
)
def _mean_or_nan(xs):
"""Return its mean a non-empty sequence, numpy.nan for a empty one."""
return np.mean(xs) if xs else np.nan
def _elementwise_clip(x, x_min, x_max):
"""Elementwise clipping
Note: torch.clamp supports clipping to constant intervals
"""
return torch.min(torch.max(x, x_min), x_max)
def _add_advantage_and_value_target_to_episode(episode, gamma, lambd):
"""Add advantage and value target values to an episode."""
adv = 0.0
for transition in reversed(episode):
td_err = (
transition["reward"]
+ (gamma * transition["nonterminal"] * transition["next_v_pred"])
- transition["v_pred"]
)
adv = td_err + gamma * lambd * adv
transition["adv"] = adv
transition["v_teacher"] = adv + transition["v_pred"]
def _add_advantage_and_value_target_to_episodes(episodes, gamma, lambd):
    """Compute advantages and value targets, in place, for every episode."""
    for ep in episodes:
        _add_advantage_and_value_target_to_episode(ep, gamma=gamma, lambd=lambd)
def _add_log_prob_and_value_to_episodes_recurrent(
    episodes,
    model,
    phi,
    batch_states,
    obs_normalizer,
    device,
):
    """Add ``log_prob``, ``v_pred`` and ``next_v_pred`` to every transition.

    The recurrent ``model`` is run once over all episodes, packed as padded
    sequences, and the predicted log-probabilities and state values are
    written back into each transition dict in place.
    """
    # Sort desc by lengths so that pack_sequence does not change the order
    episodes = sorted(episodes, key=len, reverse=True)
    # Prepare data for a recurrent model
    seqs_states = []
    seqs_next_states = []
    for ep in episodes:
        states = batch_states([transition["state"] for transition in ep], device, phi)
        next_states = batch_states(
            [transition["next_state"] for transition in ep], device, phi
        )
        if obs_normalizer:
            # Normalize with frozen statistics; the normalizer is updated elsewhere.
            states = obs_normalizer(states, update=False)
            next_states = obs_normalizer(next_states, update=False)
        seqs_states.append(states)
        seqs_next_states.append(next_states)
    # Flattened view of all transitions, time-major, matching the packed outputs.
    flat_transitions = flatten_sequences_time_first(episodes)
    # Predict values using a recurrent model
    with torch.no_grad(), pfrl.utils.evaluating(model):
        # Initial recurrent states for each episode, batched together.
        rs = concatenate_recurrent_states([ep[0]["recurrent_state"] for ep in episodes])
        next_rs = concatenate_recurrent_states(
            [ep[0]["next_recurrent_state"] for ep in episodes]
        )
        assert (rs is None) or (next_rs is None) or (len(rs) == len(next_rs))
        (flat_distribs, flat_vs), _ = pack_and_forward(model, seqs_states, rs)
        (_, flat_next_vs), _ = pack_and_forward(model, seqs_next_states, next_rs)
        flat_actions = torch.tensor(
            [b["action"] for b in flat_transitions], device=device
        )
        flat_log_probs = flat_distribs.log_prob(flat_actions).cpu().numpy()
        flat_vs = flat_vs.cpu().numpy()
        flat_next_vs = flat_next_vs.cpu().numpy()
    # Add predicted values to transitions
    for transition, log_prob, v, next_v in zip(
        flat_transitions, flat_log_probs, flat_vs, flat_next_vs
    ):
        transition["log_prob"] = float(log_prob)
        transition["v_pred"] = float(v)
        transition["next_v_pred"] = float(next_v)
def _add_log_prob_and_value_to_episodes(
    episodes,
    model,
    phi,
    batch_states,
    obs_normalizer,
    device,
):
    """Add ``log_prob``, ``v_pred`` and ``next_v_pred`` to every transition.

    Non-recurrent counterpart of
    ``_add_log_prob_and_value_to_episodes_recurrent``: all transitions are
    flattened into one batch and evaluated with a single forward pass each
    for states and next states.
    """
    transitions = list(itertools.chain.from_iterable(episodes))
    # Batch all states / next states across episodes.
    obs = batch_states([tr["state"] for tr in transitions], device, phi)
    next_obs = batch_states([tr["next_state"] for tr in transitions], device, phi)
    if obs_normalizer:
        # Normalize with frozen statistics; the normalizer is updated elsewhere.
        obs = obs_normalizer(obs, update=False)
        next_obs = obs_normalizer(next_obs, update=False)
    with torch.no_grad(), pfrl.utils.evaluating(model):
        distribs, values = model(obs)
        _, next_values = model(next_obs)
        acts = torch.tensor([tr["action"] for tr in transitions], device=device)
        log_probs = distribs.log_prob(acts).cpu().numpy()
        values = values.cpu().numpy().ravel()
        next_values = next_values.cpu().numpy().ravel()
    for tr, log_prob, value, next_value in zip(
        transitions, log_probs, values, next_values
    ):
        tr["log_prob"] = log_prob
        tr["v_pred"] = value
        tr["next_v_pred"] = next_value
def _limit_sequence_length(sequences, max_len):
assert max_len > 0
new_sequences = []
for sequence in sequences:
while len(sequence) > max_len:
new_sequences.append(sequence[:max_len])
sequence = sequence[max_len:]
assert 0 < len(sequence) <= max_len
new_sequences.append(sequence)
return new_sequences
def _yield_subset_of_sequences_with_fixed_number_of_items(sequences, n_items):
assert n_items > 0
stack = list(reversed(sequences))
while stack:
subset = []
count = 0
while count < n_items:
sequence = stack.pop()
subset.append(sequence)
count += len(sequence)
if count > n_items:
# Split last sequence
sequence_to_split = subset[-1]
n_exceeds = count - n_items
assert n_exceeds > 0
subset[-1] = sequence_to_split[:-n_exceeds]
stack.append(sequence_to_split[-n_exceeds:])
assert sum(len(seq) for seq in subset) == n_items
yield subset
def _compute_explained_variance(transitions):
"""Compute 1 - Var[return - v]/Var[return].
This function computes the fraction of variance that value predictions can
explain about returns.
"""
t = np.array([tr["v_teacher"] for tr in transitions])
y = np.array([tr["v_pred"] for tr in transitions])
vart = np.var(t)
if vart == 0:
return np.nan
else:
return float(1 - np.var(t - y) / vart)
def _make_dataset_recurrent(
    episodes,
    model,
    phi,
    batch_states,
    obs_normalizer,
    gamma,
    lambd,
    max_recurrent_sequence_len,
    device,
):
    """Build the training dataset for the recurrent case.

    Annotates episodes in place with log-probs, values, advantages and value
    targets, then returns them as a list of sequences, split to at most
    ``max_recurrent_sequence_len`` transitions each when a limit is given.
    """
    _add_log_prob_and_value_to_episodes_recurrent(
        episodes=episodes,
        model=model,
        phi=phi,
        batch_states=batch_states,
        obs_normalizer=obs_normalizer,
        device=device,
    )
    _add_advantage_and_value_target_to_episodes(episodes, gamma=gamma, lambd=lambd)
    if max_recurrent_sequence_len is None:
        return list(episodes)
    return _limit_sequence_length(episodes, max_recurrent_sequence_len)
def _make_dataset(
    episodes, model, phi, batch_states, obs_normalizer, gamma, lambd, device
):
    """Build the training dataset for the non-recurrent case.

    Annotates episodes in place with log-probs, values, advantages and value
    targets, then returns all transitions flattened into a single list.
    """
    _add_log_prob_and_value_to_episodes(
        episodes=episodes,
        model=model,
        phi=phi,
        batch_states=batch_states,
        obs_normalizer=obs_normalizer,
        device=device,
    )
    _add_advantage_and_value_target_to_episodes(episodes, gamma=gamma, lambd=lambd)
    flattened = itertools.chain.from_iterable(episodes)
    return list(flattened)
def _yield_minibatches(dataset, minibatch_size, num_epochs):
assert dataset
buf = []
n = 0
while n < len(dataset) * num_epochs:
while len(buf) < minibatch_size:
buf = random.sample(dataset, k=len(dataset)) + buf
assert len(buf) >= minibatch_size
yield buf[-minibatch_size:]
n += minibatch_size
buf = buf[:-minibatch_size]
class PPO(agent.AttributeSavingMixin, agent.BatchAgent):
"""Proximal Policy Optimization
See https://arxiv.org/abs/1707.06347
Args:
model (torch.nn.Module): Model to train (including recurrent models)
state s |-> (pi(s, _), v(s))
optimizer (torch.optim.Optimizer): Optimizer used to train the model
gpu (int): GPU device id if not None nor negative
gamma (float): Discount factor [0, 1]
lambd (float): Lambda-return factor [0, 1]
phi (callable): Feature extractor function
value_func_coef (float): Weight coefficient for loss of
value function (0, inf)
entropy_coef (float): Weight coefficient for entropy bonus [0, inf)
update_interval (int): Model update interval in step
minibatch_size (int): Minibatch size
epochs (int): Training epochs in an update
clip_eps (float): Epsilon for pessimistic clipping of likelihood ratio
to update policy
clip_eps_vf (float): Epsilon for pessimistic clipping of value
to update value function. If it is ``None``, value function is not
clipped on updates.
standardize_advantages (bool): Use standardized advantages on updates
recurrent (bool): If set to True, `model` is assumed to implement
`pfrl.nn.Recurrent` and update in a recurrent
manner.
max_recurrent_sequence_len (int): Maximum length of consecutive
            sequences of transitions in a minibatch for updating the model.
This value is used only when `recurrent` is True. A smaller value
will encourage a minibatch to contain more and shorter sequences.
act_deterministically (bool): If set to True, choose most probable
actions in the act method instead of sampling from distributions.
max_grad_norm (float or None): Maximum L2 norm of the gradient used for
gradient clipping. If set to None, the gradient is not clipped.
value_stats_window (int): Window size used to compute statistics
of value predictions.
entropy_stats_window (int): Window size used to compute statistics
of entropy of action distributions.
value_loss_stats_window (int): Window size used to compute statistics
of loss values regarding the value function.
policy_loss_stats_window (int): Window size used to compute statistics
of loss values regarding the policy.
Statistics:
average_value: Average of value predictions on non-terminal states.
It's updated on (batch_)act_and_train.
average_entropy: Average of entropy of action distributions on
non-terminal states. It's updated on (batch_)act_and_train.
average_value_loss: Average of losses regarding the value function.
It's updated after the model is updated.
average_policy_loss: Average of losses regarding the policy.
It's updated after the model is updated.
n_updates: Number of model updates so far.
explained_variance: Explained variance computed from the last batch.
"""
saved_attributes = ("model", "optimizer", "obs_normalizer")
    def __init__(
        self,
        model,
        optimizer,
        obs_normalizer=None,
        gpu=None,
        gamma=0.99,
        lambd=0.95,
        phi=lambda x: x,
        value_func_coef=1.0,
        entropy_coef=0.01,
        update_interval=2048,
        minibatch_size=64,
        epochs=10,
        clip_eps=0.2,
        clip_eps_vf=None,
        standardize_advantages=True,
        batch_states=batch_states,
        recurrent=False,
        max_recurrent_sequence_len=None,
        act_deterministically=False,
        max_grad_norm=None,
        value_stats_window=1000,
        entropy_stats_window=1000,
        value_loss_stats_window=100,
        policy_loss_stats_window=100,
    ):
        """Initialize the PPO agent.

        See the class docstring for the meaning of each argument.
        """
        self.model = model
        self.optimizer = optimizer
        self.obs_normalizer = obs_normalizer
        if gpu is not None and gpu >= 0:
            assert torch.cuda.is_available()
            # Move the model (and the observation normalizer, if any) to the GPU.
            self.device = torch.device("cuda:{}".format(gpu))
            self.model.to(self.device)
            if self.obs_normalizer is not None:
                self.obs_normalizer.to(self.device)
        else:
            self.device = torch.device("cpu")
        self.gamma = gamma
        self.lambd = lambd
        self.phi = phi
        self.value_func_coef = value_func_coef
        self.entropy_coef = entropy_coef
        self.update_interval = update_interval
        self.minibatch_size = minibatch_size
        self.epochs = epochs
        self.clip_eps = clip_eps
        self.clip_eps_vf = clip_eps_vf
        self.standardize_advantages = standardize_advantages
        self.batch_states = batch_states
        self.recurrent = recurrent
        self.max_recurrent_sequence_len = max_recurrent_sequence_len
        self.act_deterministically = act_deterministically
        self.max_grad_norm = max_grad_norm
        # Contains episodes used for next update iteration
        self.memory = []
        # Contains transitions of the last episode not moved to self.memory yet
        self.last_episode = []
        self.last_state = None
        self.last_action = None
        # Batch versions of last_episode, last_state, and last_action
        self.batch_last_episode = None
        self.batch_last_state = None
        self.batch_last_action = None
        # Recurrent states of the model
        self.train_recurrent_states = None
        self.train_prev_recurrent_states = None
        self.test_recurrent_states = None
        # Rolling windows used to report the statistics listed in the class docstring.
        self.value_record = collections.deque(maxlen=value_stats_window)
        self.entropy_record = collections.deque(maxlen=entropy_stats_window)
        self.value_loss_record = collections.deque(maxlen=value_loss_stats_window)
        self.policy_loss_record = collections.deque(maxlen=policy_loss_stats_window)
        self.explained_variance = np.nan
        self.n_updates = 0
def _initialize_batch_variables(self, num_envs):
self.batch_last_episode = [[] for _ in range(num_envs)]
self.batch_last_state = [None] * num_envs
self.batch_last_action = [None] * num_envs
def _update_if_dataset_is_ready(self):
dataset_size = (
sum(len(episode) for episode in self.memory)
+ len(self.last_episode)
+ (
0
if self.batch_last_episode is None
else sum(len(episode) for episode in self.batch_last_episode)
)
)
if dataset_size >= self.update_interval:
self._flush_last_episode()
if self.recurrent:
dataset = _make_dataset_recurrent(
episodes=self.memory,
model=self.model,
phi=self.phi,
| |
Example data of label.csv is as follows:
====== ======== ====
name movie rate
====== ======== ====
John StarWar1 5.0
Tim X-Man 3.5
Maggie StarWar1 4.5
====== ======== ====
>>> label_loader = dgl.data.EdgeLabelLoader(input='label.csv',
separator="\t")
>>> label_loader.addRelationalValidSet(['name', 'movie', 'rate'],
src_node_type='name',
dst_node_type='movie',
rows=np.arange(start=0, stop=100))
"""
if not isinstance(cols, list):
raise RuntimeError("The cols should be a list of string or int")
if len(cols) != 3:
raise RuntimeError("addRelationalValidSet accepts three columns " \
"for source node and destination node." \
"or three columns, the first column for source node, " \
"the second for destination node, " \
"and third for relation")
# TODO(xiangsx) add label/multilabel support in the future
rel_edges = self._load_relation_labels(cols, rows)
assert self._has_label is None or self._has_label is False, \
'For a single edge label loader, it can be has-label or no-label ' \
'but it can not be both.'
self._has_label = False
for rel_type, (src_nodes, dst_nodes) in rel_edges.items():
self._labels.append(((src_node_type, rel_type, dst_node_type),
src_nodes,
dst_nodes,
None,
(0., 1., 0.)))
def addTestSet(self, cols, multilabel=False, separator=None, rows=None, edge_type=None):
r"""Add Test Set.
Two or three columns of the **input** are chosen.
If only two columns are provied, they represent the
column names of the source nodes and destination nodes.
This represents the existance of the edges.
If three columns are provided, the first two columns
represent the column names of the source nodes and
destination nodes while the last column give the labels.
Multi-label is supported, but a separator is required to
split the labels.
Parameters
-----------
cols: list of str or list of int
Which columns to use. Supported data formats are:
(1) [str, str] column names for source node, destination node.
(2) [int, int] column numbers for source node, destination node.
(3) [str, str, str] column names for source node, destination node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as label.
(4) [int, int, int] column numbers for node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as label.
multilabel: bool
Whether it is a multi-label task.
Default: False
separator: str, optional
Delimiter(separator) used to split label data.
Default: None
rows: numpy.array or list of int
Which row(s) to load. None to load all.
Default: None
edge_type: str
Canonical edge type. If None, default edge type is chosen.
Default: None
Examples
---------
** Load test labels **
Example data of label.csv is as follows:
====== ======== ====
name movie rate
====== ======== ====
John StarWar1 5.0
Tim X-Man 3.5
Maggie StarWar1 4.5
====== ======== ====
>>> label_loader = dgl.data.EdgeLabelLoader(input='label.csv',
separator="\t")
>>> label_loader.addTestSet(['name', 'movie', 'rate'],
rows=np.arange(start=0, stop=100))
"""
if not isinstance(cols, list):
raise RuntimeError("The cols should be a list of string or int")
if len(cols) != 2 and len(cols) != 3:
assert len(cols) == 3, "Multi-class label requires one column for labels"
raise RuntimeError("addTestSet accepts two columns " \
"for source node and destination node." \
"or three columns, the first column for source node, " \
"the second for destination node, " \
"and third for labels")
if edge_type != None and len(edge_type) != 3:
raise RuntimeError("edge_type should be None or a tuple of " \
"(src_type, relation_type, dst_type)")
if multilabel:
assert separator is not None, "Multi-class label is supported, "\
"but a separator is required to split the labels"
src_nodes, dst_nodes, labels = \
self._load_labels(cols, multilabel, separator, rows)
if len(cols) == 3:
assert len(src_nodes) == len(labels), \
'Test nodes shape {} and labels shape {} mismatch'.format(len(src_nodes),
len(labels))
assert self._has_label is None or self._has_label is True, \
'For a single edge label loader, it can be has-label or no-label ' \
'but it can not be both'
self._has_label = True
else:
assert self._has_label is None or self._has_label is False, \
'For a single edge label loader, it can be has-label or no-label ' \
'but it can not be both'
self._has_label = False
assert self._is_multilabel is None or self._is_multilabel == multilabel, \
'For a single label loader, it can be multi-label or single-label ' \
'but it can not be both'
self._is_multilabel = multilabel
self._labels.append((edge_type,
src_nodes,
dst_nodes,
labels,
(0., 0., 1.)))
def addRelationalTestSet(self, cols, src_node_type='node', dst_node_type='node', rows=None):
r"""Add Testing Set with multiple relation types.
Three columns of the **input** are chosen. the first
two columns represent the column names of the source
nodes and destination nodes while the last column give
the relation type.
Parameters
-----------
cols: list of str or list of int
Which columns to use. Supported data formats are:
(1) [str, str, str] column names for source node, destination node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as relation type.
(2) [int, int, int] column numbers for node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as relation type.
src_node_type: str
Source node type.
Default: 'node'
dst_node_type: str
Destination node type.
Default: 'node'
rows: numpy.array or list of int
Which row(s) to load. None to load all.
Default: None
Notes
-----
We can use this func to load knowledge graphs
Examples
--------
** Load test labels **
Example data of label.csv is as follows:
====== ======== ====
name movie rate
====== ======== ====
John StarWar1 5.0
Tim X-Man 3.5
Maggie StarWar1 4.5
====== ======== ====
>>> label_loader = dgl.data.EdgeLabelLoader(input='label.csv',
separator="\t")
>>> label_loader.addRelationalTestSet(['name', 'movie', 'rate'],
src_node_type='name',
dst_node_type='movie',
rows=np.arange(start=0, stop=100))
"""
if not isinstance(cols, list):
raise RuntimeError("The cols should be a list of string or int")
if len(cols) != 3:
raise RuntimeError("addRelationalTestSet accepts three columns " \
"for source node and destination node." \
"or three columns, the first column for source node, " \
"the second for destination node, " \
"and third for relation")
# TODO(xiangsx) add label/multilabel support in the future
rel_edges = self._load_relation_labels(cols, rows)
assert self._has_label is None or self._has_label is False, \
'For a single edge label loader, it can be has-label or no-label ' \
'but it can not be both.'
self._has_label = False
for rel_type, (src_nodes, dst_nodes) in rel_edges.items():
self._labels.append(((src_node_type, rel_type, dst_node_type),
src_nodes,
dst_nodes,
None,
(0., 0., 1.)))
def addSet(self, cols, split_rate, multilabel=False, separator=None, rows=None, edge_type=None):
r"""Add Train/Valid/Test Set.
Two or three columns of the **input** are chosen.
        If only two columns are provided, they represent the
        column names of the source nodes and destination nodes.
        This represents the existence of the edges.
If three columns are provided, the first two columns
represent the column names of the source nodes and
destination nodes while the last column give the labels.
Multi-label is supported, but a separator is required to
split the labels.
Parameters
-----------
cols: list of str or list of int
Which columns to use. Supported data formats are:
(1) [str, str] column names for source node, destination node.
(2) [int, int] column numbers for source node, destination node.
(3) [str, str, str] column names for source node, destination node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as label.
(4) [int, int, int] column numbers for node and labels.
The first column is treated as source node name,
the second column is treated as destination node name and
the third column is treated as label.
split_rate: triple of float
[train, valid, test]: Random split rate, train + valid + test = 1.0,
any of train, valid and test can be 0.0
multilabel: bool
Whether it is a multi-label task.
Default: False
separator: str, optional
Delimiter(separator) used to split label data.
Default: None
| |
is supplied and dict values are something else than a list of strings or a list of ints
:exc:`~biopsykit.utils.exceptions.ValidationError`
if number of columns does not match
:exc:`~biopsykit.utils.exceptions.ValueRangeError`
if values are not within the required score range
References
----------
<NAME>. (2005). DS14: standard assessment of negative affectivity, social inhibition, and Type D personality.
*Psychosomatic medicine*, 67(1), 89-97.
"""
score_name = "Type_D"
score_range = [0, 4]
# create copy of data
data = data.copy()
if columns is not None:
# if columns parameter is supplied: slice columns from dataframe
_assert_has_columns(data, [columns])
data = data.loc[:, columns]
if subscales is None:
_assert_num_columns(data, 14)
subscales = {
"NegativeAffect": [2, 4, 5, 7, 9, 12, 13],
"SocialInhibition": [1, 3, 6, 8, 10, 11, 14],
}
_assert_value_range(data, score_range)
# Reverse scores 1, 3
# (numbers in the dictionary correspond to the *positions* of the items to be reversed in the item list specified
# by the subscale dict)
data = _invert_subscales(data, subscales=subscales, idx_dict={"SocialInhibition": [0, 1]}, score_range=score_range)
ds_data = _compute_questionnaire_subscales(data, score_name, subscales)
if len(data.columns) == 14:
# compute total score if all columns are present
ds_data[score_name] = data.sum(axis=1)
return pd.DataFrame(ds_data, index=data.index)
def rse(data: pd.DataFrame, columns: Optional[Union[Sequence[str], pd.Index]] = None) -> pd.DataFrame:
    """Compute the **Rosenberg Self-Esteem Inventory**.

    The RSE is the most frequently used measure of global self-esteem.
    Higher scores indicate greater self-esteem.

    .. note::
        This implementation assumes a score range of [0, 3].
        Use :func:`~biopsykit.questionnaires.utils.convert_scale()` to convert the items into the
        correct range beforehand.

    Parameters
    ----------
    data : :class:`~pandas.DataFrame`
        dataframe containing questionnaire data. Can either be only the relevant columns for
        computing this score or a complete dataframe if ``columns`` parameter is supplied.
    columns : list of str or :class:`pandas.Index`, optional
        list with column names in correct order.
        This can be used if columns in the dataframe are not in the correct order or if a
        complete dataframe is passed as ``data``.

    Returns
    -------
    :class:`~pandas.DataFrame`
        RSE score

    Raises
    ------
    :exc:`~biopsykit.utils.exceptions.ValidationError`
        if number of columns does not match
    :exc:`~biopsykit.utils.exceptions.ValueRangeError`
        if values are not within the required score range

    References
    ----------
    <NAME>. (1965). Society and the Adolescent Self-Image. *Princeton University Press*,
    Princeton, NJ.
    """
    score_name = "RSE"
    score_range = [0, 3]

    # work on a copy so the caller's dataframe is untouched
    data = data.copy()
    if columns is not None:
        # if columns parameter is supplied: slice columns from dataframe
        _assert_has_columns(data, [columns])
        data = data.loc[:, columns]

    _assert_num_columns(data, 10)
    _assert_value_range(data, score_range)

    # Items 2, 5, 6, 8, 9 are negatively worded and must be reverse-scored
    reversed_items = to_idx([2, 5, 6, 8, 9])
    data = invert(data, cols=reversed_items, score_range=score_range)

    # RSE is a plain sum score over all 10 items
    total = data.sum(axis=1)
    return pd.DataFrame(total, columns=[score_name])
def scs(
    data: pd.DataFrame,
    columns: Optional[Union[Sequence[str], pd.Index]] = None,
    subscales: Optional[Dict[str, Sequence[int]]] = None,
) -> pd.DataFrame:
    """Compute the **Self-Compassion Scale (SCS)**.

    The Self-Compassion Scale measures the tendency to be compassionate rather than critical
    toward the self in difficult times. It is typically assessed as a composite but can be
    broken down into subscales. Higher scores indicate greater self-compassion.

    It consists of the subscales, with the item indices
    (count-by-one, i.e., the first question has the index 1!):

    * ``SelfKindness``: [5, 12, 19, 23, 26]
    * ``SelfJudgment``: [1, 8, 11, 16, 21]
    * ``CommonHumanity``: [3, 7, 10, 15]
    * ``Isolation``: [4, 13, 18, 25]
    * ``Mindfulness``: [9, 14, 17, 22]
    * ``OverIdentified`` [2, 6, 20, 24]

    .. note::
        This implementation assumes a score range of [1, 5].
        Use :func:`~biopsykit.questionnaires.utils.convert_scale()` to convert the items into
        the correct range beforehand.

    .. warning::
        Column indices in ``subscales`` are assumed to start at 1 (instead of 0) to avoid
        confusion with questionnaire item columns, which typically also start with index 1!

    Parameters
    ----------
    data : :class:`~pandas.DataFrame`
        dataframe containing questionnaire data. Can either be only the relevant columns for
        computing this score or a complete dataframe if ``columns`` parameter is supplied.
    columns : list of str or :class:`pandas.Index`, optional
        list with column names in correct order.
        This can be used if columns in the dataframe are not in the correct order or if a
        complete dataframe is passed as ``data``.
    subscales : dict, optional
        A dictionary with subscale names (keys) and column names or column indices
        (count-by-1) (values) if only specific subscales should be computed.

    Returns
    -------
    :class:`~pandas.DataFrame`
        SCS score

    Raises
    ------
    ValueError
        if ``subscales`` is supplied and dict values are something else than a list of strings
        or a list of ints
    :exc:`~biopsykit.utils.exceptions.ValidationError`
        if number of columns does not match
    :exc:`~biopsykit.utils.exceptions.ValueRangeError`
        if values are not within the required score range

    References
    ----------
    <NAME>. (2003). The development and validation of a scale to measure self-compassion.
    *Self and identity*, 2(3), 223-250.
    https://www.academia.edu/2040459
    """
    score_name = "SCS"
    score_range = [1, 5]

    # work on a copy so the caller's dataframe is untouched
    data = data.copy()
    if columns is not None:
        # if columns parameter is supplied: slice columns from dataframe
        _assert_has_columns(data, [columns])
        data = data.loc[:, columns]

    if subscales is None:
        _assert_num_columns(data, 26)
        subscales = {
            "SelfKindness": [5, 12, 19, 23, 26],
            "SelfJudgment": [1, 8, 11, 16, 21],
            "CommonHumanity": [3, 7, 10, 15],
            "Isolation": [4, 13, 18, 25],
            "Mindfulness": [9, 14, 17, 22],
            "OverIdentified": [2, 6, 20, 24],
        }

    _assert_value_range(data, score_range)

    # Reverse-score items 1, 2, 4, 6, 8, 11, 13, 16, 18, 20, 21, 24, 25; the indices below
    # are the *positions* of those items within each subscale's item list.
    reverse_positions = {
        "SelfJudgment": [0, 1, 2, 3, 4],
        "Isolation": [0, 1, 2, 3],
        "OverIdentified": [0, 1, 2, 3],
    }
    data = _invert_subscales(
        data, subscales=subscales, idx_dict=reverse_positions, score_range=score_range
    )

    # SCS is a mean, not a sum score!
    scs_data = _compute_questionnaire_subscales(data, score_name, subscales, agg_type="mean")
    if len(data.columns) == 26:
        # compute total score if all columns are present
        scs_data[score_name] = data.mean(axis=1)

    return pd.DataFrame(scs_data, index=data.index)
def midi(data: pd.DataFrame, columns: Optional[Union[Sequence[str], pd.Index]] = None) -> pd.DataFrame:
    """Compute the **Midlife Development Inventory (MIDI) Sense of Control Scale**.

    The MIDI sense of control scale assesses perceived control, that is, how much an
    individual perceives to be in control of his or her environment. Higher scores indicate
    greater sense of control.

    .. note::
        This implementation assumes a score range of [1, 7].
        Use :func:`~biopsykit.questionnaires.utils.convert_scale()` to convert the items into
        the correct range beforehand.

    Parameters
    ----------
    data : :class:`~pandas.DataFrame`
        dataframe containing questionnaire data. Can either be only the relevant columns for
        computing this score or a complete dataframe if ``columns`` parameter is supplied
    columns : list of str or :class:`pandas.Index`, optional
        list with column names in correct order.
        This can be used if columns in the dataframe are not in the correct order or if a
        complete dataframe is passed as ``data``.

    Returns
    -------
    :class:`~pandas.DataFrame`
        MIDI score

    Raises
    ------
    :exc:`~biopsykit.utils.exceptions.ValidationError`
        if number of columns does not match
    :exc:`~biopsykit.utils.exceptions.ValueRangeError`
        if values are not within the required score range

    References
    ----------
    <NAME>., & <NAME>. (1998). The sense of control as a moderator of social class
    differences in health and well-being. *Journal of personality and social psychology*,
    74(3), 763.
    """
    score_name = "MIDI"
    score_range = [1, 7]

    # work on a copy so the caller's dataframe is untouched
    data = data.copy()
    if columns is not None:
        # if columns parameter is supplied: slice columns from dataframe
        _assert_has_columns(data, [columns])
        data = data.loc[:, columns]

    _assert_num_columns(data, 12)
    _assert_value_range(data, score_range)

    # Items 1, 2, 4, 5, 7, 9, 10, 11 are negatively worded and must be reverse-scored
    reversed_items = to_idx([1, 2, 4, 5, 7, 9, 10, 11])
    data = invert(data, cols=reversed_items, score_range=score_range)

    # MIDI is a mean, not a sum score!
    return pd.DataFrame(data.mean(axis=1), columns=[score_name])
def tsgs(
data: pd.DataFrame,
columns: Optional[Union[Sequence[str], pd.Index]] = None,
subscales: Optional[Dict[str, Sequence[Union[str, int]]]] = None,
) -> pd.DataFrame:
"""Compute the **Trait Shame and Guilt Scale**.
The TSGS assesses the experience of shame, guilt, and pride over the past few months with three separate subscales.
Shame and guilt are considered distinct emotions, with shame being a global negative feeling about the self,
and guilt being a negative feeling about a specific event rather than the self. Higher scores on each subscale
indicate higher | |
import pytest
import os
import sqlalchemy.orm.exc
from dblayer import *
from dblayer.func.func_citydb_pkg import *
from dblayer.func.func_citydb_view import *
from dblayer.func.func_citydb_view_nrg import *
from dblayer.func.func_postgis_geom import *
from dblayer.sim.pandapower import *
from dblayer.sim.pandathermal import *
from dblayer.sim.pandangas import *
import dblayer.helpers.utn.electrical_network as el_net
import dblayer.helpers.utn.thermal_network as th_net
import dblayer.helpers.utn.gas_network as gas_net
from dblayer.zerobnl.reader import *
from dblayer.zerobnl.writer import *
import pandangas.simulation as gas_sim
import pandas as pd
import networkx as nx
import zerobnl
@pytest.fixture()
def fix_connect():
    '''
    Fixture for testing. Returns the connection parameters for the database.

    :return: PostgreSQL database connection parameters (dblayer.db.PostgreSQLConnectionInfo)
    '''
    # Collect the connection settings in one place, then build the info object.
    connection_settings = dict(
        user = 'postgres',
        pwd = '<PASSWORD>',
        host = 'localhost',
        port = '5432',
        dbname = 'citydb',
    )
    return PostgreSQLConnectionInfo( **connection_settings )
@pytest.fixture()
def fix_access( fix_connect ):
    '''
    Fixture for testing. Provides access to the database.

    NOTE(review): the code constructs a ``DBAccess`` instance, while the
    previous docstring said ``dblayer.Access`` -- confirm the intended type.

    :return: object for accessing the database (DBAccess)
    '''
    # Access the database.
    access = DBAccess()
    # Connect to database and retrieve engine, session and metadata.
    access.connect_to_citydb( fix_connect )
    return access
@pytest.fixture()
def fix_dockerfile():
    '''
    Specify name of dummy Dockerfile.

    The file is expected in the local ``data`` directory (see fix_create_sim).
    '''
    return 'Dockerfile_base'


@pytest.fixture()
def fix_wrapper():
    '''
    Specify name of dummy wrapper.

    The file is expected in the local ``data`` directory (see fix_create_sim).
    '''
    return 'wrapper_base.py'
@pytest.fixture()
def fix_create_sim( fix_dockerfile, fix_wrapper ):
    '''
    Fixture for testing. Creates a simple ready-to-run co-simulation setup.

    :return: simulation setup with meta, envs, nodes, links, sequence and steps implemented (zerobnl.CoSim)
    '''
    # Create simulation setup.
    sim = zerobnl.CoSim()
    # Add meta model: attribute 'a' is set on a node, attribute 'b' is read from it.
    sim.create_meta_model(
        meta_model = 'MetaBase',
        list_of_attrs_to_set = [ ( 'a', 'unit' ) ],
        list_of_attrs_to_get = [ ( 'b', 'unit' ) ]
    )
    # Add environment for instances of the meta model
    # (wrapper script and Dockerfile live in the local 'data' directory).
    sim.create_environment(
        env = 'EnvBase',
        wrapper = os.path.join( os.path.dirname( __file__ ), 'data', fix_wrapper ),
        dockerfile = os.path.join( os.path.dirname( __file__ ), 'data', fix_dockerfile )
    )
    # Add node based on meta model and environment.
    sim.add_node(
        node = 'Base0',
        meta = 'MetaBase',
        env = 'EnvBase',
        init_values = { 'c': .5 },
        files = [ os.path.join( os.path.dirname( __file__ ), 'data', 'dummy_file.txt' ) ]
    )
    # Add another node based on meta model and environment (no extra files attached).
    sim.add_node(
        node = 'Base1',
        meta = 'MetaBase',
        env = 'EnvBase',
        init_values = { 'c': .25 }
    )
    # Define links between nodes: output 'b' of each node feeds input 'a' of the other.
    sim.add_link( get_node = 'Base0', get_attr = 'b', set_node = 'Base1', set_attr = 'a' )
    sim.add_link( get_node = 'Base1', get_attr = 'b', set_node = 'Base0', set_attr = 'a' )
    # Define simulation groups and sequence.
    sim.create_sequence( [ [ 'Base0' ], [ 'Base1' ] ] )
    # Define simulation time steps: 240 steps of 15 seconds each (one hour in total).
    sim.set_time_unit( 'seconds' )
    sim.create_steps( [15] * 4 * 60 )
    return sim
@pytest.fixture()
def fix_electrical_network_id():
    # ID under which the electrical network test data is stored.
    return 1000


@pytest.fixture()
def fix_thermal_network_id():
    # ID under which the thermal network test data is stored.
    return 2000


@pytest.fixture()
def fix_gas_network_id():
    # ID under which the gas network test data is stored.
    return 3000


@pytest.fixture()
def fix_srid():
    # Spatial reference system identifier (EPSG:4326, i.e. WGS 84 lat/lon).
    return 4326
def test_cleanup_citydb_schema( fix_access ):
    # Invoke the cleanup helper for the 3DCityDB schema, so subsequent
    # tests start from a well-defined database state.
    fix_access.cleanup_citydb_schema()


def test_cleanup_simpkg_schema( fix_access ):
    # Invoke the cleanup helper for the simulation package schema.
    fix_access.cleanup_simpkg_schema()
def test_map_invalid_class( fix_access ):
    '''Mapping an unknown object class name must raise a RuntimeError.'''
    with pytest.raises( RuntimeError ) as e:
        fix_access.map_citydb_object_class( 'UnknownObjectClassName' )
    # Inspect the raised exception itself via ``e.value``: ``str(e)`` only
    # yields the textual representation of the ExceptionInfo wrapper, so the
    # previous check did not actually look at the error message.
    assert 0 != len( str( e.value ) )
def test_fill_citydb( fix_access ):
    '''Populate the 3DCityDB with the buildings, heat pumps, time series,
    energy demands and generic attributes used by the subsequent read tests.'''
    # Insert new buildings and retrieve their IDs.
    bui_id1 = fix_access.add_citydb_object( insert_building, name = 'BUILDING_01' )
    bui_id2 = fix_access.add_citydb_object( insert_building, name = 'BUILDING_02' )
    # Insert new heat pumps (associated to buildings).
    fix_access.add_citydb_object( insert_heat_pump, name = 'HEATPUMP_01', nom_effcy = 1.2,
        effcy_indicator = 'COP', inst_in_ctyobj_id = bui_id1 )
    fix_access.add_citydb_object( insert_heat_pump, name = 'HEATPUMP_02', nom_effcy = 3.4,
        effcy_indicator = 'COP', inst_in_ctyobj_id = bui_id2 )
    # Insert new time series and retrieve their IDs.
    ts_id1 = fix_access.add_citydb_object( insert_regular_time_series, name = 'TS_DEMAND_01',
        values_array = [ 1, 2, 3 ], values_unit = 'kW', time_interval = 0.25, time_interval_unit = 'h' )
    ts_id2 = fix_access.add_citydb_object( insert_regular_time_series, name = 'TS_DEMAND_02',
        values_array = [ 4, 5, 6 ], values_unit = 'kW', time_interval = 0.25, time_interval_unit = 'h' )
    # Insert new energy demands (associated to time series).
    fix_access.add_citydb_object( insert_energy_demand, name = 'DEMAND_01',
        time_series_id = ts_id1, cityobject_id = bui_id1 )
    fix_access.add_citydb_object( insert_energy_demand, name = 'DEMAND_02',
        time_series_id = ts_id2, cityobject_id = bui_id2 )
    # Insert new generic attributes (associated to buildings); three per
    # building, one of each supported type (real, integer, string).
    fix_access.add_citydb_object( insert_genericattrib_real, attrname = 'BUILDING_01_ATTR_01',
        attrvalue = 0.1, cityobject_id = bui_id1 )
    fix_access.add_citydb_object( insert_genericattrib_integer, attrname = 'BUILDING_01_ATTR_02',
        attrvalue = 2, cityobject_id = bui_id1 )
    fix_access.add_citydb_object( insert_genericattrib_string, attrname = 'BUILDING_01_ATTR_03',
        attrvalue = '3', cityobject_id = bui_id1 )
    fix_access.add_citydb_object( insert_genericattrib_real, attrname = 'BUILDING_02_ATTR_01',
        attrvalue = 4.5, cityobject_id = bui_id2 )
    fix_access.add_citydb_object( insert_genericattrib_integer, attrname = 'BUILDING_02_ATTR_02',
        attrvalue = 6, cityobject_id = bui_id2 )
    fix_access.add_citydb_object( insert_genericattrib_string, attrname = 'BUILDING_02_ATTR_03',
        attrvalue = '7', cityobject_id = bui_id2 )
    # Persist all inserts above in a single commit.
    fix_access.commit_citydb_session()
def test_read_citydb( fix_access ):
    '''Read back the objects created by test_fill_citydb via tables, views and joins.'''
    # Retrieve building data from default 3DCityDB table (citydb.building).
    buildings = fix_access.get_citydb_objects( 'Building' )
    # Expect a query result with two entries.
    assert len( buildings ) == 2
    # Retrieve building data from user-friendly 3DCityDB view (citydb_view.building).
    # Re-mapping the already mapped class 'Building' is expected to emit a RuntimeWarning.
    with pytest.warns( RuntimeWarning ) as record:
        # Retrieve the class mapped to the view.
        Building = fix_access.map_citydb_object_class( 'Building', schema = 'citydb_view' )
        # Use the mapped class to define filter conditions.
        conditions = [ Building.name == 'BUILDING_02' ]
        # Retrieve the data.
        buildings = fix_access.get_citydb_objects( 'Building', conditions = conditions )
    # Expect a query result with only one entry.
    assert len( buildings ) == 1
    # Retrieve the class mapped to the default table.
    GenericAttribute = fix_access.map_citydb_object_class( 'GenericAttribute' )
    # Retrieve all generic attributes associated to a building by joining data from 2 tables.
    attributes = fix_access.join_citydb_objects(
        [ 'GenericAttribute', 'Building' ],
        conditions = [ GenericAttribute.cityobject_id == Building.id ]
    )
    # Six generic attributes were inserted in test_fill_citydb.
    assert( len( attributes ) == 6 )
    # Check that only one warning was raised.
    assert len( record ) == 1
    # Check that the message matches.
    assert record[0].message.args[0] == 'Class Building has already been mapped from table: building (schema: citydb). It will be re-mapped from table: building (schema: citydb_view).'
def test_read_simpkg_invalid( fix_connect ):
    '''Reading a non-existing scenario from the database must raise NoResultFound.'''
    # Construct the reader outside the raises-block, so the check only
    # covers the call that is actually expected to fail.
    reader = DBReader( fix_connect )
    with pytest.raises( sqlalchemy.orm.exc.NoResultFound ) as e:
        # Try to read a scenario that does not exist.
        reader.read_from_db( 'TestSimX' )
    # Check the captured exception type directly instead of matching the
    # fragile textual representation of the ExceptionInfo object
    # (``'ExceptionInfo NoResultFound' in str(e)`` depends on pytest internals).
    assert e.type is sqlalchemy.orm.exc.NoResultFound
def test_write_simpkg( fix_connect, fix_create_sim ):
    '''Write the co-simulation setup created by fixture fix_create_sim to the database.'''
    writer = DBWriter( fix_connect )
    writer.write_to_db( fix_create_sim, 'TestSim' )
    # # NOT YET IMPLEMENTED: Try to write a scenario with an already existing name to database.
    # try:
    #     writer.write_to_db( fix_create_sim, 'TestSim' )
    # except RuntimeError as e:
    #     expected_message = ''
    #     self.assertEqual( str( e ) )
def test_write_and_read_simpkg( fix_connect, fix_create_sim, fix_dockerfile, fix_wrapper ):
    '''Write a simulation setup to the database, read it back and compare all fields.'''
    # Define simulation setup name.
    sim_name = 'TestSim1'
    # Write simulation setup to database. Do not write meta models and models, because
    # they have already been written to the database in one of the previous tests.
    sim_write = fix_create_sim
    writer = DBWriter( fix_connect )
    writer.write_to_db( sim_write, sim_name, write_meta_models = False, write_envs = False )
    # Read simulation setup from database.
    reader = DBReader( fix_connect )
    sim_read = reader.read_from_db( sim_name )
    # Nodes and links are represented as pandas dataframes
    # (isinstance is the idiomatic type check, instead of ``type(...) is``).
    assert isinstance( sim_read.nodes, pd.DataFrame )
    assert isinstance( sim_read.links, pd.DataFrame )
    assert len( sim_read.nodes ) == 2
    assert len( sim_read.links ) == 2
    # Node attributes must match the setup created by fixture fix_create_sim.
    for _, row in sim_read.nodes.iterrows():
        assert row[ 'Env' ] == 'EnvBase'
        assert row[ 'Meta' ] == 'MetaBase'
        assert row[ 'ToSet' ] == [ ( 'a', 'unit' ) ]
        assert row[ 'ToGet' ] == [ ( 'b', 'unit' ) ]
        assert row[ 'Dockerfile' ] == os.path.join( os.path.dirname( __file__ ), 'data', fix_dockerfile )
        assert row[ 'Wrapper' ] == os.path.join( os.path.dirname( __file__ ), 'data', fix_wrapper )
        # Truthiness check instead of ``== False`` (also covers numpy bools).
        assert not row[ 'Local' ]
        assert row[ 'Parameters' ] == {}
    # Initial values and attached files are node-specific.
    assert sim_read.nodes.loc[ 'Base0' ].InitVal[ 'c' ] == 0.5
    assert sim_read.nodes.loc[ 'Base1' ].InitVal[ 'c' ] == 0.25
    assert sim_read.nodes.loc[ 'Base0' ].Files == [ os.path.join( os.path.dirname( __file__ ), 'data', 'dummy_file.txt' ) ]
    assert sim_read.nodes.loc[ 'Base1' ].Files == []
    # Time steps and execution sequence must round-trip unchanged.
    assert sim_read.steps == [15] * 4 * 60
    assert sim_read.sequence == [ [ 'Base0' ], [ 'Base1' ] ]
def test_write_and_read_associate_simpkg( fix_connect, fix_access, fix_create_sim ):
sim_name = 'TestSim2'
HeatPump = fix_access.map_citydb_object_class( 'HeatPump', schema = 'citydb_view',
table_name = 'nrg8_conv_system_heat_pump' )
conditions = [ HeatPump.name == 'HEATPUMP_02', HeatPump.nom_effcy==3.4 ]
heatpumps = fix_access.get_citydb_objects( 'HeatPump', conditions = conditions )
assert( len( heatpumps ) == 1 )
heatpump_id = heatpumps[0].id
GenericAttribute = fix_access.map_citydb_object_class( 'GenericAttribute' )
conditions = [ GenericAttribute.attrname == 'BUILDING_02_ATTR_01' ]
attributes = fix_access.get_citydb_objects( 'GenericAttribute', conditions = conditions )
attribute_id = attributes[0].id
associated_sim = fix_create_sim
associated_sim.nodes.loc[ 'Base0' ].InitVal[ 'c' ] = AssociateCityDBObjectAttribute(
obj = heatpumps[0], attr_name = 'nom_effcy' )
associated_sim.nodes.loc[ 'Base1' ].InitVal[ 'c' ] = AssociateCityDBGenericAttribute(
attr = attributes[0] )
writer = DBWriter( fix_connect )
writer.write_to_db( associated_sim, sim_name, write_meta_models = False, write_envs = False )
# Read simulation | |
are correct
self.assertEquals(art_stock.count, 2)
self.assertEquals(art2_stock.count, 3)
# Check if the book value of the items are equal to the cost of the articles
self.assertEquals(art_stock.book_value.amount, sp.amount)
self.assertEquals(art2_stock.book_value.amount, sp.amount)
def testTwoArticleMerges(self):
    """
    Test that adds two types of articles, then sells one of them.
    """
    # Create some objects to use in the tests
    cur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    sp = Cost(amount=Decimal("1.00000"), currency=cur)
    sp2 = Cost(amount=Decimal("1.00000"), currency=cur)
    art = self.articletype_1
    art2 = self.articletype_2
    # Construct some entries to use
    entry_1 = {  # 2 of Product1 in
        'article': art,
        'book_value': sp,
        'count': 2,
        'is_in': True
    }
    entry_2 = {  # 3 of Product2 in
        'article': art2,
        'book_value': sp2,
        'count': 3,
        'is_in': True
    }
    entry_3 = {  # 1 of Product2 out
        'article': art2,
        'book_value': sp2,
        'count': 1,
        'is_in': False
    }
    # Execute the needed modifications
    log_1 = StockChangeSet.construct(description="Get 2xProduct1 3xProduct2", entries=[entry_1, entry_2],
                                     source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    log_2 = StockChangeSet.construct(description="Get 2xProduct1 3xProduct2", entries=[entry_1, entry_2],
                                     source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # entry_1 appears twice in this set, adding 4 of Product1 in one go.
    log_3 = StockChangeSet.construct(description="Get 2xProduct1 3xProduct2 2xProduct1",
                                     entries=[entry_1, entry_2, entry_1], source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    log_4 = StockChangeSet.construct(description="Sell 1xProduct2", entries=[entry_3], source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # Get resulting stocks
    art_stock = Stock.objects.get(article=art)
    art2_stock = Stock.objects.get(article=art2)
    # Check number of stockchanges in StockLogs
    self.assertEqual(len(log_1.stockchange_set.all()), 2)
    self.assertEqual(len(log_2.stockchange_set.all()), 2)
    self.assertEqual(len(log_3.stockchange_set.all()), 3)
    self.assertEqual(len(log_4.stockchange_set.all()), 1)
    # Check if number of items in stock is correct
    # (Product1: 2 + 2 + 2 + 2 = 8; Product2: 3 + 3 + 3 - 1 = 8)
    self.assertEqual(art_stock.count, 8)
    self.assertEqual(art2_stock.count, 8)
    # Check if book value of stock is correct
    self.assertEqual(art_stock.book_value.amount, Decimal("1.00000"))
    self.assertEqual(art2_stock.book_value.amount, Decimal("1.00000"))
    # Check if currency of stock is correct
    self.assertEqual(art_stock.book_value.currency, cur)
    self.assertEqual(art2_stock.book_value.currency, cur)
def testOneTwoThreeFourFiveSix(self):
    """
    Test that adds 6 articles with increasing book value, and checks if the resulting book value is correct.
    """
    # Create some objects to use in the tests
    cur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    art = self.articletype_1
    for i in range(1, 7):  # 1 to 6. Average should be 3.5
        # Define book value
        book_value = Cost(amount=Decimal(str(i)), currency=cur)
        # Construct entry for StockChangeSet
        entries = [{
            'article': art,
            'book_value': book_value,
            'count': 1,
            'is_in': True
        }]
        # Do stock modification
        StockChangeSet.construct(description="AverageTest{}".format(i), entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # Get stock for article
    st = Stock.objects.get(article=art)
    # Check if the currency of the book value is correct
    self.assertEqual(st.book_value.currency, cur)
    # Check if the average equals 3.5
    self.assertEqual(st.book_value.amount, Decimal("3.50000"))
def testMinusOneTwoThreeFourFiveSix(self):
    """
    Test that tries to sell 6 items with increasing book value from an empty stock.
    """
    # Create some objects to use in the tests
    i = 0  # flag: set to 1 once StockSmallerThanZeroError is raised
    cur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    art = self.articletype_1
    try:
        for i in range(1, 7):  # stock is empty, so a removal must raise
            # Define book value
            book_value = Cost(amount=Decimal(str(i)), currency=cur)
            # Construct entry for StockChangeSet
            entries = [{
                'article': art,
                'book_value': book_value,
                'count': 1,
                'is_in': False
            }]
            # Do stock modification
            StockChangeSet.construct(description="NegativeStockTest{}".format(i), entries=entries,
                                     source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    except StockSmallerThanZeroError:
        i = 1
    # The flag (and not a loop counter value of 6) proves the error occurred.
    self.assertEqual(i, 1)
def testDifferentCurrencies(self):
    """
    Test that a stock modification in a different currency than the existing
    stock is rejected (CurrencyInconsistencyError) and rolled back.
    """
    # Create some objects to use in the tests
    i = 0  # flag: set to 1 once CurrencyInconsistencyError is raised
    eur = Currency("EUR")
    usd = Currency("USD")
    VAT.objects.create(name="HIGH", active=True)
    cost_eur = Cost(amount=Decimal(str(1)), currency=eur)  # 1 euro
    cost_usd = Cost(amount=Decimal(str(1)), currency=usd)  # 1 dollar
    art = self.articletype_1
    # Add 1 article with cost 1 euro
    entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': True
    }]
    StockChangeSet.construct(description="AddEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # Check if the product was successfully added to the stock
    self.assertEqual(len(Stock.objects.all()), 1)
    self.assertEqual(len(StockChangeSet.objects.all()), 1)
    self.assertEqual(len(StockChange.objects.all()), 1)
    # (Try to) add 1 article with cost 1 dollar
    entries = [{
        'article': art,
        'book_value': cost_usd,
        'count': 1,
        'is_in': True
    }]
    try:
        StockChangeSet.construct(description="AddDollarStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    except CurrencyInconsistencyError:
        i = 1
    # Check if the CurrencyInconsistencyError occurred
    self.assertEqual(i, 1)
    # Check if there are no additional lines in the StockChangeSet and stockchanges
    # The error should have rolled back the changes the second modification might have made
    self.assertEqual(len(StockChangeSet.objects.all()), 1)
    self.assertEqual(len(StockChange.objects.all()), 1)
    # Try to add another item with a price of 1 euro
    entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': True
    }]
    i = 0
    try:
        StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    except CurrencyInconsistencyError:
        i = 1
    # This should have worked, so i should still be 0
    self.assertEqual(i, 0)
    # Check if the stock has indeed changed
    self.assertEqual(len(StockChangeSet.objects.all()), 2)
    self.assertEqual(len(StockChange.objects.all()), 2)
def testToZero(self):
    """
    Test that fills the stock with 6 items of increasing book value and then
    removes them again (at average cost) until the stock count reaches zero,
    finishing with a zero-count modification.

    (The previous docstring was copied from the negative-stock test and did
    not describe this test.)
    """
    # Create some objects to use in the tests
    cur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    art = self.articletype_1
    book_value = None
    for i in range(1, 7):  # book values 1 to 6; average is 3.5
        # Define book value
        book_value = Cost(amount=Decimal(str(i)), currency=cur)
        # Construct entry for StockChangeSet
        entries = [{
            'article': art,
            'book_value': book_value,
            'count': 1,
            'is_in': True
        }]
        StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    for i in range(1, 7):
        # Define book value: remove at the average cost of the stock (3.5).
        book_value = Cost(amount=Decimal(3.5000), currency=cur)
        entries = [{
            'article': art,
            'book_value': book_value,
            'count': 1,
            'is_in': False
        }]
        StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # A zero-count modification on the (now empty) stock.
    entries = [{
        'article': art,
        'book_value': book_value,
        'count': 0,
        'is_in': False
    }]
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # 6 additions + 6 removals + 1 zero-count change = 13 stock changes,
    # both in the label-less query and overall. (A duplicated identical
    # assertion was removed here.)
    self.assertEqual(len(StockChange.objects.all_without_label()), 13)
    self.assertEqual(len(StockChange.objects.all()), 13)
    st = Stock.objects.all()
    # Depending on configuration, a stock line at count 0 is either deleted
    # or kept with a zero count.
    if DELETE_STOCK_ZERO_LINES:
        self.assertEqual(st.__len__(), 0)
    else:
        self.assertEqual(st.__len__(), 1)
def testInvalidSource(self):
    """A StockChangeSet with an unknown source string must raise ValueError."""
    i = 0  # flag: set to 1 once ValueError is raised
    # Create some objects to use in the tests
    eur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    cost_eur = Cost(amount=Decimal(str(1)), currency=eur)  # 1 euro
    art = self.articletype_1
    # Sample article
    entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': True
    }]
    try:
        StockChangeSet.construct(description="AddInvalidSource", entries=entries,
                                 source="some_nonexisting_source_is_nonexisting")
    except ValueError:
        i = 1
    # This should have failed, so i should be 1
    self.assertEqual(i, 1)
@StockLabel.register
class ZStockLabel(StockLabel):
    # Registered label type used by the label tests below.
    labeltype = "Zz"


@StockLabel.register
class TestStockLabel(StockLabel):
    # Second registered label type, used to mix label types within one test.
    labeltype = "test"


class ForgottenStockLabel(StockLabel):
    # Deliberately NOT registered via StockLabel.register.
    # NOTE(review): not referenced in the visible tests -- presumably used to
    # exercise behaviour for unregistered label types; confirm.
    labeltype = "forgotten"
class LabelTest(TestCase, TestData):
def setUp(self):
    """Create base data plus a default label, article and entry template."""
    self.setup_base_data()
    self.eur = Currency("EUR")
    self.vat = self.vat_group_high
    self.accountinggroup = self.accounting_group_components
    self.cost_eur = Cost(amount=Decimal(str(1)), currency=self.eur)  # 1 euro
    self.def_art = self.articletype_1
    # Default label instance shared by the tests in this class.
    self.label1a = ZStockLabel(1)
    # Default entry template: one article in, labelled with label1a.
    self.def_entries = [{
        'article': self.def_art,
        'book_value': self.cost_eur,
        'count': 1,
        'is_in': True,
        'label': self.label1a
    }]


def tearDown(self):
    # After every test the stock consistency check must report no problems.
    self.assertEqual(Stock.do_check(), [])
def testBasicLabel(self):
    """Stock changes with a label are counted on a single labelled stock line."""
    eur = Currency("EUR")
    VAT.objects.create(name="HIGH", active=True)
    cost_eur = Cost(amount=Decimal(str(1)), currency=eur)  # 1 euro
    art = self.articletype_1
    # Add 1 article with cost 1 euro, labelled with label1a (done twice below).
    entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': True,
        'label': self.label1a
    }]
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # Both changes carry the label; there are no label-less changes and the
    # two changes merge into a single stock line.
    self.assertEqual(len(StockChange.objects.filter(label=self.label1a)), 2)
    self.assertEqual(len(StockChange.objects.all_without_label()), 0)
    self.assertEqual(len(StockChange.objects.all()), 2)
    self.assertEqual(len(Stock.objects.all()), 1)
    # Changes with a different label type and with no label must also be accepted.
    t = TestStockLabel(4)
    entries[0]["label"] = t
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    entries[0]["label"] = None
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
def raisesTest(self):
    # Helper for assertRaises: builds a StockChangeSet from ``self.entries``,
    # which each caller prepares beforehand.
    StockChangeSet.construct(description="AddSecondEuroStock", entries=self.entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
def testLabelFailBecauseNoLabel(self):
    """Selling without a label must not draw from labelled stock."""
    eur = Currency("EUR")
    cost_eur = Cost(amount=Decimal(str(1)), currency=eur)  # 1 euro
    art = self.articletype_1
    # Add 1 article with cost 1 euro, labelled.
    entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': True,
        'label': self.label1a
    }]
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    # Selling the article WITHOUT a label must fail: the label-less stock is empty.
    self.entries = [{
        'article': art,
        'book_value': cost_eur,
        'count': 1,
        'is_in': False,
    }]
    self.assertRaises(StockSmallerThanZeroError, self.raisesTest)
    # The labelled stock line is untouched by the failed attempt.
    self.assertEqual(Stock.objects.get(label=self.label1a).count, 1)
    self.assertEqual(StockChange.objects.all().__len__(), 1)
    # Selling WITH the label succeeds and empties the labelled line.
    entries[0]['is_in'] = False
    StockChangeSet.construct(description="AddSecondEuroStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    if DELETE_STOCK_ZERO_LINES:
        self.assertEqual(Stock.objects.filter(label=self.label1a).__len__(), 0)
    else:
        self.assertEqual(Stock.objects.filter(label=self.label1a).__len__(), 1)
    # Selling again from the now empty labelled line must fail as well.
    self.entries = entries
    self.assertRaises(StockSmallerThanZeroError, self.raisesTest)
    self.assertEqual(StockChange.objects.all().__len__(), 2)
    if DELETE_STOCK_ZERO_LINES:
        self.assertEqual(Stock.objects.filter(label=self.label1a).__len__(), 0)
    else:
        self.assertEqual(Stock.objects.filter(label=self.label1a).__len__(), 1)
def raise_Invalid_Label_type_added(self):
    # Helper for assertRaises: registers the class stored in ``self.labeltype``.
    StockLabel.register(self.labeltype)


def testTestLabelWithoutName(self):
    """Registering a label class with an empty labeltype must raise ValueError."""
    class InValidLabel(StockLabel):
        labeltype = ""
    self.labeltype = InValidLabel
    self.assertRaises(ValueError, self.raise_Invalid_Label_type_added)
    # The invalid type must not have been added to the registry.
    self.assertEqual(StockLabel.labeltypes.get("", None), None)
# Test what happens to the stock state if one line of stock is moved.
def testMoveStock(self):
    """Move one item from the labelled stock line to the label-less line."""
    # Add 1 article with cost 1 euro (labelled, via the default entry template).
    entries = self.def_entries
    StockChangeSet.construct(description="AddFirstStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    self.assertEqual(StockChange.objects.all().__len__(), 1)
    self.assertEqual(Stock.objects.all().__len__(), 1)
    self.assertEqual(Stock.objects.all_without_label().__len__(), 0)
    # One change set that adds to the label-less line and removes from the
    # labelled line, i.e. a move between the two lines.
    entries = [{
        'article': self.def_art,
        'book_value': self.cost_eur,
        'count': 1,
        'is_in': True,
        'label': None
    }, {
        'article': self.def_art,
        'book_value': self.cost_eur,
        'count': 1,
        'is_in': False,
        'label': self.label1a
    }]
    StockChangeSet.construct(description="AddSecondStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
    self.assertEqual(StockChange.objects.all().__len__(), 3)
    # The emptied labelled line is deleted or kept depending on configuration.
    if DELETE_STOCK_ZERO_LINES:
        self.assertEqual(Stock.objects.all().__len__(), 1)
    else:
        self.assertEqual(Stock.objects.all().__len__(), 2)
    self.assertEqual(Stock.objects.all_without_label().__len__(), 1)
def testStockChangeCreateWithLabelTypeInsteadOfLabel(self):
    """Supplying a 'labeltype' key instead of a 'label' must raise ValueError."""
    # Add 1 article with cost 1 euro, but with an illegal 'labeltype' key.
    self.entries = self.def_entries
    self.entries[0]['label'] = None
    self.entries[0]['labeltype'] = "TEST"
    self.assertRaises(ValueError, self.raisesTest)
def testAverageStock(self):
entries = [{
'article': self.def_art,
'book_value': self.cost_eur,
'count': 1,
'is_in': True,
'label': None
}, {
'article': self.def_art,
'book_value': self.cost_eur,
'count': 2,
'is_in': True,
'label': self.label1a
}]
StockChangeSet.construct(description="AddSecondStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
self.cost_eur = self.cost_eur + self.cost_eur
entries = [{
'article': self.def_art,
'book_value': self.cost_eur,
'count': 1,
'is_in': True,
'label': None
}, {
'article': self.def_art,
'book_value': self.cost_eur,
'count': 1,
'is_in': True,
'label': self.label1a
}]
StockChangeSet.construct(description="AddSecondStock", entries=entries, source=StockChangeSet.SOURCE_TEST_DO_NOT_USE)
without_label = Stock.objects.all_without_label()
without_line = without_label[0]
self.assertEqual(without_label.__len__(), 1)
with_label = Stock.objects.filter(label=self.label1a)
self.assertEqual(with_label.__len__(), 1)
| |
<gh_stars>1-10
######################################################################
#
# Software Name : Cloudnet TOSCA toolbox
# Version: 1.0
# SPDX-FileCopyrightText: Copyright (c) 2020-21 Orange
# SPDX-License-Identifier: Apache-2.0
#
# This software is distributed under the Apache License 2.0
# the text of which is available at http://www.apache.org/licenses/LICENSE-2.0
# or see the "LICENSE-2.0.txt" file for more details.
#
# Author: <NAME> <<EMAIL>>
# Software description: TOSCA to Cloudnet Translator
######################################################################
from cloudnet.tosca.utils import normalize_name, short_type_name
from cloudnet.tosca.processors import Generator
import cloudnet.tosca.syntax as syntax
from cloudnet.tosca.syntax import * # TODO remove
from cloudnet.tosca.configuration import DEFAULT_CONFIGURATION
# Configuration key under which this generator's defaults are registered.
UML2 = 'UML2'

DEFAULT_CONFIGURATION[UML2] = {
    # Target directory where UML2 diagrams are generated.
    Generator.TARGET_DIRECTORY: 'uml2',
    # Maps TOSCA type names to the drawing kind ('node', 'database', 'queue')
    # used for them in the generated diagrams.
    'kinds': {
        'Compute': 'node',  # OASIS TOSCA 1.2
        'tosca.nodes.Compute': 'node',  # OASIS TOSCA 1.2
        'tosca.nodes.nfv.Vdu.Compute': 'node',  # ETSI NVF SOL 001
        'tosca.nodes.Abstract.Storage': 'database',  # OASIS TOSCA 1.2
        'tosca.nodes.nfv.Vdu.VirtualStorage': 'database',  # ETSI NVF SOL 001 v0.9
        'tosca.nodes.nfv.Vdu.VirtualBlockStorage': 'database',  # ETSI NVF SOL 001 v0.10.0
        'tosca.nodes.nfv.Vdu.VirtualObjectStorage': 'database',  # ETSI NVF SOL 001 v0.10.0
        'tosca.nodes.nfv.Vdu.VirtualFileStorage': 'database',  # ETSI NVF SOL 001 v0.10.0
        'tosca.nodes.network.Network': 'queue',  # OASIS TOSCA 1.2
        'tosca.nodes.nfv.NsVirtualLink': 'queue',  # ETSI NVF SOL 001 v2.5.1
        'tosca.nodes.nfv.VnfVirtualLink': 'queue',  # ETSI NVF SOL 001
        'tosca.capabilities.nfv.VirtualLinkable': 'queue',  # ETSI NVF SOL 001
    },
    # Preferred drawing direction for given relationship types.
    'direction': {
        'tosca.relationships.network.BindsTo': 'up',  # OASIS TOSCA
    },
    # Per-artifact-type representation settings (empty by default).
    'artifact_types': {
    },
    # Per-node-type representation settings (empty by default).
    'node_types': {
    }
}

# Default log level for this module's logger.
DEFAULT_CONFIGURATION['logging']['loggers'][__name__] = {
    'level': 'INFO',
}

import logging  # for logging purposes.

LOGGER = logging.getLogger(__name__)
class PlantUMLGenerator(Generator):
def generator_configuration_id(self):
    # Configuration key under which this generator's settings are stored
    # (see DEFAULT_CONFIGURATION[UML2] in this module).
    return UML2

def generation(self):
    """Generate the UML2 class, component, deployment, workflow and sequence diagrams."""
    self.info('UML2 diagram generation')
    # The class diagram is derived from the template's type definitions.
    self.generate_UML2_class_diagram()
    topology_template = syntax.get_topology_template(self.tosca_service_template.get_yaml())
    # The remaining diagrams require a topology template.
    if topology_template:
        # Two component diagram variants are produced; the second passes True
        # -- presumably a more detailed rendering (see
        # generate_UML2_component_diagram for the flag's meaning).
        self.open_file('-uml2-component-diagram1.plantuml')
        self.generate_UML2_component_diagram(topology_template, False)
        self.close_file()
        self.open_file('-uml2-component-diagram2.plantuml')
        self.generate_UML2_component_diagram(topology_template, True)
        self.close_file()
        self.open_file('-uml2-deployment-diagram.plantuml')
        self.generate_UML2_deployment_diagram(topology_template)
        self.close_file()
        self.generate_UML2_workflow_diagrams(topology_template)
        self.generate_UML2_sequence_diagrams(topology_template)
def generate_UML2_class_diagram(self):
template_yaml = self.tosca_service_template.get_yaml()
# Get types.
data_types = syntax.get_data_types(template_yaml)
artifact_types = syntax.get_artifact_types(template_yaml)
capability_types = syntax.get_capability_types(template_yaml)
relationship_types = syntax.get_relationship_types(template_yaml)
interface_types = syntax.get_interface_types(template_yaml)
node_types = syntax.get_node_types(template_yaml)
group_types = syntax.get_group_types(template_yaml)
policy_types = syntax.get_policy_types(template_yaml)
# Return if no types is defined.
if len(data_types) == 0 and len(artifact_types) == 0 and len(capability_types) == 0 and len(relationship_types) == 0 and len(interface_types) == 0 and len(node_types) == 0 and len(group_types) == 0 and len(policy_types) == 0:
return
self.open_file('-uml2-class-diagram.plantuml')
self.generate('@startuml')
self.generate('set namespaceSeparator none')
def generate_class(class_name, class_kind, type_yaml, types):
def generate_field(field_name, field_yaml):
declaration = '+'
if is_property_required(field_yaml):
declaration = declaration + '<b>'
declaration = declaration + field_name
field_type = syntax.get_type(field_yaml)
if field_type:
declaration = declaration + ' : ' + field_type
if field_type in ['list', 'map']:
entry_schema_type = get_entry_schema_type(field_yaml)
if entry_schema_type == None:
entry_schema_type = '?'
declaration = declaration + '<' + entry_schema_type + '>'
field_default = syntax.get_default(field_yaml)
if field_default:
declaration = declaration + ' = ' + str(field_default)
self.generate(declaration)
def translateToscaOccurrences2UmlMultiplicity(occurrences):
lower_bound = occurrences[0]
upper_bound = occurrences[1]
if lower_bound == upper_bound:
return str(lower_bound)
if upper_bound == syntax.UNBOUNDED:
upper_bound = '*'
return str(lower_bound) + '..' + str(upper_bound)
derived_from = syntax.get_derived_from(type_yaml)
if derived_from:
if types.get(derived_from) == None:
self.generate('class "', derived_from, '" << (', class_kind, ',green) >> #DDDDDD', sep='')
self.generate('"', derived_from, '" <|-- "', class_name, '"', sep='')
self.generate('class "', class_name, '" << (', class_kind, ',green) >> {', sep='')
mime_type = type_yaml.get(MIME_TYPE)
if mime_type:
self.generate('+mime_type:', mime_type)
file_ext = type_yaml.get(FILE_EXT)
if file_ext:
self.generate('+file_ext:', file_ext)
attributes = get_dict(type_yaml, ATTRIBUTES)
if len(attributes):
self.generate('.. attributes ..')
for attribute_name, attribute_yaml in attributes.items():
generate_field(attribute_name, attribute_yaml)
properties = get_dict(type_yaml, PROPERTIES)
if len(properties):
self.generate('.. properties ..')
for property_name, property_yaml in properties.items():
generate_field(property_name, property_yaml)
capabilities = syntax.get_capabilities(type_yaml)
if len(capabilities):
self.generate('.. capabilities ..')
for capability_name, capability_yaml in capabilities.items():
self.generate('+', capability_name, sep='')
capability_type = get_capability_type(capability_yaml)
if capability_type:
capability_occurrence = translateToscaOccurrences2UmlMultiplicity(get_capability_occurrences(capability_yaml))
self.generate(' type : ', capability_type, '[', capability_occurrence, ']', sep='')
if type(capability_yaml) == dict:
capability_valid_source_types = capability_yaml.get(VALID_SOURCE_TYPES)
if capability_valid_source_types:
self.generate(' valid_source_types : ', capability_valid_source_types, sep='')
requirements = get_dict(type_yaml, REQUIREMENTS)
if len(requirements):
self.generate('.. requirements ..')
for requirement_name, requirement_yaml in requirements.items():
requirement_occurrences = syntax.get_requirement_occurrences(requirement_yaml)
if requirement_occurrences[0] > 0:
bold = '<b>'
else:
bold = ''
self.generate('+', bold, requirement_name, sep='')
requirement_capability_type = syntax.get_requirement_capability(requirement_yaml)
if requirement_capability_type:
uml_multiplicity = translateToscaOccurrences2UmlMultiplicity(requirement_occurrences)
self.generate(' capability : ', requirement_capability_type, '[', uml_multiplicity, ']', sep='')
requirement_relationship = syntax.get_requirement_relationship(requirement_yaml)
requirement_relationship_type = syntax.get_relationship_type(requirement_relationship)
if requirement_relationship_type:
self.generate(' relationship :', requirement_relationship_type)
requirement_node = syntax.get_requirement_node_type(requirement_yaml)
if requirement_node:
self.generate(' node :', requirement_node)
interfaces = get_dict(type_yaml, INTERFACES)
# Render one interface operation as a UML class member line ("+name()"),
# optionally followed by its implementation artifact.
# Per TOSCA short/long notation, `operation_value` may be:
#   - a string: the implementation artifact name directly;
#   - a dict: with an optional "implementation" key that is itself either a
#     string, or a dict whose "primary" entry is a string or a {file: ...} dict;
#   - anything else: treated as "no implementation".
def generate_operation(operation_name, operation_value):
    self.generate('+', operation_name, '()', sep='')
    if isinstance(operation_value, str):
        implementation = operation_value
        primary_artifact_name = implementation
    elif isinstance(operation_value, dict):
        implementation = operation_value.get("implementation")
        if isinstance(implementation, str):
            primary_artifact_name = implementation
        elif isinstance(implementation, dict):
            primary_artifact_name = implementation.get("primary")
            if isinstance(primary_artifact_name, dict):
                # Long notation: the primary artifact is a dict; keep its file name.
                primary_artifact_name = primary_artifact_name.get("file")
        # NOTE(review): if `implementation` is present but neither str nor dict,
        # `primary_artifact_name` stays unbound and the lookup below would raise
        # NameError — confirm whether such input can occur.
    else:
        implementation = None
    if implementation is not None:
        implementation_as_string = str(implementation)
        # Decorate the implementation with the icon configured for its artifact
        # type (resolved from the artifact's file name), if any.
        artifact_type = self.type_system.get_artifact_type_by_filename(primary_artifact_name)
        icon = self.get_representation('artifact', artifact_type, 'icon')
        if icon is not None:
            implementation_as_string += " <img:%s{scale=0.5}>" % icon
        self.generate(" implementation: ", implementation_as_string, sep='')
if len(interfaces):
self.generate('--')
for interface_name, interface_yaml in interfaces.items():
self.generate('.. interface', interface_name, '..')
for key, value in syntax.get_operations(interface_yaml).get(OPERATIONS).items():
generate_operation(key, value)
if class_kind == 'I':
for key, value in syntax.get_operations(type_yaml).get(OPERATIONS).items():
generate_operation(key, value)
self.generate('}')
for attribute_name, attribute_yaml in attributes.items():
attribute_type = attribute_yaml.get(TYPE)
if data_types.get(attribute_type):
self.generate('"', class_name, '" *-- "1" "', attribute_type, '" : ', attribute_name, sep='')
if attribute_type in ['list', 'map']:
entry_schema_type = get_entry_schema_type(attribute_yaml)
if data_types.get(entry_schema_type):
self.generate('"', class_name, '" *-- "*" "', entry_schema_type, '" : ', attribute_name, sep='')
for property_name, property_yaml in properties.items():
property_type = syntax.get_property_type(property_yaml)
if data_types.get(property_type):
self.generate('"', class_name, '" *-- "1" "', property_type, '" : ', property_name, sep='')
if property_type in ['list', 'map']:
entry_schema_type = get_entry_schema_type(property_yaml)
if data_types.get(entry_schema_type):
self.generate('"', class_name, '" *-- "*" "', entry_schema_type, '" : ', property_name, sep='')
for capability_name, capability_yaml in capabilities.items():
capability_type = get_capability_type(capability_yaml)
if capability_type:
if capability_types.get(capability_type) == None:
self.generate('class "', capability_type, '" << (C,green) >> #DDDDDD', sep='')
self.generate('"', capability_type, '" "', translateToscaOccurrences2UmlMultiplicity(get_capability_occurrences(capability_yaml)), '" -* "', class_name, '" : ', capability_name, sep='')
if type(capability_yaml) == dict:
capability_valid_source_types = capability_yaml.get(VALID_SOURCE_TYPES)
if capability_valid_source_types:
for capability_valid_source_type in capability_valid_source_types:
if node_types.get(capability_valid_source_type) == None:
self.generate('class "', capability_valid_source_type, '" << (N,green) >> #DDDDDD', sep='')
self.generate('"', capability_valid_source_type, '" <.. "', class_name, '" : ', capability_name, '.valid_source_types', sep='')
for requirement_name, requirement_yaml in requirements.items():
requirement_capability_type = syntax.get_requirement_capability(requirement_yaml)
if requirement_capability_type:
if capability_types.get(requirement_capability_type) == None:
self.generate('class "', requirement_capability_type, '" << (C,green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" *- "', translateToscaOccurrences2UmlMultiplicity(get_requirement_occurrences(requirement_yaml)), '" "', requirement_capability_type, '" : ', requirement_name, sep='')
requirement_relationship = syntax.get_requirement_relationship(requirement_yaml)
requirement_relationship_type = syntax.get_relationship_type(requirement_relationship)
if requirement_relationship_type:
if relationship_types.get(requirement_relationship_type) == None:
self.generate('class "', requirement_relationship_type, '" << (R,green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" ..> "', requirement_relationship_type, '" : ', requirement_name, '.relationship', sep='')
requirement_node = syntax.get_requirement_node_type(requirement_yaml)
if requirement_node:
if node_types.get(requirement_node) == None:
self.generate('class "', requirement_node, '" << (N,green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" ..> "', requirement_node, '" : ', requirement_name, '.node', sep='')
for interface_name, interface_yaml in interfaces.items():
interface_type = interface_yaml.get(TYPE)
if interface_type:
if interface_types.get(interface_type) == None:
self.generate('class "', interface_type, '" << (I,green) >> #DDDDDD', sep='')
self.generate('"', interface_type, '" <|.. "', class_name, '" : ', interface_name, sep='')
valid_target_types = type_yaml.get(VALID_TARGET_TYPES)
if valid_target_types:
for valid_target_type in valid_target_types:
if capability_types.get(valid_target_type) == None:
self.generate('class "', valid_target_type, '" << (C,green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" ..> "', valid_target_type, '" : valid_target_types', sep='')
members = type_yaml.get(MEMBERS)
if members:
for member in members:
if node_types.get(member) == None:
self.generate('class "', member, '" << (N,green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" ..> "*" "', member, '" : members', sep='')
targets = type_yaml.get(TARGETS)
if targets:
for target in targets:
if node_types.get(target) == None and group_types.get(target) == None:
if 'nodes.' in target:
stereotype = 'N'
elif 'groups.' in target:
stereotype = 'G'
else:
stereotype = 'X'
self.generate('class "', target, '" << (', stereotype, ',green) >> #DDDDDD', sep='')
self.generate('"', class_name, '" ..> "*" "', target, '" : targets', sep='')
# Emit one UML class per TOSCA type in `types`.
# type_kind: section name (e.g. 'node_types'); only used by the commented-out
#            package wrapping below.
# class_kind: one-letter UML stereotype (D, A, C, R, I, N, G or P).
def generate_classes(type_kind, class_kind, types):
    # self.generate('package', type_kind, '{')
    for type_name, type_yaml in types.items():
        generate_class(type_name, class_kind, type_yaml, types)
    # self.generate('}')
# Generate the UML class associated to each type.
generate_classes('data_types', 'D', data_types)
generate_classes('artifact_types', 'A', artifact_types)
generate_classes('capability_types', 'C', capability_types)
generate_classes('relationship_types', 'R', relationship_types)
generate_classes('interface_types', 'I', interface_types)
generate_classes('node_types', 'N', node_types)
generate_classes('group_types', 'G', group_types)
generate_classes('policy_types', 'P', policy_types)
self.generate('@enduml')
self.close_file()
def get_representation(self, type_kind, type_name, property_name):
    """Return the configured graphical property (e.g. 'icon') for a type.

    Starts at `type_name` and climbs the derived_from hierarchy until a
    configuration entry that defines `property_name` is found. Returns None
    when the chain is exhausted (or the type is unknown) without a match.
    """
    representations = self.configuration.get(UML2, type_kind + '_types')
    current_name = type_name
    while True:
        current_name = self.type_system.get_type_uri(current_name)
        entry = representations.get(current_name)
        # An entry only wins if it actually defines the requested property;
        # otherwise keep climbing to the parent type.
        if entry is not None and property_name in entry:
            return entry.get(property_name)
        current_type = self.type_system.get_type(current_name)
        if current_type is None:
            # TODO: log error? (type name not found in the type system)
            break
        current_name = current_type.get(syntax.DERIVED_FROM)
        if current_name is None:
            break  # reached a root type
    return None
# node representation | |
from lite_content.lite_exporter_frontend import generic
from lite_content.lite_exporter_frontend.generic import PERMISSION_FINDER_LINK
class GoodsList:
    """Strings for the exporter's product list page."""

    TITLE = "Product list"
    CREATE_GOOD = "Add a product"

    class Count:
        # Messages describing how many products are listed / match the filters.
        ORGANISATION_ZERO = "Your organisation has no products listed."
        ORGANISATION_ONE = "Your organisation has 1 product listed"
        ORGANISATION_MANY = "Your organisation has %s products listed"  # %s will add the count of goods
        FILTERED_ZERO = "No products match your filters"
        FILTERED_ONE = "1 product matches your filters"
        FILTERED_MANY = "%s products match your filters"  # %s will add the count of goods

    class Filter:
        # Labels for the product list filter panel.
        DESCRIPTION = generic.DESCRIPTION
        CONTROL_LIST_ENTRY = "Control list classification (CLC)"
        PART_NUMBER = generic.PART_NUMBER
        APPLY = "Apply filters"
        CLEAR = "Clear filters"
        SHOW = "Show filters"
        HIDE = "Hide filters"

    class Table:
        # Column headings for the product list table.
        DESCRIPTION = generic.DESCRIPTION
        CONTROL_LIST_ENTRY = "Control list entries"
        IS_GOOD_CONTROLLED = "Controlled"
        PART_NUMBER = generic.PART_NUMBER
        QUANTITY = "Quantity"
        VALUE = "Value"
        INCORPORATED = "Incorporated"
        COUNTRIES = "Destinations"
        STATUS = "Status"
        PRODUCT_TYPE = "Product type"
class Documents:
    """Strings for a product's attached-documents list."""

    TITLE = "Documents"
    NO_DOCUMENT_ATTACHED = "There are no documents."
    NAME = "File"
    DESCRIPTION = "Description"
    UPLOADED_BY = "Uploaded by"
class GoodPage:
    """Strings for the single-product detail page."""

    TITLE = "Product"
    ADD_TO_APPLICATION = "Add to application"
    DELETE_GOOD = "Delete product"
    YOUR_QUERY_HEADING = "Your query"
    VERIFIED = "ECJU has verified this product based on the information provided"
    IN_REVIEW = "ECJU is currently reviewing this product."
    YOUR_GOOD = "Product"
    NO_LONGER_CAN_BE_CHANGED = "This product has been used on an application so can’t be edited."

    class Tabs:
        # Tab labels on the product detail page.
        DETAILS = "Details"
        NOTES = "Notes"
        ECJU_QUERIES = "ECJU queries"
        GENERATED_DOCUMENTS = "ECJU documents"

    class Query:
        # Labels shown when the product has an associated goods query.
        TITLE = "Product query"
        CASE_OFFICER = "Case officer"
        NO_ASSIGNED_CASE_OFFICER = "Not assigned"
        REFERENCE = "ECJU reference"
        CLC_RAISED_REASONS = "CLC query reasons"
        GRADING_RAISED_REASONS = "Grading query reasons"

    class Document:
        # Document action labels shared with the generic module.
        DOWNLOAD = generic.Document.DOWNLOAD
        DELETE = generic.Document.DELETE
        PROCESSING = generic.Document.PROCESSING
        ATTACH = generic.Document.ATTACH
        REMOVE = generic.Document.REMOVE

    class Table:
        # Summary-table headings on the product detail page.
        DESCRIPTION = "Description"
        IS_GOOD_CONTROLLED = "Controlled"
        CONTROL_LIST_ENTRY = "CLC"
        CATEGORY = "Category"
        MILITARY_USE = "Military use"
        COMPONENT = "Component"
        INFORMATION_SECURITY_FEATURES = "Information security features"
        PURPOSE_SOFTWARE_TECHNOLOGY = "Purpose"

    class Grading:
        # Security grading summary labels.
        IS_GRADED = "Security graded"
        GRADING = "Grade"
        REFERENCE = "Reference"
        ISSUING_AUTHORITY = "Issuing authority"
        DATE_OF_ISSUE = "Date of issue"

    class FirearmDetails:
        # Firearm-specific summary labels.
        PRODUCT_TYPE = "Product type"
        SPORTING_SHOTGUN = "Sporting shotgun"
        YEAR_OF_MANUFACTURE = "Year of manufacture"
        REPLICA_FIREARM = "Replica firearm"
        CALIBRE = "Calibre"
        COVERED_BY_THE_FIREARMS_ACT_1968 = "Firearms Act 1968"
        # Fix: this attribute was assigned twice in a row ("Serial numbers",
        # then "Identification markings"); only the second binding ever took
        # effect at runtime, so the shadowed first assignment was removed.
        IDENTIFICATION_MARKINGS = "Identification markings"
class DocumentAvailabilityForm:
    """Strings for the form asking whether a product document can be supplied."""

    TITLE = "Do you have a document that shows what your product is and what it’s designed to do?"
    # Hint text shown under the question.
    DESCRIPTION = (
        "This could be a specification, datasheet, sales brochure, drawing or anything else "
        "that fully details what the product is and what it's designed for."
    )
class DocumentSensitivityForm:
    """Strings for the form asking whether a product document is too sensitive to upload."""

    TITLE = "Is the document rated above OFFICIAL-SENSITIVE?"
    # Markdown shown when the document is too sensitive to attach; the
    # <noscript> heading only renders for non-JS clients.
    ECJU_HELPLINE = (
        "**<noscript>If the answer is No;</noscript>**\n\n"
        "Contact ECJU to arrange a more secure way to send this document.\n\n"
        " You can continue with the application without attaching a document.\n\n"
        "**ECJU helpline**\n 020 7215 4594\n "
        "[Find out about call charges (opens in new tab)](https://www.gov.uk/call-charges)"
    )
    SUBMIT_BUTTON = "Save and continue"

    class Options:
        YES = "Yes"
        NO = "No"
class CreateGoodForm:
    """Strings for the multi-step "add a product" form."""

    TITLE_APPLICATION = "Add a product to your application"
    TITLE_GOODS_LIST = "Add a product to your product list"
    SUBMIT_BUTTON = "Continue"
    BACK_BUTTON = "Back"

    class Description:
        TITLE = generic.DESCRIPTION
        DESCRIPTION = (
            "Start with the product name to make it easier to find the product when needed. Include the commodity code "
            "if you know it."
        )

    class IsControlled:
        TITLE = "Is the product on the control list?"
        DESCRIPTION = (
            "Products that aren't on the " + PERMISSION_FINDER_LINK + " may be affected by [military end use controls]"
            "(https://www.gov.uk/guidance/export-controls-military-goods-software-and-technology), "
            "[current trade sanctions and embargoes]"
            "(https://www.gov.uk/guidance/current-arms-embargoes-and-other-restrictions) or "
            "[weapons of mass destruction controls](https://www.gov.uk/guidance/supplementary-wmd-end-use-controls). "
            "If the product isn't subject to any controls, you'll get a no licence required (NLR) document from ECJU."
        )
        # NOTE(review): CLC_REQUIRED is currently identical to DESCRIPTION above —
        # confirm whether the duplication is intentional.
        CLC_REQUIRED = (
            "Products that aren't on the " + PERMISSION_FINDER_LINK + " may be affected by [military end use controls]"
            "(https://www.gov.uk/guidance/export-controls-military-goods-software-and-technology), "
            "[current trade sanctions and embargoes]"
            "(https://www.gov.uk/guidance/current-arms-embargoes-and-other-restrictions) or "
            "[weapons of mass destruction controls](https://www.gov.uk/guidance/supplementary-wmd-end-use-controls). "
            "If the product isn't subject to any controls, you'll get a no licence required (NLR) document from ECJU."
        )
        YES = "Yes"
        NO = "No"
        UNSURE = "I don't know, raise a control list classification (CLC) query"

    class ControlListEntry:
        TITLE = "Control list entries"
        DESCRIPTION = "Type to get suggestions. For example, ML1a."

    class IsGraded:
        TITLE = "Does the product have a security grading?"
        DESCRIPTION = (
            "For example, UK OFFICIAL or NATO UNCLASSIFIED. The security grading of the product doesn't affect if an "
            "export licence is needed."
        )
        YES = "Yes"
        NO = "No, it doesn't need one"
        RAISE_QUERY = "No, it needs one, apply for a private venture (PV) grading"

    class PartNumber:
        TITLE = generic.PART_NUMBER

    class ProductCategory:
        # Product category radio options (group numbers match the backend categories).
        TITLE = "Select a product category"
        GROUP1_PLATFORM = "Platform, vehicle, system or machine"
        GROUP1_DEVICE = "Device, equipment or object"
        GROUP1_COMPONENTS = "Components, modules or accessories of something"
        GROUP1_MATERIALS = "Materials or substances"
        GROUP2_FIREARMS = "Firearms"
        GROUP3_SOFTWARE = "Software"
        GROUP3_TECHNOLOGY = "Technology"

    class MilitaryUse:
        TITLE = "Is the product for military use?"
        YES_DESIGNED = "Yes, specially designed for military use"
        YES_MODIFIED = "Yes, modified for military use"
        MODIFIED_MILITARY_USE_DETAILS = "Provide details of the modifications"
        NO = "No"

    class ProductComponent:
        TITLE = "Is the product a component?"
        YES_DESIGNED = "Yes, it's designed specially for hardware"
        YES_MODIFIED = "Yes, it's been modified for hardware"
        YES_GENERAL_PURPOSE = "Yes, it's a general purpose component"
        NO = "No"
        DESIGNED_DETAILS = "Provide details of the hardware"
        MODIFIED_DETAILS = "Provide details of the modifications and the hardware"
        GENERAL_DETAILS = "Provide details of the types of applications it's intended to be used in"

    class ProductInformationSecurity:
        TITLE = "Is the product designed to employ 'information security' features?"
        INFORMATION_SECURITY_DETAILS = "Provide details of the information security features"
        NO = "No"

    class TechnologySoftware:
        # NOTE(review): the trailing space looks intentional — presumably the
        # product type name is appended at render time; confirm against callers.
        TITLE = "Describe the purpose of the "

    class FirearmGood:
        """Strings for the firearm-specific steps of the form."""

        class ProductType:
            TITLE = "Select the type of product"
            FIREARM = "Firearm"
            COMPONENTS_FOR_FIREARM = "Component of a firearm"
            AMMUNITION = "Ammunition"
            COMPONENTS_FOR_AMMUNITION = "Component of firearm ammunition"
            FIREARMS_ACCESSORY = "Accessory of a firearm"
            SOFTWARE_RELATED_TO_FIREARM = "Software relating to a firearm"
            TECHNOLOGY_RELATED_TO_FIREARM = "Technology relating to a firearm"

        class FirearmsAmmunitionDetails:
            TITLE = "Firearms and ammunition details"
            YEAR_OF_MANUFACTURE = "Year of manufacture"
            CALIBRE = "Enter the calibre"

        class FirearmsReplica:
            TITLE = "Is the product a replica firearm?"
            DESCRIPTION = "Describe the firearm the product is a replica of"

        class FirearmsActCertificate:
            TITLE = "Is the product covered by Section 1, 2 or 5 of the Firearms Act 1968?"
            FIREARMS_ACT = "Firearms Act 1968:"
            SECTION_ONE = "Section 1 (opens in new tab)"
            SECTION_ONE_LINK = "http://www.legislation.gov.uk/ukpga/1968/27/section/1"
            SECTION_TWO = "Section 2 (opens in new tab)"
            SECTION_TWO_LINK = "http://www.legislation.gov.uk/ukpga/1968/27/section/2"
            SECTION_FIVE = "Section 5 (opens in new tab)"
            SECTION_FIVE_LINK = "http://www.legislation.gov.uk/ukpga/1968/27/section/5"
            SECTION_CERTIFICATE_NUMBER = "Certificate number"
            EXPIRY_DATE = "Expiry date"
            EXPIRY_DATE_HINT = "For example, 12 11 2022"
            YES = "Yes"
            NO = "No"
            DONT_KNOW = "I don't know"

        class IdentificationMarkings:
            TITLE = "Has the product been marked with a serial number or other type of identification marking?"
            MARKINGS_DETAILS = "Serial number or other type of marking"
            MARKINGS_HELP_TEXT = "Enter one or more"
            NO_MARKINGS_DETAILS = "Explain why the product has not been marked"
            YES = "Yes"
            NO = "No"
class GoodGradingForm:
    """Strings for the security-grading details form."""

    TITLE = "Security grading"
    DESCRIPTION = ""
    # Grading value fields.
    PREFIX = "Prefix"
    GRADING = "Grading"
    SUFFIX = "Suffix"
    OTHER_GRADING = "Other type of security grading"
    # Issuing details.
    ISSUING_AUTHORITY = "Issuing authority"
    REFERENCE = "Reference"
    DATE_OF_ISSUE = "Date of issue"
    BUTTON = "Save and continue"
class GoodsQueryForm:
TITLE = "Product query"
DESCRIPTION = ""
BACK_LINK = "Back to product"
BUTTON = "Submit"
class CLCQuery:
TITLE = "Raise a control list classification (CLC) query"
class Code:
TITLE = "What do you think the CLC is for the product?"
DESCRIPTION = "For example, ML1a."
class Details:
TITLE = "Comments"
class PVGrading:
TITLE = "Apply for a private venture (PV) grading"
class Details:
TITLE = "Comments"
class Additional:
TITLE = "Comments"
DESCRIPTION = "Include | |
<gh_stars>1-10
# (c) Copyright [2018] Micro Focus or one of its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############################################################################################################
# __ __ ___ ____ ______ ____ __ ____ ___ ___ _ ____ __ __ ______ __ __ ___ ____ #
# | | | / _| \| | | / ]/ | | | | | | \| | | | | |/ \| \ #
# | | |/ [_| D | || | / /| o | | _ _ | | | o | | | | | | | _ | #
# | | | _| /|_| |_|| |/ / | | | \_/ | |___ | _/| ~ |_| |_| _ | O | | | #
# | : | [_| \ | | | / \_| _ | | | | | | | |___, | | | | | | | | | #
# \ /| | . \ | | | \ | | | | | | | | | | | | | | | | | | | #
# \_/ |_____|__|\_| |__| |____\____|__|__| |___|___|_____| |__| |____/ |__| |__|__|\___/|__|__| #
# #
############################################################################################################
# Vertica-ML-Python allows user to create RVD (Resilient Vertica Dataset). #
# RVD simplifies data exploration, data cleaning and machine learning in Vertica. #
# It is an object which keeps in it all the actions that the user wants to achieve #
# and execute them when they are needed. #
#####################################################################################
# #
# Author: <NAME> #
# #
######################
# Libraries
import numpy as np
import os
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import shutil
import time
import matplotlib.colors as colors
from random import shuffle
from vertica_ml_python.rvc import RVC
from vertica_ml_python.fun import print_table
from vertica_ml_python.fun import isnotebook
from vertica_ml_python.fun import run_query
from vertica_ml_python.fun import column_matrix
# Drop Table if it exists
def drop_table(input_relation,cursor,print_info=True):
cursor.execute("select 1;")
try:
query="drop table {};".format(input_relation)
cursor.execute(query)
if (print_info):
print("The table {} was successfully dropped.".format(input_relation))
except:
print("/!\\ Warning: The table {} doesn't exist !".format(input_relation))
# Drop View if it exists
def drop_view(view_name,cursor,print_info=True):
cursor.execute("select 1;")
try:
query="drop view {};".format(view_name)
cursor.execute(query)
if (print_info):
print("The view {} was successfully dropped.".format(view_name))
except:
print("/!\\ Warning: The view {} doesn't exist !".format(view_name))
# Create a RVD from a csv file (= Vertica CSV parser using Flex Tables)
def read_csv(path,cursor,local=True,input_relation=None,delimiter=',',columns=None,types=None,
        null='',enclosed_by='"',escape='\\',skip=1,temporary=False,skip_all=False,
        split=False,split_name='vpython_split'):
    """Load a CSV file into a new Vertica table and return it wrapped in a RVD.

    Two modes:
      - columns is None: a temporary flex table + fcsvparser guess the column
        types, optionally letting the user correct them interactively;
      - columns/types given: a plain table is created and a COPY with the given
        delimiter/null/quoting options loads the file (honouring `skip`).

    path            -- CSV file path ('local' controls client- vs server-side COPY).
    input_relation  -- target table name; defaults to the CSV file's base name.
    skip            -- header lines to skip (only used in the explicit-columns branch).
    temporary       -- create a TEMPORARY table instead of a regular one.
    skip_all        -- skip the interactive type-confirmation loop (flex mode).
    split/split_name-- add a random() float column (for later train/test splits).
    Returns a RVD on success, None if the table already exists.
    """
    # --- parameter validation -------------------------------------------------
    if (not(isinstance(skip,int)) or (skip<0)):
        raise TypeError("The parameter 'skip' must be a positive integer")
    if (not(isinstance(temporary,bool))):
        raise TypeError("The parameter 'temporary' must be a bool")
    if (not(isinstance(skip_all,bool))):
        raise TypeError("The parameter 'skip_all' must be a bool")
    if (not(isinstance(local,bool))):
        raise TypeError("The parameter 'local' must be a bool")
    if (not(isinstance(split_name,str))):
        raise TypeError("The parameter 'split_name' must be a varchar")
    if (not(isinstance(escape,str))):
        raise TypeError("The parameter 'escape' must be a varchar")
    if (not(isinstance(enclosed_by,str))):
        raise TypeError("The parameter 'enclosed_by' must be a varchar")
    if (not(isinstance(null,str))):
        raise TypeError("The parameter 'null' must be a varchar")
    if (not(isinstance(delimiter,str))):
        raise TypeError("The parameter 'delimiter' must be a varchar")
    if (not(isinstance(path,str))):
        raise TypeError("The parameter 'path' must be a varchar")
    # `local` and `temporary` are re-bound from bool to the SQL fragment they
    # contribute to the COPY / CREATE statements.
    if (local):
        local=" local "
    else:
        local=""
    if (type(input_relation)!=str):
        # Default table name: CSV file base name without extension.
        input_relation=path.split("/")[-1].split(".csv")[0]
    if (temporary):
        temporary="temporary"
    else:
        temporary=""
    # Split an optional "schema.table" qualification.
    schema_input_relation=input_relation.split(".")
    if (len(schema_input_relation)==1):
        schema=None
    else:
        input_relation=schema_input_relation[1]
        schema=schema_input_relation[0]
    # Abort early if the target table already exists.
    query="select column_name from columns where table_name='{}'".format(input_relation)
    if (schema!=None):
        query+=" and table_schema='{}'".format(schema)
    cursor.execute(query)
    query_result=cursor.fetchall()
    if (query_result!=[]):
        print("/!\\ Warning: The table {} already exists !".format(input_relation))
        return
    else:
        if (columns==None):
            # --- flex-table branch: let Vertica guess the column types -------
            flex_name="_vpython"+str(np.random.randint(10000000))+"_flex_"
            query="drop table if exists "+flex_name
            cursor.execute(query)
            query="create flex table if not exists "+flex_name+"()"
            cursor.execute(query)
            # NOTE(review): `skip` is not applied in this branch (fcsvparser
            # call below has no skip clause) — confirm whether that is intended.
            query="copy "+flex_name+" from"+local+"'{}' parser fcsvparser(delimiter='{}',"
            query+="enclosed_by='{}',escape='{}') null '{}'"
            query=query.format(path,delimiter,enclosed_by,escape,null)
            cursor.execute(query)
            query="select compute_flextable_keys('"+flex_name+"');"
            cursor.execute(query)
            query="select key_name,data_type_guess from "+flex_name+"_keys"
            cursor.execute(query)
            query_result=cursor.fetchall()
            columns=[]
            for column in query_result:
                columns+=[[item for item in column]]
            print("The parser guess the following columns and types:")
            for column,column_type in columns:
                print(column+": "+column_type)
            print("Illegal characters in the columns names will be erased.")
            if not(skip_all):
                # Interactive confirmation loop; any truthy answer other than
                # 0/2 (and the error paths, which reset `next`) exits the loop.
                next=False
                while not(next):
                    print("Is any type wrong?\nIf one of the types is not correct, it will be considered as Varchar(100).")
                    print("0 - There is one type that I want to modify.")
                    print("1 - I wish to continue.")
                    print("2 - I wish to see the columns and their types again.")
                    next=input()
                    if (next==0 or next=='0'):
                        print("please write ['column_name','column_type'] to modify the type of the corresponding column.")
                        try:
                            # NOTE(review): eval() on console input is unsafe with
                            # untrusted users; also, after one successful change
                            # `next` stays truthy ('0') so the loop exits — confirm
                            # whether multiple changes per session were intended.
                            column_name,column_type=eval(input())
                            for column in columns:
                                if (column[0]==column_name):
                                    column[1]=column_type
                                    print("type of "+column_name+" has been successfully changed.")
                                    break
                        except:
                            print("Failed to change type. Try again.")
                            next=False
                    elif (next==2 or next=='2'):
                        for column,column_type in columns:
                            print(column+": "+column_type)
                        next=False
                    elif (next!=1 and next!='1'):
                        print("Please enter a value between 0 and 2.")
                        next=False
            # Validate each guessed type by casting a sample; fall back to
            # Varchar(100) on failure (Interval is unconditionally downgraded).
            for column in columns:
                try:
                    if (column[1]=="Interval"):
                        column[1]="Varchar(100)"
                        print("/!\\ Warning: Type of {} was changed to Varchar(100) [Interval type is not supported]".format(column[0]))
                    elif ("Varchar" not in column[1]):
                        query='select (case when "'+column[0]+'"=\''+null+'\' then null else "'+column[0]+'" end)::'+column[1]+' as "'+column[0]+'"'
                        query+=" from "+flex_name+" where "+column[0]+" is not null limit 1000"
                        cursor.execute(query)
                except:
                    print("/!\\ Warning: Type of {} was changed to Varchar(100)".format(column[0]))
                    column[1]="Varchar(100)"
            # Build the typed SELECT list, stripping characters that are not
            # legal in column names.
            columns=['(case when "'+item[0]+'"=\''+null+'\' then null else "'+item[0]+'" end)::'+item[1]+' as "'+
                    item[0].replace('.','').replace('-','').replace('+','').replace('=','').replace('*','')
                    +'"' for item in columns]
            if (split):
                columns+=['random() as '+split_name]
            query=("create {} table {} as select ".format(temporary,input_relation)+",".join(columns)+
                " from "+flex_name)
            # NOTE(review): this second .format() is a no-op (the query has no
            # remaining placeholders) and would raise if the name contained
            # braces — looks like leftover code.
            query=query.format(input_relation)
            cursor.execute(query)
            query="drop table "+flex_name
            cursor.execute(query)
        else:
            # --- explicit-columns branch: caller supplies names and types ----
            if (type(columns)!=list) or (type(types)!=list) or (len(types)!=len(columns)):
                raise TypeError("The parameters 'types' and 'columns' must be two lists having the same size")
            query="create table {}(".format(input_relation)
            try:
                for i in range(len(columns)):
                    query+=columns[i]+" "+types[i]+", "
                if (split):
                    query+=" "+split_name+" float default random()"+");"
                else:
                    # Drop the trailing ", " before closing the column list.
                    query=query[0:-2]
                    query+=");"
            except:
                raise TypeError("The parameters 'types' and 'columns' must be two lists containing only varchars")
            cursor.execute(query)
            query="copy {}({}) from {} '{}' delimiter '{}' null '{}' enclosed by '{}' escape as '{}' skip {};".format(
                input_relation,", ".join(columns),local,path,delimiter,null,enclosed_by,escape,skip)
            cursor.execute(query)
        print("The table {} has been successfully created.".format(input_relation))
        return RVD(input_relation,cursor)
#
############################
# _______ _______ #
# | __ \ \ / / __ \ #
# | |__) \ \ / /| | | | #
# | _ / \ \/ / | | | | #
# | | \ \ \ / | |__| | #
# |_| \_\ \/ |_____/ #
# #
#############################
# #
# Resilient Vertica Dataset #
# #
#############################
#
##
class RVD:
###################
# #
# Special Methods #
# #
###################
#
# Initialization
#
# RVD has 7 main attributes: input_relation, cursor, dsn, columns, where, offset and limit
# It has also 7 other attributes to simplify the code and to have easy interaction with
# sql and matplotlib.
def __init__(self,input_relation,cursor=None,dsn=None,columns=None):
    """Build a RVD on top of an existing Vertica table or view.

    input_relation -- table/view name, optionally schema-qualified.
    cursor         -- open cursor to the database (required unless dsn given).
    dsn            -- ODBC DSN name; used to open a pyodbc cursor when no
                      cursor is supplied.
    columns        -- optional pre-fetched column list (skips the catalog query).
    """
    if ((isinstance(cursor,type(None))) and (isinstance(dsn,type(None)))):
        raise Exception("At least one of the two parameters (dsn or cursor) must receive an input for the RVD creation")
    if ((isinstance(cursor,type(None))) and not(isinstance(dsn,str))):
        raise Exception("If the cursor is not informed, the dsn must be a varchar corresponding to a Vertica DSN")
    elif (isinstance(dsn,str)):
        # Open a connection from the DSN; pyodbc is only required in this case.
        import pyodbc
        cursor=pyodbc.connect("DSN="+dsn).cursor()
        self.dsn=dsn
    # Split an optional "schema.table" qualification.
    schema_input_relation=input_relation.split(".")
    if (len(schema_input_relation)==1):
        # Name of the concerned table
        self.schema=None
        self.input_relation=input_relation
    else:
        self.input_relation=schema_input_relation[1]
        self.schema=schema_input_relation[0]
    # Cursor to the Vertica Database
    self.cursor=cursor
    # All the columns of the RVD; fetched from the catalog unless supplied.
    if (type(columns)!=list):
        query="select column_name from columns where table_name='{}'".format(self.input_relation)
        if (self.schema!=None):
            query+=" and table_schema='{}'".format(self.schema)
        cursor.execute(query)
        columns=cursor.fetchall()
        columns=[str(item) for sublist in columns for item in sublist]
    if (columns!=[]):
        self.columns=columns
        view=False
    else:
        # No table columns found: the relation may be a view.
        view=True
    if (view):
        query="select * from views where table_name='{}'".format(self.input_relation)
        if (self.schema!=None):
            query+=" and table_schema='{}'".format(self.schema)
        cursor.execute(query)
        columns=cursor.fetchall()
        if (columns==[]):
            # Neither a table nor a view: abort the construction.
            print("/!\\ Warning: No table or views '{}' found.\nNothing was created.".format(self.input_relation))
            del self
            return None
        # Materialise a 1000-row sample of the view in a temporary table just
        # to discover its column names through the catalog.
        name="_vpython"+str(np.random.randint(10000000))+"_tt_"
        query="drop table if exists "+name
        cursor.execute(query)
        query="create temporary table "+name+" as select * from "+input_relation+" limit 1000"
        cursor.execute(query)
        query="select column_name from columns where table_name='"+name+"'"
        cursor.execute(query)
        columns=cursor.fetchall()
        self.columns=[str(item) for sublist in columns for item in sublist]
        self.input_relation=name
    # Expose each column as a RVC attribute (rvd.column_name).
    for column in self.columns:
        new_rvc=RVC(column,parent=self)
        setattr(self,column,new_rvc)
    # Table Limitation
    self.limit=None
    # Table Offset
    self.offset=0
    # Rules for the cleaned data
    self.where=[]
    # Display the elapsed time during the query
    self.time_on=False
    # Display or not the sequal queries that are used during the RVD manipulation
    self.query_on=False
    # Use sqlparse to reindent the query
    self.reindent=False
    # Label Location and figure size
    self.legend_loc=(None,None,None)
    if (isnotebook()):
        self.figsize=(9,7)
    else:
        self.figsize=(7,5)
    # Figure colors: a fixed pleasant palette first, then every other
    # matplotlib named color in random order.
    rvd_colors=['dodgerblue','seagreen','indianred','gold','tan','pink','darksalmon','lightskyblue','lightgreen',
            'palevioletred','coral']
    all_colors=[item for item in colors.cnames]
    shuffle(all_colors)
    for c in all_colors:
        if c not in rvd_colors:
            rvd_colors+=[c]
    self.colors=rvd_colors
    # RVD history
    self.rvd_history=[]
    if (view):
        # Point back at the original view name and drop the sampling table.
        self.input_relation=input_relation
        query="drop table if exists "+name
        cursor.execute(query)
# Get and Set item
def __getitem__(self,index):
    """Support rvd["column"] as an alias for attribute access (rvd.column)."""
    return getattr(self, index)
def __setitem__(self,index,val):
    """Support rvd["column"] = value as an alias for attribute assignment
    (routed through this class's custom __setattr__)."""
    setattr(self, index, val)
# Object Representation
def __repr__(self,limit=30,table_info=True):
    """Render a sample of the relation as text.

    limit      -- maximum number of rows fetched for display.
    table_info -- append a "Name/rows/columns" summary line.
    """
    # The sample is complete when the RVD's own limit is below the display limit.
    if ((self.limit!=None) and (self.limit<limit)):
        is_finished=True
    else:
        is_finished=False
    query="select * from {} limit {}".format(self._table_transf_(),limit)
    self._display_query_(query)
    start_time = time.time()
    self.cursor.execute(query)
    self._display_time_(elapsed_time=time.time()-start_time)
    query_result=self.cursor.fetchall()
    data=[item for item in query_result]
    formatted_text=""
    if (data!=[]):
        # Transpose rows into per-column lists headed by the column name.
        data_columns=[[item] for item in self.columns]
        for row in data:
            for idx,val in enumerate(row):
                data_columns[idx]+=[val]
        formatted_text+=print_table(data_columns,is_finished=is_finished,offset=max(self.offset,0))
    else:
        # Empty relation: just list the column names.
        for column in self.columns:
            formatted_text+=column+" "
        formatted_text+="\n"
    if (table_info):
        formatted_text+="Name: {}, Number of rows: {}, Number of columns: {}".format(
            self.input_relation,self.count(),len(self.columns))
    # NOTE(review): in a notebook the assignment below *replaces* the table
    # text with only the summary line (print_table presumably displays the
    # HTML itself there) — confirm this is intended and not a lost "+=".
    if isnotebook():
        formatted_text="Name: {}, Number of rows: {}, Number of columns: {}".format(
            self.input_relation,self.count(),len(self.columns))
    return formatted_text
# Object attr affectation
def __setattr__(self,attr,val):
# input_relation
if (attr=="input_relation"):
if not(isinstance(val,str)):
print("/!\\ Warning: attribute 'input_relation' must be a string corresponding to a "
+"table or view inside your Vertica DB.\nYou are not allowed to manually change"
+ "this attribute, it can destroy the RVD robustness.\nNothing was changed.")
else:
self.__dict__[attr]=val
# schema
if (attr=="schema"):
if not(isinstance(val,(str,type(None)))):
print("/!\\ Warning: attribute 'schema' must be a string corresponding to a "
+"schema inside your Vertica DB.\nYou are not allowed to manually change"
+ "this attribute, it can destroy the RVD robustness.\nNothing was changed.")
else:
self.__dict__[attr]=val
# cursor
elif (attr=="cursor"):
try:
val.execute("select 1;")
result=int(val.fetchone()[0])
if (result==1):
self.__dict__[attr]=val
else:
print("/!\\ Warning: attribute 'cursor' must be a cursor to a Vertica DB having "
+"the fetchone and fetchall methods.\nNothing was changed.")
except:
print("/!\\ Warning: attribute 'cursor' must be a cursor to a Vertica DB. Use pyodbc or jaydebeapi for "
+"respectively ODBC and JDBC connection using Python.\nNothing was changed.")
# columns
elif (attr=="columns"):
error=False
if not(isinstance(val,list)):
error=True
else:
for item in val:
if not(isinstance(item,str)):
error=True
if (error):
print("/!\\ Warning: attribute 'columns' must be the list of the different table/view columns."
+"\nNothing was changed.")
else:
self.__dict__[attr]=val
# | |
for i in range(0,len(A[0])):
if B[4][i] == 0:
p2 = 0
else:
p2 = 100 - 100 * A[4][i]/B[4][i]
print_debug("%21s: %10.2f %10.2f %10.2f" % (A[0][i], A[4][i], B[4][i], abs(p2)), False, "")
if p2 < -1:
print_debug(" <+", False, "")
if p2 > 1:
print_debug(" <-", False, "")
print_debug("\n", False, "")
if "performance.log" in options.in_file:
print_debug("\n\n_________________Watch performance.log for details________________\n", False, "")
else:
print_debug("\n\n__________________________________________________________________\n", False, "")
def perf(options1, args):
global options
options = options1
global s
s = options.silent
# save current OS
global is_windows
is_windows = (platform.system() == 'Windows' or
'CYGWIN_NT' in platform.system())
global is_mac
is_mac = (platform.system() == 'Darwin')
# save current path
pwd = os.getcwd()
pwd = pwd + os.sep
pwd1 = pwd
if is_windows:
pwd1 = "..\\..\\"
if options.perf_target != "":
test_only_r = " sse2-i32x4 sse2-i32x8 sse4-i32x4 sse4-i32x8 sse4-i16x8 \
sse4-i8x16 avx1-i32x4 avx1-i32x8 avx1-i32x16 avx1-i64x4 avx1.1-i32x8 \
avx1.1-i32x16 avx1.1-i64x4 avx2-i32x8 avx2-i32x16 avx2-i64x4 \
avx512knl-i32x16 avx512skx-i32x16 "
test_only = options.perf_target.split(",")
for iterator in test_only:
if not (" " + iterator + " " in test_only_r):
error("unknow option for target: " + iterator, 1)
# check if cpu usage is low now
cpu_percent = cpu_check()
if cpu_percent > 20:
error("CPU Usage is very high.\nClose other applications.\n", 2)
# prepare build.log, perf_temp and perf.log files
global perf_log
if options.in_file:
perf_log = pwd + options.in_file
common.remove_if_exists(perf_log)
else:
perf_log = ""
global build_log
build_log = pwd + os.sep + "logs" + os.sep + "perf_build.log"
common.remove_if_exists(build_log)
if os.path.exists(pwd + os.sep + "logs") == False:
os.makedirs(pwd + os.sep + "logs")
global perf_temp
perf_temp = pwd + "perf_temp"
global ispc_test
global ispc_ref
global ref_compiler
global refc_compiler
# check that required compilers exist
PATH_dir = string.split(os.getenv("PATH"), os.pathsep)
ispc_test_exists = False
ispc_ref_exists = False
ref_compiler_exists = False
if is_windows == False:
ispc_test = "ispc"
ref_compiler = "clang++"
refc_compiler = "clang"
if options.compiler != "":
if options.compiler == "clang" or options.compiler == "clang++":
ref_compiler = "clang++"
refc_compiler = "clang"
if options.compiler == "icc" or options.compiler == "icpc":
ref_compiler = "icpc"
refc_compiler = "icc"
if options.compiler == "gcc" or options.compiler == "g++":
ref_compiler = "g++"
refc_compiler = "gcc"
else:
ispc_test = "ispc.exe"
ref_compiler = "cl.exe"
ispc_ref = options.ref
if options.ref != "":
options.ref = True
if os.environ.get("ISPC_HOME") != None:
if os.path.exists(os.environ["ISPC_HOME"] + os.sep + ispc_test):
ispc_test_exists = True
ispc_test = os.environ["ISPC_HOME"] + os.sep + ispc_test
for counter in PATH_dir:
if ispc_test_exists == False:
if os.path.exists(counter + os.sep + ispc_test):
ispc_test_exists = True
ispc_test = counter + os.sep + ispc_test
if os.path.exists(counter + os.sep + ref_compiler):
ref_compiler_exists = True
if ispc_ref != "":
if os.path.exists(counter + os.sep + ispc_ref):
ispc_ref_exists = True
ispc_ref = counter + os.sep + ispc_ref
if not ispc_test_exists:
error("ISPC compiler not found.\nAdded path to ispc compiler to your PATH variable or ISPC_HOME variable\n", 1)
if not ref_compiler_exists:
error("C/C++ compiler %s not found.\nAdded path to %s compiler to your PATH variable.\n" % (ref_compiler, ref_compiler), 1)
if options.ref:
if not ispc_ref_exists:
error("ISPC reference compiler not found.\nAdded path to ispc reference compiler to your PATH variable.\n", 1)
# checks that config file exists
path_config = os.path.normpath(options.config)
if os.path.exists(path_config) == False:
error("config file not found: %s.\nSet path to your config file in --config.\n" % options.config, 1)
sys.exit()
# read lines from config file except comments
f = open(path_config, 'r')
f_lines = f.readlines()
f.close()
lines =[]
for i in range(len(f_lines)):
if f_lines[i][0] != "%":
lines.append(f_lines[i])
length = len(lines)
# end of preparations
print_debug("Okey go go go!\n\n", s, perf_log)
# report command line
if __name__ == "__main__":
print_debug("Command line: %s\n" % " ".join(map(str, sys.argv)), s, perf_log)
# report used ispc
print_debug("Testing ispc: " + ispc_test + "\n", s, perf_log)
#print compilers versions
common.print_version(ispc_test, ispc_ref, ref_compiler, False, perf_log, is_windows)
# begin
i = 0
answer = []
answer_ref = []
# loop for all tests
perf_targets = [""]
target_number = 1
target_str_temp = ""
if options.perf_target != "":
perf_targets = options.perf_target.split(',')
target_str_temp = " -DISPC_IA_TARGETS="
target_number = len(perf_targets)
# Generate build targets for tests
if options.generator:
generator = options.generator
else:
if is_windows == True:
generator = "Visual Studio 14 Win64"
else:
generator = "Unix Makefiles"
examples_folder_ref = "examples_ref"
examples_folder_test = "examples_test"
install_prefix = "install"
cmake_command = "cmake -G " + "\"" + generator + "\"" + " -DCMAKE_INSTALL_PREFIX=" + install_prefix + " " + pwd + "examples"
if is_windows == False:
cmake_command += " -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_COMPILER=clang"
for target_i in range(target_number):
cur_target = perf_targets[target_i]
target_str = target_str_temp + cur_target
if options.ref:
build_folder = examples_folder_ref + os.sep + cur_target
if os.path.exists(build_folder):
shutil.rmtree(build_folder)
os.makedirs(build_folder)
cmake_command_ref = "cd " + build_folder + " && " + cmake_command + \
" -DISPC_EXECUTABLE=" + ispc_ref + target_str + " >> " + build_log
if os.system(cmake_command_ref) != 0:
error("Cmake command failed with reference compiler %s\n" % ispc_ref, 1)
# Build and install tests for reference compiler
if is_windows == False:
bu_command_ref = "cd " + build_folder + " && make install >> "+ build_log+" 2>> "+ build_log
else:
bu_command_ref = "msbuild " + build_folder + os.sep + "INSTALL.vcxproj /V:m /p:Configuration=Release /t:rebuild >> " + build_log
if os.system(bu_command_ref) != 0:
error("Build failed with reference compiler %s\n" % ispc_ref, 1)
build_folder = examples_folder_test + os.sep + cur_target
if os.path.exists(build_folder):
shutil.rmtree(build_folder)
os.makedirs(build_folder)
cmake_command_test = "cd " + build_folder + " && " + cmake_command + \
" -DISPC_EXECUTABLE=" + ispc_test + target_str + " >> " + build_log
if os.system(cmake_command_test) != 0:
error("Cmake command failed with test compiler %s\n" % ispc_test, 1)
# Build and install tests for test compiler
if is_windows == False:
bu_command_test = "cd " + build_folder + " && make install >> "+ build_log+" 2>> "+ build_log
else:
bu_command_test = "msbuild " + build_folder + os.sep + "INSTALL.vcxproj /V:m /p:Configuration=Release /t:rebuild >> " + build_log
if os.system(bu_command_test) != 0:
error("Build failed with test compiler %s\n" % ispc_test, 1)
# Run tests
while i < length-2:
# we read name of test
print_debug("%s" % lines[i], s, perf_log)
# read location of test
folder = lines[i+1]
folder = folder[:-1]
example = folder
# read parameters of test
command = lines[i+2]
command = command[:-1]
temp = 0
# execute test for each target
for target_i in range(target_number):
test = [lines[i][:-1],[],[],[],[],[]]
test_ref = [lines[i][:-1],[],[],[],[],[]]
cur_target = perf_targets[target_i]
folder = os.path.normpath(options.path + os.sep + examples_folder_test + os.sep + cur_target + \
os.sep + install_prefix + os.sep + "examples" + os.sep + example)
folder_ref = os.path.normpath(options.path + os.sep + examples_folder_ref + os.sep + cur_target + \
os.sep + install_prefix + os.sep + "examples" + os.sep + example)
# check that test exists
if os.path.exists(folder) == False:
error("Can't find test %s. Your path is: \"%s\".\nChange current location to ISPC_HOME or set path to ISPC_HOME in --path.\n" %
(lines[i][:-1], options.path), 1)
if is_windows == False:
ex_command_ref = "cd "+ folder_ref + " && ./" + example + " " + command + " >> " + perf_temp + "_ref"
ex_command = "cd "+ folder + " && ./" + example + " " + command + " >> " + perf_temp + "_test"
else:
ex_command_ref = "cd "+ folder_ref + " && " + example + ".exe " + command + " >> " + perf_temp + "_ref"
ex_command = "cd "+ folder + " && " + example + ".exe " + command + " >> " + perf_temp + "_test"
commands = [ex_command, ex_command_ref]
# parsing config parameters
next_line = lines[i+3]
if next_line[0] == "!": # we should take only one part of test output
R = next_line.split(' ')
c1 = int(R[1]) #c1 is a number of string which we want to use in test output
c2 = int(R[2]) #c2 is total number of strings in test output
temp = 1
else:
c1 = 1
c2 = 1
next_line = lines[i+3]
if next_line[0] == "^":
temp = 1
if next_line[0] == "^" and target_number == 1: #we should concatenate result of this test with previous one
run_test(commands, c1, | |
get_curve_knots(curve_node_name):
"""
Returns given curve knots
:param curve_node_name: str
:return: list(str)
"""
curve_fn = curve_utils.get_curve_fn(curve=curve_node_name)
curve_knots = curve_fn.knots()
return [float(knot) for knot in curve_knots]
def get_curve_knots_positions(curve_node_name, world_space=False):
    """
    Returns the 3D position of every knot (edit point) of the given curve
    :param curve_node_name: str, name of the curve node to query
    :param world_space: bool, return positions in world space when True, object space otherwise
    :return: list(tuple(float, float, float))
    """
    sample_space = maya.api.OpenMaya.MSpace.kWorld if world_space else maya.api.OpenMaya.MSpace.kObject
    curve_fn = curve_utils.get_curve_fn(curve=curve_node_name)
    positions = list()
    for knot_param in get_curve_knots(curve_node_name):
        point = curve_fn.getPointAtParam(knot_param, sample_space)
        positions.append((point.x, point.y, point.z))
    return positions
def get_curve_degree(curve_node_name):
    """
    Returns the degree of the given curve
    :param curve_node_name: str
    :return: int
    """
    degree_plug = '{}.degree'.format(curve_node_name)
    return maya.cmds.getAttr(degree_plug)
def get_curve_spans(curve_node_name):
    """
    Returns the number of spans of the given curve
    :param curve_node_name: str
    :return: int
    """
    return maya.cmds.getAttr('{}.spans'.format(curve_node_name))
def get_curve_form(curve_node_name):
    """
    Returns the form of the given curve (value of the curve's 'f' attribute)
    :param curve_node_name: str
    :return: int
    """
    form_plug = '{}.f'.format(curve_node_name)
    return maya.cmds.getAttr(form_plug)
def get_curve_cvs(curve_node_name, world_space=False):
    """
    Returns the positions of all CVs of the given curve
    :param curve_node_name: str
    :param world_space: bool, return CV positions in world space when True, object space otherwise
    :return: list(tuple(float, float, float))
    """
    sample_space = maya.api.OpenMaya.MSpace.kWorld if world_space else maya.api.OpenMaya.MSpace.kObject
    curve_fn = curve_utils.get_curve_fn(curve=curve_node_name)
    cv_positions = curve_fn.cvPositions(sample_space)
    return [(cv_positions[index].x, cv_positions[index].y, cv_positions[index].z)
            for index in range(len(cv_positions))]
def get_curve_cv_position_in_world_space(curve_node_name, cv_index):
    """
    Returns world space position of the given CV index in given curve node
    :param curve_node_name: str
    :param cv_index: int
    :return: list(float, float, float)
    """
    cv_component = '{}.cv[{}]'.format(curve_node_name, cv_index)
    return maya.cmds.xform(cv_component, query=True, translation=True, worldSpace=True)
def get_curve_cv_position_in_object_space(curve_node_name, cv_index):
    """
    Returns object space position of the given CV index in given curve node
    :param curve_node_name: str
    :param cv_index: int
    :return: list(float, float, float)
    """
    cv_component = '{}.cv[{}]'.format(curve_node_name, cv_index)
    return maya.cmds.xform(cv_component, query=True, translation=True, objectSpace=True)
def rebuild_curve(curve_node_name, spans, **kwargs):
    """
    Rebuilds curve with given parameters
    :param curve_node_name: str, curve to rebuild
    :param spans: int, number of spans the rebuilt curve should have
    :param kwargs: optional rebuild settings (see comments below)
    :return: result of maya.cmds.rebuildCurve
    """
    # Degree: 1: linear; 2: quadratic; 3: cubic; 5: quintic; 7: hepetic
    degree = kwargs.get('degree', 3)
    # Rebuild Type: 0: uniform; 1: reduce spans; 2: match knots; 3: remove multiple knots;
    # 4: curvature; 5: rebuild ends; 6: clean
    rebuild_type = kwargs.get('rebuild_type', 0)
    # End Knots: 0: uniform end knots; 1: multiple end knots
    end_knots = kwargs.get('end_knots', 0)
    # Keep range: 0: reparametrize the resulting curve from 0 to 1; 1: keep the original curve parametrization;
    # 2: reparametrize the result from 0 to number of spans
    keep_range = kwargs.get('keep_range', 1)
    construction_history = kwargs.get('construction_history', True)
    replace_original = kwargs.get('replace_original', False)
    keep_control_points = kwargs.get('keep_control_points', False)
    keep_end_points = kwargs.get('keep_end_points', True)
    keep_tangents = kwargs.get('keep_tangents', True)
    return maya.cmds.rebuildCurve(
        curve_node_name, spans=spans, rpo=replace_original, rt=rebuild_type, end=end_knots,
        kr=keep_range, kcp=keep_control_points, kep=keep_end_points, kt=keep_tangents,
        d=degree, ch=construction_history)
def create_circle_curve(name, **kwargs):
    """
    Creates a new circle control curve
    :param name: str, name of the new circle curve
    :param kwargs: 'construction_history' (bool) and 'normal' (tuple(float, float, float)) options
    :return: str, name of the created transform
    """
    history = kwargs.get('construction_history', True)
    circle_normal = kwargs.get('normal', (1, 0, 0))
    circle_transform = maya.cmds.circle(n=name, normal=circle_normal, ch=history)[0]
    return circle_transform
def create_curve(name, degree, cvs, knots, form, **kwargs):
    """
    Creates a new Nurbs curve
    :param name: str, name of the new curve
    :param degree: int, degree of the new curve
    :param cvs: list(tuple(float, float, float)), positions of the curve CVs
    :param knots: list(float), knot vector of the curve
    :param form: int, curve form value passed through to MFnNurbsCurve.create
    :param kwargs: '2d' (bool, default False) and 'rational' (bool, default True)
    :return: result of MFnDependencyNode.setName on the created curve object
    """
    is_2d = kwargs.pop('2d', False)
    rational = kwargs.pop('rational', True)
    num_cvs = len(cvs)
    num_knots = len(knots)

    # Convert the Python sequences into the API array types expected by MFnNurbsCurve.create
    cv_array = maya.api.OpenMaya.MPointArray(num_cvs, maya.api.OpenMaya.MPoint.kOrigin)
    knots_array = maya.api.OpenMaya.MDoubleArray(num_knots, 0)
    for i in range(num_cvs):
        cv_array[i] = maya.api.OpenMaya.MPoint(cvs[i][0], cvs[i][1], cvs[i][2], 1.0)
    for i in range(num_knots):
        knots_array[i] = knots[i]

    curve_fn = maya.api.OpenMaya.MFnNurbsCurve()
    curve_data = maya.api.OpenMaya.MObject()
    # BUGFIX: pass knots_array (the MDoubleArray filled above) instead of the raw
    # Python list `knots`; previously knots_array was built but never used.
    curve_obj = curve_fn.create(
        cv_array,
        knots_array,
        degree,
        form,
        is_2d,
        rational,
        curve_data
    )
    new_curve = maya.api.OpenMaya.MFnDependencyNode(curve_obj).setName(name)
    return new_curve
def create_curve_from_transforms(transforms, spans=None, description='from_transforms'):
    """
    Creates a curve whose CVs are placed at the positions of the given transforms.
    Useful when creating a curve from a joint chain (spines/tails)
    :param transforms: list(str), transforms to generate the curve from
    :param spans: int or None, number of spans the final curve should have
    :param description: str, description given to the curve
    :return: str, name of the new curve
    """
    return curve_utils.transforms_to_curve(transforms=transforms, spans=spans, description=description)
def create_wire(surface, curves, name='wire', **kwargs):
    """
    Creates a wire deformer that wires given surface/curve to given curves
    :param surface: str
    :param curves: list(str) or str, curve(s) driving the wire
    :param name: str
    :param kwargs: 'dropoff_distance' (list) and 'group_with_base' (bool)
    :return: result of maya.cmds.wire
    """
    wire_curves = python.force_list(curves)
    dropoff = kwargs.get('dropoff_distance', [])
    group_base = kwargs.get('group_with_base', False)
    return maya.cmds.wire(surface, w=wire_curves, n=name, dds=dropoff, gw=group_base)
def find_deformer_by_type(geo_obj, deformer_type, **kwargs):
    """
    Given an object, finds deformers of the given type in its history
    :param geo_obj: str, name of a mesh
    :param deformer_type: str, corresponds to the Maya deformer type (skinCluster, blendShape, etc)
    :param kwargs: 'return_all' (bool), whether to return all deformers found or just the first one
    :return: result of deformer_utils.find_deformer_by_type
    """
    return_all_found = kwargs.get('return_all', False)
    return deformer_utils.find_deformer_by_type(geo_obj, deformer_type=deformer_type, return_all=return_all_found)
# =================================================================================================================
# JOINTS
# =================================================================================================================
def create_joint(name, size=1.0, *args, **kwargs):
    """
    Creates a new joint
    :param name: str, name of the new joint
    :param size: float, display radius of the joint
    :param kwargs: 'position' (list(float, float, float)), position of the joint
    :return: str, name of the created joint
    """
    joint_position = kwargs.pop('position', [0, 0, 0])
    return maya.cmds.joint(name=name, rad=size, p=joint_position)
def orient_joint(joint, **kwargs):
    """
    Orients given joint using its orient attributes
    :param joint: str, joint to orient
    :param kwargs: 'aim_at' (int) and 'aim_up_at' (int) options, see comments below
    :return: result of OrientJoint.run
    """
    # Aim At: 0: aim at world X; 1: aim at world Y; 2: aim at world Z; 3: aim at immediate child;
    # 4: aim at immediate parent; 5: aim at local parent (aiming at the parent and the reverseing the direction)
    aim_at = kwargs.get('aim_at', 3)
    # Aim Up At: 0: parent rotation:; 1: child position; 2: parent position; 3: triangle plane
    aim_up_at = kwargs.get('aim_up_at', 0)
    attributes = joint_utils.OrientJointAttributes(joint)
    attributes.set_default_values()
    orienter = joint_utils.OrientJoint(joint)
    orienter.set_aim_at(aim_at)
    orienter.set_aim_up_at(aim_up_at)
    return orienter.run()
def mirror_joint(joint, mirror_plane='YZ', mirror_behavior=True, search_replace=None):
    """
    Mirrors given joint and its hierarchy
    :param joint: str
    :param mirror_plane: str, 'YZ', 'XY' or any other value (mirrors across XZ)
    :param mirror_behavior: bool
    :param search_replace: list(str)
    :return: list(str)
    """
    # TODO: Add option to cleanup all nodes that are not joints after mirrror (such as constraints)
    plane_flags = {'YZ': 'mirrorYZ', 'XY': 'mirrorXY'}
    flag_name = plane_flags.get(mirror_plane, 'mirrorXZ')
    mirror_kwargs = {flag_name: True}
    return maya.cmds.mirrorJoint(
        joint, mirrorBehavior=mirror_behavior, searchReplace=search_replace, **mirror_kwargs)
def orient_joints(joints_to_orient=None, **kwargs):
    """
    Orients the given joints using their orient attributes
    :param joints_to_orient: list(str) or None
    :param kwargs: 'force_orient_attributes' (bool)
    :return: result of OrientJointAttributes.orient_with_attributes
    """
    force_attributes = kwargs.get('force_orient_attributes', False)
    return joint_utils.OrientJointAttributes.orient_with_attributes(
        objects_to_orient=joints_to_orient, force_orient_attributes=force_attributes)
def zero_orient_joint(joints_to_zero_orient):
    """
    Zeroes the orientation of the given joints
    :param joints_to_zero_orient: list(str)
    :return: result of OrientJointAttributes.zero_orient_joint
    """
    return joint_utils.OrientJointAttributes.zero_orient_joint(joints_to_zero_orient)
def start_joint_tool():
    """
    Starts the DCC tool used to create new joints/bones
    :return: result of joint_utils.start_joint_tool
    """
    return joint_utils.start_joint_tool()
def insert_joints(count, root_joint=None):
    """
    Inserts the given number of joints between the root joint and its direct child
    :param count: int, number of joints to insert
    :param root_joint: str or None
    """
    # NOTE(review): root_joint is accepted but never forwarded to joint_utils.insert_joints,
    # so presumably insertion acts on the current selection -- confirm intended behavior
    return joint_utils.insert_joints(joint_count=count)
def set_joint_local_rotation_axis_visibility(flag, joints_to_apply=None):
    """
    Sets the visibility of the local rotation axis of the given joints
    :param flag: bool, visibility state to apply
    :param joints_to_apply: list(str) or None
    :return: bool
    """
    return joint_utils.set_joint_local_rotation_axis_visibility(joints=joints_to_apply, bool_value=flag)
def get_joint_display_size():
    """
    Returns current DCC global joint display size
    :return: float
    """
    return maya.cmds.jointDisplayScale(query=True, absolute=True)
def set_joint_display_size(value):
    """
    Sets current DCC global joint display size
    :param value: float, new display size; must be positive
    :return: False when value is not positive, otherwise result of jointDisplayScale
    """
    if value <= 0.0:
        return False
    return maya.cmds.jointDisplayScale(value, absolute=True)
def toggle_xray_joints():
    """
    Toggles XRay joints functionality (joints are rendered in front of the geometry)
    on the model panel that currently has focus
    """
    panel = maya.cmds.getPanel(withFocus=True)
    xray_enabled = maya.cmds.modelEditor(panel, query=True, jointXray=True)
    maya.cmds.modelEditor(panel, edit=True, jointXray=not xray_enabled)
def zero_scale_joint(jnt):
    """
    Sets the given joint scale to zero and compensates the change by modifying the
    joint translation and rotation (zeroScaleOrient)
    :param jnt: str, joint to modify
    :return: result of maya.cmds.joint
    """
    return maya.cmds.joint(jnt, edit=True, zeroScaleOrient=True)
def set_joint_orient(jnt, orient_axis, secondary_orient_axis=None, **kwargs):
"""
Sets the joint orientation and scale orientation so that the axis indicated by the first letter in the
argument will be aligned with the vector from this joint to its first child joint. For example, if the
argument is "xyz", the x-axis will point towards the child joint. The alignment of the remaining two
joint orient axes are dependent on whether or not the -sao/-secondaryAxisOrient flag is used.
If the secondary_orient_axis flag is used, see the documentation for that flag for how the remaining
axes are aligned. In the absence of a user specification for the secondary axis orientation, the rotation
axis indicated by the last letter in the argument will be aligned with the vector perpendicular to first
axis and the vector from this joint to its parent joint. The remaining axis is aligned according the right
hand rule. If the argument is "none", the joint orientation will be set to zero and its effect to the
hierarchy below will be offset by modifying the scale orientation. The flag will be ignored if: A. the
joint has non-zero rotations when the argument is not "none". B. the joint does not have child joint, or
| |
# Copyright 2017 Reswitched Team
#
# Permission to use, copy, modify, and/or distribute this software for any purpose with or
# without fee is hereby granted, provided that the above copyright notice and this permission
# notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
# SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE
# OR PERFORMANCE OF THIS SOFTWARE.
# kernel.py: IDA loader for 5.0.0+ Horizon Kernel
from __future__ import print_function
import gzip, math, os, re, struct, sys
from struct import unpack as up, pack as pk
from io import BytesIO
# Python 2/3 compatibility shims: normalize range iteration, integer types and
# bytes <-> str conversions so the rest of the loader is version agnostic.
if sys.version_info[0] == 3:
    iter_range = range
    int_types = (int,)
    ascii_string = lambda b: b.decode('ascii')  # bytes -> native str
    bytes_to_list = lambda b: list(b)           # bytes -> list of ints
    list_to_bytes = lambda l: bytes(l)          # list of ints -> bytes
else:
    iter_range = xrange
    int_types = (int, long)
    ascii_string = lambda b: str(b)
    bytes_to_list = lambda b: map(ord, b)
    list_to_bytes = lambda l: ''.join(map(chr, l))
class BinFile(object):
    """Thin wrapper around a file-like object that can read little-endian
    struct formats in addition to plain byte counts."""

    def __init__(self, li):
        self._f = li

    def read(self, arg):
        """Read from the current offset.

        arg may be a struct format string (read little-endian; a
        single-element result is unwrapped), None (read to EOF) or an
        integer byte count.
        """
        if isinstance(arg, str):
            fmt = '<' + arg
            values = struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))
            return values[0] if len(values) == 1 else values
        if arg is None:
            return self._f.read()
        return self._f.read(arg)

    def read_from(self, arg, offset):
        """Read like read() but at the given absolute offset, restoring the
        previous file position afterwards."""
        saved = self.tell()
        try:
            self.seek(offset)
            result = self.read(arg)
        finally:
            self.seek(saved)
        return result

    def seek(self, off):
        self._f.seek(off)

    def close(self):
        self._f.close()

    def tell(self):
        return self._f.tell()
# ELF dynamic-section tags (d_tag values), in numeric order 0..30.
(DT_NULL, DT_NEEDED, DT_PLTRELSZ, DT_PLTGOT, DT_HASH, DT_STRTAB, DT_SYMTAB, DT_RELA, DT_RELASZ,
DT_RELAENT, DT_STRSZ, DT_SYMENT, DT_INIT, DT_FINI, DT_SONAME, DT_RPATH, DT_SYMBOLIC, DT_REL,
DT_RELSZ, DT_RELENT, DT_PLTREL, DT_DEBUG, DT_TEXTREL, DT_JMPREL, DT_BIND_NOW, DT_INIT_ARRAY,
DT_FINI_ARRAY, DT_INIT_ARRAYSZ, DT_FINI_ARRAYSZ, DT_RUNPATH, DT_FLAGS) = iter_range(31)
# Extension dynamic tags (OS-specific d_tag range).
DT_GNU_HASH = 0x6ffffef5
DT_VERSYM = 0x6ffffff0
DT_RELACOUNT = 0x6ffffff9
DT_RELCOUNT = 0x6ffffffa
DT_FLAGS_1 = 0x6ffffffb
DT_VERDEF = 0x6ffffffc
DT_VERDEFNUM = 0x6ffffffd
# Symbol types (low nibble of st_info).
STT_NOTYPE = 0
STT_OBJECT = 1
STT_FUNC = 2
STT_SECTION = 3
# Symbol bindings (high nibble of st_info).
STB_LOCAL = 0
STB_GLOBAL = 1
STB_WEAK = 2
# 32-bit ARM relocation types.
R_ARM_ABS32 = 2
R_ARM_TLS_DESC = 13
R_ARM_GLOB_DAT = 21
R_ARM_JUMP_SLOT = 22
R_ARM_RELATIVE = 23
# AArch64 relocation types.
R_AARCH64_ABS64 = 257
R_AARCH64_GLOB_DAT = 1025
R_AARCH64_JUMP_SLOT = 1026
R_AARCH64_RELATIVE = 1027
R_AARCH64_TLSDESC = 1031
# Dynamic tags that may legitimately appear more than once.
MULTIPLE_DTS = set([DT_NEEDED])
class Range(object):
    """Address interval [start, start + size)."""

    def __init__(self, start, size):
        self.start = start
        self.size = size
        self.end = start + size
        # Last address actually contained in the range (inclusive end).
        self._inclend = start + size - 1

    def overlaps(self, other):
        """True when this range and other share at least one address."""
        return self.start <= other._inclend and other.start <= self._inclend

    def includes(self, other):
        """True when other lies entirely inside this range."""
        return self.start <= other.start and other._inclend <= self._inclend

    def __repr__(self):
        return 'Range(0x%X -> 0x%X)' % (self.start, self.end)
class Segment(object):
    """A named loader segment holding a list of non-overlapping sections."""

    def __init__(self, r, name, kind):
        self.range = r
        self.name = name
        self.kind = kind
        self.sections = []

    def add_section(self, s):
        """Append section s, asserting it does not overlap any existing section."""
        for existing in self.sections:
            assert not existing.range.overlaps(s.range), '%r overlaps %r' % (s, existing)
        self.sections.append(s)
class Section(object):
    """A named address range inside a Segment."""

    def __init__(self, r, name):
        self.range = r
        self.name = name

    def __repr__(self):
        return 'Section(%r, %r)' % (self.range, self.name)
def suffixed_name(name, suffix):
    """Return name unchanged for suffix 0, otherwise 'name.suffix'."""
    return name if suffix == 0 else '%s.%d' % (name, suffix)
class SegmentBuilder(object):
    """Collects non-overlapping segments with named sections and can flatten
    them into a sorted list of (start, end, name, kind) parts."""

    def __init__(self):
        self.segments = []

    def add_segment(self, start, size, name, kind):
        """Register a new segment, asserting it overlaps no existing one."""
        new_range = Range(start, size)
        for existing in self.segments:
            assert not new_range.overlaps(existing.range), '%s: overlap: %08lx %08lx' % (name, start, size)
        self.segments.append(Segment(new_range, name, kind))

    def add_section(self, name, start, end=None, size=None):
        """Attach a named section to whichever segment fully contains it.
        Exactly one of end/size must be given; size == 0 is silently ignored."""
        assert end is None or size is None
        if size == 0:
            return
        if size is None:
            size = end - start
        assert size > 0
        section_range = Range(start, size)
        for segment in self.segments:
            if segment.range.includes(section_range):
                segment.add_section(Section(section_range, name))
                return
        assert False, "no containing segment for %r" % (name,)

    def flatten(self):
        """Emit every segment as sorted (start, end, name, kind) tuples; gaps
        between sections are named after the segment with numeric suffixes."""
        self.segments.sort(key=lambda seg: seg.range.start)
        parts = []
        for segment in self.segments:
            segment.sections.sort(key=lambda sec: sec.range.start)
            filler_idx = 0
            cursor = segment.range.start
            for section in segment.sections:
                if cursor < section.range.start:
                    # Unnamed gap before this section: emit a filler part.
                    parts.append((cursor, section.range.start, suffixed_name(segment.name, filler_idx), segment.kind))
                    filler_idx += 1
                parts.append((section.range.start, section.range.end, section.name, segment.kind))
                cursor = section.range.end
            if cursor < segment.range.end:
                # Unnamed tail after the last section.
                parts.append((cursor, segment.range.end, suffixed_name(segment.name, filler_idx), segment.kind))
                filler_idx += 1
        return parts
class ElfSym(object):
    """Decoded ELF symbol-table entry; splits the st_info/st_other bitfields."""

    def __init__(self, name, info, other, shndx, value, size):
        self.name = name
        self.shndx = shndx
        self.value = value
        self.size = size
        self.vis = other & 3        # visibility (st_other low two bits)
        self.type = info & 0xF      # STT_* type (st_info low nibble)
        self.bind = info >> 4       # STB_* binding (st_info high nibble)

    def __repr__(self):
        return 'Sym(name=%r, shndx=0x%X, value=0x%X, size=0x%X, vis=%r, type=%r, bind=%r)' % (
            self.name, self.shndx, self.value, self.size, self.vis, self.type, self.bind)
def is_valid_kernel_map_impl(ts, te, rs, re, ds, de, bs, be, i1, dn):
    """Sanity-check candidate kernel map offsets: text (ts/te), rodata (rs/re),
    data (ds/de), bss (bs/be), INI1 (i1) and _DYNAMIC (dn).
    Returns True only when the layout is ordered, page-aligned and consistent."""
    page_mask = 0xFFF
    # .text must start at 0, be non-empty, and end page-aligned.
    if ts != 0 or ts >= te or (te & page_mask):
        return False
    # .rodata follows .text, page-aligned at both ends, non-empty.
    if te > rs or (rs & page_mask) or rs >= re or (re & page_mask):
        return False
    # .data follows .rodata, page-aligned start, non-empty.
    if re > ds or (ds & page_mask) or ds >= de:
        return False
    # .bss follows .data, and the INI1 offset comes after .bss.
    if de > bs or bs > be or be > i1:
        return False
    # _DYNAMIC must live inside .data or .rodata.
    if not (ds <= dn < de or rs <= dn < re):
        return False
    return True
def is_valid_kernel_map(dat, ofs):
    """Interpret 0x30 bytes at ofs as a 32-bit kernel map and validate it.
    The last two fields (init_array start/end) are not validated."""
    fields = up('<IIIIIIIIIIII', dat[ofs:ofs + 0x30])
    return is_valid_kernel_map_impl(*fields[:10])
def is_valid_kernel_map_5x(dat, ofs):
    """Interpret 0x58 bytes at ofs as a 64-bit (5.x) kernel map and validate it.
    The trailing core-local field is not validated."""
    fields = up('<QQQQQQQQQQQ', dat[ofs:ofs + 0x58])
    return is_valid_kernel_map_impl(*fields[:10])
class Kernel80(object):
def __init__(self, fileobj):
f = BinFile(fileobj)
crt0 = bytes(f.read(0x2000))
kmap = -1
for mapoff in iter_range(0, len(crt0) - 0x30, 4):
if is_valid_kernel_map(crt0, mapoff):
textOffset, textEndOffset, rodataOffset, rodataEndOffset, \
dataOffset, dataEndOffset, bssOffset, bssEndOffset, ini1Offset, \
dynamicOffset, initArrayOffset, initArrayEndOffset = up("<12I", crt0[mapoff:mapoff+0x30])
f.seek(ini1Offset)
if bytes(f.read(4)) == b'INI1' or (0x100000 <= ini1Offset and ini1Offset <= 0x400000):
kmap = mapoff
break
elif mapoff <= len(crt0) - 0x58 and is_valid_kernel_map_5x(crt0, mapoff):
textOffset, textEndOffset, rodataOffset, rodataEndOffset, \
dataOffset, dataEndOffset, bssOffset, bssEndOffset, ini1Offset, \
dynamicOffset, corelocalOffset = up("<11Q", crt0[mapoff:mapoff+0x58])
kmap = mapoff
break
f.seek(0)
assert kmap != -1
b = 0x80060000
self.textoff = textOffset
self.textsize = textEndOffset - textOffset
self.rodataoff = rodataOffset
self.rodatasize = rodataEndOffset - rodataOffset
self.dataoff = dataOffset
self.datasize = dataEndOffset - dataOffset
flatsize = self.dataoff + self.datasize
self.binfile = f
self.dynamicoff = dynamicOffset
self.bssoff = bssOffset
self.bssend = bssEndOffset
self.bsssize = self.bssend - self.bssoff
self.segment_builder = builder = SegmentBuilder()
for off,sz,name,kind in [
(self.textoff, self.textsize, ".text", "CODE"),
(self.rodataoff, self.rodatasize, ".rodata", "CONST"),
(self.dataoff, self.datasize, ".data", "DATA"),
(self.bssoff, self.bsssize, ".bss", "BSS"),
]:
builder.add_segment(off, sz, name, kind)
# read dynamic
self.armv7 = False#(f.read_from('Q', self.dynamicoff) > 0xFFFFFFFF or f.read_from('Q', self.dynamicoff+0x10) > 0xFFFFFFFF)
self.offsize = 4 if self.armv7 else 8
f.seek(self.dynamicoff)
self.dynamic = dynamic = {}
for i in MULTIPLE_DTS:
dynamic[i] = []
for i in iter_range((flatsize - self.dynamicoff) // 0x10):
tag, val = f.read('II' if self.armv7 else 'QQ')
if tag == DT_NULL:
break
if tag in MULTIPLE_DTS:
dynamic[tag].append(val)
else:
dynamic[tag] = val
dynamicend = f.tell()
builder.add_section('.dynamic', self.dynamicoff, dynamicend)
# read .dynstr
if DT_STRTAB in dynamic and DT_STRSZ in dynamic:
f.seek(dynamic[DT_STRTAB])
self.dynstr = f.read(dynamic[DT_STRSZ])
else:
self.dynstr = b'\x00'
print('warning: no dynstr')
for startkey, szkey, name in [
(DT_STRTAB, DT_STRSZ, '.dynstr'),
(DT_INIT_ARRAY, DT_INIT_ARRAYSZ, '.init_array'),
(DT_FINI_ARRAY, DT_FINI_ARRAYSZ, '.fini_array'),
(DT_RELA, DT_RELASZ, '.rela.dyn'),
(DT_REL, DT_RELSZ, '.rel.dyn'),
(DT_JMPREL, DT_PLTRELSZ, ('.rel.plt' if self.armv7 else '.rela.plt')),
]:
if startkey in dynamic and szkey in dynamic:
builder.add_section(name, dynamic[startkey], size=dynamic[szkey])
self.needed = [self.get_dynstr(i) for i in self.dynamic[DT_NEEDED]]
# load .dynsym
self.symbols = symbols = []
f.seek(dynamic[DT_SYMTAB])
while True:
if dynamic[DT_SYMTAB] < dynamic[DT_STRTAB] and f.tell() >= dynamic[DT_STRTAB]:
break
if self.armv7:
st_name, st_value, st_size, st_info, st_other, st_shndx = f.read('IIIBBH')
else:
st_name, st_info, st_other, st_shndx, st_value, st_size = f.read('IBBHQQ')
if st_name > len(self.dynstr):
break
symbols.append(ElfSym(self.get_dynstr(st_name), st_info, st_other, st_shndx, st_value, st_size))
builder.add_section('.dynsym', dynamic[DT_SYMTAB], end=f.tell())
# .hash, .gnu.hash
if DT_HASH in dynamic and DT_GNU_HASH in dynamic and DT_SYMTAB in dynamic:
builder.add_section('.hash', dynamic[DT_HASH], end=dynamic[DT_GNU_HASH])
builder.add_section('.gnu.hash', dynamic[DT_GNU_HASH], end=dynamic[DT_SYMTAB])
self.plt_entries = []
self.relocations = []
locations = set()
if DT_REL in dynamic:
locations |= self.process_relocations(f, symbols, dynamic[DT_REL], dynamic[DT_RELSZ])
if DT_RELA in dynamic:
locations |= self.process_relocations(f, symbols, dynamic[DT_RELA], dynamic[DT_RELASZ])
if DT_JMPREL in dynamic:
# Note: 5.0 kernel doesn't have it
pltlocations = self.process_relocations(f, symbols, dynamic[DT_JMPREL], dynamic[DT_PLTRELSZ])
locations |= pltlocations
plt_got_start = min(pltlocations)
plt_got_end = max(pltlocations) + self.offsize
if DT_PLTGOT in dynamic:
builder.add_section('.got.plt', dynamic[DT_PLTGOT], end=plt_got_end)
| |
<gh_stars>1-10
from _sims4_collections import frozendict
import itertools
import weakref
from animation import AnimationContext
from animation.animation_constants import AUTO_EXIT_REF_TAG
from element_utils import build_critical_section, build_critical_section_with_finally, build_element, must_run
from routing import PathPlanContext
from sims.sim_info_types import Species, SpeciesExtended
from sims4.callback_utils import protected_callback
from sims4.utils import setdefault_callable
from singletons import UNSET
import animation
import animation.arb
import gsi_handlers.interaction_archive_handlers
import native.animation
import routing
import services
import sims4.log
import sims4.resources
# Cache for bone-name unhashing results (filled lazily elsewhere in the module).
_unhash_bone_name_cache = {}
# Module loggers for the animation system.
logger = sims4.log.Logger('Animation')
dump_logger = sims4.log.LoggerClass('Animation')
class AsmAutoExitInfo:
    """Holds the ASM tuple used for auto-exit handling along with the carry
    mask and lock flag; releases the animation context ref when cleared."""

    def __init__(self):
        self.clear()

    def clear(self):
        """Reset to the empty state, releasing the auto-exit reference held by
        the previously stored ASM, if any."""
        current_asm = getattr(self, 'asm', None)
        if current_asm is not None:
            # Third element of the stored tuple is the AnimationContext.
            current_asm[2].release_ref(AUTO_EXIT_REF_TAG)
        self.asm = None
        self.apply_carry_interaction_mask = 0
        self.locked = False
class _FakePostureState:
def __init__(self, *_, **__):
self._body = None
def get_carry_state(self, *_, **__):
return (False, False)
def get_carry_track(self, *_, **__):
pass
def get_carry_posture(self, *_, **__):
pass
@property
def surface_target(self):
pass
@property
def body(self, *_, **__):
return self._body
@body.setter
def body(self, value):
self._body = value
FAKE_POSTURE_STATE = _FakePostureState()
class StubActor:
    """Lightweight stand-in actor for the animation/posture systems.

    Wraps an optional ``template`` object and delegates most queries to it
    when present; with no template every query falls back to an inert default
    so animation setup can run against a placeholder object.
    """
    additional_interaction_constraints = None
    is_valid_posture_graph_object = False
    party = None

    def __init__(self, guid, template=None, debug_name=None, parent=None, species=None):
        self.id = guid
        self.template = template
        # Species resolution order: explicit argument > template > HUMAN.
        if species is not None:
            self._species = species
        elif template is not None:
            self._species = template.species
        else:
            self._species = Species.HUMAN
        self.debug_name = debug_name
        self.parent = parent
        self.asm_auto_exit = AsmAutoExitInfo()
        self.routing_context = PathPlanContext()
        zone_id = services.current_zone_id()
        routing_surface = routing.SurfaceIdentifier(zone_id or 0, 0, routing.SurfaceType.SURFACETYPE_WORLD)
        self.routing_location = routing.Location(sims4.math.Vector3.ZERO(), sims4.math.Quaternion.IDENTITY(), routing_surface)

    def __repr__(self):
        return 'StubActor({})'.format(self.debug_name or self.id)

    def ref(self, callback=None):
        """Return a weak reference to this stub; *callback* is error-guarded."""
        return weakref.ref(self, protected_callback(callback))

    def resolve(self, cls):
        # Stubs resolve to themselves regardless of the requested class.
        return self

    def is_in_inventory(self):
        return False

    def is_in_sim_inventory(self, sim=None):
        return False

    @property
    def LineOfSight(self):
        if self.template is not None:
            return self.template.lineofsight_component

    @property
    def parts(self):
        if self.template is not None:
            return self.template.parts

    @property
    def is_part(self):
        if self.template is not None:
            return self.template.is_part
        return False

    @property
    def part_suffix(self):
        if self.template is not None:
            return self.template.part_suffix

    def is_mirrored(self, *args, **kwargs):
        if self.template is not None:
            return self.template.is_mirrored(*args, **kwargs)
        return False

    @property
    def location(self):
        # Stubs always report an identity transform on the current zone's
        # world routing surface.
        zone_id = services.current_zone_id()
        routing_surface = routing.SurfaceIdentifier(zone_id or 0, 0, routing.SurfaceType.SURFACETYPE_WORLD)
        return sims4.math.Location(sims4.math.Transform(), routing_surface)

    @property
    def transform(self):
        return self.location.transform

    @property
    def position(self):
        return self.transform.translation

    @property
    def orientation(self):
        return self.transform.orientation

    @property
    def forward(self):
        return self.orientation.transform_vector(sims4.math.Vector3.Z_AXIS())

    @property
    def routing_surface(self):
        return self.location.routing_surface

    @property
    def intended_transform(self):
        # A stub never moves, so intended == current for all intended_* props.
        return self.transform

    @property
    def intended_position(self):
        return self.position

    @property
    def intended_forward(self):
        return self.forward

    @property
    def intended_routing_surface(self):
        return self.routing_surface

    @property
    def is_sim(self):
        if self.template is not None:
            return self.template.is_sim
        return False

    @property
    def rig(self):
        if self.template is not None:
            return self.template.rig

    @property
    def species(self):
        return self._species

    @property
    def extended_species(self):
        return SpeciesExtended(self._species)

    @property
    def age(self):
        # Fix: the original class body also assigned ``age = UNSET`` as a
        # class attribute near the top; that assignment was dead code because
        # this property definition replaced it in the class namespace, so the
        # redundant attribute has been removed.
        return UNSET

    def get_anim_overrides(self, target_name):
        if self.template is not None:
            return self.template.get_anim_overrides(target_name)
        return AnimationOverrides()

    def get_param_overrides(self, target_name, only_for_keys=None):
        if self.template is not None:
            return self.template.get_param_overrides(target_name, only_for_keys)

    @property
    def custom_posture_target_name(self):
        if self.template is not None:
            return self.template.custom_posture_target_name

    @property
    def route_target(self):
        import interactions.utils.routing
        return (interactions.utils.routing.RouteTargetType.OBJECT, self)

    @property
    def posture_state(self):
        if self.template is not None:
            return self.template.posture_state
        return FAKE_POSTURE_STATE

    @property
    def posture(self):
        return self.posture_state.body

    @posture.setter
    def posture(self, value):
        self.posture_state.body = value

    def get_social_group_constraint(self, si):
        import interactions.constraints
        return interactions.constraints.Anywhere()

    def filter_supported_postures(self, postures, *args, **kwargs):
        # Without a template, all requested postures are considered supported.
        if self.template is not None:
            return self.template.filter_supported_postures(postures, *args, **kwargs)
        return postures

    @property
    def definition(self):
        if self.template is not None:
            return self.template.definition

    def set_mood_asm_parameter(self, *args, **kwargs):
        pass

    def set_trait_asm_parameters(self, *args, **kwargs):
        pass

    def get_additional_scoring_for_surface(self, surface):
        return 0

    def get_carry_transition_constraint(self, sim, position, routing_surface):
        """Intersect the default geometric carry-transition constraints at *position*."""
        import objects.components.carryable_component
        import interactions.constraints
        constraints = objects.components.carryable_component.CarryableComponent.DEFAULT_GEOMETRIC_TRANSITION_CONSTRAINT
        constraints = constraints.constraint_non_mobile
        final_constraint = interactions.constraints.Anywhere()
        for constraint in constraints:
            final_constraint = final_constraint.intersect(constraint.create_constraint(None, None, target_position=position, routing_surface=routing_surface))
            # Early out: an invalid intersection cannot become valid again.
            if not final_constraint.valid:
                return final_constraint
        return final_constraint

    def get_routing_context(self):
        return self.routing_context
class AnimationOverrides:
    """Bundle of per-animation override data (parameters, props, vfx, sounds,
    manifests, balloons, ...) that can be layered on top of another instance
    and applied to an ASM via :meth:`override_asm`.

    NOTE(review): the merging below relies on the two-argument
    ``frozendict(base, extra)`` constructor from ``_sims4_collections`` --
    presumably "base updated with extra"; confirm against that module.
    """
    __slots__ = ('animation_context', 'sounds', 'props', 'balloons', 'vfx', 'manifests', 'prop_state_values', 'reactionlet', 'balloon_target_override', 'required_slots', 'params', 'alternative_props')
    def __init__(self, overrides=None, params=frozendict(), vfx=frozendict(), sounds=frozendict(), props=frozendict(), prop_state_values=frozendict(), manifests=frozendict(), required_slots=None, balloons=None, reactionlet=None, animation_context=None, alternative_props=None):
        # Note: the frozendict() defaults are shared across calls but are
        # immutable, so the usual mutable-default pitfall does not apply.
        if overrides is None:
            # No base: normalize every argument to an immutable/empty default.
            self.params = frozendict(params)
            self.vfx = frozendict(vfx)
            self.sounds = frozendict(sounds)
            self.props = frozendict(props)
            self.prop_state_values = frozendict(prop_state_values)
            self.manifests = frozendict(manifests)
            self.required_slots = required_slots or ()
            self.balloons = balloons or ()
            self.reactionlet = reactionlet or None
            self.animation_context = animation_context or None
            self.balloon_target_override = None
            self.alternative_props = alternative_props or {}
        else:
            # Layer `overrides` on top of the keyword arguments: dict-valued
            # fields are merged, while for scalar/tuple fields any truthy
            # value from `overrides` wins over the keyword argument.
            self.params = frozendict(params, overrides.params)
            self.vfx = frozendict(vfx, overrides.vfx)
            self.sounds = frozendict(sounds, overrides.sounds)
            self.props = frozendict(props, overrides.props)
            self.prop_state_values = frozendict(prop_state_values, overrides.prop_state_values)
            self.manifests = frozendict(manifests, overrides.manifests)
            self.required_slots = overrides.required_slots or (required_slots or ())
            self.balloons = overrides.balloons or (balloons or ())
            self.reactionlet = overrides.reactionlet or (reactionlet or None)
            self.animation_context = overrides.animation_context or (animation_context or None)
            self.balloon_target_override = overrides.balloon_target_override or None
            self.alternative_props = overrides.alternative_props or {}
    def __call__(self, overrides=None, **kwargs):
        """Return self, or a new instance with *overrides*/*kwargs* layered on
        top of this instance's data."""
        if not overrides and not kwargs:
            return self
        if kwargs:
            overrides = AnimationOverrides(overrides=overrides, **kwargs)
        return AnimationOverrides(overrides=overrides, params=self.params, vfx=self.vfx, sounds=self.sounds, props=self.props, prop_state_values=self.prop_state_values, manifests=self.manifests, required_slots=self.required_slots, balloons=self.balloons, reactionlet=self.reactionlet, animation_context=self.animation_context, alternative_props=self.alternative_props)
    def __repr__(self):
        # Only non-empty fields are shown, to keep log output compact.
        items = []
        for name in ('params', 'vfx', 'sounds', 'props', 'manifests', 'required_slots', 'balloons', 'reactionlet', 'animation_context'):
            value = getattr(self, name)
            if value:
                items.append('{}={}'.format(name, value))
        return '{}({})'.format(type(self).__name__, ', '.join(items))
    def __bool__(self):
        # True when any override payload is present. Note that
        # balloon_target_override and alternative_props are deliberately (or
        # at least historically) NOT considered here, although __eq__ does
        # compare them.
        if self.params or (self.vfx or (self.sounds or (self.props or (self.prop_state_values or (self.manifests or (self.required_slots or (self.balloons or self.reactionlet))))))) or self.animation_context:
            return True
        return False
    def __eq__(self, other):
        # Field-by-field equality, including balloon_target_override and
        # alternative_props (unlike __bool__).
        if self is other:
            return True
        if type(self) != type(other):
            return False
        elif self.params != other.params or (self.vfx != other.vfx or (self.sounds != other.sounds or (self.props != other.props or (self.prop_state_values != other.prop_state_values or (self.manifests != other.manifests or (self.required_slots != other.required_slots or (self.balloons != other.balloons or (self.reactionlet != other.reactionlet or (self.animation_context != other.animation_context or self.balloon_target_override != other.balloon_target_override))))))))) or self.alternative_props != other.alternative_props:
            return False
        return True
    def override_asm(self, asm, actor=None, suffix=None):
        """Push these overrides onto *asm*: parameters (optionally scoped to an
        actor), prop definitions, prop state values, vfx, sounds, and finally
        a fresh AnimationContext when one was overridden."""
        if asm is not None:
            if self.params:
                for (param_name, param_value) in self.params.items():
                    # A tuple key means (param_name, actor_name): the param is
                    # set on that specific actor when possible.
                    if isinstance(param_name, tuple):
                        (param_name, actor_name) = param_name
                    else:
                        actor_name = None
                    if actor_name is not None and asm.set_actor_parameter(actor_name, actor, param_name, param_value, suffix):
                        continue
                    # Fall back to an ASM-wide parameter.
                    asm.set_parameter(param_name, param_value)
            if self.props:
                for (prop_name, definition) in self.props.items():
                    alt_prop_def = self.alternative_props.get(prop_name, None)
                    asm.set_prop_override(prop_name, definition, alternative_def=alt_prop_def)
            if self.prop_state_values:
                for (prop_name, state_values) in self.prop_state_values.items():
                    asm.store_prop_state_values(prop_name, state_values)
            if self.vfx:
                for (vfx_actor_name, vfx_override_name) in self.vfx.items():
                    asm.set_vfx_override(vfx_actor_name, vfx_override_name)
            if self.sounds:
                for (name, key) in self.sounds.items():
                    # UNSET means "explicitly silence this sound".
                    sound_id = key.instance if key is not UNSET else None
                    asm.set_sound_override(name, sound_id)
            if self.animation_context:
                asm.context = AnimationContext()
def clip_event_type_name(event_type):
    """Return the symbolic name of a ClipEventType value, for logging.

    Falls back to ``'Unknown(<value>)'`` when the value does not match any
    attribute of ``animation.ClipEventType``.
    """
    matches = (name for name, value in vars(animation.ClipEventType).items() if value == event_type)
    return next(matches, 'Unknown({})'.format(event_type))
def create_run_animation(arb):
    """Build an element that hands *arb* to the zone's ARB accumulator when run.

    Returns None for an empty arb so callers can skip scheduling entirely.
    """
    if arb.empty:
        return None

    def _enqueue_arb(_):
        services.current_zone().arb_accumulator_service.add_arb(arb)
    return build_element(_enqueue_arb)
def flush_all_animations(timeline):
    """Drain every pending ARB on the current zone's accumulator service."""
    yield from services.current_zone().arb_accumulator_service.flush(timeline)
def flush_all_animations_instantly(timeline):
    """Drain every pending ARB, forcing the animations to play instantly."""
    yield from services.current_zone().arb_accumulator_service.flush(timeline, animate_instantly=True)
def get_actors_for_arb_sequence(*arb_sequence):
    """Resolve and collect every actor referenced by the arbs in *arb_sequence*.

    Elements may be single arbs or lists of arbs. Actor ids that no longer
    resolve through the object manager are skipped. Returns a set of actors;
    empty when no object manager is available.
    """
    resolved_actors = set()
    object_manager = services.object_manager()
    if object_manager:
        flattened = itertools.chain.from_iterable(
            entry if isinstance(entry, list) else (entry,) for entry in arb_sequence)
        for sub_arb in flattened:
            for actor_id in sub_arb._actors():
                actor = object_manager.get(actor_id)
                if actor is not None:
                    resolved_actors.add(actor)
    return resolved_actors
def disable_asm_auto_exit(sim, sequence):
    """Wrap *sequence* so *sim*'s ASM auto-exit is locked for its duration.

    The previous lock state is captured on entry and restored on exit, so
    nested uses compose correctly.
    """
    previous_state = None

    def _lock(_):
        nonlocal previous_state
        previous_state = sim.asm_auto_exit.locked
        sim.asm_auto_exit.locked = True

    def _restore(_):
        sim.asm_auto_exit.locked = previous_state
    return build_critical_section(_lock, sequence, _restore)
def unhash_bone_name(bone_name_hash:int, try_appending_subroot=True) -> str:
    """Best-effort reverse lookup of a bone-name hash via the loaded rigs.

    Tries every RIG resource; if none knows the hash and
    *try_appending_subroot* is set, retries with a '1' subroot suffix hashed
    on and strips the suffix from the result. Returns None when the hash
    cannot be resolved. Results (including failures) are memoized in
    ``_unhash_bone_name_cache``.

    Fix: the original (decompiled) body had a flattened for/else -- it
    unconditionally executed ``bone_name = None`` after the rig loop, so a
    successful lookup was discarded, the subroot fallback block was
    duplicated, and the function always cached None. The search/fallback
    structure is restored here.
    """
    if bone_name_hash not in _unhash_bone_name_cache:
        bone_name = None
        for rig_key in sims4.resources.list(type=sims4.resources.Types.RIG):
            try:
                bone_name = native.animation.get_joint_name_for_hash_from_rig(rig_key, bone_name_hash)
                break
            except KeyError:
                pass
        if bone_name is None and try_appending_subroot:
            # Retry with the subroot suffix '1' folded into the hash; a hit
            # means the original name is the result minus that suffix.
            bone_name_hash_with_subroot = sims4.hash_util.hash32('1', initial_hash=bone_name_hash)
            bone_name_with_subroot = unhash_bone_name(bone_name_hash_with_subroot, False)
            if bone_name_with_subroot is not None:
                bone_name = bone_name_with_subroot[:-1]
        _unhash_bone_name_cache[bone_name_hash] = bone_name
    return _unhash_bone_name_cache[bone_name_hash]
def partition_boundary_on_params(boundary_to_params):
    """Determine which parameter keys actually discriminate between boundaries.

    *boundary_to_params* maps each boundary to an iterable of parameter
    dicts. A key is "unique" (discriminating) when fixing it to different
    values yields different per-boundary sets of remaining parameters.
    Returns a dict mapping every boundary to the same set of unique keys.

    NOTE(review): this uses ``sims4.collections.frozendict`` while the module
    header imports ``frozendict`` from ``_sims4_collections`` -- presumably
    the same type; confirm.
    """
    # Collect, per key, every value it takes across all parameter dicts.
    ks_to_vs = {}
    for params in set(itertools.chain(*boundary_to_params.values())):
        for (k, v) in params.items():
            vs = setdefault_callable(ks_to_vs, k, set)
            vs.add(v)
    def get_matching_params_excluding_key(k, v):
        # For each boundary, the set of parameter dicts compatible with k=v
        # (missing k counts as a match), with k itself removed.
        results = []
        for (boundary, param_sets) in boundary_to_params.items():
            valid_params = set()
            for params in param_sets:
                vp = params.get(k, v)
                if vp == v:
                    valid_params.add(sims4.collections.frozendict({kf: vf for (kf, vf) in params.items() if kf != k}))
            results.append((boundary, valid_params))
        return results
    # A key is discriminating as soon as two of its values produce different
    # boundary/param partitions.
    unique_keys = set()
    for (k, vs) in ks_to_vs.items():
        matching_params = None
        for v in vs:
            matching_params_v = get_matching_params_excluding_key(k, v)
            if matching_params is None:
                matching_params = matching_params_v
            elif matching_params != matching_params_v:
                unique_keys.add(k)
                break
    # Every boundary shares the same (aliased) set of unique keys.
    boundary_param_sets = {boundary: unique_keys for boundary in boundary_to_params}
    return boundary_param_sets
def with_event_handlers(animation_context, handler, clip_event_type, sequence=None, tag=None):
    """Run *sequence* with *handler* registered for *clip_event_type* on
    *animation_context*, releasing the registration afterwards even on error.
    """
    registration = None

    def _register(_):
        nonlocal registration
        registration = animation_context.register_event_handler(handler, clip_event_type, tag=tag)

    def _release(_):
        if registration is not None:
            registration.release()
    return build_critical_section(_register, sequence, _release)
def get_release_contexts_fn(contexts_to_release, tag):
    """Return a one-argument callback that releases *tag* on every context in
    *contexts_to_release* (argument is ignored; suits element callbacks)."""
    def _release_all(_):
        for animation_context in contexts_to_release:
            animation_context.release_ref(tag)
    return _release_all
def release_auto_exit(actor):
    """Clear the auto-exit ASM for every sim participating in *actor*'s
    auto-exit ASM.

    Returns the list of animation contexts (asm tuple index 2) whose
    AUTO_EXIT reference the caller must still release.
    """
    pending_contexts = []
    for participant in actor.asm_auto_exit.asm[1]:
        if participant.is_sim and participant.asm_auto_exit.asm is not None:
            pending_contexts.append(participant.asm_auto_exit.asm[2])
            participant.asm_auto_exit.asm = None
    return pending_contexts
def get_auto_exit(actors, asm=None, interaction=None, required_actors=()):
arb_exit = None
contexts_to_release_all = []
for actor in actors:
if actor.is_sim:
if | |
#!/usr/bin/python
import os
import getopt
import sys
import bz2
import subprocess
import numpy as np
import develop as d
import coding_theory as code
from walker_updated import isNumber, parseDir
from coding_theory import make_prototyped_random_codes #make_prototyped_random_codes
import scipy.io as sio
########################################################################################################################
# Randdom Code Analysis: code to test the random code NNs for generalisation and calc some stats
#
########################################################################################################################
# --- run configuration (overridable from the command line below) ---
noName = 1
includeTop = 1 #0#1#0 # when 0, skip the directory walker was started in (avoids duplicates)
outSize = 0 # set from the command line to override the output layer size used for selectivity tests
verbose = 0
doAnalysis = False
doLog = True # the code uses the compressed log first, then falls back to the plain log; use these flags to force one or the other
doCompressedLog = True
# autoStop: hack to avoid counting local codes in the output layer
autoStop=True
categories = True
beParanoid = True # do we really want to double-check all the code?
# Parse command-line parameters; exits with status 1 on a bad option.
try:
    opts, args = getopt.getopt(sys.argv[1:],"n:i:o:v:a:",["name=","includeTop=", "outSize=", "verbose=", "doAnalysis="])
except getopt.GetoptError:
    print("{0}: [-n|--name=<name>] [-i|--includeTop=<include top level dir?>] [-o|--outSize=<output layer size>] [-v|--verbose=<be verbose?>] [-a|--doAnalysis=<do analysis?>]".format(sys.argv[0]))
    sys.exit(1)
for opt,arg in opts:
    if opt in ('-n', '--name'):
        noName = 0
        name = arg
        #print("{}".format(name))
    elif opt in ('-i', '--includeTop'):
        includeTop = int(arg)
    elif opt in ('-o', '--outSize'):
        outSize = int(arg)
    elif opt in ('-v', '--verbose'):
        verbose = int(arg)
    elif opt in ('-a', '--doAnalysis'):
        doAnalysis = True
# Every directory under the current working directory is a candidate run dir.
dirlist = [x[0] for x in os.walk(os.getcwd())]
print(dirlist)
if includeTop != 1:
    dirlist = dirlist[1:]
# Per-parameter-line result accumulators, filled in by parseDir().
results = {}
hamresults = {}
corr90results = {}
corr50results = {}
uncompressedFilename = 'output.log'
outputFilename = 'output.log.bz2'
analysisFilename = 'analysis.log'
# NOTE(review): 'Selecitivy' looks misspelled, but it must match the marker
# lines written into analysis.log by the analysis script -- confirm there
# before correcting the spelling.
select = 'Selecitivy'
noOfTestData = 1000
########################################################################################################################
# functions
########################################################################################################################
# def isNumber(n):
# try:
# p = float(n)
# except ValueError:
# return False
# return True
def parseDir(directory):
    """Parse one run directory and accumulate its stats into the module-level
    result dictionaries (results, hamresults, corr90results, corr50results).

    Reads the (possibly bz2-compressed) training log to recover the run
    parameters, the input minimum Hamming distance and the "correct"
    percentages, runs the external analysis script when analysis.log is
    missing (or doAnalysis is forced), then counts selectivity lines in
    analysis.log.

    Returns (paramLine, count) on success, or None when no parameters could
    be recovered from the directory.

    NOTE(review): this definition shadows ``parseDir`` imported from
    ``walker_updated`` at the top of the file -- confirm which is intended.
    """
    if verbose:
        print(directory)
    params = None
    ham = None
    correct = None
    # Initialize here so these are always bound when appended to the result
    # dicts, even when no log file was found or parsed.
    corr90 = None
    corr50 = None
    bzFile = os.path.join(directory, outputFilename)
    unFile = os.path.join(directory, uncompressedFilename)
    anFile = os.path.join(directory, analysisFilename)
    cwd = os.getcwd()
    doingBz = False
    # Prefer the compressed log, then fall back to the plain-text log.
    outputFile = None
    if os.path.isfile(bzFile) and doCompressedLog:
        outputFile = bz2.BZ2File(bzFile, 'r')
        doingBz = True
    elif os.path.isfile(unFile) and doLog:
        outputFile = open(unFile, 'r')
    if outputFile:
        isOldFormat = 1
        for rawline in outputFile:
            # BZ2File yields bytes; decode so both paths see str lines.
            if doingBz:
                line = str(rawline, 'utf-8')
            else:
                line = rawline
            if line.startswith('Running with'):
                params = line.replace(',', '').split(None)
                isOldFormat = 0
                print(params)
                HLN = params[3]
            if isOldFormat and line.startswith('Layer 0 has'):
                # Older log format: rebuild a params list in the new layout.
                if verbose == 1:
                    print(line)
                params = line.replace(' ', ' ').split(None)
                params = ['m', 'e', 'h', params[3], params[1], params[6]]
            if line.startswith('Input min Hamming'):
                ham = line.replace(':', '').split(None)[-1]
                print(line)
            if line.startswith('Training data:'):
                # First occurrence carries the 90% figure, second the 50%.
                if corr90 == None:
                    corr90 = line.replace(':', '').split(None)[4]
                else:
                    corr50 = line.replace(':', '').split(None)[4]
                print(line)
                break
        outputFile.close()
    # Fix: bail out BEFORE indexing params below. The original code ran
    # `HLN = params[3]` first, raising TypeError for directories without a
    # parseable log instead of reporting the problem and returning.
    if params == None:
        print('unable to find params for directory {}'.format(directory))
        return
    if outSize == 0:
        HLN = params[3]
        if verbose == 1:
            print(params)
            print(HLN)
            print(correct)
    else:
        # Command-line override of the output layer size.
        HLN = str(outSize)
    if verbose == 1:
        print('HLN={}'.format(HLN))
    # Compact identifier built from the numeric params, '.' replaced by 'p'.
    paramLine = 'p'.join([n.replace('.', 'p') for n in params if isNumber(n)])
    if not os.path.isfile(anFile) or doAnalysis:
        print(paramLine)
        os.chdir(directory)
        print(directory)
        os.system('pwd')
        # NOTE(review): list-form subprocess.call does not expand '~' and
        # passes '2>&1', '|', 'tee', 'analysis.log' as literal arguments to
        # the script -- no shell redirection happens, so analysis.log is not
        # produced by this call. The commented os.system variant below shows
        # the shell behaviour that was probably intended; confirm before
        # relying on this branch.
        subprocess.call(['~/neuralNetworks/code/NN_analysis_script1.py', '-n', 'Random', '-H', str(HLN), '2>&1', '|', 'tee', 'analysis.log'])
        # os.system("~/neuralNetworks/code/NN_analysis_script1.py -n Random -H " + HLN + "2>&1 | tee analysis.log")
        os.chdir(cwd)
    count = 0
    if autoStop:
        # Stop counting once the output layer is reached.
        layerStop = int(HLN) / 100
        if verbose:
            print('auto-stopping at layer {}'.format(layerStop))
    if os.path.isfile(anFile):
        with open(os.path.join(directory, analysisFilename), 'r') as aFile:
            for line in aFile:
                if line.startswith(select):
                    count = count + 1
                if line.startswith('currently on neuron:' + str(layerStop) + 'p'):
                    break
    # Accumulate into the module-level result dicts, creating the lists on
    # first sight of this paramLine.
    try:
        results[paramLine].append(count)
        hamresults[paramLine].append(ham)
        corr90results[paramLine].append(corr90)
        corr50results[paramLine].append(corr50)
    except KeyError:
        results[paramLine] = [count]
        hamresults[paramLine] = [ham]
        corr90results[paramLine] = [corr90]
        corr50results[paramLine] = [corr50]
    return paramLine, count
def hamming_distance(a, b):
    """Return the number of positions at which *a* and *b* differ.

    Positions are taken from ``a``; ``b`` must be at least as long as ``a``
    (a shorter ``b`` raises IndexError, matching the original behaviour).
    """
    distance = 0
    for i in range(len(a)):
        if a[i] != b[i]:
            distance += 1
    return distance


def all_hamming_distances(code):
    """Return Hamming distances for every ordered pair of unequal codewords.

    Each unordered pair contributes twice (a vs b and b vs a). Codewords are
    expected to be numpy arrays: ``(a != b).any()`` skips identical pairs.
    Fix: removed an unused ``minHammingDistance`` local left over from
    ``min_hamming_distance``.
    """
    distances = []
    for a in code:
        for b in code:
            if (a != b).any():
                distances.append(hamming_distance(a, b))
    return distances


def min_hamming_distance(code):
    """Return the minimum Hamming distance between any two distinct codewords.

    Falls back to ``len(code[0])`` (the maximum possible distance) when all
    codewords are identical or only one codeword is given.
    """
    minHammingDistance = len(code[0])
    for a in code:
        for b in code:
            if (a != b).any():
                tmp = hamming_distance(a, b)
                if tmp < minHammingDistance:
                    minHammingDistance = tmp
    return minHammingDistance
def analyses_classes(X, noOfTrainData, noOfExamples):
    """Print per-class Hamming-distance and activation statistics for the
    codewords in *X*, assuming classes are contiguous blocks of
    *noOfExamples* rows.

    NOTE(review): the slicing looks off by one -- i == 0 and i == 1 both
    select X[0:noOfExamples], and the final class (rows beyond the last
    index) is never analysed. Also, the "Std of Hamming distance" line
    actually prints the std of the per-position activation counts
    (np.std(sum(this_class))), not of `distances`. Confirm intent before
    relying on these figures.
    """
    train_T_indices = [x for x in range(0, noOfTrainData, noOfExamples)]
    for i in range(0, len(train_T_indices)):
        print('i = {}'.format(i))
        if i == 0:
            this_class=X[0:train_T_indices[1]]
        else:
            this_class=X[train_T_indices[i - 1]:train_T_indices[i]]
        # Return value unused; presumably the helper prints its own output.
        code.min_hamming_distance(this_class)
        distances = all_hamming_distances(this_class)
        print('Mean Hamming distance for class {} is {}'.format(i,np.mean(distances)))
        print('Std of Hamming distance for class {} is {}'.format(i, np.std(sum(this_class))))
        print('Mean no. of activiations per class {}'.format(np.mean(sum(this_class))))
        print('Example vector weight per class {}'.format(sum(this_class[0])))
    return
def test_code(noOfTestData, lenOfInput, p, weight, k, decay, verbose):
    """Generate a fresh prototyped random code and print per-class statistics.

    Relies on the module-level ``noOfPrototypes`` global to size the classes.
    NOTE: despite the name this is a manual diagnostic helper, not a pytest
    test (pytest would try to collect it and fail on the required arguments).
    """
    # noOfTestData = 20
    noOfExamples = noOfTestData // noOfPrototypes
    print('{} examples per class'.format(noOfExamples))
    Test_X = code.make_prototyped_random_codes(M=noOfTestData, n=lenOfInput, p=p, weight=weight,
                                               k=k, symbolList=None, verbose=verbose, decay_templates=decay)
    analyses_classes(X=Test_X, noOfTrainData=len(Test_X), noOfExamples=noOfExamples)
    return
########################################################################################################################
# things we read in from the output.log file
#########################################################################################################################
# Walk every candidate run directory and accumulate its stats into the
# module-level result dictionaries.
for directory in dirlist:
    parseDir(directory)
    #print('{0}:{1}'.format(directory,parseDir(directory)))
#We have 10 prototypes for 500 codes, giving 50 examples of each
#Prototypes have 10 blocks of length 30 each
#decaying prototype by 0.0%
decay=0.0  # fraction by which prototype templates are decayed when generating codes
noOfPrototypes = 10  # number of prototype classes in the generated codes
noOfCategories=noOfPrototypes
########################################################################################################################
# loading the model
########################################################################################################################
def GeneralisationTest(noOfTestData=500, doPredictions=1, doMatLabResults=False):
    """Build a test set disjoint from the training set and score the saved model.

    Loads the training inputs/targets and the trained Keras model from disk,
    derives the code parameters (block length, weights) from the training
    data, generates matching test codes, optionally evaluates the model on
    both sets, and saves the generated test set to .npy files.

    Returns (Test_X, Test_T).
    """
    X= np.load("allInputDataCurrent.npy")
    T= np.load("allOutputDataCurrent.npy")
    from keras.models import load_model
    model = load_model("Random_model.h5")
    # Derive the code geometry from the training data itself.
    noOfTrainData = len(X)
    assert len(X) == len(T)
    lenOfInput = len(X[3])
    lenOfOutput = len(T[3])
    lenOfBlock = int(lenOfInput / noOfPrototypes)
    noOfExamples = noOfTrainData //noOfPrototypes
    noOfNewExamples = noOfTestData // noOfPrototypes
    lenOfR = lenOfInput - lenOfBlock
    weightOfX = int(sum(X[0]))
    weightOfR = weightOfX - lenOfBlock
    inverseWeightOfR = lenOfR - weightOfR
    denom=lenOfInput-(lenOfInput/noOfPrototypes) # denom is the floating point length of R
    assert int(denom) == lenOfR
    fractionalWeightOfR = weightOfR / denom
    fractionalInverseWeightOfR = inverseWeightOfR / denom
    weight = [fractionalWeightOfR, fractionalInverseWeightOfR]
    weightOfT = int(sum(T[3]))
    # 1-hot outputs when the output vector length equals the prototype count.
    if lenOfOutput == noOfPrototypes:
        use1HOT = 1
    else:
        use1HOT = 0
    if categories == True:
        noOfOutputs = noOfPrototypes
        if use1HOT == 1:
            sizeOfOutput = noOfPrototypes
            print('Overwriting output vector size to length {}'.format(noOfPrototypes))
    else:
        noOfOutputs = noOfTrainData
    # NOTE(review): noOfOutputs, sizeOfOutput, weightOfT, noOfExamples and
    # noOfNewExamples are computed but never used below -- presumably
    # leftovers from earlier experiments.
    print('Random vector, R, has weight {0}'.format(weightOfR))
    #Test_X = code.make_prototyped_random_codes(M=noOfTestData, n=lenOfInput, p=noOfPrototypes, weight=[fractionalWeightOfR],
    #                                 k=2, symbolList=None, verbose=verbose, decay_templates=decay)
    #### testing code
    #this gives you matlab files of the codes so you can play with them if you want
    if doMatLabResults:
        Test_X = code.make_prototyped_random_codes(M=500, n=lenOfInput, p=noOfPrototypes, weight=[fractionalWeightOfR],
                                     k=2, symbolList=None, verbose=verbose, decay_templates=decay)
        sio.savemat('Test_X5000.mat', {'Test_X':Test_X})
        R = code.make_random_codes(M=500, n=501, weight=weight, k=2,symbolList=[1,0], verbose=True)
        sio.savemat('R3.mat', {'R':R})
    #######
    # Generate the disjoint test inputs (and the combined set, unused here).
    Test_X, All_X = code.get_test_x(X=X, noOfTestData=noOfTestData, lenOfInput=lenOfInput, noOfPrototypes=noOfPrototypes,
                                    weight=[fractionalWeightOfR, fractionalInverseWeightOfR], k=2, symbolList=None, verbose=verbose, decay_templates=decay)
    ###### get T
    ######
    ## Now we get the correct sized Test_T
    Test_T, prototypeOutputCodes = code.get_test_t(T,
                                                   noOfPrototypes=noOfPrototypes,
                                                   noOfTestData=noOfTestData,
                                                   lenOfOutput=len(T[0]),
                                                   verbose=False)
    ## This sections figures out how good the model is by getting it to predict the answers for the train
    ## and test sets
    if doPredictions == 1:
        d.prediction_tester(model, X, T, name='Training data')
        if noOfTestData != 0:
            d.prediction_tester(model, Test_X, Test_T, name='Test data', example_no=0)
    np.save("GeneralisantionInputDataTest.npy", Test_X)
    np.save("GeneralisationOutputDataTest.npy", Test_T)
    return Test_X, Test_T
### now find hamming distances per class:
#test_T_indices = [x for x in range(0, noOfTestData, noOfNewExamples)]
#train_T_indices = [x for x in range(0, noOfTrainData, noOfExamples)]
# Run the generalisation test once at script level; return value discarded.
_=GeneralisationTest(noOfTestData=500, doPredictions=1, doMatLabResults=False)
# NOTE(review): exit(1) reports FAILURE to the calling shell even when the
# run succeeded -- confirm whether a non-zero status is intended (everything
# after this point is commented out).
exit(1)
#
# #distances = all_hamming_distances(this_class,prototypeOutputCodes[5])
#
# analyses_classes(X=Test_X, noOfTrainData=noOfTestData, noOfExamples=noOfNewExamples)
# analyses_classes(X=X, noOfTrainData=noOfTrainData, noOfExamples=noOfExamples)
#
# analyses_classes(X=X, noOfTrainData=noOfTrainData, noOfExamples=noOfExamples)
#
# i=4
# ####### more tests
#
# # nx = noOfTestData
# # lx = lenOfInput
# # np = nPrototypes
# # w(R) --> fractional weight of R
# # decay
# nx = 500
# lx = 500
# nP = 10
# fractionalWeightOfR = 1/2.
# test_code(noOfTestData=nx, lenOfInput=lx, p=nP, weight=[fractionalWeightOfR], k=2, decay=0.0, verbose=True)
#
#
#
# test_code(noOfTestData=500, lenOfInput=250, p=10, weight=[fractionalWeightOfR], k=2, decay=0.0, verbose=True)
#
#
#
# ######
# for i in range(1, len(train_T_indices)):
# print(i)
# code.min_hamming_distance(X[train_T_indices[i-1]:train_T_indices[i]])
# distances=all_hamming_distances(X[train_T_indices[i-1]:train_T_indices[i]])
# print(np.mean(distances))
#
#
#
#
#
#
# verbose=True
# #!!! THIS IS ALL FUCKED!
# P = code.make_prototype_codes(M=noOfPrototypes, n=lenOfInput, setting=1, k=2,symbolList=[1,0], verbose=verbose)
# newN = int(lenOfInput - lenOfBlock)
# R = np.zeros([noOfTrainData, newN])
# p=noOfPrototypes
# n=lenOfInput
#
#
# #R = make_random_codes(2, 500, weight=[25/50.], k=2,symbolList=[1,0], verbose=True)
#
# R = code.make_random_codes(2, 500, weight=weight, k=2,symbolList=[1,0], verbose=True)
#
# n = 0
# for p in range(noOfPrototypes):
# for z in range(noOfExamples):
# mask = P[p] == 0.
# R[n,:] = X[n][mask]
# n = n + 1 # n is the number of codewords
#
# denom=lenOfInput-(lenOfInput/noOfPrototypes)
# weightOfR = (1/3*denom)/denom
#
# Y=code.make_random_codes(M=noOfTrainData, n=newN, X=G, weight=[inverseWeightOfR], k=2,symbolList=None, verbose=False)
#
# inverseWeightOfR = 2/3
# Test_X = code.make_prototyped_random_codes(M=noOfTestData, n=lenOfInput, p=noOfPrototypes, weight=[weightOfR],
# k=2, symbolList=None, verbose=True, decay_templates=decay)
# #X = | |
-63486,
134230706,
128868,
-1,
134294343,
128867,
-63484,
134228602,
128863,
-1,
134420193,
128782,
134613005,
133902415,
-1,
134678537,
134551781,
-1,
-63480,
134614168,
128840,
-1,
134676792,
128809,
134875147,
134612581,
128831,
-1,
134808261,
128832,
-63476,
134821325,
128845,
-1,
134949690,
128849,
135137300,
134553487,
-1,
135202834,
135074058,
128784,
135331839,
135135939,
-1,
-63471,
135201573,
128786,
-1,
135267103,
128785,
-63469,
135144720,
128881,
-1,
135398940,
128824,
135596059,
135075248,
-1,
135661592,
135537482,
-1,
-63465,
135607633,
128878,
-1,
135670908,
128854,
135921663,
135595253,
-1,
-63462,
135800410,
128883,
-1,
135859990,
128882,
136054817,
135531766,
-1,
136120352,
135993832,
-1,
136185887,
136064782,
128865,
-1,
136118981,
128866,
-1,
136131105,
128864,
-1,
136054540,
128880,
136448037,
135993822,
-1,
136513572,
136380728,
128796,
-1,
136446661,
128797,
-1,
136446674,
128801,
136710184,
136394528,
-1,
-63449,
136657144,
128842,
-1,
136720743,
128772,
136906795,
136653142,
-1,
-63446,
136848136,
128768,
-1,
136907529,
128833,
137103406,
136847903,
-1,
-63443,
137048750,
128789,
-1,
137106693,
128879,
137300017,
137043075,
-1,
-63440,
137241522,
128851,
-1,
137301903,
128810,
137496628,
137237532,
-1,
-63437,
137442401,
128841,
-1,
137503779,
128794,
-63435,
137442659,
128853,
-63434,
137634813,
128838,
-63433,
137696104,
128770,
-1,
137760689,
128771,
-63431,
127211558,
128126,
-63430,
137892184,
983054,
-1,
137954887,
9200,
138152003,
127148405,
-1,
138217535,
138096719,
128028,
-63426,
138155976,
128260,
-1,
138219683,
128246,
138477567,
138155036,
-1,
-63423,
138359870,
128551,
-63422,
138423478,
128544,
-1,
138481920,
128162,
138676296,
138097524,
-1,
138741831,
138621518,
-1,
-63418,
138683242,
128663,
-1,
138741409,
127975,
-1,
138676127,
127814,
139003979,
138620798,
-1,
-63414,
138947619,
9954,
-1,
139011331,
128562,
139200590,
138946239,
-1,
-63411,
139143075,
983194,
-1,
139200755,
983195,
139397201,
139141007,
-1,
-63408,
139333938,
127944,
-1,
139397318,
128657,
139593811,
139332358,
983053,
-1,
139535045,
983052,
-63404,
139538868,
128095,
-1,
139661604,
128673,
139856346,
108994447,
-1,
139921678,
139794268,
-1,
139987203,
139855337,
-1,
140052685,
139927001,
-1,
140118123,
139997263,
-1,
140183650,
140059890,
-1,
-63396,
140116798,
94105,
-63395,
140182329,
94104,
-63394,
140247863,
94103,
-63393,
140313389,
94102,
-63392,
140378919,
94101,
-63391,
140444453,
94100,
-1,
140509983,
94099,
140707942,
140127422,
-1,
-63388,
140651778,
93994,
-63387,
140711358,
93998,
-1,
140772166,
94007,
140970089,
140648003,
-1,
-63384,
140917094,
93980,
-1,
140968774,
93978,
-63382,
140914874,
93966,
-1,
141099846,
93962,
141297784,
140058997,
-1,
141363311,
141244735,
-1,
-63378,
141301182,
93997,
-1,
141361990,
93996,
141559922,
141304181,
-1,
-63375,
141497790,
93973,
-1,
141558598,
93972,
141756533,
141497372,
-1,
-63372,
141694398,
93988,
-1,
141755206,
93987,
141953143,
141689670,
93968,
-1,
141896973,
94032,
-1,
141891006,
93969,
142149764,
141244901,
-1,
142215295,
142096869,
-1,
142280829,
142159038,
-1,
-63364,
142227814,
94017,
-1,
142279494,
94014,
-63362,
142227814,
94016,
-1,
142410566,
94013,
142608514,
142159038,
-1,
-63359,
142546366,
94006,
-1,
142607174,
94012,
-63357,
142546366,
94005,
-1,
142738246,
94011,
142936207,
142096822,
-1,
143001739,
142880847,
-1,
143067273,
142945470,
-1,
-63352,
143005118,
94000,
-1,
143065926,
94009,
-63350,
143012026,
93964,
-1,
143196998,
93965,
-63348,
142943971,
93954,
-63347,
143336035,
93970,
-63346,
143399800,
93984,
-1,
143462235,
94001,
143657112,
142879433,
-1,
143722647,
143598834,
-1,
-63342,
143655742,
94111,
-63341,
143721271,
94110,
-63340,
143786797,
94109,
-63339,
143852327,
94108,
-63338,
143917855,
94107,
-1,
143983389,
94106,
-1,
143666624,
94002,
144246946,
143592694,
-1,
144312477,
144194021,
-1,
-63333,
144259559,
93995,
-63332,
144315838,
93999,
-1,
144376646,
94008,
144574624,
144252483,
-1,
-63329,
144521574,
93981,
-1,
144573254,
93979,
-63327,
144510234,
93967,
-1,
144704326,
93963,
144902314,
144180038,
-1,
144967849,
144845379,
-1,
145033383,
144908661,
-1,
-63322,
144970789,
93989,
-1,
145032006,
93971,
-63320,
144980539,
94015,
-1,
145170349,
93958,
-1,
144905648,
94019,
145426608,
144841859,
-1,
145492142,
145364400,
-1,
-63315,
145439078,
93977,
-1,
145490758,
93976,
-63313,
145439078,
93975,
-1,
145621830,
93974,
145819828,
145370302,
-1,
-63310,
145763586,
94004,
-63309,
145823166,
94003,
-1,
145883974,
94010,
146081977,
145757212,
-1,
146147512,
146019760,
-1,
-63305,
146085310,
93993,
-1,
146146118,
93992,
-1,
146080582,
93983,
146409660,
146027992,
-1,
-63301,
146346676,
93961,
-1,
146408262,
93960,
146606271,
146352354,
-1,
-63298,
146543653,
93986,
-1,
146604870,
93985,
146802882,
146548415,
-1,
-63295,
146742436,
93955,
-1,
146801478,
93952,
146999493,
146743183,
-1,
-63292,
146937278,
93957,
-1,
146998086,
93956,
147196104,
146937264,
-1,
-63289,
147133886,
94020,
-1,
147194694,
93990,
-63287,
147142947,
93991,
-63286,
147339040,
94018,
-63285,
147397496,
93982,
-63284,
147460788,
93959,
-1,
147523366,
93953,
147720441,
139998953,
-1,
147785943,
147658588,
94049,
147851474,
147718982,
94050,
147980287,
147792245,
94051,
-1,
147854364,
94052,
-63277,
147796852,
94056,
-63276,
147989322,
94053,
-63275,
148054547,
94057,
-63274,
148117340,
94055,
-1,
148180832,
94054,
148375776,
147718982,
94036,
148441306,
148316533,
94039,
-1,
148378652,
94040,
148572380,
148377440,
94067,
-1,
148508512,
94068,
-63267,
148517748,
94075,
-63266,
148641628,
94073,
-63265,
148706985,
94038,
-1,
148767558,
94037,
148965607,
148321140,
94058,
149031140,
148898630,
94059,
149159935,
148971893,
94060,
-1,
149034012,
94061,
-63259,
148976500,
94062,
-63258,
149168659,
94064,
-1,
149229601,
94063,
149424365,
148901728,
94045,
149489898,
149367130,
94071,
-1,
149432666,
94069,
149620972,
149430645,
94046,
-1,
149558300,
94047,
-1,
149559132,
94074,
149817585,
149365578,
94041,
-63249,
149762932,
94076,
-63248,
149824330,
94042,
-1,
149885608,
94048,
150079731,
149764415,
94065,
-1,
150017884,
94066,
150210805,
150025985,
94044,
-1,
150152010,
94043,
150341879,
150154324,
94072,
-1,
150284634,
94070,
150536191,
150282613,
94077,
-1,
150410268,
94078,
150604030,
147665490,
-1,
-63237,
150549098,
94096,
-63236,
150612812,
94095,
-63235,
150669200,
94098,
-1,
150733742,
94097,
150994943,
150547984,
-1,
150997250,
150874815,
-1,
-63231,
150943435,
94034,
-1,
150996597,
94035,
-1,
150931061,
94033,
151259402,
139922479,
-1,
151324935,
151200586,
-1,
-63226,
151268680,
128300,
-1,
151332713,
127908,
151584767,
151257088,
-1,
-63223,
151463031,
983226,
-1,
151522648,
983227,
151718157,
151200287,
-1,
-63220,
151653883,
128189,
-1,
151718106,
128656,
-1,
151658135,
127756,
151980415,
139857760,
-1,
152045911,
151923730,
-1,
152111414,
151983795,
-1,
152176948,
152051161,
-1,
152242456,
152121423,
-1,
152307989,
152178528,
67994,
-1,
152240837,
67995,
152439063,
152240966,
67992,
-1,
152371909,
67993,
-1,
152380234,
67996,
152635677,
152183157,
-1,
152701211,
152571744,
67980,
-1,
152634053,
67981,
152895487,
152634182,
67978,
-1,
152765125,
67979,
152963361,
152579262,
-1,
153028896,
152896326,
67987,
-1,
152961733,
67988,
-1,
152964960,
67989,
153225507,
152906120,
67982,
-1,
153158341,
67983,
153356582,
153164580,
-1,
-63195,
153294270,
67985,
-1,
153355078,
67990,
153553192,
153290534,
67974,
-1,
153486021,
67975,
-63191,
153500006,
67972,
-63190,
153630496,
67973,
-63189,
153692388,
67991,
-63188,
153757411,
67976,
-63187,
153822026,
67971,
-63186,
153886637,
67977,
-63185,
153951396,
67984,
-63184,
154015580,
67970,
-63183,
154080940,
67986,
-63182,
154144608,
67969,
-63181,
154209562,
67997,
-1,
154272582,
67968,
154533887,
152121403,
67998,
-1,
154403525,
67999,
154664959,
152046795,
-1,
154667348,
154538435,
-1,
154732860,
154611791,
-1,
-63174,
154673994,
68022,
-63173,
154734432,
68021,
-1,
154796870,
68020,
154995007,
154676414,
-1,
-63170,
154931040,
68017,
-1,
154993478,
68015,
155191618,
154935669,
-1,
-63167,
155127648,
68010,
-1,
155190086,
68009,
155388229,
155130660,
-1,
-63164,
155325886,
68013,
-1,
155386694,
68018,
155584839,
155321158,
68000,
-1,
155527764,
68016,
-63160,
155531622,
68004,
-63159,
155662112,
68005,
-63158,
155724168,
68011,
-63157,
155789540,
68019,
-63156,
155854563,
68007,
-63155,
155919178,
68003,
-63154,
155983789,
68008,
-63153,
156048548,
68012,
-63152,
156112732,
68002,
-63151,
156178092,
68014,
-63150,
156241760,
68001,
-63149,
156306714,
68023,
-1,
156370726,
68006,
156631039,
154608608,
-1,
-63146,
156511243,
68030,
-1,
156571873,
68031,
156764537,
151982519,
-1,
156830056,
156704217,
-1,
156895580,
156775198,
-1,
-63141,
156833214,
43749,
-1,
156894022,
43748,
157092191,
156839102,
-1,
-63138,
157035778,
43754,
-1,
157095358,
43753,
157288802,
157032821,
-1,
-63135,
157235558,
43747,
-1,
157295012,
43752,
157485413,
157224298,
-1,
-63132,
157423038,
43751,
-1,
157483846,
43750,
-63130,
157426506,
43745,
-63129,
157618016,
43744,
-1,
157682261,
43746,
157878643,
156775896,
-1,
157944178,
157820596,
-1,
158009711,
157877062,
-1,
158075246,
157942598,
-1,
-63123,
158020468,
43759,
-1,
158078812,
43757,
-1,
158020468,
43758,
-63120,
157955736,
43765,
-63119,
158283218,
43756,
-1,
158341291,
43755,
-1,
157882830,
43766,
158599542,
157811526,
-1,
-63115,
158540357,
43762,
-1,
158602798,
43761,
-63113,
158546119,
43764,
-63112,
158740487,
43763,
-1,
158796435,
43760,
-63110,
156709633,
128647,
-63109,
158936924,
983172,
-63108,
158999246,
128697,
-63107,
159064360,
128221,
-63106,
159129349,
127816,
-1,
159188630,
127830,
159386032,
151913286,
-1,
159451560,
159326581,
128104,
159517090,
159386939,
-1,
159582622,
159456729,
-1,
159648151,
159515462,
-1,
159713671,
159591614,
2126,
-63098,
159660517,
2129,
-1,
159716784,
2132,
159910281,
159658063,
2133,
-1,
159854671,
2120,
160041355,
159849252,
2122,
-1,
159984898,
2121,
-63092,
159988197,
2118,
-63091,
160115034,
2131,
-63090,
160180450,
2130,
-63089,
160245439,
2128,
-63088,
160309621,
2125,
-63087,
160374671,
2124,
-63086,
160439427,
2123,
-63085,
160504037,
2136,
-63084,
160568752,
2116,
-63083,
160633884,
2114,
-63082,
160697590,
2115,
-1,
160761633,
2113,
160958874,
159586140,
-1,
-63079,
160903247,
2119,
-1,
160964981,
2127,
-63077,
160904609,
2117,
-63076,
161094534,
2135,
-63075,
161158640,
2112,
-1,
161222460,
2134,
-63073,
159528633,
2138,
-63072,
161360022,
2142,
-63071,
161420454,
2139,
-1,
161481866,
2137,
161679783,
159449088,
-1,
161745318,
161626063,
-1,
-63067,
161690420,
128115,
-1,
161748365,
128114,
-1,
161678702,
128107,
-1,
161623269,
128094,
162073004,
159391008,
-1,
-63062,
162018091,
3386,
-63061,
162079382,
3369,
-1,
162139698,
| |
NURBS curves must have the same degree")
if self.q != new_nurbs.q:
raise Exception("In order to merge, the two NURBS curves must have the same degree")
if np.shape(self.V) != np.shape(new_nurbs.V):
raise Exception("The two NURBS patches must have the same number of V-knots")
if any([np.abs(v1-v2)>1e-12 for v1, v2 in zip(self.V, new_nurbs.V)]):
raise Exception("The two NURBS patches must have the same V-knot values")
# Combine the control points
P = np.concatenate((self.P, new_nurbs.P), axis=1)
# Combine the control point weights
W = np.concatenate((self.W, new_nurbs.W), axis=0)
# Highest index of the control points
n1 = np.shape(self.P)[1] - 1
n2 = np.shape(new_nurbs.P)[1] - 1
# Combine the knot vectors (inner knot has p+1 multiplicity)
eps = 0
U_start = np.zeros((self.p + 1,))
U_end = np.ones((self.p + 1,))
U_mid = np.ones((self.p + 1,)) / 2
U_mid[0] = U_mid[0] - eps # Quick and dirty fix for GMSH (avoid multiplicity equal to degree)
U_mid[-1] = U_mid[-1] + eps # Quick and dirty fix for GMSH (avoid multiplicity equal to degree)
U1 = 0.00 + self.U[self.p + 1:n1 + 1] / 2
U2 = 0.50 + new_nurbs.U[self.p + 1:n2 + 1] / 2
U = np.concatenate((U_start, U1, U_mid, U2, U_end))
# Create the merged NURBS surface
mergedNurbs = NurbsSurface(control_points=P, weights=W, u_degree=self.p, v_degree=self.q, u_knots=U, v_knots=self.V)
return mergedNurbs
def attach_nurbs_vdir(self, new_nurbs):
    """ Attach a new NURBS surface to the end of the instance NURBS surface along the v-direction

    Parameters
    ----------
    new_nurbs : NurbsSurface
        Surface to append in the v-direction. It must have the same (p, q) degrees and
        the same u-knot vector as the current surface

    Returns
    -------
    mergedNurbs : NurbsSurface
        Merged surface. The control nets are concatenated along the v-direction and the
        v-knot vectors are rescaled so that the current surface spans v in [0, 0.5] and
        `new_nurbs` spans v in [0.5, 1]

    Raises
    ------
    Exception
        If the degrees or the u-knot vectors of the two surfaces are not compatible
    """
    # Check that the NURBS surfaces have the same degrees
    if self.p != new_nurbs.p:
        raise Exception("In order to merge, the two NURBS curves must have the same degree")
    if self.q != new_nurbs.q:
        raise Exception("In order to merge, the two NURBS curves must have the same degree")
    # Merging along the v-direction requires matching u-knot vectors
    # (bug fix: these error messages previously said "V-knots" while the code checks U)
    if np.shape(self.U) != np.shape(new_nurbs.U):
        raise Exception("The two NURBS patches must have the same number of U-knots")
    if any([np.abs(u1 - u2) > 1e-12 for u1, u2 in zip(self.U, new_nurbs.U)]):
        raise Exception("The two NURBS patches must have the same U-knot values")
    # Combine the control points
    P = np.concatenate((self.P, new_nurbs.P), axis=2)
    # Combine the control point weights
    W = np.concatenate((self.W, new_nurbs.W), axis=1)
    # Highest index of the control points (v-direction)
    n1 = np.shape(self.P)[2] - 1
    n2 = np.shape(new_nurbs.P)[2] - 1
    # Combine the knot vectors (inner knot has q+1 multiplicity)
    eps = 0
    V_start = np.zeros((self.q + 1,))
    V_end = np.ones((self.q + 1,))
    V_mid = np.ones((self.q + 1,)) / 2
    V_mid[0] = V_mid[0] - eps   # Quick and dirty fix for GMSH (avoid multiplicity equal to degree)
    V_mid[-1] = V_mid[-1] + eps  # Quick and dirty fix for GMSH (avoid multiplicity equal to degree)
    # Bug fix: slice the v-knot vectors (V) using the v-degree (q). The original code
    # sliced the u-knot vectors (U) here, a copy-paste error from attach_nurbs_udir()
    V1 = 0.00 + self.V[self.q + 1:n1 + 1] / 2
    V2 = 0.50 + new_nurbs.V[self.q + 1:n2 + 1] / 2
    V = np.concatenate((V_start, V1, V_mid, V2, V_end))
    # Create the merged NURBS surface
    mergedNurbs = NurbsSurface(control_points=P, weights=W, u_degree=self.p, v_degree=self.q, u_knots=self.U, v_knots=V)
    return mergedNurbs
# ---------------------------------------------------------------------------------------------------------------- #
# Compute the unitary normal vectors
# ---------------------------------------------------------------------------------------------------------------- #
def get_normals(self, u, v):
    """ Evaluate the unit vectors normal to the surface at the input (u, v) parametrization

    The unit normal is the normalized cross product of the two surface tangent vectors
    (see section 19.2 of Farin's textbook)

    Parameters
    ----------
    u : scalar or ndarray with shape (N,)
        u-parameter value(s) where the normals are evaluated
    v : scalar or ndarray with shape (N,)
        v-parameter value(s) where the normals are evaluated

    Returns
    -------
    normals : ndarray with shape (ndim, N)
        Unit vectors normal to the surface
    """
    # Tangent vectors along the u- and v-parameter directions
    tangent_u = self.get_derivative(u, v, order_u=1, order_v=0)
    tangent_v = self.get_derivative(u, v, order_u=0, order_v=1)
    # The cross product of the tangents is normal to the surface
    cross_product = np.cross(tangent_u, tangent_v, axisa=0, axisb=0, axisc=0)
    # Rescale to unit length before returning
    magnitude = np.sum(cross_product ** 2, axis=0) ** (1 / 2)
    return cross_product / magnitude
# ---------------------------------------------------------------------------------------------------------------- #
# Compute the mean and Gaussian curvatures
# ---------------------------------------------------------------------------------------------------------------- #
def get_curvature(self, u, v):
    """ Evaluate the mean and Gaussian curvatures of the surface at the input (u, v) parametrization

    The Gaussian and mean curvatures are computed from the first and second fundamental
    forms of the surface, equations 19.11 and 19.12 of Farin's textbook

    Parameters
    ----------
    u : scalar or ndarray with shape (N,)
        u-parameter value(s) where the curvatures are evaluated
    v : scalar or ndarray with shape (N,)
        v-parameter value(s) where the curvatures are evaluated

    Returns
    -------
    mean_curvature : ndarray with shape (N,)
        Mean curvature of the surface
    gaussian_curvature : ndarray with shape (N,)
        Gaussian curvature of the surface
    """
    # First- and second-order partial derivatives of the surface
    S_u = self.get_derivative(u, v, order_u=1, order_v=0)
    S_v = self.get_derivative(u, v, order_u=0, order_v=1)
    S_uu = self.get_derivative(u, v, order_u=2, order_v=0)
    S_uv = self.get_derivative(u, v, order_u=1, order_v=1)
    S_vv = self.get_derivative(u, v, order_u=0, order_v=2)
    # Unit normal vectors (kept in their own name so the second fundamental
    # form coefficient N below does not shadow them, as the old code did)
    normals = self.get_normals(u, v)
    # Coefficients of the first fundamental form
    E = np.sum(S_u * S_u, axis=0)
    F = np.sum(S_u * S_v, axis=0)
    G = np.sum(S_v * S_v, axis=0)
    # Coefficients of the second fundamental form
    L = np.sum(S_uu * normals, axis=0)
    M = np.sum(S_uv * normals, axis=0)
    N = np.sum(S_vv * normals, axis=0)
    # Both curvatures share the determinant of the first fundamental form
    denominator = E * G - F ** 2
    mean_curvature = (1 / 2) * (N * E - 2 * M * F + L * G) / denominator
    gaussian_curvature = (L * N - M ** 2) / denominator
    return mean_curvature, gaussian_curvature
# ---------------------------------------------------------------------------------------------------------------- #
# Plotting functions
# ---------------------------------------------------------------------------------------------------------------- #
def plot(self, fig=None, ax = None,
surface=True, surface_color='blue', colorbar=False,
boundary=True, control_points=False, normals=False, axis_off=False, ticks_off=False,
Nu=50, Nv=50, isocurves_u=None, isocurves_v=None):
# Prepare the plot
if fig is None:
# One dimension (law of evolution)
if self.ndim == 1:
fig = mpl.pyplot.figure(figsize=(6, 5))
ax = fig.add_subplot(111, projection='3d')
ax.view_init(azim=-105, elev=30)
ax.grid(False)
ax.xaxis.pane.fill = False
ax.yaxis.pane.fill = False
ax.zaxis.pane.fill = False
ax.xaxis.pane.set_edgecolor('k')
ax.yaxis.pane.set_edgecolor('k')
ax.zaxis.pane.set_edgecolor('k')
ax.xaxis.pane._alpha = 0.9
ax.yaxis.pane._alpha = 0.9
ax.zaxis.pane._alpha = 0.9
ax.set_xlabel('$u$ parameter', fontsize=11, color='k', labelpad=18)
ax.set_ylabel('$v$ parameter', fontsize=11, color='k', labelpad=18)
ax.set_zlabel('NURBS value', fontsize=11, color='k', labelpad=18)
# ax_xy.xaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
# ax_xy.yaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
# ax_xy.zaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
for t in ax.xaxis.get_major_ticks(): t.label.set_fontsize(8)
for t in ax.yaxis.get_major_ticks(): t.label.set_fontsize(8)
for t in ax.zaxis.get_major_ticks(): t.label.set_fontsize(8)
ax.xaxis.set_rotate_label(False)
ax.yaxis.set_rotate_label(False)
ax.zaxis.set_rotate_label(False)
if ticks_off:
ax.set_xticks([])
ax.set_yticks([])
ax.set_zticks([])
if axis_off:
ax.axis('off')
# Two dimensions (bi-variate plane)
if self.ndim == 2:
fig = plt.figure(figsize=(6, 5))
ax = fig.add_subplot(111)
ax.set_xlabel('$x$ axis', fontsize=12, color='k', labelpad=12)
ax.set_ylabel('$y$ axis', fontsize=12, color='k', labelpad=12)
for t in ax.xaxis.get_major_ticks(): t.label.set_fontsize(12)
for t in ax.yaxis.get_major_ticks(): t.label.set_fontsize(12)
if ticks_off:
ax.set_xticks([])
ax.set_yticks([])
if axis_off:
ax.axis('off')
# Three dimensions
elif self.ndim == 3:
fig = mpl.pyplot.figure(figsize=(6, 5))
ax = fig.add_subplot(111, projection='3d')
ax.view_init(azim=-105, elev=30)
ax.grid(False)
ax.xaxis.pane.fill = False
ax.yaxis.pane.fill = False
ax.zaxis.pane.fill = False
ax.xaxis.pane.set_edgecolor('k')
ax.yaxis.pane.set_edgecolor('k')
ax.zaxis.pane.set_edgecolor('k')
ax.xaxis.pane._alpha = 0.9
ax.yaxis.pane._alpha = 0.9
ax.zaxis.pane._alpha = 0.9
ax.set_xlabel('$x$ axis', fontsize=11, color='k', labelpad=18)
ax.set_ylabel('$y$ axis', fontsize=11, color='k', labelpad=18)
ax.set_zlabel('$z$ axis', fontsize=11, color='k', labelpad=18)
# ax_xy.xaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
# ax_xy.yaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
# ax_xy.zaxis.set_major_formatter(mpl.ticker.FormatStrFormatter('%.1f'))
for t in ax.xaxis.get_major_ticks(): t.label.set_fontsize(8)
for t in ax.yaxis.get_major_ticks(): t.label.set_fontsize(8)
for t in ax.zaxis.get_major_ticks(): t.label.set_fontsize(8)
ax.xaxis.set_rotate_label(False)
ax.yaxis.set_rotate_label(False)
ax.zaxis.set_rotate_label(False)
if ticks_off:
ax.set_xticks([])
ax.set_yticks([])
ax.set_zticks([])
if axis_off:
ax.axis('off')
# Add objects to the plot
if self.ndim == 1:
if surface: self.plot_surface(fig, ax, color=surface_color, colorbar=colorbar, Nu=Nu, Nv=Nv)
if control_points: self.plot_control_points(fig, ax)
if self.ndim == 2:
if surface: self.plot_surface(fig, ax, color=surface_color, colorbar=colorbar, Nu=Nu, Nv=Nv)
if control_points: self.plot_control_points(fig, ax)
if boundary: self.plot_boundary(fig, ax)
if isocurves_u:
self.plot_isocurve_u(fig, ax, u_values=np.linspace(0, 1, isocurves_u))
if isocurves_v:
self.plot_isocurve_v(fig, ax, v_values=np.linspace(0, 1, isocurves_v))
# Set the aspect ratio of the data
ax.set_aspect(1.0)
# Adjust pad
plt.tight_layout(pad=5.0, w_pad=None, h_pad=None)
# Add objects to the plot
if self.ndim == 3:
# Add objects to the plot
if surface: self.plot_surface(fig, ax, color=surface_color, colorbar=colorbar, Nu=Nu, Nv=Nv)
if boundary: self.plot_boundary(fig, ax)
if control_points: self.plot_control_points(fig, ax)
if normals: self.plot_normals(fig, ax)
if isocurves_u:
self.plot_isocurve_u(fig, ax, u_values=np.linspace(0, 1, isocurves_u))
if isocurves_v:
self.plot_isocurve_v(fig, ax, v_values=np.linspace(0, 1, isocurves_v))
# Set the scaling of the axes
| |
is not None:
if_none_match = pool_get_options.if_none_match
if_modified_since = None
if pool_get_options is not None:
if_modified_since = pool_get_options.if_modified_since
if_unmodified_since = None
if pool_get_options is not None:
if_unmodified_since = pool_get_options.if_unmodified_since
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'batchUrl': self._serialize.url("self.config.batch_url", self.config.batch_url, 'str', skip_quote=True),
'poolId': self._serialize.url("pool_id", pool_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
if client_request_id is not None:
header_parameters['client-request-id'] = self._serialize.header("client_request_id", client_request_id, 'str')
if return_client_request_id is not None:
header_parameters['return-client-request-id'] = self._serialize.header("return_client_request_id", return_client_request_id, 'bool')
if ocp_date is not None:
header_parameters['ocp-date'] = self._serialize.header("ocp_date", ocp_date, 'rfc-1123')
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
if if_none_match is not None:
header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
if if_modified_since is not None:
header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
if if_unmodified_since is not None:
header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.BatchErrorException(self._deserialize, response)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CloudPool', response)
header_dict = {
'client-request-id': 'str',
'request-id': 'str',
'ETag': 'str',
'Last-Modified': 'rfc-1123',
}
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
client_raw_response.add_headers(header_dict)
return client_raw_response
return deserialized
get.metadata = {'url': '/pools/{poolId}'}
def patch(
        self, pool_id, pool_patch_parameter, pool_patch_options=None, custom_headers=None, raw=False, **operation_config):
    """Updates the properties of the specified Pool.

    Only the Pool properties that are specified in the request are
    replaced. For example, if the Pool has a StartTask associated with it
    and a request does not specify a StartTask element, then the Pool
    keeps the existing StartTask.

    :param pool_id: The ID of the Pool to update.
    :type pool_id: str
    :param pool_patch_parameter: The parameters for the request.
    :type pool_patch_parameter: ~azure.batch.models.PoolPatchParameter
    :param pool_patch_options: Additional parameters for the operation
    :type pool_patch_options: ~azure.batch.models.PoolPatchOptions
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`BatchErrorException<azure.batch.models.BatchErrorException>`
    """
    # Unpack the optional request options; every field defaults to None
    opts = pool_patch_options
    timeout = opts.timeout if opts is not None else None
    client_request_id = opts.client_request_id if opts is not None else None
    return_client_request_id = opts.return_client_request_id if opts is not None else None
    ocp_date = opts.ocp_date if opts is not None else None
    if_match = opts.if_match if opts is not None else None
    if_none_match = opts.if_none_match if opts is not None else None
    if_modified_since = opts.if_modified_since if opts is not None else None
    if_unmodified_since = opts.if_unmodified_since if opts is not None else None
    # Build the request URL from the operation metadata
    path_format_arguments = {
        'batchUrl': self._serialize.url("self.config.batch_url", self.config.batch_url, 'str', skip_quote=True),
        'poolId': self._serialize.url("pool_id", pool_id, 'str')
    }
    url = self._client.format_url(self.patch.metadata['url'], **path_format_arguments)
    # Query string parameters
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int')
    # Request headers (optional headers are added only when a value was supplied)
    header_parameters = {'Content-Type': 'application/json; odata=minimalmetadata; charset=utf-8'}
    if self.config.generate_client_request_id:
        header_parameters['client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
    if client_request_id is not None:
        header_parameters['client-request-id'] = self._serialize.header("client_request_id", client_request_id, 'str')
    if return_client_request_id is not None:
        header_parameters['return-client-request-id'] = self._serialize.header("return_client_request_id", return_client_request_id, 'bool')
    if ocp_date is not None:
        header_parameters['ocp-date'] = self._serialize.header("ocp_date", ocp_date, 'rfc-1123')
    if if_match is not None:
        header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    if if_none_match is not None:
        header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str')
    if if_modified_since is not None:
        header_parameters['If-Modified-Since'] = self._serialize.header("if_modified_since", if_modified_since, 'rfc-1123')
    if if_unmodified_since is not None:
        header_parameters['If-Unmodified-Since'] = self._serialize.header("if_unmodified_since", if_unmodified_since, 'rfc-1123')
    # Serialize the body and send the PATCH request
    body_content = self._serialize.body(pool_patch_parameter, 'PoolPatchParameter')
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    # Any status other than 200 is surfaced as a Batch service error
    if response.status_code not in [200]:
        raise models.BatchErrorException(self._deserialize, response)
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        client_raw_response.add_headers({
            'client-request-id': 'str',
            'request-id': 'str',
            'ETag': 'str',
            'Last-Modified': 'rfc-1123',
            'DataServiceId': 'str',
        })
        return client_raw_response
patch.metadata = {'url': '/pools/{poolId}'}
def disable_auto_scale(
        self, pool_id, pool_disable_auto_scale_options=None, custom_headers=None, raw=False, **operation_config):
    """Disables automatic scaling for a Pool.

    :param pool_id: The ID of the Pool on which to disable automatic
     scaling.
    :type pool_id: str
    :param pool_disable_auto_scale_options: Additional parameters for the
     operation
    :type pool_disable_auto_scale_options:
     ~azure.batch.models.PoolDisableAutoScaleOptions
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`BatchErrorException<azure.batch.models.BatchErrorException>`
    """
    # Unpack the optional request options; every field defaults to None
    opts = pool_disable_auto_scale_options
    timeout = opts.timeout if opts is not None else None
    client_request_id = opts.client_request_id if opts is not None else None
    return_client_request_id = opts.return_client_request_id if opts is not None else None
    ocp_date = opts.ocp_date if opts is not None else None
    # Build the request URL from the operation metadata
    path_format_arguments = {
        'batchUrl': self._serialize.url("self.config.batch_url", self.config.batch_url, 'str', skip_quote=True),
        'poolId': self._serialize.url("pool_id", pool_id, 'str')
    }
    url = self._client.format_url(self.disable_auto_scale.metadata['url'], **path_format_arguments)
    # Query string parameters
    query_parameters = {'api-version': self._serialize.query("self.api_version", self.api_version, 'str')}
    if timeout is not None:
        query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int')
    # Request headers (optional headers are added only when a value was supplied)
    header_parameters = {}
    if self.config.generate_client_request_id:
        header_parameters['client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
    if client_request_id is not None:
        header_parameters['client-request-id'] = self._serialize.header("client_request_id", client_request_id, 'str')
    if return_client_request_id is not None:
        header_parameters['return-client-request-id'] = self._serialize.header("return_client_request_id", return_client_request_id, 'bool')
    if ocp_date is not None:
        header_parameters['ocp-date'] = self._serialize.header("ocp_date", ocp_date, 'rfc-1123')
    # Send the POST request (no body for this operation)
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    # Any status other than 200 is surfaced as a Batch service error
    if response.status_code not in [200]:
        raise models.BatchErrorException(self._deserialize, response)
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        client_raw_response.add_headers({
            'client-request-id': 'str',
            'request-id': 'str',
            'ETag': 'str',
            'Last-Modified': 'rfc-1123',
            'DataServiceId': 'str',
        })
        return client_raw_response
disable_auto_scale.metadata = {'url': '/pools/{poolId}/disableautoscale'}
def enable_auto_scale(
self, pool_id, auto_scale_formula=None, auto_scale_evaluation_interval=None, pool_enable_auto_scale_options=None, custom_headers=None, raw=False, **operation_config):
"""Enables automatic scaling for a Pool.
You cannot enable automatic scaling on a Pool if a resize operation is
in progress on the Pool. If automatic scaling of the Pool is currently
disabled, you must specify a valid autoscale formula as part of the
request. If automatic scaling of the Pool is already enabled, you may
specify a new autoscale formula and/or a new evaluation interval. You
cannot call this API for the same Pool more than once every 30 seconds.
:param pool_id: The ID of the Pool on which to enable automatic
scaling.
:type pool_id: str
:param auto_scale_formula: The formula for the desired number of
Compute Nodes in the Pool. The formula is checked for validity before
it is applied to the Pool. If the formula is not valid, the Batch
service rejects the request with detailed error information. For more
information about specifying this formula, see Automatically scale
Compute Nodes in an Azure Batch Pool
(https://azure.microsoft.com/en-us/documentation/articles/batch-automatic-scaling).
:type auto_scale_formula: str
:param auto_scale_evaluation_interval: The time interval at which to
automatically adjust the Pool size according to the autoscale formula.
The default value is 15 minutes. The minimum and maximum value are 5
minutes and 168 hours respectively. If you specify a value less than 5
minutes or greater than 168 hours, the Batch service rejects the
request with an invalid property value error; if you are calling the
REST API directly, the HTTP status code is 400 (Bad Request). If you
specify a new interval, then the existing autoscale evaluation
schedule will be stopped and a new autoscale evaluation schedule will
be started, with its starting time being the time when this request
was issued.
:type auto_scale_evaluation_interval: timedelta
:param pool_enable_auto_scale_options: Additional parameters for the
operation
:type pool_enable_auto_scale_options:
~azure.batch.models.PoolEnableAutoScaleOptions
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`BatchErrorException<azure.batch.models.BatchErrorException>`
"""
timeout = None
if pool_enable_auto_scale_options is not None:
timeout | |
0)
# Auto-generated bilinear constraints (machine-written Pyomo model; do not edit by hand).
# Each constraint has the form  x_k = x_i * x_j  written as  -x_i*x_j + x_k == 0,
# linking a product variable x_k to a factor pair.  The constraints come in runs of
# 16 (or fewer at the edges) sharing the same right-hand factor (x589..x602).
m.c231 = Constraint(expr=-m.x52*m.x589 + m.x244 == 0)
m.c232 = Constraint(expr=-m.x53*m.x589 + m.x245 == 0)
m.c233 = Constraint(expr=-m.x54*m.x589 + m.x246 == 0)
m.c234 = Constraint(expr=-m.x55*m.x589 + m.x247 == 0)
m.c235 = Constraint(expr=-m.x56*m.x589 + m.x248 == 0)
m.c236 = Constraint(expr=-m.x57*m.x589 + m.x249 == 0)
m.c237 = Constraint(expr=-m.x58*m.x589 + m.x250 == 0)
m.c238 = Constraint(expr=-m.x59*m.x589 + m.x251 == 0)
m.c239 = Constraint(expr=-m.x60*m.x589 + m.x252 == 0)
m.c240 = Constraint(expr=-m.x61*m.x589 + m.x253 == 0)
m.c241 = Constraint(expr=-m.x62*m.x589 + m.x254 == 0)
m.c242 = Constraint(expr=-m.x63*m.x589 + m.x255 == 0)
m.c243 = Constraint(expr=-m.x64*m.x589 + m.x256 == 0)
m.c244 = Constraint(expr=-m.x65*m.x589 + m.x257 == 0)
m.c245 = Constraint(expr=-m.x18*m.x590 + m.x258 == 0)
m.c246 = Constraint(expr=-m.x19*m.x590 + m.x259 == 0)
m.c247 = Constraint(expr=-m.x20*m.x590 + m.x260 == 0)
m.c248 = Constraint(expr=-m.x21*m.x590 + m.x261 == 0)
m.c249 = Constraint(expr=-m.x22*m.x590 + m.x262 == 0)
m.c250 = Constraint(expr=-m.x23*m.x590 + m.x263 == 0)
m.c251 = Constraint(expr=-m.x24*m.x590 + m.x264 == 0)
m.c252 = Constraint(expr=-m.x25*m.x590 + m.x265 == 0)
m.c253 = Constraint(expr=-m.x26*m.x590 + m.x266 == 0)
m.c254 = Constraint(expr=-m.x27*m.x590 + m.x267 == 0)
m.c255 = Constraint(expr=-m.x28*m.x590 + m.x268 == 0)
m.c256 = Constraint(expr=-m.x29*m.x590 + m.x269 == 0)
m.c257 = Constraint(expr=-m.x30*m.x590 + m.x270 == 0)
m.c258 = Constraint(expr=-m.x31*m.x590 + m.x271 == 0)
m.c259 = Constraint(expr=-m.x32*m.x590 + m.x272 == 0)
m.c260 = Constraint(expr=-m.x33*m.x590 + m.x273 == 0)
m.c261 = Constraint(expr=-m.x34*m.x591 + m.x274 == 0)
m.c262 = Constraint(expr=-m.x35*m.x591 + m.x275 == 0)
m.c263 = Constraint(expr=-m.x36*m.x591 + m.x276 == 0)
m.c264 = Constraint(expr=-m.x37*m.x591 + m.x277 == 0)
m.c265 = Constraint(expr=-m.x38*m.x591 + m.x278 == 0)
m.c266 = Constraint(expr=-m.x39*m.x591 + m.x279 == 0)
m.c267 = Constraint(expr=-m.x40*m.x591 + m.x280 == 0)
m.c268 = Constraint(expr=-m.x41*m.x591 + m.x281 == 0)
m.c269 = Constraint(expr=-m.x42*m.x591 + m.x282 == 0)
m.c270 = Constraint(expr=-m.x43*m.x591 + m.x283 == 0)
m.c271 = Constraint(expr=-m.x44*m.x591 + m.x284 == 0)
m.c272 = Constraint(expr=-m.x45*m.x591 + m.x285 == 0)
m.c273 = Constraint(expr=-m.x46*m.x591 + m.x286 == 0)
m.c274 = Constraint(expr=-m.x47*m.x591 + m.x287 == 0)
m.c275 = Constraint(expr=-m.x48*m.x591 + m.x288 == 0)
m.c276 = Constraint(expr=-m.x49*m.x591 + m.x289 == 0)
m.c277 = Constraint(expr=-m.x50*m.x592 + m.x290 == 0)
m.c278 = Constraint(expr=-m.x51*m.x592 + m.x291 == 0)
m.c279 = Constraint(expr=-m.x52*m.x592 + m.x292 == 0)
m.c280 = Constraint(expr=-m.x53*m.x592 + m.x293 == 0)
m.c281 = Constraint(expr=-m.x54*m.x592 + m.x294 == 0)
m.c282 = Constraint(expr=-m.x55*m.x592 + m.x295 == 0)
m.c283 = Constraint(expr=-m.x56*m.x592 + m.x296 == 0)
m.c284 = Constraint(expr=-m.x57*m.x592 + m.x297 == 0)
m.c285 = Constraint(expr=-m.x58*m.x592 + m.x298 == 0)
m.c286 = Constraint(expr=-m.x59*m.x592 + m.x299 == 0)
m.c287 = Constraint(expr=-m.x60*m.x592 + m.x300 == 0)
m.c288 = Constraint(expr=-m.x61*m.x592 + m.x301 == 0)
m.c289 = Constraint(expr=-m.x62*m.x592 + m.x302 == 0)
m.c290 = Constraint(expr=-m.x63*m.x592 + m.x303 == 0)
m.c291 = Constraint(expr=-m.x64*m.x592 + m.x304 == 0)
m.c292 = Constraint(expr=-m.x65*m.x592 + m.x305 == 0)
m.c293 = Constraint(expr=-m.x34*m.x593 + m.x306 == 0)
m.c294 = Constraint(expr=-m.x35*m.x593 + m.x307 == 0)
m.c295 = Constraint(expr=-m.x36*m.x593 + m.x308 == 0)
m.c296 = Constraint(expr=-m.x37*m.x593 + m.x309 == 0)
m.c297 = Constraint(expr=-m.x38*m.x593 + m.x310 == 0)
m.c298 = Constraint(expr=-m.x39*m.x593 + m.x311 == 0)
m.c299 = Constraint(expr=-m.x40*m.x593 + m.x312 == 0)
m.c300 = Constraint(expr=-m.x41*m.x593 + m.x313 == 0)
m.c301 = Constraint(expr=-m.x42*m.x593 + m.x314 == 0)
m.c302 = Constraint(expr=-m.x43*m.x593 + m.x315 == 0)
m.c303 = Constraint(expr=-m.x44*m.x593 + m.x316 == 0)
m.c304 = Constraint(expr=-m.x45*m.x593 + m.x317 == 0)
m.c305 = Constraint(expr=-m.x46*m.x593 + m.x318 == 0)
m.c306 = Constraint(expr=-m.x47*m.x593 + m.x319 == 0)
m.c307 = Constraint(expr=-m.x48*m.x593 + m.x320 == 0)
m.c308 = Constraint(expr=-m.x49*m.x593 + m.x321 == 0)
m.c309 = Constraint(expr=-m.x50*m.x594 + m.x322 == 0)
m.c310 = Constraint(expr=-m.x51*m.x594 + m.x323 == 0)
m.c311 = Constraint(expr=-m.x52*m.x594 + m.x324 == 0)
m.c312 = Constraint(expr=-m.x53*m.x594 + m.x325 == 0)
m.c313 = Constraint(expr=-m.x54*m.x594 + m.x326 == 0)
m.c314 = Constraint(expr=-m.x55*m.x594 + m.x327 == 0)
m.c315 = Constraint(expr=-m.x56*m.x594 + m.x328 == 0)
m.c316 = Constraint(expr=-m.x57*m.x594 + m.x329 == 0)
m.c317 = Constraint(expr=-m.x58*m.x594 + m.x330 == 0)
m.c318 = Constraint(expr=-m.x59*m.x594 + m.x331 == 0)
m.c319 = Constraint(expr=-m.x60*m.x594 + m.x332 == 0)
m.c320 = Constraint(expr=-m.x61*m.x594 + m.x333 == 0)
m.c321 = Constraint(expr=-m.x62*m.x594 + m.x334 == 0)
m.c322 = Constraint(expr=-m.x63*m.x594 + m.x335 == 0)
m.c323 = Constraint(expr=-m.x64*m.x594 + m.x336 == 0)
m.c324 = Constraint(expr=-m.x65*m.x594 + m.x337 == 0)
m.c325 = Constraint(expr=-m.x18*m.x595 + m.x338 == 0)
m.c326 = Constraint(expr=-m.x19*m.x595 + m.x339 == 0)
m.c327 = Constraint(expr=-m.x20*m.x595 + m.x340 == 0)
m.c328 = Constraint(expr=-m.x21*m.x595 + m.x341 == 0)
m.c329 = Constraint(expr=-m.x22*m.x595 + m.x342 == 0)
m.c330 = Constraint(expr=-m.x23*m.x595 + m.x343 == 0)
m.c331 = Constraint(expr=-m.x24*m.x595 + m.x344 == 0)
m.c332 = Constraint(expr=-m.x25*m.x595 + m.x345 == 0)
m.c333 = Constraint(expr=-m.x26*m.x595 + m.x346 == 0)
m.c334 = Constraint(expr=-m.x27*m.x595 + m.x347 == 0)
m.c335 = Constraint(expr=-m.x28*m.x595 + m.x348 == 0)
m.c336 = Constraint(expr=-m.x29*m.x595 + m.x349 == 0)
m.c337 = Constraint(expr=-m.x30*m.x595 + m.x350 == 0)
m.c338 = Constraint(expr=-m.x31*m.x595 + m.x351 == 0)
m.c339 = Constraint(expr=-m.x32*m.x595 + m.x352 == 0)
m.c340 = Constraint(expr=-m.x33*m.x595 + m.x353 == 0)
m.c341 = Constraint(expr=-m.x34*m.x596 + m.x354 == 0)
m.c342 = Constraint(expr=-m.x35*m.x596 + m.x355 == 0)
m.c343 = Constraint(expr=-m.x36*m.x596 + m.x356 == 0)
m.c344 = Constraint(expr=-m.x37*m.x596 + m.x357 == 0)
m.c345 = Constraint(expr=-m.x38*m.x596 + m.x358 == 0)
m.c346 = Constraint(expr=-m.x39*m.x596 + m.x359 == 0)
m.c347 = Constraint(expr=-m.x40*m.x596 + m.x360 == 0)
m.c348 = Constraint(expr=-m.x41*m.x596 + m.x361 == 0)
m.c349 = Constraint(expr=-m.x42*m.x596 + m.x362 == 0)
m.c350 = Constraint(expr=-m.x43*m.x596 + m.x363 == 0)
m.c351 = Constraint(expr=-m.x44*m.x596 + m.x364 == 0)
m.c352 = Constraint(expr=-m.x45*m.x596 + m.x365 == 0)
m.c353 = Constraint(expr=-m.x46*m.x596 + m.x366 == 0)
m.c354 = Constraint(expr=-m.x47*m.x596 + m.x367 == 0)
m.c355 = Constraint(expr=-m.x48*m.x596 + m.x368 == 0)
m.c356 = Constraint(expr=-m.x49*m.x596 + m.x369 == 0)
m.c357 = Constraint(expr=-m.x50*m.x597 + m.x370 == 0)
m.c358 = Constraint(expr=-m.x51*m.x597 + m.x371 == 0)
m.c359 = Constraint(expr=-m.x52*m.x597 + m.x372 == 0)
m.c360 = Constraint(expr=-m.x53*m.x597 + m.x373 == 0)
m.c361 = Constraint(expr=-m.x54*m.x597 + m.x374 == 0)
m.c362 = Constraint(expr=-m.x55*m.x597 + m.x375 == 0)
m.c363 = Constraint(expr=-m.x56*m.x597 + m.x376 == 0)
m.c364 = Constraint(expr=-m.x57*m.x597 + m.x377 == 0)
m.c365 = Constraint(expr=-m.x58*m.x597 + m.x378 == 0)
m.c366 = Constraint(expr=-m.x59*m.x597 + m.x379 == 0)
m.c367 = Constraint(expr=-m.x60*m.x597 + m.x380 == 0)
m.c368 = Constraint(expr=-m.x61*m.x597 + m.x381 == 0)
m.c369 = Constraint(expr=-m.x62*m.x597 + m.x382 == 0)
m.c370 = Constraint(expr=-m.x63*m.x597 + m.x383 == 0)
m.c371 = Constraint(expr=-m.x64*m.x597 + m.x384 == 0)
m.c372 = Constraint(expr=-m.x65*m.x597 + m.x385 == 0)
m.c373 = Constraint(expr=-m.x2*m.x598 + m.x386 == 0)
m.c374 = Constraint(expr=-m.x3*m.x598 + m.x387 == 0)
m.c375 = Constraint(expr=-m.x4*m.x598 + m.x388 == 0)
m.c376 = Constraint(expr=-m.x5*m.x598 + m.x389 == 0)
m.c377 = Constraint(expr=-m.x6*m.x598 + m.x390 == 0)
m.c378 = Constraint(expr=-m.x7*m.x598 + m.x391 == 0)
m.c379 = Constraint(expr=-m.x8*m.x598 + m.x392 == 0)
m.c380 = Constraint(expr=-m.x9*m.x598 + m.x393 == 0)
m.c381 = Constraint(expr=-m.x10*m.x598 + m.x394 == 0)
m.c382 = Constraint(expr=-m.x11*m.x598 + m.x395 == 0)
m.c383 = Constraint(expr=-m.x12*m.x598 + m.x396 == 0)
m.c384 = Constraint(expr=-m.x13*m.x598 + m.x397 == 0)
m.c385 = Constraint(expr=-m.x14*m.x598 + m.x398 == 0)
m.c386 = Constraint(expr=-m.x15*m.x598 + m.x399 == 0)
m.c387 = Constraint(expr=-m.x16*m.x598 + m.x400 == 0)
m.c388 = Constraint(expr=-m.x17*m.x598 + m.x401 == 0)
m.c389 = Constraint(expr=-m.x18*m.x599 + m.x402 == 0)
m.c390 = Constraint(expr=-m.x19*m.x599 + m.x403 == 0)
m.c391 = Constraint(expr=-m.x20*m.x599 + m.x404 == 0)
m.c392 = Constraint(expr=-m.x21*m.x599 + m.x405 == 0)
m.c393 = Constraint(expr=-m.x22*m.x599 + m.x406 == 0)
m.c394 = Constraint(expr=-m.x23*m.x599 + m.x407 == 0)
m.c395 = Constraint(expr=-m.x24*m.x599 + m.x408 == 0)
m.c396 = Constraint(expr=-m.x25*m.x599 + m.x409 == 0)
m.c397 = Constraint(expr=-m.x26*m.x599 + m.x410 == 0)
m.c398 = Constraint(expr=-m.x27*m.x599 + m.x411 == 0)
m.c399 = Constraint(expr=-m.x28*m.x599 + m.x412 == 0)
m.c400 = Constraint(expr=-m.x29*m.x599 + m.x413 == 0)
m.c401 = Constraint(expr=-m.x30*m.x599 + m.x414 == 0)
m.c402 = Constraint(expr=-m.x31*m.x599 + m.x415 == 0)
m.c403 = Constraint(expr=-m.x32*m.x599 + m.x416 == 0)
m.c404 = Constraint(expr=-m.x33*m.x599 + m.x417 == 0)
m.c405 = Constraint(expr=-m.x34*m.x600 + m.x418 == 0)
m.c406 = Constraint(expr=-m.x35*m.x600 + m.x419 == 0)
m.c407 = Constraint(expr=-m.x36*m.x600 + m.x420 == 0)
m.c408 = Constraint(expr=-m.x37*m.x600 + m.x421 == 0)
m.c409 = Constraint(expr=-m.x38*m.x600 + m.x422 == 0)
m.c410 = Constraint(expr=-m.x39*m.x600 + m.x423 == 0)
m.c411 = Constraint(expr=-m.x40*m.x600 + m.x424 == 0)
m.c412 = Constraint(expr=-m.x41*m.x600 + m.x425 == 0)
m.c413 = Constraint(expr=-m.x42*m.x600 + m.x426 == 0)
m.c414 = Constraint(expr=-m.x43*m.x600 + m.x427 == 0)
m.c415 = Constraint(expr=-m.x44*m.x600 + m.x428 == 0)
m.c416 = Constraint(expr=-m.x45*m.x600 + m.x429 == 0)
m.c417 = Constraint(expr=-m.x46*m.x600 + m.x430 == 0)
m.c418 = Constraint(expr=-m.x47*m.x600 + m.x431 == 0)
m.c419 = Constraint(expr=-m.x48*m.x600 + m.x432 == 0)
m.c420 = Constraint(expr=-m.x49*m.x600 + m.x433 == 0)
m.c421 = Constraint(expr=-m.x50*m.x601 + m.x434 == 0)
m.c422 = Constraint(expr=-m.x51*m.x601 + m.x435 == 0)
m.c423 = Constraint(expr=-m.x52*m.x601 + m.x436 == 0)
m.c424 = Constraint(expr=-m.x53*m.x601 + m.x437 == 0)
m.c425 = Constraint(expr=-m.x54*m.x601 + m.x438 == 0)
m.c426 = Constraint(expr=-m.x55*m.x601 + m.x439 == 0)
m.c427 = Constraint(expr=-m.x56*m.x601 + m.x440 == 0)
m.c428 = Constraint(expr=-m.x57*m.x601 + m.x441 == 0)
m.c429 = Constraint(expr=-m.x58*m.x601 + m.x442 == 0)
m.c430 = Constraint(expr=-m.x59*m.x601 + m.x443 == 0)
m.c431 = Constraint(expr=-m.x60*m.x601 + m.x444 == 0)
m.c432 = Constraint(expr=-m.x61*m.x601 + m.x445 == 0)
m.c433 = Constraint(expr=-m.x62*m.x601 + m.x446 == 0)
m.c434 = Constraint(expr=-m.x63*m.x601 + m.x447 == 0)
m.c435 = Constraint(expr=-m.x64*m.x601 + m.x448 == 0)
m.c436 = Constraint(expr=-m.x65*m.x601 + m.x449 == 0)
m.c437 = Constraint(expr=-m.x2*m.x602 + m.x450 == 0)
m.c438 = Constraint(expr=-m.x3*m.x602 + m.x451 == 0)
m.c439 = Constraint(expr=-m.x4*m.x602 + m.x452 == 0)
m.c440 = Constraint(expr=-m.x5*m.x602 + m.x453 == 0)
m.c441 = Constraint(expr=-m.x6*m.x602 + m.x454 == 0)
m.c442 = Constraint(expr=-m.x7*m.x602 + m.x455 == 0)
m.c443 = Constraint(expr=-m.x8*m.x602 + m.x456 == 0)
m.c444 = | |
<reponame>RomainGratier/Black-box_Optimization_via_Deep_Generative-Exploratory_Networks
import pandas as pd
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
import os
import torch
from torch.autograd import Variable
from torch.nn import functional as F
from copy import deepcopy
import itertools
from tabulate import tabulate
from scipy.stats import norm, truncnorm
import random
from src.data import MNISTDataset, RotationDataset
from src.metrics import se, re, compute_thickness_ground_truth
from src.generative_model.metrics import calculate_fid_given_paths, calculate_kid_given_paths
from src.forward.uncertainty_estimation import get_uncertainty_per_batch
import src.config as cfg
if cfg.dcgan:
import src.config_dcgan as cfgan
else:
import src.config_gan as cfgan
import src.config_inference as cfginf
from src.uncertainty_policy import uncertainty_selection
if cfg.experiment == 'min_mnist':
import src.config_min_mnist as cfg_data
elif cfg.experiment == 'max_mnist':
import src.config_max_mnist as cfg_data
elif cfg.experiment == 'rotation_dataset':
import src.config_rotation as cfg_data
# --- Runtime device / module-level configuration ---
cuda = True if torch.cuda.is_available() else False
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# Tensor constructor aliases so the rest of the module is device-agnostic.
FloatTensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if cuda else torch.LongTensor
# Largest chunk fed through the GAN / forward model in a single pass
# (inputs bigger than 5000 are split into ~max_size pieces below).
max_size = 2000
# Which uncertainty estimate drives sample selection: 'epistemic' or 'aleatoric'.
uncertainty_policy = 'epistemic'
def save_numpy_arr(path, arr):
    """Write ``arr`` to ``path`` in NumPy's binary ``.npy`` format.

    Returns the path unchanged so callers can chain it straight into a
    list of file paths (as the FID/KID helpers do).
    """
    np.save(path, arr)
    return path
def get_truncated_normal(form, mean=0, sd=1, quant=0.8):
    """Sample an array of shape ``form`` from a truncated normal.

    The truncation bounds are the ``1 - quant`` and ``quant`` quantiles of
    N(mean, sd), so e.g. ``quant=0.8`` keeps the central 60% of the mass.
    """
    upper = norm.ppf(quant, mean, sd)
    lower = norm.ppf(1 - quant, mean, sd)
    a = (lower - mean) / sd
    b = (upper - mean) / sd
    return truncnorm(a, b, loc=mean, scale=sd).rvs(form)
def generate_sample_from_GAN(y_cond, z, generator):
    """Run ``generator`` on latent codes ``z`` conditioned on ``y_cond``.

    Batches larger than 5000 are split into ~``max_size`` chunks so a single
    forward pass never exhausts device memory; outputs are gathered on CPU.
    """
    if (y_cond.shape[0] > 5000) & (z.shape[0] > 5000):
        cond_chunks = np.array_split(y_cond, len(y_cond) // max_size)
        z_chunks = np.array_split(z, len(z) // max_size)
        # Chunk counts should always match; a mismatch means the inputs
        # were inconsistent lengths upstream.
        if len(cond_chunks) != len(z_chunks):
            print('WARNING PIPELINE BROKEN WHEN GENERATION')
        outputs = []
        for idx, cond in enumerate(cond_chunks):
            latent = Variable(FloatTensor(z_chunks[idx]))
            label = Variable(FloatTensor(cond))
            outputs.append(generator(latent, label).detach().cpu())
        return torch.cat(outputs)
    return generator(Variable(FloatTensor(z)), Variable(FloatTensor(y_cond))).detach().cpu()
def predict_forward_model(forward, gen_images, bayesian=True):
    """Predict labels for generated images with the forward model.

    When ``bayesian`` is True, returns ``(y_pred, epistemic, aleatoric)``
    via ``get_uncertainty_per_batch``; otherwise returns only ``y_pred``
    from a plain forward pass.  Inputs larger than 5000 are split into
    ~``max_size`` chunks and the per-chunk results concatenated.
    Images are resized to 32x32 before prediction.
    """
    if bayesian:
        if gen_images.shape[0] > 5000:
            gen_images = np.array_split(gen_images.cpu().detach(), len(gen_images.cpu().detach()) // max_size)
            pred_chunk = []
            epistemic_chunk = []
            aleatoric_chunk = []
            for i, gen_images_batch in enumerate(gen_images):
                pred, epi, aleatoric = get_uncertainty_per_batch(forward, F.interpolate(gen_images_batch.to(device), size=32), device)
                pred_chunk.append(pred)
                epistemic_chunk.append(epi)
                aleatoric_chunk.append(aleatoric)
            return np.concatenate(pred_chunk), np.concatenate(epistemic_chunk), np.concatenate(aleatoric_chunk)
        else:
            # NOTE(review): unlike the chunked path above, this branch does not
            # move gen_images to `device` before interpolation — presumably the
            # caller already placed them correctly; confirm on a CUDA setup.
            y_pred, epistemic, aleatoric = get_uncertainty_per_batch(forward, F.interpolate(gen_images, size=32), device)
            return y_pred, epistemic, aleatoric
    else:
        if gen_images.shape[0] > 5000:
            gen_images = np.array_split(gen_images.cpu().detach(), len(gen_images.cpu().detach()) // max_size)
            pred_chunk = []
            for i, gen_images_batch in enumerate(gen_images):
                pred_chunk.append(forward(F.interpolate(gen_images_batch.to(device), size=32)).squeeze(1).cpu().detach().numpy())
            return np.concatenate(pred_chunk)
        else:
            y_pred = forward(F.interpolate(gen_images.to(device), size=32)).squeeze(1).cpu().detach().numpy()
            return y_pred
def compute_fid_mnist(gen_img, index_distribution, real_dataset):
    """Score generated images against an equal-sized random draw of real ones.

    Both image sets are dumped to ``save_data/*.npy`` on disk (that is the
    interface the FID/KID calculators expect) and the two metric values are
    returned as ``(fid, kid)``.
    """
    sampled_ids = random.sample(index_distribution.tolist(), gen_img.shape[0])
    real_imgs = real_dataset[sampled_ids].numpy()
    folder = 'save_data'
    os.makedirs(folder, exist_ok=True)
    path_gen = save_numpy_arr(os.path.join(folder, 'gen_img.npy'), gen_img.cpu().detach().numpy())
    path_real = save_numpy_arr(os.path.join(folder, 'real_imgs.npy'), real_imgs)
    paths = [path_real, path_gen]
    return calculate_fid_given_paths(paths), calculate_kid_given_paths(paths)
def get_main_data(distribution, size_full, testset):
    """Build the conditioning labels, latent codes and real-image pool.

    ``distribution`` selects whether conditions are drawn inside ('in') or
    outside ('out') the training label range defined by the experiment
    config (min/limit/max dataset bounds).  Returns
    ``(conditions, z, index_distribution, real_dataset)`` where
    ``index_distribution`` indexes the test images matching the chosen range.

    NOTE(review): if ``distribution`` is neither 'in' nor 'out', or
    ``cfg.experiment`` matches no branch, the locals are never assigned and
    the final return raises UnboundLocalError — callers presumably only pass
    valid combinations; confirm.
    """
    if distribution == 'in':
        if (cfg.experiment == 'max_mnist') | (cfg.experiment == 'rotation_dataset'):
            # DCGAN variants use 4-D (N,1,1,1) condition/latent tensors.
            if cfg.dcgan:
                conditions = np.random.uniform(cfg_data.min_dataset, cfg_data.limit_dataset, (size_full, 1, 1, 1))
                z = get_truncated_normal((size_full, cfgan.latent_dim, 1, 1), quant=cfginf.quantile_rate_z_gen)
            else:
                conditions = np.random.uniform(cfg_data.min_dataset, cfg_data.limit_dataset, size_full)
                z = get_truncated_normal((size_full, cfgan.latent_dim), quant=cfginf.quantile_rate_z_gen)
            df_test_in = pd.DataFrame(testset.y_data, columns=['label'])
            index_distribution = df_test_in[df_test_in['label'] <= cfg_data.limit_dataset].index
            print(f'size of in distribution data for fid/kid : {len(index_distribution)}')
            real_dataset = deepcopy(testset.x_data)
        if cfg.experiment == 'min_mnist':
            # min_mnist: 'in' range sits above the limit instead of below it.
            if cfg.dcgan:
                conditions = np.random.uniform(cfg_data.limit_dataset, cfg_data.max_dataset, (size_full, 1, 1, 1))
                z = get_truncated_normal((size_full, cfgan.latent_dim, 1, 1), quant=cfginf.quantile_rate_z_gen)
            else:
                conditions = np.random.uniform(cfg_data.limit_dataset, cfg_data.max_dataset, size_full)
                z = get_truncated_normal((size_full, cfgan.latent_dim), quant=cfginf.quantile_rate_z_gen)
            df_test_in = pd.DataFrame(testset.y_data, columns=['label'])
            index_distribution = df_test_in[(df_test_in['label'] >= cfg_data.limit_dataset) & (df_test_in['label'] <= cfg_data.max_dataset)].index
            print(f'size of in distribution data for fid/kid : {len(index_distribution)}')
            real_dataset = deepcopy(testset.x_data)
    if distribution == 'out':
        if (cfg.experiment == 'max_mnist') | (cfg.experiment == 'rotation_dataset'):
            if cfg.dcgan:
                conditions = np.random.uniform(cfg_data.limit_dataset, cfg_data.max_dataset, (size_full, 1, 1, 1))
                z = get_truncated_normal((size_full, cfgan.latent_dim, 1, 1), quant=cfginf.quantile_rate_z_gen)
            else:
                conditions = np.random.uniform(cfg_data.limit_dataset, cfg_data.max_dataset, size_full)
                z = get_truncated_normal((size_full, cfgan.latent_dim), quant=cfginf.quantile_rate_z_gen)
            df_test_out = pd.DataFrame(testset.y_data, columns=['label'])
            index_distribution = df_test_out[df_test_out['label'] > cfg_data.limit_dataset].index
            print(f'size of out distribution data for fid/kid : {len(index_distribution)}')
            real_dataset = deepcopy(testset.x_data)
        if cfg.experiment == 'min_mnist':
            if cfg.dcgan:
                conditions = np.random.uniform(cfg_data.min_dataset , cfg_data.limit_dataset, (size_full, 1, 1, 1))
                z = get_truncated_normal((size_full, cfgan.latent_dim, 1, 1), quant=cfginf.quantile_rate_z_gen)
            else:
                conditions = np.random.uniform(cfg_data.min_dataset , cfg_data.limit_dataset, size_full)
                z = get_truncated_normal((size_full, cfgan.latent_dim), quant=cfginf.quantile_rate_z_gen)
            df_test_out = pd.DataFrame(testset.y_data, columns=['label'])
            index_distribution = df_test_out[df_test_out['label'] < cfg_data.limit_dataset].index
            print(f'size of out distribution data for fid/kid : {len(index_distribution)}')
            real_dataset = deepcopy(testset.x_data)
    return conditions, z, index_distribution, real_dataset
def monte_carlo_inference_mse_batch(distribution, generator, forward, testset, bayesian=True, sample_number=2000):
    """Run one Monte-Carlo batch and report squared/relative error stats.

    Generates ``size_full`` images (oversampled so that the uncertainty
    policy can later keep a ``sample_number``-sized subset), compares the
    forward model's predictions against the MorphoMNIST thickness measure,
    and returns four ``[mean, std]`` pairs:
    ``(se_random, se_policy, re_random, re_policy)``.
    When ``bayesian`` is False the policy slots are placeholder
    ``[100.0, 100.0]`` values.
    """
    # Oversample so the uncertainty policy can discard the least certain part.
    size_full = int(sample_number * 1/cfginf.quantile_rate_uncertainty_policy)
    # ------------ Get data ------------
    conditions, z, _, _ = get_main_data(distribution, size_full, testset)
    # ------------ Generate sample from z and y target ------------
    images_generated = generate_sample_from_GAN(conditions, z, generator)
    # ------------ random sample ------------
    random_index = random.sample(np.arange(images_generated.shape[0]).tolist(), sample_number)
    images_generated_rand = images_generated[random_index]
    # ------------ Compute forward predictions ------------
    if bayesian:
        y_pred, epistemic, aleatoric = predict_forward_model(forward, images_generated_rand, bayesian=bayesian)
    else:
        y_pred = predict_forward_model(forward, images_generated_rand, bayesian=bayesian)
    # ------------ Compare the forward model and the Measure from morphomnist ------------
    images_generated_rand = images_generated_rand.squeeze(1).cpu().detach().numpy()
    thickness = compute_thickness_ground_truth(images_generated_rand)
    # ------------ Compute the se between the target and the morpho measure predictions ------------
    se_measure_glob = se(thickness, y_pred)
    re_measure_glob = re(thickness, y_pred)
    print(f"{distribution} distribution RANDOM results")
    print(f"The mean squared error : {np.mean(se_measure_glob)} \t The std of the squared error : {np.std(se_measure_glob)}")
    print(f"The mean relative error : {np.mean(re_measure_glob)} \t The std of the squared error : {np.std(re_measure_glob)}")
    # ------------ Global results ------------
    stat_ms_glob = [np.mean(se_measure_glob), np.std(se_measure_glob)]; stat_mr_glob = [np.mean(re_measure_glob), np.std(re_measure_glob)];
    if bayesian:
        # Re-predict on the FULL oversampled batch so the policy can select from it.
        # ------------ Compute forward predictions ------------
        y_pred, epistemic, aleatoric = predict_forward_model(forward, images_generated, bayesian=bayesian)
        # ------------ Uncertainty policy ------------
        if uncertainty_policy == 'epistemic':
            index_certain, _ = uncertainty_selection(epistemic.squeeze())
        elif uncertainty_policy == 'aleatoric':
            index_certain, _ = uncertainty_selection(aleatoric.squeeze())
        images_generated_sampled = images_generated[index_certain]
        y_pred = y_pred[index_certain]
        # ------------ Compare the forward model and the Measure from morphomnist ------------
        # Move variable to cpu
        images_generated_sampled = images_generated_sampled.squeeze(1).cpu().detach().numpy()
        thickness = compute_thickness_ground_truth(images_generated_sampled)
        # ------------ Compute the se between the target and the morpho measure predictions ------------
        se_measure_pol = se(thickness, y_pred)
        re_measure_pol = re(thickness, y_pred)
        print(f"{distribution} distribution POLICY results")
        print(f"The mean squared error : {np.mean(se_measure_pol)} \t The std of the squared error : {np.std(se_measure_pol)}")
        print(f"The mean relative error : {np.mean(re_measure_pol)} \t The std of the squared error : {np.std(re_measure_pol)}")
        # ------------ Global results ------------
        stat_ms_pol = [np.mean(se_measure_pol), np.std(se_measure_pol)]; stat_mr_pol = [np.mean(re_measure_pol), np.std(re_measure_pol)];
        return stat_ms_glob, stat_ms_pol, stat_mr_glob, stat_mr_pol
    else:
        return stat_ms_glob, [100.0, 100.0], stat_mr_glob, [100.0, 100.0]
def monte_carlo_inference_mse_sampling(distribution, generator, forward, testset, bayesian=True, sample_number=2000, size_sample=10):
    """Repeat the MSE batch inference ``size_sample`` times and aggregate.

    Runs ``monte_carlo_inference_mse_batch`` repeatedly and returns four
    ``[mean, std]`` pairs over the per-batch mean errors:
    ``(se_random, se_policy, re_random, re_policy)``.

    Fix: ``bayesian`` and ``sample_number`` were previously ignored (the
    inner call hard-coded ``bayesian=True`` and ``sample_number=2000``);
    they are now forwarded, with unchanged defaults.
    """
    ms_rand = []; ms_pol = []; mr_rand = []; mr_pol = [];
    for i in range(size_sample):
        print(f'Computed sample {i+1}/{size_sample}')
        stat_ms_glob, stat_ms_pol, stat_mr_glob, stat_mr_pol = monte_carlo_inference_mse_batch(distribution,
                                                                                               generator,
                                                                                               forward,
                                                                                               testset,
                                                                                               bayesian=bayesian,
                                                                                               sample_number=sample_number)
        # Update values
        ms_rand.append(stat_ms_glob[0]); ms_pol.append(stat_ms_pol[0]); mr_rand.append(stat_mr_glob[0]); mr_pol.append(stat_mr_pol[0]);
    stat_se_rand = [np.mean(ms_rand), np.std(ms_rand)]
    stat_se_pol = [np.mean(ms_pol), np.std(ms_pol)]
    stat_re_rand = [np.mean(mr_rand), np.std(mr_rand)]
    stat_re_pol = [np.mean(mr_pol), np.std(mr_pol)]
    return stat_se_rand, stat_se_pol, stat_re_rand, stat_re_pol
def monte_carlo_inference_fid_kid_batch(distribution, generator, forward, testset, bayesian=True, sample_number_fid_kid = 2000):
    """Run one Monte-Carlo batch and compute FID/KID scores.

    Generates an oversampled batch, scores a random
    ``sample_number_fid_kid``-sized subset against real test images, then
    scores the uncertainty-policy-selected subset (or, when ``bayesian`` is
    False, a second random subset).  Returns
    ``(fid_random, fid_policy, kid_random, kid_policy)``.
    """
    # Oversample so the uncertainty policy can discard the least certain part.
    size_full = int(sample_number_fid_kid * 1/cfginf.quantile_rate_uncertainty_policy)
    # ------------ Get data ------------
    conditions, z, index_distribution, real_dataset = get_main_data(distribution, size_full, testset)
    # ------------ Generate sample from z and y target ------------
    images_generated = generate_sample_from_GAN(conditions, z, generator)
    # ------------ random sample ------------
    random_index = random.sample(np.arange(images_generated.shape[0]).tolist(), sample_number_fid_kid)
    images_generated_rand = images_generated[random_index]
    # ------------ Compute FID/KID from testset ------------
    fid_value_gen_rand, kid_value_gen_rand = compute_fid_mnist(images_generated_rand, index_distribution, real_dataset)
    # ------------ Compute policy measures ------------
    if bayesian:
        y_pred, epistemic, aleatoric = predict_forward_model(forward, images_generated, bayesian=True)
        # ------------ Uncertainty policy ------------
        if uncertainty_policy == 'epistemic':
            index_certain, _ = uncertainty_selection(epistemic.squeeze())
        elif uncertainty_policy == 'aleatoric':
            index_certain, _ = uncertainty_selection(aleatoric.squeeze())
        # NOTE(review): the policy-selected subset size is whatever the
        # selection returns — presumably ~sample_number_fid_kid; confirm it
        # matches the random subset size for a fair FID/KID comparison.
        images_generated_sampled = images_generated[index_certain]
    else:
        # ------------ random sample ------------
        random_index = random.sample(np.arange(images_generated.shape[0]).tolist(), sample_number_fid_kid)
        images_generated_sampled = images_generated[random_index]
    # ------------ Compute FID/KID from testset ------------
    fid_value_gen_pol, kid_value_gen_pol = compute_fid_mnist(images_generated_sampled, index_distribution, real_dataset)
    return fid_value_gen_rand, fid_value_gen_pol, kid_value_gen_rand, kid_value_gen_pol
def monte_carlo_inference_fid_kid_sampling(distribution, generator, forward, testset, bayesian=True, sample_number_fid_kid = 2000, size_sample=10):
    """Repeat the FID/KID batch inference ``size_sample`` times.

    Aggregates the per-batch scores and returns four ``[mean, std]`` pairs:
    ``(fid_random, fid_policy, kid_random, kid_policy)``.
    """
    fid_rand = []
    fid_pol = []
    kid_rand = []
    kid_pol = []
    for step in range(size_sample):
        print(f'Computed sample {step+1}/{size_sample}')
        fid_r, fid_p, kid_r, kid_p = monte_carlo_inference_fid_kid_batch(
            distribution,
            generator,
            forward,
            testset,
            bayesian=bayesian,
            sample_number_fid_kid=sample_number_fid_kid)
        # Keep only the mean component ([0]) of each per-batch score.
        fid_rand.append(fid_r[0])
        fid_pol.append(fid_p[0])
        kid_rand.append(kid_r[0])
        kid_pol.append(kid_p[0])
    stat_fid_rand = [np.mean(fid_rand), np.std(fid_rand)]
    stat_fid_pol = [np.mean(fid_pol), np.std(fid_pol)]
    stat_kid_rand = [np.mean(kid_rand), np.std(kid_rand)]
    stat_kid_pol = [np.mean(kid_pol), np.std(kid_pol)]
    return stat_fid_rand, stat_fid_pol, stat_kid_rand, stat_kid_pol
def check_predictions_bayesian(distribution, y_pred, epistemic, aleatoric, conditions, images_generated):
# Erase in distribution sample
if distribution == 'in':
if cfg.experiment == 'min_mnist':
index_verified = np.argwhere(y_pred > cfg_data.limit_dataset).squeeze(1)
print(y_pred.shape)
y_pred = y_pred[index_verified]
print(y_pred.shape)
epistemic = epistemic[index_verified]
aleatoric = aleatoric[index_verified]
conditions = conditions[index_verified]
images_generated = images_generated[index_verified]
elif (cfg.experiment == 'max_mnist') & (cfg.experiment == 'rotation_dataset'):
index_verified = np.argwhere(y_pred < cfg_data.limit_dataset).squeeze(1)
y_pred = y_pred[index_verified]
epistemic = epistemic[index_verified]
aleatoric = aleatoric[index_verified]
conditions = conditions[index_verified]
images_generated = images_generated[index_verified]
elif distribution == 'out':
if cfg.experiment == 'min_mnist':
index_verified = np.argwhere(y_pred < cfg_data.limit_dataset).squeeze(1)
print(y_pred.shape)
y_pred = y_pred[index_verified]
print(y_pred.shape)
epistemic = epistemic[index_verified]
aleatoric | |
"19-1886973",
"19-1887251",
"19-1886924",
"18-1886608",
"18-1873144",
"18-1886472",
"18-1886509",
"18-1874533",
"18-1886214",
"20-1917935",
"18-1865406",
"18-1882454",
"18-1874967",
"18-1885609",
"18-1885617",
"18-1885441",
"18-1868164",
"18-1885360",
"20-1918692",
"17-1843218",
"18-1885298",
"18-1885253",
"18-1884820",
"18-1884859",
"18-1884773",
"18-1856186",
"18-1884530",
"18-1871415",
"17-1850486",
"18-1878179",
"18-1884302",
"18-1880010",
"18-1883840",
"18-1883846",
"18-1883599",
"18-1883605",
"18-1883522",
"18-1874751",
"18-1883324",
"18-1875403",
"18-1859554",
"18-1882683",
"18-1882613",
"18-1882460",
"18-1881998",
"18-1882049",
"18-1882046",
"18-1882050",
"18-1880452",
"17-1851854",
"18-1878308",
"18-1881319",
"17-0830460",
"18-1880912",
"18-1880969",
"18-1881018",
"20-1917896",
"18-1875377",
"20-1919731",
"18-1874364",
"18-1880412",
"18-1880506",
"18-1880279",
"18-1880091",
"18-1880152",
"20-1920777",
"18-1859051",
"18-1879854",
"18-1879885",
"18-1879626",
"18-1879493",
"18-1879559",
"18-1879378",
"18-1879429",
"18-1879319",
"18-1879249",
"18-1878765",
"18-1877044",
"18-1878576",
"18-1878280",
"18-1878190",
"18-1877933",
"17-1852725",
"18-1877942",
"18-1877764",
"17-1853587",
"18-1877652",
"18-1877286",
"18-1877609",
"18-1877098",
"18-1876874",
"18-1876476",
"18-1876478",
"18-1867188",
"18-1875589",
"18-1875282",
"18-1874984",
"18-1874896",
"18-1874959",
"18-1867964",
"18-1874786",
"18-1874685",
"18-1874672",
"18-1874701",
"18-1875958",
"18-1859811",
"18-1874404",
"18-1874268",
"18-1874367",
"18-1873938",
"18-1873936",
"18-1873904",
"18-1873941",
"18-1873723",
"18-1872591",
"20-1920856",
"18-1858865",
"18-1863582",
"20-1920395",
"18-1872599",
"20-1920375",
"18-1868658",
"20-1920405",
"20-1919946",
"20-1919943",
"18-1868428",
"17-1848726",
"18-1870990",
"18-1870882",
"20-1918626",
"18-1870557",
"18-1870560",
"18-1870438",
"18-1870551",
"17-1850659",
"18-1870082",
"18-1869622",
"18-1869360",
"17-1844163",
"16-0814467",
"18-1869234",
"18-1869261",
"18-1869169",
"18-1868862",
"18-1869061",
"18-1856545",
"18-1867875",
"18-1868931",
"18-1868976",
"18-1869044",
"18-1863804",
"18-1869048",
"17-1848142",
"18-1868771",
"18-1868810",
"18-1868859",
"18-1868717",
"18-1868621",
"16-0821553",
"18-1868157",
"18-1867666",
"18-1867547",
"20-1919744",
"20-1919751",
"17-0827203",
"18-1867119",
"18-1867135",
"18-1861652",
"18-1867139",
"18-1867360",
"18-1867096",
"18-1867028",
"18-1866846",
"18-1859398",
"18-1859782",
"18-1866879",
"18-1858859",
"18-1860680",
"18-1866781",
"18-1866690",
"17-1846559",
"14-0749345",
"18-1866470",
"18-1866507",
"18-1866033",
"17-1847948",
"18-1865703",
"18-1865747",
"18-1865677",
"18-1864798",
"18-1864169",
"18-1864008",
"18-1863811",
"17-1848182",
"17-1842737",
"17-1850246",
"18-1863865",
"18-1863888",
"18-1863766",
"17-1842744",
"18-1863793",
"17-0832847",
"18-1863550",
"18-1863596",
"18-1863446",
"17-1851338",
"18-1863262",
"18-1863361",
"18-1863362",
"18-1861630",
"17-1851269",
"18-1858565",
"18-1861334",
"18-1860854",
"18-1860690",
"18-1860338",
"18-1860053",
"18-1859761",
"18-1859133",
"18-1858414",
"18-1858258",
"18-1857821",
"17-1848026",
"18-1856912",
"17-1853857",
"16-0821475",
"18-1856963",
"18-1856261",
"18-1856628",
"18-1856590",
"18-1856250",
"18-1856077",
"18-1855818",
"18-1855689",
"18-1855514",
"17-1854178",
"17-1854006",
"17-1854168",
"17-1854175",
"17-1853162",
"17-1852073",
"17-1852014",
"17-1851855",
"17-1851617",
"16-0797867",
"17-1851618",
"17-1851452",
"17-1851527",
"17-1847378",
"17-1849685",
"17-1849635",
"17-1851169",
"17-1848676",
"17-1844898",
"17-1848725",
"17-1848619",
"16-0817419",
"17-1848446",
"17-1848049",
"17-1847692",
"17-1847503",
"17-1847549",
"17-1846950",
"17-1846186",
"17-1846095",
"17-1845156",
"17-1844402",
"17-1843510",
"17-1843141",
"17-1842643",
"17-1839370",
"17-1839379",
"17-1838858",
"17-1837006",
"17-1836339",
"17-1834992",
"17-1833516",
"17-0830473",
"17-0831750",
"17-0829758",
"16-0821879",
"16-0815289",
"16-0809706",
"13-0732424",
"19-1905445",
"19-1894716",
"18-1876238",
"20-1924123",
"19-1916193",
"19-1912728",
"19-1908917",
"19-1916308",
"20-1930003",
"20-1929659",
"20-1929165",
"20-1928398",
"20-1928428",
"20-1928273",
"20-1927981",
"20-1927830",
"20-1927658",
"20-1926860",
"20-1926859",
"20-1926722",
"20-1926678",
"20-1926552",
"20-1926528",
"20-1926548",
"20-1926390",
"20-1926124",
"20-1926072",
"20-1925733",
"20-1925565",
"20-1925489",
"20-1925310",
"20-1924873",
"20-1924475",
"20-1923820",
"20-1923255",
"20-1923155",
"20-1922705",
"20-1922299",
"20-1922230",
"20-1922000",
"20-1921276",
"20-1921147",
"20-1920992",
"20-1920667",
"20-1920587",
"20-1919801",
"20-1918697",
"20-1918586",
"20-1918518",
"20-1918325",
"20-1918321",
"20-1918196",
"20-1918194",
"19-1917314",
"19-1917203",
"19-1917130",
"19-1917062",
"19-1916992",
"19-1916902",
"19-1916203",
"19-1915782",
"19-1915594",
"19-1914857",
"19-1914677",
"19-1914308",
"19-1913747",
"19-1913490",
"19-1913291",
"19-1913248",
"19-1913251",
"19-1912777",
"19-1912452",
"19-1912057",
"19-1911919",
"19-1911188",
"19-1910996",
"19-1910781",
"19-1910761",
"19-1910420",
"19-1910131",
"19-1909739",
"19-1909141",
"19-1909183",
"19-1908979",
"19-1908709",
"19-1908416",
"19-1908138",
"19-1907896",
"19-1907784",
"19-1907680",
"19-1907392",
"19-1907209",
"19-1906623",
"19-1906653",
"19-1906547",
"19-1906179",
"19-1906039",
"19-1905772",
"19-1905576",
"19-1905482",
"19-1904800",
"19-1904686",
"19-1904434",
"19-1904204",
"19-1903678",
"19-1903661",
"19-1903000",
"19-1902747",
"19-1902661",
"19-1902664",
"19-1902534",
"19-1902161",
"19-1902262",
"19-1901648",
"19-1901633",
"19-1901232",
"19-1900467",
"19-1899024",
"19-1891440",
"20-1930845",
"20-1930806",
"20-1930360",
"20-1930046",
"20-1929644",
"20-1929602",
"20-1929222",
"20-1929099",
"20-1929069",
"20-1928389",
"20-1927786",
"20-1927410",
"20-1927217",
"20-1925124",
"20-1925144",
"20-1924598",
"20-1920120",
"20-1919430",
"20-1917888",
"19-1917654",
"19-1916741",
"19-1914937",
"19-1912473",
"19-1906614",
"19-1906043",
"19-1905578",
"19-1903940",
"19-1902302",
"19-1896590",
"19-1890901",
"19-1890132",
"18-1870362",
"18-1862763",
"16-0803163",
"20-1930664",
"20-1930481",
"20-1929231",
"20-1928269",
"20-1928256",
"20-1928136",
"20-1927866",
"20-1927425",
"20-1927453",
"20-1927121",
"20-1927246",
"20-1926423",
"20-1925870",
"20-1926150",
"20-1925839",
"20-1925850",
"20-1925784",
"20-1925571",
"20-1925513",
"20-1925222",
"20-1925246",
"20-1924955",
"20-1924960",
"20-1924976",
"20-1924958",
"20-1924953",
"20-1924650",
"20-1921735",
"20-1921374",
"20-1918841",
"19-1916896",
"19-1915981",
"19-1914474",
"19-1912983",
"19-1912437",
"19-1906008",
"19-1907323",
"19-1907430",
"19-1903196",
"19-1900615",
"19-1900355",
"18-1877368",
"20-1929226",
"20-1928918",
"20-1928927",
"20-1928771",
"20-1928576",
"20-1928724",
"20-1928456",
"20-1928142",
"20-1928154",
"20-1927955",
"20-1927720",
"20-1927594",
"20-1927578",
"20-1927362",
"20-1927327",
"20-1927113",
"20-1927020",
"20-1926983",
"20-1926473",
"20-1925202",
"20-1925223",
"20-1924954",
"20-1924881",
"20-1923973",
"20-1923960",
"20-1923720",
"20-1923619",
"20-1923058",
"20-1921812",
"20-1921158",
"20-1920634",
"20-1920477",
"20-1920382",
"20-1919702",
"20-1919203",
"20-1918826",
"20-1918694",
"20-1918598",
"20-1918495",
"19-1917214",
"19-1917076",
"19-1916794",
"19-1915728",
"19-1915623",
"19-1912744",
"19-1911977",
"19-1911720",
"19-1911535",
"19-1911278",
"19-1911147",
"19-1911022",
"19-1910997",
"19-1910354",
"19-1910244",
"19-1909947",
"19-1909808",
"19-1909653",
"19-1909260",
"19-1909315",
"19-1908935",
"19-1908560",
"19-1908382",
"19-1908194",
"19-1908073",
"19-1907980",
"19-1907880",
"19-1907478",
"19-1907454",
"19-1907295",
"19-1906898",
"19-1906767",
"19-1906669",
"19-1906308",
"19-1906207",
"19-1906114",
"19-1906133",
"19-1905487",
"19-1905270",
"19-1904852",
"19-1904874",
"19-1904546",
"19-1904422",
"19-1904443",
"19-1904230",
"19-1904233",
"19-1903955",
"19-1904015",
"19-1903905",
"19-1903751",
"19-1903281",
"19-1903283",
"19-1902108",
"19-1901309",
"19-1901119",
"19-1899989",
"19-1899760",
"19-1898196",
"19-1896547",
"18-1882377",
"20-1918854",
"20-1918677",
"18-1876680",
"19-1894075",
"19-1917429",
"19-1907794",
"20-1931458",
"20-1930028",
"20-1931218",
"20-1931243",
"20-1930933",
"20-1930953",
"20-1930869",
"20-1930740",
"20-1929868",
"20-1929785",
"20-1929567",
"20-1929170",
"20-1929194",
"20-1929198",
"20-1929076",
"20-1928884",
"20-1928938",
"20-1928982",
"20-1928988",
"20-1928785",
"20-1928660",
"20-1928518",
"20-1928439",
"20-1928442",
"20-1928192",
"20-1928009",
"20-1927767",
"20-1927772",
"20-1927587",
"20-1927559",
"20-1927275",
"20-1927295",
"20-1927298",
"20-1927163",
"20-1927050",
"20-1927080",
"20-1927101",
"20-1926912",
"20-1926928",
"20-1926834",
"20-1926685",
"20-1926770",
"20-1926611",
"20-1926432",
"20-1926259",
"20-1926346",
"20-1926214",
"20-1926037",
"20-1925967",
"20-1925822",
"20-1925816",
"20-1925808",
"20-1925659",
"20-1925632",
"20-1925634",
"20-1925553",
"20-1925564",
"20-1925541",
"20-1925463",
"20-1925467",
"20-1925491",
"20-1925424",
"20-1925218",
"20-1925063",
"20-1924911",
"20-1924844",
"20-1924729",
"20-1924458",
"20-1924004",
"20-1923482",
"20-1923379",
"20-1923149",
"20-1922513",
"20-1922497",
"20-1922156",
"20-1922062",
"20-1921986",
"20-1921778",
"20-1921311",
"20-1921289",
"20-1921159",
"20-1920799",
"20-1920802",
"20-1920276",
"20-1919846",
"20-1919531",
"20-1919179",
"20-1919236",
"20-1918789",
"20-1918884",
"20-1918744",
"20-1918822",
"19-1917716",
"19-1917665",
"19-1917599",
"19-1917647",
"19-1917251",
"19-1917176",
"19-1917072",
"19-1916349",
"19-1916279",
"19-1915908",
"19-1915725",
"19-1915649",
"19-1915477",
"19-1915386",
"19-1915273",
"19-1914929",
"19-1914607",
"19-1914599",
"19-1914615",
"19-1914536",
"19-1914245",
"19-1914281",
"19-1914219",
"19-1914183",
"19-1913704",
"19-1913757",
"19-1913593",
"19-1913135",
"19-1913117",
"19-1913046",
"19-1913077",
"19-1912733",
"19-1913034",
"19-1912634",
"19-1911989",
"19-1911856",
"19-1911748",
"19-1911620",
"19-1910789",
"19-1910841",
"19-1910416",
"19-1910164",
"19-1910159",
"19-1910256",
"19-1910178",
"19-1909913",
"19-1909849",
"19-1909833",
"19-1909601",
"19-1909629",
"19-1909326",
"19-1909008",
"19-1909063",
"19-1909015",
"19-1908912",
"19-1908830",
"19-1908861",
"19-1908763",
"19-1908825",
"19-1908719",
"19-1908617",
"19-1908565",
"19-1908422",
"19-1908305",
"19-1908220",
"19-1908169",
"19-1908081",
"19-1907333",
"19-1907286",
"19-1906940",
"19-1906847",
"19-1906554",
"19-1906206",
"19-1906072",
"19-1905982",
"19-1905983",
"19-1905549",
"19-1905614",
"19-1905450",
"19-1905486",
"19-1904964",
"19-1904520",
"19-1904295",
"19-1904222",
"19-1904218",
"19-1904102",
"19-1904062",
"19-1904012",
"19-1903771",
"19-1901349",
"19-1898432",
"19-1897882",
"15-0793059",
"20-1924605",
"19-1899729",
"19-1888366",
"20-1925385",
"20-1924447",
"20-1923936",
"20-1923749",
"20-1923505",
"20-1922994",
"20-1922901",
"20-1921837",
"20-1921798",
"20-1921713",
"20-1921131",
"20-1918942",
"20-1918162",
"19-1916163",
"19-1914072",
"19-1912484",
"19-1912247",
"20-1929292",
"20-1927150",
"20-1925915",
"20-1925932",
"20-1925627",
"20-1925574",
"20-1925413",
"20-1925384",
"20-1924260",
"20-1923271",
"20-1923311",
"20-1923313",
"20-1922803",
"20-1920239",
"20-1919818",
"20-1919689",
"20-1919142",
"20-1919239",
"20-1918866",
"20-1918520",
"19-1917598",
"19-1917159",
"19-1916862",
"19-1914778",
"19-1914478",
"19-1913013",
"19-1913027",
"19-1912492",
"19-1912502",
"19-1912480",
"19-1912320",
"19-1912195",
"19-1912191",
"19-1911402",
"19-1911422",
"19-1911280",
"19-1910306",
"19-1909821",
"19-1908014",
"19-1907343",
"19-1907386",
"19-1907293",
"19-1907147",
"19-1907162",
"19-1906425",
"19-1906024",
"19-1906224",
"19-1906231",
"19-1905260",
"19-1904591",
"19-1904057",
"19-1903747",
"19-1903550",
"19-1902797",
"19-1896692",
"19-1892286",
"19-1889443",
"20-1925604",
"20-1927839",
"19-1902222",
"19-1901708",
"19-1897474",
"19-1895603",
"20-1925172",
"20-1925106",
"20-1925110",
"20-1925037",
"20-1925075",
"20-1924799",
"20-1923742",
"20-1923486",
"20-1922232",
"20-1921755",
"20-1921881",
"20-1920321",
"20-1920183",
"20-1920006",
"20-1919668",
"20-1918847",
"20-1918828",
"20-1918818",
"20-1918503",
"20-1918199",
"20-1918246",
"20-1918066",
"20-1917963",
"20-1917768",
"20-1917811",
"19-1917592",
"19-1917663",
"19-1917359",
"19-1917350",
"19-1917199",
"19-1917034",
"19-1916499",
"19-1916253",
"19-1916076",
"19-1916143",
"19-1915977",
"19-1915688",
"19-1915601",
"19-1915475",
"19-1915355",
"19-1915218",
"19-1915141",
"19-1914817",
"19-1914355",
"19-1913947",
"19-1913999",
"19-1912503",
"19-1911960",
"19-1911877",
"19-1911701",
"19-1910589",
"19-1910607",
"19-1910423",
"19-1910345",
"19-1910257",
"19-1910044",
"19-1910123",
"19-1909958",
"19-1909550",
"19-1909076",
"19-1908963",
"19-1908795",
"19-1908668",
"19-1908594",
"19-1907495",
"19-1906901",
"19-1906911",
"19-1906763",
"19-1906659",
"19-1906518",
"19-1906445",
"19-1905817",
"19-1905671",
"19-1905307",
"19-1905264",
"19-1903520",
"19-1903234",
"19-1903239",
"19-1902567",
"19-1902228",
"19-1898905",
"19-1895145",]
# List of previously reported cases for the 'substantial activity' question.
ReportedFY21= [ "All Cases Reported FY21",
"16-0816754",
"16-0818526",
"16-0818523",
"17-0825952",
"17-1851572",
"21-1947703",
"21-1947706",
"20-1943648",
"20-1943642",
"20-1943703",
"20-1943706",
"18-1865143",
"18-1866161",
"18-1867558",
"18-1869440",
"21-1960622",
"20-1937186",
"20-1937152",
"20-1937192",
"20-1935000",
"20-1935003",
"20-1935004",
"18-1884083",
"18-1884203",
"18-1884475",
"18-1886395",
"19-1886955",
"19-1886999",
"19-1888901",
"19-1889757",
"19-1893687",
"19-1896432",
"19-1897279",
"19-1898450",
"19-1899649",
"19-1901304",
"20-1924398",
"19-1901698",
"19-1901748",
"19-1902554",
"19-1903585",
"19-1903655",
"19-1907443",
"19-1905022",
"20-1935381",
"19-1905876",
"19-1905882",
"19-1905897",
"20-1936056",
"19-1908953",
"19-1909073",
"19-1909635",
"20-1935971",
"19-1910101",
"19-1910752",
"20-1917829",
"19-1912141",
"19-1911835",
"21-1960601",
"21-1960180",
"21-1960176",
"21-1960921",
"19-1912327",
"20-1935569",
"20-1935935",
"19-1913674",
"19-1912761",
"19-1912720",
"19-1914771",
"19-1913522",
"19-1913523",
"19-1913823",
"20-1935960",
"20-1934791",
"19-1914576",
"19-1914956",
"20-1936383",
"20-1936377",
"20-1936370",
"21-1947340",
"20-1935998",
"20-1934847",
"19-1916803",
"19-1916483",
"19-1916900",
"21-1958692",
"21-1958703",
"19-1917363",
"20-1932079",
"20-1918251",
"20-1935236",
"20-1935230",
"20-1935220",
"20-1936007",
"20-1920751",
"20-1920885",
"20-1934917",
"20-1934912",
"20-1934906",
"20-1921015",
"20-1921728",
"20-1921088",
"20-1921449",
"20-1945412",
"20-1921616",
"20-1922622",
"20-1921550",
"20-1922604",
"20-1935921",
"20-1922553",
"20-1929560",
"20-1920536",
"20-1920019",
"20-1923176",
"20-1923434",
"20-1924120",
"20-1924069",
"20-1921248",
"20-1934851",
"20-1934853",
"20-1935910",
"20-1920401",
"20-1925803",
"20-1925886",
"21-1960613",
"20-1925956",
"20-1925963",
"20-1935024",
"20-1927040",
"21-1950940",
"21-1951036",
"21-1951038",
"20-1927548",
"20-1927901",
"20-1927899",
"20-1945472",
"20-1935194",
"20-1928748",
"20-1930112",
"20-1929559",
"20-1929948",
"20-1930102",
"20-1930185",
"20-1930223",
"20-1930704",
"20-1928931",
"20-1930729",
"20-1930911",
"20-1930237",
"20-1931340",
"18-1874908",
"21-1959712",
"20-1931561",
"20-1917845",
"20-1917836",
"20-1918562",
"18-1876075",
"18-1877645",
"18-1877810",
"18-1883629",
"18-1866888",
"19-1888053",
"19-1888766",
"19-1890593",
"19-1893195",
"19-1897128",
"19-1897933",
"19-1905962",
"19-1907192",
"19-1911425",
"19-1913257",
"19-1913385",
"19-1914546",
"19-1915324",
"20-1918189",
"20-1918331",
"20-1918438",
"20-1921062",
"20-1921365",
"20-1921607",
"20-1922944",
"20-1923079",
"20-1923269",
"20-1923250",
"20-1923906",
"20-1924070",
"20-1924244",
"20-1924591",
"20-1924773",
"20-1925108",
"20-1925308",
"20-1925309",
"20-1925488",
"20-1925495",
"20-1925524",
"20-1925582",
"20-1925700",
"20-1925725",
"20-1925807",
"20-1925950",
"20-1926132",
"20-1926206",
"20-1926178",
"20-1926209",
"20-1926260",
"20-1926275",
"20-1926280",
"20-1926328",
"20-1926416",
"20-1926398",
"20-1926340",
"20-1926401",
"20-1926493",
"20-1926456",
"20-1926514",
"20-1926516",
"20-1926531",
"20-1926624",
"20-1926786",
"20-1926777",
"20-1926763",
"20-1926717",
"20-1926718",
"20-1926869",
"20-1926981",
"20-1927072",
"20-1927159",
"20-1927256",
"20-1927360",
"20-1927596",
"20-1927615",
"20-1927646",
"20-1927759",
"20-1927744",
"20-1927852",
"20-1927969",
"20-1927961",
"20-1927952",
"20-1928069",
"20-1928063",
"20-1928213",
"20-1928223",
"20-1928317",
"20-1929044",
"20-1929140",
"20-1929334",
"20-1929516",
"20-1929443",
"20-1929485",
"20-1929520",
"20-1929575",
"20-1929578",
"20-1929617",
"20-1929743",
"20-1929912",
"20-1929899",
"20-1929900",
"20-1929955",
"20-1930088",
"20-1930313",
"20-1930434",
"20-1930654",
"20-1930726",
"20-1931607",
"20-1930913",
"20-1931136",
"20-1931331",
"20-1931283",
"20-1931633",
"20-1931629",
"20-1931841",
"20-1931705",
"20-1931684",
"19-1901965",
"20-1930326",
"19-1897934",
"17-0825325",
"20-1931806",
"20-1931805",
"18-1859037",
"18-1886147",
"20-1929591",
"18-1873144",
"20-1931872",
"20-1931928",
"18-1856851",
"19-1890744",
"20-1918979",
"19-1899945",
"19-1899940",
"20-1932029",
"20-1932031",
"20-1932026",
"19-1916175",
"20-1932134",
"20-1932138",
"20-1932122",
"20-1939680",
"20-1939688",
"20-1932123",
"19-1901807",
"20-1932137",
"20-1932183",
"20-1932326",
"19-1893134",
"20-1932241",
"20-1932304",
"20-1932346",
"18-1882084",
"19-1895362",
"20-1929194",
"20-1932422",
"20-1932420",
"20-1926126",
"20-1933048",
"20-1920182",
"20-1924912",
"18-1867964",
"20-1919005",
"20-1932578",
"20-1932589",
"20-1930740",
"20-1927767",
"20-1921778",
"18-1885542",
"19-1889988",
"18-1883579",
"20-1926910",
"20-1932684",
"19-1903405",
"19-1903403",
"18-1871049",
"19-1909242",
| |
<gh_stars>10-100
"""
The MIT License (MIT)
Copyright (c) 2020-Current Skelmis
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from typing import Set, Dict, Any, Union
import attr
@attr.s(slots=True, eq=True, kw_only=True)
class Options:
"""Options for the AntiSpamHandler, see :py:class:`antispam.AntiSpamHandler` for usage.
Parameters
----------
warn_threshold : int
Default: ``3``
This is the amount of duplicates within ``message_interval`` that will result in a warning.
kick_threshold : int
Default: ``2``
This is the amount of warns required before a kick is the next punishment.
I.e. After 2 warns, you will get kicked.
ban_threshold : int
Default: ``2``
This is the amount of kicks required before a ban is the next punishment.
I.e. After 2 kicks, you will get banned.
message_interval : int
Default: ``30000ms (30 seconds)``
Amount of time a message is kept before being discarded.
Essentially the amount of time (In milliseconds) a message can count towards spam.
message_duplicate_count : int
Default: ``5``
The amount of duplicate messages needed within message_interval to trigger a punishment.
I.e. Once you've sent 5 'spam' messages you'll get punished.
message_duplicate_accuracy : int
Default: ``90``
How ‘close’ messages need to be to be registered as duplicates (Out of 100)
You can test this with the code:
.. highlight:: python
.. code-block:: python
from fuzzywuzzy import fuzz
fuzz.token_sort_ratio("message one", "message two")
guild_log_warn_message : Union[str, dict]
Default: ``$MEMBERNAME was warned for spamming/sending duplicate messages.``
The message to be sent in the guild when someone is warned.
Please see the note at the bottom.
guild_log_kick_message : Union[str, dict]
Default: ``$MEMBERNAME was kicked for spamming/sending duplicate messages.``
The message to be sent in the guild when someone is kicked.
Please see the note at the bottom.
guild_log_ban_message : Union[str, dict]
Default: ``$MEMBERNAME was banned for spamming/sending duplicate messages.``
The message to be sent in the guild when someone is banned.
Please see the note at the bottom.
member_warn_message : Union[str, dict]
Default: ``Hey $MENTIONMEMBER, please stop spamming/sending duplicate messages.``
The message to be sent in the guild when a member is warned.
member_kick_message : Union[str, dict]
Default: ``Hey $MENTIONMEMBER, you are being kicked from $GUILDNAME for spamming/sending duplicate messages.``
The message to be sent to the member who is being kicked.
member_ban_message : Union[str, dict]
Default: ``Hey $MENTIONMEMBER, you are being banned from $GUILDNAME for spamming/sending duplicate messages.``
The message to be sent to the member who is being banned.
guild_log_warn_message_delete_after : int
Default: ``None``
How many seconds after sending the guild warn message to delete it.
guild_log_kick_message_delete_after : int
Default: ``None``
How many seconds after sending the guild kick message to delete it.
guild_log_ban_message_delete_after : int
Default: ``None``
How many seconds after sending the guild ban message to delete it.
member_warn_message_delete_after : int
Default: ``None``
How many seconds after sending the member warn message to delete it.
member_kick_message_delete_after : int
Default: ``None``
How many seconds after sending the member kick message to delete it.
member_ban_message_delete_after : int
Default: ``None``
How many seconds after sending the member ban message to delete it.
ignored_members : Set[int]
Default: ``Empty Set``
A Set of members to ignore messages from.
Set this with :py:meth:`antispam.AntiSpamHandler.add_ignored_item`
Remove members with :py:meth:`antispam.AntiSpamHandler.remove_ignored_item`
ignored_channels : Set[int]
Default: ``Empty Set``
A Set of channels to ignore messages in.
Set this with :py:meth:`antispam.AntiSpamHandler.add_ignored_item`
Remove channels with :py:meth:`antispam.AntiSpamHandler.remove_ignored_item`
ignored_roles : Set[int]
Default: ``Empty Set``
A Set of roles to ignore messages from.
Set this with :py:meth:`antispam.AntiSpamHandler.add_ignored_item`
Remove roles with :py:meth:`antispam.AntiSpamHandler.remove_ignored_item`
ignored_guilds : Set[int]
Default: ``Empty Set``
A Set of guilds to ignore messages in.
Set this with :py:meth:`antispam.AntiSpamHandler.add_ignored_item`
Remove guilds with :py:meth:`antispam.AntiSpamHandler.remove_ignored_item`
delete_spam : bool
Default: ``False``
Whether or not to delete messages marked as spam
Won’t delete messages if ``no_punish`` is ``True``
Note, this method is expensive.
It will delete all messages marked as spam, and this means an api call per message.
ignore_bots : bool
Default: ``True``
Should bots bypass anti-spam?
warn_only : bool
Default: ``False``
Whether or not to only warn users, this means it will not kick or ban them.
no_punish : bool
Default: ``False``
Don’t punish anyone, simply return whether or not they should be punished within ``propagate``.
This essentially lets the end user handle punishments themselves.
To check if someone should be punished, use the returned value from the propagate method.
If should_be_punished_this_message is True then this package believes they should be punished.
Otherwise just ignore that message since it shouldn’t be punished.
Use :py:class:`antispam.plugins.AntiSpamTracker` with this mode for best effect.
mention_on_embed : bool
Default: ``True``
If the message you're trying to send is an embed, also send some content to mention the person being punished.
delete_zero_width_chars : bool
Default: ``True``
Should zero width characters be removed from messages. Useful as otherwise it helps
people bypass antispam measures.
per_channel_spam : bool
Default: ``False``
Track spam as per channel, rather than per guild.
I.e. False implies spam is tracked as ``Per Member Per Guild``
True implies ``Per Member Per Channel``
addons : Dict
Default: ``Empty Dict``
Use-able storage for plugins to store Options
Notes
-----
Guild log messages will **only** send if :py:attr:`antispam.dataclasses.guild.Guild.log_channel_id` is set.
You can set it with :py:meth:`antispam.AntiSpamHandler.add_guild_log_channel`
"""
# Ints
warn_threshold: int = attr.ib(default=3, validator=attr.validators.instance_of(int))
kick_threshold: int = attr.ib(default=2, validator=attr.validators.instance_of(int))
ban_threshold: int = attr.ib(default=2, validator=attr.validators.instance_of(int))
message_interval: int = attr.ib(
default=30000, validator=attr.validators.instance_of(int)
)
message_duplicate_count: int = attr.ib(
default=5, validator=attr.validators.instance_of(int)
)
message_duplicate_accuracy: int = attr.ib(
default=90, validator=attr.validators.instance_of(int)
)
# Strings
guild_log_warn_message: Union[str, dict] = attr.ib(
default="$MEMBERNAME was warned for spamming/sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
guild_log_kick_message: Union[str, dict] = attr.ib(
default="$MEMBERNAME was kicked for spamming/sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
guild_log_ban_message: Union[str, dict] = attr.ib(
default="$MEMBERNAME was banned for spamming/sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
member_warn_message: Union[str, dict] = attr.ib(
default="Hey $MENTIONMEMBER, please stop spamming/sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
member_kick_message: Union[str, dict] = attr.ib(
default="Hey $MENTIONMEMBER, you are being kicked from $GUILDNAME for spamming/"
"sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
member_ban_message: Union[str, dict] = attr.ib(
default="Hey $MENTIONMEMBER, you are being banned from $GUILDNAME for spamming/"
"sending duplicate messages.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
member_failed_kick_message: Union[str, dict] = attr.ib(
default="I failed to punish you because I lack permissions, but still you shouldn't spam.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
member_failed_ban_message: Union[str, dict] = attr.ib(
default="I failed to punish you because I lack permissions, but still you shouldn't spam.",
kw_only=True,
validator=attr.validators.instance_of((str, dict)),
)
# delete_after
guild_log_ban_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
guild_log_kick_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
member_ban_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
guild_log_warn_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
member_kick_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
member_warn_message_delete_after: int = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(int)),
)
# Sets
ignored_members: Set[int] = attr.ib(
default=attr.Factory(set),
validator=attr.validators.instance_of(set),
converter=set,
)
ignored_channels: Set[int] = attr.ib(
default=attr.Factory(set),
validator=attr.validators.instance_of(set),
converter=set,
)
ignored_roles: Set[int] = attr.ib(
default=attr.Factory(set),
validator=attr.validators.instance_of(set),
converter=set,
)
ignored_guilds: Set[int] = attr.ib(
default=attr.Factory(set),
validator=attr.validators.instance_of(set),
converter=set,
)
# Booleans
delete_spam: bool = attr.ib(
default=False, validator=attr.validators.instance_of(bool)
)
ignore_bots: bool = attr.ib(
default=True, validator=attr.validators.instance_of(bool)
)
warn_only: bool = attr.ib(
default=False, validator=attr.validators.instance_of(bool)
)
no_punish: bool = attr.ib(
default=False, validator=attr.validators.instance_of(bool)
)
mention_on_embed: bool = attr.ib(
default=True, validator=attr.validators.instance_of(bool)
)
delete_zero_width_chars: bool = attr.ib(
default=True, validator=attr.validators.instance_of(bool)
| |
<reponame>IBM/transition-amr-parser
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from packaging import version
import torch
import torch.nn as nn
import torch.nn.functional as F
from fairseq import options, utils
from fairseq.models import (
FairseqEncoder,
FairseqIncrementalDecoder,
FairseqEncoderDecoderModel,
register_model,
register_model_architecture,
)
from fairseq.modules import (
AdaptiveSoftmax,
LayerNorm,
PositionalEmbedding,
SinusoidalPositionalEmbedding,
# TransformerDecoderLayer,
# TransformerEncoderLayer,
)
from torch_scatter import scatter_mean
from ..modules.transformer_layer import TransformerEncoderLayer, TransformerDecoderLayer
from .attention_masks import get_cross_attention_mask, get_cross_attention_mask_heads
from .graph_attention_masks import get_graph_self_attn_mask
DEFAULT_MAX_SOURCE_POSITIONS = 1024
DEFAULT_MAX_TARGET_POSITIONS = 1024
@register_model('transformer_tgt_pointer_graph')
class TransformerTgtPointerGraphModel(FairseqEncoderDecoderModel):
"""
Transformer model from `"Attention Is All You Need" (Vaswani, et al, 2017)
<https://arxiv.org/abs/1706.03762>`_.
Args:
encoder (TransformerEncoder): the encoder
decoder (TransformerDecoder): the decoder
The Transformer model provides the following named architectures and
command-line arguments:
.. argparse::
:ref: fairseq.models.transformer_parser
:prog:
"""
@classmethod
def hub_models(cls):
# fmt: off
return {
'transformer.wmt14.en-fr': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt14.en-fr.joined-dict.transformer.tar.bz2',
'transformer.wmt16.en-de': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt16.en-de.joined-dict.transformer.tar.bz2',
'transformer.wmt18.en-de': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt18.en-de.ensemble.tar.gz',
'transformer.wmt19.en-de': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.joined-dict.ensemble.tar.gz',
'transformer.wmt19.en-ru': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.ensemble.tar.gz',
'transformer.wmt19.de-en': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.joined-dict.ensemble.tar.gz',
'transformer.wmt19.ru-en': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.ensemble.tar.gz',
'transformer.wmt19.en-de.single_model': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-de.joined-dict.single_model.tar.gz',
'transformer.wmt19.en-ru.single_model': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.en-ru.single_model.tar.gz',
'transformer.wmt19.de-en.single_model': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.de-en.joined-dict.single_model.tar.gz',
'transformer.wmt19.ru-en.single_model': 'https://dl.fbaipublicfiles.com/fairseq/models/wmt19.ru-en.single_model.tar.gz',
}
# fmt: on
    def __init__(self, encoder, decoder):
        """Build the encoder-decoder model.

        Args:
            encoder (TransformerEncoder): the encoder
            decoder (TransformerDecoder): the decoder
        """
        # Pure delegation: the FairseqEncoderDecoderModel base wires the
        # encoder/decoder pair together; no extra state is kept here.
        super().__init__(encoder, decoder)
@staticmethod
def add_args(parser):
"""Add model-specific arguments to the parser."""
# fmt: off
parser.add_argument('--activation-fn',
choices=utils.get_available_activation_fns(),
help='activation function to use')
parser.add_argument('--dropout', type=float, metavar='D',
help='dropout probability')
parser.add_argument('--attention-dropout', type=float, metavar='D',
help='dropout probability for attention weights')
parser.add_argument('--activation-dropout', '--relu-dropout', type=float, metavar='D',
help='dropout probability after activation in FFN.')
parser.add_argument('--encoder-embed-path', type=str, metavar='STR',
help='path to pre-trained encoder embedding')
parser.add_argument('--encoder-embed-dim', type=int, metavar='N',
help='encoder embedding dimension')
parser.add_argument('--encoder-ffn-embed-dim', type=int, metavar='N',
help='encoder embedding dimension for FFN')
parser.add_argument('--encoder-layers', type=int, metavar='N',
help='num encoder layers')
parser.add_argument('--encoder-attention-heads', type=int, metavar='N',
help='num encoder attention heads')
parser.add_argument('--encoder-normalize-before', action='store_true',
help='apply layernorm before each encoder block')
parser.add_argument('--encoder-learned-pos', action='store_true',
help='use learned positional embeddings in the encoder')
parser.add_argument('--decoder-embed-path', type=str, metavar='STR',
help='path to pre-trained decoder embedding')
parser.add_argument('--decoder-embed-dim', type=int, metavar='N',
help='decoder embedding dimension')
parser.add_argument('--decoder-ffn-embed-dim', type=int, metavar='N',
help='decoder embedding dimension for FFN')
parser.add_argument('--decoder-layers', type=int, metavar='N',
help='num decoder layers')
parser.add_argument('--decoder-attention-heads', type=int, metavar='N',
help='num decoder attention heads')
parser.add_argument('--decoder-learned-pos', action='store_true',
help='use learned positional embeddings in the decoder')
parser.add_argument('--decoder-normalize-before', action='store_true',
help='apply layernorm before each decoder block')
parser.add_argument('--share-decoder-input-output-embed', type=int, default=0,
help='share decoder input and output embeddings')
parser.add_argument('--share-all-embeddings', action='store_true',
help='share encoder, decoder and output embeddings'
' (requires shared dictionary and embed dim)')
parser.add_argument('--no-token-positional-embeddings', default=False, action='store_true',
help='if set, disables positional embeddings (outside self attention)')
parser.add_argument('--adaptive-softmax-cutoff', metavar='EXPR',
help='comma separated list of adaptive softmax cutoff points. '
'Must be used with adaptive_loss criterion'),
parser.add_argument('--adaptive-softmax-dropout', type=float, metavar='D',
help='sets adaptive softmax dropout for the tail projections')
# Use stack transformer
parser.add_argument('--encode-state-machine', type=bool,
help='controls encoding of stack and buffer')
# control BERT backprop
parser.add_argument('--bert-backprop', action='store_true',
help='Backpropagate through BERT', default=False)
parser.add_argument('--no-bert-precompute', action='store_true',
help='Compute BERT on the fly (debugging)',
default=False)
parser.add_argument('--pretrained-embed-dim', type=int,
help='Pretrained embeddings size',
default=768)
# additional
# NOTE do not set default values here; if set, make sure they are consistent with the arch registry
# since when loading model (e.g. saved pre some argument additions), the default values will be used first
# then the arch registry
parser.add_argument('--apply-tgt-vocab-masks', type=int,
help='whether to apply target (actions) vocabulary mask for output')
parser.add_argument('--apply-tgt-actnode-masks', type=int,
help='whether to apply target (actions) node mask for pointer')
parser.add_argument('--apply-tgt-src-align', type=int,
help='whether to apply target source alignment to guide the cross attention')
parser.add_argument('--apply-tgt-input-src', type=int,
help='whether to apply target input to include source token embeddings for better '
'representations of the graph nodes')
# additional: tgt src alignment masks for decoder cross-attention
parser.add_argument('--tgt-src-align-layers', nargs='*', type=int,
help='target source alignment in decoder cross-attention: which layers to use')
parser.add_argument('--tgt-src-align-heads', type=int,
help='target source alignment in decoder cross-attention: how many heads per layer to use')
parser.add_argument('--tgt-src-align-focus', nargs='*', type=str,
help='target source alignment in decoder cross-attention: what to focus per head')
# additional: pointer distribution from decoder self-attentions
parser.add_argument('--pointer-dist-decoder-selfattn-layers', nargs='*', type=int,
help='pointer distribution from decoder self-attention: which layers to use')
parser.add_argument('--pointer-dist-decoder-selfattn-heads', type=int,
help='pointer distribution from decoder self-attention: how many heads per layer to use')
parser.add_argument('--pointer-dist-decoder-selfattn-avg', type=int,
help='pointer distribution from decoder self-attention: whether to use the average '
'self-attention each layer (arithmetic mean); if set to 0, geometric mean is used; '
'if set to -1, no average is used and all the pointer distributions are used to '
'compute the loss')
parser.add_argument('--pointer-dist-decoder-selfattn-infer', type=int,
help='pointer distribution from decoder self-attention: at inference, which layer to use')
# additional: combine source token embeddings into action embeddings for decoder input for node representation
parser.add_argument('--tgt-input-src-emb', type=str, choices=['raw', 'bot', 'top'],
help='target input to include aligned source tokens: where to take the source embeddings; '
'"raw": raw RoBERTa embeddings from the very beginning; '
'"bot": bottom source embeddings before the encoder; '
'"top": top source embeddings after the encoder')
parser.add_argument('--tgt-input-src-backprop', type=int,
help='target input to include aligned source tokens: whether to back prop through the '
'source embeddings')
parser.add_argument('--tgt-input-src-combine', type=str, choices=['cat', 'add'],
help='target input to include aligned source tokens: how to combine the source token '
'embeddings and the action embeddings')
# additional: graph structure encoding into the decoder self-attention
parser.add_argument('--tgt-graph-layers', nargs='*', type=int,
help='target graph structure encoding in decoder self-attention: which layers to use')
parser.add_argument('--tgt-graph-heads', type=int,
help='target graph structure encoding in decoder self-attention: how many heads per layer')
parser.add_argument('--tgt-graph-mask', type=str, choices=['e1c1p1', 'e1c1p0', 'e0c1p1', 'e0c1p0'],
help='target graph structure encoding in decoder self-attention: how to set the graph mask')
# fmt: on
@classmethod
def build_model(cls, args, task):
"""Build a new model instance."""
# make sure all arguments are present in older models
base_architecture(args)
# user specific: make sure all arguments are present in older models during development
transformer_pointer(args)
if not hasattr(args, 'max_source_positions'):
args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS
if not hasattr(args, 'max_target_positions'):
args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS
src_dict, tgt_dict = task.source_dictionary, task.target_dictionary
def build_embedding(dictionary, embed_dim, path=None):
num_embeddings = len(dictionary)
padding_idx = dictionary.pad()
emb = Embedding(num_embeddings, embed_dim, padding_idx)
# if provided, load from preloaded dictionaries
if path:
embed_dict = utils.parse_embedding(path)
utils.load_embedding(embed_dict, dictionary, emb)
return emb
if args.share_all_embeddings:
if src_dict != tgt_dict:
raise ValueError('--share-all-embeddings requires a joined dictionary')
if args.encoder_embed_dim != args.decoder_embed_dim:
raise ValueError(
'--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim')
if args.decoder_embed_path and (
args.decoder_embed_path != args.encoder_embed_path):
raise ValueError('--share-all-embeddings not compatible with --decoder-embed-path')
encoder_embed_tokens = build_embedding(
src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = encoder_embed_tokens
args.share_decoder_input_output_embed = True
else:
encoder_embed_tokens = build_embedding(
src_dict, args.encoder_embed_dim, args.encoder_embed_path
)
decoder_embed_tokens = build_embedding(
tgt_dict, args.decoder_embed_dim, args.decoder_embed_path
)
encoder = cls.build_encoder(args, src_dict, encoder_embed_tokens)
decoder = cls.build_decoder(args, tgt_dict, decoder_embed_tokens)
return cls(encoder, decoder)
@classmethod
def build_encoder(cls, args, src_dict, embed_tokens):
return TransformerEncoder(args, src_dict, embed_tokens)
@classmethod
def build_decoder(cls, args, tgt_dict, embed_tokens):
return TransformerDecoder(args, tgt_dict, embed_tokens)
def forward(self, src_tokens, src_lengths, prev_output_tokens, **kwargs):
"""
Run the forward pass for an encoder-decoder model.
First feed a batch of source tokens through the encoder. Then, feed the
encoder output and previous decoder outputs (i.e., input feeding/teacher
forcing) to the decoder to produce the next outputs::
encoder_out = self.encoder(src_tokens, src_lengths)
return self.decoder(prev_output_tokens, encoder_out)
Args:
src_tokens (LongTensor): tokens in the source language of shape
`(batch, src_len)`
src_lengths (LongTensor): source sentence lengths of shape `(batch)`
prev_output_tokens (LongTensor): previous decoder outputs of shape
`(batch, tgt_len)`, for input feeding/teacher forcing
Returns:
tuple:
- the decoder's output of shape `(batch, tgt_len, vocab)`
- a dictionary with any model-specific outputs
"""
encoder_out = self.encoder(src_tokens, src_lengths=src_lengths, **kwargs)
decoder_out = self.decoder(prev_output_tokens, encoder_out=encoder_out, **kwargs)
return decoder_out
class TransformerEncoder(FairseqEncoder):
"""
Transformer encoder consisting of *args.encoder_layers* layers. Each layer
is a :class:`TransformerEncoderLayer`.
Args:
args (argparse.Namespace): parsed command-line arguments
dictionary (~fairseq.data.Dictionary): encoding dictionary
embed_tokens (torch.nn.Embedding): input embedding
"""
    def __init__(self, args, dictionary, embed_tokens):
        """Build the encoder stack.

        Args:
            args (argparse.Namespace): parsed command-line arguments
            dictionary (~fairseq.data.Dictionary): encoding dictionary
            embed_tokens (torch.nn.Embedding): input embedding
        """
        super().__init__(dictionary)
        # Version buffer travels with the state dict for checkpoint upgrades.
        self.register_buffer('version', torch.Tensor([3]))
        self.dropout = args.dropout
        self.bert_backprop = args.bert_backprop
        self.no_bert_precompute = args.no_bert_precompute
        # backprop needs on the fly extraction
        if self.bert_backprop or self.no_bert_precompute:
            # NOTE(review): downloads RoBERTa via torch.hub and pins it to the
            # default CUDA device — requires network access and a GPU.
            roberta = torch.hub.load('pytorch/fairseq', 'roberta.base')
            roberta.cuda()
            self.roberta = roberta
        # if args.no_bert_precompute:
        #     # Set BERT to purely evaluation mode
        #     self.roberta.eval()
        embed_dim = embed_tokens.embedding_dim
        self.padding_idx = embed_tokens.padding_idx
        self.max_source_positions = args.max_source_positions
        # BERT embeddings as input
        # Projects the pretrained embedding size (default 768) down to the
        # encoder embedding dimension.
        input_embed_dim = args.pretrained_embed_dim
        self.subspace = Linear(input_embed_dim, embed_dim, bias=False)
        self.embed_tokens = embed_tokens
        # Standard Transformer sqrt(d) scaling of token embeddings.
        self.embed_scale = math.sqrt(embed_dim)
        self.embed_positions = PositionalEmbedding(
            args.max_source_positions, embed_dim, self.padding_idx,
            learned=args.encoder_learned_pos,
        ) if not args.no_token_positional_embeddings else None
        self.layers = nn.ModuleList([])
        self.layers.extend([
            TransformerEncoderLayer(args)
            for i in range(args.encoder_layers)
        ])
        # Pre-norm architectures need a final LayerNorm on top of the stack.
        if args.encoder_normalize_before:
            self.layer_norm = LayerNorm(embed_dim)
        else:
            self.layer_norm = None
        # copying the arguments for the separate model in decoding to use
        self.args = args
def forward(self, src_tokens, src_lengths, memory, memory_pos, source_fix_emb, src_wordpieces, src_wp2w, **unused):
"""
Args:
src_tokens (LongTensor): tokens in the | |
people',
'editnotices/page/homeopathy',
'editnotices/page/homs camp',
'editnotices/page/houthi movement',
'editnotices/page/huj, gaza',
'editnotices/page/hulayqat',
'editnotices/page/human rights abuses in jammu and kashmir',
'editnotices/page/human rights abuses in kashmir',
'editnotices/page/human rights in israel',
'editnotices/page/human rights in the state of palestine',
'editnotices/page/human shield',
'editnotices/page/humanitarian aid during the 2006 lebanon war',
'editnotices/page/hummus',
'editnotices/page/hunin',
'editnotices/page/hunter biden',
'editnotices/page/hura',
'editnotices/page/hureiz',
'editnotices/page/husan',
'editnotices/page/husn camp',
'editnotices/page/hussniyya',
'editnotices/page/huwara',
'editnotices/page/huwwara checkpoint',
'editnotices/page/i am jazz',
'editnotices/page/i\'billin',
'editnotices/page/ian lustick',
'editnotices/page/ibdis',
'editnotices/page/ibei hanahal',
'editnotices/page/ibtin',
'editnotices/page/ibziq',
'editnotices/page/idhna',
'editnotices/page/idnibba',
'editnotices/page/ijlil al-qibliyya',
'editnotices/page/ijlil al-shamaliyya',
'editnotices/page/ijnisinya',
'editnotices/page/ijzim',
'editnotices/page/iksal',
'editnotices/page/iktaba',
'editnotices/page/ilhan omar',
'editnotices/page/illar, tulkarm',
'editnotices/page/illegal immigration to the united states',
'editnotices/page/ilut',
'editnotices/page/image and reality of the israel–palestine conflict',
'editnotices/page/immanuel (town)',
'editnotices/page/immatain',
'editnotices/page/immigration policy of donald trump',
'editnotices/page/impeachment inquiry against donald trump',
'editnotices/page/impeachment of donald trump',
'editnotices/page/imreish',
'editnotices/page/indian air force',
'editnotices/page/indian national congress',
'editnotices/page/indo-pakistani war of 1947–1948',
'editnotices/page/indo-pakistani war of 1965',
'editnotices/page/indo-pakistani war of 1971',
'editnotices/page/indo-pakistani wars and conflicts',
'editnotices/page/indur',
'editnotices/page/innaba',
'editnotices/page/insurgency in balochistan',
'editnotices/page/insurgency in jammu and kashmir',
'editnotices/page/insurgency in khyber pakhtunkhwa',
'editnotices/page/intercommunal conflict in mandatory palestine',
'editnotices/page/international crimes tribunal (bangladesh)',
'editnotices/page/international criminal court investigation in palestine',
'editnotices/page/international holocaust remembrance alliance',
'editnotices/page/international law and israeli settlements',
'editnotices/page/international law and the gaza war',
'editnotices/page/international reactions to the 2006 lebanon war',
'editnotices/page/intifada',
'editnotices/page/iqrit',
'editnotices/page/iran–israel proxy conflict',
'editnotices/page/iran–israel relations',
'editnotices/page/iraq',
'editnotices/page/iraq al-manshiyya',
'editnotices/page/iraq burin',
'editnotices/page/iraq suwaydan',
'editnotices/page/iraq–israel relations',
'editnotices/page/ireland–israel relations',
'editnotices/page/ireland–palestine relations',
'editnotices/page/irgun',
'editnotices/page/iron dome',
'editnotices/page/is genesis history?',
'editnotices/page/isawiya',
'editnotices/page/ishwa',
'editnotices/page/iskaka',
'editnotices/page/islam in israel',
'editnotices/page/islamic jihad movement in palestine',
'editnotices/page/islamic revolutionary guard corps',
'editnotices/page/islamic terrorism',
'editnotices/page/islamic–jewish relations',
'editnotices/page/islamization of jerusalem',
'editnotices/page/islin',
'editnotices/page/ismail haniyeh',
'editnotices/page/israel',
'editnotices/page/israel and state-sponsored terrorism',
'editnotices/page/israel and the apartheid analogy',
'editnotices/page/israel and torture in the occupied territories',
'editnotices/page/israel border police',
'editnotices/page/israel defense forces',
'editnotices/page/israel lobby in the united kingdom',
'editnotices/page/israel shahak',
'editnotices/page/israel shamir',
'editnotices/page/israel-related animal conspiracy theories',
'editnotices/page/israel–gaza barrier',
'editnotices/page/israel–jordan peace treaty',
'editnotices/page/israel–lebanon relations',
'editnotices/page/israel–syria mixed armistice commission',
'editnotices/page/israel–syria relations',
'editnotices/page/israel–turkey relations',
'editnotices/page/israel–united states relations',
'editnotices/page/israel, palestine, and the united nations',
'editnotices/page/israeli checkpoint',
'editnotices/page/israeli citizenship law',
'editnotices/page/israeli civil administration',
'editnotices/page/israeli criticism of the occupation',
'editnotices/page/israeli cuisine',
'editnotices/page/israeli declaration of independence',
'editnotices/page/israeli demolition of palestinian property',
'editnotices/page/israeli disengagement from gaza',
'editnotices/page/israeli expropriation of palestinian springs in the west bank',
'editnotices/page/israeli military governorate',
'editnotices/page/israeli occupation of sinai',
'editnotices/page/israeli occupation of the west bank',
'editnotices/page/israeli outpost',
'editnotices/page/israeli permit system in the west bank',
'editnotices/page/israeli salad',
'editnotices/page/israeli settlement',
'editnotices/page/israeli west bank barrier',
'editnotices/page/israeli wine',
'editnotices/page/israeli-occupied territories',
'editnotices/page/israeli–arab organ donations',
'editnotices/page/israeli–lebanese conflict',
'editnotices/page/israeli–palestinian conflict',
'editnotices/page/israeli–palestinian conflict in hebron',
'editnotices/page/israeli–palestinian peace process',
'editnotices/page/issa amro',
'editnotices/page/itamar',
'editnotices/page/itamar attack',
'editnotices/page/ivana trump',
'editnotices/page/ivanka trump',
'editnotices/page/izbat shufa',
'editnotices/page/izz ad-din al-qassam',
'editnotices/page/izz ad-din al-qassam brigades',
'editnotices/page/jab\'a',
'editnotices/page/jaba\', haifa',
'editnotices/page/jaba\', jenin',
'editnotices/page/jaba\', jerusalem',
'editnotices/page/jabalia',
'editnotices/page/jabalia camp',
'editnotices/page/jabbul',
'editnotices/page/jabel mukaber',
'editnotices/page/jackie walker (activist)',
'editnotices/page/jadeidi-makr',
'editnotices/page/jaffa riots',
'editnotices/page/jaggi vasudev',
'editnotices/page/jahula',
'editnotices/page/jaish-e-mohammed',
'editnotices/page/jake angeli',
'editnotices/page/jalamah',
'editnotices/page/jalazone',
'editnotices/page/jalbun',
'editnotices/page/jaljulia',
'editnotices/page/jalqamus',
'editnotices/page/jalud',
'editnotices/page/jamaat ansar al-sunna',
'editnotices/page/james comey',
'editnotices/page/jamma\'in',
'editnotices/page/jammala',
'editnotices/page/jammu and kashmir (princely state)',
'editnotices/page/jammu and kashmir (state)',
'editnotices/page/janna jihad',
'editnotices/page/jannatah',
'editnotices/page/january 2015 shebaa farms incident',
'editnotices/page/jarash, jerusalem',
'editnotices/page/jarba',
'editnotices/page/jared kushner',
'editnotices/page/jarisha',
'editnotices/page/jatt, israel',
'editnotices/page/jayyous',
'editnotices/page/jazz jennings',
'editnotices/page/jcall',
'editnotices/page/jeff sessions',
'editnotices/page/jeff <NAME>',
'editnotices/page/jeffrey epstein',
'editnotices/page/jenin',
'editnotices/page/jenin, jenin',
'editnotices/page/jeremy corbyn',
'editnotices/page/jericho',
'editnotices/page/jericho synagogue',
'editnotices/page/jerusalem',
'editnotices/page/jerusalem (comics)',
'editnotices/page/jerusalem embassy act',
'editnotices/page/jerusalem law',
'editnotices/page/jerusalem light rail',
'editnotices/page/jewish and democratic state',
'editnotices/page/jewish diaspora',
'editnotices/page/jewish exodus from arab and muslim countries',
'editnotices/page/jewish internet defense force',
'editnotices/page/jewish israeli stone-throwing',
'editnotices/page/jewish state',
'editnotices/page/jexodus',
'editnotices/page/jibiya, ramallah',
'editnotices/page/jifna',
'editnotices/page/jiljilyya',
'editnotices/page/jill stein',
'editnotices/page/jilya',
'editnotices/page/jimmy dore',
'editnotices/page/jimzu',
'editnotices/page/jinsafut',
'editnotices/page/jish',
'editnotices/page/jisr az-zarqa',
'editnotices/page/jit, qalqilya',
'editnotices/page/joe biden',
'editnotices/page/joe biden 2020 presidential campaign',
'editnotices/page/joe biden sexual assault allegation',
'editnotices/page/john brennan (cia officer)',
'editnotices/page/john hume',
'editnotices/page/jonathan cook',
'editnotices/page/jordan peterson',
'editnotices/page/jordan river',
'editnotices/page/jordan–israel mixed armistice commission',
'editnotices/page/jordanian annexation of the west bank',
'editnotices/page/joseph\'s tomb',
'editnotices/page/jubata ez-zeit',
'editnotices/page/jubb yusuf',
'editnotices/page/jubbet ad-dib',
'editnotices/page/judaization',
'editnotices/page/judea and samaria area',
'editnotices/page/judea and samaria division',
'editnotices/page/judicial watch',
'editnotices/page/juhdum',
'editnotices/page/juhor ad-dik',
'editnotices/page/julia salazar',
'editnotices/page/julian assange',
'editnotices/page/julian castro',
'editnotices/page/julis, gaza',
'editnotices/page/june 2017 jerusalem attack',
'editnotices/page/jurat ash-sham\'a',
'editnotices/page/jurish',
'editnotices/page/jusayr',
'editnotices/page/ka\'abiyye-tabbash-hajajre',
'editnotices/page/kabara, haifa',
'editnotices/page/kabul, israel',
'editnotices/page/kach (political party)',
'editnotices/page/kafr \'ana',
'editnotices/page/kafr \'aqab',
'editnotices/page/kafr \'inan',
'editnotices/page/kafr abbush',
'editnotices/page/kafr ad-dik',
'editnotices/page/kafr al-labad',
'editnotices/page/kafr bara',
'editnotices/page/kafr bir\'im',
'editnotices/page/kafr dan',
'editnotices/page/kafr ein',
'editnotices/page/kafr jammal',
'editnotices/page/kafr kanna',
'editnotices/page/kafr lam',
'editnotices/page/kafr laqif',
'editnotices/page/kafr malik',
'editnotices/page/kafr manda',
'editnotices/page/kafr misr',
'editnotices/page/kafr ni\'ma',
'editnotices/page/kafr qaddum',
'editnotices/page/kafr qallil',
'editnotices/page/kafr qara',
'editnotices/page/kafr qasim',
'editnotices/page/kafr qud',
'editnotices/page/kafr ra\'i',
'editnotices/page/kafr rumman',
'editnotices/page/kafr saba',
'editnotices/page/kafr sabt',
'editnotices/page/kafr sur',
'editnotices/page/kafr thulth',
'editnotices/page/kafr yasif',
'editnotices/page/kafr zibad',
'editnotices/page/kafra',
'editnotices/page/kalandia',
'editnotices/page/kamal nasser',
'editnotices/page/kamala harris',
'editnotices/page/kamanneh',
'editnotices/page/kanaf',
'editnotices/page/karatiyya',
'editnotices/page/kardala',
'editnotices/page/kargil war',
'editnotices/page/karine a affair',
'editnotices/page/karlie kloss',
'editnotices/page/karma, hebron',
'editnotices/page/kashmir',
'editnotices/page/kashmir conflict',
'editnotices/page/kasla, jerusalem',
'editnotices/page/katie hill (politician)',
'editnotices/page/katif (moshav)',
'editnotices/page/katzrin',
'editnotices/page/kaukab abu al-hija',
'editnotices/page/kawfakha',
'editnotices/page/kawkab al-hawa',
'editnotices/page/kawkaba',
'editnotices/page/kedar, gush etzion',
'editnotices/page/keith ellison',
'editnotices/page/kela alon',
'editnotices/page/kelli maroney',
'editnotices/page/kelli ward',
'editnotices/page/kellyanne conway',
'editnotices/page/kerem atzmona',
'editnotices/page/keshet, golan heights',
'editnotices/page/kevin folta',
'editnotices/page/kfar darom',
'editnotices/page/kfar haruv',
'editnotices/page/kfar saba',
'editnotices/page/kfar yam',
'editnotices/page/khaled abu toameh',
'editnotices/page/khalet al-maiyya',
'editnotices/page/khalida jarrar',
'editnotices/page/khalil al-wazir',
'editnotices/page/khalistan commando force',
'editnotices/page/khan al-ahmar',
'editnotices/page/khan al-duwayr',
'editnotices/page/khan al-shih',
'editnotices/page/khan yunis',
'editnotices/page/khan yunis camp',
'editnotices/page/kharas',
'editnotices/page/kharbatha al-misbah',
'editnotices/page/kharbatha bani harith',
'editnotices/page/kharruba',
'editnotices/page/khawaled',
'editnotices/page/khazar hypothesis of ashkenazi ancestry',
'editnotices/page/khirbat al-\'umur',
'editnotices/page/khirbat al-burj, haifa',
'editnotices/page/khirbat al-buwayra',
'editnotices/page/khirbat al-duhayriyya',
'editnotices/page/khirbat al-dumun',
'editnotices/page/khirbat al-jawfa',
'editnotices/page/khirbat al-kasayir',
'editnotices/page/khirbat al-lawz',
'editnotices/page/khirbat al-majdal',
'editnotices/page/khirbat al-manara',
'editnotices/page/khirbat al-mansura',
'editnotices/page/khirbat al-muntar',
'editnotices/page/khirbat al-sarkas',
'editnotices/page/khirbat al-sawamir',
'editnotices/page/khirbat al-shuna',
'editnotices/page/khirbat al-simia',
'editnotices/page/khirbat al-tannur',
'editnotices/page/khirbat al-taqa',
'editnotices/page/khirbat al-wa\'ra al-sawda\'',
'editnotices/page/khirbat al-zababida',
'editnotices/page/khirbat bayt far',
'editnotices/page/khirbat bayt lid',
'editnotices/page/khirbat iribbin',
'editnotices/page/khirbat ism allah',
'editnotices/page/khirbat jiddin',
'editnotices/page/khirbat karraza',
'editnotices/page/khirbat lid',
'editnotices/page/khirbat qumbaza',
'editnotices/page/khirbat sa\'sa\'',
'editnotices/page/khirbat umm burj',
'editnotices/page/khirbat umm sabuna',
'editnotices/page/khirbat zakariyya',
'editnotices/page/khirbat zalafa',
'editnotices/page/khirbat zawiya',
'editnotices/page/khirbet abu falah',
'editnotices/page/khirbet al-deir',
'editnotices/page/khirbet al-malih',
'editnotices/page/khirbet ar-ras al-ahmar',
'editnotices/page/khirbet beit zakariyyah',
'editnotices/page/khirbet qeis',
'editnotices/page/khirbet safa',
'editnotices/page/khirbet sir',
'editnotices/page/khirbet zanuta',
'editnotices/page/khiyam al-walid',
'editnotices/page/khubbayza',
'editnotices/page/khulda',
'editnotices/page/khursa',
'editnotices/page/khuzaʽa, khan yunis',
'editnotices/page/kidmat tzvi',
'editnotices/page/kifl haris',
'editnotices/page/killing of aisha al-rabi',
'editnotices/page/killing of alexander levlovich',
'editnotices/page/killing of esther ohana',
'editnotices/page/killing of muhammad al-durrah',
'editnotices/page/killings and massacres during the 1948 palestine war',
'editnotices/page/kimberly klacik',
'editnotices/page/king hussein\'s federation plan',
'editnotices/page/kirad al-baqqara',
'editnotices/page/kirad al-ghannama',
'editnotices/page/kirsten gillibrand',
'editnotices/page/kobar',
'editnotices/page/koenig memorandum',
'editnotices/page/kokhav ya\'akov',
'editnotices/page/kosovo',
'editnotices/page/kudna',
'editnotices/page/kufeir',
'editnotices/page/kufeirit',
'editnotices/page/kukhleh',
'editnotices/page/kur, tulkarm',
'editnotices/page/kursi, sea of galilee',
'editnotices/page/kuseife',
'editnotices/page/kuseis',
'editnotices/page/kuwaykat',
'editnotices/page/united nations general assembly resolution es-10/l.23',
'editnotices/page/lajjun',
'editnotices/page/lakiya',
'editnotices/page/land day',
'editnotices/page/land expropriation in the west bank',
'editnotices/page/land for peace',
'editnotices/page/land of israel',
'editnotices/page/lara logan',
'editnotices/page/latakia camp',
'editnotices/page/late termination of pregnancy',
'editnotices/page/latrun',
'editnotices/page/laurence olivier',
'editnotices/page/lausanne conference of 1949',
'editnotices/page/lavon affair',
'editnotices/page/law of return',
'editnotices/page/lazzaza',
'editnotices/page/lebanese civil war',
'editnotices/page/legal affairs of donald trump',
'editnotices/page/legitimacy of the state of israel',
'editnotices/page/leon uris',
'editnotices/page/letter to an anti-zionist friend',
'editnotices/page/lifta',
'editnotices/page/likud',
'editnotices/page/lillehammer affair',
'editnotices/page/linda gottfredson',
'editnotices/page/linda sarsour',
'editnotices/page/line of control',
'editnotices/page/links between trump associates and russian officials',
'editnotices/page/list of american state and local politicians convicted of crimes',
'editnotices/page/list of attacks against israeli civilians before 1967',
'editnotices/page/list of battles and operations in the 1948 palestine war',
'editnotices/page/list of beaches in israel',
'editnotices/page/list of concentration and internment camps',
'editnotices/page/list of countries by gdp (nominal)',
'editnotices/page/list of countries by gdp (ppp)',
'editnotices/page/list of executive actions by <NAME>',
'editnotices/page/list of federal judges appointed by <NAME>',
'editnotices/page/list of israeli assassinations',
'editnotices/page/list of israeli attacks on gaza in 2009',
'editnotices/page/list of israeli settlements',
'editnotices/page/list of israeli settlements with city status in the west bank',
'editnotices/page/list of israeli strikes and palestinian casualties in the 2014 israel–gaza conflict',
'editnotices/page/list of israeli universities and colleges',
'editnotices/page/list of killings and massacres in mandatory palestine',
'editnotices/page/list of lehi members',
'editnotices/page/list of massacres in india',
'editnotices/page/list of middle east peace proposals',
'editnotices/page/list of modern conflicts in the middle east',
'editnotices/page/list of mossad operations',
'editnotices/page/list of palestinian rocket attacks on israel in 2001',
'editnotices/page/list of palestinian rocket attacks on israel in 2002–2006',
'editnotices/page/list of palestinian rocket attacks on israel in 2007',
'editnotices/page/list of palestinian rocket attacks on israel in 2008',
'editnotices/page/list of palestinian rocket attacks on israel in 2009',
'editnotices/page/list of palestinian rocket attacks on israel in 2010',
'editnotices/page/list of palestinian rocket attacks on israel in 2011',
'editnotices/page/list of palestinian rocket attacks on israel in 2012',
'editnotices/page/list of palestinian rocket attacks on israel in 2013',
'editnotices/page/list of palestinian rocket attacks on israel in 2014',
'editnotices/page/list of palestinian rocket attacks on israel in 2015',
'editnotices/page/list of palestinian rocket attacks on israel in 2016',
'editnotices/page/list of palestinian rocket attacks on israel in 2017',
'editnotices/page/list | |
#!/usr/bin/python
from __future__ import print_function
import logging
from fabric.api import task,run,local,put,get,execute,settings
from fabric.decorators import *
from fabric.context_managers import shell_env,quiet
from fabric.exceptions import *
from fabric.utils import puts,fastprint
from time import sleep
from contextlib import contextmanager
import traceback
import os,sys,datetime,re,ast
import itertools
import glob,shlex,subprocess
import pprint
sys.path.append('..')
from environment import *
from experiments import *
from experiments import configs
from helper import get_cfgs,get_outfile_name,get_execfile_name,get_args,CONFIG_PARAMS,FLAG
# Silence paramiko's transport logger so fabric output stays readable
# (see https://github.com/fabric/fabric/issues/51#issuecomment-96341022)
logging.basicConfig()
paramiko_logger = logging.getLogger("paramiko.transport")
paramiko_logger.disabled = True
# ANSI color codes used by the `color` context manager for log levels.
COLORS = {
    "info" : 32, #green
    "warn" : 33, #yellow
    "error" : 31, #red
    "debug" : 36, #cyan
}
#OUT_FMT = "[{h}] {p}: {fn}:".format
PP = pprint.PrettyPrinter(indent=4)
# Timestamp for naming this run's output files; refreshed per iteration in run_exps.
NOW=datetime.datetime.now()
STRNOW=NOW.strftime("%Y%m%d-%H%M%S")
# Fabfile lives two levels below the project root; work from the root.
os.chdir('../..')
#MAX_TIME_PER_EXP = 60 * 2 # in seconds
MAX_TIME_PER_EXP = 60 * 10 # in seconds
# Module-level run flags, mutated by the task entry points below.
EXECUTE_EXPS = True
SKIP = False
CC_ALG = ""
set_env()
@task
@hosts('localhost')
def using_vcloud():
    """Select the vcloud cluster environment for subsequent tasks."""
    set_env_vcloud()
@task
@hosts('localhost')
def using_istc():
    """Select the istc cluster environment for subsequent tasks."""
    set_env_istc()
@task
@hosts('localhost')
def using_ec2():
    """Select the ec2 cluster environment for subsequent tasks."""
    set_env_ec2()
@task
@hosts('localhost')
def using_local():
    """Select the local (single-machine) environment for subsequent tasks."""
    set_env_local()
## Basic usage:
## fab using_vcloud run_exps:experiment_1
## fab using_local run_exps:experiment_1
## fab using_istc run_exps:experiment_1
@task
@hosts('localhost')
def run_exps(exps,skip_completed='False',exec_exps='True',dry_run='False',iterations='1',check='True',delay='',same_node='False',overlap='False',shmem='True',cram='False'):
    """Run an experiment set, optionally repeated for several iterations.

    All option parameters arrive as strings from the fab command line; the
    literal string 'True' enables a flag, anything else disables it.
    """
    global SKIP, EXECUTE_EXPS, NOW, STRNOW

    def enabled(flag):
        # fab passes CLI options through as raw strings
        return flag == 'True'

    num_iterations = int(iterations)
    SKIP = enabled(skip_completed)
    EXECUTE_EXPS = enabled(exec_exps)
    should_check = enabled(check)
    env.dry_run = enabled(dry_run)
    env.same_node = enabled(same_node)
    env.overlap = enabled(overlap)
    env.cram = enabled(cram)
    # Shared memory is only configurable off ec2.
    if env.cluster != "ec2":
        env.shmem = enabled(shmem)

    if env.dry_run:
        with color(level="warn"):
            puts("this will be a dry run!", show_prefix=True)
    with color():
        puts("running experiment set:{}".format(exps), show_prefix=True)

    # Fail fast if any experiment binary is missing.
    if should_check:
        execute(check_binaries, exps)

    for _ in range(num_iterations):
        # Refresh the timestamp so each iteration writes distinct output files.
        NOW = datetime.datetime.now()
        STRNOW = NOW.strftime("%Y%m%d-%H%M%S")
        execute(run_exp_old, exps, delay=delay)
        # execute(run_exp,exps,delay=delay)
## Basic usage:
## fab using_vcloud network_test
## fab using_istc network_test:4
@task
@hosts(['localhost'])
def network_test(num_nodes=16,exps="network_experiment",skip_completed='False',exec_exps='True'):
    """Run the network experiment between every pair of the first num_nodes hosts.

    Each unordered host pair gets its own 2-node run of `exps`, with a
    tightened 60-second per-experiment timeout.  Aborts when ifconfig.txt
    does not list enough hosts.
    """
    env.batch_mode = False
    global SKIP, EXECUTE_EXPS, MAX_TIME_PER_EXP
    SKIP = skip_completed == 'True'
    EXECUTE_EXPS = exec_exps == 'True'
    # Network probes are short; shorten the global timeout for this task.
    MAX_TIME_PER_EXP = 60
    num_nodes = int(num_nodes)
    execute(check_binaries,exps)
    if num_nodes < 2 or len(env.hosts) < num_nodes:
        with color(level="error"):
            puts("not enough hosts in ifconfig!",show_prefix=True)
        abort()
    exp_hosts=env.hosts[0:num_nodes]
    # Every unordered pair of hosts gets its own 2-node experiment run.
    pairs = list(itertools.combinations(exp_hosts,2))
    for pair in pairs:
        set_hosts(list(pair))
        execute(run_exp,exps,network_test=True)
@task
@parallel
def check_cpu():
    """Upload the CPU micro-benchmark to every host and time a run of it."""
    put("test_cpu.out",env.rem_homedir)
    run("chmod a+x test_cpu.out; time ./test_cpu.out")
@task
@hosts('localhost')
def delete_local_results():
    """Remove every file from the local results directory."""
    local("rm -f results/*")
@task
#@hosts('localhost')
@parallel
def delete_remote_results():
    """Remove stale results*.out files from every host.

    On istc the files live in /dev/shm (when env.shmem) or the user's home
    directory; all other clusters use the ubuntu home directory.
    """
    if env.cluster == "istc":
        if env.shmem:
            run("rm -f /dev/shm/results*.out")
        else:
            run("rm -f /home/%s/results*.out" % env.user)
    else:
        run("rm -f /home/ubuntu/results*.out")
@task
@parallel
def copy_schema():
    """Push the benchmark schema files to every host (no-op on dry runs)."""
    if env.dry_run:
        return
    # Destination is shared memory when enabled, otherwise the remote home dir.
    destination = "/dev/shm/" if env.shmem else env.rem_homedir
    # Copying regular files should always succeed unless the node is down.
    for schema_file in ("benchmarks/TPCC_full_schema.txt",
                        "benchmarks/YCSB_schema.txt",
                        "benchmarks/PPS_schema.txt"):
        put(schema_file, destination)
@task
@parallel
def copy_binaries(exp_fname):
    """Push the experiment's rundb/runcl binaries to every host.

    The remote copy is named "<exp_fname><binary>" so that deploy() can
    invoke the per-experiment executable ("{rfile}rundb").  A put() can
    fail while a previous run is still executing the binary, so on failure
    every executable is killed and the upload retried once before aborting.
    """
    if env.dry_run:
        return
    executable_files = ["rundb","runcl"]

    def _remote_path(fname):
        # Binaries live in shared memory when enabled, else the home dir;
        # always carry the experiment prefix so deploy() can find them.
        base = "/dev/shm/" if env.shmem else env.rem_homedir
        return os.path.join(base, "{}{}".format(exp_fname, fname))

    def _local_path(fname):
        return os.path.join("binaries", "{}{}".format(exp_fname, fname))

    # Copying executable files may fail if a process is running the executable
    with settings(warn_only=True):
        succeeded = True
        for f in executable_files:
            res = put(_local_path(f), _remote_path(f), mirror_local_mode=True)
            if not res.succeeded:
                with color("warn"):
                    puts("WARN: put: {} -> {} failed!".format(f, env.rem_homedir), show_prefix=True)
                succeeded = False
                break
        if not succeeded:
            with color("warn"):
                puts("WARN: killing all executables and retrying...", show_prefix=True)
            killall()
            # If this fails again then we abort.
            # BUG FIX: the retry used to drop the exp_fname prefix from the
            # remote path, leaving the per-experiment binary missing.
            for f in executable_files:
                res = put(_local_path(f), _remote_path(f), mirror_local_mode=True)
                if not res.succeeded:
                    with color("error"):
                        puts("ERROR: put: {} -> {} failed! (2nd attempt)... Aborting".format(f, env.rem_homedir), show_prefix=True)
                        abort()
@task
@parallel
def copy_ifconfig():
    """Distribute ifconfig.txt (the node address list) to every host."""
    destination = "/dev/shm/" if env.shmem else env.rem_homedir
    # Copying regular files should always succeed unless the node is down.
    for config_file in ["ifconfig.txt"]:
        put(config_file, destination)
@task
@parallel
def copy_files(schema,exp_fname):
    """Push the schema, ifconfig.txt and the experiment binaries to every host.

    Regular files are copied first (these only fail when the node is down).
    Binary uploads may fail while a previous run still executes them, so on
    failure all executables are killed and the upload retried once before
    aborting.

    NOTE(review): unlike copy_binaries(), the remote binary name here drops
    the exp_fname prefix even though the local file is prefixed — confirm
    which naming the deployment path that uses this task expects.
    """
    if env.dry_run:
        return
    executable_files = ["rundb","runcl"]
    # if CC_ALG == "CALVIN":
    #     executable_files.append("runsq")
    files = ["ifconfig.txt"]
    files.append(schema)
    succeeded = True
    # Copying regular files should always succeed unless node is down
    for f in files:
        if env.shmem:
            put(f,"/dev/shm/")
        else:
            put(f,env.rem_homedir)
    # Copying executable files may fail if a process is running the executable
    with settings(warn_only=True):
        for f in (executable_files):
            local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
            if env.shmem:
                remote_fpath = os.path.join("/dev/shm/",f)
            else:
                remote_fpath = os.path.join(env.rem_homedir,f)
            #res = put(f,env.rem_homedir,mirror_local_mode=True)
            res = put(local_fpath,remote_fpath,mirror_local_mode=True)
            if not res.succeeded:
                with color("warn"):
                    puts("WARN: put: {} -> {} failed!".format(f,env.rem_homedir),show_prefix=True)
                succeeded = False
                break
        if not succeeded:
            with color("warn"):
                puts("WARN: killing all executables and retrying...",show_prefix=True)
            killall()
            # If this fails again then we abort
            for f in (executable_files):
                local_fpath = os.path.join("binaries","{}{}".format(exp_fname,f))
                if env.shmem:
                    remote_fpath = os.path.join("/dev/shm",f)
                else:
                    remote_fpath = os.path.join(env.rem_homedir,f)
                #res = put(f,env.rem_homedir,mirror_local_mode=True)
                res = put(local_fpath,remote_fpath,mirror_local_mode=True)
                if not res.succeeded:
                    with color("error"):
                        puts("ERROR: put: {} -> {} failed! (2nd attempt)... Aborting".format(f,env.rem_homedir),show_prefix=True)
                        abort()
#delay is in ms
@task
@parallel
def set_delay(delay='10'):
    """Add an artificial network delay (in ms) on eth0 via tc/netem."""
    run("sudo tc qdisc add dev eth0 root netem delay {}ms".format(delay))
#delay is in ms
@task
@parallel
def reset_delay():
    """Remove the netem qdisc from eth0, restoring normal latency."""
    run("sudo tc qdisc del dev eth0 root")
@task
@parallel
def sync_clocks(max_offset=0.01,max_attempts=1,delay=15):
    """Step every host's clock with ntpdate until the offset is small enough.

    Each attempt first probes the offset (ntpdate -q); when it exceeds
    max_offset the clock is stepped (sudo ntpdate -b) and re-probed after
    `delay` seconds.  Returns True when a probe came back within
    max_offset before the attempts ran out.
    """
    if env.dry_run:
        return True
    offset = sys.float_info.max
    attempts = 0
    while attempts < max_attempts:
        # Query-only probe against the cluster's reference NTP server.
        if env.cluster == "ec2":
            res = run("ntpdate -q 0.amazon.pool.ntp.org")
        else:
            res = run("ntpdate -q clock-2.cs.cmu.edu")
        # Parse the offset from ntpdate's comma-separated summary line
        # (second-to-last comma field, last whitespace token).
        offset = float(res.stdout.split(",")[-2].split()[-1])
        #print "Host ",env.host,": offset = ",offset
        if abs(offset) < max_offset:
            break
        sleep(delay)
        # Step the clock, then give NTP time to settle before re-probing.
        if env.cluster == "ec2":
            res = run("sudo ntpdate -b 0.amazon.pool.ntp.org")
        else:
            res = run("sudo ntpdate -b clock-2.cs.cmu.edu")
        sleep(delay)
        attempts += 1
    # True iff the loop broke out early (offset within tolerance).
    return attempts < max_attempts
@task
@hosts('localhost')
def compile():
    """Build the project locally, retrying verbosely on failure.

    The first build runs quietly; when it fails, the project is rebuilt
    without output suppression so the compiler errors reach the user.
    """
    compiled = False
    with quiet():
        compiled = local("make clean; make -j8",capture=True).succeeded
    if not compiled:
        with settings(warn_only=True):
            # Print compilation errors.
            # BUG FIX: local(..., capture=False) returns an empty
            # string-like object, so the bare result was always falsy and a
            # successful rebuild still reported an error.  Test .succeeded.
            compiled = local("make -j8").succeeded
    if not compiled:
        with color("error"):
            puts("ERROR: cannot compile code!",show_prefix=True)
@task
@parallel
def killall():
    """Terminate any running experiment processes on every host."""
    with settings(warn_only=True):
        if not env.dry_run:
            for process_name in ("rundb", "runcl"):
                run("pkill -f {}".format(process_name))
            # run("pkill -f runsq")
@task
@parallel
def run_cmd(cmd):
    """Run an arbitrary shell command on every host."""
    run(cmd)
@task
@parallel
def put_cmd(cmd):
    """Upload an arbitrary local file to every host's home directory."""
    put(cmd,env.rem_homedir,mirror_local_mode=True)
@task
@parallel
def deploy(schema_path,nids,exps,runfiles,fmt):
    """Launch every server/client process assigned to this host.

    nids/exps/runfiles map host -> parallel lists of node ids, experiment
    configs and binary prefixes; they are consumed in role order (servers
    first, matching the id assignment in write_ifconfig).  One shell
    command is built that backgrounds every process but the last, then it
    is run with SCHEMA_PATH exported and a MAX_TIME_PER_EXP timeout.
    Timeouts and network errors are swallowed; always returns True.
    """
    nid = iter(nids[env.host])
    exp = iter(exps[env.host])
    runfile = iter(runfiles[env.host])
    with shell_env(SCHEMA_PATH=schema_path):
        with settings(warn_only=True,command_timeout=MAX_TIME_PER_EXP):
            cmd = ''
            for r in env.roledefs["servers"]:
                if r == env.host:
                    # BUG FIX: iterator.next() is Python-2-only; the
                    # builtin next() works on both Python 2 and 3.
                    nn = next(nid)
                    rfile = next(runfile)
                    args = get_args(fmt,next(exp))
                    if env.shmem:
                        cmd += "(/dev/shm/{}rundb -nid{} {}>> /dev/shm/results{}.out 2>&1 &);".format(rfile,nn,args,nn)
                    else:
                        cmd += "(./{}rundb -nid{} {}>> results{}.out 2>&1 &);".format(rfile,nn,args,nn)
            for r in env.roledefs["clients"]:
                if r == env.host:
                    nn = next(nid)
                    rfile = next(runfile)
                    args = get_args(fmt,next(exp))
                    if env.shmem:
                        cmd += "(/dev/shm/{}runcl -nid{} {}>> /dev/shm/results{}.out 2>&1 &);".format(rfile,nn,args,nn)
                    else:
                        cmd += "(./{}runcl -nid{} {}>> results{}.out 2>&1 &);".format(rfile,nn,args,nn)
            # Strip the trailing "&);" of the last entry and re-close its
            # subshell without "&" — presumably so run() blocks until that
            # final process exits.
            cmd = cmd[:-3]
            cmd += ")"
            try:
                res = run("echo $SCHEMA_PATH")
                if not env.dry_run:
                    run(cmd)
                else:
                    print(cmd)
            except CommandTimeout:
                pass
            except NetworkError:
                pass
    return True
@task
@parallel
def get_results(outfiles,nids):
    """Fetch every results<n>.out from this host, then delete the remote copies.

    outfiles/nids map host -> output file name / list of node ids.  Each
    node's file is downloaded to env.result_dir as "<n>_<outfile>".
    Returns True only when every download and the final cleanup succeeded.
    """
    succeeded = True
    for n in nids[env.host]:
        if env.shmem:
            # Results live in shared memory; the path is absolute.
            rem_path = "/dev/shm/results{}.out".format(n)
        else:
            rem_path = os.path.join(env.rem_homedir,"results{}.out".format(n))
        loc_path = os.path.join(env.result_dir, "{}_{}".format(n,outfiles[env.host]))
        with settings(warn_only=True):
            if not env.dry_run:
                res1 = get(remote_path=rem_path, local_path=loc_path)
                succeeded = succeeded and res1.succeeded
    # BUG FIX: the wildcard cleanup used to run inside the loop above,
    # deleting the results of nodes that had not been fetched yet whenever
    # a host ran more than one node.  Clean up only after all downloads.
    with settings(warn_only=True):
        if not env.dry_run:
            if env.shmem:
                res2 = run("rm -f /dev/shm/results*.out")
            else:
                res2 = run("rm -f results*.out")
            succeeded = succeeded and res2.succeeded
    return succeeded
@task
@hosts('localhost')
def write_config(cfgs):
    """Rewrite config.h, replacing the value of every #define named in cfgs.

    cfgs maps macro name -> new value.  Lines whose macro is not in cfgs
    are written back unchanged.
    """
    dbx_cfg = os.path.join(env.local_path,"config.h")
    # Idiom fix: use context managers instead of bare open()/close().
    with open(dbx_cfg,'r') as f:
        lines = f.readlines()
    with open(dbx_cfg,'w') as f_cfg:
        for line in lines:
            found_cfg = False
            for c in cfgs:
                # Match "#define NAME<tab>" or "#define NAME " so a macro
                # whose name is a prefix of another is not rewritten.
                found_cfg = (re.search("#define " + c + "\t", line) or
                             re.search("#define " + c + " ", line))
                if found_cfg:
                    f_cfg.write("#define " + c + " " + str(cfgs[c]) + "\n")
                    break
            if not found_cfg:
                f_cfg.write(line)
@task
@hosts('localhost')
def write_ifconfig(roles,exp,rfile):
    """Write ifconfig.txt from the role assignment and build per-host maps.

    roles maps "servers"/"clients" to ordered host lists.  Node ids are
    assigned sequentially, servers first.  Returns three dicts keyed by
    host — node ids, experiment configs and run-file prefixes — where a
    host that runs several nodes accumulates one entry per node.

    (Cleanup: the duplicated server/client loops were merged and a
    leftover debug print(roles) removed.)
    """
    with color():
        puts("writing roles to the ifconfig file:",show_prefix=True)
        puts(pprint.pformat(roles,depth=3),show_prefix=False)
    nids = {}
    exps = {}
    rfiles = {}
    nid = 0
    with open("ifconfig.txt",'w') as f:
        # Servers first, then clients — deploy() consumes ids in this order.
        for host in roles['servers'] + roles['clients']:
            f.write(host + "\n")
            nids.setdefault(host, []).append(nid)
            exps.setdefault(host, []).append(exp)
            rfiles.setdefault(host, []).append(rfile)
            nid += 1
    return nids,exps,rfiles
@task
@hosts('localhost')
def assign_roles(server_cnt,client_cnt,append=False):
if env.same_node:
servers=[env.hosts[0]] * server_cnt
clients=[env.hosts[0]] * client_cnt
elif env.cram:
ncnt = max(max(server_cnt,client_cnt) / 8,1)
servers = []
clients = []
for r in range(server_cnt):
servers.append(env.hosts[r%ncnt])
for | |
<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 9 16:19:08 2021
@author: <NAME>
"""
from copy import deepcopy
import numpy as np
import pandas as pd
import random
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
import seaborn as sns
from sklearn.cluster import KMeans as kms
class K_Means_Cluster:
    """Hand-rolled K-means clustering of the iris data set, run with both
    Euclidean and Manhattan assignment metrics."""

    def __init__(self, num_clusters):
        """
        Initialize the class for K Means Clustering.
        The process is as follows:
        1. Read the data from the file into a DataFrame.
        2. Build the 2d numpy array to be used in the K-means algorithm.
        3. Call the initial visualization.
        4. Initialize the number-of-clusters attribute.
        5. Convert the label column in the data into encoded values.
        6. Call the predict_all function.

        Parameters
        ----------
        num_clusters : int
            Integer value to store the number of clusters

        Returns
        -------
        None.
        """
        data = pd.read_csv('data/iris.data', names = ['slength', 'swidth', 'plength', 'pwidth', 'species'])
        c1 = data['slength'].values
        c2 = data['swidth'].values
        c3 = data['plength'].values
        c4 = data['pwidth'].values
        #Input array: one row per sample, one column per feature (float32)
        self.X = np.array(list(zip(c1,c2,c3,c4)), dtype=np.float32)
        self.initial_visualization(data)
        #Number of clusters
        self.num_clusters = num_clusters
        # Encode the species strings as integer category codes for scoring.
        data["species"] = data["species"].astype('category')
        data["species_cat"] = data["species"].cat.codes
        #Label data
        self.Y = np.array(data['species_cat'].values)
        self.predict_all()
def check_accuracy(self, data, metric):
"""
Function checks the accuracy of the data for both metrics:
Euclidean
Manhattan
Parameters
----------
data : Pandas DataFrame
Dataframe includes the raw data values and includes the
predicted label values as well.
metric : string
describes the metric used for which we are checking in accuracy
Returns
-------
None.
"""
iris_setosa = data[data["species"] == "Iris-setosa"]
iris_virginica = data[data["species"] == "Iris-virginica"]
iris_versicolor = data[data["species"] == "Iris-versicolor"]
if metric == "Euclidean":
pred_setosa = data[data["pred_Y"] == 2]
pred_virginica = data[data["pred_Y"] == 0]
pred_versicolor = data[data["pred_Y"] == 1]
#True values
true_setosa_count = len(iris_setosa[iris_setosa["pred_Y"] == 2])
true_virginica_count = len(iris_virginica[iris_virginica["pred_Y"] == 0])
true_versicolor_count = len(iris_versicolor[iris_versicolor["pred_Y"] == 1])
else:
pred_setosa = data[data["pred_Y"] == 2]
pred_virginica = data[data["pred_Y"] == 1]
pred_versicolor = data[data["pred_Y"] == 0]
#True values
true_setosa_count = len(iris_setosa[iris_setosa["pred_Y"] == 2])
true_virginica_count = len(iris_virginica[iris_virginica["pred_Y"] == 1])
true_versicolor_count = len(iris_versicolor[iris_versicolor["pred_Y"] == 0])
#Counting the actual values
act_setosa_count = len(iris_setosa)
act_virginica_count = len(iris_virginica)
act_versicolor_count = len(iris_versicolor)
#Counting the predicted values
pred_setosa_count = len(pred_setosa)
pred_virginica_count = len(pred_virginica)
pred_versicolor_count = len(pred_versicolor)
#False values
false_setosa_count = act_setosa_count - true_setosa_count
false_virginica_count = act_virginica_count - true_virginica_count
false_versicolor_count = act_versicolor_count - true_versicolor_count
total_correct_count = true_setosa_count + true_virginica_count + true_versicolor_count
total_records = len(data)
print(f"Overall Accuracy using {metric} distance is : {(total_correct_count/total_records)*100}")
    def initial_visualization(self, data):
        """
        This function is to display the initial visualization.
        1. How to choose K - values? (elbow plot of WCSS for k = 1..10)
        2. Visualizing the scatter plot and the sklearn centroids

        Parameters
        ----------
        data : Pandas dataframe
            Raw data in the form of pandas data frame

        Returns
        -------
        None.
        """
        print("Understanding the data:")
        print(data.info())
        print(data.describe())
        #Choosing the number of clusters
        #Within cluster sum of squares
        wcss = []
        x = data.iloc[:, [0, 1 ,2, 3]].values
        # Fit sklearn KMeans for k = 1..10 and record the inertia (WCSS)
        # so the elbow in the curve suggests a good cluster count.
        for i in range(1, 11):
            kmeans = kms(n_clusters=i, init='k-means++', max_iter=300, n_init = 10, random_state=0)
            kmeans.fit(x)
            wcss.append(kmeans.inertia_)
        plt.plot(range(1,11), wcss)
        plt.title('The Elbow Methdod')
        plt.xlabel('Number of Clusters')
        plt.ylabel('WCSS')
        plt.show()
        #Visualizing the data (first two features) with sklearn's 3-cluster fit
        kmeans = kms(n_clusters=3, init='k-means++', max_iter=300, n_init=10, random_state=0)
        y_kmeans = kmeans.fit_predict(x)
        fig = plt.figure(figsize = (15, 15))
        ax = fig.add_subplot(111, projection='3d')
        plt.scatter(x[y_kmeans == 0, 0], x[y_kmeans ==0, 1], s =100, c = 'purple', label = 'Iris-setosa')
        plt.scatter(x[y_kmeans == 1, 0], x[y_kmeans ==1, 1], s =100, c = 'orange', label = 'Iris-versicolor')
        plt.scatter(x[y_kmeans == 2, 0], x[y_kmeans ==2, 1], s =100, c = 'green', label = 'Iris-virginica')
        plt.scatter(kmeans.cluster_centers_[:, 0], kmeans.cluster_centers_[:, 1], s =100, c = 'red', label = 'Centroids')
        plt.suptitle("Sample and Centroid Visualization")
        plt.show()
        print("Visualization complete")
def predict_all(self):
"""
This function builds will use Euclidean and Manhattan distances
to create the K -means clustering.
This also calls the data visualization function to check see
how the metrics have performed
Returns
-------
None.
"""
X = self.X
#Assigning the centroids
c1 = [X[0][0],X[1][0],X[2][0]]
#first feature cluster centroids
c2 = [X[0][1],X[1][1],X[2][1]]
#second feature cluster centroids
c3 = [X[0][2],X[1][2],X[2][2]]
#third feature cluster centroids
c4 = [X[0][3],X[1][3],X[2][3]]
#fourth feature cluster centroids
c = np.array(list(zip(c1, c2, c3, c4)))
#Initlizing a variable to store the old centroids
c_old = np.zeros(c.shape)
#Stores the centroid of the nearest point
clusters = np.zeros(len(X))
#Stores the error or difference between old centroid and new centroids
error = self.dist(c, c_old, None)
#Calculating using the Euclidean distance
while error != 0:
#Assigning the nearest point to its cluster
for i in range(len(X)):
distances = self.dist(X[i], c)
cluster = np.argmin(distances)
clusters[i] = cluster
#Push the centroid values to the old centroid values
c_old = deepcopy(c)
#Finding the new mean of each cluster
for i in range(self.num_clusters):
points = [X[j] for j in range(len(X)) if clusters[j] == i]
c[i] = np.mean(points, axis=0)
error = self.dist(c, c_old, None)
print(f"Y labels: {self.Y}")
print(f"Predicted Values: {clusters}")
#print(f"Accuracy Score (Euclidean distance): {accuracy_score(self.Y, clusters)}")
self.data_visualization(clusters, "Euclidean")
#Calculating using the Manhattan distance
#Assigning the centroids
c1 = [X[0][0],X[1][0],X[2][0]]
#first feature cluster centroids
c2 = [X[0][1],X[1][1],X[2][1]]
#second feature cluster centroids
c3 = [X[0][2],X[1][2],X[2][2]]
#third feature cluster centroids
c4 = [X[0][3],X[1][3],X[2][3]]
#fourth feature cluster centroids
c = np.array(list(zip(c1, c2, c3, c4)))
#Initlizing a variable to store the old centroids
c_old = np.zeros(c.shape)
#Stores the centroid of the nearest point
clusters = np.zeros(len(X))
#Stores the error or difference between old centroid and new centroids
error = self.dist(c, c_old, None)
#Calculating using the Euclidean distance
while error != 0:
#Assigning the nearest point to its cluster
for i in range(len(X)):
distances = self.manhattan_dist(X[i], c)
cluster = np.argmin(distances)
clusters[i] = cluster
#Push the centroid values to the old centroid values
c_old = deepcopy(c)
#Finding the new mean of each cluster
for i in range(self.num_clusters):
points = [X[j] for j in range(len(X)) if clusters[j] == i]
c[i] = np.mean(points, axis=0)
error = self.dist(c, c_old, None)
print(f"Y labels: {self.Y}")
print(f"Predicted Values: {clusters}")
self.data_visualization(clusters, "Manhattan")
    def data_visualization(self, clusters, metric):
        """
        Visualizing the cluster formations: prints actual vs predicted
        class counts (via crosstabs) and draws paired bar charts of each
        feature grouped by true species (top row) and by predicted
        cluster (bottom row), then reports the accuracy.

        Parameters
        ----------
        clusters : List of integers
            This is list of predicted cluster values. Each row corresponds
            to each predicted value in X
        metric : string
            The argument states which metric is being used.

        Returns
        -------
        None.
        """
        # Re-read the raw data so the true species strings are available
        # alongside the predicted cluster ids.
        df2 = pd.read_csv("data/iris.data", names = ['slength',
                                                     'swidth',
                                                     'plength',
                                                     'pwidth',
                                                     'species'])
        df2["pred_Y"] = clusters
        iris_outcome = pd.crosstab(df2['species'], "count")
        print(f"Actual Class Count: {iris_outcome}")
        iris_outcome2 = pd.crosstab(df2['pred_Y'], "count")
        print(f"Predicted Class Count: {iris_outcome2}")
        iris_outcome3 = pd.crosstab([df2['species'],df2['pred_Y']], "count")
        print(f"Predicted Class Count: {iris_outcome3}")
        fig2, axes2 = plt.subplots(2,4, sharey=True)
        #fig2.set_title(f"Actual vs Predicted (metric)")
        plt.suptitle(f"Actual vs Predicted ({metric})")
        #Actual data charts
        sns.barplot(ax=axes2[0][0], x = "species", y = "slength", data=df2)
        sns.barplot(ax=axes2[0][1], x = "species", y = "swidth", data=df2)
        sns.barplot(ax=axes2[0][2], x = "species", y = "plength", data=df2)
        sns.barplot(ax=axes2[0][3], x = "species", y = "pwidth", data=df2)
        #Predicted data charts
        sns.barplot(ax=axes2[1][0], x = "pred_Y", y = "slength", data=df2)
        sns.barplot(ax=axes2[1][1], x = "pred_Y", y = "swidth", data=df2)
        sns.barplot(ax=axes2[1][2], x = "pred_Y", y = "plength", data=df2)
        sns.barplot(ax=axes2[1][3], x = "pred_Y", y = "pwidth", data=df2)
        plt.show()
        self.check_accuracy(df2, metric)
def predict(self):
X = self.X
#Assigning the centroids
c1 = [X[0][0],X[1][0],X[2][0]]
#first feature cluster centroids
c2 = [X[0][1],X[1][1],X[2][1]]
#second feature cluster centroids
c3 = [X[0][2],X[1][2],X[2][2]]
#third feature cluster centroids
c4 = [X[0][3],X[1][3],X[2][3]]
#fourth feature cluster centroids
c = np.array(list(zip(c1, c2, c3, c4)))
print(c)
#Initlizing a variable to store the old centroids
c_old = np.zeros(c.shape)
#Stores the centroid of the nearest point
clusters = np.zeros(len(X))
#Stores the error or difference between old centroid and new centroids
error = self.dist(c, c_old, None)
while error != 0:
#Assigning the nearest point to its cluster
for i in range(len(X)):
print("X[i]:")
print(X[i])
print("c:")
print(c)
distances = self.dist(X[i], c)
cluster = np.argmin(distances)
clusters[i] = cluster
#Push the centroid values to the old centroid values
c_old = | |
<reponame>80avin/goodix-fp-dump<gh_stars>1-10
from struct import pack as encode
from struct import unpack as decode
from sys import version_info
from time import sleep, time
from typing import List, Literal, Optional, Tuple, Union
from usb.control import get_status
from usb.core import Device as USBDevice
from usb.core import USBError, USBTimeoutError, find
from usb.legacy import (CLASS_DATA, CLASS_VENDOR_SPEC, ENDPOINT_IN,
ENDPOINT_OUT, ENDPOINT_TYPE_BULK)
from usb.util import endpoint_direction, endpoint_type, find_descriptor
# f-strings and other 3.8 features are used throughout; refuse older runtimes.
if version_info < (3, 8):
    raise SystemError("This program require Python 3.8 or newer")
# Message-pack flag bytes (first byte of every USB packet).
FLAGS_MESSAGE_PROTOCOL: Literal[0xa0] = 0xa0
FLAGS_TRANSPORT_LAYER_SECURITY: Literal[0xb0] = 0xb0
# Protocol command bytes understood by the Goodix MCU.
COMMAND_NOP: Literal[0x00] = 0x00
COMMAND_MCU_GET_IMAGE: Literal[0x20] = 0x20
COMMAND_MCU_SWITCH_TO_FDT_DOWN: Literal[0x32] = 0x32
COMMAND_MCU_SWITCH_TO_FDT_UP: Literal[0x34] = 0x34
COMMAND_MCU_SWITCH_TO_FDT_MODE: Literal[0x36] = 0x36
COMMAND_NAV_0: Literal[0x50] = 0x50
COMMAND_MCU_SWITCH_TO_IDLE_MODE: Literal[0x70] = 0x70
COMMAND_WRITE_SENSOR_REGISTER: Literal[0x80] = 0x80
COMMAND_READ_SENSOR_REGISTER: Literal[0x82] = 0x82
COMMAND_UPLOAD_CONFIG_MCU: Literal[0x90] = 0x90
COMMAND_SET_POWERDOWN_SCAN_FREQUENCY: Literal[0x94] = 0x94
COMMAND_ENABLE_CHIP: Literal[0x96] = 0x96
COMMAND_RESET: Literal[0xa2] = 0xa2
COMMAND_MCU_ERASE_APP: Literal[0xa4] = 0xa4
COMMAND_READ_OTP: Literal[0xa6] = 0xa6
COMMAND_FIRMWARE_VERSION: Literal[0xa8] = 0xa8
COMMAND_QUERY_MCU_STATE: Literal[0xae] = 0xae
COMMAND_ACK: Literal[0xb0] = 0xb0
COMMAND_REQUEST_TLS_CONNECTION: Literal[0xd0] = 0xd0
COMMAND_TLS_SUCCESSFULLY_ESTABLISHED: Literal[0xd4] = 0xd4
COMMAND_PRESET_PSK_WRITE_R: Literal[0xe0] = 0xe0
COMMAND_PRESET_PSK_READ_R: Literal[0xe4] = 0xe4
# Firmware/bootloader (IAP) commands.
COMMAND_WRITE_FIRMWARE: Literal[0xf0] = 0xf0
COMMAND_READ_FIRMWARE: Literal[0xf2] = 0xf2
COMMAND_CHECK_FIRMWARE: Literal[0xf4] = 0xf4
COMMAND_GET_IAP_VERSION: Literal[0xf6] = 0xf6
def encode_command(cmd0: int, cmd1: int) -> int:
    """Pack the two command nibble fields into one command byte
    (cmd0 in bits 4-7, cmd1 in bits 1-3; bit 0 stays clear)."""
    return (cmd0 << 4) | (cmd1 << 1)
def decode_command(command: int) -> Tuple[int, int]:
    """Split a command byte into (cmd0, cmd1); bit 0 must be clear."""
    if command & 0x1:
        raise ValueError("Invalid command")
    cmd0 = (command >> 4) & 0xf
    cmd1 = (command >> 1) & 0x7
    return cmd0, cmd1
def encode_message_pack(payload: bytes,
                        flags: int = FLAGS_MESSAGE_PROTOCOL,
                        length: Optional[int] = None) -> bytes:
    """Prefix payload with a message-pack header: flags byte, 16-bit
    little-endian length, and a one-byte checksum over those three bytes."""
    if length is None:
        length = len(payload)
    header = encode("<B", flags) + encode("<H", length)
    return header + encode("<B", sum(header) & 0xff) + payload
def decode_message_pack(data: bytes) -> Tuple[bytes, int, int]:
    """Split a message pack into (payload, flags, declared length);
    raises ValueError when the 3-byte header checksum does not match."""
    flags = data[0]
    length = decode("<H", data[1:3])[0]
    if data[3] != sum(data[0:3]) & 0xff:
        raise ValueError("Invalid data")
    return data[4:4 + length], flags, length
def check_message_pack(data: bytes,
                       flags: int = FLAGS_MESSAGE_PROTOCOL) -> bytes:
    """Decode a message pack, validate its flags and that the payload is
    at least as long as declared, and return the payload."""
    payload, actual_flags, length = decode_message_pack(data)
    if actual_flags != flags or len(payload) < length:
        raise ValueError("Invalid message pack")
    return payload
def encode_message_protocol(payload: bytes,
                            command: int,
                            length: Optional[int] = None,
                            checksum: bool = True) -> bytes:
    """Build a protocol message: command byte, 16-bit length (+1 for the
    trailer), payload, trailer byte.  The trailer is 0xaa minus the byte
    sum of everything before it, or the fixed byte 0x88 when checksum is
    disabled."""
    if length is None:
        length = len(payload)
    body = encode("<B", command) + encode("<H", length + 1) + payload
    trailer = 0xaa - sum(body) & 0xff if checksum else 0x88
    return body + encode("<B", trailer)
def decode_message_protocol(data: bytes,
                            checksum: bool = True) -> Tuple[bytes, int, int]:
    """Split a protocol message into (payload, command, payload length).

    The declared length includes the trailer byte; the returned length
    does not.  Validates the trailer: checksummed messages must carry
    0xaa minus the byte sum of everything before it, others 0x88."""
    command = data[0]
    length = decode("<H", data[1:3])[0]
    if checksum:
        expected = 0xaa - sum(data[0:2 + length]) & 0xff
    else:
        expected = 0x88
    if data[2 + length] != expected:
        raise ValueError("Invalid data")
    return data[3:2 + length], command, length - 1


def check_message_protocol(data: bytes,
                           command: int,
                           checksum: bool = True) -> bytes:
    """Decode a protocol message, validate the command and payload length,
    and return the payload."""
    payload, actual_command, length = decode_message_protocol(data, checksum)
    if actual_command != command or len(payload) < length:
        raise ValueError("Invalid message protocol")
    return payload
def decode_ack(data: bytes) -> Tuple[int, bool]:
    """Return (acked command, status bit 1) from an ACK payload.

    Bit 0 of the status byte must be set; bit 1 is handed back to the
    caller (its exact meaning is device-defined)."""
    status = data[1]
    if not status & 0x1:
        raise ValueError("Invalid data")
    return data[0], status & 0x2 == 0x2


def check_ack(data: bytes, command: int) -> bool:
    """Decode an ACK, verify it acknowledges the expected command, and
    return its status flag."""
    acked_command, flag = decode_ack(data)
    if acked_command != command:
        raise ValueError("Invalid ack")
    return flag
def decode_image(data: bytes) -> List[int]:
    """Unpack the sensor's 12-bit packed image stream into pixel values.

    Every 6-byte chunk encodes four 12-bit pixels in the interleaved
    layout below."""
    pixels = []
    for offset in range(0, len(data), 6):
        b0, b1, b2, b3, b4, b5 = data[offset:offset + 6]
        pixels.append(((b0 & 0xf) << 8) + b1)
        pixels.append((b3 << 4) + (b0 >> 4))
        pixels.append(((b5 & 0xf) << 8) + b2)
        pixels.append((b4 << 4) + (b5 >> 4))
    return pixels
def decode_mcu_state(
        data: bytes) -> Tuple[int, bool, bool, bool, int, int, int, int, int]:
    """Unpack a COMMAND_QUERY_MCU_STATE reply into its scalar fields.

    Field offsets follow the original layout (exact field semantics are
    device-defined).  BUG FIX: the 16-bit field at offset 10 was unpacked
    from a one-byte slice (struct.error at runtime) and the unpack result
    was returned as a tuple instead of an int; it is now read from
    data[10:12] and unwrapped with [0].
    """
    return (data[0],
            data[1] & 0x1 == 0x1,
            data[1] & 0x2 == 0x2,
            data[1] & 0x4 == 0x4,
            data[2] >> 4,
            data[9],
            decode("<H", data[10:12])[0],
            data[12],
            data[13])
class Device:
    def __init__(self, product: int, timeout: Optional[float] = 5) -> None:
        """Find and open the Goodix device with the given USB product id.

        Polls find()/get_status() until the device enumerates and answers
        (or `timeout` seconds elapse), resolves the bulk data interface
        and its IN/OUT endpoints, then drains any replies left over from
        a previous session.

        Raises USBTimeoutError when the device never appears (or stays in
        an invalid state) and USBError when the data interface or a bulk
        endpoint is missing.
        """
        print(f"__init__({product}, {timeout})")
        if timeout is not None:
            # Convert the duration into an absolute deadline.
            timeout += time()
        while True:
            device = find(idVendor=0x27c6, idProduct=product)
            if device is not None:
                try:
                    get_status(device)
                    break
                except USBError as error:
                    # Backend codes -1 (I/O) and -4 (no device) mean "not
                    # ready yet"; anything else is a real failure.
                    if (error.backend_error_code != -1 and
                            error.backend_error_code != -4):
                        raise error
            if timeout is not None and time() > timeout:
                if device is None:
                    raise USBTimeoutError("Device not found", -5, 19)
                raise USBTimeoutError("Invalid device state", -12, 131)
            sleep(0.01)
        self.device: USBDevice = device
        print(f"Found Goodix device: \"{self.device.product}\" "
              f"from \"{self.device.manufacturer}\" "
              f"on bus {self.device.bus} "
              f"address {self.device.address}.")
        # The data interface is either CDC data or vendor-specific class.
        interface_data = find_descriptor(
            self.device.get_active_configuration(),
            custom_match=lambda interface: interface.bInterfaceClass ==
            CLASS_DATA or interface.bInterfaceClass == CLASS_VENDOR_SPEC)
        if interface_data is None:
            raise USBError("Interface data not found", -5, 6)
        print(f"Found interface data: {interface_data.bInterfaceNumber}")
        # Bulk IN endpoint (device -> host).
        endpoint_in = find_descriptor(
            interface_data,
            custom_match=lambda endpoint: endpoint_direction(
                endpoint.bEndpointAddress) == ENDPOINT_IN and endpoint_type(
                    endpoint.bmAttributes) == ENDPOINT_TYPE_BULK)
        if endpoint_in is None:
            raise USBError("Endpoint in not found", -5, 6)
        self.endpoint_in: int = endpoint_in.bEndpointAddress
        print(f"Found endpoint in: {hex(self.endpoint_in)}")
        # Bulk OUT endpoint (host -> device).
        endpoint_out = find_descriptor(
            interface_data,
            custom_match=lambda endpoint: endpoint_direction(
                endpoint.bEndpointAddress) == ENDPOINT_OUT and endpoint_type(
                    endpoint.bmAttributes) == ENDPOINT_TYPE_BULK)
        if endpoint_out is None:
            raise USBError("Endpoint out not found", -5, 6)
        self.endpoint_out: int = endpoint_out.bEndpointAddress
        print(f"Found endpoint out: {hex(self.endpoint_out)}")
        # Empty device reply buffer (Current patch while waiting for a fix)
        self.empty_buffer()
def empty_buffer(self) -> None:
print("empty_buffer()")
try:
while True:
self.read(timeout=0.1)
except USBTimeoutError as error:
if error.backend_error_code == -7:
return
raise error
def write(self, data: bytes, timeout: Optional[float] = 1) -> None:
timeout = 0 if timeout is None else round(timeout * 1000)
length = len(data)
if length % 0x40:
data += b"\x00" * (0x40 - length % 0x40)
for i in range(0, length, 0x40):
self.device.write(self.endpoint_out, data[i:i + 0x40], timeout)
def read(self, size: int = 0x2000, timeout: Optional[float] = 1) -> bytes:
timeout = 0 if timeout is None else round(timeout * 1000)
return self.device.read(self.endpoint_in, size, timeout).tobytes()
    def wait_disconnect(self, timeout: Optional[float] = 5) -> None:
        """Block until the device disappears from the bus.

        Polls get_status() every 10 ms; backend codes -1/-4 mean the
        device is gone.  Raises USBTimeoutError when the device is still
        connected after `timeout` seconds.
        """
        print(f"wait_disconnect({timeout})")
        if timeout is not None:
            # Convert the duration into an absolute deadline.
            timeout += time()
        while True:
            try:
                get_status(self.device)
            except USBError as error:
                # -1 (I/O) / -4 (no device) indicate disconnection.
                if (error.backend_error_code == -1 or
                        error.backend_error_code == -4):
                    break
                raise error
            if timeout is not None and time() > timeout:
                raise USBTimeoutError("Device is still connected", -7, 110)
            sleep(0.01)
    def nop(self) -> None:
        """Send a no-op command (no checksum) and, if the device replies
        within 0.1 s, validate the ACK; a timeout is treated as success."""
        print("nop()")
        self.write(
            encode_message_pack(
                encode_message_protocol(b"\x00\x00\x00\x00",
                                        COMMAND_NOP,
                                        checksum=False)))
        try:
            message = self.read(timeout=0.1)
        except USBTimeoutError as error:
            # The device does not always acknowledge a NOP; a timeout
            # (backend code -7) is fine.
            if error.backend_error_code == -7:
                return
            raise error
        check_ack(
            check_message_protocol(check_message_pack(message), COMMAND_ACK),
            COMMAND_NOP)
    def mcu_get_image(self) -> bytes:
        """Request a fingerprint image and return the raw (TLS-flagged)
        payload, assembled from two reads."""
        print("mcu_get_image()")
        self.write(
            encode_message_pack(
                encode_message_protocol(b"\x01\x00", COMMAND_MCU_GET_IMAGE)))
        check_ack(
            check_message_protocol(check_message_pack(self.read()),
                                   COMMAND_ACK), COMMAND_MCU_GET_IMAGE)
        # The image spans more than one transfer; concatenate both reads
        # before unwrapping the TLS-flagged message pack.
        return check_message_pack(self.read() + self.read(0x1000),
                                  FLAGS_TRANSPORT_LAYER_SECURITY)
    def mcu_switch_to_fdt_down(self, mode: bytes) -> bytes:
        """Arm finger-down touch detection with `mode` and wait (without
        timeout) for the touch-down event payload."""
        print(f"mcu_switch_to_fdt_down({mode})")
        self.write(
            encode_message_pack(
                encode_message_protocol(mode, COMMAND_MCU_SWITCH_TO_FDT_DOWN)))
        check_ack(
            check_message_protocol(check_message_pack(self.read()),
                                   COMMAND_ACK), COMMAND_MCU_SWITCH_TO_FDT_DOWN)
        # The event only arrives when a finger touches; block indefinitely.
        return check_message_protocol(
            check_message_pack(self.read(timeout=None)),
            COMMAND_MCU_SWITCH_TO_FDT_DOWN)

    def mcu_switch_to_fdt_up(self, mode: bytes) -> bytes:
        """Arm finger-up touch detection with `mode` and wait (without
        timeout) for the touch-up event payload."""
        print(f"mcu_switch_to_fdt_up({mode})")
        self.write(
            encode_message_pack(
                encode_message_protocol(mode, COMMAND_MCU_SWITCH_TO_FDT_UP)))
        check_ack(
            check_message_protocol(check_message_pack(self.read()),
                                   COMMAND_ACK), COMMAND_MCU_SWITCH_TO_FDT_UP)
        # The event only arrives when the finger lifts; block indefinitely.
        return check_message_protocol(
            check_message_pack(self.read(timeout=None)),
            COMMAND_MCU_SWITCH_TO_FDT_UP)

    def mcu_switch_to_fdt_mode(self, mode: bytes) -> bytes:
        """Switch the MCU into finger-detection mode and return the
        immediate reply payload."""
        print(f"mcu_switch_to_fdt_mode({mode})")
        self.write(
            encode_message_pack(
                encode_message_protocol(mode, COMMAND_MCU_SWITCH_TO_FDT_MODE)))
        check_ack(
            check_message_protocol(check_message_pack(self.read()),
                                   COMMAND_ACK), COMMAND_MCU_SWITCH_TO_FDT_MODE)
        return check_message_protocol(check_message_pack(self.read()),
                                      COMMAND_MCU_SWITCH_TO_FDT_MODE)
    def nav_0(self) -> bytes:
        """Send the NAV_0 command and return its reply payload (the reply
        carries the fixed 0x88 trailer, hence checksum=False)."""
        print("nav_0()")
        self.write(
            encode_message_pack(
                encode_message_protocol(b"\x01\x00", COMMAND_NAV_0)))
        check_ack(
            check_message_protocol(check_message_pack(self.read()),
                                   COMMAND_ACK), COMMAND_NAV_0)
        return check_message_protocol(check_message_pack(self.read()),
                                      COMMAND_NAV_0, False)
def mcu_switch_to_idle_mode(self, sleep_time: int) -> None:
print(f"mcu_switch_to_idle_mode({sleep_time})")
self.write(
encode_message_pack(
encode_message_protocol(
encode("<B", sleep_time) + b"\x00",
COMMAND_MCU_SWITCH_TO_IDLE_MODE)))
check_ack(
check_message_protocol(check_message_pack(self.read()),
COMMAND_ACK),
COMMAND_MCU_SWITCH_TO_IDLE_MODE)
def write_sensor_register(self, address: Union[int, List[int]],
value: Union[bytes, List[bytes]]) -> None:
print(f"write_sensor_register({address}, {value})")
if isinstance(address, int):
if not isinstance(value, bytes):
raise ValueError("Invalid value")
message = b"\x00" + encode("<H", address) + value
else:
if isinstance(value, bytes):
raise ValueError("Invalid value")
length = len(address)
if len(value) != length:
raise ValueError("Invalid value")
message = b""
message += b"\x01"
for i in length:
if len(value[i]) != 2:
raise ValueError("Invalid value")
message += encode("<H", address[i])
message += value[i]
self.write(
encode_message_pack(
encode_message_protocol(message,
COMMAND_WRITE_SENSOR_REGISTER)))
check_ack(
check_message_protocol(check_message_pack(self.read()),
COMMAND_ACK), COMMAND_WRITE_SENSOR_REGISTER)
def read_sensor_register(self, address: Union[int, List[int]],
length: int) -> Union[bytes, List[bytes]]:
print(f"read_sensor_register({address}, {length})")
if isinstance(address, int):
message = b"\x00" + encode("<H", address) + encode("<B", length)
else:
if length != 2:
raise ValueError("Invalid length")
message = b""
message += b"\x01"
for value in address:
message += encode("<H", value)
message += encode("<B", length)
self.write(
encode_message_pack(
encode_message_protocol(message, COMMAND_READ_SENSOR_REGISTER)))
check_ack(
check_message_protocol(check_message_pack(self.read()),
COMMAND_ACK), COMMAND_READ_SENSOR_REGISTER)
message = check_message_protocol(check_message_pack(self.read()),
COMMAND_READ_SENSOR_REGISTER)
if isinstance(address, int):
if len(message) < length:
raise SystemError("Invalid response length")
return message
length = len(message) - 1
if length < len(address) * 2:
raise SystemError("Invalid response length")
value = []
for i in range(0, length, 2):
value.append(message[i:i + 2])
return value
def upload_config_mcu(self, config: bytes) -> bool:
print(f"upload_config_mcu({config})")
self.write(
encode_message_pack(
encode_message_protocol(config, COMMAND_UPLOAD_CONFIG_MCU)))
check_ack(
check_message_protocol(check_message_pack(self.read()),
COMMAND_ACK), COMMAND_UPLOAD_CONFIG_MCU)
message = check_message_protocol(check_message_pack(self.read()),
COMMAND_UPLOAD_CONFIG_MCU)
if len(message) < 1:
raise SystemError("Invalid response length")
return message[0] == 0x01
def set_powerdown_scan_frequency(self,
powerdown_scan_frequency: int) -> bool:
print(f"set_powerdown_scan_frequency({powerdown_scan_frequency})")
self.write(
encode_message_pack(
encode_message_protocol(encode("<H", powerdown_scan_frequency),
COMMAND_SET_POWERDOWN_SCAN_FREQUENCY)))
check_ack(
check_message_protocol(check_message_pack(self.read()),
COMMAND_ACK),
COMMAND_SET_POWERDOWN_SCAN_FREQUENCY)
message = check_message_protocol(check_message_pack(self.read()),
COMMAND_SET_POWERDOWN_SCAN_FREQUENCY)
if len(message) < 1:
raise SystemError("Invalid response length")
return message[0] == 0x01
def enable_chip(self, enable: bool) -> None:
print(f"enable_chip({enable})")
self.write(
encode_message_pack(
encode_message_protocol(
encode("<B", 0x1 if enable else 0x0) + b"\x00",
| |
Get Pond Shape
@return: a PondShape object, holding all the information describing the shape of the lake.
@rtype: PondShape
'''
return self.pond_shape_object
def get_benthic_photosynthesis_measurements(self):
'''
Get Benthic Photosynthesis Measurements
@return: the list containing all the Benthic Photosynthesis Measurement objects, that hold the information regarding benthic photosynthesis.
@rtype: list containing BenthicPhotosynthesisMeasurement objects
'''
return self.__benthic_photosynthesis_measurements
    def get_phytoplankton_photosynthesis_measurements(self):
        '''
        Get Phytoplankton Photosynthesis Measurements
        @return: the list containing all the Phytoplankton Photosynthesis Measurement objects, that hold the information regarding phytoplankton photosynthesis.
        @rtype: list containing PhytoplanktonPhotoSynthesisMeasurement objects
        '''
        return self.__phytoplankton_photosynthesis_measurements
def get_max_depth(self):
'''
Get Max Depth
Calls get max depth method in PondShape instance.
@return: maximum depth of lake
'''
return self.get_pond_shape().get_max_depth()
def get_time_interval(self):
'''
Get Time Interval
Get the time interval used for calculations.
@rtype: float
'''
return self.__time_interval
def get_list_of_times(self):
'''
Gets a list of the times of day used for calculations.
Example: if the day length was 2 hours, and the time interval was 0.25 (quarter-hours), this would return
[0.0,0.25,0.5,0.75,1.0,1.25,1.5,1.75,2.0]
@rtype: list
'''
start_time = 0.0
end_time = self.get_length_of_day()
time_interval = self.get_time_interval()
time_list = []
time =start_time
while time<=end_time:
time_list.append(time)
time+=time_interval
return time_list
#######################
# SETTERS
#######################
def set_year(self, year):
'''
Set year
Validates it first using validate_year
'''
self.__year = self.validate_year(year)
def set_lake_id(self, lake_id):
'''
Set Lake ID
@param lake_id:
'''
self.__lake_ID = lake_id
def set_day_of_year(self, day_of_year):
'''
Set Day Of Year
Validates the value
@param day_of_year:
'''
validated_day_of_year = self.validate_day_of_year(day_of_year)
self.__day_of_year = validated_day_of_year
def set_length_of_day(self, length_of_day):
'''
Set Length Of Day
Validates the value
@param length_of_day:
'''
validated_length_of_day = self.validate_length_of_day(length_of_day)
self.__length_of_day = validated_length_of_day
def set_noon_surface_light(self, noon_surface_light):
'''
Set Noon Surface Light
Validates the value
@param noon_surface_light:
'''
validated_light = self.validate_noon_surface_light(noon_surface_light)
self.__noon_surface_light = validated_light
def set_light_attenuation_coefficient(self, light_attenuation_coefficient):
'''
Set Light Attenuation Coefficient
Validates the value
@param light_attenuation_coefficient: Also known as light extinction coefficient, or just kd. Units in inverse meters.
'''
validated_light_attenuation_coefficient = self.validate_light_attenuation_coefficient(light_attenuation_coefficient)
self.__light_attenuation_coefficient = validated_light_attenuation_coefficient
def set_time_interval(self, time_interval):
'''
Set Time Interval
@param time_interval: fractional hours. For example, 0.5 = half hours, 0.25 = 15 minutes.
'''
self.__time_interval = time_interval
def set_pond_shape(self, pond_shape_object):
'''
Set Time Interval
Validates the value
@param pond_shape_object: a PondShape object of some type. So long as it extends PondShape, it should work.
'''
if(isinstance(pond_shape_object, PondShape)):
self.pond_shape_object = pond_shape_object
else:
raise Exception("cannot set pond shape. Invalid type")
def set_benthic_photosynthesis_measurements(self, values=[]):
'''
Set Benthic Photosynthesis Measurements
Given a list of BenthicPhotosynthesisMeasurement objects, replaces the current list with values.
Validates the list using validate_types_of_all_items_in_list()
'''
all_valid = self.validate_types_of_all_items_in_list(values, BenthicPhotosynthesisMeasurement)
if(all_valid):
self.__benthic_photosynthesis_measurements = values
else:
raise Exception("ERROR: all values in benthic_photosynthesis_measurements must be of type BenthicPhotosynthesisMeasurement")
def set_phytoplankton_photosynthesis_measurements(self, values=[]):
'''
Set Phytoplankton Photosynthesis Measurements
Given a list of PhytoPlanktonPhotosynthesisMeasurement objects, replaces the current list with values.
Validates the list using validate_types_of_all_items_in_list()
Also makes sure that there are less than or equal to MAXIMUM_NUMBER_OF_THERMAL_LAYERS measurements.
'''
# TODO: use a dict to ensure 3 unique layers.
all_valid = self.validate_types_of_all_items_in_list(values, PhytoPlanktonPhotosynthesisMeasurement)
length_valid = len(values) <= self.MAXIMUM_NUMBER_OF_THERMAL_LAYERS
if(not all_valid):
raise Exception("ERROR: all values in phytoplankton_photosynthesis_measurements must be of type PhytoPlanktonPhotosynthesisMeasurement")
elif(not length_valid):
raise Exception("ERROR: there must be 0 to 3 thermal layers")
else:
self.__phytoplankton_photosynthesis_measurements = values
#############################
# DELETERS
#############################
    # Deleters wired into the property definitions below; each removes the
    # private backing attribute outright, so a subsequent read raises
    # AttributeError until the corresponding setter is called again.
    def del_year(self):
        del self.__year
    def del_lake_id(self):
        del self.__lake_ID
    def del_day_of_year(self):
        del self.__day_of_year
    def del_length_of_day(self):
        del self.__length_of_day
    def del_noon_surface_light(self):
        del self.__noon_surface_light
    def del_light_attenuation_coefficient(self):
        del self.__light_attenuation_coefficient
    def del_benthic_photosynthesis_measurements(self):
        del self.__benthic_photosynthesis_measurements
    def del_phytoplankton_photosynthesis_measurements(self):
        del self.__phytoplankton_photosynthesis_measurements
    def del_time_interval(self):
        del self.__time_interval
########################################
# Properties
########################################
#TODO: write decent docstrings
year = property(get_year, set_year, del_year, "year's docstring")
lake_ID = property(get_lake_id, set_lake_id, del_lake_id, "lake_ID's docstring")
day_of_year = property(get_day_of_year, set_day_of_year, del_day_of_year, "day_of_year's docstring")
length_of_day = property(get_length_of_day, set_length_of_day, del_length_of_day, "length_of_day's docstring")
noon_surface_light = property(get_noon_surface_light, set_noon_surface_light, del_noon_surface_light, "noon_surface_light's docstring")
light_attenuation_coefficient = property(get_light_attenuation_coefficient, set_light_attenuation_coefficient, del_light_attenuation_coefficient, "light_attenuation_coefficient's docstring")
benthic_photosynthesis_measurements = property(get_benthic_photosynthesis_measurements, set_benthic_photosynthesis_measurements, del_benthic_photosynthesis_measurements, "benthic_photosynthesis_measurements's docstring")
phytoplankton_photosynthesis_measurements = property(get_phytoplankton_photosynthesis_measurements, set_phytoplankton_photosynthesis_measurements, del_phytoplankton_photosynthesis_measurements, "phytoplankton_photosynthesis_measurements's docstring")
time_interval = property(get_time_interval, set_time_interval, del_time_interval, "time_interval's docstring")
########################################
# Appenders/mutators
########################################
def add_benthic_measurement(self, measurement=BenthicPhotosynthesisMeasurement):
if(isinstance(measurement, BenthicPhotosynthesisMeasurement)):
self.benthic_photosynthesis_measurements.append(measurement)
else:
raise Exception("ERROR: cannot add measurement to benthic measurements list - measurement must be of type BenthicPhotosynthesisMeasurement")
def add_benthic_measurement_if_photic(self, measurement):
z1Percent = self.calculate_depth_of_specific_light_percentage(self.PHOTIC_ZONE_LIGHT_PENETRATION_LEVEL_LOWER_BOUND)
if(measurement.get_depth() <= z1Percent):
self.add_benthic_measurement(measurement)
else:
raise Exception("measurement not within photic zone")
def add_phytoplankton_measurement(self, measurement=PhytoPlanktonPhotosynthesisMeasurement):
if(isinstance(measurement, PhytoPlanktonPhotosynthesisMeasurement)):
if(len(self.phytoplankton_photosynthesis_measurements) > 0):
existing_measurement = next((i for i in self.phytoplankton_photosynthesis_measurements if (i.get_thermal_layer() == measurement.get_thermal_layer())), None) # source: http://stackoverflow.com/questions/7125467/find-object-in-list-that-has-attribute-equal-to-some-value-that-meets-any-condi
if(existing_measurement is not None):
index = measurement.get_thermal_layer() - 1
self.phytoplankton_photosynthesis_measurements.remove(existing_measurement)
try:
self.phytoplankton_photosynthesis_measurements.insert(index, measurement)
except TypeError:
error = "TypeError: index is ", index, " and measurement is ", measurement, " for pond ", self.get_lake_id(), " day ", self.get_day_of_year()
raise Exception(error)
self.phytoplankton_photosynthesis_measurements.append(measurement)
else:
raise Exception("ERROR: cannot add measurement to benthic measurements list - measurement must be of type PhytoPlanktonPhotosynthesisMeasurement")
def remove_benthic_measurement(self, measurement=BenthicPhotosynthesisMeasurement):
self.benthic_photosynthesis_measurements.remove(measurement)
def update_shape(self, other_pond_shape):
our_shape = self.get_pond_shape()
if(isinstance(other_pond_shape, BathymetricPondShape)):
our_shape.update_shape(other_pond_shape)
self.pond_shape_object = our_shape
############################################
############################################
# # SCIENCE FUNCTIONS
# # This section is where the science occurs.
############################################
############################################
###########################################################
# BENTHIC PHOTO METHODS
###########################################################
##############################
# BENTHIC PRIMARY PRODUCTIVITY
##############################
def calculate_daily_whole_lake_benthic_primary_production_m2(self, depth_interval=DEFAULT_DEPTH_INTERVAL_FOR_CALCULATIONS, use_littoral_area=True):
'''
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Almost everything else in this entire project works to make this method work.
#TODO: (someday) allow specification of littoral or surface area
#TODO: (someday) user-specified depth interval.
@return: Benthic Primary Production, mg C per meter squared, per day.
@rtype: float
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
'''
time_interval = self.get_time_interval()
length_of_day = self.get_length_of_day() # TODO: Fee normalized this around zero. Doesn't seem necessary, but might affect the periodic function.
benthic_primary_production_answer = 0.0 # mg C per day
current_depth_interval = 0.0
previous_depth = 0.0
current_depth = 0.0
total_littoral_area=0.0
total_littoral_area = self.calculate_total_littoral_area()
total_surface_area = self.get_pond_shape().get_water_surface_area_at_depth(0.0)
# for each depth interval #TODO: integration over whole lake?
while current_depth < self.calculate_photic_zone_lower_bound():
bpprz = 0.0 # mg C* m^-2 *day
# depth interval calculation
previous_depth = current_depth
current_depth += depth_interval
current_depth_interval = current_depth - previous_depth
area = self.get_pond_shape().get_sediment_area_at_depth(current_depth, current_depth_interval)
try:
ik_z = self.get_benthic_ik_at_depth(current_depth)
benthic_pmax_z = self.get_benthic_pmax_at_depth(current_depth)
except:
raise
if(True == use_littoral_area):
f_area = area / total_littoral_area # TODO: these add up to 1.0, right?
else:
f_area = area / total_surface_area
# for every time interval
t = 0.0 # start of day
while t < length_of_day:
bpprzt = 0.0
izt = self.calculate_light_at_depth_and_time(current_depth, t)
bpprzt = self.calculate_benthic_primary_production_z_t(izt, benthic_pmax_z, ik_z)
bpprz += bpprzt
t += time_interval
bpprz = bpprz / (self.BASE_TIME_UNIT / time_interval) # account for the fractional time interval. e.g. dividing by 1/0.25 is equiv to dividing by 4
weighted_bpprz = bpprz * f_area # normalizing
benthic_primary_production_answer += weighted_bpprz
return benthic_primary_production_answer
def get_benthic_pmax_at_depth(self, depth=0.0):
'''
Get Benthic Pmax At Depth
Uses interpolation to get the pmax value at the specified depth, if not known.
Validates depth first.
@return: value of pmax at specified depth.
@rtype: float
'''
# if depth is lower than the depth of 1% light, pmax approaches zero.
if(self.check_if_depth_in_photic_zone(depth) == False):
return 0
validated_depth = self.validate_depth(depth)
pmax_values_list = []
depths_list = []
for measurement_value in self.get_benthic_photosynthesis_measurements():
pmax_value = measurement_value.get_pmax()
depth_value = measurement_value.get_depth()
pmax_values_list.append(pmax_value)
depths_list.append(depth_value)
bpmax_at_depth = self.interpolate_values_at_depth(validated_depth, depths_list, pmax_values_list)
return bpmax_at_depth
def get_benthic_ik_at_depth(self, depth=0.0):
'''
Get Benthic Ik At Depth
Uses interpolation to get the Ik value at the specified depth, if not known.
Validates depth first.
@return: value of pmax at specified depth.
@rtype: float
'''
validated_depth = self.validate_depth(depth)
values_list = []
depths_list = []
for measurement_value in self.get_benthic_photosynthesis_measurements():
ik_value = measurement_value.get_ik()
depth_value = measurement_value.get_depth()
values_list.append(ik_value)
depths_list.append(depth_value)
try:
ik_at_depth = self.interpolate_values_at_depth(validated_depth, depths_list, values_list)
except:
raise
return ik_at_depth
def calculate_benthic_primary_production_z_t(self, light_at_time_and_depth, benthic_pmax_z_t, benthic_ik_z_t):
'''
Benthic primary production rate at a specific depth and time
@return:
@rtype: float
'''
bpprzt = benthic_pmax_z_t * np.tanh(light_at_time_and_depth / benthic_ik_z_t)
return bpprzt
def get_benthic_measurements_sorted_by_depth(self):
'''
Sorted BenthicPhotosynthesisMeasurement list, by depth.
@return: sorted benthic measurements
@rtype: list of BenthicPhotosynthesisMeasurement objects.
'''
# http://stackoverflow.com/questions/403421/how-to-sort-a-list-of-objects-in-python-based-on-an-attribute-of-the-objects
unsorted_measurements = self.get_benthic_photosynthesis_measurements()
sorted_measurements = sorted(unsorted_measurements, key=lambda x: x.get_depth(), reverse=False)
return sorted_measurements
###########################################################
# PHYTO PHOTO METHODS
###########################################################
def calculate_daily_whole_lake_phytoplankton_primary_production_m2(self,
depth_interval=DEFAULT_DEPTH_INTERVAL_FOR_CALCULATIONS,
use_photoinhibition=None):
'''
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
Calculate Daily Whole-lake Phytoplankton Primary Production
Almost everything | |
wcsprm = _wcs.Wcsprm(header=header_bytes, key=key,
relax=relax, keysel=keysel_flags,
colsel=colsel)
except _wcs.NoWcsKeywordsFoundError:
# The header may have SIP or distortions, but no core
# WCS. That isn't an error -- we want a "default"
# (identity) core Wcs transformation in that case.
if colsel is None:
wcsprm = _wcs.Wcsprm(header=None, key=key,
relax=relax, keysel=keysel_flags,
colsel=colsel)
else:
raise
if naxis is not None:
wcsprm = wcsprm.sub(naxis)
self.naxis = wcsprm.naxis
header = fits.Header.fromstring(header_string)
det2im = self._read_det2im_kw(header, fobj, err=minerr)
cpdis = self._read_distortion_kw(
header, fobj, dist='CPDIS', err=minerr)
sip = self._read_sip_kw(header)
if (wcsprm.naxis != 2 and
(det2im[0] or det2im[1] or cpdis[0] or cpdis[1] or sip)):
raise ValueError(
"""
Paper IV lookup tables and SIP distortions only work in 2 dimensions.
However, WCSLIB has detected {0} dimensions in the core WCS keywords.
To use core WCS in conjunction with Paper IV lookup tables or SIP
distortion, you must select or reduce these to 2 dimensions using the
naxis kwarg.
""".format(wcsprm.naxis))
header_naxis = header.get('NAXIS', None)
if header_naxis is not None and header_naxis < wcsprm.naxis:
warnings.warn(
"The WCS transformation has more axes ({0:d}) than the "
"image it is associated with ({1:d})".format(
wcsprm.naxis, header_naxis), FITSFixedWarning)
self._get_naxis(header)
WCSBase.__init__(self, sip, cpdis, wcsprm, det2im)
if fix:
self.fix(translate_units=translate_units)
for fd in close_fds:
fd.close()
def __copy__(self):
new_copy = self.__class__()
WCSBase.__init__(new_copy, self.sip,
(self.cpdis1, self.cpdis2),
self.wcs,
(self.det2im1, self.det2im2))
new_copy.__dict__.update(self.__dict__)
return new_copy
def __deepcopy__(self, memo):
new_copy = self.__class__()
new_copy.naxis = copy.deepcopy(self.naxis, memo)
WCSBase.__init__(new_copy, copy.deepcopy(self.sip, memo),
(copy.deepcopy(self.cpdis1, memo),
copy.deepcopy(self.cpdis2, memo)),
copy.deepcopy(self.wcs, memo),
(copy.deepcopy(self.det2im1, memo),
copy.deepcopy(self.det2im2, memo)))
for key in self.__dict__:
val = self.__dict__[key]
new_copy.__dict__[key] = copy.deepcopy(val, memo)
return new_copy
    def copy(self):
        """
        Return a shallow copy of the object.

        Convenience method so user doesn't have to import the
        :mod:`copy` stdlib module.  Delegates to ``__copy__``, so the
        copy shares its component objects with the original.
        """
        return copy.copy(self)
    def deepcopy(self):
        """
        Return a deep copy of the object.

        Convenience method so user doesn't have to import the
        :mod:`copy` stdlib module.  Delegates to ``__deepcopy__``, which
        recursively copies every component.
        """
        return copy.deepcopy(self)
    def sub(self, axes=None):
        # Deep-copy self, then restrict the copy's core wcsprm to the
        # selected axes and refresh naxis from the reduced wcsprm.
        # NOTE(review): the local name 'copy' shadows the stdlib copy
        # module inside this method.
        copy = self.deepcopy()
        copy.wcs = self.wcs.sub(axes)
        copy.naxis = copy.wcs.naxis
        return copy
    if _wcs is not None:
        # Borrow the docstring from the underlying wcslib wrapper.
        sub.__doc__ = _wcs.Wcsprm.sub.__doc__
def _fix_scamp(self):
"""
Remove SCAMP's PVi_m distortion parameters if SIP distortion parameters
are also present. Some projects (e.g., Palomar Transient Factory)
convert SCAMP's distortion parameters (which abuse the PVi_m cards) to
SIP. However, wcslib gets confused by the presence of both SCAMP and
SIP distortion parameters.
See https://github.com/astropy/astropy/issues/299.
"""
# Nothing to be done if no WCS attached
if self.wcs is None:
return
# Nothing to be done if no PV parameters attached
pv = self.wcs.get_pv()
if not pv:
return
# Nothing to be done if axes don't use SIP distortion parameters
if not all(ctype.endswith('-SIP') for ctype in self.wcs.ctype):
return
# Nothing to be done if any radial terms are present...
# Loop over list to find any radial terms.
# Certain values of the `j' index are used for storing
# radial terms; refer to Equation (1) in
# <http://web.ipac.caltech.edu/staff/shupe/reprints/SIP_to_PV_SPIE2012.pdf>.
pv = np.asarray(pv)
# Loop over distinct values of `i' index
for i in set(pv[:, 0]):
# Get all values of `j' index for this value of `i' index
js = set(pv[:, 1][pv[:, 0] == i])
# Find max value of `j' index
max_j = max(js)
for j in (3, 11, 23, 39):
if j < max_j and j in js:
return
self.wcs.set_pv([])
warnings.warn("Removed redundant SCAMP distortion parameters " +
"because SIP parameters are also present", FITSFixedWarning)
def fix(self, translate_units='', naxis=None):
"""
Perform the fix operations from wcslib, and warn about any
changes it has made.
Parameters
----------
translate_units : str, optional
Specify which potentially unsafe translations of
non-standard unit strings to perform. By default,
performs none.
Although ``"S"`` is commonly used to represent seconds,
its translation to ``"s"`` is potentially unsafe since the
standard recognizes ``"S"`` formally as Siemens, however
rarely that may be used. The same applies to ``"H"`` for
hours (Henry), and ``"D"`` for days (Debye).
This string controls what to do in such cases, and is
case-insensitive.
- If the string contains ``"s"``, translate ``"S"`` to
``"s"``.
- If the string contains ``"h"``, translate ``"H"`` to
``"h"``.
- If the string contains ``"d"``, translate ``"D"`` to
``"d"``.
Thus ``''`` doesn't do any unsafe translations, whereas
``'shd'`` does all of them.
naxis : int array[naxis], optional
Image axis lengths. If this array is set to zero or
``None``, then `~astropy.wcs.Wcsprm.cylfix` will not be
invoked.
"""
if self.wcs is not None:
self._fix_scamp()
fixes = self.wcs.fix(translate_units, naxis)
for key, val in six.iteritems(fixes):
if val != "No change":
warnings.warn(
("'{0}' made the change '{1}'.").
format(key, val),
FITSFixedWarning)
def calcFootprint(self, header=None, undistort=True, axes=None):
"""
Calculates the footprint of the image on the sky.
A footprint is defined as the positions of the corners of the
image on the sky after all available distortions have been
applied.
Parameters
----------
header : astropy.io.fits header object, optional
undistort : bool, optional
If `True`, take SIP and distortion lookup table into
account
axes : length 2 sequence ints, optional
If provided, use the given sequence as the shape of the
image. Otherwise, use the ``NAXIS1`` and ``NAXIS2``
keywords from the header that was used to create this
`WCS` object.
Returns
-------
coord : (4, 2) array of (*x*, *y*) coordinates.
"""
if axes is not None:
naxis1, naxis2 = axes
else:
if header is None:
try:
# classes that inherit from WCS and define naxis1/2
# do not require a header parameter
naxis1 = self._naxis1
naxis2 = self._naxis2
except AttributeError:
warnings.warn("Need a valid header in order to calculate footprint\n", AstropyUserWarning)
return None
else:
naxis1 = header.get('NAXIS1', None)
naxis2 = header.get('NAXIS2', None)
corners = np.zeros(shape=(4, 2), dtype=np.float64)
if naxis1 is None or naxis2 is None:
return None
corners[0, 0] = 1.
corners[0, 1] = 1.
corners[1, 0] = 1.
corners[1, 1] = naxis2
corners[2, 0] = naxis1
corners[2, 1] = naxis2
corners[3, 0] = naxis1
corners[3, 1] = 1.
if undistort:
return self.all_pix2world(corners, 1)
else:
return self.wcs_pix2world(corners, 1)
    def _read_det2im_kw(self, header, fobj, err=0.0):
        """
        Create a `Paper IV`_ type lookup table for detector to image
        plane correction.

        Returns a 2-tuple of per-axis lookup tables; either entry may be
        None when no correction is defined for that axis or its recorded
        error is below *err*.  ``fobj`` must be an
        `astropy.io.fits.HDUList` or no correction is read at all.
        """
        if fobj is None:
            return (None, None)
        if not isinstance(fobj, fits.HDUList):
            return (None, None)
        try:
            # Old-format files flag the correction with an AXISCORR keyword;
            # delegate those to the legacy reader.
            axiscorr = header[str('AXISCORR')]
            d2imdis = self._read_d2im_old_format(header, fobj, axiscorr)
            return d2imdis
        except KeyError:
            pass
        dist = 'D2IMDIS'
        d_kw = 'D2IM'
        err_kw = 'D2IMERR'
        tables = {}
        for i in range(1, self.naxis + 1):
            # Skip this axis when its recorded correction error is below
            # the caller-supplied threshold.
            d_error = header.get(err_kw + str(i), 0.0)
            if d_error < err:
                tables[i] = None
                continue
            distortion = dist + str(i)
            if distortion in header:
                dis = header[distortion].lower()
                if dis == 'lookup':
                    assert isinstance(fobj, fits.HDUList), ('An astropy.io.fits.HDUList'
                                                            'is required for Lookup table distortion.')
                    dp = (d_kw + str(i)).strip()
                    # NOTE(review): 'D2IMn.EXTVER' / 'D2IMn.AXIS.n' appear to
                    # be record-valued keyword lookups -- confirm the header
                    # object supports that syntax.
                    d_extver = header.get(dp + '.EXTVER', 1)
                    if i == header[dp + '.AXIS.{0:d}'.format(i)]:
                        d_data = fobj[str('D2IMARR'), d_extver].data
                    else:
                        # Data stored transposed relative to this axis order.
                        d_data = (fobj[str('D2IMARR'), d_extver].data).transpose()
                    d_header = fobj[str('D2IMARR'), d_extver].header
                    d_crpix = (d_header.get(str('CRPIX1'), 0.0), d_header.get(str('CRPIX2'), 0.0))
                    d_crval = (d_header.get(str('CRVAL1'), 0.0), d_header.get(str('CRVAL2'), 0.0))
                    d_cdelt = (d_header.get(str('CDELT1'), 1.0), d_header.get(str('CDELT2'), 1.0))
                    d_lookup = DistortionLookupTable(d_data, d_crpix,
                                                     d_crval, d_cdelt)
                    tables[i] = d_lookup
                else:
                    warnings.warn('Polynomial distortion is not implemented.\n', AstropyUserWarning)
            else:
                tables[i] = None
        if not tables:
            return (None, None)
        else:
            return (tables.get(1), tables.get(2))
def _read_d2im_old_format(self, header, fobj, axiscorr):
warnings.warn("The use of ``AXISCORR`` for D2IM correction has been deprecated."
"The new style of this correction is described at"
""
"PyWCS will read in files with ``AXISCORR`` but to_fits() will write"
"out files in the new style",
AstropyDeprecationWarning)
cpdis = [None, None]
crpix = [0., 0.]
crval = [0., 0.]
cdelt = [1., 1.]
try:
d2im_data = fobj[(str('D2IMARR'), 1)].data
except KeyError:
return (None, None)
except AttributeError:
return (None, None)
d2im_data = np.array([d2im_data])
d2im_hdr = fobj[(str('D2IMARR'), 1)].header
naxis = d2im_hdr[str('NAXIS')]
for i in range(1, naxis + 1):
crpix[i - 1] = d2im_hdr.get(str('CRPIX') + str(i), 0.0)
crval[i - 1] = d2im_hdr.get(str('CRVAL') + str(i), 0.0)
cdelt[i - 1] = d2im_hdr.get(str('CDELT') + str(i), 1.0)
cpdis = DistortionLookupTable(d2im_data, crpix, crval, cdelt)
if axiscorr == 1:
return (cpdis, None)
elif axiscorr == 2:
return (None, cpdis)
else:
warnings.warn("Expected AXISCORR to be 1 or 2", AstropyUserWarning)
return (None, None)
def | |
A524': 83520,
'ANATOLIAN HIEROGLYPH A525': 83521,
'ANATOLIAN HIEROGLYPH A526': 83522,
'ANATOLIAN HIEROGLYPH A527': 83523,
'ANATOLIAN HIEROGLYPH A528': 83524,
'ANATOLIAN HIEROGLYPH A529': 83525,
'ANATOLIAN HIEROGLYPH A530': 83526,
'ANGER SYMBOL': 128162,
'ANGRY FACE': 128544,
'ANGUISHED FACE': 128551,
'ANT': 128028,
'ANTENNA WITH BARS': 128246,
'ANTICLOCKWISE DOWNWARDS AND UPWARDS OPEN CIRCLE ARROWS': 128260,
'ANTICLOCKWISE TRIANGLE-HEADED BOTTOM U-SHAPED ARROW': 11149,
'ANTICLOCKWISE TRIANGLE-HEADED LEFT U-SHAPED ARROW': 11150,
'ANTICLOCKWISE TRIANGLE-HEADED OPEN CIRCLE ARROW': 11119,
'ANTICLOCKWISE TRIANGLE-HEADED RIGHT U-SHAPED ARROW': 11148,
'ANTICLOCKWISE TRIANGLE-HEADED TOP U-SHAPED ARROW': 11151,
'APC': 983195,
'APPLICATION PROGRAM COMMAND': 983194,
'ARABIC CURLY DAMMA': 2277,
'ARABIC CURLY DAMMATAN': 2280,
'ARABIC CURLY FATHA': 2276,
'ARABIC CURLY FATHATAN': 2279,
'ARABIC CURLY KASRA': 2278,
'ARABIC CURLY KASRATAN': 2281,
'ARABIC DAMMA WITH DOT': 2302,
'ARABIC DOUBLE RIGHT ARROWHEAD ABOVE': 2299,
'ARABIC DOUBLE RIGHT ARROWHEAD ABOVE WITH DOT': 2300,
'ARABIC FATHA WITH DOT ABOVE': 2293,
'ARABIC FATHA WITH RING': 2292,
'ARABIC KASRA WITH DOT BELOW': 2294,
'ARABIC LEFT ARROWHEAD ABOVE': 2295,
'ARABIC LEFT ARROWHEAD BELOW': 2297,
'ARABIC LETTER AIN WITH THREE DOTS BELOW': 2227,
'ARABIC LETTER BEH WITH HAMZA ABOVE': 2209,
'ARABIC LETTER BEH WITH SMALL V BELOW': 2208,
'ARABIC LETTER DAL WITH THREE DOTS BELOW': 2222,
'ARABIC LETTER FEH WITH DOT BELOW AND THREE DOTS ABOVE': 2212,
'ARABIC LETTER GAF WITH INVERTED STROKE': 2224,
'ARABIC LETTER JEEM WITH TWO DOTS ABOVE': 2210,
'ARABIC LETTER KAF WITH DOT BELOW': 2228,
'ARABIC LETTER KASHMIRI YEH': 1568,
'ARABIC LETTER LAM WITH DOUBLE BAR': 2214,
'ARABIC LETTER LOW ALEF': 2221,
'ARABIC LETTER MARK': 1564,
'ARABIC LETTER MEEM WITH THREE DOTS ABOVE': 2215,
'ARABIC LETTER QAF WITH DOT BELOW': 2213,
'ARABIC LETTER REH WITH LOOP': 2218,
'ARABIC LETTER ROHINGYA YEH': 2220,
'ARABIC LETTER SAD WITH THREE DOTS BELOW': 2223,
'ARABIC LETTER STRAIGHT WAW': 2225,
'ARABIC LETTER TAH WITH TWO DOTS ABOVE': 2211,
'ARABIC LETTER WAW WITH DOT WITHIN': 2219,
'ARABIC LETTER YEH WITH TWO DOTS BELOW AND DOT ABOVE': 2217,
'ARABIC LETTER YEH WITH TWO DOTS BELOW AND HAMZA ABOVE': 2216,
'ARABIC LETTER ZAIN WITH INVERTED V ABOVE': 2226,
'ARABIC MARK SIDEWAYS NOON GHUNNA': 2303,
'ARABIC MATHEMATICAL AIN': 126479,
'ARABIC MATHEMATICAL ALEF': 126464,
'ARABIC MATHEMATICAL BEH': 126465,
'ARABIC MATHEMATICAL DAD': 126489,
'ARABIC MATHEMATICAL DAL': 126467,
'ARABIC MATHEMATICAL DOTLESS BEH': 126492,
'ARABIC MATHEMATICAL DOTLESS FEH': 126494,
'ARABIC MATHEMATICAL DOTLESS NOON': 126493,
'ARABIC MATHEMATICAL DOTLESS QAF': 126495,
'ARABIC MATHEMATICAL DOUBLE-STRUCK AIN': 126639,
'ARABIC MATHEMATICAL DOUBLE-STRUCK BEH': 126625,
'ARABIC MATHEMATICAL DOUBLE-STRUCK DAD': 126649,
'ARABIC MATHEMATICAL DOUBLE-STRUCK DAL': 126627,
'ARABIC MATHEMATICAL DOUBLE-STRUCK FEH': 126640,
'ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN': 126651,
'ARABIC MATHEMATICAL DOUBLE-STRUCK HAH': 126631,
'ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM': 126626,
'ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH': 126647,
'ARABIC MATHEMATICAL DOUBLE-STRUCK LAM': 126635,
'ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM': 126636,
'ARABIC MATHEMATICAL DOUBLE-STRUCK NOON': 126637,
'ARABIC MATHEMATICAL DOUBLE-STRUCK QAF': 126642,
'ARABIC MATHEMATICAL DOUBLE-STRUCK REH': 126643,
'ARABIC MATHEMATICAL DOUBLE-STRUCK SAD': 126641,
'ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN': 126638,
'ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN': 126644,
'ARABIC MATHEMATICAL DOUBLE-STRUCK TAH': 126632,
'ARABIC MATHEMATICAL DOUBLE-STRUCK TEH': 126645,
'ARABIC MATHEMATICAL DOUBLE-STRUCK THAL': 126648,
'ARABIC MATHEMATICAL DOUBLE-STRUCK THEH': 126646,
'ARABIC MATHEMATICAL DOUBLE-STRUCK WAW': 126629,
'ARABIC MATHEMATICAL DOUBLE-STRUCK YEH': 126633,
'ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH': 126650,
'ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN': 126630,
'ARABIC MATHEMATICAL FEH': 126480,
'ARABIC MATHEMATICAL GHAIN': 126491,
'ARABIC MATHEMATICAL HAH': 126471,
'ARABIC MATHEMATICAL INITIAL AIN': 126511,
'ARABIC MATHEMATICAL INITIAL BEH': 126497,
'ARABIC MATHEMATICAL INITIAL DAD': 126521,
'ARABIC MATHEMATICAL INITIAL FEH': 126512,
'ARABIC MATHEMATICAL INITIAL GHAIN': 126523,
'ARABIC MATHEMATICAL INITIAL HAH': 126503,
'ARABIC MATHEMATICAL INITIAL HEH': 126500,
'ARABIC MATHEMATICAL INITIAL JEEM': 126498,
'ARABIC MATHEMATICAL INITIAL KAF': 126506,
'ARABIC MATHEMATICAL INITIAL KHAH': 126519,
'ARABIC MATHEMATICAL INITIAL LAM': 126507,
'ARABIC MATHEMATICAL INITIAL MEEM': 126508,
'ARABIC MATHEMATICAL INITIAL NOON': 126509,
'ARABIC MATHEMATICAL INITIAL QAF': 126514,
'ARABIC MATHEMATICAL INITIAL SAD': 126513,
'ARABIC MATHEMATICAL INITIAL SEEN': 126510,
'ARABIC MATHEMATICAL INITIAL SHEEN': 126516,
'ARABIC MATHEMATICAL INITIAL TEH': 126517,
'ARABIC MATHEMATICAL INITIAL THEH': 126518,
'ARABIC MATHEMATICAL INITIAL YEH': 126505,
'ARABIC MATHEMATICAL JEEM': 126466,
'ARABIC MATHEMATICAL KAF': 126474,
'ARABIC MATHEMATICAL KHAH': 126487,
'ARABIC MATHEMATICAL LAM': 126475,
'ARABIC MATHEMATICAL LOOPED AIN': 126607,
'ARABIC MATHEMATICAL LOOPED ALEF': 126592,
'ARABIC MATHEMATICAL LOOPED BEH': 126593,
'ARABIC MATHEMATICAL LOOPED DAD': 126617,
'ARABIC MATHEMATICAL LOOPED DAL': 126595,
'ARABIC MATHEMATICAL LOOPED FEH': 126608,
'ARABIC MATHEMATICAL LOOPED GHAIN': 126619,
'ARABIC MATHEMATICAL LOOPED HAH': 126599,
'ARABIC MATHEMATICAL LOOPED HEH': 126596,
'ARABIC MATHEMATICAL LOOPED JEEM': 126594,
'ARABIC MATHEMATICAL LOOPED KHAH': 126615,
'ARABIC MATHEMATICAL LOOPED LAM': 126603,
'ARABIC MATHEMATICAL LOOPED MEEM': 126604,
'ARABIC MATHEMATICAL LOOPED NOON': 126605,
'ARABIC MATHEMATICAL LOOPED QAF': 126610,
'ARABIC MATHEMATICAL LOOPED REH': 126611,
'ARABIC MATHEMATICAL LOOPED SAD': 126609,
'ARABIC MATHEMATICAL LOOPED SEEN': 126606,
'ARABIC MATHEMATICAL LOOPED SHEEN': 126612,
'ARABIC MATHEMATICAL LOOPED TAH': 126600,
'ARABIC MATHEMATICAL LOOPED TEH': 126613,
'ARABIC MATHEMATICAL LOOPED THAL': 126616,
'ARABIC MATHEMATICAL LOOPED THEH': 126614,
'ARABIC MATHEMATICAL LOOPED WAW': 126597,
'ARABIC MATHEMATICAL LOOPED YEH': 126601,
'ARABIC MATHEMATICAL LOOPED ZAH': 126618,
'ARABIC MATHEMATICAL LOOPED ZAIN': 126598,
'ARABIC MATHEMATICAL MEEM': 126476,
'ARABIC MATHEMATICAL NOON': 126477,
'ARABIC MATHEMATICAL OPERATOR HAH WITH DAL': 126705,
'ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL': 126704,
'ARABIC MATHEMATICAL QAF': 126482,
'ARABIC MATHEMATICAL REH': 126483,
'ARABIC MATHEMATICAL SAD': 126481,
'ARABIC MATHEMATICAL SEEN': 126478,
'ARABIC MATHEMATICAL SHEEN': 126484,
'ARABIC MATHEMATICAL STRETCHED AIN': 126575,
'ARABIC MATHEMATICAL STRETCHED BEH': 126561,
'ARABIC MATHEMATICAL STRETCHED DAD': 126585,
'ARABIC MATHEMATICAL STRETCHED DOTLESS BEH': 126588,
'ARABIC MATHEMATICAL STRETCHED DOTLESS FEH': 126590,
'ARABIC MATHEMATICAL STRETCHED FEH': 126576,
'ARABIC MATHEMATICAL STRETCHED GHAIN': 126587,
'ARABIC MATHEMATICAL STRETCHED HAH': 126567,
'ARABIC MATHEMATICAL STRETCHED HEH': 126564,
'ARABIC MATHEMATICAL STRETCHED JEEM': 126562,
'ARABIC MATHEMATICAL STRETCHED KAF': 126570,
'ARABIC MATHEMATICAL STRETCHED KHAH': 126583,
'ARABIC MATHEMATICAL STRETCHED MEEM': 126572,
'ARABIC MATHEMATICAL STRETCHED NOON': 126573,
'ARABIC MATHEMATICAL STRETCHED QAF': 126578,
'ARABIC MATHEMATICAL STRETCHED SAD': 126577,
'ARABIC MATHEMATICAL STRETCHED SEEN': 126574,
'ARABIC MATHEMATICAL STRETCHED SHEEN': 126580,
'ARABIC MATHEMATICAL STRETCHED TAH': 126568,
'ARABIC MATHEMATICAL STRETCHED TEH': 126581,
'ARABIC MATHEMATICAL STRETCHED THEH': 126582,
'ARABIC MATHEMATICAL STRETCHED YEH': 126569,
'ARABIC MATHEMATICAL STRETCHED ZAH': 126586,
'ARABIC MATHEMATICAL TAH': 126472,
'ARABIC MATHEMATICAL TAILED AIN': 126543,
'ARABIC MATHEMATICAL TAILED DAD': 126553,
'ARABIC MATHEMATICAL TAILED DOTLESS NOON': 126557,
'ARABIC MATHEMATICAL TAILED DOTLESS QAF': 126559,
'ARABIC MATHEMATICAL TAILED GHAIN': 126555,
'ARABIC MATHEMATICAL TAILED HAH': 126535,
'ARABIC MATHEMATICAL TAILED JEEM': 126530,
'ARABIC MATHEMATICAL TAILED KHAH': 126551,
'ARABIC MATHEMATICAL TAILED LAM': 126539,
'ARABIC MATHEMATICAL TAILED NOON': 126541,
'ARABIC MATHEMATICAL TAILED QAF': 126546,
'ARABIC MATHEMATICAL TAILED SAD': 126545,
'ARABIC MATHEMATICAL TAILED SEEN': 126542,
'ARABIC MATHEMATICAL TAILED SHEEN': 126548,
'ARABIC MATHEMATICAL TAILED YEH': 126537,
'ARABIC MATHEMATICAL TEH': 126485,
'ARABIC MATHEMATICAL THAL': 126488,
'ARABIC MATHEMATICAL THEH': 126486,
'ARABIC MATHEMATICAL WAW': 126469,
'ARABIC MATHEMATICAL YEH': 126473,
'ARABIC MATHEMATICAL ZAH': 126490,
'ARABIC MATHEMATICAL ZAIN': 126470,
'ARABIC NUMBER MARK ABOVE': 1541,
'ARABIC OPEN DAMMATAN': 2289,
'ARABIC OPEN FATHATAN': 2288,
'ARABIC OPEN KASRATAN': 2290,
'ARABIC RIGHT ARROWHEAD ABOVE': 2296,
'ARABIC RIGHT ARROWHEAD ABOVE WITH DOT': 2301,
'ARABIC RIGHT ARROWHEAD BELOW': 2298,
'ARABIC SEQUENCE NOON WITH KEHEH': 983629,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH AE': 983628,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH ALEF': 983621,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA': 983623,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH E': 983627,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH OE': 983624,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH U': 983625,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH WAW': 983622,
'ARABIC SEQUENCE YEH WITH HAMZA ABOVE WITH YU': 983626,
'ARABIC SIGN SAMVAT': 1540,
'ARABIC SMALL HIGH WAW': 2291,
'ARABIC SYMBOL DOT ABOVE': 64434,
'ARABIC SYMBOL DOT BELOW': 64435,
'ARABIC SYMBOL DOUBLE VERTICAL BAR BELOW': 64444,
'ARABIC SYMBOL FOUR DOTS ABOVE': 64442,
'ARABIC SYMBOL FOUR DOTS BELOW': 64443,
'ARABIC SYMBOL RING': 64447,
'ARABIC SYMBOL SMALL TAH ABOVE': 64448,
'ARABIC SYMBOL SMALL TAH BELOW': 64449,
'ARABIC SYMBOL THREE DOTS ABOVE': 64438,
'ARABIC SYMBOL THREE DOTS BELOW': 64439,
'ARABIC SYMBOL THREE DOTS POINTING DOWNWARDS ABOVE': 64440,
'ARABIC SYMBOL THREE DOTS POINTING DOWNWARDS BELOW': 64441,
'ARABIC SYMBOL TWO DOTS ABOVE': 64436,
'ARABIC SYMBOL TWO DOTS BELOW': 64437,
'ARABIC SYMBOL TWO DOTS VERTICALLY ABOVE': 64445,
'ARABIC SYMBOL TWO DOTS VERTICALLY BELOW': 64446,
'ARABIC TONE LOOP ABOVE': 2284,
'ARABIC TONE LOOP BELOW': 2287,
'ARABIC TONE ONE DOT ABOVE': 2282,
'ARABIC TONE ONE DOT BELOW': 2285,
'ARABIC TONE TWO DOTS ABOVE': 2283,
'ARABIC TONE TWO DOTS BELOW': 2286,
'ARABIC TURNED DAMMA BELOW': 2275,
'ARABIC WAVY HAMZA BELOW': 1631,
'ARMENIAN DRAM SIGN': 1423,
'ARTICULATED LORRY': 128667,
'ARTIST PALETTE': 127912,
'ASTONISHED FACE': 128562,
'ASTRONOMICAL SYMBOL FOR URANUS': 9954,
'ATHLETIC SHOE': 128095,
'AUBERGINE': 127814,
'AUTOMATED TELLER MACHINE': 127975,
'AUTOMOBILE': 128663,
'BABY': 128118,
'BABY ANGEL': 128124,
'BABY BOTTLE': 127868,
'BABY CHICK': 128036,
'BABY SYMBOL': 128700,
'BACK OF ENVELOPE': 128386,
'BACK WITH LEFTWARDS ARROW ABOVE': 128281,
'BACKSLANTED SOUTH ARROW WITH HOOKED TAIL': 11099,
'BACKSLANTED SOUTH ARROW WITH HORIZONTAL TAIL': 11101,
'BACKSPACE': 983056,
'BACTRIAN CAMEL': 128043,
'BADMINTON RACQUET AND SHUTTLECOCK': 127992,
'BAGGAGE CLAIM': 128708,
'BALLOON': 127880,
'BALLOT BOLD SCRIPT X': 128502,
'BALLOT BOX WITH BALLOT': 128499,
'BALLOT BOX WITH BOLD CHECK': 128505,
'BALLOT BOX WITH BOLD SCRIPT X': 128503,
'BALLOT BOX WITH LIGHT X': 11197,
'BALLOT BOX WITH SCRIPT X': 128501,
'BALLOT SCRIPT X': 128500,
'BAMUM LETTER PHASE-A FIRI': 92217,
'BAMUM LETTER PHASE-A GBIEE FON': 92161,
'BAMUM LETTER PHASE-A GHEUAEGHEUAE': 92193,
'BAMUM LETTER PHASE-A GHEUAERAE': 92188,
'BAMUM LETTER PHASE-A KAFA': 92199,
'BAMUM LETTER PHASE-A KAQ': 92240,
'BAMUM LETTER PHASE-A KET': 92211,
'BAMUM LETTER PHASE-A KEUKEUTNDA': 92179,
'BAMUM LETTER PHASE-A KPOQ': 92219,
'BAMUM LETTER PHASE-A KUOQ': 92213,
'BAMUM LETTER PHASE-A LAPAQ': 92183,
'BAMUM LETTER PHASE-A LET KUT': 92184,
'BAMUM LETTER PHASE-A LOMMAE': 92216,
'BAMUM LETTER PHASE-A LU': 92243,
'BAMUM LETTER PHASE-A LUAEP': 92207,
'BAMUM LETTER PHASE-A MAEKEUP': 92186,
'BAMUM LETTER PHASE-A MAEM': 92238,
'BAMUM LETTER PHASE-A MAEMBGBIEE': 92171,
'BAMUM LETTER PHASE-A MAEMKPEN': 92203,
'BAMUM LETTER PHASE-A MAEMVEUX': 92174,
'BAMUM LETTER PHASE-A MAENYI': 92210,
'BAMUM LETTER PHASE-A MAESI': 92230,
'BAMUM LETTER PHASE-A MANSUAE': 92175,
'BAMUM LETTER PHASE-A MAP PIEET': 92221,
'BAMUM LETTER PHASE-A MBANYI': 92232,
'BAMUM LETTER PHASE-A MBAQ': 92246,
'BAMUM LETTER PHASE-A MEUNJOMNDEUQ': 92197,
'BAMUM LETTER PHASE-A MGBASA': 92196,
'BAMUM LETTER PHASE-A MON NGGEUAET': 92190,
'BAMUM LETTER PHASE-A MOOMEUT': 92214,
'BAMUM LETTER PHASE-A MOOMPUQ': 92198,
'BAMUM LETTER PHASE-A MVEUAENGAM': 92176,
'BAMUM LETTER PHASE-A NAA MFON': 92164,
'BAMUM LETTER | |
from Bio import Phylo
from Bio.Phylo import PhyloXML
from Bio.Phylo import PhyloXMLIO
from collections import defaultdict as ddict
from Bio.Phylo.PhyloXML import Property as Prop
from Bio.Phylo.PhyloXML import Clade as PClade
from Bio.Phylo.BaseTree import Tree as BTree
from Bio.Phylo.BaseTree import Clade as BClade
import string
from numpy import pi as rpi
rpi2 = 2.0*rpi
import numpy as np
import array as arr
import collections as colls
import sys
#core_test = lambda ok,tot,pr: 1.0-st.binom.sf(ok,tot,pr)
lev_sep = "."
# Here are three functions that I'd love to see in Biopython but they
# are not there (yet).
def partial_branch_length(clade, selective_targets):
    """Total branch length of *clade* restricted to lineages leading to
    terminals whose names are in *selective_targets*.

    Note: a subtree's own branch length is included whenever at least one
    of its terminals is a target.
    """
    def _collect(node):
        # Target terminal: contributes its own branch length.
        if node.is_terminal() and node.name in selective_targets:
            return [node.branch_length]
        # No target anywhere below: contributes nothing.
        if not any(term.name in selective_targets
                   for term in node.get_terminals()):
            return [0.0]
        # Mixed subtree: children's partial sums plus this node's branch.
        lengths = [0.0]
        for child in node.clades:
            lengths.append(partial_branch_length(child, selective_targets))
        lengths.append(node.branch_length)
        return lengths
    return sum(_collect(clade))
def reroot(tree, new_root):
    """Re-root *tree* in place so that *new_root* becomes the outgroup.

    Branches on the path from the old root down to the outgroup are
    reversed one by one; the old (usually bifurcating) root is deleted
    when it would be left with a single child.  Appears adapted from
    Biopython's ``root_with_outgroup`` -- verify against that source.
    Mutates *tree* and returns None.
    """
    outgroup = new_root
    outgroup_path = tree.get_path(outgroup)
    if len(outgroup_path) == 0:
        # Outgroup is the current root -- no change
        return
    prev_blen = outgroup.branch_length
    if outgroup.is_terminal():
        # Create a new root with a 0-length branch to the outgroup
        outgroup.branch_length = 0.0
        new_root = tree.root.__class__(
            branch_length=tree.root.branch_length, clades=[outgroup])
        # The first branch reversal (see the upcoming loop) is modified
        if len(outgroup_path) == 1:
            # Trivial tree like '(A,B);
            new_parent = new_root
        else:
            # Detach the outgroup's parent, swap its branch length with the
            # outgroup's, and hang it under the new root.
            parent = outgroup_path.pop(-2)
            parent.clades.pop(parent.clades.index(outgroup))
            prev_blen, parent.branch_length = parent.branch_length, prev_blen
            new_root.clades.insert(0, parent)
            new_parent = parent
    else:
        # Use the given outgroup node as the new (trifurcating) root
        new_root = outgroup
        new_root.branch_length = tree.root.branch_length
        new_parent = new_root
    # Tracing the outgroup lineage backwards, reattach the subclades under a
    # new root clade. Reverse the branches directly above the outgroup in
    # the tree, but keep the descendants of those clades as they are.
    for parent in outgroup_path[-2::-1]:
        parent.clades.pop(parent.clades.index(new_parent))
        prev_blen, parent.branch_length = parent.branch_length, prev_blen
        new_parent.clades.insert(0, parent)
        new_parent = parent
    # Finally, handle the original root according to number of descendents
    old_root = tree.root
    if outgroup in old_root.clades:
        assert len(outgroup_path) == 1
        old_root.clades.pop(old_root.clades.index(outgroup))
    else:
        old_root.clades.pop(old_root.clades.index(new_parent))
    if len(old_root) == 1:
        # Delete the old bifurcating root & add branch lengths
        ingroup = old_root.clades[0]
        if ingroup.branch_length:
            ingroup.branch_length += prev_blen
        else:
            ingroup.branch_length = prev_blen
        new_parent.clades.insert(0, ingroup)
        # ENH: If annotations are attached to old_root, do... something.
    else:
        # Keep the old trifurcating/polytomous root as an internal node
        old_root.branch_length = prev_blen
        new_parent.clades.insert(0, old_root)
    tree.root = new_root
    tree.rooted = True
    return
def get_parent(tree, child_clade):
    """Return the clade directly above *child_clade* in *tree*.

    Returns None when the path from the root has fewer than two nodes
    (i.e. the clade is the root or hangs directly off it).
    """
    path = tree.get_path(child_clade)
    if len(path) > 1:
        return path[-2]
    return None
def reroot_mid_fat_edge(tree, node):
    """Re-root *tree* at the midpoint of the branch above *node*.

    A new clade is spliced in halfway along the branch leading to *node*
    (each half keeps half the original length) and the tree is rerooted
    on that new clade.  No-op when *node* already is the root.
    """
    if tree.root == node:
        return
    parent = get_parent(tree, node)
    half = node.branch_length * 0.5
    node.branch_length = half
    midpoint = PClade(branch_length=half, clades=[node])
    if parent:
        # Replace node with the midpoint clade among its parent's children.
        parent.clades = [c for c in parent.clades if c != node] + [midpoint]
    else:
        # Node hangs directly off the root: splice the midpoint in there.
        tree.root.clades = [midpoint] + [c for c in tree.root.clades
                                         if c != node]
    reroot(tree, midpoint)
def clades2terms(tree, startswith=None):
    """Map each clade of *tree* to the list of its terminal descendants.

    When *startswith* is a non-empty string, only clades whose name begins
    with it are included in the mapping.
    """
    mapping = {}
    def _walk(clade):
        # Falsy startswith means "keep everything" (matches original
        # truthiness test, so "" also keeps everything).
        if not startswith or (clade.name
                              and clade.name.startswith(startswith)):
            mapping[clade] = clade.get_terminals()
        for child in clade.clades:
            _walk(child)
    _walk(tree.root)
    return mapping
def dist_matrix(tree):
    """All-pairs patristic distances between the terminals of *tree*.

    Returns a dict of dicts: ``dists[a][b]`` is the sum of branch lengths
    on the path between terminals named *a* and *b* (0.0 on the diagonal).
    Side effect: every nonterminal clade gets an ``ids`` attribute holding
    the set of names of its terminal descendants.
    """
    terminals = list(tree.get_terminals())
    names = [t.name for t in terminals]
    # Pre-compute, per internal node, the terminal names below it.
    # (Can be made faster with recursion, as the original noted.)
    for node in tree.get_nonterminals():
        node.ids = set(leaf.name for leaf in node.get_terminals())
    dists = {a: {b: 0.0 for b in names} for a in names}
    def _descend(clade):
        length = clade.branch_length
        if clade.is_terminal():
            # This branch separates `clade` from every other terminal.
            for other in names:
                if other != clade.name:
                    dists[clade.name][other] += length
                    dists[other][clade.name] += length
            return
        # This branch separates the terminals below `clade` from the rest.
        for inside in clade.ids:
            for term in terminals:
                if term.name not in clade.ids:
                    dists[inside][term.name] += length
                    dists[term.name][inside] += length
        for child in clade.clades:
            _descend(child)
    _descend(tree.root)
    return dists
class PpaTree:
def __load_tree_txt__(self, fn):
    """Build a PhyloXML tree from a plain-text taxonomy file.

    The first tab-separated field of each line of *fn* is a full
    taxonomic path whose levels are joined by ``lev_sep``; shared
    prefixes become shared internal clades.  All branch lengths are set
    to 1.0 and ``self.ignore_branch_len`` is flagged accordingly.
    Raises IOError when the file cannot be read.
    """
    # `with` closes the handle even on error (the original leaked it and
    # re-raised a message-less IOError; callers only catch the type).
    with open(fn, 'rb') as fh:
        rows = [l.decode('utf-8').rstrip().split("\t")[0] for l in fh]
    clades = [r.split(lev_sep) for r in rows]
    tree = BTree()
    tree.root = BClade()
    def add_clade_rec(father, txt_tree):
        # One child per distinct first-level name at this depth.
        fl = set([t[0] for t in txt_tree])
        father.clades = []
        for c in fl:
            nclade = BClade(branch_length=1.0, name=c)
            father.clades.append(nclade)
            children = [t[1:] for t in txt_tree if len(t) > 1 and t[0] == c]
            if children:
                add_clade_rec(nclade, children)
    add_clade_rec(tree.root, clades)
    self.ignore_branch_len = 1
    return tree.as_phyloxml()
def __read_tree__( self, fn ):
    """Load *fn*, trying phyloxml, newick, nexus, then plain text in turn.

    Returns the first successful parse converted to PhyloXML.
    Raises IOError when the file is missing, ValueError when no format
    could parse it.
    """
    for ff in ['phyloxml','newick','nexus',"txt"]:
        try:
            if ff in ['txt']:
                tree = self.__load_tree_txt__( fn )
            else:
                tree = Phylo.read(fn, ff)
                # A single-terminal result usually means the parser
                # mis-detected the format: treat as failure, try next.
                if len(tree.root.get_terminals()) == 1:
                    raise ValueError
        except ValueError:
            continue
        except IOError:
            sys.stderr.write("Error: No tree file found: "+fn+"\n")
            raise IOError
        except Exception:
            # Any other parser failure: fall through to the next format.
            continue
        else:
            # Parsed cleanly: normalize to PhyloXML and return.
            return tree.as_phyloxml()
    sys.stderr.write("Error: unrecognized input format "+fn+"\n")
    raise ValueError
def __init__( self, filename, warnings = False ):
    """Load a tree from *filename*; None yields an empty PpaTree.

    On success, every clade is annotated with its dotted full name
    (see add_full_paths).
    """
    self.warnings = warnings
    if filename is None:
        self.tree = None
        return
    try:
        self.tree = self.__read_tree__(filename)
        self.add_full_paths()
    except:
        # NOTE(review): bare except + sys.exit(0) terminates the whole
        # process with a *success* exit code on any load error (including
        # KeyboardInterrupt/SystemExit) -- confirm this is intended
        # before relying on exit status.
        sys.exit(0)
def core_test( self, ok, tot, pr ):
# scipy included here for non-compatibility with scons
import scipy.stats as st
if pr in self.ctc and tot in self.ctc[pr] and ok in self.ctc[pr][tot]:
return self.ctc[pr][tot][ok]
ret = 1.0-st.binom.sf(ok,tot,pr)
if not pr in self.ctc: self.ctc[pr] = {}
if not tot in self.ctc[pr]: self.ctc[pr][tot] = {}
if not ok in self.ctc[pr][tot]: self.ctc[pr][tot][ok] = ret
return ret
def is_core( self, clade, targs, er = 0.95 ):
    """Test whether *clade* is a "core" clade for the genome-id set *targs*.

    A clade is core when the count of its terminals hitting *targs*
    passes the binomial test (core_test) at success probability *er*,
    and no unambiguous subclade fails the same test.
    Returns (ok, pvalue, ids): ``ok`` bool, ``pvalue`` the core_test
    result, ``ids`` the intersecting genome ids (None when not core).
    """
    intersection = clade.imgids & targs
    len_intersection = len(intersection)
    # Clades with >= 2 genomes need at least two hits to qualify at all.
    if len(clade.imgids) >= 2 and len_intersection < 2:
        return False, 0.0, None
    # Ids under ambiguous ('?') children that fall outside the hit set:
    # roughly half of them (add/1.99, rounded) are credited as hits.
    add = 0
    for subclade in clade.clades:
        if "?" in subclade.name:
            out = subclade.imgids - intersection # targs
            add += len(out)
    if add and len_intersection >= add:
        len_intersection += int(round(add/1.99))
    core = self.core_test( len_intersection, clade.nterminals, er )
    # 0.05 is the hard-coded significance threshold for the test.
    if core < 0.05 or len_intersection == 0:
        return False, core, None
    # Every unambiguous subclade must also look core; singleton subclades
    # are counted separately so a clade whose singletons are ALL absent
    # from targs can be rejected below.
    nsubclades, nsubclades_absent = 0, 0
    for subclade in set(clade.get_nonterminals()) - set([clade]):
        if "?" in subclade.full_name: # full??/
            continue
        if subclade.nterminals == 1:
            nsubclades += 1 # !!!
            if len(subclade.imgids & targs) == 0:
                nsubclades_absent += 1
            continue
        # Same ambiguity credit as above, applied within the subclade.
        sc_intersection = subclade.imgids & targs
        sc_len_intersection = len(sc_intersection)
        sc_add = 0
        for sc_subclade in subclade.clades:
            if "?" in sc_subclade.name:
                sc_out = sc_subclade.imgids - sc_intersection
                sc_add += len(sc_out)
        # NOTE(review): this condition tests the OUTER `add`, not
        # `sc_add` -- possibly a bug; confirm intended behavior.
        if add and sc_len_intersection >= sc_add:
            sc_len_intersection += int(round(sc_add/1.99))
        subcore = self.core_test( sc_len_intersection, subclade.nterminals, er )
        if subcore < 0.05:
            return False, core, None
    if nsubclades > 0 and nsubclades == nsubclades_absent:
        return False, core, None
    return True, core, intersection
def _find_core( self, terminals, er = 0.95, root_name = None, skip_qm = True ):
    """Collect maximal core clades for the genome-id set *terminals*.

    Walks the tree top-down; recursion stops at the first clade that
    passes is_core, so the reported clades are maximal.
    Returns a list of tuples (full_name, n_hits, n_genomes, pvalue).
    """
    #terminals_s = set(terminals)
    def _find_core_rec( clade ):
        if root_name:
            # NOTE(review): both branches assign the same value; the
            # commented-out lines suggest root_name was meant to replace
            # the first path component -- confirm intent.
            #clname = lev_sep.join( [root_name]+clade.full_name.split(lev_sep)[1:] )
            #clname = lev_sep.join( clade.full_name[1:] )
            clname = clade.full_name
        else:
            clname = clade.full_name
        if clade.is_terminal():
            # A target terminal is trivially core with p-value 1.
            if clade.imgid in terminals:
                #n = terminals[clade.imgid]
                return [(clname,1,1,
                        #n,n,n,
                        1.0)]
            return []
        if skip_qm and clade.name and "?" in clade.name:
            # Skip ambiguous ('?') clades entirely when requested.
            return []
        if len(clade.imgids) == 1:
            # Single-genome internal clade behaves like a terminal.
            cimg = list(clade.imgids)[0]
            if cimg in terminals:
                #n = terminals[cimg]
                return [(clname,1,1,
                        #n,n,n,
                        1.0)]
            return []
        core,pv,intersection = self.is_core( clade, terminals, er = er )
        if core:
            # Core clade found: report it, do not descend further.
            #ns = [terminals[ii] for ii in terminals_s if ii in clade.imgids]
            return [( clname,
                    len(intersection),len(clade.imgids),
                    #len(clade.imgids&terminals_s),len(clade.imgids),
                    #min(ns),max(ns),np.mean(ns),
                    pv)]
        # Not core here: search each child independently.
        rets = []
        for c in clade.clades:
            rets += _find_core_rec(c)
        return rets
    return _find_core_rec( self.tree.root )
def add_full_paths( self ):
def _add_full_paths_( clade, path ):
lpath = path + ([clade.name] if clade.name else [])
clade.full_name = ".".join( lpath )
for c in clade.clades:
_add_full_paths_( c, lpath )
_add_full_paths_( self.tree.root, [] )
def find_cores( self, cl_taxa_file, min_core_size = 1, error_rate = 0.95, subtree = None, skip_qm = True ):
if subtree:
self.subtree( 'name', subtree )
self.ctc = {}
imgids2terminals = {}
for t in self.tree.get_terminals():
t.imgid = int(t.name[3:] if "t__"in t.name else t.name)
t.nterminals = 1
imgids2terminals[t.imgid] = t
# can be made faster with recursion
for n in self.tree.get_nonterminals():
n.imgids = set( [nn.imgid for nn in n.get_terminals()] )
n.nterminals = len( n.imgids )
self.add_full_paths() # unnecessary
ret = {}
for vec in (l.strip().split('\t') for l in open(cl_taxa_file)):
sid = int(vec[0])
#tgts_l = [int(s) for s | |
not The.normalize : return value
if value == The.missing : return value
if isinstance(value,str) : return value
lo, hi = lohi(m,x)
return (value - lo) / (hi - lo + 0.0001)
"""
As seen above, _normalize_ has some special cases.
Firstly, if some global flag has disabled
normalization, we just return the unaltered value.
Similarly, if we are trying to normalize some
missing value or some non-numeric value, we also
return the unaltered value.
### SquaredDifference
The _dist_ function needs to compute the square of
the differences between two values _v1,v2_ from some
model _m_. It is assumed that each value is
weighted; i.e. it can add up to some amount _most_
to the distance measure. This is done using the
_squaredDifference_ function, which uses a
distance heuristic first proposed by Aha et al.
\cite{aha91}. For example, if in doubt, assume the
maximum distance. Such doubts arise when (e.g.) we are
comparing missing values.
"""
def squaredDifference(m, v1, v2, most, sum=0, n=0):
    """Weighted squared difference between two values of model *m*.

    Implements the Aha'91 distance heuristic: unknown values are assumed
    maximally distant; two equal known symbols have zero distance.
    Returns ``(sum + most*inc, n + most)`` -- the running weighted
    difference and the running maximum possible difference.

    Bug fix: the original left ``inc`` unbound (raising UnboundLocalError)
    both when *v1* and *v2* were both missing and when two equal known
    symbols were compared; per the surrounding documentation those cases
    yield the maximum (1) and zero distance respectively.
    """
    def furthestFromV1():
        # When one numeric is unknown, assume it sits as far as possible
        # from the known one.
        return 0 if v1 > 0.5 else 1
    if v1 == v2 == The.missing:
        # Both unknown: if in doubt, assume the maximum distance.
        inc = 1
    else:
        if v1 == The.missing:
            v1, v2 = v2, v1  # at the very least, v1 is known
        if isinstance(v1, str) and isinstance(v2, str):
            # Symbols: max distance unless both are known and equal.
            inc = 1 if (v2 == The.missing or v1 != v2) else 0
        else:
            if v2 == The.missing:
                v2 = furthestFromV1()
            inc = (v1 - v2)**2
    return (sum + most*inc,  # sum of incs, so far
            n + most)        # sum of max incs, so far
"""
Note that the _squaredDifference_ function knows how
to handle non-numerics differently from numerics.
Two non-numerics have zero
distance if they are the same (and distance equal to
max, otherwise).
## Dividing the Data
### FastDiv
With the above machinery, we can very quickly
recursively divide some training data in half using
_fastdiv_. This function finds the distance _c_
between two distance items _west,east_. All data has
some distance _a,b_ to _west,east_. Using _a,b,c_,
the _fastdiv_ function uses the cosine rule to sort
the data along where it falls on a line running from
_west_ to _east_. This function then returns the
data, divided on the median value.
"""
def fastdiv(m, data, details, how):
    """Divide data at the median of two distant items.

    Projects every item onto the axis between the two poles found by
    twoDistantPoints (cosine rule), sorts by that projection and splits
    at the median index.  Records the poles, their separation and the
    cut value on *details*.  Returns (western half, eastern half).
    """
    west, east = twoDistantPoints(m, data, how)
    c = dist(m, west, east, how)
    for i in data:
        a = dist(m, i, west, how)
        b = dist(m, i, east, how)
        i.x = (a*a + c*c - b*b)/(2*c)  # cosine rule
    data = sorted(data, key=lambda i: i.x)
    # Floor division: the original `/` yields a float index on Python 3,
    # breaking the slices below; `//` is identical on Python 2.
    n = len(data)//2
    details.also(west=west, east=east, c=c, cut=data[n].x)
    return data[:n], data[n:]
"""
### TwoDistantPoints
CHUNK is fast since it uses the linear-time
"FastMap" heuristic \cite{fal95} to find the
_twoDistantPoints_. This heuristic starts by
picking any item at random. Next, it finds the
furthest item from the first pick. Finally, it finds
the furthest item from the second item. Note that
this is fast since this
requires only one scans of the data for each pick.
While the two found items may not be the most
distant points, they are far enough away to guide
data division.
"""
def twoDistantPoints(m, data, how):
    """Find two far-apart items with the FastMap heuristic: pick a random
    item, take the item furthest from it, then the item furthest from
    that -- one linear scan per pick."""
    def furthest(origin):
        best, best_d = origin, 0
        for candidate in data:
            d = dist(m, origin, candidate, how)
            if d > best_d:
                best, best_d = candidate, d
        return best
    anywhere = any(data)       # 1) pick any thing (`any` aliases random.choice)
    west = furthest(anywhere)  # 2) far from that thing
    east = furthest(west)      # 3) far from west
    return west, east
"""
While _twoDistantPoints_ looks
simple, it actually offers a profound summary of
important aspects of a data set. According to Platt, this FastMap heuristic belongs to a class of
algorithms that find approximations to the
eigenvectors of a data set \cite{platt05}. Spectral
learners \cite{kamvar03} use these eigenvectors to
reason along the most important dimensions in a data
set.
### Settings
By applying _fastdiv_ recursively, we can build a
binary tree whose leaves contain similar
examples. That tree generation is controlled by the
following _settings_. By default, we will stop when
any leaf has fewer than _minSize=10_ items or if we have
recursed more than _depthMax=10_ levels. Also, just
to make sure we get at least a few branches in the
tree, we will recurse at least _depthMin=2_ times.
Further, when we recurse, if _verbose=True_, we will
trace the traversal by printing one _b4_ string for
each level of the recursion. Finally, when computing
distances, this code uses _how=x.dec_; i.e. the
decisions of each item in the data.
"""
def settings(**has):
    """Return control settings for recursive descent, with any keyword
    arguments overriding the defaults below."""
    defaults = Slots(
        minSize=10,           # min leaf size
        depthMin=2,           # no pruning till depthMin
        depthMax=10,          # max tree depth
        b4='|.. ',            # indent string for tracing
        verbose=False,        # show trace info?
        how=lambda x: x.dec)  # how to measure distance
    return defaults.override(has)
"""
### Chunk (main function)
Finally, we arrive at the main _chunk_ function.
This function uses the above _settings_ to build a
tree that recursively divides the data. The _chunk_
function holds those _settings_ in its _slots_
variable (which is set on the first line of the
function, if it is not already known). Local
functions within _chunk_ use these _slots_ to
control how the tree is built.
"""
def chunk(m, data, slots=None, lvl=0, up=None):
    """Return a tree of split data.

    Recursively bisects *data* with fastdiv, stopping when a region is
    smaller than slots.minSize or deeper than slots.depthMax.  Leaves
    keep their items in ``value``; internal nodes keep ``_left``/
    ``_right`` children and an ``_up`` parent pointer.
    """
    slots = slots or settings()
    def tooFew():
        return len(data) < slots.minSize
    def tooDeep():
        return lvl > slots.depthMax
    def show(suffix):
        # Optional trace: one `b4` marker per recursion level.
        if slots.verbose:
            # Parenthesized print: identical on Python 2 (where the file's
            # original statement form lived) and valid on Python 3.
            print(slots.b4*lvl + str(len(data)) + suffix)
    tree = Slots(_up=up, value=None, _left=None, _right=None)
    if tooDeep() or tooFew():
        show(".")
        tree.value = data
    else:
        show("")
        wests, easts = fastdiv(m, data, tree, slots.how)
        # `worse` is a pruning hook; the default never prunes.
        if not worse(wests, easts, tree):
            tree._left = chunk(m, wests, slots, lvl+1, tree)
        if not worse(easts, wests, tree):
            tree._right = chunk(m, easts, slots, lvl+1, tree)
    return tree
def worse(down1,down2,here): return False
"""
Note some subtleties in the above code. Firstly,
since we use _fastdiv_, our _chunk_ function is a
very fast method to divide data.
Secondly, the function _worse_ is a hook for any
clever pruning you might want to add to this process
(this _chunk_ function only recurses on subtrees
that are not _worse_). While we do not use _worse_
here, this function could be used to prune sub-trees
that fail some test; e.g. that do not reduce the
variance of the current tree.
Thirdly, _chunk_ returns a tree of _Slots_ where
each node contains pointers to its _\_left_ and
_\_right_ kids as well as a pointer _\_up_ to the
parent node (which, for the root node, points to
_None_). Note that all leaves of this tree have
empty child pointers. Such childless leaves hold
the items that fall into that leaf in the _value_
field.
## Support Utilities
### Some Standard Tricks
"""
import sys,math,random
sys.dont_write_bytecode = True # disable writing .pyc files
seed = random.seed # convenient shorthand
any = random.choice # another convenient shorthand
def say(text):
    """Emit *text* on stdout without appending a newline."""
    sys.stdout.write(text)
def showd(d):
    """Render dict *d* as ':key value' pairs, sorted by key.

    Entries whose key contains an underscore are considered hard to read
    and are skipped."""
    pairs = (':%s %s' % (key, val)
             for key, val in sorted(d.items())
             if "_" not in key)
    return ' '.join(pairs)
"""
_Slots_ is based on a Peter Norvig trick from
http://norvig.com/python-iaq.html. When all you want
to do is create an object that holds data in several
fields, the following will do. For an example of
using _Slots_, see _settings_ (above).
"""
class Slots():
    """Read/write named slots (Norvig's struct trick).

    Each instance receives a unique, monotonically increasing ``id``;
    equality and inequality compare ids only.  ``__repr__`` delegates to
    `showd` for a readable dump of the public fields.
    """
    id = -1  # class-level counter; incremented per instance
    def __init__(self, **d):
        self.id = Slots.id = Slots.id + 1
        self.override(d)
    def override(self, d):
        """Update fields from dict *d*; return self for chaining."""
        self.__dict__.update(d)
        return self
    def also(self, **d):
        """Update fields from keyword arguments (returns None)."""
        self.override(d)
    def __eq__(self, other):
        return self.id == other.id
    def __ne__(self, other):
        return self.id != other.id
    def __repr__(self):
        return '{' + showd(self.__dict__) + '}'
"""
Note that _Slots_ can pretty print themselves using
the _showd_ function (shown above). Also, since our
_Slots_ have a unique _id_, then we can quickly test
for equality and inequality.
### Tree Iterators
To simplify the processing of trees, we define some
iterators to return all _nodes_ or just the _leafs_
of the tree.
"""
def nodes(t, lvl=0):
    """Iterator over (level, node) for every node of tree *t*, pre-order.

    A falsy *t* (e.g. None child pointer) yields nothing."""
    if t:
        yield lvl, t
        for child in (t._left, t._right):
            for pair in nodes(child, lvl + 1):
                yield pair
def leafs(t):
    """Iterator over (level, node) for the childless nodes of tree *t*."""
    for lvl, node in nodes(t):
        if not (node._left or node._right):
            yield lvl, node
"""
### Pretty Printing
The _ditto_ function marks repeated entries in a column
with a "_._".
"""
def ditto(lst, old, mark="."):
    """Replace items of *lst* that repeat their previous value with *mark*.

    *old* caches the last value seen per column index and is updated in
    place as a side effect."""
    shown = []
    for col, value in enumerate(lst):
        previous = old.get(col, None)  # None when the column is new
        shown.append(mark if previous == value else value)
        old[col] = value  # next call compares against this value
    return shown
"""
Once we "ditto" a list of lists, we | |
<reponame>suresh198526/pulumi-azure
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['Cluster']
class Cluster(pulumi.CustomResource):
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 enable_disk_encryption: Optional[pulumi.Input[bool]] = None,
                 enable_purge: Optional[pulumi.Input[bool]] = None,
                 enable_streaming_ingest: Optional[pulumi.Input[bool]] = None,
                 identity: Optional[pulumi.Input[pulumi.InputType['ClusterIdentityArgs']]] = None,
                 language_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 optimized_auto_scale: Optional[pulumi.Input[pulumi.InputType['ClusterOptimizedAutoScaleArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 sku: Optional[pulumi.Input[pulumi.InputType['ClusterSkuArgs']]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 trusted_external_tenants: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 virtual_network_configuration: Optional[pulumi.Input[pulumi.InputType['ClusterVirtualNetworkConfigurationArgs']]] = None,
                 zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Manages a Kusto (also known as Azure Data Explorer) Cluster
        ## Example Usage
        ```python
        import pulumi
        import pulumi_azure as azure
        rg = azure.core.ResourceGroup("rg", location="East US")
        example = azure.kusto.Cluster("example",
            location=rg.location,
            resource_group_name=rg.name,
            sku=azure.kusto.ClusterSkuArgs(
                name="Standard_D13_v2",
                capacity=2,
            ),
            tags={
                "Environment": "Production",
            })
        ```
        ## Import
        Kusto Clusters can be imported using the `resource id`, e.g.
        ```sh
         $ pulumi import azure:kusto/cluster:Cluster example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.Kusto/Clusters/cluster1
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] enable_disk_encryption: Specifies if the cluster's disks are encrypted.
        :param pulumi.Input[bool] enable_purge: Specifies if the purge operations are enabled.
        :param pulumi.Input[bool] enable_streaming_ingest: Specifies if the streaming ingest is enabled.
        :param pulumi.Input[pulumi.InputType['ClusterIdentityArgs']] identity: A identity block.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] language_extensions: An list of `language_extensions` to enable. Valid values are: `PYTHON` and `R`.
        :param pulumi.Input[str] location: The location where the Kusto Cluster should be created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Kusto Cluster to create. Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['ClusterOptimizedAutoScaleArgs']] optimized_auto_scale: An `optimized_auto_scale` block as defined below.
        :param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Cluster should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['ClusterSkuArgs']] sku: A `sku` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] trusted_external_tenants: Specifies a list of tenant IDs that are trusted by the cluster.
        :param pulumi.Input[pulumi.InputType['ClusterVirtualNetworkConfigurationArgs']] virtual_network_configuration: A `virtual_network_configuration` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: A list of Availability Zones in which the cluster instances should be created in. Changing this forces a new resource to be created.
        """
        # Backwards-compatibility shims: the legacy __name__/__opts__
        # arguments are still honored, but warn and map onto the modern ones.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        # Normalize resource options and pin the SDK/provider version.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No provider ID supplied, so this is a *create*: gather the
            # input properties (validating required ones) into __props__.
            # When opts.id is set we are adopting an existing resource and
            # __props__ must come in pre-built (or stay None).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            __props__['enable_disk_encryption'] = enable_disk_encryption
            __props__['enable_purge'] = enable_purge
            __props__['enable_streaming_ingest'] = enable_streaming_ingest
            __props__['identity'] = identity
            __props__['language_extensions'] = language_extensions
            __props__['location'] = location
            __props__['name'] = name
            __props__['optimized_auto_scale'] = optimized_auto_scale
            if resource_group_name is None:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if sku is None:
                raise TypeError("Missing required property 'sku'")
            __props__['sku'] = sku
            __props__['tags'] = tags
            __props__['trusted_external_tenants'] = trusted_external_tenants
            __props__['virtual_network_configuration'] = virtual_network_configuration
            __props__['zones'] = zones
            # Output-only properties: seeded as None, later resolved by the
            # provider (see the corresponding @property accessors below).
            __props__['data_ingestion_uri'] = None
            __props__['uri'] = None
        # Hand off to the base CustomResource constructor with the
        # fully-qualified resource type token.
        super(Cluster, __self__).__init__(
            'azure:kusto/cluster:Cluster',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            data_ingestion_uri: Optional[pulumi.Input[str]] = None,
            enable_disk_encryption: Optional[pulumi.Input[bool]] = None,
            enable_purge: Optional[pulumi.Input[bool]] = None,
            enable_streaming_ingest: Optional[pulumi.Input[bool]] = None,
            identity: Optional[pulumi.Input[pulumi.InputType['ClusterIdentityArgs']]] = None,
            language_extensions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            location: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            optimized_auto_scale: Optional[pulumi.Input[pulumi.InputType['ClusterOptimizedAutoScaleArgs']]] = None,
            resource_group_name: Optional[pulumi.Input[str]] = None,
            sku: Optional[pulumi.Input[pulumi.InputType['ClusterSkuArgs']]] = None,
            tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
            trusted_external_tenants: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            uri: Optional[pulumi.Input[str]] = None,
            virtual_network_configuration: Optional[pulumi.Input[pulumi.InputType['ClusterVirtualNetworkConfigurationArgs']]] = None,
            zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Cluster':
        """
        Get an existing Cluster resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] data_ingestion_uri: The Kusto Cluster URI to be used for data ingestion.
        :param pulumi.Input[bool] enable_disk_encryption: Specifies if the cluster's disks are encrypted.
        :param pulumi.Input[bool] enable_purge: Specifies if the purge operations are enabled.
        :param pulumi.Input[bool] enable_streaming_ingest: Specifies if the streaming ingest is enabled.
        :param pulumi.Input[pulumi.InputType['ClusterIdentityArgs']] identity: A identity block.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] language_extensions: An list of `language_extensions` to enable. Valid values are: `PYTHON` and `R`.
        :param pulumi.Input[str] location: The location where the Kusto Cluster should be created. Changing this forces a new resource to be created.
        :param pulumi.Input[str] name: The name of the Kusto Cluster to create. Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['ClusterOptimizedAutoScaleArgs']] optimized_auto_scale: An `optimized_auto_scale` block as defined below.
        :param pulumi.Input[str] resource_group_name: Specifies the Resource Group where the Kusto Cluster should exist. Changing this forces a new resource to be created.
        :param pulumi.Input[pulumi.InputType['ClusterSkuArgs']] sku: A `sku` block as defined below.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] trusted_external_tenants: Specifies a list of tenant IDs that are trusted by the cluster.
        :param pulumi.Input[str] uri: The FQDN of the Azure Kusto Cluster.
        :param pulumi.Input[pulumi.InputType['ClusterVirtualNetworkConfigurationArgs']] virtual_network_configuration: A `virtual_network_configuration` block as defined below.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] zones: A list of Availability Zones in which the cluster instances should be created in. Changing this forces a new resource to be created.
        """
        # Attaching the provider-assigned `id` to the options makes the
        # Cluster constructor perform a lookup instead of a create
        # (see the `opts.id is None` branch in __init__).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        # Seed the known state; any value left as None will be resolved
        # from the existing resource.
        __props__ = dict()
        __props__["data_ingestion_uri"] = data_ingestion_uri
        __props__["enable_disk_encryption"] = enable_disk_encryption
        __props__["enable_purge"] = enable_purge
        __props__["enable_streaming_ingest"] = enable_streaming_ingest
        __props__["identity"] = identity
        __props__["language_extensions"] = language_extensions
        __props__["location"] = location
        __props__["name"] = name
        __props__["optimized_auto_scale"] = optimized_auto_scale
        __props__["resource_group_name"] = resource_group_name
        __props__["sku"] = sku
        __props__["tags"] = tags
        __props__["trusted_external_tenants"] = trusted_external_tenants
        __props__["uri"] = uri
        __props__["virtual_network_configuration"] = virtual_network_configuration
        __props__["zones"] = zones
        return Cluster(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="dataIngestionUri")
    def data_ingestion_uri(self) -> pulumi.Output[str]:
        """
        The Kusto Cluster URI to be used for data ingestion.
        """
        # Look up the "data_ingestion_uri" output recorded on this resource.
        return pulumi.get(self, "data_ingestion_uri")
    @property
    @pulumi.getter(name="enableDiskEncryption")
    def enable_disk_encryption(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies if the cluster's disks are encrypted.
        """
        # Look up the "enable_disk_encryption" output recorded on this resource.
        return pulumi.get(self, "enable_disk_encryption")
    @property
    @pulumi.getter(name="enablePurge")
    def enable_purge(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies if the purge operations are enabled.
        """
        # Look up the "enable_purge" output recorded on this resource.
        return pulumi.get(self, "enable_purge")
    @property
    @pulumi.getter(name="enableStreamingIngest")
    def enable_streaming_ingest(self) -> pulumi.Output[Optional[bool]]:
        """
        Specifies if the streaming ingest is enabled.
        """
        # Look up the "enable_streaming_ingest" output recorded on this resource.
        return pulumi.get(self, "enable_streaming_ingest")
    @property
    @pulumi.getter
    def identity(self) -> pulumi.Output['outputs.ClusterIdentity']:
        """
        An identity block.
        """
        # Look up the "identity" output recorded on this resource.
        return pulumi.get(self, "identity")
    @property
    @pulumi.getter(name="languageExtensions")
    def language_extensions(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        A list of `language_extensions` to enable. Valid values are: `PYTHON` and `R`.
        """
        # Look up the "language_extensions" output recorded on this resource.
        return pulumi.get(self, "language_extensions")
    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The location where the Kusto Cluster should be created. Changing this forces a new resource to be created.
        """
        # Look up the "location" output recorded on this resource.
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the Kusto Cluster to create. Changing this forces a new resource to be created.
        """
        # Look up the "name" output recorded on this resource.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="optimizedAutoScale")
    def optimized_auto_scale(self) -> pulumi.Output[Optional['outputs.ClusterOptimizedAutoScale']]:
        """
        An `optimized_auto_scale` block as defined below.
        """
        # Look up the "optimized_auto_scale" output recorded on this resource.
        return pulumi.get(self, "optimized_auto_scale")
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Output[str]:
        """
        Specifies the Resource Group where the Kusto Cluster should exist. Changing this forces a new resource to be created.
        """
        # Look up the "resource_group_name" output recorded on this resource.
        return pulumi.get(self, "resource_group_name")
    @property
    @pulumi.getter
    def sku(self) -> pulumi.Output['outputs.ClusterSku']:
        """
        A `sku` block as defined below.
        """
        # Look up the "sku" output recorded on this resource.
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        A mapping of tags to assign to the resource.
        """
        # Look up the "tags" output recorded on this resource.
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter(name="trustedExternalTenants")
    def trusted_external_tenants(self) -> pulumi.Output[Sequence[str]]:
        """
        Specifies a list of tenant IDs that are trusted by the cluster.
        """
        # Look up the "trusted_external_tenants" output recorded on this resource.
        return pulumi.get(self, "trusted_external_tenants")
    @property
    @pulumi.getter
    def uri(self) -> pulumi.Output[str]:
        """
        The FQDN of the Azure Kusto Cluster.
        """
        # Look up the "uri" output recorded on this resource.
        return pulumi.get(self, "uri")
    @property
    @pulumi.getter(name="virtualNetworkConfiguration")
    def virtual_network_configuration(self) -> pulumi.Output[Optional['outputs.ClusterVirtualNetworkConfiguration']]:
        """
        A `virtual_network_configuration` block as defined below.
        """
        # Look up the "virtual_network_configuration" output recorded on this resource.
        return pulumi.get(self, "virtual_network_configuration")
@property
@pulumi.getter
def zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
| |
query_args = self._query_builder(
"query",
"vendorTokens",
fields,
{"sort": sort, "filter": filter,},
_ARGUMENT_LEGENDS.vendor_tokens_each,
True,
)
return self.iterate_edges(
objects.VendorTokenConnection, query_args, headers, token, "vendorTokens",
)
def create_catalog(
self,
input: inputs.CatalogCreateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Catalog:
query_args = self._query_builder(
"mutation",
"createCatalog",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_catalog,
False,
)
return objects.Catalog(
self.api(headers=headers, token=token, **query_args)["data"][
"createCatalog"
],
)
def import_catalog(
self,
input: inputs.CatalogImportInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Catalog:
query_args = self._query_builder(
"mutation",
"importCatalog",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.import_catalog,
False,
)
return objects.Catalog(
self.api(headers=headers, token=token, **query_args)["data"][
"importCatalog"
],
)
def update_catalog(
self,
id: str,
input: inputs.CatalogUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Catalog:
query_args = self._query_builder(
"mutation",
"updateCatalog",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.update_catalog,
False,
)
return objects.Catalog(
self.api(headers=headers, token=token, **query_args)["data"][
"updateCatalog"
],
)
def update_catalogs_each(
self,
input: inputs.CatalogUpdateInput,
sort: Union[inputs.SortInput, None, CinnamonUndefined] = CinnamonUndefined,
filter: Union[dict, None, CinnamonUndefined] = CinnamonUndefined,
show_deleted: Union[bool, None, CinnamonUndefined] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogConnectionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> Iterable[objects.CatalogConnection]:
query_args = self._query_builder(
"mutation",
"updateCatalogs",
fields,
{
"input": input,
"sort": sort,
"filter": filter,
"show_deleted": show_deleted,
},
_ARGUMENT_LEGENDS.update_catalogs_each,
True,
)
return self.iterate_edges(
objects.CatalogConnection, query_args, headers, token, "updateCatalogs",
)
def sync_catalog(
self,
id: str,
input: Union[
inputs.CatalogSyncInput, None, CinnamonUndefined
] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Catalog:
query_args = self._query_builder(
"mutation",
"syncCatalog",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.sync_catalog,
False,
)
return objects.Catalog(
self.api(headers=headers, token=token, **query_args)["data"]["syncCatalog"],
)
def sync_catalog_products(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CatalogFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Catalog:
query_args = self._query_builder(
"mutation",
"syncCatalogProducts",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.sync_catalog_products,
False,
)
return objects.Catalog(
self.api(headers=headers, token=token, **query_args)["data"][
"syncCatalogProducts"
],
)
def delete_catalog(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.DeletionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Deletion:
query_args = self._query_builder(
"mutation",
"deleteCatalog",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.delete_catalog,
False,
)
return objects.Deletion(
self.api(headers=headers, token=token, **query_args)["data"][
"deleteCatalog"
],
)
def create_creative_font(
self,
input: inputs.CreativeFontCreateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeFontFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeFont:
query_args = self._query_builder(
"mutation",
"createCreativeFont",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_creative_font,
False,
)
return objects.CreativeFont(
self.api(headers=headers, token=token, **query_args)["data"][
"createCreativeFont"
],
)
def update_creative_font(
self,
id: str,
input: inputs.CreativeFontUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeFontFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeFont:
query_args = self._query_builder(
"mutation",
"updateCreativeFont",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.update_creative_font,
False,
)
return objects.CreativeFont(
self.api(headers=headers, token=token, **query_args)["data"][
"updateCreativeFont"
],
)
def update_creative_fonts_each(
self,
input: inputs.CreativeFontUpdateInput,
sort: Union[inputs.SortInput, None, CinnamonUndefined] = CinnamonUndefined,
filter: Union[dict, None, CinnamonUndefined] = CinnamonUndefined,
show_deleted: Union[bool, None, CinnamonUndefined] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeFontConnectionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> Iterable[objects.CreativeFontConnection]:
query_args = self._query_builder(
"mutation",
"updateCreativeFonts",
fields,
{
"input": input,
"sort": sort,
"filter": filter,
"show_deleted": show_deleted,
},
_ARGUMENT_LEGENDS.update_creative_fonts_each,
True,
)
return self.iterate_edges(
objects.CreativeFontConnection,
query_args,
headers,
token,
"updateCreativeFonts",
)
def delete_creative_font(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.DeletionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Deletion:
query_args = self._query_builder(
"mutation",
"deleteCreativeFont",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.delete_creative_font,
False,
)
return objects.Deletion(
self.api(headers=headers, token=token, **query_args)["data"][
"deleteCreativeFont"
],
)
def create_creative_image(
self,
input: inputs.CreativeImageCreateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeImageFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeImage:
query_args = self._query_builder(
"mutation",
"createCreativeImage",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_creative_image,
False,
)
return objects.CreativeImage(
self.api(headers=headers, token=token, **query_args)["data"][
"createCreativeImage"
],
)
def update_creative_image(
self,
id: str,
input: inputs.CreativeImageUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeImageFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeImage:
query_args = self._query_builder(
"mutation",
"updateCreativeImage",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.update_creative_image,
False,
)
return objects.CreativeImage(
self.api(headers=headers, token=token, **query_args)["data"][
"updateCreativeImage"
],
)
def update_creative_images_each(
self,
input: inputs.CreativeImageUpdateInput,
sort: Union[inputs.SortInput, None, CinnamonUndefined] = CinnamonUndefined,
filter: Union[dict, None, CinnamonUndefined] = CinnamonUndefined,
show_deleted: Union[bool, None, CinnamonUndefined] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeImageConnectionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> Iterable[objects.CreativeImageConnection]:
query_args = self._query_builder(
"mutation",
"updateCreativeImages",
fields,
{
"input": input,
"sort": sort,
"filter": filter,
"show_deleted": show_deleted,
},
_ARGUMENT_LEGENDS.update_creative_images_each,
True,
)
return self.iterate_edges(
objects.CreativeImageConnection,
query_args,
headers,
token,
"updateCreativeImages",
)
def delete_creative_image(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.DeletionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Deletion:
query_args = self._query_builder(
"mutation",
"deleteCreativeImage",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.delete_creative_image,
False,
)
return objects.Deletion(
self.api(headers=headers, token=token, **query_args)["data"][
"deleteCreativeImage"
],
)
def create_creative_layer(
self,
input: inputs.CreativeLayerCreateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeLayerFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeLayer:
query_args = self._query_builder(
"mutation",
"createCreativeLayer",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_creative_layer,
False,
)
return objects.CreativeLayer(
self.api(headers=headers, token=token, **query_args)["data"][
"createCreativeLayer"
],
)
def update_creative_layer(
self,
id: str,
input: inputs.CreativeLayerUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeLayerFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeLayer:
query_args = self._query_builder(
"mutation",
"updateCreativeLayer",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.update_creative_layer,
False,
)
return objects.CreativeLayer(
self.api(headers=headers, token=token, **query_args)["data"][
"updateCreativeLayer"
],
)
def update_creative_layers_each(
self,
input: inputs.CreativeLayerUpdateInput,
sort: Union[inputs.SortInput, None, CinnamonUndefined] = CinnamonUndefined,
filter: Union[dict, None, CinnamonUndefined] = CinnamonUndefined,
show_deleted: Union[bool, None, CinnamonUndefined] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeLayerConnectionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> Iterable[objects.CreativeLayerConnection]:
query_args = self._query_builder(
"mutation",
"updateCreativeLayers",
fields,
{
"input": input,
"sort": sort,
"filter": filter,
"show_deleted": show_deleted,
},
_ARGUMENT_LEGENDS.update_creative_layers_each,
True,
)
return self.iterate_edges(
objects.CreativeLayerConnection,
query_args,
headers,
token,
"updateCreativeLayers",
)
def delete_creative_layer(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.DeletionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Deletion:
query_args = self._query_builder(
"mutation",
"deleteCreativeLayer",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.delete_creative_layer,
False,
)
return objects.Deletion(
self.api(headers=headers, token=token, **query_args)["data"][
"deleteCreativeLayer"
],
)
def create_creative_template(
self,
input: inputs.CreativeTemplateCreateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeTemplateFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeTemplate:
query_args = self._query_builder(
"mutation",
"createCreativeTemplate",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_creative_template,
False,
)
return objects.CreativeTemplate(
self.api(headers=headers, token=token, **query_args)["data"][
"createCreativeTemplate"
],
)
def update_creative_template(
self,
id: str,
input: inputs.CreativeTemplateUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeTemplateFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.CreativeTemplate:
query_args = self._query_builder(
"mutation",
"updateCreativeTemplate",
fields,
{"id": id, "input": input,},
_ARGUMENT_LEGENDS.update_creative_template,
False,
)
return objects.CreativeTemplate(
self.api(headers=headers, token=token, **query_args)["data"][
"updateCreativeTemplate"
],
)
def update_creative_templates_each(
self,
input: inputs.CreativeTemplateUpdateInput,
sort: Union[inputs.SortInput, None, CinnamonUndefined] = CinnamonUndefined,
filter: Union[dict, None, CinnamonUndefined] = CinnamonUndefined,
show_deleted: Union[bool, None, CinnamonUndefined] = CinnamonUndefined,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.CreativeTemplateConnectionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> Iterable[objects.CreativeTemplateConnection]:
query_args = self._query_builder(
"mutation",
"updateCreativeTemplates",
fields,
{
"input": input,
"sort": sort,
"filter": filter,
"show_deleted": show_deleted,
},
_ARGUMENT_LEGENDS.update_creative_templates_each,
True,
)
return self.iterate_edges(
objects.CreativeTemplateConnection,
query_args,
headers,
token,
"updateCreativeTemplates",
)
def delete_creative_template(
self,
id: str,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.DeletionFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Deletion:
query_args = self._query_builder(
"mutation",
"deleteCreativeTemplate",
fields,
{"id": id,},
_ARGUMENT_LEGENDS.delete_creative_template,
False,
)
return objects.Deletion(
self.api(headers=headers, token=token, **query_args)["data"][
"deleteCreativeTemplate"
],
)
def create_entitlement(
self,
input: inputs.EntitlementInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.EntitlementFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: Union[str, None] = None,
) -> objects.Entitlement:
query_args = self._query_builder(
"mutation",
"createEntitlement",
fields,
{"input": input,},
_ARGUMENT_LEGENDS.create_entitlement,
False,
)
return objects.Entitlement(
self.api(headers=headers, token=token, **query_args)["data"][
"createEntitlement"
],
)
def update_entitlement(
self,
id: str,
input: inputs.EntitlementUpdateInput,
fields: List[
Union[QueryField, QueryFieldSet, str]
] = fields_module.EntitlementFields._sdk_default_fields,
headers: Union[dict, None] = None,
token: | |
import unittest
def expression_tree():
    """
    Build expression tree.
    """
    from algs18_binary_tree.expression_tree import Value, Expression, add, mult, sub, divide
    # Same sample tree as before: (((3 + 1) / 4) * (((1 + 5) * 9) - (2 * 6)))
    numerator = Expression(add, Value(3), Value(1))
    quotient = Expression(divide, numerator, Value(4))
    scaled_sum = Expression(mult, Expression(add, Value(1), Value(5)), Value(9))
    product = Expression(mult, Value(2), Value(6))
    difference = Expression(sub, scaled_sum, product)
    return Expression(mult, quotient, difference)
def debug_expression():
    """
    Request evaluation of simple expression.
    """
    from algs18_binary_tree.expression_tree import Value, Expression, add, mult
    # Simple sample: ((1 + 5) * 9)
    total = Expression(add, Value(1), Value(5))
    return Expression(mult, total, Value(9))
class TestBinaryTree(unittest.TestCase):
    def test_speaking_tree(self):
        """The tree narrates each insertion; assert the exact narration text."""
        from algs18_binary_tree.speaking_binary_tree import SpeakingBinaryTree
        bt = SpeakingBinaryTree()
        # First value becomes the root.
        self.assertEqual(
            'To insert `5`, create a new subtree with root of `5`.', bt.insert(5))
        # Smaller values descend into (and narrate) the left subtree.
        self.assertEqual('To insert `3`, `3` is smaller than or equal to `5`, so insert `3` into the left subtree of `5`, but there is no left subtree, so create a new subtree with root of `3`.', bt.insert(3))
        self.assertEqual('To insert `1`, `1` is smaller than or equal to `5`, so insert `1` into the left subtree of `5` rooted at `3`. Now `1` is smaller than or equal to `3`, so insert `1` into the left subtree of `3`, but there is no left subtree, so create a new subtree with root of `1`.', bt.insert(1))
def test_baseline_expression(self):
from algs18_binary_tree.expression_tree import Value, build_expression
num1 = Value(17)
self.assertEqual(17, num1.eval())
def exp(left, right):
"""^"""
return left ** right
expr = build_expression('((8^2)*(7/4))', new_operators={'^': exp})
self.assertEqual(112, expr.eval())
    def test_damaged_recursive_data_structures(self):
        """A self-referential node must fail fast rather than loop forever."""
        from algs18_binary_tree.expression_tree import Expression, Value, add
        a = Expression(add, Value(1), Value(5))
        # This is dangerous! Pointing the node at itself creates a cycle,
        # so eval() recurses without bound.
        a.left = a
        # NOTE(review): presumably the RuntimeError comes from hitting the
        # interpreter's recursion limit (RecursionError subclasses it).
        with self.assertRaises(RuntimeError):
            print(a.eval())
    def test_expression(self):
        """End-to-end checks of eval(), str(), postfix() and parsing forms."""
        from algs18_binary_tree.expression_tree import build_expression
        # Hand-built tree from the module-level helper above.
        mult7 = expression_tree()
        self.assertEqual(42.0, mult7.eval())
        self.assertEqual(
            '(((3 + 1) / 4) * (((1 + 5) * 9) - (2 * 6)))', str(mult7))
        # Build expression uses floats
        expr = build_expression('(((3 + 1) / 4) * (((1 + 5) * 9) - (2 * 6)))')
        self.assertEqual(42.0, expr.eval())
        self.assertEqual(
            '(((3.0 + 1.0) / 4.0) * (((1.0 + 5.0) * 9.0) - (2.0 * 6.0)))', str(expr))
        mult2 = debug_expression()
        self.assertEqual(54, mult2.eval())
        self.assertEqual('((1 + 5) * 9)', str(mult2))
        # NOTE(review): the docstring of a custom operator mirrors its symbol
        # ('%'); presumably the module uses it for display — confirm.
        def mod(a, b):
            """%"""
            return a % b
        expr = build_expression('((9 % 2) * 5)', new_operators={'%': mod})
        self.assertEqual(5.0, expr.eval())
        self.assertEqual([9.0, 2.0, '%', 5.0, '*'], list(expr.postfix()))
        expr = build_expression('(1.9 + 18.232)')
        self.assertEqual(1.9 + 18.232, expr.eval())
        # Non-numeric operands parse into nodes exposing `.reference`.
        expr = build_expression('(A1 + 4)')
        self.assertEqual('A1', expr.left.reference)
        self.assertEqual(4.0, expr.right.value)
        # A bare literal parses into a single Value node.
        expr = build_expression('3')
        self.assertEqual(3.0, expr.value)
def test_bt(self):
from algs18_binary_tree.binary_search_tree import BinaryTree
bt1 = BinaryTree()
# can work even when empty
self.assertTrue(bt1.remove(7) is None)
self.assertTrue(bt1.min() is None)
bt1.insert(5)
self.assertTrue(5 in bt1)
bt1.insert(2)
self.assertEqual(5, bt1.root.value)
self.assertTrue(2 in bt1)
self.assertEqual([2, 5], list(bt1))
bt1.insert(1)
self.assertTrue(1 in bt1)
self.assertEqual([1, 2, 5], list(bt1))
def test_bt_remove(self):
from algs18_binary_tree.binary_search_tree import BinaryTree
# delete with left child having right child
bt1 = BinaryTree()
bt1.insert(5)
bt1.insert(2)
bt1.insert(4)
bt1.insert(7)
self.assertEqual([2, 4, 5, 7], list(bt1))
bt1.remove(5)
self.assertEqual([2, 4, 7], list(bt1))
# delete with left child having only left child
bt2 = BinaryTree()
bt2.insert(5)
bt2.insert(2)
bt2.insert(1)
bt2.insert(7)
bt2.insert(8)
self.assertEqual([1, 2, 5, 7, 8], list(bt2))
bt2.remove(5)
self.assertEqual([1, 2, 7, 8], list(bt2))
# delete with no left child
bt3 = BinaryTree()
bt3.insert(5)
bt3.insert(7)
bt3.insert(8)
self.assertEqual([5, 7, 8], list(bt3))
bt3.remove(5)
self.assertEqual([7, 8], list(bt3))
# delete with no children
bt4 = BinaryTree()
bt4.insert(5)
self.assertEqual([5], list(bt4))
bt4.remove(5)
self.assertEqual([], list(bt4))
def test_bt_duplicates(self):
    """Duplicate keys are all kept and reported in sorted order."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    tree = BinaryTree()
    for value in (5, 5, 4, 5):
        tree.insert(value)
    self.assertEqual([4, 5, 5, 5], list(tree))
def test_tree(self):
    """Repeatedly extracting the minimum yields strictly increasing values."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    tree = BinaryTree()
    for value in [19, 14, 53, 3, 15, 26, 58]:
        tree.insert(value)
    previous = -1
    while not tree.is_empty():
        smallest = tree.min()
        self.assertGreater(smallest, previous)
        previous = smallest
        tree.remove(smallest)
def test_stress(self):
    """Insert/remove ascending and descending runs, checking order each step."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    tree = BinaryTree()
    count = 31
    keys = list(range(count))
    # Ascending inserts: after inserting k, the tree holds 0..k.
    for key in keys:
        tree.insert(key)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(keys, list(tree))
    # Ascending removals: after removing k, the tree holds k+1..count-1.
    for key in keys:
        tree.remove(key)
        self.assertEqual(list(range(key + 1, count)), list(tree))
    # Refill, then drain again in reverse order.
    for key in keys:
        tree.insert(key)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(keys, list(tree))
    for key in reversed(keys):
        tree.remove(key)
def test_traversal(self):
    """Iterating the tree visits every stored value exactly once."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    tree = BinaryTree()
    for value in (23, 17, 40, 30):
        tree.insert(value)
    # sum() drives the same __iter__ protocol as a manual accumulation loop.
    self.assertEqual(110, sum(tree))
def test_copy(self):
    """A copied tree contains the same values as the original."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    original = BinaryTree()
    for value in (23, 17, 40, 30):
        original.insert(value)
    clone = original.copy()
    # sum() drives the same __iter__ protocol as a manual accumulation loop.
    self.assertEqual(110, sum(clone))
def test_bt_stress(self):
    """Stress insert/remove in ascending and descending key order.

    Verifies the in-order traversal after every mutation. The dead
    counter (``n``) from the original version, which was incremented
    but never read, has been removed.
    """
    from algs18_binary_tree.binary_search_tree import BinaryTree
    tree = BinaryTree()
    count = 31
    keys = list(range(count))
    for key in keys:
        tree.insert(key)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(list(range(count)), list(tree))
    # Remove in ascending order.
    for key in keys:
        tree.remove(key)
        self.assertEqual(list(range(key + 1, count)), list(tree))
    # Refill the tree.
    for key in keys:
        tree.insert(key)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(list(range(count)), list(tree))
    # Remove in descending order.
    for key in reversed(keys):
        tree.remove(key)
def test_string_structure(self):
    """tree_structure emits a prefix '(value,left,right)' rendering."""
    from algs18_binary_tree.binary_search_tree import BinaryTree
    from algs18_binary_tree.speaking_binary_tree import tree_structure
    tree = BinaryTree()
    self.assertNotIn(99, tree)
    for value in (5, 4, 6, 2, 7, 1, 3):
        tree.insert(value)
    # Prefix representation: value first, then left subtree, then right.
    self.assertEqual('(5,(4,(2,(1,,),(3,,)),),(6,,(7,,)))',
                     tree_structure(tree.root))
def test_recreate_tree(self):
    """recreate_tree parses the strings produced by tree_structure."""
    from algs18_binary_tree.binary_search_tree import BinaryTree, recreate_tree
    from algs18_binary_tree.speaking_binary_tree import tree_structure
    # Leaf-only tree.
    root = recreate_tree('(19,,)')
    self.assertEqual('19', root.value)
    # Both children given as bare values.
    root = recreate_tree('(19,3,22)')
    self.assertEqual('3', root.left.value)
    self.assertEqual('22', root.right.value)
    # Nested right subtree.
    root = recreate_tree('(19,3,(22,21,24))')
    self.assertEqual('3', root.left.value)
    self.assertEqual('22', root.right.value)
    self.assertEqual('21', root.right.left.value)
    self.assertEqual('24', root.right.right.value)
    # Parser tolerates a missing trailing parenthesis.
    root = recreate_tree('(26,,(29,,)')
    self.assertEqual('26', root.value)
    # Deeply nested tree with eight nodes.
    root = recreate_tree('(19,(14,(3,,),(15,,)),(53,(26,,(29,,)),(58,,)))')
    self.assertEqual('19', root.value)
    self.assertEqual(8, root.size())
    # Round-trip: build, flatten, parse, flatten again.
    tree = BinaryTree()
    for value in (10, 15, 13, 11):
        tree.insert(value)
    flattened = tree_structure(tree.root)
    self.assertEqual('(10,,(15,(13,(11,,),),))', flattened)
    self.assertEqual(flattened, tree_structure(recreate_tree(flattened)))
    tree = BinaryTree()
    for value in (12, 5):
        tree.insert(value)
    flattened = tree_structure(tree.root)
    self.assertEqual('(12,(5,,),)', flattened)
    self.assertEqual(flattened, tree_structure(recreate_tree(flattened)))
    # Left-only child, and mixed bare/nested children.
    root = recreate_tree('(26,(23,,),)')
    self.assertEqual('26', root.value)
    root = recreate_tree('(23,5,(30,29,))')
    self.assertEqual('23', root.value)
def test_stress_recreate(self):
    """Round-trip every deletion subset of a complete three-level tree.

    For each subset of {1..7}, delete the subset from a complete BST,
    flatten it with tree_structure, rebuild it with recreate_tree, and
    verify the surviving keys. Fixes the non-idiomatic ``not i in group``
    membership tests (now ``i not in group``) and renames the loop
    variable that shadowed the flattened-structure string ``s``.
    """
    from algs18_binary_tree.binary_search_tree import BinaryTree, recreate_tree
    from algs18_binary_tree.speaking_binary_tree import tree_structure
    # Start from all singleton deletion sets and grow them each round.
    groups = [[1], [2], [3], [4], [5], [6], [7]]
    for _ in range(6):
        for group in groups:
            # Create the complete tree with three levels.
            bt = BinaryTree()
            for x in [4, 2, 6, 1, 3, 5, 7]:
                bt.insert(x)
            for victim in group:
                bt.remove(victim)
            structure = tree_structure(bt.root)
            # Recreate the tree, converting node payloads back to int.
            bt.root = recreate_tree(structure, int)
            # Every value NOT in the deletion set must still be present.
            for i in range(1, 8):
                if i not in group:
                    self.assertTrue(i in bt)
        # Extend each deletion set by one more key for the next round.
        new_groups = []
        for group in groups:
            for i in range(1, 8):
                if i not in group:
                    new_groups.append(group + [i])
        groups = new_groups
def test_val_height_valid_on_remove(self):
    """AVL invariants (height, size, balance) hold after a remove."""
    from algs18_binary_tree.balanced_binary_search_tree import BinaryTree
    from algs18_binary_tree.avl import check_avl_property
    tree = BinaryTree()
    tree.insert(7)
    self.assertEqual(7, tree.min())
    for value in (4, 10, 8):
        tree.insert(value)
    self.assertEqual(2, tree.root.height)
    self.assertEqual(4, tree.root.size())
    check_avl_property(tree.root)
    tree.remove(7)
    self.assertEqual(3, tree.root.size())
    self.assertEqual(1, tree.root.height)
    check_avl_property(tree.root)
    self.assertEqual(4, tree.min())
    # Remaining keys are still present; the removed one is gone.
    for value in (4, 10, 8):
        self.assertIn(value, tree)
    self.assertNotIn(7, tree)
def test_avl_stress(self):
    """AVL property holds through ordered, reversed, and random churn."""
    import random
    from algs18_binary_tree.balanced_binary_search_tree import BinaryTree
    from algs18_binary_tree.avl import check_avl_property
    tree = BinaryTree()
    count = 63
    keys = list(range(count))
    # Ascending inserts keep the tree balanced at every step.
    for key in keys:
        tree.insert(key)
        self.assertEqual(list(range(key + 1)), list(tree))
        check_avl_property(tree.root)
    self.assertEqual(keys, list(tree))
    # Ascending removals.
    for key in keys:
        tree.remove(key)
        self.assertEqual(list(range(key + 1, count)), list(tree))
        check_avl_property(tree.root)
    for key in keys:
        tree.insert(key)
        check_avl_property(tree.root)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(keys, list(tree))
    # Descending removals.
    for key in reversed(keys):
        tree.remove(key)
        check_avl_property(tree.root)
        self.assertEqual(list(range(key)), list(tree))
    for key in keys:
        tree.insert(key)
        check_avl_property(tree.root)
        self.assertEqual(list(range(key + 1)), list(tree))
    self.assertEqual(keys, list(tree))
    # Random-order removals; this once exposed a defect in _remove_min().
    shuffled = keys[:]
    random.shuffle(shuffled)
    for key in shuffled:
        tree.remove(key)
        check_avl_property(tree.root)
    self.assertTrue(tree.is_empty())
def test_count_rotations_avl(self):
    """An insert that unbalances the tree triggers a rotation."""
    from algs18_binary_tree.balanced_binary_search_tree import BinaryTree
    tree = BinaryTree()
    self.assertIsNone(tree.min())
    for value in (50, 30, 70, 20, 40, 60, 10, 45):
        tree.insert(value)
    self.assertEqual(50, tree.root.value)
    self.assertEqual(30, tree.root.left.value)
    self.assertEqual(20, tree.root.left.left.value)
    # This insert unbalances the left-left subtree and forces a rotation.
    tree.insert(5)
    self.assertEqual(10, tree.root.left.left.value)
def test_binary_tree_from_chapter_06(self):
    """Run the chapter-6 priority-queue stress test on the BST-backed PQ."""
    from algs18_binary_tree.pq_binary_search_tree import PQ
    from algs15_priority_queue.test import TestPriorityQueue
    from resources.english import english_words
    words = english_words()
    longest, shortest = TestPriorityQueue().priority_queue_stress_test(PQ(), len(words))
    # We cannot guarantee the individual words, only their lengths.
    self.assertEqual(len('formaldehydesulphoxylate'), len(longest))
    self.assertEqual(len('a'), len(shortest))
def test_pq_stress(self):
from algs18_binary_tree.pq_binary_search_tree import PQ
from algs18_binary_tree.avl import check_avl_property
pq1 = PQ()
self.assertTrue(pq1.is_empty())
self.assertFalse(pq1.is_full())
with self.assertRaises(ValueError):
pq1.enqueue(999, None)
with self.assertRaises(RuntimeError):
pq1.peek()
self.assertFalse(9 in pq1)
N = 31
keys = list(range(N))
n = 0
for k in keys:
pq1.enqueue(k, k)
n += 1
self.assertEqual(list(range(k + 1)), [key for key, _ | |
(40, "Invalid process security level.")
else:
# Delete a file with the path given by RBX and RCX
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, data = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Delete the file
path = str(data, ENCODING)
if path.startswith('/') or path.startswith('\\'):
# Absolute path
fullpath = path
else:
# Relative path
fullpath = os.path.join(self.processes[pid].cmdhandler.current_working_dir, path)
exitcode = self.computer.filesystem.delete_file(fullpath)
elif syscallid == 30:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Rename a file with the path given by RBX and RCX, with the new name given by R9 and R10
begin_offset_filename = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length_filename = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
begin_offset_newname = int.from_bytes(self.processes[pid].threads[tid].registers['R9'].get_bytes(0, 4)[1], byteorder='little')
length_newname = int.from_bytes(self.processes[pid].threads[tid].registers['R10'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, filename = processmemory_use.get_bytes(begin_offset_filename, length_filename)
if exitcode != 0:
exitcode = (exitcode, None)
else:
exitcode, newname = processmemory_use.get_bytes(begin_offset_newname, length_newname)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Rename the file
path = str(filename, ENCODING)
if path.startswith('/') or path.startswith('\\'):
# Absolute path
fullpath = path
else:
# Relative path
fullpath = os.path.join(self.processes[pid].cmdhandler.current_working_dir, path)
exitcode = self.computer.filesystem.rename_file(fullpath, str(newname, ENCODING))
elif syscallid == 31:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Create a folder with the path given by RBX and RCX
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, foldername = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Create the folder
path = str(foldername, ENCODING)
if path.startswith('/') or path.startswith('\\'):
# Absolute path
fullpath = path
else:
# Relative path
fullpath = os.path.join(self.processes[pid].cmdhandler.current_working_dir, path)
exitcode = self.computer.filesystem.create_directory(fullpath)
elif syscallid == 32:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Delete a folder with the path given by RBX and RCX
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, foldername = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Delete the folder
path = str(foldername, ENCODING)
if path.startswith('/') or path.startswith('\\'):
# Absolute path
fullpath = path
else:
# Relative path
fullpath = os.path.join(self.processes[pid].cmdhandler.current_working_dir, path)
exitcode = self.computer.filesystem.delete_directory(fullpath)
elif syscallid == 33:
# Return a list of the filenames in the directory given by RBX and RCX, separated by newlines and put it into the stack along with the length in RBX
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, data = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Read the folder
path = str(data, ENCODING)
if path.startswith('/') or path.startswith('\\'):
# Absolute path
fullpath = path
else:
# Relative path
fullpath = os.path.join(self.processes[pid].cmdhandler.current_working_dir, path)
exitcode = self.computer.filesystem.list_directory(fullpath)
if exitcode[0] == 0:
# Write the data in the stack
self.processes[pid].threads[tid].stack.push(exitcode[1])
# Modify the processes registers
self.processes[pid].threads[tid].registers['RES'].data[4 : 8] = int.to_bytes(len(self.processes[pid].threads[tid].stack.data) + self.processes[pid].processmemory.ss, 4, byteorder='little')
self.processes[pid].threads[tid].registers['RBX'].data[0 : 4] = int.to_bytes(len(exitcode[1]), 4, byteorder="little")
elif syscallid == 34:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Run a command defined by RBX and RCX on the command line
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, data = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Run the command
command = str(data, ENCODING)
exitcode = self.processes[pid].cmdhandler.handle(command)
if exitcode[0] == 0:
exitcode = (0, None)
elif syscallid == 35:
# Get the current working directory and put it into stack with the length in RBX
data = self.processes[pid].cmdhandler.current_working_dir
# Write the data in the stack
self.processes[pid].threads[tid].stack.push(bytes(data, ENCODING))
# Modify the processes registers
self.processes[pid].threads[tid].registers['RBX'].data[0 : 4] = int.to_bytes(len(data), 4, byteorder='little')
self.processes[pid].threads[tid].registers['RES'].data[4 : 8] = int.to_bytes(len(self.processes[pid].threads[tid].stack.data) + self.processes[pid].processmemory.ss, 4, byteorder='little')
exitcode = (0, None)
elif syscallid == 36:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Format the FileSystem
self.computer.filesystem._format()
exitcode = (0, None)
elif syscallid == 37:
# Get the current time as a 8 byte integer and put it into RBX
t = int.to_bytes(int(time.time()), 8, byteorder='little')
self.processes[pid].threads[tid].registers['RBX'].data[0 : 8] = t
exitcode = (0, None)
elif syscallid == 38:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Shut down the computer
self.computer.shutdown()
exitcode = (0, None)
elif syscallid == 39:
if self.processes[pid].security_level == 1:
exitcode = (40, "Invalid process security level.")
else:
# Set the password to be defined by RBX and RCX
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
length = int.from_bytes(self.processes[pid].threads[tid].registers['RCX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
exitcode, data = processmemory_use.get_bytes(begin_offset, length)
if exitcode != 0:
exitcode = (exitcode, None)
else:
# Set the password
self.computer.filesystem.password = <PASSWORD>(data).<PASSWORD>()
self.computer.filesystem._backend_update()
exitcode = (0, None)
elif syscallid == 40:
# Write to the processes STDOut with the beginning offset in RBX, and the end of the string indicated by a null byte
begin_offset = int.from_bytes(self.processes[pid].threads[tid].registers['RBX'].get_bytes(0, 4)[1], byteorder='little')
# Get the data
processmemory_use = self.processes[pid].get_processmemory_thread(tid)
# Get each byte
i = begin_offset
data = ''
while True:
exitcode, byte = processmemory_use.get_bytes(i, 1)
if exitcode != 0:
exitcode = (exitcode, None)
break
if byte == b'\x00':
exitcode = (0, None)
break
data += str(byte, ENCODING)
i += 1
if exitcode[0] == 0:
# Write the data to the STDOut
exitcode = self.processes[pid].stdout.write(bytes(data, ENCODING), self.terminal)
else:
exitcode = (30, "Invalid SYSCall.")
# Update memory in process
self.update_process_memory_global(pid, tid)
# In case of errors, set the thread's waiting state to not running/error
self.processes[pid].threads[tid].waiting = False
# Handle exitcode
self.processes[pid].threads[tid].registers['RAX'].data[0 : 4] = int.to_bytes(exitcode[0], 4, byteorder='little')
except Exception as e:
# Handle exitcode
self.halt_thread(pid, tid, 255)
# Add to log
self.log += '\n' + str(e)
def interrupt(self, iid, pid, tid):
    """Call a computer interrupt.

    Args: iid -> the interrupt ID to call
          pid -> the process ID
          tid -> the thread ID

    On failure the thread is halted with exit code 255 and the error is
    appended to the manager log (previously the exception was silently
    dropped, unlike the syscall handler which logs it)."""
    try:
        # Park the thread so the scheduler stops dispatching it.
        self.processes[pid].threads[tid].waiting = True
        # Busy-wait until no CPU core is still executing this thread,
        # i.e. its registers have been committed back to the process.
        while True:
            ready = True
            for cpu in self.computer.cpu.cores:
                try:
                    if cpu.pname == ('proc', pid) and cpu.tid == tid:
                        ready = False
                except Exception:
                    # Core may not be loaded with a process; skip it.
                    pass
            if ready:
                break
        # Run the interrupt (NOTE: all interrupts must call update_process_memory_global after modifying memory)
        # NOTE: All interrupt calls must modify memory in the processes memory data, not global memory data. Using the method update_process_memory_global, memory can be synced up with all processes.
        exitcode = self.computer.interrupt(iid, pid, tid)
        # Sync the (possibly modified) memory back into the process.
        self.update_process_memory_global(pid, tid)
        # Resume the thread.
        self.processes[pid].threads[tid].waiting = False
        # Store the exit code in RAX for the calling program to inspect.
        self.processes[pid].threads[tid].registers['RAX'].data[0 : 4] = int.to_bytes(exitcode[0], 4, byteorder='little')
    except Exception as e:
        # Kill the thread with exit code 255 and record the failure,
        # mirroring the syscall handler's error logging.
        self.halt_thread(pid, tid, 255)
        self.log += '\n' + str(e)
def call_library(self, pid, tid, lid, call):
    """Perform a dynamic library call.

    Args: pid -> process ID of the process that called the library
          tid -> thread ID of the thread that called the library
          lid -> library ID to call to (little-endian bytes)
          call -> call ID to call (little-endian bytes)

    Fixes: dispatches via the already-fetched library object instead of
    re-traversing the process/thread path, and logs failures instead of
    silently dropping the exception (consistent with the syscall handler)."""
    try:
        # Park the thread so the scheduler stops dispatching it.
        self.processes[pid].threads[tid].waiting = True
        # Busy-wait until no CPU core is still executing this thread,
        # i.e. its registers have been committed back to the process.
        while True:
            ready = True
            for cpu in self.computer.cpu.cores:
                try:
                    if cpu.pname == ('proc', pid) and cpu.tid == tid:
                        ready = False
                except Exception:
                    # Core may not be loaded with a process; skip it.
                    pass
            if ready:
                break
        # Run the library call (NOTE: all library calls must call update_process_memory_global after modifying memory)
        # NOTE: All library calls must modify memory in the processes memory data, not global memory data. Using the method update_process_memory_global, memory can be synced up with all processes.
        lid = int.from_bytes(lid, byteorder='little')
        call = int.from_bytes(call, byteorder='little')
        if lid < len(self.processes[pid].threads[tid].dynamic_libraries):
            # Get the library and dispatch through it.
            lib = self.processes[pid].threads[tid].dynamic_libraries[lid]
            if call in lib.defined_calls:
                exitcode = lib.handle(call)
            else:
                # Invalid call ID
                exitcode = (28, "Call ID is invalid.")
        else:
            # Invalid LID
            exitcode = (27, "Library ID is invalid.")
        # Sync the (possibly modified) memory back into the process.
        self.update_process_memory_global(pid, tid)
        # Resume the thread.
        self.processes[pid].threads[tid].waiting = False
        # Store the exit code in RAX for the calling program to inspect.
        self.processes[pid].threads[tid].registers['RAX'].data[0 : 4] = int.to_bytes(exitcode[0], 4, byteorder='little')
    except Exception as e:
        # Kill the thread with exit code 255 and record the failure,
        # mirroring the syscall handler's error logging.
        self.halt_thread(pid, tid, 255)
        self.log += '\n' + str(e)
def execute_core(self, threads, core_id):
    """Execute the threads on a CPU core.

    Args: threads -> the thread TIDs and process PIDs to run
          core_id -> the core ID to run the threads on

    Runs each runnable thread on the core for up to
    max_operations_per_thread operations, commits the resulting memory,
    stack, and registers back into the process, and marks threads and
    processes as terminated when their execution finishes.
    """
    # Iterate over each thread
    for pid, tid in threads:
        # Check if the thread is waiting for IO or is done
        if self.processes[pid].threads[tid].waiting or not self.processes[pid].threads[tid].running:
            continue
        # Get the thread data
        registers = self.processes[pid].threads[tid].registers
        processmemory = self.processes[pid].get_processmemory_thread(tid)
        # Load memory
        self.computer.memory.edit_memory_partition(('proc', pid), processmemory)
        # Load the correct core
        self.computer.cpu.init_core(core_id, processmemory, ('proc', pid), tid)
        if registers:
            # Thread already has register state: restore it onto the core.
            self.computer.cpu.cores[core_id].registers = registers
        else:
            # First run of this thread: adopt the core's fresh registers.
            self.processes[pid].threads[tid].registers = self.computer.cpu.cores[core_id].registers
        # Run the core for a certain number of operations
        self.computer.cpu.begin_execute_core_num(core_id, self.max_operations_per_thread)
        self.computer.cpu.await_execution(core_id)
        # Update process processmemory
        self.processes[pid].update_global_pm(self.computer.cpu.cores[core_id].processmemory)
        self.processes[pid].update_thread_stack(tid, self.computer.cpu.cores[core_id].processmemory.stack)
        self.processes[pid].update_thread_registers(tid, self.computer.cpu.cores[core_id].registers)
        # Check for ending
        # NOTE(review): 'output_exit' appears to be set by the core when the
        # program exits — confirm against the CPU implementation.
        if hasattr(self.computer.cpu.cores[core_id], 'output_exit') and not self.processes[pid].threads[tid].waiting:
            self.processes[pid].threads[tid].running = False
            self.processes[pid].threads[tid].output = self.computer.cpu.get_return(core_id)
            # Check for process error
            if self.processes[pid].threads[tid].output[0] != 0:
                # Set process error: a failing thread terminates the process
                # with that thread's output.
                self.processes[pid].state = 't'
                self.processes[pid].output = self.processes[pid].threads[tid].output
            # Check for process ending
            elif all([not self.processes[pid].threads[t].running for t in self.processes[pid].threads]):
                # All threads are done
                self.processes[pid].state = 't'
                self.processes[pid].output = (0, None)
        # Unload the core
        self.computer.cpu.unload_core(core_id)
def _process_mainloop(self):
"""Main process running loop. Should be run on a separate thread."""
self.running = True
# Main loop
while self.running:
# Split threads up for each CPU core
num_cores = len(self.computer.cpu.cores)
split_threads = [[] for i in range(num_cores)]
current_core_num = 0
# Go through each process
for pid in self.processes:
if self.processes[pid].state == 't':
continue
# Go through each thread
for tid in self.processes[pid].threads:
# Add this PID and TID to the according core
split_threads[current_core_num].append((pid, tid))
# Loop the current core number
current_core_num += 1
if current_core_num >= num_cores:
current_core_num = 0
executor_threads = | |
<filename>sapextractor/algo/prod/obj_centr_log.py
import pandas as pd
from dateutil import parser
from pm4pymdl.objects.mdl.exporter import exporter as mdl_exporter
from pm4pymdl.objects.ocel.exporter import exporter as ocel_exporter
from sapextractor.utils.dates import timestamp_column_from_dt_tm
from pandas.core.frame import DataFrame
from sapextractor.database_connection.interface import DatabaseConnection
def apply(con, keep_first=True, min_extr_date="2020-01-01 00:00:00", gjahr="2020", enable_changes=True,
enable_payments=True, allowed_act_doc_types=None, allowed_act_changes=None, mandt="800"):
print("WIP: production, 'apply' function")
print("######################################################")
print("Create Production Order")
print("######################################################")
afko_res = con.prepare_and_execute_query("AFKO", ["AUFNR","PLNBEZ", "GAMNG", "GMEIN"], additional_query_part=" WHERE MANDT = '"+mandt+"'");
# Remove deleted ones
aufk_res = con.prepare_and_execute_query("AUFK", ["AUFNR"], additional_query_part=" WHERE LOEKZ != 'X' AND MANDT = '"+mandt+"'");
afko_res = afko_res.merge(aufk_res,left_on="AUFNR",right_on="AUFNR",how="inner");
print(afko_res)
afko_res['event_PRODORD'] = "OR" + afko_res["AUFNR"] #afko_res.apply(lambda row: "OR"+row.AUFNR)
# removed: CDTCODE='CO01' AND
jcds_res = con.prepare_and_execute_query("JCDS", ["OBJNR", "CHGNR", "UDATE","UTIME","CDTCODE"], additional_query_part=" WHERE MANDT = '"+mandt+"'");
print(jcds_res)
dataframe = afko_res.merge(jcds_res,left_on="event_PRODORD",right_on="OBJNR",how="inner")
print(dataframe)
s026 = con.prepare_and_execute_query("S026", ["MATNR","MCOMB", "AUFNR"], additional_query_part=" WHERE MANDT = '"+mandt+"'").drop_duplicates();
# s026.rename(columns={'MATNR': 'event_REQMAT'},inplace=True)
print(s026)
dataframe['event_REQMAT'] = "";
timestamp_column_from_dt_tm.apply(dataframe, "UDATE", "UTIME", "event_timestamp")
min_extr_date = parser.parse(min_extr_date)
dataframe = dataframe[dataframe["event_timestamp"] >= min_extr_date]
dataframe = dataframe.sort_values("event_timestamp")
dataframe = dataframe.drop_duplicates(subset=["event_PRODORD"],keep="first")
# # Define relevant Production orders
# relevant_production_orders_series = pd.Series(data=["OR000000822321"]);
relevant_production_orders = dataframe["event_PRODORD"];
print("relevant prod orders:", relevant_production_orders);
# # Filter data
# dataframe = dataframe.merge(relevant_production_orders,left_on="event_PRODORD",right_on="event_PRODORD",how="inner")
# print("#### MERGED DATAFRAME: ")
# print(dataframe)
dataframe["DOCTYPE_RequiredMaterial"] = "";
dataframe["DOCTYPE_RequiredMaterial"] = dataframe["DOCTYPE_RequiredMaterial"].astype(object)
dataframe["event_REQMAT"] = "";
# dataframe["event_REQMAT"] = dataframe["event_REQMAT"].astype(object)
relevant_materials_series = pd.Series();
dataframe: pd.DataFrame;
for index, row in dataframe.iterrows():
filtered = s026[(("OR"+s026['AUFNR']) == row['event_PRODORD'])];
if not filtered.empty :
print("Conncted Material:");
if row['PLNBEZ'] == " ":
dataframe.loc[index,'PLNBEZ'] = filtered['MATNR'].tolist()[0]
relevant_materials_series = relevant_materials_series.append(filtered["MCOMB"]);
connected_mat_list = filtered["MCOMB"].tolist()
dataframe.at[index,'DOCTYPE_RequiredMaterial'] = connected_mat_list
# for i, mat in enumerate(connected_mat_list):
# dataframe.at[index,"event_REQMAT"+counter] = filtered["MCOMB"].toList(;
dataframe.at[index,'event_REQMAT'] = str(connected_mat_list);
relevant_materials = pd.DataFrame({'event_REQMAT': relevant_materials_series}).drop_duplicates(subset=["event_REQMAT"]);
print(dataframe)
dataframe.rename(columns={'PLNBEZ': 'event_MATNR'},inplace=True)
dataframe.rename(columns={'GAMNG': 'event_MNG'},inplace=True)
dataframe = dataframe.assign(DOCTYPE_ProdOrd = lambda x: x['event_PRODORD'])
dataframe = dataframe.assign(DOCTYPE_Material = lambda x: x['event_MATNR'])
# dataframe = dataframe.assign(DOCTYPE_RequiredMaterial = lambda x: str(x['event_REQMAT'].tolist()))
# dataframe = dataframe.reset_index()
# dataframe["event_id"] = dataframe.index.astype(str)
dataframe["event_activity"] = "Create Production Order"
dataframe = dataframe.drop(["AUFNR","OBJNR","CHGNR","UDATE","UTIME","CDTCODE","GMEIN"],axis=1)
print(dataframe)
# eban_cols = con.get_columns("EBAN")
# print(eban_cols)
# eban_test = con.prepare_and_execute_query("EBAN", eban_cols, additional_query_part=" WHERE BANFN = '0010044618' AND MANDT = '"+mandt+"'");
# # resb = con.execute_read_sql("SELECT * FROM RESB WHERE AUFNR = '000000822321' AND MANDT = '"+mandt+"'",["RSNUM","RSPOS", "MATNR", "BDMNG","MEINS", "AUFNR", "BAUGR","BANFN","BNFPO"]);
# print(eban_test);
# eban_test.to_csv ('exported_eban.csv', index = True, header=True)
print("######################################################")
print("Plan Material// Create Purchase Requsition")
print("######################################################")
# TODO
# print("THIS STEP IS IN PROGRESS")
# Deletion Indicator: LOEKZ = X
eban = con.prepare_and_execute_query("EBAN", ["BANFN","MATNR", "MENGE", "MEINS","BADAT"], additional_query_part=" WHERE LOEKZ != 'X' AND MANDT = '"+mandt+"'");
# eban.rename(columns={'BANFN': 'event_PURCHREQ'},inplace=True)
eban['event_PURCHREQ'] = "PR" + eban["BANFN"];
eban.rename(columns={'MATNR': 'event_REQMAT'},inplace=True)
# Filter Materials
eban = eban.merge(relevant_materials,left_on="event_REQMAT",right_on="event_REQMAT",how="inner")
# Save relevant Purchase Requisitions
relevant_purchase_requisitions_series = pd.Series();
relevant_purchase_requisitions_series = relevant_purchase_requisitions_series.append(eban['event_PURCHREQ']);
relevant_purchase_requisitions = pd.DataFrame({'event_PURCHREQ': relevant_purchase_requisitions_series}).drop_duplicates(subset=["event_PURCHREQ"]);
print("Relevant Purchase Requisitions:")
print(relevant_purchase_requisitions)
eban.rename(columns={'MENGE': 'event_MNG'},inplace=True)
eban.rename(columns={'MEINS': 'event_EIN'},inplace=True)
eban["event_activity"] = "Create Purchase Requisition"
eban['DOCTYPE_PurchReq'] = eban['event_PURCHREQ']
eban['DOCTYPE_RequiredMaterial'] = eban['event_REQMAT']
eban['TIME'] = "235959"; # TODO: Made up timestamp
# timestamp_column_from_dt_tm.apply(eban, "BADAT", "TIME", "event_timestamp") # filter only based on day
# # TODO: TEMP REMOVED, in favor of having only release event (with TIME!) in log
# # eban = eban[eban["event_timestamp"] >= min_extr_date]
# eban['TIME'] = "000000"; # but set time to start of day after that
# timestamp_column_from_dt_tm.apply(eban, "BADAT", "TIME", "event_timestamp")
eban = eban.drop(["BADAT","TIME","BANFN"],axis=1)
print(eban)
print("######################################################")
print("Release Purchase Requisition")
print("######################################################")
# TODO: Include TCODE and CHNGIND: update?
cdpos_res = con.prepare_and_execute_query("CDPOS", ["OBJECTID","CHANGENR", "FNAME","VALUE_NEW"], additional_query_part=" WHERE (VALUE_NEW = 'X' OR VALUE_NEW='XX') AND CHNGIND='U' AND FNAME='FRGZU' AND MANDANT = '"+mandt+"'");
print(cdpos_res)
#Removed: TCODE = 'ME54' AND
cdhdr_res = con.prepare_and_execute_query("CDHDR", ["OBJECTID","CHANGENR", "UDATE", "UTIME"], additional_query_part=" WHERE MANDANT = '"+mandt+"'");
print(cdhdr_res)
release_purch_req_data = cdpos_res.merge(cdhdr_res,left_on=["OBJECTID","CHANGENR"],right_on=["OBJECTID","CHANGENR"],how="inner") #.drop_duplicates(subset=["OBJECTID","CHANGENR","UDATE","UTIME"])
print(release_purch_req_data)
timestamp_column_from_dt_tm.apply(release_purch_req_data, "UDATE", "UTIME", "event_timestamp")
release_purch_req_data = release_purch_req_data[release_purch_req_data["event_timestamp"] >= min_extr_date]
release_purch_req_data = release_purch_req_data.sort_values("event_timestamp")
# release_purch_req_data["event_id"] = release_purch_req_data.index.astype(str)
release_purch_req_data["event_activity"] = ""
release_purch_req_data["event_activity"] = release_purch_req_data.apply(lambda x: 'Release Purchase Requisition (1)' if x['VALUE_NEW'] == 'X' else ('Release Purchase Requisition (2)' if x['VALUE_NEW'] == 'XX' else 'Release PurReq: ERR_UNKNOWN_CHANGE'), axis=1)
release_purch_req_data.rename(columns={'PLNBEZ': 'event_REQMAT'},inplace=True)
release_purch_req_data['event_PURCHREQ'] = "PR" + release_purch_req_data["OBJECTID"]
release_purch_req_data.rename(columns={'VALUE_NEW': 'event_NEW-FRGZU'},inplace=True)
release_purch_req_data = release_purch_req_data.assign(DOCTYPE_PurchReq = lambda x: x['event_PURCHREQ'])
release_purch_req_data["event_REQMAT"] = ""
if not release_purch_req_data.empty:
release_purch_req_data["event_REQMAT"] = release_purch_req_data.apply(lambda x:eban.loc[eban['event_PURCHREQ'] == x['event_PURCHREQ']]['event_REQMAT'].values[0] if eban.loc[eban['event_PURCHREQ'] == x['event_PURCHREQ']]['event_REQMAT'].values.size > 0 else '', axis=1)
# Filter only relevant Purchase Requisitions
release_purch_req_data = release_purch_req_data.merge(relevant_purchase_requisitions,left_on="event_PURCHREQ",right_on="event_PURCHREQ",how="inner")
release_purch_req_data = release_purch_req_data.assign(DOCTYPE_RequiredMaterial = lambda x: x['event_REQMAT'])
# release_purch_req_data['DOCTYPE_RequiredMaterial'] = release_purch_req_data['event_REQMAT']
print(release_purch_req_data)
release_purch_req_data = release_purch_req_data.drop(["OBJECTID","CHANGENR","FNAME","UDATE","UTIME"],axis=1)
print(release_purch_req_data)
print("######################################################")
print("Convert PR to Purchase Order")
print("######################################################")
ekpo_res = con.prepare_and_execute_query("EKPO",["EBELN","MATNR","MENGE", "MEINS", "BANFN"],additional_query_part=" WHERE BANFN != ' ' AND MANDT = '"+mandt+"'");
print(ekpo_res)
# cdpos_res_2 = con.prepare_and_execute_query("CDPOS",["OBJECTID","CHANGENR"],additional_query_part=" WHERE OBJECTCLAS = 'EINKBELEG' AND CHNGIND = 'I' AND MANDANT = '"+mandt+"'");
# print(cdpos_res_2)
cdhdr_res_2 = con.prepare_and_execute_query("CDHDR", ["OBJECTID","CHANGENR", "UDATE", "UTIME"], additional_query_part=" WHERE OBJECTCLAS = 'EINKBELEG' AND CHANGE_IND = 'I' AND MANDANT = '"+mandt+"'");
print(cdhdr_res_2)
convert_to_purch_order = ekpo_res.merge(cdhdr_res_2,left_on=["EBELN"],right_on=["OBJECTID"],how="inner") #.drop_duplicates(subset=["OBJECTID","CHANGENR","UDATE","UTIME"])
print(convert_to_purch_order)
timestamp_column_from_dt_tm.apply(convert_to_purch_order, "UDATE", "UTIME", "event_timestamp")
convert_to_purch_order = convert_to_purch_order[convert_to_purch_order["event_timestamp"] >= min_extr_date]
convert_to_purch_order = convert_to_purch_order.sort_values("event_timestamp")
convert_to_purch_order.rename(columns={'PLNBEZ': 'event_REQMAT'},inplace=True)
convert_to_purch_order['event_PURCHORD'] = "PUOR" + convert_to_purch_order["EBELN"]
convert_to_purch_order['event_PURCHREQ'] = "PR" + convert_to_purch_order["BANFN"] # Can be empty
# convert_to_purch_order['event_PURCHREQ'] = convert_to_purch_order.apply(lambda x: "" if (x["BANFN"] == "" or x["BANFN"] == " ") else ("PR"+x["BANFN"]), axis=1)
convert_to_purch_order.rename(columns={'MATNR': 'event_REQMAT'},inplace=True)
if not convert_to_purch_order.empty:
convert_to_purch_order = convert_to_purch_order.merge(relevant_materials,left_on="event_REQMAT",right_on="event_REQMAT",how="inner")
print(convert_to_purch_order);
# Filter only relevant Purchase Requisitions
convert_to_purch_order = convert_to_purch_order.merge(relevant_purchase_requisitions,left_on="event_PURCHREQ",right_on="event_PURCHREQ",how="inner")
# Save relevant Purchase Orders
relevant_purchase_orders_series = pd.Series();
relevant_purchase_orders_series = relevant_purchase_orders_series.append(convert_to_purch_order['event_PURCHORD']);
relevant_purchase_orders = pd.DataFrame({'event_PURCHORD': relevant_purchase_orders_series}).drop_duplicates(subset=["event_PURCHORD"]);
convert_to_purch_order.rename(columns={'MENGE': 'event_MNG'},inplace=True)
convert_to_purch_order.rename(columns={'MEINS': 'event_EIN'},inplace=True)
convert_to_purch_order = convert_to_purch_order.assign(DOCTYPE_PurchOrd = lambda x: x['event_PURCHORD'])
convert_to_purch_order['DOCTYPE_PurchReq'] = "";
convert_to_purch_order['DOCTYPE_PurchReq'] = convert_to_purch_order.apply(lambda x: None if x['event_PURCHREQ'] == '' else x['event_PURCHREQ'], axis=1)
convert_to_purch_order = convert_to_purch_order.assign(DOCTYPE_RequiredMaterial = lambda x: x['event_REQMAT'])
convert_to_purch_order["event_activity"] = "Convert to Purchase Order"
print(convert_to_purch_order)
convert_to_purch_order = convert_to_purch_order.drop(["OBJECTID","CHANGENR","UDATE","UTIME","EBELN","BANFN"],axis=1)
print(convert_to_purch_order)
print("######################################################")
print("Release Purchase Order")
print("######################################################") # AND FNAME='FRGZU' ? (VALUE_NEW = 'X') AND
release_purchase_order_cdpos = con.prepare_and_execute_query("CDPOS", ["OBJECTID","CHANGENR", "FNAME","VALUE_NEW"], additional_query_part=" WHERE CHNGIND='U' AND VALUE_NEW = 'X' AND FNAME='FRGZU' AND OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(release_purchase_order_cdpos)
release_purchase_order_cdhdr = con.prepare_and_execute_query("CDHDR", ["OBJECTID","CHANGENR", "UDATE", "UTIME"], additional_query_part=" WHERE OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(release_purchase_order_cdhdr)
release_purch_order = release_purchase_order_cdpos.merge(release_purchase_order_cdhdr,left_on=["OBJECTID","CHANGENR"],right_on=["OBJECTID","CHANGENR"],how="inner") #.drop_duplicates(subset=["OBJECTID","CHANGENR","UDATE","UTIME"])
print(release_purch_order)
timestamp_column_from_dt_tm.apply(release_purch_order, "UDATE", "UTIME", "event_timestamp")
release_purch_order = release_purch_order[release_purch_order["event_timestamp"] >= min_extr_date]
release_purch_order = release_purch_order.sort_values("event_timestamp")
# # release_purch_req_data["event_id"] = release_purch_req_data.index.astype(str)
# # release_purch_req_data["event_activity"] = "Release Purchase Requisition"
release_purch_order["event_activity"] = ""
if not release_purch_order.empty:
release_purch_order["event_activity"] = release_purch_order.apply(lambda x: 'Release Purchase Order (Normal)' if x['VALUE_NEW'] == 'X' else 'Release Purchase Order (Special)', axis=1)
release_purch_order['event_PURCHORD'] = "PUOR" + release_purch_order["OBJECTID"]
# Filter only relevant Purchase Requisitions
release_purch_order = release_purch_order.merge(relevant_purchase_orders,left_on="event_PURCHORD",right_on="event_PURCHORD",how="inner")
release_purch_order.rename(columns={'VALUE_NEW': 'event_NEW-FRGZU'},inplace=True)
release_purch_order = release_purch_order.assign(DOCTYPE_PurchOrd = lambda x: x['event_PURCHORD'])
print(release_purch_order)
release_purch_order = release_purch_order.drop(["OBJECTID","CHANGENR","FNAME","UDATE","UTIME"],axis=1)
print(release_purch_order)
print("######################################################")
print("Reject Purchase Order")
print("######################################################") # (VALUE_NEW = '08') AND
reject_purchase_order_cdpos = con.prepare_and_execute_query("CDPOS", ["OBJECTID","CHANGENR", "FNAME","VALUE_NEW"], additional_query_part=" WHERE (VALUE_NEW = 'B') AND CHNGIND='U' AND FNAME='FRGKE' AND OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(reject_purchase_order_cdpos)
reject_purchase_order_cdhdr = con.prepare_and_execute_query("CDHDR", ["OBJECTID","CHANGENR", "UDATE", "UTIME"], additional_query_part=" WHERE OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(reject_purchase_order_cdhdr)
reject_purch_order = reject_purchase_order_cdpos.merge(reject_purchase_order_cdhdr,left_on=["OBJECTID","CHANGENR"],right_on=["OBJECTID","CHANGENR"],how="inner") #.drop_duplicates(subset=["OBJECTID","CHANGENR","UDATE","UTIME"])
reject_purch_order = reject_purch_order.merge(ekpo_res.drop(["MENGE", "MEINS", "BANFN"],axis=1),left_on=["OBJECTID"],right_on=["EBELN"],how="inner")
reject_purch_order.rename(columns={'MATNR': 'event_REQMAT'},inplace=True)
if not reject_purch_order.empty:
reject_purch_order = reject_purch_order.merge(relevant_materials,left_on="event_REQMAT",right_on="event_REQMAT",how="inner")
print(reject_purch_order)
timestamp_column_from_dt_tm.apply(reject_purch_order, "UDATE", "UTIME", "event_timestamp")
reject_purch_order = reject_purch_order[reject_purch_order["event_timestamp"] >= min_extr_date]
reject_purch_order = reject_purch_order.sort_values("event_timestamp")
reject_purch_order["event_activity"] = ""
if not reject_purch_order.empty:
reject_purch_order["event_activity"] = reject_purch_order.apply(lambda x: 'Reject Purchase Order' if x['VALUE_NEW'] == 'B' else 'Reject PurOrd: ERR_UNKNOWN_CHANGE', axis=1)
reject_purch_order['event_PURCHORD'] = "PUOR" + reject_purch_order["OBJECTID"]
# Filter only relevant Purchase Requisitions
reject_purch_order = reject_purch_order.merge(relevant_purchase_orders,left_on="event_PURCHORD",right_on="event_PURCHORD",how="inner")
reject_purch_order.rename(columns={'VALUE_NEW': 'event_NEW-FRGZU'},inplace=True)
reject_purch_order = reject_purch_order.assign(DOCTYPE_PurchOrd = lambda x: x['event_PURCHORD'])
reject_purch_order = reject_purch_order.assign(DOCTYPE_RequiredMaterial = lambda x: x['event_REQMAT'])
print(reject_purch_order)
reject_purch_order = reject_purch_order.drop(["OBJECTID","CHANGENR","FNAME","UDATE","UTIME","EBELN"],axis=1)
print(reject_purch_order)
print("######################################################")
print("Reconsider Purchase Order")
print("######################################################") # (VALUE_NEW = '08') AND
reconsider_purchase_order_cdpos = con.prepare_and_execute_query("CDPOS", ["OBJECTID","CHANGENR", "FNAME","VALUE_NEW"], additional_query_part=" WHERE (VALUE_NEW = 'A') AND CHNGIND='U' AND FNAME='FRGKE' AND OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(reconsider_purchase_order_cdpos)
reconsider_purchase_order_cdhdr = con.prepare_and_execute_query("CDHDR", ["OBJECTID","CHANGENR", "UDATE", "UTIME"], additional_query_part=" WHERE OBJECTCLAS = 'EINKBELEG' AND MANDANT = '"+mandt+"'");
print(reconsider_purchase_order_cdhdr)
reconsider_purch_order = reconsider_purchase_order_cdpos.merge(reconsider_purchase_order_cdhdr,left_on=["OBJECTID","CHANGENR"],right_on=["OBJECTID","CHANGENR"],how="inner") #.drop_duplicates(subset=["OBJECTID","CHANGENR","UDATE","UTIME"])
print(reconsider_purch_order)
timestamp_column_from_dt_tm.apply(reconsider_purch_order, "UDATE", "UTIME", "event_timestamp")
reconsider_purch_order = reconsider_purch_order[reconsider_purch_order["event_timestamp"] >= min_extr_date]
reconsider_purch_order = reconsider_purch_order.sort_values("event_timestamp")
reconsider_purch_order["event_activity"] = ""
if not reconsider_purch_order.empty:
reconsider_purch_order["event_activity"] = reconsider_purch_order.apply(lambda x: 'Reconsider Purchase Order' if x['VALUE_NEW'] == 'A' else 'Reconsider PurOrd: ERR_UNKNOWN_CHANGE', axis=1)
reconsider_purch_order['event_PURCHORD'] = "PUOR" + reconsider_purch_order["OBJECTID"]
# Filter only relevant Purchase Requisitions
reconsider_purch_order = reconsider_purch_order.merge(relevant_purchase_orders,left_on="event_PURCHORD",right_on="event_PURCHORD",how="inner")
reconsider_purch_order.rename(columns={'VALUE_NEW': 'event_NEW-FRGKE'},inplace=True)
reconsider_purch_order = reconsider_purch_order.assign(DOCTYPE_PurchOrd = lambda x: x['event_PURCHORD'])
print(reconsider_purch_order)
reconsider_purch_order = reconsider_purch_order.drop(["OBJECTID","CHANGENR","FNAME","UDATE","UTIME"],axis=1)
print(reconsider_purch_order)
print("######################################################")
print("Goods Receipt for Purchase Order")
print("######################################################")
goods_receipt_for_purchase_order = con.prepare_and_execute_query("EKBE",["EBELN","CPUDT","CPUTM","BELNR","MATNR", "MENGE"], additional_query_part=" WHERE (NOT CPUDT = '00000000') AND MANDT = '"+mandt+"'")
print(goods_receipt_for_purchase_order)
timestamp_column_from_dt_tm.apply(goods_receipt_for_purchase_order, "CPUDT", "CPUTM", "event_timestamp")
goods_receipt_for_purchase_order = goods_receipt_for_purchase_order[goods_receipt_for_purchase_order["event_timestamp"] >= min_extr_date]
goods_receipt_for_purchase_order = goods_receipt_for_purchase_order.sort_values("event_timestamp")
if not goods_receipt_for_purchase_order.empty:
goods_receipt_for_purchase_order["event_activity"] = goods_receipt_for_purchase_order.apply(lambda x: 'Goods Receipt for Purchase Order', axis=1)
goods_receipt_for_purchase_order['event_PURCHORD'] | |
self.data_mean[self.data_mean_keys[j]] += self.data_mean[self.data_mean_keys[i]]
print("Mean total TKE has been calculated for {}".format(self.data_mean_keys[j]))
break
@timer
def formatMeanDataToOpenFOAM(self, ke_relaxfactor=1.):
    """Write time-averaged inflow data as OpenFOAM boundaryData 'AverageField' files.

    For every inflow patch, each mean property belonging to that patch is
    written to <patch>/0/<property>.  k and epsilon may be relaxed by
    *ke_relaxfactor* (e.g. < 1) for use with time-varying inflow BCs.

    :param ke_relaxfactor: multiplier applied to the k and epsilon means
        before writing; default 1 (no relaxation).
    """
    # Go through inflow patches
    for i, patch in enumerate(self.inflow_patches):
        # Create time folders; '100000' is kept for the (currently disabled)
        # copy step at the end of the loop body.
        timefolder0 = self.avg_folder_patchpaths[i] + '0/'
        timefolder100000 = self.avg_folder_patchpaths[i] + '100000/'
        os.makedirs(timefolder0, exist_ok=True)
        os.makedirs(timefolder100000, exist_ok=True)
        # For each patch, go through (mean) properties
        for j in range(len(self.property_keys)):
            # Skip properties that belong to a different patch
            if patch not in self.property_keys[j]:
                continue
            # Property name without the patch suffix
            property_name = self.property_keys[j].replace('_' + patch, '')
            data_mean = self.data_mean[self.property_keys[j]]
            # Multiply into a fresh array rather than '*=' so repeated calls
            # don't compound the relaxation on the stored mean data.
            if property_name in ('k', 'epsilon'):
                data_mean = data_mean*ke_relaxfactor
            # Scalars get a scalar placeholder average, everything else a vector
            if property_name in ('k', 'T', 'pd', 'nuSGS', 'kappat', 'epsilon'):
                datatype, average = 'scalar', '0'
            else:
                datatype, average = 'vector', '(0 0 0)'
            print('Writing {0} to {1}'.format(property_name, timefolder0))
            # Context manager guarantees the file is closed even on error
            with open(timefolder0 + property_name, 'w') as fid:
                self._writeOpenFOAMHeader(fid, datatype)
                fid.write('// This inflow plane has been averaged from {0} s to {1} s\n'.format(self.starttime_real, self.stoptime_real))
                if ke_relaxfactor < 1. and property_name in ('k', 'epsilon'):
                    fid.write('// and relaxed to {} of full magnitude for time-varying inflow BC\n'.format(ke_relaxfactor))
                if property_name == 'k' and self.get_tke_total:
                    fid.write('// This is total TKE\n')
                if self.get_horizontal_mean:
                    fid.write('// Both temporal and horizontal averaging is done\n')
                fid.write('// Min: {}, max: {}, mean: {}\n'.format(np.min(data_mean, axis=0), np.max(data_mean, axis=0), np.mean(data_mean, axis=0)))
                fid.write('// Average\n')
                fid.write(average)
                fid.write('\n\n\n')
                fid.write(str(data_mean.shape[0]))
                fid.write('\n')
                fid.write('(\n')
                # Write one value (or vector) per boundary point
                for k in range(data_mean.shape[0]):
                    if property_name == 'U':
                        # Vector written as "(x y z)": tuple repr, commas stripped
                        fid.write(str(tuple(data_mean[k])).replace(',', ''))
                    else:
                        fid.write(str(data_mean[k, 0]))
                    fid.write('\n')
                fid.write(')')
            # # Also copy files to 10000 time directory
            # print('\nCopying {} to {}'.format(property_name, timefolder100000))
            # shutil.copy(timefolder0 + property_name, timefolder100000)
    # Fixed broken format string: the original printed "{patch}" literally
    # because the f-prefix was missing.
    print("\nDon't forget to copy 'points' to each inflow patch folder too!")

def _writeOpenFOAMHeader(self, fid, datatype):
    """Write the OpenFOAM banner and FoamFile sub-dictionary for *datatype*."""
    fid.write('/*--------------------------------*- C++ -*----------------------------------*\\\n')
    fid.write('| ========= | |\n')
    fid.write('| \\\\ / F ield | OpenFOAM: The Open Source CFD Toolbox |\n')
    fid.write('| \\\\ / O peration | Version: 1.6 |\n')
    fid.write('| \\\\ / A nd | Web: http://www.OpenFOAM.org |\n')
    fid.write('| \\\\/ M anipulation | |\n')
    # '\\*' fixes the invalid '\*' escape in the original (same output)
    fid.write('\\*---------------------------------------------------------------------------*/\n')
    fid.write('FoamFile\n')
    fid.write('{\n')
    fid.write(' version 2.0;\n')
    fid.write(' format ascii;\n')
    fid.write(' class ')
    fid.write(datatype)
    fid.write('AverageField;\n')
    fid.write(' object values;\n')
    fid.write('}\n')
    fid.write('// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //\n\n')
if __name__ == '__main__':
    # --- Case setup -----------------------------------------------------
    casename = 'ABL_N_H'
    casedir = '/media/yluan'
    boundarydata_folder = 'boundaryData_epsilon' # 'boundaryData', 'boundaryData_epsilonTotal'
    filenames = "*"
    smooth_k = False
    n_timesample = 1000
    starttime, stoptime = 20000, 25000
    # Read inflow boundary data and average it over [starttime, stoptime]
    case = InflowBoundaryField(casename=casename, casedir=casedir, boundarydata_folder=boundarydata_folder, debug=True)
    case._readPoints()
    case.readPropertyData(filenames=filenames, n_timesample=n_timesample)
    case.calculatePropertyMean(starttime=starttime, stoptime=stoptime, get_tke_total=True)
    if smooth_k:
        # Optional smoothing of the k profiles above the height of minimum k.
        # NOTE(review): the hard-coded strides/offsets of 300 presumably equal
        # the number of points per z-level on these patches -- confirm before
        # reusing on another mesh.
        idx_sorted_south = np.argsort(case.points['points_south'][:, 2])
        idx_revertsort_south = np.argsort(idx_sorted_south)  # undoes the sort later
        points_sorted_south = case.points['points_south'][idx_sorted_south]
        idx_sorted_west = np.argsort(case.points['points_west'][:, 2])
        idx_revertsort_west = np.argsort(idx_sorted_west)
        points_sorted_west = case.points['points_west'][idx_sorted_west]
        k_south_sorted = case.data_mean['k_south'][idx_sorted_south]
        k_west_sorted = case.data_mean['k_west'][idx_sorted_west]
        # Index and height of the minimum k on each patch
        kmin_south_i = np.where(k_south_sorted == min(k_south_sorted))[0][0]
        z_kmin_south = points_sorted_south[:, 2][kmin_south_i]
        kmin_west_i = np.where(k_west_sorted == min(k_west_sorted))[0][0]
        z_kmin_west = points_sorted_west[:, 2][kmin_west_i]
        zmax_south = zmax_west = 1000.
        z_uniq_south = np.unique(points_sorted_south[:, 2])
        z_uniq_west = np.unique(points_sorted_west[:, 2])
        z_uniq_south = z_uniq_south[z_uniq_south >= z_kmin_south]
        z_uniq_west = z_uniq_west[z_uniq_west >= z_kmin_west - 10.]
        # Linear k increment per z-level between min-k height and the top
        kdiff_south = k_south_sorted[-1] - k_south_sorted[kmin_south_i]
        kdiff_west = k_west_sorted[-1] - k_west_sorted[kmin_west_i - 300]
        dk_south = kdiff_south/len(z_uniq_south)
        dk_west = kdiff_west/len(z_uniq_west)
        for i in range(kmin_south_i + 300, len(k_south_sorted) - 300):
            # if k_south_sorted[i] > k_south_sorted[-1]: k_south_sorted[i] = k_south_sorted[-1]
            k_south_sorted[i:i + 300] = k_south_sorted[i - 300] + dk_south
        for i in range(kmin_west_i, len(k_west_sorted) - 300):
            # if k_west_sorted[i] > k_west_sorted[-1]: k_west_sorted[i] = k_west_sorted[-1]
            k_west_sorted[i:i + 300] = k_west_sorted[i - 300] + dk_west
        # Scatter the smoothed profiles back to the original point order
        case.data_mean['k_south'] = k_south_sorted[idx_revertsort_south]
        case.data_mean['k_west'] = k_west_sorted[idx_revertsort_west]
    case.formatMeanDataToOpenFOAM(ke_relaxfactor=1)
    # --- Diagnostic profile plots ---------------------------------------
    from PlottingTool import Plot2D
    # Heights normalized by zi = 750 (used as boundary-layer height here)
    idx_sorted = np.argsort(case.points['points_south'][:, 2])
    points_sorted = case.points['points_south'][:, 2][idx_sorted]/750.
    idx_sorted2 = np.argsort(case.points['points_west'][:, 2])
    points_sorted2 = case.points['points_west'][:, 2][idx_sorted2]/750.
    for name in case.filenames:
        # NOTE(review): xlabel stays unset (NameError on first iteration) if
        # name is none of k/U/T/epsilon -- confirm case.filenames is limited
        # to these.
        if name == 'k':
            xlabel = r'$\langle k \rangle$ [m$^2$/s$^2$]'
        elif name == 'U':
            xlabel = r'$\langle U \rangle$ [m/s]'
        elif name == 'T':
            xlabel = r'$\langle T \rangle$ [K]'
        elif name == 'epsilon':
            xlabel = r'$\langle \epsilon_{\mathrm{SGS}} \rangle$ [m$^2$/s$^3$]'
        data_sorted = case.data_mean[name + '_south'][idx_sorted]
        data_sorted2 = case.data_mean[name + '_west'][idx_sorted2]
        # Axis limits padded by 10% of the south-patch extrema
        xlim = (min(min(data_sorted.ravel()), min(data_sorted2.ravel())) - 0.1*min(data_sorted.ravel()),
                max(max(data_sorted.ravel()), max(data_sorted2.ravel())) + 0.1*max(data_sorted.ravel()))
        # Use magnitude
        if name == 'U':
            data_sorted = np.sqrt(data_sorted[:, 0]**2 + data_sorted[:, 1]**2 + data_sorted[:, 2]**2)
            data_sorted2 = np.sqrt(data_sorted2[:, 0]**2 + data_sorted2[:, 1]**2 + data_sorted2[:, 2]**2)
        listx = (data_sorted, data_sorted2)
        listy = (points_sorted, points_sorted2)
        myplot = Plot2D(listx, listy, plot_type='infer',
                        show=False, save=True, name=name, xlabel=xlabel, ylabel=r'$\frac{z}{z_i}$ [-]',
                        figdir=case.avg_folder_path, figwidth='1/3', xlim=xlim)
        myplot.initializeFigure()
        myplot.plotFigure(linelabel=('South', 'West'))
        # Shaded band between z = 27 m and 153 m (normalized by 750)
        myplot.axes.fill_between(xlim, 27/750., 153/750., alpha=0.25)
        myplot.finalizeFigure()
    # --- Reference log-law profiles -------------------------------------
    kappa = .4    # von Karman constant (used in the log law below)
    uref = 8.     # reference velocity [m/s]
    zref = 90.    # reference height [m]
    z0 = .2       # roughness length [m]
    cmu = 0.03
    ustar = kappa*uref/np.log((zref + z0)/z0)  # friction velocity
    u = ustar/kappa*np.log((case.points['points_south'][:, 2][idx_sorted] + z0)/z0)
    k = ustar**2/np.sqrt(cmu)
    epsilon = ustar**3/kappa/(case.points['points_south'][:, 2][idx_sorted] + z0)
    ulim = (min(u) - 0.1*min(u),
            max(u) + 0.1*max(u))
    epslim = (min(epsilon) - 0.1*min(epsilon),
              max(epsilon) + 0.1*max(epsilon))
    myplot = Plot2D(u, points_sorted, plot_type='infer',
                    show=False, save=True, name='U_atmBC', xlabel='U [m/s]', ylabel="z/zi [-]", # r'$\frac{z}{z_i}$ [-]',
                    figdir=case.avg_folder_path, figwidth='1/3', xlim=ulim)
    myplot.initializeFigure()
    myplot.plotFigure() # linelabel=('South', 'West'))
    # NOTE(review): 'xlim' here is the leftover value from the plotting loop
    # above; 'ulim' was probably intended -- verify.
    myplot.axes.fill_between(xlim, 27/750., 153/750., alpha=0.25)
    myplot.finalizeFigure()
    # NOTE(review): '\epsilon' in the non-raw xlabel string is an invalid
    # escape sequence (DeprecationWarning); a raw string was probably intended.
    myplot = Plot2D(epsilon, points_sorted, plot_type='infer',
                    show=False, save=True, name='epsilon_atmBC', xlabel='\epsilon [m2/s3]', ylabel=r'$\frac{z}{z_i}$ [-]',
                    figdir=case.avg_folder_path, figwidth='1/3', xlim=epslim)
    myplot.initializeFigure()
    myplot.plotFigure()
    # NOTE(review): same 'xlim' reuse as above; 'epslim' was probably intended.
    myplot.axes.fill_between(xlim, 27/750., 153/750., alpha=0.25)
    myplot.finalizeFigure()
# kSouth = case.data['k_south']
# uSouth = case.data['U_south']
# casename = 'ALM_N_H_ParTurb'
# filenames = 'Cd'
# startTime1 = 20000
# stopTime1 = 22000
# frameSkip = 182#28
#
# turb = TurbineOutputs(casename = casename, casedir = '/media/yluan/Toshiba External Drive')
#
# turb.readPropertyData(filenames = filenames)
#
# turb.calculatePropertyMean(starttime = startTime1, stoptime = stopTime1)
#
# listX1 = (turb.times_selected[::frameSkip],)*3
# listY1 = (turb.data[filenames + '_Turb0_Bld0_mean'][::frameSkip],
# turb.data[filenames + '_Turb0_Bld1_mean'][::frameSkip],
# turb.data[filenames + '_Turb0_Bld2_mean'][::frameSkip])
# listY2 = (turb.data[filenames + '_Turb1_Bld0_mean'][::frameSkip],
# turb.data[filenames + '_Turb1_Bld1_mean'][::frameSkip],
# turb.data[filenames + '_Turb1_Bld2_mean'][::frameSkip])
#
# startTime2 = 21000
# stopTime2 = 22000
# turb.calculatePropertyMean(starttime = startTime2, stoptime = stopTime2)
#
# listX2 = (turb.times_selected[::frameSkip],)*3
# listY3 = (turb.data[filenames + '_Turb0_Bld0_mean'][::frameSkip],
# turb.data[filenames + '_Turb0_Bld1_mean'][::frameSkip],
# turb.data[filenames + '_Turb0_Bld2_mean'][::frameSkip])
# listY4 = (turb.data[filenames + '_Turb1_Bld0_mean'][::frameSkip],
# turb.data[filenames + '_Turb1_Bld1_mean'][::frameSkip],
# turb.data[filenames + '_Turb1_Bld2_mean'][::frameSkip])
#
# figDir = '/media/yluan/Toshiba External Drive/' + casename + '/turbineOutput/Result'
#
# # Custom colors
# colors, _ = Plot2D.setColors()
#
# plotsLabel = ('Blade 1', 'Blade 2', 'Blade 3')
# transparentBg = False
# xLim1 = (startTime1, stopTime1)
# yLim = (min(np.min(listY1), np.min(listY2), np.min(listY3), np.min(listY4)), max(np.max(listY1), np.max(listY2), np.max(listY3), np.max(listY4)))
#
# show = False
#
# clPlot = Plot2D(listY1, listX1, save = True, name = 'Turb0_' + filenames + '1', xLabel = 'Time [s]', yLabel = r'$C_d$ [-]', figDir = figDir, xLim = yLim, yLim = xLim1, figWidth = 'half', figHeightMultiplier = 2., show = show, colors = colors[:3][:], gradientBg = True, gradientBgRange = (startTime1, 21800), gradientBgDir = 'y')
# clPlot.initializeFigure()
#
# clPlot.plotFigure(plotsLabel = plotsLabel)
#
# clPlot.finalizeFigure(transparentBg = transparentBg)
#
# # clPlot2 = Plot2D(listX1, listY2, save = True, name = 'Turb1_' + filenames + '1', xLabel = 'Time [s]', yLabel = r'$C_d$ [-]', figDir = figDir, xLim = xLim1, yLim = yLim, figWidth = 'full', show = show, colors = colors[3:6][:], gradientBg = True, gradientBgRange = (startTime1, 21800))
# # clPlot2.initializeFigure()
# # clPlot2.plotFigure(plotsLabel = plotsLabel)
# # clPlot2.finalizeFigure(transparentBg = transparentBg)
# #
# #
# #
# #
# #
# #
# # xLim2 = (startTime2, stopTime2)
# #
# # show = True
# #
# # clPlot = Plot2D(listX2, listY3, save = True, name = 'Turb0_' + filenames + '2', xLabel = 'Time [s]', yLabel = r'$C_d$ [-]',
# # figDir = figDir, xLim = xLim2, yLim = yLim, figWidth = 'full', show = show, colors = colors[:3][:], gradientBg = True, gradientBgRange = (startTime1, 21800))
# # clPlot.initializeFigure()
# #
# # clPlot.plotFigure(plotsLabel = plotsLabel)
# #
# # clPlot.finalizeFigure(transparentBg = transparentBg)
# #
# # clPlot2 = Plot2D(listX2, listY4, save = True, name = 'Turb1_' + filenames + '2', xLabel = 'Time [s]',
# # yLabel = r'$C_d$ [-]', figDir = figDir, xLim = xLim2, yLim = yLim, figWidth = 'full', show = show, colors = colors[3:6][:], gradientBg = True, gradientBgRange = (startTime1, 21800))
# # clPlot2.initializeFigure()
# # clPlot2.plotFigure(plotsLabel | |
# Repository: minsuu/vqa_mcb
import caffe
import numpy as np
import random
import os
import sys
import re
import json
import spacy
from operator import mul
GLOVE_EMBEDDING_SIZE = 300
CURRENT_DATA_SHAPE = None
SPATIAL_COORD = None
GLOVE = None
class LoadVQADataProvider:
def __init__(self, ques_file_path, img_file_pre, vdict_path, adict_path, \
        batchsize=128, max_length=15, n_ans_vocabulary=1000, mode='train', data_shape=(2048)):
    """Data provider for VQA batches (test mode only, asserted below).

    Loads the question file plus the question-word and answer vocabularies,
    and precomputes two normalized 14x14 coordinate planes that create_batch
    can concatenate to image features when SPATIAL_COORD is set.

    NOTE(review): the default data_shape=(2048) is the int 2048, not a
    1-tuple; create_batch applies reduce(mul, self.data_shape), which needs
    an iterable -- callers presumably always pass a real tuple; confirm.
    """
    self.batchsize = batchsize
    self.d_vocabulary = None
    self.batch_index = None
    self.batch_len = None
    self.rev_adict = None   # lazily built id -> answer map (see vec_to_answer)
    self.max_length = max_length
    self.n_ans_vocabulary = n_ans_vocabulary
    self.mode = mode
    self.data_shape = data_shape
    assert self.mode == 'test'
    # spatial coordinates: x plane (rows repeated) and y plane (columns repeated)
    normalized_coords = np.linspace(0, 2, num=14, endpoint=True, dtype=np.float32) / 200
    self.x_coords = np.tile(normalized_coords, (14, 1)).reshape(1, 14, 14)
    normalized_coords = normalized_coords.reshape((14, 1))
    self.y_coords = np.tile(normalized_coords, (1, 14)).reshape(1, 14, 14)
    self.coords = np.concatenate([self.x_coords, self.y_coords])
    self.quesFile = ques_file_path
    self.img_file_pre = img_file_pre
    # load ques file: question_id -> {question string, image id}
    with open(self.quesFile,'r') as f:
        print 'reading : ', self.quesFile
        qdata = json.load(f)
        qdic = {}
        for q in qdata['questions']:
            qdic[q['question_id']] = { 'qstr':q['question'], 'iid':q['image_id']}
        self.qdic = qdic
    # load vocabulary (question words and answers, each word/answer -> id)
    with open(vdict_path,'r') as f:
        vdict = json.load(f)
    with open(adict_path,'r') as f:
        adict = json.load(f)
    self.n_vocabulary, self.vdict = len(vdict), vdict
    self.n_ans_vocabulary, self.adict = len(adict), adict
    # spaCy pipeline supplying GloVe vectors for question words
    self.nlp = spacy.load('en', vectors='en_glove_cc_300_1m_vectors')
    self.glove_dict = {} # word -> glove vector
def getQuesIds(self):
    """Return all question ids present in the loaded question file."""
    return self.qdic.keys()
def getImgId(self,qid):
    """Return the image id associated with question *qid*."""
    entry = self.qdic[qid]
    return entry['iid']
def getQuesStr(self,qid):
    """Return the raw question string for question *qid*."""
    entry = self.qdic[qid]
    return entry['qstr']
def getAnsObj(self,qid):
    """Return the answer annotation for *qid*; -1 in test modes (no answers)."""
    if self.mode in ('test-dev', 'test'):
        return -1
    return self.adic[qid]
def seq_to_list(self, s):
    """Tokenize a question string: lowercase, strip punctuation, split on spaces.

    Hyphen and slash are turned into word separators; empty tokens are dropped.
    """
    t_str = s.lower()
    # Punctuation characters are removed entirely
    for punct in [r'\?',r'\!',r'\'',r'\"',r'\$',r'\:',r'\@',r'\(',r'\)',r'\,',r'\.',r'\;']:
        t_str = re.sub( punct, '', t_str)
    # Hyphen and slash act as word separators
    for sep in [r'\-',r'\/']:
        t_str = re.sub( sep, ' ', t_str)
    q_list = re.sub(r'\?','',t_str.lower()).split(' ')
    q_list = [w for w in q_list if len(w) > 0]
    return q_list
def extract_answer(self,answer_obj):
    """Return the most popular of the 10 annotated answers.

    Ties are broken by the lexicographically largest answer string;
    returns -1 in test modes where no answers exist.
    """
    if self.mode in ('test-dev', 'test'):
        return -1
    # Count occurrences of each answer string
    counts = {}
    for i in range(10):
        ans = answer_obj[i]['answer']
        counts[ans] = counts.get(ans, 0) + 1
    # Highest count wins; (count, answer) tuple ordering breaks ties
    max_key = max((v,k) for (k,v) in counts.items())[1]
    return max_key
def extract_answer_prob(self,answer_obj):
    """Return a random annotated answer among those inside the answer vocabulary.

    Falls back to the sentinel 'hoge' when none of the answers is in-vocabulary
    (val/test modes); raises in training mode. Returns -1 in test modes.
    """
    if self.mode in ('test-dev', 'test'):
        return -1
    # Keep only answers present in the answer vocabulary
    in_vocab = [a['answer'] for a in answer_obj if a['answer'] in self.adict]
    if len(in_vocab) == 0:
        if self.mode in ('val', 'test-dev', 'test'):
            return 'hoge'
        raise Exception("This should not happen.")
    return random.choice(in_vocab)
def create_answer_vocabulary_dict(self, genome=False):
    """Build the answer vocabulary from the n_ans_vocabulary most frequent answers.

    :param genome: if True, skip questions whose qid starts with 'g'.
    :returns: (n_ans_vocabulary, adict_nid) where adict_nid maps each kept
        answer string to a dense id in 0..n_ans_vocabulary-1.
    """
    n_ans_vocabulary=self.n_ans_vocabulary
    qid_list = self.getQuesIds()
    adict = {'':0}
    nadict = {'':1000000}   # huge count keeps '' inside the kept top-n
    vid = 1
    for qid in qid_list:
        if genome and qid[0] == 'g':
            continue
        answer_obj = self.getAnsObj(qid)
        answer_list = [ans['answer'] for ans in answer_obj]
        for q_ans in answer_list:
            # create dict
            if adict.has_key(q_ans):
                nadict[q_ans] += 1
            else:
                nadict[q_ans] = 1
                adict[q_ans] = vid
                vid +=1
    # debug views: alphabetical, by ascending count, by insertion id
    klist = []
    for k,v in sorted(nadict.items()):
        klist.append((k,v))
    nalist = []
    for k,v in sorted(nadict.items(), key=lambda x:x[1]):
        nalist.append((k,v))
    alist = []
    for k,v in sorted(adict.items(), key=lambda x:x[1]):
        alist.append((k,v))
    # Keep only the n_ans_vocabulary most frequent answers; rarer answers
    # are deleted and their occurrences counted in n_del_ans.
    # NOTE(review): assumes more than n_ans_vocabulary distinct answers,
    # otherwise nalist[:-n_ans_vocabulary] silently keeps everything.
    n_del_ans = 0
    n_valid_ans = 0
    adict_nid = {}
    for i, w in enumerate(nalist[:-n_ans_vocabulary]):
        del adict[w[0]]
        n_del_ans += w[1]
    for i, w in enumerate(nalist[-n_ans_vocabulary:]):
        n_valid_ans += w[1]
        adict_nid[w[0]] = i
    print 'Valid answers are : ', n_valid_ans
    print 'Invalid answers are : ', n_del_ans
    return n_ans_vocabulary, adict_nid
def create_vocabulary_dict(self):
    """Build the question-word vocabulary from all loaded questions.

    :returns: (n_vocabulary, vdict) where vdict maps each word to an
        integer id; id 0 is reserved for the empty/unknown word ''.
    """
    #qid_list = self.vqa.getQuesIds()
    qid_list = self.getQuesIds()
    vdict = {'':0}
    ndict = {'':0}   # word -> occurrence count beyond the first sighting
    vid = 1
    for qid in qid_list:
        # sequence to list
        q_str = self.getQuesStr(qid)
        q_list = self.seq_to_list(q_str)
        # assign the next id to each previously unseen word
        # ('in' replaces the Python-2-only dict.has_key)
        for w in q_list:
            if w in vdict:
                ndict[w] += 1
            else:
                ndict[w] = 1
                vdict[w] = vid
                vid +=1
    # The sorted debug lists the original built here (klist/nlist/vlist)
    # were unused dead code and have been removed; the vocabulary size is
    # simply the number of entries in vdict (including '').
    n_vocabulary = len(vdict)
    return n_vocabulary, vdict
def qlist_to_vec(self, max_length, q_list):
    """
    Converts a list of words into a format suitable for the embedding layer.

    The question is right-aligned inside max_length: leading positions are
    zero padding, the final len(q_list) positions hold the words.

    Arguments:
    max_length -- the maximum length of a question sequence
    q_list -- a list of words which are the tokens in the question

    Returns:
    qvec -- A max_length length vector containing one-hot indices for each word
    cvec -- A max_length length sequence continuation indicator vector
            (0 at padding and at the first word, 1 for subsequent words)
    glove_matrix -- A max_length x GLOVE_EMBEDDING_SIZE matrix containing the glove embedding for
    each word
    """
    qvec = np.zeros(max_length)
    cvec = np.zeros(max_length)
    glove_matrix = np.zeros(max_length * GLOVE_EMBEDDING_SIZE).reshape(max_length, GLOVE_EMBEDDING_SIZE)
    for i in xrange(max_length):
        if i < max_length - len(q_list):
            # padding region before the question starts
            cvec[i] = 0
        else:
            w = q_list[i-(max_length-len(q_list))]
            # cache GloVe vectors per word to avoid repeated nlp() calls
            if w not in self.glove_dict:
                self.glove_dict[w] = self.nlp(u'%s' % w).vector
            glove_matrix[i] = self.glove_dict[w]
            # is the word in the vocabulary? OOV words fall back to ''
            if self.vdict.has_key(w) is False:
                w = ''
            qvec[i] = self.vdict[w]
            cvec[i] = 0 if i == max_length - len(q_list) else 1
    return qvec, cvec, glove_matrix
def answer_to_vec(self, ans_str):
    """Map an answer string to its vocabulary id (the '' id when OOV); -1 in test modes."""
    if self.mode in ('test-dev', 'test'):
        return -1
    if ans_str in self.adict:
        return self.adict[ans_str]
    # Out-of-vocabulary answers map to the empty-string entry
    return self.adict['']
def vec_to_answer(self, ans_symbol):
    """Map a vocabulary id back to its answer string (reverse map built lazily)."""
    if self.rev_adict is None:
        # Invert adict once and cache it on the instance
        self.rev_adict = dict((v, k) for k, v in self.adict.items())
    return self.rev_adict[ans_symbol]
    def create_batch(self,qid_list):
        """Build one batch for the given question ids.

        Returns (qvec, cvec, ivec, avec, glove_matrix); the first axis of
        every array is the batch dimension of size self.batchsize.
        """
        # word indices, continuation flags, image features, answer ids, glove embeddings
        qvec = (np.zeros(self.batchsize*self.max_length)).reshape(self.batchsize,self.max_length)
        cvec = (np.zeros(self.batchsize*self.max_length)).reshape(self.batchsize,self.max_length)
        ivec = (np.zeros(self.batchsize*reduce(mul, self.data_shape))).reshape(self.batchsize,*self.data_shape)
        avec = (np.zeros(self.batchsize)).reshape(self.batchsize)
        glove_matrix = np.zeros(self.batchsize * self.max_length * GLOVE_EMBEDDING_SIZE).reshape(\
            self.batchsize, self.max_length, GLOVE_EMBEDDING_SIZE)
        for i,qid in enumerate(qid_list):
            # load raw question information
            q_str = self.getQuesStr(qid)
            q_ans = self.getAnsObj(qid)
            q_iid = self.getImgId(qid)
            # convert question to vec
            q_list = self.seq_to_list(q_str)
            t_qvec, t_cvec, t_glove_matrix = self.qlist_to_vec(self.max_length, q_list)
            # load the precomputed image feature; the qid's type/prefix selects which
            # feature directory is used — presumably dataset splits; TODO confirm prefixes
            try:
                if type(qid) == int:
                    t_ivec = np.load(self.img_file_pre + str(q_iid).zfill(12) + '.jpg.npz')['x']
                    # L2-normalize the feature
                    t_ivec = ( t_ivec / np.sqrt((t_ivec**2).sum()) )
                elif qid[0] == 't':
                    t_ivec = np.load(self.img_file_pre_t + str(q_iid).zfill(12) + '.jpg.npz')['x']
                    t_ivec = ( t_ivec / np.sqrt((t_ivec**2).sum()) )
                elif qid[0] =='v':
                    t_ivec = np.load(self.img_file_pre_v + str(q_iid).zfill(12) + '.jpg.npz')['x']
                    t_ivec = ( t_ivec / np.sqrt((t_ivec**2).sum()) )
                elif qid[0] == 'g':
                    # genome-style ids are not zero-padded
                    t_ivec = np.load(self.img_file_pre_g + str(q_iid) + '.jpg.npz')['x']
                    t_ivec = ( t_ivec / np.sqrt((t_ivec**2).sum()) )
                else:
                    raise Exception('Error occured here')
                    # NOTE(review): the two lines below are unreachable after the raise
                    t_ivec = np.load(self.img_file_pre + str(q_iid).zfill(12) + '.jpg.npz')['x']
                    t_ivec = ( t_ivec / np.sqrt((t_ivec**2).sum()) )
                if SPATIAL_COORD:
                    # append the fixed spatial-coordinate channels to the feature
                    t_ivec = np.concatenate([t_ivec, self.coords.copy()])
            except:
                # NOTE(review): bare except — any failure (missing file, bad key) falls
                # back to an all-zeros feature row via numpy broadcasting
                t_ivec = 0.
                print 'data not found for qid : ', q_iid, self.mode
            # convert answer to vec (probabilistic sampling only during training)
            if self.mode == 'val' or self.mode == 'test-dev' or self.mode == 'test':
                q_ans_str = self.extract_answer(q_ans)
            else:
                q_ans_str = self.extract_answer_prob(q_ans)
            t_avec = self.answer_to_vec(q_ans_str)
            qvec[i,...] = t_qvec
            cvec[i,...] = t_cvec
            ivec[i,...] = t_ivec
            avec[i,...] = t_avec
            glove_matrix[i,...] = t_glove_matrix
        return qvec, cvec, ivec, avec, glove_matrix
    def get_batch_vec(self):
        """Return the next batch plus bookkeeping info.

        Returns the create_batch(...) tuple extended with
        (qid_list, iid_list, epoch_counter). In training modes, questions
        whose answers are all out of vocabulary are skipped.
        """
        # lazy initialization on the first call
        if self.batch_len is None:
            #qid_list = self.vqa.getQuesIds()
            self.n_skipped = 0
            qid_list = self.getQuesIds()
            # random.shuffle(qid_list)
            self.qid_list = qid_list
            self.batch_len = len(qid_list)
            self.batch_index = 0
            self.epoch_counter = 0
        def has_at_least_one_valid_answer(t_qid):
            # True if any of the question's answers is in the answer vocabulary
            # (falls through to an implicit None/falsy otherwise)
            #answer_obj = self.vqa.qa[t_qid]['answers']
            answer_obj = self.getAnsObj(t_qid)
            answer_list = [ans['answer'] for ans in answer_obj]
            for ans in answer_list:
                if self.adict.has_key(ans):
                    return True
        counter = 0
        t_qid_list = []
        t_iid_list = []
        # collect question ids until the batch is full, wrapping around at epoch end
        while counter < self.batchsize:
            # get qid
            t_qid = self.qid_list[self.batch_index]
            # get answer
            #t_ans = self.extract_answer(self.vqa.qa[t_qid]['answers'])
            # get image id
            #t_ann = self.vqa.loadQA([t_qid])[0]
            #t_iid = t_ann['image_id']
            t_iid = self.getImgId(t_qid)
            # eval modes keep every question; training modes skip unanswerable ones
            if self.mode == 'val' or self.mode == 'test-dev' or self.mode == 'test':
                t_qid_list.append(t_qid)
                t_iid_list.append(t_iid)
                counter += 1
            elif has_at_least_one_valid_answer(t_qid):
                t_qid_list.append(t_qid)
                t_iid_list.append(t_iid)
                counter += 1
            else:
                self.n_skipped += 1
            # advance the cursor; reset it (new epoch) when the list is exhausted
            if self.batch_index < self.batch_len-1:
                self.batch_index += 1
            else:
                self.epoch_counter += 1
                #qid_list = self.vqa.getQuesIds()
                qid_list = self.getQuesIds()
                # random.shuffle(qid_list)
                self.qid_list = qid_list
                self.batch_index = 0
                print("%d questions were skipped in a single epoch" % self.n_skipped)
                self.n_skipped = 0
        t_batch = self.create_batch(t_qid_list)
        return t_batch + (t_qid_list, t_iid_list, self.epoch_counter)
class VQADataProviderLayer(caffe.Layer):
"""
Provide input data for VQA.
"""
def setup(self, bottom, top):
self.batchsize = json.loads(self.param_str)['batchsize']
names = ['data','cont','feature','label']
if GLOVE:
names.append('glove')
self.top_names = names
top[0].reshape(15,self.batchsize)
top[1].reshape(15,self.batchsize)
top[2].reshape(self.batchsize, *CURRENT_DATA_SHAPE)
top[3].reshape(self.batchsize)
if GLOVE:
top[4].reshape(15,self.batchsize,GLOVE_EMBEDDING_SIZE)
self.mode = json.loads(self.param_str)['mode']
if self.mode == 'val' or self.mode == 'test-dev' or self.mode == 'test':
pass
else:
raise NotImplementedError
    def reshape(self, bottom, top):
        # blob shapes are fixed once in setup(); nothing to do per batch
        pass
def forward(self, bottom, top):
if self.mode == | |
# -*- coding: utf8 -*-
# engine
# helper class for cuatro
# <NAME> 2021
import numpy as np
import random
import copy
import time
version = 'engine.v.1.0.0'
class State:
    """instance attributes:
    size: int: size of one side of the board (defines a cube that holds the game)
    win: int: how many items in a row constitute a win of the game
    state: numpy array of shape (size, size, size): 3D matrix containing 0 if a position is empty,
        1 for player one items and 2 for player two items
    winner: int: winner of the game (0 until one player has won, then 1 or 2)
    winning_diag: list of 'win' tuples of three ints (coordinates of the positions that constitute the win of the game)
    next_turn: int (1 or 2): player that will play next
    previous_turn: int (1 or 2): player that has played last
    play: numpy array of shape (size, size) containing ints. It contains how many plays have been performed on each
        of the 2d coordinates in the board. It marks the next third coordinate for each 2d coordinate play
    pl1: numpy array of shape (size, size, size) containing 1 if the position is occupied by an item of player 1 and
        0 otherwise
    pl2: numpy array of shape (size, size, size) containing 1 if the position is occupied by an item of player 2 and
        0 otherwise
    empty: numpy array of shape (size, size, size) containing 1 if the position is empty and 0 otherwise
    last_play: tuple of two ints containing the last play performed
    last_3dplay: tuple of three ints containing the last 3d play performed
    game_over: bool: True if all the positions of the 3d board are occupied or one of the players has won, False
        otherwise
    diags: dictionary of lists of lists of 'win' tuples containing 3 ints. The key of the dictionary is a tuple with
        3 ints (coordinates of a 3d position). The values are lists of diags (a diag is a list containing 'win'
        coordinates, which are tuples of three ints). All diags in a list contain the coordinate of the key.
    history: list of dicts. Each dict contains the history of a turn. The dictionary fields are 'turn', 'play',
        'play3d', 'offensive_score', 'defensive_score' and 'offensive_diag'
    valid_pos: list of tuples with 2 ints: list of all valid plays at this time
    valid_3dpos: list of tuples with 3 ints: list of all valid 3d plays at this time
    """
def __init__(self, size=5, win=4, next_turn=1):
"""this method initiallizes the instance
size: int: size of the board"""
random.seed(time.time())
self.size = size
self.win = win
self.state = np.zeros((self.size, self.size, self.size)).astype('int8')
self.winner = 0
self.winning_diag = None
self.next_turn = next_turn
self.previous_turn = 3 - next_turn
self.play = np.array([[0 for _ in range(self.size)] for _ in range(self.size)])
self.pl1 = None
self.pl2 = None
self.get_pl()
self.last_play = None # last play done
self.last_3dplay = None
self.valid_pos = None
self.valid_3dpos = None
self.get_valid_pos()
self.game_over = False # whether game is over or not
self.history = []
# get the diagonals of size self.win that cross very cell in the 3d board
self.diags = dict()
diags = []
for i in range(self.size):
for j in range(self.size):
for k in range(self.size):
self.diags[(i, j, k)] = []
diags += self.get_diags(i, j, k)
for i in range(self.size):
for j in range(self.size):
for k in range(self.size):
for diag in diags:
if (i, j, k) in diag:
self.diags[(i, j, k)].append(diag)
#for key in self.diags.keys(): # todo eliminate the reformatted version after changes
#self.old_diags[key] = [[tuple(diag[i][j] for i in range(self.win)) for j in range(3)] for diag in self.diags[key]]
def get_valid_pos(self):
"""updates the valid_pos and valid_3dpos instance attributes
"""
self.valid_pos = []
self.valid_3dpos = []
for i in range(self.size):
for j in range(self.size):
if self.play[i][j] < self.size:
self.valid_pos.append((i, j))
self.valid_3dpos.append((i, j, self.play[i][j]))
def get_diags(self, i, j, k):
"""creates all the diagonals of self.win me"""
diags = []
diags.append([(i + a, j, k) if i + a < self.size else None for a in range(self.win)])
diags.append([(i, j + a, k) if j + a < self.size else None for a in range(self.win)])
diags.append([(i, j, k + a) if k + a < self.size else None for a in range(self.win)])
diags.append([(i + a, j + a, k) if i + a < self.size and j + a < self.size else None for a in range(self.win)])
diags.append([(i + a, j - a, k) if i + a < self.size and j - a > -1 else None for a in range(self.win)])
diags.append([(i + a, j, k + a) if i + a < self.size and k + a < self.size else None for a in range(self.win)])
diags.append([(i + a, j, k - a) if i + a < self.size and k - a > -1 else None for a in range(self.win)])
diags.append([(i, j + a, k + a) if j + a < self.size and k + a < self.size else None for a in range(self.win)])
diags.append([(i, j + a, k - a) if j + a < self.size and k - a > -1 else None for a in range(self.win)])
diags.append([(i + a, j + a, k + a) if i + a < self.size and j + a < self.size and k + a < self.size else None for a in range(self.win)])
diags.append([(i - a, j + a, k + a) if i - a > -1 and j + a < self.size and k + a < self.size else None for a in range(self.win)])
diags.append([(i + a, j - a, k + a) if i + a < self.size and j - a > -1 and k + a < self.size else None for a in range(self.win)])
diags.append([(i - a, j - a, k + a) if i - a > -1 and j - a > -1 and k + a < self.size else None for a in range(self.win)])
diags = [diag for diag in diags if None not in diag]
return diags
def get_score(self, pos3d):
"""computes the score of playing in position pos for both the next_turn (offensive score) and the
previous_turn (defensive score) and returns scores, best diag etc (#todo complete this)
pos3d: tuple of three ints: (coordinates of the play)
returns: score: tuple of:
offensive_score: float
num_offensive_score: int
defensive_score: float
num_defensive_score: int
best_diag: list of tuples containing 3 ints"""
own_score = []
other_score = []
best_diag = None
for diag in self.diags[pos3d]:
own_score.append(0.)
other_score.append(0.)
for item in diag:
if item in self.valid_3dpos: # the position of item is reachable and it is empty
own_score[-1] += 0.5
other_score[-1] += 0.5
else: # the position of item is not reachable but may or may not be empty
if self.next_turn == 1:
if self.pl1[item]: # the position of item is occupied by the current turn
own_score[-1] += 1 # the position of item is occupied by the current turn
other_score[-1] -= self.win
elif self.pl2[item]: # the position of item is occupied by the def
own_score[-1] -= self.win
other_score[-1] += 1
else: # the position of item is not occupied (and it is not reachable either)
own_score[-1] += 0.1
other_score[-1] += 0.1
if self.next_turn == 2:
if self.pl2[item]: # the position of item is occupied by the current turn
own_score[-1] += 1 # the position of item is occupied by the current turn
other_score[-1] -= self.win
elif self.pl1[item]: # the position of item is occupied by the def
own_score[-1] -= self.win
other_score[-1] += 1
else: # the position of item is not occupied (and it is not reachable either)
own_score[-1] += 0.1
other_score[-1] += 0.1
if own_score[-1] == max(own_score):
best_diag = [pos for pos in diag] # make a copy of diag just in case
offensive_score = max(own_score)
num_offensive_score = own_score.count(offensive_score)
defensive_score = max(other_score)
num_defensive_score = other_score.count(defensive_score)
return offensive_score, num_offensive_score, defensive_score, num_defensive_score, best_diag
def get_best_score_play(self):
"""gets the play for which the | |
'status': 'accepted', 'year': '2020'})
self.assertEqual(response.status_code, 200)
data = response.json()['data']
lots = data['lots']
self.assertEqual(len(lots), 1)
def test_simple_template_import_missing_data_cannot_validate(self):
# as producer
# upload lines that cannot be validated
jsoned = self.upload_file('carbure_template_simple_missing_data_cannot_validate.xlsx', self.test_producer)
# get number of lots in excel file
total_lots = jsoned['data']['total']
nb_lots = jsoned['data']['loaded']
# make sure all lines were loaded minus the one missing biocarburant_code
self.assertEqual(nb_lots, total_lots - 1)
# make sure they were saved successfully
lots = LotV2.objects.filter(added_by_user=self.user1)
self.assertEqual(lots.count(), nb_lots)
txs = LotTransaction.objects.filter(lot__in=lots)
self.assertEqual(txs.count(), nb_lots)
# validate-all
txs = LotTransaction.objects.filter(lot__status='Draft')
response = self.client.post(reverse('api-v3-validate-lot'), {'entity_id': self.test_producer.id, 'tx_ids': [tx.id for tx in txs]})
self.assertEqual(response.status_code, 200)
# get drafts
lots = LotV2.objects.filter(added_by_user=self.user1, status='Draft')
debug_transactions()
self.assertEqual(lots.count(), nb_lots) # they are still all with status draft
# get drafts via api - same result expected
response = self.client.get(reverse('api-v3-lots-get'), {'entity_id': self.test_producer.id, 'status': 'draft', 'year': '2020'})
self.assertEqual(response.status_code, 200)
data = response.json()['data']
lots = data['lots']
self.assertEqual(len(lots), nb_lots)
# make sure they all have a GenericError
for lot in lots:
errors = GenericError.objects.filter(tx=lot['id']).count()
self.assertGreater(errors, 0)
# delete-all-drafts
txs = LotTransaction.objects.filter(lot__status='Draft')
response = self.client.post(reverse('api-v3-delete-lot'), {'entity_id': self.test_producer.id, 'tx_ids': [tx.id for tx in txs]})
self.assertEqual(response.status_code, 200)
res = response.json()
self.assertEqual(res['deleted'], nb_lots)
self.assertEqual(LotV2.objects.all().count(), 0)
self.assertEqual(LotTransaction.objects.all().count(), 0)
self.assertEqual(GenericError.objects.all().count(), 0)
def test_simple_template_import_cannot_validate(self):
# as producer
# upload lines that cannot be validated
jsoned = self.upload_file('carbure_template_simple_wrong_data_cannot_validate.xlsx', self.test_producer)
# get number of lots in excel file
nb_lots = jsoned['data']['total']
# make sure all lines were loaded
self.assertEqual(nb_lots, jsoned['data']['loaded'])
# make sure they were saved successfully
lots = LotV2.objects.filter(added_by_user=self.user1)
self.assertEqual(lots.count(), nb_lots)
txs = LotTransaction.objects.filter(lot__in=lots)
self.assertEqual(txs.count(), nb_lots)
# validate-all
txs = LotTransaction.objects.filter(lot__status='Draft')
response = self.client.post(reverse('api-v3-validate-lot'), {'entity_id': self.test_producer.id, 'tx_ids': [tx.id for tx in txs]})
self.assertEqual(response.status_code, 200)
# expect (nb_lots - nb_invalid_dates) submitted and 0 valid (2 lots have a stupid date)
j = response.json()['data']
self.assertEqual(j['submitted'], nb_lots)
self.assertEqual(j['valid'], 0)
# get drafts
lots = LotV2.objects.filter(added_by_user=self.user1, status='Draft')
self.assertEqual(lots.count(), nb_lots) # they are still all with status draft
# get drafts via api - same result expected
response = self.client.get(reverse('api-v3-lots-get'), {'entity_id': self.test_producer.id, 'status': 'draft', 'year': '2020'})
self.assertEqual(response.status_code, 200)
data = response.json()['data']
lots = data['lots']
self.assertEqual(len(lots), nb_lots - 2) # 3 lots have a stupid date that won't be counted in 2020
# make sure they all have an error
for lot in lots:
errors = GenericError.objects.filter(tx=lot['id']).count()
self.assertGreater(errors, 0)
# delete-all-drafts
txs = LotTransaction.objects.filter(lot__status='Draft')
response = self.client.post(reverse('api-v3-delete-lot'), {'entity_id': self.test_producer.id, 'tx_ids': [tx.id for tx in txs]})
self.assertEqual(response.status_code, 200)
res = response.json()
self.assertEqual(res['deleted'], nb_lots)
def test_duplicates_producer(self):
# cleanup db
LotTransaction.objects.all().delete()
LotV2.objects.all().delete()
# as producer, create lot
dae = 'TEST2020FR00923-DUP-32094'
lot = {
'production_site': self.production_site.name,
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'volume': 15000,
'pays_origine_code': 'FR',
'supplier_certificate': 'ISCC-TOTO-02',
'eec': 1,
'ep': 5,
'etd': 12,
'dae': dae,
'delivery_date': '2020-12-31',
'client': self.test_operator.name,
'delivery_site': '001',
'entity_id': self.test_producer.id,
}
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
# validate
tx = LotTransaction.objects.get(dae=dae)
response = self.client.post(reverse('api-v3-validate-lot'), {'tx_ids': [tx.id], 'entity_id': self.test_producer.id})
self.assertEqual(response.status_code, 200)
# create same lot
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
# lot is flagged as duplicate
tx = LotTransaction.objects.get(dae=dae, lot__status='Draft')
self.assertEqual(tx.potential_duplicate, True)
# as operator, create same lot
lot['production_site'] = ''
lot['supplier_certificate'] = 'ISCC-TOTO-02'
lot['production_site_commissioning_date'] = '11/12/1998'
lot['producer_name'] = self.test_producer.name
lot['entity_id'] = self.test_operator.id
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
tx = LotTransaction.objects.get(dae=dae, lot__added_by=self.test_operator)
self.assertEqual(tx.potential_duplicate, True)
def test_dates_format(self):
# cleanup db
LotTransaction.objects.all().delete()
LotV2.objects.all().delete()
# as producer, create lot
dae = 'TEST2020FR00923-DUP-32094'
lot = {
'production_site': "unknown production site",
'production_site_commissioning_date': '2001-12-01',
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'volume': 15000,
'pays_origine_code': 'FR',
'eec': 1,
'ep': 5,
'etd': 12,
'dae': dae,
'delivery_date': '2020-12-01',
'client': self.test_operator.name,
'delivery_site': '001',
'entity_id': self.test_producer.id,
}
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
# check
tx = LotTransaction.objects.get(dae=dae)
dt1 = datetime.date(2001, 12, 1)
dt2 = datetime.date(2020, 12, 1)
self.assertEqual(tx.lot.unknown_production_site_com_date, dt1)
self.assertEqual(tx.delivery_date, dt2)
LotTransaction.objects.all().delete()
LotV2.objects.all().delete()
# as producer, create lot
dae = 'TEST2020FR00923-DUP-32094'
lot = {
'production_site': "unknown production site",
'production_site_commissioning_date': '01/12/2001',
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'volume': 15000,
'pays_origine_code': 'FR',
'eec': 1,
'ep': 5,
'etd': 12,
'dae': dae,
'delivery_date': '01/12/2020',
'client': self.test_operator.name,
'delivery_site': '001',
'entity_id': self.test_producer.id,
}
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
# check
tx = LotTransaction.objects.get(dae=dae)
dt1 = datetime.date(2001, 12, 1)
dt2 = datetime.date(2020, 12, 1)
self.assertEqual(tx.lot.unknown_production_site_com_date, dt1)
self.assertEqual(tx.delivery_date, dt2)
def create_lot(self, **kwargs):
producer = self.test_producer
production_site = self.production_site
lot = {
'supplier_certificate': 'ISCC-TOTO-02',
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'producer': producer.name,
'production_site': production_site.name,
'volume': 15000,
'pays_origine_code': 'FR',
'eec': 1,
'ep': 5,
'etd': 12,
'dae': get_random_dae(),
'delivery_date': '2020-12-31',
'client': self.test_operator.name,
'delivery_site': '001',
'entity_id': self.test_producer.id,
}
lot.update(kwargs)
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
data = response.json()['data']
tx_id = data['id']
lot_id = data['lot']['id']
return tx_id, lot_id
def test_production_site_strip(self):
psitename = ' ' + self.production_site.name + ' '
tx_id, lot_id = self.create_lot(production_site=psitename)
lot = LotV2.objects.get(id=lot_id)
self.assertEqual(lot.production_site_is_in_carbure, True)
self.assertEqual(lot.carbure_production_site.name, self.production_site.name)
def test_download_templates(self):
response = self.client.get(reverse('api-v3-template-simple'), {'entity_id': self.test_producer.id})
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('api-v3-template-advanced'), {'entity_id': self.test_producer.id})
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('api-v3-template-blend'), {'entity_id': self.test_operator.id})
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('api-v3-template-trader'), {'entity_id': self.entity3.id})
self.assertEqual(response.status_code, 200)
def test_real_behaviour(self):
# download simple template
response = self.client.get(reverse('api-v3-template-simple'), {'entity_id': self.test_producer.id})
self.assertEqual(response.status_code, 200)
filecontent = response.content
# upload simple template
f = SimpleUploadedFile("template.xslx", filecontent)
response = self.client.post(reverse('api-v3-upload'), {'entity_id': self.test_producer.id, 'file': f})
self.assertEqual(response.status_code, 200)
self.assertEqual(LotV2.objects.all().count(), 10)
self.assertEqual(LotTransaction.objects.all().count(), 10)
# download advanced template
response = self.client.get(reverse('api-v3-template-advanced'), {'entity_id': self.test_producer.id})
self.assertEqual(response.status_code, 200)
filecontent = response.content
# upload advanced template
f = SimpleUploadedFile("templateadvanced.xslx", filecontent)
response = self.client.post(reverse('api-v3-upload'), {'entity_id': self.test_producer.id, 'file': f})
self.assertEqual(LotV2.objects.all().count(), 20)
self.assertEqual(LotTransaction.objects.all().count(), 20)
def test_client_case_sensitiveness(self):
# as producer / trader
dae = 'TEST2020FR00923-094-32094'
lot = {
'production_site': self.production_site.name,
'production_site_commissioning_date': '01/12/2002',
'supplier_reference': 'PRODSITEREFERENCE',
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'volume': 15000,
'pays_origine_code': 'FR',
'eec': 1.5,
'ep': 5,
'etd': 12,
'dae': dae,
'delivery_date': '2020-12-31',
'client': self.test_operator.name.lower(),
'delivery_site': '1',
'entity_id': self.test_producer.id,
}
# add manual lot
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
tx = LotTransaction.objects.get(dae=dae)
response = self.client.get(reverse('api-v3-lots-get-details'), {'entity_id': self.test_producer.id, 'tx_id': tx.id})
self.assertEqual(response.status_code, 200)
data = response.json()['data']
self.assertEqual(data['transaction']['client_is_in_carbure'], True)
def test_double_count_certificates_expiration(self):
# create 2 double count certificates. one valid, one expired
today = datetime.date.today()
vfrom = today - datetime.timedelta(days=365)
vuntil = today
DoubleCountingRegistration.objects.update_or_create(certificate_id="DC_CERT_01", certificate_holder="Super testeur",
defaults={'registered_address':"blablabla", 'valid_from': vfrom, 'valid_until': today})
DoubleCountingRegistration.objects.update_or_create(certificate_id="DC_CERT_02", certificate_holder="Super testeur",
defaults={'registered_address':"blablabla", 'valid_from': vfrom, 'valid_until': vuntil - datetime.timedelta(days=7)})
# upload lot using first
self.production_site.dc_reference = "DC_CERT_01"
self.production_site.save()
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", biocarburant_code="ETH", delivery_date=today.strftime("%d/%m/%Y"))
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
# upload lot using second
self.production_site.dc_reference = "DC_CERT_02"
self.production_site.save()
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", biocarburant_code="ETH", delivery_date=today.strftime("%d/%m/%Y"))
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 1)
# upload lot using unknown cert
GenericError.objects.all().delete()
self.production_site.dc_reference = "BLIPBLOP"
self.production_site.save()
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", biocarburant_code="ETH", double_counting_registration="UNKNOWN_DC_CERT")
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 1)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
print("### TEST DC OPERATOR ###")
# same test, but as an operator (no production site associated with account)
GenericError.objects.all().delete()
# upload lot using first
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", double_counting_registration="DC_CERT_01", biocarburant_code="ETH", delivery_date=today.strftime("%d/%m/%Y"), producer="UNKNOWN", production_site="UNKNOWN", entity_id=self.test_operator.id, production_site_commissioning_date='01/12/2001')
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
# upload lot using second
GenericError.objects.all().delete()
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", double_counting_registration="DC_CERT_02", biocarburant_code="ETH", delivery_date=today.strftime("%d/%m/%Y"), producer="UNKNOWN", production_site="UNKNOWN", entity_id=self.test_operator.id, production_site_commissioning_date='01/12/2001')
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 1)
# upload lot using unknown cert
GenericError.objects.all().delete()
tx_id, lot_id = self.create_lot(matiere_premiere_code="MARC_DE_RAISIN", double_counting_registration="UNKNOWN_DC_CERT", biocarburant_code="ETH", delivery_date=today.strftime("%d/%m/%Y"), producer="UNKNOWN", production_site="UNKNOWN", entity_id=self.test_operator.id, production_site_commissioning_date='01/12/2001')
self.assertEqual(GenericError.objects.filter(error="UNKNOWN_DOUBLE_COUNTING_CERTIFICATE").count(), 1)
self.assertEqual(GenericError.objects.filter(error="EXPIRED_DOUBLE_COUNTING_CERTIFICATE").count(), 0)
class DeclarationTests(TransactionTestCase):
    # CARBURE_HOME points at the repository checkout; fixture paths are built from it
    home = os.environ['CARBURE_HOME']
    # reference data loaded before each test: countries, feedstock, biofuels, depots
    fixtures = ['{home}/web/fixtures/json/countries.json'.format(home=home),
                '{home}/web/fixtures/json/feedstock.json'.format(home=home),
                '{home}/web/fixtures/json/biofuels.json'.format(home=home),
                '{home}/web/fixtures/json/depots.json'.format(home=home)]
def setUp(self):
user_model = get_user_model()
self.user_email = '<EMAIL>'
self.user_password = '<PASSWORD>'
self.user1 = user_model.objects.create_user(email=self.user_email, name='Le Super Testeur 1', password=self.user_password)
# a few entities
self.test_producer, _ = Entity.objects.update_or_create(name='Le Super Producteur 1', entity_type='Producteur')
self.test_operator, _ = Entity.objects.update_or_create(name='OPERATEUR1', entity_type='Opérateur')
# some rights
UserRights.objects.update_or_create(user=self.user1, entity=self.test_producer, role='RW')
UserRights.objects.update_or_create(user=self.user1, entity=self.test_operator, role='RW')
loggedin = self.client.login(username=self.user_email, password=self.user_password)
self.assertTrue(loggedin)
# pass otp verification
response = self.client.get(reverse('otp-verify'))
self.assertEqual(response.status_code, 200)
device = EmailDevice.objects.get(user=self.user1)
response = self.client.post(reverse('otp-verify'), {'otp_token': device.token})
self.assertEqual(response.status_code, 302)
def create_lot(self, **kwargs):
producer = self.test_producer
lot = {
'supplier_certificate': 'ISCC-TOTO-02',
'biocarburant_code': 'ETH',
'matiere_premiere_code': 'BLE',
'producer': producer.name,
'production_site': "usine non repertoriee",
'volume': 15000,
'pays_origine_code': 'FR',
'eec': 1,
'ep': 5,
'etd': 12,
'dae': get_random_dae(),
'delivery_date': '2020-12-31',
'client': self.test_operator.name,
'delivery_site': '001',
'entity_id': self.test_producer.id,
}
lot.update(kwargs)
response = self.client.post(reverse('api-v3-add-lot'), lot)
self.assertEqual(response.status_code, 200)
data = response.json()['data']
tx_id = data['id']
lot_id = data['lot']['id']
return tx_id, lot_id
def test_declare(self):
today = datetime.date.today()
# create lots for client
tx_id, lot_id = self.create_lot(delivery_date=today.strftime('%Y-%m-%d'))
# validate
tx = LotTransaction.objects.get(id=tx_id)
tx.delivery_status = LotTransaction.PENDING
tx.save()
tx.lot.status = LotV2.VALIDATED
tx.lot.save()
# try validate declaration (doesnt work)
| |
""" Code that plots fields from the CMAC radar object. """
import os
from datetime import datetime
import operator
import cartopy.crs as ccrs
import netCDF4
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import pyart
from pyart.graph.common import (
generate_radar_name, generate_radar_time_begin)
from .config import get_plot_values, get_field_names
plt.switch_backend('agg')
def quicklooks_ppi(radar, config, sweep=None, image_directory=None,
dd_lobes=True):
"""
Quicklooks PPI, images produced with regards to CMAC
    Parameters
    ----------
radar : Radar
Radar object that has CMAC applied to it.
config : str
A string of the radar name found from config.py that contains values
for plotting, specific to that radar.
Optional Parameters
-------------------
image_directory : str
File path to the image folder of which to save the CMAC images. If no
image file path is given, image path defaults to users home directory.
dd_lobes : bool
Plot DD lobes between radars if dd_lobes is True.
"""
if image_directory is None:
image_directory = os.path.expanduser('~')
radar_start_date = netCDF4.num2date(
radar.time['data'][0], radar.time['units'],
only_use_cftime_datetimes=False, only_use_python_datetimes=True)
# Retrieve the plot parameter values based on the radar.
plot_config = get_plot_values(config)
field_config = get_field_names(config)
save_name = plot_config['save_name']
date_string = datetime.strftime(radar_start_date, '%Y%m%d.%H%M%S')
combined_name = '.' + save_name + '.' + date_string
#min_lat = plot_config['min_lat']
#max_lat = plot_config['max_lat']
#min_lon = plot_config['min_lon']
#max_lon = plot_config['max_lon']
max_lat = radar.gate_latitude['data'].max() + .1
min_lat = radar.gate_latitude['data'].min() - .1
max_lon = radar.gate_longitude['data'].max() + .1
min_lon = radar.gate_longitude['data'].min() - .1
# Creating a plot of reflectivity before CMAC.
lal = np.arange(min_lat, max_lat, .8)
lol = np.arange(min_lon, max_lon, .8)
if dd_lobes:
grid_lat = np.arange(min_lat, max_lat, 0.01)
grid_lon = np.arange(min_lon, max_lon, 0.01)
facility = plot_config['facility']
if facility == 'I4':
dms_radar1_coords = [plot_config['site_i4_dms_lon'],
plot_config['site_i4_dms_lat']]
dms_radar2_coords = [plot_config['site_i5_dms_lon'],
plot_config['site_i5_dms_lat']]
elif facility == 'I5':
dms_radar1_coords = [plot_config['site_i5_dms_lon'],
plot_config['site_i5_dms_lat']]
dms_radar2_coords = [plot_config['site_i4_dms_lon'],
plot_config['site_i4_dms_lat']]
elif facility == 'I6':
dms_radar1_coords = [plot_config['site_i6_dms_lon'],
plot_config['site_i6_dms_lat']]
dms_radar2_coords = [plot_config['site_i4_dms_lon'],
plot_config['site_i4_dms_lat']]
dec_radar1 = [_dms_to_decimal(
dms_radar1_coords[0][0], dms_radar1_coords[0][1],
dms_radar1_coords[0][2]), _dms_to_decimal(
dms_radar1_coords[1][0], dms_radar1_coords[1][1],
dms_radar1_coords[1][2])]
dec_radar2 = [_dms_to_decimal(
dms_radar2_coords[0][0], dms_radar2_coords[0][1],
dms_radar2_coords[0][2]), _dms_to_decimal(
dms_radar2_coords[1][0], dms_radar2_coords[1][1],
dms_radar2_coords[1][2])]
bca = _get_bca(dec_radar2[0], dec_radar2[1], dec_radar1[0],
dec_radar1[1], grid_lon, grid_lat)
grid_lon, grid_lat = np.meshgrid(grid_lon, grid_lat)
if sweep is None:
if radar.nsweeps < 4:
sweep = 2
else:
sweep = plot_config['sweep']
# Plot of the raw reflectivity from the radar.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1,
subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('reflectivity', sweep=sweep, resolution='50m', ax=ax,
vmin=-8, vmax=64, mask_outside=False,
cmap=pyart.graph.cm_colorblind.HomeyerRainbow,
min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/reflectivity' + combined_name + '.png')
del fig, ax
# Four panel plot of gate_id, velocity_texture, reflectivity, and
# cross_correlation_ratio.
cat_dict = {}
print('##')
print('## Keys for each gate id are as follows:')
for pair_str in radar.fields['gate_id']['notes'].split(','):
print('## ', str(pair_str))
cat_dict.update({pair_str.split(':')[1]:int(pair_str.split(':')[0])})
sorted_cats = sorted(cat_dict.items(), key=operator.itemgetter(1))
cat_colors = {'rain': 'green',
'multi_trip': 'red',
'no_scatter': 'gray',
'snow': 'cyan',
'melting': 'yellow'}
lab_colors = ['red', 'cyan', 'grey', 'green', 'yellow']
if 'ground_clutter' in radar.fields.keys():
cat_colors['clutter'] = 'black'
lab_colors = np.append(lab_colors, 'black')
if 'terrain_blockage' in radar.fields['gate_id']['notes']:
cat_colors['terrain_blockage'] = 'brown'
lab_colors = np.append(lab_colors, 'brown')
lab_colors = [cat_colors[kitty[0]] for kitty in sorted_cats]
cmap = matplotlib.colors.ListedColormap(lab_colors)
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(2, 2,
figsize=[15, 10], subplot_kw=dict(projection=ccrs.PlateCarree()))
ax[0, 0].set_aspect('auto')
display.plot_ppi_map('gate_id', sweep=sweep, min_lon=min_lon, ax=ax[0, 0],
max_lon=max_lon, min_lat=min_lat,
max_lat=max_lat, resolution='50m',
lat_lines=lal, lon_lines=lol, cmap=cmap,
vmin=0, vmax=6, projection=ccrs.PlateCarree())
if dd_lobes:
ax[0, 0].contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
cbax = ax[0, 0]
if 'ground_clutter' in radar.fields.keys() or 'terrain_blockage' in radar.fields['gate_id']['notes']:
tick_locs = np.linspace(
0, len(sorted_cats) - 1, len(sorted_cats)) + 0.5
else:
tick_locs = np.linspace(
0, len(sorted_cats), len(sorted_cats)) + 0.5
display.cbs[-1].locator = matplotlib.ticker.FixedLocator(tick_locs)
catty_list = [sorted_cats[i][0] for i in range(len(sorted_cats))]
display.cbs[-1].formatter = matplotlib.ticker.FixedFormatter(catty_list)
display.cbs[-1].update_ticks()
ax[0, 1].set_aspect('auto')
display.plot_ppi_map('reflectivity', sweep=sweep, vmin=-8, vmax=40.0,
ax=ax[0, 1], min_lon=min_lon, max_lon=max_lon,
min_lat=min_lat,
max_lat=max_lat, lat_lines=lal, lon_lines=lol,
resolution='50m',
cmap=pyart.graph.cm_colorblind.HomeyerRainbow,
projection=ccrs.PlateCarree())
if dd_lobes:
ax[0, 1].contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
ax[1, 0].set_aspect('auto')
display.plot_ppi_map('velocity_texture', sweep=sweep, vmin=0, vmax=14,
min_lon=min_lon, max_lon=max_lon, min_lat=min_lat,
max_lat=max_lat, lat_lines=lal, lon_lines=lol,
resolution='50m', ax=ax[1, 0],
title=_generate_title(
radar, 'velocity_texture', sweep),
cmap=pyart.graph.cm.NWSRef,
projection=ccrs.PlateCarree())
if dd_lobes:
ax[1, 0].contour(grid_lon, grid_lat, bca, latlon='True',
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
rhv_field = field_config['cross_correlation_ratio']
ax[1, 1].set_aspect('auto')
display.plot_ppi_map(rhv_field, sweep=sweep, vmin=.5,
vmax=1, min_lon=min_lon, max_lon=max_lon,
min_lat=min_lat, max_lat=max_lat, lat_lines=lal,
lon_lines=lol, resolution='50m', ax=ax[1, 1],
cmap=pyart.graph.cm.Carbone42,
projection=ccrs.PlateCarree())
if dd_lobes:
ax[1, 1].contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/cmac_four_panel_plot' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot with reflectivity corrected with gate ids.
cmac_gates = pyart.correct.GateFilter(radar)
cmac_gates.exclude_all()
cmac_gates.include_equal('gate_id', cat_dict['rain'])
cmac_gates.include_equal('gate_id', cat_dict['melting'])
cmac_gates.include_equal('gate_id', cat_dict['snow'])
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('reflectivity',
sweep=sweep, resolution='50m',
vmin=-8, vmax=40, mask_outside=False,
cmap=pyart.graph.cm_colorblind.HomeyerRainbow,
title=_generate_title(
radar, 'masked_corrected_reflectivity',
sweep), ax=ax,
min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
gatefilter=cmac_gates,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/masked_corrected_reflectivity' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot with reflectivity corrected with attenuation.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('corrected_reflectivity', sweep=sweep,
vmin=0, vmax=40.0, resolution='50m',
title=_generate_title(
radar, 'corrected_reflectivity',
sweep),
cmap=pyart.graph.cm_colorblind.HomeyerRainbow,
min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol, ax=ax,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/corrected_reflectivity' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot with differential phase.
phase_field = field_config['input_phidp_field']
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map(phase_field, sweep=sweep,
resolution='50m', ax=ax,
min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
projection=ccrs.PlateCarree())
fig.savefig(
image_directory
+ '/differential_phase' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of specific attenuation.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('specific_attenuation', sweep=sweep, vmin=0,
vmax=1.0, resolution='50m', ax=ax,
min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/specific_attenuation' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of corrected differential phase.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('corrected_differential_phase', sweep=sweep,
title=_generate_title(
radar, 'corrected_differential_phase',
sweep), ax=ax,
resolution='50m', min_lat=min_lat,
min_lon=min_lon, max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/corrected_differential_phase' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of corrected specific differential phase.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('corrected_specific_diff_phase', sweep=sweep,
vmin=0, vmax=6, resolution='50m',
title=_generate_title(
radar, 'corrected_specific_diff_phase',
sweep), ax=ax,
min_lat=min_lat, min_lon=min_lon, max_lat=max_lat,
max_lon=max_lon, lat_lines=lal, lon_lines=lol,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/corrected_specific_diff_phase' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot with region dealias corrected velocity.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('corrected_velocity', sweep=sweep, resolution='50m',
cmap=pyart.graph.cm.NWSVel, vmin=-30, ax=ax,
vmax=30, min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, max_lon=max_lon, lat_lines=lal,
lon_lines=lol, projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/corrected_velocity' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of rain rate A
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('rain_rate_A', sweep=sweep, resolution='50m',
vmin=0, vmax=120, min_lat=min_lat, min_lon=min_lon,
max_lat=max_lat, ax=ax, max_lon=max_lon, lat_lines=lal,
lon_lines=lol, projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/rain_rate_A' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of filtered corrected differential phase.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('filtered_corrected_differential_phase', sweep=sweep,
title=_generate_title(
radar, 'filtered_corrected_differential_phase',
sweep),
resolution='50m', min_lat=min_lat, ax=ax,
min_lon=min_lon, max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
cmap=pyart.graph.cm.Theodore16,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/filtered_corrected_differential_phase' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of filtered corrected specific differential phase.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('filtered_corrected_specific_diff_phase', sweep=sweep,
title=_generate_title(
radar, 'filtered_corrected_specific_diff_phase',
sweep), ax=ax,
resolution='50m', min_lat=min_lat,
min_lon=min_lon, max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol,
cmap=pyart.graph.cm.Theodore16,
projection=ccrs.PlateCarree())
if dd_lobes:
ax.contour(grid_lon, grid_lat, bca,
levels=[np.pi/6, 5*np.pi/6], linewidths=2,
colors='k')
fig.savefig(
image_directory
+ '/filtered_corrected_specific_diff_phase' + combined_name + '.png')
plt.close(fig)
del fig, ax, display
# Creating a plot of corrected differential phase.
display = pyart.graph.RadarMapDisplay(radar)
fig, ax = plt.subplots(1, 1, subplot_kw=dict(projection=ccrs.PlateCarree()),
figsize=[12, 8])
ax.set_aspect('auto')
display.plot_ppi_map('specific_differential_attenuation', sweep=sweep,
title=_generate_title(
radar, 'specific_differential_attenuation',
sweep), ax=ax,
resolution='50m', min_lat=min_lat,
min_lon=min_lon, max_lat=max_lat, max_lon=max_lon,
lat_lines=lal, lon_lines=lol, gatefilter=cmac_gates,
projection=ccrs.PlateCarree())
if dd_lobes:
| |
<filename>varconlib/scripts/graph_results.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 15 15:32:43 2018
@author: dberke
"""
# Script to plot cached results from lineFind.py
import datetime as dt
from pathlib import Path
from glob import glob
import varconlib as vcl
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import matplotlib.lines as lines
from scipy.optimize import curve_fit
from tqdm import tqdm
from adjustText import adjust_text
from simulateScatter import injectGaussianNoise
def polynomial1D(x, m, b):
    """Evaluate a straight line y = m*x + b at the point x.

    x: the independent variable
    m: the slope of the line
    b: the intercept (offset) of the line
    """
    return b + x * m
def fitCCDslope(CCDdata):
    """Least-squares fit the CCD's wavelength-to-y(pix) relation.

    CCDdata: a spectral format for one of HARPS' CCDs, in pandas table format

    returns: the best-fit (slope, offset) parameters of a straight line
    """
    wavelengths = CCDdata['centcolwl']
    y_pixels = CCDdata['centcolypix']
    # Fit the module's shared 1-D line model; covariance is not needed here.
    best_fit, _covariance = curve_fit(polynomial1D, wavelengths, y_pixels)
    return best_fit
def readHARPSspectralformat(filename):
    """Read the spectral-format table of a HARPS CCD from a CSV file.

    filename: the csv file to read in

    returns: a pandas DataFrame holding the file's contents
    """
    # header=0 takes the first row as column names; the C engine is fastest.
    frame = pd.read_csv(filename, header=0, engine='c')
    return frame
def getHARPSxypos(wl, data, slope, offset):
    """Map a wavelength onto its (x, y) pixel position on a HARPS CCD.

    wl: wavelength to be mapped back to x, y coordinates. Must be within
        378.113nm and 530.43nm or 533.73nm and 691.219nm
    data: spectral-format table with 'startwl', 'endwl', 'FSRmin', 'FSRmax'
    slope, offset: parameters of the wavelength-to-y(pix) line fit

    returns: a tuple of (x, y) pixel values
    """
    detector_width = 4096
    # Find the echelle order whose free spectral range contains wl; its full
    # wavelength span gives the horizontal extent of the order on the chip.
    for order_start, order_end, fsr_min, fsr_max in zip(
            data['startwl'], data['endwl'], data['FSRmin'], data['FSRmax']):
        if fsr_min <= wl <= fsr_max:
            lowerwl, upperwl = order_start, order_end
            break
    try:
        fractional_x = (wl - lowerwl) / (upperwl - lowerwl)
    except UnboundLocalError:
        # No order matched wl: dump the inputs for debugging, then re-raise.
        print(wl, data, slope, offset)
        raise
    xpos = fractional_x * detector_width
    ypos = polynomial1D(wl, slope, offset)
    return (xpos, ypos)
def plot_HARPS_CCDs(pairlist):
    """Plot the HARPS CCDs at 1-to-1 pixel scale, with wavelengths

    pairlist: iterable of line pairs; each pair holds two wavelength strings
        (nm). Each pair is drawn on whichever CCD (blue or red) contains it.

    Reads the blue/red spectral-format tables, outlines the light path,
    marks every line pair together with its radial-velocity shift limits,
    then writes CCD_blue.png and CCD_red.png to a hard-coded directory.
    """
    # Extreme stellar radial velocities (m/s) used to bound line positions.
    maxradvel = 143500
    minradvel = -68800
    mpl.rcParams['font.size'] = 24
    # 40.96x20.48 inches at dpi 100 => one figure pixel per CCD pixel.
    fig_blue = plt.figure(figsize=(40.96, 20.48), dpi=100, tight_layout=True)
    fig_red = plt.figure(figsize=(40.96, 20.48), dpi=100, tight_layout=True)
    ax_blue = fig_blue.add_subplot(1, 1, 1)
    ax_red = fig_red.add_subplot(1, 1, 1)
    axes = (ax_blue, ax_red)
    # NOTE(review): blueCCDpath/redCCDpath are module-level names defined
    # outside this function — confirm they are set before calling.
    bluetable = readHARPSspectralformat(blueCCDpath)
    redtable = readHARPSspectralformat(redCCDpath)
    tables = (bluetable, redtable)
    blueparams = fitCCDslope(bluetable)
    redparams = fitCCDslope(redtable)
    parameters = (blueparams, redparams)
    colors = ({'main': 'Blue',
               'mid': 'DarkCyan'},
              {'main': 'Red',
               'mid': 'Maroon'})
    # Shared axes setup: full CCD extent plus lines marking the 512-pixel
    # readout blocks and the mid-height chip split.
    for ax in axes:
        ax.set_xlim(left=0, right=4096)
        ax.set_ylim(bottom=0, top=2048)
        ax.set_xlabel('Pixels')
        ax.set_ylabel('Pixels')
        vert_joins = [x for x in range(512, 4096, 512)]
        ax.vlines(vert_joins, 0, 2048, color='black', linewidth=1)
        ax.hlines(1024, 0, 4096, color='black', linewidth=1)
    for ax, table, params, color in zip(axes, tables, parameters, colors):
        # Plot a bunch of evenly-spaced point to outline the location of the
        # light.
        for wl in tqdm(np.linspace(table['FSRmin'].min(),
                                   table['FSRmax'].max(), 10000),
                       unit='Reference points'):
            x, y = getHARPSxypos(wl, table, *params)
            ax.plot(x, y, color=color['main'], linestyle='',
                    marker='.', markersize=2)
        # Plot the positions of the central columns
        for wl in table['centcolwl']:
            x, y, = getHARPSxypos(wl, table, *params)
            ax.plot(x, y, color=color['mid'], linestyle='',
                    marker='|', markersize=24)
    # Plot the locations of each line in each pair
    for pair in tqdm(pairlist, unit='Line pairs'):
        line1 = float(pair[0])
        line2 = float(pair[1])
        # Choose the CCD: blue coverage ends at 530.43nm, red starts at
        # 533.73nm.
        # NOTE(review): a pair falling in the 530.43-533.73nm gap matches
        # neither branch, so axis/table/params silently keep their values
        # from the previous iteration (NameError on the first) — confirm
        # inputs never fall in the gap.
        if line2 < 530.43:
            axis = ax_blue
            table = bluetable
            params = blueparams
        elif line1 > 533.73:
            axis = ax_red
            table = redtable
            params = redparams
        x1, y1 = getHARPSxypos(line1, table, *params)
        x2, y2 = getHARPSxypos(line2, table, *params)
        # Plot the first line of the pair
        axis.plot(x1, y1, color='Purple', linestyle='', marker='P',
                  markersize=12, alpha=1)
        # Annotate it with its wavelength
        axis.annotate(pair[0], xy=(x1, y1), xytext=(x1-55, y1+11),
                      fontsize=15)
        # Plot the maximum limits of where it falls on the detector, assuming
        # a maximum radial velocity shift of ±30 km/s
        blueshift1 = vcl.getwlseparation(-30000+minradvel, line1) + line1
        redshift1 = vcl.getwlseparation(30000+maxradvel, line1) + line1
        x3, y3 = getHARPSxypos(blueshift1, table, *params)
        x4, y4 = getHARPSxypos(redshift1, table, *params)
        lims1 = ((x3, y3), (x4, y4))
        for lims in lims1:
            axis.plot(lims[0], lims[1], color='Purple', linestyle='',
                      marker='|', markersize=24)
        # Markers for the pure radial-velocity extremes (no ±30 km/s pad).
        bluerad1 = vcl.getwlseparation(minradvel, line1) + line1
        redrad1 = vcl.getwlseparation(maxradvel, line1) + line1
        x7, y7 = getHARPSxypos(bluerad1, table, *params)
        x8, y8 = getHARPSxypos(redrad1, table, *params)
        axis.plot(x7, y7, color='Purple', linestyle='',
                  marker=8, markersize=8, alpha=1)
        axis.plot(x8, y8, color='Purple', linestyle='',
                  marker=9, markersize=8, alpha=1)
        # Plot the second line of the pair.
        axis.plot(x2, y2, color='Green', linestyle='', marker='P',
                  markersize=12, alpha=1)
        # Annotate it with its wavelength
        axis.annotate(pair[1], xy=(x2, y2), xytext=(x2-55, y2-31),
                      fontsize=15)
        # Plot the maximum limits of where it falls on the detector, assuming
        # a maximum radial velocity shift of ±30 km/s
        blueshift2 = vcl.getwlseparation(-30000+minradvel, line2) + line2
        redshift2 = vcl.getwlseparation(30000+maxradvel, line2) + line2
        x5, y5 = getHARPSxypos(blueshift2, table, *params)
        x6, y6 = getHARPSxypos(redshift2, table, *params)
        lims2 = ((x5, y5), (x6, y6))
        for lims in lims2:
            axis.plot(lims[0], lims[1], color='Green', linestyle='',
                      marker='|', markersize=24)
        bluerad2 = vcl.getwlseparation(minradvel, line2) + line2
        redrad2 = vcl.getwlseparation(maxradvel, line2) + line2
        x9, y9 = getHARPSxypos(bluerad2, table, *params)
        x10, y10 = getHARPSxypos(redrad2, table, *params)
        axis.plot(x9, y9, color='Green', linestyle='',
                  marker=8, markersize=8, alpha=1)
        axis.plot(x10, y10, color='Green', linestyle='',
                  marker=9, markersize=8, alpha=1)
    outfile_blue = '/Users/dberke/Pictures/CCD_blue.png'
    outfile_red = '/Users/dberke/Pictures/CCD_red.png'
    fig_blue.savefig(outfile_blue)
    fig_red.savefig(outfile_red)
    plt.close(fig_blue)
    plt.close(fig_red)
    # Undo the font-size override applied at the top of this function.
    mpl.rcdefaults()
def plot_absorption_spectrum(pairlist):
    """Plot each line pair together with the telluric transmission spectrum.

    pairlist: iterable of line pairs (two wavelength strings per pair)

    Shells out to the external plotSpec.py script once per pair; output file
    names encode both wavelengths of the pair.
    """
    import subprocess
    for pair in tqdm(pairlist):
        low_wl = float(pair[0])
        high_wl = float(pair[1])
        # Pad the plotted window by 75% of the pair separation on each side.
        margin = (high_wl - low_wl) * 0.75
        command = ['/Users/dberke/code/plotSpec.py',
                   'HD45184/ADP.2014-09-26T16:54:56.573.fits',
                   'HD45184/ADP.2015-09-30T02:00:51.583.fits',
                   '-o', 'Trans_{}_{}.png'.format(pair[0], pair[1]),
                   '-r', '-3.9', '-i', '0', '-j', '1.05', '-vtz', '-n',
                   '{}'.format(low_wl - margin),
                   '-m',
                   '{}'.format(high_wl + margin),
                   '-l', pair[0], pair[1]]
        subprocess.run(command)
def plot_line_offsets(pairlist, data, filepath):
    """Plot a histogram of each chosen line's offsets

    pairlist: iterable of line pairs (two wavelength strings per pair)
    data: DataFrame with 'line1_nom_wl'/'line2_nom_wl' columns and measured
        'line1_gauss_vel_offset'/'line2_gauss_vel_offset' columns
    filepath: Path object; one histogram per line is written under
        filepath/'graphs'
    """
    for linepair in tqdm(pairlist):
        # Select the measurements belonging to each line of the pair.
        filtdata1 = data[data['line1_nom_wl'] == float(linepair[0])]
        filtdata2 = data[data['line2_nom_wl'] == float(linepair[1])]
        outpath1 = filepath / 'graphs' / 'Hist_{}.png'.format(linepair[0])
        outpath2 = filepath / 'graphs' / 'Hist_{}.png'.format(linepair[1])
        fig1 = plt.figure(figsize=(8, 8))
        fig2 = plt.figure(figsize=(8, 8))
        ax1 = fig1.add_subplot(1, 1, 1)
        ax2 = fig2.add_subplot(1, 1, 1)
        ax1.set_title(linepair[0])
        ax2.set_title(linepair[1])
        ax1.set_xlabel(r'$\delta v$ around expected position [m/s]', size=18)
        ax2.set_xlabel(r'$\delta v$ around expected position [m/s]', size=18)
        offsets1 = filtdata1['line1_gauss_vel_offset']
        offsets2 = filtdata2['line2_gauss_vel_offset']
        median1 = np.median(offsets1)
        median2 = np.median(offsets2)
        # Center each distribution on its median before histogramming.
        # NOTE(review): in-place '-=' on a column selected from a filtered
        # frame relies on pandas view-vs-copy behavior; confirm this is not
        # meant to write back into `data`.
        offsets1 -= median1
        offsets2 -= median2
        std1 = np.std(offsets1)
        std2 = np.std(offsets2)
        ax1.hist(offsets1, bins=14, edgecolor='Black',
                 label='Median: {:.4f} m/s\nStdDev: {:.4f} m/s'.
                 format(median1, std1))
        ax2.hist(offsets2, bins=14, edgecolor='Black',
                 label='Median: {:.4f} m/s\nStdDev: {:.4f} m/s'.
                 format(median2, std2))
        ax1.legend(fontsize=16)
        ax2.legend(fontsize=16)
        fig1.savefig(str(outpath1))
        fig2.savefig(str(outpath2))
        plt.close(fig1)
        plt.close(fig2)
def plot_scatter_by_atomic_number(baseDir):
    """Create a plot of scatter among transitions by atomic number

    baseDir: Path to a directory holding one subdirectory per star, each
        containing a '<star>.csv' results file.

    Shows an interactive window (plt.show) rather than saving to disk.
    """
    stars = ('HD146233', 'HD45184', 'HD183658', 'HD138573')
    files = []
    for star in stars:
        files.append(baseDir / star / '{}.csv'.format(star))
    # Keep nominal wavelengths as strings so they match pairlist entries.
    frames = [pd.read_csv(file, header=0, parse_dates=[1], engine='c',
                          converters={'line1_nom_wl': str,
                                      'line2_nom_wl': str}) for file in files]
    data = pd.concat(frames)
    fig = plt.figure(figsize=(12, 10))
    ax = fig.add_subplot(1, 1, 1)
    labels = []
    for pair in vcl.pairlist:
        # Skip any pair containing a line flagged as unreliable.
        if not vcl.badlines.isdisjoint(pair):
            print('Bad lines! {}, {}'.format(pair, vcl.elemdict[pair]))
            continue
        scatters = []
        atomnum = vcl.elemdict[pair]
        # Spread the per-star points horizontally around the atomic number.
        xpositions = np.linspace(atomnum - 0.4, atomnum + 0.4, len(stars))
        for star, pos in zip(stars, xpositions):
            filtdata = data[(data['line1_nom_wl'] == pair[0]) &
                            (data['line2_nom_wl'] == pair[1]) &
                            (data['object'] == star)]
            gaussvel = filtdata['vel_diff_gauss']
            # Center on the median, then take the RMS as the scatter metric.
            # NOTE(review): '-=' on a column of a filtered frame can hit
            # pandas view-vs-copy behavior; confirm mutation is intended.
            gaussvel -= np.median(gaussvel)
            RMS = np.sqrt(np.mean(np.square(gaussvel)))
            print(RMS)
            print(np.std(gaussvel))
            scatters.append(RMS)
#            ax.plot([pos]*len(gaussvel), gaussvel, color='Green', marker='_',
#                    linestyle='')
        ax.plot(xpositions, scatters, color='Black', linewidth=1, marker='.')
        labels.append(plt.text(xpositions[0], scatters[0], '{}'.format(pair),
                               ha='center', va='center', fontsize=6))
    # Nudge overlapping pair labels apart with connector arrows.
    adjust_text(labels, arrowprops=dict(arrowstyle='->', color='red'))
    plt.show()
#    plt.close(fig)
def plot_as_function_of_depth(base_dir):
"""Plot the scattar in line pair separation as a function of line depth
Parameters
----------
base_dir : Path object
A Path object representing the root directory for a star wherein to
search for the various data files containing the information to plot.
"""
# Directory to put plots in
plot_dir = Path('/Users/dberke/Pictures/linedepths')
stars = ('HD146233', ) # 'HD45184')
color = 'ForestGreen'
# Number of iterations to use when simulating scatter.
num_iters = 100
for star in stars:
fig = plt.figure(figsize=(12, 10))
ax = fig.add_subplot(1, 1, 1)
ax.set_xlabel('Mean line depth (normalized)')
ax.set_ylabel('RMS scatter in pair velocity separation [m/s]')
ax.set_xlim(left=0.26, right=0.72)
# ax.set_ylim(bottom=0, top=74)
labels = []
legend_elements = []
infile = base_dir / '{star}.csv'.format(star=star)
data = pd.read_csv(infile, header=0, parse_dates=[1], engine='c',
converters={'line1_nom_wl': str,
'line2_nom_wl': str})
all_lines = []
all_scatters = []
all_sim_scatters = []
for pair in vcl.pairlist:
print('Simulating scatter in line pair {}'.format(pair))
if not vcl.badlines.isdisjoint(pair):
continue
pair_vel_seps = []
filtdata = data[(data['line1_nom_wl'] == pair[0]) &
(data['line2_nom_wl'] == | |
<gh_stars>1-10
#!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing parameter definition support."""
import json
import re
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from broker.brokertest import TestBrokerCommand
# JSON Schema (draft-04) describing the expected shape of the "testjson"
# parameter: an object with a string "key" and a non-empty array of unique
# integer "values"; no other properties allowed.
# NOTE(review): JSON Schema normally names its meta-schema key "$schema";
# confirm the broker really expects the bare "schema" key used here.
test_schema = {
    "schema": "http://json-schema.org/draft-04/schema#",
    "type": "object",
    "properties": {
        "key": {
            "type": "string"
        },
        "values": {
            "type": "array",
            "items": {
                "type": "integer"
            },
            "minItems": 1,
            "uniqueItems": True
        }
    },
    "additionalProperties": False
}

# Catalogue of parameter definitions exercised throughout these tests.
# Keys are parameter paths; values describe the options passed to
# add_parameter_definition ("type", "default", "required", "activation",
# "schema") plus an optional "invalid_default" used by negative tests.
default_param_defs = {
    "testdefault": {
        # No explicit type
        "description": "Blah",
    },
    "testrequired": {
        "type": "string",
        "required": True,
    },
    "test_rebuild_required": {
        "type": "string",
        "activation": "rebuild",
    },
    "teststring": {
        "type": "string",
        "default": "default",
    },
    "testint": {
        "type": "int",
        "default": "60",
        "invalid_default": "bad_int",
    },
    "testfloat": {
        "type": "float",
        "default": "100.100",
        "invalid_default": "bad_float",
    },
    "testboolean": {
        "type": "boolean",
        "default": "yes",
        "invalid_default": "bad_boolean",
    },
    "testfalsedefault": {
        "type": "boolean",
        "default": "no",
    },
    "testlist": {
        "type": "list",
        "default": "val1,val2",
    },
    "testjson": {
        "type": "json",
        "default": '{"key": "param_key", "values": [0]}',
        "schema": json.dumps(test_schema),
        "invalid_default": "bad json value",
    },
}

# Features (keyed by feature type) whose parameter definitions get populated
# by the feature-related tests below.
param_features = {
    "host": ["pre_host"],
    "hardware": ["bios_setup"],
    "interface": ["src_route"],
}
class TestAddParameterDefinition(TestBrokerCommand):
@classmethod
def setUpClass(cls):
super(TestAddParameterDefinition, cls).setUpClass()
cls.proto = cls.protocols['aqdsystems_pb2']
desc = cls.proto.Feature.DESCRIPTOR
cls.activation_type = desc.fields_by_name["activation"].enum_type
def test_100_add_all(self):
for path, params in default_param_defs.items():
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path", "foo/" + path, "--template", "foo"]
if "type" in params:
cmd.extend(["--value_type", params["type"]])
if params.get("required", False):
cmd.append("--required")
if "activation" in params:
cmd.extend(["--activation", params["activation"]])
if "schema" in params:
cmd.extend(["--schema", params["schema"]])
self.noouttest(cmd)
    def test_105_show_paramdef(self):
        # The definition added in test_100 must render all of its fields,
        # including the [required] marker and the default dispatch activation.
        cmd = ["show_parameter_definition", "--archetype", "aquilon",
               "--path", "foo/testrequired"]
        out = self.commandtest(cmd)
        self.output_equals(out, """
            Parameter Definition: testrequired [required]
              Archetype: aquilon
              Value Type: string
              Template: foo
              Activation: dispatch
            """, cmd)
def test_120_clean_path(self):
for path in ["/foo/startslash", "foo/endslash/"]:
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path=%s" % path, "--template=foo", "--value_type=string"]
self.noouttest(cmd)
def test_130_valid_path(self):
for path in ["multi/part1/part2", "foo", "valid/with_under", "valid/with.dot",
"valid/with-dash", "with_under", "with.dot", "with-dash"]:
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path=foo/%s" % path, "--template=foo", "--value_type=string"]
self.noouttest(cmd)
cmd = ["del_parameter_definition", "--archetype", "aquilon",
"--path=foo/%s" % path]
self.noouttest(cmd)
def load_feature_paramdefs(self, feature, feature_type):
for path, params in default_param_defs.items():
# Activation cannot be set for feature parameters
if "activation" in params:
continue
cmd = ["add_parameter_definition", "--feature", feature,
"--type", feature_type, "--path", path]
if "type" in params:
cmd.extend(["--value_type", params["type"]])
if "default" in params:
cmd.extend(["--default", params["default"]])
if params.get("required", False):
cmd.append("--required")
if "schema" in params:
cmd.extend(["--schema", params["schema"]])
self.noouttest(cmd)
def test_200_add_feature_all(self):
for feature_type, features in param_features.items():
for feature in features:
self.load_feature_paramdefs(feature, feature_type)
    def test_205_show_testrequired(self):
        # Feature parameter definitions render with the feature name and
        # type instead of the archetype/template fields.
        cmd = ["show_parameter_definition", "--feature", "pre_host", "--type=host",
               "--path=testrequired"]
        out = self.commandtest(cmd)
        self.output_equals(out, """
            Parameter Definition: testrequired [required]
              Feature: pre_host
              Type: host
              Value Type: string
            """, cmd)
def test_220_clean_path(self):
for path in ["/startslash", "endslash/"]:
cmd = ["add_parameter_definition", "--feature", "pre_host", "--type=host",
"--path=%s" % path, "--value_type=string"]
self.noouttest(cmd)
cmd = ["search_parameter_definition", "--feature", "pre_host", "--type=host"]
out = self.commandtest(cmd)
self.searchoutput(out, r'Parameter Definition: startslash\s*', cmd)
self.searchoutput(out, r'Parameter Definition: endslash\s*', cmd)
def test_230_valid_path(self):
for path in ["multi/part1/part2", "noslash", "valid/with_under", "valid/with.dot",
"valid/with-dash", "with_under", "with.dot", "with-dash"]:
cmd = ["add_parameter_definition", "--path=%s" % path,
"--feature", "pre_host", "--type=host", "--value_type=string"]
self.noouttest(cmd)
cmd = ["del_parameter_definition", "--feature", "pre_host", "--type=host",
"--path=%s" % path]
self.noouttest(cmd)
def test_240_add_same_feature_name(self):
for type in ["host", "hardware", "interface"]:
self.noouttest(["add_parameter_definition",
"--feature", "shinynew", "--type", type,
"--path", "car", "--value_type", "string"])
def test_300_add_existing(self):
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path=foo/teststring", "--value_type=string", "--description=blaah",
"--template=foo", "--required"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"The path cannot be a strict subset or superset "
"of an existing definition.",
cmd)
def test_300_path_conflict_sub(self):
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path", "foo/testjson/subpath", "--template", "foo",
"--value_type", "list"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"The path cannot be a strict subset or superset "
"of an existing definition.",
cmd)
def test_300_path_conflict_super(self):
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path", "foo", "--template", "foo",
"--value_type", "json"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"The path cannot be a strict subset or superset "
"of an existing definition.",
cmd)
def test_300_add_feature_existing(self):
cmd = ["add_parameter_definition", "--feature", "pre_host", "--type=host",
"--path=teststring", "--value_type=string", "--description=blaah",
"--required"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"The path cannot be a strict subset or superset "
"of an existing definition.",
cmd)
def test_300_path_conflict_feature(self):
cmd = ["add_parameter_definition",
"--feature", "pre_host", "--type=host",
"--path=testjson/subpath", "--value_type=string"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"The path cannot be a strict subset or superset "
"of an existing definition.",
cmd)
def test_300_invalid_feature_defaults(self):
for path, params in default_param_defs.items():
if "invalid_default" not in params:
continue
cmd = ["add_parameter_definition", "--feature", "pre_host", "--type", "host",
"--path", path + "_invalid_default",
"--value_type", params["type"],
"--default", params["invalid_default"]]
out = self.badrequesttest(cmd)
self.matchoutput(out, "for default for path=%s" % path, cmd)
def test_300_add_noncompileable_arch(self):
cmd = ["add_parameter_definition", "--archetype", "windows",
"--path=foo/testint", "--description=blaah",
"--template=foo", "--value_type=int"]
out = self.unimplementederrortest(cmd)
self.matchoutput(out, "Archetype windows is not compileable, "
"parameters are not supported.", cmd)
def test_300_add_archetype_default(self):
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path=foo/test_arch_default", "--default=default",
"--template=foo", "--value_type=string"]
out = self.unimplementederrortest(cmd)
self.matchoutput(out,
"Archetype-wide parameter definitions cannot have "
"default values.",
cmd)
def test_300_invalid_path(self):
for path in ["!badchar", "@badchar", "#badchar", "$badchar", "%badchar", "^badchar",
"&badchar", "*badchar" ":badchar", ";badcharjk", "+badchar"]:
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path=foo/%s" % path, "--template=foo", "--value_type=string"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"'%s' is not a valid value for a path component." % path,
cmd)
def test_300_wrong_toplevel_type(self):
cmd = ["add_parameter_definition", "--archetype", "aquilon",
"--path", "bad_toplevel_type", "--template", "bad_toplevel_type",
"--value_type", "list"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"Only the JSON type can be used for top-level "
"parameter definitions.",
cmd)
def test_300_show_bad_path(self):
cmd = ["show_parameter_definition", "--archetype", "aquilon",
"--path", "foo/path-does-not-exist"]
out = self.notfoundtest(cmd)
self.matchoutput(out,
"Path foo/path-does-not-exist does not match any "
"parameter definitions of archetype aquilon.",
cmd)
def test_300_show_archetype_no_params(self):
cmd = ["show_parameter_definition", "--archetype", "windows",
"--path", "path-does-not-exist"]
out = self.badrequesttest(cmd)
self.matchoutput(out,
"Unknown parameter template path-does-not-exist.",
cmd)
def test_300_invalid_path_feature(self):
for path in ["!badchar", "@badchar", "#badchar", "$badchar", "%badchar", "^badchar",
"&badchar", "*badchar", ":badchar", ";badcharjk", "+badchar"]:
cmd = ["add_parameter_definition", "--feature", "pre_host", "--type=host",
"--path=%s" % path, "--value_type=string"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"'%s' is not a valid value for a path component." % path,
cmd)
def test_300_add_bad_feature_type(self):
cmd = ["add_parameter_definition", "--feature", "pre_host",
"--type=no-such-type",
"--path=testpath", "--value_type=string"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"Unknown feature type 'no-such-type'. The valid "
"values are: hardware, host, interface.",
cmd)
def test_300_search_bad_feature_type(self):
cmd = ["search_parameter_definition", "--feature", "pre_host",
"--type=no-such-type"]
err = self.badrequesttest(cmd)
self.matchoutput(err,
"Unknown feature type 'no-such-type'. The valid "
"values are: hardware, host, interface.",
cmd)
def test_300_show_bad_path_feature(self):
cmd = ["show_parameter_definition", "--feature", "pre_host",
"--type", "host", "--path", "path-does-not-exist"]
out = self.notfoundtest(cmd)
self.matchoutput(out,
"Path path-does-not-exist does not match any "
"parameter definitions of host feature pre_host.",
cmd)
def test_300_show_feature_no_params(self):
cmd = ["show_parameter_definition", "--feature", "unused_no_params",
"--type", "host", "--path", "path-does-not-exist"]
out = self.notfoundtest(cmd)
self.matchoutput(out,
"No parameter definitions found for host feature "
"unused_no_params.",
cmd)
    def test_400_verify_all(self):
        """Verify the rendered output of every parameter definition added so far."""
        cmd = ["search_parameter_definition", "--archetype", "aquilon"]
        out = self.commandtest(cmd)
        for path, params in default_param_defs.items():
            # Assemble the expected regex incrementally from the recorded
            # properties of each definition; assembly order mirrors the
            # broker's display order.
            pattern = "Parameter Definition: " + path
            if params.get("required", False):
                pattern += r' \[required\]'
            pattern += r"\s*"
            pattern += r'Archetype: aquilon\s+'
            if "type" in params:
                pattern += "Value Type: " + params["type"] + r"\s*"
                # JSON definitions may also display their schema block.
                if params["type"] == "json" and "schema" in params:
                    pattern += r"Schema: \{\n(^ .*\n)+\s*\}\s*"
            else:
                # "string" is displayed when no explicit type was given.
                pattern += r"Value Type: string\s*"
            pattern += r"Template: foo\s*"
            if "activation" in params:
                pattern += "Activation: " + params["activation"] + r"\s*"
            else:
                # "dispatch" is displayed when no explicit activation was given.
                pattern += r"Activation: dispatch\s*"
            self.searchoutput(out, pattern, cmd)
        # Definitions added by the slash-trimming test also show up.
        self.searchoutput(out, r'Parameter Definition: startslash\s*', cmd)
        self.searchoutput(out, r'Parameter Definition: endslash\s*', cmd)
    def test_400_verify_all_proto(self):
        """Verify the protobuf rendering of all archetype parameter definitions."""
        cmd = ["search_parameter_definition", "--archetype", "aquilon", "--format=proto"]
        # expect=12: the default definitions plus the two slash-trimmed ones.
        result = self.protobuftest(cmd, expect=12)[:]
        # Index the returned messages by their full path for easy lookup.
        param_defs = {param_def.path: param_def for param_def in result}
        self.assertIn('foo/endslash', param_defs)
        self.assertEqual(param_defs['foo/endslash'].value_type, 'string')
        self.assertIn('foo/startslash', param_defs)
        self.assertEqual(param_defs['foo/startslash'].value_type, 'string')
        for path, params in default_param_defs.items():
            self.assertIn("foo/" + path, param_defs)
            paramdef = param_defs["foo/" + path]
            self.assertEqual(paramdef.template, "foo")
            if "type" in params:
                self.assertEqual(paramdef.value_type, params["type"])
            else:
                # "string" is the value type when none was given explicitly.
                self.assertEqual(paramdef.value_type, "string")
            self.assertEqual(paramdef.default, "")
            self.assertEqual(paramdef.is_required,
                             params.get("required", False))
            self.assertEqual(paramdef.archetype, "aquilon")
            if "activation" in params:
                # Map the textual activation name to its proto enum number.
                val = self.activation_type.values_by_name[params["activation"].upper()]
                self.assertEqual(paramdef.activation, val.number)
            else:
                self.assertEqual(paramdef.activation, self.proto.DISPATCH)
def test_410_verify_feature_all(self):
cmd = ["search_parameter_definition", "--feature", "pre_host", "--type=host"]
out = self.commandtest(cmd)
for path, params in default_param_defs.items():
if "activation" in params:
continue
pattern = "Parameter Definition: " + path
if params.get("required", False):
pattern += r' \[required\]'
pattern += r'\s*Feature: pre_host\s*'
| |
#!/opt/anaconda2/bin/python
# -*- coding: utf-8 -*-
"""
################################################################################
#
# Copyright (c) 2016 <NAME>
# All rights reserved
# Distributed under the terms of the MIT license
#
################################################################################
#
# Filename: avnet_estimator.py
#
# Description:
# AVNET estimator
#
# Authors:
# <NAME>
#
################################################################################
#
# History:
# --------
# Date Who Ticket Description
# ---------- --- --------- ------------------------------------------------
# 2016-03-17 wm Initial version
#
################################################################################
"""
from __future__ import print_function
DEBUG = False
__all__ = []
__version__ = "0.0.1"
__date__ = '2016-03-17'
__updated__ = '2016-03-17'
"""
Index,Name,Type,Description,Values
1,PRODUCT_NUMBER,Integer,Product number,"[1249, 10283064]"
9,CUSTOMER_SEGMENT1,Character,Customer segment with respect to dimension 1,A and B
29,SPECIAL_PART,Character,Special part indicator,"Maybe, No, and Yes"
3,TRANSACTION_DATE,Date,Transaction date,"[2012-07-27, 2015-08-03]"
15,CUSTOMER_FIRST_ORDER_DATE,Date,Date of customer's first order,"[1990-10-19, 2015-04-16]"
Good:
4,PRODUCT_PRICE,Float,"GROSS_SALES / TOTAL_BOXES_SOLD if PRODUCT_SALES_UNIT = N,
GROSS_SALES / SHIPPING_WEIGHT if PRODUCT_SALES_UNIT = Y",[1.05 to 181.49]
5,GROSS_SALES,Float,Gross sales total (Nb. negative values represent returns),[0 to 5664.82]
11,CUSTOMER_TYPE1,Integer,Customer type,"[1, 3]"
12,CUSTOMER_TYPE2,Character,"Customer type, a subset of CUSTOMER_TYPE1","A, B, and C"
13,CUSTOMER_MANAGED_LEVEL,Character,"Level at which customer is managed, N = National and L = Local",L
14,CUSTOMER_ACCOUNT_TYPE,Character,Customer account type,ST and DM
20,BRAND,Character,Product manufacturer's brand,IN_HOUSE and NOT_IN_HOUSE
22,PRODUCT_SALES_UNIT,Character,"Y = Product sold by weight, N = Product sold by the number of boxes",N and Y
23,SHIPPING_WEIGHT,Float,Total shipping weight (Nb. negative values represent returns),[0 to 11513.25]
24,TOTAL_BOXES_SOLD,Integer,Total boxes sold to the customer (Nb. negative values represent returns),[0 to 301]
25,PRODUCT_COST1,Float,"Product cost estimation 1 (Nb. negative values represent returns).
This is the cost to the Supplier and is a marker of ""market price"".",[0 to 5664.4]
26,PRODUCT_UNIT_OF_MEASURE,Character,"Product unit of measure - B = Box, LB = Pound, EA = Each","B, EA, and LB"
27,ORDER_SOURCE,Character,How the customer order was placed,A and B
28,PRICE_METHOD,Integer,Method used to price the product,"[1, 5]"
Bad:
6,REGION,Integer,Region which sold to the customer,[1]
7,WAREHOUSE,Integer,Warehouse which sold to the customer,[1]
10,CUSTOMER_SEGMENT2,Integer,Customer segment with respect to dimension 2,[1]
2,CUSTOMER_NUMBER,Integer,Customer number,"[1, 146]"
8,CUSTOMER_ZIP,Integer,Customer zip code,"[1, 98]"
16,PRODUCT_CLASS_ID1,Integer,ID for highest level of product classification,"[1, 12]"
17,PRODUCT_CLASS_ID2,Integer,ID for second highest level of product classification,"[15, 31]"
18,PRODUCT_CLASS_ID3,Integer,ID for second from the lowest level of product classification,"[136, 368]"
19,PRODUCT_CLASS_ID4,Integer,ID for the lowest level of product classification,"[1371, 7772]"
21,PRODUCT_ATTRIBUTE_X,Integer,A certain product attribute,"[5, 999]"
"""
# Categorical string columns converted to integer codes via pandas.factorize.
FACTORIZABLE = [
    'CUSTOMER_SEGMENT1', 'CUSTOMER_TYPE2', 'CUSTOMER_MANAGED_LEVEL',
    'CUSTOMER_ACCOUNT_TYPE', 'BRAND', 'PRODUCT_SALES_UNIT',
    'PRODUCT_UNIT_OF_MEASURE', 'ORDER_SOURCE', 'SPECIAL_PART'
]
# Columns with a single distinct value (see header notes) -- uninformative.
TO_DROP = ['REGION', 'WAREHOUSE', 'CUSTOMER_SEGMENT2']
# Product-level attributes.
ATTRIBUTES = ['PRODUCT_CLASS_ID1', 'PRODUCT_CLASS_ID2', 'PRODUCT_CLASS_ID3',
              'PRODUCT_CLASS_ID4',
              'BRAND', # binary
              'PRODUCT_ATTRIBUTE_X',
              'PRODUCT_SALES_UNIT', # binary
              'PRODUCT_UNIT_OF_MEASURE',
              'SPECIAL_PART'
              ]
# Transaction-level variables (dates already expanded to _1/_2/_3 parts).
VARS = ['CUSTOMER_NUMBER', 'TRANSACTION_DATE_1', 'TRANSACTION_DATE_2',
        'TRANSACTION_DATE_3', 'PRODUCT_PRICE', 'GROSS_SALES',
        'CUSTOMER_ZIP', 'CUSTOMER_TYPE1', 'CUSTOMER_TYPE2',
        'CUSTOMER_MANAGED_LEVEL', # binary
        'CUSTOMER_ACCOUNT_TYPE', # binary
        'CUSTOMER_FIRST_ORDER_DATE_1', 'CUSTOMER_FIRST_ORDER_DATE_2',
        'CUSTOMER_FIRST_ORDER_DATE_3', 'SHIPPING_WEIGHT', 'TOTAL_BOXES_SOLD',
        'PRODUCT_COST1',
        'ORDER_SOURCE', #binary
        'PRICE_METHOD']
def OneHot(df, colnames):
    """One-hot encode each column in *colnames*, replacing it with
    positional dummy columns named ``<col>_1`` .. ``<col>_n``."""
    from pandas import get_dummies, concat
    for name in colnames:
        indicators = get_dummies(df[name])
        # Rename the dummy columns by position rather than by category value.
        new_labels = {old: name + '_' + str(pos + 1)
                      for pos, old in enumerate(indicators.columns.values)}
        indicators = indicators.rename(columns=new_labels)
        df = concat([df, indicators], axis=1)
    return df.drop(colnames, axis=1)
def ParseDates(df, colnames):
    """Replace each 'YYYY-MM-DD[ time]' column in *colnames* with integer
    year/month/day columns named ``<col>_1`` .. ``<col>_3``."""
    from pandas import concat
    for name in colnames:
        # Keep only the date part; any time-of-day after the space is dropped.
        date_part = df[name].str.split(' ').str.get(0)
        parts = date_part.str.split('-', expand=True).astype(int)
        parts = parts.rename(columns={old: name + '_' + str(pos + 1)
                                      for pos, old in enumerate(parts.columns.values)})
        df = concat([df, parts], axis=1)
    return df.drop(colnames, axis=1)
def Fractions(df, symbols, colnames):
    """Return a Series with, for each column in *colnames*, the fraction of
    rows taking each symbol listed in ``symbols[col]`` (0 for absent symbols).

    Entries are labelled ``<col>_<symbol>``, in the order of ``symbols[col]``.
    """
    from pandas import Series, concat
    # Series.append was removed in pandas 2.0; collect the pieces and concat
    # them once instead -- the resulting Series is identical.
    pieces = []
    for col in colnames:
        valcounts = df[col].value_counts(normalize=True)
        valcounts = valcounts.reindex(symbols[col], fill_value=0.)
        pieces.append(valcounts.rename(lambda i: col + '_' + str(i)))
    if not pieces:
        return Series(dtype=float)
    return concat(pieces)
def AvnetScorer(y_true, y_pred):
    """Competition score: 1e6 * (1 - weighted misclassification cost fraction).

    Labels are 0/1/2. The confusion matrix is built with rows = predicted
    label, columns = true label (same orientation as the previous
    sklearn ``confusion_matrix(...).T``), then weighted by the asymmetric
    cost matrix and normalized by the total sample count.
    """
    # Computed directly with numpy; this drops the sklearn dependency while
    # producing the same counts as confusion_matrix(labels=[0, 1, 2]).T.
    from numpy import asarray, zeros, multiply
    y_true = asarray(y_true)
    y_pred = asarray(y_pred)
    labels = [0, 1, 2]
    cmx = zeros((3, 3))
    for ti, t in enumerate(labels):
        for pi, p in enumerate(labels):
            # Row = predicted, column = true.
            cmx[pi, ti] = ((y_true == t) & (y_pred == p)).sum()
    cost = array = [[0.0, 0.20, 0.70], [0.50, 0.0, 0.01], [1.00, 0.01, 0.0]]
    score_m = multiply(cmx, cost)
    score = 1e6 * (1. - score_m.sum() / cmx.sum())
    return score
#from sklearn.base import BaseEstimator, ClassifierMixin
#from xgb_sklearn import XGBClassifier, XGBRegressor
#class XGBClassifier_01(XGBClassifier):
# def __init__(self, max_depth=3, learning_rate=0.1,
# n_estimators=100, silent=True,
# objective="binary:logistic",
# nthread=-1, gamma=0, min_child_weight=1,
# max_delta_step=0, subsample=1, colsample_bytree=1, colsample_bylevel=1,
# reg_alpha=0, reg_lambda=1, scale_pos_weight=1,
# base_score=0.5, seed=0, missing=None, num_pairsample=1, booster_type='gbtree'):
# super(XGBClassifier_01, self).__init__(max_depth, learning_rate,
# n_estimators, silent, objective,
# nthread, gamma, min_child_weight,
# max_delta_step, subsample,
# colsample_bytree, colsample_bylevel,
# reg_alpha, reg_lambda,
# scale_pos_weight, base_score, seed, missing, num_pairsample, booster_type)
# pass
# def fit(self, X, y, sample_weight=None, eval_set=None, eval_metric=None,
# early_stopping_rounds=None, verbose=True):
# from numpy import clip
# new_y = clip(y, 0, 1)
# return super(XGBClassifier_01, self).fit(X, new_y, sample_weight,
# eval_set, eval_metric, early_stopping_rounds, verbose)
#
# pass
#class XGBRegressorQ(XGBRegressor):
# def __init__(self, max_depth=3, learning_rate=0.1,
# n_estimators=100, silent=True,
# objective="binary:logistic",
# nthread=-1, gamma=0, min_child_weight=1,
# max_delta_step=0, subsample=1, colsample_bytree=1, colsample_bylevel=1,
# reg_alpha=0, reg_lambda=1, scale_pos_weight=1,
# base_score=0.5, seed=0, missing=None, num_pairsample=1, booster_type='gbtree'):
# super(XGBRegressorQ, self).__init__(max_depth, learning_rate,
# n_estimators, silent, objective,
# nthread, gamma, min_child_weight,
# max_delta_step, subsample,
# colsample_bytree, colsample_bylevel,
# reg_alpha, reg_lambda,
# scale_pos_weight, base_score, seed, missing, num_pairsample, booster_type)
# pass
#
# def predict(self, data, output_margin=False, ntree_limit=0):
# # pylint: disable=missing-docstring,invalid-name
# result = super(XGBRegressorQ, self).predict(data, output_margin,
# ntree_limit)
# from numpy import digitize
## result = digitize(result, [0.33, 0.67])
# #result = digitize(result, [0.5, 1.5])
# result = digitize(result, [0.6, 1.6])
# return result
# pass
def work(estimator,
nest,
njobs,
nfolds,
cv_grid,
clf_kwargs,
do_hyperopt):
from numpy.random import seed as random_seed
random_seed(1)
from pandas import read_csv
train = read_csv('../../data/example_data.csv')
train = train.drop(TO_DROP, axis=1)
for col in FACTORIZABLE:
from pandas import factorize
train[col] = factorize(train[col])[0]
pass
train = ParseDates(train, ['TRANSACTION_DATE', 'CUSTOMER_FIRST_ORDER_DATE'])
symbols = {}
for col in [
'PRICE_METHOD', 'ORDER_SOURCE', 'CUSTOMER_ACCOUNT_TYPE',
'CUSTOMER_MANAGED_LEVEL', 'CUSTOMER_TYPE2', 'CUSTOMER_TYPE1',
'CUSTOMER_ZIP', 'CUSTOMER_NUMBER'
]:
uniq = set(train[col])
symbols[col] = list(uniq)
pass
grouped = train.groupby(['PRODUCT_NUMBER', 'CUSTOMER_SEGMENT1'])
samples = []
for k, df in grouped:
#print('{' + '"{}", \'{}\''.format(k[0], 'B' if k[1] else 'A') + '},')
sample = Fractions(df, symbols, [
'PRICE_METHOD', 'ORDER_SOURCE', 'CUSTOMER_ACCOUNT_TYPE',
'CUSTOMER_MANAGED_LEVEL', 'CUSTOMER_TYPE2', 'CUSTOMER_TYPE1',
#'CUSTOMER_ZIP', 'CUSTOMER_NUMBER'
])
ATTRIBUTES2 = ['PRODUCT_CLASS_ID1',
'BRAND', # binary
'PRODUCT_SALES_UNIT', # binary
'PRODUCT_UNIT_OF_MEASURE',
'SPECIAL_PART'
]
sample = sample.append(df.iloc[0][ATTRIBUTES2])
########################
boxes_sold = df['TOTAL_BOXES_SOLD']
pcost1 = df['PRODUCT_COST1'].abs()
pcost1_per_item = pcost1 / boxes_sold
pcost1_mean = pcost1_per_item.mean()
pcost1_std = pcost1_per_item.std()
sample.set_value('PCOST1_REL_STD', pcost1_std / pcost1_mean)
sample.set_value('PCOST1_REL_MAX', pcost1_per_item.max() / pcost1_mean)
sample.set_value('PCOST1_REL_MIN', pcost1_per_item.min() / pcost1_mean)
price = df['PRODUCT_PRICE'].abs()
price_mean = price.mean()
price_std = price.std()
sample.set_value('PRICE_REL_STD', price_std / price_mean)
sample.set_value('PRICE_REL_MAX', price.max() / price_mean)
sample.set_value('PRICE_REL_MIN', price.min() / price_mean)
if sample['PRODUCT_UNIT_OF_MEASURE'] < 2:
commision = price / pcost1_per_item
else:
commision = df['GROSS_SALES'].abs() / pcost1
commision_mean = commision.mean()
commision_std = commision.std()
sample.set_value('COMMN_MEAN', commision_mean)
sample.set_value('COMMN_REL_STD', commision_std / commision_mean)
sample.set_value('COMMN_REL_MAX', commision.max() / commision_mean)
sample.set_value('COMMN_REL_MIN', commision.min() / commision_mean)
# tx_days = df['TRANSACTION_DATE_1'].combine(
# df[['TRANSACTION_DATE_2', 'TRANSACTION_DATE_3']],
# func=lambda y, m_d: (y - 1970)* 365 + m_d['TRANSACTION_DATE_2'] * 30 + m_d['TRANSACTION_DATE_3'] - 1)
# sample.set_value('FIRST_TX', tx_days.min())
# august2014 = (2014 - 1970) * 365 + 8 * 30
# sample.set_value('LAST_365D_TX', (tx_days > august2014).sum())
# monthly = df['TRANSACTION_DATE_2'].value_counts(normalize=True)
# monthly = monthly.reindex([i + 1 for i in range(12)], fill_value=0.)
# sample.set_value('TX_Q1', monthly[[1, 2, 3]].sum())
# sample.set_value('TX_Q2', monthly[[4, 5, 6]].sum())
# sample.set_value('TX_Q3', monthly[[7, 8, 9]].sum())
# sample.set_value('TX_Q4', monthly[[10, 11, 12]].sum())
# sample = sample.append(monthly.rename(lambda i: 'TX_M_' + str(i)))
#
# tx_days = df['TRANSACTION_DATE_1'].combine(
# df[['TRANSACTION_DATE_2', 'TRANSACTION_DATE_3']],
# func=lambda y, m_d: y * 365 + m_d['TRANSACTION_DATE_2'] * 30 + m_d['TRANSACTION_DATE_3'])
# tx_days.sort()
# delta_tx_days = tx_days.diff()
# means_delta_tx_days = delta_tx_days.mean()
# sample.set_value('DTX_DAYS_MEAN', means_delta_tx_days)
# sample.set_value('DTX_DAYS_REL_STD', delta_tx_days.std() / means_delta_tx_days)
########################
# #most frequent customer
# custcounts = df['CUSTOMER_NUMBER'].value_counts()
# topcust = custcounts.index[0]
# sample.set_value('TOP_CUST', topcust)
# # most frequent zip
# zipcounts = df['CUSTOMER_ZIP'].value_counts()
# topzip = zipcounts.index[0]
# sample.set_value('TOP_ZIP', topzip)
# # number of unique transactions
# sample.set_value('NTRANS', len(df))
# # number of unique customers
# custcounts = df['CUSTOMER_NUMBER'].value_counts()
# sample.set_value('NCUST', len(custcounts))
#sample = sample.append(df.iloc[0][['SPECIAL_PART']])
samples.append(sample)
pass
from pandas import DataFrame
train_df = DataFrame.from_records(samples)
train_y = train_df['SPECIAL_PART'].values
train_X = train_df.drop(['SPECIAL_PART'], axis=1)
train_keys = [k for k, _ in grouped]
from numpy import digitize
train_y = digitize(train_y, [0.5])
avnet_kwargs = \
{
#'objective': 'reg:logistic',
'objective': 'rank:pairwise',
'learning_rate': 0.045,
'min_child_weight': 50,
'subsample': 1.0,
'colsample_bytree': 1.0,
'max_depth': 7,
'n_estimators': nest,
'nthread': njobs,
'seed': 0,
#'cache_opt': 1,
'missing': float('nan')
#'scoring': NegQWKappaScorer
}
# override kwargs with any changes
for k, v in clf_kwargs.items():
avnet_kwargs[k] = v
pass
# create model instance
from xgb_sklearn import XGBClassifier
if estimator == 'XGBClassifier':
clf = XGBClassifier(**avnet_kwargs)
pass
else:
clf = globals()[estimator](**avnet_kwargs)
pass
from sklearn.metrics import make_scorer
tco_scorer = make_scorer(AvnetScorer)
if do_hyperopt:
def objective(space):
param_grid = {'objective': ['binary:logistic']}
#param_grid = {'objective': ['binary:logitraw']}
#param_grid = {'objective': ['rank:pairwise']}
#param_grid = {'objective': ['rank:pairwise'], 'booster_type': ['gblinear']}
for k, v in space.items():
if k in ['n_estimators', 'max_depth', 'min_child_weight', 'num_pairwise']:
v = int(v)
pass
param_grid[k] = [v]
pass
from sklearn.cross_validation import StratifiedKFold, LeaveOneOut
from sklearn.grid_search import GridSearchCV
from sklearn.cross_validation import _PartitionIterator
class CustomLOO(_PartitionIterator):
def __init__(self, train_keys):
ids = set(t[0] for t in train_keys)
self.n_folds = len(ids)
self.n = len(train_keys)
from numpy import zeros, array
test_folds = zeros(len(train_keys))
for i, k in enumerate(ids):
mask = [t[0] == k for t in train_keys]
test_folds[array(mask)] = i
pass
self.test_folds = test_folds
pass
#def _iter_test_indices(self):
# return range(self.n_folds)
def _iter_test_masks(self):
for i in range(self.n_folds):
yield self.test_folds == i
def __len__(self):
return self.n_folds
pass
grid = GridSearchCV(estimator=clf,
param_grid=param_grid,
#cv=StratifiedKFold(train_y, n_folds=nfolds),
#cv=LeaveOneOut(91),
cv=CustomLOO(train_keys),
scoring=tco_scorer,
n_jobs=1,
#verbose=2,
refit=False)
grid.fit(train_X, train_y)
print('best score: {:.5f} best params: {}'.format(grid.best_score_, grid.best_params_))
return -grid.best_score_
from | |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.10
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# SWIG bootstrap: pick the unbound-method factory appropriate for the
# running interpreter (Python 3 has no 'new' module).
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _IGESControl.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2,6,0):
    def swig_import_helper():
        # Locate the compiled _IGESControl extension next to this module,
        # falling back to a plain import if imp cannot find it.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_IGESControl', [dirname(__file__)])
        except ImportError:
            import _IGESControl
            return _IGESControl
        if fp is not None:
            try:
                _mod = imp.load_module('_IGESControl', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _IGESControl = swig_import_helper()
    del swig_import_helper
else:
    import _IGESControl
del version_info
# Alias 'property' so generated code still loads on ancient interpreters.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    # Route attribute writes through SWIG's registered setter methods; with
    # static=1, refuse to create attributes the wrapped class does not declare.
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: unknown attribute names are allowed into __dict__.
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
    # Route attribute reads through SWIG's registered getter methods.
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError(name)
def _swig_repr(self):
    # repr that degrades gracefully when the underlying C++ object is gone.
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Old-style-class fallback for interpreters without new-style 'object'.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
    # Wrap a setter so that only pre-existing attributes (plus 'this' and
    # 'thisown') may be assigned on the proxy instance.
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr
class SwigPyIterator(object):
    # Abstract base for C-level iterators; concrete instances are produced by
    # the _IGESControl extension, never constructed from Python.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr
    __swig_destroy__ = _IGESControl.delete_SwigPyIterator
    def __iter__(self): return self
# Bind the C-level iterator operations onto the proxy class, then register
# the class with the SWIG runtime.
SwigPyIterator.value = new_instancemethod(_IGESControl.SwigPyIterator_value,None,SwigPyIterator)
SwigPyIterator.incr = new_instancemethod(_IGESControl.SwigPyIterator_incr,None,SwigPyIterator)
SwigPyIterator.decr = new_instancemethod(_IGESControl.SwigPyIterator_decr,None,SwigPyIterator)
SwigPyIterator.distance = new_instancemethod(_IGESControl.SwigPyIterator_distance,None,SwigPyIterator)
SwigPyIterator.equal = new_instancemethod(_IGESControl.SwigPyIterator_equal,None,SwigPyIterator)
SwigPyIterator.copy = new_instancemethod(_IGESControl.SwigPyIterator_copy,None,SwigPyIterator)
SwigPyIterator.next = new_instancemethod(_IGESControl.SwigPyIterator_next,None,SwigPyIterator)
SwigPyIterator.__next__ = new_instancemethod(_IGESControl.SwigPyIterator___next__,None,SwigPyIterator)
SwigPyIterator.previous = new_instancemethod(_IGESControl.SwigPyIterator_previous,None,SwigPyIterator)
SwigPyIterator.advance = new_instancemethod(_IGESControl.SwigPyIterator_advance,None,SwigPyIterator)
SwigPyIterator.__eq__ = new_instancemethod(_IGESControl.SwigPyIterator___eq__,None,SwigPyIterator)
SwigPyIterator.__ne__ = new_instancemethod(_IGESControl.SwigPyIterator___ne__,None,SwigPyIterator)
SwigPyIterator.__iadd__ = new_instancemethod(_IGESControl.SwigPyIterator___iadd__,None,SwigPyIterator)
SwigPyIterator.__isub__ = new_instancemethod(_IGESControl.SwigPyIterator___isub__,None,SwigPyIterator)
SwigPyIterator.__add__ = new_instancemethod(_IGESControl.SwigPyIterator___add__,None,SwigPyIterator)
SwigPyIterator.__sub__ = new_instancemethod(_IGESControl.SwigPyIterator___sub__,None,SwigPyIterator)
SwigPyIterator_swigregister = _IGESControl.SwigPyIterator_swigregister
SwigPyIterator_swigregister(SwigPyIterator)
import OCC.Standard
import OCC.XSControl
import OCC.IFSelect
import OCC.MMgt
import OCC.TCollection
import OCC.TColStd
import OCC.Interface
import OCC.Message
import OCC.Dico
import OCC.TopoDS
import OCC.TopLoc
import OCC.gp
import OCC.TopAbs
import OCC.TopTools
def register_handle(handle, base_object):
    """
    Inserts the handle into the base object to
    prevent memory corruption in certain cases.

    Best effort: objects without an ``IsKind`` method (or for which the
    kind test fails) are silently left untouched.
    """
    try:
        if base_object.IsKind("Standard_Transient"):
            base_object.thisHandle = handle
            base_object.thisown = False
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the best-effort behavior is unchanged.
        pass
class IGESControl_ActorWrite(object):
    # SWIG proxy for the C++ IGESControl_ActorWrite class; all methods
    # delegate to the _IGESControl extension module.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        :rtype: None
        """
        _IGESControl.IGESControl_ActorWrite_swiginit(self,_IGESControl.new_IGESControl_ActorWrite(*args))
    def Recognize(self, *args):
        """
        * Recognizes a ShapeMapper
        :param start:
        :type start: Handle_Transfer_Finder &
        :rtype: bool
        """
        return _IGESControl.IGESControl_ActorWrite_Recognize(self, *args)
    def Transfer(self, *args):
        """
        * Transfers Shape to IGES Entities ModeTrans may be : 0 -> groups of Faces or 1 -> BRep
        :param start:
        :type start: Handle_Transfer_Finder &
        :param FP:
        :type FP: Handle_Transfer_FinderProcess &
        :rtype: Handle_Transfer_Binder
        """
        return _IGESControl.IGESControl_ActorWrite_Transfer(self, *args)
    def GetHandle(self):
        # Lazily create and cache the OCC handle; dropping ownership keeps the
        # C++ object alive through the handle (see register_handle).
        try:
            return self.thisHandle
        except:
            self.thisHandle = Handle_IGESControl_ActorWrite(self)
            self.thisown = False
            return self.thisHandle
    __swig_destroy__ = _IGESControl.delete_IGESControl_ActorWrite
# Bind the C-level methods and register the proxy class with SWIG.
IGESControl_ActorWrite.Recognize = new_instancemethod(_IGESControl.IGESControl_ActorWrite_Recognize,None,IGESControl_ActorWrite)
IGESControl_ActorWrite.Transfer = new_instancemethod(_IGESControl.IGESControl_ActorWrite_Transfer,None,IGESControl_ActorWrite)
IGESControl_ActorWrite_swigregister = _IGESControl.IGESControl_ActorWrite_swigregister
IGESControl_ActorWrite_swigregister(IGESControl_ActorWrite)
class Handle_IGESControl_ActorWrite(object):
    # OCC smart-pointer (Handle) proxy for IGESControl_ActorWrite.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _IGESControl.Handle_IGESControl_ActorWrite_swiginit(self,_IGESControl.new_Handle_IGESControl_ActorWrite(*args))
        # register the handle in the base object
        if len(args) > 0:
            register_handle(self, args[0])
    DownCast = staticmethod(_IGESControl.Handle_IGESControl_ActorWrite_DownCast)
    __swig_destroy__ = _IGESControl.delete_Handle_IGESControl_ActorWrite
# Bind the handle operations and register with SWIG.
Handle_IGESControl_ActorWrite.Nullify = new_instancemethod(_IGESControl.Handle_IGESControl_ActorWrite_Nullify,None,Handle_IGESControl_ActorWrite)
Handle_IGESControl_ActorWrite.IsNull = new_instancemethod(_IGESControl.Handle_IGESControl_ActorWrite_IsNull,None,Handle_IGESControl_ActorWrite)
Handle_IGESControl_ActorWrite.GetObject = new_instancemethod(_IGESControl.Handle_IGESControl_ActorWrite_GetObject,None,Handle_IGESControl_ActorWrite)
Handle_IGESControl_ActorWrite_swigregister = _IGESControl.Handle_IGESControl_ActorWrite_swigregister
Handle_IGESControl_ActorWrite_swigregister(Handle_IGESControl_ActorWrite)
# The wrapper def below is immediately replaced by the raw C function on the
# last line (SWIG-generated pattern); both are equivalent.
def Handle_IGESControl_ActorWrite_DownCast(*args):
  return _IGESControl.Handle_IGESControl_ActorWrite_DownCast(*args)
Handle_IGESControl_ActorWrite_DownCast = _IGESControl.Handle_IGESControl_ActorWrite_DownCast
class IGESControl_AlgoContainer(object):
    # SWIG proxy for the C++ IGESControl_AlgoContainer class.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _IGESControl.IGESControl_AlgoContainer_swiginit(self,_IGESControl.new_IGESControl_AlgoContainer(*args))
    def GetHandle(self):
        # Lazily create and cache the OCC handle (see register_handle).
        try:
            return self.thisHandle
        except:
            self.thisHandle = Handle_IGESControl_AlgoContainer(self)
            self.thisown = False
            return self.thisHandle
    __swig_destroy__ = _IGESControl.delete_IGESControl_AlgoContainer
# Register the proxy class with the SWIG runtime.
IGESControl_AlgoContainer_swigregister = _IGESControl.IGESControl_AlgoContainer_swigregister
IGESControl_AlgoContainer_swigregister(IGESControl_AlgoContainer)
class Handle_IGESControl_AlgoContainer(object):
    # OCC smart-pointer (Handle) proxy for IGESControl_AlgoContainer.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        _IGESControl.Handle_IGESControl_AlgoContainer_swiginit(self,_IGESControl.new_Handle_IGESControl_AlgoContainer(*args))
        # register the handle in the base object
        if len(args) > 0:
            register_handle(self, args[0])
    DownCast = staticmethod(_IGESControl.Handle_IGESControl_AlgoContainer_DownCast)
    __swig_destroy__ = _IGESControl.delete_Handle_IGESControl_AlgoContainer
# Bind the handle operations and register with SWIG.
Handle_IGESControl_AlgoContainer.Nullify = new_instancemethod(_IGESControl.Handle_IGESControl_AlgoContainer_Nullify,None,Handle_IGESControl_AlgoContainer)
Handle_IGESControl_AlgoContainer.IsNull = new_instancemethod(_IGESControl.Handle_IGESControl_AlgoContainer_IsNull,None,Handle_IGESControl_AlgoContainer)
Handle_IGESControl_AlgoContainer.GetObject = new_instancemethod(_IGESControl.Handle_IGESControl_AlgoContainer_GetObject,None,Handle_IGESControl_AlgoContainer)
Handle_IGESControl_AlgoContainer_swigregister = _IGESControl.Handle_IGESControl_AlgoContainer_swigregister
Handle_IGESControl_AlgoContainer_swigregister(Handle_IGESControl_AlgoContainer)
# The wrapper def below is immediately replaced by the raw C function on the
# last line (SWIG-generated pattern); both are equivalent.
def Handle_IGESControl_AlgoContainer_DownCast(*args):
  return _IGESControl.Handle_IGESControl_AlgoContainer_DownCast(*args)
Handle_IGESControl_AlgoContainer_DownCast = _IGESControl.Handle_IGESControl_AlgoContainer_DownCast
class IGESControl_Controller(OCC.XSControl.XSControl_Controller):
    # SWIG proxy for the C++ IGESControl_Controller class.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Initializes the use of IGES Norm (the first time) and returns a Controller for IGES-5.1 If <modefnes> is True, sets it to internal FNES format
        :param modefnes: default value is Standard_False
        :type modefnes: bool
        :rtype: None
        """
        _IGESControl.IGESControl_Controller_swiginit(self,_IGESControl.new_IGESControl_Controller(*args))
    def Init(*args):
        """
        * Standard Initialisation. It creates a Controller for IGES and records it to various names, available to select it later Returns True when done, False if could not be done Also, it creates and records an Adaptor for FNES
        :rtype: bool
        """
        return _IGESControl.IGESControl_Controller_Init(*args)
    # Made static below; callable as IGESControl_Controller.Init().
    Init = staticmethod(Init)
    def GetHandle(self):
        # Lazily create and cache the OCC handle (see register_handle).
        try:
            return self.thisHandle
        except:
            self.thisHandle = Handle_IGESControl_Controller(self)
            self.thisown = False
            return self.thisHandle
    __swig_destroy__ = _IGESControl.delete_IGESControl_Controller
# Register the proxy class with the SWIG runtime type system.
IGESControl_Controller_swigregister = _IGESControl.IGESControl_Controller_swigregister
IGESControl_Controller_swigregister(IGESControl_Controller)
def IGESControl_Controller_Init(*args):
    """
    * Standard Initialisation. It creates a Controller for IGES and records it to various names, available to select it later Returns True when done, False if could not be done Also, it creates and records an Adaptor for FNES
    :rtype: bool
    """
    return _IGESControl.IGESControl_Controller_Init(*args)
class Handle_IGESControl_Controller(OCC.XSControl.Handle_XSControl_Controller):
    """SWIG proxy for the OCC smart handle of IGESControl_Controller."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        _IGESControl.Handle_IGESControl_Controller_swiginit(self, _IGESControl.new_Handle_IGESControl_Controller(*args))
        # When built from an existing object, register this handle on it.
        if args:
            register_handle(self, args[0])

    DownCast = staticmethod(_IGESControl.Handle_IGESControl_Controller_DownCast)
    __swig_destroy__ = _IGESControl.delete_Handle_IGESControl_Controller
# Bind the SWIG-generated free functions as methods of the handle class.
Handle_IGESControl_Controller.Nullify = new_instancemethod(_IGESControl.Handle_IGESControl_Controller_Nullify,None,Handle_IGESControl_Controller)
Handle_IGESControl_Controller.IsNull = new_instancemethod(_IGESControl.Handle_IGESControl_Controller_IsNull,None,Handle_IGESControl_Controller)
Handle_IGESControl_Controller.GetObject = new_instancemethod(_IGESControl.Handle_IGESControl_Controller_GetObject,None,Handle_IGESControl_Controller)
# Register the proxy class with the SWIG runtime type system.
Handle_IGESControl_Controller_swigregister = _IGESControl.Handle_IGESControl_Controller_swigregister
Handle_IGESControl_Controller_swigregister(Handle_IGESControl_Controller)
def Handle_IGESControl_Controller_DownCast(*args):
    # Module-level down-cast helper; rebound to the raw C function just below.
    return _IGESControl.Handle_IGESControl_Controller_DownCast(*args)
Handle_IGESControl_Controller_DownCast = _IGESControl.Handle_IGESControl_Controller_DownCast
class IGESControl_IGESBoundary(object):
    """SWIG proxy translating IGES boundary entities (types 141, 142, 508)."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Creates an object and calls inherited constructor.
        :rtype: None
        * Creates an object and calls inherited constructor.
        :param CS:
        :type CS: IGESToBRep_CurveAndSurface &
        :rtype: None
        """
        _IGESControl.IGESControl_IGESBoundary_swiginit(self,_IGESControl.new_IGESControl_IGESBoundary(*args))
    def Check(self, *args):
        """
        * Checks result of translation of IGES boundary entities (types 141, 142 or 508). Checks consistency of 2D and 3D representations and keeps only one if they are inconsistent. Checks the closure of resulting wire and if it is not closed, checks 2D and 3D representation and updates the resulting wire to contain only closed representation.
        :param result:
        :type result: bool
        :param checkclosure:
        :type checkclosure: bool
        :param okCurve3d:
        :type okCurve3d: bool
        :param okCurve2d:
        :type okCurve2d: bool
        :rtype: void
        """
        return _IGESControl.IGESControl_IGESBoundary_Check(self, *args)
    def GetHandle(self):
        """Return the OCC Handle for this object, creating and caching it on first use."""
        try:
            return self.thisHandle
        # Fix: the original bare `except:` also swallowed KeyboardInterrupt /
        # SystemExit; only the missing-attribute case should trigger creation.
        except AttributeError:
            self.thisHandle = Handle_IGESControl_IGESBoundary(self)
            # Ownership moves to the handle so C++ controls the lifetime.
            self.thisown = False
            return self.thisHandle
    __swig_destroy__ = _IGESControl.delete_IGESControl_IGESBoundary
# Bind Check as an instance method and register the class with SWIG's runtime.
IGESControl_IGESBoundary.Check = new_instancemethod(_IGESControl.IGESControl_IGESBoundary_Check,None,IGESControl_IGESBoundary)
IGESControl_IGESBoundary_swigregister = _IGESControl.IGESControl_IGESBoundary_swigregister
IGESControl_IGESBoundary_swigregister(IGESControl_IGESBoundary)
class Handle_IGESControl_IGESBoundary(object):
    """SWIG proxy for the OCC smart handle of IGESControl_IGESBoundary."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        _IGESControl.Handle_IGESControl_IGESBoundary_swiginit(self, _IGESControl.new_Handle_IGESControl_IGESBoundary(*args))
        # When built from an existing object, register this handle on it.
        if args:
            register_handle(self, args[0])

    DownCast = staticmethod(_IGESControl.Handle_IGESControl_IGESBoundary_DownCast)
    __swig_destroy__ = _IGESControl.delete_Handle_IGESControl_IGESBoundary
# Bind the SWIG-generated free functions as methods of the handle class.
Handle_IGESControl_IGESBoundary.Nullify = new_instancemethod(_IGESControl.Handle_IGESControl_IGESBoundary_Nullify,None,Handle_IGESControl_IGESBoundary)
Handle_IGESControl_IGESBoundary.IsNull = new_instancemethod(_IGESControl.Handle_IGESControl_IGESBoundary_IsNull,None,Handle_IGESControl_IGESBoundary)
Handle_IGESControl_IGESBoundary.GetObject = new_instancemethod(_IGESControl.Handle_IGESControl_IGESBoundary_GetObject,None,Handle_IGESControl_IGESBoundary)
# Register the proxy class with the SWIG runtime type system.
Handle_IGESControl_IGESBoundary_swigregister = _IGESControl.Handle_IGESControl_IGESBoundary_swigregister
Handle_IGESControl_IGESBoundary_swigregister(Handle_IGESControl_IGESBoundary)
def Handle_IGESControl_IGESBoundary_DownCast(*args):
    # Module-level down-cast helper; rebound to the raw C function just below.
    return _IGESControl.Handle_IGESControl_IGESBoundary_DownCast(*args)
Handle_IGESControl_IGESBoundary_DownCast = _IGESControl.Handle_IGESControl_IGESBoundary_DownCast
class IGESControl_Reader(OCC.XSControl.XSControl_Reader):
    """SWIG proxy: reads an IGES file and gives access to its model and roots."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Creates a Reader from scratch
        :rtype: None
        * Creates a Reader from an already existing Session
        :param WS:
        :type WS: Handle_XSControl_WorkSession &
        :param scratch: default value is Standard_True
        :type scratch: bool
        :rtype: None
        """
        _IGESControl.IGESControl_Reader_swiginit(self,_IGESControl.new_IGESControl_Reader(*args))
    def SetReadVisible(self, *args):
        """
        * Set the translation of ALL Roots (if theReadOnlyVisible is False) or of Visible Roots (if theReadOnlyVisible is True)
        :param ReadRoot:
        :type ReadRoot: bool
        :rtype: None
        """
        return _IGESControl.IGESControl_Reader_SetReadVisible(self, *args)
    def GetReadVisible(self, *args):
        """
        * Returns the current read-only-visible-roots flag.
        :rtype: bool
        """
        return _IGESControl.IGESControl_Reader_GetReadVisible(self, *args)
    def IGESModel(self, *args):
        """
        * Returns the model as a IGESModel. It can then be consulted (header, product)
        :rtype: Handle_IGESData_IGESModel
        """
        return _IGESControl.IGESControl_Reader_IGESModel(self, *args)
    def PrintTransferInfo(self, *args):
        """
        * Prints Statistics and check list for Transfer
        :param failwarn:
        :type failwarn: IFSelect_PrintFail
        :param mode:
        :type mode: IFSelect_PrintCount
        :rtype: None
        """
        return _IGESControl.IGESControl_Reader_PrintTransferInfo(self, *args)
    __swig_destroy__ = _IGESControl.delete_IGESControl_Reader
# Bind the SWIG-generated free functions as methods of the reader class.
IGESControl_Reader.SetReadVisible = new_instancemethod(_IGESControl.IGESControl_Reader_SetReadVisible,None,IGESControl_Reader)
IGESControl_Reader.GetReadVisible = new_instancemethod(_IGESControl.IGESControl_Reader_GetReadVisible,None,IGESControl_Reader)
IGESControl_Reader.IGESModel = new_instancemethod(_IGESControl.IGESControl_Reader_IGESModel,None,IGESControl_Reader)
IGESControl_Reader.PrintTransferInfo = new_instancemethod(_IGESControl.IGESControl_Reader_PrintTransferInfo,None,IGESControl_Reader)
# Register the proxy class with the SWIG runtime type system.
IGESControl_Reader_swigregister = _IGESControl.IGESControl_Reader_swigregister
IGESControl_Reader_swigregister(IGESControl_Reader)
class IGESControl_ToolContainer(object):
    """SWIG proxy giving access to the IGES boundary translation tool."""
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        * Empty constructor
        :rtype: None
        """
        _IGESControl.IGESControl_ToolContainer_swiginit(self,_IGESControl.new_IGESControl_ToolContainer(*args))
    def IGESBoundary(self, *args):
        """
        * Returns IGESControl_IGESBoundary
        :rtype: Handle_IGESToBRep_IGESBoundary
        """
        return _IGESControl.IGESControl_ToolContainer_IGESBoundary(self, *args)
    def GetHandle(self):
        """Return the OCC Handle for this object, creating and caching it on first use."""
        try:
            return self.thisHandle
        # Fix: the original bare `except:` also swallowed KeyboardInterrupt /
        # SystemExit; only the missing-attribute case should trigger creation.
        except AttributeError:
            self.thisHandle = Handle_IGESControl_ToolContainer(self)
            # Ownership moves to the handle so C++ controls the lifetime.
            self.thisown = False
            return self.thisHandle
    __swig_destroy__ = _IGESControl.delete_IGESControl_ToolContainer
# Bind IGESBoundary as an instance method and register the class with SWIG's runtime.
IGESControl_ToolContainer.IGESBoundary = new_instancemethod(_IGESControl.IGESControl_ToolContainer_IGESBoundary,None,IGESControl_ToolContainer)
IGESControl_ToolContainer_swigregister = _IGESControl.IGESControl_ToolContainer_swigregister
IGESControl_ToolContainer_swigregister(IGESControl_ToolContainer)
class Handle_IGESControl_ToolContainer(object):
| |
if i is None:
return self.getTypedRuleContexts(StlParser.ExpressionContext)
else:
return self.getTypedRuleContext(StlParser.ExpressionContext,i)
def UntilOperator(self):
    """Token node for this rule's UntilOperator, if present."""
    return self.getToken(StlParser.UntilOperator, 0)
def interval(self):
    """Child IntervalContext, if the rule matched one (None otherwise)."""
    return self.getTypedRuleContext(StlParser.IntervalContext,0)
def accept(self, visitor):
    """Dispatch to visitExprUntil when the visitor defines it, else generic visit."""
    if not hasattr(visitor, "visitExprUntil"):
        return visitor.visitChildren(self)
    return visitor.visitExprUntil(self)
class ExprNotContext(ExpressionContext):
    """Parse-tree context for a negated expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprNotContext, self).__init__(parser)
        self.copyFrom(ctx)

    def NotOperator(self):
        return self.getToken(StlParser.NotOperator, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprNot"):
            return visitor.visitChildren(self)
        return visitor.visitExprNot(self)
class ExprNextContext(ExpressionContext):
    """Parse-tree context for a 'next' expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprNextContext, self).__init__(parser)
        self.copyFrom(ctx)

    def NextOperator(self):
        return self.getToken(StlParser.NextOperator, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprNext"):
            return visitor.visitChildren(self)
        return visitor.visitExprNext(self)
class ExprAndContext(ExpressionContext):
    """Parse-tree context for a conjunction of two expressions."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprAndContext, self).__init__(parser)
        self.copyFrom(ctx)

    def expression(self, i=None):
        """All child expression contexts, or just the i-th one."""
        if i is not None:
            return self.getTypedRuleContext(StlParser.ExpressionContext, i)
        return self.getTypedRuleContexts(StlParser.ExpressionContext)

    def AndOperator(self):
        return self.getToken(StlParser.AndOperator, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprAnd"):
            return visitor.visitChildren(self)
        return visitor.visitExprAnd(self)
class ExprUnlessContext(ExpressionContext):
    """Parse-tree context for an 'unless' expression (optional interval)."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprUnlessContext, self).__init__(parser)
        self.copyFrom(ctx)

    def expression(self, i=None):
        """All child expression contexts, or just the i-th one."""
        if i is not None:
            return self.getTypedRuleContext(StlParser.ExpressionContext, i)
        return self.getTypedRuleContexts(StlParser.ExpressionContext)

    def UnlessOperator(self):
        return self.getToken(StlParser.UnlessOperator, 0)

    def interval(self):
        return self.getTypedRuleContext(StlParser.IntervalContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprUnless"):
            return visitor.visitChildren(self)
        return visitor.visitExprUnless(self)
class ExprPreviousContext(ExpressionContext):
    """Parse-tree context for a 'previous' expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprPreviousContext, self).__init__(parser)
        self.copyFrom(ctx)

    def PreviousOperator(self):
        return self.getToken(StlParser.PreviousOperator, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprPrevious"):
            return visitor.visitChildren(self)
        return visitor.visitExprPrevious(self)
class ExprHistContext(ExpressionContext):
    """Parse-tree context for a 'historically' expression (optional interval)."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprHistContext, self).__init__(parser)
        self.copyFrom(ctx)

    def HistoricallyOperator(self):
        return self.getToken(StlParser.HistoricallyOperator, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def interval(self):
        return self.getTypedRuleContext(StlParser.IntervalContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprHist"):
            return visitor.visitChildren(self)
        return visitor.visitExprHist(self)
class ExprFallContext(ExpressionContext):
    """Parse-tree context for a fall(...) expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprFallContext, self).__init__(parser)
        self.copyFrom(ctx)

    def FallOperator(self):
        return self.getToken(StlParser.FallOperator, 0)

    def LPAREN(self):
        return self.getToken(StlParser.LPAREN, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def RPAREN(self):
        return self.getToken(StlParser.RPAREN, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprFall"):
            return visitor.visitChildren(self)
        return visitor.visitExprFall(self)
class ExprPredicateContext(ExpressionContext):
    """Parse-tree context for a comparison predicate between two expressions."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprPredicateContext, self).__init__(parser)
        self.copyFrom(ctx)

    def expression(self, i=None):
        """All child expression contexts, or just the i-th one."""
        if i is not None:
            return self.getTypedRuleContext(StlParser.ExpressionContext, i)
        return self.getTypedRuleContexts(StlParser.ExpressionContext)

    def comparisonOp(self):
        return self.getTypedRuleContext(StlParser.ComparisonOpContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprPredicate"):
            return visitor.visitChildren(self)
        return visitor.visitExprPredicate(self)
class ExprXorContext(ExpressionContext):
    """Parse-tree context for an exclusive-or of two expressions."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprXorContext, self).__init__(parser)
        self.copyFrom(ctx)

    def expression(self, i=None):
        """All child expression contexts, or just the i-th one."""
        if i is not None:
            return self.getTypedRuleContext(StlParser.ExpressionContext, i)
        return self.getTypedRuleContexts(StlParser.ExpressionContext)

    def XorOperator(self):
        return self.getToken(StlParser.XorOperator, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprXor"):
            return visitor.visitChildren(self)
        return visitor.visitExprXor(self)
class ExprRiseContext(ExpressionContext):
    """Parse-tree context for a rise(...) expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprRiseContext, self).__init__(parser)
        self.copyFrom(ctx)

    def RiseOperator(self):
        return self.getToken(StlParser.RiseOperator, 0)

    def LPAREN(self):
        return self.getToken(StlParser.LPAREN, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def RPAREN(self):
        return self.getToken(StlParser.RPAREN, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprRise"):
            return visitor.visitChildren(self)
        return visitor.visitExprRise(self)
class ExprOrContext(ExpressionContext):
    """Parse-tree context for a disjunction of two expressions."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprOrContext, self).__init__(parser)
        self.copyFrom(ctx)

    def expression(self, i=None):
        """All child expression contexts, or just the i-th one."""
        if i is not None:
            return self.getTypedRuleContext(StlParser.ExpressionContext, i)
        return self.getTypedRuleContexts(StlParser.ExpressionContext)

    def OrOperator(self):
        return self.getToken(StlParser.OrOperator, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprOr"):
            return visitor.visitChildren(self)
        return visitor.visitExprOr(self)
class ExprAlwaysContext(ExpressionContext):
    """Parse-tree context for an 'always' expression (optional interval)."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprAlwaysContext, self).__init__(parser)
        self.copyFrom(ctx)

    def AlwaysOperator(self):
        return self.getToken(StlParser.AlwaysOperator, 0)

    def expression(self):
        return self.getTypedRuleContext(StlParser.ExpressionContext, 0)

    def interval(self):
        return self.getTypedRuleContext(StlParser.IntervalContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprAlways"):
            return visitor.visitChildren(self)
        return visitor.visitExprAlways(self)
class ExprRealContext(ExpressionContext):
    """Parse-tree context wrapping a real-valued sub-expression."""

    def __init__(self, parser, ctx):  # ctx: StlParser.ExpressionContext
        super(StlParser.ExprRealContext, self).__init__(parser)
        self.copyFrom(ctx)

    def real_expression(self):
        return self.getTypedRuleContext(StlParser.Real_expressionContext, 0)

    def accept(self, visitor):
        if not hasattr(visitor, "visitExprReal"):
            return visitor.visitChildren(self)
        return visitor.visitExprReal(self)
def expression(self, _p=0):
_parentctx = self._ctx
_parentState = self.state
localctx = StlParser.ExpressionContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 6
self.enterRecursionRule(localctx, 6, self.RULE_expression, _p)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 102
token = self._input.LA(1)
if token in [StlParser.NotOperator]:
localctx = StlParser.ExprNotContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 61
self.match(StlParser.NotOperator)
self.state = 62
self.expression(17)
elif token in [StlParser.AlwaysOperator]:
localctx = StlParser.ExprAlwaysContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 63
self.match(StlParser.AlwaysOperator)
self.state = 65
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 64
self.interval()
self.state = 67
self.expression(11)
elif token in [StlParser.EventuallyOperator]:
localctx = StlParser.ExprEvContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 68
self.match(StlParser.EventuallyOperator)
self.state = 70
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 69
self.interval()
self.state = 72
self.expression(10)
elif token in [StlParser.HistoricallyOperator]:
localctx = StlParser.ExprHistContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 73
self.match(StlParser.HistoricallyOperator)
self.state = 75
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 74
self.interval()
self.state = 77
self.expression(7)
elif token in [StlParser.OnceOperator]:
localctx = StlParser.ExpreOnceContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 78
self.match(StlParser.OnceOperator)
self.state = 80
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 79
self.interval()
self.state = 82
self.expression(6)
elif token in [StlParser.PreviousOperator]:
localctx = StlParser.ExprPreviousContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 83
self.match(StlParser.PreviousOperator)
self.state = 84
self.expression(2)
elif token in [StlParser.NextOperator]:
localctx = StlParser.ExprNextContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 85
self.match(StlParser.NextOperator)
self.state = 86
self.expression(1)
elif token in [StlParser.MINUS, StlParser.ABS, StlParser.IntegerLiteral, StlParser.RealLiteral, StlParser.Identifier]:
localctx = StlParser.ExprRealContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 87
self.real_expression(0)
elif token in [StlParser.LPAREN]:
localctx = StlParser.ExprParenContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 88
self.match(StlParser.LPAREN)
self.state = 89
self.expression(0)
self.state = 90
self.match(StlParser.RPAREN)
elif token in [StlParser.RiseOperator]:
localctx = StlParser.ExprRiseContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 92
self.match(StlParser.RiseOperator)
self.state = 93
self.match(StlParser.LPAREN)
self.state = 94
self.expression(0)
self.state = 95
self.match(StlParser.RPAREN)
elif token in [StlParser.FallOperator]:
localctx = StlParser.ExprFallContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 97
self.match(StlParser.FallOperator)
self.state = 98
self.match(StlParser.LPAREN)
self.state = 99
self.expression(0)
self.state = 100
self.match(StlParser.RPAREN)
else:
raise NoViableAltException(self)
self._ctx.stop = self._input.LT(-1)
self.state = 143
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,12,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
self.state = 141
la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
if la_ == 1:
localctx = StlParser.ExprPredicateContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 104
if not self.precpred(self._ctx, 19):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 19)")
self.state = 105
self.comparisonOp()
self.state = 106
self.expression(20)
pass
elif la_ == 2:
localctx = StlParser.ExprOrContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 108
if not self.precpred(self._ctx, 16):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 16)")
self.state = 109
self.match(StlParser.OrOperator)
self.state = 110
self.expression(17)
pass
elif la_ == 3:
localctx = StlParser.ExprAndContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 111
if not self.precpred(self._ctx, 15):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 15)")
self.state = 112
self.match(StlParser.AndOperator)
self.state = 113
self.expression(16)
pass
elif la_ == 4:
localctx = StlParser.ExprImpliesContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 114
if not self.precpred(self._ctx, 14):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 14)")
self.state = 115
self.match(StlParser.ImpliesOperator)
self.state = 116
self.expression(15)
pass
elif la_ == 5:
localctx = StlParser.ExprIffContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 117
if not self.precpred(self._ctx, 13):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 13)")
self.state = 118
self.match(StlParser.IffOperator)
self.state = 119
self.expression(14)
pass
elif la_ == 6:
localctx = StlParser.ExprXorContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 120
if not self.precpred(self._ctx, 12):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 12)")
self.state = 121
self.match(StlParser.XorOperator)
self.state = 122
self.expression(13)
pass
elif la_ == 7:
localctx = StlParser.ExprUntilContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 123
if not self.precpred(self._ctx, 9):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 9)")
self.state = 124
self.match(StlParser.UntilOperator)
self.state = 126
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 125
self.interval()
self.state = 128
self.expression(10)
pass
elif la_ == 8:
localctx = StlParser.ExprUnlessContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 129
if not self.precpred(self._ctx, 8):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 8)")
self.state = 130
self.match(StlParser.UnlessOperator)
self.state = 132
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 131
self.interval()
self.state = 134
self.expression(9)
pass
elif la_ == 9:
localctx = StlParser.ExprSinceContext(self, StlParser.ExpressionContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expression)
self.state = 135
if not self.precpred(self._ctx, 5):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 5)")
self.state = 136
self.match(StlParser.SinceOperator)
self.state = 138
_la = self._input.LA(1)
if _la==StlParser.LBRACK:
self.state = 137
self.interval()
self.state = 140
self.expression(6)
pass
self.state = 145
| |
conn.lower():
return True
return False
if self.msg.getheader('keep-alive'):
return False
if conn and 'keep-alive' in conn.lower():
return False
pconn = self.msg.getheader('proxy-connection')
if pconn and 'keep-alive' in pconn.lower():
return False
return True
def close(self):
    """Close and drop the underlying file object, if still open."""
    fp = self.fp
    if fp:
        fp.close()
        self.fp = None
def isclosed(self):
    """True once close() has dropped the underlying file object."""
    return self.fp is None
def read(self, amt=None):
    """Read the whole body (amt=None) or up to `amt` bytes of it.

    Returns '' when the stream is already closed, and closes the stream
    itself once the body is fully consumed so the connection can be reused.
    """
    if self.fp is None:
        return ''
    else:
        # HEAD responses carry no body by definition; close immediately.
        if self._method == 'HEAD':
            self.close()
            return ''
        if self.chunked:
            return self._read_chunked(amt)
        if amt is None:
            # Read everything: to EOF when no Content-Length was given,
            # otherwise exactly the advertised number of bytes.
            if self.length is None:
                s = self.fp.read()
            else:
                s = self._safe_read(self.length)
                self.length = 0
            self.close()
            return s
        if self.length is not None:
            # Never read past the end of this response's body.
            if amt > self.length:
                amt = self.length
        s = self.fp.read(amt)
        if self.length is not None:
            self.length -= len(s)
            if not self.length:
                # Body exhausted -- release the stream.
                self.close()
        return s
def _read_chunked(self, amt):
    """Read a chunked-transfer-encoded body; amt=None means read it all.

    self.chunk_left remembers how much of the current chunk is still
    unread between calls; raises IncompleteRead on a malformed size line.
    """
    chunk_left = self.chunk_left
    value = []
    while True:
        if chunk_left is None:
            # Start of a new chunk: "<hex-size>[;extensions]\r\n".
            line = self.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong('chunk size')
            i = line.find(';')
            if i >= 0:
                line = line[:i]   # strip chunk extensions
            try:
                chunk_left = int(line, 16)
            except ValueError:
                # Unparsable size: the connection is unusable from here on.
                self.close()
                raise IncompleteRead(''.join(value))
            if chunk_left == 0:
                break   # zero-length chunk terminates the body
        if amt is None:
            value.append(self._safe_read(chunk_left))
        else:
            if amt < chunk_left:
                # Caller satisfied mid-chunk: remember the leftover bytes.
                value.append(self._safe_read(amt))
                self.chunk_left = chunk_left - amt
                return ''.join(value)
            if amt == chunk_left:
                value.append(self._safe_read(amt))
                self._safe_read(2)   # discard the chunk-terminating CRLF
                self.chunk_left = None
                return ''.join(value)
            value.append(self._safe_read(chunk_left))
            amt -= chunk_left
        self._safe_read(2)   # discard the chunk-terminating CRLF
        chunk_left = None
    # Consume trailer headers up to (and including) the blank line.
    while True:
        line = self.fp.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise LineTooLong('trailer line')
        if not line:
            break   # EOF before the blank line
        if line == '\r\n':
            break
    self.close()
    return ''.join(value)
def _safe_read(self, amt):
    """Read exactly `amt` bytes, compensating for partial reads.

    Even a blocking socket read can return short (e.g. when interrupted
    by a signal), so loop until the request is satisfied.  Raises
    IncompleteRead if EOF arrives first; a zero-byte read cannot be told
    apart from EOF, so it raises as well.
    """
    pieces = []
    remaining = amt
    while remaining > 0:
        block = self.fp.read(min(remaining, MAXAMOUNT))
        if not block:
            raise IncompleteRead(''.join(pieces), remaining)
        pieces.append(block)
        remaining -= len(block)
    return ''.join(pieces)
def fileno(self):
    """File descriptor of the underlying socket file object."""
    return self.fp.fileno()
def getheader(self, name, default=None):
    """Return the value of header `name`, or `default` when absent.

    Raises ResponseNotReady if the response headers have not been parsed.
    """
    msg = self.msg
    if msg is None:
        raise ResponseNotReady()
    return msg.getheader(name, default)
def getheaders(self):
    """Return list of (header, value) tuples.

    Raises ResponseNotReady if the response headers have not been parsed.
    """
    msg = self.msg
    if msg is None:
        raise ResponseNotReady()
    return msg.items()
class HTTPConnection():
    """HTTP client connection; request/response state is tracked via the
    module's _CS_* constants (defined elsewhere in this module)."""
    # Protocol version used in the request line.
    _http_vsn = 11
    _http_vsn_str = 'HTTP/1.1'
    # Class used to wrap replies; see the response handling code.
    response_class = HTTPResponse
    default_port = HTTP_PORT
    # When true, send() transparently reconnects after a close().
    auto_open = 1
    debuglevel = 0
    strict = 0
def __init__(self, host, port=None, strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
    """Record connection parameters; the socket is opened lazily later."""
    self.timeout = timeout
    self.source_address = source_address
    # No socket yet -- it is created on demand.
    self.sock = None
    # Request lines buffered until _send_output() flushes them.
    self._buffer = []
    self.__response = None
    self.__state = _CS_IDLE
    self._method = None
    # CONNECT-tunnelling state; see set_tunnel().
    self._tunnel_host = None
    self._tunnel_port = None
    self._tunnel_headers = {}
    self._set_hostport(host, port)
    if strict is not None:
        self.strict = strict
def set_tunnel(self, host, port=None, headers=None):
    """Set up the host and the port for HTTP CONNECT tunnelling.

    `headers`, when given, is a mapping of extra HTTP headers to send
    with the CONNECT request; otherwise any previous extras are cleared.
    """
    self._tunnel_host = host
    self._tunnel_port = port
    if not headers:
        # Keep the existing dict object, just empty it in place.
        self._tunnel_headers.clear()
    else:
        self._tunnel_headers = headers
def _set_hostport(self, host, port):
if port is None:
i = host.rfind(':')
j = host.rfind(']')
if i > j:
try:
port = int(host[i + 1:])
except ValueError:
raise InvalidURL("nonnumeric port: '%s'" % host[i + 1:])
host = host[:i]
else:
port = self.default_port
if host and host[0] == '[' and host[-1] == ']':
host = host[1:-1]
self.host = host
self.port = port
return
def set_debuglevel(self, level):
    """Set verbosity of the debug output printed by send() and friends."""
    self.debuglevel = level
def _tunnel(self):
    """Issue a CONNECT request so subsequent traffic reaches the tunnelled host."""
    # Point self.host/self.port at the tunnel target before building the line.
    self._set_hostport(self._tunnel_host, self._tunnel_port)
    self.send('CONNECT %s:%d HTTP/1.0\r\n' % (self.host, self.port))
    for header, value in self._tunnel_headers.iteritems():  # Python 2 dict API
        self.send('%s: %s\r\n' % (header, value))
    self.send('\r\n')
    response = self.response_class(self.sock, strict=self.strict, method=self._method)
    version, code, message = response._read_status()
    if code != 200:
        # Proxy refused the tunnel; the socket is useless now.
        self.close()
        raise socket.error('Tunnel connection failed: %d %s' % (code,
                                                                message.strip()))
    # Drain the proxy's response headers up to the blank line.
    while True:
        line = response.fp.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise LineTooLong('header line')
        if line == '\r\n':
            break
def connect(self):
    """Connect to the host and port specified in __init__."""
    self.sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address)
    if self._tunnel_host:
        # A proxy tunnel was requested: establish CONNECT before normal use.
        self._tunnel()
def close(self):
    """Close the connection to the HTTP server."""
    sock = self.sock
    if sock:
        sock.close()
        self.sock = None
    response = self.__response
    if response:
        response.close()
        self.__response = None
    # Back to the idle state so a new request may be started later.
    self.__state = _CS_IDLE
def send(self, data):
    """Send `data' to the server."""
    if self.sock is None:
        # Reconnect transparently unless auto_open has been disabled.
        if self.auto_open:
            self.connect()
        else:
            raise NotConnected()
    if self.debuglevel > 0:
        print 'send:', repr(data)
    blocksize = 8192
    # File-like bodies are streamed in blocks; array objects also expose
    # read-like attributes(?), so they are explicitly excluded and sent whole.
    if hasattr(data, 'read') and not isinstance(data, array):
        if self.debuglevel > 0:
            print 'sendIng a read()able'
        datablock = data.read(blocksize)
        while datablock:
            self.sock.sendall(datablock)
            datablock = data.read(blocksize)
    else:
        self.sock.sendall(data)
    return
def _output(self, s):
    r"""Add a line of output to the current request buffer.
    Assumes that the line does *not* end with \r\n.
    """
    self._buffer.append(s)
def _send_output(self, message_body=None):
r"""Send the currently buffered request and clear the buffer.
Appends an extra \r\n to the buffer.
A message_body may be specified, to be appended to the request.
"""
self._buffer.extend(('', ''))
msg = '\r\n'.join(self._buffer)
del self._buffer[:]
if isinstance(message_body, str):
msg += message_body
message_body = None
self.send(msg)
if message_body is not None:
self.send(message_body)
return
def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
    """Send a request to the server.

    `method' specifies an HTTP request method, e.g. 'GET'.
    `url' specifies the object being requested, e.g. '/index.html'.
    `skip_host' if True does not add automatically a 'Host:' header
    `skip_accept_encoding' if True does not add automatically an
      'Accept-Encoding:' header
    """
    # A previous response that was fully read no longer blocks a new request.
    if self.__response and self.__response.isclosed():
        self.__response = None
    if self.__state == _CS_IDLE:
        self.__state = _CS_REQ_STARTED
    else:
        raise CannotSendRequest()
    self._method = method
    if not url:
        url = '/'
    hdr = '%s %s %s' % (method, url, self._http_vsn_str)
    self._output(hdr)
    if self._http_vsn == 11:
        if not skip_host:
            # Prefer the host from an absolute request URL, else self.host.
            netloc = ''
            if url.startswith('http'):
                nil, netloc, nil, nil, nil = urlsplit(url)
            if netloc:
                try:
                    netloc_enc = netloc.encode('ascii')
                except UnicodeEncodeError:
                    netloc_enc = netloc.encode('idna')  # non-ASCII host name
                self.putheader('Host', netloc_enc)
            else:
                try:
                    host_enc = self.host.encode('ascii')
                except UnicodeEncodeError:
                    host_enc = self.host.encode('idna')
                # IPv6 literals need brackets inside the Host header.
                if host_enc.find(':') >= 0:
                    host_enc = '[' + host_enc + ']'
                if self.port == self.default_port:
                    self.putheader('Host', host_enc)
                else:
                    self.putheader('Host', '%s:%s' % (host_enc, self.port))
        if not skip_accept_encoding:
            # Decline compressed transfer codings by default.
            self.putheader('Accept-Encoding', 'identity')
    return
def putheader(self, header, *values):
"""Send a request header line to the server.
For example: h.putheader('Accept', 'text/html')
"""
if self.__state != _CS_REQ_STARTED:
raise CannotSendHeader()
hdr = '%s: %s' % (header, '\r\n\t'.join([ str(v) for v in values ]))
self._output(hdr)
def endheaders(self, message_body=None):
"""Indicate that the last header line has been sent to the server.
This method sends the request to the server. The optional
message_body argument can be used to pass message body
associated with the request. The message body will be sent in
the same packet as the message headers if possible. The
message_body should be a string.
"""
if self.__state == _CS_REQ_STARTED:
self.__state = _CS_REQ_SENT
else:
raise CannotSendHeader()
self._send_output(message_body)
def request(self, method, url, body=None, headers={}):
"""Send a complete request to the server."""
self._send_request(method, url, body, headers)
    def _set_content_length(self, body):
        """Emit a best-effort Content-Length header for *body*.

        len() is tried first (strings, buffers); for file-like objects the
        on-disk size from fstat() is used.  If neither works the header is
        simply omitted.
        """
        thelen = None
        try:
            thelen = str(len(body))
        except TypeError as te:
            # NOTE(review): `te` is bound but never used.
            try:
                # File-like object: measure it via its file descriptor.
                thelen = str(os.fstat(body.fileno()).st_size)
            except (AttributeError, OSError):
                # Neither sized nor a real file; length stays unknown.
                if self.debuglevel > 0:
                    print 'Cannot stat!!'
        if thelen is not None:
            self.putheader('Content-Length', thelen)
        return
def _send_request(self, method, url, body, headers):
header_names = dict.fromkeys([ k.lower() for k in headers ])
skips = {}
if 'host' in header_names:
skips['skip_host'] = 1
if 'accept-encoding' in header_names:
skips['skip_accept_encoding'] = 1
self.putrequest(method, url, **skips)
if body and 'content-length' not in header_names:
self._set_content_length(body)
for hdr, value in headers.iteritems():
self.putheader(hdr, value)
self.endheaders(body)
def getresponse(self, buffering=False):
"""Get the response from the server."""
if self.__response and self.__response.isclosed():
self.__response = None
if self.__state != _CS_REQ_SENT or self.__response:
raise ResponseNotReady()
args = (self.sock,)
kwds = {'strict': self.strict,'method': self._method}
if self.debuglevel > 0:
args += (self.debuglevel,)
if buffering:
kwds['buffering'] = True
response = self.response_class(*args, **kwds)
response.begin()
self.__state = _CS_IDLE
if response.will_close:
self.close()
else:
self.__response = response
return response
class HTTP():
"""Compatibility class with httplib.py from 1.5."""
_http_vsn = 10
_http_vsn_str = 'HTTP/1.0'
debuglevel = 0
_connection_class = HTTPConnection
def __init__(self, host='', port=None, strict=None):
"""Provide a default host, since the superclass requires one."""
if | |
retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> storage_resources.Bucket:
r"""Creates a new bucket.
Args:
request (Union[google.storage_v1.types.InsertBucketRequest, dict]):
The request object. Request message for InsertBucket.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.storage_v1.types.Bucket:
A bucket.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a storage.InsertBucketRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, storage.InsertBucketRequest):
request = storage.InsertBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.insert_bucket]
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def list_channels(self,
request: Union[storage.ListChannelsRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> storage_resources.ListChannelsResponse:
r"""List active object change notification channels for
this bucket.
Args:
request (Union[google.storage_v1.types.ListChannelsRequest, dict]):
The request object. Request message for ListChannels.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.storage_v1.types.ListChannelsResponse:
The result of a call to
Channels.ListChannels
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a storage.ListChannelsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, storage.ListChannelsRequest):
request = storage.ListChannelsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_channels]
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def list_buckets(self,
request: Union[storage.ListBucketsRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListBucketsPager:
r"""Retrieves a list of buckets for a given project.
Args:
request (Union[google.storage_v1.types.ListBucketsRequest, dict]):
The request object. Request message for ListBuckets.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.storage_v1.services.storage.pagers.ListBucketsPager:
The result of a call to
Buckets.ListBuckets
Iterating over this object will yield
results and resolve additional pages
automatically.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a storage.ListBucketsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, storage.ListBucketsRequest):
request = storage.ListBucketsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_buckets]
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListBucketsPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
def lock_bucket_retention_policy(self,
request: Union[storage.LockRetentionPolicyRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> storage_resources.Bucket:
r"""Locks retention policy on a bucket.
Args:
request (Union[google.storage_v1.types.LockRetentionPolicyRequest, dict]):
The request object. Request message for
LockRetentionPolicy.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.storage_v1.types.Bucket:
A bucket.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a storage.LockRetentionPolicyRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, storage.LockRetentionPolicyRequest):
request = storage.LockRetentionPolicyRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.lock_bucket_retention_policy]
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def get_bucket_iam_policy(self,
request: Union[storage.GetIamPolicyRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Gets the IAM policy for the specified bucket.
Args:
request (Union[google.storage_v1.types.GetIamPolicyRequest, dict]):
The request object. A wrapper around the IAM get policy
request to support our common_request_params.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
{
"bindings": [
{
"role":
"roles/resourcemanager.organizationAdmin",
"members": [ "user:<EMAIL>",
"group:<EMAIL>",
"domain:google.com",
"serviceAccount:<EMAIL>"
]
}, { "role":
"roles/resourcemanager.organizationViewer",
"members": ["user:<EMAIL>"],
"condition": { "title": "expirable access",
"description": "Does not grant access after
Sep 2020", "expression": "request.time <
timestamp('2020-10-01T00:00:00.000Z')", } }
]
}
**YAML Example**
bindings: - members: - user:\ <EMAIL> -
group:\ <EMAIL> - domain:google.com -
serviceAccount:\ <EMAIL>
role: roles/resourcemanager.organizationAdmin -
members: - user:\ <EMAIL> role:
roles/resourcemanager.organizationViewer
condition: title: expirable access description:
Does not grant access after Sep 2020 expression:
request.time <
timestamp('2020-10-01T00:00:00.000Z')
For a description of IAM and its features, see the
[IAM developer's
guide](\ https://cloud.google.com/iam/docs).
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a storage.GetIamPolicyRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, storage.GetIamPolicyRequest):
request = storage.GetIamPolicyRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_bucket_iam_policy]
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def set_bucket_iam_policy(self,
request: Union[storage.SetIamPolicyRequest, dict] = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> policy_pb2.Policy:
r"""Updates an IAM policy for the specified bucket.
Args:
request (Union[google.storage_v1.types.SetIamPolicyRequest, dict]):
The request object. A wrapper around the IAM set policy
request to support our common_request_params.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.iam.v1.policy_pb2.Policy:
Defines an Identity and Access Management (IAM) policy. It is used to
specify access control policies for Cloud Platform
resources.
A Policy is a collection of bindings. A binding binds
one or more members to a single role. Members can be
user accounts, service accounts, Google groups, and
domains (such as G Suite). A role is a named list of
permissions (defined by IAM or configured by users).
A binding can optionally specify a condition, which
is a logic expression that further constrains the
role binding based on attributes about the request
and/or target resource.
**JSON Example**
{
"bindings": [
{
"role":
"roles/resourcemanager.organizationAdmin",
"members": [ "user:<EMAIL>",
"group:<EMAIL>",
"domain:google.com",
"serviceAccount:<EMAIL>"
]
}, { "role":
"roles/resourcemanager.organizationViewer",
"members": ["user:<EMAIL>"],
"condition": { "title": "expirable access",
"description": "Does not grant access after
| |
# pyjs/src/pyjs/browser.py
# Copyright (C) 2009, 2010, <NAME> <<EMAIL>>
# Copyright (C) 2010, <NAME> <<EMAIL>>
import os
import sys
import time
import shutil
from pyjs import linker
from pyjs import translator
# The bootstrap module set and the early static JS libraries depend on which
# translator backend is active; any other engine name is a hard error.
if translator.name == 'proto':
    required_modules = [
        'pyjslib', 'sys', 'imp', 'dynamic', 'pyjamas', 'pyjamas.DOM',
    ]
    early_static_app_libs = ['_pyjs.js']
elif translator.name == 'dict':
    required_modules = [
        '__builtin__', 'sys', 'imp', 'dynamic', 'pyjamas', 'pyjamas.DOM',
    ]
    early_static_app_libs = []
else:
    raise ValueError("unknown translator engine '%s'" % translator.name)
from pyjs import util
from cStringIO import StringIO
from optparse import OptionParser
import pyjs
import re
import traceback
# hashlib superseded the standalone md5 module in Python 2.5; fall back for
# older interpreters.  Catch ImportError specifically — the original bare
# `except:` would also have swallowed KeyboardInterrupt/SystemExit.
try:
    from hashlib import md5
except ImportError:
    from md5 import md5
# Browser platforms a Pyjamas application can be linked for.
AVAILABLE_PLATFORMS = ('IE6', 'Opera', 'OldMoz', 'Safari', 'Mozilla')
# Locate the boilerplate directory: next to this module when installed, or
# inside the pyjamas source tree when running from a checkout.
if pyjs.pyjspth is None:
    BOILERPLATE_PATH = os.path.join(os.path.dirname(__file__), 'boilerplate')
else:
    BOILERPLATE_PATH = os.path.join(pyjs.pyjspth, 'pyjs', 'src','pyjs', 'boilerplate')
# Fallback HTML page emitted when the project supplies no <app>.html of its
# own; the %(...)s slots are filled in by BrowserLinker._create_app_html().
APP_HTML_TEMPLATE = """\
<html>
<!-- auto-generated html - You should consider editing and adapting this
to suit your requirements. No doctype used here to force quirks mode; see
wiki for details: http://pyjs.org/wiki/csshellandhowtodealwithit/
-->
<head>
<meta name="pygwt:module" content="%(modulename)s">
%(css)s
<title>%(title)s</title>
</head>
<body style="background-color:white">
<script type="text/javascript" src="%(bootstrap_file)s"></script>
<iframe id="__pygwt_historyFrame" style="width:0;height:0;border:0"></iframe>
</body>
</html>
"""
class BrowserLinker(linker.BaseLinker):
    """Linker that assembles a Pyjamas application for web browsers.

    For each target platform it produces an <app>.<platform>.cache.html
    bundle, plus the <app>.html and <app>.nocache.html entry points.
    With cache_buster enabled, output files are renamed to embed the md5
    checksum of their contents.

    NOTE: `_generate_app_file` fills its HTML template via
    ``template % locals()``, so local variable names in that method are
    part of the template contract — do not rename them.
    """
    # parents are specified in most-specific last
    platform_parents = {
        'mozilla':['browser'],
        'ie6':['browser'],
        'safari':['browser'],
        'oldmoz':['browser'],
        'opera':['browser'],
        }
    def __init__(self, *args, **kwargs):
        # Pop the browser-specific options before delegating everything
        # else to the generic BaseLinker.
        self.multi_file = kwargs.pop('multi_file', False)
        self.cache_buster = kwargs.pop('cache_buster', False)
        self.bootstrap_file = kwargs.pop('bootstrap_file', 'bootstrap.js')
        self.public_folder = kwargs.pop('public_folder', 'public')
        self.runtime_options = kwargs.pop('runtime_options', [])
        super(BrowserLinker, self).__init__(*args, **kwargs)
    def visit_start(self):
        # Reset per-run state before the link pass begins.
        super(BrowserLinker, self).visit_start()
        self.boilerplate_path = None
        self.early_static_app_libs += early_static_app_libs
        self.merged_public = set()
        self.app_files = {}
        self.renamed_libs = {}
    def visit_end_platform(self, platform):
        # Called once per platform after its modules have been linked;
        # applies cache-buster renaming and writes the platform app file.
        if not platform:
            return
        if self.cache_buster:
            # rename the files to their hashed equivalents
            renamed = []
            for p in self.done[platform]:
                if p in self.renamed_libs:
                    # Already hashed during an earlier platform pass.
                    new_p = self.renamed_libs[p]
                else:
                    f = open(p)
                    md5sum = md5(f.read()).hexdigest()
                    f.close()
                    name, ext = os.path.splitext(p)
                    new_p = name + '.' + md5sum + ext
                    # if we are keeping all intermediate files
                    if self.keep_lib_files:
                        # copy the file to it's hashed equivalent
                        shutil.copyfile(p, new_p)
                    else: # keep new file only
                        # clean out any previous version of the hashed file
                        if os.access(new_p, os.F_OK):
                            os.unlink(new_p)
                        os.rename(p, new_p)
                    self.renamed_libs[p] = new_p
                renamed.append(new_p)
            self.done[platform] = renamed
        self.app_files[platform] = self._generate_app_file(platform)
    def visit_end(self):
        # Final pass: emit the entry-point HTML files and clean up
        # intermediate artifacts.
        html_output_filename = os.path.join(self.output, self.top_module + '.html')
        if not os.path.exists(html_output_filename):
            # autogenerate
            self._create_app_html(html_output_filename)
        self._create_nocache_html()
        if not self.keep_lib_files:
            # Only delete files that live under the output directory.
            for fname in self.remove_files:
                if fname.find(self.output) == 0:
                    os.unlink(fname)
    def merge_resources(self, dir_name):
        """Copy a module's public/ folder and any JS libraries into output.

        `merged_public` doubles as a "seen" set for both directories and
        individual library files, so nothing is copied twice.
        """
        if not dir_name in self.merged_public:
            public_folder = os.path.join(dir_name, self.public_folder)
            if os.path.exists(public_folder) and os.path.isdir(public_folder):
                util.copytree_exists(public_folder,
                                     self.output)
                self.merged_public.add(dir_name)
        for libs in [self.js_libs, self.dynamic_js_libs,
                     self.static_js_libs, self.early_static_js_libs, self.late_static_js_libs]:
            for lib in libs:
                if not lib in self.merged_public:
                    # NOTE(review): `path` from self.path is never used in the
                    # loop body — the existence check always tests `lib`
                    # itself; presumably a search over self.path was intended.
                    for path in self.path:
                        if os.path.exists(lib) and os.path.isfile(lib):
                            util.copy_exists(lib, os.path.join(self.output, os.path.basename(lib)))
                            self.merged_public.add(lib)
                            break
    def find_boilerplate(self, name):
        """Return the path of boilerplate file *name*.

        A module-local boilerplate/ directory (next to the top module)
        takes precedence over the installed BOILERPLATE_PATH.
        """
        if not self.top_module_path:
            raise RuntimeError('Top module not found %r' % self.top_module)
        if not self.boilerplate_path:
            # Lazily build the search path on first use.
            self.boilerplate_path = [BOILERPLATE_PATH]
            module_bp_path = os.path.join(
                os.path.dirname(self.top_module_path), 'boilerplate')
            if os.path.isdir(module_bp_path):
                self.boilerplate_path.insert(0, module_bp_path)
        for p in self.boilerplate_path:
            bp = os.path.join(p, name)
            if os.path.exists(bp):
                return bp
        raise RuntimeError("Boilerplate not found %r" % name)
    def read_boilerplate(self, name):
        """Return the contents of boilerplate file *name* as a string."""
        # Python 2 `file()` builtin (this module targets Python 2).
        f = file(self.find_boilerplate(name))
        res = f.read()
        f.close()
        return res
    def unique_list_values(self, lst):
        """Return *lst* with duplicates removed.

        NOTE(review): implemented via dict keys, so on Python 2 the result
        order is arbitrary (dicts are unordered) — callers must not rely
        on ordering.
        """
        keys = {}
        for k in lst:
            keys[k] = 1
        return keys.keys()
    def _generate_app_file(self, platform):
        """Write the <app>.<platform>.cache.html bundle and return its path.

        Every local variable below is a potential %(name)s substitution in
        the all.cache.html template (filled via ``template % locals()``).
        """
        # TODO: cache busting
        template = self.read_boilerplate('all.cache.html')
        name_parts = [self.top_module, platform, 'cache.html']
        done = self.done[platform]
        # NOTE(review): "ouput" is a long-standing typo; +1 accounts for the
        # path separator after the output directory.
        len_ouput_dir = len(self.output)+1
        app_name = self.top_module
        platform_name = platform.lower()
        # NOTE(review): the trailing comma makes `dynamic` the tuple (0,),
        # not the int 0 — confirm whether the template expects that.
        dynamic = 0,
        app_headers = ''
        available_modules = self.unique_list_values(self.visited_modules[platform])
        early_static_app_libs = [] + self.early_static_app_libs
        static_app_libs = []
        dynamic_app_libs = []
        dynamic_js_libs = [] + self.dynamic_js_libs
        static_js_libs = [] + self.static_js_libs
        early_static_js_libs = [] + self.early_static_js_libs
        late_static_js_libs = [] + self.late_static_js_libs
        dynamic_modules = []
        # Patterns prefixed with '!' name modules that must NOT be unlinked;
        # the required bootstrap modules are always protected.
        not_unlinked_modules = [re.compile(m[1:]) for m in self.unlinked_modules if m[0] == '!']
        for m in required_modules:
            not_unlinked_modules.append(re.compile('^%s$' % m))
        unlinked_modules = [re.compile(m) for m in self.unlinked_modules if m[0] != '!' and m not in not_unlinked_modules]
        def static_code(libs, msg = None):
            # Inline each library file into a <script> block; the source
            # files are scheduled for removal afterwards.
            code = []
            for lib in libs:
                fname = lib
                if not os.path.isfile(fname):
                    fname = os.path.join(self.output, lib)
                if not os.path.isfile(fname):
                    raise RuntimeError('File not found %r' % lib)
                # NOTE(review): comparing a path *suffix* to the output
                # directory looks inverted — a prefix check
                # (fname[:len_ouput_dir]) was presumably intended; confirm.
                if fname[len_ouput_dir:] == self.output:
                    name = fname[len_ouput_dir:]
                else:
                    name = os.path.basename(lib)
                code.append("""<script type="text/javascript"><!--""")
                if not msg is None:
                    code.append("/* start %s: %s */" % (msg, name))
                f = file(fname)
                code.append(f.read())
                if not msg is None:
                    code.append("/* end %s */" % (name,))
                code.append("""--></script>""")
                self.remove_files[fname] = True
                # Also remove the non-platform-specific variant of the file,
                # if one exists (e.g. foo.__mozilla__.js -> foo.js).
                fname = fname.split('.')
                if fname[-2] == '__%s__' % platform_name:
                    del fname[-2]
                    fname = '.'.join(fname)
                    if os.path.isfile(fname):
                        self.remove_files[fname] = True
            return "\n".join(code)
        def js_modname(path):
            # Synthetic module name for a raw JS library, unique per path.
            return 'js@'+os.path.basename(path)+'.'+md5(path).hexdigest()
        def skip_unlinked(lst):
            # Filter out libraries whose module name matches an "unlinked"
            # pattern, unless explicitly protected by a '!' pattern.
            new_lst = []
            pltfrm = '__%s__' % platform_name
            for path in lst:
                fname = os.path.basename(path).rpartition(pyjs.MOD_SUFFIX)[0]
                frags = fname.split('.')
                # TODO: do not combine module chunks until we write the file
                # Strip a trailing 32-hex-digit cache-buster fragment.
                if self.cache_buster and len(frags[-1])==32 and len(frags[-1].strip('0123456789abcdef'))==0:
                    frags.pop()
                if frags[-1] == pltfrm:
                    frags.pop()
                fname = '.'.join(frags)
                in_not_unlinked_modules = False
                for m in not_unlinked_modules:
                    if m.match(fname):
                        in_not_unlinked_modules = True
                        new_lst.append(path)
                        break
                if not in_not_unlinked_modules:
                    in_unlinked_modules = False
                    for m in unlinked_modules:
                        if m.match(fname):
                            in_unlinked_modules = True
                            if fname in available_modules:
                                available_modules.remove(fname)
                    if not in_unlinked_modules:
                        new_lst.append(path)
            return new_lst
        # In multi-file mode everything not explicitly static is loaded
        # dynamically; otherwise everything is inlined statically.
        if self.multi_file:
            dynamic_js_libs = self.unique_list_values(dynamic_js_libs + [m for m in list(self.js_libs) if not m in static_js_libs])
            dynamic_app_libs = self.unique_list_values([m for m in done if not m in early_static_app_libs])
        else:
            static_js_libs = self.unique_list_values(static_js_libs + [m for m in list(self.js_libs) if not m in dynamic_js_libs])
            static_app_libs = self.unique_list_values([m for m in done if not m in early_static_app_libs])
        dynamic_js_libs = skip_unlinked(dynamic_js_libs)
        dynamic_app_libs = skip_unlinked(dynamic_app_libs)
        static_js_libs = skip_unlinked(static_js_libs)
        static_app_libs = skip_unlinked(static_app_libs)
        dynamic_modules = self.unique_list_values(available_modules + [js_modname(lib) for lib in dynamic_js_libs])
        available_modules = self.unique_list_values(available_modules + early_static_app_libs + dynamic_modules)
        # Render the dynamic module list as a JavaScript array literal.
        if len(dynamic_modules) > 0:
            dynamic_modules = "['" + "','".join(dynamic_modules) + "']"
        else:
            dynamic_modules = "[]"
        appscript = "<script><!--\n$wnd.__pygwt_modController.init($pyjs.appname, window)\n$wnd.__pygwt_modController.load($pyjs.appname, [\n'%s'\n])\n--></script>"
        jsscript = """<script type="text/javascript" src="%(path)s" onload="$pyjs.script_onload('%(modname)s')" onreadystatechange="$pyjs.script_onreadystate('%(modname)s')"></script>"""
        dynamic_app_libs = appscript % "',\n'".join([lib[len_ouput_dir:].replace('\\', '/') for lib in dynamic_app_libs])
        dynamic_js_libs = '\n'.join([jsscript % {'path': lib, 'modname': js_modname(lib)} for lib in dynamic_js_libs])
        early_static_app_libs = static_code(early_static_app_libs)
        static_app_libs = static_code(static_app_libs)
        early_static_js_libs = static_code(early_static_js_libs, "javascript lib")
        static_js_libs = static_code(static_js_libs, "javascript lib")
        late_static_js_libs = static_code(late_static_js_libs, "javascript lib")
        setoptions = "\n".join([("$pyjs.options['%s'] = %s;" % (n, v)).lower() for n,v in self.runtime_options])
        # Every local defined above is now available to the template.
        file_contents = template % locals()
        if self.cache_buster:
            # Embed the content hash in the output file name.
            md5sum = md5(file_contents).hexdigest()
            name_parts.insert(2, md5sum)
        out_path = os.path.join(self.output, '.'.join((name_parts)))
        out_file = file(out_path, 'w')
        out_file.write(file_contents)
        out_file.close()
        return out_path
    def _create_nocache_html(self):
        """Write <app>.nocache.html, which selects the per-platform bundle."""
        # nocache
        template = self.read_boilerplate('home.nocache.html')
        out_path = os.path.join(self.output, self.top_module + ".nocache.html")
        select_tmpl = """O(["true","%s"],"%s");\n"""
        script_selectors = StringIO()
        for platform in self.platforms:
            cache_html = os.path.basename(self.app_files[platform])
            sel = select_tmpl % (platform, cache_html)
            script_selectors.write(sel)
        out_file = file(out_path, 'w')
        out_file.write(template % dict(
            app_name = self.top_module,
            script_selectors = script_selectors.getvalue()
        ))
        out_file.close()
    def _create_app_html(self, file_name):
        """ Checks if a base HTML-file is available in the PyJamas
        output directory.
        If the HTML-file isn't available, it will be created.
        If a CSS-file with the same name is available
        in the output directory, a reference to this CSS-file
        is included.
        If no CSS-file is found, this function will look for a special
        CSS-file in the output directory, with the name
        "pyjamas_default.css", and if found it will be referenced
        in the generated HTML-file.

        Returns 0 when an existing file was left untouched, 1 when a new
        file was written.
        """
        # if html file in output directory exists, leave it alone.
        if os.path.exists(file_name):
            return 0
        if os.path.exists(
            os.path.join(self.output, self.top_module + '.css' )):
            css = "<link rel='stylesheet' href='" + self.top_module + ".css'>"
        elif os.path.exists(
            os.path.join(self.output, 'pyjamas_default.css' )):
            css = "<link rel='stylesheet' href='pyjamas_default.css'>"
        else:
            css = ''
        title = 'PyJamas Auto-Generated HTML file ' + self.top_module
        base_html = APP_HTML_TEMPLATE % {'modulename': self.top_module,
                                         'title': title, 'css': css,
                                         'bootstrap_file': self.bootstrap_file,
                                         }
        fh = open (file_name, 'w')
        fh.write (base_html)
        fh.close ()
        return 1
# Cache of the last-seen modification time per path, keyed by path.
MODIFIED_TIME = {}
def is_modified(path):
    """Return True if *path*'s mtime changed since the last call for it.

    The first call for a given path always returns True (nothing cached
    yet).  A change is reported on stdout as a side effect.
    """
    mtime = os.path.getmtime(path)
    if MODIFIED_TIME.get(path) == mtime:
        return False
    MODIFIED_TIME[path] = mtime
    print('mtime changed for %s.' % path)
    return True
def serve(path):
    """Announce that file-modification monitoring is starting.

    NOTE: *path* is currently unused; the message reports the current
    working directory.
    """
    print("\nMonitoring file modifications in %s ..."
          % os.path.abspath(os.curdir))
def build(top_module, pyjs, options, app_platforms,
          runtime_options, args):
    """Link *top_module* for each platform in *app_platforms*.

    Drives a BrowserLinker configured from the parsed command-line
    *options*.  With --list-imports, no output path is reported; instead
    the dependency and visited-module reports are printed.
    """
    print "Building :", top_module
    print "PYJSPATH :", '\n '.join(['['] + [p for p in pyjs.path]) + '\n]'
    translator_arguments= translator.get_compile_options(options)
    l = BrowserLinker(args,
                      output=options.output,
                      platforms=app_platforms,
                      path=pyjs.path,
                      js_libs=options.js_includes,
                      unlinked_modules=options.unlinked_modules,
                      keep_lib_files=options.keep_lib_files,
                      compile_inplace=options.compile_inplace,
                      translator_arguments=translator_arguments,
                      multi_file=options.multi_file,
                      cache_buster=options.cache_buster,
                      bootstrap_file=options.bootstrap_file,
                      public_folder=options.public_folder,
                      runtime_options=runtime_options,
                      list_imports=options.list_imports,
                      )
    # Invoking the linker runs the whole visit_start/.../visit_end pass.
    l()
    if not options.list_imports:
        print "Built to :", os.path.abspath(options.output)
        return
    # --list-imports mode: report dependencies per file and per platform.
    print "Dependencies"
    for f, deps in l.dependencies.items():
        print "%s\n%s" % (f, '\n'.join(map(lambda x: "\t%s" % x, deps)))
    print
    print "Visited Modules"
    for plat, deps in l.visited_modules.items():
        print "%s\n%s" % (plat, '\n'.join(map(lambda x: "\t%s" % x, deps)))
    print
def build_script():
usage = """
usage: %prog [options] <application module name>
This is the command | |
retrieved bgpIPRouteProperty data using find and the newly added bgpIPRouteProperty data available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._create(locals())
def remove(self):
"""Deletes all the bgpIPRouteProperty data in this instance from server.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
    def find(self, AdvertiseAsBgp3107=None, AdvertiseAsBgp3107Sr=None, AdvertiseAsRfc8277=None, AsPathASString=None, Count=None, DescriptiveName=None, Name=None, NoOfASPathSegmentsPerRouteRange=None, NoOfClusters=None, NoOfCommunities=None, NoOfExternalCommunities=None, NoOfLabels=None, NoOfTlvs=None):
        """Finds and retrieves bgpIPRouteProperty data from the server.
        All named parameters support regex and can be used to selectively retrieve bgpIPRouteProperty data from the server.
        By default the find method takes no parameters and will retrieve all bgpIPRouteProperty data from the server.
        Args:
            AdvertiseAsBgp3107 (bool): Will cause this route to be sent as BGP 3107 MPLS SAFI route
            AdvertiseAsBgp3107Sr (bool): Will cause this route to be sent as BGP 3107 SR MPLS SAFI route
            AdvertiseAsRfc8277 (bool): Will cause this route to be sent as RFC 8277 MPLS SAFI route
            AsPathASString (list(str)): Displays configured AS paths. Random AS paths are appended after Non-Random AS paths when configured. Each row displays the AS Path configured for the 1st route of a Route Range.
            Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group
            DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but maybe offers more context
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            NoOfASPathSegmentsPerRouteRange (number): Number Of non-random or manually configured AS Path Segments Per Route Range
            NoOfClusters (number): Number of Clusters
            NoOfCommunities (number): Number of Communities
            NoOfExternalCommunities (number): Number of Extended Communities
            NoOfLabels (number): Number of Labels
            NoOfTlvs (number): Number of TLVs
        Returns:
            self: This instance with matching bgpIPRouteProperty data retrieved from the server available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # The keyword arguments are forwarded verbatim via locals() as the
        # server-side match filter, so parameter names must mirror the
        # server attribute names exactly.
        return self._select(locals())
def read(self, href):
"""Retrieves a single instance of bgpIPRouteProperty data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the bgpIPRouteProperty data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
    def AgeOutRoutes(self, Percentage):
        """Executes the ageOutRoutes operation on the server.
        Age out percentage of BGP Routes in a Route Range
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            Percentage (number): This parameter requires a percentage of type kInteger
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE(review): this method is redefined twice below with different
        # signatures; in Python only the last definition survives.  The
        # earlier definitions document alternative call forms only.
        # Arg1 is injected into locals() so the payload carries the hrefs.
        Arg1 = self
        return self._execute('AgeOutRoutes', payload=locals(), response_object=None)
    def AgeOutRoutes(self, Percentage, SessionIndices):
        """Executes the ageOutRoutes operation on the server.
        Age out percentage of BGP Routes in a Route Range
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            Percentage (number): This parameter requires a percentage of type kInteger
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE(review): shadowed by the following AgeOutRoutes definition;
        # kept for documentation of this call form.  Arg1 is injected into
        # locals() so the payload carries the hrefs.
        Arg1 = self
        return self._execute('AgeOutRoutes', payload=locals(), response_object=None)
    def AgeOutRoutes(self, SessionIndices, Percentage):
        """Executes the ageOutRoutes operation on the server.
        Age out percentage of BGP Routes in a Route Range
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
            Percentage (number): This parameter requires a percentage of type kInteger
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # NOTE: the SessionIndices/Percentage descriptions were previously
        # swapped in this docstring; corrected to match the parameter types.
        # This is the surviving definition of AgeOutRoutes (the two earlier
        # ones are shadowed).  Arg1 is injected into locals() for the payload.
        Arg1 = self
        return self._execute('AgeOutRoutes', payload=locals(), response_object=None)
    def Ageoutroutes(self, Arg2, Arg3):
        """Executes the ageoutroutes operation on the server.
        Completely/Partially age out routes contained in this route range.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the group. An empty list indicates all instances in the group.
            Arg3 (number): What percentage of routes to age out. 100% means all routes.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Unlike AgeOutRoutes above, this variant targets a single href
        # rather than the encapsulated list; Arg1 is injected into locals()
        # so it is included in the payload.
        Arg1 = self.href
        return self._execute('Ageoutroutes', payload=locals(), response_object=None)
def GenerateIpv6Routes(self, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9, Arg10, Arg11, Arg12, Arg13, Arg14, Arg15, Arg16):
    """Executes the generateIpv6Routes operation on the server.

    Generate Primary and Duplicate Routes with advanced prefix length distribution options.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (number): Number of Primary Routes per Device.
        Arg3 (number): Percentage to Duplicate Primary Routes per Device.
        Arg4 (number): Number of Routes per Route Range.
        Arg5 (str): Network Address Start Value.
        Arg6 (str): Network Address Step Value.
        Arg7 (str(custom|even|exponential|fixed|internet|random)): Prefix Length Distribution Type.
        Arg8 (str(perDevice|perPort|perTopology)): Prefix Length Distribution Scope.
        Arg9 (obj(ixnetwork_restpy.files.Files)): Source file having custom distribution information.
        Arg10 (number): Prefix Length Start Value. Applicable only for Fixed, Even and Exponential distribution type.
        Arg11 (number): Prefix Length End Value. Applicable only for Even and Exponential distribution type.
        Arg12 (bool): Do not include Loopback Address in the generated Address Range
        Arg13 (bool): Do not include Multicast Address in the generated Address Range
        Arg14 (str): Address Ranges that will be skipped. You can provide multiple ranges separated by ','. Example: aa:0:1:b: - bb:0:2:c:, aa00: - bb00:1
        Arg15 (str): AS Path Suffix for Primary Routes
        Arg16 (str): AS Path Suffix for Duplicate Routes

    Returns:
        list(str): ID to associate each async action invocation.

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() serializes self, Arg2..Arg16 and Arg1 into the request
    # body -- do not rename locals here.
    Arg1 = self.href
    # Arg9 must be a restpy Files object (custom-distribution source file).
    self._check_arg_type(Arg9, Files)
    return self._execute('GenerateIpv6Routes', payload=locals(), response_object=None)
def GenerateRoutes(self, Arg2, Arg3, Arg4, Arg5, Arg6, Arg7, Arg8, Arg9, Arg10, Arg11, Arg12, Arg13, Arg14, Arg15, Arg16):
    """Executes the generateRoutes operation on the server.

    Generate Primary and Duplicate Routes with advanced prefix length distribution options.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (number): Number of Primary Routes per Device.
        Arg3 (number): Percentage to Duplicate Primary Routes per Device.
        Arg4 (number): Number of Routes per Route Range.
        Arg5 (str): Network Address Start Value.
        Arg6 (str): Network Address Step Value.
        Arg7 (str(custom|even|exponential|fixed|internet|random)): Prefix Length Distribution Type.
        Arg8 (str(perDevice|perPort|perTopology)): Prefix Length Distribution Scope.
        Arg9 (obj(ixnetwork_restpy.files.Files)): Source file having custom distribution information.
        Arg10 (number): Prefix Length Start Value. Applicable only for Fixed, Even and Exponential distribution type.
        Arg11 (number): Prefix Length End Value. Applicable only for Even and Exponential distribution type.
        Arg12 (bool): Do not include Loopback Address in the generated Address Range
        Arg13 (bool): Do not include Multicast Address in the generated Address Range
        Arg14 (str): Address Ranges that will be skipped. You can provide multiple ranges separated by ','. Example: 192.0.0.0 - 172.16.17.32
        Arg15 (str): AS Path Suffix for Primary Routes
        Arg16 (str): AS Path Suffix for Duplicate Routes

    Returns:
        list(str): ID to associate each async action invocation.

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() serializes self, Arg2..Arg16 and Arg1 into the request
    # body -- do not rename locals here.
    Arg1 = self.href
    # Arg9 must be a restpy Files object (custom-distribution source file).
    self._check_arg_type(Arg9, Files)
    return self._execute('GenerateRoutes', payload=locals(), response_object=None)
def ImportBgpRoutes(self, Arg2, Arg3, Arg4, Arg5, Arg6):
    """Executes the importBgpRoutes operation on the server.

    Import IPv4 routes from standard route file. Supported format - Cisco IOS, Juniper JUNOS, Classic Ixia (.csv) and standard CSV.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (str(replicate|roundRobin)): Option to specify distribution type, for distributing imported routes across all BGP Peer. Options: Round-Robin, for allocating routes sequentially, and Replicate, for allocating all routes to each Peer.
        Arg3 (bool): Import only the best routes (provided route file has this information).
        Arg4 (str(overwriteTestersAddress|preserveFromFile)): Option for setting Next Hop modification type.
        Arg5 (str(cisco|csv|juniper)): Import routes file type. Route import may fail if the file type does not match the file being imported.
        Arg6 (obj(ixnetwork_restpy.files.Files)): Select source file having route information.

    Returns:
        list(str): ID to associate each asynchronous action invocation.

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() serializes self, Arg2..Arg6 and Arg1 into the request
    # body -- do not rename locals here.
    Arg1 = self.href
    # Arg6 must be a restpy Files object (route file to upload).
    self._check_arg_type(Arg6, Files)
    return self._execute('ImportBgpRoutes', payload=locals(), response_object=None)
def ReadvertiseRoutes(self):
    """Executes the readvertiseRoutes operation on the server.

    Re-advertise Aged out OSPF Routes in a Route Range

    NOTE(review): this zero-argument variant is shadowed by the
    ``ReadvertiseRoutes(self, SessionIndices)`` definition that follows it --
    Python keeps only the last ``def`` with a given name.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() serializes self and Arg1 into the request body.
    Arg1 = self
    return self._execute('ReadvertiseRoutes', payload=locals(), response_object=None)
def ReadvertiseRoutes(self, SessionIndices):
"""Executes the readvertiseRoutes operation on the server.
Re-advertise Aged out OSPF Routes in a Route Range
Args:
Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 | |
S_StartSIdentify__Fv()")
# Symbol/type annotations for a decompiled PSX executable. Each pair first
# clears any existing items at the address (del_items) and then assigns the
# demangled C++ function signature (SetType). Appears to be a generated
# Ghidra-style headless script -- TODO confirm which tool defines
# del_items/SetType. Addresses and signature strings are data: do not edit.
#
# Store / shop text-UI handlers (S_* / SText* / stores)
del_items(0x8006803C)
SetType(0x8006803C, "void S_StartIdShow__Fv()")
del_items(0x80068210)
SetType(0x80068210, "void S_StartTalk__Fv()")
del_items(0x80068440)
SetType(0x80068440, "void S_StartTavern__Fv()")
del_items(0x80068538)
SetType(0x80068538, "void S_StartBarMaid__Fv()")
del_items(0x8006860C)
SetType(0x8006860C, "void S_StartDrunk__Fv()")
del_items(0x800686E0)
SetType(0x800686E0, "void StartStore__Fc(char s)")
del_items(0x800689D0)
SetType(0x800689D0, "void DrawSText__Fv()")
del_items(0x80068A10)
SetType(0x80068A10, "void DrawSTextTSK__FP4TASK(struct TASK *T)")
del_items(0x80068AD8)
SetType(0x80068AD8, "void DoThatDrawSText__Fv()")
del_items(0x80068C84)
SetType(0x80068C84, "void STextESC__Fv()")
del_items(0x80068DE0)
SetType(0x80068DE0, "void STextUp__Fv()")
del_items(0x80068F78)
SetType(0x80068F78, "void STextDown__Fv()")
del_items(0x80069128)
SetType(0x80069128, "void S_SmithEnter__Fv()")
del_items(0x800691FC)
SetType(0x800691FC, "void SetGoldCurs__Fii(int pnum, int i)")
del_items(0x80069278)
SetType(0x80069278, "void SetSpdbarGoldCurs__Fii(int pnum, int i)")
del_items(0x800692F4)
SetType(0x800692F4, "void TakePlrsMoney__Fl(long cost)")
del_items(0x80069740)
SetType(0x80069740, "void SmithBuyItem__Fv()")
del_items(0x80069934)
SetType(0x80069934, "void S_SBuyEnter__Fv()")
del_items(0x80069B58)
SetType(0x80069B58, "void SmithBuyPItem__Fv()")
del_items(0x80069CE0)
SetType(0x80069CE0, "void S_SPBuyEnter__Fv()")
del_items(0x80069F10)
SetType(0x80069F10, "unsigned char StoreGoldFit__Fi(int idx)")
del_items(0x8006A1C8)
SetType(0x8006A1C8, "void PlaceStoreGold__Fl(long v)")
del_items(0x8006A42C)
SetType(0x8006A42C, "void StoreSellItem__Fv()")
del_items(0x8006A720)
SetType(0x8006A720, "void S_SSellEnter__Fv()")
del_items(0x8006A824)
SetType(0x8006A824, "void SmithRepairItem__Fv()")
del_items(0x8006AA94)
SetType(0x8006AA94, "void S_SRepairEnter__Fv()")
del_items(0x8006ABF0)
SetType(0x8006ABF0, "void S_WitchEnter__Fv()")
del_items(0x8006ACA0)
SetType(0x8006ACA0, "void WitchBuyItem__Fv()")
del_items(0x8006AEA0)
SetType(0x8006AEA0, "void S_WBuyEnter__Fv()")
del_items(0x8006B08C)
SetType(0x8006B08C, "void S_WSellEnter__Fv()")
del_items(0x8006B190)
SetType(0x8006B190, "void WitchRechargeItem__Fv()")
del_items(0x8006B308)
SetType(0x8006B308, "void S_WRechargeEnter__Fv()")
del_items(0x8006B464)
SetType(0x8006B464, "void S_BoyEnter__Fv()")
del_items(0x8006B59C)
SetType(0x8006B59C, "void BoyBuyItem__Fv()")
del_items(0x8006B620)
SetType(0x8006B620, "void HealerBuyItem__Fv()")
del_items(0x8006B8C4)
SetType(0x8006B8C4, "void S_BBuyEnter__Fv()")
del_items(0x8006BA9C)
SetType(0x8006BA9C, "void StoryIdItem__Fv()")
del_items(0x8006BDE8)
SetType(0x8006BDE8, "void S_ConfirmEnter__Fv()")
del_items(0x8006BF04)
SetType(0x8006BF04, "void S_HealerEnter__Fv()")
del_items(0x8006BF9C)
SetType(0x8006BF9C, "void S_HBuyEnter__Fv()")
del_items(0x8006C1A8)
SetType(0x8006C1A8, "void S_StoryEnter__Fv()")
del_items(0x8006C240)
SetType(0x8006C240, "void S_SIDEnter__Fv()")
del_items(0x8006C3BC)
SetType(0x8006C3BC, "void S_TalkEnter__Fv()")
del_items(0x8006C5B4)
SetType(0x8006C5B4, "void S_TavernEnter__Fv()")
del_items(0x8006C624)
SetType(0x8006C624, "void S_BarmaidEnter__Fv()")
del_items(0x8006C694)
SetType(0x8006C694, "void S_DrunkEnter__Fv()")
del_items(0x8006C704)
SetType(0x8006C704, "void STextEnter__Fv()")
del_items(0x8006C910)
SetType(0x8006C910, "void CheckStoreBtn__Fv()")
del_items(0x8006CA44)
SetType(0x8006CA44, "void ReleaseStoreBtn__Fv()")
del_items(0x8006CA58)
SetType(0x8006CA58, "void _GLOBAL__D_pSTextBoxCels()")
del_items(0x8006CA80)
SetType(0x8006CA80, "void _GLOBAL__I_pSTextBoxCels()")
del_items(0x8006CAA8)
SetType(0x8006CAA8, "unsigned short GetDown__C4CPad_addr_8006CAA8(struct CPad *this)")
del_items(0x8006CAD0)
SetType(0x8006CAD0, "void SetRGB__6DialogUcUcUc_addr_8006CAD0(struct Dialog *this, unsigned char R, unsigned char G, unsigned char B)")
del_items(0x8006CAF0)
SetType(0x8006CAF0, "void SetBorder__6Dialogi_addr_8006CAF0(struct Dialog *this, int Type)")
del_items(0x8006CAF8)
SetType(0x8006CAF8, "void ___6Dialog_addr_8006CAF8(struct Dialog *this, int __in_chrg)")
del_items(0x8006CB20)
SetType(0x8006CB20, "struct Dialog *__6Dialog_addr_8006CB20(struct Dialog *this)")
# Town level construction (T_* pass-3 renderer, CreateTown)
del_items(0x8006CB7C)
SetType(0x8006CB7C, "void T_DrawView__Fii(int StartX, int StartY)")
del_items(0x8006CD2C)
SetType(0x8006CD2C, "void T_FillSector__FPUcT0iiiib(unsigned char *P3Tiles, unsigned char *pSector, int xi, int yi, int w, int h, bool AddSec)")
del_items(0x8006CF24)
SetType(0x8006CF24, "void T_FillTile__FPUciii(unsigned char *P3Tiles, int xx, int yy, int t)")
del_items(0x8006D014)
SetType(0x8006D014, "void T_Pass3__Fv()")
del_items(0x8006D3D4)
SetType(0x8006D3D4, "void CreateTown__Fi(int entry)")
del_items(0x8006D53C)
SetType(0x8006D53C, "unsigned char *GRL_LoadFileInMemSig__FPCcPUl(char *Name, unsigned long *Len)")
del_items(0x8006D620)
SetType(0x8006D620, "void GRL_StripDir__FPcPCc(char *Dest, char *Src)")
# Level-transition triggers (Force*Trig, CheckTriggers)
del_items(0x8006D6B8)
SetType(0x8006D6B8, "void InitVPTriggers__Fv()")
del_items(0x8006D700)
SetType(0x8006D700, "unsigned char ForceTownTrig__Fv()")
del_items(0x8006DA18)
SetType(0x8006DA18, "unsigned char ForceL1Trig__Fv()")
del_items(0x8006DCC8)
SetType(0x8006DCC8, "unsigned char ForceL2Trig__Fv()")
del_items(0x8006E128)
SetType(0x8006E128, "unsigned char ForceL3Trig__Fv()")
del_items(0x8006E5A4)
SetType(0x8006E5A4, "unsigned char ForceL4Trig__Fv()")
del_items(0x8006EAB0)
SetType(0x8006EAB0, "void Freeupstairs__Fv()")
del_items(0x8006EB70)
SetType(0x8006EB70, "unsigned char ForceSKingTrig__Fv()")
del_items(0x8006EC64)
SetType(0x8006EC64, "unsigned char ForceSChambTrig__Fv()")
del_items(0x8006ED58)
SetType(0x8006ED58, "unsigned char ForcePWaterTrig__Fv()")
del_items(0x8006EE4C)
SetType(0x8006EE4C, "void CheckTrigForce__Fv()")
del_items(0x8006F168)
SetType(0x8006F168, "void FadeGameOut__Fv()")
del_items(0x8006F204)
SetType(0x8006F204, "bool IsTrigger__Fii(int x, int y)")
del_items(0x8006F268)
SetType(0x8006F268, "void CheckTriggers__Fi(int pnum)")
# Spell casting and mana management
del_items(0x8006F784)
SetType(0x8006F784, "int GetManaAmount__Fii(int id, int sn)")
del_items(0x8006FA4C)
SetType(0x8006FA4C, "void UseMana__Fii(int id, int sn)")
del_items(0x8006FB90)
SetType(0x8006FB90, "unsigned char CheckSpell__FiicUc(int id, int sn, char st, unsigned char manaonly)")
del_items(0x8006FC30)
SetType(0x8006FC30, "void CastSpell__Fiiiiiiii(int id, int spl, int sx, int sy, int dx, int dy, int caster, int spllvl)")
del_items(0x8006FEDC)
SetType(0x8006FEDC, "void DoResurrect__Fii(int pnum, int rid)")
del_items(0x80070190)
SetType(0x80070190, "void DoHealOther__Fii(int pnum, int rid)")
# Sound effects and music (snd_* / music_*)
del_items(0x800703F4)
SetType(0x800703F4, "void snd_update__FUc(unsigned char bStopAll)")
del_items(0x800703FC)
SetType(0x800703FC, "void snd_get_volume__FPCcPl(char *pszKey, long *plVolume)")
del_items(0x80070464)
SetType(0x80070464, "void snd_stop_snd__FP4TSnd(struct TSnd *pSnd)")
del_items(0x80070484)
SetType(0x80070484, "void snd_play_snd__FP4TSFXll(struct TSFX *pSnd, long lVolume, long lPan)")
del_items(0x800704E4)
SetType(0x800704E4, "void snd_play_msnd__FUsll(unsigned short pszName, long lVolume, long lPan)")
del_items(0x80070574)
SetType(0x80070574, "void snd_init__FUl(unsigned long hWnd)")
del_items(0x800705C4)
SetType(0x800705C4, "void music_stop__Fv()")
del_items(0x80070608)
SetType(0x80070608, "void music_fade__Fv()")
del_items(0x80070648)
SetType(0x80070648, "void music_start__Fi(int nTrack)")
del_items(0x800706CC)
SetType(0x800706CC, "void music_hold__Fv()")
del_items(0x8007072C)
SetType(0x8007072C, "void music_release__Fv()")
del_items(0x8007077C)
SetType(0x8007077C, "void ClrCursor__Fi(int num)")
# GamePad input handling (GamePad class methods and tasks)
del_items(0x800707CC)
SetType(0x800707CC, "void flyabout__7GamePad(struct GamePad *this)")
del_items(0x80070C88)
SetType(0x80070C88, "void CloseInvChr__Fv()")
del_items(0x80070CD0)
SetType(0x80070CD0, "void WorldToOffset__Fiii(int pnum, int WorldX, int WorldY)")
del_items(0x80070D7C)
SetType(0x80070D7C, "char pad_UpIsUp__Fi(int pval)")
del_items(0x80070DEC)
SetType(0x80070DEC, "char pad_UpIsUpRight__Fi(int pval)")
del_items(0x80070E5C)
SetType(0x80070E5C, "struct GamePad *__7GamePadi(struct GamePad *this, int player_num)")
del_items(0x80070F54)
SetType(0x80070F54, "void SetMoveStyle__7GamePadc(struct GamePad *this, char style_num)")
del_items(0x80070F94)
SetType(0x80070F94, "void SetDownButton__7GamePadiPFi_v(struct GamePad *this, int pad_val, void (*func)())")
del_items(0x80070FD8)
SetType(0x80070FD8, "void SetComboDownButton__7GamePadiPFi_v(struct GamePad *this, int pad_val, void (*func)())")
del_items(0x8007101C)
SetType(0x8007101C, "void SetAllButtons__7GamePadP11KEY_ASSIGNS(struct GamePad *this, struct KEY_ASSIGNS *actions)")
del_items(0x8007127C)
SetType(0x8007127C, "void GetAllButtons__7GamePadP11KEY_ASSIGNS(struct GamePad *this, struct KEY_ASSIGNS *actions)")
del_items(0x8007142C)
SetType(0x8007142C, "int GetActionButton__7GamePadPFi_v(struct GamePad *this, void (*func)())")
del_items(0x80071488)
SetType(0x80071488, "void SetUpAction__7GamePadPFi_vT1(struct GamePad *this, void (*func)(), void (*upfunc)())")
del_items(0x800714C4)
SetType(0x800714C4, "void RunFunc__7GamePadi(struct GamePad *this, int pad)")
del_items(0x80071588)
SetType(0x80071588, "void ButtonDown__7GamePadi(struct GamePad *this, int button)")
del_items(0x80071994)
SetType(0x80071994, "void TestButtons__7GamePad(struct GamePad *this)")
del_items(0x80071AD8)
SetType(0x80071AD8, "bool CheckCentre__FP12PlayerStructi(struct PlayerStruct *player, int dir)")
del_items(0x80071BCC)
SetType(0x80071BCC, "int CheckDirs__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80071CE4)
SetType(0x80071CE4, "int LeftOf__Fi(int dir)")
del_items(0x80071CFC)
SetType(0x80071CFC, "int RightOf__Fi(int dir)")
del_items(0x80071D18)
SetType(0x80071D18, "int CheckSide__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80071D6C)
SetType(0x80071D6C, "int CheckBodge__7GamePadi(struct GamePad *this, int dir)")
del_items(0x80072178)
SetType(0x80072178, "void walk__7GamePadc(struct GamePad *this, char cmd)")
del_items(0x80072484)
SetType(0x80072484, "void check_around_player__7GamePad(struct GamePad *this)")
del_items(0x80072824)
SetType(0x80072824, "void show_combos__7GamePad(struct GamePad *this)")
del_items(0x800729FC)
SetType(0x800729FC, "void Handle__7GamePad(struct GamePad *this)")
del_items(0x80073084)
SetType(0x80073084, "void GamePadTask__FP4TASK(struct TASK *T)")
del_items(0x80073154)
SetType(0x80073154, "void PostGamePad__Fiiii(int val, int var1, int var2, int var3)")
del_items(0x80073238)
SetType(0x80073238, "void Init_GamePad__Fv()")
del_items(0x80073268)
SetType(0x80073268, "void InitGamePadVars__Fv()")
del_items(0x800732F8)
SetType(0x800732F8, "int SetWalkStyle__Fii(int pnum, int style)")
del_items(0x80073368)
SetType(0x80073368, "void MoveToScrollTarget__7CBlocks_addr_80073368(struct CBlocks *this)")
del_items(0x8007337C)
SetType(0x8007337C, "unsigned short GetDown__C4CPad_addr_8007337C(struct CPad *this)")
del_items(0x800733A4)
SetType(0x800733A4, "unsigned short GetUp__C4CPad_addr_800733A4(struct CPad *this)")
del_items(0x800733CC)
SetType(0x800733CC, "unsigned short GetCur__C4CPad_addr_800733CC(struct CPad *this)")
del_items(0x800733F4)
SetType(0x800733F4, "void DoGameTestStuff__Fv()")
del_items(0x80073420)
SetType(0x80073420, "void DoInitGameStuff__Fv()")
# Memory allocator stubs and window-proc glue (SMem* / GRL_*)
del_items(0x80073454)
SetType(0x80073454, "void *SMemAlloc(unsigned long bytes, char *filename, int linenumber, unsigned long flags)")
del_items(0x80073474)
SetType(0x80073474, "unsigned char SMemFree(void *ptr, char *filename, int linenumber, unsigned long flags)")
del_items(0x80073494)
SetType(0x80073494, "void GRL_InitGwin__Fv()")
del_items(0x800734A0)
SetType(0x800734A0, "unsigned long (*GRL_SetWindowProc__FPFUlUilUl_Ul(unsigned long (*NewProc)()))()")
del_items(0x800734B0)
SetType(0x800734B0, "void GRL_CallWindowProc__FUlUilUl(unsigned long hw, unsigned int msg, long wp, unsigned long lp)")
del_items(0x800734D8)
SetType(0x800734D8, "unsigned char GRL_PostMessage__FUlUilUl(unsigned long hWnd, unsigned int Msg, long wParam, unsigned long lParam)")
del_items(0x80073584)
SetType(0x80073584, "char *Msg2Txt__Fi(int Msg)")
# Localization (LANG_* string-database handling)
del_items(0x800735CC)
SetType(0x800735CC, "enum LANG_TYPE LANG_GetLang__Fv()")
del_items(0x800735D8)
SetType(0x800735D8, "void LANG_SetDb__F10LANG_DB_NO(enum LANG_DB_NO NewLangDbNo)")
del_items(0x80073758)
SetType(0x80073758, "char *GetStr__Fi(int StrId)")
del_items(0x800737C0)
SetType(0x800737C0, "void LANG_ReloadMainTXT__Fv()")
del_items(0x800737E0)
SetType(0x800737E0, "void LANG_SetLang__F9LANG_TYPE(enum LANG_TYPE NewLanguageType)")
del_items(0x80073958)
SetType(0x80073958, "void DumpCurrentText__Fv()")
del_items(0x800739B0)
SetType(0x800739B0, "int CalcNumOfStrings__FPPc(char **TPtr)")
del_items(0x800739BC)
SetType(0x800739BC, "void GetLangFileName__F9LANG_TYPEPc(enum LANG_TYPE NewLanguageType, char *Dest)")
del_items(0x80073ADC)
SetType(0x80073ADC, "char *GetLangFileNameExt__F9LANG_TYPE(enum LANG_TYPE NewLanguageType)")
# Missile rendering callbacks (Func<MISSILE>) and player/texture accessors
del_items(0x80073B5C)
SetType(0x80073B5C, "void TempPrintMissile__FiiiiiiiiccUcUcUcc(int ScrX, int ScrY, int OtPos, int spell, int aframe, int direction, int anim, int sfx, int xflip, int yflip, int red, int grn, int blu, int semi)")
del_items(0x800740A0)
SetType(0x800740A0, "void FuncTOWN__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074220)
SetType(0x80074220, "void FuncRPORTAL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074380)
SetType(0x80074380, "void FuncFIREBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074418)
SetType(0x80074418, "void FuncHBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800744C8)
SetType(0x800744C8, "void FuncLIGHTNING__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007452C)
SetType(0x8007452C, "void FuncGUARDIAN__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074644)
SetType(0x80074644, "void FuncFIREWALL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800746DC)
SetType(0x800746DC, "void FuncFIREMOVE__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074774)
SetType(0x80074774, "void FuncFLAME__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800747DC)
SetType(0x800747DC, "void FuncARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007487C)
SetType(0x8007487C, "void FuncFARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007495C)
SetType(0x8007495C, "void FuncLARROW__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074A34)
SetType(0x80074A34, "void FuncMAGMABALL__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074AC4)
SetType(0x80074AC4, "void FuncBONESPIRIT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074BE0)
SetType(0x80074BE0, "void FuncACID__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074C7C)
SetType(0x80074C7C, "void FuncACIDSPLAT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074CE4)
SetType(0x80074CE4, "void FuncACIDPUD__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074D4C)
SetType(0x80074D4C, "void FuncFLARE__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074E80)
SetType(0x80074E80, "void FuncFLAREXP__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80074FC4)
SetType(0x80074FC4, "void FuncCBOLT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007502C)
SetType(0x8007502C, "void FuncBOOM__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80075084)
SetType(0x80075084, "void FuncELEMENT__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80075150)
SetType(0x80075150, "void FuncMISEXP__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800751B4)
SetType(0x800751B4, "void FuncRHINO__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800751BC)
SetType(0x800751BC, "void FuncFLASH__FP13MissileStructiii(struct MissileStruct *Ms, int x, int y, int OtPos)")
del_items(0x800756E4)
SetType(0x800756E4, "void FuncMANASHIELD__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x8007578C)
SetType(0x8007578C, "void FuncFLASH2__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x80075794)
SetType(0x80075794, "void FuncRESURRECTBEAM__FP13MissileStructiii(struct MissileStruct *Ms, int ScrX, int ScrY, int OtPos)")
del_items(0x800757C8)
SetType(0x800757C8, "void PRIM_GetPrim__FPP8POLY_FT4_addr_800757C8(struct POLY_FT4 **Prim)")
del_items(0x80075844)
SetType(0x80075844, "struct CPlayer *GetPlayer__7CPlayeri_addr_80075844(int PNum)")
del_items(0x80075894)
SetType(0x80075894, "int GetLastOtPos__C7CPlayer_addr_80075894(struct CPlayer *this)")
del_items(0x800758A0)
SetType(0x800758A0, "int GetLastScrY__C7CPlayer_addr_800758A0(struct CPlayer *this)")
del_items(0x800758AC)
SetType(0x800758AC, "int GetLastScrX__C7CPlayer_addr_800758AC(struct CPlayer *this)")
del_items(0x800758B8)
SetType(0x800758B8, "int GetNumOfFrames__7TextDat_addr_800758B8(struct TextDat *this)")
del_items(0x800758CC)
SetType(0x800758CC, "struct FRAME_HDR *GetFr__7TextDati_addr_800758CC(struct TextDat *this, int FrNum)")
# Monster lists (ML_*) and object rendering (PrintOBJ_*)
del_items(0x800758E8)
SetType(0x800758E8, "void ML_Init__Fv()")
del_items(0x80075920)
SetType(0x80075920, "int ML_GetList__Fi(int Level)")
del_items(0x800759A0)
SetType(0x800759A0, "int ML_SetRandomList__Fi(int Level)")
del_items(0x80075A38)
SetType(0x80075A38, "int ML_SetList__Fii(int Level, int List)")
del_items(0x80075AE8)
SetType(0x80075AE8, "int ML_GetPresetMonsters__FiPiUl(int currlevel, int *typelist, unsigned long QuestsNeededMask)")
del_items(0x80075CA4)
SetType(0x80075CA4, "struct POLY_FT4 *DefaultObjPrint__FP12ObjectStructiiP7TextDatiii(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos, int XOffSet, int YOffSet)")
del_items(0x80075E38)
SetType(0x80075E38, "struct POLY_FT4 *LightObjPrint__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80075EF0)
SetType(0x80075EF0, "struct POLY_FT4 *DoorObjPrint__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076184)
SetType(0x80076184, "void DrawLightSpark__Fiii(int xo, int yo, int ot)")
del_items(0x8007625C)
SetType(0x8007625C, "struct POLY_FT4 *PrintOBJ_L1LIGHT__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800762E4)
SetType(0x800762E4, "struct POLY_FT4 *PrintOBJ_SKFIRE__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076310)
SetType(0x80076310, "struct POLY_FT4 *PrintOBJ_LEVER__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007633C)
SetType(0x8007633C, "struct POLY_FT4 *PrintOBJ_CHEST1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076368)
SetType(0x80076368, "struct POLY_FT4 *PrintOBJ_CHEST2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076394)
SetType(0x80076394, "struct POLY_FT4 *PrintOBJ_CHEST3__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800763C0)
SetType(0x800763C0, "struct POLY_FT4 *PrintOBJ_CANDLE1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800763E4)
SetType(0x800763E4, "struct POLY_FT4 *PrintOBJ_CANDLE2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076408)
SetType(0x80076408, "struct POLY_FT4 *PrintOBJ_CANDLEO__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076434)
SetType(0x80076434, "struct POLY_FT4 *PrintOBJ_BANNERL__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076460)
SetType(0x80076460, "struct POLY_FT4 *PrintOBJ_BANNERM__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007648C)
SetType(0x8007648C, "struct POLY_FT4 *PrintOBJ_BANNERR__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800764B8)
SetType(0x800764B8, "struct POLY_FT4 *PrintOBJ_SKPILE__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800764E4)
SetType(0x800764E4, "struct POLY_FT4 *PrintOBJ_SKSTICK1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076510)
SetType(0x80076510, "struct POLY_FT4 *PrintOBJ_SKSTICK2__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x8007653C)
SetType(0x8007653C, "struct POLY_FT4 *PrintOBJ_SKSTICK3__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076568)
SetType(0x80076568, "struct POLY_FT4 *PrintOBJ_SKSTICK4__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x80076594)
SetType(0x80076594, "struct POLY_FT4 *PrintOBJ_SKSTICK5__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int ScrX, int ScrY, struct TextDat *ObjDat, int OtPos)")
del_items(0x800765C0)
SetType(0x800765C0, "struct POLY_FT4 *PrintOBJ_CRUX1__FP12ObjectStructiiP7TextDati(struct ObjectStruct *OStr, int | |
a context-menu.
*Keyboard shortcuts* in addition to the ones in the menu:
=========== ============================================================
Key Effect
=========== ============================================================
arrows scroll through components/epochs
alt+arrows scroll to beginning/end
t topomap plot of the component under the pointer
a array-plot of the source time course of the component under
the pointer
f plot the frequency spectrum for the component under the
pointer
b butterfly plot of the original and cleaned data (of the
epoch under the pointer, or of the grand average if the
pointer is over other elements)
B Butterfly plot of condition averages
=========== ============================================================
"""
# Class-level UI metadata: document-type label (presumably used in titles or
# messages -- confirm against the FileFrame base class), window title, and the
# wx file-dialog wildcard for ICA fiff files.
_doc_name = 'component selection'
_title = 'ICA Source Time Course'
_wildcard = "ICA fiff file (*-ica.fif)|*.fif"
def __init__(
        self,
        parent: Frame,
        i_first: int,
):
    """Frame showing ICA source time courses for component selection.

    Parameters
    ----------
    parent : Frame
        Owning frame; supplies the shared ``model`` (and through it the
        document ``doc``) that this frame displays.
    i_first : int
        Index of the first ICA component shown on the initial page.
    """
    FileFrameChild.__init__(self, parent, None, None, parent.model)
    SharedToolsMenu.__init__(self)
    # prepare canvas; use the whole figure area without subplot margins
    self.canvas = FigureCanvasPanel(self)
    self.figure = self.canvas.figure
    self.figure.subplots_adjust(0, 0, 1, 1, 0, 0)
    self.figure.set_facecolor('white')
    # attributes
    self.parent = parent
    self.model = parent.model
    self.doc = parent.model.doc
    # page layout: components and epochs per page come from stored config
    self.n_comp_actual = self.n_comp = self.config.ReadInt('layout_n_comp', 10)
    self.n_comp_in_ica = len(self.doc.components)
    self.i_first = i_first
    self.n_epochs = self.config.ReadInt('layout_n_epochs', 20)
    self.i_first_epoch = 0
    self.pad_time = 0  # need to pad x-axis when showing fewer epochs than fit on axis
    self.n_epochs_in_data = len(self.doc.sources)
    self.y_scale = self.config.ReadFloat('y_scale', 10)  # scale factor for y axis
    # index/artist-handle of the currently highlighted component and epoch
    self._marked_component_i = None
    self._marked_component_h = None
    self._marked_epoch_i = None
    self._marked_epoch_h = None
    self.show_range = True  # show axis with pre/post ICA data range
    # Toolbar
    tb = self.InitToolbar(can_open=False)
    tb.AddSeparator()
    self.up_button = tb.AddTool(wx.ID_UP, "Up", Icon("tango/actions/go-up"))
    self.down_button = tb.AddTool(wx.ID_DOWN, "Down", Icon("tango/actions/go-down"))
    self.back_button = tb.AddTool(wx.ID_BACKWARD, "Back", Icon("tango/actions/go-previous"))
    self.next_button = tb.AddTool(wx.ID_FORWARD, "Next", Icon("tango/actions/go-next"))
    tb.AddSeparator()
    SharedToolsMenu.AddToolbarButtons(self, tb)
    tb.AddStretchableSpace()
    self.InitToolbarTail(tb)
    tb.Realize()
    # event bindings
    self.doc.callbacks.subscribe('case_change', self.CaseChanged)
    self.Bind(wx.EVT_TOOL, self.OnUp, id=wx.ID_UP)
    self.Bind(wx.EVT_TOOL, self.OnDown, id=wx.ID_DOWN)
    self.Bind(wx.EVT_TOOL, self.OnBackward, id=wx.ID_BACKWARD)
    self.Bind(wx.EVT_TOOL, self.OnForward, id=wx.ID_FORWARD)
    self.canvas.mpl_connect('key_release_event', self.OnCanvasKey)
    # re-Bind mouse clicks: replace the canvas' default handlers with ours
    self.canvas.Unbind(wx.EVT_LEFT_DOWN)
    self.canvas.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
    self.canvas.Unbind(wx.EVT_RIGHT_DOWN)
    self.canvas.Bind(wx.EVT_RIGHT_DOWN, self.OnRightDown)
    self._plot()
    self.UpdateTitle()
    self.Show()
    def _get_source_data(self):
        "Return ``(source_data, epoch-labels)`` tuple for current page"
        # rows provided by the layout vs. components actually available
        n_comp = self.n_comp
        n_comp_actual = self.n_comp_actual
        epoch_index = slice(self.i_first_epoch, self.i_first_epoch + self.n_epochs)
        # visible epochs x visible components
        data = self.doc.sources.sub(case=epoch_index, component=slice(self.i_first, self.i_first + n_comp))
        # one row per component, epochs concatenated along the time axis
        y = data.get_data(('component', 'case', 'time')).reshape((n_comp_actual, -1))
        if y.base is not None and data.x.base is not None:
            # y is a view into the document's data; copy before offsetting in place
            y = y.copy()
        # vertical offsets: first component at the top, one y_scale step per row
        # (self.show_range reserves one extra row for the range display)
        start = n_comp - 1 + self.show_range
        stop = -1 + (n_comp - n_comp_actual) + self.show_range
        y += np.arange(start * self.y_scale, stop * self.y_scale, -self.y_scale)[:, None]
        # pad epoch labels for x-axis
        epoch_labels = self.doc.epoch_labels[epoch_index]
        if len(epoch_labels) < self.n_epochs:
            # assumes epoch_labels is a tuple -- TODO confirm against doc
            epoch_labels += ('',) * (self.n_epochs - len(epoch_labels))
        return y, epoch_labels
def _pad(self, y):
"Pad time-axis when data contains fewer epochs than the x-axis"
if self.pad_time:
return np.pad(y, (0, self.pad_time), 'constant')
else:
return y
    def _get_raw_range(self):
        """Return ``(y_min, y_max)`` of the pre-ICA data for the visible epochs."""
        epoch_index = slice(self.i_first_epoch, self.i_first_epoch + self.n_epochs)
        y_min = self._pad(self.doc.pre_ica_min[epoch_index].x.ravel())
        y_max = self._pad(self.doc.pre_ica_max[epoch_index].x.ravel())
        return y_min, y_max
    def _get_clean_range(self):
        """Return ``(y_min, y_max)`` of the cleaned (ICA-applied) visible epochs."""
        epoch_index = slice(self.i_first_epoch, self.i_first_epoch + self.n_epochs)
        epochs = self.doc.epochs[epoch_index]
        y_clean = asndvar(self.doc.apply(epochs))
        y_min = y_clean.min('sensor').x.ravel()
        y_max = y_clean.max('sensor').x.ravel()
        # scale to match the raw-range lines (same units as pre_ica_min/max)
        y_min /= self.doc.pre_ica_range_scale
        y_max /= self.doc.pre_ica_range_scale
        return self._pad(y_min), self._pad(y_max)
    def _plot(self):
        """Rebuild the figure: component topomaps, source time courses and range lines."""
        # partition figure
        self.figure.clf()
        figheight = self.figure.get_figheight()
        n_comp = self.n_comp
        # make sure there are no empty lines
        if self.i_first and self.n_comp_in_ica - self.i_first < n_comp:
            self.i_first = max(0, self.n_comp_in_ica - n_comp)
        # further layout-relevant properties
        n_comp_actual = min(self.n_comp_in_ica - self.i_first, n_comp)
        self.n_comp_actual = n_comp_actual
        elen = len(self.doc.sources.time)
        # layout
        n_rows = n_comp + self.show_range
        axheight = 1 / (n_rows + 0.5)  # 0.5 = bottom space for epoch labels
        # topomaps (square axes, one per visible component, stacked at the left)
        ax_size_in = axheight * figheight
        axwidth = ax_size_in / self.figure.get_figwidth()
        left = axwidth / 2
        self.topo_plots = []
        self.topo_labels = []
        for i in range(n_comp_actual):
            i_comp = self.i_first + i
            ax = self.figure.add_axes((left, 1 - (i + 1) * axheight, axwidth, axheight))
            layers = AxisData([DataLayer(self.doc.components[i_comp], PlotType.IMAGE)])
            p = _ax_topomap(ax, layers, **TOPO_ARGS)
            text = ax.text(0, 0.5, "# %i" % i_comp, va='center', ha='right', color='k')
            # tag the axes so event handlers can map them back to a component
            ax.i = i
            ax.i_comp = i_comp
            self.topo_plots.append(p)
            self.topo_labels.append(text)
        # source time course data
        y, xtick_labels = self._get_source_data()
        # axes
        left = 1.5 * axwidth
        bottom = 1 - n_rows * axheight
        # one tick centered under each epoch
        xticks = np.arange(elen / 2, elen * self.n_epochs, elen)
        ax = self.figure.add_axes((left, bottom, 1 - left, 1 - bottom), frameon=False, yticks=(), xticks=xticks, xticklabels=xtick_labels)
        ax.tick_params(bottom=False)
        # sentinel values: this axes holds all components, not a single one
        ax.i = -1
        ax.i_comp = None
        # store canvas before plotting lines
        self.canvas.draw()
        # plot epochs
        self.lines = ax.plot(y.T, color=LINE_COLOR[True], clip_on=False)
        # line color: mark rejected components
        reject_color = LINE_COLOR[False]
        for i in range(n_comp_actual):
            if not self.doc.accept[i + self.i_first]:
                self.lines[i].set_color(reject_color)
        # data pre/post range
        if self.show_range:
            pre_color = UNAMBIGUOUS_COLORS['orange']
            post_color = UNAMBIGUOUS_COLORS['bluish green']
            ax.text(-10, 0.1, 'Range: Raw', va='bottom', ha='right', color=pre_color)
            ax.text(-10, -0.1, 'Cleaned', va='top', ha='right', color=post_color)
            # raw
            ys_raw = self._get_raw_range()
            self.y_range_pre_lines = [ax.plot(yi, color=pre_color, clip_on=False)[0] for yi in ys_raw]
            # cleaned
            ys_clean = self._get_clean_range()
            self.y_range_post_lines = [ax.plot(yi, color=post_color, clip_on=False)[0] for yi in ys_clean]
        # axes limits
        self.ax_tc_ylim = (-0.5 * self.y_scale, (n_rows - 0.5) * self.y_scale)
        ax.set_ylim(self.ax_tc_ylim)
        ax.set_xlim((0, y.shape[1]))
        # epoch demarcation: dashed vertical line between consecutive epochs
        for x in range(elen, elen * self.n_epochs, elen):
            ax.axvline(x, ls='--', c='k')
        self.ax_tc = ax
        self.canvas.draw()
def _plot_update_raw_range(self):
y_min, y_max = self._get_raw_range()
for line, data in zip(self.y_range_pre_lines, (y_min, y_max)):
line.set_ydata(data)
def _plot_update_clean_range(self):
y_min, y_max = self._get_clean_range()
for line, data in zip(self.y_range_post_lines, (y_min, y_max)):
line.set_ydata(data)
    def _event_i_comp(self, event):
        """Return the component index under *event*, or ``None`` if there is none."""
        if event.inaxes:
            if event.inaxes.i_comp is None:
                # source time-course axes: infer the row from the y coordinate
                i_in_axes = ceil(event.ydata / self.y_scale + 0.5)
                if i_in_axes == 1 and self.show_range:
                    # bottom row is the raw/clean range display, not a component
                    return
                i_comp = int(self.i_first + self.n_comp + self.show_range - i_in_axes)
                if i_comp < self.n_comp_in_ica:
                    return i_comp
            else:
                # topomap axes store their component index directly
                return event.inaxes.i_comp
def _event_i_epoch(self, event):
if event.inaxes is not None and event.inaxes.i_comp is None:
i_epoch = self.i_first_epoch + int(event.xdata // len(self.doc.sources.time))
if 0 <= i_epoch < len(self.doc.epochs):
return i_epoch
def CanBackward(self):
return self.i_first_epoch > 0
def CanDown(self):
return self.i_first + self.n_comp < self.n_comp_in_ica
def CanForward(self):
return self.i_first_epoch + self.n_epochs < self.n_epochs_in_data
def CanUp(self):
return self.i_first > 0
def CaseChanged(self, index):
"Update the states of the segments on the current page"
if isinstance(index, int):
index = [index]
elif isinstance(index, slice):
start = index.start or 0
stop = index.stop or self.doc.n_epochs
index = range(start, stop)
elif index.dtype.kind == 'b':
index = np.nonzero(index)[0]
# filter to visible epochs
i_last = self.i_first + self.n_comp_actual
index = [i_comp for i_comp in index if self.i_first <= i_comp <= i_last]
# update epoch plots
if index:
for i_comp in index:
self.lines[i_comp - self.i_first].set_color(LINE_COLOR[self.doc.accept[i_comp]])
self._plot_update_clean_range()
self.canvas.draw()
def GoToComponentEpoch(self, component: int = None, epoch: int = None):
if component is not None:
self._marked_component_i = component
self.SetFirstComponent(component // self.n_comp * self.n_comp)
if epoch is not None:
self._marked_epoch_i = epoch
self.SetFirstEpoch(epoch // self.n_epochs * self.n_epochs)
self.Raise()
def OnBackward(self, event):
"Turn the page backward"
self.SetFirstEpoch(self.i_first_epoch - self.n_epochs)
    def OnCanvasKey(self, event):
        """Keyboard navigation and plotting shortcuts on the canvas."""
        if event.key is None:
            return
        elif event.key == 'alt+down':
            # jump to the last component page
            self.SetFirstComponent(self.n_comp_in_ica - self.n_comp)
        elif event.key == 'down':
            if self.CanDown():
                self.OnDown(None)
        elif event.key == 'alt+up':
            # jump to the first component page
            self.SetFirstComponent(0)
        elif event.key == 'up':
            if self.CanUp():
                self.OnUp(None)
        elif event.key == 'alt+right':
            # jump to the last epoch page
            self.SetFirstEpoch(((self.n_epochs_in_data - 1) // self.n_epochs) * self.n_epochs)
        elif event.key == 'right':
            if self.CanForward():
                self.OnForward(None)
        elif event.key == 'alt+left':
            # jump to the first epoch page
            self.SetFirstEpoch(0)
        elif event.key == 'left':
            if self.CanBackward():
                self.OnBackward(None)
        elif event.key == 'B':
            self.parent.PlotConditionAverages(self)
        elif event.key == 'b':
            # butterfly plot of the epoch under the mouse
            self.PlotEpochButterfly(self._event_i_epoch(event))
        elif not event.inaxes:
            return
        # component-specific plots (only reached for keys not handled above)
        i_comp = self._event_i_comp(event)
        if i_comp is None:  # source time course axes
            return
        elif event.key in 'tT':
            self.parent.PlotCompTopomap(i_comp)
        elif event.key == 'a':
            self.parent.PlotCompSourceArray(i_comp)
        elif event.key == 'f':
            self.parent.PlotCompFFT(i_comp)
    def OnClose(self, event):
        """Persist layout settings and unsubscribe callbacks when the frame closes."""
        if super(SourceFrame, self).OnClose(event):
            self.doc.callbacks.remove('case_change', self.CaseChanged)
            self.config.WriteInt('layout_n_comp', self.n_comp)
            self.config.WriteInt('layout_n_epochs', self.n_epochs)
            self.config.WriteFloat('y_scale', self.y_scale)
            self.config.Flush()
    def OnDown(self, event):
        "Scroll the component axis down by one page"
        self.SetFirstComponent(self.i_first + self.n_comp)
def OnForward(self, event):
"Turn the page forward"
self.SetFirstEpoch(self.i_first_epoch + self.n_epochs)
def OnLeftDown(self, event):
"Called by mouse clicks"
mpl_event = self.canvas._to_matplotlib_event(event)
i_comp = self._event_i_comp(mpl_event)
if i_comp is None:
return
self.model.toggle(i_comp)
    def OnRightDown(self, event):
        """Show the context menu for the component/epoch under the mouse."""
        mpl_event = self.canvas._to_matplotlib_event(event)
        i_comp = self._event_i_comp(mpl_event)
        i_epoch = self._event_i_epoch(mpl_event)
        if i_comp is None and i_epoch is None:
            return
        menu = self.parent._context_menu(i_comp, i_epoch)
        self.PopupMenu(menu, event.Position)
        menu.Destroy()
def OnSetLayout(self, event):
caption = "Set ICA Source Layout"
msg = "Number of components and epochs (e.g., '10 20')"
default = '%i %i' % (self.n_comp, self.n_epochs)
dlg = wx.TextEntryDialog(self, msg, caption, default)
while True:
if dlg.ShowModal() == wx.ID_OK:
value = dlg.GetValue()
try:
n_comp, n_epochs = map(int, value.split())
except Exception:
wx.MessageBox("Invalid entry: %r. Need two integers \n"
"(e.g., '10 20').", "Invalid Entry",
wx.OK | wx.ICON_ERROR)
else:
dlg.Destroy()
break
else:
dlg.Destroy()
| |
<reponame>nikadilli/imgMS
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
import numpy as np
import pandas as pd
import warnings
from decimal import Decimal
import datetime
import logging
import itertools
from imgMS.side_functions import *
from imgMS.MSEval import *
class MSData():
"""
LA-ICP-MS data structure. Primary object for LA-ICP-MS data reduction with imgMS.
Parameters
----------
datareader: MSEval.DataReader
Object for reading and import of LA-ICP-MS data.
logger: logger class (optional)
If logger is pssed all methods of MSData will log in the activity.
"""
    def __init__(self, datareader=None, logger=None):
        self.logger = logger
        if datareader is not None:
            self.datareader = datareader
            # read the raw measurement: a DataFrame with acquisition time as
            # index and one column per measured isotope
            self.data = self.datareader()
            self.data.index = list(map(float, self.data.index))
            self.time = np.array(self.data.index)
            self.matrix = self.data.values
            self.isotope_names = np.array(self.data.columns)
            if self.logger is not None:
                self.logger.info(f'Reading data {self.datareader.filename}.')
        else:
            # empty container; data may be attached later
            self.datareader = None
            self.data = None
            self.time = None
            self.matrix = None
            self.isotope_names = []
            if self.logger is not None:
                self.logger.info(f'Creating empty MSData.')
        # analysis state, filled in by the reduction steps
        self.isotopes = {}  # isotope name -> Isotope
        self.param = None  # set by read_param()
        self.selector = None  # set by select()
        self.starts, self.ends = None, None  # peak boundaries (indices)
        self.laser_off, self.laser_on = None, None  # background/ablation windows
        self.names = None  # peak names
        self.srms = None  # SRM reference values from read_srms()
        self.means = None  # result of average_isotopes()
        self.quantified = None  # result of quantify_isotopes()
        self.lod = None
        self.corrected_IS = None  # result of IS_correction()
        self.corrected_TS = None  # result of TS_correction()
        self.create_isotopes()
    def __call__(self, isotopes=None, *args, **kwargs):
        """
        Plots data in a time-dependent graph.

        Parameters
        ----------
        isotopes: list (optional)
            List of isotopes to be shown in the plot. If not specified plots all isotopes in measurement.
        *args, **kwargs:
            All other plotting arguments to be passed to matplotlib.pyplot.plot.
        """
        plot_data(self.data, isotopes=isotopes, *args, **kwargs)
def __repr__(self):
res = f'{self.__class__.__name__}\n Analysis time: {self.time.max()}s\n File path: {self.datareader.filename}\n Measured isotopes: {self.isotope_names}'
return res
def time_to_number(self, time, integration_time=None):
"""
Takes time in seconds returns number of measured values.
The result depends on integration time of MS method.
Parameters
----------
time: float
Time in seconds to be converted into number of values in data.
integration_time: float (optional)
Integration time of LA-ICP-MS measurement. If not specified will be calculated from data.
"""
if not integration_time:
integration_time = (self.time[2]-self.time[1])
val = int(abs(time)//integration_time)
if time < 0:
val = -val
return val
    def read_param(self, path):
        """
        Import excel file with additional parameters (names of peaks, internal standard values,
        total sum correction coefficients) for data reduction. A sample PARAM file can be found in data folder.

        Parameters
        ----------
        path: str
            Path to excel param file.
        """
        self.param = Param(path, self.logger)
        # adopt peak names from the PARAM file, if it provides any
        if self.param.peak_names:
            self.names = self.param.peak_names
    def read_srms(self, path='./SRM.xlsx'):
        """
        Import excel file with standard reference values. Default file is part of the imgMS package
        and contains values for NIST610, NIST612 and NIST 614.

        Parameters
        ----------
        path: str
            Path to excel SRM file.
        """
        # first sheet of the workbook, with the first column as the row index
        self.srms = pd.ExcelFile(path).parse(index_col=0)
    def set_names(self, names: list) -> None:
        """
        Sets list of names of peaks. The name of SRM must be equal to the name in SRM file.

        Parameters
        ----------
        names: list
            List of names.
        """
        self.names = names
    def create_isotopes(self):
        """Create an ``Isotope`` wrapper for every measured isotope name."""
        for key in self.isotope_names:
            self.isotopes[key] = Isotope(key, self, logger=self.logger)
    def select(self, method='treshold', selector=None, s=60, sdmul=10, iolite=None):
        """
        Selects starts and ends of peaks using imgMS.Selector.

        Parameters
        ----------
        method: str (optional)
            Name of the method to be used for identifying peaks. Possible options are 'treshold' and 'iolite'.
            Default is 'treshold'.
        selector: MSEval.Selector (optional)
            Class for identifying peaks. If Selector is passed none of the other parameters are necessary. If not,
            Selector is created by selected settings.
        s: float (optional)
            Start of the first peak in seconds from the start of analysis. Default is 60. Necessary if Selector is not
            passed and used for synchronisation of data with iolite.
        sdmul: float (optional)
            Coeficient by which a standard deviation of background is multiplied to calculate treshold. Only used if
            method = treshold.
        iolite: MSEval.Iolite (optional)
            Iolite class holding data from .Iolite file. Necessary if method = Iolite and Selector not passed.
        """
        if selector is None:
            # build a Selector from the given settings
            self.selector = Selector(
                self, s=s, sdmul=sdmul, iolite=iolite, logger=self.logger)
        else:
            self.selector = selector
        # the method is applied on call, regardless of how the Selector was obtained
        self.selector.method = method
        self.starts, self.ends = self.selector()
        self.laser_on, self.laser_off = self.selector.create_on_off(
            self.starts, self.ends)
def graph(self, ax=None, logax=False, el=None, *args, **kwargs):
"""
Create matplotlib graph of intensity in time for ablation and highlights peaks and background signal
if the peaks are already identifyied.
Parameters
----------
ax: matplotlib axes (optional)
Axes to plot in, if not specified, create new ax.
logax: bool (optional)
If True use logarythmic x axes. Default False.
el: str (optional)
Element to plot. If not specified plot all measured elements.
*args, **kwargs:
All other plotting arguments to be passed to matplotlib.pyplot.plot.
"""
if ax == None:
fig, ax = plt.subplots()
ax.cla()
ax.clear()
# if element is defined, plot only one element, otherwise all
if el:
self.data.plot(ax=ax, y=el, kind='line', legend=False)
else:
self.data.plot(ax=ax, kind='line', legend=True)
ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.075),
fancybox=True, shadow=True, ncol=10)
if logax:
ax.set_yscale('log')
if self.starts and self.ends:
# create lines for start and end of each ablation
for i in range(0, len(self.starts)):
ax.axvline(x=self.time[self.starts[i]],
color='blue', linewidth=2)
for i in range(0, len(self.ends)):
ax.axvline(x=self.time[self.ends[i]],
color='blue', linewidth=2)
if self.laser_off:
# higlights bacground
for off in self.laser_off:
try:
ax.axvspan(
self.time[off[0]], self.time[off[1]], alpha=0.2, color='red')
except:
warnings.warn('something is wrong')
if self.laser_on:
# higlihts ablation
for on in self.laser_on:
ax.axvspan(self.time[on[0]], self.time[on[1]],
alpha=0.2, color='green')
plt.show()
    def average_isotopes(self, despiked=True, bcgcor_method='all', method='intensity'):
        """
        Calculate average value for all peaks for all isotopes.

        Parameters
        ----------
        despiked : bool
            If True use despiked data, else use original. Default is True.
        bcgcor_method : str
            Method of background calculation. Possible options are [None, 'all', 'beginning', 'end']. Default is 'all'.
        method : str
            Method to get average peaks. Possible options are ['intensity', 'integral']. Default is 'intensity'.

        Returns
        -------
        data : DataFrame
            DF where index are peak names and columns are isotopes
        """
        if self.logger is not None:
            self.logger.info(
                f'Averaging of signal using despiking: {despiked}, bcgcor: {bcgcor_method}, method: {method}.')
        self.means = pd.DataFrame()
        for el, isotope in self.isotopes.items():
            isotope.average_intensity(
                despiked=despiked, bcgcor_method=bcgcor_method, method=method)
            self.means[el] = isotope.means
        if self.names:
            # label rows with peak names
            # (assumes len(names) equals the number of peaks -- TODO confirm)
            self.means.index = self.names
        else:
            if self.logger is not None:
                self.logger.error('Missing peak names.')
        return self.means
    def quantify_isotopes(self, srm_name='NIST610'):
        """
        Calculate quantified value for all peaks for all isotopes.

        Parameters
        ----------
        srm_name : str
            Standard reference material used for quantification. The name must be at least one of the
            peaks and listed in SRM file.

        Returns
        -------
        data : DataFrame
            Quantified data in DF where index are peak names and columns are isotopes
        """
        self.quantified = pd.DataFrame()
        for el, isotope in self.isotopes.items():
            if element_strip(el) not in self.srms.columns:
                # log the missing SRM column, but quantification still proceeds
                if self.logger is not None:
                    self.logger.error(f'Missing srm {el}.')
            isotope.quantify(srm_name=srm_name)
            self.quantified[el] = isotope.quantified
        if self.logger is not None:
            self.logger.info(
                f'Quantification of signal using SRM: {srm_name}.')
        # the SRM peak itself is excluded from the quantified results
        self.quantified.index = [
            name for name in self.names if name != srm_name]
        return self.quantified
def IS_correction(self):
"""
Calculates correction for each element given in internal standard correction
from PARAM file.
Returns
-------
corrected data : dict
dict of internal standards used for correction as keys and DataFrames
where index are peak names and columns are isotopes with values in ppm.
"""
self.corrected_IS = {}
if self.param is not None:
if self.param.is_coef.empty:
if self.logger is not None:
self.logger.error(
'Missing coefficients of internal standards.')
else:
if self.logger is not None:
self.logger.error(
'Missing param file for IS correction.')
self.correction_elements = list(self.param.is_coef.columns)
for el in self.correction_elements:
if el in list(map(elem_resolution, self.isotope_names)):
self.corrected_IS[el] = (correction(
self.quantified, el, self.param.is_coef))
return self.corrected_IS
def TS_correction(self, suma=1000000, skip_isotopes=[], return_oxides=False):
"""
Calculates total sum correction [1] using coefficients given in PARAM file.
If coefficients in PARAM file are not given, uses default values. Default values
assume all elements are in most common oxide form.
[1] <NAME>., <NAME>., <NAME>., <NAME>., <NAME>., <NAME>. and <NAME>., 2008.
In situ analysis of major and trace elements of anhydrous minerals by LA-ICP-MS
without applying an internal standard. Chemical Geology, 257(1-2), pp.34-43.
Parameters
----------
suma : float
Total sum of measured elements in ppm used for correction.
Normally is equal to 100% (default).
skip_isotopes : list
List of isotopes to be skipped, in total sum correction one element
can't be measured on multiple isotopes.
return_oxides : bool
If True return data in oxide form as | |
IDL colormap 15 :: STERN SPECIAL ###
color_map_luts['idl15'] = \
(
array([ 0.0000000, 0.0703125, 0.1406250, 0.2109375, 0.2812500, 0.3515625,
0.4218750, 0.4960938, 0.5664062, 0.6367188, 0.7070312, 0.7773438,
0.8476562, 0.9179688, 0.9921875, 0.9726562, 0.9531250, 0.9335938,
0.9140625, 0.8945312, 0.8710938, 0.8515625, 0.8320312, 0.8125000,
0.7929688, 0.7695312, 0.7500000, 0.7304688, 0.7109375, 0.6914062,
0.6718750, 0.6484375, 0.6289062, 0.6093750, 0.5898438, 0.5703125,
0.5468750, 0.5273438, 0.5078125, 0.4882812, 0.4687500, 0.4492188,
0.4257812, 0.4062500, 0.3867188, 0.3671875, 0.3476562, 0.3242188,
0.3046875, 0.2851562, 0.2656250, 0.2460938, 0.2265625, 0.2031250,
0.1835938, 0.1640625, 0.1445312, 0.1250000, 0.1015625, 0.0820312,
0.0625000, 0.0429688, 0.0234375, 0.0000000, 0.2500000, 0.2539062,
0.2578125, 0.2617188, 0.2656250, 0.2695312, 0.2734375, 0.2773438,
0.2812500, 0.2851562, 0.2890625, 0.2929688, 0.2968750, 0.3007812,
0.3046875, 0.3085938, 0.3125000, 0.3164062, 0.3203125, 0.3242188,
0.3281250, 0.3320312, 0.3359375, 0.3398438, 0.3437500, 0.3476562,
0.3515625, 0.3554688, 0.3593750, 0.3632812, 0.3671875, 0.3710938,
0.3750000, 0.3789062, 0.3828125, 0.3867188, 0.3906250, 0.3945312,
0.3984375, 0.4023438, 0.4062500, 0.4101562, 0.4140625, 0.4179688,
0.4218750, 0.4257812, 0.4296875, 0.4335938, 0.4375000, 0.4414062,
0.4453125, 0.4492188, 0.4531250, 0.4570312, 0.4609375, 0.4648438,
0.4687500, 0.4726562, 0.4765625, 0.4804688, 0.4843750, 0.4882812,
0.4921875, 0.4960938, 0.5000000, 0.5039062, 0.5078125, 0.5117188,
0.5156250, 0.5195312, 0.5234375, 0.5273438, 0.5312500, 0.5351562,
0.5390625, 0.5429688, 0.5468750, 0.5507812, 0.5546875, 0.5585938,
0.5625000, 0.5664062, 0.5703125, 0.5742188, 0.5781250, 0.5820312,
0.5859375, 0.5898438, 0.5937500, 0.5976562, 0.6015625, 0.6054688,
0.6093750, 0.6132812, 0.6171875, 0.6210938, 0.6250000, 0.6289062,
0.6328125, 0.6367188, 0.6406250, 0.6445312, 0.6484375, 0.6523438,
0.6562500, 0.6601562, 0.6640625, 0.6679688, 0.6718750, 0.6757812,
0.6796875, 0.6835938, 0.6875000, 0.6914062, 0.6953125, 0.6992188,
0.7031250, 0.7070312, 0.7109375, 0.7148438, 0.7187500, 0.7226562,
0.7265625, 0.7304688, 0.7343750, 0.7382812, 0.7421875, 0.7460938,
0.7500000, 0.7539062, 0.7578125, 0.7617188, 0.7656250, 0.7695312,
0.7734375, 0.7773438, 0.7812500, 0.7851562, 0.7890625, 0.7929688,
0.7968750, 0.8007812, 0.8046875, 0.8085938, 0.8125000, 0.8164062,
0.8203125, 0.8242188, 0.8281250, 0.8320312, 0.8359375, 0.8398438,
0.8437500, 0.8476562, 0.8515625, 0.8554688, 0.8593750, 0.8632812,
0.8671875, 0.8710938, 0.8750000, 0.8789062, 0.8828125, 0.8867188,
0.8906250, 0.8945312, 0.8984375, 0.9023438, 0.9062500, 0.9101562,
0.9140625, 0.9179688, 0.9218750, 0.9257812, 0.9296875, 0.9335938,
0.9375000, 0.9414062, 0.9453125, 0.9492188, 0.9531250, 0.9570312,
0.9609375, 0.9648438, 0.9687500, 0.9726562, 0.9765625, 0.9804688,
0.9843750, 0.9882812, 0.9921875, 0.9960938]),
array([ 0.0000000, 0.0039062, 0.0078125, 0.0117188, 0.0156250, 0.0195312,
0.0234375, 0.0273438, 0.0312500, 0.0351562, 0.0390625, 0.0429688,
0.0468750, 0.0507812, 0.0546875, 0.0585938, 0.0625000, 0.0664062,
0.0703125, 0.0742188, 0.0781250, 0.0820312, 0.0859375, 0.0898438,
0.0937500, 0.0976562, 0.1015625, 0.1054688, 0.1093750, 0.1132812,
0.1171875, 0.1210938, 0.1250000, 0.1289062, 0.1328125, 0.1367188,
0.1406250, 0.1445312, 0.1484375, 0.1523438, 0.1562500, 0.1601562,
0.1640625, 0.1679688, 0.1718750, 0.1757812, 0.1796875, 0.1835938,
0.1875000, 0.1914062, 0.1953125, 0.1992188, 0.2031250, 0.2070312,
0.2109375, 0.2148438, 0.2187500, 0.2226562, 0.2265625, 0.2304688,
0.2343750, 0.2382812, 0.2421875, 0.2460938, 0.2500000, 0.2539062,
0.2578125, 0.2617188, 0.2656250, 0.2695312, 0.2734375, 0.2773438,
0.2812500, 0.2851562, 0.2890625, 0.2929688, 0.2968750, 0.3007812,
0.3046875, 0.3085938, 0.3125000, 0.3164062, 0.3203125, 0.3242188,
0.3281250, 0.3320312, 0.3359375, 0.3398438, 0.3437500, 0.3476562,
0.3515625, 0.3554688, 0.3593750, 0.3632812, 0.3671875, 0.3710938,
0.3750000, 0.3789062, 0.3828125, 0.3867188, 0.3906250, 0.3945312,
0.3984375, 0.4023438, 0.4062500, 0.4101562, 0.4140625, 0.4179688,
0.4218750, 0.4257812, 0.4296875, 0.4335938, 0.4375000, 0.4414062,
0.4453125, 0.4492188, 0.4531250, 0.4570312, 0.4609375, 0.4648438,
0.4687500, 0.4726562, 0.4765625, 0.4804688, 0.4843750, 0.4882812,
0.4921875, 0.4960938, 0.5000000, 0.5039062, 0.5078125, 0.5117188,
0.5156250, 0.5195312, 0.5234375, 0.5273438, 0.5312500, 0.5351562,
0.5390625, 0.5429688, 0.5468750, 0.5507812, 0.5546875, 0.5585938,
0.5625000, 0.5664062, 0.5703125, 0.5742188, 0.5781250, 0.5820312,
0.5859375, 0.5898438, 0.5937500, 0.5976562, 0.6015625, 0.6054688,
0.6093750, 0.6132812, 0.6171875, 0.6210938, 0.6250000, 0.6289062,
0.6328125, 0.6367188, 0.6406250, 0.6445312, 0.6484375, 0.6523438,
0.6562500, 0.6601562, 0.6640625, 0.6679688, 0.6718750, 0.6757812,
0.6796875, 0.6835938, 0.6875000, 0.6914062, 0.6953125, 0.6992188,
0.7031250, 0.7070312, 0.7109375, 0.7148438, 0.7187500, 0.7226562,
0.7265625, 0.7304688, 0.7343750, 0.7382812, 0.7421875, 0.7460938,
0.7500000, 0.7539062, 0.7578125, 0.7617188, 0.7656250, 0.7695312,
0.7734375, 0.7773438, 0.7812500, 0.7851562, 0.7890625, 0.7929688,
0.7968750, 0.8007812, 0.8046875, 0.8085938, 0.8125000, 0.8164062,
0.8203125, 0.8242188, 0.8281250, 0.8320312, 0.8359375, 0.8398438,
0.8437500, 0.8476562, 0.8515625, 0.8554688, 0.8593750, 0.8632812,
0.8671875, 0.8710938, 0.8750000, 0.8789062, 0.8828125, 0.8867188,
0.8906250, 0.8945312, 0.8984375, 0.9023438, 0.9062500, 0.9101562,
0.9140625, 0.9179688, 0.9218750, 0.9257812, 0.9296875, 0.9335938,
0.9375000, 0.9414062, 0.9453125, 0.9492188, 0.9531250, 0.9570312,
0.9609375, 0.9648438, 0.9687500, 0.9726562, 0.9765625, 0.9804688,
0.9843750, 0.9882812, 0.9921875, 0.9960938]),
array([ 0.0000000, 0.0039062, 0.0117188, 0.0195312, 0.0273438, 0.0351562,
0.0429688, 0.0507812, 0.0585938, 0.0664062, 0.0742188, 0.0820312,
0.0898438, 0.0976562, 0.1054688, 0.1132812, 0.1210938, 0.1289062,
0.1367188, 0.1445312, 0.1523438, 0.1601562, 0.1679688, 0.1757812,
0.1835938, 0.1914062, 0.1992188, 0.2070312, 0.2148438, 0.2226562,
0.2304688, 0.2382812, 0.2460938, 0.2539062, 0.2617188, 0.2695312,
0.2773438, 0.2851562, 0.2929688, 0.3007812, 0.3085938, 0.3164062,
0.3242188, 0.3320312, 0.3398438, 0.3476562, 0.3554688, 0.3632812,
0.3710938, 0.3789062, 0.3867188, 0.3945312, 0.4023438, 0.4101562,
0.4179688, 0.4257812, 0.4335938, 0.4414062, 0.4492188, 0.4570312,
0.4648438, 0.4726562, 0.4804688, 0.4882812, 0.4960938, 0.5039062,
0.5117188, 0.5195312, 0.5273438, 0.5351562, 0.5429688, 0.5507812,
0.5585938, 0.5664062, 0.5742188, 0.5820312, 0.5898438, 0.5976562,
0.6054688, 0.6132812, 0.6210938, 0.6289062, 0.6367188, 0.6445312,
0.6523438, 0.6601562, 0.6679688, 0.6757812, 0.6835938, 0.6914062,
0.6992188, 0.7070312, 0.7148438, 0.7226562, 0.7304688, 0.7382812,
0.7460938, 0.7539062, 0.7617188, 0.7695312, 0.7773438, 0.7851562,
0.7929688, 0.8007812, 0.8085938, 0.8164062, 0.8242188, 0.8320312,
0.8398438, 0.8476562, 0.8554688, 0.8632812, 0.8710938, 0.8789062,
0.8867188, 0.8945312, 0.9023438, 0.9101562, 0.9179688, 0.9257812,
0.9335938, 0.9414062, 0.9492188, 0.9570312, 0.9648438, 0.9726562,
0.9804688, 0.9882812, 0.9960938, 0.9804688, 0.9648438, 0.9492188,
0.9296875, 0.9140625, 0.8984375, 0.8828125, 0.8632812, 0.8476562,
0.8320312, 0.8164062, 0.7968750, 0.7812500, 0.7656250, 0.7500000,
0.7304688, 0.7148438, 0.6992188, 0.6835938, 0.6640625, 0.6484375,
0.6328125, 0.6171875, 0.5976562, 0.5820312, 0.5664062, 0.5507812,
0.5312500, 0.5156250, 0.5000000, 0.4843750, 0.4648438, 0.4492188,
0.4335938, 0.4179688, 0.3984375, 0.3828125, 0.3671875, 0.3515625,
0.3320312, 0.3164062, 0.3007812, 0.2851562, 0.2656250, 0.2500000,
0.2343750, 0.2187500, 0.1992188, 0.1835938, 0.1679688, 0.1523438,
0.1328125, 0.1171875, 0.1015625, 0.0859375, 0.0664062, 0.0507812,
0.0351562, 0.0195312, 0.0000000, 0.0117188, 0.0273438, 0.0429688,
0.0585938, 0.0742188, 0.0859375, 0.1015625, 0.1171875, 0.1328125,
0.1484375, 0.1601562, 0.1757812, 0.1914062, 0.2070312, 0.2226562,
0.2343750, 0.2500000, 0.2656250, 0.2812500, 0.2968750, 0.3085938,
0.3242188, 0.3398438, 0.3554688, 0.3710938, 0.3828125, 0.3984375,
0.4140625, 0.4296875, 0.4453125, 0.4570312, 0.4726562, 0.4882812,
0.5039062, 0.5195312, 0.5351562, 0.5468750, 0.5625000, 0.5781250,
0.5937500, 0.6093750, 0.6210938, 0.6367188, 0.6523438, 0.6679688,
0.6835938, 0.6953125, 0.7109375, 0.7265625, 0.7421875, 0.7578125,
0.7695312, 0.7851562, 0.8007812, 0.8164062, 0.8320312, 0.8437500,
0.8593750, 0.8750000, 0.8906250, 0.9062500, 0.9179688, 0.9335938,
0.9492188, 0.9648438, 0.9804688, 0.9960938]),
array([ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
1.0, 1.0, 1.0, 1.0, 1.0, 1.0]),
)
### IDL colormap 16 :: Haze ###
color_map_luts['idl16'] = \
(
array([ 0.6523438, 0.6523438, 0.9960938, 0.9921875, 0.9726562, 0.9648438,
0.9570312, 0.9492188, 0.9453125, 0.9375000, 0.9296875, 0.9218750,
0.9140625, 0.9062500, 0.8984375, 0.8906250, 0.8828125, 0.8750000,
0.8671875, 0.8593750, 0.8515625, 0.8437500, 0.8359375, 0.8281250,
0.8203125, 0.8125000, 0.8046875, 0.7968750, 0.7890625, 0.7812500,
0.7734375, 0.7656250, 0.7578125, 0.7500000, 0.7421875, 0.7343750,
0.7265625, 0.7187500, 0.7109375, 0.7031250, 0.6953125, 0.6875000,
0.6796875, 0.6718750, 0.6640625, 0.6562500, 0.6484375, 0.6406250,
0.6328125, 0.6250000, 0.6171875, 0.6093750, 0.6015625, 0.5937500,
0.5859375, 0.5781250, 0.5703125, 0.5625000, 0.5546875, 0.5507812,
0.5429688, 0.5351562, 0.5273438, 0.5195312, 0.5117188, 0.5039062,
0.4960938, 0.4882812, 0.4804688, 0.4726562, 0.4648438, 0.4570312,
0.4492188, 0.4414062, 0.4335938, 0.4257812, 0.4179688, 0.4101562,
0.4023438, 0.3945312, 0.3867188, 0.3789062, 0.3710938, 0.3632812,
0.3554688, 0.3476562, 0.3398438, 0.3320312, 0.3242188, 0.3164062,
0.3085938, 0.3007812, 0.2929688, 0.2851562, 0.2773438, 0.2695312,
0.2617188, 0.2539062, 0.2460938, 0.2382812, 0.2304688, 0.2226562,
0.2148438, 0.2070312, 0.1992188, 0.1914062, 0.1835938, 0.1757812,
0.1679688, 0.1601562, 0.1562500, 0.1484375, 0.1406250, 0.1328125,
0.1250000, 0.1171875, 0.1093750, 0.1015625, 0.0937500, 0.0859375,
0.0781250, 0.0703125, 0.0625000, 0.0546875, 0.0468750, 0.0507812,
0.0312500, 0.0234375, 0.0156250, 0.0156250, 0.0234375, 0.0273438,
0.0351562, 0.0429688, 0.0507812, 0.0585938, 0.0664062, 0.0742188,
0.0820312, 0.0898438, 0.0976562, 0.1054688, 0.1132812, 0.1210938,
0.1289062, 0.1367188, 0.1445312, 0.1523438, 0.1601562, 0.1679688,
0.1757812, 0.1835938, 0.1914062, 0.1992188, 0.2070312, 0.2148438,
0.2226562, 0.2304688, 0.2382812, 0.2460938, 0.2539062, 0.2617188,
0.2695312, 0.2773438, 0.2851562, 0.2929688, 0.3007812, 0.3085938,
0.3164062, 0.3242188, 0.3320312, 0.3398438, 0.3476562, 0.3554688,
0.3632812, 0.3710938, 0.3789062, 0.3867188, 0.3945312, 0.4023438,
0.4101562, 0.4179688, 0.4218750, 0.4296875, 0.4375000, 0.4453125,
0.4531250, 0.4609375, 0.4687500, 0.4765625, 0.4843750, 0.4921875,
0.5000000, 0.5078125, 0.5156250, 0.5234375, 0.5312500, 0.5390625,
0.5468750, 0.5546875, 0.5625000, 0.5703125, 0.5781250, 0.5859375,
0.5937500, 0.6015625, 0.6093750, 0.6171875, 0.6250000, 0.6328125,
0.6406250, 0.6484375, 0.6562500, 0.6640625, 0.6718750, 0.6796875,
0.6875000, 0.6953125, 0.7031250, 0.7109375, 0.7187500, 0.7265625,
0.7343750, 0.7421875, 0.7500000, 0.7578125, 0.7656250, 0.7734375,
0.7812500, 0.7890625, 0.7968750, 0.8046875, 0.8125000, 0.8203125,
0.8242188, 0.8320312, | |
import json
from airone.lib.test import AironeViewTest
from airone.lib.types import AttrTypeValue
from datetime import date
from django.urls import reverse
from entity.models import Entity, EntityAttr
from entry.models import Entry
from entry.settings import CONFIG
from group.models import Group
class ViewTest(AironeViewTest):
def test_get_entries(self):
    """get_entries API: page limit, keyword filtering and special characters."""
    admin = self.admin_login()

    # create Entity & Entries (one more than the page limit)
    entity = Entity.objects.create(name='Entity', created_user=admin)
    for index in range(0, CONFIG.MAX_LIST_ENTRIES + 1):
        name = 'e-%s' % index
        Entry.objects.create(name=name, schema=entity, created_user=admin)

    # send request without keyword: the result page is capped at MAX_LIST_ENTRIES
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), CONFIG.MAX_LIST_ENTRIES)

    # send request with empty keyword: behaves the same as no keyword
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]),
                           {'keyword': ''})
    self.assertEqual(resp.status_code, 200)
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), CONFIG.MAX_LIST_ENTRIES)

    # send request with keyword parameter ('10' matches 'e-10' and 'e-100')
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]),
                           {'keyword': '10'})
    self.assertEqual(resp.status_code, 200)
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), 2)
    self.assertTrue(
        all([x['name'] == 'e-10' or x['name'] == 'e-100' for x in resp.json()['results']]))

    # send request with invalid keyword parameter
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]),
                           {'keyword': 'invalid-keyword'})
    self.assertEqual(resp.status_code, 200)
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), 0)

    # send request to check keyword would be insensitive case
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]),
                           {'keyword': 'E-0'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(resp.json()['results']), 1)
    # BUGFIX: assertTrue(a, b) treats the second argument as a failure
    # message and always passes for a truthy first argument; assertEqual
    # is what was intended here.
    self.assertEqual(resp.json()['results'][0]['name'], 'e-0')

    """
    Check for cases with special characters
    """
    # BUGFIX: a missing comma between ' ' and '&' concatenated the two
    # literals into a single ' &' element, so neither the space nor the
    # ampersand was ever tested on its own.
    add_chars = ['!', '"', '#', '$', '%', '\'', '(', ')', '-', '=', '^', '~', '@', '`',
                 '[', ']', '{', '}', ';', '+', ':', '*', ',', '<', '>', '.', '/', '?', '_', ' ',
                 '&', '|']
    test_suites = []
    for i, add_char in enumerate(add_chars):
        entry_name = 'test%s%s' % (i, add_char)
        entry = Entry.objects.create(name=entry_name, schema=entity, created_user=admin)
        entry.register_es()
        test_suites.append({
            'search_word': add_char, 'ret_cnt': 1, 'ret_entry_name': entry_name
        })
    for test_suite in test_suites:
        resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]),
                               {'keyword': test_suite['search_word']})
        # '-' also hits every 'e-...' entry, so its hit count is the page cap
        ret_cnt = test_suite['ret_cnt'] if test_suite[
            'search_word'] != '-' else CONFIG.MAX_LIST_ENTRIES
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(len(resp.json()['results']), ret_cnt)
        self.assertEqual(resp.json()['results'][0]['name'], test_suite['ret_entry_name'])
def test_get_entries_with_multiple_ids(self):
    """get_entries API accepts a comma separated list of Entity IDs."""
    admin = self.admin_login()

    # create Entities & Entries
    # BUGFIX: the loop variable was ignored and both Entities were created
    # with the same literal name 'Entity'; use entity_name as the sibling
    # test (test_get_entries_with_multiple_entities) does.
    for entity_name in ['Entity1', 'Entity2']:
        entity = Entity.objects.create(name=entity_name, created_user=admin)
        for index in range(0, 10):
            name = 'e-%s' % index
            Entry.objects.create(name=name, schema=entity, created_user=admin)

    # specify multiple IDs of Entity
    entity_ids = '%s,%s' % (Entity.objects.first().id, Entity.objects.last().id)
    resp = self.client.get('/entry/api/v1/get_entries/%s/' % (entity_ids))
    self.assertEqual(resp.status_code, 200)
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), 20)

    # specify multiple IDs including invalid ones
    # this expects that the only entries of valid id will be returned.
    entity_ids = ',,,%s,,,,,9999' % Entity.objects.first().id
    resp = self.client.get('/entry/api/v1/get_entries/%s/' % entity_ids)
    self.assertEqual(resp.status_code, 200)
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), 10)
def test_get_entries_with_multiple_entities(self):
    """Entries of every specified Entity come back in a single response."""
    admin = self.admin_login()

    # build two Entities, each holding five Entries
    for entity_name in ['Entity1', 'Entity2']:
        schema = Entity.objects.create(name=entity_name, created_user=admin)
        for idx in range(0, 5):
            Entry.objects.create(name='e-%s' % idx, schema=schema, created_user=admin)

    # query with every Entity ID joined by commas
    joined_ids = ','.join(str(e.id) for e in Entity.objects.all())
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[joined_ids]))

    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertTrue('results' in resp.json())
    self.assertEqual(len(resp.json()['results']), 10)
def test_get_entries_with_inactive_parameter(self):
    """Deleted entries are returned only when is_active=False is passed."""
    user = self.guest_login()

    # create entries, then delete them so only inactive ones remain
    entity = Entity.objects.create(name='Entity', created_user=user)
    for entry_name in ['foo', 'bar', 'baz']:
        Entry.objects.create(name=entry_name, schema=entity, created_user=user).delete()

    # without parameters there is no active entry to return
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertEqual(resp.json()['results'], [])

    # with is_active=False the deleted 'bar' and 'baz' match keyword 'ba'
    resp = self.client.get(reverse('entry:api_v1:get_entries', args=[entity.id]), {
        'keyword': 'ba',
        'is_active': False,
    })
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertEqual(len(resp.json()['results']), 2)
def test_get_referrals(self):
    """get_referrals API lists the entries that refer to a given entry."""
    admin = self.admin_login()

    # create Entity&Entries: every created entry refers to 'Referred Entry'
    ref_entity = Entity.objects.create(name='Referred Entity', created_user=admin)
    ref_entry = Entry.objects.create(name='Referred Entry', schema=ref_entity,
                                     created_user=admin)
    entity = Entity.objects.create(name='Entity', created_user=admin)
    entity.attrs.add(EntityAttr.objects.create(**{
        'name': 'Refer',
        'type': AttrTypeValue['object'],
        'created_user': admin,
        'parent_entity': entity,
    }))
    # one entry more than the listing limit, all pointing at ref_entry
    for index in range(0, CONFIG.MAX_LIST_REFERRALS + 1):
        name = 'e-%s' % index
        e = Entry.objects.create(name=name, schema=entity, created_user=admin)
        e.complement_attrs(admin)
        ref_attr = e.attrs.get(name='Refer')
        ref_attr.add_value(admin, ref_entry)

    # send request without keyword: total_count reports every referral while
    # the returned page (found_count) is capped at MAX_LIST_REFERRALS
    resp = self.client.get(reverse('entry:api_v1:get_referrals', args=[ref_entry.id]))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertEqual(resp.json()['total_count'], CONFIG.MAX_LIST_REFERRALS + 1)
    self.assertEqual(resp.json()['found_count'], CONFIG.MAX_LIST_REFERRALS)
    self.assertTrue(
        all(['id' in x and 'name' in x and 'entity' in x for x in resp.json()['entries']]))

    # send request with keyword parameter: found_count is narrowed to the match
    resp = self.client.get(reverse('entry:api_v1:get_referrals', args=[ref_entry.id]),
                           {'keyword': 'e-10'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.json()['total_count'], CONFIG.MAX_LIST_REFERRALS + 1)
    self.assertEqual(resp.json()['found_count'], 1)

    # send request with invalid keyword parameter: nothing is found
    resp = self.client.get(reverse('entry:api_v1:get_referrals', args=[ref_entry.id]),
                           {'keyword': 'invalid_keyword'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp.json()['total_count'], CONFIG.MAX_LIST_REFERRALS + 1)
    self.assertEqual(resp.json()['found_count'], 0)
def test_get_attr_referrals_of_group(self):
    """get_attr_referrals returns Groups for group-typed attributes."""
    user = self.guest_login()

    # initialize instances to be used in this test case
    groups = [Group.objects.create(name=n) for n in ['g-foo', 'g-bar', 'g-baz']]
    entity = Entity.objects.create(name='Entity', created_user=user)
    for attr_name, type_key in [('grp', 'group'), ('arr_group', 'array_group')]:
        entity.attrs.add(EntityAttr.objects.create(
            name=attr_name,
            type=AttrTypeValue[type_key],
            created_user=user,
            parent_entity=entity,
        ))

    # without a keyword, every Group is returned for both attribute types
    expected = sorted([{'id': g.id, 'name': g.name} for g in Group.objects.all()],
                      key=lambda item: item['id'])
    for attr in entity.attrs.all():
        resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]))
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(sorted(resp.json()['results'], key=lambda item: item['id']),
                         expected)

    # only active groups matching the keyword are returned: 'g-foo' misses
    # the keyword and 'g-baz' has just been deleted
    groups[2].delete()
    for attr in entity.attrs.all():
        resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]),
                               {'keyword': 'ba'})
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json()['results'],
                         [{'id': groups[1].id, 'name': groups[1].name}])
def test_get_attr_referrals_of_entry(self):
    """get_attr_referrals API lists candidate entries for a referral attribute."""
    admin = self.admin_login()

    # create Entity&Entries
    ref_entity = Entity.objects.create(name='Referred Entity', created_user=admin)
    entity = Entity.objects.create(name='Entity', created_user=admin)
    entity_attr = EntityAttr.objects.create(**{
        'name': 'Refer',
        'type': AttrTypeValue['object'],
        'created_user': admin,
        'parent_entity': entity,
    })
    entity_attr.referral.add(ref_entity)
    entity.attrs.add(entity_attr)

    # create one candidate more than the listing limit, in descending order
    for index in range(CONFIG.MAX_LIST_REFERRALS, -1, -1):
        Entry.objects.create(name='e-%s' % index, schema=ref_entity, created_user=admin)

    entry = Entry.objects.create(name='entry', schema=entity, created_user=admin)

    # get Attribute object after complement them in the entry
    entry.complement_attrs(admin)
    attr = entry.attrs.get(name='Refer')

    # try to get entries without keyword (page is capped at MAX_LIST_REFERRALS)
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]))
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(resp.json()['results']), CONFIG.MAX_LIST_REFERRALS)

    # specify invalid Attribute ID
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[9999]))
    self.assertEqual(resp.status_code, 400)

    # specify a valid Attribute ID and an available keyword
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]),
                           {'keyword': 'e-1'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertTrue('results' in resp.json())
    # This means e-1 and 'e-10' to 'e-19' are returned
    self.assertEqual(len(resp.json()['results']), 11)

    # specify a valid Attribute ID and an unavailable keyword
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]),
                           {'keyword': 'hoge'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(len(resp.json()['results']), 0)

    # Add new data
    for index in [101, 111, 100, 110]:
        Entry.objects.create(name='e-%s' % index, schema=ref_entity, created_user=admin)

    # Run with 'e-1' as keyword
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[attr.id]),
                           {'keyword': 'e-1'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertTrue('results' in resp.json())

    # Check the number of return values
    self.assertEqual(len(resp.json()['results']), 15)

    # Check if it is sorted in the expected (natural numeric) order
    targets = [1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 100, 101, 110, 111]
    for i, res in enumerate(resp.json()['results']):
        self.assertEqual(res['name'], 'e-%s' % targets[i])
def test_get_attr_referrals_with_entity_attr(self):
    """
    This test is needed because the get_attr_referrals API will receive an ID
    of Attribute from entry.edit view, but also receive an EntityAttr's one
    from entry.create view.
    """
    admin = self.admin_login()

    # referred Entity holding one Entry more than the listing limit
    ref_entity = Entity.objects.create(name='Referred Entity', created_user=admin)
    for idx in range(0, CONFIG.MAX_LIST_REFERRALS + 1):
        Entry.objects.create(name='e-%s' % idx, schema=ref_entity, created_user=admin)

    entity = Entity.objects.create(name='Entity', created_user=admin)
    entity_attr = EntityAttr.objects.create(
        name='Refer',
        type=AttrTypeValue['named_object'],
        created_user=admin,
        parent_entity=entity,
    )
    entity_attr.referral.add(ref_entity)
    entity.attrs.add(entity_attr)

    # query by the EntityAttr ID instead of an Attribute ID
    resp = self.client.get(reverse('entry:api_v1:get_attr_referrals', args=[entity_attr.id]),
                           {'keyword': 'e-1'})
    self.assertEqual(resp.status_code, 200)
    self.assertEqual(resp['Content-Type'], 'application/json')
    self.assertTrue('results' in resp.json())
    # This means e-1 and 'e-10' to 'e-19' are returned
    self.assertEqual(len(resp.json()['results']), 11)
def test_advanced_search(self):
admin = self.admin_login()
# create referred Entity and Entries
ref_entity = Entity.objects.create(name='Referred Entity', created_user=admin)
for index in range(0, 20):
Entry.objects.create(name='r-%s' % index, schema=ref_entity, created_user=admin)
attr_infos = [
{'name': 'attr_ref', 'type': AttrTypeValue['object'], 'ref': ref_entity},
{'name': 'attr_val', 'type': AttrTypeValue['string']},
]
entity = Entity.objects.create(name='Entity', created_user=admin)
for attr_info in attr_infos:
entity_attr = EntityAttr.objects.create(**{
'name': attr_info['name'],
'type': attr_info['type'],
'created_user': admin,
'parent_entity': entity,
})
if 'ref' in attr_info:
entity_attr.referral.add(attr_info['ref'])
entity.attrs.add(entity_attr)
for index in range(0, 20):
entry = Entry.objects.create(name='e-%d' % index, schema=entity, created_user=admin)
entry.complement_attrs(admin)
for attr_name in ['attr_ref', 'attr_val']:
attr = entry.attrs.get(name=attr_name)
if attr.schema.type & AttrTypeValue['string']:
attr.add_value(admin, str(index))
elif attr.schema.type & AttrTypeValue['object']:
attr.add_value(admin, Entry.objects.get(name='r-%d' % index))
# checks the the API request to get entries with 'or' cond_link parameter
params = {
'cond_link': 'or',
'cond_params': [
{'type': 'text', 'value': '5'},
{'type': 'entry', 'value': str(Entry.objects.get(name='r-6').id)},
],
}
resp = self.client.post(reverse('entry:api_v1:search_entries', args=[entity.id]),
json.dumps(params), 'application/json')
self.assertEqual(resp.status_code, 200)
self.assertEqual(resp['Content-Type'], 'application/json')
self.assertEqual(len(resp.json()['results']), 3)
self.assertTrue(any([x for x in resp.json()['results'] if x['name'] == 'e-5']))
self.assertTrue(any([x for x in resp.json()['results'] if x['name'] == 'e-15']))
self.assertTrue(any([x for x in resp.json()['results'] if x['name'] == 'e-6']))
# checks the the API request to not get entries with 'or' cond_link parameter
params = {
'cond_link': 'or',
'cond_params': [
{'type': 'text', 'value': 'abcd'},
],
}
resp = self.client.post(reverse('entry:api_v1:search_entries', args=[entity.id]),
json.dumps(params), 'application/json')
| |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _coefficient
else:
import _coefficient
try:
import builtins as __builtin__
except ImportError:
import __builtin__
_swig_new_instance_method = _coefficient.SWIG_PyInstanceMethod_New
_swig_new_static_method = _coefficient.SWIG_PyStaticMethod_New
def _swig_repr(self):
    """Build a repr string for a SWIG proxy, tolerating a missing C object."""
    try:
        inner = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        # no underlying C object yet (or it is gone) - show an empty marker
        inner = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, inner,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    # Route class-attribute assignment through the guard above so that
    # classes using this metaclass reject brand-new attribute names.
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import weakref
import mfem._ser.globals
import mfem._ser.array
import mfem._ser.mem_manager
import mfem._ser.matrix
import mfem._ser.vector
import mfem._ser.operators
import mfem._ser.intrules
import mfem._ser.sparsemat
import mfem._ser.densemat
import mfem._ser.eltrans
import mfem._ser.fe
import mfem._ser.geom
class Coefficient(object):
    r"""Proxy of C++ mfem::Coefficient class."""

    # 'thisown' mirrors Python-side ownership of the wrapped C++ object.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        # Abstract base: instances are only created through concrete subclasses.
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr

    def SetTime(self, t):
        r"""SetTime(Coefficient self, double t)"""
        return _coefficient.Coefficient_SetTime(self, t)
    # each def is immediately rebound to the faster C-level instance method
    SetTime = _swig_new_instance_method(_coefficient.Coefficient_SetTime)

    def GetTime(self):
        r"""GetTime(Coefficient self) -> double"""
        return _coefficient.Coefficient_GetTime(self)
    GetTime = _swig_new_instance_method(_coefficient.Coefficient_GetTime)

    def Eval(self, *args):
        r"""
        Eval(Coefficient self, ElementTransformation T, IntegrationPoint ip) -> double
        Eval(Coefficient self, ElementTransformation T, IntegrationPoint ip, double t) -> double
        """
        return _coefficient.Coefficient_Eval(self, *args)
    Eval = _swig_new_instance_method(_coefficient.Coefficient_Eval)
    __swig_destroy__ = _coefficient.delete_Coefficient

# Register Coefficient in _coefficient:
_coefficient.Coefficient_swigregister(Coefficient)
class ConstantCoefficient(Coefficient):
    r"""Proxy of C++ mfem::ConstantCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr
    # 'constant' exposes the wrapped C++ double member for read/write access.
    constant = property(_coefficient.ConstantCoefficient_constant_get, _coefficient.ConstantCoefficient_constant_set, doc=r"""constant : double""")

    def __init__(self, c=1.0):
        r"""__init__(ConstantCoefficient self, double c=1.0) -> ConstantCoefficient"""
        _coefficient.ConstantCoefficient_swiginit(self, _coefficient.new_ConstantCoefficient(c))

    def Eval(self, T, ip):
        r"""Eval(ConstantCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.ConstantCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.ConstantCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_ConstantCoefficient

# Register ConstantCoefficient in _coefficient:
_coefficient.ConstantCoefficient_swigregister(ConstantCoefficient)
class PWConstCoefficient(Coefficient):
    r"""Proxy of C++ mfem::PWConstCoefficient class (piecewise constant by subdomain)."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(PWConstCoefficient self, int NumOfSubD=0) -> PWConstCoefficient
        __init__(PWConstCoefficient self, Vector c) -> PWConstCoefficient
        """
        _coefficient.PWConstCoefficient_swiginit(self, _coefficient.new_PWConstCoefficient(*args))

    def UpdateConstants(self, c):
        r"""UpdateConstants(PWConstCoefficient self, Vector c)"""
        return _coefficient.PWConstCoefficient_UpdateConstants(self, c)
    UpdateConstants = _swig_new_instance_method(_coefficient.PWConstCoefficient_UpdateConstants)

    def __call__(self, i):
        r"""__call__(PWConstCoefficient self, int i) -> double &"""
        return _coefficient.PWConstCoefficient___call__(self, i)
    __call__ = _swig_new_instance_method(_coefficient.PWConstCoefficient___call__)

    def GetNConst(self):
        r"""GetNConst(PWConstCoefficient self) -> int"""
        return _coefficient.PWConstCoefficient_GetNConst(self)
    GetNConst = _swig_new_instance_method(_coefficient.PWConstCoefficient_GetNConst)

    def Eval(self, T, ip):
        r"""Eval(PWConstCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.PWConstCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.PWConstCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_PWConstCoefficient

# Register PWConstCoefficient in _coefficient:
_coefficient.PWConstCoefficient_swigregister(PWConstCoefficient)
class FunctionCoefficient(Coefficient):
    r"""Proxy of C++ mfem::FunctionCoefficient class (coefficient from a callable)."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(FunctionCoefficient self, std::function< double (mfem::Vector const &) > F) -> FunctionCoefficient
        __init__(FunctionCoefficient self, std::function< double (mfem::Vector const &,double) > TDF) -> FunctionCoefficient
        __init__(FunctionCoefficient self, double (*)(mfem::Vector &) f) -> FunctionCoefficient
        __init__(FunctionCoefficient self, double (*)(mfem::Vector &,double) tdf) -> FunctionCoefficient
        """
        _coefficient.FunctionCoefficient_swiginit(self, _coefficient.new_FunctionCoefficient(*args))

    def Eval(self, T, ip):
        r"""Eval(FunctionCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.FunctionCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.FunctionCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_FunctionCoefficient

# Register FunctionCoefficient in _coefficient:
_coefficient.FunctionCoefficient_swigregister(FunctionCoefficient)
class GridFunctionCoefficient(Coefficient):
    r"""Proxy of C++ mfem::GridFunctionCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(GridFunctionCoefficient self) -> GridFunctionCoefficient
        __init__(GridFunctionCoefficient self, mfem::GridFunction const * gf, int comp=1) -> GridFunctionCoefficient
        """
        _coefficient.GridFunctionCoefficient_swiginit(self, _coefficient.new_GridFunctionCoefficient(*args))

    def SetGridFunction(self, gf):
        r"""SetGridFunction(GridFunctionCoefficient self, mfem::GridFunction const * gf)"""
        return _coefficient.GridFunctionCoefficient_SetGridFunction(self, gf)
    SetGridFunction = _swig_new_instance_method(_coefficient.GridFunctionCoefficient_SetGridFunction)

    def GetGridFunction(self):
        r"""GetGridFunction(GridFunctionCoefficient self) -> mfem::GridFunction const *"""
        return _coefficient.GridFunctionCoefficient_GetGridFunction(self)
    GetGridFunction = _swig_new_instance_method(_coefficient.GridFunctionCoefficient_GetGridFunction)

    def Eval(self, T, ip):
        r"""Eval(GridFunctionCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.GridFunctionCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.GridFunctionCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_GridFunctionCoefficient

# Register GridFunctionCoefficient in _coefficient:
_coefficient.GridFunctionCoefficient_swigregister(GridFunctionCoefficient)
class TransformedCoefficient(Coefficient):
    r"""Proxy of C++ mfem::TransformedCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(TransformedCoefficient self, Coefficient q, double (*)(double) F) -> TransformedCoefficient
        __init__(TransformedCoefficient self, Coefficient q1, Coefficient q2, double (*)(double,double) F) -> TransformedCoefficient
        """
        _coefficient.TransformedCoefficient_swiginit(self, _coefficient.new_TransformedCoefficient(*args))

    def Eval(self, T, ip):
        r"""Eval(TransformedCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.TransformedCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.TransformedCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_TransformedCoefficient

# Register TransformedCoefficient in _coefficient:
_coefficient.TransformedCoefficient_swigregister(TransformedCoefficient)
class DeltaCoefficient(Coefficient):
    r"""Proxy of C++ mfem::DeltaCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(DeltaCoefficient self) -> DeltaCoefficient
        __init__(DeltaCoefficient self, double x, double s) -> DeltaCoefficient
        __init__(DeltaCoefficient self, double x, double y, double s) -> DeltaCoefficient
        __init__(DeltaCoefficient self, double x, double y, double z, double s) -> DeltaCoefficient
        """
        _coefficient.DeltaCoefficient_swiginit(self, _coefficient.new_DeltaCoefficient(*args))

    def SetDeltaCenter(self, center):
        r"""SetDeltaCenter(DeltaCoefficient self, Vector center)"""
        return _coefficient.DeltaCoefficient_SetDeltaCenter(self, center)
    SetDeltaCenter = _swig_new_instance_method(_coefficient.DeltaCoefficient_SetDeltaCenter)

    def SetScale(self, _s):
        r"""SetScale(DeltaCoefficient self, double _s)"""
        return _coefficient.DeltaCoefficient_SetScale(self, _s)
    SetScale = _swig_new_instance_method(_coefficient.DeltaCoefficient_SetScale)

    def SetFunction(self, f):
        r"""SetFunction(DeltaCoefficient self, double (*)(double) f)"""
        return _coefficient.DeltaCoefficient_SetFunction(self, f)
    SetFunction = _swig_new_instance_method(_coefficient.DeltaCoefficient_SetFunction)

    def SetTol(self, _tol):
        r"""SetTol(DeltaCoefficient self, double _tol)"""
        return _coefficient.DeltaCoefficient_SetTol(self, _tol)
    SetTol = _swig_new_instance_method(_coefficient.DeltaCoefficient_SetTol)

    def SetWeight(self, w):
        r"""SetWeight(DeltaCoefficient self, Coefficient w)"""
        # Clearing 'thisown' hands ownership of `w` over to the C++ side so
        # Python's GC will not delete an object the C++ object still uses.
        w.thisown=0
        return _coefficient.DeltaCoefficient_SetWeight(self, w)

    def Center(self):
        r"""Center(DeltaCoefficient self) -> double const *"""
        return _coefficient.DeltaCoefficient_Center(self)
    Center = _swig_new_instance_method(_coefficient.DeltaCoefficient_Center)

    def Scale(self):
        r"""Scale(DeltaCoefficient self) -> double"""
        return _coefficient.DeltaCoefficient_Scale(self)
    Scale = _swig_new_instance_method(_coefficient.DeltaCoefficient_Scale)

    def Tol(self):
        r"""Tol(DeltaCoefficient self) -> double"""
        return _coefficient.DeltaCoefficient_Tol(self)
    Tol = _swig_new_instance_method(_coefficient.DeltaCoefficient_Tol)

    def Weight(self):
        r"""Weight(DeltaCoefficient self) -> Coefficient"""
        return _coefficient.DeltaCoefficient_Weight(self)
    Weight = _swig_new_instance_method(_coefficient.DeltaCoefficient_Weight)

    def GetDeltaCenter(self, center):
        r"""GetDeltaCenter(DeltaCoefficient self, Vector center)"""
        return _coefficient.DeltaCoefficient_GetDeltaCenter(self, center)
    GetDeltaCenter = _swig_new_instance_method(_coefficient.DeltaCoefficient_GetDeltaCenter)

    def EvalDelta(self, T, ip):
        r"""EvalDelta(DeltaCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.DeltaCoefficient_EvalDelta(self, T, ip)
    EvalDelta = _swig_new_instance_method(_coefficient.DeltaCoefficient_EvalDelta)

    def Eval(self, T, ip):
        r"""Eval(DeltaCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.DeltaCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.DeltaCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_DeltaCoefficient

# Register DeltaCoefficient in _coefficient:
_coefficient.DeltaCoefficient_swigregister(DeltaCoefficient)
class RestrictedCoefficient(Coefficient):
    r"""Proxy of C++ mfem::RestrictedCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, _c, attr):
        r"""__init__(RestrictedCoefficient self, Coefficient _c, intArray attr) -> RestrictedCoefficient"""
        _coefficient.RestrictedCoefficient_swiginit(self, _coefficient.new_RestrictedCoefficient(_c, attr))
        # Keep a Python reference so the wrapped coefficient is not
        # garbage-collected while this object still points at it.
        self._ref_to_c = _c

    def Eval(self, T, ip):
        r"""Eval(RestrictedCoefficient self, ElementTransformation T, IntegrationPoint ip) -> double"""
        return _coefficient.RestrictedCoefficient_Eval(self, T, ip)
    Eval = _swig_new_instance_method(_coefficient.RestrictedCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_RestrictedCoefficient

# Register RestrictedCoefficient in _coefficient:
_coefficient.RestrictedCoefficient_swigregister(RestrictedCoefficient)
class VectorCoefficient(object):
    r"""Proxy of C++ mfem::VectorCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        # Abstract base: instances are only created through concrete subclasses.
        raise AttributeError("No constructor defined - class is abstract")
    __repr__ = _swig_repr

    def SetTime(self, t):
        r"""SetTime(VectorCoefficient self, double t)"""
        return _coefficient.VectorCoefficient_SetTime(self, t)
    SetTime = _swig_new_instance_method(_coefficient.VectorCoefficient_SetTime)

    def GetTime(self):
        r"""GetTime(VectorCoefficient self) -> double"""
        return _coefficient.VectorCoefficient_GetTime(self)
    GetTime = _swig_new_instance_method(_coefficient.VectorCoefficient_GetTime)

    def GetVDim(self):
        r"""GetVDim(VectorCoefficient self) -> int"""
        return _coefficient.VectorCoefficient_GetVDim(self)
    GetVDim = _swig_new_instance_method(_coefficient.VectorCoefficient_GetVDim)

    def Eval(self, *args):
        r"""
        Eval(VectorCoefficient self, Vector V, ElementTransformation T, IntegrationPoint ip)
        Eval(VectorCoefficient self, DenseMatrix M, ElementTransformation T, IntegrationRule ir)
        """
        return _coefficient.VectorCoefficient_Eval(self, *args)
    Eval = _swig_new_instance_method(_coefficient.VectorCoefficient_Eval)
    __swig_destroy__ = _coefficient.delete_VectorCoefficient

# Register VectorCoefficient in _coefficient:
_coefficient.VectorCoefficient_swigregister(VectorCoefficient)
class VectorConstantCoefficient(VectorCoefficient):
    r"""Proxy of C++ mfem::VectorConstantCoefficient class."""

    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, v):
        r"""__init__(VectorConstantCoefficient self, Vector v) -> VectorConstantCoefficient"""
        # Accept any array-like input by converting it through numpy into an
        # mfem Vector; on conversion failure, pass `v` through unchanged.
        try:
            import numpy as np
            value = np.array(v, copy=False, dtype=float).flatten()
            can_np_array = True
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; only conversion errors
            # should trigger the pass-through fallback.
            can_np_array = False

        if can_np_array:
            v = mfem._ser.vector.Vector(value)
            # keep a Python reference so the Vector outlives the C++ ctor call
            self._value = v

        _coefficient.VectorConstantCoefficient_swiginit(self, _coefficient.new_VectorConstantCoefficient(v))

    def Eval(self, *args):
        r"""
        Eval(VectorConstantCoefficient self, Vector V, ElementTransformation T, IntegrationPoint ip)
        Eval(VectorConstantCoefficient self, DenseMatrix M, ElementTransformation T, IntegrationRule ir)
        Eval(VectorConstantCoefficient self, Vector V, ElementTransformation T, IntegrationPoint ip)
        """
        return _coefficient.VectorConstantCoefficient_Eval(self, *args)
    Eval = _swig_new_instance_method(_coefficient.VectorConstantCoefficient_Eval)

    def GetVec(self):
        r"""GetVec(VectorConstantCoefficient self) -> Vector"""
        return _coefficient.VectorConstantCoefficient_GetVec(self)
    GetVec = _swig_new_instance_method(_coefficient.VectorConstantCoefficient_GetVec)
    __swig_destroy__ = _coefficient.delete_VectorConstantCoefficient

# Register VectorConstantCoefficient in _coefficient:
_coefficient.VectorConstantCoefficient_swigregister(VectorConstantCoefficient)
class VectorFunctionCoefficient(VectorCoefficient):
r"""Proxy of C++ mfem::VectorFunctionCoefficient class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, | |
a "bounding box" for the graph
# @code
# xmin, xmax , ymin , ymax = graph.bb()
# @endcode
def _gr_bb_(graph, more=0.0):
    """ Get a ``bounding box'' for the graph
    >>> xmin, xmax , ymin , ymax = graph.bb()
    """
    x_lo, x_hi = pos_infinity, neg_infinity
    y_lo, y_hi = pos_infinity, neg_infinity
    # Scan all points, widening the box as we go.
    for _, px, py in graph.iteritems():
        x_lo = min(x_lo, px)
        x_hi = max(x_hi, px)
        y_lo = min(y_lo, py)
        y_hi = max(y_hi, py)
    if more:
        # Optionally widen the box by a fraction of its span on each side.
        dx = more * (x_hi - x_lo)
        dy = more * (y_hi - y_lo)
        x_lo, x_hi = min(x_lo, x_lo - dx), max(x_hi, x_hi + dx)
        y_lo, y_hi = min(y_lo, y_lo - dy), max(y_hi, y_hi + dy)
    return x_lo, x_hi, y_lo, y_hi
# =============================================================================
## Get a "bounding box" for the graph
# @code
# xmin, xmax , ymin , ymax = graph.bb()
# @endcode
def _gre_bb_(graph, more=0.0):
    """ Get a ``bounding box'' for the graph
    >>> xmin, xmax , ymin , ymax = graph.bb()
    """
    x_lo, x_hi = pos_infinity, neg_infinity
    y_lo, y_hi = pos_infinity, neg_infinity
    # Each point carries symmetric errors: include value +/- error.
    for _, x, y in graph.iteritems():
        px, ex = x.value(), x.error()
        py, ey = y.value(), y.error()
        x_lo = min(x_lo, px, px + ex, px - ex)
        x_hi = max(x_hi, px, px + ex, px - ex)
        y_lo = min(y_lo, py, py + ey, py - ey)
        y_hi = max(y_hi, py, py + ey, py - ey)
    if more:
        # Optionally widen the box by a fraction of its span on each side.
        dx = more * (x_hi - x_lo)
        dy = more * (y_hi - y_lo)
        x_lo, x_hi = min(x_lo, x_lo - dx), max(x_hi, x_hi + dx)
        y_lo, y_hi = min(y_lo, y_lo - dy), max(y_hi, y_hi + dy)
    return x_lo, x_hi, y_lo, y_hi
# =============================================================================
## Get a "bounding box" for the graph
# @code
# xmin, xmax , ymin , ymax = graph.bb()
# @endcode
def _grae_bb_(graph, more=0.0):
    """ Get a ``bounding box'' for the graph (TGraphAsymmErrors)
    >>> xmin, xmax , ymin , ymax = graph.bb()
    """
    xmin = pos_infinity
    xmax = neg_infinity
    ymin = pos_infinity
    ymax = neg_infinity
    # Points carry asymmetric errors (negative/positive per axis).
    for i, xv, enx, epx, yv, eny, epy in graph.iteritems():
        xmin = min(xmin, xv, xv + abs(epx), xv - abs(enx))
        xmax = max(xmax, xv, xv + abs(epx), xv - abs(enx))
        # BUG FIX: the y-bounds previously used `xv - abs(eny)`; the lower
        # edge of the y-error bar is of course `yv - abs(eny)`.
        ymin = min(ymin, yv, yv + abs(epy), yv - abs(eny))
        ymax = max(ymax, yv, yv + abs(epy), yv - abs(eny))
    if more:
        # Optionally widen the box by a fraction of its span on each side.
        xmn = min(xmin, xmin - more * (xmax - xmin))
        xmx = max(xmax, xmax + more * (xmax - xmin))
        ymn = min(ymin, ymin - more * (ymax - ymin))
        ymx = max(ymax, ymax + more * (ymax - ymin))
        xmin, xmax, ymin, ymax = xmn, xmx, ymn, ymx
    return xmin, xmax, ymin, ymax
# =============================================================================
## Get a "bounding box" for the graph
# @code
# xmin, xmax , ymin , ymax = graph.bb()
# @endcode
def _mg_bb_(graph, more=0.0):
    """ Get a ``bounding box'' for the multi-graph: the union of the
    bounding boxes of all contained graphs.
    >>> xmin, xmax , ymin , ymax = graph.bb()
    """
    xmin = pos_infinity
    xmax = neg_infinity
    ymin = pos_infinity
    ymax = neg_infinity
    # BUG FIX: the ROOT method is TMultiGraph::GetListOfGraphs();
    # the previous spelling `GetListOfGraps` raised AttributeError.
    _gs = graph.GetListOfGraphs()
    for gr in _gs:
        # Delegate to the per-graph bb() decorations installed below.
        xmn, xmx, ymn, ymx = gr.bb(more)
        xmin = min(xmin, xmn)
        xmax = max(xmax, xmx)
        ymin = min(ymin, ymn)
        ymax = max(ymax, ymx)
    return xmin, xmax, ymin, ymax
# Attach the bounding-box helpers as a uniform `.bb(more=0.0)` method on
# the ROOT graph classes (the usual ostap monkey-patch decoration style).
ROOT.TGraph.bb = _gr_bb_
ROOT.TGraphErrors.bb = _gre_bb_
ROOT.TGraphAsymmErrors.bb = _grae_bb_
ROOT.TMultiGraph.bb = _mg_bb_
# =============================================================================
## get "slice" for graph
# @code
# >>> graph = ...
# >>> gr1 = graph[2:10]
# @endcode
# @author <NAME> <EMAIL>
# @date 2016-03-28
def _gr0_getslice_(graph, i, j):
    """Get the ``slice'' for TGraph:
    >>> graph = ...
    >>> gr1 = graph[2:10]
    """
    n = len(graph)
    # Normalise the slice bounds: None means "whole range", negative
    # indices count from the end (repeatedly, as in the original code).
    start = 0 if i is None else i
    stop = n if j is None else j
    while start < 0:
        start += n
    while stop < 0:
        stop += n
    result = ROOT.TGraph(stop - start) if start < stop else ROOT.TGraph()
    copy_graph_attributes(graph, result)
    for out_idx, src_idx in enumerate(range(start, stop)):
        result[out_idx] = graph[src_idx]
    return result
# =============================================================================
## get "slice" for graph
# @code
# >>> graph = ...
# >>> gr1 = graph[2:10]
# @endcode
# @author <NAME> <EMAIL>
# @date 2016-03-28
def _gr1_getslice_(graph, i, j):
    """Get the ``slice'' for TGraphErrors:
    >>> graph = ...
    >>> gr1 = graph[2:10]
    """
    n = len(graph)
    # Normalise the slice bounds: None means "whole range", negative
    # indices count from the end (repeatedly, as in the original code).
    start = 0 if i is None else i
    stop = n if j is None else j
    while start < 0:
        start += n
    while stop < 0:
        stop += n
    result = ROOT.TGraphErrors(stop - start) if start < stop else ROOT.TGraphErrors()
    copy_graph_attributes(graph, result)
    for out_idx, src_idx in enumerate(range(start, stop)):
        result[out_idx] = graph[src_idx]
    return result
# =============================================================================
## get "slice" for graph
# @code
# >>> graph = ...
# >>> gr1 = graph[2:10]
# @endcode
# @author <NAME> <EMAIL>
# @date 2016-03-28
def _gr2_getslice_(graph, i, j):
    """Get the ``slice'' for TGraphAsymmErrors:
    >>> graph = ...
    >>> gr1 = graph[2:10]
    """
    n = len(graph)
    # Normalise the slice bounds: None means "whole range", negative
    # indices count from the end (repeatedly, as in the original code).
    start = 0 if i is None else i
    stop = n if j is None else j
    while start < 0:
        start += n
    while stop < 0:
        stop += n
    result = ROOT.TGraphAsymmErrors(stop - start) if start < stop else ROOT.TGraphAsymmErrors()
    copy_graph_attributes(graph, result)
    for out_idx, src_idx in enumerate(range(start, stop)):
        result[out_idx] = graph[src_idx]
    return result
# ============================================================================
## make sorted graph
# @code
# graph = ...
# s = graph.sorted()
# @endcode
# @date 2016-03-28
def _gr_sorted_ ( graph , reverse = False ) :
"""Make sorted graph
>>> graph = ...
>>> s = graph.sorted()
"""
## make new graph
new_graph = graph.Clone()
oitems = ( i for | |
<gh_stars>0
#!/usr/bin/python
#
# Copyright (c) 2018 Juniper Networks, Inc. All rights reserved.
#
"""This file contains code to support the hitless image upgrade feature."""
import argparse
from builtins import object
from builtins import str
import copy
from datetime import timedelta
import re
import sys
import traceback
sys.path.append("/opt/contrail/fabric_ansible_playbooks/module_utils")
# unit test
sys.path.append("../fabric-ansible/ansible-playbooks/module_utils")
from filter_utils import _task_error_log, FilterLog
from job_manager.job_utils import JobAnnotations, JobVncApi
# Upgrade ordering: all leaf devices first, then spines, then the rest.
ordered_role_groups = [
    ["leaf"],
    ["spine"],
    ["default"]
]

# Assumed duration of a single device image upgrade, used for estimates.
IMAGE_UPGRADE_DURATION = 30  # minutes
class FilterModule(object):
critical_routing_bridging_roles = {
"CRB-MCAST-Gateway",
"DC-Gateway",
"DCI-Gateway",
}
@staticmethod
def _validate_job_ctx(job_ctx):
if not job_ctx.get('fabric_fqname'):
raise ValueError('Invalid job_ctx: missing fabric_fqname')
job_input = job_ctx.get('job_input')
if not job_input:
raise ValueError('Invalid job_ctx: missing job_input')
if not job_input.get('fabric_uuid'):
raise ValueError('Invalid job_ctx: missing fabric_uuid')
return job_input
# end _validate_job_ctx
    def filters(self):
        """Ansible filter-plugin entry point: map filter names to methods."""
        return {
            'hitless_upgrade_plan': self.get_hitless_upgrade_plan,
            'hitless_next_batch': self.get_next_batch,
            'hitless_all_devices': self.get_all_devices,
            'hitless_device_info': self.get_device_info,
            'hitless_validate': self.validate_critical_roles
        }
    # end filters
    # Wrapper to call main routine
    def get_hitless_upgrade_plan(self, job_ctx, image_upgrade_list):
        """Build the full hitless image-upgrade plan for a fabric.

        :param job_ctx: job context dict, see _validate_job_ctx for the
            required keys
        :param image_upgrade_list: list of dicts, each with 'image_uuid'
            and 'device_list' (device uuids to move to that image)
        :return: the upgrade-plan dict (status 'success'), or a dict with
            status 'failure' and 'error_msg' when anything raises
        """
        try:
            FilterLog.instance("HitlessUpgradeFilter")
            self.job_input = FilterModule._validate_job_ctx(job_ctx)
            self.fabric_uuid = self.job_input['fabric_uuid']
            self.vncapi = JobVncApi.vnc_init(job_ctx)
            self.job_ctx = job_ctx
            self.ja = JobAnnotations(self.vncapi)
            self.advanced_parameters = self._get_advanced_params()
            self._cache_job_input()
            self.batch_limit = self.advanced_parameters.get(
                'bulk_device_upgrade_count')
            self.image_upgrade_list = image_upgrade_list
            upgrade_plan = self._get_hitless_upgrade_plan()
            return upgrade_plan
        except Exception as ex:
            # Report failures back to the job framework instead of raising.
            errmsg = "Unexpected error: %s\n%s" % (
                str(ex), traceback.format_exc()
            )
            _task_error_log(errmsg)
            return {
                'status': 'failure',
                'error_msg': errmsg,
            }
    # end get_hitless_upgrade_plan
    # Get any advanced parameters from job input to override defaults
    def _get_advanced_params(self):
        """Merge job-input advanced parameters over the template defaults."""
        job_template_fqname = self.job_ctx.get('job_template_fqname')
        def_json = self.ja.generate_default_json(job_template_fqname)
        adv_params = def_json.get("advanced_parameters")
        job_input_adv_params = self.job_input.get('advanced_parameters', {})
        # User-supplied values win over the template defaults.
        adv_params = self.ja.dict_update(adv_params, job_input_adv_params)
        return adv_params
    # end _get_advanced_params
    # Store the job input on the fabric object for UI to retrieve later
    def _cache_job_input(self):
        """Persist the merged job input on the fabric for later retrieval."""
        # Deep-copy so the cached copy is not mutated by later processing.
        job_input = copy.deepcopy(self.job_input)
        job_input.update({"advanced_parameters": self.advanced_parameters})
        self.ja.cache_job_input(self.fabric_uuid,
                                self.job_ctx.get('job_template_fqname')[-1],
                                job_input)
    # end _cache_job_input
# Read from Node Profile to determine whether the upgrade is hitless
def _is_hitless_upgrade(self, device_obj):
node_profile_refs = device_obj.get_node_profile_refs()
if node_profile_refs:
np_uuid = node_profile_refs[0].get('uuid')
node_profile_obj = self.vncapi.node_profile_read(id=np_uuid)
is_hitless = node_profile_obj.get_node_profile_hitless_upgrade()
return is_hitless
return True
# end _is_hitless_upgrade
    # Main routine to generate an upgrade plan
    def _get_hitless_upgrade_plan(self):
        """Assemble the plan: device tables, role groups, VPGs, batches.

        Each step populates state on self that the following steps consume.
        """
        self.device_table, self.skipped_device_table = \
            self._generate_device_table()
        self.role_device_groups = self._generate_role_device_groups()
        self.vpg_table = self._generate_vpg_table()
        # Must run after the vpg table: buddies are derived from shared VPGs.
        self._generate_buddy_lists()
        self.batches = self._generate_batches()
        self.report = self._generate_report()
        self.results = self._generate_results()
        upgrade_plan = {
            'image_upgrade_list': self.image_upgrade_list,
            'advanced_parameters': self.advanced_parameters,
            'device_table': self.device_table,
            'device_count': len(self.device_table),
            'skipped_device_table': self.skipped_device_table,
            'role_device_groups': self.role_device_groups,
            'vpg_table': self.vpg_table,
            'batches': self.batches,
            'report': self.report,
            'results': self.results,
            'status': "success"
        }
        return upgrade_plan
    # end _get_hitless_upgrade_plan
    # generate a table of device information
    def _generate_device_table(self):
        """Build per-device info dicts for every device to be upgraded.

        :return: (device_table, skipped_device_table) -- both keyed by
            device uuid; devices that _check_skip_device_upgrade rejects
            land in the skipped table with an optional 'skip_reason'.
        :raises ValueError: when a device has no routing-bridging roles
        """
        device_table = {}
        skipped_device_table = {}
        for image_entry in self.image_upgrade_list:
            image_uuid = image_entry.get('image_uuid')
            image_obj = self.vncapi.device_image_read(id=image_uuid)
            device_list = image_entry.get('device_list')
            for device_uuid in device_list:
                device_obj = self.vncapi.physical_router_read(id=device_uuid)
                routing_bridging_roles = device_obj.routing_bridging_roles
                if not routing_bridging_roles:
                    raise ValueError("Cannot find routing-bridging roles")
                rb_roles = routing_bridging_roles.get_rb_roles()
                is_hitless_upgrade = self._is_hitless_upgrade(device_obj)
                device_info = {
                    # Connection/identity details used by the playbooks.
                    "basic": {
                        "device_fqname": device_obj.fq_name,
                        "device_vendor":
                            device_obj.physical_router_vendor_name,
                        "device_family":
                            device_obj.physical_router_device_family,
                        "device_product":
                            device_obj.physical_router_product_name,
                        "device_serial_number":
                            device_obj.physical_router_serial_number,
                        "device_management_ip":
                            device_obj.physical_router_management_ip,
                        "device_username":
                            device_obj.physical_router_user_credentials.
                            username,
                        "device_password": self._get_password(device_obj),
                        "device_image_uuid": image_uuid,
                        "device_hitless_upgrade": is_hitless_upgrade
                    },
                    'image_family': image_obj.device_image_device_family,
                    'image_version': image_obj.device_image_os_version,
                    'current_image_version':
                        device_obj.physical_router_os_version,
                    'name': device_obj.fq_name[-1],
                    'uuid': device_uuid,
                    'physical_role': device_obj.physical_router_role,
                    'rb_roles': rb_roles,
                    'role': self._determine_role(
                        device_obj.physical_router_role, rb_roles),
                    'err_msgs': [],
                    # Filled in later by _generate_vpg_table and
                    # _generate_buddy_lists.
                    'vpg_info': {"vpg_list": [], "buddies": []},
                    'target_multihomed_interface': []
                }
                skip, reason = self._check_skip_device_upgrade(device_info)
                if skip:
                    if reason:
                        device_info['skip_reason'] = reason
                    skipped_device_table[device_uuid] = device_info
                else:
                    device_table[device_uuid] = device_info
        return device_table, skipped_device_table
    # end _generate_device_table
# generate a simple table of roles with their corresponding devices
def _generate_role_device_groups(self):
# Group devices based on role. Use dict keyed by role name
role_device_groups = {}
for device_uuid, device_info in list(self.device_table.items()):
role = device_info['role']
if role not in role_device_groups:
role_device_groups[role] = []
role_device_groups[role].append(device_uuid)
# Sort lists
for role, group in list(role_device_groups.items()):
group.sort()
return role_device_groups
# end _generate_role_device_groups
    # generate a table keyed by virtual port group uuid containing member
    # devices and their physical interfaces
    def _generate_vpg_table(self):
        """Map each VPG in the fabric to its member devices/interfaces.

        Side effects on self.device_table entries: appends the VPG uuid to
        'vpg_info.vpg_list' and records multihomed interface names in
        'target_multihomed_interface'.
        """
        vpg_table = {}
        vpg_refs = self.vncapi.virtual_port_groups_list(
            parent_id=self.fabric_uuid).get(
            'virtual-port-groups', [])
        for vpg_ref in vpg_refs:
            vpg_uuid = vpg_ref.get('uuid')
            vpg_table[vpg_uuid] = {"device_table": {}}
            vpg_dev_table = vpg_table[vpg_uuid]['device_table']
            vpg_obj = self.vncapi.virtual_port_group_read(id=vpg_uuid)
            vpg_table[vpg_uuid]['name'] = vpg_obj.fq_name[2]
            pi_refs = vpg_obj.get_physical_interface_refs() or []
            for pi_ref in pi_refs:
                pi_uuid = pi_ref.get('uuid')
                pi_obj = self.vncapi.physical_interface_read(id=pi_uuid)
                device_uuid = pi_obj.parent_uuid
                if device_uuid not in vpg_dev_table:
                    vpg_dev_table[device_uuid] = []
                    # If this is one of the devices to upgrade, append this
                    # vpg to the vpg_list for use later
                    if device_uuid in self.device_table:
                        device_info = self.device_table[device_uuid]
                        device_info['vpg_info']['vpg_list'].append(vpg_uuid)
                pi_entry = {"fq_name": pi_obj.fq_name, "uuid": pi_obj.uuid}
                vpg_dev_table[device_uuid].append(pi_entry)
                # Add interface name to multihomed list
                if device_uuid in self.device_table:
                    device_info = self.device_table[device_uuid]
                    if_name = pi_obj.fq_name[2]
                    if if_name not in \
                            device_info['target_multihomed_interface']:
                        device_info['target_multihomed_interface'].\
                            append(if_name)
        return vpg_table
    # end _generate_vpg_table
    # For each device, generate a list of devices which cannot be upgraded at
    # the same time because they are multi-homed to the same BMS
    def _generate_buddy_lists(self):
        """Populate 'vpg_info.buddies' for every device in device_table.

        A buddy is any other device that shares a VPG with this one;
        buddies must not be upgraded in the same batch.
        """
        for device_uuid, device_info in list(self.device_table.items()):
            vpg_info = self.device_table[device_uuid]['vpg_info']
            for vpg_uuid in vpg_info['vpg_list']:
                vpg_entry = self.vpg_table[vpg_uuid]
                vpg_dev_table = vpg_entry['device_table']
                for vpg_dev_uuid, pi_list in list(vpg_dev_table.items()):
                    # Skip self and devices already recorded as buddies.
                    if vpg_dev_uuid not in vpg_info['buddies'] and \
                            vpg_dev_uuid != device_uuid:
                        buddy_entry = self._get_buddy_entry(vpg_dev_uuid,
                                                            pi_list)
                        vpg_info['buddies'].append(buddy_entry)
    # end _generate_buddy_lists
    # Create entry for peer, including ip_addr, username, password
    def _get_buddy_entry(self, device_uuid, pi_list):
        """Build the buddy descriptor for a peer device.

        Uses the cached device/skipped tables when possible; otherwise
        falls back to reading the physical router from the VNC API.
        """
        if device_uuid in self.device_table or \
                device_uuid in self.skipped_device_table:
            if device_uuid in self.device_table:
                device_info = self.device_table[device_uuid]
            else:
                device_info = self.skipped_device_table[device_uuid]
            fq_name = device_info['basic']['device_fqname']
            mgmt_ip = device_info['basic']['device_management_ip']
            username = device_info['basic']['device_username']
            password = device_info['basic']['device_password']
            multihomed_interface_list = \
                device_info['target_multihomed_interface']
        else:
            # Peer is not part of this upgrade: fetch its details directly.
            device_obj = self.vncapi.physical_router_read(id=device_uuid)
            fq_name = device_obj.fq_name
            mgmt_ip = device_obj.physical_router_management_ip
            username = device_obj.physical_router_user_credentials.username
            password = self._get_password(device_obj)
            multihomed_interface_list = \
                self._get_multihomed_interface_list(pi_list)
        return {
            "uuid": device_uuid,
            "fq_name": fq_name,
            "name": fq_name[-1],
            "mgmt_ip": mgmt_ip,
            "username": username,
            "password": password,
            "multihomed_interface_list": multihomed_interface_list
        }
    # end _get_buddy_entry
# Get list of multihomed interface names
def _get_multihomed_interface_list(self, pi_list):
if_list = []
for pi_entry in pi_list:
if_name = pi_entry['fq_name'][-1]
if if_name not in if_list:
if_list.append(if_name)
return if_list
# end _get_multihomed_interface_list
def _device_value_based_on_number_of_critical_roles(self, device_uuid):
rb_roles = self.device_table[device_uuid].get('rb_roles')
how_many_critical_roles = 0
for rb_role in rb_roles:
if rb_role in FilterModule.critical_routing_bridging_roles:
how_many_critical_roles += 1
return -how_many_critical_roles
# Creates a dict: name of critical routing bridging role -> number of
# occurences in all devices.
def _calculate_devices_with_critical_routing_bridging_roles(self):
self.critical_routing_bridging_roles_count = {}
for critical_routing_bridging_role in\
FilterModule.critical_routing_bridging_roles:
self.critical_routing_bridging_roles_count[
critical_routing_bridging_role] = 0
for device_uuid, device_info in self.device_table.iteritems():
for routing_bridging_role in device_info.get('rb_roles'):
if routing_bridging_role in\
FilterModule.critical_routing_bridging_roles:
self.critical_routing_bridging_roles_count[
routing_bridging_role] += 1
# Assumes that critical_routing_bridging_roles_count has been initialized.
def _calc_max_number_of_repr_of_critical_rb_roles_per_batch(self):
self.max_number_of_repr_of_critical_rb_roles_per_batch = {}
for role_name, number_of_occurences \
in self.critical_routing_bridging_roles_count.iteritems():
self.max_number_of_repr_of_critical_rb_roles_per_batch[role_name] \
= number_of_occurences / 2 + number_of_occurences % 2
def _calculate_max_number_of_spines_updated_in_batch(self):
number_of_spines = 0
for device_uuid, device_info in self.device_table.iteritems():
if device_info.get('physical_role') == 'spine':
number_of_spines += 1
self.max_number_of_spines_updated_in_batch = \
number_of_spines / 2 + number_of_spines % 2
def _calc_number_of_repr_of_critical_rb_roles_in_batch(self, batch):
critical_routing_bridging_roles_count = {}
for critical_routing_bridging_role in\
FilterModule.critical_routing_bridging_roles:
critical_routing_bridging_roles_count[
critical_routing_bridging_role] = 0
for device_uuid in batch['device_list']:
rb_roles = self.device_table[device_uuid].get('rb_roles')
for rb_role in rb_roles:
if rb_role in FilterModule.critical_routing_bridging_roles:
critical_routing_bridging_roles_count[rb_role] += 1
return critical_routing_bridging_roles_count
# If correct batch extended with device_uuid is still correct in regards
# to vpg buddies, return True. Otherwise return False.
def _check_vpg_buddies_in_batch(self, device_uuid, batch):
# If this device shares a multi-homed vpg interface
# with another device in this batch, return False.
buddies = self._get_vpg_buddies(device_uuid)
for buddy in buddies:
if buddy['uuid'] in batch['device_list']:
return False
return True
# If correct batch extended with device_uuid is still correct in regards
# to number of spines in batch, return True. Otherwise return False.
def _check_number_of_spines_in_batch(self, device_uuid, batch):
device_info = self.device_table[device_uuid]
physical_role = device_info.get('physical_role')
if "spine" in physical_role:
spines_in_batch = 0
for device in batch['device_list']:
device_role = self.device_table[device].get('physical_role')
if "spine" in device_role:
spines_in_batch += 1
if (spines_in_batch + 1 >
self.max_number_of_spines_updated_in_batch):
return False
return True
# If correct batch extended with device_uuid is still correct in regards
# to number of critical roles, return True. Otherwise return False.
def _check_number_of_critical_rb_roles_in_batch(self, device_uuid, batch):
device_info = self.device_table[device_uuid]
rb_roles = device_info.get('rb_roles')
critical_rb_roles_in_device = list(
FilterModule.critical_routing_bridging_roles & set(rb_roles))
if critical_rb_roles_in_device:
critical_rb_roles_in_batch_count = self.\
_calc_number_of_repr_of_critical_rb_roles_in_batch(batch)
for rb_role in critical_rb_roles_in_device:
if critical_rb_roles_in_batch_count[rb_role] + 1 > self.\
max_number_of_repr_of_critical_rb_roles_per_batch[
rb_role]:
return False
return True
# It assumes that batch is correct and is not empty.
def _check_if_device_can_be_added_to_the_batch(self, device_uuid, batch):
return \
self._check_vpg_buddies_in_batch(device_uuid, batch) and \
self._check_number_of_spines_in_batch(device_uuid, batch) and \
self._check_number_of_critical_rb_roles_in_batch(
device_uuid, batch)
def _add_batch_index_to_device_info(self, batches):
for batch in batches:
for device_uuid in batch['device_list']:
self.device_table[device_uuid]['batch_index'] = batches.index(
batch)
def _add_device_to_the_batch(self, device_uuid, batch_load_list, batches):
batch = {}
loaded = False
batch_full = False
device_name = self.device_table[device_uuid].get('name')
# Try to add device into an existing batch
for batch in batch_load_list:
safe = | |
<filename>train.py<gh_stars>1-10
import sys
import os
from os.path import expanduser
import pickle
import torch
import torch.nn as nn
import torch.optim as optim
import torch.utils.data
import torch.onnx
import re
import json
from PIL import Image, ImageDraw
import torch
import numpy as np
# Training script- trains a Pytorch model against the Google Quickdraw dataset:
# https://github.com/googlecreativelab/quickdraw-dataset
#
# Specifically, it uses the "simplified Drawing files":
#
# https://console.cloud.google.com/storage/browser/quickdraw_dataset/full/simplified
#
# Also see https://www.kaggle.com/google/tinyquickdraw for a single downloadable tar file
# with about 50 million samples separated into 343 classes, which is where I got mine.
#
# It expects those files to be in ~/data/quickdraw. Specify any alternate path on the command line.
#
# As output it generates two files: doodles.pth (internal format) and doodles.onnx (ONNX export format).
#
# The model used here is a convolutional neural network accepting 1x64x64 inputs
# (i.e. black-and-white 64x64 images). Output is 344 neurons (i.e. one per label) with an extra neuron
# corresponding to label "nothing".
#
# NOTES:
#
# If doodles.pth is found (typically saved from a previous run), it will be loaded into the
# current model; otherwise it will start with a set of random weights. File size is approx. 300 MB.
#
# If it finds at any point during training that the output files doodles.pth or doodles.onnx
# are not on the drive, it will write new copies immediately with its current state (even though
# this means the first versions will only contain random weights). Deleting the files
# generates fresh copies, and so does finishing a training epoch (overwriting the prior versions).
# Because the data set is so immense, each epoch takes several hours to complete.
# In practice, with this model, performance levels off after about 3-4 epochs, with the network
# agreeing with Google's classification about 73% of the time.
#
# This way, if you need to edit a hyperparameter or go to work, you can pause execution by
# deleting the current doodles.pth and doodles.onnx files, letting it write new ones,
# and then hitting Ctrl-C. Typically you will want to adjust the learning rate downward
# or experiment with a different optimizer after the script has run for a few hours and
# its performance has reached a plateau. After you make your edits the script will pick up
# where it left off.
#
# If SAVE_BACKUP_FILES is set to True, the script will save backups as training progresses.
# Each time performance reaches a new record, a file will be saved with a filename indicating the
# new record number of correct responses. This is to avoid losing progress if the script crashes.
# (Raising the batch size too high can cause spurious out-of-memory errors at random times.)
# Specify data folder as command line argument; default is ~/data/quickdraw
# Location of the .ndjson files; overridable via the first CLI argument.
DATA_DIRECTORY = '~/data/quickdraw'
if len(sys.argv) > 1:
    DATA_DIRECTORY = sys.argv[1]
if DATA_DIRECTORY[0] == '~':
    DATA_DIRECTORY = expanduser(DATA_DIRECTORY)
# Standard industry practice: Jack this number up as high as you can, then carefully lower it
# until the script stops crashing. Final value is dependent on GPU memory.
# This is a safe batch size to use on an RTX 2060 with 6 GB.
BATCH_SIZE = 1000
# Hyperparameters; both SGD and Adam work well, at least in the beginning; use SGD by default
OPTIMIZER_NAME = 'SGD'
SGD_LEARNING_RATE = 0.01
SGD_MOMENTUM = 0
ADAM_LEARNING_RATE = 0.001
ADAM_BETAS = (0.9, 0.99)
ADAM_EPSILON = 0.0001
# Output/auxiliary file locations (all relative to the working directory).
INDEX_CACHE_FILE = './index_cache.pkl'
LABELS_FILE = './labels.txt'
STATE_DICT_FILE = './doodles.pth'
ONNX_FILE = './doodles.onnx'
SAVE_BACKUP_FILES = True
NUMBERED_STATE_DICT_FILE_TEMPLATE = './doodles_{}_of_{}.pth'
NUMBERED_ONNX_FILE_TEMPLATE = './doodles_{}_of_{}.onnx'
DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# If it's installed, turn this on to enable NVidia's Apex AMP Pytorch extension.
# This will let us do calculations in FP16 on the GPU which will save memory on the card
# and let us raise the batch size. It will also leverage RTX tensor cores on RTX cards.
# Default is set to False, because compiling and installing AMP is an involved process-
# NVidia's CUDA Toolkit to be installed on your system before you can compile it using pip.
MIXED_PRECISION = False
if MIXED_PRECISION and torch.cuda.is_available():
    # See if the AMP Pytorch extension has been installed; otherwise stick to standard FP32.
    # If we are using mixed precision we can raise the batch size but keep it a multiple of 8.
    # All tensor dimensions must be multiples of 8 to trigger NVidia's tensor core optimizations.
    try:
        from apex import amp, optimizers
        MIXED_PRECISION = True
        BATCH_SIZE = int(BATCH_SIZE * 1.6)  # Raising it by 60%
        print('Using mixed precision.')
    except ImportError:
        # Apex not installed: silently fall back to full precision.
        MIXED_PRECISION = False
# This is a torch DataSet implementation that makes the following assumptions:
#
# 1. Data consists of a set of text files with ".ndjson" extensions in the specified directory.
# 2. Each line in the .ndjson file is a JSON string with all data for a single sample.
# 3. Each line of JSON has the following format (omitting extraneous fields):
# {"word":"elephant","drawing":[[[0, 1, 10],[25, 103, 163]],[[4,15,134,234,250],[27,22,6,4,0]]]}
# Array "drawing" has the brush strokes, each stroke a pair of arrays with x and y coordinates on a 256x256 grid.
# 4. We can build our label list by only looking at the first line of each file. (All lines have same value for "word".)
class QuickDrawDataset(torch.utils.data.Dataset):
    """Dataset over Google Quickdraw "simplified" .ndjson drawing files.

    __init__ builds a (filename, byte offset) index of every recognized
    sample so that __getitem__ can seek straight to its line, then
    rasterizes the brush strokes into a 1x64x64 black-and-white tensor
    on demand.  The dataset is padded with all-zero "nothing" samples so
    its length is an exact multiple of batch_size.
    """

    # Take the batch size, so we know how much to pad with all-zero samples
    # mapping to the "blank" channel.  This way we ensure we deliver
    # full-sized batches interspersed with a few blank samples mapping to
    # label "nothing".
    def __init__(self, dataDir, batch_size):
        super(QuickDrawDataset, self).__init__()
        print('Data folder: ' + dataDir)
        self.dataDir = dataDir
        self.filenames = list(filter(lambda x: x.endswith(".ndjson"), sorted(os.listdir(dataDir))))
        self.filenameByIndex = []
        self.fileByteOffsetByIndex = []
        self.labelListIndices = {}
        self.labelList = []
        for filename in self.filenames:
            print('Indexing ' + filename)
            # FIX: context manager so the file handle is closed even when a
            # malformed line raises mid-index (previously leaked on error).
            with open(dataDir + "/" + filename, "r") as file:
                byte_offset = 0
                word = None
                for line in file:
                    if word is None:
                        # All lines in a file share the same "word" label, so
                        # only the first line needs to be parsed for it.
                        words = re.findall(r'"word":"([\w\s-]+)"', line)
                        word = words[0]
                        self.labelListIndices[word] = len(self.labelList)
                        self.labelList.append(word)
                    # Only use the ones Google recognizes
                    if len(re.findall(r'"recognized":true', line)) > 0:
                        self.filenameByIndex.append(filename)
                        self.fileByteOffsetByIndex.append(byte_offset)
                    # NOTE(review): len(line) counts characters, not bytes;
                    # offsets are only valid while the data is pure ASCII --
                    # TODO confirm for non-English labels.
                    byte_offset += len(line)
        self.labelListIndices['nothing'] = len(self.labelList)
        self.labelList.append('nothing')
        if MIXED_PRECISION:
            # NVidia really wants tensor dimensions to be multiples of 8:
            # pad the label list with dummy "nothing_N" classes.
            extra_nothings = 0
            while len(self.labelList) % 8 > 0:
                extra_nothings += 1
                self.labelListIndices['nothing_{}'.format(extra_nothings)] = len(self.labelList)
                self.labelList.append('nothing_{}'.format(extra_nothings))
        self.paddingLength = batch_size - (len(self.filenameByIndex) % batch_size)
        print('padding length {}'.format(self.paddingLength))

    def __len__(self):
        # Real samples plus the zero padding that fills the final batch.
        return len(self.filenameByIndex) + self.paddingLength

    def __getitem__(self, idx):
        """Return (1x64x64 float tensor, label index) for sample `idx`."""
        if idx >= len(self.filenameByIndex):
            # Padding region: an all-black image labelled 'nothing'.
            return torch.zeros(1, 64, 64, dtype=torch.float), self.labelListIndices['nothing']
        filename = self.filenameByIndex[idx]
        byte_offset = self.fileByteOffsetByIndex[idx]
        # FIX: context manager guarantees the handle is closed on errors.
        with open(self.dataDir + '/' + filename, 'r') as file:
            file.seek(byte_offset)
            line = file.readline()
        # Convert line containing brush stroke coordinate list to a 256x256
        # image tensor using PIL.
        entry = json.loads(line)
        drawing = entry.get('drawing')
        im = Image.new("L", (256, 256))
        draw = ImageDraw.Draw(im)
        for stroke in drawing:
            x_coords = stroke[0]
            y_coords = stroke[1]
            for i in range(len(x_coords) - 1):
                draw.line((x_coords[i], y_coords[i], x_coords[i + 1], y_coords[i + 1]), fill=255, width=5)
        im = im.resize((64, 64), Image.ANTIALIAS)
        word = entry.get('word')
        imageTensor = torch.tensor(np.array(im) / 256, dtype=torch.float)
        # Binarize to match the jagged two-color canvases the JS client
        # produces (browser downsizing is nearest-neighbour; tedious
        # workarounds exist, see
        # https://stackoverflow.com/questions/2303690/resizing-an-image-in-an-html5-canvas).
        THRESHOLD = 0.1
        imageTensor[imageTensor >= THRESHOLD] = 1.0
        imageTensor[imageTensor < THRESHOLD] = 0.0
        imageTensor = imageTensor.unsqueeze(0)
        return imageTensor, self.labelListIndices.get(word)
# Takes input of size Nx1x64x64, a batch of N black and white 64x64 images.
# Applies two convolutional layers and three fully connected layers.
class CNNModel(nn.Module):
    # input_size is 64 (input samples are 64x64 images); num_classes is 344
    def __init__(self, input_size, num_classes):
        """Build two conv/pool stages and three fully connected layers.

        :param input_size: width/height of the square input image
        :param num_classes: number of output neurons (one per label)
        """
        super(CNNModel, self).__init__()
        # Stage 1: 1 -> 32 channels, 5x5 conv, BN, ReLU, 2x2 max-pool.
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 32, kernel_size=5, stride=1, padding=2, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2))
        # Stage 2: 32 -> 64 channels, second 2x2 max-pool.
        self.layer2 = nn.Sequential(
            nn.Conv2d(32, 64, kernel_size=5, stride=1, padding=2, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2))
        # After two /2 pools the flattened features are 64*(input_size/4)^2.
        dimension = int(64 * pow(input_size / 4, 2))
        self.fc1 = nn.Sequential(nn.Linear(dimension, int(dimension / 4)), nn.Dropout(0.25))
        self.fc2 = nn.Sequential(nn.Linear(int(dimension / 4), int(dimension / 8)), nn.Dropout(0.25))
        self.fc3 = nn.Sequential(nn.Linear(int(dimension / 8), num_classes))
def forward(self, x):
out = self.layer1(x)
out = self.layer2(out)
out = out.view(out.size(0), -1)
out = | |
<reponame>sfox14/butterfly
"""My torch implementation of permutations and sinkhorn balancing ops.
A torch library of operations and sampling with permutations
and their approximation with doubly-stochastic matrices, through Sinkhorn balancing
"""
import numpy as np
from scipy.optimize import linear_sum_assignment
from scipy.stats import kendalltau
import torch
#from torch.distributions import Bernoulli
# Run on the GPU when one is available, otherwise fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


def my_sample_gumbel(shape, eps=1e-10):
    """Sample standard Gumbel(0, 1) noise of the given shape.

    Uses the inverse-CDF trick g = -log(-log(U)) with U ~ Uniform(0, 1);
    `eps` is added inside both logarithms for numerical stability.

    Args:
        shape: list of integers
        eps: float, for numerical stability
    Returns:
        A sample of standard Gumbel random variables
    """
    uniform = torch.rand(shape, dtype=torch.float, device=device)
    inner = torch.log(uniform + eps)
    return -torch.log(eps - inner)
def simple_sinkhorn(MatrixA, n_iter=20):
    """Approximate doubly-stochastic normalization of a batch of matrices.

    Alternates row and column normalization (Sinkhorn iterations) on a
    [batch, N, N] tensor of positive entries.

    FIX: the divisions are now out-of-place.  The original `/=` mutated the
    caller's tensor as a side effect and raises a RuntimeError when the
    input is an autograd leaf that requires grad.

    Args:
        MatrixA: positive tensor of shape [batch, N, N]
        n_iter: number of row/column normalization sweeps
    Returns:
        A tensor of the same shape, approximately doubly stochastic.
    """
    for _ in range(n_iter):
        MatrixA = MatrixA / MatrixA.sum(dim=1, keepdim=True)
        MatrixA = MatrixA / MatrixA.sum(dim=2, keepdim=True)
    return MatrixA
def my_sinkhorn(log_alpha, n_iters = 20):
    """Incomplete Sinkhorn normalization of log_alpha, computed in log-space.

    By Sinkhorn and Knopp (Pacific J. Math., 1967) a sufficiently
    well-behaved positive matrix converges to a doubly-stochastic matrix
    under successive row and column normalization. The effective input is
    exp(log_alpha); working in log-space keeps the iteration stable, and
    entries are exponentiated only on return.

    Args:
        log_alpha: 2D tensor [N, N] or 3D tensor [batch, N, N].
        n_iters: number of Sinkhorn iterations (about 20 suffices for N~100).

    Returns:
        A 3D tensor of close-to-doubly-stochastic matrices (a 2D input is
        promoted to a batch of size 1).
    """
    n = log_alpha.size()[1]
    log_alpha = log_alpha.view(-1, n, n)
    for _ in range(n_iters):
        # Subtracting a dimension's log-sum-exp normalizes it in log-space
        # (out-of-place to avoid in-place autograd issues).
        row_lse = torch.logsumexp(log_alpha, dim=2, keepdim=True).view(-1, n, 1)
        log_alpha = log_alpha - row_lse
        col_lse = torch.logsumexp(log_alpha, dim=1, keepdim=True).view(-1, 1, n)
        log_alpha = log_alpha - col_lse
    return torch.exp(log_alpha)
def my_gumbel_sinkhorn(log_alpha, temp=1.0, n_samples=1, noise_factor=1.0, n_iters=20, squeeze=True):
    """Sample close-to-doubly-stochastic matrices via Gumbel noise.

    Applies sinkhorn((log_alpha + noise) / temp). In the zero-temperature
    limit sinkhorn(log_alpha / temp) approaches a permutation matrix, so for
    low temperatures this approximately samples permutation matrices whose
    distribution is parameterized by log_alpha. With noise_factor=0 the
    deterministic limit approximately solves the matching problem
    M = argmax_M sum_ij log_alpha_ij * M_ij (cf. the Hungarian algorithm).

    Warning: convergence holds only as n_iters -> infinity. With finite
    n_iters, very low temp can yield pseudo-convergence or fractional
    rows/columns; increase n_iters when decreasing temp. Very low temp also
    tends to produce high-variance gradients.

    Args:
        log_alpha: 2D tensor [N, N] or 3D tensor [batch_size, N, N].
        temp: temperature parameter, a float.
        n_samples: number of samples per batch element.
        noise_factor: scale of the Gumbel noise (0 disables randomness).
        n_iters: number of Sinkhorn iterations; choose in inverse
            correspondence with temp to avoid numerical instabilities.
        squeeze: if True and n_samples == 1, outputs remain 3D.

    Returns:
        sink: [batch_size, n_samples, N, N] doubly-stochastic matrices
            (3D when n_samples == 1 and squeeze is True).
        log_alpha_w_noise: the noisy log_alpha divided by temp, with the
            same leading dimensions as ``sink``.
    """
    n = log_alpha.size()[1]
    log_alpha = log_alpha.view(-1, n, n)
    batch_size = log_alpha.size()[0]
    # Tile the batch once per sample: [n_samples * batch_size, n, n].
    log_alpha_w_noise = log_alpha.repeat(n_samples, 1, 1)
    if noise_factor == 0:
        noise = 0.0
    else:
        noise = my_sample_gumbel([n_samples * batch_size, n, n]) * noise_factor
    log_alpha_w_noise = (log_alpha_w_noise + noise) / temp
    sink = my_sinkhorn(log_alpha_w_noise.clone())
    if n_samples > 1 or squeeze is False:
        # Reshape flat samples back to [batch_size, n_samples, n, n].
        sink = torch.transpose(sink.view(n_samples, batch_size, n, n), 1, 0)
        log_alpha_w_noise = torch.transpose(
            log_alpha_w_noise.view(n_samples, batch_size, n, n), 1, 0)
    return sink, log_alpha_w_noise
def my_sample_uniform_and_order(n_lists, n_numbers, prob_inc):
    """Sample uniform random numbers; return them sorted plus the sort indices.

    NOTE(review): the original implementation drew a Bernoulli "sign" from
    ``prob_inc`` but never applied it — the lines using it were commented
    out — so lists were always returned in decreasing order. The dead
    computation is removed here; ``prob_inc`` is kept only for interface
    compatibility and is currently unused.

    Args:
        n_lists: int, number of lists to generate.
        n_numbers: int, number of elements per list.
        prob_inc: unused (see note above).

    Returns:
        ordered: 2D float tensor [n_lists, n_numbers]; each row sorted in
            decreasing order.
        random: 2D float tensor [n_lists, n_numbers] of the raw uniform draws.
        permutations: 2D int tensor such that
            random[i, permutations[i, j]] == ordered[i, j].
    """
    random = torch.empty(n_lists, n_numbers).uniform_(0, 1).type(torch.float32)
    # torch.sort returns both the sorted values and the source indices.
    ordered, permutations = torch.sort(random, descending=True)
    return ordered, random, permutations
def my_sample_permutations(n_permutations, n_objects):
    """Sample a batch of permutations from the uniform distribution.

    Draws i.i.d. uniform values and ranks them; by symmetry the induced
    distribution over permutations is uniform. Permutations are represented
    as lists of integers (see 'listperm2matperm' / 'matperm2listperm' for
    the matrix representation).

    Args:
        n_permutations: int, number of permutations to sample.
        n_objects: int, number of elements in each permutation.

    Returns:
        A 2D integer tensor [n_permutations, n_objects] where each row is a
        permutation of range(n_objects).
    """
    scores = torch.empty(n_permutations, n_objects).uniform_(0, 1)
    # Ranking the random scores (largest first) yields the permutation.
    _, perms = scores.topk(k=n_objects, dim=-1)
    return perms
def my_permute_batch_split(batch_split, permutations):
    """Scramble a batch of objects along axis 1 according to permutations.

    Takes a 3D tensor [batch_size, n_objects, object_size] and permutes the
    items in axis=1 per the 2D integer tensor ``permutations`` (shape
    [batch_size, n_objects]). Many-dimensional objects (e.g. images) must be
    flattened to respect the 3D format.

    Fix: the previous implementation viewed the index as (..., -1), giving a
    trailing dimension of 1, so torch.gather returned only the first feature
    column when object_size > 1. The index is now expanded across
    object_size, so the output truly has the same shape as ``batch_split``.

    Args:
        batch_split: 3D tensor [batch_size, n_objects, object_size].
        permutations: 2D integer tensor [batch_size, n_objects]; row n is a
            permutation of range(n_objects).

    Returns:
        A 3D tensor with the same shape as batch_split such that
        out[n, j, :] = batch_split[n, permutations[n, j], :].
    """
    batch_size = permutations.size()[0]
    n_objects = permutations.size()[1]
    # Expand the index over the feature dimension so gather keeps every column.
    index = permutations.view(batch_size, n_objects, 1).expand(
        -1, -1, batch_split.size(2))
    return torch.gather(batch_split, 1, index)
def my_listperm2matperm(listperm):
"""Converts a batch | |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BCOO (Bached coordinate format) matrix object and associated primitives."""
import functools
import operator
from typing import Any, NamedTuple, Sequence, Tuple
import warnings
import numpy as np
from jax import core
from jax import lax
from jax import tree_util
from jax import vmap
from jax.config import config
from jax.experimental.sparse._base import JAXSparse
from jax.experimental.sparse.util import _safe_asarray, CuSparseEfficiencyWarning
from jax.interpreters import batching
from jax.interpreters import partial_eval as pe
from jax.interpreters import mlir
import jax.numpy as jnp
from jax.interpreters import ad
from jax.util import safe_zip, unzip2, split_list
from jax._src import api_util
from jax._src.api_util import flatten_axes
from jax._src.lax.lax import (
ranges_like, remaining, _dot_general_batch_dim_nums, _dot_general_shape_rule,
DotDimensionNumbers)
from jax._src.lib.mlir import ir
from jax._src.lib.mlir.dialects import mhlo
from jax._src.numpy.setops import _unique
from jax._src.lib import sparse_apis
# Type aliases used throughout this module.
Dtype = Any
Shape = Tuple[int, ...]
#----------------------------------------------------------------------
# General utilities...
def broadcasting_vmap(fun, in_axes=0, out_axes=0):
  """Like ``jax.vmap``, but mapped axes of length 1 broadcast to the batch.

  Inputs whose mapped axis has length 1 are squeezed and treated as
  unmapped, instead of requiring all mapped-axis lengths to agree.
  """
  @functools.wraps(fun)
  def batched_fun(*args):
    args_flat, in_tree  = tree_util.tree_flatten(args)
    in_axes_flat = flatten_axes("vmap in_axes", in_tree, in_axes, kws=False)
    # Target batch size: the largest mapped-axis length among the inputs.
    size = max(arg.shape[i] for arg, i in safe_zip(args_flat, in_axes_flat) if i is not None)
    if size > 1:
      if any(i is not None and arg.shape[i] not in (1, size)
             for arg, i in safe_zip(args_flat, in_axes_flat)):
        raise ValueError("broadcasting_vmap: mismatched input shapes")
      # Squeeze length-1 mapped axes and mark them unmapped so vmap
      # broadcasts them against the size-`size` batch.
      args_flat, in_axes_flat = zip(*(
          (arg, None) if i is None else (lax.squeeze(arg, (i,)), None) if arg.shape[i] == 1 else (arg, i)
          for arg, i in zip(args_flat, in_axes_flat)
      ))
    new_args = tree_util.tree_unflatten(in_tree, args_flat)
    new_in_axes = tree_util.tree_unflatten(in_tree, in_axes_flat)
    return vmap(fun, in_axes=new_in_axes, out_axes=out_axes)(*new_args)
  return batched_fun
#----------------------------------------------------------------------
# BCOO primitives: batched extension of COO.
def _bcoo_nse(mat, n_batch=0, n_dense=0):
mat = jnp.asarray(mat)
mask = (mat != 0)
if n_dense > 0:
mask = mask.any([-(i + 1) for i in range(n_dense)])
mask = mask.sum(list(range(n_batch, mask.ndim)))
return mask.max()
# TODO(jakevdp) this can be problematic when used with autodiff; see
# https://github.com/google/jax/issues/10163. Should this be a primitive?
# Alternatively, maybe roll this into bcoo_sum_duplicates as an optional argument.
def bcoo_eliminate_zeros(mat, nse=None):
  """Return a copy of ``mat`` with explicitly stored zeros removed.

  Stored elements whose data block is entirely zero get their sparse
  indices replaced by an out-of-range sentinel, after which
  bcoo_sum_duplicates consolidates the result.

  Args:
    mat: BCOO matrix.
    nse: optional int, number of specified elements for the result.
  """
  data, indices, shape = mat.data, mat.indices, mat.shape
  props = _validate_bcoo(data, indices, shape)
  # A stored element is "zero" iff every entry of its dense block is zero.
  mask = (data == 0).all(tuple(range(props.n_batch + 1, data.ndim)))
  # For broadcast (size-1) batch dims of `indices`, the index is shared by
  # all data batches, so it may only be invalidated if zero in all of them.
  dims_to_contract = tuple(i for i, s in enumerate(indices.shape[:props.n_batch]) if s == 1)
  mask = mask.all(dims_to_contract, keepdims=True)
  # Sentinel: indices equal to the sparse shape are out of range.
  fill_value = jnp.array(shape[props.n_batch:props.n_batch + props.n_sparse], dtype=indices.dtype)
  f = lambda i, m: jnp.where(m[:, None], fill_value[None, :], i)
  for _ in range(props.n_batch):
    f = vmap(f)
  indices = f(indices, mask)
  # NOTE(review): relies on bcoo_sum_duplicates treating out-of-range
  # indices as padding — confirm against its implementation.
  return bcoo_sum_duplicates(BCOO((data, indices), shape=shape), nse=nse)
def _unbatch_bcoo(data, indices, shape):
  """Fold the batch dimensions of a BCOO matrix into its sparse indices.

  Returns (data, indices) of an equivalent un-batched representation in
  which the former batch axes appear as leading columns of the index array.
  """
  n_batch = _validate_bcoo(data, indices, shape).n_batch
  if n_batch == 0:
    return data, indices
  # Materialize any broadcast (size-1) batch dimensions before flattening.
  data = jnp.broadcast_to(data, shape[:n_batch] + data.shape[n_batch:])
  indices = jnp.broadcast_to(indices, shape[:n_batch] + indices.shape[n_batch:])
  # One explicit batch-coordinate row per stored element.
  batch_indices = jnp.mgrid[tuple(slice(None, d) for d in indices.shape[:n_batch + 1])][:-1]
  batch_indices = batch_indices.reshape(n_batch, -1).T
  # Collapse batch dims + nse into a single leading axis.
  data = data.reshape(np.prod(data.shape[:n_batch + 1]), *data.shape[n_batch + 1:])
  indices = indices.reshape(np.prod(indices.shape[:n_batch + 1]), *indices.shape[n_batch + 1:])
  return data, jnp.hstack([batch_indices, indices])
class BCOOProperties(NamedTuple):
  """Structural properties derived from a BCOO (data, indices, shape) triple."""
  n_batch: int   # number of leading batch dimensions
  n_sparse: int  # number of sparse dimensions
  n_dense: int   # number of trailing dense (block) dimensions
  nse: int       # number of specified elements per batch
class BCOOInfo(NamedTuple):
  """Static metadata threaded through BCOO primitives."""
  shape: Shape   # full dense shape: batch + sparse + dense dims
def _validate_bcoo(data: jnp.ndarray, indices: jnp.ndarray, shape: Sequence[int]) -> BCOOProperties:
  """Check that (data, indices) are mutually consistent with ``shape``.

  Validates the index array first, then checks that data's batch dims
  broadcast against ``shape`` and its trailing dims are (nse,) + dense dims.

  Raises:
    ValueError: if any shape is inconsistent.
  """
  props = _validate_bcoo_indices(indices, shape)
  n_batch, n_sparse, n_dense, nse = props
  shape = tuple(shape)
  # Batch dims of `data` may be broadcast (size 1) against the full shape.
  if any(s1 not in (1, s2) for s1, s2 in safe_zip(data.shape[:n_batch], shape[:n_batch])):
    raise ValueError("data batch dimensions not compatible for "
                     f"data.shape={data.shape}, shape={shape}")
  if data.shape[n_batch:] != (nse,) + shape[n_batch + n_sparse:]:
    raise ValueError(f"Invalid data.shape={data.shape} for "
                    f"nse={nse}, n_batch={n_batch}, n_dense={n_dense}")
  return props
def _validate_bcoo_indices(indices: jnp.ndarray, shape: Sequence[int]) -> BCOOProperties:
  """Validate a BCOO index array against ``shape`` and derive its properties.

  The trailing two axes of ``indices`` encode (nse, n_sparse); any leading
  axes are batch dimensions and may be broadcast (size 1) against ``shape``.

  Raises:
    ValueError: if the index shape is inconsistent with ``shape``.
  """
  assert jnp.issubdtype(indices.dtype, jnp.integer)
  shape = tuple(shape)
  nse, n_sparse = indices.shape[-2:]
  n_batch = indices.ndim - 2
  n_dense = len(shape) - n_batch - n_sparse
  assert n_dense >= 0
  batch_compatible = all(s1 in (1, s2) for s1, s2
                         in safe_zip(indices.shape[:n_batch], shape[:n_batch]))
  if not batch_compatible:
    raise ValueError("indices batch dimensions not compatible for "
                     f"indices.shape={indices.shape}, shape={shape}")
  if indices.shape[n_batch:] != (nse, n_sparse):
    raise ValueError(f"Invalid indices.shape={indices.shape} for "
                     f"nse={nse}, n_batch={n_batch}, n_dense={n_dense}")
  return BCOOProperties(n_batch=n_batch, n_sparse=n_sparse, n_dense=n_dense, nse=nse)
#----------------------------------------------------------------------
# bcoo_todense
bcoo_todense_p = core.Primitive('bcoo_todense')
def bcoo_todense(mat):
  """Convert batched sparse matrix to a dense matrix.

  Args:
    mat: BCOO matrix.

  Returns:
    mat_dense: dense version of ``mat``.
  """
  # Thin public wrapper; the primitive binding lives in _bcoo_todense.
  return _bcoo_todense(mat.data, mat.indices, spinfo=mat._info)
def _bcoo_todense(data, indices, *, spinfo):
  """Convert batched sparse matrix to a dense matrix.

  Args:
    data : array of shape ``batch_dims + (nse,) + block_dims``.
    indices : array of shape ``batch_dims + (n_sparse, nse)``
    spinfo : BCOOInfo. In particular, this includes the shape
      of the matrix, which is equal to ``batch_dims + sparse_dims + block_dims``
      where ``len(sparse_dims) == n_sparse``

  Returns:
    mat : array with specified shape and dtype matching ``data``
  """
  # Bind the primitive so JAX transformation rules (jvp/transpose/batching,
  # registered below) apply.
  return bcoo_todense_p.bind(jnp.asarray(data), jnp.asarray(indices), spinfo=spinfo)
@bcoo_todense_p.def_impl
def _bcoo_todense_impl(data, indices, *, spinfo):
  """Dense materialization: scatter-add each data block at its sparse index."""
  shape = spinfo.shape
  n_batch, n_sparse, _, _ = _validate_bcoo(data, indices, shape)
  # Per-batch index arrays; broadcast (size-1) batch dims of `indices`
  # select index row 0 for every batch position.
  ind_slices = tuple(np.zeros(s, int) if i_s == 1 else np.arange(s)
                     for s, i_s in zip(shape[:n_batch], indices.shape[:n_batch]))
  grid = tuple(np.meshgrid(*ind_slices, indexing='ij', sparse=True))
  sparse_ind = tuple(indices[grid + (slice(None), i)] for i in range(n_sparse))
  batch_slices = tuple(np.arange(s) for s in shape[:n_batch])
  grid = np.meshgrid(*batch_slices, np.arange(1), indexing='ij', sparse=True)
  batch_ind = tuple(grid)[:-1]
  if not sparse_ind:
    # No sparse dimensions: just sum the stored elements over the nse axis.
    data = data.sum(n_batch, keepdims=bool(batch_ind), dtype=data.dtype)
  # scatter-add so duplicate indices accumulate.
  return jnp.zeros(shape, data.dtype).at[batch_ind + sparse_ind].add(data)
@bcoo_todense_p.def_abstract_eval
def _bcoo_todense_abstract_eval(data, indices, *, spinfo):
  """Abstract evaluation: result has spinfo.shape and data's dtype."""
  shape = spinfo.shape
  _validate_bcoo(data, indices, shape)
  return core.ShapedArray(shape, data.dtype)
def _bcoo_todense_jvp(data_dot, data, indices, *, spinfo):
  """JVP w.r.t. data: todense is linear in data, so densify the tangent."""
  return _bcoo_todense(data_dot, indices, spinfo=spinfo)
def _bcoo_todense_transpose(ct, data, indices, *, spinfo):
  """Transpose rule: gather the cotangent back at the stored indices."""
  shape = spinfo.shape
  assert ad.is_undefined_primal(data)
  # Differentiation through integer indices is not meaningful.
  if ad.is_undefined_primal(indices):
    raise ValueError("Cannot transpose with respect to sparse indices")
  assert ct.shape == shape
  assert ct.dtype == data.aval.dtype
  return bcoo_extract(indices, ct), indices
def _bcoo_todense_batching_rule(batched_args, batch_dims, *, spinfo):
  """Batching rule: align data/indices on a common leading batch axis."""
  data, indices = batched_args
  if any(b not in [0, None] for b in batch_dims):
    raise NotImplementedError(f"batch_dims={batch_dims}. Only 0 and None are supported.")
  # Unbatched operands gain a broadcastable leading axis of size 1.
  if batch_dims[0] is None:
    data = data[None, ...]
  if batch_dims[1] is None:
    indices = indices[None, ...]
  new_spinfo = BCOOInfo(
      shape=(max(data.shape[0], indices.shape[0]), *spinfo.shape))
  return _bcoo_todense(data, indices, spinfo=new_spinfo), 0
# Register differentiation, batching, and lowering rules for bcoo_todense.
ad.defjvp(bcoo_todense_p, _bcoo_todense_jvp, None)
ad.primitive_transposes[bcoo_todense_p] = _bcoo_todense_transpose
batching.primitive_batchers[bcoo_todense_p] = _bcoo_todense_batching_rule
mlir.register_lowering(bcoo_todense_p, mlir.lower_fun(
    _bcoo_todense_impl, multiple_results=False))
#--------------------------------------------------------------------
# bcoo_fromdense
bcoo_fromdense_p = core.Primitive('bcoo_fromdense')
bcoo_fromdense_p.multiple_results = True  # returns (data, indices)
# Error text used when `nse` is a traced (non-concrete) value.
_TRACED_NSE_ERROR = """
The error arose for the nse argument of bcoo_fromdense. In order for BCOO.fromdense()
to be used in traced/compiled code, you must pass a concrete value to the nse
(number of specified elements) argument.
"""
def bcoo_fromdense(mat, *, nse=None, n_batch=0, n_dense=0, index_dtype=jnp.int32):
  """Create BCOO-format sparse matrix from a dense matrix.

  Args:
    mat : array to be converted to BCOO.
    nse : number of specified elements in each batch; computed from ``mat``
      when not given (which requires ``mat`` to be concrete).
    n_batch : number of batch dimensions (default: 0)
    n_dense : number of block_dimensions (default: 0)
    index_dtype : dtype of sparse indices (default: int32)

  Returns:
    mat_bcoo: BCOO representation of the matrix.
  """
  mat = jnp.asarray(mat)
  if nse is None:
    nse = _bcoo_nse(mat, n_batch, n_dense)
  # nse must be concrete so the output shapes are static under tracing.
  nse = core.concrete_or_error(operator.index, nse, _TRACED_NSE_ERROR)
  return BCOO(_bcoo_fromdense(mat, nse=nse, n_batch=n_batch, n_dense=n_dense,
                              index_dtype=index_dtype),
              shape=mat.shape)
def _bcoo_fromdense(mat, *, nse, n_batch=0, n_dense=0, index_dtype=jnp.int32):
  """Create BCOO-format sparse matrix from a dense matrix.

  Args:
    mat : array to be converted to BCOO, with ``ndim = n_batch + n_sparse + n_dense``.
    nse : number of specified elements in each batch
    n_batch : number of batch dimensions (default: 0)
    n_dense : number of block_dimensions (default: 0)
    index_dtype : dtype of sparse indices (default: int32)

  Returns:
    data : array of shape ``mat.shape[:n_batch] + (nse,) + mat.shape[mat.ndim - n_dense:]``
      and dtype ``mat.dtype``
    indices : array of shape ``mat.shape[:n_batch] + (n_sparse, nse)``
  """
  mat = jnp.asarray(mat)
  # nse must be concrete: it determines the static output shapes.
  nse = core.concrete_or_error(operator.index, nse, _TRACED_NSE_ERROR)
  return bcoo_fromdense_p.bind(mat, nse=nse, n_batch=n_batch, n_dense=n_dense,
                               index_dtype=index_dtype)
@bcoo_fromdense_p.def_impl
def _bcoo_fromdense_impl(mat, *, nse, n_batch, n_dense, index_dtype):
  """Concrete implementation: locate nonzero blocks and extract their data."""
  mat = jnp.asarray(mat)
  n_sparse = mat.ndim - n_dense - n_batch
  mask = (mat != 0)
  if n_dense > 0:
    # A dense block is "specified" if any of its entries is nonzero.
    mask = mask.any([-(i + 1) for i in range(n_dense)])
  def _nonzero(a):
    if a.ndim:
      # Pad with out-of-range indices (a.shape) when fewer than nse nonzeros.
      return jnp.nonzero(a, size=nse, fill_value=a.shape[:n_sparse])
    return ()
  for _ in range(n_batch):
    _nonzero = vmap(_nonzero, 0)
  indices = _nonzero(mask)
  if not indices:
    indices = jnp.zeros(mask.shape[:n_batch] + (nse, 0), index_dtype)
  else:
    # Stack per-dimension index vectors into axis n_batch + 1 -> (..., nse, n_sparse).
    indices = jnp.moveaxis(jnp.array(indices, index_dtype), 0, n_batch + 1)
  data = bcoo_extract(indices, mat)
  # Zero the data at padding positions beyond each batch's true nonzero count.
  true_nonzeros = (lax.broadcasted_iota(jnp.int32, (1,) * n_batch + (nse,), n_batch) <
                   mask.sum(list(range(n_batch, mask.ndim)))[..., None])
  true_nonzeros = true_nonzeros[(n_batch + 1) * (slice(None),) + n_dense * (None,)]
  data = jnp.where(true_nonzeros, data, 0)
  return data, indices
@bcoo_fromdense_p.def_abstract_eval
def _bcoo_fromdense_abstract_eval(mat, *, nse, n_batch, n_dense, index_dtype):
  """Abstract evaluation: derive data/indices avals from the dense aval."""
  n_sparse = mat.ndim - n_batch - n_dense
  # data keeps the batch dims, then nse, then the dense (block) dims.
  data_aval = core.ShapedArray(
      mat.shape[:n_batch] + (nse,) + mat.shape[n_batch + n_sparse:], mat.dtype)
  # indices keep the batch dims, then one (nse, n_sparse) index table.
  index_aval = core.ShapedArray(
      mat.shape[:n_batch] + (nse, n_sparse), index_dtype)
  return data_aval, index_aval
def _bcoo_fromdense_jvp(primals, tangents, | |
<gh_stars>1-10
# pylint: skip-file
# -*- coding: utf-8 -*-
# Author: trummerjo
# Module: MSLHttpRequestHandler
# Created on: 26.01.2017
# License: MIT https://goo.gl/5bMj3H
import re
import sys
import zlib
import gzip
import json
import time
import base64
import random
from Cryptodome.Random import get_random_bytes
from Cryptodome.Hash import HMAC, SHA256
from Cryptodome.Cipher import PKCS1_OAEP
from Cryptodome.PublicKey import RSA
from Cryptodome.Util import Padding
from Cryptodome.Cipher import AES
from StringIO import StringIO
from datetime import datetime
import xbmcvfs
import requests
import xml.etree.ElementTree as ET
def base64key_decode(payload):
    """Decode a URL-safe base64 string that may be missing its '=' padding."""
    remainder = len(payload) % 4
    if remainder == 1:
        # A base64 string can never have length 4k + 1.
        raise ValueError('Invalid base64 string')
    padded = payload + '=' * (-len(payload) % 4)
    return base64.urlsafe_b64decode(padded.encode('utf-8'))
class MSL(object):
    # NOTE: these are class-level attributes, shared across MSL instances.
    # Is a handshake already performed and the keys loaded
    handshake_performed = False
    # Context ids from the last manifest response, reused by get_license().
    last_drm_context = ''
    last_playback_context = ''
    current_message_id = 0
    # One HTTP session and CSPRNG shared by all MSL requests.
    session = requests.session()
    rndm = random.SystemRandom()
    tokens = []
    base_url = 'http://www.netflix.com/api/msl/NFCDCH-LX/cadmium/'
    endpoints = {
        'manifest': base_url + 'manifest',
        'license': base_url + 'license'
    }
    def __init__(self, kodi_helper):
        """
        Check for existing crypto material and initialize accordingly.

        Prefers cached MSL tokens, falls back to a cached RSA key, and
        generates a fresh key pair if neither exists.

        :param kodi_helper: helper object providing paths, settings and logging
        """
        self.kodi_helper = kodi_helper
        try:
            xbmcvfs.mkdir(path=self.kodi_helper.msl_data_path)
        except OSError:
            # Directory already exists (or cannot be created) — best effort.
            pass
        if self.file_exists(self.kodi_helper.msl_data_path, 'msl_data.json'):
            self.init_msl_data()
        elif self.file_exists(self.kodi_helper.msl_data_path, 'rsa_key.bin'):
            self.init_rsa_keys()
        else:
            self.init_generate_rsa_keys()
    def init_msl_data(self):
        """Load previously saved MSL tokens; no new handshake is needed then."""
        self.kodi_helper.log(msg='MSL Data exists. Use old Tokens.')
        self.__load_msl_data()
        self.handshake_performed = True
    def init_rsa_keys(self):
        """Load the cached RSA key pair and handshake if an ESN is available."""
        self.kodi_helper.log(msg='RSA Keys do already exist load old ones')
        self.__load_rsa_keys()
        if self.kodi_helper.get_esn():
            self.__perform_key_handshake()
    def init_generate_rsa_keys(self):
        """Generate and persist a fresh 2048-bit RSA key pair, then handshake."""
        self.kodi_helper.log(msg='Create new RSA Keys')
        # Create new Key Pair and save
        self.rsa_key = RSA.generate(2048)
        self.__save_rsa_keys()
        if self.kodi_helper.get_esn():
            self.__perform_key_handshake()
    def perform_key_handshake(self):
        """Public wrapper around the private key-handshake implementation."""
        self.__perform_key_handshake()
def load_manifest(self, viewable_id):
"""
Loads the manifets for the given viewable_id and
returns a mpd-XML-Manifest
:param viewable_id: The id of of the viewable
:return: MPD XML Manifest or False if no success
"""
manifest_request_data = {
'method': 'manifest',
'lookupType': 'PREPARE',
'viewableIds': [viewable_id],
'profiles': [
# Video
"playready-h264bpl30-dash",
"playready-h264mpl30-dash",
"playready-h264mpl31-dash",
"playready-h264mpl40-dash",
# Audio
'heaac-2-dash',
# Subtiltes
# 'dfxp-ls-sdh',
'simplesdh',
# 'nflx-cmisc',
# Unkown
'BIF240',
'BIF320'
],
'drmSystem': 'widevine',
'appId': '14673889385265',
'sessionParams': {
'pinCapableClient': False,
'uiplaycontext': 'null'
},
'sessionId': '14673889385265',
'trackId': 0,
'flavor': 'PRE_FETCH',
'secureUrls': False,
'supportPreviewContent': True,
'forceClearStreams': False,
'languages': ['de-DE'],
'clientVersion': '4.0004.899.011',
'uiVersion': 'akira'
}
# add hevc profiles if setting is set
if self.kodi_helper.use_hevc() is True:
hevc = 'hevc-main-'
main10 = 'hevc-main10-'
prk = 'dash-cenc-prk'
cenc = 'dash-cenc'
ctl = 'dash-cenc-tl'
hdr = 'hevc-hdr-main10-'
dv = 'hevc-dv-main10-'
dv5 = 'hevc-dv5-main10-'
manifest_request_data['profiles'].append(main10 + 'L41-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L50-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L51-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L30-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L31-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L40-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L41-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L50-' + cenc)
manifest_request_data['profiles'].append(hevc + 'L51-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L30-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L31-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L40-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L41-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L50-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L51-' + cenc)
manifest_request_data['profiles'].append(main10 + 'L30-' + prk)
manifest_request_data['profiles'].append(main10 + 'L31-' + prk)
manifest_request_data['profiles'].append(main10 + 'L40-' + prk)
manifest_request_data['profiles'].append(main10 + 'L41-' + prk)
manifest_request_data['profiles'].append(hevc + 'L30-L31-' + ctl)
manifest_request_data['profiles'].append(hevc + 'L31-L40-' + ctl)
manifest_request_data['profiles'].append(hevc + 'L40-L41-' + ctl)
manifest_request_data['profiles'].append(hevc + 'L50-L51-' + ctl)
manifest_request_data['profiles'].append(main10 + 'L30-L31-' + ctl)
manifest_request_data['profiles'].append(main10 + 'L31-L40-' + ctl)
manifest_request_data['profiles'].append(main10 + 'L40-L41-' + ctl)
manifest_request_data['profiles'].append(main10 + 'L50-L51-' + ctl)
manifest_request_data['profiles'].append(dv + 'L30-' + cenc)
manifest_request_data['profiles'].append(dv + 'L31-' + cenc)
manifest_request_data['profiles'].append(dv + 'L40-' + cenc)
manifest_request_data['profiles'].append(dv + 'L41-' + cenc)
manifest_request_data['profiles'].append(dv + 'L50-' + cenc)
manifest_request_data['profiles'].append(dv + 'L51-' + cenc)
manifest_request_data['profiles'].append(dv5 + 'L30-' + prk)
manifest_request_data['profiles'].append(dv5 + 'L31-' + prk)
manifest_request_data['profiles'].append(dv5 + 'L40-' + prk)
manifest_request_data['profiles'].append(dv5 + 'L41-' + prk)
manifest_request_data['profiles'].append(dv5 + 'L50-' + prk)
manifest_request_data['profiles'].append(dv5 + 'L51-' + prk)
manifest_request_data['profiles'].append(hdr + 'L30-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L31-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L40-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L41-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L50-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L51-' + cenc)
manifest_request_data['profiles'].append(hdr + 'L30-' + prk)
manifest_request_data['profiles'].append(hdr + 'L31-' + prk)
manifest_request_data['profiles'].append(hdr + 'L40-' + prk)
manifest_request_data['profiles'].append(hdr + 'L41-' + prk)
manifest_request_data['profiles'].append(hdr + 'L50-' + prk)
manifest_request_data['profiles'].append(hdr + 'L51-' + prk)
# Check if dolby sound is enabled and add to profles
if self.kodi_helper.get_dolby_setting():
manifest_request_data['profiles'].append('ddplus-2.0-dash')
manifest_request_data['profiles'].append('ddplus-5.1-dash')
request_data = self.__generate_msl_request_data(manifest_request_data)
try:
resp = self.session.post(self.endpoints['manifest'], request_data)
except:
resp = None
exc = sys.exc_info()
msg = '[MSL][POST] Error {} {}'
self.kodi_helper.log(msg=msg.format(exc[0], exc[1]))
if resp:
try:
# if the json() does not fail we have an error because
# the manifest response is a chuncked json response
resp.json()
self.kodi_helper.log(
msg='Error getting Manifest: ' + resp.text)
return False
except ValueError:
# json() failed so parse the chunked response
self.kodi_helper.log(
msg='Got chunked Manifest Response: ' + resp.text)
resp = self.__parse_chunked_msl_response(resp.text)
self.kodi_helper.log(
msg='Parsed chunked Response: ' + json.dumps(resp))
data = self.__decrypt_payload_chunks(resp['payloads'])
return self.__tranform_to_dash(data)
return False
    def get_license(self, challenge, sid):
        """
        Requests and returns a license for the given challenge and sid.

        Uses the playback/DRM context ids captured from the most recent
        manifest response.

        :param challenge: The base64 encoded challenge
        :param sid: The session id paired to the challenge
        :return: Base64 representation of the license key, or False on failure
        """
        license_request_data = {
            'method': 'license',
            'licenseType': 'STANDARD',
            'clientVersion': '4.0004.899.011',
            'uiVersion': 'akira',
            'languages': ['de-DE'],
            'playbackContextId': self.last_playback_context,
            'drmContextIds': [self.last_drm_context],
            'challenges': [{
                'dataBase64': challenge,
                'sessionId': sid
            }],
            'clientTime': int(time.time()),
            'xid': int((int(time.time()) + 0.1612) * 1000)
        }
        request_data = self.__generate_msl_request_data(license_request_data)
        try:
            resp = self.session.post(self.endpoints['license'], request_data)
        except:
            # NOTE(review): bare except also catches SystemExit and
            # KeyboardInterrupt — consider narrowing to Exception.
            resp = None
            exc = sys.exc_info()
            self.kodi_helper.log(
                msg='[MSL][POST] Error {} {}'.format(exc[0], exc[1]))
        if resp:
            try:
                # If is valid json the request for the licnese failed
                resp.json()
                self.kodi_helper.log(msg='Error getting license: '+resp.text)
                return False
            except ValueError:
                # json() failed so we have a chunked json response
                resp = self.__parse_chunked_msl_response(resp.text)
                data = self.__decrypt_payload_chunks(resp['payloads'])
                if data['success'] is True:
                    return data['result']['licenses'][0]['data']
                else:
                    self.kodi_helper.log(
                        msg='Error getting license: ' + json.dumps(data))
                    return False
        return False
    def __decrypt_payload_chunks(self, payloadchunks):
        """Decrypt and reassemble MSL payload chunks into one JSON object.

        Each chunk wraps a base64 encryption envelope; its AES-CBC
        ciphertext is decrypted with the session encryption key, unpadded,
        optionally gunzipped, and the plaintext pieces concatenated.

        :param payloadchunks: list of raw JSON chunk strings
        :return: the decoded payload data as a Python object
        """
        decrypted_payload = ''
        for chunk in payloadchunks:
            payloadchunk = json.JSONDecoder().decode(chunk)
            payload = payloadchunk.get('payload')
            decoded_payload = base64.standard_b64decode(payload)
            encryption_envelope = json.JSONDecoder().decode(decoded_payload)
            # Decrypt the text
            cipher = AES.new(
                self.encryption_key,
                AES.MODE_CBC,
                base64.standard_b64decode(encryption_envelope['iv']))
            ciphertext = encryption_envelope.get('ciphertext')
            plaintext = cipher.decrypt(base64.standard_b64decode(ciphertext))
            # unpad the plaintext (PKCS#7, 16-byte blocks)
            plaintext = json.JSONDecoder().decode(Padding.unpad(plaintext, 16))
            data = plaintext.get('data')
            # uncompress data if compressed
            if plaintext.get('compressionalgo') == 'GZIP':
                # 16 + MAX_WBITS tells zlib to expect a gzip header.
                decoded_data = base64.standard_b64decode(data)
                data = zlib.decompress(decoded_data, 16 + zlib.MAX_WBITS)
            else:
                data = base64.standard_b64decode(data)
            decrypted_payload += data
        # Combined payload is itself JSON: element [1] holds payload.data (base64).
        decrypted_payload = json.JSONDecoder().decode(decrypted_payload)[1]['payload']['data']
        decrypted_payload = base64.standard_b64decode(decrypted_payload)
        return json.JSONDecoder().decode(decrypted_payload)
def __tranform_to_dash(self, manifest):
self.save_file(
msl_data_path=self.kodi_helper.msl_data_path,
filename='manifest.json',
content=json.dumps(manifest))
manifest = manifest['result']['viewables'][0]
self.last_playback_context = manifest['playbackContextId']
self.last_drm_context = manifest['drmContextId']
# Check for pssh
pssh = ''
if 'psshb64' in manifest:
if len(manifest['psshb64']) >= 1:
pssh = manifest['psshb64'][0]
seconds = manifest['runtime']/1000
init_length = seconds / 2 * 12 + 20*1000
duration = "PT"+str(seconds)+".00S"
root = ET.Element('MPD')
root.attrib['xmlns'] = 'urn:mpeg:dash:schema:mpd:2011'
root.attrib['xmlns:cenc'] = 'urn:mpeg:cenc:2013'
root.attrib['mediaPresentationDuration'] = duration
period = ET.SubElement(root, 'Period', start='PT0S', duration=duration)
# One Adaption Set for Video
for video_track in manifest['videoTracks']:
video_adaption_set = ET.SubElement(
parent=period,
tag='AdaptationSet',
mimeType='video/mp4',
contentType="video")
# Content Protection
protection = ET.SubElement(
parent=video_adaption_set,
tag='ContentProtection',
schemeIdUri='urn:uuid:EDEF8BA9-79D6-4ACE-A3C8-27DCD51D21ED')
ET.SubElement(
parent=protection,
tag='widevine:license',
robustness_level='HW_SECURE_CODECS_REQUIRED')
if pssh is not '':
ET.SubElement(protection, 'cenc:pssh').text = pssh
for downloadable in video_track['downloadables']:
codec = 'h264'
if 'hevc' in downloadable['contentProfile']:
codec = 'hevc'
hdcp_versions = '0.0'
for hdcp in downloadable['hdcpVersions']:
if hdcp != 'none':
hdcp_versions = hdcp
rep = ET.SubElement(
parent=video_adaption_set,
tag='Representation',
width=str(downloadable['width']),
height=str(downloadable['height']),
bandwidth=str(downloadable['bitrate']*1024),
hdcp=hdcp_versions,
nflxContentProfile=str(downloadable['contentProfile']),
codecs=codec,
mimeType='video/mp4')
# BaseURL
base_url = self.__get_base_url(downloadable['urls'])
ET.SubElement(rep, 'BaseURL').text = base_url
# Init an Segment block
segment_base = ET.SubElement(
parent=rep,
tag='SegmentBase',
indexRange='0-' + str(init_length),
indexRangeExact='true')
ET.SubElement(
parent=segment_base,
tag='Initialization',
range='0-' + str(init_length))
# Multiple Adaption Set for audio
for audio_track in manifest['audioTracks']:
impaired = 'false'
if audio_track.get('trackType') == 'ASSISTIVE':
impaired = 'true'
audio_adaption_set = ET.SubElement(
parent=period,
tag='AdaptationSet',
lang=audio_track['bcp47'],
contentType='audio',
mimeType='audio/mp4',
impaired=impaired)
for downloadable in audio_track['downloadables']:
codec = 'aac'
self.kodi_helper.log(msg=downloadable)
is_dplus2 = downloadable['contentProfile'] == 'ddplus-2.0-dash'
is_dplus5 = downloadable['contentProfile'] == 'ddplus-5.1-dash'
if is_dplus2 or is_dplus5:
codec = 'ec-3'
self.kodi_helper.log(msg='codec is: ' + codec)
rep = ET.SubElement(
parent=audio_adaption_set,
tag='Representation',
codecs=codec,
bandwidth=str(downloadable['bitrate']*1024),
mimeType='audio/mp4')
# AudioChannel Config
uri = 'urn:mpeg:dash:23003:3:audio_channel_configuration:2011'
ET.SubElement(
parent=rep,
tag='AudioChannelConfiguration',
schemeIdUri=uri,
value=str(audio_track.get('channelsCount')))
# BaseURL
base_url = self.__get_base_url(downloadable['urls'])
ET.SubElement(rep, 'BaseURL').text = base_url
# Index range
segment_base = ET.SubElement(
parent=rep,
tag='SegmentBase',
indexRange='0-' + str(init_length),
indexRangeExact='true')
ET.SubElement(
parent=segment_base,
tag='Initialization',
range='0-' + str(init_length))
# Multiple Adaption Sets for subtiles
for text_track in manifest.get('textTracks'):
is_downloadables = 'downloadables' not in text_track
if is_downloadables or text_track.get('downloadables') is None:
continue
subtiles_adaption_set = ET.SubElement(
parent=period,
tag='AdaptationSet',
lang=text_track.get('bcp47'),
codecs='stpp',
contentType='text',
mimeType='application/ttml+xml')
for downloadable in text_track['downloadables']:
rep = ET.SubElement(
parent=subtiles_adaption_set,
tag='Representation',
nflxProfile=downloadable.get('contentProfile'))
base_url = self.__get_base_url(downloadable['urls'])
ET.SubElement(rep, 'BaseURL').text = base_url
xml = ET.tostring(root, encoding='utf-8', | |
import os
import pyembroidery.A10oReader as A10oReader
import pyembroidery.A100Reader as A100Reader
# import pyembroidery.ArtReader as ArtReader
import pyembroidery.BroReader as BroReader
import pyembroidery.ColReader as ColReader
import pyembroidery.ColWriter as ColWriter
import pyembroidery.CsvReader as CsvReader
import pyembroidery.CsvWriter as CsvWriter
import pyembroidery.DatReader as DatReader
import pyembroidery.DsbReader as DsbReader
import pyembroidery.DstReader as DstReader
import pyembroidery.DstWriter as DstWriter
import pyembroidery.DszReader as DszReader
import pyembroidery.EdrReader as EdrReader
import pyembroidery.EdrWriter as EdrWriter
import pyembroidery.EmdReader as EmdReader
import pyembroidery.ExpReader as ExpReader
import pyembroidery.ExpWriter as ExpWriter
import pyembroidery.ExyReader as ExyReader
import pyembroidery.FxyReader as FxyReader
import pyembroidery.GcodeReader as GcodeReader
import pyembroidery.GcodeWriter as GcodeWriter
import pyembroidery.GtReader as GtReader
import pyembroidery.HusReader as HusReader
import pyembroidery.InbReader as InbReader
import pyembroidery.InfReader as InfReader
import pyembroidery.InfWriter as InfWriter
import pyembroidery.JefReader as JefReader
import pyembroidery.JefWriter as JefWriter
import pyembroidery.JpxReader as JpxReader
import pyembroidery.JsonReader as JsonReader
import pyembroidery.JsonWriter as JsonWriter
import pyembroidery.KsmReader as KsmReader
import pyembroidery.MaxReader as MaxReader
import pyembroidery.MitReader as MitReader
import pyembroidery.NewReader as NewReader
import pyembroidery.PcdReader as PcdReader
import pyembroidery.PcmReader as PcmReader
import pyembroidery.PcqReader as PcqReader
import pyembroidery.PcsReader as PcsReader
import pyembroidery.PecReader as PecReader
import pyembroidery.PecWriter as PecWriter
import pyembroidery.PesReader as PesReader
import pyembroidery.PesWriter as PesWriter
import pyembroidery.PhbReader as PhbReader
import pyembroidery.PhcReader as PhcReader
import pyembroidery.PmvReader as PmvReader
import pyembroidery.PmvWriter as PmvWriter
import pyembroidery.PngWriter as PngWriter
import pyembroidery.SewReader as SewReader
import pyembroidery.ShvReader as ShvReader
import pyembroidery.SpxReader as SpxReader
import pyembroidery.StcReader as StcReader
import pyembroidery.StxReader as StxReader
import pyembroidery.SvgWriter as SvgWriter
import pyembroidery.TapReader as TapReader
import pyembroidery.TbfReader as TbfReader
import pyembroidery.TxtWriter as TxtWriter
import pyembroidery.U01Reader as U01Reader
import pyembroidery.U01Writer as U01Writer
import pyembroidery.Vp3Reader as Vp3Reader
import pyembroidery.Vp3Writer as Vp3Writer
import pyembroidery.XxxReader as XxxReader
import pyembroidery.XxxWriter as XxxWriter
# import pyembroidery.ZhsReader as ZhsReader
import pyembroidery.ZxyReader as ZxyReader
from .EmbEncoder import Transcoder as Normalizer
from .EmbFunctions import *
from .EmbThread import EmbThread
class EmbPattern:
def __init__(self, *args, **kwargs):
self.stitches = [] # type: list
self.threadlist = [] # type: list
self.extras = {} # type: dict
# filename, name, category, author, keywords, comments, are typical
self._previousX = 0 # type: float
self._previousY = 0 # type: float
len_args = len(args)
if len_args >= 1:
arg0 = args[0]
if isinstance(arg0, EmbPattern):
self.stitches = arg0.stitches[:]
self.threadlist = arg0.threadlist[:]
self.extras.update(arg0.extras)
self._previousX = arg0._previousX
self._previousY = arg0._previousY
return
if len(args) >= 2:
settings = args[1]
elif "settings" in kwargs:
settings = kwargs["settings"]
else:
settings = kwargs
if isinstance(arg0, str):
EmbPattern.static_read(arg0, settings=settings, pattern=self)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
if not isinstance(other, EmbPattern):
return False
if self.stitches != other.stitches:
return False
if self.threadlist != other.threadlist:
return False
if self.extras != other.extras:
return False
return True
def __str__(self):
if "name" in self.extras:
return "EmbPattern %s (commands: %3d, threads: %3d)" % (
self.extras["name"],
len(self.stitches),
len(self.threadlist),
)
return "EmbPattern (commands: %3d, threads: %3d)" % (
len(self.stitches),
len(self.threadlist),
)
def __len__(self):
return len(self.stitches)
def __getitem__(self, item):
if isinstance(item, str):
return self.extras[item]
return self.stitches[item]
def __setitem__(self, key, value):
if isinstance(key, str):
self.extras[key] = value
else:
self.stitches[key] = value
def __copy__(self):
return self.copy()
def __deepcopy__(self):
return self.copy()
def __iadd__(self, other):
if isinstance(other, EmbPattern):
self.add_pattern(other)
elif isinstance(other, EmbThread) or isinstance(other, str):
self.add_thread(other)
for i in range(0, len(self.stitches)):
data = self.stitches[i][2] & COMMAND_MASK
if data == STITCH or data == SEW_TO or data == NEEDLE_AT:
self.color_change()
break # Only add color change if stitching exists.
elif isinstance(other, int):
self.add_command(other)
elif isinstance(other, list) or isinstance(other, tuple): # tuple or list
if len(other) == 0:
return
v = other[0]
if isinstance(v, list) or isinstance(
v, tuple
): # tuple or list of tuple or lists
for v in other:
x = v[0]
y = v[1]
try:
cmd = v[2]
except IndexError:
cmd = STITCH
self.add_stitch_absolute(cmd, x, y)
elif isinstance(v, complex): # tuple or list of complex
for v in other:
x = v.real
y = v.imag
self.add_stitch_absolute(STITCH, x, y)
elif isinstance(v, int) or isinstance(
v, float
): # tuple or list of numbers.
i = 0
ie = len(other)
while i < ie:
self.add_stitch_absolute(STITCH, other[i], other[i + 1])
i += 2
elif isinstance(v, str):
self.extras[v] = other[1]
else:
raise ValueError()
return self
def __add__(self, other):
p = self.copy()
p.add_pattern(other)
return p
def __radd__(self, other):
p = other.copy()
p.add_pattern(self)
return p
def copy(self):
emb_pattern = EmbPattern()
emb_pattern.stitches = self.stitches[:]
emb_pattern.threadlist = self.threadlist[:]
emb_pattern.extras.update(self.extras)
emb_pattern._previousX = self._previousX
emb_pattern._previousY = self._previousY
return emb_pattern
def clear(self):
self.stitches = []
self.threadlist = []
self.extras = {}
self._previousX = 0
self._previousY = 0
def read(self, filename, **settings):
EmbPattern.static_read(filename, settings=settings, pattern=self)
def write(self, filename, **settings):
EmbPattern.static_write(self, filename, settings=settings)
def move(self, dx=0, dy=0, position=None):
"""Move dx, dy"""
if position is None:
self.add_stitch_relative(JUMP, dx, dy)
else:
self.insert_stitch_relative(position, JUMP, dx, dy)
def move_abs(self, x, y, position=None):
"""Move absolute x, y"""
if position is None:
self.add_stitch_absolute(JUMP, x, y)
else:
self.insert(position, JUMP, x, y)
def stitch(self, dx=0, dy=0, position=None):
"""Stitch dx, dy"""
if position is None:
self.add_stitch_relative(STITCH, dx, dy)
else:
self.insert_stitch_relative(position, STITCH, dx, dy)
def stitch_abs(self, x, y, position=None):
"""Stitch absolute x, y"""
if position is None:
self.add_stitch_absolute(STITCH, x, y)
else:
self.insert(position, STITCH, x, y)
def stop(self, dx=0, dy=0, position=None):
"""Stop dx, dy"""
if position is None:
self.add_stitch_relative(STOP, dx, dy)
else:
self.insert_stitch_relative(position, STOP, dx, dy)
def trim(self, dx=0, dy=0, position=None):
"""Trim dx, dy"""
if position is None:
self.add_stitch_relative(TRIM, dx, dy)
else:
self.insert_stitch_relative(position, TRIM, dx, dy)
def color_change(self, dx=0, dy=0, position=None):
"""Color Change dx, dy"""
if position is None:
self.add_stitch_relative(COLOR_CHANGE, dx, dy)
else:
self.insert_stitch_relative(position, COLOR_CHANGE, dx, dy)
def needle_change(self, needle=0, dx=0, dy=0, position=None):
"""Needle change, needle, dx, dy"""
cmd = encode_thread_change(NEEDLE_SET, None, needle)
if position is None:
self.add_stitch_relative(cmd, dx, dy)
else:
self.insert_stitch_relative(position, cmd, dx, dy)
def sequin_eject(self, dx=0, dy=0, position=None):
"""Eject Sequin dx, dy"""
if position is None:
self.add_stitch_relative(SEQUIN_EJECT, dx, dy)
else:
self.insert_stitch_relative(position, SEQUIN_EJECT, dx, dy)
def sequin_mode(self, dx=0, dy=0, position=None):
"""Eject Sequin dx, dy"""
if position is None:
self.add_stitch_relative(SEQUIN_MODE, dx, dy)
else:
self.insert_stitch_relative(position, SEQUIN_MODE, dx, dy)
def end(self, dx=0, dy=0, position=None):
"""End Design dx, dy"""
if position is None:
self.add_stitch_relative(END, dx, dy)
else:
self.insert_stitch_relative(position, END, dx, dy)
def add_thread(self, thread):
"""Adds thread to design.
Note: this has no effect on stitching and can be done at any point."""
if isinstance(thread, EmbThread):
self.threadlist.append(thread)
else:
thread_object = EmbThread()
thread_object.set(thread)
self.threadlist.append(thread_object)
def metadata(self, name, data):
"""Adds select metadata to design.
Note: this has no effect on stitching and can be done at any point."""
self.extras[name] = data
def get_metadata(self, name, default=None):
return self.extras.get(name, default)
def bounds(self):
"""Returns the bounds of the stitch data:
min_x, min_y, max_x, max_y"""
min_x = float("inf")
min_y = float("inf")
max_x = -float("inf")
max_y = -float("inf")
for stitch in self.stitches:
if stitch[0] > max_x:
max_x = stitch[0]
if stitch[0] < min_x:
min_x = stitch[0]
if stitch[1] > max_y:
max_y = stitch[1]
if stitch[1] < min_y:
min_y = stitch[1]
return min_x, min_y, max_x, max_y
extends = bounds
extents = bounds
def count_stitch_commands(self, command):
count = 0
for stitch in self.stitches:
flags = stitch[2] & COMMAND_MASK
if flags == command:
count += 1
return count
def count_color_changes(self):
return self.count_stitch_commands(COLOR_CHANGE)
def count_needle_sets(self):
return self.count_stitch_commands(NEEDLE_SET)
def count_stitches(self):
return len(self.stitches)
def count_threads(self):
return len(self.threadlist)
@staticmethod
def get_random_thread():
thread = EmbThread()
thread.set("random")
thread.description = "Random"
return thread
def get_thread_or_filler(self, index):
if len(self.threadlist) <= index:
return self.get_random_thread()
else:
return self.threadlist[index]
def get_thread(self, index):
return self.threadlist[index]
def get_match_commands(self, command):
for stitch in self.stitches:
flags = stitch[2] & COMMAND_MASK
if flags == command:
yield stitch
def get_as_stitchblock(self):
stitchblock = []
thread = self.get_thread_or_filler(0)
thread_index = 1
for stitch in self.stitches:
flags = stitch[2] & COMMAND_MASK
if flags == STITCH:
stitchblock.append(stitch)
else:
if len(stitchblock) > 0:
yield (stitchblock, thread)
stitchblock = []
if flags == COLOR_CHANGE:
thread = self.get_thread_or_filler(thread_index)
thread_index += 1
if len(stitchblock) > 0:
yield (stitchblock, thread)
def get_as_command_blocks(self):
last_pos = 0
last_command = NO_COMMAND
for pos, stitch in enumerate(self.stitches):
command = stitch[2] & COMMAND_MASK
if command == last_command or last_command == NO_COMMAND:
last_command = command
continue
last_command = command
yield self.stitches[last_pos:pos]
last_pos = pos
yield self.stitches[last_pos:]
def get_as_colorblocks(self):
"""
Returns a generator for colorblocks. Color blocks defined with color_breaks will have
the command omitted whereas color blocks delimited with color_change will end with the
color_change command, and if delimited with needle_set, the blocks will begin the new
color block with the needle_set.
"""
thread_index = 0
colorblock_start = 0
for pos, stitch in enumerate(self.stitches):
command = stitch[2] & COMMAND_MASK
if command == COLOR_BREAK:
if colorblock_start != pos:
thread = self.get_thread_or_filler(thread_index)
thread_index += 1
yield self.stitches[colorblock_start:pos], thread
colorblock_start = pos + 1
| |
= zbes[0:idim]
for j in range(idim, kdim - 1, 1):
pbes[j] = pbes[j - 1] + p_i
return pbes
def diagram(filen, listf, dims):
    """Diagram interface script.

    Call the class fluxogram, serving as
    interface between the main script and the class for flux
    diagrams design.

    Arguments:
    - filen: the filename of the diagram flux;
    - listf: a list containing the fluxes and storages;
    - dims: the dimensions of the variables;

    Returns the sum of the kinetic-energy outflow terms (LEC).
    """
    import fluxogram
    ntp = int(dims[3])
    # Unpack the energy reservoirs and conversion terms by position.
    # Naming: ape* = available potential energy, ek* = kinetic energy;
    # suffix t/s = transient/stationary components.
    apet = listf[0]
    apes = listf[1]
    ekt = listf[2]
    eks = listf[3]
    ae2azt = listf[4]
    ae2azs = listf[5]
    a2kt = listf[6]
    a2ks = listf[7]
    at2as = listf[8]
    kt2ks = listf[9]
    ke2kzt = listf[10]
    ke2kzs = listf[11]
    # Wave index 0 is the zonal mean; indices 1:ntp-1 are eddies.
    # NOTE(review): the 1e5 factors presumably rescale the conversion
    # terms to the display units of the diagram — confirm upstream.
    apz = '{:.2f}'.format(apet[0, 0] + apes[0, 0])
    az2kz = '{:.2f}'.format(-1e5 * (a2kt[0, 0]))
    az2at = '{:.2f}'.format(-1e5 * np.nansum(ae2azt[0, 1:ntp - 1]))
    aps = '{:.2f}'.format(np.nansum(apes[0, 1:ntp - 1]))
    as2ks = '{:.2f}'.format(1e5 * np.nansum(a2ks[0, 1:ntp - 1]))
    apt = '{:.2f}'.format(np.nansum(apet[0, 1:ntp - 1]))
    at2kt = '{:.2f}'.format(1e5 * np.nansum(a2kt[0, 1:ntp - 1]))
    az2as = '{:.2f}'.format(-1e5 * np.nansum(ae2azs[0, 1:ntp - 1]))
    as2at = '{:.2f}'.format(1e5 * np.nansum(at2as[0, 1:ntp - 1]))
    # Residual in/out terms are derived so each storage budget closes.
    azin = '{:.2f}'.format((float(az2at) + float(az2as) - float(az2kz)))
    asein = '{:.2f}'.format((float(as2ks) + float(as2at) - float(az2as)))
    atein = '{:.2f}'.format(float(at2kt) - float(az2at) - float(as2at))
    k_z = '{:.2f}'.format(ekt[0, 0] + eks[0, 0])
    kte = '{:.2f}'.format(np.nansum(ekt[0, 1:ntp - 1]))
    kse = '{:.2f}'.format(np.nansum(eks[0, 1:ntp - 1]))
    kt2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzt[0, 1:ntp - 1]))
    ks2kt = '{:.2f}'.format(-1e5 * np.nansum(kt2ks[0, 1:ntp - 1]))
    ks2kz = '{:.2f}'.format(1e5 * np.nansum(ke2kzs[0, 1:ntp - 1]))
    kteout = '{:.2f}'.format(float(at2kt) - float(ks2kt) - float(kt2kz))
    kseout = '{:.2f}'.format(float(ks2kt) + float(as2ks) - float(ks2kz))
    kzout = '{:.2f}'.format(float(kt2kz) + float(ks2kz) - float(az2kz))
    # Order matters: fluxogram.draw() maps these by position.
    list_lorenz = [
        azin, apz, asein, aps, atein, apt, as2ks, at2kt, kteout, kte, kseout,
        kse, kzout, k_z, az2kz, az2at, az2as, as2at, kt2kz, ks2kt, ks2kz
    ]
    # Lay out the six energy storages (x, y in diagram units) and the
    # fluxes connecting them, plus the external in/out pseudo-storages.
    flux = fluxogram.Fluxogram(1000, 1000)
    flux.add_storage("AZ", 600, 0, 0)
    flux.add_storage("ASE", 600, 0.75, 0.25)
    flux.add_storage("ATE", 600, 1.5, 0)
    flux.add_storage("KTE", 600, 1.5, 1.5)
    flux.add_storage("KSE", 600, 0.75, 1.25)
    flux.add_storage("KZ", 600, 0, 1.5)
    flux.add_storage("AZ+", 0, 0, -1)
    flux.add_storage("ASE+", 0, 0.75, -1)
    flux.add_storage("ATE+", 0, 1.5, -1)
    flux.add_storage("KTE-", 0, 1.5, 2.5)
    flux.add_storage("KSE-", 0, 0.75, 2.5)
    flux.add_storage("KZ-", 0, 0, 2.5)
    flux.add_flux("A2KZ", flux.storages[5], flux.storages[0], 100)
    flux.add_flux("AE2AZ", flux.storages[0], flux.storages[2], 150)
    flux.add_flux("AE2AS", flux.storages[0], flux.storages[1], 60)
    flux.add_flux("AE2AT", flux.storages[1], flux.storages[2], 60)
    flux.add_flux("A2KS", flux.storages[1], flux.storages[4], 60)
    flux.add_flux("A2KT", flux.storages[2], flux.storages[3], 100)
    flux.add_flux("KE2KS", flux.storages[3], flux.storages[4], 60)
    flux.add_flux("KS2KZ", flux.storages[4], flux.storages[5], 60)
    flux.add_flux("KE2KZ", flux.storages[3], flux.storages[5], 150)
    flux.add_flux("AZ+", flux.storages[6], flux.storages[0], 60)
    flux.add_flux("ASE+", flux.storages[7], flux.storages[1], 60)
    flux.add_flux("ATE+", flux.storages[8], flux.storages[2], 60)
    flux.add_flux("KTE-", flux.storages[3], flux.storages[9], 60)
    flux.add_flux("KSE-", flux.storages[4], flux.storages[10], 60)
    flux.add_flux("KZ-", flux.storages[5], flux.storages[11], 60)
    flux.draw(filen, list_lorenz)
    # Total mechanical-energy output: sum of kinetic-energy outflows.
    lec = float(kteout) + float(kseout) + float(kzout)
    return lec
def gauaw(n_y):
    """Compute the Gaussian coefficients for the Gaussian grid conversion.

    Finds the Gaussian abscissae (roots of the Legendre polynomial of
    degree n_y, via Newton iteration) and the matching quadrature weights.

    Arguments:
    - n_y: the latitude dimension (assumed even);

    Returns:
    - psi: the n_y Gaussian abscissae, antisymmetric about the equator;
    - pgw: the corresponding Gaussian weights (mirrored likewise);
    """
    c_c = (1 - (2 / math.pi)**2) / 4
    eps = 0.00000000000001
    # Bug fix: integer division. Under Python 3, `n_y / 2` is a float and
    # cannot be used as a slice bound or a range() argument; `//` yields
    # the same value for integer inputs on Python 2 as well.
    k_k = n_y // 2
    p_a = np.zeros(n_y)
    p_a[0:k_k] = bsslzr(k_k)  # first guesses: Bessel-function zeros
    p_w = np.zeros(n_y)
    for i_l in range(k_k):
        # Initial estimate for this root of the Legendre polynomial.
        x_z = np.cos(p_a[i_l] / math.sqrt((n_y + 0.5)**2 + c_c))
        iterr = 0.
        zsp = 1.0
        while abs(zsp) > eps and iterr <= 10:
            # Evaluate P_n(x_z) with the three-term recurrence.
            pkm1 = x_z
            pkm2 = 1.0
            for n_n in range(2, n_y, 1):
                p_k = ((n_n * 2 - 1.0) * x_z * pkm1 - (n_n - 1.0) * pkm2) / n_n
                pkm2 = pkm1
                pkm1 = p_k
            pkm1 = pkm2
            # Newton step: zsp = P_n / P_n'.
            pkmrk = (n_y * (pkm1 - x_z * p_k)) / (1.0 - x_z**2)
            zsp = p_k / pkmrk
            x_z = x_z - zsp
            iterr = iterr + 1
        if iterr > 15:
            # NOTE(review): unreachable with the `iterr <= 10` loop bound
            # above; kept verbatim as the original convergence guard.
            sys.exit("*** no convergence in gauaw ***")
        p_a[i_l] = x_z
        p_w[i_l] = (2.0 * (1.0 - x_z**2)) / ((n_y**2) * (pkm1**2))
        # Mirror abscissa and weight to the southern hemisphere.
        p_a[n_y - 1 - i_l] = -p_a[i_l]
        p_w[n_y - 1 - i_l] = p_w[i_l]
    psi = p_a
    pgw = p_w
    return psi, pgw
def globall_cg(d3v, g_w, d_s, dims):
    """Compute the global and hemispheric averages.

    Arguments:
    - d3v: the 3D dataset to be averaged;
    - g_w: the gaussian weights;
    - d_s: the vertical levels;
    - dims: a list containing the sizes of the dimensions;

    Returns a (3, ntp - 1) array: row 0 the global mean, row 1 the
    northern-hemisphere mean, row 2 the southern-hemisphere mean.
    """
    nlev = int(dims[0])
    nlat = int(dims[2])
    ntp = int(dims[3])
    gmn = np.zeros([3, ntp - 1])
    aux1 = np.zeros([nlev, int(nlat / 2), ntp - 1])
    aux2 = np.zeros([nlev, int(nlat / 2), ntp - 1])
    aux1v = np.zeros([nlev, ntp - 1])
    aux2v = np.zeros([nlev, ntp - 1])
    nhem = int(nlat / 2)
    # Mass-weighting factor; G and PS are module-level constants.
    fac = 1 / G * PS / 1e5
    for l_l in range(nlev):
        for i_h in range(nhem):
            # Gaussian-weighted contributions per hemisphere.
            # NOTE(review): `i_h + nhem - 1` starts the second hemisphere
            # at index nhem-1 (re-using the last first-hemisphere latitude)
            # and never reaches index nlat-1 — looks like an off-by-one;
            # confirm against the latitude ordering of the input data.
            aux1[l_l, i_h, :] = fac * np.real(d3v[l_l, i_h, :]) * g_w[i_h]
            aux2[l_l, i_h, :] = (fac * np.real(d3v[l_l, i_h + nhem - 1, :]) *
                                 g_w[i_h + nhem - 1])
        # Weight each level's hemispheric mean by its sigma thickness d_s.
        aux1v[l_l, :] = (np.nansum(aux1[l_l, :, :], axis=0) / np.nansum(
            g_w[0:nhem]) * d_s[l_l])
        aux2v[l_l, :] = (np.nansum(aux2[l_l, :, :], axis=0) / np.nansum(
            g_w[0:nhem]) * d_s[l_l])
    gmn[1, :] = (np.nansum(aux1v, axis=0) / np.nansum(d_s))
    gmn[2, :] = (np.nansum(aux2v, axis=0) / np.nansum(d_s))
    # Global mean as the average of the two hemispheric means.
    gmn[0, :] = 0.5 * (gmn[1, :] + gmn[2, :])
    return gmn
def init(logfile, filep):
    """Ingest input fields as complex fields and initialise tables.

    Receive fields t,u,v,w as input fields in Fourier
    coefficients (time,level,wave,lon), with real as even and imaginary parts
    as odd. Convert them to complex fields for Python.

    Arguments:
    - logfile: name of the file containing the table as a .txt file;
    - filep: name of the file containing the input fields;

    Returns the complex fields ta, ua, va, wap (level-leading axis),
    the dimensions list [nlev, ntime, nlat, ntp], the pressure levels
    in Pa and the latitudes.
    """
    with open(logfile, 'w') as log:
        log.write('########################################################\n')
        log.write('# #\n')
        log.write('# LORENZ ENERGY CYCLE #\n')
        log.write('# #\n')
        log.write('########################################################\n')
    with Dataset(filep) as dataset0:
        t_a = dataset0.variables['ta'][:, :, :, :]
        u_a = dataset0.variables['ua'][:, :, :, :]
        v_a = dataset0.variables['va'][:, :, :, :]
        wap = dataset0.variables['wap'][:, :, :, :]
        lev = dataset0.variables['plev'][:]
        time = dataset0.variables['time'][:]
        lat = dataset0.variables['lat'][:]
    nfc = np.shape(t_a)[3]
    nlev = len(lev)
    ntime = len(time)
    nlat = len(lat)
    # Bug fix: integer division — ntp is a count of wavenumbers and under
    # Python 3 `nfc / 2` would make it a float.
    ntp = nfc // 2 + 1
    dims = [nlev, ntime, nlat, ntp]
    if max(lev) < 1000:
        # Pressure given in hPa: convert levels and omega to Pa.
        lev = lev * 100
        wap = wap * 100
    # Split the interleaved Fourier coefficients (even indices = real,
    # odd = imaginary) and recombine into complex arrays, moving the
    # level axis to the front.
    t_a = np.transpose(t_a, (1, 0, 2, 3))
    ta_r = t_a[:, :, :, 0::2]
    ta_i = t_a[:, :, :, 1::2]
    u_a = np.transpose(u_a, (1, 0, 2, 3))
    ua_r = u_a[:, :, :, 0::2]
    ua_i = u_a[:, :, :, 1::2]
    v_a = np.transpose(v_a, (1, 0, 2, 3))
    va_r = v_a[:, :, :, 0::2]
    va_i = v_a[:, :, :, 1::2]
    wap = np.transpose(wap, (1, 0, 2, 3))
    wap_r = wap[:, :, :, 0::2]
    wap_i = wap[:, :, :, 1::2]
    ta_c = ta_r + 1j * ta_i
    ua_c = ua_r + 1j * ua_i
    va_c = va_r + 1j * va_i
    wap_c = wap_r + 1j * wap_i
    with open(logfile, 'a+') as log:
        log.write(' \n')
        log.write(' \n')
        log.write('INPUT DATA:\n')
        log.write('-----------\n')
        log.write(' \n')
        log.write('SPECTRAL RESOLUTION : {}\n'.format(nfc))
        log.write('NUMBER OF LATITUDES : {}\n'.format(nlat))
        # Bug fix: this line was missing its trailing newline, which glued
        # the level count onto the LEVEL line below it in the log file.
        log.write('NUMBER OF LEVEL : {}\n'.format(nlev))
        log.write('LEVEL : {} Pa\n'.format(lev))
        log.write(' \n')
        log.write('WAVES:\n')
        log.write(' \n')
        log.write('(1) : 1 - {}\n'.format(NW_1))
        log.write('(2) : {} - {}\n'.format(NW_1, NW_2))
        log.write('(3) : {} - {}\n'.format(NW_2, NW_3))
        log.write(' \n')
        log.write('GLOBAL DIAGNOSTIC: \n')
        log.write(' \n')
        log.write(' I GLOBAL I NORTH I SOUTH I\n')
        log.write('------------------------------------------------------\n')
    return ta_c, ua_c, va_c, wap_c, dims, lev, lat
def makek(u_t, v_t):
    """Compute the kinetic energy reservoirs from u and v.

    Arguments:
    - u_t: a 3D zonal velocity field (spectral, complex);
    - v_t: a 3D meridional velocity field (spectral, complex);

    Returns the kinetic energy per level/latitude/wavenumber. The wave-0
    (zonal mean) component is overwritten with a 0.5 factor and, as in
    the original formulation, without conjugation.
    """
    e_k = np.real(u_t * np.conj(u_t) + v_t * np.conj(v_t))
    zonal = u_t[:, :, 0] ** 2 + v_t[:, :, 0] ** 2
    e_k[:, :, 0] = 0.5 * np.real(zonal)
    return e_k
def makea(t_t, t_g, gam):
    """Compute the available potential energy reservoirs from t.

    Arguments:
    - t_t: a 3D temperature field (spectral, complex);
    - t_g: a temperature vertical profile;
    - gam: a vertical profile of the stability parameter;

    Returns the APE per level/latitude/wavenumber; the wave-0 component
    uses the deviation of the zonal mean from the global profile.
    """
    a_e = gam[:, np.newaxis, np.newaxis] * np.real(t_t * np.conj(t_t))
    anomaly = t_t[:, :, 0] - t_g[:, np.newaxis]
    a_e[:, :, 0] = gam[:, np.newaxis] * 0.5 * np.real(anomaly * anomaly)
    return a_e
def mka2k(wap, t_t, w_g, t_g, p_l):
"""Compute the KE to APE energy conversions from t and w.
Arguments:
- wap: a 3D vertical velocity field;
- t_t: a 3D temperature field;
- w_g: | |
<gh_stars>1-10
import logging
import numpy as np
from functools import partial
from typing import Callable, Dict, Optional, Tuple, Union
from sklearn.base import clone, ClassifierMixin
from sklearn.calibration import CalibratedClassifierCV
from sklearn.exceptions import NotFittedError
from sklearn.ensemble import RandomForestClassifier
from alibi_detect.cd.base import BaseClassifierDrift
logger = logging.getLogger(__name__)
class ClassifierDriftSklearn(BaseClassifierDrift):
    def __init__(
            self,
            x_ref: np.ndarray,
            model: ClassifierMixin,
            p_val: float = .05,
            preprocess_x_ref: bool = True,
            update_x_ref: Optional[Dict[str, int]] = None,
            preprocess_fn: Optional[Callable] = None,
            preds_type: str = 'probs',
            binarize_preds: bool = False,
            train_size: Optional[float] = .75,
            n_folds: Optional[int] = None,
            retrain_from_scratch: bool = True,
            seed: int = 0,
            use_calibration: bool = False,
            calibration_kwargs: Optional[dict] = None,
            use_oob: bool = False,
            data_type: Optional[str] = None,
    ) -> None:
        """
        Classifier-based drift detector. The classifier is trained on a fraction of the combined
        reference and test data and drift is detected on the remaining data. To use all the data
        to detect drift, a stratified cross-validation scheme can be chosen.

        Parameters
        ----------
        x_ref
            Data used as reference distribution.
        model
            Sklearn classification model used for drift detection.
        p_val
            p-value used for the significance of the test.
        preprocess_x_ref
            Whether to already preprocess and store the reference data.
        update_x_ref
            Reference data can optionally be updated to the last n instances seen by the detector
            or via reservoir sampling with size n. For the former, the parameter equals {'last': n} while
            for reservoir sampling {'reservoir_sampling': n} is passed.
        preprocess_fn
            Function to preprocess the data before computing the data drift metrics.
        preds_type
            Whether the model outputs 'probs' or 'scores'.
        binarize_preds
            Whether to test for discrepancy on soft (e.g. probs/scores) model predictions directly
            with a K-S test or binarise to 0-1 prediction errors and apply a binomial test.
        train_size
            Optional fraction (float between 0 and 1) of the dataset used to train the classifier.
            The drift is detected on `1 - train_size`. Cannot be used in combination with `n_folds`.
        n_folds
            Optional number of stratified folds used for training. The model preds are then calculated
            on all the out-of-fold predictions. This allows to leverage all the reference and test data
            for drift detection at the expense of longer computation. If both `train_size` and `n_folds`
            are specified, `n_folds` is prioritized.
        retrain_from_scratch
            Whether the classifier should be retrained from scratch for each set of test data or whether
            it should instead continue training from where it left off on the previous set.
        seed
            Optional random seed for fold selection.
        use_calibration
            Whether to use calibration. Calibration can be used on top of any model.
        calibration_kwargs
            Optional additional kwargs for calibration.
            See https://scikit-learn.org/stable/modules/generated/sklearn.calibration.CalibratedClassifierCV.html
            for more details.
        use_oob
            Whether to use out-of-bag(OOB) predictions. Supported only for `RandomForestClassifier`.
        data_type
            Optionally specify the data type (tabular, image or time-series). Added to metadata.
        """
        super().__init__(
            x_ref=x_ref,
            p_val=p_val,
            preprocess_x_ref=preprocess_x_ref,
            update_x_ref=update_x_ref,
            preprocess_fn=preprocess_fn,
            preds_type=preds_type,
            binarize_preds=binarize_preds,
            train_size=train_size,
            n_folds=n_folds,
            retrain_from_scratch=retrain_from_scratch,
            seed=seed,
            data_type=data_type
        )
        # Validate early: downstream scoring only understands these two modes.
        if preds_type not in ['probs', 'scores']:
            raise ValueError("'preds_type' should be 'probs' or 'scores'")
        self.meta.update({'backend': 'sklearn'})
        # Keep the untouched user model; a configured clone is used for fitting.
        self.original_model = model
        self.use_calibration = use_calibration
        self.calibration_kwargs = dict() if calibration_kwargs is None else calibration_kwargs
        self.use_oob = use_oob
        self.model = self._clone_model()  # type: ClassifierMixin
def _has_predict_proba(self, model) -> bool:
try:
# taking self.x_ref[0].shape to overcome bot cases when self.x_ref is np.ndarray or list
model.predict_proba(np.zeros((1, ) + self.x_ref[0].shape))
has_predict_proba = True
except NotFittedError:
has_predict_proba = True
except AttributeError:
has_predict_proba = False
return has_predict_proba
def _clone_model(self):
model = clone(self.original_model)
# equivalence between `retrain_from_scratch` and `warm_start`
if not self.retrain_from_scratch:
if hasattr(model, 'warm_start'):
model.warm_start = True
logger.warning('`retrain_from_scratch=False` sets automatically the parameter `warm_start=True` '
'for the given classifier. Please consult the documentation to ensure that the '
'`warm_start=True` is applicable in the current context (i.e., for tree-based '
'models such as RandomForest, setting `warm_start=True` is not applicable since the '
'fit function expects the same dataset and an update/increase in the number of '
'estimators - previous fitted estimators will be kept frozen while the new ones '
'will be fitted).')
else:
logger.warning('Current classifier does not support `warm_start`. The model will be retrained '
'from scratch every iteration.')
else:
if hasattr(model, 'warm_start'):
model.warm_start = False
logger.warning('`retrain_from_scratch=True` sets automatically the parameter `warm_start=False`.')
# oob checks
if self.use_oob:
if not isinstance(model, RandomForestClassifier):
raise ValueError('OOB supported only for RandomForestClassifier. '
f'Received a model of type {model.__class__.__name__}')
if self.use_calibration:
self.use_calibration = False
logger.warning('Calibration cannot be used when `use_oob=True`. Setting `use_calibration=False`.')
model.oob_score = True
model.bootstrap = True
logger.warning(
'`use_oob=True` sets automatically the classifier parameters `boostrap=True` and `oob_score=True`. '
'`train_size` and `n_folds` are ignored when `use_oob=True`.'
)
else:
if isinstance(model, RandomForestClassifier):
model.oob_score = False
logger.warning('`use_oob=False` sets automatically the classifier parameters `oob_score=False`.')
# preds_type checks
if self.preds_type == 'probs':
# calibrate the model if user specified.
if self.use_calibration:
model = CalibratedClassifierCV(base_estimator=model, **self.calibration_kwargs)
logger.warning('Using calibration to obtain the prediction probabilities.')
# check if it has predict proba. Cannot be checked via `hasattr` due to the same issue in SVC (see below)
has_predict_proba = self._has_predict_proba(model)
# if the binarize_preds=True, we don't really need the probabilities as in test_probs will be rounded
# to the closest integer (i.e., to 0 or 1) according to the predicted probability. Thus, we can define
# a hard label predict_proba based on the predict method
if self.binarize_preds and (not has_predict_proba):
if not hasattr(model, 'predict'):
raise AttributeError('Trying to use a model which does not support `predict`.')
def predict_proba(self, X):
return np.eye(2)[self.predict(X).astype(np.int32)]
# add predict_proba method. Overwriting predict_proba is not possible for SVC due
# to @available_if(_check_proba)
# Check link: https://github.com/scikit-learn/scikit-learn/blob/7e1e6d09b/sklearn/svm/_base.py#L807
setattr(model, 'aux_predict_proba', partial(predict_proba, model))
elif has_predict_proba:
setattr(model, 'aux_predict_proba', model.predict_proba)
# at this point the model does not have any predict_proba, thus the test can not be performed.
if not hasattr(model, 'aux_predict_proba'):
raise AttributeError("Trying to use a model which does not support `predict_proba` with "
"`preds_type='probs'`. Set (`use_calibration=True`, `calibration_kwargs`) or "
"(`binarize_preds=True`).")
else:
if self.use_calibration:
logger.warning("No calibration is performed when `preds_type='scores'`.")
if self.binarize_preds:
raise ValueError("`binarize_preds` must be `False` when `preds_type='scores'`.")
if not hasattr(model, 'decision_function'):
raise AttributeError("Trying to use a model which does not support `decision_function` with "
"`preds_type='scores'`.")
# need to put the scores in the format expected by test function, which requires to duplicate the
# scores along axis=1
def predict_proba(self, X):
scores = self.decision_function(X).reshape(-1, 1)
return np.tile(scores, reps=2)
# add predict_proba method
setattr(model, 'aux_predict_proba', partial(predict_proba, model))
return model
def score(self, x: Union[np.ndarray, list]) -> Tuple[float, float, np.ndarray, np.ndarray]:
    """
    Drift score for a batch: a classifier is trained to separate reference data
    from the batch, and its out-of-fold performance is turned into a test statistic.

    Parameters
    ----------
    x
        Batch of instances.

    Returns
    -------
    p-value, a distance between the classifier's out-of-fold performance and the
    performance expected under no drift, and the out-of-fold prediction
    probabilities on the reference and test data.
    """
    # RandomForest + OOB enabled takes the out-of-bag shortcut; everything else
    # goes through the explicit cross-validation path.
    oob_path = self.use_oob and isinstance(self.model, RandomForestClassifier)
    return self._score_rf(x) if oob_path else self._score(x)
def _score(self, x: Union[np.ndarray, list]) -> Tuple[float, float, np.ndarray, np.ndarray]:
    """
    Cross-validated scoring path: train one classifier per fold, collect
    out-of-fold prediction probabilities, and test them against the labels.
    """
    x_ref, x = self.preprocess(x)
    n_ref, n_cur = len(x_ref), len(x)
    x, y, splits = self.get_splits(x_ref, x, return_splits=True)  # type: ignore

    def take(data, indices):
        # Fancy-index arrays directly; lists need an explicit gather.
        if isinstance(data, np.ndarray):
            return data[indices]
        if isinstance(data, list):
            return [data[i] for i in indices]
        raise TypeError(f'x needs to be of type np.ndarray or list and not {type(data)}.')

    # One freshly fitted model per fold; predictions are made only on the
    # held-out indices so every instance gets an out-of-fold probability.
    fold_probs, fold_indices = [], []
    for train_idx, test_idx in splits:
        self.model.fit(take(x, train_idx), y[train_idx])
        fold_probs.append(self.model.aux_predict_proba(take(x, test_idx)))
        fold_indices.append(test_idx)
    probs_oof = np.concatenate(fold_probs, axis=0)
    idx_oof = np.concatenate(fold_indices, axis=0)
    p_val, dist = self.test_probs(y[idx_oof], probs_oof, n_ref, n_cur)
    # Restore the original instance order before splitting back into
    # reference ([:n_ref]) and test ([n_ref:]) probabilities.
    ordered = probs_oof[np.argsort(idx_oof)]
    return p_val, dist, ordered[:n_ref, 1], ordered[n_ref:, 1]
def _score_rf(self, x: Union[np.ndarray, list]) -> Tuple[float, float, np.ndarray, np.ndarray]:
    """
    Out-of-bag scoring path: fit the RandomForest once on all data and use its
    OOB decision function in place of explicit cross-validation folds.
    """
    x_ref, x = self.preprocess(x)
    # return_splits=False: no CV folds needed, only the concatenated data/labels,
    # since the forest's bootstrap OOB predictions act as out-of-fold predictions
    x, y = self.get_splits(x_ref, x, return_splits=False)  # type: ignore
    self.model.fit(x, y)
    # it is possible that some inputs do not have OOB scores. This probably means
    # that too few trees were used to compute any reliable estimates.
    index_oob = np.where(np.all(~np.isnan(self.model.oob_decision_function_), axis=1))[0]
    probs_oob = self.model.oob_decision_function_[index_oob]
    y_oob = y[index_oob]
    # comparison
#!/usr/bin/python
# vim: ts=4 sw=4 et
"""Class for handling argparse parsers. Methods are configured as subparsers."""
import argparse
import os
import subprocess
import sys
import textwrap
import yaml
from vctools import Logger
# pylint: disable=too-many-instance-attributes
class ArgParser(Logger):
"""Argparser class. It handles the user inputs and config files."""
def __init__(self):
    """
    Build the top-level parser and load defaults from vctoolsrc dotfiles.

    Raises:
        ValueError: If no vctoolsrc.yaml dotfile could be loaded.
    """
    self.syspath = sys.path[0]
    # Short git revision of the checkout doubles as the version string.
    # check_output returns bytes on Python 3; decode and strip the trailing
    # newline so `--version` prints cleanly.
    self.gitrev = subprocess.check_output(
        [
            'git', '--git-dir', self.syspath + '/.git', 'rev-parse', '--short', 'HEAD'
        ]
    ).decode('utf-8').strip()
    self.__version__ = self.gitrev
    self.help = None
    self.opts = None
    self.dotrc = None
    self.parser = argparse.ArgumentParser(
        description='vCenter Tools CLI'
    )
    self.parser.add_argument(
        '--version', '-v', action='version',
        version=self.__version__,
        help='version number'
    )
    # subparser
    self.subparsers = self.parser.add_subparsers(metavar='')
    # override options with defaults in dotfiles; a later file that loads
    # replaces (not merges) an earlier one
    rootdir = os.path.dirname(os.path.abspath(__file__ + '/../'))
    rc_files = [rootdir + '/vctoolsrc.yaml', '~/.vctoolsrc.yaml']
    for rc_file in rc_files:
        try:
            # `with` closes the handle promptly (the original leaked it);
            # safe_load avoids arbitrary object construction from YAML and
            # the deprecated Loader-less yaml.load call.
            with open(os.path.expanduser(rc_file)) as dotrc_yaml:
                self.dotrc = yaml.safe_load(dotrc_yaml)
        except IOError:
            pass
    if not self.dotrc:
        raise ValueError('Cannot load dotrc file.')
@staticmethod
def _mkdict(args):
    """
    Convert an argparse ``key=value[,keyN=valueN]`` string into a dict.

    Values are coerced: decimal digit strings become ``int``, the literal
    strings ``True``/``False`` become booleans, anything else stays a string.

    Example:
        key1=val1,key2=val2,key3=val3

    Args:
        args (str): comma separated key=value pairs.

    Returns:
        dict: parsed keys mapped to coerced values.
    """
    # split on the first '=' only, so values may themselves contain '='
    params = dict(pair.split('=', 1) for pair in args.split(','))
    # dict.iteritems() existed only on Python 2; items() works on 2 and 3
    for key, value in params.items():
        if value.isdigit():
            params[key] = int(value)
        elif value == 'True':
            params[key] = True
        elif value == 'False':
            params[key] = False
    return params
@classmethod
def general(cls, **defaults):
    """Build the shared parent parser holding the host positional and auth options."""
    parser = argparse.ArgumentParser(add_help=False)
    # positional vCenter host
    parser.add_argument(
        'host',
        help='vCenter host'
    )
    group = parser.add_argument_group('general options')
    # (flag, help) pairs, in --help display order
    for flag, helptext in (
        ('--passwd-file', 'GPG encrypted passwd file'),
        ('--user', 'username'),
        ('--domain', 'domain'),
        ('--passwd', 'password'),
    ):
        group.add_argument(flag, metavar='', help=helptext)
    if defaults:
        parser.set_defaults(**defaults)
    return parser
@classmethod
def logging(cls, **defaults):
    """Build the shared parent parser holding logging options."""
    parser = argparse.ArgumentParser(add_help=False)
    group = parser.add_argument_group('logging options')
    # (flag, extra argparse kwargs) pairs, in --help display order
    for flag, kwargs in (
        ('--level',
         dict(choices=['info', 'debug'], default='info',
              help='set logging level choices=[%(choices)s] default: %(default)s')),
        ('--console-level',
         dict(choices=['info', 'error', 'debug'], default='error',
              help='set console log level choices=[%(choices)s] default: %(default)s')),
        ('--console-stream',
         dict(choices=['stdout', 'stderr'], default='stderr',
              help='set console logging stream output choices=[%(choices)s] default: %(default)s')),
        ('--logfile',
         dict(default='/var/log/vctools.log',
              help='set logging path: %(default)s')),
    ):
        group.add_argument(flag, metavar='', **kwargs)
    if defaults:
        parser.set_defaults(**defaults)
    return parser
def add(self, *parents, **defaults):
    """ Add Hardware to Virtual Machines """
    # NOTE: the docstring above is shown as the subcommand description
    # (via self.add.__doc__), so its text is part of the CLI contract.
    usage = """
    help: vctools add -h
    vctools add <vc> <name> --device <options>
    # add a network card
    vctools add <vc> <name> --device nic --network <network>
    """
    parser = self.subparsers.add_parser(
        'add',
        parents=list(parents),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=textwrap.dedent(usage),
        description=textwrap.dedent(self.add.__doc__),
        help='Add Hardware to Virtual Machines.'
    )
    parser.set_defaults(cmd='add')
    parser.add_argument(
        '--datacenter', metavar='', default='Linux',
        help='vCenter Datacenter. default: %(default)s'
    )
    type_opts = parser.add_argument_group('type options')
    # positional: target VM
    parser.add_argument(
        'name',
        help='Name attribute of Virtual Machine object, i.e. hostname'
    )
    type_opts.add_argument(
        '--device', metavar='', choices=['nic'],
        help='Add hardware devices on Virtual Machines. choices=[%(choices)s]',
    )
    nic_opts = parser.add_argument_group('nic options')
    for flag, kwargs in (
        ('--network', dict(help='The network of the interface, i.e. vlan_1234_network')),
        ('--driver', dict(choices=['vmxnet3', 'e1000'],
                          help='The network driver, default: vmxnet3')),
    ):
        nic_opts.add_argument(flag, metavar='', **kwargs)
    if defaults:
        parser.set_defaults(**defaults)
def create(self, *parents, **defaults):
    """Build the subparser that creates Virtual Machines from YAML configs."""
    create_parser = self.subparsers.add_parser(
        'create', parents=list(parents),
        description='Example: vctools create <vc> <config> <configN>',
        help='Create Virtual Machines'
    )
    create_parser.set_defaults(cmd='create')
    create_parser.add_argument(
        # the Python 2 builtin `file` no longer exists on Python 3;
        # argparse.FileType works on both and still yields open file objects
        'config', nargs='+', type=argparse.FileType('r'),
        help='YaML config for creating new Virtual Machines.'
    )
    create_parser.add_argument(
        '--datacenter', metavar='', default='Linux',
        help='vCenter Datacenter. default: %(default)s'
    )
    # NOTE(review): store_true with default=True means --power can never turn
    # powering-on off; the option is effectively always True. Left unchanged
    # to preserve the existing CLI contract.
    create_parser.add_argument(
        '--power', action='store_true', default=True,
        help='Power on the VM after creation. default: %(default)s'
    )
    if defaults:
        create_parser.set_defaults(**defaults)
def mount(self, *parents, **defaults):
    """Build the subparser that mounts an ISO onto a VM CD-Rom device."""
    parser = self.subparsers.add_parser(
        'mount', parents=list(parents),
        help='Mount ISO to CD-Rom device'
    )
    parser.set_defaults(cmd='mount')
    # (flag, extra argparse kwargs), in --help display order
    for flag, kwargs in (
        ('--datastore', dict(help='Name of datastore where the ISO is located.')),
        ('--path', dict(help='Path inside datastore where the ISO is located.')),
        ('--name', dict(nargs='+', help='name attribute of Virtual Machine object.')),
    ):
        parser.add_argument(flag, metavar='', **kwargs)
    if defaults:
        parser.set_defaults(**defaults)
def power(self, *parents, **defaults):
    """Build the subparser that changes the power state of Virtual Machines."""
    parser = self.subparsers.add_parser(
        'power', parents=list(parents),
        help='Power Management for Virtual Machines'
    )
    parser.set_defaults(cmd='power')
    # positional: the desired power operation
    parser.add_argument(
        'power', choices=['on', 'off', 'reset', 'reboot', 'shutdown'],
        help='change power state of VM'
    )
    parser.add_argument(
        '--name', nargs='+', metavar='',
        help='name attribute of Virtual Machine object.'
    )
    if defaults:
        parser.set_defaults(**defaults)
def query(self, *parents, **defaults):
    """Build the subparser for querying vCenter inventory information."""
    parser = self.subparsers.add_parser(
        'query', parents=list(parents),
        help='Query Info'
    )
    parser.set_defaults(cmd='query')
    opts = parser.add_argument_group('query options')
    # (flag, argparse kwargs), in --help display order; boolean queries are
    # store_true flags, the rest take a value (empty metavar, as elsewhere)
    flag_specs = (
        ('--anti-affinity-rules', {'action': 'store_true',
                                   'help': 'Returns information about AntiAffinityRules.'}),
        ('--datastores', {'action': 'store_true',
                          'help': 'Returns information about Datastores.'}),
        ('--datastore', {'metavar': '', 'help': 'vCenter Datastore.'}),
        ('--vms', {'action': 'store_true',
                   'help': 'Returns information about Virtual Machines.'}),
        ('--folders', {'action': 'store_true',
                       'help': 'Returns information about Folders.'}),
        ('--networks', {'action': 'store_true',
                        'help': 'Returns information about Networks.'}),
        ('--clusters', {'action': 'store_true',
                        'help': 'Returns information about ComputeResources.'}),
        ('--cluster', {'metavar': '', 'help': 'vCenter ComputeResource.'}),
        ('--datacenter', {'metavar': '', 'default': 'Linux',
                          'help': 'vCenter Datacenter. default: %(default)s'}),
        ('--vmconfig', {'nargs': '+', 'metavar': '',
                        'help': 'Virtual machine config'}),
        ('--vm-by-datastore', {'action': 'store_true',
                               'help': 'List the VMs associated with datastore.'}),
        ('--vm-guest-ids', {'action': 'store_true',
                            'help': 'Show all vm guest ids.'}),
    )
    for flag, kwargs in flag_specs:
        opts.add_argument(flag, **kwargs)
    vmcfg_opts = parser.add_argument_group('vmconfig options')
    vmcfg_opts.add_argument(
        '--createcfg', metavar='',
        help='Create a build config from --vmconfig spec.'
    )
    if defaults:
        parser.set_defaults(**defaults)
def reconfig(self, *parents, **defaults):
    """ Reconfig VM Attributes and Hardware """
    # NOTE: the docstring above becomes the subcommand description
    # (via self.reconfig.__doc__), so its text is part of the CLI contract.
    usage = """
    help: vctools reconfig -h
    vctools reconfig <vc> <name> [--cfgs|--device] <options>
    # reconfigure config settings
    # lookup vmware sdk configspec for all options
    vctools reconfig <vc> <name> --cfgs memoryMB=<int>,numCPUs=<int>
    # move vm to another folder
    vctools reconfig <vc> <name> --folder <str>
    # reconfigure a disk
    vctools reconfig <vc> <name> --device disk --disk-id <int> --sizeGB <int>
    # reconfigure a network card
    vctools reconfig <vc> <name> --device nic --nic-id <int> --network <network>
    """
    parser = self.subparsers.add_parser(
        'reconfig',
        parents=list(parents),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=textwrap.dedent(usage),
        description=textwrap.dedent(self.reconfig.__doc__),
        help='Reconfigure Attributes for Virtual Machines.'
    )
    parser.set_defaults(cmd='reconfig')
    parser.add_argument(
        '--datacenter', metavar='', default='Linux',
        help='vCenter Datacenter. default: %(default)s'
    )
    type_opts = parser.add_argument_group('type options')
    # positional: target VM
    parser.add_argument(
        'name',
        help='Name attribute of Virtual Machine object, i.e. hostname'
    )
    type_opts.add_argument(
        '--device', metavar='', choices=['disk', 'nic'],
        help='Reconfigure hardware devices on Virtual Machines. choices=[%(choices)s]',
    )
    type_opts.add_argument(
        '--cfgs', metavar='', type=self._mkdict,
        help='A comma separated list of key values that represent config '
             'settings such as memory or cpu. format: key=val,keyN=valN',
    )
    type_opts.add_argument(
        '--folder', metavar='', type=str,
        help='Move the VM to another folder. It must exist. '
    )
    disk_opts = parser.add_argument_group('disk options')
    for flag, kwargs in (
        ('--disk-id', dict(type=int, help='The number that represents the disk')),
        ('--disk-prefix', dict(default='Hard disk',
                               help='The disk label prefix: default: "%(default)s"')),
        ('--sizeGB', dict(type=int, help='New size hard disk in GB')),
    ):
        disk_opts.add_argument(flag, metavar='', **kwargs)
    nic_opts = parser.add_argument_group('nic options')
    for flag, kwargs in (
        ('--nic-id', dict(type=int, help='The number that represents the network card.')),
        ('--nic-prefix', dict(default='Network adapter',
                              help='The network label prefix: default: "%(default)s"')),
        ('--network', dict(help='The network of the interface, i.e. vlan_1234_network')),
        ('--driver', dict(choices=['vmxnet3', 'e1000'],
                          help='The network driver, default: vmxnet3')),
    ):
        nic_opts.add_argument(flag, metavar='', **kwargs)
    if defaults:
        parser.set_defaults(**defaults)
def umount(self, *parents, **defaults):
    """Build the subparser that unmounts an ISO from a VM CD-Rom device."""
    parser = self.subparsers.add_parser(
        'umount', parents=list(parents),
        help='Unmount ISO from CD-Rom device'
    )
    parser.set_defaults(cmd='umount')
    # NOTE: unlike the other subcommands, --name here historically has no metavar
    parser.add_argument(
        '--name', nargs='+',
        help='name attribute of Virtual Machine object.'
    )
    if defaults:
        parser.set_defaults(**defaults)
def upload(self, *parents, **defaults):
    """Build the subparser that uploads ISO files to a vCenter datastore."""
    parser = self.subparsers.add_parser(
        'upload', parents=list(parents),
        help='Upload File'
    )
    parser.set_defaults(cmd='upload')
    # (flag, extra argparse kwargs), in --help display order
    specs = (
        ('--iso', {'nargs': '+',
                   'help': 'iso file that needs to be uploaded to vCenter.'}),
        ('--dest', {'help': 'destination folder where the iso will reside.'}),
        ('--datastore', {'default': 'ISO_Templates',
                         'help': 'datastore where the iso will reside. default: %(default)s'}),
        # NOTE(review): --verify-ssl takes a string value, so any non-empty
        # value (even "False") is truthy downstream; presumably this should be
        # a store_true flag -- left unchanged to preserve the CLI contract.
        ('--verify-ssl', {'default': False,
                          'help': 'verify SSL certificate. default: %(default)s'}),
        ('--datacenter', {'default': 'Linux',
                          'help': 'vCenter Datacenter. default: %(default)s'}),
    )
    for flag, kwargs in specs:
        parser.add_argument(flag, metavar='', **kwargs)
    if defaults:
        parser.set_defaults(**defaults)
def drs(self, *parents):
    """Distributed Resource Scheduler rules, currently only anti-affinity"""
    # NOTE: the docstring above becomes the subcommand description
    # (via self.drs.__doc__), so its text is part of the CLI contract.
    usage = """
    Cluster DRS Rules
    currently only anti-affinity rules are supported
    help: vctools drs -h
    vctools drs <vc> anti-affinity add <name> --vms <vm1 vm2...>
    vctools drs <vc> anti-affinity delete <name>
    """
    parser = self.subparsers.add_parser(
        'drs', parents=list(parents),
        formatter_class=argparse.RawDescriptionHelpFormatter,
        usage=textwrap.dedent(usage),
        description=textwrap.dedent(self.drs.__doc__),
        help='Cluster DRS rules'
    )
    parser.set_defaults(cmd='drs')
    # positionals
    parser.add_argument(
        'drs_type', choices=['anti-affinity'],
        help='options: anti-affinity (other options may come later)'
    )
    parser.add_argument(
        'function', choices=['add', 'delete'],
        help='options: add|delete'
    )
    parser.add_argument(
        'name', metavar='', type=str,
        help='Name of the DRS Rule'
    )
    # options
    parser.add_argument(
        '--vms', nargs='+', metavar='', type=str,
        help='VMs to be added to the DRS rule'
    )
    parser.add_argument(
        '--cluster', metavar='',
        help='vCenter ComputeResource'
    )
    parser.add_argument(
        '--prefix', metavar='', type=str,
        help='Cluster DRS rule name prefix'
    )
def sanitize(self, opts):
"""
Sanitize arguments. This will override the user / config input to a supported state.
Examples:
- rename files
- force booleans
- absolute path checks
Args:
opts (obj): argparse namespace parsed args
"""
# | |
<reponame>oway13/Schoolwork<gh_stars>0
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.util import dpid_to_str, str_to_dpid
from pox.lib.packet.ethernet import ethernet
from pox.lib.packet.arp import arp
from pox.lib.addresses import IPAddr, EthAddr
# Even a simple usage of the logger is much nicer than print!
log = core.getLogger()
IP_to_MAC = {}
#Part 1: Called when a packet has been sent to the controller from a switch
#Receive ARP Request packets, and send ARP Response Packets with correct MAC
#found in the IP_to_MAC table that we have built
#Using some example Code from POX Wiki's section on building ARP messages
def _handle_PacketIn(event):
    """Answer ARP requests from the static IP_to_MAC table (proxy ARP)."""
    packet = event.parsed
    in_port = event.port
    # Only ARP requests are interesting; everything else is ignored.
    if packet.type != ethernet.ARP_TYPE:
        return
    if packet.payload.opcode != arp.REQUEST:
        return
    requested_ip = str(packet.payload.protodst)
    log.info("Received ARP request: " + requested_ip)
    if requested_ip not in IP_to_MAC:
        return
    answer_mac = IP_to_MAC[requested_ip]
    # Build the ARP reply payload, swapping the request's sender/target.
    arp_reply = arp()
    arp_reply.hwsrc = answer_mac
    arp_reply.hwdst = packet.src
    arp_reply.opcode = arp.REPLY
    arp_reply.protosrc = packet.payload.protodst
    arp_reply.protodst = packet.payload.protosrc
    # Wrap it in an Ethernet frame addressed back to the requester.
    frame = ethernet()
    frame.type = ethernet.ARP_TYPE
    frame.dst = packet.src
    frame.src = answer_mac
    frame.payload = arp_reply
    # Ship the reply out of the port the request arrived on.
    msg = of.ofp_packet_out()
    msg.data = frame.pack()
    msg.actions.append(of.ofp_action_output(port=of.OFPP_IN_PORT))
    msg.in_port = in_port
    event.connection.send(msg)
    log.info("Sent ARP Reply: " + str(answer_mac))
#Part 1: Called on launch of controller
#Since we know exactly how the topology is built, we know what all of the
#IP's and MAC's of our hosts will be. So we will add all of these associations
#to our IP_to_MAC table.
def map_IP_to_MAC():
    """Pre-populate IP_to_MAC for the six known hosts of the static topology."""
    # Host i has IP 10.0.0.i and MAC 00:00:00:00:00:0i (i = 1..6)
    for host in range(1, 7):
        IP_to_MAC["10.0.0.%d" % host] = EthAddr("00:00:00:00:00:0%d" % host)
#Part 1: Called when a connection on a switch goes up
#Install rules in all switches that all ARP packets go to controller
#We will then install open flow rules in every switch as they come up,
#telling them to forward all ARP packets to the controller.
#Part 2
#Install our starting rules in the switches as defined in the description
#of the leaf-spine topology in Prog4_Description
#Hosts on the same leaf route through the leaf to the other host
#H1, H3, H5 route through S4. H2, H4, H6 route through S5
def _leaf_host_rules(left_ip, right_ip):
    """(nw_src, nw_dst, out_port) triples for a leaf whose hosts sit on ports 3/4.

    Install order matches the original: host<->host shortcuts through the leaf,
    destination-only rules toward each host, then source-based uplinks
    (left host -> S4 on port 1, right host -> S5 on port 2).
    """
    return [
        (left_ip, right_ip, 4),   # left host to right host
        (right_ip, left_ip, 3),   # right host to left host
        (None, left_ip, 3),       # anything destined to the left host
        (None, right_ip, 4),      # anything destined to the right host
        (left_ip, None, 1),       # left host's outbound traffic -> spine S4
        (right_ip, None, 2),      # right host's outbound traffic -> spine S5
    ]


def _install_ip_flow(connection, nw_src, nw_dst, out_port):
    """Install a single IPv4 forwarding rule on `connection`.

    A fresh ofp_flow_mod is built per rule: the original code reused one
    message object and kept appending to its `actions` list before every
    send, so each later rule accumulated the output actions of all earlier
    rules and emitted packets out of every previously-used port.
    """
    msg = of.ofp_flow_mod()
    msg.priority = 0x0007
    msg.match.dl_type = 0x0800  # IPv4
    msg.match.nw_src = nw_src
    msg.match.nw_dst = nw_dst
    msg.actions.append(of.ofp_action_output(port=out_port))
    connection.send(msg)


def _handle_ConnectionUp(event):
    """Install the static leaf-spine forwarding rules when a switch connects.

    Every switch first gets a high-priority rule punting ARP to the controller
    (handled by _handle_PacketIn). Leaf switches 1-3 then get host and uplink
    rules; spine switches 4-5 get destination-based rules toward each leaf.
    """
    log.info("Installing ARP flow mod in dpid:" + dpid_to_str(event.dpid))
    arp_msg = of.ofp_flow_mod()
    arp_msg.priority = 0x7000
    arp_msg.match.dl_type = ethernet.ARP_TYPE
    arp_msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
    event.connection.send(arp_msg)
    # the last hex digit of the dpid string identifies the switch (1-5)
    switch_id = dpid_to_str(event.dpid)[16]
    log.info("Installing Flow mod in dpid: " + dpid_to_str(event.dpid))
    # Spine switches forward purely on destination: leaf i hangs off port i.
    spine_rules = [
        (None, "10.0.0.1", 1), (None, "10.0.0.2", 1),  # spine to L1
        (None, "10.0.0.3", 2), (None, "10.0.0.4", 2),  # spine to L2
        (None, "10.0.0.5", 3), (None, "10.0.0.6", 3),  # spine to L3
    ]
    rules_by_switch = {
        "1": _leaf_host_rules("10.0.0.1", "10.0.0.2"),
        "2": _leaf_host_rules("10.0.0.3", "10.0.0.4"),
        "3": _leaf_host_rules("10.0.0.5", "10.0.0.6"),
        "4": spine_rules,
        "5": spine_rules,
    }
    rules = rules_by_switch.get(switch_id)
    if rules is None:
        log.info("Finished install_rules without installing any flow mods")
        return
    for nw_src, nw_dst, out_port in rules:
        _install_ip_flow(event.connection, nw_src, nw_dst, out_port)
#Part 3: Called when a link goes down between two switches
#When a link goes down, install a new rule at the switch affected to just use
#the other spine switch
def _handle_PortStatus(event):
log.info("Port status event: "+str(event.ofp.reason))
if event.ofp.reason == of.OFPPR_MODIFY:
_dpid = event.dpid
_dpid_short =dpid_to_str(_dpid)
_dpid_short = _dpid_short[16]
_port = event.port
log.info("Link failure in switch: "+str(_dpid_short)+" on port: "+str(_port))
msg = of.ofp_flow_mod()
msg.priority = 0x0007
msg.match.dl_type = 0x0800
#Link is down in S4
if _dpid_short == "4":
#Link between S4 and L1 is down
if _port == 1:
#Tell L1 to send things from H1 to S5 instead
msg.match.nw_src = "10.0.0.1"
msg.match.nw_dst = None
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
#Tell L2 to send things to H1 and H2 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.1"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.2"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
#Tell L3 to send things to H1 and H2 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.1"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-03"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.2"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-03"), msg)
#Link between S4 and L2 is down
elif _port == 2:
#Tell L2 to send things from H3 to S5 instead
msg.match.nw_src = "10.0.0.3"
msg.match.nw_dst = None
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
#Tell L1 to send things to H3 and H4 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.3"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.4"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
#Tell L3 to send things to H3 and H4 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.3"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-03"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.4"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-03"), msg)
#Link between S4 and L3 is down
elif _port == 3:
#Tell L3 to send things from H5 to S5 instead
msg.match.nw_src = "10.0.0.5"
msg.match.nw_dst = None
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-03"), msg)
#Tell L2 to send things to H5 and H6 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.5"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.6"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
#Tell L1 to send things to H5 and H6 via S5 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.5"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.6"
msg.actions.append(of.ofp_action_output(port = 2))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
else:
log.info("Failed port non-useful: "+str(_port))
#Link is down in S5
elif _dpid_short == "5":
#Link between S5 and L1 is down
if _port == 1:
#Tell L1 to send things from H2 to S4 instead
msg.match.nw_src = "10.0.0.2"
msg.match.nw_dst = None
msg.actions.append(of.ofp_action_output(port = 1))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-01"), msg)
#Tell L2 to send things to H1 and H2 via S4 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.1"
msg.actions.append(of.ofp_action_output(port = 1))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.2"
msg.actions.append(of.ofp_action_output(port = 1))
core.openflow.sendToDPID(str_to_dpid("00-00-00-00-00-02"), msg)
#Tell L3 to send things to H1 and H2 via S4 instead
msg.match.nw_src = None
msg.match.nw_dst = "10.0.0.1"
msg.actions.append(of.ofp_action_output(port | |
<reponame>RichardScottOZ/stackstac<gh_stars>0
from __future__ import annotations
import functools
import logging
import threading
import weakref
from typing import TYPE_CHECKING, Optional, Protocol, Tuple, Type, Union
import numpy as np
import rasterio as rio
from rasterio.vrt import WarpedVRT
from .rio_env import LayeredEnv
from .timer import time
from .reader_protocol import Reader
from .raster_spec import RasterSpec
if TYPE_CHECKING:
from rasterio.enums import Resampling
from rasterio.windows import Window
logger = logging.getLogger(__name__)
# TODO remove logging code?
def _curthread():
return threading.current_thread().name
def log_event(topic: str, msg: dict) -> None:
    """Best-effort: record a structured event on the current dask worker.

    Does nothing when `distributed` is not importable, or when we are not
    running inside a worker (`get_worker` raises ValueError in that case).
    """
    try:
        import distributed

        worker = distributed.get_worker()
    except (ImportError, ValueError):
        return
    # tag every event with the thread it came from
    payload = dict(msg, thread=_curthread())
    worker.log_event(topic, payload)
# /TODO
# Default GDAL configuration options, layered by phase: `always` applies to
# every GDAL call, `open` only while opening datasets, `read` only while
# reading pixel data (layering semantics presumably provided by LayeredEnv --
# defined in .rio_env, not visible here).
DEFAULT_GDAL_ENV = LayeredEnv(
    always=dict(
        CPL_VSIL_CURL_ALLOWED_EXTENSIONS="tif",  # don't really know what this does. may not be necessary/desireable
        GDAL_HTTP_MULTIRANGE="YES",  # unclear if this actually works
        GDAL_HTTP_MERGE_CONSECUTIVE_RANGES="YES",
        # ^ unclear if this works either. won't do much when our dask chunks are aligned to the dataset's chunks.
    ),
    open=dict(
        GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR",
        # ^ stop GDAL from requesting `.aux` and `.msk` files from the bucket (speeds up `open` time a lot)
        VSI_CACHE=True
        # ^ cache HTTP requests for opening datasets. This is critical for `ThreadLocalRioDataset`,
        # which re-opens the same URL many times---having the request cached makes subsequent `open`s
        # in different threads snappy.
    ),
    read=dict(
        VSI_CACHE=False
        # ^ *don't* cache HTTP requests for actual data. We don't expect to re-request data,
        # so this would just blow out the HTTP cache that we rely on to make repeated `open`s fast
        # (see above)
    ),
)

# These GDAL _drivers_ are safe to run in multiple threads. Note that GDAL _datasets_ are never
# safe to access across different threads. But if we create a copy of the dataset for each thread,
# and each copy uses its own file descriptor (`sharing=False`), then each thread can safely access
# its own dataset in parallel. Compare this to the hdf5 driver for example, which assumes only one
# thread is accessing the entire library at a time.
# See `ThreadLocalRioDataset` for more.
# https://github.com/pangeo-data/pangeo-example-notebooks/issues/21#issuecomment-432457955
# https://gdal.org/drivers/raster/vrt.html#multi-threading-issues
MULTITHREADED_DRIVER_ALLOWLIST = {"GTiff"}
class ThreadsafeRioDataset(Protocol):
    """Structural interface for a rasterio-backed dataset that can be read and closed thread-safely."""

    # (scale, offset) rescaling factors cached from band 1 of the underlying dataset.
    scale_offset: Tuple[float, float]

    def read(self, *args, **kwargs) -> np.ndarray:
        """Read pixel data; implementations must make this safe to call from any thread."""
        ...

    def close(self) -> None:
        """Release the underlying dataset(s)."""
        ...
class SingleThreadedRioDataset:
    """
    Interface for a rasterio dataset whose driver is inherently single-threaded (like hdf5).
    Concurrent reads are protected by a lock.
    """

    def __init__(
        self,
        env: LayeredEnv,
        ds: rio.DatasetReader,
        vrt: Optional[WarpedVRT] = None,
    ) -> None:
        self.env = env
        self.ds = ds
        self.vrt = vrt
        # Cache this for non-locking access
        self.scale_offset = (ds.scales[0], ds.offsets[0])
        # Serializes all reads and the close() call.
        self._lock = threading.Lock()

    def read(self, *args, **kwargs) -> np.ndarray:
        "Acquire the lock, then read from the dataset"
        # Read through the VRT when one exists (it wraps `ds`); otherwise read directly.
        reader = self.vrt or self.ds
        with self._lock, self.env.read:
            return reader.read(*args, **kwargs)

    def close(self) -> None:
        "Acquire the lock, then close the dataset"
        with self._lock:
            # Close the VRT first: it wraps (and references) `ds`.
            if self.vrt:
                self.vrt.close()
            self.ds.close()

    def __del__(self) -> None:
        # NOTE(review): if __init__ failed before `_lock` was assigned, this would raise
        # AttributeError during garbage collection — confirm that is acceptable.
        self.close()

    def __enter__(self) -> SingleThreadedRioDataset:
        # Context-manager support: hold the lock for the duration of the `with` block.
        self._lock.acquire()
        return self

    def __exit__(self, *args) -> None:
        self._lock.release()
class ThreadLocalRioDataset:
    """
    Creates a copy of the dataset and VRT for every thread that reads from it.

    In GDAL, nothing allows you to read from the same dataset from multiple threads.
    The best concurrency support available is that you can use the same *driver*, on
    separate dataset objects, from different threads (so long as those datasets don't share
    a file descriptor). Also, the thread that reads from a dataset must be the one that creates it.

    This wrapper transparently re-opens the dataset (with ``sharing=False``, to use a separate file
    descriptor) for each new thread that accesses it. Subsequent reads by that thread will reuse that
    dataset.

    Note
    ----
    When using a large number of threads, this could potentially use a lot of memory!
    GDAL datasets are not lightweight objects.
    """

    def __init__(
        self,
        env: LayeredEnv,
        ds: rio.DatasetReader,
        vrt: Optional[WarpedVRT] = None,
    ) -> None:
        self._env = env
        # Everything needed to re-open an equivalent dataset from another thread.
        self._url = ds.name
        self._driver = ds.driver
        self._open_options = ds.options

        # Cache this for non-locking access
        self.scale_offset = (ds.scales[0], ds.offsets[0])

        if vrt is not None:
            self._vrt_params = dict(
                # src_crs=vrt.src_crs.to_string(),
                # ^ we won't use this, and loading proj4 CRSs is slow
                crs=vrt.crs.to_string(),
                # ^ we _do_ ser-de the CRS to re-create it per thread,
                # because pyproj.CRS objects probably aren't thread-safe?
                resampling=vrt.resampling,
                tolerance=vrt.tolerance,
                src_nodata=vrt.src_nodata,
                nodata=vrt.nodata,
                width=vrt.width,
                height=vrt.height,
                src_transform=vrt.src_transform,
                transform=vrt.transform,
                dtype=vrt.working_dtype,
                warp_extras=vrt.warp_extras,
            )
            # ^ copied from rioxarray
            # https://github.com/corteva/rioxarray/blob/0804791a44f65ac4f303dd286e94b3eaee81f72b/rioxarray/_io.py#L720-L734
        else:
            self._vrt_params = None

        self._threadlocal = threading.local()
        self._threadlocal.ds = ds
        self._threadlocal.vrt = vrt
        # ^ NOTE: we fill these in *only for this thread*; in other threads, the attributes won't be set.
        # Instead, `_open` will lazily fill them in.

        self._lock = threading.Lock()
        # ^ NOTE this lock protects any mutation of `self`---namely, changing `self._threadlocal`.
        # The `threading.local` object is itself thread-safe (the `.x` part of `self._threadlocal.x` is protected),
        # but because `close` closes datasets across all threads by simply deleting the current threadlocal
        # and replacing it with an empty one, we have to synchronize all access to `self._threadlocal`.

        log_event("create_ThreadLocalRioDataset", dict(url=self._url, vrt=bool(vrt)))

    def _open(self) -> Union[SelfCleaningDatasetReader, WarpedVRT]:
        """Open a fresh dataset (and VRT, if configured) for the calling thread and cache it."""
        with self._env.open:
            with time(f"Reopen {self._url!r} in {_curthread()}: {{t}}"):
                result = ds = SelfCleaningDatasetReader(
                    rio.parse_path(self._url),
                    sharing=False,
                    driver=self._driver,
                    **self._open_options,
                )
                log_event("open_dataset", dict(url=self._url))
            if self._vrt_params:
                with self._env.open_vrt:
                    result = vrt = WarpedVRT(ds, sharing=False, **self._vrt_params)
                    log_event("open_vrt", dict(url=self._url))
            else:
                vrt = None

        with self._lock:
            self._threadlocal.ds = ds
            self._threadlocal.vrt = vrt

        weakref.ref(
            ds, functools.partial(log_event, "close_dataset", dict(url=self._url))
        )
        if vrt is not None:
            # BUG FIX: previously `weakref.ref(vrt, ...)` ran unconditionally, but
            # `weakref.ref(None)` raises TypeError (NoneType can't be weak-referenced),
            # so every VRT-less open crashed here. Only register the callback when
            # a VRT was actually created.
            weakref.ref(
                vrt, functools.partial(log_event, "close_vrt", dict(url=self._url))
            )
        # NOTE: functools.partial to hopefully avoid taking a closure over `self`
        # NOTE(review): these weakref objects are discarded immediately; a dead weakref's
        # callback is not invoked, so these close-log events may never fire — confirm intent.

        return result

    @property
    def dataset(self) -> Union[SelfCleaningDatasetReader, WarpedVRT]:
        """This thread's dataset (VRT preferred), opening one on first access from the thread."""
        try:
            with self._lock:
                return self._threadlocal.vrt or self._threadlocal.ds
        except AttributeError:
            # First access from this thread (or after `close`): open a fresh copy.
            return self._open()

    def read(self, *args, **kwargs) -> np.ndarray:
        "Read from the current thread's dataset, opening a new copy of the dataset on first access from each thread."
        with time(f"Read {self._url!r} in {_curthread()}: {{t}}"):
            with self._env.read:
                return self.dataset.read(*args, **kwargs)

    def close(self) -> None:
        """
        Release every thread's reference to its dataset, allowing them to close.

        This method is thread-safe. After `close` returns, any `read` calls will
        open new datasets for their threads. However, for best performance, be
        sure that no thread will need to access the dataset again before
        calling `close`.

        If `close` is called while a thread-local copy of a dataset is opening,
        that thread will still receive the newly-opened dataset. The next read
        from that thread may or may not open the dataset yet again.

        Note that the underlying rasterio dataset/VRT may not be immediately closed
        upon calling this method; it will take until the next garbage-collection cycle.
        Indeed, *if any other code holds a reference to one of the rasterio datasets,
        it will not be closed at all*. This method just releases our references and relies
        on garbage collection to do the rest.
        """
        # We can't just call `close` on `self._threadlocal.ds`, because we want to close _all_
        # the datasets held by all threads.
        # It is (reasonably) very hard to access a different thread's storage on a `threading.local`
        # object, so we can't just iterate through them all and call `close`.
        # Instead, we simply replace the thread-local with a new empty one. Dropping our reference
        # to the old thread-local will cause it to delete its internal dict, thereby dropping references
        # to all the rasterio datasets contained therein.
        # Then, the `__del__` method on `WarpedVRT` and `SelfCleaningDatasetReader` will close those
        # datasets.
        # NOTE: we're assuming here that closing a GDAL dataset from a thread other than the one that created
        # it is safe to do, which, knowing GDAL, is quite possibly untrue.
        log_event("close_ThreadLocalRioDataset", dict(url=self._url))
        with self._lock:
            self._threadlocal = threading.local()
class SelfCleaningDatasetReader(rio.DatasetReader):
    """DatasetReader that closes its GDAL dataset when the Python object is garbage-collected."""

    # Unclear if this is even necessary, since `DatasetBase` implements `__dealloc__`,
    # but better to be safe?
    # https://github.com/mapbox/rasterio/blob/0a52d52b0c19094cd906c25fe3c23ddb48ee1f48/rasterio/_base.pyx#L445-L447
    def __del__(self):
        self.close()
class AutoParallelRioReader:
"""
rasterio-based Reader that picks the appropriate concurrency mechanism after opening the file.
After opening the ``url`` and seeing which GDAL driver it uses, it'll use
`ThreadLocalRioDataset` (full concurrency, but higher memory usage) if the
driver is in `MULTITHREADED_DRIVER_ALLOWLIST`, otherwise `SingleThreadedRioDataset`
for non-thread-safe drivers.
"""
def __init__(
self,
url: str,
spec: RasterSpec,
resampling: Resampling,
dtype: np.dtype,
fill_value: Optional[Union[int, float]],
rescale: bool,
gdal_env: Optional[LayeredEnv] = None,
) -> None:
if fill_value is not None and not np.can_cast(fill_value, dtype):
| |
<reponame>rSedoy/wagtail<filename>wagtail/core/blocks/base.py
import collections
from importlib import import_module
from django import forms
from django.core import checks
from django.core.exceptions import ImproperlyConfigured
from django.template.loader import render_to_string
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
# Public names exported by this module.
__all__ = ['BaseBlock', 'Block', 'BoundBlock', 'DeclarativeSubBlocksMetaclass', 'BlockWidget', 'BlockField']
# =========================================
# Top-level superclasses and helper objects
# =========================================
class BaseBlock(type):
    """
    Metaclass for Block: pops the class's declared ``Meta`` and combines it with
    the ``_meta_class`` of every base into a single generated ``_meta_class``.
    """

    def __new__(mcs, name, bases, attrs):
        meta_class = attrs.pop('Meta', None)
        cls = super(BaseBlock, mcs).__new__(mcs, name, bases, attrs)

        # Collect this class's Meta (if any) plus the generated metas of all bases,
        # dropping the Nones, then synthesize a combined options class.
        candidates = [meta_class]
        candidates += [getattr(base, '_meta_class', None) for base in bases]
        meta_bases = tuple(candidate for candidate in candidates if candidate)

        cls._meta_class = type(str(name + 'Meta'), meta_bases, {})
        return cls
class Block(metaclass=BaseBlock):
    # Block name; assigned via set_name() when attached to a parent block.
    name = ''
    # Global counter used to record block declaration order (see __init__).
    creation_counter = 0

    # Name under which the block's value is exposed to its template context.
    TEMPLATE_VAR = 'value'

    class Meta:
        # Default options; overridable per-instance via constructor kwargs.
        label = None
        icon = "placeholder"
        classname = None
        group = ''

    """
    Setting a 'dependencies' list serves as a shortcut for the common case where a complex block type
    (such as struct, list or stream) relies on one or more inner block objects, and needs to ensure that
    the responses from the 'media' and 'html_declarations' include the relevant declarations for those inner
    blocks, as well as its own. Specifying these inner block objects in a 'dependencies' list means that
    the base 'media' and 'html_declarations' methods will return those declarations; the outer block type can
    then add its own declarations to the list by overriding those methods and using super().
    """
    dependencies = []
def __new__(cls, *args, **kwargs):
# adapted from django.utils.deconstruct.deconstructible; capture the arguments
# so that we can return them in the 'deconstruct' method
obj = super(Block, cls).__new__(cls)
obj._constructor_args = (args, kwargs)
return obj
def all_blocks(self):
"""
Return a list consisting of self and all block objects that are direct or indirect dependencies
of this block
"""
result = [self]
for dep in self.dependencies:
result.extend(dep.all_blocks())
return result
def all_media(self):
media = forms.Media()
for block in self.all_blocks():
media += block.media
return media
def all_html_declarations(self):
declarations = filter(bool, [block.html_declarations() for block in self.all_blocks()])
return mark_safe('\n'.join(declarations))
def __init__(self, **kwargs):
self.meta = self._meta_class()
for attr, value in kwargs.items():
setattr(self.meta, attr, value)
# Increase the creation counter, and save our local copy.
self.creation_counter = Block.creation_counter
Block.creation_counter += 1
self.definition_prefix = 'blockdef-%d' % self.creation_counter
self.label = self.meta.label or ''
def set_name(self, name):
self.name = name
if not self.meta.label:
self.label = capfirst(force_str(name).replace('_', ' '))
    @property
    def media(self):
        """Form media (JS/CSS) required by this block; empty by default, overridden by subclasses."""
        return forms.Media()

    def html_declarations(self):
        """
        Return an HTML fragment to be rendered on the form page once per block definition -
        as opposed to once per occurrence of the block. For example, the block definition
            ListBlock(label="Shopping list", CharBlock(label="Product"))
        needs to output a <script type="text/template"></script> block containing the HTML for
        a 'product' text input, so that these can be dynamically added to the list. This
        template block must only occur once in the page, even if there are multiple 'shopping list'
        blocks on the page.

        Any element IDs used in this HTML fragment must begin with definition_prefix.
        (More precisely, they must either be definition_prefix itself, or begin with definition_prefix
        followed by a '-' character)
        """
        return ''
    def js_initializer(self):
        """
        Returns a Javascript expression string, or None if this block does not require any
        Javascript behaviour. This expression evaluates to an initializer function, a function that
        takes the ID prefix and applies JS behaviour to the block instance with that value and prefix.

        The parent block of this block (or the top-level page code) must ensure that this
        expression is not evaluated more than once. (The resulting initializer function can and will be
        called as many times as there are instances of this block, though.)
        """
        return None

    def render_form(self, value, prefix='', errors=None):
        """
        Render the HTML for this block with 'value' as its content.
        """
        # Abstract: concrete block types must implement their own form rendering.
        raise NotImplementedError('%s.render_form' % self.__class__)

    def value_from_datadict(self, data, files, prefix):
        # Abstract: extract this block's value from a form submission.
        raise NotImplementedError('%s.value_from_datadict' % self.__class__)

    def value_omitted_from_data(self, data, files, name):
        """
        Used only for top-level blocks wrapped by BlockWidget (i.e.: typically only StreamBlock)
        to inform ModelForm logic on Django >=1.10.2 whether the field is absent from the form
        submission (and should therefore revert to the field default).
        """
        return name not in data
def bind(self, value, prefix=None, errors=None):
"""
Return a BoundBlock which represents the association of this block definition with a value
and a prefix (and optionally, a ValidationError to be rendered).
BoundBlock primarily exists as a convenience to allow rendering within templates:
bound_block.render() rather than blockdef.render(value, prefix) which can't be called from
within a template.
"""
return BoundBlock(self, value, prefix=prefix, errors=errors)
    def get_default(self):
        """
        Return this block's default value (conventionally found in self.meta.default),
        converted to the value type expected by this block. This caters for the case
        where that value type is not something that can be expressed statically at
        model definition time (e.g. something like StructValue which incorporates a
        pointer back to the block definition object).
        """
        return self.meta.default
def prototype_block(self):
"""
Return a BoundBlock that can be used as a basis for new empty block instances to be added on the fly
(new list items, for example). This will have a prefix of '__PREFIX__' (to be dynamically replaced with
a real prefix when it's inserted into the page) and a value equal to the block's default value.
"""
return self.bind(self.get_default(), '__PREFIX__')
    def clean(self, value):
        """
        Validate value and return a cleaned version of it, or throw a ValidationError if validation fails.
        The thrown ValidationError instance will subsequently be passed to render() to display the
        error message; the ValidationError must therefore include all detail necessary to perform that
        rendering, such as identifying the specific child block(s) with errors, in the case of nested
        blocks. (It is suggested that you use the 'params' attribute for this; using error_list /
        error_dict is unreliable because Django tends to hack around with these when nested.)
        """
        # Base implementation: accept any value unchanged.
        return value

    def to_python(self, value):
        """
        Convert 'value' from a simple (JSON-serialisable) value to a (possibly complex) Python value to be
        used in the rest of the block API and within front-end templates. In simple cases this might be
        the value itself; alternatively, it might be a 'smart' version of the value which behaves mostly
        like the original value but provides a native HTML rendering when inserted into a template; or it
        might be something totally different (e.g. an image chooser will use the image ID as the clean
        value, and turn this back into an actual image object here).
        """
        return value

    def get_prep_value(self, value):
        """
        The reverse of to_python; convert the python value into JSON-serialisable form.
        """
        return value
def get_context(self, value, parent_context=None):
"""
Return a dict of context variables (derived from the block value and combined with the parent_context)
to be used as the template context when rendering this value through a template.
"""
context = parent_context or {}
context.update({
'self': value,
self.TEMPLATE_VAR: value,
})
return context
    def get_template(self, context=None):
        """
        Return the template to use for rendering the block if specified on meta class.
        This extraction was added to make dynamic templates possible if you override this method.
        """
        return getattr(self.meta, 'template', None)
def render(self, value, context=None):
"""
Return a text rendering of 'value', suitable for display on templates. By default, this will
use a template (with the passed context, supplemented by the result of get_context) if a
'template' property is specified on the block, and fall back on render_basic otherwise.
"""
template = self.get_template(context=context)
if not template:
return self.render_basic(value, context=context)
if context is None:
new_context = self.get_context(value)
else:
new_context = self.get_context(value, parent_context=dict(context))
return mark_safe(render_to_string(template, new_context))
    def get_api_representation(self, value, context=None):
        """
        Can be used to customise the API response and defaults to the value returned by get_prep_value.
        """
        return self.get_prep_value(value)

    def render_basic(self, value, context=None):
        """
        Return a text rendering of 'value', suitable for display on templates. render() will fall back on
        this if the block does not define a 'template' property.
        """
        # force_str: the value may be a lazy/translatable object.
        return force_str(value)
def get_searchable_content(self, value):
"""
Returns a list of strings containing text content within this block to be used | |
<reponame>samuelralmeida/multi-user-blog<filename>main.py<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import webapp2
import jinja2
import random
import hashlib
import hmac
import re
from datetime import datetime
from google.appengine.ext import db
from string import letters
# Jinja2 environment loading templates from ./templates, with autoescaping on
# (guards against XSS in rendered values).
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
                               autoescape=True)

# rewrite secret word
# NOTE(review): HMAC secret hard-coded in source; should be moved to config kept out of VCS.
secret = 'sars'
def render_str(template, **params):
    """Render the named Jinja template with the given parameters."""
    return jinja_env.get_template(template).render(params)
def make_secure_val(val):
    """Return 'val|HMAC(secret, val)' so the value can later be verified as untampered."""
    signature = hmac.new(secret, val).hexdigest()
    return '%s|%s' % (val, signature)
def check_secure_val(secure_val):
    """Return the embedded value when secure_val's HMAC signature checks out; None otherwise."""
    val, _, _signature = secure_val.partition('|')
    if make_secure_val(val) == secure_val:
        return val
# main class
class BlogHandler(webapp2.RequestHandler):
    """Common base handler: template-rendering helpers plus secure-cookie session handling."""

    def write(self, *a, **kw):
        self.response.out.write(*a, **kw)

    def render_str(self, template, **params):
        # Expose the logged-in user (or None) to every template.
        params['user'] = self.user
        return render_str(template, **params)

    def render(self, template, **kw):
        self.write(self.render_str(template, **kw))

    # set a cookie based on "name" and "val" passed
    def set_secure_cookie(self, name, val):
        cookie_val = make_secure_val(val)
        # add cookie to header of response page
        # NOTE(review): cookie is set without HttpOnly/Secure attributes —
        # confirm whether client-side script access is actually needed.
        self.response.headers.add_header(
            'Set-Cookie',
            '%s=%s; Path=/' % (name, cookie_val))

    # check if a cookie is valid; returns the verified value or a falsy result
    def read_secure_cookie(self, name):
        cookie_val = self.request.cookies.get(name)
        return cookie_val and check_secure_val(cookie_val)

    # set a cookie to login user
    def login_cookie(self, user):
        self.set_secure_cookie('user_id', str(user.key().id()))

    # overwrite a login cookie, end the session
    def logout_cookie(self):
        self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')

    # check if user is logged in. GAE function: runs before dispatch on every request
    def initialize(self, *a, **kw):
        webapp2.RequestHandler.initialize(self, *a, **kw)
        uid = self.read_secure_cookie('user_id')
        # self.user is the User entity when logged in, falsy otherwise.
        self.user = uid and User.by_id(int(uid))
# redirect path '/' to '/blog/'
class MainPage(BlogHandler):
    """Entry point: send visitors from '/' to the blog front page."""

    def get(self):
        self.redirect('/blog/')
def blog_key(name='default'):
    # Parent key grouping all blog posts into a single datastore entity group.
    return db.Key.from_path('blogs', name)
class Post(db.Model):
    """Datastore model for a blog post."""
    subject = db.StringProperty(required=True)
    content = db.TextProperty(required=True)
    author = db.StringProperty(required=True)
    # When trying to use ReferenceProperty, I get an HTTP error 500
    # author = db.ReferenceProperty(User, collection_name='user_author')
    num_comments = db.IntegerProperty(required=True, default=0)
    # Usernames of the users who liked this post.
    liked_by = db.StringListProperty(default=[])
    created = db.DateTimeProperty(auto_now_add=True)
    # Can't use auto_now to last_modified because
    # when some likes or comment was count the date changed too
    last_modified = db.DateTimeProperty(required=False)

    def render(self):
        """Render this post through post.html (newlines shown as <br>)."""
        self._render_text = self.content.replace('\n', '<br>')
        self._id = self.key().id()
        return render_str("post.html", p=self)

    @property
    def likes(self):
        """Number of likes (length of liked_by)."""
        return len(self.liked_by)
# create a new post to blog
class NewPost(BlogHandler):
    """Create a new blog post; requires a logged-in user."""

    def get(self):
        if not self.user:
            self.redirect('/blog/login')
            return
        # variable logged is used by base.html to define page header
        self.render('newpost.html', logged=True)

    def post(self):
        if not self.user:
            self.redirect('/blog/login')
            return
        subject = self.request.get('subject')
        content = self.request.get('content')
        if not (subject and content):
            error = "Sorry, but you must fill subject and content, please!"
            self.render("newpost.html", subject=subject,
                        content=content, error=error, logged=True)
            return
        p = Post(parent=blog_key(), subject=subject,
                 content=content, author=self.user.name,
                 last_modified=datetime.now())
        p.put()
        self.redirect('/blog/%s' % str(p.key().id()))
class PostPage(BlogHandler):
    """Permalink page for a single post with its comments; requires login."""

    def get(self, post_id):
        # Key.from_path(*path, parent=None, namespace=None)
        key = db.Key.from_path('Post', int(post_id), parent=blog_key())
        post = db.get(key)
        # Comments attached to this post, newest first.
        comments_maked = Comment.all().filter(
            'post_commented = ', key).order('-created_comment')
        if not post:
            self.error(404)
            return
        if self.user:
            current_user = self.user.name
            self.render("permalink.html", p=post, username=current_user,
                        comments_maked=comments_maked, logged=True)
        else:
            self.redirect('/blog/login')
class BlogFront(BlogHandler):
    """Front page: newest posts first; the header varies with login state."""

    def get(self):
        posts = Post.all().order('-created')
        if not self.user:
            self.render('front.html', posts=posts)
            return
        self.render('front.html', posts=posts,
                    logged=True, username=self.user.name)
# make a salt to store in the database alongside the password hash
def make_salt(lenght=5):
    """Return a random string of ASCII letters used to salt password hashes.

    (Parameter name 'lenght' [sic] kept for backward compatibility.)
    """
    chars = [random.choice(letters) for _ in xrange(lenght)]
    return ''.join(chars)
# make a hash to store in the database instead of the plain password
def make_password_hash(name, pw, salt=None):
    """Return 'salt,sha256(name+pw+salt)'; generates a fresh salt when none is given."""
    if not salt:
        salt = make_salt()
    digest = hashlib.sha256(name + pw + salt).hexdigest()
    return '%s,%s' % (salt, digest)
# check if the supplied password matches the stored 'salt,hash' string
def valid_password(name, password, h):
    """Re-hash with the stored salt and compare against h."""
    salt = h.split(',')[0]
    return make_password_hash(name, password, salt) == h
# create a user element in database
def users_key(group='default'):
    # Parent key grouping all User entities into one datastore entity group.
    return db.Key.from_path('users', group)
# user object
class User(db.Model):
    """Datastore model for a registered user."""
    name = db.StringProperty(required=True)
    # Salted SHA-256 digest in 'salt,hash' form; see make_password_hash().
    password_hash = db.StringProperty(required=True)
    email = db.StringProperty()
    posts_liked = db.StringListProperty(default=[])

    @classmethod
    def by_id(cls, uid):
        return User.get_by_id(uid, parent=users_key())

    @classmethod
    def by_name(cls, name):
        u = User.all().filter('name =', name).get()
        return u

    @classmethod
    def register(cls, name, password, email=None):
        """Build (but do not save) a User with a freshly salted password hash."""
        password_hash = make_password_hash(name, password)
        return User(parent=users_key(), name=name,
                    password_hash=password_hash, email=email)

    @classmethod
    def login(cls, name, password):
        """Return the matching User when name/password are valid; None otherwise."""
        u = cls.by_name(name)
        if u and valid_password(name, password, u.password_hash):
            return u
# rule to check if username input in register is valid
# (fixed: the pattern previously began with a redundant doubled '^^' anchor)
USER_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")


def valid_username_input(username):
    """Truthy when username is 3-20 chars of letters, digits, '_' or '-'."""
    return username and USER_RE.match(username)
# rule to check if password input in register is valid
PASS_RE = re.compile(r"^.{3,20}$")


def valid_password_input(password):
    """Truthy when password is between 3 and 20 characters long."""
    if not password:
        return password
    return PASS_RE.match(password)
# rule to check if email input in register is valid (empty email is allowed)
# Fixed: the old pattern used '@+' (one OR MORE '@') and '\S' character classes
# (which themselves match '@'), so addresses like 'a@@b.co' were accepted.
EMAIL_RE = re.compile(r'^[^@\s]+@[^@\s]+\.[^@\s]+$')


def valid_email_input(email):
    """Truthy when email is empty (optional field) or shaped like local@domain.tld."""
    return not email or EMAIL_RE.match(email)
# render signup form and check if inputs are valid
class Signup(BlogHandler):
    """Render the signup form, validate its inputs, and delegate to done() on success."""

    def get(self):
        self.render("signup-form.html")

    def post(self):
        have_error = False
        self.username = self.request.get('username')
        self.password = self.request.get('password')
        self.verify = self.request.get('verify')
        self.email = self.request.get('email')
        # Re-populate username/email on error; never echo the password back.
        params = dict(username=self.username, email=self.email)
        # verify username input
        if not valid_username_input(self.username):
            params['error_username'] = 'Not a valid username'
            have_error = True
        # verify password input
        # (fixed: this message was a corrupted '<PASSWORD>' placeholder)
        if not valid_password_input(self.password):
            params['error_password'] = 'Not a valid password'
            have_error = True
        elif self.password != self.verify:
            params['error_verify'] = 'Your passwords did not match'
            have_error = True
        # verify email input
        if not valid_email_input(self.email):
            params['error_email'] = 'Not a valid email'
            have_error = True
        if have_error:
            self.render('signup-form.html', **params)
        else:
            self.done()

    def done(self, *a, **kw):
        # Abstract hook: subclasses (Register) complete the signup.
        raise NotImplementedError
class Register(Signup):
    """Signup flow completion: create the user unless the name is already taken."""

    def done(self):
        # search user's name in database
        existing = User.by_name(self.username)
        if existing:
            msg = "That's user already exists"
            self.render('signup-form.html', error_username=msg)
            return
        # create the user object, save it, and start a session
        u = User.register(self.username, self.password, self.email)
        u.put()
        self.login_cookie(u)
        self.redirect('/blog/welcome')
class Login(BlogHandler):
    """Log an existing user in, setting the session cookie on success."""

    def get(self):
        self.render('login-form.html')

    def post(self):
        username = self.request.get('username')
        password = self.request.get('password')
        # check credentials against the database
        u = User.login(username, password)
        if not u:
            msg = 'Invalid login'
            self.render('login-form.html', error=msg)
            return
        self.login_cookie(u)
        self.redirect('/blog/welcome')
class Logout(BlogHandler):
    """End the session by clearing the login cookie, then return to the blog."""

    def get(self):
        self.logout_cookie()
        self.redirect('/blog')
class Welcome(BlogHandler):
    """Logged-in landing page listing the current user's own posts."""

    def get(self):
        if not self.user:
            self.redirect('/blog')
            return
        author = self.user.name
        author_posts = Post.all().filter('author =', author).order('-created')
        self.render('welcome.html', username=author,
                    logged=True, author_posts=author_posts)
class Like(BlogHandler):
    """Record a 'like' on a post; users may not like their own post or like twice."""

    def get(self, post_id):
        if not self.user:
            self.redirect('/blog/login')
            return
        key = db.Key.from_path('Post', int(post_id), parent=blog_key())
        post = db.get(key)
        if not post:
            self.error(404)
            return
        current_user = self.user.name
        redirect = int(post_id)
        if post.author == current_user:
            msg = "You can't like your own post"
        elif current_user in post.liked_by:
            msg = "You've already liked"
        else:
            # Record the like and persist it.
            post.liked_by.append(current_user)
            post.put()
            msg = 'Liked'
        self.render("permalink.html", p=post, logged=True, msg=msg,
                    redirect=redirect, username=current_user)
class Unlike(BlogHandler):
    """Withdraw a previously-recorded 'like' on a post."""

    def get(self, post_id):
        if not self.user:
            self.redirect('/blog/login')
            return
        key = db.Key.from_path('Post', int(post_id), parent=blog_key())
        post = db.get(key)
        if not post:
            self.error(404)
            return
        current_user = self.user.name
        if current_user in post.liked_by:
            # Remove the like and persist the change.
            redirect = ''
            post.liked_by.remove(current_user)
            post.put()
            msg = 'Unliked'
        else:
            redirect = int(post_id)
            msg = "You can't unlike if you haven't liked yet"
        self.render("permalink.html", p=post, logged=True, msg=msg,
                    redirect=redirect, username=current_user)
class DeletePost(BlogHandler):
    """Delete a post (and its comments); only the author may delete it."""

    def get(self, post_id):
        if not self.user:
            self.redirect('/blog/login')
            return
        key = db.Key.from_path('Post', int(post_id), parent=blog_key())
        post = db.get(key)
        if not post:
            self.error(404)
            return
        current_user = self.user.name
        if current_user == post.author:
            # Delete the post's comments first, then the post itself.
            db.delete(Comment.all(keys_only=True).filter(
                'post_commented = ', key))
            post.delete()
            msg = "This post was deleted"
        else:
            msg = "You can't delete a post by another user"
        redirect = 'welcome'
        self.render("permalink.html", p=post, logged=True,
                    msg=msg, redirect=redirect, username=current_user)
class EditPost(BlogHandler):
    """Edit an existing post; only its author may modify it."""

    def get(self, post_id):
        if not self.user:
            self.redirect('/blog/login')
        else:
            key = db.Key.from_path('Post', int(post_id), parent=blog_key())
            post = db.get(key)
            if not post:
                self.error(404)
                return
            current_user = self.user.name
            if post.author == current_user:
                # Pre-fill the edit form with the current subject/content.
                subject = post.subject
                content = post.content
                self.render('editpost.html', subject=subject,
                            content=content, logged=True)
            else:
                msg = "You can't edit post by another user"
                redirect = 'welcome'
                self.render('permalink.html', p=post, logged=True,
                            msg=msg, redirect=redirect, username=current_user)

    def post(self, post_id):
        if not self.user:
            return self.redirect('/blog/login')
        else:
            subject = self.request.get('subject')
            content = self.request.get('content')
            if subject and content:
                key = db.Key.from_path('Post', int(post_id), parent=blog_key())
                post = db.get(key)
                if not post:
                    self.error(404)
                    return
                current_user = self.user.name
                if post.author == current_user:
                    post.content = content
                    post.subject = subject
                    # Only content edits bump last_modified (likes/comments must not).
                    post.last_modified = datetime.now()
                    post.put()
                    self.redirect('/blog/%s' % str(post.key().id()))
                else:
                    msg = "You can't edit post by another user"
                    redirect = 'welcome'
                    self.render('permalink.html', p=post, logged=True,
                                msg=msg, redirect=redirect, username=current_user)
            else:
                error = "Sorry, but you must fill subject and content, please!"
                self.render("editpost.html", subject=subject,
                            content=content, error=error, logged=True)
class Comment(db.Model):
    """Datastore model for a comment attached to a Post."""
    content_comment = db.StringProperty(required=True)
    author_comment = db.StringProperty(required=True)
    created_comment = db.DateTimeProperty(auto_now_add=True)
    post_commented = db.ReferenceProperty(Post, collection_name='comments')

    def render(self):
        """Render this comment through comment.html (newlines shown as <br>)."""
        self._render_text = self.content_comment.replace('\n', '<br>')
        self._id = self.key().id()
        return render_str("comment.html", c=self)
class CommentPost(BlogHandler):
def get(self, post_id):
if not self.user:
self.redirect('/blog/login')
else:
key = db.Key.from_path('Post', int(post_id), parent=blog_key())
post = db.get(key)
if not post:
self.error(404)
return
self.render("newcomment.html", p=post, logged=True)
def post(self, post_id):
if not self.user:
return self.redirect('/blog/login')
else:
key = db.Key.from_path('Post', int(post_id), parent=blog_key())
post = db.get(key)
if not post:
self.error(404)
return
else:
comment = self.request.get('comment')
if comment:
username = self.user.name
c = Comment(content_comment=comment, author_comment=username,
post_commented=key, parent=blog_key())
c.put()
post.num_comments += 1
post.put()
| |
<gh_stars>0
'''
PIPELINE 2
Project manager for Maya
Author: <NAME>
All rights reserved (c) 2017
pipeline.nnl.tv
<EMAIL>
---------------------------------------------------------------------------------------------
install:
Place the pipeline folder in your maya scripts folder and run this code (in python):
import pipeline
pipeline.start()
---------------------------------------------------------------------------------------------
You are using pipeline on you own risk.
Things can always go wrong, and under no circumstances the author
would be responsible for any damages caused from the use of this software.
When using this beta program you hereby agree to allow this program to collect
and send usage data to the author.
---------------------------------------------------------------------------------------------
The coded instructions, statements, computer programs, and/or related
material (collectively the "Data") in these files are subject to the terms
and conditions defined by
Creative Commons Attribution-NonCommercial-NoDerivs 4.0 Unported License:
http://creativecommons.org/licenses/by-nc-nd/4.0/
http://creativecommons.org/licenses/by-nc-nd/4.0/legalcode
http://creativecommons.org/licenses/by-nc-nd/4.0/legalcode.txt
---------------------------------------------------------------------------------------------
'''
import logging
import os
import pipeline.apps.project_editor as project_editor
import pipeline.libs.config as cfg
import pipeline.libs.data as dt
import pipeline.libs.nodes.elements as elements
import pipeline.libs.files as files
import pipeline.libs.serializer as serializer
import pipeline.libs.misc as misc
import pipeline.apps.massage as massage
import pipeline.apps.users as users
from pipeline.libs.Qt import QtWidgets, QtCore
logger = logging.getLogger(__name__)
class RootNode(dt.Node):
    """Base node for on-disk pipeline items.

    A RootNode wraps a folder on disk plus an optional JSON metadata
    file following the "<folder>/<folder name>.json" convention.

    Recognised keyword arguments: ``path`` (folder on disk), ``project``
    (owning project node), ``settings`` and ``ui``; everything else is
    forwarded to ``dt.Node``.
    """

    def __init__(self, name, parent=None, **kwargs):
        super(RootNode, self).__init__(name, parent, **kwargs)
        self.name = name
        self.resource = cfg.folder_icon
        self.data_file = None        # serializer.JSONSerializer once attached
        self.data_file_path = None   # expected metadata file location
        self._ui = None
        if "path" in kwargs:
            self._path = kwargs["path"]
            # Metadata file convention: <path>/<basename(path)>.json
            self.data_file_path = os.path.join(
                kwargs["path"],
                "%s.%s" % (os.path.split(kwargs["path"])[1], "json"))
        self._project = kwargs.get("project")
        self._settings = kwargs.get("settings")
        self._ui = kwargs.get("ui")
        if self.data_file_path:
            self.set_data_file(self.data_file_path)

    @property
    def path(self):
        """Normalized folder path of this node."""
        return os.path.normpath(self._path)

    @path.setter
    def path(self, path):
        self._path = path

    @property
    def settings(self):
        """Settings object supplied at construction time (may be None)."""
        return self._settings

    @property
    def color(self):
        """Display colour: green when the node's stage is approved.

        NOTE(review): ``QColor`` normally lives in QtGui rather than
        QtWidgets — this relies on the pipeline Qt shim re-exporting it;
        confirm against pipeline.libs.Qt. ``approved_stage`` is expected
        to be provided by subclasses.
        """
        if self.approved_stage:
            return QtWidgets.QColor("#00fa9a")
        return QtWidgets.QColor("white")

    @property
    def relative_path(self):
        """Path of this node relative to its project root, or None."""
        if self.path and self._project:
            return files.reletive_path(self._project.path, self.path)

    @property
    def ansestors(self):
        """Path components from the project root to this node, or None
        when the relative path cannot be computed."""
        try:
            return files.splitall(self.relative_path)
        except Exception:
            # relative_path is None when no project is attached.
            return None

    def ansestor(self, int):
        """Return the ancestor path component at the given index, or None.

        NOTE(review): the parameter name shadows the ``int`` builtin;
        kept unchanged for backward compatibility with keyword callers.
        """
        ansestors = self.ansestors
        # Fixed off-by-one: the original test used ``>=``, which accepted
        # index == len(ansestors) and then raised IndexError on lookup.
        if ansestors and len(ansestors) > int:
            return ansestors[int]
        return None

    def typeInfo(self):
        """Type tag used by the pipeline views to identify node kinds."""
        return cfg._root_

    def set_data_file(self, path):
        """Attach the JSON metadata file at ``path``.

        Returns True when the file exists and was attached, False
        otherwise (previously an implicit None; both are falsy).
        """
        if os.path.isfile(path):
            self.data_file = serializer.JSONSerializer(path=path)
            return True
        return False

    def create(self, path=None):
        """Create this node's folder on disk.

        Returns self (chainable) on success, False on failure.
        """
        if files.create_directory(path):
            self.path = path
            return self
        return False
class ProjectNode(RootNode):
loaded = QtCore.Signal(dict)
def __init__(self, name, parent=None, **kwargs):
super(ProjectNode, self).__init__(name, parent, **kwargs)
self.project_file = None
if self.data_file:
self.project_file = self.data_file.read()
self.pipelineUI = None
for key in kwargs:
if key == "pipelineUI":
self.pipelineUI = kwargs[key]
self.loaded.connect(self.pipelineUI.updateCurrentProject)
def create(self,
nice_name = None,
path=None,
padding=3,
fps=25,
file_type = "ma",
users={"0": ["Admin", "1234", "admin"]},
branches = ["scenes", "assets"],
playblasts_root = cfg.playblast_save_options.PROJECT_ROOT,
prefix=None):
file_type = "ma"
project_key = serializer.id_generator()
project_data = {}
project_data["nice_name"] = nice_name if nice_name else self.name
project_data["project_name"] = self.name
project_data["project_key"] = project_key
project_data["padding"] = padding
project_data["fps"] = fps
project_data["defult_file_type"] = file_type
project_data["users"] = users
project_data["playblasts_root"] = playblasts_root
# project_data["prefix"] = prefix
# project_data["playblast_outside"] = playblast_outside
folders = ["scenes", "assets","images", "sourceimages", "data", "autosave", "movies", "scripts",
"sound", "clips", "renderData", "cache"]
for folder in folders:
# project_data[folder] = folder
files.create_directory(os.path.join(path, folder))
# render folders:
r_folders = ["renderData", "depth", "iprimages", "shaders"]
for r_folder in r_folders[1:]:
files.create_directory(os.path.join(path, r_folders[0], r_folder))
fur_folders = ["renderData", "fur", "furFiles", "furImages", "furEqualMap", "furAttrMap", "furShadowMap"]
for f_folder in fur_folders[2:]:
files.create_directory(os.path.join(path, fur_folders[0], fur_folders[1], f_folder))
# cache folders:
c_folders = ["cache", "particles", "nCache", "bifrost"]
for c_folder in c_folders[1:]:
files.create_directory(os.path.join(path, c_folders[0], c_folder))
fl_folders = ["cache", "nCache", "fluid"]
for fl_folder in fl_folders[2:]:
files.create_directory(os.path.join(path, fl_folders[0], fl_folders[1], fl_folder))
self.path = path
data_path = os.path.join(path, "%s.%s" % (self.name, "json"))
self.data_file_path = data_path
self.data_file = serializer.JSONSerializer().create(self.data_file_path, project_data)
self.project_file = self.data_file.read()
for branch in branches:
elements.BranchNode(branch, path=os.path.join(path, branch), project = self).create(path=os.path.join(path, branch))
# elements.BranchNode("assets", path=os.path.join(path, "assets"), project = self).create(path=os.path.join(path, "assets"))
if playblasts_root == cfg.playblast_save_options.PROJECT_ROOT:
files.create_directory(os.path.join(path, "playblasts"))
if playblasts_root == cfg.playblast_save_options.PROJECT_SISTER:
try:
files.create_directory(os.path.join(os.path.dirname(path), "{}_playblasts".format(self.name)))
except:
logger.info("Could not create Playblasts folder at: {}".format(os.path.dirname(path)))
return self
def online(self):
if os.path.isdir(self.path):
if os.path.isfile(self.data_file_path):
return True
return False
    def edit(self):
        """Open the project-settings dialog and apply accepted changes.

        Uses the current user's credentials from the UI settings; on
        Accept, updates the display name and users table, migrates the
        playblast folder when its root changed, then re-activates the
        project via ``set`` and refreshes the UI.
        """
        logger.info(self.edit.__name__)
        # _users = True if self.project_users else False
        user = self.pipelineUI.settings.user[0]
        password =self.pipelineUI.settings.user[1]
        projectDlg = project_editor.Project_edit_Dialog(project=self, user_data = [user, password])
        result = projectDlg.exec_()
        res = projectDlg.result()
        if result == QtWidgets.QDialog.Accepted:
            # logger.info(res)
            self.nice_name = res["nice_name"]
            # NOTE(review): users set to None presumably disables the
            # permission checks performed in ``set`` — verify.
            if res["users_mode"]:
                self.users = res["users"]
            else:
                self.users = None
            # Changing the playblast root migrates existing playblasts.
            if res["playblasts_root"] != self.project_playblasts_root:
                self.set_playblasts_root(res["playblasts_root"])
            self.set(user = [user,password])
            self.pipelineUI.navigate_to_current_file()
def validate_user(self, username=None, password=<PASSWORD>):
for key in self.users:
if self.users[key][0] == username and self.users[key][1] == password:
role = self.users[key][2]
if role == 'admin':
role = 'administrator'
return role
return ''
def link(self):
path = QtWidgets.QFileDialog.getOpenFileName(None, "Select Pipeline project file", filter = "*.*")
if path[0]:
project_path = os.path.dirname(path[0])
self.path = project_path
self.data_file_path = path[0]
self.set_data_file(self.data_file_path)
self.project_file = self.data_file.read()
self.pipelineUI.link_project(self)
def set(self, **kwargs):
user = ''
password = ''
role = ''
if 'user' in kwargs:
user = kwargs['user'][0]
password = kwargs['user'][1]
if self.data_file:
_users = True if self.project_users else False
if self.project_users:
# the project is permission based
if user == '':
# no user was called with the function, need to prompt for credentials
login = users.LoginWindow()
result = login.exec_()
user, password = login.result()
if result == QtWidgets.QDialog.Accepted:
# user entered credentials
role = self.validate_user(user, password)
# from user+password, return the role
if role == '':
# if no role was return there is no such user
logger.info("invalid username or password")
return False
else:
# recived valid role, set the user as current in the settings
self.pipelineUI.settings.user = [user, password]
else:
# user aborted, exit
return False
else:
# the function was called with a user+password, let's get their role
role = self.validate_user(user, password)
self.pipelineUI.settings.user = [user, password]
if role == '':
# if no role was return there is no such user
logger.info("invalid username or password")
return False
import pymel.core as pm
import maya.mel as mel
pm.workspace.open(self.path)
pm.workspace.chdir(self.path)
raw_project_path = self.path.replace("\\", "\\\\")
melCmd = "setProject \"" + raw_project_path + "\";"
try:
mel.eval(melCmd)
except:
pass
logger.info("Project changed to: {} ({})".format(self.nice_name, self.name))
self.loaded.emit({'users': _users,'user': user, 'role': role})
return True
def project_file_key(self, key=None):
if self.project_file:
return self.project_file[key]
else:
return None
    def set_playblasts_root(self, new_root):
        """Change where project playblasts are stored and migrate the
        existing files to the new location.

        ``new_root`` is one of ``cfg.playblast_save_options``:
        PROJECT_ROOT (a "playblasts" folder inside the project),
        PROJECT_SISTER (a "<name>_playblasts" folder next to it) or
        COMPONENT (per-component folders inside each branch). The user
        is prompted before any files are moved.
        """
        project_root = cfg.playblast_save_options.PROJECT_ROOT
        project_sister = cfg.playblast_save_options.PROJECT_SISTER
        # component_root is not compared below: anything that is not one
        # of the two "single folder" modes falls into the else branches.
        component_root = cfg.playblast_save_options.COMPONENT
        def get_fullpath_if_is_component(dir):
            # Return the component's underscore-joined full path when
            # ``dir`` holds component metadata, False otherwise.
            if os.path.exists(dir):
                if os.path.isfile(os.path.join(dir, "%s.%s" % (os.path.split(dir)[1], "json"))):
                    j = serializer.Metadata_file(path=os.path.join(dir, "%s.%s" % (os.path.split(dir)[1], "json")))
                    info = j.data_file.read()
                    if info:
                        if info["typeInfo"] == cfg._component_:
                            return "_".join(info["fullpath"])
            return False
        def move_playblasts_into_components(move_from):
            # logger.info("move_playblasts_into_components from {}, to {}".format(move_from, "components"))
            # Scatter the centralised playblast files back into each
            # component's own folder, pruning emptied source folders.
            for branch in self.branches:
                for root, subFolders, _files in os.walk(branch):
                    for s in subFolders:
                        p = os.path.join(root, s)
                        fullpath = get_fullpath_if_is_component(p)
                        if fullpath:
                            b = os.path.split(branch)[1]
                            master_avi = os.path.join(move_from, b, "{}_MASTER.avi".format(fullpath))
                            master_mov = os.path.join(move_from, b, "{}_MASTER.mov".format(fullpath))
                            versions_dir = os.path.join(move_from, b, "versions", fullpath)
                            if os.path.isfile(master_avi):
                                logger.info("*.avi master from: {}".format(master_avi))
                                target_master_avi = os.path.join(p, "{}_MASTER.avi".format(fullpath))
                                logger.info("will move to: {}".format(target_master_avi))
                                files.dir_move(master_avi, target_master_avi)
                            if os.path.isfile(master_mov):
                                logger.info("*.mov master from: {}".format(master_mov))
                                target_master_mov = os.path.join(p, "{}_MASTER.mov".format(fullpath))
                                logger.info("will move to: {}".format(target_master_mov))
                                files.dir_move(master_mov, target_master_mov)
                            if os.path.isdir(versions_dir):
                                logger.info("playblasts versions folder from: {}".format(versions_dir))
                                target_dir = os.path.join(p, "playblasts")
                                logger.info("will move to: {}".format(target_dir))
                                files.dir_move(versions_dir, target_dir)
                            logger.info("<>")
                            logger.info(os.listdir(os.path.join(move_from, b, "versions")))
                            # Delete the now-empty source folders bottom-up.
                            if not os.listdir(os.path.join(move_from, b, "versions")): files.delete(os.path.join(move_from, b, "versions"))
                            if not os.listdir(os.path.join(move_from, b)): files.delete(os.path.join(move_from, b))
                            if not os.listdir(move_from): files.delete(move_from)
        def move_playblasts_to_single_folder(move_to):
            # logger.info("move_playblasts_to_single_folde from {}, to {}".format(current_root, move_to))
            # Gather per-component playblast files into one shared
            # folder, grouped by branch name.
            for branch in self.branches:
                b = os.path.split(branch)[1]
                for root, subFolders, _files in os.walk(branch):
                    for s in subFolders:
                        p = os.path.join(root, s)
                        fullpath = get_fullpath_if_is_component(p)
                        if fullpath:
                            master_avi = os.path.join(p, "{}_MASTER.avi".format(fullpath))
                            master_mov = os.path.join(p, "{}_MASTER.mov".format(fullpath))
                            versions_dir = os.path.join(p, "playblasts")
                            if os.path.isfile(master_mov):
                                target_master_mov = os.path.join(move_to, b, "{}_MASTER.mov".format(fullpath))
                                files.assure_folder_exists(os.path.join(move_to, b))
                                files.dir_move(master_mov, target_master_mov)
                            if os.path.isfile(master_avi):
                                target_master_avi = os.path.join(move_to, b, "{}_MASTER.avi".format(fullpath))
                                files.assure_folder_exists(os.path.join(move_to, b))
                                files.dir_move(master_avi, target_master_avi)
                            if os.path.isdir(versions_dir):
                                target_dir = os.path.join(move_to, b,"versions", fullpath)
                                files.assure_folder_exists(os.path.join(move_to, b ,"versions"))
                                files.dir_move(versions_dir, target_dir)
        current_root = self.project_playblasts_root
        msg = "You are about to move your playblasts folder from {0}, to {1}.\n" \
              "This may take a few minutes.".format(current_root, new_root)
        prompt = massage.PromptUser(self.pipelineUI, prompt=msg, override_yes_text="Proceed", override_no_label="Don't move",
                                    cancel_button=False)
        result = prompt.exec_()
        # NOTE(review): result == 0 is treated as confirmation here —
        # verify against massage.PromptUser's return convention.
        if result == 0:
            if (current_root == project_root) or (current_root == project_sister):
                if (new_root == project_root) or (new_root == project_sister):
                    # Single folder to single folder: move it wholesale.
                    current_playblasts_folder = self.playblasts_path
                    self.project_playblasts_root = new_root
                    files.dir_move(current_playblasts_folder, self.playblasts_path)
                    return
                else:
                    # Single folder to per-component storage.
                    move_playblasts_into_components(self.playblasts_path)
                    self.project_playblasts_root = new_root
                    return
            else:
                # Per-component storage to a single folder.
                if (new_root == project_root) or (new_root == project_sister):
                    self.project_playblasts_root = new_root
                    move_playblasts_to_single_folder(self.playblasts_path)
                    return
    def explore(self):
        """Open this project's folder in the system file browser."""
        files.explore(self.path)
@property
def branches(self):
branches = list()
for dir in files.list_dir_folders(self.path):
if misc.branch_dir(os.path.join(self.path, dir)):
branches.append(os.path.join(self.path, dir))
return branches
@property
def nice_name(self):
if self.project_file:
try:
return self.project_file["nice_name"]
except:
return self.name
else:
return self.name
@nice_name.setter
def nice_name(self, nice_name):
if self.data_file:
data = {}
data["nice_name"] = nice_name
self.data_file.edit(data)
self.project_file = | |
# -*- coding: UTF-8 -*-
#
# copyright: 2020-2022, <NAME>
# author: <NAME> <http://github.com/fscm>
# license: SPDX-License-Identifier: MIT
"""Tests for the Dinar currency representation(s)."""
from decimal import Context
from pytest import raises
from multicurrency import Currency
from multicurrency import (
CurrencyMismatchException,
CurrencyTypeException)
# Shared decimal context for every test below: 28 significant digits with
# banker's rounding (decimal accepts the rounding mode as its string name).
CONTEXT = Context(prec=28, rounding='ROUND_HALF_EVEN').copy()
"""Tests for the Bahraini Dinar representation."""
from multicurrency import BahrainiDinar
class TestBahrainiDinar:
    """BahrainiDinar currency tests."""

    def test_bahraini_dinar(self):
        """Default construction: attributes, hash, repr and str."""
        amount = CONTEXT.create_decimal(1) / CONTEXT.create_decimal(7)
        dinar = BahrainiDinar(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert dinar.amount == decimal
        assert (dinar.numeric_code, dinar.alpha_code) == ('048', 'BHD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (3, '\u066B')
        assert (dinar.grouping_places, dinar.grouping_sign) == (3, '\u066C')
        assert not dinar.international
        assert dinar.symbol == 'د.ب.'
        assert dinar.symbol_ahead
        assert dinar.symbol_separator == '\u00A0'
        assert dinar.localized_symbol == 'د.ب.'
        assert dinar.convertion == '٠١٢٣٤٥٦٧٨٩-'
        assert hash(dinar) == hash((dinar.__class__, decimal, 'BHD', '048'))
        assert repr(dinar) == (
            'BahrainiDinar(amount: 0.1428571428571428571428571429, '
            'alpha_code: "BHD", '
            'symbol: "د.ب.", '
            'symbol_ahead: True, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "د.ب.", '
            'numeric_code: "048", '
            'decimal_places: "3", '
            'decimal_sign: "\u066B", '
            'grouping_places: "3", '
            'grouping_sign: "\u066C", '
            'convertion: "٠١٢٣٤٥٦٧٨٩-", '
            'international: False)')
        assert str(dinar) == 'د.ب. ٠٫١٤٣'

    def test_bahraini_dinar_negative(self):
        """Negative amounts keep the same formatting metadata."""
        amount = -100
        dinar = BahrainiDinar(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert (dinar.numeric_code, dinar.alpha_code) == ('048', 'BHD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (3, '\u066B')
        assert (dinar.grouping_places, dinar.grouping_sign) == (3, '\u066C')
        assert not dinar.international
        assert dinar.symbol == 'د.ب.'
        assert dinar.symbol_ahead
        assert dinar.symbol_separator == '\u00A0'
        assert dinar.localized_symbol == 'د.ب.'
        assert dinar.convertion == '٠١٢٣٤٥٦٧٨٩-'
        assert hash(dinar) == hash((dinar.__class__, decimal, 'BHD', '048'))
        assert repr(dinar) == (
            'BahrainiDinar(amount: -100, '
            'alpha_code: "BHD", '
            'symbol: "د.ب.", '
            'symbol_ahead: True, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "د.ب.", '
            'numeric_code: "048", '
            'decimal_places: "3", '
            'decimal_sign: "\u066B", '
            'grouping_places: "3", '
            'grouping_sign: "\u066C", '
            'convertion: "٠١٢٣٤٥٦٧٨٩-", '
            'international: False)')
        assert str(dinar) == 'د.ب. -١٠٠٫٠٠٠'

    def test_bahraini_dinar_custom(self):
        """Construction with custom formatting overrides."""
        amount = 1000
        dinar = BahrainiDinar(
            amount=amount,
            decimal_places=5,
            decimal_sign='\u066C',
            grouping_places=2,
            grouping_sign='\u066B',
            international=True,
            symbol_ahead=False,
            symbol_separator='_')
        decimal = CONTEXT.create_decimal(amount)
        assert dinar.amount == decimal
        assert (dinar.numeric_code, dinar.alpha_code) == ('048', 'BHD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (5, '\u066C')
        assert (dinar.grouping_places, dinar.grouping_sign) == (2, '\u066B')
        assert dinar.international
        assert dinar.symbol == 'د.ب.'
        assert not dinar.symbol_ahead
        assert dinar.symbol_separator == '_'
        assert dinar.localized_symbol == 'د.ب.'
        assert dinar.convertion == '٠١٢٣٤٥٦٧٨٩-'
        assert hash(dinar) == hash((dinar.__class__, decimal, 'BHD', '048'))
        assert repr(dinar) == (
            'BahrainiDinar(amount: 1000, '
            'alpha_code: "BHD", '
            'symbol: "د.ب.", '
            'symbol_ahead: False, '
            'symbol_separator: "_", '
            'localized_symbol: "د.ب.", '
            'numeric_code: "048", '
            'decimal_places: "5", '
            'decimal_sign: "\u066C", '
            'grouping_places: "2", '
            'grouping_sign: "\u066B", '
            'convertion: "٠١٢٣٤٥٦٧٨٩-", '
            'international: True)')
        assert str(dinar) == 'BHD 10,00.00000'

    def test_bahraini_dinar_changed(self):
        """Every public attribute is read-only after construction."""
        dinar = BahrainiDinar(amount=1000)
        for attribute, value in (
                ('amount', 999),
                ('alpha_code', 'EUR'),
                ('convertion', '0123456789,.'),
                ('symbol', '€'),
                ('symbol_ahead', False),
                ('symbol_separator', '_'),
                ('localized_symbol', '€'),
                ('numeric_code', '978'),
                ('decimal_places', 3),
                ('decimal_sign', ','),
                ('grouping_places', 4),
                ('grouping_sign', '.'),
                ('international', True)):
            with raises(
                    AttributeError,
                    match='can\'t set attribute'):
                setattr(dinar, attribute, value)

    def test_bahraini_dinar_math_add(self):
        """Addition works between equal currencies only."""
        one = BahrainiDinar(amount=1)
        two = BahrainiDinar(amount=2)
        three = BahrainiDinar(amount=3)
        currency = Currency(amount=1, alpha_code='OTHER')
        with raises(
                CurrencyMismatchException,
                match='unsupported operation between currency BHD and OTHER.'):
            _ = one + currency
        with raises(
                CurrencyTypeException,
                match=(
                    'unsupported operation between <class \'multicurrency.'
                    'dinar.BahrainiDinar\'> '
                    'and <class \'str\'>.')):
            _ = one.__add__('1.00')
        assert (one + two) == three

    def test_bahraini_dinar_slots(self):
        """New attributes cannot be added to the slotted class."""
        dinar = BahrainiDinar(amount=1000)
        with raises(
                AttributeError,
                match=(
                    '\'BahrainiDinar\' '
                    'object has no attribute \'new_variable\'')):
            dinar.new_variable = 'fail'  # pylint: disable=assigning-non-slot
"""Tests for the Algerian Dinar representation."""
from multicurrency import AlgerianDinar
class TestAlgerianDinar:
    """AlgerianDinar currency tests."""

    def test_algerian_dinar(self):
        """Default construction: attributes, hash, repr and str."""
        amount = CONTEXT.create_decimal(1) / CONTEXT.create_decimal(7)
        dinar = AlgerianDinar(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert dinar.amount == decimal
        assert (dinar.numeric_code, dinar.alpha_code) == ('012', 'DZD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (2, ',')
        assert (dinar.grouping_places, dinar.grouping_sign) == (3, '.')
        assert not dinar.international
        assert dinar.symbol == 'د.ج.'
        assert not dinar.symbol_ahead
        assert dinar.symbol_separator == '\u00A0'
        assert dinar.localized_symbol == 'د.ج.'
        assert dinar.convertion == ''
        assert hash(dinar) == hash((dinar.__class__, decimal, 'DZD', '012'))
        assert repr(dinar) == (
            'AlgerianDinar(amount: 0.1428571428571428571428571429, '
            'alpha_code: "DZD", '
            'symbol: "د.ج.", '
            'symbol_ahead: False, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "د.ج.", '
            'numeric_code: "012", '
            'decimal_places: "2", '
            'decimal_sign: ",", '
            'grouping_places: "3", '
            'grouping_sign: ".", '
            'convertion: "", '
            'international: False)')
        assert str(dinar) == '0,14 د.ج.'

    def test_algerian_dinar_negative(self):
        """Negative amounts keep the same formatting metadata."""
        amount = -100
        dinar = AlgerianDinar(amount=amount)
        decimal = CONTEXT.create_decimal(amount)
        assert (dinar.numeric_code, dinar.alpha_code) == ('012', 'DZD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (2, ',')
        assert (dinar.grouping_places, dinar.grouping_sign) == (3, '.')
        assert not dinar.international
        assert dinar.symbol == 'د.ج.'
        assert not dinar.symbol_ahead
        assert dinar.symbol_separator == '\u00A0'
        assert dinar.localized_symbol == 'د.ج.'
        assert dinar.convertion == ''
        assert hash(dinar) == hash((dinar.__class__, decimal, 'DZD', '012'))
        assert repr(dinar) == (
            'AlgerianDinar(amount: -100, '
            'alpha_code: "DZD", '
            'symbol: "د.ج.", '
            'symbol_ahead: False, '
            'symbol_separator: "\u00A0", '
            'localized_symbol: "د.ج.", '
            'numeric_code: "012", '
            'decimal_places: "2", '
            'decimal_sign: ",", '
            'grouping_places: "3", '
            'grouping_sign: ".", '
            'convertion: "", '
            'international: False)')
        assert str(dinar) == '-100,00 د.ج.'

    def test_algerian_dinar_custom(self):
        """Construction with custom formatting overrides."""
        amount = 1000
        dinar = AlgerianDinar(
            amount=amount,
            decimal_places=5,
            decimal_sign='.',
            grouping_places=2,
            grouping_sign=',',
            international=True,
            symbol_ahead=False,
            symbol_separator='_')
        decimal = CONTEXT.create_decimal(amount)
        assert dinar.amount == decimal
        assert (dinar.numeric_code, dinar.alpha_code) == ('012', 'DZD')
        assert (dinar.decimal_places, dinar.decimal_sign) == (5, '.')
        assert (dinar.grouping_places, dinar.grouping_sign) == (2, ',')
        assert dinar.international
        assert dinar.symbol == 'د.ج.'
        assert not dinar.symbol_ahead
        assert dinar.symbol_separator == '_'
        assert dinar.localized_symbol == 'د.ج.'
        assert dinar.convertion == ''
        assert hash(dinar) == hash((dinar.__class__, decimal, 'DZD', '012'))
        assert repr(dinar) == (
            'AlgerianDinar(amount: 1000, '
            'alpha_code: "DZD", '
            'symbol: "د.ج.", '
            'symbol_ahead: False, '
            'symbol_separator: "_", '
            'localized_symbol: "د.ج.", '
            'numeric_code: "012", '
            'decimal_places: "5", '
            'decimal_sign: ".", '
            'grouping_places: "2", '
            'grouping_sign: ",", '
            'convertion: "", '
            'international: True)')
        assert str(dinar) == 'DZD 10,00.00000'

    def test_algerian_dinar_changed(self):
        """Every public attribute is read-only after construction."""
        dinar = AlgerianDinar(amount=1000)
        for attribute, value in (
                ('amount', 999),
                ('alpha_code', 'EUR'),
                ('convertion', '0123456789,.'),
                ('symbol', '€'),
                ('symbol_ahead', False),
                ('symbol_separator', '_'),
                ('localized_symbol', '€'),
                ('numeric_code', '978'),
                ('decimal_places', 3),
                ('decimal_sign', ','),
                ('grouping_places', 4),
                ('grouping_sign', '.'),
                ('international', True)):
            with raises(
                    AttributeError,
                    match='can\'t set attribute'):
                setattr(dinar, attribute, value)

    def test_algerian_dinar_math_add(self):
        """Addition works between equal currencies only."""
        one = AlgerianDinar(amount=1)
        two = AlgerianDinar(amount=2)
        three = AlgerianDinar(amount=3)
        currency = Currency(amount=1, alpha_code='OTHER')
        with raises(
                CurrencyMismatchException,
                match='unsupported operation between currency DZD and OTHER.'):
            _ = one + currency
        with raises(
                CurrencyTypeException,
                match=(
                    'unsupported operation between <class \'multicurrency.'
                    'dinar.AlgerianDinar\'> '
                    'and <class \'str\'>.')):
            _ = one.__add__('1.00')
        assert (one + two) == three

    def test_algerian_dinar_slots(self):
        """New attributes cannot be added to the slotted class."""
        dinar = AlgerianDinar(amount=1000)
        with raises(
                AttributeError,
                match=(
                    '\'AlgerianDinar\' '
                    'object has no attribute \'new_variable\'')):
            dinar.new_variable = 'fail'  # pylint: disable=assigning-non-slot
"""Tests for the Iraqi Dinar representation."""
from multicurrency import IraqiDinar
class TestIraqiDinar:
"""IraqiDinar currency tests."""
def test_iraqi_dinar(self):
"""test_iraqi_dinar."""
amount = CONTEXT.create_decimal(1) / CONTEXT.create_decimal(7)
iraqi_dinar = IraqiDinar(amount=amount)
decimal = CONTEXT.create_decimal(amount)
assert iraqi_dinar.amount == decimal
assert iraqi_dinar.numeric_code == '368'
assert iraqi_dinar.alpha_code == 'IQD'
assert iraqi_dinar.decimal_places == 3
assert iraqi_dinar.decimal_sign == '\u066B'
assert iraqi_dinar.grouping_places == 3
assert iraqi_dinar.grouping_sign == '\u066C'
assert not iraqi_dinar.international
assert iraqi_dinar.symbol == 'د.ع.'
assert iraqi_dinar.symbol_ahead
assert iraqi_dinar.symbol_separator == '\u00A0'
assert iraqi_dinar.localized_symbol == 'د.ع.'
assert iraqi_dinar.convertion == '٠١٢٣٤٥٦٧٨٩-'
assert iraqi_dinar.__hash__() == hash(
(iraqi_dinar.__class__, decimal, 'IQD', '368'))
assert iraqi_dinar.__repr__() == (
'IraqiDinar(amount: 0.1428571428571428571428571429, '
'alpha_code: "IQD", '
'symbol: "د.ع.", '
'symbol_ahead: True, '
'symbol_separator: "\u00A0", '
'localized_symbol: "د.ع.", '
'numeric_code: "368", '
'decimal_places: "3", '
'decimal_sign: | |
axis=0)
# Adds the max cars rental scenario into the rental dictionary for
# the current location.
self.rental_dict[idx][num_max_cars] = {'fee': full_rental_fee_arr,
'prob': full_rental_prob_arr}
# Adds the max cars return scenario into the return dictionary for
# the current location.
self.return_dict[idx][num_max_cars] = [1.]
    def step(self, state, movement_matrix):
        """
        Computes the value estimate of performing a particular movement matrix
        given a particular state.
        The steps of the state includes car movement, car rental and car return.
        At the car return stage, if any location exceeds the maximum number of
        cars possible within that location, excess cars are removed from the
        analysis.
        Parameter(s):
        state: A valid state of the CarRental problem. The state in this case
        is the number of cars in each location.
        movement_matrix: A valid movement matrix by the CarRental agent. A
        movement matrix is a num_locations x num_locations matrix that contains
        values for row index i and column index j where i < j and 0 otherwise.
        For each non-zero value, if the value is bigger than 0, cars equal to
        the value are moved from location_i to location_j. If the value is
        smaller than 0, cars equal to the absolute of the value are moved from
        location_j to location_i. A valid movement matrix ensures that the
        number of cars after all possible movement does not exceed the maximum
        possible number of cars at any location.
        Return(s):
        The value estimate of performing a particular movement_matrix given a
        particular state.
        """
        # NOTE(review): state and movement_matrix are assumed to be numpy
        # integer arrays (the arithmetic below relies on elementwise and
        # axis-wise numpy semantics) — confirm with callers.
        # Initialises the final value estimate to 0.
        final_val = 0.
        # Converts the movement matrix to an action. The action contains only
        # positive values. To obtain the action, two matrices are summed. The
        # first matrix contains all nonnegative values of the movement matrix
        # and 0 otherwise, where all such values are kept in their original
        # locations. The second matrix is based on all negative values of the
        # movement matrix and 0 otherwise. This matrix is transposed and
        # converted to nonnegative values by finding the absolute value
        # of the tranposed matrix, forming the second matrix.
        action = movement_matrix * (movement_matrix > 0) - (movement_matrix * (movement_matrix < 0)).transpose()
        # Performs the car movement step. It is assumed that the policy only
        # provides valid movement matrices. By the convention of this code,
        # losses to each location can be summed in a row and gains to each
        # location can be summed in a column.
        post_movement = state - movement_matrix.sum(axis=1) + movement_matrix.sum(axis=0)
        # Computes the total cost of movement, excluding any free transfers,
        # and subtracts it from the final value estimate.
        movement_cost = (np.maximum(0., action - self.free_transfers_num_arr) * self.transfer_cost_arr).sum()
        final_val -= movement_cost
        # Computes the total cost of storage, specifically when it exceeds the
        # threshold for additional storage, and subtracts it from the final
        # value estimate.
        storage_cost = ((post_movement > self.add_storage_threshold_list) * self.add_storage_fee).sum()
        final_val -= storage_cost
        # Iterates through all rental possibiltiies using a generator.
        for current_rent_num in product(*[range(x+1) for x in post_movement]):
            # Computes the profit and probability of performing the current
            # rental possibility.
            rental_profit = sum(self.rental_dict[location_idx][current_post_movement]['fee'][current_rent_num[location_idx]] for location_idx, current_post_movement in enumerate(post_movement))
            rental_prob = reduce(mul, (self.rental_dict[location_idx][current_post_movement]['prob'][current_rent_num[location_idx]] for location_idx, current_post_movement in enumerate(post_movement)))
            # Performs the rental step for the current rental possibility.
            post_rental = post_movement - current_rent_num
            # Iterates through all return possibiltiies given the current
            # rental possibility using a generator.
            for current_return_num in product(*[range(self.max_cars_list[idx] - post_rental[idx]+1) for idx in range(self.num_locations)]):
                # Computes the probability of performing current return
                # possibility.
                return_prob = reduce(mul, (self.return_dict[location_idx][current_post_rental][current_return_num[location_idx]] for location_idx, current_post_rental in enumerate(post_rental)))
                # Performs the return step for the current return possibility
                # to obtain the final state.
                final_state = post_rental + current_return_num
                # Computes the final probability by multiplying the rental
                # and return probabilties.
                final_prob = rental_prob * return_prob
                # Adds the value estimate for the current state, action and
                # new state to the final value estimate.
                # NOTE(review): self.v is presumably the current value-function
                # table indexed by state tuple (Bellman backup with
                # self.discount) — verify against the class initialiser.
                final_val += final_prob * (rental_profit + self.discount * self.v[tuple(final_state.tolist())])
        return final_val
def find_valid_moves(self, state):
    """
    Finds all valid moves in the current state.

    Parameter(s):
    state: A valid state of the CarRental problem. The state in this case
    is the number of cars in each location.

    Return(s):
    A list containing all valid movement matrices in the current state.
    """
    # A transfer pair is feasible when the sending location holds enough
    # cars and the receiving location stays within its capacity. A
    # movement matrix is kept only when every one of its transfer pairs
    # is feasible for the given state.
    valid_matrices = []
    for candidate in self.full_movement_matrix_list:
        for pair in self.transfer_index_list:
            moved = candidate[pair]
            # Normalise the direction of the transfer: a positive count
            # sends cars from pair[0] to pair[1], a negative count sends
            # the absolute number the other way.
            if moved > 0:
                source, destination = pair
            else:
                destination, source = pair
                moved = -moved
            # A zero transfer is always feasible; otherwise reject the
            # matrix when the source lacks cars or the destination would
            # exceed its maximum capacity.
            if moved and (state[source] < moved or state[destination] + moved > self.max_cars_list[destination]):
                break
        else:
            # No transfer pair failed, so the matrix is valid.
            valid_matrices.append(candidate)
    return valid_matrices
def visualise(self):
    """
    Visualises the result of analysing the CarRental problem.

    Produces one figure with two subplots — the optimal policy as a heatmap
    (left) and the optimal value estimate as a 3D surface (right) — and
    saves it next to this source file as a PNG named after ``self.name``
    (when set) and ``self.solve_method``.
    """
    # Obtain the directory containing this file; the output image is saved
    # alongside the source.
    curr_dir = os.path.dirname(os.path.abspath(__file__))
    # Creates the required diagram and assigns a title to it, which includes
    # the number of iterations performed as part of the specified method.
    fig = plt.figure(figsize=(20, 10))
    ax1 = fig.add_subplot(121)
    ax2 = fig.add_subplot(122, projection='3d')
    # Turn e.g. "policy_iteration" into "Policy Iteration" for the title.
    graph_title = ' '.join([substr.title() for substr in self.solve_method.split('_')])
    fig.suptitle('Car Rental %s Results: %i Iterations' % (graph_title, self.current_iter), fontsize=30)
    # Converts the final policy from a dictionary to an array for
    # visualisation.
    final_policy = np.zeros_like(self.v, dtype=int)
    for key, val in self.pi.items():
        # For two locations, the only policy involved is between the
        # 0th and 1st location, so only the (0, 1) entry of the first
        # movement matrix of each state is plotted.
        final_policy[key] = val[0][0, 1]
    # Draws the policy in the form of a heatmap in the left subplot.
    # Includes the label and tickmarks of the colorbar in the process.
    # NOTE(review): `fig` is rebound here to heatmap()'s return value;
    # the Figure handle is no longer needed after this point, but the
    # rebinding is confusing — worth renaming in a future change.
    fig = heatmap(final_policy,
                  cmap=cm.coolwarm,
                  ax=ax1,
                  cbar_kws={'label': 'Cars to Transfer',
                            'ticks': list(range(final_policy.min(), final_policy.max()+1)),
                            'orientation': 'horizontal'})
    # Sets the axes labels, limits and tick marks for the left subplot.
    ax1.set_xlabel('Cars At Second Location')
    ax1.set_ylabel('Cars At First Location')
    ax1.set_ylim(0, self.max_cars_list[0] + 1)
    ax1.set_xlim(0, self.max_cars_list[1] + 1)
    ax1.set_yticklabels(ax1.get_yticklabels(), rotation=0)
    # Prepares the x and y values of the right subplot by obtaining the
    # indexes of each value in the final value estimate.
    first_arr, second_arr = np.meshgrid(range(self.max_cars_list[0]+1), range(self.max_cars_list[1]+1), indexing='ij')
    # Uses the indexes and final value estimate to draw a surface plot in
    # the right subplot, then annotates the four corner states with their
    # rounded value estimates (the small coordinate offsets keep the text
    # clear of the surface).
    ax2.plot_surface(first_arr, second_arr, self.v)
    ax2.text(-0.25, -0.25, self.v[0, 0]+10, self.v[0, 0].round().astype(int))
    ax2.text(self.max_cars_list[0]-0.25, -0.25, self.v[self.max_cars_list[0], 0]-5, self.v[self.max_cars_list[0], 0].round().astype(int))
    ax2.text(-0.25, self.max_cars_list[1], self.v[0, self.max_cars_list[1]]+5, self.v[0, self.max_cars_list[1]].round().astype(int))
    ax2.text(self.max_cars_list[0]-0.25, self.max_cars_list[1], self.v[self.max_cars_list[0], self.max_cars_list[1]]+5, self.v[self.max_cars_list[0], self.max_cars_list[1]].round().astype(int))
    # Sets the axes labels and tick marks for the right subplot.
    ax2.set_xlabel('Cars At First Location')
    ax2.set_xticks(list(range(self.max_cars_list[0] + 1)))
    ax2.set_ylabel('Cars At Second Location')
    ax2.set_yticks(list(reversed(range(self.max_cars_list[1] + 1))))
    ax2.set_zlabel('Value Estimate ($)')
    # Sets the title for both subplots.
    title_size = 15
    ax1.set_title('Optimal Policy', size=title_size)
    ax2.set_title('Optimal Value', size=title_size)
    # Saves the plotted diagram with the name of the selected method
    # (prefixed with the problem name when one was provided).
    if self.name:
        plt.savefig(os.path.join(curr_dir, 'carrental_%s_%s_results.png' % (self.name, self.solve_method)))
    else:
        plt.savefig(os.path.join(curr_dir, 'carrental_%s_results.png' % self.solve_method))
    plt.close()
def policy_evaluation_state_func(self, *args):
    """
    Performs the policy evaluation step for the given state. Only used when
    multiprocessing is enabled.

    Parameter(s):
    args: A valid state of the CarRental problem. The state in this case
    is the number of cars in each location.

    Return(s):
    A (state, value) tuple, where value is the value estimate averaged over
    every movement matrix prescribed by the current policy for the state.
    """
    # Accumulate the value estimate of each movement matrix in the current
    # policy for this state, then average over the number of matrices.
    total_value = 0.0
    matrix_count = 0
    for movement_matrix in self.pi[args]:
        total_value += self.step(args, movement_matrix)
        matrix_count += 1
    return args, total_value / matrix_count
def policy_evaluation_log_result(self, results):
    """
    Stores the results of policy evaluation for every state. Only used when
    multiprocessing is enabled.

    Parameter(s):
    results: List of (state, value) tuples, the result of running policy
    evaluation.
    """
    # Write each averaged value estimate into the working copy of the
    # value function at its corresponding state.
    for state, average_value in results:
        self.new_v[state] = average_value
def policy_improvement_state_func(self, state, tolerance):
"""
Performs the policy improvement step for the given state. Only used when
multiprocessing is enabled.
Parameter(s):
args: A valid state of the CarRental problem. The state in this case
is the number of cars in each location.
tolerance: Used to check if the value estimate has converged and to
decide on policies that result in the highest value estimate (the second
use case is to handle noise in the results caused by floating point
truncation).
"""
# Finds all valid movement matrices for the current state.
filtered_movement_matrix_list = self.find_valid_moves(state)
# For the current state, finds the value estimate for every
# possible moveement matrix and places the value | |
# repository: infdahai/Strong_Learning (gh_stars: 0)
# encoding: utf-8
"""
@author: <NAME>
@contact: <EMAIL>
@version: 1.0
@file: main.py
@time: 2018/3/20
"""
import argparse
import os
import shutil
import socket
import time
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.parallel
import torch.optim as optim
import torch.utils.data
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
from torch.autograd import Variable
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.data import DataLoader
import utils.transformed as transforms
from data.ImageFolderDataset import MyImageFolder
from models.HidingUNet import UnetGenerator
from models.RevealNet import RevealNet
from models.Discriminator import PixelDiscriminator
from models.Discriminator import PatchDiscriminator
from models.Discriminator_SN import Discriminator_SN
from models.Discriminator import Discriminator
from models.Discriminator_Switch import Discriminator_Switch
import numpy as np
DATA_DIR = './datasets/derain/'
# DATA_DIR = '/ghome/fanghan/HIDE_REAVEAL/datasets/right_VOC/'
# DATA_DIR = './datasets/VOC_smooth/'


def _str2bool(value):
    """Parse a command-line boolean flag value.

    argparse's ``type=bool`` is a well-known trap: ``bool`` applied to any
    non-empty string (including ``"False"``) is True, so ``--cuda False``
    would silently enable CUDA. This parser accepts the usual spellings of
    true/false and rejects anything else with a clear error.
    """
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('yes', 'true', 't', 'y', '1'):
        return True
    if lowered in ('no', 'false', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean value, got %r' % value)


parser = argparse.ArgumentParser()
# --- run / data-loading setup ---
parser.add_argument('--dataset', default="train",
                    help='train | val | test')
parser.add_argument('--workers', type=int, default=8,
                    help='number of data loading workers')
parser.add_argument('--batchSize', type=int, default=16,
                    help='input batch size')
parser.add_argument('--imageSize', type=int, default=256,
                    help='the number of frames')
# --- optimisation settings ---
parser.add_argument('--niter', type=int, default=200,
                    help='number of epochs to train for')
parser.add_argument('--lr', type=float, default=0.001,
                    help='learning rate, default=0.001')
parser.add_argument('--decay_round', type=int, default=10,
                    help='learning rate decay 0.5 each decay_round')
parser.add_argument('--beta1', type=float, default=0.5,
                    help='beta1 for adam. default=0.5')
# NOTE: type=_str2bool (not type=bool) so that '--cuda False' is parsed as
# False; with type=bool any non-empty string would be treated as True.
parser.add_argument('--cuda', type=_str2bool, default=True,
                    help='enables cuda')
parser.add_argument('--ngpu', type=int, default=1,
                    help='number of GPUs to use')
# --- checkpoints to resume from (empty string = train from scratch) ---
parser.add_argument('--Hnet', default='',
                    help="path to Hidingnet (to continue training)")
parser.add_argument('--Rnet', default='',
                    help="path to Revealnet (to continue training)")
parser.add_argument('--Dnet', default='',
                    help="path to Discriminator (to continue training)")
# --- output folders (suffixed with an experiment dir at runtime) ---
parser.add_argument('--trainpics', default='./training/',
                    help='folder to output training images')
parser.add_argument('--validationpics', default='./training/',
                    help='folder to output validation images')
parser.add_argument('--testPics', default='./training/',
                    help='folder to output test images')
parser.add_argument('--outckpts', default='./training/',
                    help='folder to output checkpoints')
parser.add_argument('--outlogs', default='./training/',
                    help='folder to output images')
parser.add_argument('--outcodes', default='./training/',
                    help='folder to save the experiment codes')
parser.add_argument('--remark', default='', help='comment')
parser.add_argument('--test', default='', help='test mode, you need give the test pics dirs in this param')
parser.add_argument('--hostname', default=socket.gethostname(), help='the host name of the running server')
# Same argparse-boolean fix as --cuda above.
parser.add_argument('--debug', type=_str2bool, default=False, help='debug mode do not create folders')
parser.add_argument('--logFrequency', type=int, default=10, help='the frequency of print the log on the console')
parser.add_argument('--resultPicFrequency', type=int, default=100, help='the frequency of save the resultPic')
# datasets to train
parser.add_argument('--datasets', type=str, default='derain',
                    help='denoise/derain')
# read secret image
parser.add_argument('--secret', type=str, default='flower',
                    help='secret folder')
# hyperparameters of the loss terms
parser.add_argument('--beta', type=float, default=0.75,
                    help='hyper parameter of beta :secret_reveal err')
parser.add_argument('--betamse', type=float, default=10000,
                    help='hyper parameter of beta: mse_loss')
parser.add_argument('--betacons', type=float, default=1,
                    help='hyper parameter of beta: consist_loss')
parser.add_argument('--betassim', type=float, default=0,
                    help='hyper parameter of beta: ssim_loss')
parser.add_argument('--ssimws', type=float, default=11,
                    help='hyper parameter of beta: ssim window_size')
parser.add_argument('--betavgg', type=float, default=0,
                    help='hyper parameter of beta: vgg_loss')
parser.add_argument('--betapsnr', type=float, default=0,
                    help='hyper parameter of beta: psnr_loss')
parser.add_argument('--Dnorm', type=str, default='instance', help=' [instance | spectral | switch]')
parser.add_argument('--num_downs', type=int, default=7, help='nums of Unet downsample')
def main():
    """Entry point: parse options, build the data pipeline and the three
    networks (Hnet hides a secret image inside a cover, Rnet reveals it,
    Dnet discriminates cover vs. container images), then run the
    train/validation loop for ``opt.niter`` epochs, saving all three
    networks' weights every epoch.
    """
    ############### define global parameters ###############
    # These are shared with train()/validation(), which are defined at
    # module level and read them directly.
    global opt, optimizerH, optimizerR, optimizerD, writer, logPath, schedulerH, schedulerR
    global val_loader, smallestLoss, mse_loss, gan_loss, pixel_loss, patch, criterion_GAN, criterion_pixelwise
    ################# parse and echo the run configuration ###############
    opt = parser.parse_args()
    if torch.cuda.is_available() and not opt.cuda:
        print("WARNING: You have a CUDA device, "
              "so you should probably run with --cuda")
    cudnn.benchmark = True
    ############ create the dirs to save the result #############
    # Every output folder is suffixed with hostname + timestamp (+ remark)
    # so that concurrent or repeated runs never overwrite each other.
    cur_time = time.strftime('%Y-%m-%d-%H_%M_%S', time.localtime())
    experiment_dir = opt.hostname + "_" + cur_time + opt.remark
    opt.outckpts += experiment_dir + "/checkPoints"
    opt.trainpics += experiment_dir + "/trainPics"
    opt.validationpics += experiment_dir + "/validationPics"
    opt.outlogs += experiment_dir + "/trainingLogs"
    opt.outcodes += experiment_dir + "/codes"
    opt.testPics += experiment_dir + "/testPics"
    if not os.path.exists(opt.outckpts):
        os.makedirs(opt.outckpts)
    if not os.path.exists(opt.trainpics):
        os.makedirs(opt.trainpics)
    if not os.path.exists(opt.validationpics):
        os.makedirs(opt.validationpics)
    if not os.path.exists(opt.outlogs):
        os.makedirs(opt.outlogs)
    if not os.path.exists(opt.outcodes):
        os.makedirs(opt.outcodes)
    if (not os.path.exists(opt.testPics)) and opt.test != '':
        os.makedirs(opt.testPics)
    logPath = opt.outlogs + '/%s_%d_log.txt' % (opt.dataset, opt.batchSize)
    # Log the full option set so each run is reproducible from its log file.
    print_log(str(opt), logPath)
    # Snapshot the code used for this experiment.
    save_current_codes(opt.outcodes)
    # tensorboardX writer
    writer = SummaryWriter(comment='**' + opt.hostname + "_" + opt.remark)
    ############## build the datasets ############################
    DATA_DIR_root = './datasets/'
    DATA_DIR = os.path.join(DATA_DIR_root, opt.datasets)
    traindir = os.path.join(DATA_DIR, 'train')
    valdir = os.path.join(DATA_DIR, 'val')
    secretdir = os.path.join(DATA_DIR_root, opt.secret)
    # NOTE(review): cover/val images are resized to (imageSize, 512) —
    # presumably each sample is a side-by-side A|B pair that is split into
    # two imageSize×imageSize halves downstream; confirm against
    # MyImageFolder and the consumer in train().
    train_dataset = MyImageFolder(
        traindir,
        transforms.Compose([
            transforms.Resize([opt.imageSize, 512]),
            transforms.ToTensor(),
        ]))
    val_dataset = MyImageFolder(
        valdir,
        transforms.Compose([
            transforms.Resize([opt.imageSize, 512]),
            transforms.ToTensor(),
        ]))
    secret_dataset = MyImageFolder(
        secretdir,
        transforms.Compose([
            transforms.Resize([opt.imageSize, opt.imageSize]),
            transforms.ToTensor(),
        ]))
    assert train_dataset
    assert val_dataset
    assert secret_dataset
    train_loader = DataLoader(train_dataset, batch_size=opt.batchSize,
                              shuffle=True, num_workers=int(opt.workers))
    secret_loader = DataLoader(secret_dataset, batch_size=opt.batchSize,
                               shuffle=False, num_workers=int(opt.workers))
    val_loader = DataLoader(val_dataset, batch_size=opt.batchSize,
                            shuffle=True, num_workers=int(opt.workers))
    ############## network architectures ############################
    # Hnet consumes the cover and secret stacked on the channel axis
    # (3 + 3 = 6 input channels) and emits a 3-channel container image.
    Hnet = UnetGenerator(input_nc=6, output_nc=3, num_downs= opt.num_downs, output_function=nn.Sigmoid)
    Hnet.cuda()
    Hnet.apply(weights_init)
    Rnet = RevealNet(output_function=nn.Sigmoid)
    Rnet.cuda()
    Rnet.apply(weights_init)
    # The discriminator's normalisation is selectable: spectral norm,
    # switchable norm, or the default discriminator.
    if opt.Dnorm == "spectral" :
        Dnet = Discriminator_SN(in_channels=3)
        Dnet.cuda()
    elif opt.Dnorm == "switch" :
        Dnet = Discriminator_Switch(in_channels=3)
        Dnet.cuda()
    else:
        Dnet = Discriminator(in_channels=3)
        Dnet.cuda()
    # Dnet.apply(weights_init)
    # Calculate output of image discriminator (PatchGAN)
    patch = (1, opt.imageSize // 2 ** 4, opt.imageSize // 2 ** 4)
    # setup optimizer
    optimizerH = optim.Adam(Hnet.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
    schedulerH = ReduceLROnPlateau(optimizerH, mode='min', factor=0.2, patience=5, verbose=True)
    optimizerR = optim.Adam(Rnet.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
    schedulerR = ReduceLROnPlateau(optimizerR, mode='min', factor=0.2, patience=8, verbose=True)
    optimizerD = optim.Adam(Dnet.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
    schedulerD = ReduceLROnPlateau(optimizerD, mode='min', factor=0.2, patience=5, verbose=True)
    # Optionally resume each network from a checkpoint path.
    if opt.Hnet != "":
        Hnet.load_state_dict(torch.load(opt.Hnet))
    # Wrap in DataParallel when training on more than one GPU.
    if opt.ngpu > 1:
        Hnet = torch.nn.DataParallel(Hnet).cuda()
    print_network(Hnet)
    if opt.Rnet != '':
        Rnet.load_state_dict(torch.load(opt.Rnet))
    if opt.ngpu > 1:
        Rnet = torch.nn.DataParallel(Rnet).cuda()
    print_network(Rnet)
    if opt.Dnet != '':
        Dnet.load_state_dict(torch.load(opt.Dnet))
    if opt.ngpu > 1:
        Dnet = torch.nn.DataParallel(Dnet).cuda()
    print_network(Dnet)
    # define loss
    mse_loss = nn.MSELoss().cuda()
    criterion_GAN = nn.MSELoss().cuda()
    criterion_pixelwise = nn.L1Loss().cuda()
    smallestLoss = 10000
    print_log("training is beginning .......................................................", logPath)
    for epoch in range(opt.niter):
        ######################## train ##########################################
        train(train_loader, secret_loader, epoch, Hnet=Hnet, Rnet=Rnet, Dnet=Dnet)
        ####################### validation #####################################
        val_hloss, val_rloss, val_r_mseloss, val_r_consistloss, val_dloss, val_fakedloss, val_realdloss, val_Ganlosses, val_Pixellosses, val_sumloss = validation(val_loader, secret_loader, epoch, Hnet=Hnet, Rnet=Rnet, Dnet=Dnet)
        ####################### adjust learning rate ############################
        # Plateau schedulers: H tracks the combined loss, R and D their own.
        schedulerH.step(val_sumloss)
        schedulerR.step(val_rloss)
        schedulerD.step(val_dloss)
        # # save the best model parameters
        # if val_sumloss < globals()["smallestLoss"]:
        #     globals()["smallestLoss"] = val_sumloss
        #     # do checkPointing
        #     torch.save(Hnet.state_dict(),
        #                '%s/netH_epoch_%d,sumloss=%.6f,Hloss=%.6f.pth' % (
        #                    opt.outckpts, epoch, val_sumloss, val_hloss))
        #     torch.save(Rnet.state_dict(),
        #                '%s/netR_epoch_%d,sumloss=%.6f,Rloss=%.6f.pth' % (
        #                    opt.outckpts, epoch, val_sumloss, val_rloss))
        #     torch.save(Dnet.state_dict(),
        #                '%s/netD_epoch_%d,sumloss=%.6f,Dloss=%.6f.pth' % (
        #                    opt.outckpts, epoch, val_sumloss, val_dloss))
        # save the epoch model parameters
        torch.save(Hnet.state_dict(),
                   '%s/netH_epoch_%d,sumloss=%.6f,Hloss=%.6f.pth' % (
                       opt.outckpts, epoch, val_sumloss, val_hloss))
        torch.save(Rnet.state_dict(),
                   '%s/netR_epoch_%d,sumloss=%.6f,Rloss=%.6f.pth' % (
                       opt.outckpts, epoch, val_sumloss, val_rloss))
        torch.save(Dnet.state_dict(),
                   '%s/netD_epoch_%d,sumloss=%.6f,Dloss=%.6f.pth' % (
                       opt.outckpts, epoch, val_sumloss, val_dloss))
    writer.close()
def train(train_loader, secret_loader, epoch, Hnet, Rnet, Dnet):
batch_time = AverageMeter()
data_time = AverageMeter()
Hlosses = AverageMeter() # 纪录每个epoch H网络的loss
Rlosses = AverageMeter() # 纪录每个epoch R网络的loss
R_mselosses = AverageMeter()
R_consistlosses = AverageMeter()
Dlosses = AverageMeter()
FakeDlosses = AverageMeter()
RealDlosses = AverageMeter()
Ganlosses = AverageMeter()
Pixellosses = AverageMeter()
SumLosses = AverageMeter() # 纪录每个epoch Hloss + β*Rloss
# switch to train mode
Hnet.train()
Rnet.train()
Dnet.train()
# Tensor type
Tensor = torch.cuda.FloatTensor
start_time = time.time()
for i, data in enumerate(train_loader, 0):
data_time.update(time.time() - start_time)
Hnet.zero_grad()
Rnet.zero_grad()
this_batch_size = int(data.size()[0]) # 处理每个epoch 最后一个batch可能不足opt.bachsize
cover_img = data[0:this_batch_size, :, :, :] # batchsize,3,256,256
cover_img_A = cover_img[ :, :, 0:256, 0:256]
cover_img_B = cover_img[ :, :, 0:256, 256:512]
for j, data in enumerate(secret_loader, 0):
secret_img = data
secret_img = secret_img[0:this_batch_size, :, :, :] # 1,3,256,256
secret_img = secret_img.repeat(this_batch_size, 1, 1, 1)
concat_img = torch.cat([cover_img_A, secret_img], dim=1)
# 数据放入GPU
if opt.cuda:
cover_img = cover_img.cuda()
cover_img_A = cover_img_A.cuda()
cover_img_B = cover_img_B.cuda()
secret_img = secret_img.cuda()
concat_img = concat_img.cuda()
concat_imgv = Variable(concat_img) # concatImg 作为H网络的输入
cover_imgv = Variable(cover_img_B) # coverImg 作为H网络的label
container_img = Hnet(concat_imgv) # 得到藏有secretimg的containerImg
# Adversarial ground truths
valid = Variable(Tensor(np.ones((cover_imgv.size(0), *patch))), requires_grad=False)
fake = Variable(Tensor(np.zeros((cover_imgv.size(0), *patch))), requires_grad=False)
# pred_fake = Dnet(container_img, cover_imgv)
pred_fake = Dnet(container_img)
gan_loss = criterion_GAN(pred_fake, valid)
pixel_loss = criterion_pixelwise(container_img, cover_imgv)
errH = opt.betamse * (mse_loss(container_img, cover_imgv) + 0.01 * (gan_loss + 100 * pixel_loss))
# errH = opt.betamse * (mse_loss(container_img, cover_imgv) + gan_loss)
# errGAN = opt.betamse * 0.01 * (gan_loss + 100 * pixel_loss)
rev_secret_img = Rnet(container_img) # containerImg作为R网络的输入 得到RevSecImg
secret_imgv = Variable(secret_img) # secretImg作为R网络的label
errR_mse = opt.betamse * mse_loss(rev_secret_img, secret_imgv)
# clean_rev_serect_img = Rnet(cover_img)
# clean_errR = opt.betamse * mse(clean_rev_serect_img, )
half_batchsize = int(this_batch_size / 2)
# print(half_batchsize)
errR_consist = 10000 * mse_loss(rev_secret_img[0:half_batchsize, :, : ,:], rev_secret_img[half_batchsize:this_batch_size, : ,: ,:])
errR = errR_mse + opt.betacons * errR_consist
betaerrR_secret = opt.beta * errR
err_sum = errH + betaerrR_secret
# 计算梯度
err_sum.backward()
# 优化两个网络的参数
optimizerH.step()
optimizerR.step()
# Train Discriminator
Dnet.zero_grad()
# Real loss
pred_real = Dnet(cover_imgv)
loss_real = criterion_GAN(pred_real, valid)
# Fake loss
pred_fake = Dnet(container_img.detach())
loss_fake = criterion_GAN(pred_fake, fake)
# Total loss
errD = 10000 * 0.5 * (loss_real + loss_fake)
errD.backward()
optimizerD.step()
Hlosses.update(errH.data, this_batch_size) # 纪录H loss值
Rlosses.update(errR.data, this_batch_size) # 纪录R loss值
R_mselosses.update(errR_mse.data, this_batch_size) # 纪录R_mse loss值
R_consistlosses.update(errR_consist.data, this_batch_size) # 纪录R_consit loss值
Dlosses.update(errD.data, this_batch_size) # 纪录D loss值
FakeDlosses.update(loss_fake.data, this_batch_size) # 纪录fakeD loss值
RealDlosses.update(loss_real.data, this_batch_size) # 纪录realD loss值
Ganlosses.update(gan_loss.data, this_batch_size) #记录gan loss
Pixellosses.update(pixel_loss.data, this_batch_size) #记录pixel loss
SumLosses.update(err_sum.data, this_batch_size)
# 更新一个batch的时间
batch_time.update(time.time() - start_time)
start_time = time.time()
# 日志信息
log = | |
# gh_stars: 100-1000
# Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
import pytest
from .helpers import _Base
class TestLoadJobConfig(_Base):
    """Unit tests for ``google.cloud.bigquery.job.LoadJobConfig``.

    Most properties follow one contract, exercised by a trio of tests:
    ``*_missing`` (an unset property reads back as None), ``*_hit`` (a value
    placed directly in the backing ``_properties["load"]`` resource dict is
    surfaced by the getter), and ``*_setter`` (the setter writes through to
    the backing resource). ``*_w_none`` variants additionally check that
    assigning None removes the key from the resource.
    """

    # Sub-resource key under which load-job options live in ``_properties``.
    JOB_TYPE = "load"

    @staticmethod
    def _get_target_class():
        # Late import: resolve the class under test at call time.
        from google.cloud.bigquery.job import LoadJobConfig

        return LoadJobConfig

    def test_ctor_w_properties(self):
        # Keyword arguments to the constructor populate the properties.
        config = self._get_target_class()(
            allow_jagged_rows=True, allow_quoted_newlines=True
        )
        self.assertTrue(config.allow_jagged_rows)
        self.assertTrue(config.allow_quoted_newlines)

    # --- allow_jagged_rows ---

    def test_allow_jagged_rows_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.allow_jagged_rows)

    def test_allow_jagged_rows_hit(self):
        config = self._get_target_class()()
        config._properties["load"]["allowJaggedRows"] = True
        self.assertTrue(config.allow_jagged_rows)

    def test_allow_jagged_rows_setter(self):
        config = self._get_target_class()()
        config.allow_jagged_rows = True
        self.assertTrue(config._properties["load"]["allowJaggedRows"])

    # --- allow_quoted_newlines ---

    def test_allow_quoted_newlines_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.allow_quoted_newlines)

    def test_allow_quoted_newlines_hit(self):
        config = self._get_target_class()()
        config._properties["load"]["allowQuotedNewlines"] = True
        self.assertTrue(config.allow_quoted_newlines)

    def test_allow_quoted_newlines_setter(self):
        config = self._get_target_class()()
        config.allow_quoted_newlines = True
        self.assertTrue(config._properties["load"]["allowQuotedNewlines"])

    # --- autodetect ---

    def test_autodetect_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.autodetect)

    def test_autodetect_hit(self):
        config = self._get_target_class()()
        config._properties["load"]["autodetect"] = True
        self.assertTrue(config.autodetect)

    def test_autodetect_setter(self):
        config = self._get_target_class()()
        config.autodetect = True
        self.assertTrue(config._properties["load"]["autodetect"])

    # --- clustering_fields: stored nested under a "clustering" key ---

    def test_clustering_fields_miss(self):
        config = self._get_target_class()()
        self.assertIsNone(config.clustering_fields)

    def test_clustering_fields_hit(self):
        config = self._get_target_class()()
        fields = ["email", "postal_code"]
        config._properties["load"]["clustering"] = {"fields": fields}
        self.assertEqual(config.clustering_fields, fields)

    def test_clustering_fields_setter(self):
        fields = ["email", "postal_code"]
        config = self._get_target_class()()
        config.clustering_fields = fields
        self.assertEqual(config._properties["load"]["clustering"], {"fields": fields})

    def test_clustering_fields_setter_w_none(self):
        config = self._get_target_class()()
        fields = ["email", "postal_code"]
        config._properties["load"]["clustering"] = {"fields": fields}
        config.clustering_fields = None
        self.assertIsNone(config.clustering_fields)
        self.assertNotIn("clustering", config._properties["load"])

    # --- create_disposition ---

    def test_create_disposition_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.create_disposition)

    def test_create_disposition_hit(self):
        from google.cloud.bigquery.job import CreateDisposition

        disposition = CreateDisposition.CREATE_IF_NEEDED
        config = self._get_target_class()()
        config._properties["load"]["createDisposition"] = disposition
        self.assertEqual(config.create_disposition, disposition)

    def test_create_disposition_setter(self):
        from google.cloud.bigquery.job import CreateDisposition

        disposition = CreateDisposition.CREATE_IF_NEEDED
        config = self._get_target_class()()
        config.create_disposition = disposition
        self.assertEqual(config._properties["load"]["createDisposition"], disposition)

    # --- decimal_target_types: getter returns a frozenset view ---

    def test_decimal_target_types_miss(self):
        config = self._get_target_class()()
        self.assertIsNone(config.decimal_target_types)

    def test_decimal_target_types_hit(self):
        from google.cloud.bigquery.enums import DecimalTargetType

        config = self._get_target_class()()
        decimal_target_types = [DecimalTargetType.NUMERIC, DecimalTargetType.STRING]
        config._properties["load"]["decimalTargetTypes"] = decimal_target_types
        expected = frozenset(decimal_target_types)
        self.assertEqual(config.decimal_target_types, expected)

    def test_decimal_target_types_setter(self):
        from google.cloud.bigquery.enums import DecimalTargetType

        decimal_target_types = (DecimalTargetType.NUMERIC, DecimalTargetType.BIGNUMERIC)
        config = self._get_target_class()()
        config.decimal_target_types = decimal_target_types
        self.assertEqual(
            config._properties["load"]["decimalTargetTypes"],
            list(decimal_target_types),
        )

    def test_decimal_target_types_setter_w_none(self):
        from google.cloud.bigquery.enums import DecimalTargetType

        config = self._get_target_class()()
        decimal_target_types = [DecimalTargetType.BIGNUMERIC]
        config._properties["load"]["decimalTargetTypes"] = decimal_target_types
        config.decimal_target_types = None
        self.assertIsNone(config.decimal_target_types)
        self.assertNotIn("decimalTargetTypes", config._properties["load"])
        config.decimal_target_types = None  # No error if unsetting an unset property.

    # --- destination_encryption_configuration: round-trips through the
    # EncryptionConfiguration helper class ---

    def test_destination_encryption_configuration_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.destination_encryption_configuration)

    def test_destination_encryption_configuration_hit(self):
        from google.cloud.bigquery.encryption_configuration import (
            EncryptionConfiguration,
        )

        kms_key_name = "kms-key-name"
        encryption_configuration = EncryptionConfiguration(kms_key_name)
        config = self._get_target_class()()
        config._properties["load"]["destinationEncryptionConfiguration"] = {
            "kmsKeyName": kms_key_name
        }
        self.assertEqual(
            config.destination_encryption_configuration, encryption_configuration
        )

    def test_destination_encryption_configuration_setter(self):
        from google.cloud.bigquery.encryption_configuration import (
            EncryptionConfiguration,
        )

        kms_key_name = "kms-key-name"
        encryption_configuration = EncryptionConfiguration(kms_key_name)
        config = self._get_target_class()()
        config.destination_encryption_configuration = encryption_configuration
        expected = {"kmsKeyName": kms_key_name}
        self.assertEqual(
            config._properties["load"]["destinationEncryptionConfiguration"], expected
        )

    def test_destination_encryption_configuration_setter_w_none(self):
        kms_key_name = "kms-key-name"
        config = self._get_target_class()()
        config._properties["load"]["destinationEncryptionConfiguration"] = {
            "kmsKeyName": kms_key_name
        }
        config.destination_encryption_configuration = None
        self.assertIsNone(config.destination_encryption_configuration)
        self.assertNotIn(
            "destinationEncryptionConfiguration", config._properties["load"]
        )

    # --- destination_table_description / friendly_name: both live in the
    # shared "destinationTableProperties" sub-dict, so the *_already /
    # *_w_none variants check that sibling keys are preserved ---

    def test_destination_table_description_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.destination_table_description)

    def test_destination_table_description_hit(self):
        description = "Description"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "description": description
        }
        self.assertEqual(config.destination_table_description, description)

    def test_destination_table_description_setter(self):
        description = "Description"
        config = self._get_target_class()()
        config.destination_table_description = description
        expected = {"description": description}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    def test_destination_table_description_setter_w_fn_already(self):
        description = "Description"
        friendly_name = "<NAME>"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "friendlyName": friendly_name
        }
        config.destination_table_description = description
        expected = {"friendlyName": friendly_name, "description": description}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    def test_destination_table_description_w_none(self):
        description = "Description"
        friendly_name = "<NAME>"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "description": description,
            "friendlyName": friendly_name,
        }
        config.destination_table_description = None
        expected = {"friendlyName": friendly_name}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    def test_destination_table_friendly_name_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.destination_table_friendly_name)

    def test_destination_table_friendly_name_hit(self):
        friendly_name = "<NAME>"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "friendlyName": friendly_name
        }
        self.assertEqual(config.destination_table_friendly_name, friendly_name)

    def test_destination_table_friendly_name_setter(self):
        friendly_name = "<NAME>"
        config = self._get_target_class()()
        config.destination_table_friendly_name = friendly_name
        expected = {"friendlyName": friendly_name}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    def test_destination_table_friendly_name_setter_w_descr_already(self):
        friendly_name = "<NAME>"
        description = "Description"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "description": description
        }
        config.destination_table_friendly_name = friendly_name
        expected = {"friendlyName": friendly_name, "description": description}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    def test_destination_table_friendly_name_w_none(self):
        friendly_name = "<NAME>"
        description = "Description"
        config = self._get_target_class()()
        config._properties["load"]["destinationTableProperties"] = {
            "description": description,
            "friendlyName": friendly_name,
        }
        config.destination_table_friendly_name = None
        expected = {"description": description}
        self.assertEqual(
            config._properties["load"]["destinationTableProperties"], expected
        )

    # --- encoding ---

    def test_encoding_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.encoding)

    def test_encoding_hit(self):
        from google.cloud.bigquery.job import Encoding

        encoding = Encoding.UTF_8
        config = self._get_target_class()()
        config._properties["load"]["encoding"] = encoding
        self.assertEqual(config.encoding, encoding)

    def test_encoding_setter(self):
        from google.cloud.bigquery.job import Encoding

        encoding = Encoding.UTF_8
        config = self._get_target_class()()
        config.encoding = encoding
        self.assertEqual(config._properties["load"]["encoding"], encoding)

    # --- field_delimiter ---

    def test_field_delimiter_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.field_delimiter)

    def test_field_delimiter_hit(self):
        field_delimiter = "|"
        config = self._get_target_class()()
        config._properties["load"]["fieldDelimiter"] = field_delimiter
        self.assertEqual(config.field_delimiter, field_delimiter)

    def test_field_delimiter_setter(self):
        field_delimiter = "|"
        config = self._get_target_class()()
        config.field_delimiter = field_delimiter
        self.assertEqual(config._properties["load"]["fieldDelimiter"], field_delimiter)

    # --- hive_partitioning: round-trips through HivePartitioningOptions
    # and rejects plain dicts ---

    def test_hive_partitioning_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.hive_partitioning)

    def test_hive_partitioning_hit(self):
        from google.cloud.bigquery.external_config import HivePartitioningOptions

        config = self._get_target_class()()
        config._properties["load"]["hivePartitioningOptions"] = {
            "sourceUriPrefix": "http://foo/bar",
            "mode": "STRINGS",
        }
        result = config.hive_partitioning
        self.assertIsInstance(result, HivePartitioningOptions)
        self.assertEqual(result.source_uri_prefix, "http://foo/bar")
        self.assertEqual(result.mode, "STRINGS")

    def test_hive_partitioning_setter(self):
        from google.cloud.bigquery.external_config import HivePartitioningOptions

        hive_partitioning = HivePartitioningOptions()
        hive_partitioning.source_uri_prefix = "http://foo/bar"
        hive_partitioning.mode = "AUTO"
        config = self._get_target_class()()
        config.hive_partitioning = hive_partitioning
        self.assertEqual(
            config._properties["load"]["hivePartitioningOptions"],
            {"sourceUriPrefix": "http://foo/bar", "mode": "AUTO"},
        )
        # NOTE: unlike most *_w_none cases above, assigning None here leaves
        # the key present with a None value rather than removing it.
        config.hive_partitioning = None
        self.assertIsNone(config._properties["load"]["hivePartitioningOptions"])

    def test_hive_partitioning_invalid_type(self):
        config = self._get_target_class()()
        with self.assertRaises(TypeError):
            config.hive_partitioning = {"mode": "AUTO"}

    # --- ignore_unknown_values ---

    def test_ignore_unknown_values_missing(self):
        config = self._get_target_class()()
        self.assertIsNone(config.ignore_unknown_values)
def test_ignore_unknown_values_hit(self):
config = self._get_target_class()()
config._properties["load"]["ignoreUnknownValues"] = True
self.assertTrue(config.ignore_unknown_values)
def test_ignore_unknown_values_setter(self):
config = self._get_target_class()()
config.ignore_unknown_values = True
self.assertTrue(config._properties["load"]["ignoreUnknownValues"])
def test_max_bad_records_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.max_bad_records)
def test_max_bad_records_hit(self):
max_bad_records = 13
config = self._get_target_class()()
config._properties["load"]["maxBadRecords"] = max_bad_records
self.assertEqual(config.max_bad_records, max_bad_records)
def test_max_bad_records_setter(self):
max_bad_records = 13
config = self._get_target_class()()
config.max_bad_records = max_bad_records
self.assertEqual(config._properties["load"]["maxBadRecords"], max_bad_records)
def test_null_marker_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.null_marker)
def test_null_marker_hit(self):
null_marker = "XXX"
config = self._get_target_class()()
config._properties["load"]["nullMarker"] = null_marker
self.assertEqual(config.null_marker, null_marker)
def test_null_marker_setter(self):
null_marker = "XXX"
config = self._get_target_class()()
config.null_marker = null_marker
self.assertEqual(config._properties["load"]["nullMarker"], null_marker)
def test_projection_fields_miss(self):
config = self._get_target_class()()
self.assertIsNone(config.projection_fields)
def test_projection_fields_hit(self):
config = self._get_target_class()()
fields = ["email", "postal_code"]
config.projection_fields = fields
self.assertEqual(config._properties["load"]["projectionFields"], fields)
self.assertEqual(config.projection_fields, fields)
def test_quote_character_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.quote_character)
def test_quote_character_hit(self):
quote_character = "'"
config = self._get_target_class()()
config._properties["load"]["quote"] = quote_character
self.assertEqual(config.quote_character, quote_character)
def test_quote_character_setter(self):
quote_character = "'"
config = self._get_target_class()()
config.quote_character = quote_character
self.assertEqual(config._properties["load"]["quote"], quote_character)
def test_schema_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.schema)
def test_schema_hit(self):
from google.cloud.bigquery.schema import SchemaField
config = self._get_target_class()()
all_props_repr = {
"mode": "REQUIRED",
"name": "foo",
"type": "INTEGER",
"description": "Foo",
}
minimal_repr = {"name": "bar", "type": "STRING"}
config._properties["load"]["schema"] = {
"fields": [all_props_repr, minimal_repr]
}
all_props, minimal = config.schema
self.assertEqual(all_props, SchemaField.from_api_repr(all_props_repr))
self.assertEqual(minimal, SchemaField.from_api_repr(minimal_repr))
def test_schema_setter_fields(self):
from google.cloud.bigquery.schema import SchemaField
config = self._get_target_class()()
full_name = SchemaField("full_name", "STRING", mode="REQUIRED")
age = SchemaField("age", "INTEGER", mode="REQUIRED")
config.schema = [full_name, age]
full_name_repr = {
"name": "full_name",
"type": "STRING",
"mode": "REQUIRED",
}
age_repr = {
"name": "age",
"type": "INTEGER",
"mode": "REQUIRED",
}
self.assertEqual(
config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]}
)
def test_schema_setter_valid_mappings_list(self):
config = self._get_target_class()()
full_name_repr = {
"name": "full_name",
"type": "STRING",
"mode": "REQUIRED",
}
age_repr = {
"name": "age",
"type": "INTEGER",
"mode": "REQUIRED",
}
schema = [full_name_repr, age_repr]
config.schema = schema
self.assertEqual(
config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]}
)
def test_schema_setter_invalid_mappings_list(self):
config = self._get_target_class()()
schema = [
{"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
{"name": "age", "typeoo": "INTEGER", "mode": "REQUIRED"},
]
with self.assertRaises(Exception):
config.schema = schema
def test_schema_setter_unsetting_schema(self):
from google.cloud.bigquery.schema import SchemaField
config = self._get_target_class()()
config._properties["load"]["schema"] = [
SchemaField("full_name", "STRING", mode="REQUIRED"),
SchemaField("age", "INTEGER", mode="REQUIRED"),
]
config.schema = None
self.assertNotIn("schema", config._properties["load"])
config.schema = None # no error, idempotent operation
def test_schema_update_options_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.schema_update_options)
def test_schema_update_options_hit(self):
from google.cloud.bigquery.job import SchemaUpdateOption
options = [
SchemaUpdateOption.ALLOW_FIELD_ADDITION,
SchemaUpdateOption.ALLOW_FIELD_RELAXATION,
]
config = self._get_target_class()()
config._properties["load"]["schemaUpdateOptions"] = options
self.assertEqual(config.schema_update_options, options)
def test_schema_update_options_setter(self):
from google.cloud.bigquery.job import SchemaUpdateOption
options = [
SchemaUpdateOption.ALLOW_FIELD_ADDITION,
SchemaUpdateOption.ALLOW_FIELD_RELAXATION,
]
config = self._get_target_class()()
config.schema_update_options = options
self.assertEqual(config._properties["load"]["schemaUpdateOptions"], options)
def test_skip_leading_rows_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.skip_leading_rows)
def test_skip_leading_rows_hit_w_str(self):
skip_leading_rows = 1
config = self._get_target_class()()
config._properties["load"]["skipLeadingRows"] = str(skip_leading_rows)
self.assertEqual(config.skip_leading_rows, skip_leading_rows)
def test_skip_leading_rows_hit_w_integer(self):
skip_leading_rows = 1
config = self._get_target_class()()
config._properties["load"]["skipLeadingRows"] = skip_leading_rows
self.assertEqual(config.skip_leading_rows, skip_leading_rows)
def test_skip_leading_rows_setter(self):
skip_leading_rows = 1
config = self._get_target_class()()
config.skip_leading_rows = skip_leading_rows
self.assertEqual(
config._properties["load"]["skipLeadingRows"], str(skip_leading_rows)
)
def test_source_format_missing(self):
config = self._get_target_class()()
self.assertIsNone(config.source_format)
def test_source_format_hit(self):
from google.cloud.bigquery.job import SourceFormat
source_format = SourceFormat.CSV
config = self._get_target_class()()
config._properties["load"]["sourceFormat"] = source_format
self.assertEqual(config.source_format, source_format)
def test_source_format_setter(self):
from google.cloud.bigquery.job import SourceFormat
source_format = SourceFormat.CSV
config = self._get_target_class()()
config.source_format = source_format
self.assertEqual(config._properties["load"]["sourceFormat"], source_format)
def test_range_partitioning_w_none(self):
object_under_test = self._get_target_class()()
assert object_under_test.range_partitioning is None
def test_range_partitioning_w_value(self):
object_under_test = self._get_target_class()()
object_under_test._properties["load"]["rangePartitioning"] = {
"field": "column_one",
"range": {"start": 1, "end": | |
points,
facets, holes, and control_points. Geometries created through this method are expected to
be non-ambiguous meaning that no "overlapping" geometries exists and that nodal connectivity
is maintained (e.g. there are no nodes "overlapping" with facets without nodal connectivity).
:cvar points: List of points *(x, y)* defining the vertices of the section geometry.
If facets are not provided, it is a assumed the that the list of points are ordered
around the perimeter, either clockwise or anti-clockwise
:vartype points: list[list[float]]
:cvar facets: A list of *(start, end)* indexes of vertices defining the edges
of the section geoemtry. Can be used to define both external and internal perimeters of holes.
Facets are assumed to be described in the order of exterior perimeter, interior perimeter 1,
interior perimeter 2, etc.
:vartype facets: list[list[int]]
:cvar control_points: Optional. A list of points *(x, y)* that define non-interior regions as
being distinct, contiguous, and having one material. The point can be located anywhere within
region. Only one point is permitted per region. The order of control_points must be given in the
same order as the order that polygons are created by 'facets'.
If not given, then points will be assigned automatically using
shapely.geometry.Polygon.representative_point()
:vartype control_points: list[list[float]]
:cvar holes: Optional. A list of points *(x, y)* that define interior regions as
being holes or voids. The point can be located anywhere within the hole region.
Only one point is required per hole region.
:vartype holes: list[list[float]]
:cvar materials: Optional. A list of :class:`~sectionproperties.pre.pre.Material` objects that are to be
assigned, in order, to the regions defined by the given control_points. If not given, then
the :class:`~sectionproperties.pre.pre.DEFAULT_MATERIAL` will be used for each region.
:vartype materials: list[:class:`~sectionproperties.pre.pre.Material`]
"""
if materials and not control_points:
raise ValueError(
"Materials cannot be assigned without control_points. "
"Please provide corresponding control_points for each material."
)
if holes is None:
holes = list()
# First, generate all invidual polygons from points and facets
current_polygon_points = []
all_polygons = []
prev_facet = None
for facet in facets:
i_idx, j_idx = facet
if not prev_facet: # Add the first facet vertex to exterior and move on
current_polygon_points.append(points[i_idx])
prev_facet = facet
continue
prev_j_idx = prev_facet[1]
if i_idx != prev_j_idx: # If there is a break in the chain of edges...
# ... then add the last point, close off the polygon,
# and add the polygon to the all_polygons accumulator....
current_polygon_points.append(points[prev_j_idx])
all_polygons.append(Polygon(current_polygon_points))
# Then start collecting the points of the new polygon
current_polygon_points = [points[i_idx]]
else:
current_polygon_points.append(
points[i_idx]
) # Only need i_idx b/c shapely auto-closes polygons
prev_facet = facet
else: # Use the for...else clause to add the last point and close the last polygon.
current_polygon_points.append(points[j_idx])
all_polygons.append(Polygon(current_polygon_points))
# Then classify all of the collected polygons as either "exterior" or "interior"
exteriors = []
interiors = []
for polygon in all_polygons:
hole_coord_in_polygon = [
hole_coord
for hole_coord in holes
if polygon.contains(Point(hole_coord))
]
ctrl_coord_in_polygon = [
ctrl_coord
for ctrl_coord in control_points
if polygon.contains(Point(ctrl_coord))
]
if any(hole_coord_in_polygon) and not any(ctrl_coord_in_polygon):
interiors.append(polygon)
else:
exteriors.append(polygon)
# Create the holes by subtracting interior regions from exterior regions
if len(exteriors) != len(control_points):
raise ValueError(
f"The number of exterior regions ({len(exteriors)}) "
f"does not match the number of control_points given ({len(control_points)})."
)
if not interiors:
return CompoundGeometry(
[
Geometry(exterior, control_points=control_points[idx])
for idx, exterior in enumerate(exteriors)
]
)
else:
# "Punch" all holes through each exterior geometry
punched_exteriors = []
punched_exterior_geometries = []
for exterior in exteriors:
punched_exterior = exterior
for interior in interiors:
punched_exterior = punched_exterior - interior
try:
exterior_control_point = next(
control_point
for control_point in control_points
if punched_exterior.contains(Point(control_point))
)
except StopIteration:
raise ValueError(
f"Control points given are not contained within the geometry"
f" once holes are subtracted: {control_points}"
)
exterior_geometry = Geometry(
punched_exterior, control_points=exterior_control_point
)
punched_exterior_geometries.append(exterior_geometry)
return CompoundGeometry(punched_exterior_geometries)
@classmethod
def from_3dm(cls, filepath: Union[str, pathlib.Path], **kwargs) -> CompoundGeometry:
"""Class method to create a `CompoundGeometry` from the objects in a Rhino `3dm` file.
:param filepath:
File path to the rhino `.3dm` file.
:type filepath: Union[str, pathlib.Path]
:param \**kwargs:
See below.
:return:
A `CompoundGeometry` object.
:rtype: :class:`~sectionproperties.pre.geometry.CompoundGeometry`
:Keyword Arguments:
* *refine_num* (``int, optional``) --
Bézier curve interpolation number. In Rhino a surface's edges are nurb based curves.
Shapely does not support nurbs, so the individual Bézier curves are interpolated using straight lines.
This parameter sets the number of straight lines used in the interpolation.
Default is 1.
* *vec1* (``numpy.ndarray, optional``) --
A 3d vector in the Shapely plane. Rhino is a 3D geometry environment.
Shapely is a 2D geometric library.
Thus a 2D plane needs to be defined in Rhino that represents the Shapely coordinate system.
`vec1` represents the 1st vector of this plane. It will be used as Shapely's x direction.
Default is [1,0,0].
* *vec2* (``numpy.ndarray, optional``) --
Continuing from `vec1`, `vec2` is another vector to define the Shapely plane.
It must not be [0,0,0] and it's only requirement is that it is any vector in the Shapely plane (but not equal to `vec1`).
Default is [0,1,0].
* *plane_distance* (``float, optional``) --
The distance to the Shapely plane.
Default is 0.
* *project* (``boolean, optional``) --
Controls if the breps are projected onto the plane in the direction of the Shapley plane's normal.
Default is True.
* *parallel* (``boolean, optional``) --
Controls if only the rhino surfaces that have the same normal as the Shapely plane are yielded.
If true, all non parallel surfaces are filtered out.
Default is False.
"""
try:
import sectionproperties.pre.rhino as rhino_importer # type: ignore
except ImportError as e:
print(e)
print(
"There is something wrong with your rhino library installation. "
"Please report this error at https://github.com/robbievanleeuwen/section-properties/issues"
)
return
list_poly = rhino_importer.load_3dm(filepath, **kwargs)
return cls(geoms=MultiPolygon(list_poly))
def create_mesh(self, mesh_sizes: List[float], coarse: bool = False):
"""Creates a quadratic triangular mesh from the Geometry object.
:param mesh_size: A float describing the maximum mesh element area to be
used in the finite-element mesh for each Geometry object within the
CompoundGeometry object. If a list of length 1 is passed, then the one
size will be applied to all constituent Geometry meshes.
:type mesh_sizes: List[float]
:param bool coarse: If set to True, will create a coarse mesh (no area or
quality constraints)
:return: Geometry-object with mesh data stored in .mesh attribute. Returned
Geometry-object is self, not a new instance.
:rtype: :class:`~sectionproperties.pre.geometry.Geometry`
The following example creates a circular cross-section with a diameter of 50 with 64
points, and generates a mesh with a maximum triangular area of 2.5::
import sectionproperties.pre.library.primitive_sections as primitive_sections
geometry = primitive_sections.circular_section(d=50, n=64)
geometry = geometry.create_mesh(mesh_sizes=[2.5])
.. figure:: ../images/sections/circle_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
if isinstance(mesh_sizes, (float, int)):
mesh_sizes = [mesh_sizes]
if len(mesh_sizes) == 1:
mesh_sizes = mesh_sizes * len(self.control_points)
self.mesh = pre.create_mesh(
self.points,
self.facets,
self.holes,
self.control_points,
mesh_sizes,
coarse,
)
return self
def shift_section(self, x_offset: float = 0, y_offset: float = 0):
"""
Returns a new CompoundGeometry object translated by 'x_offset' and 'y_offset'.
:param x_offset: Distance in x-direction by which to shift the geometry.
:type x_offset: float
:param y_offset: Distance in y-direction by which to shift the geometry.
:type y_offset: float
:return: CompoundGeometry object shifted by 'x_offset' and 'y_offset'
:rtype: :class:`~sectionproperties.pre.geometry.CompoundGeometry`
"""
geoms_acc = []
for geom in self.geoms:
geoms_acc.append(geom.shift_section(x_offset=x_offset, y_offset=y_offset))
new_geom = CompoundGeometry(geoms_acc)
return new_geom
def rotate_section(
self,
angle: float,
rot_point: Union[List[float], str] = "center",
use_radians: bool = False,
):
"""Rotates the compound geometry and specified angle about a point. If the rotation point is not
provided, rotates the section about the center of the compound geometry's bounding box.
:param float angle: Angle (degrees by default) by which to rotate the section. A positive angle leads
to a counter-clockwise rotation.
:param rot_point: Optional. Point *(x, y)* about which to rotate the section. If not provided, will rotate
about the center of the compound geometry's bounding box. Default = 'center'.
:type rot_point: list[float, float]
:param use_radians: Boolean | |
the host.
username
The username used to login to the host, such as ``root``.
password
The password used to login to the host.
protocol
Optionally set to alternate protocol if the host is not using the default
protocol. Default protocol is ``https``.
port
Optionally set to alternate port if the host is not using the default
port. Default port is ``443``.
host_names
List of ESXi host names. When the host, username, and password credentials
are provided for a vCenter Server, the host_names argument is required to tell
vCenter the hosts for which to get ntp configuration information.
If host_names is not provided, the NTP configuration will be retrieved for the
``host`` location instead. This is useful for when service instance connection
information is used for a single ESXi host.
CLI Example:
.. code-block:: bash
# Used for single ESXi host connection information
salt '*' vsphere.get_ntp_config my.esxi.host root bad-password
# Used for connecting to a vCenter Server
salt '*' vsphere.get_ntp_config my.vcenter.location root bad-password \
host_names='[esxi-1.host.com, esxi-2.host.com]'
'''
service_instance = salt.utils.vmware.get_service_instance(host=host,
username=username,
password=password,
protocol=protocol,
port=port)
host_names = _check_hosts(service_instance, host, host_names)
ret = {}
for host_name in host_names:
host_ref = _get_host_ref(service_instance, host, host_name=host_name)
ntp_config = host_ref.configManager.dateTimeSystem.dateTimeInfo.ntpConfig.server
ret.update({host_name: ntp_config})
return ret
@depends(HAS_PYVMOMI)
@ignores_kwargs('credstore')
def get_service_policy(host, username, password, service_name, protocol=None, port=None, host_names=None):
    '''
    Get the service name's policy for a given host or list of hosts.

    host
        The location of the host.

    username
        The username used to login to the host, such as ``root``.

    password
        The password used to login to the host.

    service_name
        The name of the service for which to retrieve the policy. Supported service names are:
          - DCUI
          - TSM
          - SSH
          - lbtd
          - lsassd
          - lwiod
          - netlogond
          - ntpd
          - sfcbd-watchdog
          - snmpd
          - vprobed
          - vpxa
          - xorg

    protocol
        Optionally set to alternate protocol if the host is not using the default
        protocol. Default protocol is ``https``.

    port
        Optionally set to alternate port if the host is not using the default
        port. Default port is ``443``.

    host_names
        List of ESXi host names. When the host, username, and password credentials
        are provided for a vCenter Server, the host_names argument is required to tell
        vCenter the hosts for which to get service policy information.

        If host_names is not provided, the service policy information will be retrieved
        for the ``host`` location instead. This is useful for when service instance
        connection information is used for a single ESXi host.

    CLI Example:

    .. code-block:: bash

        # Used for single ESXi host connection information
        salt '*' vsphere.get_service_policy my.esxi.host root bad-password 'ssh'

        # Used for connecting to a vCenter Server
        salt '*' vsphere.get_service_policy my.vcenter.location root bad-password 'ntpd' \
        host_names='[esxi-1.host.com, esxi-2.host.com]'
    '''
    service_instance = salt.utils.vmware.get_service_instance(host=host,
                                                              username=username,
                                                              password=password,
                                                              protocol=protocol,
                                                              port=port)
    valid_services = ['DCUI', 'TSM', 'SSH', 'ssh', 'lbtd', 'lsassd', 'lwiod', 'netlogond',
                      'ntpd', 'sfcbd-watchdog', 'snmpd', 'vprobed', 'vpxa', 'xorg']
    host_names = _check_hosts(service_instance, host, host_names)
    ret = {}
    for host_name in host_names:
        # An unsupported service name is invalid for every host, so bail out
        # immediately with a single error entry.
        if service_name not in valid_services:
            ret[host_name] = {'Error': '{0} is not a valid service name.'.format(service_name)}
            return ret
        host_ref = _get_host_ref(service_instance, host, host_name=host_name)
        # VMware internally lists the ssh service under the key 'TSM-SSH'; don't
        # require users to know that.
        lookup_key = 'TSM-SSH' if service_name in ('SSH', 'ssh') else service_name
        matched = next(
            (service for service in host_ref.configManager.serviceSystem.serviceInfo.service
             if service.key == lookup_key),
            None)
        if matched is not None:
            ret[host_name] = {service_name: matched.policy}
        else:
            msg = 'Could not find service \'{0}\' for host \'{1}\'.'.format(service_name,
                                                                            host_name)
            ret[host_name] = {'Error': msg}
        # Defensive fallback: record an error if nothing was stored for this host.
        if ret.get(host_name) is None:
            msg = '\'vsphere.get_service_policy\' failed for host {0}.'.format(host_name)
            log.debug(msg)
            ret[host_name] = {'Error': msg}
    return ret
@depends(HAS_PYVMOMI)
@ignores_kwargs('credstore')
def get_service_running(host, username, password, service_name, protocol=None, port=None, host_names=None):
    '''
    Get the service name's running state for a given host or list of hosts.

    host
        The location of the host.

    username
        The username used to login to the host, such as ``root``.

    password
        The password used to login to the host.

    service_name
        The name of the service for which to retrieve the running state. Supported
        service names are:
          - DCUI
          - TSM
          - SSH
          - lbtd
          - lsassd
          - lwiod
          - netlogond
          - ntpd
          - sfcbd-watchdog
          - snmpd
          - vprobed
          - vpxa
          - xorg

    protocol
        Optionally set to alternate protocol if the host is not using the default
        protocol. Default protocol is ``https``.

    port
        Optionally set to alternate port if the host is not using the default
        port. Default port is ``443``.

    host_names
        List of ESXi host names. When the host, username, and password credentials
        are provided for a vCenter Server, the host_names argument is required to tell
        vCenter the hosts for which to get the service's running state.

        If host_names is not provided, the service's running state will be retrieved
        for the ``host`` location instead. This is useful for when service instance
        connection information is used for a single ESXi host.

    CLI Example:

    .. code-block:: bash

        # Used for single ESXi host connection information
        salt '*' vsphere.get_service_running my.esxi.host root bad-password 'ssh'

        # Used for connecting to a vCenter Server
        salt '*' vsphere.get_service_running my.vcenter.location root bad-password 'ntpd' \
        host_names='[esxi-1.host.com, esxi-2.host.com]'
    '''
    service_instance = salt.utils.vmware.get_service_instance(host=host,
                                                              username=username,
                                                              password=password,
                                                              protocol=protocol,
                                                              port=port)
    valid_services = ['DCUI', 'TSM', 'SSH', 'ssh', 'lbtd', 'lsassd', 'lwiod', 'netlogond',
                      'ntpd', 'sfcbd-watchdog', 'snmpd', 'vprobed', 'vpxa', 'xorg']
    host_names = _check_hosts(service_instance, host, host_names)
    ret = {}
    for host_name in host_names:
        # An unsupported service name is invalid for every host, so bail out
        # immediately with a single error entry.
        if service_name not in valid_services:
            ret[host_name] = {'Error': '{0} is not a valid service name.'.format(service_name)}
            return ret
        host_ref = _get_host_ref(service_instance, host, host_name=host_name)
        services = host_ref.configManager.serviceSystem.serviceInfo.service
        # VMware internally lists the ssh service under the key 'TSM-SSH'; don't
        # require users to know that.
        lookup_key = 'TSM-SSH' if service_name in ('SSH', 'ssh') else service_name
        found = False
        for service in services:
            if service.key == lookup_key:
                ret[host_name] = {service_name: service.running}
                found = True
                break
        if not found:
            msg = 'Could not find service \'{0}\' for host \'{1}\'.'.format(service_name,
                                                                            host_name)
            ret[host_name] = {'Error': msg}
        # Defensive fallback: record an error if nothing was stored for this host.
        if ret.get(host_name) is None:
            msg = '\'vsphere.get_service_running\' failed for host {0}.'.format(host_name)
            log.debug(msg)
            ret[host_name] = {'Error': msg}
    return ret
@depends(HAS_PYVMOMI)
@ignores_kwargs('credstore')
def get_vmotion_enabled(host, username, password, protocol=None, port=None, host_names=None):
'''
Get the VMotion enabled status for a given host or a list of host_names. Returns ``True``
if VMotion is enabled, ``False`` if it is not enabled.
host
The location of the host.
username
The username used to login to the host, such as ``root``.
password
The <PASSWORD> login to the host.
protocol
Optionally set to alternate protocol if the host is not using the default
protocol. Default protocol is ``https``.
port
Optionally set to alternate port if the host is not using the default
port. Default port is ``443``.
host_names
List of ESXi host names. When the host, username, and password credentials
are provided for a vCenter Server, the host_names argument is required to
tell vCenter which hosts to check if VMotion is enabled.
If host_names is not provided, the VMotion status will be retrieved for the
``host`` location instead. This is useful for when service instance
connection information is used for a single ESXi host.
CLI Example:
.. code-block:: bash
# Used for single ESXi host connection information
salt '*' vsphere.get_vmotion_enabled my.esxi.host root bad-password
# Used for connecting to a vCenter Server
salt '*' vsphere.get_vmotion_enabled my.vcenter.location root bad-password \
host_names='[esxi-1.host.com, esxi-2.host.com]'
'''
service_instance | |
The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.Term`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/catalogs/{catalogId}/glossaries/{glossaryKey}/terms/{termKey}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_term got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"catalogId": catalog_id,
"glossaryKey": glossary_key,
"termKey": term_key
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_term_details,
response_type="Term")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_term_details,
response_type="Term")
def update_term_relationship(self, catalog_id, glossary_key, term_key, term_relationship_key, update_term_relationship_details, **kwargs):
    """
    Updates a specific glossary term relationship.

    :param str catalog_id: (required)
        Unique catalog identifier.
    :param str glossary_key: (required)
        Unique glossary key.
    :param str term_key: (required)
        Unique glossary term key.
    :param str term_relationship_key: (required)
        Unique glossary term relationship key.
    :param UpdateTermRelationshipDetails update_term_relationship_details: (required)
        The information to be updated in the term relationship.
    :param str if_match: (optional)
        For optimistic concurrency control. Set `if-match` to the etag from a
        previous GET or POST response for this resource; the update happens
        only if that etag still matches the resource's current etag value.
    :param str opc_request_id: (optional)
        The client request ID for tracing.
    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call, overriding
        any client-level strategy. Use one of the strategies from the
        :py:mod:`~oci.retry` module; pass an instance of
        :py:class:`~oci.retry.NoneRetryStrategy` to disable retries.
    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.TermRelationship`
    :rtype: :class:`~oci.response.Response`
    """
    resource_path = "/catalogs/{catalogId}/glossaries/{glossaryKey}/terms/{termKey}/termRelationships/{termRelationshipKey}"
    method = "PUT"

    # Reject any kwarg this operation does not understand.
    expected_kwargs = ["retry_strategy", "if_match", "opc_request_id"]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "update_term_relationship got unknown kwargs: {!r}".format(extra_kwargs))

    # Path template values: drop `missing` sentinels, then validate that
    # every remaining value is a usable, non-blank identifier.
    path_params = {
        "catalogId": catalog_id,
        "glossaryKey": glossary_key,
        "termKey": term_key,
        "termRelationshipKey": term_relationship_key
    }
    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
    for (k, v) in six.iteritems(path_params):
        is_blank = isinstance(v, six.string_types) and len(v.strip()) == 0
        if v is None or is_blank:
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    # Optional headers are left out entirely when not supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "if-match": kwargs.get("if_match", missing),
        "opc-request-id": kwargs.get("opc_request_id", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # A per-call retry strategy overrides the client-level default.
    retry_strategy = kwargs.get('retry_strategy') or self.retry_strategy

    call_kwargs = dict(
        resource_path=resource_path,
        method=method,
        path_params=path_params,
        header_params=header_params,
        body=update_term_relationship_details,
        response_type="TermRelationship")
    if retry_strategy:
        return retry_strategy.make_retrying_call(self.base_client.call_api, **call_kwargs)
    return self.base_client.call_api(**call_kwargs)
def upload_credentials(self, catalog_id, data_asset_key, connection_key, upload_credentials_details, **kwargs):
    """
    Upload connection credentials and metadata for this connection.

    :param str catalog_id: (required)
        Unique catalog identifier.
    :param str data_asset_key: (required)
        Unique data asset key.
    :param str connection_key: (required)
        Unique connection key.
    :param UploadCredentialsDetails upload_credentials_details: (required)
        The information used to upload the credentials file and metadata for updating this connection.
    :param str opc_request_id: (optional)
        The client request ID for tracing.
    :param str if_match: (optional)
        For optimistic concurrency control. In the PUT or DELETE call
        for a resource, set the `if-match` parameter to the value of the
        etag from a previous GET or POST response for that resource.
        The resource will be updated or deleted only if the etag you
        provide matches the resource's current etag value.
    :param str opc_retry_token: (optional)
        A token that uniquely identifies a request so it can be retried in case of a timeout or
        server error without risk of executing that same action again. Retry tokens expire after 24
        hours, but can be invalidated before then due to conflicting operations. For example, if a resource
        has been deleted and purged from the system, then a retry of the original creation request
        might be rejected.
    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
        This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
        is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.Connection`
    :rtype: :class:`~oci.response.Response`
    """
    resource_path = "/catalogs/{catalogId}/dataAssets/{dataAssetKey}/connections/{connectionKey}/actions/uploadCredentials"
    method = "POST"
    # Don't accept unknown kwargs
    expected_kwargs = [
        "retry_strategy",
        "opc_request_id",
        "if_match",
        "opc_retry_token"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "upload_credentials got unknown kwargs: {!r}".format(extra_kwargs))
    # Build the path template values, dropping `missing` sentinels and
    # rejecting blank identifiers.
    path_params = {
        "catalogId": catalog_id,
        "dataAssetKey": data_asset_key,
        "connectionKey": connection_key
    }
    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
    # Optional headers are omitted entirely when not supplied.
    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing),
        "if-match": kwargs.get("if_match", missing),
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
    # Per-call retry strategy overrides the client-level default.
    retry_strategy = self.retry_strategy
    if kwargs.get('retry_strategy'):
        retry_strategy = kwargs.get('retry_strategy')
    if retry_strategy:
        # POST is not idempotent: inject an opc-retry-token (unless the
        # caller opted out via NoneRetryStrategy) so the server can
        # de-duplicate retried requests.
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_retry_token_if_needed(header_params)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=upload_credentials_details,
            response_type="Connection")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params,
            body=upload_credentials_details,
            response_type="Connection")
def users(self, catalog_id, **kwargs):
"""
Returns active users in the system.
:param str catalog_id: (required)
Unique catalog identifier.
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified TIMECREATED is default.
Allowed values are: "TIMECREATED", "DISPLAYNAME"
:param str sort_order: (optional)
The sort order to use, either 'asc' or 'desc'.
Allowed values are: "ASC", "DESC"
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type str
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/catalogs/{catalogId}/actions/users"
method = "POST"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"sort_by",
"sort_order",
"limit",
"page",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"users got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"catalogId": catalog_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'sort_by' in kwargs:
sort_by_allowed_values = ["TIMECREATED", "DISPLAYNAME"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for | |
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import gc
import cv2
# gc.set_threshold(100, 10, 10)
from lib.model.config import cfg
from lib.rpn.generate_anchors_global import generate_anchors_global
from lib.rpn.anchor_target_layer_cpu import anchor_target_layer
from lib.rpn.proposal_target_layer import proposal_target_layer
from lib.rpn.proposal_layer import proposal_layer
from lib.nets.layers_util import *
from lib.model.nms_wrapper import nms_test as nms
from lib.roi_pooling.roi_pool import RoIPool, RoIPoolFunction
from lib.model.bbox_transform_cpu import clip_boxes, bbox_transform_inv
import torch.backends.cudnn as cudnn
import torchvision.utils as vutils
import torchvision.transforms as torchtrans
class Network(nn.Module):
    """Abstract base for backbone CNNs used by the RPN / Faster R-CNN heads.

    Concrete subclasses implement the feature-extractor and classifier
    stages and fill in the anchor/stride configuration attributes.
    """

    def __init__(self):
        super(Network, self).__init__()
        # Registries populated by concrete subclasses.
        self._layers = {}
        self._channels = {}
        # Anchor-grid configuration; left unset until a subclass defines it.
        self._feat_stride = None
        self._anchor_scales = None
        self._anchor_ratios = None
        self._num_anchors = None

    def _image_to_head(self, input):
        """Map an image batch to the shared convolutional feature map."""
        raise NotImplementedError

    def _head_to_tail(self, pool5):
        """Map pooled ROI features to the final feature vector."""
        raise NotImplementedError

    def _load_pre_trained_model(self, pre_trained_model):
        """Load pretrained backbone weights."""
        raise NotImplementedError

    def _init_network(self):
        """Build the backbone layers."""
        raise NotImplementedError
class RPN(nn.Module):
    """Region Proposal Network head.

    Wraps a backbone ``Network`` and adds a 3x3 conv plus two 1x1 conv
    heads predicting, per anchor, an objectness score (2 values) and a
    box regression (4 values). ``forward`` returns proposals (rois),
    their scores and the shared feature map; in training mode it also
    computes the RPN classification / box-regression losses.
    """
    def __init__(self, net):
        super(RPN, self).__init__()
        self._network = net
        # Loss terms filled in by forward() when self.training is True.
        self.cross_entropy = None
        self.loss_box = None
        # Scratch storage for intermediates (anchors, rois, targets).
        self.cache_dict = {}
    def _init_network(self):
        """Build the backbone and the three RPN conv layers."""
        self._network._init_network()
        self.rpn_conv = nn.Conv2d(self._network._channels['head'], 512, (3, 3), padding=1)
        # 2 class scores (bg/fg) and 4 box deltas per anchor location.
        self.rpn_score = nn.Conv2d(512, self._network._num_anchors * 2, (1, 1))
        self.rpn_bbox = nn.Conv2d(512, self._network._num_anchors * 4, (1, 1))
    def forward(self, im_data, im_info, gt_boxes=None):
        """Run the RPN; returns (rois, roi scores, backbone feature map).

        gt_boxes is required in training mode, where this also sets
        self.cross_entropy / self.loss_box as a side effect.
        """
        feature = self._network._image_to_head(im_data)
        rpn_feature = self.rpn_conv(feature)
        # cls
        # n a*2 h w
        rpn_cls_score = self.rpn_score(rpn_feature)
        # n 2 a*h w
        rpn_cls_score_reshape = self._reshape_layer(rpn_cls_score, 2)
        # n 2 a*h w  (softmax over the 2-way bg/fg channel)
        rpn_cls_prob = F.softmax(rpn_cls_score_reshape, 1)
        # n a*2 h w to n h w a*2
        rpn_cls_prob_final = self._reshape_layer(rpn_cls_prob, self._network._num_anchors * 2).permute(0, 2, 3, 1).contiguous()
        # bbox
        rpn_bbox_score = self.rpn_bbox(rpn_feature)
        rpn_bbox_score = rpn_bbox_score.permute(0, 2, 3, 1).contiguous()
        # generate anchors sized to the current feature-map resolution
        self._generate_anchors(rpn_cls_score)
        rois, scores = self._region_proposal(rpn_cls_prob_final, rpn_bbox_score, im_info)
        # generating training labels and build the rpn loss
        if self.training:
            assert gt_boxes is not None
            rpn_data = self._anchor_target_layer(rpn_cls_score, gt_boxes, im_info)
            self.cross_entropy, self.loss_box = self._build_loss(rpn_cls_score_reshape, rpn_bbox_score, rpn_data)
        return rois, scores, feature
    @property
    def loss(self):
        # Combined RPN loss; LOSS_RATIO weights the box-regression term.
        return self.cross_entropy + self.loss_box * cfg.TRAIN.LOSS_RATIO
    def _build_loss(self, rpn_cls_score_reshape, rpn_bbox_score, rpn_data, sigma_rpn=3):
        """Compute RPN cross-entropy and smooth-L1 box-regression losses."""
        rpn_cls_score = rpn_cls_score_reshape.permute(0, 2, 3, 1).contiguous().view(-1, 2)
        rpn_label = rpn_data[0].view(-1)
        # cls loss: keep only anchors labelled 0/1 — label -1 means "ignore".
        rpn_keep = Variable(rpn_label.data.ne(-1).nonzero().squeeze())
        # The anchor target layer is expected to emit exactly RPN_BATCHSIZE
        # non-ignored anchors.
        assert rpn_keep.numel() == cfg.TRAIN.RPN_BATCHSIZE
        if cfg.CUDA_IF:
            rpn_keep = rpn_keep.cuda()
        rpn_cls_score = torch.index_select(rpn_cls_score, 0, rpn_keep)
        rpn_label = torch.index_select(rpn_label, 0, rpn_keep)
        rpn_cross_entropy = F.cross_entropy(rpn_cls_score, rpn_label)
        rpn_bbox_targets, rpn_bbox_inside_weights, rpn_bbox_outside_weights = rpn_data[1:]
        rpn_loss_box = self._smooth_l1_loss(rpn_bbox_score, rpn_bbox_targets, rpn_bbox_inside_weights,
                                            rpn_bbox_outside_weights, sigma=sigma_rpn, dim=[1, 2, 3])
        return rpn_cross_entropy, rpn_loss_box
    def _smooth_l1_loss(self, bbox_pred, bbox_targets, bbox_inside_weights, bbox_outside_weights, sigma=1.0, dim=[1]):
        """Smooth-L1 (Huber) loss: quadratic below 1/sigma^2, linear above.

        Summed over `dim`, then averaged over the remaining dimension.
        NOTE(review): the mutable default `dim=[1]` is only iterated, never
        mutated, so it is safe in practice.
        """
        sigma_2 = sigma ** 2
        box_diff = bbox_pred - bbox_targets
        # inside weights zero out anchors that don't contribute to the loss
        in_box_diff = bbox_inside_weights * box_diff
        abs_in_box_diff = torch.abs(in_box_diff)
        # 1 where |x| < 1/sigma^2 (quadratic region), 0 otherwise
        smoothL1_sign = (abs_in_box_diff < 1. / sigma_2).detach().float()
        in_loss_box = torch.pow(in_box_diff, 2) * (sigma_2 / 2.) * smoothL1_sign \
                      + (abs_in_box_diff - (0.5 / sigma_2)) * (1. - smoothL1_sign)
        out_loss_box = bbox_outside_weights * in_loss_box
        loss_box = out_loss_box
        for i in sorted(dim, reverse=True):
            loss_box = loss_box.sum(i)
        loss_box = loss_box.mean()
        return loss_box
    def _reshape_layer(self, x, d):
        '''
        Reshape channel dim so that dim 1 has size d, folding the rest into dim 2.
        :param x: n [a1 b1,a2 b2] h w
        :param d: d
        :return: n d (c*h/d) w
        '''
        input_shape = x.size()
        x = x.view(
            input_shape[0],
            int(d),
            int(float(input_shape[1] * input_shape[2]) / float(d)),
            input_shape[3]
        )
        return x
    def _generate_anchors(self, rpn_cls_score):
        """Generate the full anchor grid for the current feature-map size."""
        # anchors [A*K, 4]
        anchors = generate_anchors_global(
            feat_stride=self._network._feat_stride[0],
            height=rpn_cls_score.size()[-2],
            width=rpn_cls_score.size()[-1],
            anchor_scales=self._network._anchor_scales,
            anchor_ratios=self._network._anchor_ratios
        )
        self._anchors = Variable(torch.from_numpy(anchors)).float()
        if cfg.CUDA_IF:
            self._anchors = self._anchors.cuda()
        self.cache_dict['anchors_cache'] = self._anchors
    def _region_proposal(self, rpn_cls_prob_reshape, rpn_bbox_pred, im_info):
        """Turn per-anchor scores and deltas into scored ROI proposals."""
        # proposal_layer applies TRAIN/TEST-specific NMS and top-N settings
        cfg_key = 'TRAIN' if self.training else 'TEST'
        rois, rpn_scores = proposal_layer(rpn_cls_prob=rpn_cls_prob_reshape, rpn_bbox_pred=rpn_bbox_pred,
                                          im_info=im_info, cfg_key=cfg_key,
                                          _feat_stride=self._network._feat_stride,
                                          anchors=self._anchors,
                                          num_anchors=self._network._num_anchors)
        self.cache_dict['rois'] = rois
        self.cache_dict['rpn_scores'] = rpn_scores
        return rois, rpn_scores
    def _anchor_target_layer(self, rpn_cls_score, gt_boxes, im_info):
        """Assign training labels and regression targets to each anchor.

        Runs on CPU/numpy, then wraps the results back into Variables.
        """
        rpn_cls_score = rpn_cls_score.data
        gt_boxes = gt_boxes.data.cpu().numpy()
        all_anchors = self._anchors.data.cpu().numpy()
        rpn_labels, rpn_bbox_targets, rpn_bbox_inside_weights, rpn_bbox_outside_weights = \
            anchor_target_layer(rpn_cls_score=rpn_cls_score, gt_boxes=gt_boxes, im_info=im_info,
                                _feat_stride=self._network._feat_stride,
                                all_anchors=all_anchors,
                                num_anchors=self._network._num_anchors)
        rpn_labels = np_to_variable(rpn_labels, is_cuda=cfg.CUDA_IF, dtype=torch.LongTensor)
        rpn_bbox_targets = np_to_variable(rpn_bbox_targets, is_cuda=cfg.CUDA_IF)
        rpn_bbox_inside_weights = np_to_variable(rpn_bbox_inside_weights, is_cuda=cfg.CUDA_IF)
        rpn_bbox_outside_weights = np_to_variable(rpn_bbox_outside_weights, is_cuda=cfg.CUDA_IF)
        self.cache_dict['rpn_labels'] = rpn_labels
        self.cache_dict['rpn_bbox_targets'] = rpn_bbox_targets
        self.cache_dict['rpn_bbox_inside_weights'] = rpn_bbox_inside_weights
        self.cache_dict['rpn_bbox_outside_weights'] = rpn_bbox_outside_weights
        return rpn_labels, rpn_bbox_targets, rpn_bbox_inside_weights, rpn_bbox_outside_weights
class FasterRCNN(nn.Module):
def __init__(self, net, classes=None):
super(FasterRCNN, self).__init__()
assert (classes is not None), 'class can not be none!'
self._classes = np.array(classes)
self._num_classes = len(classes)
self._rpn = RPN(net=net)
# loss
self.cross_entropy = None
self.loss_box = None
self.cache_dict = {}
self.metrics_dict = {}
def init_fasterRCNN(self):
self._rpn._init_network()
self.roi_pool = RoIPool(7, 7, 1.0 / self._rpn._network._feat_stride[0])
self.score_fc = nn.Linear(self._rpn._network._channels['tail'], self._num_classes)
self.bbox_fc = nn.Linear(self._rpn._network._channels['tail'], self._num_classes * 4)
def _predict(self, im_data, im_info, gt_boxes):
# benchmark because now the input size are not fixed
cudnn.benchmark = False
rois, rpn_scores, features = self._rpn(im_data, im_info, gt_boxes)
if self.training:
roi_data = self._proposal_target_layer(rpn_rois=rois, gt_boxes=gt_boxes,
rpn_scores=rpn_scores)
rois = roi_data[0]
else:
roi_data = None
pooled_features = self.roi_pool(features, rois)
self.cache_dict['pooled_features'] = pooled_features
if self.training:
# benchmark because now the input size are fixed
cudnn.benchmark = True
x = self._rpn._network._head_to_tail(pooled_features)
cls_score = self.score_fc(x)
cls_prob = F.softmax(cls_score, 1)
bbox_pred = self.bbox_fc(x)
if self.training:
self.cross_entropy, self.loss_box = self._build_loss(cls_score, bbox_pred, roi_data)
return cls_prob, bbox_pred, rois
def forward(self, im_data, im_info, gt_boxes=None):
im_data = np_to_variable(im_data, is_cuda=cfg.CUDA_IF).permute(0, 3, 1, 2)
self.cache_dict['im_data'] = im_data
gt_boxes = np_to_variable(gt_boxes, is_cuda=cfg.CUDA_IF) if gt_boxes is not None else None
self.cache_dict['gt_boxes'] = gt_boxes
cls_prob, bbox_pred, rois = self._predict(im_data, im_info, gt_boxes)
if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
# in proposal_target_layer target has done regularization
stds = bbox_pred.data.new(cfg.TRAIN.BBOX_NORMALIZE_STDS).repeat(self._num_classes).unsqueeze(0).expand_as(
bbox_pred)
means = bbox_pred.data.new(cfg.TRAIN.BBOX_NORMALIZE_MEANS).repeat(self._num_classes).unsqueeze(0).expand_as(
bbox_pred)
bbox_pred = bbox_pred.mul(Variable(stds)).add(Variable(means))
if not self.training:
# clear middle memory
self._delete_cache()
return cls_prob, bbox_pred, rois
@property
def loss(self):
return self.cross_entropy + self.loss_box * cfg.TRAIN.LOSS_RATIO
def _build_loss(self, cls_score, bbox_pred, roi_data, sigma_rpn=1):
label = roi_data[1].squeeze()
assert label.dim() == 1
fg_cnt = torch.sum(label.data.ne(0))
bg_cnt = label.data.numel() - fg_cnt
self.metrics_dict['fg'] = fg_cnt
self.metrics_dict['bg'] = bg_cnt
_, predict = cls_score.data.max(1)
label_data = label.data
tp = torch.sum(predict.eq(label_data) & label_data.ne(0)) if fg_cnt > 0 else 0
tf = torch.sum(predict.eq(label_data) & label_data.eq(0)) if bg_cnt > 0 else 0
self.metrics_dict['tp'] = tp
self.metrics_dict['tf'] = tf
# cls
cross_entropy = F.cross_entropy(cls_score, label)
# bbox
bbox_targets, bbox_inside_weights, bbox_outside_weights = roi_data[2:]
loss_box = self._rpn._smooth_l1_loss(bbox_pred, bbox_targets, bbox_inside_weights,
bbox_outside_weights, sigma=sigma_rpn)
return cross_entropy, loss_box
def init_special_bbox_fc(self, dev=0.001):
def _gaussian_init(m, dev):
m.weight.data.normal_(0.0, dev)
if hasattr(m.bias, 'data'):
m.bias.data.zero_()
model = self.bbox_fc
for m in model.modules():
if isinstance(m, nn.Conv2d):
_gaussian_init(m, dev)
elif isinstance(m, nn.Linear):
_gaussian_init(m, dev)
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _delete_cache(self):
for dic in [self._rpn.cache_dict, self.cache_dict]:
for key in dic.keys():
del dic[key]
# gc.collect()
def _proposal_target_layer(self, rpn_rois, gt_boxes, rpn_scores):
rois, roi_scores, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights = \
proposal_target_layer(
rpn_rois, rpn_scores, gt_boxes, self._num_classes)
labels = labels.long()
bbox_targets = Variable(bbox_targets)
bbox_inside_weights = Variable(bbox_inside_weights)
bbox_outside_weights = Variable(bbox_outside_weights)
self.cache_dict['labels'] = labels
self.cache_dict['bbox_targets'] = bbox_targets
self.cache_dict['bbox_inside_weights'] = bbox_inside_weights
self.cache_dict['bbox_outside_weights'] = bbox_outside_weights
return rois, labels, bbox_targets, bbox_inside_weights, bbox_outside_weights
def train_operation(self, blobs, optimizer, image_if=False, clip_parameters=None):
im_data = blobs['data']
im_info = blobs['im_info']
gt_boxes = blobs['gt_boxes']
# forward
result_cls_prob, result_bbox_pred, result_rois = self(im_data, im_info, gt_boxes)
loss = self.loss + self._rpn.loss
# backward
if optimizer is not None:
optimizer.zero_grad()
loss.backward()
if clip_parameters is not None:
nn.utils.clip_grad_norm(self._parameters, max_norm=10)
optimizer.step()
loss = loss.data.cpu()[0]
rpn_cls_loss = self._rpn.cross_entropy.data.cpu()[0]
rpn_bbox_loss = self._rpn.loss_box.data.cpu()[0]
fast_rcnn_cls_loss = self.cross_entropy.data.cpu()[0]
fast_rcnn_bbox_loss = self.loss_box.data.cpu()[0]
image = None
if image_if:
image = self.visual_image(blobs, result_cls_prob, result_bbox_pred, result_rois)
# clear middle memory
self._delete_cache()
return (loss, rpn_cls_loss, rpn_bbox_loss, fast_rcnn_cls_loss, fast_rcnn_bbox_loss), image
def visual_image(self, blobs, result_cls_prob, result_bbox_pred, result_rois):
new_gt_boxes = blobs['gt_boxes'].copy()
new_gt_boxes[:, :4] = new_gt_boxes[:, :4]
image = self.back_to_image(blobs['data']).astype(np.uint8)
im_shape = image.shape
pred_boxes, scores, classes = self.interpret_faster_rcnn_scale(result_cls_prob, result_bbox_pred, result_rois,
im_shape, min_score=0.1)
image = self.draw_photo(image, pred_boxes, scores, classes, new_gt_boxes)
image = torchtrans.ToTensor()(image)
image = vutils.make_grid([image])
return image
@staticmethod
def nms_detections(pred_boxes, scores, nms_thresh, inds=None):
dets = np.hstack((pred_boxes,
scores[:, np.newaxis])).astype(np.float32)
keep = nms(dets, nms_thresh)
if inds is None:
return pred_boxes[keep], scores[keep]
return pred_boxes[keep], scores[keep], inds[keep]
def interpret_faster_rcnn_scale(self, cls_prob, bbox_pred, rois, im_shape, nms=True, clip=True, min_score=0.0):
# find class
scores, inds = cls_prob.data.max(1)
scores, inds = scores.cpu().numpy(), inds.cpu().numpy()
keep = np.where((inds > 0) & (scores >= min_score))
scores, inds = scores[keep], inds[keep]
# Apply bounding-box regression deltas
keep = keep[0]
box_deltas = bbox_pred.data.cpu().numpy()[keep]
box_deltas = np.asarray([
box_deltas[i, (inds[i] * 4): (inds[i] * 4 + 4)] for i in range(len(inds))
], dtype=np.float)
boxes = rois.data.cpu().numpy()[keep, 1:5]
if len(keep) != 0:
pred_boxes = bbox_transform_inv(boxes, box_deltas)
else:
pred_boxes = boxes
if clip and pred_boxes.shape[0] > 0:
pred_boxes = clip_boxes(pred_boxes, im_shape)
# nms
if nms and pred_boxes.shape[0] > 0:
pred_boxes, scores, inds = self.nms_detections(pred_boxes, scores, 0.3, inds=inds)
return pred_boxes, scores, self._classes[inds]
def draw_photo(self, image, dets, scores, classes, gt_boxes):
# im2show = np.copy(image)
im2show = image
# color_b = (0, 191, 255)
for i, det in enumerate(dets):
det = tuple(int(x) for x in det)
r = min(0+i*10, 255)
r_i = i / 5
g = min(150+r_i*10, 255)
g_i = r_i / 5
b | |
<gh_stars>1-10
# -*- encoding: UTF-8 -*-
# -*- Indentation: 4 Spaces -*-
import os
import sys
try:
import pyperclip
except ImportError:
pass
'''
A package for tabular operations and for building tabular data.
This project is hosted on GitHub for development purposes;
anyone can contribute, and even the smallest contributions are appreciated.
Check out the source code at https://github.com/John-pix/Tabulator-Python
Download the documentation at https://drive.google.com/uc?export=download&id=1PBXYawIaA7vfTOCT6JjsfPOhitKGjmHZ
Requirements:
    The 'pyperclip' module is needed for the copy-paste features; you can still use the package
    without it, but the copy-paste features will be unavailable.
    Download pyperclip from https://pypi.org/project/pyperclip/
'''
class CellOutOfBoundsException(Exception):
    """Raised when a requested cell lies outside the table.

    Attributes:
        row: the offending row index (or 'X' when not applicable).
        column: the offending column index/heading (or 'X' when not applicable).
        message: human-readable summary.
    """

    def __init__(self, row, column, message="Cell Not Found"):
        self.row = row
        self.column = column
        self.message = message
        super().__init__(message)

    def __str__(self):
        return f'Row = {self.row}; Column = {self.column}'
class table:
    '''
    A table object is created using the table class
    Syntax:
        table_obj = tabulux.table(
            {
                'Head1': ['content-A1', 'content-A2'],
                'Head2': ['content-B1', 'content-B2'],
            }
        )
    Now the table object is stored in the variable 'table_obj'
    Note: every column should contain the same number of elements
    '''
def __init__(self, layout):
    # layout: dict mapping each heading string to its list of cell values.
    # All columns are expected to hold the same number of rows.
    self.layout = layout
    # Row count, taken from the first column's list.
    self.row_len = len(layout[list(layout)[0]])
    # Column count = number of headings.
    self.column_len = len(layout)
def column_length(self, copy=False):
'''
Use this method when you need the column length of the table
Syntax:
table_obj.column_length(copy={True/False})
where:
`table_obj` = a table object
`copy` = This attribute copies the result as text to your clipboard (False by default)
returns integer representing the number of column in `table_obj`
'''
if copy:
pyperclip.copy(self.column_len)
return self.column_len
def row_length(self, copy=False):
'''
Use this method when you need the number of rows of the table
Syntax:
table_obj.row_length(copy={True/False})
where:
`table_obj` = a table object
`copy` = This attribute copies the result as text to your clipboard (False by default)
returns integer representing the number of rows in `table_obj`
'''
if copy:
pyperclip.copy(self.row_len)
return self.row_len
def _get_layout(self):
    # Internal accessor for the backing dict (heading -> list of cell values).
    return self.layout
def heads(self):
'''
Syntax :
table_obj.heads()
where:
table_obj = a table object
returns a 'list' of string representing the each heading of 'table_obj'
'''
table = self._get_layout()
heads = []
for head in table:
heads.append(head)
return heads
def head(self, column, copy=False):
'''
Use head() when you want a heading of a specific column from the table
Syntax:
table_obj.head(column_number, copy={True/False})
where:
`table_obj` = A table object
`column_number` = An integer representing the column number, whose heading you want
`copy` = A boolean- if True, copies the heading to your clipboard(False by default)
returns the heading of the specified column of the table as a string.
'''
heads = self.heads()
if copy:
pyperclip.copy(heads[column])
return heads[column]
def display(self):
    '''
    Print the table contents to stdout in a simple pipe-separated layout
    Syntax:
        table1.display()
    Output format:
        |Head-A | Head-B |
        -----------
        |content-A1 | Content-B1 |
        |content-A2 | Content-B2 |
    Returns None.
    '''
    table = self._get_layout()
    heads = self.heads()
    print('|',end = '')
    for head in heads: # print the heading row
        print(head,end=' | ')
    # separator line between headings and data rows
    print("\n-----------",end='')
    for row in range(self.row_len):
        print('\n',end='|')
        for column in heads:
            print(table[column][row], end = ' | ')
    print()
    return
def cell(self, row, column, copy= False):
'''
Syntax:
Use this method when you need the cell content of a cell
table_obj.cell(row_ID, column_ID, copy={True/False})
where,
`table_obj` = table object
`row` = integer representing the row number (starting from 0), or, the heading of the column as a string
`column` = integer representing the column number (starting from 0)
`copy` = a boolean, if true- copies the result to your clipboard(False,by default)
returns the Cell Content of the specified location as String.
If the cell is not defined in the table or the rows or column given is out of bounds, a `CellOutOfBoundsException` is raised
'''
table = self._get_layout()
if row >= self.row_len:
raise CellOutOfBoundsException(row, column)
elif column >= self.column_len:
raise CellOutOfBoundsException(row,column)
if type(column) is str:
if copy == True:
pyperclip.copy(table[row][column])
return table[row][column]
elif type(column) is int:
if copy == True:
pyperclip.copy(table[self.heads()[row]][column])
return table[self.heads()[row]][column]
def row(self, row_number):
'''
You can use the row() method to get all the row elements in the table as a list
Syntax:
table_obj.row(row_number)
where:
table_obj = a table object
row_number = an integer representing the row number (starts from 0)
For example, the row_number of 2 would return a list containing the values from second row from all columns
A 'CellOutOfBoundsException' is raised if the specified row exceeds the number of rows in the table
'''
if row_number >= self.row_len:
raise CellOutOfBoundsException(row_number, 'X')
table = self._get_layout()
extracted_row = []
heads = self.heads()
for head in heads:
extracted_row.append(table[head][row_number])
return extracted_row
def column(self, column_number):
'''
You could use the getColumn function to get list containing all the whole column, under a specified row
Syntax:
table_obj.column(column_number)
where:
table_obj = a table object
column_number = an integer which represents column number, whose cells must be returned(starting from 0) or,
the heading of the column,(as string), whose values must be returned
'''
if column_number >= self.column_len:
raise CellOutOfBoundsException('X', column_number)
if type(column_number) is str:
return self.layout[column_number]
elif type(column_number) is int:
return self.layout[self.head(column_number)]
def change(self, row, column, new_content, paste=False):
'''
Use change() method to change the content of a cell
Syntax:
table_obj.change(row_ID, column_ID, new_content)
where,
table_obj = a table object
row_ID = an integer representing the column number(starts from 0)
column_ID = an integer representing the column number(starts from 0), or the heading of the column as a string
new_content = a string containg the new content
changes the of the specified column to the content in `new_content`
you can also set a cell content to whatever text copied to your clipboard, by setting the 'paste' argument to True
Syntax:
table_obj.change(row_ID, column_ID, new_content, paste=True)
the 'new_content' is ignored if paste is set to True, so you can leave the new content as `""`
This cange the cell in the specified location to the text in the clipboard.
Aditionaly, this method also returns the old content of the specified cell
'''
if row >= self.row_len:
raise CellOutOfBoundsException(row, column)
if column >= self.column_len:
raise CellOutOfBoundsException(row, column)
if type(column) is str:
old_content = self.layout[column][row]
if paste == True:
self.layout[column][row] = pyperclip.paste()
else:
self.layout[column][row] = new_content
return old_content
elif type(column) is int:
heads = self.heads()
old_content = self.layout[heads[column]][row]
if paste == True:
self.layout[heads[column]][row] = pyperclip.paste()
else:
self.layout[heads[column]][row] = new_content
return old_content
else:
raise TypeError
def add(self, x1, y1, x2, y2, copy=False):
'''
The add method can be used to add two cells, if the cells are integers or can be converted to integers,
Syntax:
table_obj.add(row1, column1, row2, column2, copy={True/False})
where:
`table_obj` = a table object
`row1` = cell row of the first cell as an integer
`column1` = cell column of the first cell as an integer, or the heading of the column as a string
`row2 ` = cell row of the second cell as an integer
`column2` = cell column of the second cell as an integer, or the heading of the column as a string
`copy` = If this boolean attribute is set to True, the result will be copied to your clipboard(False by default)
returns the sum of the two cells
'''
try:
if type(y1)is int:
y1 = self.head(y1)
if type(y2) is int:
y2 = self.head(y2)
except:
raise CellOutOfBoundsException(y1, y2)
try:
if copy:
pyperclip.copy(str(int(self.layout[y1][x1])) + (int(self.layout[y2][x2])))
return ((int(self.layout[y1][x1])) + (int(self.layout[y2][x2])))
except:
raise TypeError
def add_all(self, x1, x2, y1, y2, copy=False):
'''
The add_all() method can be used to add all the cells from a cell to another cell, if all their values are integers
Syntax:
table_obj.add_all(from_row, from_column, to_row, to_column, copy={True/False})
where:
`table_obj` = a table object
`from_row` = row number of the first cell
`from_column` = column number of the first cell
`to_row` = row number of the last cell
`to_column` = column number of the last cell
`copy` = a boolean, if True- the result will be copied to your clipboard (False by default)
returns the sum of all the content of cells between the specified cells.
If any cell content cannot be | |
cmap=cmap)
return cmap
@property
def dark2_8(self):
cname = "dark2_8"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "dark2_8.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def dark2_8_r(self):
cname = "dark2_8_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "dark2_8.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu(self):
cname = "gnbu"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_r(self):
cname = "gnbu_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_3(self):
cname = "gnbu_3"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_3.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_3_r(self):
cname = "gnbu_3_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_3.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_4(self):
cname = "gnbu_4"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_4.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_4_r(self):
cname = "gnbu_4_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_4.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_5(self):
cname = "gnbu_5"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_5.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_5_r(self):
cname = "gnbu_5_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_5.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_6(self):
cname = "gnbu_6"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_6.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_6_r(self):
cname = "gnbu_6_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_6.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_7(self):
cname = "gnbu_7"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_7.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_7_r(self):
cname = "gnbu_7_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_7.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_8(self):
cname = "gnbu_8"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_8.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_8_r(self):
cname = "gnbu_8_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_8.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_9(self):
cname = "gnbu_9"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_9.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def gnbu_9_r(self):
cname = "gnbu_9_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "gnbu_9.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens(self):
cname = "greens"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_r(self):
cname = "greens_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_3(self):
cname = "greens_3"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_3.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_3_r(self):
cname = "greens_3_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_3.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_4(self):
cname = "greens_4"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_4.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_4_r(self):
cname = "greens_4_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_4.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_5(self):
cname = "greens_5"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_5.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_5_r(self):
cname = "greens_5_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_5.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_6(self):
cname = "greens_6"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_6.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_6_r(self):
cname = "greens_6_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_6.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_7(self):
cname = "greens_7"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_7.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_7_r(self):
cname = "greens_7_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_7.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_8(self):
cname = "greens_8"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_8.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_8_r(self):
cname = "greens_8_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_8.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_9(self):
cname = "greens_9"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_9.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greens_9_r(self):
cname = "greens_9_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greens_9.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys(self):
cname = "greys"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_r(self):
cname = "greys_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_3(self):
cname = "greys_3"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_3.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_3_r(self):
cname = "greys_3_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_3.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_4(self):
cname = "greys_4"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_4.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_4_r(self):
cname = "greys_4_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_4.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_5(self):
cname = "greys_5"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_5.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_5_r(self):
cname = "greys_5_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_5.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_6(self):
cname = "greys_6"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_6.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_6_r(self):
cname = "greys_6_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_6.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_7(self):
cname = "greys_7"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_7.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_7_r(self):
cname = "greys_7_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_7.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_8(self):
cname = "greys_8"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_8.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_8_r(self):
cname = "greys_8_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_8.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_9(self):
cname = "greys_9"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_9.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def greys_9_r(self):
cname = "greys_9_r"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "greys_9.rgb")
cmap = Colormap(self._coltbl(cmap_file)[::-1], name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def oranges(self):
cname = "oranges"
if cname in matplotlib.cm._cmap_registry:
return matplotlib.cm.get_cmap(cname)
cmap_file = os.path.join(CMAPSFILE_DIR, "colorbrewer", "oranges.rgb")
cmap = Colormap(self._coltbl(cmap_file), name=cname)
matplotlib.cm.register_cmap(name=cname, cmap=cmap)
return cmap
@property
def | |
<filename>pipenv/patched/piptools/resolver.py
# coding: utf-8
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import copy
from functools import partial
from itertools import chain, count
import os
from first import first
from notpip.req import InstallRequirement
from . import click
from .cache import DependencyCache
from .exceptions import UnsupportedConstraint
from .logging import log
from .utils import (format_requirement, format_specifier, full_groupby,
is_pinned_requirement, key_from_ireq, key_from_req, UNSAFE_PACKAGES)
# Convenience partials that colour terminal text via click.style with a
# fixed foreground colour.
green = partial(click.style, fg='green')
magenta = partial(click.style, fg='magenta')
class RequirementSummary(object):
    """
    Summary of a requirement's properties for comparison purposes.

    Two summaries compare (and hash) equal when their key, specifier and
    extras render to the same canonical string.
    """

    def __init__(self, ireq):
        req = ireq.req
        self.req = req
        self.key = key_from_req(req)
        self.markers = ireq.markers
        self.extras = str(sorted(ireq.extras))
        self.specifier = str(ireq.specifier)

    def _identity(self):
        # Canonical rendering used for printing, equality and hashing.
        return repr([self.key, self.specifier, self.extras])

    def __eq__(self, other):
        return str(self) == str(other)

    def __hash__(self):
        return hash(str(self))

    def __str__(self):
        return self._identity()
class Resolver(object):
def __init__(self, constraints, repository, cache=None, prereleases=False, clear_caches=False, allow_unsafe=False):
"""
This class resolves a given set of constraints (a collection of
InstallRequirement objects) by consulting the given Repository and the
DependencyCache.
"""
self.our_constraints = set(constraints)
self.their_constraints = set()
self.repository = repository
if cache is None:
cache = DependencyCache() # pragma: no cover
self.dependency_cache = cache
self.prereleases = prereleases
self.clear_caches = clear_caches
self.allow_unsafe = allow_unsafe
self.unsafe_constraints = set()
@property
def constraints(self):
return set(self._group_constraints(chain(self.our_constraints,
self.their_constraints)))
def resolve_hashes(self, ireqs):
"""
Finds acceptable hashes for all of the given InstallRequirements.
"""
with self.repository.allow_all_wheels():
return {ireq: self.repository.get_hashes(ireq) for ireq in ireqs}
def resolve(self, max_rounds=12):
"""
Finds concrete package versions for all the given InstallRequirements
and their recursive dependencies. The end result is a flat list of
(name, version) tuples. (Or an editable package.)
Resolves constraints one round at a time, until they don't change
anymore. Protects against infinite loops by breaking out after a max
number rounds.
"""
if self.clear_caches:
self.dependency_cache.clear()
self.repository.clear_caches()
self.check_constraints(chain(self.our_constraints,
self.their_constraints))
# Ignore existing packages
os.environ[str('PIP_EXISTS_ACTION')] = str('i') # NOTE: str() wrapping necessary for Python 2/3 compat
for current_round in count(start=1):
if current_round > max_rounds:
raise RuntimeError('No stable configuration of concrete packages '
'could be found for the given constraints after '
'%d rounds of resolving.\n'
'This is likely a bug.' % max_rounds)
log.debug('')
log.debug(magenta('{:^60}'.format('ROUND {}'.format(current_round))))
has_changed, best_matches = self._resolve_one_round()
log.debug('-' * 60)
log.debug('Result of round {}: {}'.format(current_round,
'not stable' if has_changed else 'stable, done'))
if not has_changed:
break
# If a package version (foo==2.0) was built in a previous round,
# and in this round a different version of foo needs to be built
# (i.e. foo==1.0), the directory will exist already, which will
# cause a pip build failure. The trick is to start with a new
# build cache dir for every round, so this can never happen.
self.repository.freshen_build_caches()
del os.environ['PIP_EXISTS_ACTION']
# Only include hard requirements and not pip constraints
return {req for req in best_matches if not req.constraint}
@staticmethod
def check_constraints(constraints):
for constraint in constraints:
if constraint.link is not None and not constraint.editable:
msg = ('pip-compile does not support URLs as packages, unless they are editable. '
'Perhaps add -e option?')
raise UnsupportedConstraint(msg, constraint)
    def _group_constraints(self, constraints):
        """
        Groups constraints (remember, InstallRequirements!) by their key name,
        and combining their SpecifierSets into a single InstallRequirement per
        package. For example, given the following constraints:
        Django<1.9,>=1.4.2
        django~=1.5
        Flask~=0.7
        This will be combined into a single entry per package:
        django~=1.5,<1.9,>=1.4.2
        flask~=0.7

        Yields one combined InstallRequirement per package key. An editable
        requirement in a group short-circuits the merge and is yielded as-is.
        """
        for _, ireqs in full_groupby(constraints, key=key_from_ireq):
            ireqs = list(ireqs)
            editable_ireq = first(ireqs, key=lambda ireq: ireq.editable)
            if editable_ireq:
                yield editable_ireq  # ignore all the other specs: the editable one is the one that counts
                continue
            ireqs = iter(ireqs)
            # deepcopy the accumulator so as to not modify the self.our_constraints invariant
            combined_ireq = copy.deepcopy(next(ireqs))
            combined_ireq.comes_from = None
            for ireq in ireqs:
                # NOTE we may be losing some info on dropped reqs here
                combined_ireq.req.specifier &= ireq.req.specifier
                combined_ireq.constraint &= ireq.constraint
                # NOTE(review): last-seen markers win here -- presumably all
                # ireqs in a group share markers; confirm before relying on it.
                combined_ireq.markers = ireq.markers
                # Return a sorted, de-duped tuple of extras
                combined_ireq.extras = tuple(sorted(set(tuple(combined_ireq.extras) + tuple(ireq.extras))))
            yield combined_ireq
    def _resolve_one_round(self):
        """
        Resolves one level of the current constraints, by finding the best
        match for each package in the repository and adding all requirements
        for those best package versions. Some of these constraints may be new
        or updated.
        Returns whether new constraints appeared in this round. If no
        constraints were added or changed, this indicates a stable
        configuration.

        Returns a ``(has_changed, best_matches)`` tuple, where ``best_matches``
        is the set of chosen InstallRequirements for this round.
        """
        # Sort this list for readability of terminal output
        constraints = sorted(self.constraints, key=key_from_ireq)
        unsafe_constraints = []
        original_constraints = copy.copy(constraints)
        if not self.allow_unsafe:
            # Pull "unsafe" packages (e.g. setuptools/pip) out of the working
            # set; iterate over the copy because `constraints` is mutated.
            for constraint in original_constraints:
                if constraint.name in UNSAFE_PACKAGES:
                    constraints.remove(constraint)
                    # NOTE(review): this mutates the shared constraint object
                    # (clears its specifier) -- intentional per upstream piptools.
                    constraint.req.specifier = None
                    unsafe_constraints.append(constraint)
        log.debug('Current constraints:')
        for constraint in constraints:
            log.debug(' {}'.format(constraint))
        log.debug('')
        log.debug('Finding the best candidates:')
        best_matches = {self.get_best_match(ireq) for ireq in constraints}
        # Find the new set of secondary dependencies
        log.debug('')
        log.debug('Finding secondary dependencies:')
        safe_constraints = []
        for best_match in best_matches:
            for dep in self._iter_dependencies(best_match):
                if self.allow_unsafe or dep.name not in UNSAFE_PACKAGES:
                    safe_constraints.append(dep)
        # Grouping constraints to make clean diff between rounds
        theirs = set(self._group_constraints(safe_constraints))
        # NOTE: We need to compare RequirementSummary objects, since
        # InstallRequirement does not define equality
        diff = {RequirementSummary(t) for t in theirs} - {RequirementSummary(t) for t in self.their_constraints}
        removed = ({RequirementSummary(t) for t in self.their_constraints} -
                   {RequirementSummary(t) for t in theirs})
        unsafe = ({RequirementSummary(t) for t in unsafe_constraints} -
                  {RequirementSummary(t) for t in self.unsafe_constraints})
        # Stable when nothing was added, removed, or newly marked unsafe.
        has_changed = len(diff) > 0 or len(removed) > 0 or len(unsafe) > 0
        if has_changed:
            log.debug('')
            log.debug('New dependencies found in this round:')
            for new_dependency in sorted(diff, key=lambda req: key_from_req(req.req)):
                log.debug(' adding {}'.format(new_dependency))
            log.debug('Removed dependencies in this round:')
            for removed_dependency in sorted(removed, key=lambda req: key_from_req(req.req)):
                log.debug(' removing {}'.format(removed_dependency))
            log.debug('Unsafe dependencies in this round:')
            for unsafe_dependency in sorted(unsafe, key=lambda req: key_from_req(req.req)):
                log.debug(' remembering unsafe {}'.format(unsafe_dependency))
        # Store the last round's results in the their_constraints
        self.their_constraints = theirs
        # Store the last round's unsafe constraints
        self.unsafe_constraints = unsafe_constraints
        return has_changed, best_matches
def get_best_match(self, ireq):
"""
Returns a (pinned or editable) InstallRequirement, indicating the best
match to use for the given InstallRequirement (in the form of an
InstallRequirement).
Example:
Given the constraint Flask>=0.10, may return Flask==0.10.1 at
a certain moment in time.
Pinned requirements will always return themselves, i.e.
Flask==0.10.1 => Flask==0.10.1
"""
if ireq.editable:
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
elif is_pinned_requirement(ireq):
# NOTE: it's much quicker to immediately return instead of
# hitting the index server
best_match = ireq
else:
best_match = self.repository.find_best_match(ireq, prereleases=self.prereleases)
# Format the best match
log.debug(' found candidate {} (constraint was {})'.format(format_requirement(best_match),
format_specifier(ireq)))
return best_match
def _iter_dependencies(self, ireq):
"""
Given a pinned or editable InstallRequirement, collects all the
secondary dependencies for them, either by looking them up in a local
cache, or by reaching out to the repository.
Editable requirements will never be looked up, as they may have
changed at any time.
"""
if ireq.editable:
for dependency in self.repository.get_dependencies(ireq):
yield dependency
return
elif ireq.markers:
for dependency in self.repository.get_dependencies(ireq):
dependency.prepared = False
yield dependency
return
elif ireq.extras:
for dependency in self.repository.get_dependencies(ireq):
dependency.prepared = False
yield dependency
return
elif not is_pinned_requirement(ireq):
raise TypeError('Expected pinned or editable requirement, got {}'.format(ireq))
# Now, either get the dependencies from the dependency cache (for
# speed), or reach out to the external repository to
# download and inspect the package version and get dependencies
# from there
if ireq not in self.dependency_cache:
log.debug(' {} not in cache, need to check index'.format(format_requirement(ireq)), fg='yellow')
dependencies = self.repository.get_dependencies(ireq)
import sys
if sys.version_info[0] == 2:
self.dependency_cache[ireq] = sorted(str(ireq.req) for ireq in dependencies)
else:
self.dependency_cache[ireq] = sorted('{0}; {1}'.format(str(ireq.req), str(ireq.markers)) if ireq.markers else str(ireq.req) for ireq in dependencies)
# Example: ['Werkzeug>=0.9', 'Jinja2>=2.4']
dependency_strings = self.dependency_cache[ireq]
log.debug(' {:25} requires {}'.format(format_requirement(ireq),
', '.join(sorted(dependency_strings, key=lambda s: s.lower())) or '-'))
from notpip._vendor.packaging.markers import InvalidMarker
for dependency_string in dependency_strings:
try:
markers = None
if ';' in dependency_string:
# split off markers and remove any duplicates by comparing against deps
dependencies, markers = dependency_string.rsplit(';', 1)
dependency_string = ';'.join([dep for dep in dependencies.split(';') if dep.strip() != markers.strip()])
individual_dependencies = [dep.strip() for dep in dependency_string.split(', ')]
cleaned_deps = []
for dep in individual_dependencies:
tokens = [token.strip() for token in dep.split(';')]
cleaned_tokens = []
dep_markers = []
if len(tokens) == 1:
cleaned_deps.append(tokens[0])
continue
dep_markers = list(set(tokens[1:]))
cleaned_tokens.append(tokens[0])
if dep_markers:
cleaned_tokens.extend(dep_markers)
cleaned_deps.append('; | |
15074: ["Sablefish fish"], # Fish, sablefish, raw
15075: [], # Fish, sablefish, smoked
15076: ["Salmon fish"], # Fish, salmon, Atlantic, wild, raw
15077: [], # Fish, salmon, chinook, smoked
15078: [], # Fish, salmon, chinook, raw
15079: [], # Fish, salmon, chum, raw
15080: [], # Fish, salmon, chum, canned, drained solids with bone
15081: [], # Fish, salmon, coho, wild, raw
15082: [], # Fish, salmon, coho, wild, cooked, moist heat
15083: [], # Fish, salmon, pink, raw
15084: [], # Fish, salmon, pink, canned, total can contents
15085: [], # Fish, salmon, sockeye, raw
15086: [], # Fish, salmon, sockeye, cooked, dry heat
15087: [], # Fish, salmon, sockeye, canned, drained solids
15088: [], # Fish, sardine, Atlantic, canned in oil, drained solids with bone
15089: [], # Fish, sardine, Pacific, canned in tomato sauce, drained solids with bone
15090: ["Scup fish"], # Fish, scup, raw
15091: ["Sea bass fish"], # Fish, sea bass, mixed species, raw
15092: [], # Fish, sea bass, mixed species, cooked, dry heat
15093: ["Seatrout fish"], # Fish, seatrout, mixed species, raw
15094: ["Shad fish"], # Fish, shad, american, raw
15095: ["Shark fish"], # Fish, shark, mixed species, raw
15096: [], # Fish, shark, mixed species, cooked, batter-dipped and fried
15097: ["Sheepshead fish"], # Fish, sheepshead, raw
15098: [], # Fish, sheepshead, cooked, dry heat
15099: ["Smelt fish", "rainbow"], # Fish, smelt, rainbow, raw
15100: [], # Fish, smelt, rainbow, cooked, dry heat
15101: ["Snapper fish"], # Fish, snapper, mixed species, raw
15102: [], # Fish, snapper, mixed species, cooked, dry heat
15103: ["Spot fish"], # Fish, spot, raw
15104: ["Sturgeon fish"], # Fish, sturgeon, mixed species, raw
15105: [], # Fish, sturgeon, mixed species, cooked, dry heat
15106: [], # Fish, sturgeon, mixed species, smoked
15107: ["Sucker fish", "white"], # Fish, sucker, white, raw
15108: ["Sunfish fish"], # Fish, sunfish, pumpkin seed, raw
15109: [], # Fish, surimi
15110: ["Swordfish fish"], # Fish, swordfish, raw
15111: [], # Fish, swordfish, cooked, dry heat
15112: ["Tilefish fish"], # Fish, tilefish, raw
15113: [], # Fish, tilefish, cooked, dry heat
15114: ["Trout fish"], # Fish, trout, mixed species, raw
15115: [], # Fish, trout, rainbow, wild, raw
15116: [], # Fish, trout, rainbow, wild, cooked, dry heat
15117: ["Tuna fish"], # Fish, tuna, fresh, bluefin, raw
15118: [], # Fish, tuna, fresh, bluefin, cooked, dry heat
15119: [], # Fish, tuna, light, canned in oil, drained solids
15121: [], # Fish, tuna, light, canned in water, drained solids (Includes foods for USDA's Food Distribution Program)
15123: [], # Fish, tuna, fresh, skipjack, raw
15124: [], # Fish, tuna, white, canned in oil, drained solids
15126: [], # Fish, tuna, white, canned in water, drained solids
15127: [], # Fish, tuna, fresh, yellowfin, raw
15128: [], # Fish, tuna salad
15129: ["Turbot fish"], # Fish, turbot, european, raw
15130: ["Whitefish fish"], # Fish, whitefish, mixed species, raw
15131: [], # Fish, whitefish, mixed species, smoked
15132: ["Whiting fish"], # Fish, whiting, mixed species, raw
15133: [], # Fish, whiting, mixed species, cooked, dry heat
15134: ["Wolffish fish"], # Fish, wolffish, Atlantic, raw
15135: ["Yellowtail fish"], # Fish, yellowtail, mixed species, raw
15136: ["Crab", "alaska king"], # Crustaceans, crab, alaska king, raw
15137: [], # Crustaceans, crab, alaska king, cooked, moist heat
15138: [], # Crustaceans, crab, alaska king, imitation, made from surimi
15139: ["Crab", "blue"], # Crustaceans, crab, blue, raw
15140: [], # Crustaceans, crab, blue, cooked, moist heat
15141: [], # Crustaceans, crab, blue, canned
15142: [], # Crustaceans, crab, blue, crab cakes, home recipe
15143: ["Crab", "dungeness"], # Crustaceans, crab, dungeness, raw
15144: ["Crab", "queen"], # Crustaceans, crab, queen, raw
15145: ["Crayfish"], # Crustaceans, crayfish, mixed species, wild, raw
15146: [], # Crustaceans, crayfish, mixed species, wild, cooked, moist heat
15147: ["Lobster", "northern"], # Crustaceans, lobster, northern, raw
15148: [], # Crustaceans, lobster, northern, cooked, moist heat
15149: [
"Shrimp"
], # Crustaceans, shrimp, mixed species, raw (may contain additives to retain moisture)
15150: [], # Crustaceans, shrimp, mixed species, cooked, breaded and fried
15151: [], # Crustaceans, shrimp, mixed species, cooked, moist heat (may contain additives to retain moisture)
15152: [], # Crustaceans, shrimp, mixed species, canned
15153: [], # Crustaceans, shrimp, mixed species, imitation, made from surimi
15154: ["Spiny lobster"], # Crustaceans, spiny lobster, mixed species, raw
15155: ["Abalone"], # Mollusks, abalone, mixed species, raw
15156: [], # Mollusks, abalone, mixed species, cooked, fried
15157: ["Clam"], # Mollusks, clam, mixed species, raw
15158: [], # Mollusks, clam, mixed species, cooked, breaded and fried
15159: [], # Mollusks, clam, mixed species, cooked, moist heat
15160: [], # Mollusks, clam, mixed species, canned, drained solids
15162: [], # Mollusks, clam, mixed species, canned, liquid
15163: ["Cuttlefish"], # Mollusks, cuttlefish, mixed species, raw
15164: ["Mussel", "blue"], # Mollusks, mussel, blue, raw
15165: [], # Mollusks, mussel, blue, cooked, moist heat
15166: ["Octopus"], # Mollusks, octopus, common, raw
15167: ["Oyster", "eastern"], # Mollusks, oyster, eastern, wild, raw
15168: [], # Mollusks, oyster, eastern, cooked, breaded and fried
15169: [], # Mollusks, oyster, eastern, wild, cooked, moist heat
15170: [], # Mollusks, oyster, eastern, canned
15171: ["Oyster", "pacific"], # Mollusks, oyster, Pacific, raw
15172: ["Scallop"], # Mollusks, scallop, mixed species, raw
15173: [], # Mollusks, scallop, mixed species, cooked, breaded and fried
15174: [], # Mollusks, scallop, mixed species, imitation, made from surimi
15175: ["Squid"], # Mollusks, squid, mixed species, raw
15176: [], # Mollusks, squid, mixed species, cooked, fried
15177: [], # Mollusks, whelk, unspecified, raw
15178: [], # Mollusks, whelk, unspecified, cooked, moist heat
15179: [], # Fish, salmon, chinook, smoked, (lox), regular
15180: [], # Fish, salmon, chum, canned, without salt, drained solids with bone
15181: [], # Fish, salmon, pink, canned, without salt, solids with bone and liquid
15182: [], # Fish, salmon, sockeye, canned, without salt, drained solids with bone
15183: [], # Fish, tuna, light, canned in oil, without salt, drained solids
15184: [], # Fish, tuna, light, canned in water, without salt, drained solids
15185: [], # Fish, tuna, white, canned in oil, without salt, drained solids
15186: [], # Fish, tuna, white, canned in water, without salt, drained solids
15187: [], # Fish, bass, freshwater, mixed species, cooked, dry heat
15188: [], # Fish, bass, striped, cooked, dry heat
15189: [], # Fish, bluefish, cooked, dry heat
15190: [], # Fish, burbot, cooked, dry heat
15191: [], # Fish, butterfish, cooked, dry heat
15192: [], # Fish, cod, Pacific, cooked, dry heat (may contain additives to retain moisture)
15193: [], # Fish, cusk, cooked, dry heat
15194: [], # Fish, mahimahi, cooked, dry heat
15195: [], # Fish, drum, freshwater, cooked, dry heat
15196: [], # Fish, halibut, greenland, cooked, dry heat
15197: [], # Fish, herring, Pacific, cooked, dry heat
15198: [], # Fish, ling, cooked, dry heat
15199: [], # Fish, lingcod, cooked, dry heat
15200: [], # Fish, mackerel, king, cooked, dry heat
15201: [], # Fish, mackerel, Pacific and jack, mixed species, cooked, dry heat
15202: [], # Fish, milkfish, cooked, dry heat
15203: [], # Fish, monkfish, cooked, dry heat
15204: [], # Fish, pike, walleye, cooked, dry heat
15205: [], # Fish, pollock, Atlantic, cooked, dry heat
15206: [], # Fish, pout, ocean, cooked, dry heat
15207: [], # Fish, roe, mixed species, cooked, dry heat
15208: [], # Fish, sablefish, cooked, dry heat
15209: [], # Fish, salmon, Atlantic, wild, cooked, dry heat
15210: [], # Fish, salmon, chinook, cooked, dry heat
15211: [], # Fish, salmon, chum, cooked, dry heat
15212: [], # Fish, salmon, pink, cooked, dry heat
15213: [], # Fish, scup, cooked, dry heat
15214: [], # Fish, seatrout, mixed species, cooked, dry heat
15215: [], # | |
#!/usr/bin/env python
# Copyright 2016 DIANA-HEP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import math
import numbers
import struct
from histogrammar.defs import *
from histogrammar.util import *
class Minimize(Factory, Container):
    """Find the minimum value of a given quantity. If no data are observed, the result is NaN."""

    @staticmethod
    def ed(entries, min):
        """Create a Minimize that is only capable of being added.

        Parameters:
            entries (float): the number of entries.
            min (float): the lowest value of the quantity observed or NaN if no data were observed.
        """
        if not isinstance(entries, numbers.Real) and entries not in ("nan", "inf", "-inf"):
            raise TypeError("entries ({0}) must be a number".format(entries))
        # fix: validate `min` itself -- the original re-tested `entries` here,
        # so an invalid `min` slipped through to float() below
        if not isinstance(min, numbers.Real) and min not in ("nan", "inf", "-inf"):
            raise TypeError("min ({0}) must be a number".format(min))
        if entries < 0.0:
            raise ValueError("entries ({0}) cannot be negative".format(entries))
        out = Minimize(None)
        out.entries = float(entries)
        out.min = float(min)
        return out.specialize()

    @staticmethod
    def ing(quantity):
        """Synonym for ``__init__``."""
        return Minimize(quantity)

    def __init__(self, quantity):
        """Create a Minimize that is capable of being filled and added.

        Parameters:
            quantity (function returning float): computes the quantity of interest from the data.

        Other parameters:
            entries (float): the number of entries, initially 0.0.
            min (float): the lowest value of the quantity observed, initially NaN.
        """
        self.quantity = serializable(quantity)
        self.entries = 0.0
        self.min = float("nan")
        super(Minimize, self).__init__()
        self.specialize()

    @inheritdoc(Container)
    def zero(self): return Minimize(self.quantity)

    @inheritdoc(Container)
    def __add__(self, other):
        if isinstance(other, Minimize):
            out = Minimize(self.quantity)
            out.entries = self.entries + other.entries
            # minplus is NaN-aware: NaN acts as the identity element
            out.min = minplus(self.min, other.min)
            return out.specialize()
        else:
            raise ContainerException("cannot add {0} and {1}".format(self.name, other.name))

    @inheritdoc(Container)
    def __iadd__(self, other):
        both = self + other
        self.entries = both.entries
        self.min = both.min
        return self

    @inheritdoc(Container)
    def __mul__(self, factor):
        if math.isnan(factor) or factor <= 0.0:
            return self.zero()
        else:
            out = self.zero()
            # scaling reweights the entry count; the observed minimum is unchanged
            out.entries = factor * self.entries
            out.min = self.min
            return out.specialize()

    @inheritdoc(Container)
    def __rmul__(self, factor):
        return self.__mul__(factor)

    @property
    def children(self):
        """List of sub-aggregators, to make it possible to walk the tree."""
        return []

    @inheritdoc(Container)
    def fill(self, datum, weight=1.0):
        self._checkForCrossReferences()
        if weight > 0.0:
            q = self.quantity(datum)
            if not isinstance(q, numbers.Real):
                raise TypeError("function return value ({0}) must be boolean or number".format(q))
            # no possibility of exception from here on out (for rollback)
            self.entries += weight
            if math.isnan(self.min) or q < self.min:
                self.min = q

    def _cppGenerateCode(self, parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, storageStructs, initCode, initPrefix, initIndent, fillCode, fillPrefix, fillIndent, weightVars, weightVarStack, tmpVarTypes):
        # C++ code generation is identical to the C99 form for this aggregator
        return self._c99GenerateCode(parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, storageStructs, initCode, initPrefix, initIndent, fillCode, fillPrefix, fillIndent, weightVars, weightVarStack, tmpVarTypes)

    def _c99GenerateCode(self, parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, storageStructs, initCode, initPrefix, initIndent, fillCode, fillPrefix, fillIndent, weightVars, weightVarStack, tmpVarTypes):
        # emit init (NaN sentinel) and fill (running minimum) statements
        initCode.append(" " * initIndent + self._c99ExpandPrefix(*initPrefix) + ".entries = 0.0;")
        initCode.append(" " * initIndent + self._c99ExpandPrefix(*initPrefix) + ".min = NAN;")
        normexpr = self._c99QuantityExpr(parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, None)
        fillCode.append(" " * fillIndent + self._c99ExpandPrefix(*fillPrefix) + ".entries += " + weightVarStack[-1] + ";")
        fillCode.append(" " * fillIndent + "if (std::isnan({min}) || {q} < {min}) {min} = {q};".format(
            min = self._c99ExpandPrefix(*fillPrefix) + ".min",
            q = normexpr))
        storageStructs[self._c99StructName()] = """
  typedef struct {{
    double entries;
    double min;
  }} {0};
""".format(self._c99StructName())

    def _clingUpdate(self, filler, *extractorPrefix):
        # merge results computed by the Cling (ROOT) backend into this object
        obj = self._clingExpandPrefix(filler, *extractorPrefix)
        self.entries = self.entries + obj.entries
        self.min = minplus(self.min, obj.min)

    def _c99StructName(self):
        return "Mn"

    def _cudaGenerateCode(self, parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, storageStructs, initCode, initPrefix, initIndent, fillCode, fillPrefix, fillIndent, combineCode, totalPrefix, itemPrefix, combineIndent, jsonCode, jsonPrefix, jsonIndent, weightVars, weightVarStack, tmpVarTypes, suppressName):
        # scratch variables for the atomicCAS loop (float bits reinterpreted as int)
        old = "old_" + str(len(tmpVarTypes))
        tmpVarTypes[old] = "int"
        assumed = "assumed_" + str(len(tmpVarTypes))
        tmpVarTypes[assumed] = "float"
        trial = "trial_" + str(len(tmpVarTypes))
        tmpVarTypes[trial] = "float"
        initCode.append(" " * initIndent + "(void){old}; (void){assumed}; (void){trial}; // not used; ignore warnings".format(old=old, assumed=assumed, trial=trial))
        jsonCode.append(" " * jsonIndent + "(void){old}; (void){assumed}; (void){trial}; // not used; ignore warnings".format(old=old, assumed=assumed, trial=trial))
        initCode.append(" " * initIndent + self._c99ExpandPrefix(*initPrefix) + ".entries = 0.0f;")
        initCode.append(" " * initIndent + self._c99ExpandPrefix(*initPrefix) + ".min = CUDART_NAN_F;")
        normexpr = self._cudaQuantityExpr(parser, generator, inputFieldNames, inputFieldTypes, derivedFieldTypes, derivedFieldExprs, None)
        # lock-free minimum via compare-and-swap on the float's bit pattern
        fillCode.append("""{indent}atomicAdd(&{prefix}.entries, {weight});
{indent}{old} = *(int*)(&{prefix}.min);
{indent}do {{
{indent}  {assumed} = *(float*)(&{old});
{indent}  if (isnan({assumed}) || {q} < {assumed})
{indent}    {trial} = {q};
{indent}  else
{indent}    {trial} = {assumed};
{indent}  {old} = atomicCAS((int*)(&{prefix}.min), *(int*)(&{assumed}), *(int*)(&{trial}));
{indent}}} while (*(int*)(&{assumed}) != {old});
""".format(indent = " " * fillIndent,
           prefix = self._c99ExpandPrefix(*fillPrefix),
           weight = weightVarStack[-1],
           old = old,
           assumed = assumed,
           trial = trial,
           q = normexpr))
        combineCode.append("""{indent}atomicAdd(&{total}.entries, {item}.entries);
{indent}{old} = *(int*)(&{total}.min);
{indent}do {{
{indent}  {assumed} = *(float*)(&{old});
{indent}  if (isnan({assumed}))
{indent}    {trial} = {item}.min;
{indent}  else if (isnan({item}.min))
{indent}    {trial} = {assumed};
{indent}  else if ({assumed} < {item}.min)
{indent}    {trial} = {assumed};
{indent}  else
{indent}    {trial} = {item}.min;
{indent}  {old} = atomicCAS((int*)(&{total}.min), *(int*)(&{assumed}), *(int*)(&{trial}));
{indent}}} while (*(int*)(&{assumed}) != {old});
""".format(indent = " " * combineIndent,
           total = self._c99ExpandPrefix(*totalPrefix),
           item = self._c99ExpandPrefix(*itemPrefix),
           weight = weightVarStack[-1],
           old = old,
           assumed = assumed,
           trial = trial))
        jsonCode.append(" " * jsonIndent + "fprintf(out, \"{\\\"entries\\\": \");")
        jsonCode.append(" " * jsonIndent + "floatToJson(out, " + self._c99ExpandPrefix(*jsonPrefix) + ".entries);")
        jsonCode.append(" " * jsonIndent + "fprintf(out, \", \\\"min\\\": \");")
        jsonCode.append(" " * jsonIndent + "floatToJson(out, " + self._c99ExpandPrefix(*jsonPrefix) + ".min);")
        if suppressName or self.quantity.name is None:
            jsonCode.append(" " * jsonIndent + "fprintf(out, \"}\");")
        else:
            jsonCode.append(" " * jsonIndent + "fprintf(out, \", \\\"name\\\": " + json.dumps(json.dumps(self.quantity.name))[1:-1] + "}\");")
        storageStructs[self._c99StructName()] = """
  typedef struct {{
    float entries;
    float min;
  }} {0};
""".format(self._c99StructName())

    def _cudaUnpackAndFill(self, data, bigendian, alignment):
        # NOTE(review): format is hard-coded little-endian; `bigendian` and
        # `alignment` are currently ignored -- confirm against the GPU layout
        format = "<ff"
        objentries, objmin = struct.unpack(format, data[:struct.calcsize(format)])
        self.entries = self.entries + objentries
        self.min = minplus(self.min, objmin)
        return data[struct.calcsize(format):]

    def _numpy(self, data, weights, shape):
        q = self.quantity(data)
        self._checkNPQuantity(q, shape)
        self._checkNPWeights(weights, shape)
        weights = self._makeNPWeights(weights, shape)
        # no possibility of exception from here on out (for rollback)
        import numpy
        # keep only finite quantities with positive weight
        selection = numpy.isnan(q)
        numpy.bitwise_not(selection, selection)
        numpy.bitwise_and(selection, weights > 0.0, selection)
        q = q[selection]
        self.entries += float(weights.sum())
        if math.isnan(self.min):
            if q.shape[0] > 0:
                self.min = float(q.min())
        else:
            if q.shape[0] > 0:
                self.min = min(self.min, float(q.min()))

    def _sparksql(self, jvm, converter):
        # fix: the original referenced the bare name `quantity`, which is
        # undefined in this scope and raised a NameError at call time
        return converter.Minimize(self.quantity.asSparkSQL())

    @inheritdoc(Container)
    def toJsonFragment(self, suppressName): return maybeAdd({
        "entries": floatToJson(self.entries),
        "min": floatToJson(self.min),
        }, name=(None if suppressName else self.quantity.name))

    @staticmethod
    @inheritdoc(Factory)
    def fromJsonFragment(json, nameFromParent):
        if isinstance(json, dict) and hasKeys(json.keys(), ["entries", "min"], ["name"]):
            if json["entries"] in ("nan", "inf", "-inf") or isinstance(json["entries"], numbers.Real):
                entries = float(json["entries"])
            else:
                raise JsonFormatException(json["entries"], "Minimize.entries")

            if isinstance(json.get("name", None), basestring):
                name = json["name"]
            elif json.get("name", None) is None:
                name = None
            else:
                raise JsonFormatException(json["name"], "Minimize.name")

            if json["min"] in ("nan", "inf", "-inf") or isinstance(json["min"], numbers.Real):
                min = float(json["min"])
            else:
                raise JsonFormatException(json["min"], "Minimize.min")

            out = Minimize.ed(entries, min)
            out.quantity.name = nameFromParent if name is None else name
            return out.specialize()

        else:
            raise JsonFormatException(json, "Minimize")

    def __repr__(self):
        return "<Minimize min={0}>".format(self.min)

    def __eq__(self, other):
        return isinstance(other, Minimize) and self.quantity == other.quantity and numeq(self.entries, other.entries) and numeq(self.min, other.min)

    def __ne__(self, other): return not self == other

    def __hash__(self):
        return hash((self.quantity, self.entries, self.min))

Factory.register(Minimize)
class Maximize(Factory, Container):
"""Find the maximum value of a given quantity. If no data are observed, the result is NaN."""
@staticmethod
def ed(entries, max):
"""Create a Maximize that is only capable of being added.
Parameters:
entries (float): the number of entries.
max (float): the highest value of the quantity observed or NaN if no data were observed.
"""
if not isinstance(entries, numbers.Real) and entries not in ("nan", "inf", "-inf"):
raise TypeError("entries ({0}) must be a number".format(entries))
if not isinstance(max, numbers.Real) and entries not in ("nan", "inf", "-inf"):
raise TypeError("max ({0}) must be a number".format(max))
if entries < 0.0:
raise ValueError("entries ({0}) cannot be negative".format(entries))
out = Maximize(None)
out.entries = float(entries)
out.max = float(max)
return out.specialize()
@staticmethod
def ing(quantity):
"""Synonym for ``__init__``."""
return Maximize(quantity)
def __init__(self, quantity):
"""Create a Maximize that is capable of being filled and added.
Parameters:
quantity (function returning float): computes the quantity of interest from the data.
Other parameters:
entries (float): the number of entries, initially 0.0.
max (float): the highest value of the quantity observed, | |
<gh_stars>1-10
'''
File: cache_mag.py
Author: <NAME>
Version: 0.1
Create: 2016-05-11 15:25:43
Description: manage Landsat cache files
'''
import logging
from . import config
class file_obj():
    """Lightweight record of a cached file: path, last-access time and size.

    Instances are ordered by access time so that sorting a list of
    ``file_obj`` puts least-recently-used files first (``cache_mag._clean``
    relies on this; without ``__lt__``, ``sorted()`` raises TypeError on
    Python 3).
    """

    def __init__(self, f):
        import os
        self._f = f                       # file path
        self._t = os.path.getatime(f)     # last access time (epoch seconds)
        self._z = os.path.getsize(f)      # file size in bytes

    def __eq__(self, f):
        return self._t == f._t

    def __lt__(self, f):
        # fix: required for sorted() in Python 3; order by access time (LRU first)
        return self._t < f._t

    def __hash__(self):
        # defining __eq__ alone would make instances unhashable in Python 3
        return hash(self._t)
# per-tag locks serializing cache writes across processes (populated lazily)
_w_lock = {}
# per-tag count of files written since the last cleanup pass
_w_nums = {}
def _get_cache_dir(d=None):
if d:
return d
_d_tmp = config.get('conf', 'cache')
if _d_tmp:
return _d_tmp
from . import file_unzip
import os
return os.path.join(file_unzip.default_dir(None), 'cache')
class cache_mag():
    """Manage Landsat cache files stored under ``<cache_dir>/<tag>``.

    When ``max_file`` / ``max_size`` limits are configured, the cache is
    pruned periodically in LRU order (least recently accessed files are
    removed first).
    """

    def __init__(self, tag, cache=None, max_file=-1, max_size=-1):
        """
        Parameters:
            tag: sub-folder name that namespaces this cache.
            cache: explicit cache root; defaults to the configured directory.
            max_file: maximum number of cached files (-1 = unlimited).
            max_size: maximum cache size in GB (-1 = unlimited).
        """
        self._t = tag
        self._d = _get_cache_dir(cache)
        if not self._d:
            raise Exception('no cache folder specified')

        self._max_file = config.getint('conf', 'max_cached_file', max_file)
        self._max_size = config.getfloat('conf', 'max_cached_size', max_size)

        # per-tag counter of files written since the last cleanup pass
        global _w_nums
        if self._t not in _w_nums:
            _w_nums[self._t] = 0.0

        # optional cross-process lock guarding writes for this tag
        global _w_lock
        if self._t not in _w_lock:
            if config.getboolean('conf', 'enable_cache_lock', True):
                from . import multi_task
                _w_lock[self._t] = multi_task.create_lock()

    def cached(self, key):
        """Return True when *key* maps to an existing, non-empty file."""
        _f = self.path(key)
        import os
        return os.path.exists(_f) and os.path.getsize(_f) > 0

    def _format_str(self, t):
        """Sanitize *t* into a safe relative path: keep separators, dots
        and dashes; replace any other non-word character with '_'."""
        import re
        _k = list(re.sub(r'[^\w\d_]', '_', t))
        for i in range(len(_k)):
            if t[i] in ['\\', '/', '.', '-']:
                _k[i] = t[i]
        return ''.join(_k)

    def path(self, key):
        """Map *key* to its absolute path inside the cache.

        Touches the file's timestamps when it already exists so LRU
        cleanup sees it as recently used.
        """
        import os
        _p = self._format_str(key)
        if _p and _p[0] in ['/', '\\']:
            # remove the root path if it exists
            _p = _p[1:]
        _f = os.path.join(self._d, self._t, _p)
        if os.path.exists(_f):
            os.utime(_f, None)
        return _f

    def get(self, key):
        """Return the cached path for *key*, or None when not cached."""
        if not self.cached(key):
            return None
        return self.path(key)

    def _put(self, inp, f):
        """Copy file *inp* into cache location *f* (atomic via a random
        ``.bak`` temp name); trigger a cleanup pass when the per-tag write
        counter reaches its threshold."""
        import os
        _f = f
        if os.path.exists(_f):
            return _f

        if self._max_file > 0 or self._max_size > 0:
            global _w_nums
            _w_nums[self._t] += 1
            # only scan the cache folder occasionally; a full walk is slow
            if _w_nums[self._t] > (self._max_file / 10 if self._max_file > 0 else 1000):
                self._clean()
                _w_nums[self._t] = 0

        try:
            (lambda x: os.path.exists(x) or os.makedirs(x))(os.path.dirname(_f))
        except Exception:
            pass

        import random
        _f_out = _f + str(random.randint(0, 1000)) + '.bak'

        import shutil
        shutil.copy(inp, _f_out)

        if os.path.exists(_f) == False:
            shutil.move(_f_out, _f)
        else:
            os.remove(_f_out)
        return _f

    def put(self, key, inp=None, replace=False):
        """Store file *inp* (default: *key* itself) under *key*; return the
        cached path. Existing entries are kept unless *replace* is True."""
        import os
        _f = self.path(key)
        _inp = inp if inp else key

        if self.cached(key):
            if replace:
                logging.info('clear cached %s' % key)
                try:
                    os.remove(_f)
                except Exception:
                    pass
            else:
                logging.info('loading cached %s' % key)
                return _f

        global _w_lock
        if self._t in _w_lock:
            with _w_lock[self._t]:
                self._put(_inp, _f)
        else:
            self._put(_inp, _f)

        return _f

    def _clean_file(self, f):
        """Delete one cached file, logging (not raising) failures."""
        try:
            import os
            os.remove(f._f)
            logging.info('clean cached file %s' % f._f)
        except Exception as err:
            import traceback
            logging.error(traceback.format_exc())
            logging.error(str(err))
            print('\n\n* Error:', err)

    def _clean(self):
        """Prune the cache back under the configured file-count and size
        limits, deleting least-recently-accessed files first."""
        import os
        logging.info('clean cache')

        # fix: reset the per-tag write counter (the original used '==',
        # a no-op comparison instead of an assignment)
        _w_nums[self._t] = 0

        _fs = []
        _sz = 0.0
        for _root, _dirs, _files in os.walk(self._d):
            for _file in _files:
                _ff = os.path.join(_root, _file)

                if _file.endswith('.bak'):
                    import time
                    # remove bak files that have not been used for 24 hours
                    if (time.time() - os.path.getatime(_ff)) > 60 * 60 * 24:
                        logging.warning('remove bak file %s' % _ff)
                        os.remove(_ff)
                    continue

                _fs.append(file_obj(_ff))
                _sz += os.path.getsize(_ff)

        # oldest access time first (LRU deletion candidates at the front)
        _fs = sorted(_fs)
        logging.info('checking cache %s, %s (%s, %s)' % (len(_fs), _sz, self._max_file, self._max_size))

        _fd1 = []
        if self._max_file > 0 and len(_fs) > self._max_file:
            # fix: drop the oldest files so at most _max_file remain; the
            # original assigned to a dead variable '_fd' and sliced with the
            # wrong sign (keeping the excess instead of pruning it)
            _fd1 = _fs[:len(_fs) - self._max_file]

        _fd2 = []
        # fix: convert the configured GB limit to bytes in a local; the
        # original multiplied self._max_size in place on every pass,
        # compounding the conversion
        _max_bytes = self._max_size * (1024 * 1024 * 1024) if self._max_size > 0 else -1.0
        if _max_bytes > 0 and _sz > _max_bytes:
            _zz = _sz
            for _f in _fs:
                _fd2.append(_f)
                _zz -= _f._z
                if _zz <= _max_bytes:
                    break

        # apply whichever limit requires removing more files
        _fd = _fd1 if len(_fd1) > len(_fd2) else _fd2
        logging.info('identified cached files to clean %s %s %s' % (len(_fd), len(_fd1), len(_fd2)))
        for _f in _fd:
            self._clean_file(_f)
_get_cache_que = None
class s3():
"""manage Landsat cache files"""
    def __init__(self, bucket, fzip=None):
        """Bind to S3 *bucket* and set up a local folder for downloads.

        When S3 caching is enabled (``conf/enable_s3_cache``), downloads go
        to the shared cache directory; otherwise they land in a temporary
        folder owned by a ``file_unzip`` helper (created here when the
        caller did not supply *fzip*).
        """
        self._t = bucket
        _zip = fzip
        self._enable_cache = config.getboolean('conf', 'enable_s3_cache', True)
        if not self._enable_cache:
            logging.info('disabled caching S3 files')
            if _zip is None:
                from gio import file_unzip
                _zip = file_unzip.file_unzip()
            # downloads go into a temp folder managed by the zip helper
            _p = _zip.generate_file()
        else:
            _p = _get_cache_dir()
            if not _p:
                # caching enabled but no cache dir configured: fall back to temp
                if _zip is None:
                    raise Exception('need to provide zip obj when cache is disabled')
                logging.info('disabled caching S3 files')
                _p = _zip.generate_file()
        self._zip = _zip
        # remember whether we own the zip helper (so clean() releases it)
        self._zip_inner = fzip is None
        self._path = _p
        self._c = cache_mag(bucket, _p)
        self.bucket = self._t
    def __enter__(self):
        # context-manager support: ``with s3(bucket) as _s3: ...``
        return self
    def __exit__(self, type, value, traceback):
        # always release temp folders / the owned zip helper on exit
        self.clean()
    def _get_s3_client(self):
        """Return a fresh low-level boto3 S3 client."""
        import boto3
        return boto3.client('s3')
def clean(self):
if (not self._enable_cache) and (self._path):
import shutil
shutil.rmtree(self._path, True)
if self._zip_inner and self._zip:
self._zip.clean()
    def _list_by_resource(self, k, recursive=True, limit=-1):
        """List objects under prefix *k* via the boto3 resource API.

        NOTE(review): this path does not set RequestPayer, so it presumably
        fails on requester-pays buckets; ``list`` routes those through the
        client API instead -- confirm.
        """
        import boto3
        _ps = {'Prefix': k}
        if not recursive:
            # a '/' delimiter stops listing at the first level of "sub-folders"
            _ps['Delimiter'] = '/'
        _ss = boto3.resource('s3').Bucket(self._t)
        if limit >= 0:
            return list(_ss.objects.filter(**_ps).limit(limit))
        _ls = list(_ss.objects.filter(**_ps))
        return _ls
    def _list_by_client(self, k, recursive=True, limit=-1):
        """List objects under prefix *k* with the low-level S3 client.

        Uses the ``list_objects_v2`` paginator (so results are not capped at
        one page) and supports requester-pays buckets.  Returns raw
        ``Contents`` dicts, plus ``CommonPrefixes`` entries when not
        recursive.
        """
        _paginator = self._get_s3_client().get_paginator("list_objects_v2")
        _ps = {'Bucket': self._t, 'Prefix': k}
        if not recursive:
            # a '/' delimiter makes S3 group sub-folders into CommonPrefixes
            _ps['Delimiter'] = '/'
        if config.getboolean('aws', 's3_requester_pay', True):
            _ps['RequestPayer'] = 'requester'
        if limit >= 0:
            _ps['MaxKeys'] = limit
        _ts = []
        for _page in _paginator.paginate(**_ps):
            # list files
            for _k in _page.get('Contents', []):
                if k.endswith('/') and _k.get('Key') == k:
                    # skip folders
                    continue
                _ts.append(_k)
            # include subfolders
            if not recursive:
                for _k in _page.get('CommonPrefixes', []):
                    _ts.append(_k)
        return _ts
    def list(self, k, recursive=True, limit=-1):
        """List objects under prefix *k*; always returns a list of dicts
        with at least a 'Key' entry, regardless of which backend served
        the request."""
        if config.getboolean('aws', 's3_requester_pay', True):
            return self._list_by_client(k, recursive, limit)
        return [{'Key': _s.key} for _s in self._list_by_resource(k, recursive, limit)]
def exists(self, k):
if not k:
return False
_os = self.list(k, limit=1)
return len(_os) > 0
    def remove(self, key):
        """Delete every object whose key starts with *key*; return the
        number of objects removed."""
        import boto3
        _ss = boto3.resource('s3')
        _bk = _ss.Bucket(self._t)
        _nu = 0
        for _o in _bk.objects.filter(Prefix=key):
            _ss.Object(self._t, _o.key).delete()
            _nu += 1
        return _nu
# def remove(self, key):
# from . import config
# _ps = {'Bucket': self._t, 'Key': key}
# if config.getboolean('aws', 's3_requester_pay', True):
# _ps['RequestPayer'] = 'requester'
# return self._get_s3_client().delete_object(**_ps)
    def get(self, k, lock=None):
        """Download object *k* into the local cache and return its path.

        Concurrent downloads are bounded by a lazily-created process-wide
        semaphore sized by ``conf/max_cache_rec_num`` when cache locking is
        enabled; otherwise the download runs unthrottled.
        """
        if k is None:
            return None

        _enable_lock = config.getboolean('conf', 'enable_cache_lock', True)
        if _enable_lock:
            _num = config.getint('conf', 'max_cache_rec_num', 2)
        else:
            _num = 0

        if _num <= 0:
            # throttling disabled
            return self._get(k, lock)

        global _get_cache_que
        if _get_cache_que is None:
            import multiprocessing
            # create the shared semaphore on first use
            _get_cache_que = multiprocessing.Semaphore(value=_num)

        with _get_cache_que:
            return self._get(k, lock)
    def _get(self, k, lock=None):
        """Dispatch a single download to the boto3 implementation."""
        # if config.getboolean('conf', 's3_get_with_cli', False):
        #     return self._get_cli(k, lock)
        return self._get_boto(k, lock)
# download file using boto3 function
def _get_boto(self, k, lock=None):
if k is None:
return None
_key = k if isinstance(k, str) or isinstance(k, str) else k.key
_f = self._c.path(_key)
if self._c.cached(_key):
logging.debug('found cached file %s' % _f)
return _f
import os
try:
(lambda x: os.path.exists(x) or os.makedirs(x))(os.path.dirname(_f))
except Exception:
pass
import shutil
from . import file_unzip
for _i in range(3):
_t = file_unzip.generate_file(os.path.dirname(_f), '', '.bak')
try:
# write an empty file to prevent other process to use the same file name
with open(_t, 'w') as _fo:
_fo.write('')
_ps = {'Bucket': self._t, 'Key': k}
if config.getboolean('aws', 's3_requester_pay', True):
_ps['RequestPayer'] = 'requester'
try:
_rs = self._get_s3_client().get_object(**_ps)
except Exception as _err:
# import traceback
# logging.debug(traceback.format_exc())
logging.debug(str(_err))
# print('\n\n* Error:', _err)
# import time
# time.sleep(1)
logging.debug('failed to load key s3://%s/%s' % (self._t, k))
# continue
return None
_bd = _rs['Body']
_sz = 0.0
with open(_t, 'wb') as _fo:
for _bs in _bd.iter_chunks():
_fo.write(_bs)
_sz += float(len(_bs))
del _bs
if not os.path.exists(_t) or _sz < _rs['ContentLength']:
logging.warning('received partial file from S3 (%s, %s)' % (_sz, _rs['ContentLength']))
os.remove(_t)
continue
if lock is None:
if os.path.exists(_f) == False:
shutil.move(_t, _f)
else:
with lock:
if os.path.exists(_f) == False:
shutil.move(_t, _f)
return _f
finally:
if os.path.exists(_t):
os.remove(_t)
raise Exception('failed to load S3 file s3://%s/%s' % (self._t, _key))
# download file usign awscli command to test the issue that likely related to boto3
def _get_cli(self, k, lock=None):
if k is None:
return None
_key = k if isinstance(k, str) or isinstance(k, str) else k.key
_f = self._c.path(_key)
if self._c.cached(_key):
# logging.debug('found cached file %s' % _f)
return _f
import os
try:
(lambda x: os.path.exists(x) or os.makedirs(x))(os.path.dirname(_f))
except Exception:
pass
import shutil
from . import file_unzip
for _i in range(1):
_t = file_unzip.generate_file(os.path.dirname(_f), '', '.bak')
try:
# write an empty file to prevent other process to use the same file name
with open(_t, 'w') as _fo:
_fo.write('')
_cmd = 'aws s3 cp s3://%s/%s %s' % (self._t, k, _t)
if config.getboolean('aws', 's3_requester_pay', True):
_cmd = _cmd + ' --request-pay requester'
from . import run_commands as run
run.run(_cmd)
if os.path.getsize(_t) <= 1:
logging.warning('failed to receive file from S3 (%s://%s)' % (self._t, k))
os.remove(_t)
continue
if lock is |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.