gt
stringclasses 1
value | context
stringlengths 2.49k
119k
|
|---|---|
import logging
import os
import sys
from collections import namedtuple
import six
import yaml
from .errors import CircularReference
from .errors import ComposeFileNotFound
from .errors import ConfigurationError
from .interpolation import interpolate_environment_variables
from .validation import validate_against_fields_schema
from .validation import validate_against_service_schema
from .validation import validate_extended_service_exists
from .validation import validate_extends_file_path
from .validation import validate_service_names
from .validation import validate_top_level_object
# Service configuration keys that map directly to options understood by the
# Docker engine when creating a container.
DOCKER_CONFIG_KEYS = [
    'cap_add',
    'cap_drop',
    'cgroup_parent',
    'command',
    'cpu_shares',
    'cpuset',
    'detach',
    'devices',
    'dns',
    'dns_search',
    'domainname',
    'entrypoint',
    'env_file',
    'environment',
    'extra_hosts',
    'hostname',
    'image',
    'ipc',
    'labels',
    'links',
    'log_driver',
    'log_opt',
    'mac_address',
    'mem_limit',
    'memswap_limit',
    'net',
    'pid',
    'ports',
    'privileged',
    'read_only',
    'restart',
    'security_opt',
    'stdin_open',
    'tty',
    'user',
    'volume_driver',
    'volumes',
    'volumes_from',
    'working_dir',
]
# Every key allowed in a service definition: Docker options plus options
# that are resolved by compose itself before container creation.
ALLOWED_KEYS = DOCKER_CONFIG_KEYS + [
    'build',
    'container_name',
    'dockerfile',
    'expose',
    'external_links',
    'name',
]
# Default config filenames searched for, in priority order.
SUPPORTED_FILENAMES = [
    'docker-compose.yml',
    'docker-compose.yaml',
    'fig.yml',
    'fig.yaml',
]
# Merged on top of the main file when present in the same directory.
DEFAULT_OVERRIDE_FILENAME = 'docker-compose.override.yml'
# Leading characters that mark a volume host path as an explicit path
# (as opposed to a named volume).
PATH_START_CHARS = [
    '/',
    '.',
    '~',
]
log = logging.getLogger(__name__)
class ConfigDetails(namedtuple('_ConfigDetails', 'working_dir config_files')):
    """A resolved request to load configuration.

    :param working_dir: the directory to use for relative paths in the config
    :type working_dir: string
    :param config_files: list of configuration files to load, in merge order
    :type config_files: list of :class:`ConfigFile`
    """
class ConfigFile(namedtuple('_ConfigFile', 'filename config')):
    """One parsed configuration file.

    :param filename: filename of the config file (None when read from stdin)
    :type filename: string
    :param config: parsed contents of the config file
    :type config: :class:`dict`
    """
def find(base_dir, filenames):
    """Resolve which config files to use and load their raw contents.

    A single filename of '-' means read one config document from stdin.
    With no filenames, fall back to the default files discovered from
    base_dir (and its parents). Returns a ConfigDetails whose working_dir
    is the directory containing the first config file.
    """
    if filenames == ['-']:
        # stdin has no filename; relative paths resolve against the cwd.
        return ConfigDetails(
            os.getcwd(),
            [ConfigFile(None, yaml.safe_load(sys.stdin))])
    if filenames:
        filenames = [os.path.join(base_dir, f) for f in filenames]
    else:
        filenames = get_default_config_files(base_dir)
    log.debug("Using configuration files: {}".format(",".join(filenames)))
    return ConfigDetails(
        os.path.dirname(filenames[0]),
        [ConfigFile(f, load_yaml(f)) for f in filenames])
def get_default_config_files(base_dir):
    """Locate the default compose file (plus any override file) for base_dir.

    Searches base_dir and each parent directory for SUPPORTED_FILENAMES.
    Raises ComposeFileNotFound when nothing is found. When several
    supported filenames exist in the same directory, the first match in
    SUPPORTED_FILENAMES order wins.
    """
    (candidates, path) = find_candidates_in_parent_dirs(SUPPORTED_FILENAMES, base_dir)
    if not candidates:
        raise ComposeFileNotFound(SUPPORTED_FILENAMES)
    winner = candidates[0]
    # log.warn is a deprecated alias of log.warning; use the real method.
    if len(candidates) > 1:
        log.warning("Found multiple config files with supported names: %s", ", ".join(candidates))
        log.warning("Using %s\n", winner)
    if winner == 'docker-compose.yaml':
        log.warning("Please be aware that .yml is the expected extension "
                    "in most cases, and using .yaml can cause compatibility "
                    "issues in future.\n")
    if winner.startswith("fig."):
        log.warning("%s is deprecated and will not be supported in future. "
                    "Please rename your config file to docker-compose.yml\n" % winner)
    return [os.path.join(path, winner)] + get_default_override_file(path)
def get_default_override_file(path):
    """Return [path to the override file] if one exists in `path`, else []."""
    candidate = os.path.join(path, DEFAULT_OVERRIDE_FILENAME)
    if os.path.exists(candidate):
        return [candidate]
    return []
def find_candidates_in_parent_dirs(filenames, path):
    """
    Given a directory path to start, looks for filenames in the
    directory, and then each parent directory successively,
    until found.
    Returns tuple (candidates, path).
    """
    matches = [name for name in filenames
               if os.path.exists(os.path.join(path, name))]
    if matches:
        return (matches, path)
    parent = os.path.join(path, '..')
    if os.path.abspath(parent) == os.path.abspath(path):
        # Reached the filesystem root without finding anything.
        return (matches, path)
    return find_candidates_in_parent_dirs(filenames, parent)
@validate_top_level_object
@validate_service_names
def pre_process_config(config):
    """
    Pre validation checks and processing of the config file to interpolate env
    vars returning a config dict ready to be tested against the schema.

    The decorators reject configs that are not a mapping of valid service
    names before interpolation runs.
    """
    return interpolate_environment_variables(config)
def load(config_details):
    """Load the configuration from a working directory and a list of
    configuration files. Files are loaded in order, and merged on top
    of each other to create the final configuration.
    Return a fully interpolated, extended and validated configuration.
    """
    def build_service(filename, service_name, service_dict):
        # Resolve one service (env vars, env_files, extends) and check
        # that any build path it references actually exists.
        loader = ServiceLoader(
            config_details.working_dir,
            filename,
            service_name,
            service_dict)
        service_dict = loader.make_service_dict()
        validate_paths(service_dict)
        return service_dict
    def load_file(filename, config):
        # Interpolate + schema-check the raw mapping, then build each service.
        processed_config = pre_process_config(config)
        validate_against_fields_schema(processed_config)
        return [
            build_service(filename, name, service_config)
            for name, service_config in processed_config.items()
        ]
    def merge_services(base, override):
        # Union of service names; overlapping services are deep-merged.
        all_service_names = set(base) | set(override)
        return {
            name: merge_service_dicts(base.get(name, {}), override.get(name, {}))
            for name in all_service_names
        }
    # Fold all files into one ConfigFile, keeping the first file's name,
    # then resolve the merged result.
    config_file = config_details.config_files[0]
    for next_file in config_details.config_files[1:]:
        config_file = ConfigFile(
            config_file.filename,
            merge_services(config_file.config, next_file.config))
    return load_file(config_file.filename, config_file.config)
class ServiceLoader(object):
    """Resolves one service's raw config dict into its final form:
    unpacks env_files/environment, recursively applies 'extends'
    (with cycle detection) and validates the result.
    """
    def __init__(self, working_dir, filename, service_name, service_dict, already_seen=None):
        """
        :param working_dir: directory for resolving relative paths (required)
        :param filename: config file the service came from (may be None, e.g. stdin)
        :param service_name: name of the service being loaded
        :param service_dict: raw options for the service (copied, never mutated)
        :param already_seen: list of (filename, service_name) signatures already
            visited along the current 'extends' chain, for cycle detection
        """
        if working_dir is None:
            raise Exception("No working_dir passed to ServiceLoader()")
        self.working_dir = os.path.abspath(working_dir)
        if filename:
            self.filename = os.path.abspath(filename)
        else:
            self.filename = filename
        self.already_seen = already_seen or []
        self.service_dict = service_dict.copy()
        self.service_name = service_name
        self.service_dict['name'] = service_name
    def detect_cycle(self, name):
        # Raise CircularReference if extending `name` would revisit a
        # (filename, service) pair already on the extends chain.
        if self.signature(name) in self.already_seen:
            raise CircularReference(self.already_seen + [self.signature(name)])
    def make_service_dict(self):
        """Return the fully resolved service dict.

        Full service-schema validation runs only at the top of the
        extends chain (already_seen empty).
        """
        self.resolve_environment()
        if 'extends' in self.service_dict:
            self.validate_and_construct_extends()
            self.service_dict = self.resolve_extends()
        if not self.already_seen:
            validate_against_service_schema(self.service_dict, self.service_name)
        return process_container_options(self.service_dict, working_dir=self.working_dir)
    def resolve_environment(self):
        """
        Unpack any environment variables from an env_file, if set.
        Interpolate environment values if set.
        """
        if 'environment' not in self.service_dict and 'env_file' not in self.service_dict:
            return
        env = {}
        if 'env_file' in self.service_dict:
            for f in get_env_files(self.service_dict, working_dir=self.working_dir):
                env.update(env_vars_from_file(f))
            del self.service_dict['env_file']
        # Explicit 'environment' entries override values from env_files;
        # None values are then filled in from the process environment.
        env.update(parse_environment(self.service_dict.get('environment')))
        env = dict(resolve_env_var(k, v) for k, v in six.iteritems(env))
        self.service_dict['environment'] = env
    def validate_and_construct_extends(self):
        # Validate the 'extends' options and load the extended service's raw
        # config, storing it on self for resolve_extends().
        validate_extends_file_path(
            self.service_name,
            self.service_dict['extends'],
            self.filename
        )
        self.extended_config_path = self.get_extended_config_path(
            self.service_dict['extends']
        )
        self.extended_service_name = self.service_dict['extends']['service']
        full_extended_config = pre_process_config(
            load_yaml(self.extended_config_path)
        )
        validate_extended_service_exists(
            self.extended_service_name,
            full_extended_config,
            self.extended_config_path
        )
        validate_against_fields_schema(full_extended_config)
        self.extended_config = full_extended_config[self.extended_service_name]
    def resolve_extends(self):
        # Recursively load the extended service (its relative paths resolve
        # against its own file's directory), then merge our options on top.
        other_working_dir = os.path.dirname(self.extended_config_path)
        other_already_seen = self.already_seen + [self.signature(self.service_name)]
        other_loader = ServiceLoader(
            working_dir=other_working_dir,
            filename=self.extended_config_path,
            service_name=self.service_name,
            service_dict=self.extended_config,
            already_seen=other_already_seen,
        )
        other_loader.detect_cycle(self.extended_service_name)
        other_service_dict = other_loader.make_service_dict()
        validate_extended_service_dict(
            other_service_dict,
            filename=self.extended_config_path,
            service=self.extended_service_name,
        )
        return merge_service_dicts(other_service_dict, self.service_dict)
    def get_extended_config_path(self, extends_options):
        """
        Service we are extending either has a value for 'file' set, which we
        need to obtain a full path too or we are extending from a service
        defined in our own file.
        """
        if 'file' in extends_options:
            extends_from_filename = extends_options['file']
            return expand_path(self.working_dir, extends_from_filename)
        return self.filename
    def signature(self, name):
        # Identity of a service within the extends chain.
        return (self.filename, name)
def validate_extended_service_dict(service_dict, filename, service):
    """Raise ConfigurationError when a service uses options that make it
    unsafe to extend ('links', 'volumes_from', or 'net: container:...')."""
    error_prefix = "Cannot extend service '%s' in %s:" % (service, filename)
    if 'links' in service_dict:
        raise ConfigurationError("%s services with 'links' cannot be extended" % error_prefix)
    if 'volumes_from' in service_dict:
        raise ConfigurationError("%s services with 'volumes_from' cannot be extended" % error_prefix)
    net = service_dict.get('net')
    if net is not None and get_service_name_from_net(net) is not None:
        raise ConfigurationError("%s services with 'net: container' cannot be extended" % error_prefix)
def process_container_options(service_dict, working_dir=None):
    """Return a copy of service_dict with path-like and label options
    normalized (volumes/build resolved to absolute paths, labels to a dict)."""
    result = service_dict.copy()
    # Volume paths are only resolved for the default (local) volume driver.
    if 'volumes' in result and result.get('volume_driver') is None:
        result['volumes'] = resolve_volume_paths(result, working_dir=working_dir)
    if 'build' in result:
        result['build'] = resolve_build_path(result['build'], working_dir=working_dir)
    if 'labels' in result:
        result['labels'] = parse_labels(result['labels'])
    return result
def merge_service_dicts(base, override):
    """Merge two service dicts, with `override` winning over `base`.

    Each option family has its own merge rule: environments and labels are
    merged as mappings, path mappings by container path, plain lists are
    concatenated, and everything else is replaced wholesale.
    """
    d = base.copy()
    if 'environment' in base or 'environment' in override:
        d['environment'] = merge_environment(
            base.get('environment'),
            override.get('environment'),
        )
    path_mapping_keys = ['volumes', 'devices']
    for key in path_mapping_keys:
        if key in base or key in override:
            d[key] = merge_path_mappings(
                base.get(key),
                override.get(key),
            )
    if 'labels' in base or 'labels' in override:
        d['labels'] = merge_labels(
            base.get('labels'),
            override.get('labels'),
        )
    # 'image' and 'build' are mutually exclusive: whichever the override
    # specifies displaces the other inherited from base.
    if 'image' in override and 'build' in d:
        del d['build']
    if 'build' in override and 'image' in d:
        del d['image']
    list_keys = ['ports', 'expose', 'external_links']
    for key in list_keys:
        if key in base or key in override:
            d[key] = base.get(key, []) + override.get(key, [])
    # dns/dns_search may be a single string or a list; normalize then concat.
    list_or_string_keys = ['dns', 'dns_search']
    for key in list_or_string_keys:
        if key in base or key in override:
            d[key] = to_list(base.get(key)) + to_list(override.get(key))
    # Every remaining allowed key is a simple replacement.
    already_merged_keys = ['environment', 'labels'] + path_mapping_keys + list_keys + list_or_string_keys
    for k in set(ALLOWED_KEYS) - set(already_merged_keys):
        if k in override:
            d[k] = override[k]
    return d
def merge_environment(base, override):
    """Merge two environment specs (list/dict/None); override wins."""
    merged = parse_environment(base)
    for key, value in parse_environment(override).items():
        merged[key] = value
    return merged
def get_env_files(options, working_dir=None):
    """Return the absolute paths of the env_file(s) named in `options`.

    'env_file' may be a single path or a list of paths. Always returns a
    list (the original returned a dict `{}` on the missing-key path, an
    inconsistent type; both are falsy/iterable-empty so callers are
    unaffected).
    """
    if 'env_file' not in options:
        return []
    env_files = options.get('env_file', [])
    if not isinstance(env_files, list):
        env_files = [env_files]
    return [expand_path(working_dir, path) for path in env_files]
def parse_environment(environment):
    """Normalize an environment spec (list of "K=V" strings, dict or falsy)
    to a plain dict. Raises ConfigurationError for any other type."""
    if not environment:
        return {}
    if isinstance(environment, dict):
        return dict(environment)
    if isinstance(environment, list):
        return dict(split_env(entry) for entry in environment)
    raise ConfigurationError(
        "environment \"%s\" must be a list or mapping," %
        environment
    )
def split_env(env):
    """Split a "KEY=VALUE" string into its parts; a bare "KEY" maps to None."""
    if '=' not in env:
        return env, None
    return env.split('=', 1)
def resolve_env_var(key, val):
    """Return (key, value); a None value is filled in from the current
    process environment, defaulting to '' when the variable is unset."""
    if val is not None:
        return key, val
    return key, os.environ.get(key, '')
def env_vars_from_file(filename):
    """
    Read in a line delimited file of environment variables.

    Blank lines and lines starting with '#' are ignored. Raises
    ConfigurationError when the file does not exist. Uses a context
    manager so the file handle is closed deterministically (the original
    leaked it until garbage collection).
    """
    if not os.path.exists(filename):
        raise ConfigurationError("Couldn't find env file: %s" % filename)
    env = {}
    with open(filename, 'r') as fileobj:
        for line in fileobj:
            line = line.strip()
            if line and not line.startswith('#'):
                k, v = split_env(line)
                env[k] = v
    return env
def resolve_volume_paths(service_dict, working_dir=None):
    """Resolve every entry of service_dict['volumes'] against working_dir."""
    if working_dir is None:
        raise Exception("No working_dir passed to resolve_volume_paths()")
    service_name = service_dict['name']
    return [resolve_volume_path(volume, working_dir, service_name)
            for volume in service_dict['volumes']]
def resolve_volume_path(volume, working_dir, service_name):
    """Expand and absolutize the host side of a volume mapping.

    `volume` is either "container_path" or "host_path:container_path"
    (possibly with mode flags after a further ':'). Bare container paths
    are returned with only '~' expanded.
    """
    container_path, host_path = split_path_mapping(volume)
    container_path = os.path.expanduser(container_path)
    if host_path is not None:
        # A host path not starting with /, . or ~ will be interpreted as a
        # named volume by newer Docker versions — warn about the ambiguity.
        if not any(host_path.startswith(c) for c in PATH_START_CHARS):
            log.warn(
                'Warning: the mapping "{0}:{1}" in the volumes config for '
                'service "{2}" is ambiguous. In a future version of Docker, '
                'it will designate a "named" volume '
                '(see https://github.com/docker/docker/pull/14242). '
                'To prevent unexpected behaviour, change it to "./{0}:{1}"'
                .format(host_path, container_path, service_name)
            )
        host_path = os.path.expanduser(host_path)
        return "%s:%s" % (expand_path(working_dir, host_path), container_path)
    else:
        return container_path
def resolve_build_path(build_path, working_dir=None):
    """Make a build context path absolute relative to working_dir."""
    if working_dir is None:
        raise Exception("No working_dir passed to resolve_build_path")
    return expand_path(working_dir, build_path)
def validate_paths(service_dict):
    """Raise ConfigurationError when a 'build' path is missing or unreadable."""
    if 'build' not in service_dict:
        return
    build_path = service_dict['build']
    if os.path.exists(build_path) and os.access(build_path, os.R_OK):
        return
    raise ConfigurationError("build path %s either does not exist or is not accessible." % build_path)
def merge_path_mappings(base, override):
    """Merge two volume/device mapping lists, keyed by container path;
    override entries win."""
    combined = dict_from_path_mappings(base)
    for container, host in dict_from_path_mappings(override).items():
        combined[container] = host
    return path_mappings_from_dict(combined)
def dict_from_path_mappings(path_mappings):
    """Convert a list of mapping strings to {container_path: host_path}."""
    if not path_mappings:
        return {}
    return dict(split_path_mapping(mapping) for mapping in path_mappings)
def path_mappings_from_dict(d):
return [join_path_mapping(v) for v in d.items()]
def split_path_mapping(string):
    """Split "host:container" into (container, host); a bare path becomes
    (path, None). Only the first ':' splits, so mode flags stay attached."""
    if ':' not in string:
        return (string, None)
    host, container = string.split(':', 1)
    return (container, host)
def join_path_mapping(pair):
    """Inverse of split_path_mapping: (container, host) -> "host:container"."""
    container, host = pair
    if host is None:
        return container
    return ":".join((host, container))
def merge_labels(base, override):
    """Merge two label specs (list/dict/None); override wins."""
    merged = parse_labels(base)
    for key, value in parse_labels(override).items():
        merged[key] = value
    return merged
def parse_labels(labels):
    """Normalize a labels value (list of "k=v" strings or dict) to a dict.

    Raises ConfigurationError for any other type, mirroring
    parse_environment(); the original fell through and silently returned
    None, which would break callers expecting a mapping.
    """
    if not labels:
        return {}
    if isinstance(labels, list):
        return dict(split_label(e) for e in labels)
    if isinstance(labels, dict):
        return labels
    raise ConfigurationError(
        "labels \"%s\" must be a list or mapping" % labels
    )
def split_label(label):
    """Split a "key=value" label; a bare key maps to the empty string."""
    if '=' not in label:
        return label, ''
    return label.split('=', 1)
def expand_path(working_dir, path):
    """Expand '~' in path and absolutize it relative to working_dir."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(os.path.join(working_dir, expanded))
def to_list(value):
    """Coerce a scalar-or-list config value to a list (None -> [])."""
    if value is None:
        return []
    if isinstance(value, six.string_types):
        return [value]
    return value
def get_service_name_from_net(net_config):
    """Return the container name when net is 'container:<name>', else None."""
    if not net_config:
        return None
    if not net_config.startswith('container:'):
        return None
    return net_config.split(':', 1)[1]
def load_yaml(filename):
    """Parse a YAML file with safe_load, wrapping I/O failures in
    ConfigurationError so callers see a uniform error type."""
    try:
        with open(filename, 'r') as fh:
            return yaml.safe_load(fh)
    except IOError as e:
        raise ConfigurationError(six.text_type(e))
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Testing the Gap Statistic to find the k in k-means
http://datasciencelab.wordpress.com/2013/12/27/finding-the-k-in-k-means-clustering/
"""
from __future__ import division
import sys
import numpy as np
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
################################################################################
# ARRAY MANIPULATION CODE
################################################################################
def norm(a, axis=-1):
    """NumPy 1.8 style norm(). Needed for the NumPy 1.7.1 I am using.
    a: A NumPy ndarray
    axis: Axis to calculate norm along. Whole ndarray is normed if axis is None
    Returns: norm along axis
    """
    return (np.linalg.norm(a) if axis is None
            else np.apply_along_axis(np.linalg.norm, axis, a))
def subtract_outer(a, b):
    """The outer difference of a and b where
    a_b = subtract_outer(a, b) => a_b[i, j, :] = a[i, :] - b[j, :]
    a: A NumPy ndarray
    b: A NumPy ndarray
    Returns: outer difference of a and b
    """
    assert a.shape[1] == b.shape[1]
    assert len(a.shape) == 2 and len(b.shape) == 2
    n = a.shape[1]
    a_b = np.empty((a.shape[0], b.shape[0], n))
    # range instead of xrange: works on Python 2 and 3 (xrange is 2-only).
    for i in range(n):
        a_b[:, :, i] = np.subtract.outer(a[:, i], b[:, i])
    return a_b
################################################################################
# CODE TO FIND K IN K-MEANS
################################################################################
def find_centers(X, k):
    """Divide the points in X into k clusters
    X: points
    k: number of clusters to separate points into
    Returns: mu, labels
    mu: centers of clusters
    labels: indexes of points in X belonging to each cluster
    """
    # k-means++ seeding with 10 random restarts; best inertia wins.
    estimator = KMeans(init='k-means++', n_clusters=k, n_init=10)
    estimator.fit(X)
    mu = estimator.cluster_centers_
    labels = estimator.labels_
    return mu, labels
def Wk(X, mu, labels):
    """Compute the intra-cluster distances for the k clusters in X described by mu and labels.
    X: points
    mu: centers of clusters
    labels: indexes of points in X belonging to each cluster
    Returns: Normalized intra-cluster distance as defined in
    http://datasciencelab.wordpress.com/2013/12/27/finding-the-k-in-k-means-clustering/
    """
    k = mu.shape[0]
    # range instead of xrange: works on Python 2 and 3 (xrange is 2-only).
    clusters = [X[np.where(labels == i)] for i in range(k)]
    n = [x.shape[0] for x in clusters]
    # Sum of squared point-to-centroid distances, normalized by 2 * cluster size.
    return sum(norm(clusters[i] - mu[i], None)**2/(2 * n[i]) for i in range(k))
def bounding_box(X):
    """Compute the bounding box for the points in X. This is the highest and lowest
    x and y coordinates of all the points.
    X: points
    Returns: (xmin, xmax), (ymin, ymax)
    """
    xs = X[:, 0]
    ys = X[:, 1]
    return (xs.min(), xs.max()), (ys.min(), ys.max())
def gap_statistic(X, min_k, max_k, b):
    """Calculate gap statistic for X for k = min_k through k = max_k
    using b reference data sets
    X: points
    min_k: lowest k to test
    max_k: highest k to test
    b: number of reference data sets to test against
    Returns: Generator yielding k, logWk, logWkb, sk for min_k <= k <= max_k
    k: This k
    Wks: log(intra-cluster distance) for k
    Wkbs: average reference log(intra-cluster distance) for k
    sk: Normalized std dev log(intra-cluster distance) for k
    """
    N = X.shape[0]
    (xmin, xmax), (ymin, ymax) = bounding_box(X)
    def reference_results(k):
        # Create b reference data sets drawn uniformly from X's bounding box
        # (range instead of xrange: works on Python 2 and 3).
        BWkbs = np.zeros(b)
        for i in range(b):
            Xb = np.vstack([np.random.uniform(xmin, xmax, N),
                            np.random.uniform(ymin, ymax, N)]).T
            mu, labels = find_centers(Xb, k)
            BWkbs[i] = np.log(Wk(Xb, mu, labels))
        logWkb = np.sum(BWkbs)/b
        sk = np.sqrt(np.sum((BWkbs - logWkb)**2)/b) * np.sqrt(1 + 1/b)
        return logWkb, sk
    for k in range(min_k, max_k + 1):
        mu, labels = find_centers(X, k)
        logWk = np.log(Wk(X, mu, labels))
        logWkb, sk = reference_results(k)
        yield k, logWk, logWkb, sk
# Parameters for gap statistic calculation
B = 20      # Number of reference data sets per k
MIN_K = 1   # Lowest k to test
MAX_K = 10  # Highest k to test
def find_k(X, verbose=1):
    """Find the best k for k-means gap for X using the Gap Statistic
    X: points
    verbose: verbosity level: 0, 1 or 2
    Returns: best k if found, otherwise -1

    Picks the smallest k such that gap(k) >= gap(k+1) - s(k+1). The
    un-suffixed variables (k, logWk, ..., gap) always hold the previous
    iteration's values, so the criterion is only evaluated from the
    second iteration onwards. Iterates one past MAX_K so the criterion
    can still be checked for k = MAX_K.
    """
    for i, (k1, logWk1, logWkb1, sk1) in enumerate(gap_statistic(X, MIN_K, MAX_K + 1, B)):
        gap1 = logWkb1 - logWk1
        if i > 0:
            if verbose >= 2:
                print('%5d %5.2f %5.2f %5.2f : %5.2f' % (k, logWk, logWkb, sk, gap))
            if gap > gap1 - sk1:
                return k
        # Remember this iteration's values for the next comparison.
        k, logWk, logWkb, sk, gap = k1, logWk1, logWkb1, sk1, gap1
    return -1
################################################################################
# TESTING CODE
################################################################################
# GRID_NUMBER is a square number close to GRID_NUMBER_TARGET
GRID_NUMBER_TARGET = 1000
GRID_WIDTH = int(np.sqrt(GRID_NUMBER_TARGET))
GRID_NUMBER = GRID_WIDTH**2
# UNIFORM_GRID an array of GRID_WIDTH x GRID_WIDTH evenly spaced points on [-1, 1] x [-1, 1]
# (shape: (GRID_NUMBER, 2), one row per grid point)
xv, yv = np.meshgrid(np.linspace(-1, 1, GRID_WIDTH), np.linspace(-1, 1, GRID_WIDTH))
UNIFORM_GRID = np.vstack([xv.ravel(), yv.ravel()]).T
def maximally_spaced_points(k, r):
    """Return maximally spaced points in square of radius 1 around origin
    (i.e. square containing x, y such that -1 <= x <= 1, -1 <= y <= 1)
    Try to keep points at least distance r from edges of square
    k: number of points
    r: desired minimum distance from point to edge of square
    Returns: ndarray of N 2-d points

    Fix: uses range instead of the Python-2-only xrange.
    """
    if k == 1:
        return np.random.uniform(-min(r, 0.5), min(r, 0.5), size=(k, 2))
    scale = 1.0 - min(r, 0.5)
    # Start by randomly distributing points over unit radius square
    x0 = np.random.uniform(-1.0, 1.0, size=(k, 2))
    # Maximize minimum distance between centroids
    for m in range(10):
        changed = False
        for i in range(k):
            # Test replacing ith element in x0 all with elements in UNIFORM_GRID to find
            # the one that maximizes the minimum distance to elements other than ith in x0
            # If this minimum distance is greater than current_min then make it the ith element
            # in x0
            x1 = np.vstack((x0[:i, :], x0[i+1:, :]))
            # minimum distance between ith element in x0 and all other elements in x0
            current_min = norm(x1 - x0[i], 1).min()
            # diffs[j] = minimum distance between jth element in UNIFORM_GRID and all elements
            # in x0 other than ith
            diffs = norm(subtract_outer(UNIFORM_GRID, x1)).min(axis=-1)
            # max_j_min = index of element in UNIFORM_GRID that maximizes
            # minimum distance between ith element in x0 and all other elements in x0
            max_j_min = np.argmax(diffs)
            if diffs[max_j_min] > current_min:
                x0[i] = UNIFORM_GRID[max_j_min]
                changed = True
        if not changed and m > 1:
            break
    # Shrink square to get points r-ish distant from edges of unit radius square
    return x0 * scale
def init_board_gauss(N, k, r):
    """Initialize board of N points with k clusters
    Board is square of radius 1 around origin
    (i.e. square containing x, y such that -1 <= x <= 1, -1 <= y <= 1)
    Try to space cluster centers as far apart as possible while keeping them at least distance
    r from edges of unit radius square. This is done in an approximate way by generating
    random points around maximally spaced nuclei.
    N: number of points
    k: number of cluster
    r: desired std dev of points in cluster from cluster center
    Returns: X, centroids, labels
    X: points
    centroids: centroids of clusters
    labels: cluster index for each point
    """
    def add_cluster(X, j0, j1, cx, cy, s):
        """Add a cluster of normally distributed points to x for indexes [j0,j1)
        around center cx, cy and std dev s.
        X: points
        j0, j1: Add points with indexs j such that j0 <= j < j1
        cx, cy: Centers of normal distrubtion in x any y directions
        s: Desired std dev of normal distrubtion in x any y directions
        Returns: the empirical mean of the generated points (the cluster centroid)
        """
        j = j0
        while j < j1:
            a, b = np.random.normal(cx, s), np.random.normal(cy, s)
            # Continue drawing points from the distribution in the range (-1, 1)
            # (rejection sampling keeps every point inside the board)
            if abs(a) < 1 and abs(b) < 1:
                X[j, :] = a, b
                j += 1
        return np.mean(X[j0:j1], axis=0)
    nuclei = maximally_spaced_points(k, r)
    # True division (from __future__ import division): n is the average
    # points-per-cluster and may be fractional; rounding the cumulative
    # boundaries below distributes the remainder across clusters.
    n = N/k
    X = np.empty((N, 2))
    centroids = np.empty((k, 2))
    labels = np.empty(N, dtype=int)
    for i, (cx, cy) in enumerate(nuclei):
        j0, j1 = int(round(i * n)), int(round((i + 1) * n))
        centroids[i] = add_cluster(X, j0, j1, cx, cy, r)
        labels[j0:j1] = i
    return X, centroids, labels
def closest_indexes(centroids, mu):
    """Find the elements in centroids that are closest to the elements of mu and
    return arrays of indexes that
    map a centroid element to the closest element of mu, and
    map a mu element to the closest element of centroids
    centroids: ndarray of 2d points
    mu: ndarray of 2d points
    Returns: centroid_indexes, mu_indexes
    centroid_indexes[m] is the centroid index corresponding to mu index m
    mu_indexes[c] is the mu index corresponding to centroid index c

    Fix: uses range instead of the Python-2-only xrange.
    """
    k = centroids.shape[0]
    if k == 1:
        return [0], [0]
    # separations[m, c] = distance between mu[m] and centroid[c]
    separations = norm(subtract_outer(mu, centroids), 2)
    # indexes of diffs in increasing order of distance
    order = np.argsort(separations, axis=None)
    centroids_done = set()
    mu_done = set()
    centroid_indexes = [-1] * k
    mu_indexes = [-1] * k
    # Go through the mu[m], centroid[c] pairs in order of increasing separation
    # If m and c indexes are not assigned, set centroid_indexes[m] = c and mu_indexes[c] = m
    for i in range(k**2):
        c = order[i] % k
        m = order[i] // k
        if c in centroids_done or m in mu_done:
            continue
        centroid_indexes[m] = c
        mu_indexes[c] = m
        centroids_done.add(c)
        mu_done.add(m)
        if len(mu_done) >= k:
            break
    return centroid_indexes, mu_indexes
def match_clusters(centroids, mu, predicted_labels):
    """Return versions of mu and predicted_labels that are re-indexed so that
    mu[i] is closer to centroids[i] than any other element of centroids.
    centroids: ndarray of 2d points
    mu: ndarray of 2d points
    predicted_labels: ndarray of integers based on the mu indexes
    Returns: mu2, predicted_labels2
    mu2: mu re-indexed as described above
    predicted_labels2: predicted_labels updated for the mu => mu2 re-indexing

    Fix: uses range instead of the Python-2-only xrange.
    """
    centroid_indexes, mu_indexes = closest_indexes(centroids, mu)
    mu2 = mu[mu_indexes]
    predicted_labels2 = np.empty(predicted_labels.shape, dtype=int)
    # Rewrite each predicted label through the mu -> centroid index mapping.
    for i in range(predicted_labels.shape[0]):
        predicted_labels2[i] = centroid_indexes[predicted_labels[i]]
    return mu2, predicted_labels2
def estimate_difficulty(k, X, centroids, labels):
    """Estimate difficulty of matching
    1) find clusters for known k,
    2) match them to the test clusters and
    3) find which points don't belong to the clusters they were created for
    This gives a crude measure of how much the test clusters overlap and of
    how well the detected clusters match the test cluster
    k: Number of clusters in test board
    X: Points in test board
    centroids: Centroids of clusters in test board
    labels: Centroid labels of X
    Returns: mu, different_labels
    mu: Centroids of attempted clustering of test board
    different_labels: Indexes of points in X for which the attempted clustering
    gave different labels than board was created with
    """
    mu, predicted = find_centers(X, k)
    mu, predicted = match_clusters(centroids, mu, predicted)
    mismatched = np.nonzero(labels != predicted)[0]
    return mu, mismatched
COLOR_MAP = ['b', 'r', 'k', 'y', 'c', 'm']
# http://matplotlib.org/api/markers_api.html
MARKER_MAP = ['v', 'o', 's', '^', '<', '>', '8']
def COLOR(i):
    """Cycle through COLOR_MAP so any cluster index gets a color."""
    return COLOR_MAP[i % len(COLOR_MAP)]
def MARKER(i):
    """Cycle through MARKER_MAP so any cluster index gets a marker."""
    return MARKER_MAP[i % len(MARKER_MAP)]
def graph_board(k, N, r, X, centroids, labels, mu, different_labels):
    """Graph a test board
    k, N, r are the instructions for creating the test board
    X, centroids, labels describe the test board that was created
    mu, different_labels are an indication of how difficult the test board is.
    boards with mu a long way from centroids or with a high
    proprorting of different_labels are expected to be more difficult
    k: Number of clusters in test board
    N: Number of points in test board
    r: Radius of cluster distributions in test board
    X: Points in test board
    centroids: Centroids of clusters in test board
    labels: Centroid labels of X
    mu: Centroids of attempted clustering of test board
    different_labels: Indexes of points in X for which the attempted clustering gave different
    labels than board was created with

    Fix: uses range instead of the Python-2-only xrange.
    """
    fig, ax = plt.subplots()
    # One scatter series per cluster, colored/marked by cluster index.
    for i in range(k):
        x = X[np.where(labels == i)]
        ax.scatter(x[:, 0], x[:, 1], s=50, c=COLOR(i), marker=MARKER(i))
    # Mark misclassified points with a black cross on top.
    for i in different_labels:
        ax.scatter(X[i, 0], X[i, 1], s=100, c='k', marker='x', linewidths=1, zorder=4)
    # True centroid (*), detected centroid (+) and an arrow between them.
    for i in range(k):
        cx, cy = centroids[i, :]
        mx, my = mu[i, :]
        dx, dy = mu[i, :] - centroids[i, :]
        ax.scatter(cx, cy, marker='*', s=199, linewidths=3, c='k', zorder=10)
        ax.scatter(cx, cy, marker='*', s=181, linewidths=2, c=COLOR(i), zorder=20)
        ax.scatter(mx, my, marker='+', s=199, linewidths=4, c='k', zorder=11)
        ax.scatter(mx, my, marker='+', s=181, linewidths=3, c=COLOR(i), zorder=21)
        if dx**2 + dy**2 >= 0.001:
            ax.arrow(cx, cy, dx, dy, lw=1, head_width=0.05, length_includes_head=True,
                     zorder=9, fc='y', ec='k')
    ax.set_xlabel('x', fontsize=20)
    ax.set_ylabel('y', fontsize=20)
    ax.set_title('Clusters: k=%d, N=%d, r=%.2f, diff=%d (%.2f)' % (k, N, r,
                 different_labels.size, different_labels.size/N))
    plt.xlim((-1.0, 1.0))
    plt.ylim((-1.0, 1.0))
    ax.grid(True)
    fig.tight_layout()
    plt.show()
def run_test(k, N, r, do_graph=False, verbose=1):
    """Run a test to see if find_k(X) returns the correct number of clusters for
    test board X created with parameters k, N, r
    k, N, r are the instructions for creating the test board
    k: Number of clusters in test board
    N: Number of points in test board
    r: Radius of cluster distributions in test board
    do_graph: Graph the test board if True
    verbose: verbosity level: 0, 1 or 2
    Returns: correct, n_different
    correct: True if find_k() returned correct k
    n_diffrent: Number points in test board for which the attempted clustering
    gave different labels than board was created with. This is a measure
    of difficulty
    """
    assert MIN_K <= k <= MAX_K, 'invalid k=%d' % k
    # Create a board of points to test
    X, centroids, labels = init_board_gauss(N, k, r)
    # Do the test!
    predicted_k = find_k(X, verbose)
    correct = predicted_k == k
    # Estimate difficulty
    mu, different_labels = estimate_difficulty(k, X, centroids, labels)
    if verbose >= 1:
        print(' k=%d,N=%3d,r=%.2f,diff=%.2f: predicted_k=%d,correct=%s' % (k, N, r,
              different_labels.size/N, predicted_k, correct))
    if do_graph:
        graph_board(k, N, r, X, centroids, labels, mu, different_labels)
    return correct, different_labels.size
def test_with_graphs():
    """Run some tests and graph the results
    This lets you see what some typical test boards look like
    """
    run_test(2, 50, 1.0, do_graph=True)
    run_test(10, 100, 0.01, do_graph=True)
    run_test(2, 100, 0.01, do_graph=True)
    run_test(10, 100, 0.2, do_graph=True)
    run_test(2, 100, 0.25, do_graph=True)
    run_test(9, 200, 0.2, do_graph=True)
    run_test(4, 200, 0.3, do_graph=True)
    run_test(7, 200, 0.3, do_graph=True)
    # BUG FIX: `r` was previously an undefined name in the four calls below,
    # so this function raised NameError. Use an explicit radius instead.
    r = 0.3
    run_test(4, 200, r, do_graph=True)
    run_test(5, 50, r, do_graph=True)
    run_test(5, 50, r, do_graph=True)
    run_test(7, 400, r, do_graph=True)
def test_range(n_repeats, verbose=1):
    """Run a range of tests with different test board parameters and printthe results to stdout.
    n_repeats: Number of tests to run for each k, N, r combination
    verbose: verbosity level: 0, 1 or 2
    Returns: correct, n_different
    correct: True if find_k() returned correct k
    n_diffrent: Number points in test board for which the attempted clustering
    gave different labels than board was created with. This is a measure
    of difficulty

    Fix: uses range instead of the Python-2-only xrange.
    """
    results = []
    print('n_repeats=%d' % n_repeats)
    print('=' * 80)
    # Run tests printing results as we go
    for N in (20, 50, 100, 200, 400, 10**3, 10**4):
        for k in (1, 2, 3, 5, 7, 9):
            for r in (0.01, 0.1, 0.3, 0.5, 0.5**0.5, 1.0):
                if not MIN_K <= k <= MAX_K:
                    continue
                corrects, differents = zip(*(run_test(k, N, r, do_graph=False, verbose=verbose)
                                             for _ in range(n_repeats)))
                n_correct, n_different = sum(corrects), sum(differents)
                results.append((k, N, r, n_correct, n_different))
                print('k=%d,N=%3d,r=%.2f: %2d of %d = %3d%% (diff=%.2f)' % (k, N, r,
                      n_correct, n_repeats, int(100.0 * n_correct/n_repeats),
                      n_different/(n_repeats * N)))
                if verbose >= 1:
                    print('-' * 80)
                sys.stdout.flush()
    # Print summary
    print('=' * 80)
    for k, N, r, n_correct, n_different in results:
        print('k=%d,N=%3d,r=%.2f: %2d of %d = %3d%% (diff=%.2f)' % (k, N, r,
              n_correct, n_repeats, int(100.0 * n_correct/n_repeats),
              n_different/(n_repeats * N)))
def main():
    """Print environment info, seed the RNG for reproducibility and run the
    standard test range."""
    print(__doc__)
    print('')
    print('NumPy: %s' % np.version.version)
    np.random.seed(111)  # fixed seed so test boards are reproducible
    #test_with_graphs()
    n_repeats = 10
    test_range(n_repeats, verbose=1)
    print('')
if __name__ == '__main__':
    # Guard the entry point so importing this module does not run the
    # (long) test suite as a side effect; previously main() ran unconditionally.
    main()
|
|
"""Pytest fixtures for main test suite."""
import os
import pytest
from pycounter import csvhelper
from pycounter import report
import pycounter.sushi
def parsedata(filename):
    """Helper function returns a report from a filename relative to data directory."""
    data_path = os.path.join(os.path.dirname(__file__), "data", filename)
    return report.parse(data_path)
@pytest.fixture(params=["csvC4JR1", "C4JR1.csv", "C4JR1_bad.csv", "C4JR1GOA.csv"])
def csv_jr1_report(request):
    """All CSV-format JR1 reports, including a malformed one and a GOA variant."""
    return parsedata(request.param)
@pytest.fixture(params=["simpleJR1.tsv", "tsvC4JR1", "issue_14.tsv"])
def tsv_jr1(request):
"""TSV file"""
return parsedata(request.param)
@pytest.fixture(params=["csvC4JR1", "C4JR1.csv", "C4JR1_bad.csv"])
def csv_jr1_report_std(request):
"""Standard (non-GOA) JR1 reports."""
return parsedata(request.param)
@pytest.fixture(params=["csvC4JR1", "C4JR1.csv"])
def csv_jr1_report_common_data(request):
"""JR1 reports with shared common data we can make assertions about."""
return parsedata(request.param)
@pytest.fixture(params=["csvC4JR1", "C4JR1.csv", "C4JR1_bad.csv"])
def csv_jr1_r4_report(request):
"""Revision 4 JR1 reports."""
return parsedata(request.param)
@pytest.fixture(params=["JR1.xlsx", "JR1_bad.xlsx", "xlsxJR1"])
def jr1_report_xlsx(request):
"""Excel formatted JR1 reports."""
return parsedata(request.param)
def parse_sushi_file(filename):
"""Turn SUSHI data file into a report."""
with open(os.path.join(os.path.dirname(__file__), "data", filename)) as datafile:
return pycounter.sushi.raw_to_full(datafile.read())
@pytest.fixture(
    params=[
        "sushi_simple.xml",
        "sushi_simple_no_customer.xml",
        "sushi_simple_br1.xml",
        "sushi_simple_db1.xml",
        "sushi_db1_missing_record_view.xml",
        "sushi_br3.xml",
        "sushi_jr2.xml",
    ]
)
def sushi_report_all(request):
    """Report from SUSHI, shared common data."""
    return parse_sushi_file(request.param)
@pytest.fixture(
    params=[
        "sushi_simple.xml",
        "sushi_simple_br1.xml",
        "sushi_simple_db1.xml",
        "sushi_db1_missing_record_view.xml",
    ]
)
def sushi_report_with_customer(request):
    """Report from SUSHI, shared common data with customer set."""
    return parse_sushi_file(request.param)
@pytest.fixture(params=["sushi_simple_no_customer.xml"])
def sushi_report_no_customer(request):
    """Report from SUSHI, shared common data with customer not set."""
    return parse_sushi_file(request.param)
@pytest.fixture(params=["sushi_simple.xml", "sushi_simple_no_customer.xml"])
def sushi_report_jr1(request):
    """Report from SUSHI, shared common data, JR1 only."""
    return parse_sushi_file(request.param)
@pytest.fixture
def sushi_report_jr2():
    """Journal report 2 from SUSHI (journal turnaways)."""
    return parse_sushi_file("sushi_jr2.xml")
@pytest.fixture
def sushi_report_br3():
    """Book report 3 from SUSHI (book turnaways)."""
    return parse_sushi_file("sushi_br3.xml")
@pytest.fixture
def sushi_simple_br1():
    """Book report from SUSHI."""
    return parse_sushi_file("sushi_simple_br1.xml")
@pytest.fixture
def sushi_simple_db1():
    """Database report from SUSHI."""
    return parse_sushi_file("sushi_simple_db1.xml")
@pytest.fixture
def sushi_missing_ii():
    """SUSHI response with missing ISSN."""
    return parse_sushi_file("sushi_missing_ii.xml")
@pytest.fixture
def sushi_missing_rec():
    """Database report with January missing, no record_view records."""
    rpt = parse_sushi_file("sushi_db1_missing_record_view.xml")
    # missing data added on export
    rpt.as_generic()
    return rpt
@pytest.fixture
def sushi_missing_jan():
    """SUSHI report with months missing."""
    return parse_sushi_file("sushi_missing_jan.xml")
@pytest.fixture(
    params=[
        "C4BR1.tsv",
        "C4DB1.tsv",
        "C4JR1.csv",
        "C4BR2.tsv",
        "C4DB2.tsv",
        "C4JR1mul.csv",
    ]
)
def common_output(request):
    """Parsed report (as_generic) paired with the raw rows of its source file."""
    # Delimiter is inferred from the fixture filename's extension.
    delim = {"tsv": "\t", "csv": ","}[request.param.split(".")[1]]
    filename = os.path.join(os.path.dirname(__file__), "data", request.param)
    with csvhelper.UnicodeReader(filename, delimiter=delim) as report_reader:
        content = list(report_reader)
    return parsedata(request.param).as_generic(), content
@pytest.fixture(params=["C4BR2.tsv", "C4BR1.tsv"])
def all_book_reports(request):
    """All book reports."""
    return parsedata(request.param)
@pytest.fixture(params=["C4BR1.tsv", "simpleJR1.tsv"])
def report_file_output(request):
    """Reports paired with the expected raw bytes of their source files."""
    rpt = parsedata(request.param)
    with open(
        os.path.join(os.path.dirname(__file__), "data", request.param), "rb"
    ) as f:
        expected_data = f.read()
    return rpt, expected_data
@pytest.fixture(params=["C4DB1.tsv", "C4DB2.tsv"])
def db_report(request):
    """All C4 database reports."""
    return parsedata(request.param)
@pytest.fixture
def c4db2():
    """DB2 report."""
    return parsedata("C4DB2.tsv")
@pytest.fixture
def c4db1():
    """DB1 report."""
    return parsedata("C4DB1.tsv")
@pytest.fixture
def c4db1_sy():
    """DB1 report spanning a split year."""
    return parsedata("C4DB1_split_year.tsv")
@pytest.fixture
def br3_report():
    """Book report 3 (turnaways)."""
    return parsedata("C4BR3.csv")
@pytest.fixture
def br1_report():
    """Book report 1."""
    return parsedata("C4BR1.tsv")
@pytest.fixture
def br2_report():
    """Book report 2."""
    return parsedata("C4BR2.tsv")
@pytest.fixture
def jr2_report():
    """Journal report 2 (turnaways)."""
    return parsedata("C4JR2.csv")
@pytest.fixture
def pr1_report():
    """Platform report 1."""
    return parsedata("PR1.tsv")
@pytest.fixture
def jr1_bad():
    """C4 JR1 with questionable formatting..."""
    return parsedata("C4JR1_bad.csv")
@pytest.fixture
def goa():
    """Gold Open Access report."""
    return parsedata("C4JR1GOA.csv")
@pytest.fixture
def big_multiyear():
    """Big report spanning multiple years."""
    return parsedata("C4JR1big.csv")
@pytest.fixture
def multiyear():
    """Multiyear report."""
    return parsedata("C4JR1my.csv")
# The params value is a whitespace-split multi-line string: one data
# filename per line, covering every COUNTER 4 sample report.
@pytest.fixture(
    params="""C4BR1.tsv
           C4BR2.tsv
           C4BR3.csv
           C4DB1.tsv
           C4DB1_split_year.tsv
           C4DB2.tsv
           C4JR1.csv
           C4JR1_bad.csv
           C4JR1big.csv
           C4JR1GOA.csv
           C4JR1mul.csv
           C4JR1my.csv
           C4JR2.csv
           C4JR2_single_month.csv
           PR1.tsv
           simpleJR1.tsv
    """.split()
)
def all_reports(request):
    """All COUNTER 4 reports."""
    return parsedata(request.param)
|
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import collections
import random
import re
import string
import time
import types
import itertools
import jsonpatch
import jsonpointer
import yaql.context
import murano.common.config as cfg
import murano.dsl.helpers as helpers
# Module-level counter used by _random_name() to disambiguate names
# generated within the same millisecond; lazily initialized to 1.
_random_string_counter = None
def _transform_json(json, mappings):
    """Recursively expand '$macro' placeholders in a JSON-like structure.

    Lists are transformed element-wise; dict keys and values are both
    transformed; strings starting with '$' are substituted via
    _convert_macro_parameter() using 'mappings' (left unchanged if the
    substitution yields None). Anything else is returned as-is.

    Note: the original had a second, unreachable ListType branch (the
    first 'if' already returns for lists); it has been removed.
    """
    if isinstance(json, types.ListType):
        return [_transform_json(t, mappings) for t in json]
    if isinstance(json, types.DictionaryType):
        result = {}
        for key, value in json.items():
            result[_transform_json(key, mappings)] = \
                _transform_json(value, mappings)
        return result
    if isinstance(json, types.StringTypes) and json.startswith('$'):
        value = _convert_macro_parameter(json[1:], mappings)
        if value is not None:
            return value
    return json
def _convert_macro_parameter(macro, mappings):
    """Expand '{name}' references in 'macro' using 'mappings'.

    Returns the interpolated string when at least one '{name}' was
    replaced; otherwise the whole macro is treated as a key into
    'mappings' (a KeyError propagates if it is absent).
    """
    replaced = [False]  # one-element list: a py2 closure cannot rebind an outer local
    def replace(match):
        replaced[0] = True
        return unicode(mappings.get(match.group(1)))
    result = re.sub('{(\\w+?)}', replace, macro)
    if replaced[0]:
        return result
    else:
        return mappings[macro]
@yaql.context.EvalArg('format', types.StringTypes)
def _format(format, *args):
    """format(): str.format with lazily-evaluated yaql argument thunks."""
    return format.format(*[t() for t in args])
@yaql.context.EvalArg('src', types.StringTypes)
@yaql.context.EvalArg('substring', types.StringTypes)
@yaql.context.EvalArg('value', types.StringTypes)
def _replace_str(src, substring, value):
    """replace(src, substring, value): literal substring replacement."""
    return src.replace(substring, value)
@yaql.context.EvalArg('src', types.StringTypes)
@yaql.context.EvalArg('replacements', dict)
def _replace_dict(src, replacements):
    """replace(src, dict): apply every key->value replacement in turn.

    str sources coerce values with str(); unicode sources with unicode().
    Replacement order follows py2 dict iteration order (unspecified), so
    overlapping keys may interact non-deterministically.
    """
    for key, value in replacements.iteritems():
        if isinstance(src, str):
            src = src.replace(key, str(value))
        else:
            src = src.replace(key, unicode(value))
    return src
def _len(value):
return len(value())
def _coalesce(*args):
for t in args:
val = t()
if val:
return val
return None
@yaql.context.EvalArg('value', types.StringTypes)
def _base64encode(value):
    """base64encode(value): base64-encode the string."""
    return base64.b64encode(value)
@yaql.context.EvalArg('value', types.StringTypes)
def _base64decode(value):
    """base64decode(value): inverse of base64encode."""
    return base64.b64decode(value)
@yaql.context.EvalArg('group', types.StringTypes)
@yaql.context.EvalArg('setting', types.StringTypes)
def _config(group, setting):
    """config(group, setting): read an option from a named config group."""
    return cfg.CONF[group][setting]
@yaql.context.EvalArg('setting', types.StringTypes)
def _config_default(setting):
    """config(setting): read an option from the default config group."""
    return cfg.CONF[setting]
@yaql.context.EvalArg('value', types.StringTypes)
def _upper(value):
    """toUpper(value)."""
    return value.upper()
@yaql.context.EvalArg('value', types.StringTypes)
def _lower(value):
    """toLower(value)."""
    return value.lower()
@yaql.context.EvalArg('separator', types.StringTypes)
def _join(separator, collection):
    """join(separator, collection): str()-coerce items and join them."""
    return separator.join(str(t) for t in collection())
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('separator', types.StringTypes)
def _split(value, separator):
    """split(value, separator)."""
    return value.split(separator)
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('prefix', types.StringTypes)
def _startswith(value, prefix):
    """startsWith(value, prefix)."""
    return value.startswith(prefix)
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('suffix', types.StringTypes)
def _endswith(value, suffix):
    """endsWith(value, suffix)."""
    return value.endswith(suffix)
@yaql.context.EvalArg('value', types.StringTypes)
def _trim(value):
    """trim(value): strip leading/trailing whitespace."""
    return value.strip()
# NOTE(review): '_mathces' is a typo for '_matches', but register()
# exposes it under the correct public name 'matches'; renaming the
# function would require a coordinated change in register(), so it is
# kept as-is.
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('pattern', types.StringTypes)
def _mathces(value, pattern):
    """matches(value, pattern): regex match anchored at the start."""
    return re.match(pattern, value) is not None
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('index', int)
@yaql.context.EvalArg('length', int)
def _substr3(value, index, length):
    """substr(value, index, length): slice; negative length means 'to end'."""
    if length < 0:
        return value[index:]
    else:
        return value[index:index + length]
@yaql.context.EvalArg('value', types.StringTypes)
@yaql.context.EvalArg('index', int)
def _substr2(value, index):
    """substr(value, index): suffix starting at index."""
    return _substr3(value, index, -1)
def _str(value):
    """str() for yaql: None -> '', booleans -> 'true'/'false', else unicode()."""
    value = value()
    if value is None:
        return ''
    elif value is True:
        return 'true'
    elif value is False:
        return 'false'
    return unicode(value)
def _int(value):
value = value()
if value is None:
return 0
return int(value)
def _pselect(collection, composer):
    """pselect(): apply 'composer' to collection items in parallel.

    Accepts either an already-materialized list or a lazy yaql
    collection thunk (evaluated before dispatching).
    """
    if isinstance(collection, types.ListType):
        return helpers.parallel_select(collection, composer)
    else:
        return helpers.parallel_select(collection(), composer)
def _patch(obj, patch):
    """patch(obj, patch): apply an RFC 6902 JSON patch.

    'patch' may be a single operation dict or a list of them. If any
    JSON pointer in the patch does not resolve, the original object is
    returned unchanged (best-effort semantics).
    """
    obj = obj()
    patch = patch()
    if not isinstance(patch, types.ListType):
        patch = [patch]
    patch = jsonpatch.JsonPatch(patch)
    try:
        return patch.apply(obj)
    except jsonpointer.JsonPointerException:
        return obj
def _int2base(x, base):
"""Converts decimal integers into another number base
from base-2 to base-36.
:param x: decimal integer
:param base: number base, max value is 36
:return: integer converted to the specified base
"""
digs = string.digits + string.lowercase
if x < 0:
sign = -1
elif x == 0:
return '0'
else:
sign = 1
x *= sign
digits = []
while x:
digits.append(digs[x % base])
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return ''.join(digits)
def _random_name():
    """Generate a short, unique, lowercase alphanumeric name.

    The result is: 5 random lowercase letters, plus the current
    timestamp in milliseconds encoded base-36 (truncated to 8 chars),
    plus a base-36 counter suffix (1-2 chars) that disambiguates names
    generated within the same millisecond.

    (The previous docstring described 'pattern'/'number' parameters this
    function never had; it was copied from a hostname-pattern helper.)
    :return: generated name string
    """
    global _random_string_counter
    counter = _random_string_counter or 1
    # generate first 5 random chars (ascii_lowercase: py2/py3-safe,
    # locale-independent, unlike the removed string.lowercase)
    prefix = ''.join(random.choice(string.ascii_lowercase) for _ in range(5))
    # convert timestamp to higher base to shorten the string (up to 8 chars)
    timestamp = _int2base(int(time.time() * 1000), 36)[:8]
    # third part of random name up to 2 chars
    # (1295 is last 2-digit number in base-36, 1296 is first 3-digit number)
    suffix = _int2base(counter, 36)
    _random_string_counter = (counter + 1) % 1296
    return prefix + timestamp + suffix
@yaql.context.EvalArg('self', dict)
def _values(self):
    """values(dict): the dict's values."""
    return self.values()
@yaql.context.EvalArg('self', dict)
def _keys(self):
    """keys(dict): the dict's keys."""
    return self.keys()
@yaql.context.EvalArg('self', collections.Iterable)
def _flatten(self):
    """flatten(): yield items, expanding nested iterables one level deep.

    NOTE(review): strings are themselves iterable, so a string element
    would be expanded into characters — presumably callers only pass
    nested lists; confirm before relying on string elements.
    """
    for i in self:
        if isinstance(i, collections.Iterable):
            for ii in i:
                yield ii
        else:
            yield i
@yaql.context.EvalArg('self', dict)
@yaql.context.EvalArg('other', dict)
def _merge_with(self, other):
    """mergeWith(dict, dict): merge via helpers.merge_dicts (semantics defined there)."""
    return helpers.merge_dicts(self, other)
@yaql.context.EvalArg('collection', collections.Iterable)
@yaql.context.EvalArg('count', int)
def _skip(collection, count):
    """skip(collection, count): lazily drop the first 'count' items."""
    return itertools.islice(collection, count, None)
@yaql.context.EvalArg('collection', collections.Iterable)
@yaql.context.EvalArg('count', int)
def _take(collection, count):
    """take(collection, count): lazily yield the first 'count' items."""
    return itertools.islice(collection, count)
def register(context):
    """Register all of the above functions in the given yaql context.

    Several public names are registered twice (e.g. 'replace', 'config',
    'substr'): yaql keeps both and dispatches between the overloads by
    argument types/arity declared via EvalArg.
    """
    context.register_function(
        lambda json, mappings: _transform_json(json(), mappings()), 'bind')
    context.register_function(_format, 'format')
    context.register_function(_replace_str, 'replace')
    context.register_function(_replace_dict, 'replace')
    context.register_function(_len, 'len')
    context.register_function(_coalesce, 'coalesce')
    context.register_function(_base64decode, 'base64decode')
    context.register_function(_base64encode, 'base64encode')
    context.register_function(_config, 'config')
    context.register_function(_config_default, 'config')
    context.register_function(_lower, 'toLower')
    context.register_function(_upper, 'toUpper')
    context.register_function(_join, 'join')
    context.register_function(_split, 'split')
    context.register_function(_pselect, 'pselect')
    context.register_function(_startswith, 'startsWith')
    context.register_function(_endswith, 'endsWith')
    context.register_function(_trim, 'trim')
    # '_mathces' (sic) is exposed under the intended name 'matches'
    context.register_function(_mathces, 'matches')
    context.register_function(_substr2, 'substr')
    context.register_function(_substr3, 'substr')
    context.register_function(_str, 'str')
    context.register_function(_int, 'int')
    context.register_function(_patch, 'patch')
    context.register_function(_random_name, 'randomName')
    # Temporary workaround, these functions should be moved to YAQL
    context.register_function(_keys, 'keys')
    context.register_function(_values, 'values')
    context.register_function(_flatten, 'flatten')
    context.register_function(_merge_with, 'mergeWith')
    context.register_function(_skip, 'skip')
    context.register_function(_take, 'take')
|
|
# -*- coding: utf-8 -*-
#
# Script to execute test cases from one suite in parallel
# by thomas klein / 2009
#
# imports
from robot.running import TestSuite
from robot import utils
from robot.conf import settings
import os, glob
import subprocess
import time
from datetime import datetime
import sys
import getopt
import fileinput
import PyroConfig as config
from ParseSauceURL import *
from SauceRest import *
class Jybot():
    """ Helper class to interact with RobotFrameworks pybot script to execute tests / test suites.
    """
    # NOTE(review): these are *class* attributes, shared by every
    # instance until shadowed by an instance assignment; 'tests' and
    # 'args' are mutable lists, so mutating them on one instance would
    # leak to all others.
    name = ""
    tests = []
    suite = ""
    args = []
    output = ""  # never assigned by start(); it sets 'output_file' instead
    process = -1
    running = False
    def __init__(self, name):
        """ Constructor, creates the object and assigns the given 'name'.
        """
        self.name = name
    def start(self, suite, args=[]):
        """ Starts the pybot script from RobotFramework executing the defined 'tests' from the given 'suite'.
        'suite' is the filename of the test suite containing the 'tests'
        'args' (optional) is a list of additional parameters passed to pybot
        """
        # NOTE(review): 'args=[]' is a mutable default (shared between
        # calls) and 'args' is currently unused below; this method also
        # reads the module globals 'payload', 'logFolder', 'clientCwd'.
        self.suite = suite
        self.output_file = "%s_Output.xml" % (self.name)
        temp, suiteName = os.path.split(payload)  # local 'suiteName' is unused here
        jyLog = open(os.path.join(logFolder, ("%s_Log.txt" % self.name)), "w")
        # 'jybotCommand' is built only for the log message; the actual
        # process is started via the argument list below.
        jybotCommand = "pybot -o %s %s" % (os.path.join(logFolder, self.output_file), self.suite)
        print "Executing : %s ..." % jybotCommand
        self.running = True
        self.process = subprocess.Popen(["pybot", "-o", "%s" % os.path.join(logFolder, self.output_file), "%s" % self.suite], cwd=clientCwd, stdout=jyLog, stderr=jyLog)
    def isRunning(self):
        """ Polls the pybot subprocess to check if it's running. Will return true if the process is running.
        Returns false if the process hasn't been started or has finished already.
        """
        if not self.running:
            return False
        elif self.process.poll() == 0 or self.process.returncode >= 0:
            return False
        else:
            return True
    def stop(self):
        """ Kills the pybot subprocess.
        """
        # NOTE(review): 'taskkill' is Windows-only; on POSIX this does
        # nothing useful (self.process.terminate() would be portable).
        os.system("taskkill /T /F /PID %s" % self.process.pid)
        self.running = False
# Methods #####################################################################################################
def startJybot(name, suite, args=None):
    """ Creates a pybot object, starts it and returns the object
    'name' is the name for the pybot (will be used for log outputs)
    'suite' is the filename of the test suite to execute
    'args' (optional) is a list of additional parameters passed to pybot
    """
    # 'args=None' replaces the mutable default 'args=[]' (a shared list
    # default is reused across calls - classic Python pitfall); callers
    # that passed args explicitly see no difference.
    jybot = Jybot(name)
    jybot.start(suite, args if args is not None else [])
    return jybot
def generateReportAndLog(xmlFiles, reportFile, logFile):
    """ Calls RobotFrameworks rebot tool to generate Report and Log files from output.xml files
    'xmlFiles' is a list of output.xml files from jybot / pybot
    'reportFile' is the path+name of the report.html file to be written
    'logFile' is the path+name of the log.html file to be written
    the global variable 'payload' will be used a report title
    """
    # NOTE(review): 'xmlFiles' is accepted but never used - the rebot
    # command globs '<payload>*.xml' instead; the title actually comes
    # from the global 'suiteName', not 'payload' as the docstring says.
    rebotCommand = "rebot --log %s --report %s --reporttitle \"%s\" --name ' ' %s*.xml" % (logFile, reportFile, suiteName, payload)
    print 'rebotCommand: ' + rebotCommand
    rc = os.system(rebotCommand)
    return rc
def parseArgs(argv):
""" Parses command line arguments like the testsuite name and additonal parameters
Expects the command line args without the python class as parameter argv (sys.argv[1:])
Fails and aborts script if args don't match the expected format
"""
global payload, clientCwd, baseDir, logFolder, testsToRun, suiteToRun, testDirectory
if len(argv)<1:
usage()
sys.exit(2)
suiteToRun = payload = argv[len(argv)-1]
if len(os.path.split(payload)) == 0:
clientCwd = "./"
else:
clientCwd = os.path.realpath(baseDir)
payload = os.path.join(os.path.realpath(baseDir), payload)
if not os.path.exists(payload):
print "FATAL - Pyro must be given a payload that exists: %s\n" % payload
exit(1)
#####
#### logFolder = os.path.abspath(logFolder)
logFolder = payload
print "Base dir: %s" % baseDir;
print "Real Base dir: %s" % os.path.realpath(baseDir);
print "Client dir: %s" % clientCwd
print "Real Client Dir: %s" % os.path.realpath(clientCwd)
print "Log dir: %s" % logFolder
print "Payload: %s" % payload
def getDynArgs(index):
    """ Reads the DYN_ARGS variable from the config file and parses it into a list of argument strings
    like --variable name:"value".
    This list can be passed to the Pybot start() method as args[] list.
    """
    arglist = []
    for row in config.DYN_ARGS:
        valueIndex = index
        if len(row) < 2:
            print "Error reading DYN_ARGS: Row is invalid: %s. Row will be skipped!" % row
        else:
            varName = row[0]
            values = []
            i = 1
            while i < len(row):
                values.append(row[i])
                i = i+1
            if valueIndex >= len(values):
                # NOTE(review): wraps an out-of-range index, but
                # '(len(values)-1) % valueIndex' looks suspicious - the
                # conventional wrap would be 'valueIndex % len(values)'.
                # Left unchanged; confirm intent before fixing.
                valueIndex = (len(values)-1) % valueIndex
            varValue = values[valueIndex]
            arglist.append("--variable %s:\"%s\"" % (varName, varValue))
    return arglist
def usage():
    """ Prints usage information for Parabot """
    # NOTE(review): the text still says 'parabot.py' although this
    # script is PyroSuite; the printed strings are runtime output and
    # are intentionally left untouched here.
    print ""
    print "Usage: python parabot.py [options] <testsuite.tsv>"
    print ""
    print "<testsuite.tsv> can be absolute or relative path + filename of a testsuite."
    print "The containing folder will be used as working directory"
    print ""
    print "Options:"
    print "-h\t--help\t\tThis screen"
    print "-i\t--include\tInclude a tag"
    print "-e\t--exclude\tExclude a tag"
    print "-f\t--forceserial\tForces serial test execution"
    print "-b\t--basedir\tSet parabots base dir"
    print ""
# helper classes ##############################################################################################
# MAIN SCRIPT #################################################################################################
print "";
print "-- PYROSUITE --";
print "";
# save current time to calculate execution time at the end of the script
startTime = datetime.now()
# global vars
payload = "No suite defined yet" # specified via args
argumentFiles = []
listeners = []
jybots = []
clientCwd = "No cwd defined" # specified via testsuite from args
baseDir = "./"
# reading variables from ParabotConfig
time_between_test_start_up = config.time_between_test_start_up
logFolder = config.LOG_DIR
#antBatch = os.path.abspath(config.ANT_BATCH_FILE)
#seCwd = os.path.abspath(config.SELENIUM_GRID_DIR)
#startSelenium = config.AUTOSTART_SELENIUM
#browser = config.SELENIUM_BROWSER
os.environ['SAUCE_ONDEMAND_BROWSERS'] = '[{"platform":"LINUX","os":"Linux","browser":"chrome","url":"sauce-ondemand:?os=Linux&browser=chrome&browser-version=32&username=talliskane&access-key=6c3ed64b-e065-4df4-b921-75336e2cb9cf","browser-version":"32"},{"platform":"LINUX","os":"Linux","browser":"android","url":"sauce-ondemand:?os=Linux&browser=android&browser-version=4.0&username=talliskane&access-key=6c3ed64b-e065-4df4-b921-75336e2cb9cf","browser-version":"4.0"}]'
# parsing command line arguments
parseArgs(sys.argv[1:])
construct = SauceTeamCityBrowserData()
#print construct.getBrowsersString(sauce_username,sauce_accesskey)
browser_list = construct.getBrowsersString()
#print browser_list
#print construct.getBrowser(0)
#print construct.getUserName(0)
suiteName = os.path.basename(os.path.normpath(payload))
print "Starting suite \"%s\" ...\n" % suiteName
for (i, browser_list) in enumerate(browser_list):
testName = "%s_%s_%s" % (construct.getBrowser(i), construct.getOS(i), construct.getBrowserVersion(i))
print ""
print "Starting browser tests for %s ..." % testName
jybot = startJybot(testName, payload, getDynArgs(0))
while jybot.isRunning():
time.sleep(time_between_test_start_up)
jybots.append(jybot)
print "Serial tests finished"
print ""
# merging outputs to one report and log
print "Generating report and log"
report = "%s_Report.html" % os.path.join(logFolder, suiteName)
log = "%s_Log.html" % os.path.join(logFolder, suiteName)
outputXmls = []
for jybot in jybots:
outputXmls.append(os.path.join(logFolder, jybot.output))
reportRC = generateReportAndLog(outputXmls, report, log)
# delete XML output files after generating the report / log (if report generation
# returned zero)
#if reportRC == 0:
# for outXML in outputXmls:
# os.remove(outXML)
# calculating test execution time
endTime = datetime.now()
executionTime = endTime - startTime
print ""
print "Execution time: %s" % executionTime
|
|
"""
Tests for DBSCAN clustering algorithm
"""
import pickle
import numpy as np
import warnings
from scipy.spatial import distance
from scipy import sparse
import pytest
from sklearn.utils._testing import assert_array_equal
from sklearn.neighbors import NearestNeighbors
from sklearn.cluster import DBSCAN
from sklearn.cluster import dbscan
from sklearn.cluster.tests.common import generate_clustered_data
from sklearn.metrics.pairwise import pairwise_distances
# Shared fixture data for the tests below: three clusters.
n_clusters = 3
X = generate_clustered_data(n_clusters=n_clusters)
def test_dbscan_similarity():
    """DBSCAN on a precomputed, normalised distance matrix."""
    # Tests the DBSCAN algorithm with a similarity array.
    # Parameters chosen specifically for this task.
    eps = 0.15
    min_samples = 10
    # Compute similarities
    D = distance.squareform(distance.pdist(X))
    D /= np.max(D)
    # Compute DBSCAN
    core_samples, labels = dbscan(
        D, metric="precomputed", eps=eps, min_samples=min_samples
    )
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - (1 if -1 in labels else 0)
    assert n_clusters_1 == n_clusters
    db = DBSCAN(metric="precomputed", eps=eps, min_samples=min_samples)
    labels = db.fit(D).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_2 == n_clusters
def test_dbscan_feature():
    """DBSCAN on raw feature vectors with the euclidean metric."""
    # Tests the DBSCAN algorithm with a feature vector array.
    # Parameters chosen specifically for this task.
    # Different eps to other test, because distance is not normalised.
    eps = 0.8
    min_samples = 10
    metric = "euclidean"
    # Compute DBSCAN
    # parameters chosen for task
    core_samples, labels = dbscan(X, metric=metric, eps=eps, min_samples=min_samples)
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_1 == n_clusters
    db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples)
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_2 == n_clusters
def test_dbscan_sparse():
    """Sparse and dense inputs must give identical cores and labels."""
    core_sparse, labels_sparse = dbscan(sparse.lil_matrix(X), eps=0.8, min_samples=10)
    core_dense, labels_dense = dbscan(X, eps=0.8, min_samples=10)
    assert_array_equal(core_dense, core_sparse)
    assert_array_equal(labels_dense, labels_sparse)
@pytest.mark.parametrize("include_self", [False, True])
def test_dbscan_sparse_precomputed(include_self):
    """A sparse precomputed neighbor graph must match the dense matrix result."""
    D = pairwise_distances(X)
    nn = NearestNeighbors(radius=0.9).fit(X)
    X_ = X if include_self else None
    D_sparse = nn.radius_neighbors_graph(X=X_, mode="distance")
    # Ensure it is sparse not merely on diagonals:
    assert D_sparse.nnz < D.shape[0] * (D.shape[0] - 1)
    core_sparse, labels_sparse = dbscan(
        D_sparse, eps=0.8, min_samples=10, metric="precomputed"
    )
    core_dense, labels_dense = dbscan(D, eps=0.8, min_samples=10, metric="precomputed")
    assert_array_equal(core_dense, core_sparse)
    assert_array_equal(labels_dense, labels_sparse)
def test_dbscan_sparse_precomputed_different_eps():
    """A too-wide precomputed graph must be filtered down to DBSCAN's eps."""
    # test that precomputed neighbors graph is filtered if computed with
    # a radius larger than DBSCAN's eps.
    lower_eps = 0.2
    nn = NearestNeighbors(radius=lower_eps).fit(X)
    D_sparse = nn.radius_neighbors_graph(X, mode="distance")
    dbscan_lower = dbscan(D_sparse, eps=lower_eps, metric="precomputed")
    higher_eps = lower_eps + 0.7
    nn = NearestNeighbors(radius=higher_eps).fit(X)
    D_sparse = nn.radius_neighbors_graph(X, mode="distance")
    dbscan_higher = dbscan(D_sparse, eps=lower_eps, metric="precomputed")
    assert_array_equal(dbscan_lower[0], dbscan_higher[0])
    assert_array_equal(dbscan_lower[1], dbscan_higher[1])
@pytest.mark.parametrize("use_sparse", [True, False])
@pytest.mark.parametrize("metric", ["precomputed", "minkowski"])
def test_dbscan_input_not_modified(use_sparse, metric):
    """dbscan() must not mutate its input array (dense or sparse)."""
    # test that the input is not modified by dbscan
    X = np.random.RandomState(0).rand(10, 10)
    X = sparse.csr_matrix(X) if use_sparse else X
    X_copy = X.copy()
    dbscan(X, metric=metric)
    if use_sparse:
        assert_array_equal(X.toarray(), X_copy.toarray())
    else:
        assert_array_equal(X, X_copy)
def test_dbscan_no_core_samples():
    """All-noise data: no core samples, empty components, every label -1."""
    rng = np.random.RandomState(0)
    X = rng.rand(40, 10)
    X[X < 0.8] = 0
    for X_ in [X, sparse.csr_matrix(X)]:
        db = DBSCAN(min_samples=6).fit(X_)
        assert_array_equal(db.components_, np.empty((0, X_.shape[1])))
        assert_array_equal(db.labels_, -1)
        assert db.core_sample_indices_.shape == (0,)
def test_dbscan_callable():
    """DBSCAN with a callable metric (scipy's euclidean function)."""
    # Tests the DBSCAN algorithm with a callable metric.
    # Parameters chosen specifically for this task.
    # Different eps to other test, because distance is not normalised.
    eps = 0.8
    min_samples = 10
    # metric is the function reference, not the string key.
    metric = distance.euclidean
    # Compute DBSCAN
    # parameters chosen for task
    core_samples, labels = dbscan(
        X, metric=metric, eps=eps, min_samples=min_samples, algorithm="ball_tree"
    )
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_1 == n_clusters
    db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples, algorithm="ball_tree")
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_2 == n_clusters
def test_dbscan_metric_params():
    """metric_params={'p': ...} matches the direct p= argument and manhattan."""
    # Tests that DBSCAN works with the metrics_params argument.
    eps = 0.8
    min_samples = 10
    p = 1
    # Compute DBSCAN with metric_params arg
    with warnings.catch_warnings(record=True) as warns:
        db = DBSCAN(
            metric="minkowski",
            metric_params={"p": p},
            eps=eps,
            p=None,
            min_samples=min_samples,
            algorithm="ball_tree",
        ).fit(X)
    assert not warns, warns[0].message
    core_sample_1, labels_1 = db.core_sample_indices_, db.labels_
    # Test that sample labels are the same as passing Minkowski 'p' directly
    db = DBSCAN(
        metric="minkowski", eps=eps, min_samples=min_samples, algorithm="ball_tree", p=p
    ).fit(X)
    core_sample_2, labels_2 = db.core_sample_indices_, db.labels_
    assert_array_equal(core_sample_1, core_sample_2)
    assert_array_equal(labels_1, labels_2)
    # Minkowski with p=1 should be equivalent to Manhattan distance
    db = DBSCAN(
        metric="manhattan", eps=eps, min_samples=min_samples, algorithm="ball_tree"
    ).fit(X)
    core_sample_3, labels_3 = db.core_sample_indices_, db.labels_
    assert_array_equal(core_sample_1, core_sample_3)
    assert_array_equal(labels_1, labels_3)
    with pytest.warns(
        SyntaxWarning,
        match=(
            "Parameter p is found in metric_params. "
            "The corresponding parameter from __init__ "
            "is ignored."
        ),
    ):
        # Test that checks p is ignored in favor of metric_params={'p': <val>}
        db = DBSCAN(
            metric="minkowski",
            metric_params={"p": p},
            eps=eps,
            p=p + 1,
            min_samples=min_samples,
            algorithm="ball_tree",
        ).fit(X)
        core_sample_4, labels_4 = db.core_sample_indices_, db.labels_
    assert_array_equal(core_sample_1, core_sample_4)
    assert_array_equal(labels_1, labels_4)
def test_dbscan_balltree():
    """Tree-based neighbor searches agree with the precomputed result."""
    # Tests the DBSCAN algorithm with balltree for neighbor calculation.
    eps = 0.8
    min_samples = 10
    D = pairwise_distances(X)
    core_samples, labels = dbscan(
        D, metric="precomputed", eps=eps, min_samples=min_samples
    )
    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_1 == n_clusters
    db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm="ball_tree")
    labels = db.fit(X).labels_
    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_2 == n_clusters
    db = DBSCAN(p=2.0, eps=eps, min_samples=min_samples, algorithm="kd_tree")
    labels = db.fit(X).labels_
    n_clusters_3 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_3 == n_clusters
    db = DBSCAN(p=1.0, eps=eps, min_samples=min_samples, algorithm="ball_tree")
    labels = db.fit(X).labels_
    n_clusters_4 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_4 == n_clusters
    db = DBSCAN(leaf_size=20, eps=eps, min_samples=min_samples, algorithm="ball_tree")
    labels = db.fit(X).labels_
    n_clusters_5 = len(set(labels)) - int(-1 in labels)
    assert n_clusters_5 == n_clusters
def test_input_validation():
    """DBSCAN.fit should accept a plain list of lists without raising."""
    # DBSCAN.fit should accept a list of lists.
    X = [[1.0, 2.0], [3.0, 4.0]]
    DBSCAN().fit(X)  # must not raise exception
@pytest.mark.parametrize(
    "args",
    [
        {"algorithm": "blah"},
        {"metric": "blah"},
    ],
)
def test_dbscan_badargs(args):
    """Invalid algorithm/metric names must raise ValueError."""
    # Test bad argument values: these should all raise ValueErrors
    with pytest.raises(ValueError):
        dbscan(X, **args)
def test_pickle():
    """A DBSCAN estimator survives a pickle round-trip with its exact class."""
    obj = DBSCAN()
    s = pickle.dumps(obj)
    assert type(pickle.loads(s)) == obj.__class__
def test_boundaries():
    """min_samples counts the core point itself; eps is inclusive."""
    # ensure min_samples is inclusive of core point
    core, _ = dbscan([[0], [1]], eps=2, min_samples=2)
    assert 0 in core
    # ensure eps is inclusive of circumference
    core, _ = dbscan([[0], [1], [1]], eps=1, min_samples=2)
    assert 0 in core
    core, _ = dbscan([[0], [1], [1]], eps=0.99, min_samples=2)
    assert 0 not in core
def test_weighted_dbscan():
    """sample_weight: validation, effect on cores, equivalence to repetition."""
    # ensure sample_weight is validated
    with pytest.raises(ValueError):
        dbscan([[0], [1]], sample_weight=[2])
    with pytest.raises(ValueError):
        dbscan([[0], [1]], sample_weight=[2, 3, 4])
    # ensure sample_weight has an effect
    assert_array_equal([], dbscan([[0], [1]], sample_weight=None, min_samples=6)[0])
    assert_array_equal([], dbscan([[0], [1]], sample_weight=[5, 5], min_samples=6)[0])
    assert_array_equal([0], dbscan([[0], [1]], sample_weight=[6, 5], min_samples=6)[0])
    assert_array_equal(
        [0, 1], dbscan([[0], [1]], sample_weight=[6, 6], min_samples=6)[0]
    )
    # points within eps of each other:
    assert_array_equal(
        [0, 1], dbscan([[0], [1]], eps=1.5, sample_weight=[5, 1], min_samples=6)[0]
    )
    # and effect of non-positive and non-integer sample_weight:
    assert_array_equal(
        [], dbscan([[0], [1]], sample_weight=[5, 0], eps=1.5, min_samples=6)[0]
    )
    assert_array_equal(
        [0, 1], dbscan([[0], [1]], sample_weight=[5.9, 0.1], eps=1.5, min_samples=6)[0]
    )
    assert_array_equal(
        [0, 1], dbscan([[0], [1]], sample_weight=[6, 0], eps=1.5, min_samples=6)[0]
    )
    assert_array_equal(
        [], dbscan([[0], [1]], sample_weight=[6, -1], eps=1.5, min_samples=6)[0]
    )
    # for non-negative sample_weight, cores should be identical to repetition
    rng = np.random.RandomState(42)
    sample_weight = rng.randint(0, 5, X.shape[0])
    core1, label1 = dbscan(X, sample_weight=sample_weight)
    assert len(label1) == len(X)
    X_repeated = np.repeat(X, sample_weight, axis=0)
    core_repeated, label_repeated = dbscan(X_repeated)
    core_repeated_mask = np.zeros(X_repeated.shape[0], dtype=bool)
    core_repeated_mask[core_repeated] = True
    core_mask = np.zeros(X.shape[0], dtype=bool)
    core_mask[core1] = True
    assert_array_equal(np.repeat(core_mask, sample_weight), core_repeated_mask)
    # sample_weight should work with precomputed distance matrix
    D = pairwise_distances(X)
    core3, label3 = dbscan(D, sample_weight=sample_weight, metric="precomputed")
    assert_array_equal(core1, core3)
    assert_array_equal(label1, label3)
    # sample_weight should work with estimator
    est = DBSCAN().fit(X, sample_weight=sample_weight)
    core4 = est.core_sample_indices_
    label4 = est.labels_
    assert_array_equal(core1, core4)
    assert_array_equal(label1, label4)
    est = DBSCAN()
    label5 = est.fit_predict(X, sample_weight=sample_weight)
    core5 = est.core_sample_indices_
    assert_array_equal(core1, core5)
    assert_array_equal(label1, label5)
    assert_array_equal(label1, est.labels_)
@pytest.mark.parametrize("algorithm", ["brute", "kd_tree", "ball_tree"])
def test_dbscan_core_samples_toy(algorithm):
    """Check core-sample and label assignments on a tiny 1-D dataset as
    ``min_samples`` grows from 1 (everything core) to 4 (everything noise)."""
    X = [[0], [2], [3], [4], [6], [8], [10]]
    n_samples = len(X)

    cases = [
        # Degenerate case: every sample is a core sample, either with its own
        # cluster or including other close core samples.
        (1, np.arange(n_samples), [0, 1, 1, 1, 2, 3, 4]),
        # With eps=1 and min_samples=2 only the 3 samples from the denser area
        # are core samples. All other points are isolated and considered noise.
        (2, [1, 2, 3], [-1, 0, 0, 0, -1, -1, -1]),
        # Only the sample in the middle of the dense area is core. Its two
        # neighbors are edge samples. Remaining samples are noise.
        (3, [2], [-1, 0, 0, 0, -1, -1, -1]),
        # It's no longer possible to extract core samples with eps=1:
        # everything is noise.
        (4, [], np.full(n_samples, -1.0)),
    ]
    for min_samples, expected_cores, expected_labels in cases:
        core_samples, labels = dbscan(
            X, algorithm=algorithm, eps=1, min_samples=min_samples
        )
        assert_array_equal(core_samples, expected_cores)
        assert_array_equal(labels, expected_labels)
def test_dbscan_precomputed_metric_with_degenerate_input_arrays():
    """DBSCAN with a precomputed metric must not crash on degenerate
    distance matrices.

    Non-regression test for
    https://github.com/scikit-learn/scikit-learn/issues/4641
    """
    # Both an identity matrix and an all-zero matrix should yield a single
    # label value over all samples.
    for dist_matrix in (np.eye(10), np.zeros((10, 10))):
        labels = DBSCAN(eps=0.5, metric="precomputed").fit(dist_matrix).labels_
        assert len(set(labels)) == 1
def test_dbscan_precomputed_metric_with_initial_rows_zero():
    """DBSCAN on a sparse precomputed distance matrix whose first two rows
    are entirely zero; those samples must come out as noise (-1)."""
    # sample matrix with initial two row all zero
    dense_distances = np.array(
        [
            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.1, 0.0, 0.0],
            [0.0, 0.0, 0.1, 0.1, 0.0, 0.0, 0.3],
            [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1],
            [0.0, 0.0, 0.0, 0.0, 0.3, 0.1, 0.0],
        ]
    )
    distances = sparse.csr_matrix(dense_distances)
    clusterer = DBSCAN(eps=0.2, metric="precomputed", min_samples=2)
    labels = clusterer.fit(distances).labels_
    assert_array_equal(labels, [-1, -1, 0, 0, 0, 1, 1])
# Each tuple is (constructor kwargs, expected exception type, expected
# error-message pattern passed to pytest.raises(match=...)).
@pytest.mark.parametrize(
    "params, err_type, err_msg",
    [
        ({"eps": -1.0}, ValueError, "eps == -1.0, must be > 0.0."),
        ({"eps": 0.0}, ValueError, "eps == 0.0, must be > 0.0."),
        ({"min_samples": 0}, ValueError, "min_samples == 0, must be >= 1."),
        (
            {"min_samples": 1.5},
            TypeError,
            "min_samples must be an instance of int, not float.",
        ),
        ({"min_samples": -2}, ValueError, "min_samples == -2, must be >= 1."),
        ({"leaf_size": 0}, ValueError, "leaf_size == 0, must be >= 1."),
        (
            {"leaf_size": 2.5},
            TypeError,
            "leaf_size must be an instance of int, not float.",
        ),
        ({"leaf_size": -3}, ValueError, "leaf_size == -3, must be >= 1."),
        ({"p": -2}, ValueError, "p == -2, must be >= 0.0."),
        (
            {"n_jobs": 2.5},
            TypeError,
            "n_jobs must be an instance of int, not float.",
        ),
    ],
)
def test_dbscan_params_validation(params, err_type, err_msg):
    """Check the parameters validation in `DBSCAN`."""
    # `X` is the module-level fixture dataset defined earlier in this file.
    with pytest.raises(err_type, match=err_msg):
        DBSCAN(**params).fit(X)
|
|
# Plugin: core
# Date: 20th December 2014
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Core Plugin"""
__version__ = "0.0.1"
__author__ = "James Mills, prologic at shortcircuit dot net dot au"
from os import getcwd
from uuid import uuid4 as uuid
from tempfile import NamedTemporaryFile
from pathlib import Path
from circuits.io import File
from circuits import handler, task
from circuits.net.events import close, write
from ..plugin import BasePlugin
from ..gophertypes import get_type
from ..utils import execute, exists, iterdir, resolvepath, which
from ..utils import is_dir, is_file, is_world_executable, is_world_readable
IGNORE_PATTERNS = ["CSV", "gophermap", "*.bak", "*~", ".*"]
class CorePlugin(BasePlugin):
    """Core Plugin.

    Serves gopher content from a configured root directory: directory
    listings, ``gophermap`` menu files, static files, and world-executable
    scripts whose output is streamed back to the client.
    """

    def init(self, server, config):
        # circuits-style initializer (runs on component registration).
        super(CorePlugin, self).init(server, config)
        # Document root; falls back to the current working directory.
        self.rootdir = Path(self.config.get("rootdir", getcwd()))

    def handle_gophermap(self, req, res, gophermap):  # noqa
        # XXX: C901 McCabe complexity 11
        """Render a ``gophermap`` menu file into the response.

        Line prefixes: ``.`` stop, ``#`` comment, ``!`` title, ``=`` include
        (another gophermap or the output of an executable), ``-`` add an
        ignore pattern, ``*`` list the containing directory, ``3`` error
        line; tab-separated lines become links, anything else plain text.
        """
        ignore = []  # patterns collected from "-" lines, used by "*"
        with gophermap.open("r", encoding=self.server.encoding) as f:
            for line in f:
                line = line.strip("\r\n")
                if not line:
                    res.add_line()
                elif line == ".":
                    # Stop Processing
                    return
                elif line[0] == "#":
                    # Ignore Comments
                    continue
                elif line[0] == "!":
                    res.add_title(line[1:])
                elif line[0] == "=":
                    # Include: first token decides whether the target is
                    # rooted at the server root ("/...") or relative to the
                    # current gophermap's directory.
                    arg = line[1:].split(" ", 1)[0]
                    if arg and arg[0] == "/":
                        path = resolvepath(self.rootdir, arg)
                    else:
                        path = resolvepath(gophermap.parent, arg)
                    prog = which(arg)
                    if prog is not None or is_world_executable(path):
                        # Run the command and recursively render its output
                        # as a gophermap (temp file lives only inside the
                        # `with` block).
                        with NamedTemporaryFile() as f:
                            f.write(execute(req, res, line[1:], cwd=str(gophermap.parent)))
                            f.seek(0)
                            self.handle_gophermap(req, res, Path(f.name))
                    elif is_file(path):
                        self.handle_gophermap(req, res, path)
                    else:
                        res.error = (404, "Resource not found!")
                elif line[0] == "-":
                    ignore.append(line[1:])
                elif line == "*":
                    # List the directory containing this gophermap and stop.
                    path = gophermap.parent
                    self.handle_directory(req, res, path, ignore=ignore)
                    return
                elif line[0] == "3":
                    parts = line.split("\t")
                    res.add_error(parts[0][1:])
                elif "\t" in line:
                    # Link line: "Tname<TAB>selector<TAB>host<TAB>port";
                    # missing trailing fields are padded with None.
                    parts = line.split("\t")
                    if len(parts) < 4:
                        parts += [None] * (4 - len(parts))
                    type_name, selector, host, port = parts
                    type, name = type_name[0], type_name[1:]
                    selector = selector or name
                    isrelative = selector and selector[0] != "/"
                    islocal = host in (None, req.server.host)
                    isurl = type == "h" and selector[:4].lower() == "url:"
                    if islocal and isrelative and (not isurl):
                        # Anchor relative local selectors under the selector
                        # of the request being served.
                        slash = "" if req.selector[-1] == "/" else "/"
                        selector = "{}{}{}".format(req.selector, slash, selector or name)
                    res.add_link(type, name, selector, host, port)
                else:
                    res.add_text(line)

    def handle_directory(self, req, res, path, ignore=None):
        """List a directory as gopher links, honoring ignore patterns from
        IGNORE_PATTERNS, the caller, and an optional ``.gopherignore`` file."""
        ignore_patterns = IGNORE_PATTERNS[:]
        if ignore is not None:
            ignore_patterns += ignore
        gopherignore = path.joinpath(".gopherignore")
        if is_file(gopherignore):
            with gopherignore.open("r") as f:
                for line in f:
                    ignore_patterns.append(line.strip())
        if path != self.rootdir:
            # Parent-directory ("..") link, except at the document root.
            type, name = "1", ".."
            selector = "/".join(req.selector.rstrip("/").split("/")[:-1]) or "/"
            res.add_link(type, name, selector)
        for p in iterdir(path):
            if any((p.match(pattern) for pattern in ignore_patterns)):
                continue
            type = get_type(p)
            name = p.name
            selector = p.name
            slash = "" if req.selector[-1] == "/" else "/"
            selector = "{}{}{}".format(req.selector, slash, selector)
            res.add_link(type, name, selector)

    def handle_file(self, req, res, path):
        """Stream a regular file back to the client via a circuits File
        component registered under a fresh per-transfer channel."""
        if not is_world_readable(path):
            res.error = (403, "Resource Forbidden")
            return
        res.size = path.stat().st_size
        res.stream = True
        channel = uuid()
        filename = str(path)
        self.server.streams[channel] = (req, File(filename, channel=channel).register(self))

    def handle_executable(self, req, res, script, *args):
        """Run a world-executable script on the worker pool and stream its
        output; extra ``args`` become part of the command line."""
        if args:
            # Strip the trailing arg components off the selector to recover
            # the script's own path for the CGI-style environment.
            req.environ["SCRIPT_NAME"] = "/".join(req.selector.split("/")[:-(len(args))])
        res.stream = True
        args = " ".join((str(script),) + args)
        self.fire(task(execute, req, res, args, cwd=str(script.parent)), "workers")

    @handler("caps")
    def on_caps(self, caps):
        # Advertise path-handling capabilities (Caps.txt support).
        # XXX: Make this depend on the environment
        caps.add("PathDelimeter", "/")
        caps.add("PathIdentity", ".")
        caps.add("PathParent", "..")
        caps.add("PathParentDouble", "FALSE")
        caps.add("PathKeepPreDelimeter", "FALSE")

    @handler("task_success", channel="workers")
    def on_task_success(self, evt, val):
        """Write a finished worker task's output to the client and close."""
        req, res = evt.args[1:3]
        self.fire(write(req.sock, val))
        self.fire(close(req.sock))

    @handler("request")  # noqa
    def on_request(self, event, req, res):
        # XXX: C901 McCabe complexity 10
        """Dispatch an incoming request to the directory / gophermap /
        executable / file handlers, with ``~user`` home-directory support."""
        parts = req.selector.split("/")
        if len(parts) > 1 and parts[1] and parts[1][0] == "~":
            # ~user selectors resolve inside the user's configured userdir.
            root = Path(self.server.homedir.joinpath(parts[1][1:], self.server.userdir))
            path = resolvepath(root, "/".join(parts[2:]))
        else:
            root = self.server.rootdir
            path = resolvepath(root, req.selector)
        if not exists(path):
            # Missing path: the trailing component may be an argument to an
            # executable gophermap or executable parent.
            parent = path.parent
            gophermap = parent.joinpath("gophermap")
            if is_world_executable(gophermap):
                self.handle_executable(req, res, gophermap, path.name)
            elif is_world_executable(parent) and not is_dir(parent):
                self.handle_executable(req, res, parent, path.name)
            else:
                res.error = (404, "Resource not found!")
        elif is_dir(path):
            gophermap = path.joinpath("gophermap")
            if is_world_executable(gophermap):
                self.handle_executable(req, res, gophermap)
            elif is_file(gophermap):
                self.handle_gophermap(req, res, gophermap)
            else:
                self.handle_directory(req, res, path)
        elif is_world_executable(path):
            self.handle_executable(req, res, path)
        else:
            # "<name>.gophermap" beside a file overrides serving the file.
            gophermap = path.with_suffix(".gophermap")
            if is_file(gophermap):
                self.handle_gophermap(req, res, gophermap)
            else:
                self.handle_file(req, res, path)
|
|
#
# Copyright (c) 2015 Advanced Micro Devices, Inc.
# All rights reserved.
#
# For use for simulation and test purposes only
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: Sooraj Puthoor
#
import optparse, os, re
import math
import glob
import inspect
import m5
from m5.objects import *
from m5.util import addToPath
addToPath('../')
from ruby import Ruby
from common import Options
from common import Simulation
from common import GPUTLBOptions, GPUTLBConfig
########################## Script Options ########################
def setOption(parser, opt_str, value = 1):
    """Set the value of an already-registered option on the parser.

    Assumes parser.parse_args() has already run, so parser.values exists.
    Raises Exception if opt_str was never added to the parser.
    """
    # check to make sure the option actually exists
    if not parser.has_option(opt_str):
        raise Exception("cannot find %s in list of possible options" % opt_str)

    opt = parser.get_option(opt_str)
    # set the value; setattr replaces the previous exec()-based string
    # interpolation, which silently mis-evaluated any value whose str()
    # is not a valid Python literal (e.g. plain strings)
    setattr(parser.values, opt.dest, value)
def getOption(parser, opt_str):
    """Return the current value of an already-registered option.

    Assumes parser.parse_args() has already run, so parser.values exists.
    Raises Exception if opt_str was never added to the parser.
    """
    # check to make sure the option actually exists
    if not parser.has_option(opt_str):
        raise Exception("cannot find %s in list of possible options" % opt_str)

    opt = parser.get_option(opt_str)
    # get the value; the previous exec("return_value = ...") relied on
    # exec creating a local variable, which does not work on Python 3
    # (NameError) -- getattr is correct on both Python 2 and 3
    return getattr(parser.values, opt.dest)
# Adding script options.  Common gem5 SE-mode options come from Options;
# the rest below are GPU/APU-model specific knobs.
parser = optparse.OptionParser()
Options.addCommonOptions(parser)
Options.addSEOptions(parser)

parser.add_option("--cpu-only-mode", action="store_true", default=False,
                  help="APU mode. Used to take care of problems in "\
                  "Ruby.py while running APU protocols")
parser.add_option("-k", "--kernel-files",
                  help="file(s) containing GPU kernel code (colon separated)")
parser.add_option("-u", "--num-compute-units", type="int", default=1,
                  help="number of GPU compute units"),
parser.add_option("--num-cp", type="int", default=0,
                  help="Number of GPU Command Processors (CP)")
parser.add_option("--benchmark-root", help="Root of benchmark directory tree")

# not super important now, but to avoid putting the number 4 everywhere, make
# it an option/knob
parser.add_option("--cu-per-sqc", type="int", default=4, help="number of CUs" \
                  "sharing an SQC (icache, and thus icache TLB)")
parser.add_option("--simds-per-cu", type="int", default=4, help="SIMD units" \
                  "per CU")
parser.add_option("--wf-size", type="int", default=64,
                  help="Wavefront size(in workitems)")
parser.add_option("--sp-bypass-path-length", type="int", default=4, \
                  help="Number of stages of bypass path in vector ALU for Single Precision ops")
parser.add_option("--dp-bypass-path-length", type="int", default=4, \
                  help="Number of stages of bypass path in vector ALU for Double Precision ops")
# issue period per SIMD unit: number of cycles before issuing another vector
parser.add_option("--issue-period", type="int", default=4, \
                  help="Number of cycles per vector instruction issue period")
parser.add_option("--glbmem-wr-bus-width", type="int", default=32, \
                  help="VGPR to Coalescer (Global Memory) data bus width in bytes")
parser.add_option("--glbmem-rd-bus-width", type="int", default=32, \
                  help="Coalescer to VGPR (Global Memory) data bus width in bytes")
# Currently we only support 1 local memory pipe
parser.add_option("--shr-mem-pipes-per-cu", type="int", default=1, \
                  help="Number of Shared Memory pipelines per CU")
# Currently we only support 1 global memory pipe
parser.add_option("--glb-mem-pipes-per-cu", type="int", default=1, \
                  help="Number of Global Memory pipelines per CU")
parser.add_option("--wfs-per-simd", type="int", default=10, help="Number of " \
                  "WF slots per SIMD")
parser.add_option("--vreg-file-size", type="int", default=2048,
                  help="number of physical vector registers per SIMD")
parser.add_option("--bw-scalor", type="int", default=0,
                  help="bandwidth scalor for scalability analysis")
parser.add_option("--CPUClock", type="string", default="2GHz",
                  help="CPU clock")
parser.add_option("--GPUClock", type="string", default="1GHz",
                  help="GPU clock")
parser.add_option("--cpu-voltage", action="store", type="string",
                  default='1.0V',
                  help = """CPU voltage domain""")
parser.add_option("--gpu-voltage", action="store", type="string",
                  default='1.0V',
                  help = """CPU voltage domain""")
parser.add_option("--CUExecPolicy", type="string", default="OLDEST-FIRST",
                  help="WF exec policy (OLDEST-FIRST, ROUND-ROBIN)")
parser.add_option("--xact-cas-mode", action="store_true",
                  help="enable load_compare mode (transactional CAS)")
parser.add_option("--SegFaultDebug",action="store_true",
                 help="checks for GPU seg fault before TLB access")
parser.add_option("--FunctionalTLB",action="store_true",
                 help="Assumes TLB has no latency")
parser.add_option("--LocalMemBarrier",action="store_true",
                 help="Barrier does not wait for writethroughs to complete")
parser.add_option("--countPages", action="store_true",
                 help="Count Page Accesses and output in per-CU output files")
parser.add_option("--TLB-prefetch", type="int", help = "prefetch depth for"\
                  "TLBs")
parser.add_option("--pf-type", type="string", help="type of prefetch: "\
                  "PF_CU, PF_WF, PF_PHASE, PF_STRIDE")
parser.add_option("--pf-stride", type="int", help="set prefetch stride")
parser.add_option("--numLdsBanks", type="int", default=32,
                  help="number of physical banks per LDS module")
parser.add_option("--ldsBankConflictPenalty", type="int", default=1,
                  help="number of cycles per LDS bank conflict")
parser.add_option('--fast-forward-pseudo-op', action='store_true',
                  help = 'fast forward using kvm until the m5_switchcpu'
                  ' pseudo-op is encountered, then switch cpus. subsequent'
                  ' m5_switchcpu pseudo-ops will toggle back and forth')
parser.add_option('--outOfOrderDataDelivery', action='store_true',
                  default=False, help='enable OoO data delivery in the GM'
                  ' pipeline')

# Ruby protocol options and GPU TLB hierarchy options are registered by
# their own modules.
Ruby.define_options(parser)

#add TLB options to the parser
GPUTLBOptions.tlb_options(parser)

(options, args) = parser.parse_args()
# The GPU cache coherence protocols only work with the backing store
setOption(parser, "--access-backing-store")

# if benchmark root is specified explicitly, that overrides the search path
if options.benchmark_root:
    benchmark_path = [options.benchmark_root]
else:
    # Set default benchmark search path to current dir
    benchmark_path = ['.']

########################## Sanity Check ########################

# Currently the gpu model requires ruby
if buildEnv['PROTOCOL'] == 'None':
    fatal("GPU model requires ruby")

# Currently the gpu model requires only timing or detailed CPU
if not (options.cpu_type == "timing" or
   options.cpu_type == "detailed"):
    fatal("GPU model requires timing or detailed CPU")

# This file can support multiple compute units
assert(options.num_compute_units >= 1)

# Currently, the sqc (I-Cache of GPU) is shared by
# multiple compute units(CUs). The protocol works just fine
# even if sqc is not shared. Overriding this option here
# so that the user need not explicitly set this (assuming
# sharing sqc is the common usage)
n_cu = options.num_compute_units
# One SQC serves every `cu_per_sqc` CUs, rounded up.
num_sqc = int(math.ceil(float(n_cu) / options.cu_per_sqc))
options.num_sqc = num_sqc # pass this to Ruby
########################## Creating the GPU system ########################
# shader is the GPU
shader = Shader(n_wf = options.wfs_per_simd,
                clk_domain = SrcClockDomain(
                    clock = options.GPUClock,
                    voltage_domain = VoltageDomain(
                        voltage = options.gpu_voltage)))

# GPU_RfO(Read For Ownership) implements SC/TSO memory model.
# Other GPU protocols implement release consistency at GPU side.
# So, all GPU protocols other than GPU_RfO should make their writes
# visible to the global memory and should read from global memory
# during kernal boundary. The pipeline initiates(or do not initiate)
# the acquire/release operation depending on this impl_kern_boundary_sync
# flag. This flag=true means pipeline initiates a acquire/release operation
# at kernel boundary.
if buildEnv['PROTOCOL'] == 'GPU_RfO':
    shader.impl_kern_boundary_sync = False
else:
    shader.impl_kern_boundary_sync = True

# Switching off per-lane TLB by default
per_lane = False
if options.TLB_config == "perLane":
    per_lane = True
# List of compute units; one GPU can have multiple compute units
compute_units = []
for i in xrange(n_cu):
    # Each CU is parameterized entirely from the command-line options above.
    compute_units.append(ComputeUnit(cu_id = i, perLaneTLB = per_lane,
                                     num_SIMDs = options.simds_per_cu,
                                     wfSize = options.wf_size,
                                     spbypass_pipe_length = options.sp_bypass_path_length,
                                     dpbypass_pipe_length = options.dp_bypass_path_length,
                                     issue_period = options.issue_period,
                                     coalescer_to_vrf_bus_width = \
                                     options.glbmem_rd_bus_width,
                                     vrf_to_coalescer_bus_width = \
                                     options.glbmem_wr_bus_width,
                                     num_global_mem_pipes = \
                                     options.glb_mem_pipes_per_cu,
                                     num_shared_mem_pipes = \
                                     options.shr_mem_pipes_per_cu,
                                     n_wf = options.wfs_per_simd,
                                     execPolicy = options.CUExecPolicy,
                                     xactCasMode = options.xact_cas_mode,
                                     debugSegFault = options.SegFaultDebug,
                                     functionalTLB = options.FunctionalTLB,
                                     localMemBarrier = options.LocalMemBarrier,
                                     countPages = options.countPages,
                                     localDataStore = \
                                     LdsState(banks = options.numLdsBanks,
                                              bankConflictPenalty = \
                                              options.ldsBankConflictPenalty),
                                     out_of_order_data_delivery =
                                     options.outOfOrderDataDelivery))
    # Populate this CU with wavefront slots (one per SIMD x WF slot) and
    # one vector register file per SIMD unit.
    wavefronts = []
    vrfs = []
    for j in xrange(options.simds_per_cu):
        for k in xrange(shader.n_wf):
            wavefronts.append(Wavefront(simdId = j, wf_slot_id = k,
                                        wfSize = options.wf_size))
        vrfs.append(VectorRegisterFile(simd_id=j,
                                       num_regs_per_simd=options.vreg_file_size))
    compute_units[-1].wavefronts = wavefronts
    compute_units[-1].vector_register_file = vrfs
    if options.TLB_prefetch:
        compute_units[-1].prefetch_depth = options.TLB_prefetch
        compute_units[-1].prefetch_prev_type = options.pf_type

    # attach the LDS and the CU to the bus (actually a Bridge)
    compute_units[-1].ldsPort = compute_units[-1].ldsBus.slave
    compute_units[-1].ldsBus.master = compute_units[-1].localDataStore.cuPort

# Attach compute units to GPU
shader.CUs = compute_units
########################## Creating the CPU system ########################
# (A redundant self-assignment of options.num_cpus was removed here.)

# The shader core will be whatever is after the CPU cores are accounted for
shader_idx = options.num_cpus

# The command processor will be whatever is after the shader is accounted for
cp_idx = shader_idx + 1
cp_list = []

# List of CPUs
cpu_list = []

CpuClass, mem_mode = Simulation.getCPUClass(options.cpu_type)
if CpuClass == AtomicSimpleCPU:
    fatal("AtomicSimpleCPU is not supported")
if mem_mode != 'timing':
    fatal("Only the timing memory mode is supported")
shader.timing = True

if options.fast_forward and options.fast_forward_pseudo_op:
    fatal("Cannot fast-forward based both on the number of instructions and"
          " on pseudo-ops")
fast_forward = options.fast_forward or options.fast_forward_pseudo_op

if fast_forward:
    # Fast-forward with KVM CPUs first; the "future" (detailed) CPUs are
    # created below and switched in at the handover point.
    FutureCpuClass, future_mem_mode = CpuClass, mem_mode

    CpuClass = X86KvmCPU
    mem_mode = 'atomic_noncaching'
    # Leave shader.timing untouched, because its value only matters at the
    # start of the simulation and because we require switching cpus
    # *before* the first kernel launch.

    future_cpu_list = []

    # Initial CPUs to be used during fast-forwarding.
    for i in xrange(options.num_cpus):
        cpu = CpuClass(cpu_id = i,
                       clk_domain = SrcClockDomain(
                           clock = options.CPUClock,
                           voltage_domain = VoltageDomain(
                               voltage = options.cpu_voltage)))
        cpu_list.append(cpu)

        if options.fast_forward:
            cpu.max_insts_any_thread = int(options.fast_forward)

if fast_forward:
    MainCpuClass = FutureCpuClass
else:
    MainCpuClass = CpuClass

# CPs to be used throughout the simulation.
for i in xrange(options.num_cp):
    cp = MainCpuClass(cpu_id = options.num_cpus + i,
                      clk_domain = SrcClockDomain(
                          clock = options.CPUClock,
                          voltage_domain = VoltageDomain(
                              voltage = options.cpu_voltage)))
    cp_list.append(cp)

# Main CPUs (to be used after fast-forwarding if fast-forwarding is specified).
for i in xrange(options.num_cpus):
    cpu = MainCpuClass(cpu_id = i,
                       clk_domain = SrcClockDomain(
                           clock = options.CPUClock,
                           voltage_domain = VoltageDomain(
                               voltage = options.cpu_voltage)))
    if fast_forward:
        cpu.switched_out = True
        future_cpu_list.append(cpu)
    else:
        cpu_list.append(cpu)

########################## Creating the GPU dispatcher ########################
# Dispatcher dispatches work from host CPU to GPU
host_cpu = cpu_list[0]
dispatcher = GpuDispatcher()
########################## Create and assign the workload ########################
# Check for rel_path in elements of base_list using test, returning
# the first full path that satisfies test
def find_path(base_list, rel_path, test):
    # Search base_list in order and return the first base/rel_path whose
    # joined path satisfies the `test` predicate; abort the simulation
    # (via fatal) if nothing matches.
    for base in base_list:
        # base could be None if environment var not set
        if not base:
            continue
        candidate = os.path.join(base, rel_path)
        if test(candidate):
            return candidate
    fatal("%s not found in %s" % (rel_path, base_list))
def find_file(base_list, rel_path):
    # Convenience wrapper: locate rel_path as a regular file under one of
    # the directories in base_list (fatal error if not found).
    return find_path(base_list, rel_path, os.path.isfile)
# Resolve the benchmark executable on the search path.
executable = find_path(benchmark_path, options.cmd, os.path.exists)
# it's common for a benchmark to be in a directory with the same
# name as the executable, so we handle that automatically
if os.path.isdir(executable):
    benchmark_path = [executable]
    executable = find_file(benchmark_path, options.cmd)
if options.kernel_files:
    kernel_files = [find_file(benchmark_path, f)
                    for f in options.kernel_files.split(':')]
else:
    # if kernel_files is not set, see if there's a unique .asm file
    # in the same directory as the executable
    kernel_path = os.path.dirname(executable)
    kernel_files = glob.glob(os.path.join(kernel_path, '*.asm'))
    if kernel_files:
        print "Using GPU kernel code file(s)", ",".join(kernel_files)
    else:
        fatal("Can't locate kernel code (.asm) in " + kernel_path)

# OpenCL driver
driver = ClDriver(filename="hsa", codefile=kernel_files)

# Every CPU runs the same workload; CPs share the host CPU's workload.
for cpu in cpu_list:
    cpu.workload = LiveProcess(executable = executable,
                               cmd = [options.cmd] + options.options.split(),
                               drivers = [driver])
for cp in cp_list:
    cp.workload = host_cpu.workload

if fast_forward:
    # Mirror the workloads onto the post-switch (detailed) CPUs.
    for i in xrange(len(future_cpu_list)):
        future_cpu_list[i].workload = cpu_list[i].workload
########################## Create the overall system ########################
# List of CPUs that must be switched when moving between KVM and simulation
if fast_forward:
    switch_cpu_list = \
        [(cpu_list[i], future_cpu_list[i]) for i in xrange(options.num_cpus)]

# Full list of processing cores in the system. Note that
# dispatcher is also added to cpu_list although it is
# not a processing element
cpu_list = cpu_list + [shader] + cp_list + [dispatcher]

# creating the overall system
# notice the cpu list is explicitly added as a parameter to System
system = System(cpu = cpu_list,
                mem_ranges = [AddrRange(options.mem_size)],
                cache_line_size = options.cacheline_size,
                mem_mode = mem_mode)
if fast_forward:
    system.future_cpu = future_cpu_list
system.voltage_domain = VoltageDomain(voltage = options.sys_voltage)
system.clk_domain = SrcClockDomain(clock = options.sys_clock,
                                   voltage_domain = system.voltage_domain)

if fast_forward:
    # KVM-based fast-forwarding needs hardware virtualization and x86.
    have_kvm_support = 'BaseKvmCPU' in globals()
    if have_kvm_support and buildEnv['TARGET_ISA'] == "x86":
        system.vm = KvmVM()
        for i in xrange(len(host_cpu.workload)):
            host_cpu.workload[i].useArchPT = True
            host_cpu.workload[i].kvmInSE = True
    else:
        fatal("KvmCPU can only be used in SE mode with x86")

# configure the TLB hierarchy
GPUTLBConfig.config_tlb_hierarchy(options, system, shader_idx)

# create Ruby system
system.piobus = IOXBar(width=32, response_latency=0,
                       frontend_latency=0, forward_latency=0)
Ruby.create_system(options, None, system)
system.ruby.clk_domain = SrcClockDomain(clock = options.ruby_clock,
                                        voltage_domain = system.voltage_domain)

# attach the CPU ports to Ruby
for i in range(options.num_cpus):
    ruby_port = system.ruby._cpu_ports[i]

    # Create interrupt controller
    system.cpu[i].createInterruptController()

    # Connect cache port's to ruby
    system.cpu[i].icache_port = ruby_port.slave
    system.cpu[i].dcache_port = ruby_port.slave

    ruby_port.mem_master_port = system.piobus.slave
    if buildEnv['TARGET_ISA'] == "x86":
        system.cpu[i].interrupts[0].pio = system.piobus.master
        system.cpu[i].interrupts[0].int_master = system.piobus.slave
        system.cpu[i].interrupts[0].int_slave = system.piobus.master
        if fast_forward:
            # KVM page-table walkers go straight into the Ruby ports.
            system.cpu[i].itb.walker.port = ruby_port.slave
            system.cpu[i].dtb.walker.port = ruby_port.slave

# attach CU ports to Ruby
# Because of the peculiarities of the CP core, you may have 1 CPU but 2
# sequencers and thus 2 _cpu_ports created. Your GPUs shouldn't be
# hooked up until after the CP. To make this script generic, figure out
# the index as below, but note that this assumes there is one sequencer
# per compute unit and one sequencer per SQC for the math to work out
# correctly.
gpu_port_idx = len(system.ruby._cpu_ports) \
               - options.num_compute_units - options.num_sqc
gpu_port_idx = gpu_port_idx - options.num_cp * 2

wavefront_size = options.wf_size
for i in xrange(n_cu):
    # The pipeline issues wavefront_size number of uncoalesced requests
    # in one GPU issue cycle. Hence wavefront_size mem ports.
    for j in xrange(wavefront_size):
        system.cpu[shader_idx].CUs[i].memory_port[j] = \
            system.ruby._cpu_ports[gpu_port_idx].slave[j]
    gpu_port_idx += 1

# SQC (instruction cache) ports: one SQC is shared by cu_per_sqc CUs.
for i in xrange(n_cu):
    if i > 0 and not i % options.cu_per_sqc:
        print "incrementing idx on ", i
        gpu_port_idx += 1
    system.cpu[shader_idx].CUs[i].sqc_port = \
        system.ruby._cpu_ports[gpu_port_idx].slave
gpu_port_idx = gpu_port_idx + 1

# attach CP ports to Ruby
for i in xrange(options.num_cp):
    system.cpu[cp_idx].createInterruptController()
    system.cpu[cp_idx].dcache_port = \
        system.ruby._cpu_ports[gpu_port_idx + i * 2].slave
    system.cpu[cp_idx].icache_port = \
        system.ruby._cpu_ports[gpu_port_idx + i * 2 + 1].slave
    system.cpu[cp_idx].interrupts[0].pio = system.piobus.master
    system.cpu[cp_idx].interrupts[0].int_master = system.piobus.slave
    system.cpu[cp_idx].interrupts[0].int_slave = system.piobus.master
    cp_idx = cp_idx + 1

# connect dispatcher to the system.piobus
dispatcher.pio = system.piobus.master
dispatcher.dma = system.piobus.slave

################# Connect the CPU and GPU via GPU Dispatcher ###################
# CPU rings the GPU doorbell to notify a pending task
# using this interface.
# And GPU uses this interface to notify the CPU of task completion
# The communication happens through emulated driver.

# Note this implicit setting of the cpu_pointer, shader_pointer and tlb array
# parameters must be after the explicit setting of the System cpu list
if fast_forward:
    shader.cpu_pointer = future_cpu_list[0]
    dispatcher.cpu = future_cpu_list[0]
else:
    shader.cpu_pointer = host_cpu
    dispatcher.cpu = host_cpu
dispatcher.shader_pointer = shader
dispatcher.cl_driver = driver
########################## Start simulation ########################

root = Root(system=system, full_system=False)
m5.ticks.setGlobalFrequency('1THz')
if options.abs_max_tick:
    maxtick = options.abs_max_tick
else:
    maxtick = m5.MaxTick

# Benchmarks support work item annotations
Simulation.setWorkCountOptions(system, options)

# Checkpointing is not supported by APU model
if (options.checkpoint_dir != None or
    options.checkpoint_restore != None):
    fatal("Checkpointing not supported by apu model")

checkpoint_dir = None
m5.instantiate(checkpoint_dir)

# Map workload to this address space
host_cpu.workload[0].map(0x10000000, 0x200000000, 4096)

if options.fast_forward:
    print "Switch at instruction count: %d" % \
        cpu_list[0].max_insts_any_thread

exit_event = m5.simulate(maxtick)

if options.fast_forward:
    # Single switch: KVM CPUs run until the instruction count is reached,
    # then the detailed CPUs take over.
    if exit_event.getCause() == "a thread reached the max instruction count":
        m5.switchCpus(system, switch_cpu_list)
        print "Switched CPUS @ tick %s" % (m5.curTick())
        m5.stats.reset()
        exit_event = m5.simulate(maxtick - m5.curTick())
elif options.fast_forward_pseudo_op:
    # Toggle between KVM and detailed CPUs on every m5_switchcpu pseudo-op.
    while exit_event.getCause() == "switchcpu":
        # If we are switching *to* kvm, then the current stats are meaningful
        # Note that we don't do any warmup by default
        if type(switch_cpu_list[0][0]) == FutureCpuClass:
            print "Dumping stats..."
            m5.stats.dump()
        m5.switchCpus(system, switch_cpu_list)
        print "Switched CPUS @ tick %s" % (m5.curTick())
        m5.stats.reset()
        # This lets us switch back and forth without keeping a counter
        switch_cpu_list = [(x[1], x[0]) for x in switch_cpu_list]
        exit_event = m5.simulate(maxtick - m5.curTick())

print "Ticks:", m5.curTick()
print 'Exiting because ', exit_event.getCause()
# NOTE(review): `sys` is not imported at the top of this file -- it appears
# to rely on a star import providing it; verify before relying on this exit.
sys.exit(exit_event.getCode())
|
|
#!/usr/bin/env python3
import asyncio
import functools
import logging
import traceback
from thrift.server.TServer import TServerEventHandler
from thrift.Thrift import (
TException,
TProcessor,
)
from thrift.util.async_common import (
AsyncioRpcConnectionContext,
FramedProtocol,
THeaderProtocol,
ThriftHeaderClientProtocolBase,
TReadWriteBuffer,
WrappedTransport,
)
__all__ = [
'ThriftAsyncServerFactory', 'ThriftClientProtocolFactory',
'ThriftServerProtocolFactory',
]
logger = logging.getLogger(__name__)
#
# Thrift server support
#
@asyncio.coroutine
def ThriftAsyncServerFactory(
    processor, *, interface=None, port=0, loop=None, nthreads=None, sock=None,
    backlog=100
):
    """
    ThriftAsyncServerFactory(processor) -> asyncio.Server

    Build an asyncio.Server around a Thrift processor. In the spirit of
    "there should be one obvious way to do it", only the new THeader
    protocol is supported.

    With `interface` left as None (the default) the server listens on all
    interfaces; with `port` left as 0 (the default) the OS allocates the
    port — enumerate the returned server's "sockets" attribute to discover
    it. When `loop` is omitted the default event loop is used; when
    `nthreads` is given, the loop's default executor is replaced by a
    thread pool of up to `nthreads` workers.

    Notes about the processor method handling:

    1. By default all methods are executed synchronously on the event loop,
       which can hurt performance if a single run takes long to process.
    2. Mark coroutines with @asyncio.coroutine if you wish to use
       "yield from" to call async services, schedule tasks with customized
       executors, etc.
    3. Mark methods with @run_on_thread to run them on the thread pool
       executor — unless you're accessing C extensions which free the GIL,
       this is not going to win you any performance.

    Use this to initialize multiple servers asynchronously::

        loop = asyncio.get_event_loop()
        servers = [
            ThriftAsyncServerFactory(handler1, port=9090, loop=loop),
            ThriftAsyncServerFactory(handler2, port=9091, loop=loop),
        ]
        loop.run_until_complete(asyncio.wait(servers))
        try:
            loop.run_forever()   # Servers are initialized now
        finally:
            for server in servers:
                server.close()
    """
    event_loop = loop if loop is not None else asyncio.get_event_loop()

    if not isinstance(processor, TProcessor):
        # Generated handler wrappers expose their processor class via
        # `_processor_type`; anything else is rejected as unsupported.
        try:
            processor = processor._processor_type(processor, loop=event_loop)
        except AttributeError:
            raise TypeError(
                "Unsupported processor type: {}".format(type(processor)),
            )

    if nthreads:
        from concurrent.futures import ThreadPoolExecutor
        event_loop.set_default_executor(
            ThreadPoolExecutor(max_workers=nthreads),
        )

    handler = TServerEventHandler()
    protocol_factory = ThriftServerProtocolFactory(
        processor, handler, event_loop,
    )
    server = yield from event_loop.create_server(
        protocol_factory,
        interface,
        port,
        sock=sock,
        backlog=backlog,
    )

    # Announce every listening socket to the event handler.
    for listening_socket in (server.sockets or ()):
        handler.preServe(listening_socket.getsockname())
    return server
def ThriftServerProtocolFactory(processor, server_event_handler, loop=None):
    """Return a protocol factory (suitable for `loop.create_server`) that
    builds ThriftHeaderServerProtocol instances bound to the given
    processor and server event handler."""
    factory = functools.partial(
        ThriftHeaderServerProtocol,
        processor,
        server_event_handler,
        loop,
    )
    return factory
class ThriftHeaderServerProtocol(FramedProtocol):
    """Server-side protocol: feeds each received frame through the Thrift
    processor and writes the serialized response back on the transport."""

    def __init__(self, processor, server_event_handler, loop=None):
        super().__init__(loop=loop)
        self.processor = processor
        self.server_event_handler = server_event_handler
        # Populated in connection_made() once the peer socket is known.
        self.server_context = None

    @asyncio.coroutine
    def message_received(self, frame):
        # Note: we are using a single `prot` for in and out so that
        # we can support legacy clients that only understand FRAMED.
        # The discovery of what the client supports happens in iprot's
        # transport so we have to reuse a single one here.
        buf = TReadWriteBuffer(frame)
        prot = THeaderProtocol(buf)

        try:
            yield from self.processor.process(
                prot, prot, self.server_context,
            )
            msg = buf.getvalue()
            if len(msg) > 0:
                self.transport.write(msg)
        except TException as e:
            # Thrift-level errors may still have produced a serialized
            # response in the buffer; send it if present.
            logger.warning("TException while processing request: %s", str(e))
            msg = buf.getvalue()
            if len(msg) > 0:
                self.transport.write(msg)
        except asyncio.CancelledError:
            self.transport.close()
        except BaseException as e:
            # Unexpected failure: log with traceback and drop the connection.
            logger.error("Exception while processing request: %s", str(e))
            logger.error(traceback.format_exc())
            self.transport.close()

    def connection_made(self, transport):
        self.transport = transport
        socket = self.transport.get_extra_info("socket")
        if socket is not None:
            self.server_context = AsyncioRpcConnectionContext(socket)
        self.server_event_handler.newConnection(self.server_context)

    def connection_lost(self, exc):
        self.server_event_handler.connectionDestroyed(self.server_context)
#
# Thrift client support
#
def ThriftClientProtocolFactory(
    client_class,
    loop=None,
    timeouts=None,
    client_type=None,
):
    """Return a zero-argument factory producing ThriftHeaderClientProtocol
    instances bound to `client_class` and the given loop/timeout/type
    settings."""
    return functools.partial(
        ThriftHeaderClientProtocol, client_class, loop, timeouts, client_type,
    )
class SenderTransport(WrappedTransport):
    """Outbound transport that drains a message queue onto the wrapped
    asyncio transport."""

    @asyncio.coroutine
    def _send(self):
        # Loops for the life of the connection: take each queued message,
        # let the base class reap finished producers, then write the
        # message to the underlying transport.
        # NOTE(review): _queue, _clean_producers and _trans are provided
        # by WrappedTransport — confirm against its definition.
        while True:
            msg = yield from self._queue.get()
            self._clean_producers()
            self._trans.write(msg)
class ThriftHeaderClientProtocol(ThriftHeaderClientProtocolBase):
    """asyncio flavor of the THeader client protocol."""

    @asyncio.coroutine
    def timeout_task(self, fname, seqid, delay):
        # Sleeps for `delay` seconds on this protocol's loop, then reports
        # the (fname, seqid) call as timed out via the base class.
        yield from asyncio.sleep(delay, loop=self.loop)
        self._handle_timeout(fname, seqid)

    def wrapAsyncioTransport(self, asyncio_transport):
        # Wrap the raw asyncio transport in the queue-draining sender.
        return SenderTransport(asyncio_transport, self, self.loop)
|
|
"""
txcons.py
Transaction Constructors for the blockchain.
"""
from collections import defaultdict
from asset import AssetTarget
from coloredcoinlib import (ColorSet, ColorTarget, SimpleColorValue,
ComposedTxSpec, OperationalTxSpec,
UNCOLORED_MARKER, OBColorDefinition,
InvalidColorIdError, ZeroSelectError)
from binascii import hexlify
import pycoin_txcons
import io
import math
class InsufficientFundsError(Exception):
    """Raised when coin selection cannot cover the requested amount."""
class InvalidTargetError(Exception):
    """Raised when a transaction target is missing or of the wrong type."""
class InvalidTransformationError(Exception):
    """Raised when a tx spec cannot be transformed into the requested kind."""
class BasicTxSpec(object):
    """Represents a really simple colored coin transaction.

    This transaction class has not yet been constructed, composed or
    signed; those steps are handled by other classes. Note this only
    supports a single asset.
    """

    def __init__(self, model):
        """Create a BasicTxSpec backed by wallet_model <model>."""
        self.model = model
        self.targets = []

    def add_target(self, asset_target):
        """Append an AssetTarget (amount + address) to this spec."""
        if not isinstance(asset_target, AssetTarget):
            raise InvalidTargetError("Not an asset target!")
        self.targets.append(asset_target)

    def is_monoasset(self):
        """True iff every target transfers the same asset."""
        if not self.targets:
            raise InvalidTargetError('Basic txs is empty!')
        first_asset = self.targets[0].get_asset()
        return not any(
            tgt.get_asset() != first_asset for tgt in self.targets
        )

    def is_monocolor(self):
        """True iff the spec is mono-asset and that asset has one color."""
        if not self.is_monoasset():
            return False
        color_ids = self.targets[0].get_asset().get_color_set().color_id_set
        return len(color_ids) == 1

    def make_operational_tx_spec(self, asset):
        """Convert this BasicTxSpec into a SimpleOperationalTxSpec for
        <asset>; only mono-color specs are supported."""
        if not self.is_monocolor():
            raise InvalidTransformationError('Tx spec type not supported!')
        op_tx_spec = SimpleOperationalTxSpec(self.model, asset)
        only_color_id = list(asset.get_color_set().color_id_set)[0]
        color_def = self.model.get_color_def(only_color_id)
        for tgt in self.targets:
            amount = SimpleColorValue(colordef=color_def,
                                      value=tgt.get_value())
            op_tx_spec.add_target(ColorTarget(tgt.get_address(), amount))
        return op_tx_spec
class BaseOperationalTxSpec(OperationalTxSpec):
    """Shared fee policy, dust threshold and coin-selection helpers for
    operational transaction specs."""

    def get_required_fee(self, tx_size):
        """Given a transaction that is of size <tx_size>,
        return the transaction fee in Satoshi that needs to be
        paid out to miners.
        """
        base_fee = 11000.0  # satoshi per 1000 bytes
        fee_value = math.ceil((tx_size * base_fee) / 1000)
        return SimpleColorValue(colordef=UNCOLORED_MARKER,
                                value=fee_value)

    def get_dust_threshold(self):
        """Outputs at or below this uncolored value are treated as dust."""
        return SimpleColorValue(colordef=UNCOLORED_MARKER, value=600)

    def _select_enough_coins(self, colordef, utxo_list, required_sum_fn):
        """Greedily accumulate utxos from <utxo_list> until their summed
        color value covers required_sum_fn(<selected so far>).

        Returns (selection, sum); raises InsufficientFundsError if the
        available utxos cannot cover the requirement.
        """
        ssum = SimpleColorValue(colordef=colordef, value=0)
        selection = []
        required_sum = None
        for utxo in utxo_list:
            ssum += SimpleColorValue.sum(utxo.colorvalues)
            selection.append(utxo)
            # BUGFIX: the required sum depends on the inputs selected so
            # far (the fee estimator charges per extra txin), not on every
            # available utxo — passing utxo_list overestimated the target.
            required_sum = required_sum_fn(selection)
            if ssum >= required_sum:
                return selection, ssum
        raise InsufficientFundsError('Not enough coins: %s requested, %s found!'
                                     % (required_sum, ssum))

    def _validate_select_coins_parameters(self, colorvalue, use_fee_estimator):
        """Sanity-check select_coins() arguments: no negative amounts, and
        a fee estimator may only be combined with uncolored coins."""
        fee = None
        if use_fee_estimator:
            fee = use_fee_estimator.estimate_required_fee()
        if not fee and colorvalue.get_value() < 0:
            raise Exception("Cannot select negative coins!")
        elif fee and (colorvalue + fee).get_value() < 0:
            raise Exception("Cannot select negative coins!")
        colordef = colorvalue.get_colordef()
        if colordef != UNCOLORED_MARKER and use_fee_estimator:
            msg = "Fee estimator can only be used with uncolored coins!"
            raise Exception(msg)
class SimpleOperationalTxSpec(BaseOperationalTxSpec):
    """Subclass of OperationalTxSpec which uses wallet model.
    Represents a transaction that's ready to be composed
    and then signed. The parent is an abstract class.
    """

    def __init__(self, model, asset):
        """Initialize a transaction that uses a wallet model
        <model> and transfers asset/color <asset>.
        """
        super(SimpleOperationalTxSpec, self).__init__()
        self.model = model
        self.targets = []  # ColorTarget instances (see add_target)
        self.asset = asset

    def add_target(self, color_target):
        """Add a ColorTarget <color_target> to the transaction
        """
        if not isinstance(color_target, ColorTarget):
            raise InvalidTargetError("Target is not an instance of ColorTarget!")
        self.targets.append(color_target)

    def get_targets(self):
        """Get a list of (receiving address, color_id, colorvalue)
        triplets representing all the targets for this tx.
        """
        return self.targets

    def get_change_addr(self, color_def):
        """Get an address associated with color definition <color_def>
        that is in the current wallet for receiving change.

        Raises InvalidColorIdError if <color_def> matches neither the
        uncolored marker nor an asset known to the wallet.
        """
        am = self.model.get_asset_definition_manager()
        wam = self.model.get_address_manager()
        color_id = color_def.color_id
        asset = am.get_asset_by_color_id(color_id)
        color_set = None
        if color_def == UNCOLORED_MARKER:
            # uncolored change goes to a color-id-0 (plain bitcoin) address
            color_set = ColorSet.from_color_ids(self.model.get_color_map(), [0])
        elif asset.get_color_set().has_color_id(color_id):
            color_set = asset.get_color_set()
        if color_set is None:
            raise InvalidColorIdError('Wrong color id!')
        aw = wam.get_change_address(color_set)
        return aw.get_address()

    def select_coins(self, colorvalue, use_fee_estimator=None):
        """Return a list of utxos and sum that corresponds to
        the colored coins identified by <color_def> of amount <colorvalue>
        that we'll be spending from our wallet.
        """
        self._validate_select_coins_parameters(colorvalue, use_fee_estimator)

        def required_sum_fn(selection):
            # The fee grows with the number of selected inputs, so the
            # required total is recomputed per candidate selection.
            if use_fee_estimator:
                return colorvalue + use_fee_estimator.estimate_required_fee(
                    extra_txins=len(selection))
            else:
                return colorvalue

        required_sum_0 = required_sum_fn([])
        if required_sum_0.get_value() == 0:
            # no coins need to be selected
            return [], required_sum_0
        colordef = colorvalue.get_colordef()
        color_id = colordef.get_color_id()
        cq = self.model.make_coin_query({"color_id_set": set([color_id])})
        utxo_list = cq.get_result()
        return self._select_enough_coins(colordef, utxo_list, required_sum_fn)
class RawTxSpec(object):
    """Represents a transaction which can be serialized.

    Wraps a pycoin transaction plus (optionally) the ComposedTxSpec it
    was built from; keeps the serialized bytes in ``self.tx_data``.
    """

    def __init__(self, model, pycoin_tx, composed_tx_spec=None):
        self.model = model
        self.pycoin_tx = pycoin_tx
        self.composed_tx_spec = composed_tx_spec
        self.update_tx_data()
        self.intent = None

    def get_intent(self):
        return self.intent

    def get_hex_txhash(self):
        """Return the tx hash as hex in reversed (display) byte order."""
        the_hash = self.pycoin_tx.hash()
        # BUGFIX: bytes.encode('hex') exists only on Python 2; use hexlify
        # for consistency with get_hex_tx_data() and Python 3 support.
        return hexlify(the_hash[::-1]).decode("utf8")

    def update_tx_data(self):
        """Updates serialized form of transaction.
        """
        s = io.BytesIO()
        self.pycoin_tx.stream(s)
        self.tx_data = s.getvalue()

    @classmethod
    def from_composed_tx_spec(cls, model, composed_tx_spec):
        """Construct and serialize a standard tx from <composed_tx_spec>."""
        testnet = model.is_testnet()
        tx = pycoin_txcons.construct_standard_tx(composed_tx_spec, testnet)
        return cls(model, tx, composed_tx_spec)

    @classmethod
    def from_tx_data(cls, model, tx_data):
        """Deserialize raw tx bytes and reconstruct its composed spec."""
        pycoin_tx = pycoin_txcons.deserialize(tx_data)
        composed_tx_spec = pycoin_txcons.reconstruct_composed_tx_spec(
            model, pycoin_tx)
        return cls(model, pycoin_tx, composed_tx_spec)

    def sign(self, utxo_list):
        """Sign the inputs spending <utxo_list> and refresh tx_data."""
        pycoin_txcons.sign_tx(
            self.pycoin_tx, utxo_list, self.model.is_testnet())
        self.update_tx_data()

    def get_tx_data(self):
        """Returns the signed transaction data.
        """
        return self.tx_data

    def get_hex_tx_data(self):
        """Returns the hex version of the signed transaction data.
        """
        return hexlify(self.tx_data).decode("utf8")

    def get_input_addresses(self):
        """Return wallet addresses for every txin, resolved via the
        blockchain state of the model."""
        ccc = self.model.ccc
        bs = self.model.get_blockchain_state()
        inputs = [ti.get_outpoint() for ti in self.composed_tx_spec.txins]
        raw_addrs = [bs.get_tx(tx).outputs[n].raw_address for tx, n in inputs]
        return [ccc.raw_to_address(raw) for raw in raw_addrs]
def compose_uncolored_tx(tx_spec):
    """Compose a plain (uncolored) bitcoin transaction from <tx_spec>:
    add the targets, select enough coins to cover them plus fees, and
    route any non-dust change back to our own change address."""
    composed = tx_spec.make_composed_tx_spec()
    targets = tx_spec.get_targets()
    composed.add_txouts(targets)
    total_out = ColorTarget.sum(targets)
    selected_utxos, selected_sum = tx_spec.select_coins(total_out, composed)
    composed.add_txins(selected_utxos)
    fee = composed.estimate_required_fee()
    change = selected_sum - total_out - fee
    # give ourselves the change
    if change > tx_spec.get_dust_threshold():
        composed.add_txout(value=change,
                           target_addr=tx_spec.get_change_addr(UNCOLORED_MARKER),
                           is_fee_change=True)
    return composed
class TransactionSpecTransformer(object):
    """An object that can transform one type of transaction into another.
    Essentially has the ability to take a transaction, compose it
    and sign it by returning the appropriate objects.

    The general flow of transaction types is this:
    BasicTxSpec -> SimpleOperationalTxSpec -> ComposedTxSpec -> SignedTxSpec
    "basic"     -> "operational"           -> "composed"     -> "signed"
    """

    def __init__(self, model, config):
        """Create a transaction transformer object for wallet_model <model>
        and a wallet configuration <config>
        """
        self.model = model
        # 'testnet' defaults to False when absent from the config
        self.testnet = config.get('testnet', False)

    def get_tx_composer(self, op_tx_spec):
        """Returns a function which is able to convert a given operational
        tx spec <op_tx_spec> into a composed tx spec, or None when no
        composer applies.
        """
        if op_tx_spec.is_monocolor():
            color_def = op_tx_spec.get_targets()[0].get_colordef()
            if color_def == UNCOLORED_MARKER:
                return compose_uncolored_tx
            else:
                return color_def.compose_tx_spec
        else:
            # grab the first color def and hope that its compose_tx_spec
            # will be able to handle it. if transaction has incompatible
            # colors, compose_tx_spec will throw an exception
            for target in op_tx_spec.get_targets():
                tgt_color_def = target.get_colordef()
                if tgt_color_def is UNCOLORED_MARKER:
                    continue
                else:
                    return tgt_color_def.compose_tx_spec
            return None

    def classify_tx_spec(self, tx_spec):
        """For a transaction <tx_spec>, returns a string that represents
        the type of transaction (basic, operational, composed, signed)
        that it is.
        """
        if isinstance(tx_spec, BasicTxSpec):
            return 'basic'
        elif isinstance(tx_spec, OperationalTxSpec):
            return 'operational'
        elif isinstance(tx_spec, ComposedTxSpec):
            return 'composed'
        elif isinstance(tx_spec, RawTxSpec):
            return 'signed'
        else:
            return None

    def transform_basic(self, tx_spec, target_spec_kind):
        """Takes a basic transaction <tx_spec> and returns a transaction
        of type <target_spec_kind> which is one of (operational,
        composed, signed).
        """
        if target_spec_kind in ['operational', 'composed', 'signed']:
            if tx_spec.is_monocolor():
                asset = tx_spec.targets[0].get_asset()
                operational_ts = tx_spec.make_operational_tx_spec(asset)
                # recurse: transform() walks the chain one step at a time
                return self.transform(operational_ts, target_spec_kind)
        msg = 'Do not know how to transform tx spec!'
        raise InvalidTransformationError(msg)

    def transform_operational(self, tx_spec, target_spec_kind):
        """Takes an operational transaction <tx_spec> and returns a
        transaction of type <target_spec_kind> which is one of
        (composed, signed).
        """
        if target_spec_kind in ['composed', 'signed']:
            composer = self.get_tx_composer(tx_spec)
            if composer:
                composed = composer(tx_spec)
                return self.transform(composed, target_spec_kind)
        msg = 'Do not know how to transform tx spec!'
        raise InvalidTransformationError(msg)

    def transform_composed(self, tx_spec, target_spec_kind):
        """Takes a SimpleComposedTxSpec <tx_spec> and returns
        a signed transaction. For now, <target_spec_kind> must
        equal "signed" or will throw an exception.
        """
        if target_spec_kind in ['signed']:
            rtxs = RawTxSpec.from_composed_tx_spec(self.model, tx_spec)
            rtxs.sign(tx_spec.get_txins())
            return rtxs
        msg = 'Do not know how to transform tx spec!'
        raise InvalidTransformationError(msg)

    def transform_signed(self, tx_spec, target_spec_kind):
        """This method is not yet implemented.
        """
        msg = 'Do not know how to transform tx spec!'
        raise InvalidTransformationError(msg)

    def transform(self, tx_spec, target_spec_kind):
        """Transform a transaction <tx_spec> into another type
        of transaction defined by <target_spec_kind> and returns it.
        Dispatches on the current kind; returns <tx_spec> unchanged when
        it is already of the requested kind.
        """
        spec_kind = self.classify_tx_spec(tx_spec)
        if spec_kind is None:
            raise InvalidTransformationError('Spec kind is not recognized!')
        if spec_kind == target_spec_kind:
            return tx_spec
        if spec_kind == 'basic':
            return self.transform_basic(tx_spec, target_spec_kind)
        elif spec_kind == 'operational':
            return self.transform_operational(tx_spec, target_spec_kind)
        elif spec_kind == 'composed':
            return self.transform_composed(tx_spec, target_spec_kind)
        elif spec_kind == 'signed':
            return self.transform_signed(tx_spec, target_spec_kind)
|
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import qrl.generated.qrlwallet_pb2 as qrlwallet__pb2
class WalletAPIStub(object):
    """//////////////////////////
    //////////////////////////
    //////////////////////////
    ////         API    ///////
    //////////////////////////
    //////////////////////////
    //////////////////////////

    This service describes the Wallet API
    """

    def __init__(self, channel):
        """Constructor.

        Args:
          channel: A grpc.Channel.
        """
        # NOTE: machine-generated by the gRPC protoc plugin from
        # qrlwallet.proto — do not hand-edit; regenerate instead.
        # Each attribute is a unary-unary callable for one RPC method.
        self.AddNewAddress = channel.unary_unary(
            '/qrl.WalletAPI/AddNewAddress',
            request_serializer=qrlwallet__pb2.AddNewAddressReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.AddNewAddressResp.FromString,
        )
        self.AddNewAddressWithSlaves = channel.unary_unary(
            '/qrl.WalletAPI/AddNewAddressWithSlaves',
            request_serializer=qrlwallet__pb2.AddNewAddressWithSlavesReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.AddNewAddressResp.FromString,
        )
        self.ListAddresses = channel.unary_unary(
            '/qrl.WalletAPI/ListAddresses',
            request_serializer=qrlwallet__pb2.ListAddressesReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.ListAddressesResp.FromString,
        )
        self.RemoveAddress = channel.unary_unary(
            '/qrl.WalletAPI/RemoveAddress',
            request_serializer=qrlwallet__pb2.RemoveAddressReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RemoveAddressResp.FromString,
        )
        self.IsValidAddress = channel.unary_unary(
            '/qrl.WalletAPI/IsValidAddress',
            request_serializer=qrlwallet__pb2.ValidAddressReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.ValidAddressResp.FromString,
        )
        self.EncryptWallet = channel.unary_unary(
            '/qrl.WalletAPI/EncryptWallet',
            request_serializer=qrlwallet__pb2.EncryptWalletReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.EncryptWalletResp.FromString,
        )
        self.LockWallet = channel.unary_unary(
            '/qrl.WalletAPI/LockWallet',
            request_serializer=qrlwallet__pb2.LockWalletReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.LockWalletResp.FromString,
        )
        self.UnlockWallet = channel.unary_unary(
            '/qrl.WalletAPI/UnlockWallet',
            request_serializer=qrlwallet__pb2.UnlockWalletReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.UnlockWalletResp.FromString,
        )
        self.GetRecoverySeeds = channel.unary_unary(
            '/qrl.WalletAPI/GetRecoverySeeds',
            request_serializer=qrlwallet__pb2.GetRecoverySeedsReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.GetRecoverySeedsResp.FromString,
        )
        self.GetWalletInfo = channel.unary_unary(
            '/qrl.WalletAPI/GetWalletInfo',
            request_serializer=qrlwallet__pb2.GetWalletInfoReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.GetWalletInfoResp.FromString,
        )
        self.RelayTransferTxn = channel.unary_unary(
            '/qrl.WalletAPI/RelayTransferTxn',
            request_serializer=qrlwallet__pb2.RelayTransferTxnReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayTransferTxnBySlave = channel.unary_unary(
            '/qrl.WalletAPI/RelayTransferTxnBySlave',
            request_serializer=qrlwallet__pb2.RelayTransferTxnBySlaveReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayMessageTxn = channel.unary_unary(
            '/qrl.WalletAPI/RelayMessageTxn',
            request_serializer=qrlwallet__pb2.RelayMessageTxnReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayMessageTxnBySlave = channel.unary_unary(
            '/qrl.WalletAPI/RelayMessageTxnBySlave',
            request_serializer=qrlwallet__pb2.RelayMessageTxnBySlaveReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayTokenTxn = channel.unary_unary(
            '/qrl.WalletAPI/RelayTokenTxn',
            request_serializer=qrlwallet__pb2.RelayTokenTxnReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayTokenTxnBySlave = channel.unary_unary(
            '/qrl.WalletAPI/RelayTokenTxnBySlave',
            request_serializer=qrlwallet__pb2.RelayTokenTxnBySlaveReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayTransferTokenTxn = channel.unary_unary(
            '/qrl.WalletAPI/RelayTransferTokenTxn',
            request_serializer=qrlwallet__pb2.RelayTransferTokenTxnReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelayTransferTokenTxnBySlave = channel.unary_unary(
            '/qrl.WalletAPI/RelayTransferTokenTxnBySlave',
            request_serializer=qrlwallet__pb2.RelayTransferTokenTxnBySlaveReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelaySlaveTxn = channel.unary_unary(
            '/qrl.WalletAPI/RelaySlaveTxn',
            request_serializer=qrlwallet__pb2.RelaySlaveTxnReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.RelaySlaveTxnBySlave = channel.unary_unary(
            '/qrl.WalletAPI/RelaySlaveTxnBySlave',
            request_serializer=qrlwallet__pb2.RelaySlaveTxnBySlaveReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.RelayTxnResp.FromString,
        )
        self.ChangePassphrase = channel.unary_unary(
            '/qrl.WalletAPI/ChangePassphrase',
            request_serializer=qrlwallet__pb2.ChangePassphraseReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.ChangePassphraseResp.FromString,
        )
        self.GetMiniTransactionsByAddress = channel.unary_unary(
            '/qrl.WalletAPI/GetMiniTransactionsByAddress',
            request_serializer=qrlwallet__pb2.MiniTransactionsByAddressReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.MiniTransactionsByAddressResp.FromString,
        )
        self.GetTransaction = channel.unary_unary(
            '/qrl.WalletAPI/GetTransaction',
            request_serializer=qrlwallet__pb2.TransactionReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.TransactionResp.FromString,
        )
        self.GetBalance = channel.unary_unary(
            '/qrl.WalletAPI/GetBalance',
            request_serializer=qrlwallet__pb2.BalanceReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.BalanceResp.FromString,
        )
        self.GetTotalBalance = channel.unary_unary(
            '/qrl.WalletAPI/GetTotalBalance',
            request_serializer=qrlwallet__pb2.TotalBalanceReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.TotalBalanceResp.FromString,
        )
        self.GetOTS = channel.unary_unary(
            '/qrl.WalletAPI/GetOTS',
            request_serializer=qrlwallet__pb2.OTSReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.OTSResp.FromString,
        )
        self.GetHeight = channel.unary_unary(
            '/qrl.WalletAPI/GetHeight',
            request_serializer=qrlwallet__pb2.HeightReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.HeightResp.FromString,
        )
        self.GetBlock = channel.unary_unary(
            '/qrl.WalletAPI/GetBlock',
            request_serializer=qrlwallet__pb2.BlockReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.BlockResp.FromString,
        )
        self.GetBlockByNumber = channel.unary_unary(
            '/qrl.WalletAPI/GetBlockByNumber',
            request_serializer=qrlwallet__pb2.BlockByNumberReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.BlockResp.FromString,
        )
        self.GetAddressFromPK = channel.unary_unary(
            '/qrl.WalletAPI/GetAddressFromPK',
            request_serializer=qrlwallet__pb2.AddressFromPKReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.AddressFromPKResp.FromString,
        )
        self.GetNodeInfo = channel.unary_unary(
            '/qrl.WalletAPI/GetNodeInfo',
            request_serializer=qrlwallet__pb2.NodeInfoReq.SerializeToString,
            response_deserializer=qrlwallet__pb2.NodeInfoResp.FromString,
        )
class WalletAPIServicer(object):
    """//////////////////////////
    //////////////////////////
    //////////////////////////
    ////         API    ///////
    //////////////////////////
    //////////////////////////
    //////////////////////////

    This service describes the Wallet API
    """

    # NOTE: machine-generated by the gRPC protoc plugin from
    # qrlwallet.proto — do not hand-edit; subclass and override the
    # methods below to implement the service.

    def AddNewAddress(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def AddNewAddressWithSlaves(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListAddresses(self, request, context):
        """rpc AddAddressFromSeed(AddAddressFromSeedReq) returns (AddAddressFromSeedResp);
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RemoveAddress(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def IsValidAddress(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def EncryptWallet(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def LockWallet(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UnlockWallet(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetRecoverySeeds(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetWalletInfo(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTransferTxn(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTransferTxnBySlave(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayMessageTxn(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayMessageTxnBySlave(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTokenTxn(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTokenTxnBySlave(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTransferTokenTxn(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelayTransferTokenTxnBySlave(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelaySlaveTxn(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def RelaySlaveTxnBySlave(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ChangePassphrase(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetMiniTransactionsByAddress(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetTransaction(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetBalance(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetTotalBalance(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetOTS(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetHeight(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetBlock(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetBlockByNumber(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAddressFromPK(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetNodeInfo(self, request, context):
        # missing associated documentation comment in .proto file
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_WalletAPIServicer_to_server(servicer, server):
    """Register every qrl.WalletAPI RPC handler of *servicer* on *server*.

    All RPCs of this service are unary-unary, so the handler table is
    built from a (method name, request type, response type) triple per
    RPC instead of spelling each handler out individually.
    """
    rpc_table = [
        ('AddNewAddress', qrlwallet__pb2.AddNewAddressReq, qrlwallet__pb2.AddNewAddressResp),
        ('AddNewAddressWithSlaves', qrlwallet__pb2.AddNewAddressWithSlavesReq, qrlwallet__pb2.AddNewAddressResp),
        ('ListAddresses', qrlwallet__pb2.ListAddressesReq, qrlwallet__pb2.ListAddressesResp),
        ('RemoveAddress', qrlwallet__pb2.RemoveAddressReq, qrlwallet__pb2.RemoveAddressResp),
        ('IsValidAddress', qrlwallet__pb2.ValidAddressReq, qrlwallet__pb2.ValidAddressResp),
        ('EncryptWallet', qrlwallet__pb2.EncryptWalletReq, qrlwallet__pb2.EncryptWalletResp),
        ('LockWallet', qrlwallet__pb2.LockWalletReq, qrlwallet__pb2.LockWalletResp),
        ('UnlockWallet', qrlwallet__pb2.UnlockWalletReq, qrlwallet__pb2.UnlockWalletResp),
        ('GetRecoverySeeds', qrlwallet__pb2.GetRecoverySeedsReq, qrlwallet__pb2.GetRecoverySeedsResp),
        ('GetWalletInfo', qrlwallet__pb2.GetWalletInfoReq, qrlwallet__pb2.GetWalletInfoResp),
        ('RelayTransferTxn', qrlwallet__pb2.RelayTransferTxnReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayTransferTxnBySlave', qrlwallet__pb2.RelayTransferTxnBySlaveReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayMessageTxn', qrlwallet__pb2.RelayMessageTxnReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayMessageTxnBySlave', qrlwallet__pb2.RelayMessageTxnBySlaveReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayTokenTxn', qrlwallet__pb2.RelayTokenTxnReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayTokenTxnBySlave', qrlwallet__pb2.RelayTokenTxnBySlaveReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayTransferTokenTxn', qrlwallet__pb2.RelayTransferTokenTxnReq, qrlwallet__pb2.RelayTxnResp),
        ('RelayTransferTokenTxnBySlave', qrlwallet__pb2.RelayTransferTokenTxnBySlaveReq, qrlwallet__pb2.RelayTxnResp),
        ('RelaySlaveTxn', qrlwallet__pb2.RelaySlaveTxnReq, qrlwallet__pb2.RelayTxnResp),
        ('RelaySlaveTxnBySlave', qrlwallet__pb2.RelaySlaveTxnBySlaveReq, qrlwallet__pb2.RelayTxnResp),
        ('ChangePassphrase', qrlwallet__pb2.ChangePassphraseReq, qrlwallet__pb2.ChangePassphraseResp),
        ('GetMiniTransactionsByAddress', qrlwallet__pb2.MiniTransactionsByAddressReq, qrlwallet__pb2.MiniTransactionsByAddressResp),
        ('GetTransaction', qrlwallet__pb2.TransactionReq, qrlwallet__pb2.TransactionResp),
        ('GetBalance', qrlwallet__pb2.BalanceReq, qrlwallet__pb2.BalanceResp),
        ('GetTotalBalance', qrlwallet__pb2.TotalBalanceReq, qrlwallet__pb2.TotalBalanceResp),
        ('GetOTS', qrlwallet__pb2.OTSReq, qrlwallet__pb2.OTSResp),
        ('GetHeight', qrlwallet__pb2.HeightReq, qrlwallet__pb2.HeightResp),
        ('GetBlock', qrlwallet__pb2.BlockReq, qrlwallet__pb2.BlockResp),
        ('GetBlockByNumber', qrlwallet__pb2.BlockByNumberReq, qrlwallet__pb2.BlockResp),
        ('GetAddressFromPK', qrlwallet__pb2.AddressFromPKReq, qrlwallet__pb2.AddressFromPKResp),
        ('GetNodeInfo', qrlwallet__pb2.NodeInfoReq, qrlwallet__pb2.NodeInfoResp),
    ]
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            getattr(servicer, name),
            request_deserializer=request_cls.FromString,
            response_serializer=response_cls.SerializeToString,
        )
        for name, request_cls, response_cls in rpc_table
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'qrl.WalletAPI', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
|
|
"""
tests.components.device_tracker.test_owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests Owntracks device tracker.
"""
import json
import os
import unittest
from collections import defaultdict
from homeassistant.components import device_tracker
from homeassistant.const import (STATE_NOT_HOME, CONF_PLATFORM)
import homeassistant.components.device_tracker.owntracks as owntracks
from tests.common import (
get_test_home_assistant, mock_mqtt_component, fire_mqtt_message)
# MQTT identity used to build the OwnTracks topics under test.
USER = 'greg'
DEVICE = 'phone'

LOCATION_TOPIC = "owntracks/{}/{}".format(USER, DEVICE)
EVENT_TOPIC = "owntracks/{}/{}/event".format(USER, DEVICE)

# Entity id of the tracked phone in the state machine.
DEVICE_TRACKER_STATE = "device_tracker.{}_{}".format(USER, DEVICE)

# Mobile iBeacon tracked as its own device_tracker entity.
IBEACON_DEVICE = 'keys'
REGION_TRACKER_STATE = "device_tracker.beacon_{}".format(IBEACON_DEVICE)

# Sample OwnTracks 'location' payload (coordinates fall inside the
# 'outer' test zone created in setup_method).
LOCATION_MESSAGE = {
    'batt': 92,
    'cog': 248,
    'tid': 'user',
    'lon': 1.0,
    't': 'u',
    'alt': 27,
    'acc': 60,
    'p': 101.3977584838867,
    'vac': 4,
    'lat': 2.0,
    '_type': 'location',
    'tst': 1,
    'vel': 0}

# Sample OwnTracks 'transition' payload for entering the 'inner' region.
REGION_ENTER_MESSAGE = {
    'lon': 1.0,
    'event': 'enter',
    'tid': 'user',
    'desc': 'inner',
    'wtst': 1,
    't': 'b',
    'acc': 60,
    'tst': 2,
    'lat': 2.0,
    '_type': 'transition'}

# Sample OwnTracks 'transition' payload for leaving the 'inner' region.
REGION_LEAVE_MESSAGE = {
    'lon': 1.0,
    'event': 'leave',
    'tid': 'user',
    'desc': 'inner',
    'wtst': 1,
    't': 'b',
    'acc': 60,
    'tst': 2,
    'lat': 2.0,
    '_type': 'transition'}
class TestDeviceTrackerOwnTracks(unittest.TestCase):
    """Test the OwnTracks device tracker platform."""
    # NOTE: the original class docstring said "Template sensor" — a
    # copy/paste leftover; corrected here.

    def setup_method(self, method):
        """Init hass with a mocked MQTT component and the test zones."""
        self.hass = get_test_home_assistant()
        mock_mqtt_component(self.hass)
        self.assertTrue(device_tracker.setup(self.hass, {
            device_tracker.DOMAIN: {
                CONF_PLATFORM: 'owntracks'
            }}))

        self.hass.states.set(
            'zone.inner', 'zoning',
            {
                'name': 'zone',
                'latitude': 2.1,
                'longitude': 1.1,
                'radius': 10
            })

        self.hass.states.set(
            'zone.inner_2', 'zoning',
            {
                'name': 'zone',
                'latitude': 2.1,
                'longitude': 1.1,
                'radius': 10
            })

        self.hass.states.set(
            'zone.outer', 'zoning',
            {
                'name': 'zone',
                'latitude': 2.0,
                'longitude': 1.0,
                'radius': 100000
            })

        self.hass.states.set(
            'zone.passive', 'zoning',
            {
                'name': 'zone',
                'latitude': 3.0,
                'longitude': 1.0,
                'radius': 10,
                'passive': True
            })

        # Clear state between tests
        self.hass.states.set(DEVICE_TRACKER_STATE, None)
        owntracks.REGIONS_ENTERED = defaultdict(list)
        owntracks.MOBILE_BEACONS_ACTIVE = defaultdict(list)

    def teardown_method(self, method):
        """Stop hass and remove the known-devices file we created."""
        self.hass.stop()
        try:
            os.remove(self.hass.config.path(device_tracker.YAML_DEVICES))
        except FileNotFoundError:
            pass

    def send_message(self, topic, message):
        """Publish *message* as JSON on *topic* and wait until handled."""
        fire_mqtt_message(
            self.hass, topic, json.dumps(message))
        self.hass.pool.block_till_done()

    def assert_location_state(self, location):
        """Assert the state of the phone tracker entity."""
        state = self.hass.states.get(DEVICE_TRACKER_STATE)
        self.assertEqual(state.state, location)

    def assert_location_latitude(self, latitude):
        """Assert the latitude attribute of the phone tracker entity."""
        state = self.hass.states.get(DEVICE_TRACKER_STATE)
        self.assertEqual(state.attributes.get('latitude'), latitude)

    def assert_location_accuracy(self, accuracy):
        """Assert the gps_accuracy attribute of the phone tracker entity."""
        state = self.hass.states.get(DEVICE_TRACKER_STATE)
        self.assertEqual(state.attributes.get('gps_accuracy'), accuracy)

    def assert_tracker_state(self, location):
        """Assert the state of the beacon tracker entity."""
        state = self.hass.states.get(REGION_TRACKER_STATE)
        self.assertEqual(state.state, location)

    def assert_tracker_latitude(self, latitude):
        """Assert the latitude attribute of the beacon tracker entity."""
        state = self.hass.states.get(REGION_TRACKER_STATE)
        self.assertEqual(state.attributes.get('latitude'), latitude)

    def assert_tracker_accuracy(self, accuracy):
        """Assert the gps_accuracy attribute of the beacon tracker entity."""
        state = self.hass.states.get(REGION_TRACKER_STATE)
        self.assertEqual(state.attributes.get('gps_accuracy'), accuracy)

    def test_location_update(self):
        """A plain location message updates gps data and zone state."""
        self.send_message(LOCATION_TOPIC, LOCATION_MESSAGE)

        self.assert_location_latitude(2.0)
        self.assert_location_accuracy(60.0)
        self.assert_location_state('outer')

    def test_event_entry_exit(self):
        """Zone enter/leave events switch between zone and gps data."""
        self.send_message(EVENT_TOPIC, REGION_ENTER_MESSAGE)

        # Enter uses the zone's gps co-ords
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)
        self.assert_location_state('inner')

        self.send_message(LOCATION_TOPIC, LOCATION_MESSAGE)

        # Updates ignored when in a zone
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)
        self.assert_location_state('inner')

        self.send_message(EVENT_TOPIC, REGION_LEAVE_MESSAGE)

        # Exit switches back to GPS
        self.assert_location_latitude(2.0)
        self.assert_location_accuracy(60.0)
        self.assert_location_state('outer')

        # Left clean zone state
        self.assertFalse(owntracks.REGIONS_ENTERED[USER])

    def test_event_exit_outside_zone_sets_away(self):
        """Leaving a zone far from it marks the device as not home."""
        self.send_message(EVENT_TOPIC, REGION_ENTER_MESSAGE)
        self.assert_location_state('inner')

        # Exit message far away GPS location
        message = REGION_LEAVE_MESSAGE.copy()
        message['lon'] = 90.1
        message['lat'] = 90.1
        self.send_message(EVENT_TOPIC, message)

        # Exit forces zone change to away
        self.assert_location_state(STATE_NOT_HOME)

    def test_event_entry_exit_right_order(self):
        """Nested zones entered and left in matching order."""
        # Enter inner zone
        self.send_message(EVENT_TOPIC, REGION_ENTER_MESSAGE)
        self.assert_location_state('inner')
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)

        # Enter inner2 zone
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('inner_2')
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)

        # Exit inner_2 - should be in 'inner'
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('inner')
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)

        # Exit inner - should be in 'outer'
        self.send_message(EVENT_TOPIC, REGION_LEAVE_MESSAGE)
        self.assert_location_state('outer')
        self.assert_location_latitude(2.0)
        self.assert_location_accuracy(60.0)

    def test_event_entry_exit_wrong_order(self):
        """Nested zones left in the wrong order still resolve correctly."""
        # Enter inner zone
        self.send_message(EVENT_TOPIC, REGION_ENTER_MESSAGE)
        self.assert_location_state('inner')

        # Enter inner2 zone
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('inner_2')

        # Exit inner - should still be in 'inner_2'
        self.send_message(EVENT_TOPIC, REGION_LEAVE_MESSAGE)
        self.assert_location_state('inner_2')

        # Exit inner_2 - should be in 'outer'
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('outer')

    def test_event_entry_exit_passive_zone(self):
        """Passive zones update gps data but never become the state."""
        # Enter passive zone
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "passive"
        self.send_message(EVENT_TOPIC, message)
        # Should pick up gps but not zone
        self.assert_location_state('not_home')
        self.assert_location_latitude(3.0)
        self.assert_location_accuracy(10.0)

        # Enter inner2 zone
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('inner_2')
        self.assert_location_latitude(2.1)
        self.assert_location_accuracy(10.0)

        # Exit inner_2 - should be in 'passive'
        # ie gps co-ords - but not zone
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('not_home')
        self.assert_location_latitude(3.0)
        self.assert_location_accuracy(10.0)

        # Exit passive - should be in 'outer'
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "passive"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_state('outer')
        self.assert_location_latitude(2.0)
        self.assert_location_accuracy(60.0)

    def test_event_entry_unknown_zone(self):
        """Entering an unknown zone is treated as a location update."""
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "unknown"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_latitude(2.0)
        self.assert_location_state('outer')

    def test_event_exit_unknown_zone(self):
        """Leaving an unknown zone is treated as a location update."""
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "unknown"
        self.send_message(EVENT_TOPIC, message)
        self.assert_location_latitude(2.0)
        self.assert_location_state('outer')

    def test_event_entry_zone_loading_dash(self):
        """A leading '-' on the region name is ignored when matching."""
        # OwnTracks uses a leading dash to switch on hold
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "-inner"
        # BUG FIX: previously this sent the pristine REGION_ENTER_MESSAGE
        # instead of the modified copy, so the dash-stripping behavior
        # described above was never actually exercised.
        self.send_message(EVENT_TOPIC, message)

        self.assert_location_state('inner')

    def test_mobile_enter_move_beacon(self):
        """Entering a mobile beacon tracks it; later moves follow gps."""
        # Enter mobile beacon, should set location
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = IBEACON_DEVICE
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(2.0)
        self.assert_tracker_state('outer')

        # Move should move beacon
        message = LOCATION_MESSAGE.copy()
        message['lat'] = "3.0"
        self.send_message(LOCATION_TOPIC, message)
        self.assert_tracker_latitude(3.0)
        self.assert_tracker_state(STATE_NOT_HOME)

    def test_mobile_enter_exit_region_beacon(self):
        """A tracked beacon follows zone enter/leave transitions."""
        # Start tracking beacon
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = IBEACON_DEVICE
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(2.0)
        self.assert_tracker_state('outer')

        # Enter location should move beacon
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(2.1)
        self.assert_tracker_state('inner_2')

        # Exit location should switch to gps
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = "inner_2"
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(2.0)

    def test_mobile_exit_move_beacon(self):
        """After leaving a beacon, location updates no longer move it."""
        # Start tracking beacon
        message = REGION_ENTER_MESSAGE.copy()
        message['desc'] = IBEACON_DEVICE
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(2.0)
        self.assert_tracker_state('outer')

        # Exit mobile beacon, should set location
        message = REGION_LEAVE_MESSAGE.copy()
        message['desc'] = IBEACON_DEVICE
        message['lat'] = "3.0"
        self.send_message(EVENT_TOPIC, message)
        self.assert_tracker_latitude(3.0)

        # Move after exit should do nothing
        message = LOCATION_MESSAGE.copy()
        message['lat'] = "4.0"
        # BUG FIX: previously this sent the unmodified LOCATION_MESSAGE,
        # so the "ignored move" below was never meaningfully exercised.
        self.send_message(LOCATION_TOPIC, message)
        self.assert_tracker_latitude(3.0)

    def test_mobile_multiple_async_enter_exit(self):
        """Racing enter/exit events leave no stale active beacons."""
        # Test race condition
        enter_message = REGION_ENTER_MESSAGE.copy()
        enter_message['desc'] = IBEACON_DEVICE
        exit_message = REGION_LEAVE_MESSAGE.copy()
        exit_message['desc'] = IBEACON_DEVICE

        for i in range(0, 20):
            fire_mqtt_message(
                self.hass, EVENT_TOPIC, json.dumps(enter_message))
            fire_mqtt_message(
                self.hass, EVENT_TOPIC, json.dumps(exit_message))

        fire_mqtt_message(
            self.hass, EVENT_TOPIC, json.dumps(enter_message))
        self.hass.pool.block_till_done()
        self.send_message(EVENT_TOPIC, exit_message)
        self.assertEqual(owntracks.MOBILE_BEACONS_ACTIVE['greg_phone'], [])

    def test_mobile_multiple_enter_exit(self):
        """Duplicate enter events still clear on a single exit."""
        # Should only happen if the iphone dies
        enter_message = REGION_ENTER_MESSAGE.copy()
        enter_message['desc'] = IBEACON_DEVICE
        exit_message = REGION_LEAVE_MESSAGE.copy()
        exit_message['desc'] = IBEACON_DEVICE

        self.send_message(EVENT_TOPIC, enter_message)
        self.send_message(EVENT_TOPIC, enter_message)
        self.send_message(EVENT_TOPIC, exit_message)

        self.assertEqual(owntracks.MOBILE_BEACONS_ACTIVE['greg_phone'], [])
|
|
# -*- coding: utf-8 -*-
import sys, os
import sphinx
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Resolve paths relative to this conf.py so builds work from any CWD.
DIR = os.path.dirname(__file__)
sys.path.append(
    os.path.abspath(
        os.path.join(DIR, '_extensions')))
# autodoc: make the project package importable
sys.path.append(os.path.abspath(os.path.join(DIR, '..')))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.2'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.ifconfig',
    'sphinx.ext.todo',
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.linkcode',
    'github_link',
    'odoo',
    'html_domain',
    'exercise_admonition',
    'patchqueue'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'odoo'
copyright = u'Odoo S.A.'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '9.0'
# The full version, including alpha/beta/rc tags.
release = '9.0'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'odoo'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'odoo'

# Default banner image plus per-external-page overrides.
odoo_cover_default = 'banners/installing_odoo.jpg'
odoo_cover_external = {
    'https://odoo.com/documentation/functional/accounting.html' : 'banners/m_accounting.jpg',
    'https://odoo.com/documentation/functional/double-entry.html' : 'banners/m_1.jpg',
    'https://odoo.com/documentation/functional/valuation.html' : 'banners/m_2.jpg',
}

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_extensions']

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

html_add_permalinks = u''

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
# FIXME: no sidebar on index?
html_sidebars = {
}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

latex_elements = {
    'papersize': r'a4paper',
    'preamble': u'''\\setcounter{tocdepth}{2}
''',
}

# default must be set otherwise ifconfig blows up
todo_include_todos = False

intersphinx_mapping = {
    'python': ('https://docs.python.org/2/', None),
    'werkzeug': ('http://werkzeug.pocoo.org/docs/', None),
    'sqlalchemy': ('http://docs.sqlalchemy.org/en/rel_0_9/', None),
    'django': ('https://django.readthedocs.org/en/latest/', None),
}

# Used by the github_link extension to build "edit on GitHub" links.
github_user = 'odoo'
github_project = 'odoo'

# monkeypatch PHP lexer to not require <?php
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True)
def setup(app):
    """Sphinx entry point: declare our config values and wire the
    custom ``html-page-context`` handlers (canonical URL, version
    switcher, analytics key).
    """
    declare = app.add_config_value
    app.connect('html-page-context', canonicalize)
    declare('canonical_root', None, 'env')
    declare('canonical_branch', 'master', 'env')

    app.connect('html-page-context', versionize)
    declare('versions', '', 'env')

    app.connect('html-page-context', analytics)
    declare('google_analytics_key', '', 'env')
def canonicalize(app, pagename, templatename, context, doctree):
    """ Adds a 'canonical' URL for the current document in the rendering
    context. Requires the ``canonical_root`` setting being set. The canonical
    branch is ``master`` but can be overridden using ``canonical_branch``.
    """
    root = app.config.canonical_root
    if root:
        context['canonical'] = _build_url(
            root, app.config.canonical_branch, pagename)
def versionize(app, pagename, templatename, context, doctree):
    """ Adds a version switcher below the menu, requires ``canonical_root``
    and ``versions`` (an ordered, comma-separated list of all possible
    versions).
    """
    config = app.config
    if not (config.canonical_root and config.versions):
        return
    alternatives = []
    for candidate in config.versions.split(','):
        if candidate == config.version:
            continue
        alternatives.append(
            (candidate,
             _build_url(config.canonical_root, candidate, pagename)))
    context['versions'] = alternatives
def analytics(app, pagename, templatename, context, doctree):
    """ Expose the configured Google Analytics key to the page template,
    if any is set.
    """
    key = app.config.google_analytics_key
    if key:
        context['google_analytics_key'] = key
def _build_url(root, branch, pagename):
    """ Return the canonical URL of *pagename* under *root* for *branch*.

    Index pages collapse to their directory URL (``a/index`` -> ``a/``),
    mirroring how the site is served.
    NOTE(review): the chained ``str.replace`` also strips the substrings
    ``index.html``/``index/`` anywhere in the path (e.g. ``myindex`` ->
    ``my``) — preserved as-is; confirm before tightening.
    """
    page = pagename + '.html'
    page = page.replace('index.html', '').replace('index/', '')
    return "{canonical_url}{canonical_branch}/{canonical_page}".format(
        canonical_url=root,
        canonical_branch=branch,
        canonical_page=page,
    )
|
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from six.moves import urllib
from keystone.common import sql
from keystone.common.sql import migration_helpers
from keystone import config
from keystone import contrib
from keystone.contrib import oauth1
from keystone.contrib.oauth1 import controllers
from keystone import exception
from keystone.openstack.common.db.sqlalchemy import migration
from keystone.openstack.common import importutils
from keystone.tests import test_v3
CONF = config.CONF
class OAuth1Tests(test_v3.RestfulTestCase):
    """Common fixture for the OS-OAUTH1 extension API tests."""

    # Extension under test and the name its migration repo registers as.
    EXTENSION_NAME = 'oauth1'
    EXTENSION_TO_ADD = 'oauth1_extension'

    # Base path of the consumer CRUD resource.
    CONSUMER_URL = '/OS-OAUTH1/consumers'

    def setup_database(self):
        """Run the oauth1 extension migrations against the test engine."""
        super(OAuth1Tests, self).setup_database()
        package_name = '.'.join((contrib.__name__, self.EXTENSION_NAME))
        package = importutils.import_module(package_name)
        abs_path = migration_helpers.find_migrate_repo(package)
        migration.db_version_control(sql.get_engine(), abs_path)
        migration.db_sync(sql.get_engine(), abs_path)

    def setUp(self):
        super(OAuth1Tests, self).setUp()
        # Now that the app has been served, we can query CONF values
        self.base_url = 'http://localhost/v3'
        self.controller = controllers.OAuthControllerV3()

    def _create_single_consumer(self):
        """POST one consumer with a random description; return its body."""
        ref = {'description': uuid.uuid4().hex}
        resp = self.post(
            self.CONSUMER_URL,
            body={'consumer': ref})
        return resp.result['consumer']

    def _create_request_token(self, consumer, project_id):
        """Sign a request-token request for *consumer*.

        Returns (endpoint, signed headers); the requested project is
        carried in the 'requested_project_id' header.
        """
        endpoint = '/OS-OAUTH1/request_token'
        client = oauth1.Client(consumer['key'],
                               client_secret=consumer['secret'],
                               signature_method=oauth1.SIG_HMAC,
                               callback_uri="oob")
        headers = {'requested_project_id': project_id}
        url, headers, body = client.sign(self.base_url + endpoint,
                                         http_method='POST',
                                         headers=headers)
        return endpoint, headers

    def _create_access_token(self, consumer, token):
        """Sign an access-token request using the authorized request
        *token* (its verifier must be set).  Returns (endpoint, headers).
        """
        endpoint = '/OS-OAUTH1/access_token'
        client = oauth1.Client(consumer['key'],
                               client_secret=consumer['secret'],
                               resource_owner_key=token.key,
                               resource_owner_secret=token.secret,
                               signature_method=oauth1.SIG_HMAC,
                               verifier=token.verifier)
        url, headers, body = client.sign(self.base_url + endpoint,
                                         http_method='POST')
        headers.update({'Content-Type': 'application/json'})
        return endpoint, headers

    def _get_oauth_token(self, consumer, token):
        """Sign a keystone token request authenticated via oauth1.

        Returns (endpoint, headers, auth request body).
        """
        client = oauth1.Client(consumer['key'],
                               client_secret=consumer['secret'],
                               resource_owner_key=token.key,
                               resource_owner_secret=token.secret,
                               signature_method=oauth1.SIG_HMAC)
        endpoint = '/auth/tokens'
        url, headers, body = client.sign(self.base_url + endpoint,
                                         http_method='POST')
        headers.update({'Content-Type': 'application/json'})
        ref = {'auth': {'identity': {'oauth1': {}, 'methods': ['oauth1']}}}
        return endpoint, headers, ref

    def _authorize_request_token(self, request_id):
        """Return the authorize URL for the given request token id."""
        return '/OS-OAUTH1/authorize/%s' % (request_id)
class ConsumerCRUDTests(OAuth1Tests):
def _consumer_create(self, description=None, description_flag=True,
                     **kwargs):
    """Create a consumer via the API and sanity-check the response.

    With *description_flag* False the 'description' key is omitted from
    the request entirely; extra keyword args become consumer fields.
    """
    ref = {'description': description} if description_flag else {}
    if kwargs:
        ref.update(kwargs)
    response = self.post(
        self.CONSUMER_URL,
        body={'consumer': ref})
    consumer = response.result['consumer']
    # A freshly created consumer always carries an id and a secret.
    self.assertEqual(consumer['description'], description)
    self.assertIsNotNone(consumer['id'])
    self.assertIsNotNone(consumer['secret'])
    return consumer
def test_consumer_create(self):
    """A consumer can be created with an explicit description."""
    self._consumer_create(description=uuid.uuid4().hex)
def test_consumer_create_none_desc_1(self):
    """Omitting the description argument defaults it to None."""
    self._consumer_create()
def test_consumer_create_none_desc_2(self):
    """An absent 'description' key behaves the same as None."""
    self._consumer_create(description_flag=False)
def test_consumer_create_normalize_field(self):
    """':' and '-' in extra field names are normalized to '_' on create."""
    field_value = uuid.uuid4().hex
    consumer = self._consumer_create(**{'some:weird-field': field_value})
    self.assertEqual(field_value, consumer['some_weird_field'])
def test_consumer_delete(self):
    """Deleting an existing consumer returns 204 No Content."""
    consumer_id = self._create_single_consumer()['id']
    resp = self.delete(self.CONSUMER_URL + '/%s' % consumer_id)
    self.assertResponseStatus(resp, 204)
def test_consumer_get(self):
    """GET returns the consumer with a correct self link."""
    consumer_id = self._create_single_consumer()['id']
    resp = self.get(self.CONSUMER_URL + '/%s' % consumer_id)
    expected_self = 'http://localhost/v3%s/%s' % (self.CONSUMER_URL,
                                                  consumer_id)
    self.assertEqual(resp.result['consumer']['links']['self'],
                     expected_self)
    self.assertEqual(resp.result['consumer']['id'], consumer_id)
def test_consumer_list(self):
    """Listing consumers returns entities and valid collection links."""
    self._consumer_create()
    resp = self.get(self.CONSUMER_URL)
    self.assertIsNotNone(resp.result['consumers'])
    self.assertEqual(resp.result['links']['self'],
                     'http://localhost/v3' + self.CONSUMER_URL)
    self.assertValidListLinks(resp.result['links'])
def test_consumer_update(self):
    """The description can be patched; the id stays stable."""
    consumer = self._create_single_consumer()
    original_id = consumer['id']
    new_description = consumer['description'] + '_new'
    update_resp = self.patch(self.CONSUMER_URL + '/%s' % original_id,
                             body={'consumer':
                                   {'description': new_description}})
    updated = update_resp.result['consumer']
    self.assertEqual(updated['description'], new_description)
    self.assertEqual(updated['id'], original_id)
def test_consumer_update_bad_secret(self):
consumer = self._create_single_consumer()
original_id = consumer['id']
update_ref = copy.deepcopy(consumer)
update_ref['description'] = uuid.uuid4().hex
update_ref['secret'] = uuid.uuid4().hex
self.patch(self.CONSUMER_URL + '/%s' % original_id,
body={'consumer': update_ref},
expected_status=400)
def test_consumer_update_bad_id(self):
consumer = self._create_single_consumer()
original_id = consumer['id']
original_description = consumer['description']
update_description = original_description + "_new"
update_ref = copy.deepcopy(consumer)
update_ref['description'] = update_description
update_ref['id'] = update_description
self.patch(self.CONSUMER_URL + '/%s' % original_id,
body={'consumer': update_ref},
expected_status=400)
def test_consumer_update_normalize_field(self):
# If update a consumer with a field with : or - in the name,
# the name is normalized by converting those chars to _.
field1_name = 'some:weird-field'
field1_orig_value = uuid.uuid4().hex
extra_fields = {field1_name: field1_orig_value}
consumer = self._consumer_create(**extra_fields)
consumer_id = consumer['id']
field1_new_value = uuid.uuid4().hex
field2_name = 'weird:some-field'
field2_value = uuid.uuid4().hex
update_ref = {field1_name: field1_new_value,
field2_name: field2_value}
update_resp = self.patch(self.CONSUMER_URL + '/%s' % consumer_id,
body={'consumer': update_ref})
consumer = update_resp.result['consumer']
normalized_field1_name = 'some_weird_field'
self.assertEqual(field1_new_value, consumer[normalized_field1_name])
normalized_field2_name = 'weird_some_field'
self.assertEqual(field2_value, consumer[normalized_field2_name])
def test_consumer_create_no_description(self):
resp = self.post(self.CONSUMER_URL, body={'consumer': {}})
consumer = resp.result['consumer']
consumer_id = consumer['id']
self.assertIsNone(consumer['description'])
self.assertIsNotNone(consumer_id)
self.assertIsNotNone(consumer['secret'])
def test_consumer_get_bad_id(self):
self.get(self.CONSUMER_URL + '/%(consumer_id)s'
% {'consumer_id': uuid.uuid4().hex},
expected_status=404)
class OAuthFlowTests(OAuth1Tests):
    """Drive the full three-legged OAuth1 flow.

    Subclasses call :meth:`test_oauth_flow` as a fixture: it stores the
    consumer, request token, access token, and resulting Keystone token
    on ``self`` for their own assertions.
    """

    def test_oauth_flow(self):
        """Consumer -> request token -> authorize -> access token -> token."""
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        self.consumer = {'key': consumer_id, 'secret': consumer_secret}
        self.assertIsNotNone(self.consumer['secret'])
        # Leg 1: obtain an unauthorized request token for the project.
        url, headers = self._create_request_token(self.consumer,
                                                  self.project_id)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        request_key = credentials['oauth_token'][0]
        request_secret = credentials['oauth_token_secret'][0]
        self.request_token = oauth1.Token(request_key, request_secret)
        self.assertIsNotNone(self.request_token.key)
        # Leg 2: authorize the request token with a role; server returns
        # the verifier needed for the access-token exchange.
        url = self._authorize_request_token(request_key)
        body = {'roles': [{'id': self.role_id}]}
        resp = self.put(url, body=body, expected_status=200)
        self.verifier = resp.result['token']['oauth_verifier']
        self.request_token.set_verifier(self.verifier)
        # Leg 3: exchange the verified request token for an access token.
        url, headers = self._create_access_token(self.consumer,
                                                 self.request_token)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        access_key = credentials['oauth_token'][0]
        access_secret = credentials['oauth_token_secret'][0]
        self.access_token = oauth1.Token(access_key, access_secret)
        self.assertIsNotNone(self.access_token.key)
        # Finally, use the access token to issue a Keystone token.
        url, headers, body = self._get_oauth_token(self.consumer,
                                                   self.access_token)
        content = self.post(url, headers=headers, body=body)
        self.keystone_token_id = content.headers['X-Subject-Token']
        self.keystone_token = content.result['token']
        self.assertIsNotNone(self.keystone_token_id)
class AccessTokenCRUDTests(OAuthFlowTests):
    """CRUD operations on a user's OAuth1 access tokens.

    Tests that need an existing token first run the inherited
    ``test_oauth_flow`` to populate ``self.access_token`` etc.
    """

    def test_delete_access_token_dne(self):
        """Deleting a nonexistent access token returns 404."""
        self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
                    % {'user': self.user_id,
                       'auth': uuid.uuid4().hex},
                    expected_status=404)

    def test_list_no_access_tokens(self):
        """Listing tokens for a user with none yields an empty collection."""
        resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
                        % {'user_id': self.user_id})
        entities = resp.result['access_tokens']
        self.assertEqual([], entities)
        self.assertValidListLinks(resp.result['links'])

    def test_get_single_access_token(self):
        """A created access token can be fetched and maps to its consumer."""
        self.test_oauth_flow()
        resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
                        % {'user_id': self.user_id,
                           'key': self.access_token.key})
        entity = resp.result['access_token']
        self.assertEqual(entity['id'], self.access_token.key)
        self.assertEqual(entity['consumer_id'], self.consumer['key'])

    def test_get_access_token_dne(self):
        """Fetching a nonexistent access token returns 404."""
        self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(key)s'
                 % {'user_id': self.user_id,
                    'key': uuid.uuid4().hex},
                 expected_status=404)

    def test_list_all_roles_in_access_token(self):
        """The roles delegated through the access token are listable."""
        self.test_oauth_flow()
        resp = self.get('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles'
                        % {'id': self.user_id,
                           'key': self.access_token.key})
        entities = resp.result['roles']
        self.assertTrue(entities)
        self.assertValidListLinks(resp.result['links'])

    def test_get_role_in_access_token(self):
        """A specific delegated role can be fetched from the token."""
        self.test_oauth_flow()
        url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
               % {'id': self.user_id, 'key': self.access_token.key,
                  'role': self.role_id})
        resp = self.get(url)
        entity = resp.result['role']
        self.assertEqual(entity['id'], self.role_id)

    def test_get_role_in_access_token_dne(self):
        """Fetching a role not delegated through the token returns 404."""
        self.test_oauth_flow()
        url = ('/users/%(id)s/OS-OAUTH1/access_tokens/%(key)s/roles/%(role)s'
               % {'id': self.user_id, 'key': self.access_token.key,
                  'role': uuid.uuid4().hex})
        self.get(url, expected_status=404)

    def test_list_and_delete_access_tokens(self):
        """Deleting an access token removes it from the user's list."""
        self.test_oauth_flow()
        # List access_tokens should be > 0
        resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
                        % {'user_id': self.user_id})
        entities = resp.result['access_tokens']
        self.assertTrue(entities)
        self.assertValidListLinks(resp.result['links'])
        # Delete access_token
        resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
                           % {'user': self.user_id,
                              'auth': self.access_token.key})
        self.assertResponseStatus(resp, 204)
        # List access_token should be 0
        resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
                        % {'user_id': self.user_id})
        entities = resp.result['access_tokens']
        self.assertEqual([], entities)
        self.assertValidListLinks(resp.result['links'])
class AuthTokenTests(OAuthFlowTests):
    """Behavior of Keystone tokens issued through OAuth1 delegation.

    Each test first runs the inherited ``test_oauth_flow`` to obtain
    ``self.keystone_token_id`` and related state.
    """

    def test_keystone_token_is_valid(self):
        """The delegated token validates and can perform delegated work."""
        self.test_oauth_flow()
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        r = self.get('/auth/tokens', headers=headers)
        self.assertValidTokenResponse(r, self.user)
        # now verify the oauth section
        oauth_section = r.result['token']['OS-OAUTH1']
        self.assertEqual(oauth_section['access_token_id'],
                         self.access_token.key)
        self.assertEqual(oauth_section['consumer_id'], self.consumer['key'])
        # verify the roles section
        roles_list = r.result['token']['roles']
        # we can just verify the 0th role since we are only assigning one role
        self.assertEqual(roles_list[0]['id'], self.role_id)
        # verify that the token can perform delegated tasks
        ref = self.new_user_ref(domain_id=self.domain_id)
        r = self.admin_request(path='/v3/users', headers=headers,
                               method='POST', body={'user': ref})
        self.assertValidUserResponse(r, ref)

    def test_delete_access_token_also_revokes_token(self):
        """Deleting the access token invalidates the Keystone token."""
        self.test_oauth_flow()
        # Delete access token
        resp = self.delete('/users/%(user)s/OS-OAUTH1/access_tokens/%(auth)s'
                           % {'user': self.user_id,
                              'auth': self.access_token.key})
        self.assertResponseStatus(resp, 204)
        # Check Keystone Token no longer exists
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        self.get('/auth/tokens', headers=headers,
                 expected_status=404)

    def test_deleting_consumer_also_deletes_tokens(self):
        """Deleting the consumer removes access tokens and Keystone tokens."""
        self.test_oauth_flow()
        # Delete consumer
        consumer_id = self.consumer['key']
        resp = self.delete('/OS-OAUTH1/consumers/%(consumer_id)s'
                           % {'consumer_id': consumer_id})
        self.assertResponseStatus(resp, 204)
        # List access_token should be 0
        resp = self.get('/users/%(user_id)s/OS-OAUTH1/access_tokens'
                        % {'user_id': self.user_id})
        entities = resp.result['access_tokens']
        self.assertEqual([], entities)
        # Check Keystone Token no longer exists
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        self.head('/auth/tokens', headers=headers,
                  expected_status=404)

    def test_change_user_password_also_deletes_tokens(self):
        """Changing the user's password invalidates the delegated token."""
        self.test_oauth_flow()
        # delegated keystone token exists
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        r = self.get('/auth/tokens', headers=headers)
        self.assertValidTokenResponse(r, self.user)
        user = {'password': uuid.uuid4().hex}
        r = self.patch('/users/%(user_id)s' % {
            'user_id': self.user['id']},
            body={'user': user})
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        self.admin_request(path='/auth/tokens', headers=headers,
                           method='GET', expected_status=404)

    def test_deleting_project_also_invalidates_tokens(self):
        """Deleting the delegated project invalidates the token."""
        self.test_oauth_flow()
        # delegated keystone token exists
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        r = self.get('/auth/tokens', headers=headers)
        self.assertValidTokenResponse(r, self.user)
        r = self.delete('/projects/%(project_id)s' % {
            'project_id': self.project_id})
        headers = {'X-Subject-Token': self.keystone_token_id,
                   'X-Auth-Token': self.keystone_token_id}
        self.admin_request(path='/auth/tokens', headers=headers,
                           method='GET', expected_status=404)

    def test_token_chaining_is_not_allowed(self):
        """An OAuth-delegated token cannot be used to issue another token."""
        self.test_oauth_flow()
        #attempt to re-authenticate (token chain) with the given token
        path = '/v3/auth/tokens/'
        auth_data = self.build_authentication_request(
            token=self.keystone_token_id)
        self.admin_request(
            path=path,
            body=auth_data,
            token=self.keystone_token_id,
            method='POST',
            expected_status=403)

    def test_delete_keystone_tokens_by_consumer_id(self):
        """Tokens can be purged directly by consumer id via the token API."""
        self.test_oauth_flow()
        self.token_api.get_token(self.keystone_token_id)
        self.token_api.delete_tokens(self.user_id,
                                     consumer_id=self.consumer['key'])
        self.assertRaises(exception.TokenNotFound, self.token_api.get_token,
                          self.keystone_token_id)
class MaliciousOAuth1Tests(OAuth1Tests):
    """Negative OAuth1 scenarios: bad credentials, bad tokens, expiry."""

    def test_bad_consumer_secret(self):
        """Signing with a wrong consumer secret is rejected with 401."""
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer = {'key': consumer_id, 'secret': uuid.uuid4().hex}
        url, headers = self._create_request_token(consumer, self.project_id)
        self.post(url, headers=headers, expected_status=401)

    def test_bad_request_token_key(self):
        """Authorizing an unknown request token key returns 404."""
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        consumer = {'key': consumer_id, 'secret': consumer_secret}
        url, headers = self._create_request_token(consumer, self.project_id)
        self.post(url, headers=headers)
        url = self._authorize_request_token(uuid.uuid4().hex)
        body = {'roles': [{'id': self.role_id}]}
        self.put(url, body=body, expected_status=404)

    def test_bad_verifier(self):
        """Exchanging with a wrong verifier is rejected with 401."""
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        consumer = {'key': consumer_id, 'secret': consumer_secret}
        url, headers = self._create_request_token(consumer, self.project_id)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        request_key = credentials['oauth_token'][0]
        request_secret = credentials['oauth_token_secret'][0]
        request_token = oauth1.Token(request_key, request_secret)
        url = self._authorize_request_token(request_key)
        body = {'roles': [{'id': self.role_id}]}
        resp = self.put(url, body=body, expected_status=200)
        verifier = resp.result['token']['oauth_verifier']
        self.assertIsNotNone(verifier)
        # Deliberately set a bogus verifier before the exchange.
        request_token.set_verifier(uuid.uuid4().hex)
        url, headers = self._create_access_token(consumer, request_token)
        self.post(url, headers=headers, expected_status=401)

    def test_bad_authorizing_roles(self):
        """Authorizing with a role the user no longer has returns 404."""
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        consumer = {'key': consumer_id, 'secret': consumer_secret}
        url, headers = self._create_request_token(consumer, self.project_id)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        request_key = credentials['oauth_token'][0]
        # Strip the role before attempting to delegate it.
        self.assignment_api.remove_role_from_user_and_project(
            self.user_id, self.project_id, self.role_id)
        url = self._authorize_request_token(request_key)
        body = {'roles': [{'id': self.role_id}]}
        self.admin_request(path=url, method='PUT',
                           body=body, expected_status=404)

    def test_expired_authorizing_request_token(self):
        """An expired request token cannot be authorized (401)."""
        # Negative duration forces the request token to be born expired.
        CONF.oauth1.request_token_duration = -1
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        self.consumer = {'key': consumer_id, 'secret': consumer_secret}
        self.assertIsNotNone(self.consumer['key'])
        url, headers = self._create_request_token(self.consumer,
                                                  self.project_id)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        request_key = credentials['oauth_token'][0]
        request_secret = credentials['oauth_token_secret'][0]
        self.request_token = oauth1.Token(request_key, request_secret)
        self.assertIsNotNone(self.request_token.key)
        url = self._authorize_request_token(request_key)
        body = {'roles': [{'id': self.role_id}]}
        self.put(url, body=body, expected_status=401)

    def test_expired_creating_keystone_token(self):
        """An expired access token cannot issue a Keystone token (401)."""
        # Negative duration forces the access token to be born expired.
        CONF.oauth1.access_token_duration = -1
        consumer = self._create_single_consumer()
        consumer_id = consumer['id']
        consumer_secret = consumer['secret']
        self.consumer = {'key': consumer_id, 'secret': consumer_secret}
        self.assertIsNotNone(self.consumer['key'])
        url, headers = self._create_request_token(self.consumer,
                                                  self.project_id)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        request_key = credentials['oauth_token'][0]
        request_secret = credentials['oauth_token_secret'][0]
        self.request_token = oauth1.Token(request_key, request_secret)
        self.assertIsNotNone(self.request_token.key)
        url = self._authorize_request_token(request_key)
        body = {'roles': [{'id': self.role_id}]}
        resp = self.put(url, body=body, expected_status=200)
        self.verifier = resp.result['token']['oauth_verifier']
        self.request_token.set_verifier(self.verifier)
        url, headers = self._create_access_token(self.consumer,
                                                 self.request_token)
        content = self.post(url, headers=headers)
        credentials = urllib.parse.parse_qs(content.result)
        access_key = credentials['oauth_token'][0]
        access_secret = credentials['oauth_token_secret'][0]
        self.access_token = oauth1.Token(access_key, access_secret)
        self.assertIsNotNone(self.access_token.key)
        url, headers, body = self._get_oauth_token(self.consumer,
                                                   self.access_token)
        self.post(url, headers=headers, body=body, expected_status=401)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
=================
Threading Classes
=================
:Author:
Moritz Emanuel Beber
:Date:
2011-02-26
:Copyright:
Copyright(c) 2011 Jacobs University of Bremen. All rights reserved.
:File:
threaded.py
Notes
-----
Most classes (unless they inherit from old-style classes) are new-style classes.
Attributes and methods not intended to be accessed directly by the user are
preceeded by a single underscore '_' but they can be used if the user knows
what he is doing. Those preceeded with two underscores '__' should under no
circumstances be touched.
"""
import os
import threading
import logging
import paramiko
import socket
import math
from Queue import Queue
from .errors import NetworkError
class ThreadPoolWorker(threading.Thread):
    """
    Worker thread that operates on items from its queue.

    Each item is a ``(callable, args, kwargs)`` triple.  The worker runs
    forever as a daemon thread; exceptions raised by an item are pushed
    to an optional exception queue instead of killing the thread.
    """
    def __init__(self, queue, exception_queue=None):
        """
        :param queue: Queue of ``(perform, args, kw_args)`` work items.
        :param exception_queue: optional Queue that receives
            ``(err, perform, args, kw_args)`` tuples for failed items.
        """
        threading.Thread.__init__(self)
        self._queue = queue
        self._exception_queue = exception_queue
        self.daemon = True
    def run(self):
        """
        Consume and execute work items until the process exits.
        """
        while True:
            (perform, args, kw_args) = self._queue.get()
            try:
                perform(*args, **kw_args)
            except StandardError as err:
                # Python 2: StandardError covers ordinary runtime errors;
                # failed items are kept for later inspection/retry.
                if self._exception_queue:
                    self._exception_queue.put((err, perform, args, kw_args))
            finally:
                # always mark the item done so Queue.join() can unblock
                self._queue.task_done()
class ThreadPool(object):
    """
    Fixed-size pool of daemon :class:`ThreadPoolWorker` threads that
    consume ``(callable, args, kwargs)`` items from one shared queue.

    NOTE: this was previously declared with ``def`` instead of
    ``class``, which made ``ThreadPool`` a function whose nested
    definitions never executed; the pool was unusable.
    """

    def __init__(self, num_threads, retry=False):
        """
        :param num_threads: number of worker threads to start.
        :param retry: when True, failed items are collected on
            ``self.exception_queue`` for later inspection or retry.
        """
        object.__init__(self)
        self.queue = Queue()
        if retry:
            self.exception_queue = Queue()
        else:
            self.exception_queue = None
        # workers are daemons, so they die with the main thread
        for i in xrange(num_threads):
            w = ThreadPoolWorker(self.queue, self.exception_queue)
            w.start()

    def put(self, perform, *args, **kw_args):
        """
        Schedule ``perform(*args, **kw_args)`` for execution on the pool.
        """
        self.queue.put((perform, args, kw_args))

    def join(self):
        """
        Block until every queued item has been processed.
        """
        self.queue.join()
class RemoteSetup(object):
    """
    Manage one remote host over SSH: probe CPU count and load, launch a
    detached batch job inside ``screen``, and kill remote processes.

    An instance-level lock serialises reads/writes of
    ``~/.ssh/known_hosts`` so several instances may run from parallel
    threads within one process.
    """

    def __init__(self, host, options, *args, **kwargs):
        """
        :param host: hostname or address of the remote machine.
        :param options: configuration object; must provide ``ssh_port``,
            ``username``, ``password``, ``auto_add``, and ``buf_size``.
        """
        object.__init__(self)
        self._host = str(host)
        self.name = "%s@%s" % (self.__class__.__name__, self._host)
        self.logger = logging.getLogger(self.name)
        self._child_name = "%s.SSHClient" % self.name
        self._child_logger = logging.getLogger(self._child_name)
        # paramiko logs on its own channel; keep it out of ancestor loggers
        self._child_logger.propagate = 0
        self._options = options
        self._client = None
        self._n_cpus = None
        self._cpu_usage = None
        # guards concurrent known_hosts file access within this process
        self._io_lock = threading.Lock()

    def __del__(self):
        """Close the SSH connection when the instance is collected."""
        if self._client:
            self._client.close()

    def close(self):
        """Close the SSH connection if one is open."""
        if self._client:
            self._client.close()

    def make_ssh_connection(self):
        """
        Create an ``SSHClient``, load known host keys, and connect.

        :raises NetworkError: on host-key, authentication, SSH-level, or
            socket-level failure.
        """
        # create the communication instance
        self.logger.debug("Creating SSHClient instance")
        self._client = paramiko.SSHClient()
        # set logging for it
        self.logger.debug("Setting log channel")
        self._client.set_log_channel(self._child_name)
        self.logger.debug("Setting missing host key policies")
        if self._options.auto_add:
            self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        else:
            self._client.set_missing_host_key_policy(paramiko.WarningPolicy())
        self.logger.debug("Loading known host keys")
        self._io_lock.acquire()
        try:
            self._client.load_host_keys(
                os.path.expanduser("~/.ssh/known_hosts"))
        except IOError as err:
            self.logger.exception(str(err))
            # how to proceed when loading of host keys fails?
            # right now making the connection probably still fails so all is
            # well
        finally:
            self._io_lock.release()
        self.logger.debug("Making connection")
        try:
            self._client.connect(hostname=self._host,
                port=self._options.ssh_port,
                username=self._options.username,
                password=self._options.password)
        except paramiko.BadHostKeyException:
            raise NetworkError("Bad Host Key")
        except paramiko.AuthenticationException:
            raise NetworkError("Authentication Error")
        except paramiko.SSHException:
            raise NetworkError("Connection Error")
        except socket.error:
            raise NetworkError("Socket Error")
        else:
            self.logger.info("Connection established and authenticated")
            self._io_lock.acquire()
            self._client.save_host_keys(
                os.path.expanduser("~/.ssh/known_hosts"))
            self._io_lock.release()

    def one_time_cmd(self, cmd):
        """
        Execute *cmd* once over the connection and return its stdout.

        :raises NetworkError: if execution fails, or the command wrote
            only to stderr (treated as command failure).
        """
        try:
            (stdin_fh, stdout_fh, stderr_fh) = self._client.exec_command(cmd,
                self._options.buf_size)
        except paramiko.SSHException:
            raise NetworkError("Failed to execute remote command")
        stderr = stderr_fh.read()
        stdout = stdout_fh.read()
        if stderr and not stdout:
            # bug fix: interpolate stderr into the message; previously it
            # was passed as a second constructor argument and never shown
            raise NetworkError("Remote command failed with: %s" % stderr)
        else:
            return stdout

    def _detect_ncpus(self):
        """
        Set ``self._n_cpus`` by probing the remote host.

        Tries /proc/cpuinfo (Linux) first, then sysctl (BSD/MacOS), and
        falls back to 1 if neither produced a parseable number.
        """
        # get number of cpus on linux
        cmd = "grep -c 'model name' '/proc/cpuinfo'"
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            for line in stdout:
                try:
                    self._n_cpus = int(line)
                except ValueError:
                    continue
                else:
                    return
        # no CPUs detected, i.e., cmd caused an error
        # will use pty on MacOS as well for consistency
        cmd = "sysctl -n hw.ncpu"
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            for line in stdout:
                try:
                    self._n_cpus = int(line)
                except ValueError:
                    continue
                else:
                    return
        # return the default value
        self.logger.warning("Could not detect number of CPUs,"\
            " assuming default '1'")
        self._n_cpus = 1

    def _detect_cpu_usage(self, num_probes=10.0):
        """
        Set ``self._cpu_usage`` to the mean idle percentage (rounded up)
        sampled over *num_probes* one-second ``vmstat`` probes; 0 on
        failure.
        """
        # for linux, unix, and macosx that's why both -e and -a
        cmd = "vmstat 1 %d" % num_probes
        stdout = self.one_time_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)
            stdout = stdout.split("\n")
            total = 0.
            for line in stdout:
                if not line:
                    continue
                tmp = line.split()
                # only want to parse lines that start with numbers
                try:
                    float(tmp[0])
                except ValueError:
                    continue
                # cheap trick not to parse ordinary text, like %CPU header
                # ps --no-headers not available on mac, for example
                try:
                    # column 12 is presumably vmstat's "id" (idle) column --
                    # TODO confirm; column layout varies between platforms
                    total += float(tmp[12])
                except ValueError:
                    continue
            self._cpu_usage = math.ceil(total / num_probes)
            return
        # default usage
        self.logger.warning("Could not detect CPU usage, assuming 0 %%")
        self._cpu_usage = 0.

    def remote_shell_cmd(self, cmd, timeout=20.):
        """
        Run *cmd* in an interactive remote shell and return captured
        output, reading until the shell prompt reappears or *timeout*
        seconds pass without data.

        :raises NetworkError: if the shell cannot be invoked or sending
            the command times out.
        """
        try:
            channel = self._client.invoke_shell()
        except paramiko.SSHException:
            raise NetworkError("Failed to invoke remote shell")
        if channel.gettimeout():
            self.logger.debug("Channel timeout: %f", channel.gettimeout())
        else:
            channel.settimeout(timeout)
        try:
            channel.sendall(cmd)
        except socket.timeout:
            channel.close()
            raise NetworkError("Connection timed out")
        stdout = ""
        # heuristic end-of-output marker: the user's shell prompt
        expect = "%s@%s:~>\r\n" % (self._options.username, self._host)
        while True:
            try:
                stdout += channel.recv(self._options.buf_size)
                if stdout.endswith(expect):
                    break
            except socket.timeout:
                break
        channel.close()
        return stdout

    def _setup_job(self, lower, upper, shell_file="batch_jobs.sh"):
        """
        Start ``shell_file lower upper`` inside a detached ``screen``
        session named ``batch_simulation`` on the remote host.
        """
        cmd = "screen -dmS batch_simulation %s %d %d\n"\
            % (shell_file, lower, upper)
        # we only have to check for immediate errors of running this command
        # not sure how to do that atm
        stdout = self.remote_shell_cmd(cmd)
        if stdout:
            self.logger.debug(stdout)

    def usage(self):
        """
        Connect, measure free CPU capacity, disconnect.

        :returns: estimated number of idle CPUs (0 if the connection
            failed).
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return 0
        self.logger.debug("Detecting number of CPUs...")
        self._detect_ncpus()
        self.logger.debug("There are %d CPUs online", self._n_cpus)
        self.logger.debug("Detecting CPU usage...")
        self._detect_cpu_usage()
        self.logger.debug("Usage is: %f", self._cpu_usage)
        # compare work load with number of cpus present
        self._cpu_usage = round(self._n_cpus * self._cpu_usage / 100.0, 0)
        self._n_cpus = self._n_cpus - int(self._cpu_usage)
        self.logger.debug("Number of CPUs to use: %d", self._n_cpus)
        self.logger.debug("Closing client")
        self._client.close()
        return self._n_cpus

    def run(self, lower, upper, shell_file="batch_jobs.sh"):
        """
        Connect and launch the remote batch job for the range
        ``[lower, upper]``; returns ``None`` on connection failure.
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return None
        # start simulations
        self._setup_job(lower, upper, shell_file)
        self.logger.info("Remote job started")
        self._client.close()

    def _detect_processes(self, *args):
        """
        Return the PIDs of the user's remote processes whose command
        name matches any of *args*.
        """
        pids = list()
        for comm in args:
            cmd = "ps -u %s -o pid,comm | grep %s | grep -v grep" %\
                (self._options.username, comm)
            stdout = self.one_time_cmd(cmd)
            if stdout:
                self.logger.debug(stdout)
                stdout = stdout.split("\n")
                for line in stdout:
                    # cheap trick not to parse ordinary text, like %CPU header
                    try:
                        pids.append(int(line.split()[0]))
                    except ValueError:
                        continue
                    except IndexError:
                        break
        return pids

    def kill(self, *args):
        """
        Connect and kill the user's remote processes matching *args*.

        :returns: number of processes successfully signalled (0 if the
            connection failed).
        """
        self.logger.debug("Establishing SSH connection...")
        try:
            self.make_ssh_connection()
        except NetworkError as err:
            self.logger.debug(str(err))
            return 0
        self.logger.debug("Killing process(es)...")
        pids = self._detect_processes(*args)
        self.logger.debug(pids)
        killed = 0
        for pid in pids:
            cmd = "kill %d" % pid
            try:
                stdout = self.one_time_cmd(cmd)
            except NetworkError as err:
                # bug fix: only log the error here; the old code also
                # logged ``stdout``, which is unbound when the very first
                # command raises (NameError)
                self.logger.debug(str(err))
            else:
                if stdout:
                    self.logger.debug(stdout)
                killed += 1
        self.logger.debug("Closing client")
        self._client.close()
        return killed
|
|
from __future__ import unicode_literals
import unittest
import spotify
from spotify import compat
import tests
from tests import mock
@mock.patch('spotify.album.lib', spec=spotify.lib)
class AlbumTest(unittest.TestCase):
    """Unit tests for ``spotify.Album``.

    The libspotify C bindings (``spotify.album.lib``) are replaced by a
    mock, so every test asserts the exact C calls made and fakes their
    return values; ``sp_album *`` handles are dummy cast pointers.
    """
    def setUp(self):
        self.session = tests.create_session_mock()
    def test_create_without_uri_or_sp_album_fails(self, lib_mock):
        with self.assertRaises(AssertionError):
            spotify.Album(self.session)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_create_from_uri(self, link_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        link_instance_mock = link_mock.return_value
        link_instance_mock.as_album.return_value = spotify.Album(
            self.session, sp_album=sp_album
        )
        uri = 'spotify:album:foo'
        result = spotify.Album(self.session, uri=uri)
        link_mock.assert_called_with(self.session, uri=uri)
        link_instance_mock.as_album.assert_called_with()
        lib_mock.sp_album_add_ref.assert_called_with(sp_album)
        self.assertEqual(result._sp_album, sp_album)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_create_from_uri_fail_raises_error(self, link_mock, lib_mock):
        link_instance_mock = link_mock.return_value
        link_instance_mock.as_album.return_value = None
        uri = 'spotify:album:foo'
        with self.assertRaises(ValueError):
            spotify.Album(self.session, uri=uri)
    def test_adds_ref_to_sp_album_when_created(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        spotify.Album(self.session, sp_album=sp_album)
        lib_mock.sp_album_add_ref.assert_called_with(sp_album)
    def test_releases_sp_album_when_album_dies(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        album = None  # noqa
        # force collection so the wrapper's finalizer runs deterministically
        tests.gc_collect()
        lib_mock.sp_album_release.assert_called_with(sp_album)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_repr(self, link_mock, lib_mock):
        link_instance_mock = link_mock.return_value
        link_instance_mock.uri = 'foo'
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = repr(album)
        self.assertEqual(result, 'Album(%r)' % 'foo')
    def test_eq(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album1 = spotify.Album(self.session, sp_album=sp_album)
        album2 = spotify.Album(self.session, sp_album=sp_album)
        self.assertTrue(album1 == album2)
        self.assertFalse(album1 == 'foo')
    def test_ne(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album1 = spotify.Album(self.session, sp_album=sp_album)
        album2 = spotify.Album(self.session, sp_album=sp_album)
        self.assertFalse(album1 != album2)
    def test_hash(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album1 = spotify.Album(self.session, sp_album=sp_album)
        album2 = spotify.Album(self.session, sp_album=sp_album)
        self.assertEqual(hash(album1), hash(album2))
    def test_is_loaded(self, lib_mock):
        lib_mock.sp_album_is_loaded.return_value = 1
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.is_loaded
        lib_mock.sp_album_is_loaded.assert_called_once_with(sp_album)
        self.assertTrue(result)
    @mock.patch('spotify.utils.load')
    def test_load(self, load_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        album.load(10)
        load_mock.assert_called_with(self.session, album, timeout=10)
    def test_is_available(self, lib_mock):
        lib_mock.sp_album_is_available.return_value = 1
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.is_available
        lib_mock.sp_album_is_available.assert_called_once_with(sp_album)
        self.assertTrue(result)
    def test_is_available_is_none_if_unloaded(self, lib_mock):
        lib_mock.sp_album_is_loaded.return_value = 0
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.is_available
        lib_mock.sp_album_is_loaded.assert_called_once_with(sp_album)
        self.assertIsNone(result)
    @mock.patch('spotify.artist.lib', spec=spotify.lib)
    def test_artist(self, artist_lib_mock, lib_mock):
        sp_artist = spotify.ffi.cast('sp_artist *', 43)
        lib_mock.sp_album_artist.return_value = sp_artist
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.artist
        lib_mock.sp_album_artist.assert_called_with(sp_album)
        # wrapping the borrowed sp_artist must take exactly one extra ref
        self.assertEqual(artist_lib_mock.sp_artist_add_ref.call_count, 1)
        self.assertIsInstance(result, spotify.Artist)
        self.assertEqual(result._sp_artist, sp_artist)
    @mock.patch('spotify.artist.lib', spec=spotify.lib)
    def test_artist_if_unloaded(self, artist_lib_mock, lib_mock):
        lib_mock.sp_album_artist.return_value = spotify.ffi.NULL
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.artist
        lib_mock.sp_album_artist.assert_called_with(sp_album)
        self.assertIsNone(result)
    @mock.patch('spotify.Image', spec=spotify.Image)
    def test_cover(self, image_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_image_id = spotify.ffi.new('char[]', b'cover-id')
        lib_mock.sp_album_cover.return_value = sp_image_id
        sp_image = spotify.ffi.cast('sp_image *', 43)
        lib_mock.sp_image_create.return_value = sp_image
        image_mock.return_value = mock.sentinel.image
        image_size = spotify.ImageSize.SMALL
        callback = mock.Mock()
        result = album.cover(image_size, callback=callback)
        self.assertIs(result, mock.sentinel.image)
        lib_mock.sp_album_cover.assert_called_with(sp_album, int(image_size))
        lib_mock.sp_image_create.assert_called_with(
            self.session._sp_session, sp_image_id
        )
        # Since we *created* the sp_image, we already have a refcount of 1 and
        # shouldn't increase the refcount when wrapping this sp_image in an
        # Image object
        image_mock.assert_called_with(
            self.session, sp_image=sp_image, add_ref=False, callback=callback
        )
    @mock.patch('spotify.Image', spec=spotify.Image)
    def test_cover_defaults_to_normal_size(self, image_mock, lib_mock):
        sp_image_id = spotify.ffi.new('char[]', b'cover-id')
        lib_mock.sp_album_cover.return_value = sp_image_id
        sp_image = spotify.ffi.cast('sp_image *', 43)
        lib_mock.sp_image_create.return_value = sp_image
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        album.cover()
        lib_mock.sp_album_cover.assert_called_with(
            sp_album, int(spotify.ImageSize.NORMAL)
        )
    def test_cover_is_none_if_null(self, lib_mock):
        lib_mock.sp_album_cover.return_value = spotify.ffi.NULL
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.cover()
        lib_mock.sp_album_cover.assert_called_with(
            sp_album, int(spotify.ImageSize.NORMAL)
        )
        self.assertIsNone(result)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_cover_link_creates_link_to_cover(self, link_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_link = spotify.ffi.cast('sp_link *', 43)
        lib_mock.sp_link_create_from_album_cover.return_value = sp_link
        link_mock.return_value = mock.sentinel.link
        image_size = spotify.ImageSize.SMALL
        result = album.cover_link(image_size)
        lib_mock.sp_link_create_from_album_cover.assert_called_once_with(
            sp_album, int(image_size)
        )
        link_mock.assert_called_once_with(
            self.session, sp_link=sp_link, add_ref=False
        )
        self.assertEqual(result, mock.sentinel.link)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_cover_link_defaults_to_normal_size(self, link_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_link = spotify.ffi.cast('sp_link *', 43)
        lib_mock.sp_link_create_from_album_cover.return_value = sp_link
        link_mock.return_value = mock.sentinel.link
        album.cover_link()
        lib_mock.sp_link_create_from_album_cover.assert_called_once_with(
            sp_album, int(spotify.ImageSize.NORMAL)
        )
    def test_name(self, lib_mock):
        lib_mock.sp_album_name.return_value = spotify.ffi.new(
            'char[]', b'Foo Bar Baz'
        )
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.name
        lib_mock.sp_album_name.assert_called_once_with(sp_album)
        self.assertEqual(result, 'Foo Bar Baz')
    def test_name_is_none_if_unloaded(self, lib_mock):
        # an empty C string from libspotify means "not loaded yet"
        lib_mock.sp_album_name.return_value = spotify.ffi.new('char[]', b'')
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.name
        lib_mock.sp_album_name.assert_called_once_with(sp_album)
        self.assertIsNone(result)
    def test_year(self, lib_mock):
        lib_mock.sp_album_year.return_value = 2013
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.year
        lib_mock.sp_album_year.assert_called_once_with(sp_album)
        self.assertEqual(result, 2013)
    def test_year_is_none_if_unloaded(self, lib_mock):
        lib_mock.sp_album_is_loaded.return_value = 0
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.year
        lib_mock.sp_album_is_loaded.assert_called_once_with(sp_album)
        self.assertIsNone(result)
    def test_type(self, lib_mock):
        lib_mock.sp_album_type.return_value = int(spotify.AlbumType.SINGLE)
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.type
        lib_mock.sp_album_type.assert_called_once_with(sp_album)
        self.assertIs(result, spotify.AlbumType.SINGLE)
    def test_type_is_none_if_unloaded(self, lib_mock):
        lib_mock.sp_album_is_loaded.return_value = 0
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        result = album.type
        lib_mock.sp_album_is_loaded.assert_called_once_with(sp_album)
        self.assertIsNone(result)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_link_creates_link_to_album(self, link_mock, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 42)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_link = spotify.ffi.cast('sp_link *', 43)
        lib_mock.sp_link_create_from_album.return_value = sp_link
        link_mock.return_value = mock.sentinel.link
        result = album.link
        link_mock.assert_called_once_with(
            self.session, sp_link=sp_link, add_ref=False
        )
        self.assertEqual(result, mock.sentinel.link)
@mock.patch('spotify.album.lib', spec=spotify.lib)
class AlbumBrowserTest(unittest.TestCase):
    """Tests for AlbumBrowser, the asynchronous album metadata fetcher.

    The class decorator replaces the libspotify FFI wrapper with a mock,
    which is injected into every test method as ``lib_mock``.
    """
    def setUp(self):
        # Browser completion callbacks look the session up via this module
        # global, so install a mock session for the duration of each test.
        self.session = tests.create_session_mock()
        spotify._session_instance = self.session
    def tearDown(self):
        spotify._session_instance = None
    def test_create_without_album_or_sp_albumbrowse_fails(self, lib_mock):
        with self.assertRaises(AssertionError):
            spotify.AlbumBrowser(self.session)
    def test_create_from_album(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 43)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        lib_mock.sp_albumbrowse_create.return_value = sp_albumbrowse
        result = album.browse()
        lib_mock.sp_albumbrowse_create.assert_called_with(
            self.session._sp_session, sp_album, mock.ANY, mock.ANY
        )
        self.assertIsInstance(result, spotify.AlbumBrowser)
        # Pull out the C callback and userdata that browse() registered,
        # then fire the callback by hand to simulate libspotify completing.
        albumbrowse_complete_cb = lib_mock.sp_albumbrowse_create.call_args[0][2]
        userdata = lib_mock.sp_albumbrowse_create.call_args[0][3]
        self.assertFalse(result.loaded_event.is_set())
        albumbrowse_complete_cb(sp_albumbrowse, userdata)
        self.assertTrue(result.loaded_event.is_set())
    def test_create_from_album_with_callback(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 43)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        lib_mock.sp_albumbrowse_create.return_value = sp_albumbrowse
        callback = mock.Mock()
        result = album.browse(callback)
        lib_mock.sp_albumbrowse_create.assert_called_with(
            self.session._sp_session, sp_album, mock.ANY, mock.ANY
        )
        albumbrowse_complete_cb = lib_mock.sp_albumbrowse_create.call_args[0][2]
        userdata = lib_mock.sp_albumbrowse_create.call_args[0][3]
        albumbrowse_complete_cb(sp_albumbrowse, userdata)
        result.loaded_event.wait(3)
        # The user-supplied callback is invoked with the browser itself.
        callback.assert_called_with(result)
    def test_browser_is_gone_before_callback_is_called(self, lib_mock):
        sp_album = spotify.ffi.cast('sp_album *', 43)
        album = spotify.Album(self.session, sp_album=sp_album)
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        lib_mock.sp_albumbrowse_create.return_value = sp_albumbrowse
        callback = mock.Mock()
        result = spotify.AlbumBrowser(
            self.session, album=album, callback=callback
        )
        loaded_event = result.loaded_event
        # Drop the only Python reference to the browser before completion.
        result = None  # noqa
        tests.gc_collect()
        # The mock keeps the handle/userdata alive, thus this test doesn't
        # really test that session._callback_handles keeps the handle alive.
        albumbrowse_complete_cb = lib_mock.sp_albumbrowse_create.call_args[0][2]
        userdata = lib_mock.sp_albumbrowse_create.call_args[0][3]
        albumbrowse_complete_cb(sp_albumbrowse, userdata)
        loaded_event.wait(3)
        self.assertEqual(callback.call_count, 1)
        self.assertEqual(
            callback.call_args[0][0]._sp_albumbrowse, sp_albumbrowse
        )
    def test_adds_ref_to_sp_albumbrowse_when_created(self, lib_mock):
        session = tests.create_session_mock()
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        spotify.AlbumBrowser(session, sp_albumbrowse=sp_albumbrowse)
        lib_mock.sp_albumbrowse_add_ref.assert_called_with(sp_albumbrowse)
    def test_releases_sp_albumbrowse_when_album_dies(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        browser = None  # noqa
        tests.gc_collect()
        lib_mock.sp_albumbrowse_release.assert_called_with(sp_albumbrowse)
    @mock.patch('spotify.Link', spec=spotify.Link)
    def test_repr(self, link_mock, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        lib_mock.sp_albumbrowse_is_loaded.return_value = 1
        sp_album = spotify.ffi.cast('sp_album *', 43)
        lib_mock.sp_albumbrowse_album.return_value = sp_album
        link_instance_mock = link_mock.return_value
        link_instance_mock.uri = 'foo'
        result = repr(browser)
        self.assertEqual(result, 'AlbumBrowser(%r)' % 'foo')
    def test_repr_if_unloaded(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        lib_mock.sp_albumbrowse_is_loaded.return_value = 0
        result = repr(browser)
        self.assertEqual(result, 'AlbumBrowser(<not loaded>)')
    def test_eq(self, lib_mock):
        # Equality is based on the underlying sp_albumbrowse pointer.
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser1 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        browser2 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        self.assertTrue(browser1 == browser2)
        self.assertFalse(browser1 == 'foo')
    def test_ne(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser1 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        browser2 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        self.assertFalse(browser1 != browser2)
    def test_hash(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser1 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        browser2 = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        self.assertEqual(hash(browser1), hash(browser2))
    def test_is_loaded(self, lib_mock):
        lib_mock.sp_albumbrowse_is_loaded.return_value = 1
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.is_loaded
        lib_mock.sp_albumbrowse_is_loaded.assert_called_once_with(
            sp_albumbrowse
        )
        self.assertTrue(result)
    @mock.patch('spotify.utils.load')
    def test_load(self, load_mock, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        browser.load(10)
        load_mock.assert_called_with(self.session, browser, timeout=10)
    def test_error(self, lib_mock):
        lib_mock.sp_albumbrowse_error.return_value = int(
            spotify.ErrorType.OTHER_PERMANENT
        )
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.error
        lib_mock.sp_albumbrowse_error.assert_called_once_with(sp_albumbrowse)
        self.assertIs(result, spotify.ErrorType.OTHER_PERMANENT)
    def test_backend_request_duration(self, lib_mock):
        lib_mock.sp_albumbrowse_backend_request_duration.return_value = 137
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.backend_request_duration
        lib_mock.sp_albumbrowse_backend_request_duration.assert_called_with(
            sp_albumbrowse
        )
        self.assertEqual(result, 137)
    def test_backend_request_duration_when_not_loaded(self, lib_mock):
        lib_mock.sp_albumbrowse_is_loaded.return_value = 0
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.backend_request_duration
        lib_mock.sp_albumbrowse_is_loaded.assert_called_with(sp_albumbrowse)
        # The duration query must be skipped entirely when not loaded.
        self.assertEqual(
            lib_mock.sp_albumbrowse_backend_request_duration.call_count, 0
        )
        self.assertIsNone(result)
    def test_album(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        sp_album = spotify.ffi.cast('sp_album *', 43)
        lib_mock.sp_albumbrowse_album.return_value = sp_album
        result = browser.album
        self.assertIsInstance(result, spotify.Album)
        self.assertEqual(result._sp_album, sp_album)
    def test_album_when_not_loaded(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        lib_mock.sp_albumbrowse_album.return_value = spotify.ffi.NULL
        result = browser.album
        lib_mock.sp_albumbrowse_album.assert_called_with(sp_albumbrowse)
        self.assertIsNone(result)
    @mock.patch('spotify.artist.lib', spec=spotify.lib)
    def test_artist(self, artist_lib_mock, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        sp_artist = spotify.ffi.cast('sp_artist *', 43)
        lib_mock.sp_albumbrowse_artist.return_value = sp_artist
        result = browser.artist
        self.assertIsInstance(result, spotify.Artist)
        self.assertEqual(result._sp_artist, sp_artist)
    def test_artist_when_not_loaded(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        lib_mock.sp_albumbrowse_artist.return_value = spotify.ffi.NULL
        result = browser.artist
        lib_mock.sp_albumbrowse_artist.assert_called_with(sp_albumbrowse)
        self.assertIsNone(result)
    def test_copyrights(self, lib_mock):
        copyright = spotify.ffi.new('char[]', b'Apple Records 1973')
        lib_mock.sp_albumbrowse_num_copyrights.return_value = 1
        lib_mock.sp_albumbrowse_copyright.return_value = copyright
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        # The copyrights sequence takes its own reference on the browser:
        # one add_ref from construction, a second from the sequence.
        self.assertEqual(lib_mock.sp_albumbrowse_add_ref.call_count, 1)
        result = browser.copyrights
        self.assertEqual(lib_mock.sp_albumbrowse_add_ref.call_count, 2)
        self.assertEqual(len(result), 1)
        lib_mock.sp_albumbrowse_num_copyrights.assert_called_with(
            sp_albumbrowse
        )
        item = result[0]
        self.assertIsInstance(item, compat.text_type)
        self.assertEqual(item, 'Apple Records 1973')
        self.assertEqual(lib_mock.sp_albumbrowse_copyright.call_count, 1)
        lib_mock.sp_albumbrowse_copyright.assert_called_with(sp_albumbrowse, 0)
    def test_copyrights_if_no_copyrights(self, lib_mock):
        lib_mock.sp_albumbrowse_num_copyrights.return_value = 0
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.copyrights
        self.assertEqual(len(result), 0)
        lib_mock.sp_albumbrowse_num_copyrights.assert_called_with(
            sp_albumbrowse
        )
        self.assertEqual(lib_mock.sp_albumbrowse_copyright.call_count, 0)
    def test_copyrights_if_unloaded(self, lib_mock):
        lib_mock.sp_albumbrowse_is_loaded.return_value = 0
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.copyrights
        lib_mock.sp_albumbrowse_is_loaded.assert_called_with(sp_albumbrowse)
        self.assertEqual(len(result), 0)
    @mock.patch('spotify.track.lib', spec=spotify.lib)
    def test_tracks(self, track_lib_mock, lib_mock):
        sp_track = spotify.ffi.cast('sp_track *', 43)
        lib_mock.sp_albumbrowse_num_tracks.return_value = 1
        lib_mock.sp_albumbrowse_track.return_value = sp_track
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        # As for copyrights, the tracks sequence holds a second add_ref.
        self.assertEqual(lib_mock.sp_albumbrowse_add_ref.call_count, 1)
        result = browser.tracks
        self.assertEqual(lib_mock.sp_albumbrowse_add_ref.call_count, 2)
        self.assertEqual(len(result), 1)
        lib_mock.sp_albumbrowse_num_tracks.assert_called_with(sp_albumbrowse)
        item = result[0]
        self.assertIsInstance(item, spotify.Track)
        self.assertEqual(item._sp_track, sp_track)
        self.assertEqual(lib_mock.sp_albumbrowse_track.call_count, 1)
        lib_mock.sp_albumbrowse_track.assert_called_with(sp_albumbrowse, 0)
        track_lib_mock.sp_track_add_ref.assert_called_with(sp_track)
    def test_tracks_if_no_tracks(self, lib_mock):
        lib_mock.sp_albumbrowse_num_tracks.return_value = 0
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.tracks
        self.assertEqual(len(result), 0)
        lib_mock.sp_albumbrowse_num_tracks.assert_called_with(sp_albumbrowse)
        self.assertEqual(lib_mock.sp_albumbrowse_track.call_count, 0)
    def test_tracks_if_unloaded(self, lib_mock):
        lib_mock.sp_albumbrowse_is_loaded.return_value = 0
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        result = browser.tracks
        lib_mock.sp_albumbrowse_is_loaded.assert_called_with(sp_albumbrowse)
        self.assertEqual(len(result), 0)
    def test_review(self, lib_mock):
        sp_albumbrowse = spotify.ffi.cast('sp_albumbrowse *', 42)
        browser = spotify.AlbumBrowser(
            self.session, sp_albumbrowse=sp_albumbrowse
        )
        review = spotify.ffi.new('char[]', b'A nice album')
        lib_mock.sp_albumbrowse_review.return_value = review
        result = browser.review
        self.assertIsInstance(result, compat.text_type)
        self.assertEqual(result, 'A nice album')
class AlbumTypeTest(unittest.TestCase):
    """Sanity checks for the AlbumType enum constants."""

    def test_has_constants(self):
        # The numeric values must line up with the sp_albumtype C enum.
        for member, value in [
            (spotify.AlbumType.ALBUM, 0),
            (spotify.AlbumType.SINGLE, 1),
        ]:
            self.assertEqual(member, value)
|
|
from __future__ import print_function, division
from sympy import (sympify, diff, sin, cos, Matrix, Symbol, integrate,
trigsimp, Function, symbols)
from sympy.core.basic import S
from sympy.core.compatibility import reduce
from .vector import Vector, _check_vector
from .frame import CoordinateSym, _check_frame
from .dyadic import Dyadic
from .printing import vprint, vsprint, vpprint, vlatex, init_vprinting
__all__ = ['cross', 'dot', 'express', 'time_derivative', 'outer',
'kinematic_equations', 'get_motion_params', 'partial_velocity',
'dynamicsymbols', 'vprint', 'vsprint', 'vpprint', 'vlatex',
'init_vprinting']
def cross(vec1, vec2):
    """Cross product convenience wrapper for Vector.cross(): \n"""
    # Dyadic also implements ``^``, so either operand kind is accepted here.
    if isinstance(vec1, (Vector, Dyadic)):
        return vec1 ^ vec2
    raise TypeError('Cross product is between two vectors')
cross.__doc__ += Vector.cross.__doc__
def dot(vec1, vec2):
    """Dot product convenience wrapper for Vector.dot(): \n"""
    # Dyadic also implements ``&``, so either operand kind is accepted here.
    if isinstance(vec1, (Vector, Dyadic)):
        return vec1 & vec2
    raise TypeError('Dot product is between two vectors')
dot.__doc__ += Vector.dot.__doc__
def express(expr, frame, frame2=None, variables=False):
    """
    Global function for 'express' functionality.
    Re-expresses a Vector, scalar(sympyfiable) or Dyadic in given frame.
    Refer to the local methods of Vector and Dyadic for details.
    If 'variables' is True, then the coordinate variables (CoordinateSym
    instances) of other frames present in the vector/scalar field or
    dyadic expression are also substituted in terms of the base scalars of
    this frame.
    Parameters
    ==========
    expr : Vector/Dyadic/scalar(sympyfiable)
        The expression to re-express in ReferenceFrame 'frame'
    frame: ReferenceFrame
        The reference frame to express expr in
    frame2 : ReferenceFrame
        The other frame required for re-expression(only for Dyadic expr)
    variables : boolean
        Specifies whether to substitute the coordinate variables present
        in expr, in terms of those of frame
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame, outer, dynamicsymbols
    >>> N = ReferenceFrame('N')
    >>> q = dynamicsymbols('q')
    >>> B = N.orientnew('B', 'Axis', [q, N.z])
    >>> d = outer(N.x, N.x)
    >>> from sympy.physics.vector import express
    >>> express(d, B, N)
    cos(q)*(B.x|N.x) - sin(q)*(B.y|N.x)
    >>> express(B.x, N)
    cos(q)*N.x + sin(q)*N.y
    >>> express(N[0], B, variables=True)
    B_x*cos(q(t)) - B_y*sin(q(t))
    """
    _check_frame(frame)
    if expr == 0:
        return expr
    if isinstance(expr, Vector):
        #Given expr is a Vector
        if variables:
            #If variables attribute is True, substitute
            #the coordinate variables in the Vector
            #(each arg is a (measure-numbers, frame) pair; x[-1] is the frame)
            frame_list = [x[-1] for x in expr.args]
            subs_dict = {}
            for f in frame_list:
                subs_dict.update(f.variable_map(frame))
            expr = expr.subs(subs_dict)
        #Re-express in this frame
        outvec = Vector([])
        for i, v in enumerate(expr.args):
            if v[1] != frame:
                #Rotate the measure-number column matrix via the DCM
                temp = frame.dcm(v[1]) * v[0]
                if Vector.simp:
                    temp = temp.applyfunc(lambda x:
                        trigsimp(x, method='fu'))
                outvec += Vector([(temp, frame)])
            else:
                outvec += Vector([v])
        return outvec
    if isinstance(expr, Dyadic):
        #Both frames of a dyadic default to the same target frame
        if frame2 is None:
            frame2 = frame
        _check_frame(frame2)
        ol = Dyadic(0)
        for i, v in enumerate(expr.args):
            #Each dyadic arg is (scalar, left base vector, right base vector)
            ol += express(v[0], frame, variables=variables) * \
                (express(v[1], frame, variables=variables) |
                 express(v[2], frame2, variables=variables))
        return ol
    else:
        if variables:
            #Given expr is a scalar field
            frame_set = set([])
            expr = sympify(expr)
            #Substitute all the coordinate variables
            for x in expr.atoms():
                if isinstance(x, CoordinateSym)and x.frame != frame:
                    frame_set.add(x.frame)
            subs_dict = {}
            for f in frame_set:
                subs_dict.update(f.variable_map(frame))
            return expr.subs(subs_dict)
        return expr
def time_derivative(expr, frame, order=1):
    """
    Calculate the time derivative of a vector/scalar field function
    or dyadic expression in given frame.
    References
    ==========
    http://en.wikipedia.org/wiki/Rotating_reference_frame#Time_derivatives_in_the_two_frames
    Parameters
    ==========
    expr : Vector/Dyadic/sympifyable
        The expression whose time derivative is to be calculated
    frame : ReferenceFrame
        The reference frame to calculate the time derivative in
    order : integer
        The order of the derivative to be calculated
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame, dynamicsymbols
    >>> from sympy import Symbol
    >>> q1 = Symbol('q1')
    >>> u1 = dynamicsymbols('u1')
    >>> N = ReferenceFrame('N')
    >>> A = N.orientnew('A', 'Axis', [q1, N.x])
    >>> v = u1 * N.x
    >>> A.set_ang_vel(N, 10*A.x)
    >>> from sympy.physics.vector import time_derivative
    >>> time_derivative(v, N)
    u1'*N.x
    >>> time_derivative(u1*A[0], N)
    N_x*Derivative(u1(t), t)
    >>> B = N.orientnew('B', 'Axis', [u1, N.z])
    >>> from sympy.physics.vector import outer
    >>> d = outer(N.x, N.x)
    >>> time_derivative(d, B)
    - u1'*(N.y|N.x) - u1'*(N.x|N.y)
    """
    t = dynamicsymbols._t
    _check_frame(frame)
    if order == 0:
        return expr
    #order must be a non-negative integer (fractional orders unsupported)
    if order % 1 != 0 or order < 0:
        raise ValueError("Unsupported value of order entered")
    if isinstance(expr, Vector):
        outvec = Vector(0)
        for i, v in enumerate(expr.args):
            if v[1] == frame:
                #Component already in target frame: differentiate directly
                outvec += Vector([(express(v[0], frame,
                                           variables=True).diff(t), frame)])
            else:
                #Rotating-frame rule (see reference above):
                #d/dt|frame = d/dt|own-frame + w x v
                outvec += time_derivative(Vector([v]), v[1]) + \
                    (v[1].ang_vel_in(frame) ^ Vector([v]))
        return time_derivative(outvec, frame, order - 1)
    if isinstance(expr, Dyadic):
        #Product rule over (scalar, left vector, right vector) terms
        ol = Dyadic(0)
        for i, v in enumerate(expr.args):
            ol += (v[0].diff(t) * (v[1] | v[2]))
            ol += (v[0] * (time_derivative(v[1], frame) | v[2]))
            ol += (v[0] * (v[1] | time_derivative(v[2], frame)))
        return time_derivative(ol, frame, order - 1)
    else:
        #Scalar field: express in frame's coordinates, then differentiate
        return diff(express(expr, frame, variables=True), t, order)
def outer(vec1, vec2):
    """Outer product convenience wrapper for Vector.outer():\n"""
    # Unlike cross/dot, only a Vector is a valid left operand here.
    if isinstance(vec1, Vector):
        return vec1 | vec2
    raise TypeError('Outer product is between two Vectors')
outer.__doc__ += Vector.outer.__doc__
def kinematic_equations(speeds, coords, rot_type, rot_order=''):
    """Gives equations relating the qdot's to u's for a rotation type.
    Supply rotation type and order as in orient. Speeds are assumed to be
    body-fixed; if we are defining the orientation of B in A using by rot_type,
    the angular velocity of B in A is assumed to be in the form: speed[0]*B.x +
    speed[1]*B.y + speed[2]*B.z
    Parameters
    ==========
    speeds : list of length 3
        The body fixed angular velocity measure numbers.
    coords : list of length 3 or 4
        The coordinates used to define the orientation of the two frames.
    rot_type : str
        The type of rotation used to create the equations. Body, Space, or
        Quaternion only
    rot_order : str
        If applicable, the order of a series of rotations.
    Examples
    ========
    >>> from sympy.physics.vector import dynamicsymbols
    >>> from sympy.physics.vector import kinematic_equations, vprint
    >>> u1, u2, u3 = dynamicsymbols('u1 u2 u3')
    >>> q1, q2, q3 = dynamicsymbols('q1 q2 q3')
    >>> vprint(kinematic_equations([u1,u2,u3], [q1,q2,q3], 'body', '313'),
    ... order=None)
    [-(u1*sin(q3) + u2*cos(q3))/sin(q2) + q1', -u1*cos(q3) + u2*sin(q3) + q2', (u1*sin(q3) + u2*cos(q3))*cos(q2)/sin(q2) - u3 + q3']
    """
    # Code below is checking and sanitizing input
    approved_orders = ('123', '231', '312', '132', '213', '321', '121', '131',
                       '212', '232', '313', '323', '1', '2', '3', '')
    rot_order = str(rot_order).upper()  # Now we need to make sure XYZ = 123
    rot_type = rot_type.upper()
    # Normalize axis letters to digits, character by character
    rot_order = [i.replace('X', '1') for i in rot_order]
    rot_order = [i.replace('Y', '2') for i in rot_order]
    rot_order = [i.replace('Z', '3') for i in rot_order]
    rot_order = ''.join(rot_order)
    if not isinstance(speeds, (list, tuple)):
        raise TypeError('Need to supply speeds in a list')
    if len(speeds) != 3:
        raise TypeError('Need to supply 3 body-fixed speeds')
    if not isinstance(coords, (list, tuple)):
        raise TypeError('Need to supply coordinates in a list')
    if rot_type.lower() in ['body', 'space']:
        if rot_order not in approved_orders:
            raise ValueError('Not an acceptable rotation order')
        if len(coords) != 3:
            raise ValueError('Need 3 coordinates for body or space')
        # Actual hard-coded kinematic differential equations
        q1, q2, q3 = coords
        q1d, q2d, q3d = [diff(i, dynamicsymbols._t) for i in coords]
        w1, w2, w3 = speeds
        # Shorthands for the sines/cosines of the three angles
        s1, s2, s3 = [sin(q1), sin(q2), sin(q3)]
        c1, c2, c3 = [cos(q1), cos(q2), cos(q3)]
        # Each branch returns expressions equal to zero (qdot - f(w, q) = 0)
        if rot_type.lower() == 'body':
            if rot_order == '123':
                return [q1d - (w1 * c3 - w2 * s3) / c2, q2d - w1 * s3 - w2 *
                        c3, q3d - (-w1 * c3 + w2 * s3) * s2 / c2 - w3]
            if rot_order == '231':
                return [q1d - (w2 * c3 - w3 * s3) / c2, q2d - w2 * s3 - w3 *
                        c3, q3d - w1 - (- w2 * c3 + w3 * s3) * s2 / c2]
            if rot_order == '312':
                return [q1d - (-w1 * s3 + w3 * c3) / c2, q2d - w1 * c3 - w3 *
                        s3, q3d - (w1 * s3 - w3 * c3) * s2 / c2 - w2]
            if rot_order == '132':
                return [q1d - (w1 * c3 + w3 * s3) / c2, q2d + w1 * s3 - w3 *
                        c3, q3d - (w1 * c3 + w3 * s3) * s2 / c2 - w2]
            if rot_order == '213':
                return [q1d - (w1 * s3 + w2 * c3) / c2, q2d - w1 * c3 + w2 *
                        s3, q3d - (w1 * s3 + w2 * c3) * s2 / c2 - w3]
            if rot_order == '321':
                return [q1d - (w2 * s3 + w3 * c3) / c2, q2d - w2 * c3 + w3 *
                        s3, q3d - w1 - (w2 * s3 + w3 * c3) * s2 / c2]
            if rot_order == '121':
                return [q1d - (w2 * s3 + w3 * c3) / s2, q2d - w2 * c3 + w3 *
                        s3, q3d - w1 + (w2 * s3 + w3 * c3) * c2 / s2]
            if rot_order == '131':
                return [q1d - (-w2 * c3 + w3 * s3) / s2, q2d - w2 * s3 - w3 *
                        c3, q3d - w1 - (w2 * c3 - w3 * s3) * c2 / s2]
            if rot_order == '212':
                return [q1d - (w1 * s3 - w3 * c3) / s2, q2d - w1 * c3 - w3 *
                        s3, q3d - (-w1 * s3 + w3 * c3) * c2 / s2 - w2]
            if rot_order == '232':
                return [q1d - (w1 * c3 + w3 * s3) / s2, q2d + w1 * s3 - w3 *
                        c3, q3d + (w1 * c3 + w3 * s3) * c2 / s2 - w2]
            if rot_order == '313':
                return [q1d - (w1 * s3 + w2 * c3) / s2, q2d - w1 * c3 + w2 *
                        s3, q3d + (w1 * s3 + w2 * c3) * c2 / s2 - w3]
            if rot_order == '323':
                return [q1d - (-w1 * c3 + w2 * s3) / s2, q2d - w1 * s3 - w2 *
                        c3, q3d - (w1 * c3 - w2 * s3) * c2 / s2 - w3]
        if rot_type.lower() == 'space':
            if rot_order == '123':
                return [q1d - w1 - (w2 * s1 + w3 * c1) * s2 / c2, q2d - w2 *
                        c1 + w3 * s1, q3d - (w2 * s1 + w3 * c1) / c2]
            if rot_order == '231':
                return [q1d - (w1 * c1 + w3 * s1) * s2 / c2 - w2, q2d + w1 *
                        s1 - w3 * c1, q3d - (w1 * c1 + w3 * s1) / c2]
            if rot_order == '312':
                return [q1d - (w1 * s1 + w2 * c1) * s2 / c2 - w3, q2d - w1 *
                        c1 + w2 * s1, q3d - (w1 * s1 + w2 * c1) / c2]
            if rot_order == '132':
                return [q1d - w1 - (-w2 * c1 + w3 * s1) * s2 / c2, q2d - w2 *
                        s1 - w3 * c1, q3d - (w2 * c1 - w3 * s1) / c2]
            if rot_order == '213':
                return [q1d - (w1 * s1 - w3 * c1) * s2 / c2 - w2, q2d - w1 *
                        c1 - w3 * s1, q3d - (-w1 * s1 + w3 * c1) / c2]
            if rot_order == '321':
                return [q1d - (-w1 * c1 + w2 * s1) * s2 / c2 - w3, q2d - w1 *
                        s1 - w2 * c1, q3d - (w1 * c1 - w2 * s1) / c2]
            if rot_order == '121':
                return [q1d - w1 + (w2 * s1 + w3 * c1) * c2 / s2, q2d - w2 *
                        c1 + w3 * s1, q3d - (w2 * s1 + w3 * c1) / s2]
            if rot_order == '131':
                return [q1d - w1 - (w2 * c1 - w3 * s1) * c2 / s2, q2d - w2 *
                        s1 - w3 * c1, q3d - (-w2 * c1 + w3 * s1) / s2]
            if rot_order == '212':
                return [q1d - (-w1 * s1 + w3 * c1) * c2 / s2 - w2, q2d - w1 *
                        c1 - w3 * s1, q3d - (w1 * s1 - w3 * c1) / s2]
            if rot_order == '232':
                return [q1d + (w1 * c1 + w3 * s1) * c2 / s2 - w2, q2d + w1 *
                        s1 - w3 * c1, q3d - (w1 * c1 + w3 * s1) / s2]
            if rot_order == '313':
                return [q1d + (w1 * s1 + w2 * c1) * c2 / s2 - w3, q2d - w1 *
                        c1 + w2 * s1, q3d - (w1 * s1 + w2 * c1) / s2]
            if rot_order == '323':
                return [q1d - (w1 * c1 - w2 * s1) * c2 / s2 - w3, q2d - w1 *
                        s1 - w2 * c1, q3d - (-w1 * c1 + w2 * s1) / s2]
    elif rot_type.lower() == 'quaternion':
        if rot_order != '':
            raise ValueError('Cannot have rotation order for quaternion')
        if len(coords) != 4:
            raise ValueError('Need 4 coordinates for quaternion')
        # Actual hard-coded kinematic differential equations
        e0, e1, e2, e3 = coords
        w = Matrix(speeds + [0])
        # E maps body angular velocity to quaternion rates; note edots below
        # is ordered [e1', e2', e3', e0'] to match E's column layout.
        E = Matrix([[e0, -e3, e2, e1], [e3, e0, -e1, e2], [-e2, e1, e0, e3],
                    [-e1, -e2, -e3, e0]])
        edots = Matrix([diff(i, dynamicsymbols._t) for i in [e1, e2, e3, e0]])
        return list(edots.T - 0.5 * w.T * E.T)
    else:
        raise ValueError('Not an approved rotation type for this function')
def get_motion_params(frame, **kwargs):
    """
    Returns the three motion parameters - (acceleration, velocity, and
    position) as vectorial functions of time in the given frame.
    If a higher order differential function is provided, the lower order
    functions are used as boundary conditions. For example, given the
    acceleration, the velocity and position parameters are taken as
    boundary conditions.
    The values of time at which the boundary conditions are specified
    are taken from timevalue1(for position boundary condition) and
    timevalue2(for velocity boundary condition).
    If any of the boundary conditions are not provided, they are taken
    to be zero by default (zero vectors, in case of vectorial inputs). If
    the boundary conditions are also functions of time, they are converted
    to constants by substituting the time values in the dynamicsymbols._t
    time Symbol.
    This function can also be used for calculating rotational motion
    parameters. Have a look at the Parameters and Examples for more clarity.
    Parameters
    ==========
    frame : ReferenceFrame
        The frame to express the motion parameters in
    acceleration : Vector
        Acceleration of the object/frame as a function of time
    velocity : Vector
        Velocity as function of time or as boundary condition
        of velocity at time = timevalue1
    position : Vector
        Velocity as function of time or as boundary condition
        of velocity at time = timevalue1
    timevalue1 : sympyfiable
        Value of time for position boundary condition
    timevalue2 : sympyfiable
        Value of time for velocity boundary condition
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame, get_motion_params, dynamicsymbols
    >>> from sympy import symbols
    >>> R = ReferenceFrame('R')
    >>> v1, v2, v3 = dynamicsymbols('v1 v2 v3')
    >>> v = v1*R.x + v2*R.y + v3*R.z
    >>> get_motion_params(R, position = v)
    (v1''*R.x + v2''*R.y + v3''*R.z, v1'*R.x + v2'*R.y + v3'*R.z, v1*R.x + v2*R.y + v3*R.z)
    >>> a, b, c = symbols('a b c')
    >>> v = a*R.x + b*R.y + c*R.z
    >>> get_motion_params(R, velocity = v)
    (0, a*R.x + b*R.y + c*R.z, a*t*R.x + b*t*R.y + c*t*R.z)
    >>> parameters = get_motion_params(R, acceleration = v)
    >>> parameters[1]
    a*t*R.x + b*t*R.y + c*t*R.z
    >>> parameters[2]
    a*t**2/2*R.x + b*t**2/2*R.y + c*t**2/2*R.z
    """
    ##Helper functions
    def _process_vector_differential(vectdiff, condition, \
                                     variable, ordinate, frame):
        """
        Helper function for get_motion methods. Finds derivative of vectdiff wrt
        variable, and its integral using the specified boundary condition at
        value of variable = ordinate.
        Returns a tuple of - (derivative, function and integral) wrt vectdiff
        """
        #Make sure boundary condition is independent of 'variable'
        if condition != 0:
            condition = express(condition, frame, variables=True)
        #Special case of vectdiff == 0
        if vectdiff == Vector(0):
            return (0, 0, condition)
        #Express vectdiff completely in condition's frame to give vectdiff1
        vectdiff1 = express(vectdiff, frame)
        #Find derivative of vectdiff
        vectdiff2 = time_derivative(vectdiff, frame)
        #Integrate and use boundary condition
        vectdiff0 = Vector(0)
        lims = (variable, ordinate, variable)
        for dim in frame:
            #Integrate component-wise along each base vector of 'frame'
            function1 = vectdiff1.dot(dim)
            abscissa = dim.dot(condition).subs({variable : ordinate})
            # Indefinite integral of 'function1' wrt 'variable', using
            # the given initial condition (ordinate, abscissa).
            vectdiff0 += (integrate(function1, lims) + abscissa) * dim
        #Return tuple
        return (vectdiff2, vectdiff, vectdiff0)
    ##Function body
    _check_frame(frame)
    #Decide mode of operation based on user's input
    #mode 2: acceleration given; mode 1: velocity given; mode 0: position given
    if 'acceleration' in kwargs:
        mode = 2
    elif 'velocity' in kwargs:
        mode = 1
    else:
        mode = 0
    #All the possible parameters in kwargs
    #Not all are required for every case
    #If not specified, set to default values(may or may not be used in
    #calculations)
    conditions = ['acceleration', 'velocity', 'position',
                  'timevalue', 'timevalue1', 'timevalue2']
    for i, x in enumerate(conditions):
        if x not in kwargs:
            #First three entries are vectors, the rest are scalars
            if i < 3:
                kwargs[x] = Vector(0)
            else:
                kwargs[x] = S(0)
        elif i < 3:
            _check_vector(kwargs[x])
        else:
            kwargs[x] = sympify(kwargs[x])
    if mode == 2:
        #Integrate acceleration twice, applying boundary conditions each time
        vel = _process_vector_differential(kwargs['acceleration'],
                                           kwargs['velocity'],
                                           dynamicsymbols._t,
                                           kwargs['timevalue2'], frame)[2]
        pos = _process_vector_differential(vel, kwargs['position'],
                                           dynamicsymbols._t,
                                           kwargs['timevalue1'], frame)[2]
        return (kwargs['acceleration'], vel, pos)
    elif mode == 1:
        #Helper already returns (derivative, function, integral) as required
        return _process_vector_differential(kwargs['velocity'],
                                            kwargs['position'],
                                            dynamicsymbols._t,
                                            kwargs['timevalue1'], frame)
    else:
        #Position given: differentiate twice
        vel = time_derivative(kwargs['position'], frame)
        acc = time_derivative(vel, frame)
        return (acc, vel, kwargs['position'])
def partial_velocity(vel_list, u_list, frame):
    """Returns a list of partial velocities.

    For a list of velocity or angular velocity vectors the partial derivatives
    with respect to the supplied generalized speeds are computed, in the
    specified ReferenceFrame.

    The output is a list of lists. The outer list has a number of elements
    equal to the number of supplied velocity vectors. The inner lists are, for
    each velocity vector, the partial derivatives of that velocity vector with
    respect to the generalized speeds supplied.

    Parameters
    ==========
    vel_list : iterable
        List of velocities of Point's and angular velocities of
        ReferenceFrame's
    u_list : iterable
        List of independent generalized speeds.
    frame : ReferenceFrame
        The ReferenceFrame the partial derivatives are going to be taken in.

    Raises
    ======
    TypeError
        If either vel_list or u_list is not iterable.

    Examples
    ========
    >>> from sympy.physics.vector import Point, ReferenceFrame
    >>> from sympy.physics.vector import dynamicsymbols
    >>> from sympy.physics.vector import partial_velocity
    >>> u = dynamicsymbols('u')
    >>> N = ReferenceFrame('N')
    >>> P = Point('P')
    >>> P.set_vel(N, u * N.x)
    >>> vel_list = [P.vel(N)]
    >>> u_list = [u]
    >>> partial_velocity(vel_list, u_list, N)
    [[N.x]]
    """
    if not hasattr(vel_list, '__iter__'):
        raise TypeError('Provide velocities in an iterable')
    if not hasattr(u_list, '__iter__'):
        raise TypeError('Provide speeds in an iterable')
    # One inner list per velocity vector: its partial derivative with
    # respect to each generalized speed, taken in the given frame.
    return [[vel.diff(speed, frame) for speed in u_list]
            for vel in vel_list]
def dynamicsymbols(names, level=0):
    """Create functions of time via symbols() and Function.

    Each requested name becomes a SymPy UndefinedFunction applied to the
    shared time variable ``dynamicsymbols._t`` (Symbol('t') by default),
    optionally differentiated ``level`` times with respect to time.

    Parameters
    ==========
    names : str
        Names of the dynamic symbols you want to create; works the same way as
        inputs to symbols
    level : int
        Level of differentiation of the returned function; d/dt once of t,
        twice of t, etc.
    Examples
    ========
    >>> from sympy.physics.vector import dynamicsymbols
    >>> from sympy import diff, Symbol
    >>> q1 = dynamicsymbols('q1')
    >>> q1
    q1(t)
    >>> diff(q1, Symbol('t'))
    Derivative(q1(t), t)
    """
    t = dynamicsymbols._t
    funcs = symbols(names, cls=Function)
    if not hasattr(funcs, '__iter__'):
        # Single name: apply to t, then differentiate `level` times.
        return reduce(diff, [t] * level, funcs(t))
    # Multiple names: build the same derivative chain for each function.
    return [reduce(diff, [t] * level, f(t)) for f in funcs]
# Shared independent variable used by all dynamicsymbols: time.
dynamicsymbols._t = Symbol('t')
# Suffix used when printing time-derivatives of dynamic symbols.
dynamicsymbols._str = '\''
|
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for datastore_utils.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import itertools
import math
import mock
from six.moves import range
from google.appengine.api import datastore_errors
from google.appengine.ext import ndb
from google.appengine.ext.ndb import polymodel
from upvote.gae.datastore import utils as datastore_utils
from upvote.gae.lib.testing import basetest
from upvote.shared import constants
class CopyEntityTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.CopyEntity()."""
  def setUp(self):
    super(CopyEntityTest, self).setUp()
    # Minimal one-property model reused by most cases below.
    class A(ndb.Model):
      a = ndb.StringProperty()
    self.default_model = A
  def testUpdateProperties(self):
    """Keyword overrides change only the copy; the copy gets a new key."""
    inst = self.default_model(a='abc')
    inst.put()
    new = datastore_utils.CopyEntity(inst, a='xyz')
    new.put()
    self.assertEqual('abc', inst.a)
    self.assertEqual('xyz', new.a)
    self.assertNotEqual(new.key, inst.key)
  def testFailToSet_AutoNowProperty(self):
    """auto_now properties may not be overridden on the copy."""
    class A(ndb.Model):
      a = ndb.DateTimeProperty(auto_now=True)
    inst = A()
    inst.put()
    with self.assertRaises(datastore_utils.PropertyError):
      datastore_utils.CopyEntity(
          inst, a=datetime.datetime.utcnow())
  def testFailToSet_ComputedProperty(self):
    """ComputedProperty values may not be overridden on the copy."""
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.ComputedProperty(lambda self: self.a[0])
    inst = A(a='xyz')
    inst.put()
    self.assertEqual('x', inst.b)
    with self.assertRaises(datastore_utils.PropertyError):
      datastore_utils.CopyEntity(inst, b='a')
  def testModelWithComputedProperty(self):
    """Computed properties are recomputed from the copy's own values."""
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.ComputedProperty(lambda self: self.a[0])
    inst = A(a='xyz')
    inst.put()
    self.assertEqual('x', inst.b)
    new = datastore_utils.CopyEntity(inst, a='abc')
    new.put()
    self.assertEqual('a', new.b)
  def testPolyModel(self):
    """Copies of PolyModel entities keep the most-derived class."""
    class A(datastore_utils.polymodel.PolyModel):
      a = ndb.StringProperty()
    class B(A):
      pass
    inst = B(a='abc')
    inst.put()
    new = datastore_utils.CopyEntity(inst, a='xyz')
    new.put()
    self.assertEqual('xyz', new.a)
    self.assertIsInstance(new, B)
  def testPolyModel_NoClass(self):
    """Copying works even before the source entity is ever saved."""
    class A(datastore_utils.polymodel.PolyModel):
      a = ndb.StringProperty()
    class B(A):
      pass
    inst = B(a='abc')
    # NOTE: the copy is made (and put) before the original is saved, so
    # CopyEntity must not depend on a persisted source entity.
    a_copy = datastore_utils.CopyEntity(inst, a='xyz')
    a_copy.put()
    inst.put()
    self.assertEqual('xyz', a_copy.a)
    self.assertEqual('abc', inst.a)
  def testNewId(self):
    """An explicit id is applied to the copy's key."""
    inst = self.default_model(a='abc')
    inst.put()
    new = datastore_utils.CopyEntity(inst, id='an_id')
    new.put()
    self.assertEqual('abc', new.a)
    self.assertEqual('an_id', new.key.id())
  def testNewIdWithParent(self):
    """new_parent and id combine into the expected ancestor key path."""
    inst = self.default_model(a='abc')
    inst.put()
    parent = ndb.Key('C', 'c', 'B', 'b')
    expected = ndb.Key('C', 'c', 'B', 'b', 'A', 'an_id')
    new = datastore_utils.CopyEntity(
        inst, new_parent=parent, id='an_id')
    new.put()
    self.assertEqual(expected, new.key)
  def testIdWithKey(self):
    """Passing both new_key and id is rejected."""
    inst = self.default_model(a='abc')
    inst.put()
    with self.assertRaises(datastore_errors.BadArgumentError):
      datastore_utils.CopyEntity(
          inst, new_key=ndb.Key('A', 'a_key'), id='an_id')
  def testParentWithKey(self):
    """Passing both new_key and new_parent is rejected."""
    inst = self.default_model(a='abc')
    inst.put()
    parent = ndb.Key('C', 'c', 'B', 'b')
    with self.assertRaises(datastore_errors.BadArgumentError):
      datastore_utils.CopyEntity(
          inst, new_key=ndb.Key('A', 'a_key'), new_parent=parent)
  def testUnknownProperty(self):
    """Overriding a property the model doesn't define is rejected."""
    inst = self.default_model(a='abc')
    inst.put()
    with self.assertRaises(datastore_utils.PropertyError):
      datastore_utils.CopyEntity(inst, not_a_property='a')
  def testDeletedProperty(self):
    """Properties dropped from the schema don't survive into the copy."""
    inst = self.default_model(a='abc')
    inst.put()
    # Redefine model A without property 'a', simulating a schema change.
    class A(ndb.Model): # pylint: disable=unused-variable
      b = ndb.StringProperty()
    # use_cache=False forces a re-fetch so the entity is rebuilt against the
    # redefined schema rather than served from the in-context cache.
    inst = inst.key.get(use_cache=False)
    copy = datastore_utils.CopyEntity(inst)
    self.assertFalse(hasattr(copy, 'a'))
class DeletePropertyTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.DeleteProperty().

  Several cases redefine the model class mid-test to simulate a schema
  change; the exact order of redefinition, put() and get_by_id() matters.
  """
  def setUp(self):
    super(DeletePropertyTest, self).setUp()
  def testSameSchema(self):
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty()
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone :)
    self.assertIsNone(inst.b)
  def testSameSchema_DoesntDeleteProperty(self):
    """Deleting one entity's property leaves the model schema intact."""
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty()
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    # Create a new instance and verify that the 'b' hasn't disappeared
    new = A(a='abc', b='def')
    new.put()
    self.assertTrue(datastore_utils.HasProperty(new, 'b'))
  def testSameSchema_RepeatedProperty(self):
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty(repeated=True)
    # Create an entity using the initial schema
    inst = A(a='abc', b=['def'])
    inst.put()
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is...kinda gone :|
    # (repeated properties read back as an empty list, not None)
    self.assertEqual([], inst.b)
  def testChangeSchema(self):
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty()
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    # Revised schema
    class A(ndb.Model): # pylint: disable=function-redefined
      a = ndb.StringProperty()
    # Retrieve and save the old instance
    inst = A.get_by_id(inst.key.id())
    inst.put()
    # The old data is still there :(
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone :)
    self.assertIsNone(inst.b)
  def testChangeSchema_RequiredField(self):
    # Initial schema but this time with a required property
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty(required=True)
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    # Revised schema without the required property
    class A(ndb.Model): # pylint: disable=function-redefined
      a = ndb.StringProperty()
    # Retrieve and save the old instance
    inst = A.get_by_id(inst.key.id())
    inst.put()
    # The old data is still there :(
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone :)
    self.assertIsNone(inst.b)
  def testUnknownProperty(self):
    """Deleting a property the model never had is a harmless no-op."""
    class A(ndb.Model):
      a = ndb.StringProperty()
    inst = A(a='abc')
    inst.put()
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    self.assertIsNotNone(inst.a)
  def testChangeSchema_PolyModel(self):
    # Initial schema
    class Base(polymodel.PolyModel):
      a = ndb.StringProperty()
      b = ndb.StringProperty(required=True)
    class A(Base):
      pass
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    # Revised schema
    class Base(polymodel.PolyModel): # pylint: disable=function-redefined
      a = ndb.StringProperty()
    class A(Base): # pylint: disable=function-redefined
      pass
    # Retrieve and save the old instance
    inst = A.get_by_id(inst.key.id())
    inst.put()
    # The old data is still there :(
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeleteProperty(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone :)
    self.assertIsNone(inst.b)
class DeletePropertyValueTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.DeletePropertyValue()."""
  def testDeleteValue(self):
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty()
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeletePropertyValue(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone :)
    self.assertIsNone(inst.b)
  def testDatetimeAutoNowAdd(self):
    """auto_now_add refills the cleared value on the subsequent put()."""
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.DateTimeProperty(auto_now_add=True)
    # Create an entity using the initial schema
    inst = A(a='abc')
    inst.put()
    # Delete the property and save the entity
    datastore_utils.DeletePropertyValue(inst, 'b')
    inst.put()
    self.assertTrue(datastore_utils.HasProperty(inst, 'b'))
    self.assertIsNotNone(inst.b)
  def testRepeatedProperty(self):
    # Initial schema
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty(repeated=True)
    # Create an entity using the initial schema
    inst = A(a='abc', b=['def'])
    inst.put()
    self.assertIsNotNone(inst.b)
    # Delete the property and save the entity
    datastore_utils.DeletePropertyValue(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    # The old data is gone
    self.assertEqual([], inst.b)
  def testRequiredField(self):
    """Clearing a required property makes the next put() fail."""
    # Initial schema but this time with a required property
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.StringProperty(required=True)
    # Create an entity using the initial schema
    inst = A(a='abc', b='def')
    inst.put()
    # Delete the property and save the entity
    datastore_utils.DeletePropertyValue(inst, 'b')
    # Property required but no longer has a value.
    with self.assertRaises(Exception):
      inst.put()
  def testUnknownProperty(self):
    """Clearing a nonexistent property's value is a harmless no-op."""
    class A(ndb.Model):
      a = ndb.StringProperty()
    inst = A(a='abc')
    inst.put()
    datastore_utils.DeletePropertyValue(inst, 'b')
    inst.put()
    inst = A.get_by_id(inst.key.id())
    self.assertIsNotNone(inst.a)
class HasValueTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.HasValue()."""
  def testHasValue(self):
    class Foo(ndb.Model):
      a = ndb.ComputedProperty(lambda self: 'a')
      b = ndb.StringProperty()
    foo = Foo()
    # Nothing assigned or computed yet.
    self.assertFalse(datastore_utils.HasValue(foo, 'a'))
    self.assertFalse(datastore_utils.HasValue(foo, 'b'))
    foo.b = 'b'
    # Computed property 'a' still has no value until the entity is put().
    self.assertFalse(datastore_utils.HasValue(foo, 'a'))
    self.assertTrue(datastore_utils.HasValue(foo, 'b'))
    foo.put()
    self.assertTrue(datastore_utils.HasValue(foo, 'a'))
    self.assertTrue(datastore_utils.HasValue(foo, 'b'))
class GetLocalComputedPropertyValueTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.GetLocalComputedPropertyValue()."""
  def setUp(self):
    super(GetLocalComputedPropertyValueTest, self).setUp()
    # Model whose computed property 'b' derives from 'a'.
    class A(ndb.Model):
      a = ndb.StringProperty()
      b = ndb.ComputedProperty(lambda self: self.a[0])
    self.inst = A(a='xyz')
  def testNormal(self):
    """The locally stored computed value only refreshes on put()."""
    self.assertIsNone(
        datastore_utils.GetLocalComputedPropertyValue(self.inst, 'b'))
    self.inst.put()
    self.assertEqual(
        'x', datastore_utils.GetLocalComputedPropertyValue(self.inst, 'b'))
    self.inst.a = 'cdg'
    # Changing 'a' does not recompute 'b' until the entity is saved again.
    self.assertEqual(
        'x', datastore_utils.GetLocalComputedPropertyValue(self.inst, 'b'))
    self.inst.put()
    self.assertEqual(
        'c', datastore_utils.GetLocalComputedPropertyValue(self.inst, 'b'))
  def testUnknownProperty(self):
    """Unknown property names are rejected."""
    with self.assertRaises(datastore_utils.PropertyError):
      datastore_utils.GetLocalComputedPropertyValue(
          self.inst, 'NotARealProperty')
  def testNotComputedProperty(self):
    """Non-computed properties are rejected."""
    with self.assertRaises(datastore_utils.PropertyError):
      datastore_utils.GetLocalComputedPropertyValue(self.inst, 'a')
class KeyHasAncestorTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.KeyHasAncestor()."""
  def testKeyHasAncestor(self):
    # A key is not its own ancestor.
    self.assertFalse(
        datastore_utils.KeyHasAncestor(ndb.Key('A', 1), ndb.Key('A', 1)))
    self.assertTrue(
        datastore_utils.KeyHasAncestor(
            ndb.Key('A', 1, 'B', 2), ndb.Key('A', 1)))
    # Ancestor ids must match exactly.
    self.assertFalse(
        datastore_utils.KeyHasAncestor(
            ndb.Key('A', 1, 'B', 2), ndb.Key('A', 2)))
    # Equal keys are not ancestors of each other.
    self.assertFalse(
        datastore_utils.KeyHasAncestor(
            ndb.Key('A', 1, 'B', 2), ndb.Key('A', 1, 'B', 2)))
    self.assertTrue(
        datastore_utils.KeyHasAncestor(
            ndb.Key('A', 1, 'B', 2, 'C', 3), ndb.Key('A', 1, 'B', 2)))
class ConcatenateKeysTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.ConcatenateKeys()."""
  def testSuccess(self):
    keys = [ndb.Key('A', 1, 'B', 2), ndb.Key('C', 3)]
    self.assertEqual(
        ndb.Key('A', 1, 'B', 2, 'C', 3), datastore_utils.ConcatenateKeys(*keys))
  def testEmpty(self):
    # No keys to join yields None rather than an error.
    self.assertIsNone(datastore_utils.ConcatenateKeys())
class GetKeyFromUrlsafeTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.GetKeyFromUrlsafe()."""
  def testSuccess(self):
    # Round-trip: a key survives urlsafe encoding and decoding.
    key = ndb.Key('A', 'a', 'B', 'b')
    self.assertEqual(key, datastore_utils.GetKeyFromUrlsafe(key.urlsafe()))
  def testError(self):
    # Malformed urlsafe strings return None instead of raising.
    self.assertIsNone(
        datastore_utils.GetKeyFromUrlsafe('not a real ndb key string'))
class FutureFactoryTest(basetest.UpvoteTestCase):
  """Verifies future callbacks run inside the caller's transaction."""
  def testInTxn(self):
    def AssertInTxn():
      self.assertTrue(ndb.in_transaction())
    def RunAssert():
      # Both deferred and immediate callbacks must observe the transaction.
      fut = datastore_utils.GetNoOpFuture()
      fut.add_callback(AssertInTxn)
      fut.add_immediate_callback(AssertInTxn)
      fut.get_result()
    ndb.transaction(RunAssert)
class GetNoOpFutureTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.GetNoOpFuture()."""
  def testNone(self):
    # Default no-op future is already done and resolves to None.
    future = datastore_utils.GetNoOpFuture()
    self.assertTrue(future.done())
    self.assertIsNone(future.get_result())
  def testResult(self):
    # A provided result is available immediately.
    result = 'foobar'
    future = datastore_utils.GetNoOpFuture(result)
    self.assertTrue(future.done())
    self.assertEqual(result, future.get_result())
class GetMultiFutureTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.GetMultiFuture()."""
  def testNoInput(self):
    # With no dependents the MultiFuture completes immediately.
    mf = datastore_utils.GetMultiFuture([])
    self.assertTrue(mf.done())
  def testSingleFuture(self):
    f = ndb.Future()
    mf = datastore_utils.GetMultiFuture([f])
    self.assertFalse(f.done())
    self.assertFalse(mf.done())
    f.set_result(None)
    self.assertTrue(f.done())
    self.assertFalse(mf.done())
    # Event loop must run for the MultiFuture to be marked as done.
    mf.wait()
    self.assertTrue(mf.done())
  def testManyFutures(self):
    futures = [ndb.Future() for _ in range(3)]
    mf = datastore_utils.GetMultiFuture(futures)
    self.assertFalse(any(f.done() for f in futures))
    self.assertFalse(mf.done())
    for f in futures:
      f.set_result(None)
    self.assertTrue(all(f.done() for f in futures))
    self.assertFalse(mf.done())
    # Event loop must run for the MultiFuture to be marked as done.
    mf.wait()
    self.assertTrue(mf.done())
  def testCantModifyResult(self):
    # The returned MultiFuture is already closed to new dependents.
    f = ndb.Future()
    mf = datastore_utils.GetMultiFuture([f])
    with self.assertRaises(RuntimeError):
      mf.add_dependent(ndb.Future())
class TestModel(ndb.Model):
  """Trivial model used by the pagination tests below."""
  foo = ndb.StringProperty()
  bar = ndb.IntegerProperty()
def CreateEntity(foo='foo', bar=0):
  """Create, persist, and return a single TestModel entity."""
  created = TestModel(foo=foo, bar=bar)
  created.put()
  return created
def CreateEntities(count, **kwargs):
  """Create and persist `count` TestModel entities; return them as a list."""
  created = []
  for _ in range(count):
    created.append(CreateEntity(**kwargs))
  return created
# Module-level mock shared across deferred-task invocations.  CallMock (a
# plain module-level function) forwards to it; presumably this indirection
# exists so the callback can be pickled by the deferred library while tests
# still inspect calls on the mock — TODO confirm.
_GLOBAL_CBK_MOCK = mock.MagicMock()
def CallMock(*args, **kwargs):
  # Forward all arguments to the shared mock so tests can inspect calls.
  _GLOBAL_CBK_MOCK(*args, **kwargs)
def GetKey(key):
  """Resolve a key to its entity."""
  entity = key.get()
  return entity
def ReturnFoo(entity):
  """Return the entity's foo value."""
  return getattr(entity, 'foo')
def ReturnBar(entity):
  """Return the entity's bar value."""
  return getattr(entity, 'bar')
class PaginateTest(basetest.UpvoteTestCase):
  """Tests for datastore_utils.Paginate()."""
  def testSuccess(self):
    """Page and entity counts are correct for 0..49 entities."""
    page_size = 10
    for entity_count in range(50):
      # Create some number of entities.
      CreateEntities(entity_count)
      # Verify that we get the expected number of pages.
      pages = list(
          datastore_utils.Paginate(TestModel.query(), page_size=page_size))
      expected_page_count = int(math.ceil(float(entity_count) / page_size))
      self.assertLen(pages, expected_page_count)
      # Verify that we get the expected number of entities.
      entities = list(itertools.chain(*pages))
      self.assertLen(entities, entity_count)
      # Delete everything.
      for entity in entities:
        entity.key.delete()
class QueuedPaginatedBatchApply(basetest.UpvoteTestCase):
  """Tests for datastore_utils.QueuedPaginatedBatchApply."""
  def tearDown(self):
    super(QueuedPaginatedBatchApply, self).tearDown()
    # The callback mock is module-level state; reset it between tests.
    _GLOBAL_CBK_MOCK.reset_mock()
  def testSuccess(self):
    """Entities are delivered to the callback in page-sized batches."""
    entities = CreateEntities(3)
    datastore_utils.QueuedPaginatedBatchApply(
        TestModel.query(), CallMock, page_size=2)
    # Each deferred task processes one page and enqueues the next.
    for _ in range(3):
      self.assertTaskCount(constants.TASK_QUEUE.DEFAULT, 1)
      self.RunDeferredTasks()
    self.assertTaskCount(constants.TASK_QUEUE.DEFAULT, 0)
    # BUGFIX: MagicMock.called_with() is not an assertion method; it just
    # returns a (truthy) child mock, so assertTrue(mock.called_with(...))
    # always passed.  Use assert_any_call() to actually verify the batches.
    _GLOBAL_CBK_MOCK.assert_any_call(entities[:2])
    _GLOBAL_CBK_MOCK.assert_any_call(entities[2:])
    self.assertEqual(2, _GLOBAL_CBK_MOCK.call_count)
  def testExtraArgs(self):
    """extra_args and extra_kwargs are forwarded to the callback."""
    entities = CreateEntities(1)
    datastore_utils.QueuedPaginatedBatchApply(
        TestModel.query(), CallMock, extra_args=['a', 'b'],
        extra_kwargs={'c': 'c'})
    for _ in range(2):
      self.assertTaskCount(constants.TASK_QUEUE.DEFAULT, 1)
      self.RunDeferredTasks()
    self.assertTaskCount(constants.TASK_QUEUE.DEFAULT, 0)
    # BUGFIX: see testSuccess — replace the always-true called_with() check.
    _GLOBAL_CBK_MOCK.assert_called_once_with(entities, 'a', 'b', c='c')
# Allow running this test module directly.
if __name__ == '__main__':
  basetest.main()
|
|
import gc
import opcode
import inspect
import theano
import numpy as np
from collections import OrderedDict
from inspect import getcallargs
#import theano
#from theano.sandbox.cuda import cuda_ndarray
#cuda_ndarray = cuda_ndarray.cuda_ndarray
def orderedcallargs(fn, *args, **kwargs):
    """
    Returns an OrderedDictionary containing the names and values of a
    function's arguments. The arguments are ordered according to the function's
    argspec:
    1. named arguments
    2. variable positional argument
    3. variable keyword argument
    """
    callargs = getcallargs(fn, *args, **kwargs)
    # BUGFIX: inspect.getargspec() was removed in Python 3.11.  Prefer
    # getfullargspec(), which provides the same fields for the subset used
    # here (the var-keyword name is exposed as 'varkw' instead of 'keywords'),
    # falling back for old interpreters that lack it.
    try:
        argspec = inspect.getfullargspec(fn)
        varkw = argspec.varkw
    except AttributeError:  # pre-3.0 interpreters: only getargspec exists
        argspec = inspect.getargspec(fn)
        varkw = argspec.keywords
    o_callargs = OrderedDict()
    # 1. named (positional-or-keyword) arguments, in signature order
    for argname in argspec.args:
        o_callargs[argname] = callargs[argname]
    # 2. the *varargs tuple, if the signature declares one
    if argspec.varargs:
        o_callargs[argspec.varargs] = callargs[argspec.varargs]
    # 3. the **kwargs dict, if the signature declares one
    if varkw:
        o_callargs[varkw] = callargs[varkw]
    return o_callargs
def expandedcallargs(fn, *args, **kwargs):
    """
    Returns a tuple of all function args and kwargs, expanded so that varargs
    and kwargs are not nested. The args are ordered by their position in the
    function signature.
    """
    ordered = orderedcallargs(fn, *args, **kwargs)
    return tuple(flatten(ordered))
def as_seq(x, seq_type=None):
    """
    If x is not a sequence, returns it as one. The seq_type argument allows the
    output type to be specified (defaults to list). If x is a sequence and
    seq_type is provided, then x is converted to seq_type.
    Arguments
    ---------
    x : seq or object
    seq_type : output sequence type
        If None, then if x is already a sequence, no change is made. If x
        is not a sequence, a list is returned.
    """
    # None stands for the empty sequence; any non-container is wrapped.
    if x is None:
        wrapped = []
    elif isinstance(x, (list, tuple, set, frozenset, dict)):
        wrapped = x
    else:
        wrapped = [x]
    # Convert only when a target type was requested and isn't already met.
    if seq_type is None or isinstance(wrapped, seq_type):
        return wrapped
    return seq_type(wrapped)
def itercode(code):
    """Return a generator of byte-offset, opcode, and argument
    from a byte-code-string
    """
    # NOTE(review): written for Python 2 bytecode — code[i] is a one-char str
    # fed to ord().  On Python 3, bytes indexing already yields ints (and the
    # bytecode format changed to wordcode in 3.6), so this needs porting.
    i = 0
    extended_arg = 0
    n = len(code)
    while i < n:
        c = code[i]
        num = i
        op = ord(c)
        i = i + 1
        oparg = None
        if op >= opcode.HAVE_ARGUMENT:
            # Arguments are little-endian 16-bit values, accumulated with any
            # preceding EXTENDED_ARG prefix.
            oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
            extended_arg = 0
            i = i + 2
        if op == opcode.EXTENDED_ARG:
            extended_arg = oparg * 65536
        # The consumer may send ('abs'|'rel', dst) back into the generator to
        # redirect the scan, emulating a jump.
        delta = yield num, op, oparg
        if delta is not None:
            abs_rel, dst = delta
            assert abs_rel == 'abs' or abs_rel == 'rel'
            i = dst if abs_rel == 'abs' else i + dst
def flatten(container):
    """Iterate over the elements of a [nested] container in a consistent order,
    unpacking dictionaries, lists, and tuples.
    Returns a list.
    Note that unflatten(container, flatten(container)) == container """
    if isinstance(container, (list, tuple)):
        out = []
        for item in container:
            out += flatten(item)
        return out
    if isinstance(container, dict):
        # OrderedDicts keep insertion order; plain dicts are walked in
        # sorted-key order when the keys are sortable, insertion order
        # otherwise.
        if isinstance(container, OrderedDict):
            keys = list(container.keys())
        else:
            try:
                keys = sorted(container.keys())
            except TypeError:
                keys = list(container.keys())
        out = []
        for key in keys:
            out += flatten(container[key])
        return out
    # Any non-container value is a leaf.
    return [container]
def unflatten(container, flat):
    """Iterate over a [nested] container, building a clone from the elements of
    flat.
    Returns object with same type as container.
    Note that unflatten(container, flatten(container)) == container
    """
    # Consume `flat` left-to-right while walking `container` in the same
    # order flatten() uses.
    leaves = iter(flat)
    def rebuild(template):
        if isinstance(template, (list, tuple)):
            items = [rebuild(elem) for elem in template]
            # check for namedtuple, which has a different __new__ signature
            if hasattr(template, '_fields'):
                return type(template)(*items)
            return type(template)(items)
        if isinstance(template, dict):
            clone = type(template)()
            if isinstance(template, OrderedDict):
                keys = list(template.keys())
            else:
                try:
                    keys = sorted(template.keys())
                except TypeError:
                    keys = list(template.keys())
            for key in keys:
                clone[key] = rebuild(template[key])
            return clone
        # Leaf position: take the next flat element.
        return next(leaves)
    return rebuild(container)
def isvar(x):
    """
    Type test for Theano variables.
    """
    return isinstance(x, (theano.tensor.sharedvar.SharedVariable,
                          theano.tensor.TensorConstant,
                          theano.tensor.TensorVariable))
def clean_int_args(*args, **kwargs):
    """
    Given args and kwargs, replaces small integers with numpy int16 objects, to
    allow tracing.
    """
    def swap_small_ints(structure):
        # CPython interns ints in -5..256, so they are shared objects;
        # replacing them with np.int16 gives each value a distinct,
        # traceable identity.
        flat = flatten(structure)
        for idx, val in enumerate(flat):
            if type(val) is int and -5 <= val <= 256:
                flat[idx] = np.int16(val)
        return unflatten(structure, flat)
    return swap_small_ints(args), swap_small_ints(kwargs)
# -- picklable decorated function
class post_collect(object):
    """Picklable decorator: run the wrapped callable, then force a gc pass."""
    def __init__(self, f):
        # Stored as a plain attribute (rather than a closure) so instances
        # remain picklable.
        self.f = f
    def __call__(self, *args, **kwargs):
        try:
            return self.f(*args, **kwargs)
        finally:
            # Always collect cyclic garbage, even when the call raises.
            gc.collect()
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.metastore_v1.types import metastore
from google.longrunning import operations_pb2 # type: ignore
# Report the installed google-cloud-metastore version in the client's
# user-agent; fall back to an unversioned ClientInfo when the distribution
# metadata cannot be found.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-metastore",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class DataprocMetastoreTransport(abc.ABC):
    """Abstract transport class for DataprocMetastore."""
    # OAuth scopes requested when building default credentials.
    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
    DEFAULT_HOST: str = "metastore.googleapis.com"
    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        **kwargs,
    ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host
        # Kwargs forwarded to google.auth: explicit user scopes plus the
        # transport's default scopes.
        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
        # Save the scopes.
        self._scopes = scopes
        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id
            )
        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)
        # Save the credentials.
        self._credentials = credentials
    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # NOTE(review): mutating RPCs carry a 60s default timeout while
        # read-only RPCs have none; presumably mirrors the generated service
        # config — confirm before changing.
        self._wrapped_methods = {
            self.list_services: gapic_v1.method.wrap_method(
                self.list_services, default_timeout=None, client_info=client_info,
            ),
            self.get_service: gapic_v1.method.wrap_method(
                self.get_service, default_timeout=None, client_info=client_info,
            ),
            self.create_service: gapic_v1.method.wrap_method(
                self.create_service, default_timeout=60.0, client_info=client_info,
            ),
            self.update_service: gapic_v1.method.wrap_method(
                self.update_service, default_timeout=60.0, client_info=client_info,
            ),
            self.delete_service: gapic_v1.method.wrap_method(
                self.delete_service, default_timeout=60.0, client_info=client_info,
            ),
            self.list_metadata_imports: gapic_v1.method.wrap_method(
                self.list_metadata_imports,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_metadata_import: gapic_v1.method.wrap_method(
                self.get_metadata_import, default_timeout=None, client_info=client_info,
            ),
            self.create_metadata_import: gapic_v1.method.wrap_method(
                self.create_metadata_import,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_metadata_import: gapic_v1.method.wrap_method(
                self.update_metadata_import,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.export_metadata: gapic_v1.method.wrap_method(
                self.export_metadata, default_timeout=60.0, client_info=client_info,
            ),
            self.restore_service: gapic_v1.method.wrap_method(
                self.restore_service, default_timeout=60.0, client_info=client_info,
            ),
            self.list_backups: gapic_v1.method.wrap_method(
                self.list_backups, default_timeout=None, client_info=client_info,
            ),
            self.get_backup: gapic_v1.method.wrap_method(
                self.get_backup, default_timeout=None, client_info=client_info,
            ),
            self.create_backup: gapic_v1.method.wrap_method(
                self.create_backup, default_timeout=60.0, client_info=client_info,
            ),
            self.delete_backup: gapic_v1.method.wrap_method(
                self.delete_backup, default_timeout=60.0, client_info=client_info,
            ),
        }
    def close(self):
        """Closes resources associated with the transport.
        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()
    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()
    # Each RPC below is exposed as an abstract property returning the
    # transport-specific callable; concrete gRPC/asyncio subclasses provide
    # the implementations (sync response or awaitable, hence the Unions).
    @property
    def list_services(
        self,
    ) -> Callable[
        [metastore.ListServicesRequest],
        Union[
            metastore.ListServicesResponse, Awaitable[metastore.ListServicesResponse]
        ],
    ]:
        raise NotImplementedError()
    @property
    def get_service(
        self,
    ) -> Callable[
        [metastore.GetServiceRequest],
        Union[metastore.Service, Awaitable[metastore.Service]],
    ]:
        raise NotImplementedError()
    @property
    def create_service(
        self,
    ) -> Callable[
        [metastore.CreateServiceRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def update_service(
        self,
    ) -> Callable[
        [metastore.UpdateServiceRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def delete_service(
        self,
    ) -> Callable[
        [metastore.DeleteServiceRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def list_metadata_imports(
        self,
    ) -> Callable[
        [metastore.ListMetadataImportsRequest],
        Union[
            metastore.ListMetadataImportsResponse,
            Awaitable[metastore.ListMetadataImportsResponse],
        ],
    ]:
        raise NotImplementedError()
    @property
    def get_metadata_import(
        self,
    ) -> Callable[
        [metastore.GetMetadataImportRequest],
        Union[metastore.MetadataImport, Awaitable[metastore.MetadataImport]],
    ]:
        raise NotImplementedError()
    @property
    def create_metadata_import(
        self,
    ) -> Callable[
        [metastore.CreateMetadataImportRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def update_metadata_import(
        self,
    ) -> Callable[
        [metastore.UpdateMetadataImportRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def export_metadata(
        self,
    ) -> Callable[
        [metastore.ExportMetadataRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def restore_service(
        self,
    ) -> Callable[
        [metastore.RestoreServiceRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def list_backups(
        self,
    ) -> Callable[
        [metastore.ListBackupsRequest],
        Union[metastore.ListBackupsResponse, Awaitable[metastore.ListBackupsResponse]],
    ]:
        raise NotImplementedError()
    @property
    def get_backup(
        self,
    ) -> Callable[
        [metastore.GetBackupRequest],
        Union[metastore.Backup, Awaitable[metastore.Backup]],
    ]:
        raise NotImplementedError()
    @property
    def create_backup(
        self,
    ) -> Callable[
        [metastore.CreateBackupRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
    @property
    def delete_backup(
        self,
    ) -> Callable[
        [metastore.DeleteBackupRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()
# Public API of this module.
__all__ = ("DataprocMetastoreTransport",)
|
|
import logging
from dateutil import parser
import math
import json
from get_database import get_mode_db, get_section_db
from datetime import datetime, timedelta
from userclient import getClientSpecificQueryFilter
import stats
import time
# TODO: Argh! Until now, we just had the data collector import the webapp.
# Now we have the webapp import the data collector. This badly needs
# restructuring.
import sys
import os
sys.path.append("%s" % os.getcwd())
sys.path.append("%s/../CFC_DataCollector/moves" % os.getcwd())
import collect
def travel_time(time1, time2):
    """Return the elapsed time between two timestamp strings, in whole seconds.

    :param time1: start timestamp, any format ``dateutil.parser`` accepts
    :param time2: end timestamp, same formats as ``time1``
    :return: ``int`` duration in seconds (``time2 - time1``)
    """
    start_time = parser.parse(time1)
    end_time = parser.parse(time2)
    # BUG FIX: the original used timedelta.seconds, which only returns the
    # seconds *component* and silently drops whole days (a 25 h trip would
    # report 3600 s). total_seconds() is the true duration.
    return int((end_time - start_time).total_seconds())
def calDistance(point1, point2):
earthRadius = 6371000
# SHANKARI: Why do we have two calDistance() functions?
# Need to combine into one
# points are now in geojson format (lng,lat)
dLat = math.radians(point1[1]-point2[1])
dLon = math.radians(point1[0]-point2[0])
lat1 = math.radians(point1[1])
lat2 = math.radians(point2[1])
a = (math.sin(dLat/2) ** 2) + ((math.sin(dLon/2) ** 2) * math.cos(lat1) * math.cos(lat2))
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
d = earthRadius * c
return d
def max_Distance(points):
    """Return the largest pairwise haversine distance (metres) among a list of
    track points, each shaped like
    ``{'track_location': {'coordinates': [lng, lat]}, ...}``.
    Returns 0 for fewer than two points.
    """
    count = len(points)
    farthest = 0
    # Exhaustive O(n^2) pairwise comparison, same as the original.
    for left in range(count):
        for right in range(left + 1, count):
            separation = calDistance(
                points[left]['track_location']['coordinates'],
                points[right]['track_location']['coordinates'])
            if separation > farthest:
                farthest = separation
    return farthest
def filter_unclassifiedSections(UnclassifiedSections):
    """Split candidate sections into real trips and noise.

    Sections lasting >= 120 s or spanning >= 200 m are kept (with their
    numeric mode id resolved to a mode name); everything else is tagged in
    the sections collection as 'not a trip' / 'not complete information' so
    it is not offered to the user again.
    """
    minimum_travel_time=120
    minimum_travel_distance=200
    Modes=get_mode_db()
    Sections=get_section_db()
    filtered_Sections=[]
    for section in UnclassifiedSections:
        # logging.debug("Appending %s" % json.dumps(section))
        # Case 1: both timestamps present and enough points for a distance
        # estimate -> require both minimum duration and minimum distance.
        if section['section_start_time']!=''and section['section_end_time']!=''and len(section['track_points'])>=2:
            if travel_time(section['section_start_time'],section['section_end_time']) >= minimum_travel_time and \
                max_Distance(section['track_points']) >= minimum_travel_distance:
                # Resolve a numeric mode id to its display name; string
                # modes are passed through unchanged.
                section['mode']=''.join(mode['mode_name'] for mode in Modes.find({"mode_id":section['mode']})) \
                if type(section['mode'])!=type('aa') else section['mode']
                filtered_Sections.append(section)
            else:
                Sections.update({"$and":[{'source':'Shankari'},
                                {'user_id':section['user_id']},
                                {'trip_id': section['trip_id']},
                                {'section_id': section['section_id']}]},{"$set":{'type':'not a trip'}})
        # Case 2: timestamps present but too few points -> duration check only.
        elif section['section_start_time']!=''and section['section_end_time']!=''and len(section['track_points'])<2:
            if travel_time(section['section_start_time'],section['section_end_time']) >= minimum_travel_time:
                section['mode']=''.join(mode['mode_name'] for mode in Modes.find({"mode_id":section['mode']})) \
                if type(section['mode'])!=type('aa') else section['mode']
                filtered_Sections.append(section)
            else:
                Sections.update({"$and":[{'source':'Shankari'},
                                {'user_id':section['user_id']},
                                {'trip_id': section['trip_id']},
                                {'section_id': section['section_id']}]},{"$set":{'type':'not a trip'}})
        # Case 3: a timestamp is missing but we have points -> distance check only.
        elif (section['section_start_time']==''or section['section_end_time']=='') and len(section['track_points'])>=2:
            if max_Distance(section['track_points']) >= minimum_travel_distance:
                section['mode']=''.join(mode['mode_name'] for mode in Modes.find({"mode_id":section['mode']})) \
                if type(section['mode'])!=type('aa') else section['mode']
                filtered_Sections.append(section)
            else:
                Sections.update({"$and":[{'source':'Shankari'},
                                {'user_id':section['user_id']},
                                {'trip_id': section['trip_id']},
                                {'section_id': section['section_id']}]},{"$set":{'type':'not a trip'}})
        # Case 4: neither timestamps nor points -> cannot judge the section.
        else:
            Sections.update({"$and":[{'source':'Shankari'},
                            {'user_id':section['user_id']},
                            {'trip_id': section['trip_id']},
                            {'section_id': section['section_id']}]},{"$set":{'type':'not complete information'}})
    return filtered_Sections
# TODO: Mogeng fix me the right way
def stripoutNonSerializable(sectionList):
    """Delete (in place) the fields of each section that cannot be JSON
    serialized, and return the list of the same, now-stripped, dicts.

    Raises KeyError if a section is missing one of the expected fields,
    matching the original behavior.
    """
    fields_to_drop = (
        'section_start_datetime',
        'section_end_datetime',
        'section_start_point',
        'section_end_point',
        'user_id',
    )
    stripped = []
    for section in sectionList:
        for field in fields_to_drop:
            del section[field]
        stripped.append(section)
    return stripped
def queryUnclassifiedSections(uuid):
    """Return a cursor over this user's unconfirmed 'move' sections from the
    past week, after applying any client-specific filter, and record the
    shown/total ratio as a server stat.
    """
    now = datetime.now()
    # Only offer sections from the last week for classification.
    weekago = now - timedelta(weeks = 1)
    user_uuid=uuid
    clientSpecificQuery = getClientSpecificQueryFilter(user_uuid)
    Sections=get_section_db()
    logging.debug('section.count = %s' % Sections.count())
    # Theoretically, we shouldn't need the 'predicted_mode' code because we
    # predict values right after reading the trips from moves. However, there
    # is still a small race window in which we are reading trips for other
    # users and haven't yet run the classifier. As we get more users, this
    # window can only grow, and it is easy to handle it, so let's just do so now.
    defaultQueryList = [ {'source':'Shankari'},
                         {'user_id':user_uuid},
                         {'predicted_mode': { '$exists' : True } },
                         {'confirmed_mode': ''},
                         { 'type': 'move' },
                         {'section_end_datetime': {"$gt": weekago}}]
    completeQueryList = defaultQueryList + clientSpecificQuery
    unclassifiedSections=Sections.find({"$and": completeQueryList})
    # totalUnclassifiedSections are for debugging only, can remove after we know that this works well
    totalUnclassifiedSections=Sections.find({"$and":[ {'source':'Shankari'},
                         {'user_id':user_uuid},
                         {'confirmed_mode': ''},
                         { 'type': 'move' }]})
    unclassifiedSectionCount = unclassifiedSections.count()
    totalUnclassifiedSectionCount = totalUnclassifiedSections.count()
    logging.debug('Unsec.count = %s' % unclassifiedSectionCount)
    logging.debug('Total Unsec.count = %s' % totalUnclassifiedSectionCount)
    # Keep track of what percent of sections are stripped out.
    # Sections can be stripped out for various reasons:
    # - they are too old
    # - they have enough confidence that above the magic threshold (90%) AND
    #   the client has requested stripping out
    stats.storeServerEntry(user_uuid, stats.STAT_TRIP_MGR_PCT_SHOWN, time.time(),
        0 if totalUnclassifiedSectionCount == 0 else float(unclassifiedSectionCount)/totalUnclassifiedSectionCount)
    return unclassifiedSections
def getUnclassifiedSections(uuid):
    """Return ``{'sections': [...]}`` holding the user's unclassified
    sections, filtered down to plausible trips and stripped of fields that
    cannot be JSON-serialized.
    """
    candidates = queryUnclassifiedSections(uuid)
    plausible = filter_unclassifiedSections(candidates)
    logging.debug("filtered_UnclassifiedSections = %s" % len(plausible))
    serializable = stripoutNonSerializable(plausible)
    logging.debug("stripped_filtered_UnclassifiedSections = %s" % len(serializable))
    return {"sections": serializable}
def setSectionClassification(uuid, userClassifications):
    """Persist the user's answers for a batch of sections.

    Each entry carries trip_id, section_id and userMode. 'not a trip'
    retags the section; any other answer sets confirmed_mode to the mode's
    numeric id when the name is known, otherwise the raw string is stored.
    """
    number_class_sec=len(userClassifications)
    user_uuid=uuid
    Sections=get_section_db()
    Modes=get_mode_db()
    logging.debug("userClassifications = %s" % userClassifications)
    logging.debug("number_class_sec = %s" % number_class_sec)
    if number_class_sec!=0:
        logging.debug("number_class_sec = %s" % number_class_sec)
        for sectionindex in range(number_class_sec):
            # The user said this section is not a real trip: retag it so it
            # is excluded from future queries instead of storing a mode.
            if userClassifications[sectionindex]['userMode']=='not a trip':
                logging.debug("usermode = %s" % userClassifications[sectionindex]['userMode'])
                Sections.update({"$and":[{'source':'Shankari'},
                                {'user_id': user_uuid},
                                {'trip_id': userClassifications[sectionindex]['trip_id']},
                                {'section_id': int(userClassifications[sectionindex]['section_id'])}]},
                                {"$set":{'type':userClassifications[sectionindex]['userMode']}})
                logging.debug("update done" )
            else:
                # Known mode names are translated to their numeric id;
                # unknown names are stored verbatim as the confirmed mode.
                Sections.update({"$and":[{'source':'Shankari'},
                                {'user_id': user_uuid},
                                {'trip_id': userClassifications[sectionindex]['trip_id']},
                                {'section_id': int(userClassifications[sectionindex]['section_id'])}]},
                                {"$set":{'confirmed_mode':int(''.join(map(str, [mode['mode_id']
                                    for mode in Modes.find({'mode_name':userClassifications[sectionindex]['userMode']})])))
                                    if Modes.find({'mode_name':userClassifications[sectionindex]['userMode']}).count()!=0
                                    else userClassifications[sectionindex]['userMode']}})
                logging.debug("update done" )
def storeSensedTrips(user_uuid, sections):
    """Hand a batch of phone-sensed trip sections to the data collector for
    storage (delegates to collect.processTripArray; no return value).
    """
    collect.processTripArray(user_uuid, sections)
    logging.debug("done storing sensed trips")
def getModeOptions():
    """Return ``{'modelist': [...]}`` listing the display name of every
    transport mode in the modes collection.
    """
    Modes = get_mode_db()
    # Idiomatic comprehension instead of the original manual append loop;
    # same ordering and contents.
    return {'modelist': [mode['mode_name'] for mode in Modes.find()]}
|
|
from __future__ import unicode_literals
from collections import OrderedDict
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.core.exceptions import FieldDoesNotExist
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text, python_2_unicode_compatible
from jsonfield import JSONField
from .utils import get_request
# JSON encoder used when serializing change payloads. Projects may override
# it by pointing the JSONFIELD_ENCODER setting at a dotted encoder-class path.
EncoderClass = DjangoJSONEncoder
if hasattr(settings, 'JSONFIELD_ENCODER'):
    EncoderClass = import_string(getattr(settings, 'JSONFIELD_ENCODER'))
# Compact separators keep the stored JSON small.
DUMP_KWARGS = {
    'cls': EncoderClass,
    'separators': (',', ':')
}
class AuditTrailQuerySet(models.QuerySet):
    """QuerySet of AuditTrail rows with helpers that fold a sequence of
    trails into per-field change summaries."""

    def get_changes(self):
        """Collapse all trails in this queryset (oldest first) into one
        ``{field_name: change}`` dict, dropping fields whose value ended up
        back where it started. All trails must target the same model.
        """
        changes_dict = {}
        if not self.exists():
            return {}
        model_class = self[0].content_type.model_class()
        for trail in self.order_by('id'):
            if not isinstance(trail.content_object, model_class):
                raise ValueError(
                    'AuditTrailQuerySet.get_changes couldn\'t get changes for different models: %s and %s' % (
                        model_class.__name__, trail.content_object.__class__.__name__
                    ))
            # Related-change markers are summarized separately by
            # get_related_changes(); skip them here.
            if trail.is_related_changed:
                continue
            self._apply_field_changes(changes_dict, trail)
        # Removing values that changed back
        # F.e. 1->2->3->1 should not be showed as change with 1->1
        for field, change in changes_dict.copy().items():
            if change['old_value'] == change['new_value']:
                del changes_dict[field]
        return changes_dict

    def get_related_changes(self):
        """Group RELATED_CHANGED trails by the related object and collapse
        each group's field changes; returns the grouped summaries, in id
        order. A create that is later deleted is dropped entirely.
        """
        related_changes_dict = OrderedDict()
        changes = self.filter(action=AuditTrail.ACTIONS.RELATED_CHANGED).order_by('id')
        for change in changes:
            related_trail = change.related_trail
            # One bucket per (app, model, pk) of the related object.
            key = '%s.%s-%d' % (
                related_trail.content_type.app_label,
                related_trail.content_type.name,
                int(related_trail.object_id)
            )
            related_object_changes = related_changes_dict.get(key, None)
            if related_object_changes is None:
                related_object_changes = {
                    'action': related_trail.get_action_display(),
                    'representation': related_trail.object_repr,
                    'changes': {},
                    'model': '%s.%s' % (related_trail.content_type.app_label, related_trail.content_type.model)
                }
                related_changes_dict[key] = related_object_changes
            # Created-then-deleted within the window nets out to nothing.
            if related_trail.is_deleted and related_object_changes['action'] == 'Created':
                del related_changes_dict[key]
            self._apply_field_changes(related_object_changes['changes'], related_trail)
        return related_changes_dict.values()

    def _apply_field_changes(self, changes_dict, trail):
        # Merge one trail into the accumulator: the first trail to mention a
        # field fixes its old_value; later trails only advance new_value.
        for field_name, field_change in trail.get_changes().items():
            if field_name not in changes_dict:
                changes_dict[field_name] = field_change
            changes_dict[field_name]['new_value'] = field_change['new_value']
            changes_dict[field_name]['new_value_string'] = field_change['new_value_string']
            changes_dict[field_name]['field_name'] = field_name
class AuditTrailManager(models.Manager):
    """Manager that creates AuditTrail rows for model lifecycle events."""

    def get_queryset(self):
        return AuditTrailQuerySet(self.model, using=self._db)

    def generate_for_instance(self, instance, action):
        """Create and save one trail row for *instance*, stamping the acting
        user and client IP from the current request when one is available.
        """
        audit_trail = self.model(
            content_type=ContentType.objects.get_for_model(instance),
            object_id=instance.pk,
            # object_repr column is capped at 200 chars; truncate to match.
            object_repr=force_text(instance)[:200],
            action=action
        )
        request = get_request(['user', 'META'])
        if request and hasattr(request, 'user'):
            # NOTE(review): is_authenticated is called — this targets Django
            # versions where it is still a method; confirm before upgrading.
            if request.user.is_authenticated():
                audit_trail.user = request.user
            # Prefer the proxy-forwarded address when present.
            audit_trail.user_ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META.get('REMOTE_ADDR')
        audit_trail.save()
        return audit_trail

    # Thin wrappers naming each lifecycle event explicitly.
    def generate_trail_for_instance_created(self, instance):
        return self.generate_for_instance(instance, AuditTrail.ACTIONS.CREATED)

    def generate_trail_for_instance_updated(self, instance):
        return self.generate_for_instance(instance, AuditTrail.ACTIONS.UPDATED)

    def generate_trail_for_instance_deleted(self, instance):
        return self.generate_for_instance(instance, AuditTrail.ACTIONS.DELETED)

    def generate_trail_for_related_change(self, instance):
        return self.generate_for_instance(instance, AuditTrail.ACTIONS.RELATED_CHANGED)
@python_2_unicode_compatible
class AuditTrail(models.Model):
    """ Table to store all changes of subscribed models. """
    # (Docstring moved here from below ACTION_CHOICES, where it was a dead
    # string statement and never became the class __doc__.)

    class ACTIONS(object):
        # Enumeration of trail row kinds stored in the `action` column.
        CREATED = 1
        UPDATED = 2
        DELETED = 3
        RELATED_CHANGED = 4

    ACTION_CHOICES = (
        (ACTIONS.CREATED, _('Created')),
        (ACTIONS.UPDATED, _('Updated')),
        (ACTIONS.DELETED, _('Deleted')),
        (ACTIONS.RELATED_CHANGED, _('Related changed'))
    )

    # Generic FK to the audited object (any subscribed model).
    content_type = models.ForeignKey(ContentType, blank=True, null=True,
                                     verbose_name=_('content type'))
    object_id = models.TextField(blank=True, null=True)
    content_object = GenericForeignKey('content_type', 'object_id')
    # Who made the change, when known, and from which IP.
    user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True,
                             verbose_name=_('user'))
    user_ip = models.GenericIPAddressField(_('IP address'), null=True)
    object_repr = models.CharField(_('object repr'), max_length=200)
    action = models.PositiveSmallIntegerField(_('action'),
                                              choices=ACTION_CHOICES)
    action_time = models.DateTimeField(_('date and time'), auto_now=True)
    # Per-field change payload: {field: {old_value, new_value, ...}}.
    changes = JSONField(dump_kwargs=DUMP_KWARGS)
    # For RELATED_CHANGED rows, points at the trail of the related object.
    related_trail = models.ForeignKey(to='self', null=True)
    objects = AuditTrailManager()

    class Meta:
        ordering = ('-id',)
        app_label = 'audit_trail'
        verbose_name = _('audit trail')
        verbose_name_plural = _('audit trails')

    def __str__(self):
        if self.action != self.ACTIONS.RELATED_CHANGED:
            return '%s was %s at %s' % (
                self.object_repr, self.get_action_display().lower(), self.action_time.isoformat()
            )
        else:
            return '%s %s at %s' % (
                self.object_repr, self.get_action_display().lower(), self.action_time.isoformat()
            )

    @property
    def is_created(self):
        return self.action == self.ACTIONS.CREATED

    @property
    def is_updated(self):
        return self.action == self.ACTIONS.UPDATED

    @property
    def is_deleted(self):
        return self.action == self.ACTIONS.DELETED

    @property
    def is_related_changed(self):
        return self.action == self.ACTIONS.RELATED_CHANGED

    def get_changes(self):
        """Return the stored change dict enriched with a human-readable
        ``field_label`` per entry (fields since removed from the model are
        labelled as such). Non-dict payloads are returned untouched.
        """
        if not isinstance(self.changes, dict):
            return self.changes
        changes = self.changes.copy()
        model_class = self.content_type.model_class()
        for field_name, change in changes.items():
            try:
                change['field_label'] = model_class._meta.get_field(field_name).verbose_name.capitalize()
            except FieldDoesNotExist:
                change['field_label'] = '<field removed> %s' % field_name
        return changes
|
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South migration for the medialibrary app: creates the
    MediaType, MediaFormat, MediaObject, MediaLibrary, ImageObject and
    ImageObject2 tables plus the two M2M join tables.
    """

    def forwards(self, orm):
        """Apply the migration: create all medialibrary tables."""
        # Adding model 'MediaType'
        db.create_table('medialibrary_mediatype', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('label', self.gf('django.db.models.fields.CharField')(default='unknown', max_length=20)),
        ))
        db.send_create_signal('medialibrary', ['MediaType'])
        # Adding model 'MediaFormat'
        db.create_table('medialibrary_mediaformat', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('label', self.gf('django.db.models.fields.CharField')(default='unknown', max_length=10)),
        ))
        db.send_create_signal('medialibrary', ['MediaFormat'])
        # Adding model 'MediaObject'
        db.create_table('medialibrary_mediaobject', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(default='imagefile', max_length=60, blank=True)),
            ('url', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255, blank=True)),
            ('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['medialibrary.MediaType'], blank=True)),
            ('format', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['medialibrary.MediaFormat'], blank=True)),
            ('publisher', self.gf('django.db.models.fields.CharField')(default='Sodiioo', max_length=60, blank=True)),
            ('license', self.gf('django.db.models.fields.CharField')(default='http://web.resource.org/cc/PublicDomain', max_length=60, blank=True)),
            ('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
            ('creation_datetime', self.gf('django.db.models.fields.DateTimeField')()),
            ('upload_image', self.gf('django.db.models.fields.files.ImageField')(max_length=60, null=True)),
            ('has_tag', self.gf('django.db.models.fields.IntegerField')(default=0, null=True, blank=True)),
        ))
        db.send_create_signal('medialibrary', ['MediaObject'])
        # Adding M2M table for field related on 'MediaObject'
        db.create_table('medialibrary_mediaobject_related', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('from_mediaobject', models.ForeignKey(orm['medialibrary.mediaobject'], null=False)),
            ('to_mediaobject', models.ForeignKey(orm['medialibrary.mediaobject'], null=False))
        ))
        db.create_unique('medialibrary_mediaobject_related', ['from_mediaobject_id', 'to_mediaobject_id'])
        # Adding model 'MediaLibrary'
        db.create_table('medialibrary_medialibrary', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
        ))
        db.send_create_signal('medialibrary', ['MediaLibrary'])
        # Adding M2M table for field media_object on 'MediaLibrary'
        db.create_table('medialibrary_medialibrary_media_object', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('medialibrary', models.ForeignKey(orm['medialibrary.medialibrary'], null=False)),
            ('mediaobject', models.ForeignKey(orm['medialibrary.mediaobject'], null=False))
        ))
        db.create_unique('medialibrary_medialibrary_media_object', ['medialibrary_id', 'mediaobject_id'])
        # Adding model 'ImageObject'
        db.create_table('medialibrary_imageobject', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=60, null=True)),
        ))
        db.send_create_signal('medialibrary', ['ImageObject'])
        # Adding model 'ImageObject2'
        db.create_table('medialibrary_imageobject2', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=60, null=True)),
        ))
        db.send_create_signal('medialibrary', ['ImageObject2'])

    def backwards(self, orm):
        """Revert the migration: drop all tables created by forwards()."""
        # Deleting model 'MediaType'
        db.delete_table('medialibrary_mediatype')
        # Deleting model 'MediaFormat'
        db.delete_table('medialibrary_mediaformat')
        # Deleting model 'MediaObject'
        db.delete_table('medialibrary_mediaobject')
        # Removing M2M table for field related on 'MediaObject'
        db.delete_table('medialibrary_mediaobject_related')
        # Deleting model 'MediaLibrary'
        db.delete_table('medialibrary_medialibrary')
        # Removing M2M table for field media_object on 'MediaLibrary'
        db.delete_table('medialibrary_medialibrary_media_object')
        # Deleting model 'ImageObject'
        db.delete_table('medialibrary_imageobject')
        # Deleting model 'ImageObject2'
        db.delete_table('medialibrary_imageobject2')

    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history; generated — do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_fake': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'show_marked_tags': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'medialibrary.imageobject': {
            'Meta': {'object_name': 'ImageObject'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '60', 'null': 'True'})
        },
        'medialibrary.imageobject2': {
            'Meta': {'object_name': 'ImageObject2'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '60', 'null': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'medialibrary.mediaformat': {
            'Meta': {'object_name': 'MediaFormat'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '10'})
        },
        'medialibrary.medialibrary': {
            'Meta': {'object_name': 'MediaLibrary'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'media_object': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['medialibrary.MediaObject']", 'symmetrical': 'False'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'medialibrary.mediaobject': {
            'Meta': {'object_name': 'MediaObject'},
            'creation_datetime': ('django.db.models.fields.DateTimeField', [], {}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'format': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['medialibrary.MediaFormat']", 'blank': 'True'}),
            'has_tag': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'license': ('django.db.models.fields.CharField', [], {'default': "'http://web.resource.org/cc/PublicDomain'", 'max_length': '60', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'default': "'imagefile'", 'max_length': '60', 'blank': 'True'}),
            'publisher': ('django.db.models.fields.CharField', [], {'default': "'Sodiioo'", 'max_length': '60', 'blank': 'True'}),
            'related': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_rel_+'", 'blank': 'True', 'to': "orm['medialibrary.MediaObject']"}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['medialibrary.MediaType']", 'blank': 'True'}),
            'upload_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '60', 'null': 'True'}),
            'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'})
        },
        'medialibrary.mediatype': {
            'Meta': {'object_name': 'MediaType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'})
        }
    }
    complete_apps = ['medialibrary']
|
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2022, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Models for mapping values from one range or space to another in the client.
Mappers (as opposed to scales) are not presumed to be invertible.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from .. import palettes
from ..core.enums import Palette
from ..core.has_props import abstract
from ..core.properties import (
Color,
Either,
Enum,
FactorSeq,
Float,
HatchPatternType,
Instance,
Int,
List,
MarkerType,
Nullable,
Seq,
String,
Tuple,
)
from ..core.validation import warning
from ..core.validation.warnings import PALETTE_LENGTH_FACTORS_MISMATCH
from .transforms import Transform
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
# Public API of this module; keep in sync with the classes defined below.
__all__ = (
    'Mapper',
    'ColorMapper',
    'CategoricalMapper',
    'CategoricalColorMapper',
    'CategoricalMarkerMapper',
    'CategoricalPatternMapper',
    'ContinuousColorMapper',
    'LinearColorMapper',
    'LogColorMapper',
    'EqHistColorMapper',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
@abstract
class Mapper(Transform):
    ''' Base class for mappers.
    '''
    # Mappers (unlike scales) are not presumed to be invertible; concrete
    # subclasses define the client-side mapping behavior.

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
@abstract
class ColorMapper(Mapper):
    ''' Base class for color mapper types.
    '''

    def __init__(self, *args, **kwargs) -> None:
        # Convenience: a single positional argument is treated as the
        # palette, so ColorMapper("Viridis256") works.
        # NOTE(review): positional args are not forwarded to super(), so any
        # extra positional arguments are silently dropped — confirm intended.
        if len(args) == 1:
            kwargs['palette'] = args[0]
        super().__init__(**kwargs)

    palette = Seq(Color, help="""
    A sequence of colors to use as the target palette for mapping.
    This property can also be set as a ``String``, to the name of any of the
    palettes shown in :ref:`bokeh.palettes`.
    """).accepts(Enum(Palette), lambda pal: getattr(palettes, pal))

    nan_color = Color(default="gray", help="""
    Color to be used if data is NaN or otherwise not mappable.
    """)
@abstract
class CategoricalMapper(Mapper):
    ''' Base class for mappers that map categorical factors to other values.
    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    factors = FactorSeq(help="""
    A sequence of factors / categories that map to the some target range. For
    example the following color mapper:
    .. code-block:: python
        mapper = CategoricalColorMapper(palette=["red", "blue"], factors=["foo", "bar"])
    will map the factor ``"foo"`` to red and the factor ``"bar"`` to blue.
    """)

    # start/end slice multi-level factors before mapping.
    start = Int(default=0, help="""
    A start index to "slice" data factors with before mapping.
    For example, if the data to color map consists of 2-level factors such
    as ``["2016", "sales"]`` and ``["2016", "marketing"]``, then setting
    ``start=1`` will perform color mapping only based on the second sub-factor
    (i.e. in this case based on the department ``"sales"`` or ``"marketing"``)
    """)

    end = Nullable(Int, help="""
    A start index to "slice" data factors with before mapping.
    For example, if the data to color map consists of 2-level factors such
    as ``["2016", "sales"]`` and ``["2017", "marketing"]``, then setting
    ``end=1`` will perform color mapping only based on the first sub-factor
    (i.e. in this case based on the year ``"2016"`` or ``"2017"``)
    If ``None`` then all sub-factors from ``start`` to the end of the
    factor will be used for color mapping.
    """)
class CategoricalColorMapper(CategoricalMapper, ColorMapper):
    ''' Map categorical factors to colors.
    Values that are passed to this mapper that are not in the factors list
    will be mapped to ``nan_color``.
    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    # Validation hook: emits a PALETTE_LENGTH_FACTORS_MISMATCH warning (not
    # an error) when there are more factors than palette colors.
    @warning(PALETTE_LENGTH_FACTORS_MISMATCH)
    def _check_palette_length(self):
        palette = self.palette
        factors = self.factors
        if len(palette) < len(factors):
            extra_factors = factors[len(palette):]
            return f"{extra_factors} will be assigned to `nan_color` {self.nan_color}"
class CategoricalMarkerMapper(CategoricalMapper):
    ''' Map categorical factors to marker types.

    Values that are passed to this mapper that are not in the factors list
    will be mapped to ``default_value``.

    .. note::
        This mapper is primarily useful with the ``Scatter`` marker
        glyph, which can be parameterized by marker type.

    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    markers = Seq(MarkerType, help="""
    A sequence of marker types to use as the target for mapping.
    """)

    default_value = MarkerType(default="circle", help="""
    A marker type to use in case an unrecognized factor is passed in to be
    mapped.
    """)
class CategoricalPatternMapper(CategoricalMapper):
    ''' Map categorical factors to hatch fill patterns.

    Values that are passed to this mapper that are not in the factors list
    will be mapped to ``default_value``.

    Added in version 1.1.1

    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    patterns = Seq(HatchPatternType, help="""
    A sequence of hatch patterns to use as the target for mapping.
    """)

    default_value = HatchPatternType(default=" ", help="""
    A hatch pattern to use in case an unrecognized factor is passed in to be
    mapped.
    """)
@abstract
class ContinuousColorMapper(ColorMapper):
    ''' Base class for continuous color mapper types.
    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    # each entry pairs a glyph renderer with the field name(s) to pool
    domain = List(Tuple(Instance("bokeh.models.renderers.GlyphRenderer"), Either(String, List(String))), default=[], help="""
    A collection of glyph renderers to pool data from for establishing data metrics.
    If empty, mapped data will be used instead.
    """)

    low = Nullable(Float, help="""
    The minimum value of the range to map into the palette. Values below
    this are clamped to ``low``. If ``None``, the value is inferred from data.
    """)

    high = Nullable(Float, help="""
    The maximum value of the range to map into the palette. Values above
    this are clamped to ``high``. If ``None``, the value is inferred from data.
    """)

    low_color = Nullable(Color, help="""
    Color to be used if data is lower than ``low`` value. If None,
    values lower than ``low`` are mapped to the first color in the palette.
    """)

    high_color = Nullable(Color, help="""
    Color to be used if data is higher than ``high`` value. If None,
    values higher than ``high`` are mapped to the last color in the palette.
    """)
class LinearColorMapper(ContinuousColorMapper):
    ''' Map numbers in a range [*low*, *high*] linearly into a sequence of
    colors (a palette).

    For example, if the range is [0, 99] and the palette is
    ``['red', 'green', 'blue']``, the values would be mapped as follows::

        x < 0  : 'red'     # values < low are clamped
        0 <= x < 33 : 'red'
        33 <= x < 66 : 'green'
        66 <= x < 99 : 'blue'
        99 <= x      : 'blue'    # values > high are clamped

    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
class LogColorMapper(ContinuousColorMapper):
    ''' Map numbers in a range [*low*, *high*] into a sequence of colors
    (a palette) on a natural logarithm scale.

    For example, if the range is [0, 25] and the palette is
    ``['red', 'green', 'blue']``, the values would be mapped as follows::

        x < 0          : 'red'     # values < low are clamped
        0    <= x < 2.72  : 'red'     # math.e ** 1
        2.72 <= x < 7.39  : 'green'   # math.e ** 2
        7.39 <= x < 20.09 : 'blue'    # math.e ** 3
        20.09 <= x        : 'blue'    # values > high are clamped

    .. warning::
        The ``LogColorMapper`` only works for images with scalar values that are
        non-negative.

    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
@abstract
class ScanningColorMapper(ContinuousColorMapper):
    ''' Base class for continuous color mappers that derive their mapping
    by scanning the data (e.g. ``EqHistColorMapper``).
    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
class EqHistColorMapper(ScanningColorMapper):
    ''' Color mapper that performs histogram equalization when mapping data
    into the palette, using ``bins`` histogram bins.
    '''

    # explicit __init__ to support Init signatures
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

    bins = Int(default=256*256, help="Number of histogram bins")
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
|
|
import copy
import os
import re
import pytest
import time
from subprocess import CalledProcessError
from rancher import ApiError
from .test_auth import enable_ad, load_setup_data
from .common import add_role_to_user
from .common import auth_get_user_token
from .common import auth_resource_cleanup
from .common import AUTH_PROVIDER
from .common import AUTH_USER_PASSWORD
from .common import apply_crd
from .common import check_condition
from .common import compare_versions
from .common import CLUSTER_MEMBER
from .common import CLUSTER_OWNER
from .common import create_kubeconfig
from .common import create_project_and_ns
from .common import create_ns
from .common import DEFAULT_TIMEOUT
from .common import delete_crd
from .common import execute_kubectl_cmd
from .common import get_a_group_and_a_user_not_in_it
from .common import get_admin_client
from .common import get_client_for_token
from .common import get_cluster_client_for_token
from .common import get_crd
from .common import get_group_principal_id
from .common import get_project_client_for_token
from .common import get_user_by_group
from .common import get_user_client
from .common import get_user_client_and_cluster
from .common import if_test_group_rbac
from .common import if_test_rbac
from .common import login_as_auth_user
from .common import NESTED_GROUP_ENABLED
from .common import PROJECT_MEMBER
from .common import PROJECT_OWNER
from .common import PROJECT_READ_ONLY
from .common import random_test_name
from .common import rbac_get_kubeconfig_by_role
from .common import rbac_get_namespace
from .common import rbac_get_user_token_by_role
from .common import requests
from .common import run_command as run_command_common
from .common import ADMIN_TOKEN
from .common import USER_TOKEN
from .common import validate_all_workload_image_from_rancher
from .common import wait_for_condition
from .common import wait_for_pod_to_running
from .common import wait_for_pods_in_workload
from .common import wait_for_wl_to_active
from .test_monitoring import C_MONITORING_ANSWERS
# Paths to the istio resources bundled with this test suite.
ISTIO_PATH = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "resource/istio")
ISTIO_CRD_PATH = os.path.join(ISTIO_PATH, "crds")
# Catalog template the rancher-istio system app installs from.
ISTIO_TEMPLATE_ID = "cattle-global-data:system-library-rancher-istio"
# Optional override of the istio chart version under test ("" = default).
ISTIO_VERSION = os.environ.get('RANCHER_ISTIO_VERSION', "")
ISTIO_INGRESSGATEWAY_NODEPORT = os.environ.get(
    'RANCHER_ISTIO_INGRESSGATEWAY_NODEPORT', 31380)
# Expected <title> served by the bookinfo sample app's product page.
ISTIO_BOOKINFO_QUERY_RESULT = "<title>Simple Bookstore App</title>"
ISTIO_EXTERNAL_ID = "catalog://?catalog=system-library" \
                    "&template=rancher-istio&version="
# Baseline helm answers used when (re)installing the istio app.
DEFAULT_ANSWERS = {
    "enableCRDs": "true",
    "gateways.enabled": "true",
    "gateways.istio-ingressgateway.type": "NodePort",
    "gateways.istio-ingressgateway.ports[0].nodePort":
        ISTIO_INGRESSGATEWAY_NODEPORT,
    "gateways.istio-ingressgateway.ports[0].port": 80,
    "gateways.istio-ingressgateway.ports[0].targetPort": 80,
    "gateways.istio-ingressgateway.ports[0].name": "http2",
    "global.monitoring.type": "cluster-monitoring"}
# Mutable module-level state shared across tests; populated by setup code
# not visible in this chunk (presumably a session-scoped fixture).
namespace = {"app_client": None, "app_ns": None, "gateway_url": None,
             "system_ns": None, "system_project": None,
             "istio_version": None, "istio_app": None}
# (CRD group, manifest filename) pairs used to parametrize the CRD rbac
# tests below; entries known to fail on Rancher v2.3.5 are commented out.
crd_test_data = [
    ("policy.authentication.istio.io", "authenticationpolicy.yaml"),
    # ("adapter.config.istio.io", "adapter.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("attributemanifest.config.istio.io", "attributemanifest.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    ("handler.config.istio.io", "handler.yaml"),
    # ("httpapispecbinding.config.istio.io", "httpapispecbinding.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("httpapispec.config.istio.io", "httpapispec.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("instance.config.istio.io", "instance.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    ("quotaspecbinding.config.istio.io", "quotaspecbinding.yaml"),
    ("quotaspec.config.istio.io", "quotaspec.yaml"),
    ("rule.config.istio.io", "rule.yaml"),
    # ("template.config.istio.io", "template.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    ("destinationrule.networking.istio.io", "destinationrule.yaml"),
    ("envoyfilter.networking.istio.io", "envoyfilter.yaml"),
    ("gateway.networking.istio.io", "gateway.yaml"),
    ("serviceentry.networking.istio.io", "serviceentry.yaml"),
    ("sidecar.networking.istio.io", "sidecar.yaml"),
    ("virtualservice.networking.istio.io", "virtualservice.yaml"),
    ("rbacconfig.rbac.istio.io", "rbacconfig.yaml"),
    ("servicerolebinding.rbac.istio.io", "servicerolebinding.yaml"),
    ("servicerole.rbac.istio.io", "servicerole.yaml"),
    ("authorizationpolicy.security.istio.io", "authorizationpolicy.yaml"),
    # ("certificate.certmanager.k8s.io", "certificate.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("challenge.certmanager.k8s.io", "challenge.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("clusterissuer.certmanager.k8s.io", "clusterissuer.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("issuer.certmanager.k8s.io", "issuer.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
    # ("order.certmanager.k8s.io", "order.yaml"),
    # ABOVE FAILS in current state: Rancher v2.3.5
]
def test_istio_resources():
    """End-to-end bookinfo smoke test: deploy the sample services, then
    exercise virtual service, gateway, and version routing through istio."""
    app_client = namespace["app_client"]
    app_ns = namespace["app_ns"]
    gateway_url = namespace["gateway_url"]
    create_and_test_bookinfo_services(app_client, app_ns)
    create_bookinfo_virtual_service(app_client, app_ns)
    create_and_test_bookinfo_gateway(app_client, app_ns, gateway_url)
    create_and_test_bookinfo_routing(app_client, app_ns, gateway_url)
def test_istio_deployment_options():
    """Deploy a workload with a custom sidecar annotation and verify the
    pod runs the expected proxy image."""
    file_path = ISTIO_PATH + '/nginx-custom-sidecar.yaml'
    expected_image = "rancher/istio-proxyv2:1.4.3"
    p_client = namespace["app_client"]
    ns = namespace["app_ns"]
    execute_kubectl_cmd('apply -f ' + file_path + ' -n ' + ns.name, False)
    result = execute_kubectl_cmd('get deployment -n ' + ns.name, True)
    # Wait for every deployment in the namespace to become active and its
    # pods to reach Running before inspecting container images.
    for deployment in result['items']:
        wl = p_client.list_workload(id='deployment:'
                                    + deployment['metadata']['namespace']
                                    + ':'
                                    + deployment['metadata']['name']).data[0]
        wl = wait_for_wl_to_active(p_client, wl, 60)
        wl_pods = wait_for_pods_in_workload(p_client, wl, 1)
        wait_for_pod_to_running(p_client, wl_pods[0])
    workload = p_client.list_workload(name="nginx-v1",
                                      namespaceId=ns.id).data[0]
    pod = p_client.list_pod(workloadId=workload.id).data[0]
    # Collect the images once instead of rebuilding them on failure via
    # string concatenation + split, and surface them in the assert message.
    retrieved_images = [container.image for container in pod.containers]
    assert expected_image in retrieved_images, \
        "None of {} matches '{}'".format(retrieved_images, expected_image)
# Enables all possible istio custom answers with the exception of certmanager
def test_istio_custom_answers(skipif_unsupported_istio_version,
                              enable_all_options_except_certmanager):
    """Verify every optional istio workload comes up with rancher's images."""
    expected_deployments = [
        "grafana", "istio-citadel", "istio-egressgateway", "istio-galley",
        "istio-ilbgateway", "istio-ingressgateway", "istio-pilot",
        "istio-policy", "istio-sidecar-injector", "istio-telemetry",
        "istio-tracing", "istiocoredns", "kiali", "prometheus"
    ]
    expected_daemonsets = ["istio-nodeagent"]
    validate_all_workload_image_from_rancher(
        get_system_client(USER_TOKEN), namespace["system_ns"],
        ignore_pod_count=True, deployment_list=expected_deployments,
        daemonset_list=expected_daemonsets)
# This is split out separately from test_istio_custom_answers because
# certmanager creates its own crds outside of istio
def test_istio_certmanager_enables(skipif_unsupported_istio_version,
                                   enable_certmanager):
    """Verify the certmanager-enabled istio install deploys as expected."""
    expected_deployments = [
        "certmanager", "istio-citadel", "istio-galley", "istio-ingressgateway",
        "istio-pilot", "istio-policy", "istio-sidecar-injector",
        "istio-telemetry", "istio-tracing", "kiali"
    ]
    validate_all_workload_image_from_rancher(
        get_system_client(USER_TOKEN), namespace["system_ns"],
        ignore_pod_count=True, deployment_list=expected_deployments)
@if_test_rbac
def test_rbac_istio_metrics_allow_all_cluster_owner(allow_all_access):
    """Cluster owner reaches kiali/tracing when access is open to all."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_OWNER)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_all_cluster_owner(allow_all_access):
    """Cluster owner reaches grafana/prometheus when access is open to all."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_OWNER)
    validate_access(grafana_url, token)
    validate_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_all_cluster_member(allow_all_access):
    """Cluster member reaches kiali/tracing when access is open to all."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_MEMBER)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_all_cluster_member(allow_all_access):
    """Cluster member still has no grafana/prometheus access."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_MEMBER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_all_project_owner(allow_all_access):
    """Project owner reaches kiali/tracing when access is open to all."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_OWNER)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_all_project_owner(allow_all_access):
    """Project owner still has no grafana/prometheus access."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_OWNER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_all_project_member(allow_all_access):
    """Project member reaches kiali/tracing when access is open to all."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_MEMBER)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_all_project_member(allow_all_access):
    """Project member still has no grafana/prometheus access."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_MEMBER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_all_project_read(allow_all_access):
    """Read-only project user reaches kiali/tracing when access is open."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_READ_ONLY)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_all_project_read(allow_all_access):
    """Read-only project user still has no grafana/prometheus access."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_READ_ONLY)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)
@if_test_rbac
def test_rbac_istio_metrics_allow_none_cluster_owner(default_access):
    """Cluster owner reaches kiali/tracing even with default access."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_OWNER)
    validate_access(kiali_url, token)
    validate_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_none_cluster_owner(default_access):
    """Cluster owner reaches grafana/prometheus even with default access."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_OWNER)
    validate_access(grafana_url, token)
    validate_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_none_cluster_member(default_access):
    """Cluster member has no kiali/tracing access by default."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_MEMBER)
    validate_no_access(kiali_url, token)
    validate_no_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_none_cluster_member(default_access):
    """Cluster member has no grafana/prometheus access by default."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(CLUSTER_MEMBER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_none_project_owner(default_access):
    """Project owner has no kiali/tracing access by default."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_OWNER)
    validate_no_access(kiali_url, token)
    validate_no_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_none_project_owner(default_access):
    """Project owner has no grafana/prometheus access by default."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_OWNER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_none_project_member(default_access):
    """Project member has no kiali/tracing access by default."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_MEMBER)
    validate_no_access(kiali_url, token)
    validate_no_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_none_project_member(default_access):
    """Project member has no grafana/prometheus access by default."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_MEMBER)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)


@if_test_rbac
def test_rbac_istio_metrics_allow_none_project_read(default_access):
    """Read-only project user has no kiali/tracing access by default."""
    kiali_url, tracing_url, _, _ = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_READ_ONLY)
    validate_no_access(kiali_url, token)
    validate_no_access(tracing_url, token)


@if_test_rbac
def test_rbac_istio_monitoring_allow_none_project_read(default_access):
    """Read-only project user has no grafana/prometheus access by default."""
    _, _, grafana_url, prometheus_url = get_urls()
    token = rbac_get_user_token_by_role(PROJECT_READ_ONLY)
    validate_no_access(grafana_url, token)
    validate_no_access(prometheus_url, token)
def _assert_istio_change_forbidden(role, change):
    """Assert that *change*, called with *role*'s token, is rejected 403."""
    user = rbac_get_user_token_by_role(role)
    with pytest.raises(ApiError) as e:
        change(user)
    assert e.value.error.status == 403
    assert e.value.error.code == 'Forbidden'


def _update_istio_foo_bar(user):
    """Attempt a trivial answers update on the istio app as *user*."""
    update_istio_app({"FOO": "BAR"}, user)


@if_test_rbac
def test_rbac_istio_update_cluster_member():
    _assert_istio_change_forbidden(CLUSTER_MEMBER, _update_istio_foo_bar)


@if_test_rbac
def test_rbac_istio_disable_cluster_member():
    _assert_istio_change_forbidden(CLUSTER_MEMBER, delete_istio_app)


@if_test_rbac
def test_rbac_istio_update_project_owner():
    _assert_istio_change_forbidden(PROJECT_OWNER, _update_istio_foo_bar)


@if_test_rbac
def test_rbac_istio_disable_project_owner():
    _assert_istio_change_forbidden(PROJECT_OWNER, delete_istio_app)


@if_test_rbac
def test_rbac_istio_update_project_member():
    _assert_istio_change_forbidden(PROJECT_MEMBER, _update_istio_foo_bar)


@if_test_rbac
def test_rbac_istio_disable_project_member():
    _assert_istio_change_forbidden(PROJECT_MEMBER, delete_istio_app)


@if_test_rbac
def test_rbac_istio_update_project_read():
    _assert_istio_change_forbidden(PROJECT_READ_ONLY, _update_istio_foo_bar)


@if_test_rbac
def test_rbac_istio_disable_project_read():
    _assert_istio_change_forbidden(PROJECT_READ_ONLY, delete_istio_app)
@if_test_rbac
@pytest.mark.parametrize("crd,manifest", crd_test_data)
def test_rbac_istio_crds_project_owner(skipif_unsupported_istio_version,
                                       update_answers, crd, manifest):
    """Project owner can create, read, and delete istio CRD objects."""
    # certmanager CRDs only exist when the certmanager sub-chart is enabled.
    if "certmanager" in crd:
        update_answers("enable_certmanager")
    else :
        update_answers("default_access")
    kubectl_context = rbac_get_kubeconfig_by_role(PROJECT_OWNER)
    file = ISTIO_CRD_PATH + '/' + manifest
    ns = rbac_get_namespace()
    assert re.match("{}.* created".format(crd),
                    apply_crd(ns, file, kubectl_context))
    assert "Forbidden" not in get_crd(ns, crd, kubectl_context)
    assert re.match("{}.* deleted".format(crd),
                    delete_crd(ns, file, kubectl_context))


@if_test_rbac
@pytest.mark.parametrize("crd,manifest", crd_test_data)
def test_rbac_istio_crds_project_member(skipif_unsupported_istio_version,
                                        update_answers, crd, manifest):
    """Project member can create, read, and delete istio CRD objects."""
    if "certmanager" in crd:
        update_answers("enable_certmanager")
    else :
        update_answers("default_access")
    kubectl_context = rbac_get_kubeconfig_by_role(PROJECT_MEMBER)
    file = ISTIO_CRD_PATH + '/' + manifest
    ns = rbac_get_namespace()
    assert re.match("{}.* created".format(crd),
                    apply_crd(ns, file, kubectl_context))
    assert "Forbidden" not in get_crd(ns, crd, kubectl_context)
    assert re.match("{}.* deleted".format(crd),
                    delete_crd(ns, file, kubectl_context))


@if_test_rbac
@pytest.mark.parametrize("crd,manifest", crd_test_data)
def test_rbac_istio_crds_project_read(skipif_unsupported_istio_version,
                                      update_answers, crd, manifest):
    """Read-only project user may read istio CRD objects but not write them."""
    if "certmanager" in crd:
        update_answers("enable_certmanager")
    else :
        update_answers("default_access")
    kubectl_context = rbac_get_kubeconfig_by_role(PROJECT_READ_ONLY)
    file = ISTIO_CRD_PATH + '/' + manifest
    ns = rbac_get_namespace()
    # creates and deletes must be rejected, reads must succeed
    assert str(apply_crd(ns, file, kubectl_context)).startswith(
        "Error from server (Forbidden)")
    assert "Forbidden" not in get_crd(ns, crd, kubectl_context)
    assert str(delete_crd(ns, file, kubectl_context)).startswith(
        "Error from server (Forbidden)")
@if_test_group_rbac
def test_rbac_istio_group_access(auth_cluster_access, update_answers):
    """Granting istio access to an auth-provider group lets every group
    member reach kiali/tracing (but not monitoring), while a user outside
    the group gets no access at all."""
    group, users, noauth_user = auth_cluster_access
    update_answers("allow_group_access", group=group)
    kiali_url, tracing_url, grafana_url, prometheus_url = get_urls()
    for user in users:
        user_token = auth_get_user_token(user)
        print("Validating {} has access.".format(user))
        validate_access(kiali_url, user_token)
        validate_access(tracing_url, user_token)
        validate_no_access(grafana_url, user_token)
        validate_no_access(prometheus_url, user_token)
    print("Validating {} does not have access.".format(noauth_user))
    noauth_token = auth_get_user_token(noauth_user)
    validate_no_access(kiali_url, noauth_token)
    validate_no_access(tracing_url, noauth_token)
    validate_no_access(grafana_url, noauth_token)
    validate_no_access(prometheus_url, noauth_token)
def validate_access(url, user):
    """GET *url* with the user's bearer token and assert the request is OK."""
    response = requests.get(
        url=url, headers={'Authorization': 'Bearer ' + user}, verify=False)
    assert response.ok
    return response
def validate_no_access(url, user):
    """GET *url* with the user's bearer token and assert the request fails."""
    response = requests.get(
        url=url, headers={'Authorization': 'Bearer ' + user}, verify=False)
    assert not response.ok
    return response
def update_istio_app(answers, user, app=None, ns=None, project=None):
    """Upgrade the istio app with DEFAULT_ANSWERS overlaid by *answers*.

    Falls back to the app/namespace/project cached in the module-level
    ``namespace`` dict, refreshes ``namespace["istio_app"]`` with the
    updated object, and waits for the app to report ready.
    """
    if app is None:
        app = namespace["istio_app"]
    if ns is None:
        ns = namespace["system_ns"]
    if project is None:
        project = namespace["system_project"]
    p_client = get_system_client(user)
    # Overlay on a deep copy so DEFAULT_ANSWERS itself is never mutated.
    updated_answers = copy.deepcopy(DEFAULT_ANSWERS)
    updated_answers.update(answers)
    namespace["istio_app"] = p_client.update(
        obj=app,
        externalId=ISTIO_EXTERNAL_ID,
        targetNamespace=ns.name,
        projectId=project.id,
        answers=updated_answers)
    verify_istio_app_ready(p_client, namespace["istio_app"], 120, 120)
def create_and_verify_istio_app(p_client, ns, project):
    """Install the cluster-istio catalog app with the default answers and
    wait until it reports installed and deployed; returns the app object."""
    print("creating istio catalog app")
    app = p_client.create_app(
        name="cluster-istio",
        externalId=ISTIO_EXTERNAL_ID,
        targetNamespace=ns.name,
        projectId=project.id,
        answers=DEFAULT_ANSWERS
    )
    verify_istio_app_ready(p_client, app, 120, 600)
    return app
def delete_istio_app(user):
    """Delete the currently installed istio app as *user*."""
    client = get_system_client(user)
    client.delete(namespace["istio_app"])
def verify_istio_app_ready(p_client, app, install_timeout, deploy_timeout,
                           initial_run=True):
    """Recursively poll until the istio app reports Installed and Deployed.

    Each retry refetches the app and decrements the relevant timeout by the
    2-second wait used per attempt. Raises TimeoutError when a timeout is
    exhausted, or AssertionError when the app reports a failed condition.
    """
    if initial_run:
        print("Verify Istio App has installed and deployed properly")
    if install_timeout <= 0 or deploy_timeout <= 0:
        raise TimeoutError("Timeout waiting for istio to be properly "
                           "installed and deployed.") from None
    elif 'conditions' in app and not initial_run:
        # Bail out early if the app itself reports a failure condition.
        for cond in app['conditions']:
            if "False" in cond['status'] and 'message' in cond \
                    and "failed" in cond['message']:
                raise AssertionError(
                    "Failed to properly install/deploy app. Reason: {}".format(
                        cond['message'])) from None
    # ``except Exception`` already covers TypeError; the original
    # ``except (Exception, TypeError)`` was redundant.
    try:
        wait_for_condition(p_client, app, check_condition('Installed', 'True'),
                           timeout=2)
    except Exception:
        verify_istio_app_ready(p_client, p_client.list_app(
            name='cluster-istio').data[0], install_timeout-2, deploy_timeout,
            initial_run=False)
    try:
        wait_for_condition(p_client, app, check_condition('Deployed', 'True'),
                           timeout=2)
    except Exception:
        verify_istio_app_ready(p_client, p_client.list_app(
            name='cluster-istio').data[0], 2, deploy_timeout-2,
            initial_run=False)
def get_urls():
    """Return the (kiali, tracing, grafana, prometheus) k8s-proxy URLs for
    the cluster under test."""
    _, cluster = get_user_client_and_cluster()
    base = os.environ.get('CATTLE_TEST_URL', "") + \
        "/k8s/clusters/" + cluster.id
    # kiali's service name/port changed after chart versions 0.1.0/0.1.1
    if namespace["istio_version"] in ("0.1.0", "0.1.1"):
        kiali_url = base + "/api/v1/namespaces/istio-system/services/" \
                           "http:kiali-http:80/proxy/"
    else:
        kiali_url = base + "/api/v1/namespaces/istio-system/services/" \
                           "http:kiali:20001/proxy/"
    tracing_url = base + "/api/v1/namespaces/istio-system/services/" \
                         "http:tracing:80/proxy/jaeger/search"
    grafana_url = base + "/api/v1/namespaces/cattle-prometheus/services/" \
                         "http:access-grafana:80/proxy/dashboards/"
    prometheus_url = base + "/api/v1/namespaces/cattle-prometheus/services/" \
                            "http:access-prometheus:80/proxy/"
    return kiali_url, tracing_url, grafana_url, prometheus_url
def verify_admission_webhook():
    """Fail unless the admissionregistration API group is served by the
    kube-apiserver."""
    output = execute_kubectl_cmd(
        'api-versions | grep admissionregistration', False)
    if not output:
        raise AssertionError(
            "MutatingAdmissionWebhook and ValidatingAdmissionWebhook plugins "
            "are not listed in the kube-apiserver --enable-admission-plugins")
def add_istio_label_to_ns(c_client, ns):
    """Label *ns* for automatic istio sidecar injection; return the updated
    namespace object."""
    return c_client.update_by_id_namespace(
        ns.id, labels={"istio-injection": "enabled"})
def _get_bookinfo_title(ns_name, pod_name):
    """curl the productpage from the ratings pod and grep out its <title>.

    Returns None while the page is not yet served (grep exits non-zero,
    which makes execute_kubectl_cmd raise CalledProcessError).
    """
    try:
        return execute_kubectl_cmd(
            'exec -it -n ' + ns_name + ' ' + pod_name
            + ' -c ratings -- curl productpage:9080/productpage'
            + ' | grep -o "<title>.*</title>"', False)
    except CalledProcessError:
        return None


def create_and_test_bookinfo_services(p_client, ns, timeout=DEFAULT_TIMEOUT):
    """Deploy the bookinfo sample into *ns* and poll until the productpage
    serves its expected title; raises AssertionError on timeout."""
    book_info_file_path = ISTIO_PATH + '/bookinfo.yaml'
    execute_kubectl_cmd('apply -f ' + book_info_file_path + ' -n '
                        + ns.name, False)
    result = execute_kubectl_cmd('get deployment -n ' + ns.name, True)
    for deployment in result['items']:
        wl = p_client.list_workload(id='deployment:'
                                    + deployment['metadata']['namespace']
                                    + ':'
                                    + deployment['metadata']['name']).data[0]
        wl = wait_for_wl_to_active(p_client, wl, 60)
        wl_pods = wait_for_pods_in_workload(p_client, wl, 1)
        wait_for_pod_to_running(p_client, wl_pods[0])
    rating_pod = execute_kubectl_cmd('get pod -l app=ratings -n' + ns.name)
    assert len(rating_pod['items']) == 1
    rating_pod_name = rating_pod['items'][0]['metadata']['name']
    # Poll via the shared helper instead of the previous duplicated
    # try/except curl blocks.
    result = _get_bookinfo_title(ns.name, rating_pod_name)
    start = time.time()
    while result is None or result.rstrip() != ISTIO_BOOKINFO_QUERY_RESULT:
        if time.time() - start > timeout:
            raise AssertionError(
                "Timed out and failed to get bookinfo service ready")
        time.sleep(.5)
        result = _get_bookinfo_title(ns.name, rating_pod_name)
    assert result.rstrip() == ISTIO_BOOKINFO_QUERY_RESULT
    return result
def create_and_test_bookinfo_gateway(app_client, namespace,
                                     gateway_url, timeout=DEFAULT_TIMEOUT):
    """Create the bookinfo istio Gateway and poll through the ingress
    gateway until the product page is served; raises on timeout."""
    servers = [{
        "hosts": ["*"],
        "port": {
            "number": "80",
            "protocol": "HTTP",
            "name": "http"
        }
    }]
    selector = {"istio": "ingressgateway"}
    app_client.create_gateway(name="bookinfo-gateway",
                              namespaceId=namespace.id,
                              selector=selector,
                              servers=servers)
    gateways = execute_kubectl_cmd('get gateway -n' + namespace.name, True)
    assert len(gateways['items']) == 1
    curl_cmd = 'curl -s http://' + gateway_url \
               + '/productpage | grep -o "<title>.*</title>"'
    # run_command returns None while grep matches nothing (page not up yet)
    result = run_command(curl_cmd)
    start = time.time()
    while result is None or result.rstrip() != ISTIO_BOOKINFO_QUERY_RESULT:
        if time.time() - start > timeout:
            raise AssertionError(
                "Timed out and failed to get bookinfo gateway ready")
        time.sleep(.5)
        result = run_command(curl_cmd)
    assert result.rstrip() == ISTIO_BOOKINFO_QUERY_RESULT
    return result
def create_bookinfo_virtual_service(app_client, namespace):
    """Create the bookinfo VirtualService routing the app's HTTP paths
    through the bookinfo-gateway."""
    match_rules = [
        {"uri": {"exact": "/productpage"}},
        {"uri": {"exact": "/login"}},
        {"uri": {"exact": "/logout"}},
        {"uri": {"prefix": "/api/v1/products"}},
    ]
    route = {
        "destination": {
            "host": "productpage",
            "port": {"number": 9080},
        },
        "weight": 100,
        "portNumberOrName": "9080",
    }
    app_client.create_virtual_service(name="bookinfo",
                                      namespaceId=namespace.id,
                                      gateways=["bookinfo-gateway"],
                                      http=[{"route": [route],
                                             "match": match_rules}],
                                      hosts=["*"])
def create_bookinfo_destination_rules(app_client, namespace):
    """Create the reviews DestinationRule with one subset per app version."""
    version_subsets = [
        {"name": version, "labels": {"version": version}}
        for version in ("v1", "v2", "v3")
    ]
    app_client.create_destination_rule(namespaceId=namespace.id,
                                       name="reviews",
                                       host="reviews",
                                       subsets=version_subsets)
def create_and_test_bookinfo_routing(app_client, namespace,
                                     gateway_url, timeout=30):
    """Pin the reviews service to subset v3 via a VirtualService and poll
    the product page until the v3-only star rating markup appears."""
    http = [{
        "route": [{
            "destination": {
                "subset": "v3",
                "host": "reviews",
                "port": {"number": 9080}
            },
            "weight": 100,
            "portNumberOrName": "9080"
        }]
    }]
    create_bookinfo_destination_rules(app_client, namespace)
    app_client.create_virtual_service(name="reviews",
                                      namespaceId=namespace.id,
                                      http=http,
                                      hosts=["reviews"])
    # "glyphicon-star" only appears when reviews v3 serves the page
    curl_cmd = 'curl -s http://' + gateway_url \
               + '/productpage | grep -o "glyphicon-star"'
    result = run_command(curl_cmd)
    start = time.time()
    while result is None or "glyphicon-star" not in result:
        if time.time() - start > timeout:
            raise AssertionError(
                "Timed out and failed to get correct reviews version")
        time.sleep(.5)
        result = run_command(curl_cmd)
    assert "glyphicon-star" in result
    return result
# if grep returns no output, subprocess.check_output raises CalledProcessError
def run_command(command):
    """Run *command*; return its output, or None when the command fails."""
    try:
        output = run_command_common(command)
    except CalledProcessError:
        return None
    return output
def get_system_client(user):
    """Return a project client for the System project, authed as *user*.

    The cluster lookup itself uses USER_TOKEN (a CLUSTER_OWNER); only the
    returned project client is scoped to *user*'s token.
    """
    # Gets client and cluster using USER_TOKEN, who is a CLUSTER_OWNER
    client, cluster = get_user_client_and_cluster()
    projects = client.list_project(name='System', clusterId=cluster.id)
    if len(projects.data) == 0:
        # bug fix: cluster objects expose ``id`` (used above), not ``Name``,
        # so the old message raised AttributeError instead of this assertion
        raise AssertionError(
            "System project not found in the cluster " + cluster.id)
    p = projects.data[0]
    return get_project_client_for_token(p, user)
def add_user_to_cluster(username):
    """Log *username* in via the auth provider and grant it CLUSTER_MEMBER."""
    # Minimal stand-in for the user object expected by add_role_to_user().
    class User(object):
        def __init__(self, u_name, user_id, token):
            self.username = u_name
            self.id = user_id
            self.token = token

    user_data = login_as_auth_user(username, AUTH_USER_PASSWORD)
    u_id = user_data['userId']
    u_token = user_data['token']
    user_obj = User(username, u_id, u_token)
    add_role_to_user(user_obj, CLUSTER_MEMBER)
    # Enable one of these two below options to get around Issue #25365
    get_client_for_token(u_token)
    # headers = {'Authorization': 'Bearer ' + u_token}
    # url = os.environ.get('CATTLE_TEST_URL', "") + "/v3/users?me=true"
    # response = requests.get(headers=headers, url=url, verify=False)
@pytest.fixture()
def update_answers():
    """Fixture returning a helper that re-deploys Istio with new answers."""
    def _update_answers(answer_type, group=None):
        # Kiali and tracing are always on; the rest depends on answer_type.
        answers = {
            "kiali.enabled": "true",
            "tracing.enabled": "true",
        }
        extra = {}
        if answer_type == "allow_all_access":
            extra = {
                "global.members[0].kind": "Group",
                "global.members[0].name": "system:authenticated",
            }
        elif answer_type == "allow_group_access":
            # Resolve the group's principal id via the auth admin account.
            auth_admin = login_as_auth_user(load_setup_data()["admin_user"],
                                            AUTH_USER_PASSWORD)
            group_id = get_group_principal_id(group, token=auth_admin['token'])
            extra = {
                "global.members[0].kind": "Group",
                "global.members[0].name": group_id,
            }
        elif answer_type == "enable_certmanager":
            extra = {"certmanager.enabled": "true"}
        elif answer_type == "enable_all_options_except_certmanager":
            extra = {
                "gateways.istio-egressgateway.enabled": "true",
                "gateways.istio-ilbgateway.enabled": "true",
                "gateways.istio-ingressgateway.sds.enabled": "true",
                "global.proxy.accessLogFile": "/dev/stdout",
                "grafana.enabled": "true",
                "istiocoredns.enabled": "true",
                "kiali.dashboard.grafanaURL": "",
                "kiali.prometheusAddr": "http://prometheus:9090",
                "nodeagent.enabled": "true",
                "nodeagent.env.CA_ADDR": "istio-citadel:8060",
                "nodeagent.env.CA_PROVIDER": "Citadel",
                "prometheus.enabled": "true",
            }
        answers.update(extra)
        update_istio_app(answers, USER_TOKEN)
    return _update_answers
@pytest.fixture()
def default_access(update_answers):
    # Baseline install: only kiali + tracing enabled.
    update_answers("default_access")
@pytest.fixture()
def allow_all_access(update_answers):
    # Grant dashboard access to the system:authenticated group.
    update_answers("allow_all_access")
@pytest.fixture()
def enable_certmanager(update_answers):
    # Re-deploy Istio with certmanager enabled.
    update_answers("enable_certmanager")
@pytest.fixture()
def enable_all_options_except_certmanager(update_answers):
    # Re-deploy with every optional component except certmanager.
    update_answers("enable_all_options_except_certmanager")
@pytest.fixture(scope='function')
def skipif_unsupported_istio_version(request):
    """Skip the test when the targeted Istio chart is older than 1.4.3."""
    if ISTIO_VERSION != "":
        istio_version = ISTIO_VERSION
    else:
        # No pinned version: take the newest version the template offers.
        client, _ = get_user_client_and_cluster()
        version_links = client.list_template(
            id=ISTIO_TEMPLATE_ID).data[0].versionLinks
        istio_version = list(version_links.keys())[-1]
    if compare_versions(istio_version, "1.4.3") < 0:
        pytest.skip("This test is not supported for older Istio versions")
@pytest.fixture(scope='function')
def auth_cluster_access(request):
    """Add every user of one auth group (plus one outsider) to the cluster.

    Returns ``(group, users, noauth_user)``; teardown removes the auth
    resources again via :func:`auth_resource_cleanup`.
    """
    group, noauth_user = get_a_group_and_a_user_not_in_it(
        NESTED_GROUP_ENABLED)
    users = get_user_by_group(group, NESTED_GROUP_ENABLED)
    for member in users:
        add_user_to_cluster(member)
    add_user_to_cluster(noauth_user)
    request.addfinalizer(auth_resource_cleanup)
    return group, users, noauth_user
@pytest.fixture(scope='module', autouse="True")
def create_project_client(request):
    """Module-scoped setup: ensure Istio is deployed and wire up clients.

    Populates the module-level ``namespace`` dict with the gateway URL,
    clients, namespaces and Istio app/version used by the tests, and
    creates a dedicated project/namespace for the bookinfo app.
    Teardown deletes that project.
    """
    # NOTE(review): autouse is the string "True", not the bool; any truthy
    # value enables autouse so behavior is unchanged -- confirm intent.
    global DEFAULT_ANSWERS
    global ISTIO_EXTERNAL_ID
    client, cluster = get_user_client_and_cluster()
    create_kubeconfig(cluster)
    admin_client = get_admin_client()
    ad_enabled = admin_client.by_id_auth_config("activedirectory").enabled
    if AUTH_PROVIDER == "activeDirectory" and not ad_enabled:
        enable_ad(load_setup_data()["admin_user"], ADMIN_TOKEN,
                  password=AUTH_USER_PASSWORD, nested=NESTED_GROUP_ENABLED)
    projects = client.list_project(name='System', clusterId=cluster.id)
    if len(projects.data) == 0:
        raise AssertionError(
            "System project not found in the cluster " + cluster.name)
    p = projects.data[0]
    p_client = get_project_client_for_token(p, USER_TOKEN)
    c_client = get_cluster_client_for_token(cluster, USER_TOKEN)
    # Default to the newest chart version unless ISTIO_VERSION pins one.
    istio_versions = list(client.list_template(
        id=ISTIO_TEMPLATE_ID).data[0].versionLinks.keys())
    istio_version = istio_versions[len(istio_versions) - 1]
    if ISTIO_VERSION != "":
        istio_version = ISTIO_VERSION
    ISTIO_EXTERNAL_ID += istio_version
    answers = {"global.rancher.clusterId": p.clusterId}
    DEFAULT_ANSWERS.update(answers)
    # Cluster monitoring is a prerequisite for enabling Istio.
    if cluster["enableClusterMonitoring"] is False:
        client.action(cluster, "enableMonitoring",
                      answers=C_MONITORING_ANSWERS)
    if cluster["istioEnabled"] is False:
        verify_admission_webhook()
        ns = create_ns(c_client, cluster, p, 'istio-system')
        app = create_and_verify_istio_app(p_client, ns, p)
    else:
        # Istio already enabled: reuse the existing app and re-apply answers.
        app = p_client.list_app(name='cluster-istio').data[0]
        ns = c_client.list_namespace(name='istio-system').data[0]
        update_istio_app(DEFAULT_ANSWERS, USER_TOKEN,
                         app=app, ns=ns, project=p)
    istio_project, app_ns = create_project_and_ns(
        USER_TOKEN, cluster,
        random_test_name("istio-app"),
        random_test_name("istio-app-ns"))
    # Label the namespace so the Istio sidecar is injected into app pods.
    add_istio_label_to_ns(c_client, app_ns)
    app_client = get_project_client_for_token(istio_project, USER_TOKEN)
    istio_gateway_wl = p_client.by_id_workload('deployment:' +
                                               ns.name +
                                               ':istio-ingressgateway')
    assert istio_gateway_wl is not None
    endpoints = istio_gateway_wl['publicEndpoints'][0]
    gateway_url = endpoints['addresses'][0] + ':' + str(endpoints['port'])
    namespace["gateway_url"] = gateway_url
    namespace["app_ns"] = app_ns
    namespace["app_client"] = app_client
    namespace["system_ns"] = ns
    namespace["system_project"] = p
    namespace["istio_version"] = istio_version
    namespace["istio_app"] = app
    def fin():
        client = get_user_client()
        client.delete(istio_project)
    request.addfinalizer(fin)
|
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbvserver_appflowpolicy_binding(base_resource) :
	""" Binding class showing the appflowpolicy that can be bound to lbvserver.

	NOTE: auto-generated NITRO SDK binding; the try/except-reraise wrappers
	and the property-per-field layout follow the generator's conventions.
	"""
	def __init__(self) :
		# Fields mirror the NITRO API attributes of this binding.
		self._policyname = ""
		self._priority = 0
		self._gotopriorityexpression = ""
		self._bindpoint = ""
		self._invoke = False
		self._labeltype = ""
		self._labelname = ""
		self._name = ""
		# NOTE(review): three leading underscores, so this name is mangled
		# inside the class body; count()/count_filtered() read the plain
		# '___count' __dict__ key, which the payload formatter sets from
		# outside the class (where no mangling applies).
		self.___count = 0
	@property
	def priority(self) :
		"""Priority.
		"""
		try :
			return self._priority
		except Exception as e:
			raise e
	@priority.setter
	def priority(self, priority) :
		"""Priority.
		"""
		try :
			self._priority = priority
		except Exception as e:
			raise e
	@property
	def gotopriorityexpression(self) :
		"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
		"""
		try :
			return self._gotopriorityexpression
		except Exception as e:
			raise e
	@gotopriorityexpression.setter
	def gotopriorityexpression(self, gotopriorityexpression) :
		"""Expression specifying the priority of the next policy which will get evaluated if the current policy rule evaluates to TRUE.
		"""
		try :
			self._gotopriorityexpression = gotopriorityexpression
		except Exception as e:
			raise e
	@property
	def policyname(self) :
		"""Name of the policy bound to the LB vserver.
		"""
		try :
			return self._policyname
		except Exception as e:
			raise e
	@policyname.setter
	def policyname(self, policyname) :
		"""Name of the policy bound to the LB vserver.
		"""
		try :
			self._policyname = policyname
		except Exception as e:
			raise e
	@property
	def name(self) :
		"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
		CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1.
		"""
		try :
			return self._name
		except Exception as e:
			raise e
	@name.setter
	def name(self, name) :
		"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
		CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1
		"""
		try :
			self._name = name
		except Exception as e:
			raise e
	@property
	def bindpoint(self) :
		"""The bindpoint to which the policy is bound.<br/>Possible values = REQUEST, RESPONSE.
		"""
		try :
			return self._bindpoint
		except Exception as e:
			raise e
	@bindpoint.setter
	def bindpoint(self, bindpoint) :
		"""The bindpoint to which the policy is bound.<br/>Possible values = REQUEST, RESPONSE
		"""
		try :
			self._bindpoint = bindpoint
		except Exception as e:
			raise e
	@property
	def labeltype(self) :
		"""The invocation type.<br/>Possible values = reqvserver, resvserver, policylabel.
		"""
		try :
			return self._labeltype
		except Exception as e:
			raise e
	@labeltype.setter
	def labeltype(self, labeltype) :
		"""The invocation type.<br/>Possible values = reqvserver, resvserver, policylabel
		"""
		try :
			self._labeltype = labeltype
		except Exception as e:
			raise e
	@property
	def labelname(self) :
		"""Name of the label invoked.
		"""
		try :
			return self._labelname
		except Exception as e:
			raise e
	@labelname.setter
	def labelname(self, labelname) :
		"""Name of the label invoked.
		"""
		try :
			self._labelname = labelname
		except Exception as e:
			raise e
	@property
	def invoke(self) :
		"""Invoke policies bound to a virtual server or policy label.
		"""
		try :
			return self._invoke
		except Exception as e:
			raise e
	@invoke.setter
	def invoke(self, invoke) :
		"""Invoke policies bound to a virtual server or policy label.
		"""
		try :
			self._invoke = invoke
		except Exception as e:
			raise e
	def _get_nitro_response(self, service, response) :
		""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(lbvserver_appflowpolicy_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# errorcode 444 means the session expired; drop it so the
				# next call re-authenticates.
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.lbvserver_appflowpolicy_binding
		except Exception as e :
			raise e
	def _get_object_name(self) :
		""" Returns the value of object identifier argument
		"""
		try :
			if (self.name) :
				return str(self.name)
			return None
		except Exception as e :
			raise e
	@classmethod
	def add(cls, client, resource) :
		# Bind one resource (or a list of them) to its lbvserver; bindings
		# are created through an update request in NITRO.
		try :
			if resource and type(resource) is not list :
				updateresource = lbvserver_appflowpolicy_binding()
				updateresource.name = resource.name
				updateresource.policyname = resource.policyname
				updateresource.gotopriorityexpression = resource.gotopriorityexpression
				updateresource.bindpoint = resource.bindpoint
				updateresource.invoke = resource.invoke
				updateresource.labeltype = resource.labeltype
				updateresource.labelname = resource.labelname
				return updateresource.update_resource(client)
			else :
				if resource and len(resource) > 0 :
					updateresources = [lbvserver_appflowpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						updateresources[i].name = resource[i].name
						updateresources[i].policyname = resource[i].policyname
						updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
						updateresources[i].bindpoint = resource[i].bindpoint
						updateresources[i].invoke = resource[i].invoke
						updateresources[i].labeltype = resource[i].labeltype
						updateresources[i].labelname = resource[i].labelname
					return cls.update_bulk_request(client, updateresources)
		except Exception as e :
			raise e
	@classmethod
	def delete(cls, client, resource) :
		# Unbind one resource (or a list); only the identifying fields
		# (name, policyname, bindpoint) are needed for deletion.
		try :
			if resource and type(resource) is not list :
				deleteresource = lbvserver_appflowpolicy_binding()
				deleteresource.name = resource.name
				deleteresource.policyname = resource.policyname
				deleteresource.bindpoint = resource.bindpoint
				return deleteresource.delete_resource(client)
			else :
				if resource and len(resource) > 0 :
					deleteresources = [lbvserver_appflowpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						deleteresources[i].name = resource[i].name
						deleteresources[i].policyname = resource[i].policyname
						deleteresources[i].bindpoint = resource[i].bindpoint
					return cls.delete_bulk_request(client, deleteresources)
		except Exception as e :
			raise e
	@classmethod
	def get(cls, service, name) :
		""" Use this API to fetch lbvserver_appflowpolicy_binding resources.
		"""
		try :
			obj = lbvserver_appflowpolicy_binding()
			obj.name = name
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e
	@classmethod
	def get_filtered(cls, service, name, filter_) :
		""" Use this API to fetch filtered set of lbvserver_appflowpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbvserver_appflowpolicy_binding()
			obj.name = name
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e
	@classmethod
	def count(cls, service, name) :
		""" Use this API to count lbvserver_appflowpolicy_binding resources configued on NetScaler.
		"""
		try :
			obj = lbvserver_appflowpolicy_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	@classmethod
	def count_filtered(cls, service, name, filter_) :
		""" Use this API to count the filtered set of lbvserver_appflowpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = lbvserver_appflowpolicy_binding()
			obj.name = name
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	class Bindpoint:
		# Allowed values for the bindpoint attribute.
		REQUEST = "REQUEST"
		RESPONSE = "RESPONSE"
	class Labeltype:
		# Allowed values for the labeltype attribute.
		reqvserver = "reqvserver"
		resvserver = "resvserver"
		policylabel = "policylabel"
class lbvserver_appflowpolicy_binding_response(base_response) :
	"""Response envelope for lbvserver_appflowpolicy_binding API calls."""
	def __init__(self, length=1) :
		# Common NITRO response fields.
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Pre-allocate the result array with *length* empty bindings.
		# (The generated code first bound an empty list here and then
		# immediately overwrote it; the dead store has been removed.)
		self.lbvserver_appflowpolicy_binding = [lbvserver_appflowpolicy_binding() for _ in range(length)]
|
|
import os
import sys
import Options
import CommodityPrice
import threading
import time
class EdceWrapper:
    """Wrapper around the EDCE companion-API client.

    Runs EDCE queries on background threads and folds the returned
    station market data into the local commodity-price database.
    """
    # EDCE commodity names that differ from the local database's names.
    _commodityNameTranslationTable = {
        "Fruit And Vegetables" : "Fruit and Vegetables",
        "Atmospheric Extractors" : "Atmospheric Processors",
        "Marine Supplies" : "Marine Equipment",
        "Agricultural Medicines" : "Agri-Medicines",
        "Basic Narcotics" : "Narcotics",
        "Drones" : "Limpet",
        "Terrain Enrichment Systems" : "Land Enrichment Systems",
        "Non Lethal Weapons" : "Non-lethal Weapons",
        "Heliostatic Furnaces" : "Microbial Furnaces",
        "Bio Reducing Lichen" : "Bioreducing Lichen",
        "Hazardous Environment Suits" : "H.E. Suits",
        "Auto Fabricators" : "Auto-Fabricators"
    }
    def __init__(self, edcePath, db, postMarketData, verificationCodeInputFn):
        """Configure the embedded edce package and prepare worker state.

        :param edcePath: directory containing the edce package
        :param db: local database handle (systems, commodities, prices)
        :param postMarketData: if truthy, forward fetched data to EDDN
        :param verificationCodeInputFn: callback supplying the companion
            API verification code when a new login is required
        """
        self.verificationFn = verificationCodeInputFn
        self.db = db
        self.lock = threading.RLock()
        # Holds the exception that disabled the wrapper, or None while OK.
        self.disabled = None
        sys.path.insert(0, edcePath)
        sys.path.insert(0, os.path.join(edcePath, "edce"))
        import edce.query
        import edce.error
        import edce.util
        import edce.eddn
        import edce.config
        import edce.globals
        edce.eddn.testSchema = False
        edce.query.minimumDelay = 0
        print(Options.getPath())
        import configparser
        edce.config.setConfigFile(Options.getPath("edce.ini"))
        edce.config.writeConfig(Options.get("elite-username", ""), Options.get("elite-password", ""), True, Options.getPath(), Options.getPath(), Options.getPath())
        self.resultsUpdated = True
        self.resultsLastUpdated=0
        self.activeThreads = []
        # Latest fetched-but-unprocessed query result (guarded by self.lock).
        self.result = None
        self.finishedListeners = []
        # NOTE(review): attribute keeps the historical 'postMarkedData'
        # spelling for compatibility with any external readers.
        self.postMarkedData = postMarketData
        self.lastUpdatedInfo = {"starportName" : "",
                                "systemName" : "",
                                "docked" : False}
    def addFinishedListener(self, listener):
        """Register a callable invoked after each successful price update."""
        self.finishedListeners.append(listener)
    def callFinishedListeners(self, data):
        """Invoke all registered listeners with *data*."""
        for i in self.finishedListeners:
            i(data)
    def _updateResults(self):
        """Worker-thread body: query EDCE and stash the result for the
        main thread; any failure permanently disables the wrapper."""
        import edce.query
        import edce.error
        import edce.util
        import edce.eddn
        import edce.config
        import edce.globals
        try:
            if self.disabled is not None:
                return
            res = edce.query.performQuery(verificationCodeSupplyFn = self.verificationFn)
            result = edce.util.edict(res)
            if self.postMarkedData:
                # Only forward to EDDN when actually docked at a station.
                if "docked" in result.commander and result.commander.docked:
                    edce.eddn.postMarketData(result)
            with self.lock:
                self.result = result
            print("New data fetched from edce")
        except Exception as ex:
            self.disabled = ex
    def isDisabled(self):
        """Return the disabling exception, or None if the wrapper is OK."""
        return self.disabled
    def _cleanThreads(self):
        """Drop finished worker threads from the active list.

        Bug fix: the previous implementation collected ascending indexes
        and deleted them one at a time, which shifts later indexes after
        each deletion -- it could remove a live thread or raise
        IndexError. Rebuilding the list avoids index arithmetic entirely.
        """
        self.activeThreads = [t for t in self.activeThreads if t.is_alive()]
    def fetchNewInfo(self):
        """Start a background thread that fetches fresh EDCE data."""
        self._cleanThreads()
        thread = threading.Thread(target = self._updateResults)
        self.activeThreads.append(thread)
        thread.start()
    def isActive(self):
        """Return True while any fetch thread is still running."""
        for i in self.activeThreads:
            if i.is_alive():
                return True
        return False
    def join(self):
        """Block until all fetch threads have finished."""
        self._cleanThreads()
        for i in self.activeThreads:
            i.join()
        self._cleanThreads()
    def updateResults(self):
        """Consume a pending fetch result, if any.

        :return: True if a result was processed, False otherwise
        """
        self._cleanThreads()
        result = None
        with self.lock:
            result = self.result
            if result is not None:
                self.result = None
        if result is not None:
            self._updateImpl(result)
            return True
        else:
            return False
    def _updateImpl(self, results):
        """Write the fetched station commodity prices into the database."""
        starportName = results.lastStarport.name
        systemName = results.lastSystem.name
        docked = results.commander.docked
        systems = self.db.getSystemByName(systemName)
        if len(systems) == 0:
            print("This is not known system")
            return
        if len(systems) > 1:
            # Ambiguous system name: refuse to guess.
            print("More than one hit for {0} skipping".format(systemName))
            return
        system = systems[0]
        base = self.findBase(starportName, system)
        if base is None:
            print("Cannot find {0} in database. Skipping".format(starportName))
            return
        pricesLst = base.getPrices()
        newPrices = []
        # Index existing price rows by commodity name for O(1) lookup.
        prices = dict(zip([i.getCommodity().getName() for i in pricesLst], pricesLst))
        print("Updating prices for base {0}({1}) at system {2}".format(starportName, base.getId(), systemName))
        for i in results.lastStarport.commodities:
            localName = self._getLocalCommodityName(i.name)
            if not localName in prices:
                print("Commity price '{0}' for base {1} is not in database... creating".format(localName, base.getName()))
                commodity = self.db.getCommodityByName(localName)
                if commodity is None:
                    print("Commity '{0}' for base {1} is not in database... skipping".format(localName, base.getName()))
                    continue
                priceData = CommodityPrice.CommodityPrice(self.db, None, commodity.getId(), i.sellPrice, i.buyPrice, i.demand, 0, base.getId(), i.stock)
                priceData.touch()
                newPrices.append(priceData)
            else:
                priceData = prices[localName]
                priceData.setImportPrice(i.sellPrice)
                priceData.setExportPrice(i.buyPrice)
                priceData.setSupply(i.stock)
                priceData.setDemand(i.demand)
        for i in pricesLst:
            i.commitChanges()
        for i in newPrices:
            i.commitChanges()
        print("prices updated from edce!")
        self.resultsLastUpdated=time.time()
        self.lastUpdatedInfo = {"starportName" : starportName,
                                "systemName" : systemName,
                                "docked" : docked}
        self.callFinishedListeners(dict(self.lastUpdatedInfo))
    def findBase(self, starportName, system):
        """Return the station named *starportName* in *system*, or None."""
        base = None
        for i in system.getStations():
            if i.getName() == starportName:
                base = i
                break
        return base
    def getLastUpdatedInfo(self):
        """Return info about the most recently processed update."""
        return self.lastUpdatedInfo
    def getResult(self):
        """Return the raw pending result (unsynchronized peek)."""
        return self.result
    def _getLocalCommodityName(self, name):
        """Translate an EDCE commodity name to the local database name."""
        if name in EdceWrapper._commodityNameTranslationTable:
            return EdceWrapper._commodityNameTranslationTable[name]
        else:
            return name
|
|
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django.core.files import File
import PIL
from PIL import Image
from mock import patch, MagicMock
from webapp.forms import PhotoForm
class DashboardTestCase(TestCase):
    """
    Testcase for the Dashboard View .
    """
    fixtures = ['sample_data.json']
    def setUp(self):
        """
        operations to be done before every test
        """
        # create a test client:
        self.client = Client()
        self.user_credentials = {
            'id': '10207225470607962',
            'first_name': 'Awili',
            'last_name': 'Uzo',
            'email': 'awilo@ymail.com',
            'photo': 'http://graph.facebook.com/sample_image',
            'picture': {
                'data': {
                    'url': 'http://graph.facebook.com/sample_image',
                    'is_silouhette': True,
                }
            }
        }
        # login a user:
        self.client.post(
            reverse('webapp:facebook_auth'),
            self.user_credentials
        )
        self.mock_photo = MagicMock()
        self.mock_photo.serialize = MagicMock(return_value="<photo>")
    def test_logged_in_user_can_access_the_dashboard_view(self):
        """
        Tests that a get request to the dashboard
        view returns and renders successfully.
        """
        response = self.client.get(
            reverse('webapp:dashboard')
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('Logged in as', response.content)
        self.assertIn('photo_effects', response.context)
    def test_unauthenticated_user_is_redirected_to_index_auth_view(self):
        """
        Tests that a request to the dashboard by an unauthenticated
        user is redirected to the index/auth view.
        """
        # fresh Client() => no session cookie, so the view should redirect
        response = Client().get(
            reverse('webapp:dashboard')
        )
        self.assertEquals(response.status_code, 302)
    def test_user_can_view_their_uploaded_photos(self):
        """
        Tests that a user sees a list of their uploaded
        photos as loaded by the photoList component.
        """
        response = self.client.get(
            reverse('webapp:photos')
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('success', response.content)
        self.assertIn('data', response.content)
    @patch(
        'webapp.forms.PhotoForm.save',
        return_value=MagicMock(
            serialize=MagicMock(return_value="<serialized_photo>")
        )
    )
    @patch('PIL.Image')
    def test_user_can_upload_photo(self, mock_image, mock_form_save):
        """
        Tests that an authenticated user can upload a
        photo to the server from their file system.
        """
        # Bug fix: @patch decorators apply bottom-up, so the innermost
        # patch ('PIL.Image') supplies the FIRST mock argument; the two
        # parameter names were swapped.
        mock_uploaded_file = MagicMock(
            spec=File,
            name="mock_image_file.jpg",
            content_type='image/jpeg',
            size=84625
        )
        response = self.client.post(
            reverse('webapp:photo_upload'),
            {'image': mock_uploaded_file}
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('success', response.content)
        self.assertIn('photoData', response.content)
        self.assertTrue(PhotoForm.save.called)
    @patch(
        'webapp.forms.PhotoForm.save',
        return_value=MagicMock(
            serialize=MagicMock(return_value="<serialized_photo>")
        )
    )
    @patch('PIL.Image')
    def test_cannot_upload_invalid_photo(self, mock_image, mock_form_save):
        """
        Tests that an authenticated user cannot a upload
        an invalid photo file or data to the server.
        """
        # (mock parameter names fixed to match bottom-up decorator order)
        response = self.client.post(
            reverse('webapp:photo_upload'),
            {'image': ''}
        )
        self.assertEquals(response.status_code, 403)
        self.assertIn('invalid', response.content)
    @patch.object(Image, "open")
    def test_fetching_photo_without_effects(self, mock_image_open):
        """
        Tests that photos can be fetched without
        any effects.
        """
        response = self.client.get(
            reverse(
                'webapp:photo_service',
                kwargs={
                    'username': 'AwiliUzo',
                    'filename': 'e3w3wl9m21rz.jpg',
                }
            )
        )
        self.assertEquals(response.status_code, 200)
    @patch.object(Image, "open")
    def test_photo_can_be_downloaded(self, mock_image_open):
        """
        Tests that photos can be downloaded
        """
        response = self.client.get(
            reverse(
                'webapp:photo_service',
                kwargs={
                    'username': 'AwiliUzo',
                    'filename': 'e3w3wl9m21rz.jpg',
                }
            ) + "?download=true"
        )
        self.assertEquals(response.status_code, 200)
        # Bug fix: assertIn(member, container) -- the expected substring
        # must be the first argument; the original call only passed when
        # the header exactly equalled the whole string.
        self.assertIn('attachment; filename="e3w3wl9m21rz.jpg"',
                      response['Content-Disposition'])
    def test_user_can_update_photo_caption(self):
        """
        Tests that user can update the default
        caption for photos.
        """
        response = self.client.post(
            reverse(
                'webapp:photo_update_delete',
                kwargs={'public_id': 'e3w3wl9m21rz'}
            ),
            {'caption': 'Caribbean Chilling'}
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('success', response.content)
        self.assertIn('photoData', response.content)
        self.assertIn('Caribbean Chilling', response.content)
    def test_user_can_update_effects_caption(self):
        """
        Tests that user can update the persistent
        effects for a photo.
        """
        response = self.client.post(
            reverse(
                'webapp:photo_update_delete',
                kwargs={'public_id': 'e3w3wl9m21rz'}
            ),
            {'effects': 'charcoal,blur'}
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('success', response.content)
        self.assertIn('photoData', response.content)
        self.assertIn('charcoal,blur', response.content)
    @patch("os.path.exists", return_value=True)
    @patch("os.remove")
    def test_user_can_delete_photo(self, mock_os_remove, mock_path_exists):
        """
        Tests that user can delete uploaded photos.
        """
        # (mock parameter names fixed: the bottom patch, os.remove,
        # supplies the first mock argument)
        response = self.client.delete(
            reverse(
                'webapp:photo_update_delete',
                kwargs={'public_id': 'e3w3wl9m21rz'}
            )
        )
        self.assertEquals(response.status_code, 200)
        self.assertIn('success', response.content)
|
|
"""Support for Amcrest IP cameras."""
import asyncio
from datetime import timedelta
import logging
from urllib3.exceptions import HTTPError
from amcrest import AmcrestError
import voluptuous as vol
from homeassistant.components.camera import (
Camera, CAMERA_SERVICE_SCHEMA, SUPPORT_ON_OFF, SUPPORT_STREAM)
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import (
CONF_NAME, STATE_ON, STATE_OFF)
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_stream, async_aiohttp_proxy_web,
async_get_clientsession)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CAMERA_WEB_SESSION_TIMEOUT, CAMERAS, DATA_AMCREST, DEVICES, SERVICE_UPDATE)
from .helpers import log_update_error, service_signal
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=15)
# Supported ways to pull a live stream from the camera.
STREAM_SOURCE_LIST = [
    'snapshot',
    'mjpeg',
    'rtsp',
]
# Service names registered by this platform.
_SRV_EN_REC = 'enable_recording'
_SRV_DS_REC = 'disable_recording'
_SRV_EN_AUD = 'enable_audio'
_SRV_DS_AUD = 'disable_audio'
_SRV_EN_MOT_REC = 'enable_motion_recording'
_SRV_DS_MOT_REC = 'disable_motion_recording'
_SRV_GOTO = 'goto_preset'
_SRV_CBW = 'set_color_bw'
_SRV_TOUR_ON = 'start_tour'
_SRV_TOUR_OFF = 'stop_tour'
_ATTR_PRESET = 'preset'
_ATTR_COLOR_BW = 'color_bw'
# Day/night color modes; _CBW's list order is significant -- update() uses
# the API's mode value to index into it.
_CBW_COLOR = 'color'
_CBW_AUTO = 'auto'
_CBW_BW = 'bw'
_CBW = [_CBW_COLOR, _CBW_AUTO, _CBW_BW]
_SRV_GOTO_SCHEMA = CAMERA_SERVICE_SCHEMA.extend({
    vol.Required(_ATTR_PRESET): vol.All(vol.Coerce(int), vol.Range(min=1)),
})
_SRV_CBW_SCHEMA = CAMERA_SERVICE_SCHEMA.extend({
    vol.Required(_ATTR_COLOR_BW): vol.In(_CBW),
})
# service name -> (schema, handler method name, attribute names forwarded
# from the service call to the handler).
CAMERA_SERVICES = {
    _SRV_EN_REC: (CAMERA_SERVICE_SCHEMA, 'async_enable_recording', ()),
    _SRV_DS_REC: (CAMERA_SERVICE_SCHEMA, 'async_disable_recording', ()),
    _SRV_EN_AUD: (CAMERA_SERVICE_SCHEMA, 'async_enable_audio', ()),
    _SRV_DS_AUD: (CAMERA_SERVICE_SCHEMA, 'async_disable_audio', ()),
    _SRV_EN_MOT_REC: (
        CAMERA_SERVICE_SCHEMA, 'async_enable_motion_recording', ()),
    _SRV_DS_MOT_REC: (
        CAMERA_SERVICE_SCHEMA, 'async_disable_motion_recording', ()),
    _SRV_GOTO: (_SRV_GOTO_SCHEMA, 'async_goto_preset', (_ATTR_PRESET,)),
    _SRV_CBW: (_SRV_CBW_SCHEMA, 'async_set_color_bw', (_ATTR_COLOR_BW,)),
    _SRV_TOUR_ON: (CAMERA_SERVICE_SCHEMA, 'async_start_tour', ()),
    _SRV_TOUR_OFF: (CAMERA_SERVICE_SCHEMA, 'async_stop_tour', ()),
}
_BOOL_TO_STATE = {True: STATE_ON, False: STATE_OFF}
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up an Amcrest IP Camera."""
    # Only set up via discovery from the amcrest component.
    if discovery_info is None:
        return
    camera_name = discovery_info[CONF_NAME]
    device = hass.data[DATA_AMCREST][DEVICES][camera_name]
    entity = AmcrestCam(camera_name, device, hass.data[DATA_FFMPEG])
    async_add_entities([entity], True)
class AmcrestCam(Camera):
"""An implementation of an Amcrest IP camera."""
    def __init__(self, name, device, ffmpeg):
        """Initialize an Amcrest camera.

        :param name: configured entity name
        :param device: amcrest device wrapper (api, stream config, auth)
        :param ffmpeg: hass ffmpeg manager used for rtsp proxying
        """
        super().__init__()
        self._name = name
        self._api = device.api
        self._ffmpeg = ffmpeg
        self._ffmpeg_arguments = device.ffmpeg_arguments
        self._stream_source = device.stream_source
        self._resolution = device.resolution
        self._token = self._auth = device.authentication
        self._control_light = device.control_light
        # Camera state, refreshed by update(); None means "not yet known".
        self._is_recording = False
        self._motion_detection_enabled = None
        self._brand = None
        self._model = None
        self._audio_enabled = None
        self._motion_recording_enabled = None
        self._color_bw = None
        self._rtsp_url = None
        # Serializes snapshot requests to the camera.
        self._snapshot_lock = asyncio.Lock()
        # Dispatcher unsubscribe callbacks, released on entity removal.
        self._unsub_dispatcher = []
        self._update_succeeded = False
    async def async_camera_image(self):
        """Return a still image response from the camera.

        Returns None (with a warning) when the camera is offline or
        turned off, or when the snapshot request fails.
        """
        available = self.available
        if not available or not self.is_on:
            _LOGGER.warning(
                'Attempt to take snaphot when %s camera is %s', self.name,
                'offline' if not available else 'off')
            return None
        # Lock so concurrent HA requests don't hammer the camera with
        # overlapping snapshot calls.
        async with self._snapshot_lock:
            try:
                # Send the request to snap a picture and return raw jpg data
                # (blocking library call, run in the executor)
                response = await self.hass.async_add_executor_job(
                    self._api.snapshot)
                return response.data
            except (AmcrestError, HTTPError) as error:
                log_update_error(
                    _LOGGER, 'get image from', self.name, 'camera', error)
                return None
    async def handle_async_mjpeg_stream(self, request):
        """Return an MJPEG stream.

        Depending on the configured stream source this either delegates to
        the snapshot-based default, proxies the camera's native MJPEG URL,
        or transcodes the RTSP stream through ffmpeg.
        """
        # The snapshot implementation is handled by the parent class
        if self._stream_source == 'snapshot':
            return await super().handle_async_mjpeg_stream(request)
        if not self.available:
            _LOGGER.warning(
                'Attempt to stream %s when %s camera is offline',
                self._stream_source, self.name)
            return None
        if self._stream_source == 'mjpeg':
            # stream an MJPEG image stream directly from the camera
            websession = async_get_clientsession(self.hass)
            streaming_url = self._api.mjpeg_url(typeno=self._resolution)
            stream_coro = websession.get(
                streaming_url, auth=self._token,
                timeout=CAMERA_WEB_SESSION_TIMEOUT)
            return await async_aiohttp_proxy_web(
                self.hass, request, stream_coro)
        # streaming via ffmpeg
        from haffmpeg.camera import CameraMjpeg
        # _rtsp_url is refreshed by update(); assumed non-None once the
        # first successful update has run.
        streaming_url = self._rtsp_url
        stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
        await stream.open_camera(
            streaming_url, extra_cmd=self._ffmpeg_arguments)
        try:
            stream_reader = await stream.get_reader()
            return await async_aiohttp_proxy_stream(
                self.hass, request, stream_reader,
                self._ffmpeg.ffmpeg_stream_content_type)
        finally:
            # Always release the ffmpeg subprocess, even if proxying fails.
            await stream.close()
    # Entity property overrides
    @property
    def should_poll(self) -> bool:
        """Return True if entity has to be polled for state.

        False if entity pushes its state to HA.
        """
        return True
    @property
    def name(self):
        """Return the name of this camera."""
        return self._name
    @property
    def device_state_attributes(self):
        """Return the Amcrest-specific camera state attributes.

        Attributes whose value is still None (not yet fetched) are omitted.
        """
        attr = {}
        if self._audio_enabled is not None:
            attr['audio'] = _BOOL_TO_STATE.get(self._audio_enabled)
        if self._motion_recording_enabled is not None:
            attr['motion_recording'] = _BOOL_TO_STATE.get(
                self._motion_recording_enabled)
        if self._color_bw is not None:
            attr[_ATTR_COLOR_BW] = self._color_bw
        return attr
    @property
    def available(self):
        """Return True if entity is available."""
        return self._api.available
    @property
    def supported_features(self):
        """Return supported features."""
        return SUPPORT_ON_OFF | SUPPORT_STREAM
    # Camera property overrides
    @property
    def is_recording(self):
        """Return true if the device is recording."""
        return self._is_recording
    @property
    def brand(self):
        """Return the camera brand."""
        return self._brand
    @property
    def motion_detection_enabled(self):
        """Return the camera motion detection status."""
        return self._motion_detection_enabled
    @property
    def model(self):
        """Return the camera model."""
        return self._model
    async def stream_source(self):
        """Return the source of the stream (the camera's RTSP URL)."""
        return self._rtsp_url
    @property
    def is_on(self):
        """Return true if on (i.e. the video stream is enabled)."""
        return self.is_streaming
    # Other Entity method overrides

    async def async_on_demand_update(self):
        """Update state."""
        self.async_schedule_update_ha_state(True)

    async def async_added_to_hass(self):
        """Subscribe to signals and add camera to list."""
        # Connect each camera service signal to its handler method;
        # params[1] holds the handler's attribute name on this entity.
        for service, params in CAMERA_SERVICES.items():
            self._unsub_dispatcher.append(async_dispatcher_connect(
                self.hass,
                service_signal(service, self.entity_id),
                getattr(self, params[1])))
        # Also listen for the component-wide update signal for this camera.
        self._unsub_dispatcher.append(async_dispatcher_connect(
            self.hass, service_signal(SERVICE_UPDATE, self._name),
            self.async_on_demand_update))
        self.hass.data[DATA_AMCREST][CAMERAS].append(self.entity_id)
    async def async_will_remove_from_hass(self):
        """Remove camera from list and disconnect from signals."""
        self.hass.data[DATA_AMCREST][CAMERAS].remove(self.entity_id)
        # Undo every async_dispatcher_connect made in async_added_to_hass.
        for unsub_dispatcher in self._unsub_dispatcher:
            unsub_dispatcher()
    def update(self):
        """Update entity status.

        Attributes are only fetched while the previous attempt has not
        succeeded; on-device state changes made through this entity are
        tracked optimistically by the _enable_* methods.
        """
        if not self.available or self._update_succeeded:
            if not self.available:
                # Force a refetch once the camera becomes reachable again.
                self._update_succeeded = False
            return
        _LOGGER.debug('Updating %s camera', self.name)
        try:
            if self._brand is None:
                # Response looks like 'vendor=<brand>'.
                resp = self._api.vendor_information.strip()
                if resp.startswith('vendor='):
                    self._brand = resp.split('=')[-1]
                else:
                    self._brand = 'unknown'
            if self._model is None:
                # Response looks like 'type=<model>'.
                resp = self._api.device_type.strip()
                if resp.startswith('type='):
                    self._model = resp.split('=')[-1]
                else:
                    self._model = 'unknown'
            self.is_streaming = self._api.video_enabled
            self._is_recording = self._api.record_mode == 'Manual'
            self._motion_detection_enabled = (
                self._api.is_motion_detector_on())
            self._audio_enabled = self._api.audio_enabled
            self._motion_recording_enabled = (
                self._api.is_record_on_motion_detection())
            self._color_bw = _CBW[self._api.day_night_color]
            self._rtsp_url = self._api.rtsp_url(typeno=self._resolution)
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'get', self.name, 'camera attributes', error)
            self._update_succeeded = False
        else:
            self._update_succeeded = True
    # Other Camera method overrides

    def turn_off(self):
        """Turn off camera."""
        self._enable_video_stream(False)

    def turn_on(self):
        """Turn on camera."""
        self._enable_video_stream(True)

    def enable_motion_detection(self):
        """Enable motion detection in the camera."""
        self._enable_motion_detection(True)

    def disable_motion_detection(self):
        """Disable motion detection in camera."""
        self._enable_motion_detection(False)
    # Additional Amcrest Camera service methods

    # Each wrapper runs its blocking counterpart in the executor so the
    # event loop is never blocked by camera I/O.

    async def async_enable_recording(self):
        """Call the job and enable recording."""
        await self.hass.async_add_executor_job(self._enable_recording, True)

    async def async_disable_recording(self):
        """Call the job and disable recording."""
        await self.hass.async_add_executor_job(self._enable_recording, False)

    async def async_enable_audio(self):
        """Call the job and enable audio."""
        await self.hass.async_add_executor_job(self._enable_audio, True)

    async def async_disable_audio(self):
        """Call the job and disable audio."""
        await self.hass.async_add_executor_job(self._enable_audio, False)

    async def async_enable_motion_recording(self):
        """Call the job and enable motion recording."""
        await self.hass.async_add_executor_job(self._enable_motion_recording,
                                               True)

    async def async_disable_motion_recording(self):
        """Call the job and disable motion recording."""
        await self.hass.async_add_executor_job(self._enable_motion_recording,
                                               False)

    async def async_goto_preset(self, preset):
        """Call the job and move camera to preset position."""
        await self.hass.async_add_executor_job(self._goto_preset, preset)

    async def async_set_color_bw(self, color_bw):
        """Call the job and set camera color mode."""
        await self.hass.async_add_executor_job(self._set_color_bw, color_bw)

    async def async_start_tour(self):
        """Call the job and start camera tour."""
        await self.hass.async_add_executor_job(self._start_tour, True)

    async def async_stop_tour(self):
        """Call the job and stop camera tour."""
        await self.hass.async_add_executor_job(self._start_tour, False)
    # Methods to send commands to Amcrest camera and handle errors

    def _enable_video_stream(self, enable):
        """Enable or disable camera video stream."""
        # Given the way the camera's state is determined by
        # is_streaming and is_recording, we can't leave
        # recording on if video stream is being turned off.
        if self.is_recording and not enable:
            self._enable_recording(False)
        try:
            self._api.video_enabled = enable
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'enable' if enable else 'disable', self.name,
                'camera video stream', error)
        else:
            # Only record the new state if the device accepted the command.
            self.is_streaming = enable
            self.schedule_update_ha_state()
        # Keep the indicator light in sync with audio/video activity.
        if self._control_light:
            self._enable_light(self._audio_enabled or self.is_streaming)
    def _enable_recording(self, enable):
        """Turn recording on or off."""
        # Given the way the camera's state is determined by
        # is_streaming and is_recording, we can't leave
        # video stream off if recording is being turned on.
        if not self.is_streaming and enable:
            self._enable_video_stream(True)
        # Device record modes: 0 = Automatic, 1 = Manual.
        rec_mode = {'Automatic': 0, 'Manual': 1}
        try:
            self._api.record_mode = rec_mode[
                'Manual' if enable else 'Automatic']
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'enable' if enable else 'disable', self.name,
                'camera recording', error)
        else:
            self._is_recording = enable
            self.schedule_update_ha_state()
def _enable_motion_detection(self, enable):
"""Enable or disable motion detection."""
try:
self._api.motion_detection = str(enable).lower()
except AmcrestError as error:
log_update_error(
_LOGGER, 'enable' if enable else 'disable', self.name,
'camera motion detection', error)
else:
self._motion_detection_enabled = enable
self.schedule_update_ha_state()
    def _enable_audio(self, enable):
        """Enable or disable audio stream."""
        try:
            self._api.audio_enabled = enable
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'enable' if enable else 'disable', self.name,
                'camera audio stream', error)
        else:
            # Only record the new state if the device accepted the command.
            self._audio_enabled = enable
            self.schedule_update_ha_state()
        # Keep the indicator light in sync with audio/video activity.
        if self._control_light:
            self._enable_light(self._audio_enabled or self.is_streaming)
def _enable_light(self, enable):
"""Enable or disable indicator light."""
try:
self._api.command(
'configManager.cgi?action=setConfig&LightGlobal[0].Enable={}'
.format(str(enable).lower()))
except AmcrestError as error:
log_update_error(
_LOGGER, 'enable' if enable else 'disable', self.name,
'indicator light', error)
def _enable_motion_recording(self, enable):
"""Enable or disable motion recording."""
try:
self._api.motion_recording = str(enable).lower()
except AmcrestError as error:
log_update_error(
_LOGGER, 'enable' if enable else 'disable', self.name,
'camera motion recording', error)
else:
self._motion_recording_enabled = enable
self.schedule_update_ha_state()
    def _goto_preset(self, preset):
        """Move camera position and zoom to preset."""
        try:
            self._api.go_to_preset(
                action='start', preset_point_number=preset)
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'move', self.name,
                'camera to preset {}'.format(preset), error)
    def _set_color_bw(self, cbw):
        """Set camera color mode."""
        try:
            # _CBW maps device index -> mode name; send the index.
            # NOTE(review): a cbw value not in _CBW raises ValueError,
            # which is not caught here — confirm callers validate input.
            self._api.day_night_color = _CBW.index(cbw)
        except AmcrestError as error:
            log_update_error(
                _LOGGER, 'set', self.name,
                'camera color mode to {}'.format(cbw), error)
        else:
            self._color_bw = cbw
            self.schedule_update_ha_state()
def _start_tour(self, start):
"""Start camera tour."""
try:
self._api.tour(start=start)
except AmcrestError as error:
log_update_error(
_LOGGER, 'start' if start else 'stop', self.name,
'camera tour', error)
|
|
import abc
import asyncio
import collections
import inspect
import keyword
import os
import re
import sys
import warnings
from collections.abc import Container, Iterable, Sized
from pathlib import Path
from types import MappingProxyType
from yarl import URL, quote, unquote
from . import hdrs
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .file_sender import FileSender
from .protocol import HttpVersion11
from .web_exceptions import (HTTPExpectationFailed, HTTPForbidden,
HTTPMethodNotAllowed, HTTPNotFound)
from .web_reqrep import Response, StreamResponse
__all__ = ('UrlDispatcher', 'UrlMappingMatchInfo',
'AbstractResource', 'Resource', 'PlainResource', 'DynamicResource',
'AbstractRoute', 'ResourceRoute',
'StaticResource', 'View')
PY_35 = sys.version_info >= (3, 5)
HTTP_METHOD_RE = re.compile(r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$")
class AbstractResource(Sized, Iterable):
    """Base class for resources: a named, iterable collection of routes
    that can match a URL path.
    """

    def __init__(self, *, name=None):
        self._name = name

    @property
    def name(self):
        # Optional name used for reverse URL lookup on the dispatcher.
        return self._name

    @abc.abstractmethod  # pragma: no branch
    def url(self, **kwargs):
        """Construct url for resource with additional params.

        Deprecated, use url_for() instead.
        """
        # Subclasses call super().url() so this warning fires once.
        warnings.warn(".url(...) is deprecated, use .url_for instead",
                      DeprecationWarning,
                      stacklevel=3)

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs):
        """Construct url for resource with additional params."""

    @asyncio.coroutine
    @abc.abstractmethod  # pragma: no branch
    def resolve(self, request):
        """Resolve resource

        Return (UrlMappingMatchInfo, allowed_methods) pair."""

    @abc.abstractmethod
    def add_prefix(self, prefix):
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self):
        """Return a dict with additional info useful for introspection"""
class AbstractRoute(abc.ABC):
    """Base class for routes: binds an HTTP method to a handler."""

    def __init__(self, method, handler, *,
                 expect_handler=None,
                 resource=None):

        if expect_handler is None:
            expect_handler = _defaultExpectHandler
        assert asyncio.iscoroutinefunction(expect_handler), \
            'Coroutine is expected, got {!r}'.format(expect_handler)

        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError("{} is not allowed HTTP method".format(method))

        assert callable(handler), handler
        if asyncio.iscoroutinefunction(handler):
            pass
        elif inspect.isgeneratorfunction(handler):
            warnings.warn("Bare generators are deprecated, "
                          "use @coroutine wrapper", DeprecationWarning)
        elif (isinstance(handler, type) and
              issubclass(handler, AbstractView)):
            pass
        else:
            # Plain callable: wrap it so its result can be awaited,
            # unwrapping a coroutine if the callable returned one.
            @asyncio.coroutine
            def handler_wrapper(*args, **kwargs):
                result = old_handler(*args, **kwargs)
                if asyncio.iscoroutine(result):
                    result = yield from result
                return result
            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self):
        return self._method

    @property
    def handler(self):
        return self._handler

    @property
    @abc.abstractmethod
    def name(self):
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self):
        return self._resource

    @abc.abstractmethod
    def get_info(self):
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args, **kwargs):
        """Construct url for route with additional params."""

    @abc.abstractmethod  # pragma: no branch
    def url(self, **kwargs):
        """Construct url for resource with additional params.

        Deprecated, use url_for() instead.
        """
        warnings.warn(".url(...) is deprecated, use .url_for instead",
                      DeprecationWarning,
                      stacklevel=3)

    @asyncio.coroutine
    def handle_expect_header(self, request):
        # Delegate Expect-header processing to the configured handler.
        return (yield from self._expect_handler(request))
class UrlMappingMatchInfo(dict, AbstractMatchInfo):
    """Successful match result: a dict of extracted path variables
    plus the route that matched.
    """

    def __init__(self, match_dict, route):
        super().__init__(match_dict)
        self._route = route
        self._apps = []
        self._frozen = False

    @property
    def handler(self):
        return self._route.handler

    @property
    def route(self):
        return self._route

    @property
    def expect_handler(self):
        return self._route.handle_expect_header

    @property
    def http_exception(self):
        # A successful match carries no HTTP error.
        return None

    def get_info(self):
        return self._route.get_info()

    @property
    def apps(self):
        return tuple(self._apps)

    @property
    def middlewares(self):
        # Apps are stored innermost-first; each contributes its
        # middlewares in reverse registration order.
        middlewares = []
        for app in self._apps:
            middlewares.extend(reversed(app.middlewares))
        return middlewares

    def add_app(self, app):
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        self._apps.insert(0, app)

    def freeze(self):
        self._frozen = True

    def __repr__(self):
        return "<MatchInfo {}: {}>".format(super().__repr__(), self._route)
class MatchInfoError(UrlMappingMatchInfo):
    """Match info representing a failed resolution (e.g. 404/405);
    its route's handler raises the stored HTTP exception.
    """

    def __init__(self, http_exception):
        self._exception = http_exception
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self):
        return self._exception

    def __repr__(self):
        return "<MatchInfoError {}: {}>".format(self._exception.status,
                                                self._exception.reason)
@asyncio.coroutine
def _defaultExpectHandler(request):
    """Default handler for Expect header.

    Just send "100 Continue" to client.
    raise HTTPExpectationFailed if value of header is not "100-continue"
    """
    expect = request.headers.get(hdrs.EXPECT)
    # NOTE(review): expect may be None when the header is absent;
    # presumably this handler is only invoked when it is present — confirm.
    if request.version == HttpVersion11:
        if expect.lower() == "100-continue":
            request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
        else:
            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
class Resource(AbstractResource):
    """Concrete resource base holding at most one route per HTTP method.

    Subclasses provide _match() (plain or dynamic path matching).
    """

    def __init__(self, *, name=None):
        super().__init__(name=name)
        self._routes = []

    def add_route(self, method, handler, *,
                  expect_handler=None):
        # Refuse shadowed registrations: a method already covered
        # (directly or via METH_ANY) would never be executed.
        for route in self._routes:
            if route.method == method or route.method == hdrs.METH_ANY:
                raise RuntimeError("Added route will never be executed, "
                                   "method {route.method} is "
                                   "already registered".format(route=route))

        route = ResourceRoute(method, handler, self,
                              expect_handler=expect_handler)
        self.register_route(route)
        return route

    def register_route(self, route):
        assert isinstance(route, ResourceRoute), \
            'Instance of Route class is required, got {!r}'.format(route)
        self._routes.append(route)

    @asyncio.coroutine
    def resolve(self, request):
        allowed_methods = set()

        match_dict = self._match(request.rel_url.raw_path)
        if match_dict is None:
            # Path does not match at all.
            return None, allowed_methods

        for route in self._routes:
            route_method = route.method
            allowed_methods.add(route_method)

            if route_method == request.method or route_method == hdrs.METH_ANY:
                return UrlMappingMatchInfo(match_dict, route), allowed_methods
        else:
            # Path matched but no registered route accepts this method.
            return None, allowed_methods

    def __len__(self):
        return len(self._routes)

    def __iter__(self):
        return iter(self._routes)
class PlainResource(Resource):
    """Resource matching a fixed, literal URL path."""

    def __init__(self, path, *, name=None):
        super().__init__(name=name)
        assert path.startswith('/')
        self._path = path

    def add_prefix(self, prefix):
        assert prefix.startswith('/')
        assert prefix.endswith('/')
        assert len(prefix) > 1
        self._path = prefix + self._path[1:]

    def _match(self, path):
        # string comparison is about 10 times faster than regexp matching
        if self._path == path:
            return {}
        else:
            return None

    def get_info(self):
        return {'path': self._path}

    def url(self, *, query=None):
        """Deprecated; the super().url() call emits the warning."""
        super().url()
        return str(self.url_for().with_query(query))

    def url_for(self):
        return URL(self._path)

    def __repr__(self):
        name = "'" + self.name + "' " if self.name is not None else ""
        # BUG FIX: the repr template was missing its closing '>'.
        return "<PlainResource {name} {path}>".format(name=name,
                                                      path=self._path)
class DynamicResource(Resource):
    """Resource matching a parameterized path via a compiled regex.

    *pattern* matches incoming paths; *formatter* builds URLs with
    str.format-style placeholders for url_for().
    """

    def __init__(self, pattern, formatter, *, name=None):
        super().__init__(name=name)
        assert pattern.pattern.startswith('\\/')
        assert formatter.startswith('/')
        self._pattern = pattern
        self._formatter = formatter

    def add_prefix(self, prefix):
        assert prefix.startswith('/')
        assert prefix.endswith('/')
        assert len(prefix) > 1
        # Skip the escaped leading '/' (two chars) of the old pattern.
        self._pattern = re.compile(re.escape(prefix)+self._pattern.pattern[2:])
        self._formatter = prefix + self._formatter[1:]

    def _match(self, path):
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        else:
            # Unquote each captured path variable.
            return {key: unquote(value) for key, value in
                    match.groupdict().items()}

    def get_info(self):
        return {'formatter': self._formatter,
                'pattern': self._pattern}

    def url_for(self, **parts):
        url = self._formatter.format_map(parts)
        return URL(url)

    def url(self, *, parts, query=None):
        """Deprecated; the super().url() call emits the warning."""
        super().url(**parts)
        return str(self.url_for(**parts).with_query(query))

    def __repr__(self):
        name = "'" + self.name + "' " if self.name is not None else ""
        # BUG FIX: the repr template was missing its closing '>'.
        return ("<DynamicResource {name} {formatter}>"
                .format(name=name, formatter=self._formatter))
class PrefixResource(AbstractResource):
    """Base for resources that match everything under a URL prefix."""

    def __init__(self, prefix, *, name=None):
        assert prefix.startswith('/'), prefix
        assert prefix.endswith('/'), prefix
        super().__init__(name=name)
        self._prefix = quote(prefix, safe='/')
        # Cached for fast slicing in subclasses' resolve().
        self._prefix_len = len(self._prefix)

    def add_prefix(self, prefix):
        assert prefix.startswith('/')
        assert prefix.endswith('/')
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix[1:]
        self._prefix_len = len(self._prefix)
class StaticResource(PrefixResource):
    """Resource serving files from a directory under a URL prefix."""

    def __init__(self, prefix, directory, *, name=None,
                 expect_handler=None, chunk_size=256*1024,
                 response_factory=StreamResponse,
                 show_index=False, follow_symlinks=False):
        super().__init__(prefix, name=name)
        try:
            directory = Path(directory)
            if str(directory).startswith('~'):
                directory = Path(os.path.expanduser(str(directory)))
            directory = directory.resolve()
            if not directory.is_dir():
                raise ValueError('Not a directory')
        except (FileNotFoundError, ValueError) as error:
            raise ValueError(
                "No directory exists at '{}'".format(directory)) from error
        self._directory = directory
        self._file_sender = FileSender(resp_factory=response_factory,
                                       chunk_size=chunk_size)
        self._show_index = show_index
        self._follow_symlinks = follow_symlinks
        # Static files answer only GET and HEAD.
        self._routes = {'GET': ResourceRoute('GET', self._handle, self,
                                             expect_handler=expect_handler),
                        'HEAD': ResourceRoute('HEAD', self._handle, self,
                                              expect_handler=expect_handler)}

    def url(self, *, filename, query=None):
        """Deprecated, use url_for() instead."""
        return str(self.url_for(filename=filename).with_query(query))

    def url_for(self, *, filename):
        if isinstance(filename, Path):
            filename = str(filename)
        while filename.startswith('/'):
            filename = filename[1:]
        url = self._prefix + quote(filename, safe='/')
        return URL(url)

    def get_info(self):
        return {'directory': self._directory,
                'prefix': self._prefix}

    @asyncio.coroutine
    def resolve(self, request):
        path = request.rel_url.raw_path
        method = request.method
        allowed_methods = {'GET', 'HEAD'}
        if not path.startswith(self._prefix):
            return None, set()

        if method not in allowed_methods:
            return None, allowed_methods

        match_dict = {'filename': unquote(path[self._prefix_len:])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]),
                allowed_methods)

    def __len__(self):
        return len(self._routes)

    def __iter__(self):
        return iter(self._routes.values())

    @asyncio.coroutine
    def _handle(self, request):
        filename = unquote(request.match_info['filename'])
        try:
            filepath = self._directory.joinpath(filename).resolve()
            if not self._follow_symlinks:
                # Reject resolved paths escaping the served directory.
                filepath.relative_to(self._directory)
        except (ValueError, FileNotFoundError) as error:
            # relatively safe
            raise HTTPNotFound() from error
        except Exception as error:
            # perm error or other kind!
            request.app.logger.exception(error)
            raise HTTPNotFound() from error

        # on opening a dir, load its contents if allowed
        if filepath.is_dir():
            if self._show_index:
                try:
                    ret = Response(text=self._directory_as_html(filepath),
                                   content_type="text/html")
                except PermissionError:
                    raise HTTPForbidden()
            else:
                raise HTTPForbidden()
        elif filepath.is_file():
            ret = yield from self._file_sender.send(request, filepath)
        else:
            raise HTTPNotFound

        return ret

    def _directory_as_html(self, filepath):
        """Return a simple HTML index page for the directory *filepath*."""
        # sanity check
        assert filepath.is_dir()

        posix_dir_len = len(self._directory.as_posix())

        # remove the beginning of posix path, so it would be relative
        # to our added static path
        relative_path_to_dir = filepath.as_posix()[posix_dir_len:]
        index_of = "Index of /{}".format(relative_path_to_dir)
        head = "<head>\n<title>{}</title>\n</head>".format(index_of)
        h1 = "<h1>{}</h1>".format(index_of)

        index_list = []
        dir_index = filepath.iterdir()
        # NOTE(review): file names are interpolated without HTML escaping;
        # confirm served directories cannot contain attacker-chosen names.
        for _file in sorted(dir_index):
            # show file url as relative to static path
            file_url = _file.as_posix()[posix_dir_len:]

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = "{}/".format(_file.name)
            else:
                file_name = _file.name

            index_list.append(
                '<li><a href="{url}">{name}</a></li>'.format(url=file_url,
                                                             name=file_name)
            )
        ul = "<ul>\n{}\n</ul>".format('\n'.join(index_list))
        body = "<body>\n{}\n{}\n</body>".format(h1, ul)

        html = "<html>\n{}\n{}\n</html>".format(head, body)

        return html

    def __repr__(self):
        name = "'" + self.name + "'" if self.name is not None else ""
        # BUG FIX: the repr template was missing its closing '>'.
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory)
class PrefixedSubAppResource(PrefixResource):
    """Mounts a sub-application under a URL prefix."""

    def __init__(self, prefix, app):
        super().__init__(prefix)
        self._app = app
        # Rebase every resource of the sub-app under the prefix.
        for resource in app.router.resources():
            resource.add_prefix(prefix)

    def add_prefix(self, prefix):
        super().add_prefix(prefix)
        for resource in self._app.router.resources():
            resource.add_prefix(prefix)

    def url_for(self, *args, **kwargs):
        raise RuntimeError(".url_for() is not supported "
                           "by sub-application root")

    def url(self, **kwargs):
        """Construct url for route with additional params."""
        raise RuntimeError(".url() is not supported "
                           "by sub-application root")

    def get_info(self):
        return {'app': self._app,
                'prefix': self._prefix}

    @asyncio.coroutine
    def resolve(self, request):
        if not request.url.raw_path.startswith(self._prefix):
            return None, set()
        # Delegate to the sub-app's router and tag the result with the app.
        match_info = yield from self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return (match_info, methods)

    def __len__(self):
        return len(self._app.router.routes())

    def __iter__(self):
        return iter(self._app.router.routes())

    def __repr__(self):
        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
            prefix=self._prefix, app=self._app)
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(self, method, handler, resource, *,
                 expect_handler=None):
        super().__init__(method, handler, expect_handler=expect_handler,
                         resource=resource)

    def __repr__(self):
        # BUG FIX: the repr template was missing its closing '>'.
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource,
            handler=self.handler)

    @property
    def name(self):
        # A route's name always mirrors its resource's name.
        return self._resource.name

    def url_for(self, *args, **kwargs):
        """Construct url for route with additional params."""
        return self._resource.url_for(*args, **kwargs)

    def url(self, **kwargs):
        """Construct url for route with additional params.

        Deprecated; the super().url() call emits the warning.
        """
        super().url(**kwargs)
        return self._resource.url(**kwargs)

    def get_info(self):
        return self._resource.get_info()
class SystemRoute(AbstractRoute):
    """Synthetic route whose handler raises a fixed HTTP exception;
    used by MatchInfoError for 404/405 responses.
    """

    def __init__(self, http_exception):
        super().__init__(hdrs.METH_ANY, self._handler)
        self._http_exception = http_exception

    def url_for(self, *args, **kwargs):
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    def url(self, *args, **kwargs):
        raise RuntimeError(".url() is not allowed for SystemRoute")

    @property
    def name(self):
        return None

    def get_info(self):
        return {'http_exception': self._http_exception}

    @asyncio.coroutine
    def _handler(self, request):
        raise self._http_exception

    @property
    def status(self):
        return self._http_exception.status

    @property
    def reason(self):
        return self._http_exception.reason

    def __repr__(self):
        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
class View(AbstractView):
    """Class-based handler dispatching on the lower-cased HTTP method name."""

    @asyncio.coroutine
    def __iter__(self):
        if self.request.method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        # Dispatch to e.g. self.get() / self.post().
        method = getattr(self, self.request.method.lower(), None)
        if method is None:
            self._raise_allowed_methods()
        resp = yield from method()
        return resp

    if PY_35:
        # Allow `await view` on Python 3.5+.
        def __await__(self):
            return (yield from self.__iter__())

    def _raise_allowed_methods(self):
        # 405 listing only the methods this view actually implements.
        allowed_methods = {
            m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
class ResourcesView(Sized, Iterable, Container):
    """Read-only sized/iterable/membership view over a resource list.

    Holds a reference to the underlying list, so later registrations
    are visible through the view.
    """

    def __init__(self, resources):
        self._resources = resources

    def __len__(self):
        return len(self._resources)

    def __iter__(self):
        return iter(self._resources)

    def __contains__(self, resource):
        return resource in self._resources
class RoutesView(Sized, Iterable, Container):
    """Flattened view of every route registered on the given resources."""

    def __init__(self, resources):
        # Snapshot all routes up front, preserving resource order.
        self._routes = [route
                        for resource in resources
                        for route in resource]

    def __len__(self):
        return len(self._routes)

    def __iter__(self):
        return iter(self._routes)

    def __contains__(self, route):
        return route in self._routes
class UrlDispatcher(AbstractRouter, collections.abc.Mapping):
    """Default router mapping URL paths to resources and routes.

    Also acts as a read-only mapping from resource name to resource.
    """

    # {var} placeholder with the default [^{}/]+ pattern.
    DYN = re.compile(r'\{(?P<var>[a-zA-Z][_a-zA-Z0-9]*)\}')
    # {var:regex} placeholder with a custom pattern.
    DYN_WITH_RE = re.compile(
        r'\{(?P<var>[a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}')
    GOOD = r'[^{}/]+'
    ROUTE_RE = re.compile(r'(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})')
    NAME_SPLIT_RE = re.compile('[.:-]')

    def __init__(self, app):
        super().__init__()
        self._resources = []
        self._named_resources = {}
        self._app = app

    @asyncio.coroutine
    def resolve(self, request):
        """Return match info for *request*, or a MatchInfoError (404/405)."""
        method = request.method
        allowed_methods = set()

        for resource in self._resources:
            match_dict, allowed = yield from resource.resolve(request)
            if match_dict is not None:
                return match_dict
            else:
                # Path may have matched with a different method; collect
                # the accepted methods for a possible 405 response.
                allowed_methods |= allowed
        else:
            if allowed_methods:
                return MatchInfoError(HTTPMethodNotAllowed(method,
                                                           allowed_methods))
            else:
                return MatchInfoError(HTTPNotFound())

    def __iter__(self):
        return iter(self._named_resources)

    def __len__(self):
        return len(self._named_resources)

    def __contains__(self, name):
        return name in self._named_resources

    def __getitem__(self, name):
        return self._named_resources[name]

    def resources(self):
        return ResourcesView(self._resources)

    def routes(self):
        return RoutesView(self._resources)

    def named_resources(self):
        return MappingProxyType(self._named_resources)

    def _reg_resource(self, resource):
        """Register *resource*, validating its (optional) name."""
        assert isinstance(resource, AbstractResource), \
            'Instance of AbstractResource class is required, got {!r}'.format(
                resource)
        if self.frozen:
            raise RuntimeError("Cannot register a resource into "
                               "frozen router.")

        name = resource.name

        if name is not None:
            parts = self.NAME_SPLIT_RE.split(name)
            for part in parts:
                if not part.isidentifier() or keyword.iskeyword(part):
                    # BUG FIX: message said 'column'; the separators are
                    # dash, dot or colon.
                    raise ValueError('Incorrect route name {!r}, '
                                     'the name should be a sequence of '
                                     'python identifiers separated '
                                     'by dash, dot or colon'.format(name))
            if name in self._named_resources:
                raise ValueError('Duplicate {!r}, '
                                 'already handled by {!r}'
                                 .format(name, self._named_resources[name]))
            self._named_resources[name] = resource
        self._resources.append(resource)

    def add_resource(self, path, *, name=None):
        """Create and register a Plain- or DynamicResource for *path*."""
        if not path.startswith('/'):
            raise ValueError("path should be started with /")
        if not ('{' in path or '}' in path or self.ROUTE_RE.search(path)):
            # No {placeholders}: fast literal resource.
            resource = PlainResource(quote(path, safe='/'), name=name)
            self._reg_resource(resource)
            return resource

        # Build a regex pattern (for matching) and a format string
        # (for url_for) in parallel.
        pattern = ''
        formatter = ''
        for part in self.ROUTE_RE.split(path):
            match = self.DYN.fullmatch(part)
            if match:
                pattern += '(?P<{}>{})'.format(match.group('var'), self.GOOD)
                formatter += '{' + match.group('var') + '}'
                continue

            match = self.DYN_WITH_RE.fullmatch(part)
            if match:
                pattern += '(?P<{var}>{re})'.format(**match.groupdict())
                formatter += '{' + match.group('var') + '}'
                continue

            if '{' in part or '}' in part:
                raise ValueError("Invalid path '{}'['{}']".format(path, part))

            part = quote(part, safe='/')
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(
                "Bad pattern '{}': {}".format(pattern, exc)) from None
        resource = DynamicResource(compiled, formatter, name=name)
        self._reg_resource(resource)
        return resource

    def add_route(self, method, path, handler,
                  *, name=None, expect_handler=None):
        resource = self.add_resource(path, name=name)
        return resource.add_route(method, handler,
                                  expect_handler=expect_handler)

    def add_static(self, prefix, path, *, name=None, expect_handler=None,
                   chunk_size=256*1024, response_factory=StreamResponse,
                   show_index=False, follow_symlinks=False):
        """Add static files view.

        prefix - url prefix
        path - folder with files

        """
        # TODO: implement via PrefixedResource, not ResourceAdapter
        assert prefix.startswith('/')
        if not prefix.endswith('/'):
            prefix += '/'
        resource = StaticResource(prefix, path,
                                  name=name,
                                  expect_handler=expect_handler,
                                  chunk_size=chunk_size,
                                  response_factory=response_factory,
                                  show_index=show_index,
                                  follow_symlinks=follow_symlinks)
        self._reg_resource(resource)
        return resource

    def add_head(self, *args, **kwargs):
        """Shortcut for add_route with method HEAD."""
        return self.add_route(hdrs.METH_HEAD, *args, **kwargs)

    def add_get(self, *args, **kwargs):
        """Shortcut for add_route with method GET."""
        return self.add_route(hdrs.METH_GET, *args, **kwargs)

    def add_post(self, *args, **kwargs):
        """Shortcut for add_route with method POST."""
        return self.add_route(hdrs.METH_POST, *args, **kwargs)

    def add_put(self, *args, **kwargs):
        """Shortcut for add_route with method PUT."""
        return self.add_route(hdrs.METH_PUT, *args, **kwargs)

    def add_patch(self, *args, **kwargs):
        """Shortcut for add_route with method PATCH."""
        return self.add_route(hdrs.METH_PATCH, *args, **kwargs)

    def add_delete(self, *args, **kwargs):
        """Shortcut for add_route with method DELETE."""
        return self.add_route(hdrs.METH_DELETE, *args, **kwargs)

    def add_subapp(self, prefix, subapp):
        """Mount *subapp* under *prefix* and freeze it."""
        assert prefix.startswith('/')
        if not prefix.endswith('/'):
            prefix += '/'
        if subapp.frozen:
            # BUG FIX: typo 'Cannod' -> 'Cannot'.
            raise RuntimeError("Cannot add frozen application")
        resource = PrefixedSubAppResource(prefix, subapp)
        self._reg_resource(resource)
        self._app._reg_subapp_signals(subapp)
        subapp.freeze()
        return resource
|
|
from django import forms
from django.core.urlresolvers import reverse
from django.core import exceptions
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _, ugettext
from django.utils.http import int_to_base36
from django.utils.importlib import import_module
from django.contrib.auth import authenticate
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.models import Site
from models import EmailAddress
# from models import PasswordReset
from utils import perform_login, send_email_confirmation, setup_user_email
from allauth.utils import (email_address_exists, generate_unique_username,
get_user_model)
from app_settings import AuthenticationMethod, EmailVerificationMethod
import app_settings
from adapter import get_adapter
User = get_user_model()
class PasswordField(forms.CharField):
    """CharField rendered as a password input with a placeholder.

    Pass is_primary_password=True for the main password field; otherwise
    the placeholder reads 'Confirm password'.
    """

    def __init__(self, *args, **kwargs):
        if kwargs.pop('is_primary_password', None):
            placeholder = _('Password')
        else:
            placeholder = _('Confirm password')
        render_value = kwargs.pop(
            'render_value', app_settings.PASSWORD_INPUT_RENDER_VALUE)
        widget = forms.PasswordInput(
            render_value=render_value,
            attrs={'placeholder': placeholder})
        kwargs['widget'] = widget
        super(PasswordField, self).__init__(*args, **kwargs)
class SetPasswordField(PasswordField):
    """Primary password field enforcing the configured minimum length."""

    def __init__(self, *args, **kwargs):
        kwargs['is_primary_password'] = True
        super(SetPasswordField, self).__init__(*args, **kwargs)

    def clean(self, value):
        value = super(SetPasswordField, self).clean(value)
        min_length = app_settings.PASSWORD_MIN_LENGTH
        if len(value) >= min_length:
            return value
        raise forms.ValidationError(_("Password must be a minimum of {0} "
                                      "characters.").format(min_length))
class LoginForm(forms.Form):
    """Sign-in form.

    The type of the 'login' field depends on the configured
    AUTHENTICATION_METHOD (email, username, or either).
    """

    # NOTE(review): no is_primary_password is passed, so the placeholder
    # becomes 'Confirm password' — confirm this is intended for login.
    password = PasswordField(label=_("Password"))
    remember = forms.BooleanField(
        label=_("Remember Me"),
        # help_text = _("If checked you will stay logged in for 3 weeks"),
        required=False
    )

    # Set by clean() on successful authentication.
    user = None

    def __init__(self, *args, **kwargs):
        super(LoginForm, self).__init__(*args, **kwargs)
        # Build the 'login' field to match the configured auth method.
        if app_settings.AUTHENTICATION_METHOD == AuthenticationMethod.EMAIL:
            login_widget = forms.TextInput(attrs={'placeholder':
                                                  _('Email')})
            login_field = forms.EmailField(label=_("Email"),
                                           widget=login_widget)
        elif app_settings.AUTHENTICATION_METHOD \
                == AuthenticationMethod.USERNAME:
            login_widget = forms.TextInput(attrs={'placeholder':
                                                  _('Username')})
            login_field = forms.CharField(label=_("Username"),
                                          widget=login_widget,
                                          max_length=30)
        else:
            assert app_settings.AUTHENTICATION_METHOD \
                == AuthenticationMethod.USERNAME_EMAIL
            login_widget = forms.TextInput(attrs={'placeholder':
                                                  _('Username or e-mail')})
            login_field = forms.CharField(label=ugettext("Login"),
                                          widget=login_widget)
        self.fields["login"] = login_field
        # Keep the login field first (old-style Django field ordering).
        self.fields.keyOrder = ["login", "password", "remember"]

    def user_credentials(self):
        """
        Provides the credentials required to authenticate the user for
        login.
        """
        credentials = {}
        login = self.cleaned_data["login"]
        if app_settings.AUTHENTICATION_METHOD == AuthenticationMethod.EMAIL:
            credentials["email"] = login
        elif (app_settings.AUTHENTICATION_METHOD
              == AuthenticationMethod.USERNAME):
            credentials["username"] = login
        else:
            # USERNAME_EMAIL: heuristically treat '@' + '.' as an e-mail.
            if "@" in login and "." in login:
                credentials["email"] = login
            else:
                credentials["username"] = login
        credentials["password"] = self.cleaned_data["password"]
        return credentials

    def clean(self):
        # Skip authentication when field-level validation already failed.
        if self._errors:
            return
        user = authenticate(**self.user_credentials())
        if user:
            if user.is_active:
                self.user = user
            else:
                raise forms.ValidationError(_("This account is currently"
                                              " inactive."))
        else:
            # Tailor the error message to the configured auth method.
            if app_settings.AUTHENTICATION_METHOD == AuthenticationMethod.EMAIL:
                error = _("The e-mail address and/or password you specified"
                          " are not correct.")
            elif app_settings.AUTHENTICATION_METHOD \
                    == AuthenticationMethod.USERNAME:
                error = _("The username and/or password you specified are"
                          " not correct.")
            else:
                error = _("The login and/or password you specified are not"
                          " correct.")
            raise forms.ValidationError(error)
        return self.cleaned_data

    def login(self, request, redirect_url=None):
        ret = perform_login(request, self.user, redirect_url=redirect_url)
        if self.cleaned_data["remember"]:
            # Keep the session alive for three weeks.
            request.session.set_expiry(60 * 60 * 24 * 7 * 3)
        else:
            # Expire the session when the browser closes.
            request.session.set_expiry(0)
        return ret
class _DummyCustomSignupForm(forms.Form):
    """No-op stand-in used when no custom signup form is configured."""

    def save(self, user):
        # Nothing to persist for the dummy form.
        pass
def _base_signup_form_class():
    """Resolve app_settings.SIGNUP_FORM_CLASS into a form class.

    Returns _DummyCustomSignupForm when no custom class is configured.
    Raises ImproperlyConfigured when the dotted path is malformed, the
    module cannot be imported, the class is missing from the module, or the
    class lacks a ``save`` method.
    """
    if not app_settings.SIGNUP_FORM_CLASS:
        return _DummyCustomSignupForm
    try:
        fc_module, fc_classname = app_settings.SIGNUP_FORM_CLASS.rsplit('.', 1)
    except ValueError:
        raise exceptions.ImproperlyConfigured('%s does not point to a form'
                                              ' class'
                                              % app_settings.SIGNUP_FORM_CLASS)
    try:
        mod = import_module(fc_module)
    except ImportError as e:
        # BUG FIX: `except ImportError, e:` is Python-2-only syntax; the
        # `as` form is valid on Python 2.6+ and Python 3.
        raise exceptions.ImproperlyConfigured('Error importing form class %s:'
                                              ' "%s"' % (fc_module, e))
    try:
        fc_class = getattr(mod, fc_classname)
    except AttributeError:
        raise exceptions.ImproperlyConfigured('Module "%s" does not define a'
                                              ' "%s" class' % (fc_module,
                                                               fc_classname))
    if not hasattr(fc_class, 'save'):
        raise exceptions.ImproperlyConfigured('The custom signup form must'
                                              ' implement a "save" method')
    return fc_class
class BaseSignupForm(_base_signup_form_class()):
    """Signup form providing username/email fields; mixes in the optional
    project-configured custom signup form as its base class."""

    username = forms.CharField(
        label=_("Username"),
        max_length=30,
        min_length=app_settings.USERNAME_MIN_LENGTH,
        widget=forms.TextInput()
    )
    email = forms.EmailField(widget=forms.TextInput(attrs={'placeholder': _('Email')}))

    def __init__(self, *args, **kwargs):
        """Adjust email requiredness and drop the username field when it is
        not required by the settings."""
        super(BaseSignupForm, self).__init__(*args, **kwargs)
        # Email is mandatory when explicitly required, when verification is
        # mandatory, or when email itself is the authentication credential.
        if (app_settings.EMAIL_REQUIRED
                or (app_settings.EMAIL_VERIFICATION
                    == EmailVerificationMethod.MANDATORY)
                or (app_settings.AUTHENTICATION_METHOD
                    == AuthenticationMethod.EMAIL)):
            self.fields["email"].label = ugettext("Email")
            self.fields["email"].required = True
        else:
            self.fields["email"].label = ugettext("Email (optional)")
            self.fields["email"].required = False
        if not app_settings.USERNAME_REQUIRED:
            del self.fields["username"]

    def clean_username(self):
        """Reject usernames already taken (case-insensitive lookup)."""
        value = self.cleaned_data["username"]
        try:
            User.objects.get(username__iexact=value)
        except User.DoesNotExist:
            return value
        raise forms.ValidationError(_("This username is already taken. Please "
                                      "choose another."))

    def clean_email(self):
        """Enforce e-mail uniqueness when UNIQUE_EMAIL is enabled."""
        value = self.cleaned_data["email"]
        if app_settings.UNIQUE_EMAIL:
            if value and email_address_exists(value):
                raise forms.ValidationError(_("A user is already registered with this e-mail address."))
        return value

    def create_user(self, commit=True):
        """Instantiate (and optionally save) a User from form/initial data."""
        user = User()
        # data collected by providers, if any, is passed as `initial`
        # signup form data. This may contain fields such as
        # `first_name`, whereas these may not have field counterparts
        # in the form itself. So let's pick these up here...
        data = self.initial
        user.last_name = data.get('last_name', '')
        user.first_name = data.get('first_name', '')
        user.email = self.cleaned_data["email"].strip().lower()
        user.username = data.get('username', '')
        if app_settings.USERNAME_REQUIRED:
            user.username = self.cleaned_data["username"]
        else:
            user.username = generate_unique_username(user.username or
                                                     user.first_name or
                                                     user.last_name or
                                                     user.email)
        # Subclasses set a real password if one was collected.
        user.set_unusable_password()
        if commit:
            user.save()
        return user
class SignupForm(BaseSignupForm):
    """Signup form adding password entry (with optional confirmation)."""

    password1 = SetPasswordField(label=_("Password"))
    password2 = PasswordField(label=_("Password (again)"))
    confirmation_key = forms.CharField(
        max_length=40,
        required=False,
        widget=forms.HiddenInput())

    def __init__(self, *args, **kwargs):
        super(SignupForm, self).__init__(*args, **kwargs)
        # current_order =self.fields.keyOrder
        # preferred_order = self.fields.keyOrder = ["username",
        #                                           "password1",
        #                                           "password2",
        #                                           "email"]
        # if not app_settings.USERNAME_REQUIRED:
        #     preferred_order = self.fields.keyOrder = ["email",
        #                                               "password1",
        #                                               "password2"]
        # # Make sure custom fields are put below main signup fields
        # self.fields.keyOrder = preferred_order + [ f for f in current_order if not f in preferred_order ]
        if not app_settings.SIGNUP_PASSWORD_VERIFICATION:
            del self.fields["password2"]

    def clean(self):
        """Check that both password entries match when verification is on."""
        super(SignupForm, self).clean()
        if app_settings.SIGNUP_PASSWORD_VERIFICATION \
                and "password1" in self.cleaned_data \
                and "password2" in self.cleaned_data:
            if self.cleaned_data["password1"] != self.cleaned_data["password2"]:
                raise forms.ValidationError(_("You must type the same password each time."))
        return self.cleaned_data

    def create_user(self, commit=True):
        """Create the user and set the chosen password, if one was given."""
        user = super(SignupForm, self).create_user(commit=False)
        password = self.cleaned_data.get("password1")
        if password:
            user.set_password(password)
        if commit:
            user.save()
        return user

    def save(self, request):
        """Create the user, set up e-mail records and confirmation mail, run
        the after_signup() hook, and return the new user."""
        new_user = self.create_user()
        super(SignupForm, self).save(new_user)
        setup_user_email(request, new_user)
        send_email_confirmation(request, new_user)
        self.after_signup(new_user)
        return new_user

    def after_signup(self, user, **kwargs):
        """
        An extension point for subclasses.
        """
        pass
class UserForm(forms.Form):
    """Base form bound to a specific user instance."""

    def __init__(self, user=None, *args, **kwargs):
        # Remember which user this form operates on.
        self.user = user
        super(UserForm, self).__init__(*args, **kwargs)
class AddEmailForm(UserForm):
    """Attach an additional e-mail address to the bound user."""

    email = forms.EmailField(
        label=_("E-mail"),
        required=True,
        widget=forms.TextInput(attrs={"size": "30"})
    )

    def clean_email(self):
        address = self.cleaned_data["email"]
        messages = {
            "this_account": _("This e-mail address is already associated with this account."),
            "different_account": _("This e-mail address is already associated with another account."),
        }
        matches = EmailAddress.objects.filter(email__iexact=address)
        if matches.filter(user=self.user).exists():
            raise forms.ValidationError(messages["this_account"])
        if app_settings.UNIQUE_EMAIL and matches.exclude(user=self.user).exists():
            raise forms.ValidationError(messages["different_account"])
        return address

    def save(self, request):
        # Delegates record creation and confirmation mail to the manager.
        return EmailAddress.objects.add_email(request,
                                              self.user,
                                              self.cleaned_data["email"],
                                              confirm=True)
class ChangePasswordForm(UserForm):
    """Replace the bound user's current password with a new one."""

    oldpassword = PasswordField(label=_("Current Password"))
    password1 = SetPasswordField(label=_("New Password"))
    password2 = PasswordField(label=_("New Password (again)"))

    def clean_oldpassword(self):
        current = self.cleaned_data.get("oldpassword")
        if not self.user.check_password(current):
            raise forms.ValidationError(_("Please type your current password."))
        return self.cleaned_data["oldpassword"]

    def clean_password2(self):
        data = self.cleaned_data
        if "password1" in data and "password2" in data:
            if data["password1"] != data["password2"]:
                raise forms.ValidationError(_("You must type the same password each time."))
        return data["password2"]

    def save(self):
        self.user.set_password(self.cleaned_data["password1"])
        self.user.save()
class SetPasswordForm(UserForm):
    """Set a password for a user who does not have a usable one yet."""

    password1 = SetPasswordField(label=_("Password"))
    password2 = PasswordField(label=_("Password (again)"))

    def clean_password2(self):
        data = self.cleaned_data
        if "password1" in data and "password2" in data:
            if data["password1"] != data["password2"]:
                raise forms.ValidationError(_("You must type the same password each time."))
        return data["password2"]

    def save(self):
        self.user.set_password(self.cleaned_data["password1"])
        self.user.save()
class ResetPasswordForm(forms.Form):
    """Request a password-reset e-mail for every account matching an address."""

    email = forms.EmailField(
        label=_("E-mail"),
        required=True,
        widget=forms.TextInput(attrs={"size": "30"})
    )

    def clean_email(self):
        """Resolve the address to matching users (primary or secondary
        e-mail); fail validation when none match."""
        email = self.cleaned_data["email"]
        self.users = User.objects.filter(Q(email__iexact=email)
                                         | Q(emailaddress__email__iexact=email)).distinct()
        if not self.users.exists():
            raise forms.ValidationError(_("The e-mail address is not assigned to any user account"))
        return self.cleaned_data["email"]

    def save(self, **kwargs):
        """Send a reset-key e-mail to each matched user and return the
        address.  kwargs may provide `token_generator` (defaults to Django's
        default_token_generator)."""
        email = self.cleaned_data["email"]
        token_generator = kwargs.get("token_generator", default_token_generator)
        for user in self.users:
            temp_key = token_generator.make_token(user)
            # save it to the password reset model
            # password_reset = PasswordReset(user=user, temp_key=temp_key)
            # password_reset.save()
            current_site = Site.objects.get_current()
            # send the password reset email
            path = reverse("account_reset_password_from_key",
                           kwargs=dict(uidb36=int_to_base36(user.id),
                                       key=temp_key))
            url = 'http://%s%s' % (current_site.domain,
                                   path)
            context = {"site": current_site,
                       "user": user,
                       "password_reset_url": url}
            get_adapter().send_mail('account/email/password_reset_key',
                                    email,
                                    context)
        return self.cleaned_data["email"]
class ResetPasswordKeyForm(forms.Form):
    """Choose a new password after following a password-reset key link."""

    password1 = SetPasswordField(label=_("New Password"))
    password2 = PasswordField(label=_("New Password (again)"))

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop("user", None)
        self.temp_key = kwargs.pop("temp_key", None)
        super(ResetPasswordKeyForm, self).__init__(*args, **kwargs)

    # FIXME: Inspecting other fields -> should be put in def clean(self) ?
    def clean_password2(self):
        data = self.cleaned_data
        if "password1" in data and "password2" in data:
            if data["password1"] != data["password2"]:
                raise forms.ValidationError(_("You must type the same password each time."))
        return data["password2"]

    def save(self):
        # set the new user password
        account = self.user
        account.set_password(self.cleaned_data["password1"])
        account.save()
        # mark password reset object as reset
        # PasswordReset.objects.filter(temp_key=self.temp_key).update(reset=True)
|
|
import os, io, sys, re
from .utils import CodeGenerator, camel_to_snake_case, camel_to_screaming_snake_case, delegate_type, which
from .registry import BaseTypeElem, Member, Typed, TypeRef, DispatchTable
# C scalar type name -> Rust primitive type.
PREDEFINED_TYPES = {
    'int8_t': 'i8',
    'uint8_t': 'u8',
    'int16_t': 'i16',
    'uint16_t': 'u16',
    'int32_t': 'i32',
    'uint32_t': 'u32',
    'int64_t': 'i64',
    'uint64_t': 'u64',
    'float': 'f32',
    'double': 'f64',
    'void': 'c_void',
    'char': 'c_char',
    'int': 'c_int',
    'size_t': 'usize',
}
# C macro name -> (Rust macro name, result type) for macros re-implemented
# on the Rust side.
PREDEFINED_UTILS = {
    'VK_MAKE_VERSION': ('vk_make_version', 'u32'),
}
# Registry names handled specially elsewhere and never emitted directly.
IGNORED = set([
    'VK_TRUE', 'VK_FALSE', 'VK_NULL_HANDLE'
])
# Rust keywords (including reserved-for-future-use ones) that cannot be used
# as identifiers and must be mangled.
RESERVED_KEYWORDS = set([
    '_', 'abstract', 'alignof', 'as', 'become',
    'box', 'break', 'const', 'continue', 'crate',
    'do', 'else', 'enum', 'extern', 'false',
    'final', 'fn', 'for', 'if', 'impl',
    'in', 'let', 'loop', 'macro', 'match',
    'mod', 'move', 'mut', 'offsetof', 'override',
    'priv', 'proc', 'pub', 'pure', 'ref',
    'return', 'Self', 'self', 'sizeof', 'static',
    'struct', 'super', 'trait', 'true', 'type',
    'typeof', 'unsafe', 'unsized', 'use', 'virtual',
    'where', 'while', 'yield',
])
# Platform C header -> Rust module that provides the matching WSI types.
INCLUDE_TO_MODULE = {
    'X11/Xlib.h': 'wsi::xlib',
    'X11/extensions/Xrandr.h': 'wsi::xlib',
    'android/native_window.h': 'wsi::android',
    'mir_toolkit/client_types.h': 'wsi::mir',
    'wayland-client.h': 'wsi::wayland',
    'windows.h': 'wsi::win32',
    'xcb/xcb.h': 'wsi::xcb',
}
# Registry element classes that are member-composed aggregates.
STRUCT_TYPES = set([BaseTypeElem.STRUCT, BaseTypeElem.UNION])
# Cached rustfmt executable/config lookups; False means "looked up, absent".
_rustfmt_config = None
_rustfmt_exe = None
def rustfmt(filename):
    """Format `filename` in place with rustfmt, if available.

    The rustfmt executable and the optional ./rustfmt.toml config path are
    located once and cached in module globals.  A missing executable is
    reported once and then silently ignored.  rustfmt exit code 3 (valid
    code that could not be fully formatted) is tolerated; any other
    non-zero exit raises CalledProcessError.
    """
    global _rustfmt_config, _rustfmt_exe
    if _rustfmt_exe is None:
        _rustfmt_exe = which('rustfmt')
        if _rustfmt_exe is None:
            _rustfmt_exe = False  # remember the failed lookup
            print('`rustfmt` not found in PATH. The file `%s` will not be formatted!' % filename)
            return
    if _rustfmt_exe is False:
        return
    import subprocess
    rustfmt_cmdline = [_rustfmt_exe]
    if _rustfmt_config is None:
        if os.path.isfile('./rustfmt.toml'):
            _rustfmt_config = './rustfmt.toml'
        else:
            _rustfmt_config = False
    if _rustfmt_config is not False:
        rustfmt_cmdline.append('--config-path')
        rustfmt_cmdline.append(str(_rustfmt_config))
    rustfmt_cmdline.append(filename)
    res = subprocess.run(rustfmt_cmdline)
    if res.returncode == 3:
        print('`rustfmt` was unable to format `%s` properly, but the code was valid: the error was ignored' % filename)
    elif res.returncode != 0:
        # BUG FIX: CalledProcessError takes (returncode, cmd); the previous
        # call passed a format string where the return code belongs.
        raise subprocess.CalledProcessError(res.returncode, res.args)
def _lifetime_diamond(lifetimes, with_subtyping=False):
    """Render a Rust lifetime parameter list such as ``<'l,'h: 'l>``.

    Lifetimes are emitted in reverse-sorted order.  With `with_subtyping`,
    every lifetime after the first is declared to outlive the previously
    emitted one.  An empty set renders as the empty string.
    """
    if not lifetimes:
        return ''
    rendered = []
    previous = None
    for name in sorted(lifetimes, reverse=True):
        if with_subtyping and previous is not None:
            rendered.append('\'%s: \'%s' % (name, previous))
        else:
            rendered.append('\'%s' % name)
        previous = name
    return '<%s>' % ','.join(rendered)
class RustCodeGenerator(CodeGenerator):
    """Code generator whose output is run through rustfmt on close.

    Constructed with a filename, it writes there directly and formats the
    file when closed.  Constructed with a file-like object, it stages output
    in a temporary file, formats that, copies it into the target and removes
    the temporary file.
    """

    def __init__(self, out, *args, **kwargs):
        if isinstance(out, str):
            self._filename = out
            self._old_out = None
        else:
            from tempfile import NamedTemporaryFile
            self._old_out = out
            out = NamedTemporaryFile('w', delete=False)
            self._filename = out.name
        super(RustCodeGenerator, self).__init__(out, *args, **kwargs)

    def close(self):
        super(RustCodeGenerator, self).close()
        self.out.close()
        rustfmt(self._filename)
        if self._old_out is not None:
            self.out = self._old_out
            # Copy the formatted temp file into the real target.
            # FIX: previously the file handle shadowed the builtin `input`
            # and the whole file was materialized via iter(readlines());
            # iterating the handle streams line by line instead.
            with open(self._filename, "r") as formatted:
                for line in formatted:
                    self.out.write(line)
            os.unlink(self._filename)
            self._old_out = None
class ImportGenerator(CodeGenerator):
    """Generator wrapper that buffers its output so that `use` imports
    collected during generation can be emitted first on close."""

    def __init__(self, *args, **kwargs):
        super(ImportGenerator, self).__init__(*args, **kwargs)
        # Import paths collected while the body is generated.
        self.imports = set()
        # Swap the real sink for an in-memory buffer until close().
        self._target_out = self.out
        self.out = io.StringIO()

    def close(self):
        """Emit sorted `use` lines, then the buffered body, into the real
        output; `imports` is set to None to mark the generator closed."""
        if self._target_out is not None:
            contents = self.out.getvalue()
            self.out.close()
            self.out = self._target_out
            self._target_out = None
            if len(self.imports) > 0:
                for imp in sorted(self.imports):
                    self('use ', imp, ';').nl()
                self.nl()
            self.imports = None
            self.out.write(contents)
        super(ImportGenerator, self).close()
# Matches a trailing `-<digits>` subtraction suffix in C constant
# expressions (e.g. `VK_UUID_SIZE-1`).
_RE_SUB_ = re.compile(r'\-[0-9]+$')
class RustGenerator:
    """Emits Rust source files (enums, types, protos, dispatch tables) from
    a parsed Vulkan registry."""

    def __init__(self, registry):
        self.registry = registry
        self.target = 'src'  # output directory for the generated .rs files
        self._import_generator = None  # active ImportGenerator, if any
def manage_imports(self, gen):
if self._import_generator is not None and self._import_generator.imports is not None:
raise ValueError('canonly open one import-generator at a time')
self._import_generator = ImportGenerator(gen)
return self._import_generator
def add_import(self, imp):
if self._import_generator is not None and self._import_generator.imports is not None:
self._import_generator.imports.add(imp)
return imp
    def rust_value(self, value, **kwargs):
        """Translate a C constant expression into ``(rust_expr, rust_type)``.

        Handles None, parenthesized values, string literals (re-emitted with
        a trailing NUL), symbolic enum-item references, numeric literal
        suffixes (f/U/LL/ULL), a trailing `-<n>` subtraction, and the `~`
        complement operator.
        """
        if value is None:
            return 'None', 'Option<c_void>'
        if value.startswith('(') and value.endswith(')'):
            value = value[1:-1]
        if value.startswith('"'):
            # C string constant: keep an explicit NUL terminator.
            return '"%s\\0"' % value[1:-1], '&str'
        if value[0].isalpha():
            # Symbolic reference to an enum item.
            item = self.registry.enum_items[value]
            name = self.rust_enum_item_name(item)
            if item.enum_group.type is None:
                _, ty = self.rust_enum_item_value(item, **kwargs)
                if not kwargs.get('no_imports', False):
                    self.add_import('enums')
                return 'enums::' + name, ty
            else:
                return item.enum_group.name + '::' + name, item.enum_group.name
        ty = 'u32'
        # Split off a trailing `-<digits>` suffix (e.g. `X-1`).
        sub = _RE_SUB_.search(value)
        if sub is not None:
            sub = sub.group(0)
            value = value[:-len(sub)]
        else:
            sub = ''
        # Map C numeric literal suffixes to typed Rust literals.
        if value.endswith('f'):
            ty = 'f32'
            value = value + '32'
        elif value.endswith('ULL'):
            ty = 'u64'
            value = value[:-3] + 'u64'
        elif value.endswith('LL'):
            ty = 'i64'
            value = value[:-2] + 'i64'
        elif value.endswith('U'):
            ty = 'u32'
            value = value[:-1] + 'u32'
        if value.startswith('~'):
            # C bitwise complement is spelled `!` in Rust.
            value = '!' + value[1:]
        return value + sub, ty
def rust_dimension_value(self, value, **kwargs):
value, ty = self.rust_value(value, **kwargs)
if value is not None and value[0].isalpha() and ty != 'usize':
# requires a cast
return '%s as usize' % value
return value
    def rust_enum_item_value(self, item, **kwargs):
        """Return ``(rust_expr, rust_type)`` for an enum item's value.

        Bit positions render as ``1<<n``.  For typed groups, negative values
        are rewritten to the two's-complement ``!(n-1)`` form so the result
        fits a u32.
        """
        if item.bitpos is not None:
            return '1<<%s' % item.bitpos, 'u32'
        v, t = self.rust_value(item.value, **kwargs)
        if item.enum_group.type is not None:
            if v.startswith('-'):
                # -n as u32 == !(n-1)
                i = int(v[1:])
                v = '!%s' % (i-1)
            return v, 'u32'
        return v, t
    def rust_enum_item_name(self, item):
        """Return the Rust identifier for an enum item.

        Ungrouped constants keep their full name; grouped items use the
        short name, prefixed with BIT_/E_ when it starts with a digit or
        collides with a Rust keyword.
        """
        if item.enum_group.type is None:
            return item.name
        name = item.shortname
        if name[0].isnumeric() or name in RESERVED_KEYWORDS:
            if item.enum_group.type == 'bitmask':
                # Drop a redundant _BIT suffix before prefixing.
                if name.endswith('_BIT'):
                    name = name[:-4]
                name = 'BIT_' + name
            else:
                name = 'E_' + name
        return name
    def rust_member_name(self, member):
        """Return a Rust-safe field name for a member (or raw string);
        keyword collisions get an ``e`` prefix plus capitalization."""
        if isinstance(member, str):
            name = member
        else:
            name = member.name
        if name in RESERVED_KEYWORDS:
            name = 'e' + name[0].upper() + name[1:]
        return name

    # Parameter names follow the same mangling rules as member names.
    rust_param_name = rust_member_name
    def rust_member_function(self, member, keyword_prefix='_'):
        """Derive a snake_case accessor name from a member, stripping
        Hungarian pointer prefixes (p_/pp_) and prefixing Rust keywords
        with `keyword_prefix`."""
        if isinstance(member, str):
            name = member
        else:
            name = member.name
        name = camel_to_snake_case(name)
        if name.startswith('pp_'):
            name = name[3:]
        elif name.startswith('p_'):
            name = name[2:]
        if name in RESERVED_KEYWORDS:
            name = keyword_prefix + name
        return name

    # NOTE(review): redundant re-binding — rust_param_name is already
    # aliased to rust_member_name above; this repeats the same assignment.
    rust_param_name = rust_member_name
    def rust_composed_lifetimes(self, ty, **kwargs):
        """Collect the set of lifetime names needed by a composed
        (struct/union) type's members."""
        lifetimes = set()
        for m in ty.members:
            if m.name == 'pNext' and m.type == TypeRef.VOID_PTR:
                # pNext chains borrow for the struct's 'l lifetime.
                lifetimes.add('l')
            else:
                (_, _, _, member_lifetimes) = self.rust_type_details(m, no_imports=True, **kwargs)
                lifetimes.update(member_lifetimes)
        return lifetimes
def rust_type_details(self, ty, **kwargs):
if isinstance(ty, str):
ty = self.registry.types[ty]
elif isinstance(ty, Member):
m = ty
ty = m.type
if 'optional' not in kwargs:
kwargs['optional'] = m.optional
if 'len' not in kwargs:
kwargs['len'] = m.len
as_raw_conv = False
lifetimes = set()
if isinstance(ty, BaseTypeElem):
type_name = ty.name
raw_type_name = None
if type_name in PREDEFINED_TYPES:
type_name = PREDEFINED_TYPES[type_name]
return (type_name, type_name, as_raw_conv, lifetimes)
elif type_name in RESERVED_KEYWORDS:
type_name = 'T' + type_name
if ty.name == 'VkBool32':
as_raw_conv = True
raw_type_name = 'VkBool32'
type_name = 'bool'
elif ty.__class__ is BaseTypeElem.PROVIDED:
self.add_import('platform::*')
if len(ty.requires) == 1:
mod = INCLUDE_TO_MODULE.get(ty.requires[0], None)
if mod is not None:
type_name = mod + '::' + type_name
elif ty.__class__ is BaseTypeElem.FUNCTIONPOINTER and kwargs.pop('optional', None):
type_name = 'Option<%s>' % type_name
elif ty.__class__ in STRUCT_TYPES:
if not kwargs.get('as_param', None):
lifetimes.update(self.rust_composed_lifetimes(ty))
type_name += _lifetime_diamond(lifetimes)
elif ty.__class__ is BaseTypeElem.HANDLE:
raw_type_name = ty.non_dispatchable and 'u64' or 'usize'
as_raw_conv = True
if not kwargs.get('as_param', None):
lifetimes.add('h')
type_name += '<\'h>'
if kwargs.pop('optional', None):
type_name = 'Option<%s>' % type_name
if raw_type_name is None:
raw_type_name = type_name
return (type_name, raw_type_name, as_raw_conv, lifetimes)
elif not isinstance(ty, TypeRef):
raise ValueError('unable to hadle arg', ty)
elif ty.is_named():
return self.rust_type_details(ty.name, **kwargs)
elif ty.is_function():
params = ', '.join([self.rust_raw_type(p.type, as_param=True) for p in ty.arg.params])
res = ''
if ty.arg.returns != TypeRef.VOID:
res = ' -> %s' % self.rust_raw_type(ty.arg.returns, as_param=True, optional=True)
type_name = 'extern "system" fn (%s)%s' % (params, res)
if kwargs.pop('optional', None):
type_name = 'Option<%s>' % type_name
return (type_name, type_name, as_raw_conv, lifetimes)
elif ty.is_ptr():
length = kwargs.pop('len', None)
opt = kwargs.pop('optional', None)
if ty.is_const():
arg = ty.arg.arg
mut = ''
else:
arg = ty.arg
mut = 'mut '
(type_name, raw_typename, as_raw_conv, arg_lifetimes) = self.rust_type_details(arg, **kwargs)
lifetimes.update(arg_lifetimes)
raw_typename = '*%s%s' % (mut or 'const ', raw_typename)
if not mut and arg == TypeRef.CHAR and length == 'null-terminated':
if not kwargs.get('as_param', None):
lt = '\'l '
lifetimes.add('l')
else:
lt = ''
type_name = '&%sAsRef<CStr>' % lt
lifetimes.add('l')
as_raw_conv = True
if opt:
type_name = 'Option<%s>' % type_name
elif length:
if arg == TypeRef.VOID:
type_name = 'u8'
if not kwargs.get('as_param', None):
lt = '\'l '
lifetimes.add('l')
else:
lt = ''
type_name = '&%s%s[%s]' % (lt, mut, type_name)
lifetimes.add('l')
as_raw_conv = True
elif arg == TypeRef.VOID or (arg.is_named() and arg.resolved_type.__class__ is BaseTypeElem.PROVIDED):
type_name = raw_typename
else:
if not kwargs.get('as_param', None):
lt = '\'l '
lifetimes.add('l')
else:
lt = ''
type_name = '&%s%s%s' % (lt, mut, type_name)
if opt:
type_name = 'Option<%s>' % type_name
as_raw_conv = True
return (type_name, raw_typename, as_raw_conv, lifetimes)
elif ty.is_array():
if ty.is_const():
arg = ty.arg.arg
else:
arg = ty.arg
(type_name, raw_typename, as_raw_conv, arg_lifetimes) = self.rust_type_details(arg, **kwargs)
lifetimes.update(arg_lifetimes)
dim = self.rust_dimension_value(ty.dim, no_imports=kwargs.get('no_imports', False))
type_name = '[%s; %s]' % (type_name, dim)
raw_typename = '[%s; %s]' % (raw_typename, dim)
return (type_name, raw_typename, as_raw_conv, lifetimes)
raise ValueError('unable to hadle arg', ty)
def rust_raw_type(self, ty, **kwargs):
(_typename, raw_typename, _as_raw_conv, _lifetimes) = self.rust_type_details(ty, **kwargs)
return raw_typename
def rust_safe_type(self, ty, **kwargs):
(typename, _raw_typename, _as_raw_conv, _lifetimes) = self.rust_type_details(ty, **kwargs)
return typename
    def rust_param_as_raw(self, ty, declname=None, **kwargs):
        """Render the expression converting the safe parameter `declname`
        into its raw FFI representation."""
        if isinstance(ty, Member):
            if declname is None:
                declname = self.rust_param_name(ty)
            if ty.is_out and (ty.len_for or ty is getattr(ty.container, 'out_param', None)):
                # Output parameters are handed over as raw (slice) pointers.
                if ty.len:
                    declname = '%s.as_mut_ptr()' % declname
                else:
                    declname = '&mut %s' % declname
        (typename, raw_typename, as_raw_conv, _lifetimes) = self.rust_type_details(ty, no_imports=True, **kwargs)
        if as_raw_conv:
            if '&' in declname:
                # Parenthesize borrows so the method call binds correctly.
                declname = '(%s)' % declname
            declname = '%s.as_raw()' % declname
            if 'u8' in typename and 'c_void' in raw_typename:
                # Byte slices passed as void pointers need an explicit cast.
                declname = '%s as %s' % (declname, raw_typename)
        return declname
def is_public(self, ty):
if isinstance(ty, str):
ty = self.registry.types[ty]
if isinstance(ty, Member):
m = ty
ty = m.type
if m.values:
return False
if m.len_for:
return False
if m.len:
return False
if isinstance(ty, BaseTypeElem):
return True
if not isinstance(ty, TypeRef):
raise ValueError('unable to hadle arg', ty)
if ty.__class__ == TypeRef.POINTER:
if ty.arg.__class__ == TypeRef.CONST:
return self.is_public(ty.arg.arg)
return self.is_public(ty.arg)
if ty == TypeRef.VOID:
return False
return True
def _generate_docs(self, obj, gen, short=False):
docs = obj.docs
if docs is None:
return
gen.nl()
for line in docs:
if short and line == '':
return
gen('/// ', line).nl()
    def _generate_feature_comment(self, feature, gen):
        # Emit a blank line followed by a `// feature: <name>` marker.
        gen.nl()
        gen('// feature: ', feature.name).nl()
def _generate_feature_comment_nonconsecutive(self, feature, gen):
last = getattr(gen, '_last_feature', None)
if last is feature:
return
gen._last_feature = feature
self._generate_feature_comment(feature, gen)
    def _generate_feature_protect(self, feature, gen):
        # Guard extension items and platform-protected items behind cargo
        # feature flags.
        if feature.is_extension:
            gen('#[cfg(feature = "', feature.name, '")]').nl()
        if feature.protect:
            gen('#[cfg(feature = "', feature.protect, '")]').nl()
    def _is_feature_protect(self, feature):
        # True(ish) when _generate_feature_protect would emit a #[cfg] guard.
        return feature.is_extension or feature.protect is not None
    def generate_all(self):
        """Generate every output file: enums, types, prototypes, dispatch
        table/commands, and the prelude."""
        self.generate_enums()
        self.generate_types()
        self.generate_protos()
        self.generate_dispatch_table()
        self.generate_dispatch_commands()
        self.generate_prelude()
    def generate_types(self, file=None):
        """Write src/types.rs (or `file`) with all registry types, grouped
        per feature and with collected `use` lines emitted up front."""
        if file is None:
            file = os.path.join(self.target, 'types.rs')
        reg = self.registry
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            gen('#![allow(non_snake_case)]').nl()
            gen.nl()
            gen('#[path = "types_impl.rs"]').nl()
            gen('pub mod types_impl;').nl()
            gen.nl()
            # Collect imports while generating; they are flushed first.
            with self.manage_imports(gen) as gen:
                self.add_import('AsRaw')
                self.add_import('Struct')
                for feature in reg.features:
                    # open_nonempty suppresses the feature comment when the
                    # feature produces no output.
                    with gen.open_nonempty() as nonempty_gen:
                        self._generate_feature_comment(feature, nonempty_gen)
                        for ty in feature.types:
                            self._generate_type(ty, gen)
    def _generate_type(self, ty, gen):
        # Dispatch to the matching _generate_type_<kind> method, skipping
        # specially-handled names.
        if ty.name in IGNORED:
            return
        delegate_type(self, '_generate_type_', ty, gen)
    def _generate_type_basetype(self, ty, gen):
        # Plain C typedef -> `pub type` alias.
        self._generate_docs(ty, gen)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('pub type ', ty.name, ' = ', self.rust_raw_type(ty.type), ';').nl()
    def _generate_type_bitmask(self, ty, gen):
        # Bitmask typedef: alias the backing enum type when exactly one is
        # required, otherwise fall back to the generic VkFlags.
        if len(ty.requires) == 1:
            enumtype = ty.requires[0]
        else:
            enumtype = 'VkFlags'
        self._generate_docs(ty, gen)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('pub type ', ty.name, ' = ', enumtype, ';').nl()
    def _generate_type_enum(self, ty, gen):
        # Re-export the enum from the generated enums module, aliased when
        # the registry name differs from the group name.
        self._generate_docs(ty, gen, short=True)
        self._generate_feature_protect(ty.requiering_feature, gen)
        if ty.name != ty.group.name:
            gen('pub use enums::', ty.group.name, ' as ', ty.name, ';').nl()
        else:
            gen('pub use enums::', ty.group.name, ';').nl()
    def _generate_type_handle(self, ty, gen):
        # Handles become lifetime-tagged wrappers parameterized on a hidden
        # phantom marker enum.
        base = ty.non_dispatchable and 'VkNonDispatchableHandle' or 'VkDispatchableHandle'
        self.add_import('utils::%s' % base)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('#[doc(hidden)]').nl()
        gen('#[derive(Copy,Clone)]').nl()
        gen('pub enum ', ty.name, '__ {}').nl()
        self._generate_docs(ty, gen)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('pub type ', ty.name, '<\'l> = ', base, '<\'l,', ty.name, '__>;').nl()
    def _generate_type_funcpointer(self, ty, gen):
        # Function pointers become `pub type` aliases of extern "system" fns.
        self._generate_docs(ty, gen)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('#[allow(non_camel_case_types)]').nl()
        gen('pub type ', ty.name, ' = ', self.rust_raw_type(ty.type), ';').nl()
    def _generate_type_struct(self, ty, gen):
        """Emit the full Rust surface for a struct type: the #[repr(C)]
        declaration, an impl with constructor/setters/getters and pNext
        extension support, Default/Struct/StructExtends impls, and a
        compile-time size test."""
        # Pass 1: resolve every member's type details and decide visibility.
        members = []
        lifetimes = set()
        has_p_next = False
        for i, member in enumerate(ty.members):
            name = self.rust_member_name(member)
            (typename, raw_typename, as_raw_conv, member_lifetimes) = self.rust_type_details(member)
            lifetimes.update(member_lifetimes)
            hidden = False
            if i == 0 and member.name == 'sType' and typename == 'VkStructureType' and member.values:
                # sType with a fixed value is filled in by new().
                hidden = True
            elif i == 1 and member.name == 'pNext' and member.type == TypeRef.VOID_PTR:
                # pNext chains need interior mutability for extend().
                self.add_import('std::cell::Cell')
                typename = raw_typename = 'Cell<%s>' % raw_typename
                hidden = True
                has_p_next = True
                lifetimes.add('l')
            elif member.len_for:
                # Length carriers are read-only; setters maintain them.
                hidden = 'readonly'
                typename = typename.replace(' mut ', ' ')
            members.append({
                'obj': member,
                'name': name,
                'typename': typename,
                'raw_typename': raw_typename,
                'as_raw_conv': as_raw_conv,
                'lifetimes': member_lifetimes,
                'hidden': hidden,
            })
        # Struct declaration.
        self._generate_docs(ty, gen)
        gen('#[repr(C)]').nl()
        if not lifetimes:
            gen('#[derive(Copy,Clone)]').nl()
        self._generate_feature_protect(ty.requiering_feature, gen)
        lifetime_params = _lifetime_diamond(lifetimes, with_subtyping=True)
        lifetime_args = _lifetime_diamond(lifetimes)
        gen('pub struct ', ty.name, lifetime_params, ' {').nl()
        used_lifetimes = set()
        with gen.open_indention():
            for member in members:
                if not member['hidden'] and not member['as_raw_conv'] and self.is_public(member['obj']):
                    used_lifetimes.update(member['lifetimes'])
                    gen('pub ', member['name'], ' : ', member['typename'], ',').nl()
                else:
                    gen(member['name'], ' : ', member['raw_typename'], ',').nl()
            # Lifetimes not used by a public field still need a PhantomData
            # anchor to keep the declaration valid.
            unused_lifetimes = lifetimes - used_lifetimes
            if unused_lifetimes:
                gen('_p: ::std::marker::PhantomData<(', ','.join(['&\'%s u8' % l for l in unused_lifetimes]), ')>,').nl()
        gen('}').nl()
        # Inherent impl: constructor, setters, getters, extend().
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('impl', lifetime_params, ' ', ty.name, lifetime_args, ' {').nl()
        with gen.open_indention():
            if not ty.returnedonly:
                # new(): zero-initialize, then fill members with fixed values
                # (e.g. sType).
                gen('#[inline]').nl()
                gen('pub fn new() -> ', ty.name, lifetime_args, ' {').nl()
                with gen.open_indention():
                    gen('unsafe {').nl()
                    with gen.open_indention():
                        default_values = []
                        for m in members:
                            if m['obj'].values:
                                default_values.append((m['name'], m['obj'].values))
                        if default_values:
                            gen(ty.name, ' {').nl()
                            with gen.open_indention():
                                for name, value in default_values:
                                    value, _ = self.rust_value(value)
                                    gen(name, ': ', value, ', ').nl()
                                gen('..::std::mem::zeroed()').nl()
                            gen('}').nl()
                        else:
                            gen('::std::mem::zeroed()').nl()
                    gen('}').nl()
                gen('}').nl()
                # Builder-style setters for non-hidden members.
                for mem in members:
                    m = mem['obj']
                    if mem['hidden']:
                        continue
                    if not self.is_public(m):
                        if m.type.__class__ == TypeRef.POINTER and m.type.arg.__class__ == TypeRef.CONST:
                            arg = m.type.arg.arg
                            if arg.__class__ == TypeRef.POINTER and arg.arg.__class__ == TypeRef.CONST and arg.arg.arg == TypeRef.CHAR: #TODO
                                print('unable to handle setter for %s::%s (2)' % (ty.name, m.name))
                                continue
                        if m.len and m.len != 'null-terminated':
                            if m.len not in ty.members: #TODO
                                print('unable to handle setter for %s::%s (3)' % (ty.name, m.name))
                                continue
                            if len(ty.members[m.len].len_for) != 1: #TODO
                                print('unable to handle setter for %s::%s (4)' % (ty.name, m.name))
                                continue
                    gen('#[inline]').nl()
                    valuetype = mem['typename']
                    gen('pub fn set_', self.rust_member_function(m, ''), '(mut self, value: ', valuetype, ') -> Self {').nl()
                    with gen.open_indention():
                        if m.len and m.len != 'null-terminated':
                            # Keep the associated length carrier in sync.
                            len_param = ty.members[m.len]
                            gen('self. ', self.rust_param_name(len_param), ' = value.len() as ', self.rust_raw_type(len_param), ';').nl()
                        if not mem['as_raw_conv']:
                            gen('self.', mem['name'], ' = value;').nl()
                        else:
                            gen('unsafe {').nl()
                            as_raw_call = self.rust_param_as_raw(m, declname='value')
                            with gen.open_indention():
                                gen('self.', mem['name'], ' = ', as_raw_call, ';').nl()
                            gen('}').nl()
                        gen('self').nl()
                    gen('}').nl()
            # Getters (also for readonly/returned-only members).
            for mem in members:
                m = mem['obj']
                if mem['hidden'] and mem['hidden'] != 'readonly':
                    continue
                if m.len and m.len != 'null-terminated':
                    continue # TODO
                #elif m.len and m.len not in ty.members:
                #    continue # TODO
                if m.type == TypeRef.BOOL:
                    gen('#[inline]').nl()
                    gen('pub fn is_', self.rust_member_function(m, ''), '(&self) -> bool {').nl()
                    with gen.open_indention():
                        gen('self.', mem['name'], ' != 0').nl()
                    gen('}').nl()
                elif m.len == 'null-terminated':
                    gen('#[inline]').nl()
                    gen('pub fn ', self.rust_member_function(m, 'get_'), '(&self) -> &CStr {').nl()
                    with gen.open_indention():
                        gen('unsafe { ::std::ffi::CStr::from_ptr(self.', mem['name'], ') }').nl()
                    gen('}').nl()
                elif m.type.__class__ == TypeRef.NAMED and m.type.resolved_type.__class__ in STRUCT_TYPES:
                    gen('#[inline]').nl()
                    gen('pub fn ', self.rust_member_function(m, 'get_'), '(&self) -> &', mem['typename'], ' {').nl()
                    with gen.open_indention():
                        gen('&self.', mem['name']).nl()
                    gen('}').nl()
                elif not mem['as_raw_conv']:
                    gen('#[inline]').nl()
                    gen('pub fn ', self.rust_member_function(m, 'get_'), '(&self) -> ', mem['typename'], ' {').nl()
                    with gen.open_indention():
                        gen('self.', mem['name']).nl()
                    gen('}').nl()
            # Root chain structs get a generic extend() entry point.
            if has_p_next and not ty.structextends:
                self.add_import('StructExtends')
                gen('#[inline]').nl()
                gen('pub fn extend<E>(self, e: &E) -> Self where E: StructExtends<Self> + Sized {').nl()
                with gen.open_indention():
                    gen('unsafe { self.pNext.set(e.extend(self.pNext.get())) };').nl()
                    gen('self').nl()
                gen('}').nl()
        gen('}').nl()
        # Default impl for constructible structs.
        if not ty.returnedonly:
            self._generate_feature_protect(ty.requiering_feature, gen)
            gen('impl', lifetime_params, ' Default for ', ty.name, lifetime_args, ' {').nl()
            with gen.open_indention():
                gen('fn default() -> ', ty.name, lifetime_args, ' { ', ty.name, '::new() }').nl()
            gen('}').nl()
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('unsafe impl', lifetime_params, ' Struct for ', ty.name, lifetime_args, ' {}').nl()
        # StructExtends impls for every struct this one can extend.
        if ty.structextends:
            for ext in ty.structextends:
                ext_typename, _, _, ext_lifetimes = self.rust_type_details(ext)
                if 'l' in ext_lifetimes:
                    # rename 'l to 'm
                    ext_lifetimes.remove('l')
                    ext_lifetimes.add('m')
                    ext_typename = ext_typename.replace('\'l', '\'m')
                ext_lifetimes.update(lifetimes)
                ext_lifetime_params = _lifetime_diamond(ext_lifetimes, with_subtyping=True)
                self.add_import('StructExtends')
                self._generate_feature_protect(ty.requiering_feature, gen)
                gen('unsafe impl', ext_lifetime_params, ' StructExtends<', ext_typename, '> for ', ty.name, lifetime_args, ' {').nl()
                with gen.open_indention():
                    gen('#[inline]').nl()
                    gen('unsafe fn extend(&self, next: *const c_void) -> *const c_void {').nl()
                    with gen.open_indention():
                        gen('assert!(self.pNext.get().is_null());').nl()
                        gen('self.pNext.set(next);').nl()
                        gen('self as *const ', ty.name, ' as *const c_void').nl()
                    gen('}').nl()
                gen('}').nl()
        # Emit a size regression test from the registry's size model.
        size = ty.size()
        if size is None:
            raise ValueError('struct has unknown size', ty)
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('#[cfg(test)]').nl()
        gen('#[test]').nl()
        gen('fn test_struct_size_', camel_to_snake_case(ty.name), '() {').nl()
        with gen.open_indention():
            bytes, ints, pointers, _ = size
            add = ''
            if ints > 0:
                gen('let int_size = ::std::mem::size_of::<::std::os::raw::c_int>();').nl()
                add += ' + int_size * %d' % ints
            if pointers > 0:
                gen('let ptr_size = ::std::mem::size_of::<usize>();').nl()
                add += ' + ptr_size * %d' % pointers
            gen('assert_size!(', bytes, add, ', ', ty.name, ');').nl()
        gen('}').nl()
    def _generate_type_union(self, ty, gen):
        """Emits a #[repr(C)] Rust `pub union` for *ty*.

        Also emits an `unsafe impl Struct` marker and a #[cfg(test)] size
        test asserting the union's expected byte size.
        """
        self._generate_docs(ty, gen)
        gen('#[repr(C)]').nl()
        gen('#[derive(Copy,Clone)]').nl()
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('pub union ', ty.name, ' {').nl()
        with gen.open_indention():
            for member in ty.members:
                # Only members marked public get the `pub` qualifier.
                if self.is_public(member):
                    gen('pub ')
                gen(self.rust_member_name(member), ' : ', self.rust_safe_type(member), ',').nl()
        gen('}').nl()
        self._generate_feature_protect(ty.requiering_feature, gen)
        gen('unsafe impl Struct for ', ty.name, ' {}').nl()
        gen('#[cfg(test)]').nl()
        gen('#[test]').nl()
        gen('fn test_union_size_', camel_to_snake_case(ty.name), '() {').nl()
        with gen.open_indention():
            # ty.size()[0] is the plain-byte component of the size tuple
            # (cf. the (bytes, ints, pointers, _) unpacking in the struct test).
            gen('assert_size!(', ty.size()[0] ,', ', ty.name,');').nl()
        gen('}').nl()
    def generate_enums(self, file=None):
        """Generates enums.rs: #define-derived constants plus all enum groups.

        :param file: output path; defaults to <target>/enums.rs
        """
        if file is None:
            file = os.path.join(self.target,'enums.rs')
        reg = self.registry
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            # Emit C #define constants first, then the grouped enums.
            for ty in reg.types:
                if ty.__class__ == BaseTypeElem.DEFINE:
                    self._generate_define(ty, gen)
            for group in reg.enum_groups:
                self._generate_enum_group(group, gen)
    def _generate_define(self, define, gen):
        """Emits a `pub const` for one C #define from the registry.

        Skips defines that are required by no feature, ignored, valueless,
        or deprecated. Defines that expand a macro are routed through the
        matching Rust macro from PREDEFINED_UTILS.
        """
        if define.requiering_feature is None \
                or define.name in IGNORED \
                or define.value is None \
                or define.is_deprecated:
            return
        call = ''
        if define.macro_call:
            # Macro-expanding define: emit `rust_macro!(original value)`.
            call, ty = PREDEFINED_UTILS[define.macro_call]
            call = '%s!' % call
            value = define.value
        else:
            value, ty = self.rust_value(define.value)
        self._generate_feature_comment_nonconsecutive(define.requiering_feature, gen)
        self._generate_docs(define, gen)
        self._generate_feature_protect(define.requiering_feature, gen)
        gen('pub const ', define.name, ' : ', ty, ' = ', call, value, ';').nl()
    def _generate_enum_group(self, group, gen):
        """Dispatches one enum group to its specific generator.

        Untyped groups become loose constants, VkResult gets the dedicated
        error-enum treatment, everything else becomes a regular Rust enum.
        """
        if group.requiering_feature is None \
                or group.name in IGNORED:
            return
        self._generate_feature_comment_nonconsecutive(group.requiering_feature, gen)
        gen.nl()
        if group.type is None:
            self._generate_enum_group_defines(group, gen)
        elif group.name == 'VkResult':
            self._generate_enum_group_error_enum(group, gen)
        else:
            self._generate_enum_group_enum(group, gen)
    def _generate_enum_group_defines(self, group, gen):
        """Emits an untyped enum group as a block of `pub const` items."""
        # Banner comment naming the group.
        gen('// ', group.name).nl()
        gen('/////', '/'*len(group.name)).nl()
        for item in group.enum_items:
            if item.requiering_feature is None \
                    or item.name in IGNORED:
                continue
            elif item.requiering_feature is not group.requiering_feature:
                # Item belongs to a different feature than its group.
                self._generate_feature_comment_nonconsecutive(item.requiering_feature, gen)
            self._generate_docs(item, gen)
            self._generate_feature_protect(item.requiering_feature, gen)
            name = self.rust_enum_item_name(item)
            value, ty = self.rust_enum_item_value(item)
            gen('pub const ', name, ' : ', ty, ' = ', value, ';').nl()
    def _generate_enum_group_enum_item(self, group, item, gen):
        """Emits a single `NAME = value` variant (no trailing comma/newline).

        Feature comments and cfg-guards are only emitted when the item comes
        from a different feature than the enclosing group.
        """
        with_guard = item.requiering_feature is not None and item.requiering_feature is not group.requiering_feature
        if with_guard:
            self._generate_feature_comment_nonconsecutive(item.requiering_feature, gen)
        self._generate_docs(item, gen)
        if with_guard:
            self._generate_feature_protect(item.requiering_feature, gen)
        name = self.rust_enum_item_name(item)
        value, _ = self.rust_enum_item_value(item)
        gen(name, ' = ', value)
    def _generate_enum_group_enum(self, group, gen):
        """Emits a typed enum group wrapped in a define_<type>! macro call."""
        self._generate_feature_protect(group.requiering_feature, gen)
        gen('define_', group.type, '! {').nl()
        with gen.open_indention():
            self._generate_docs(group.enum_type, gen)
            gen('pub enum ', group.name, ' {').nl()
            with gen.open_indention():
                for i, item in enumerate(group.enum_items):
                    # Comma separates variants; items don't print their own.
                    if i>0:
                        gen(',').nl()
                    self._generate_enum_group_enum_item(group, item, gen)
                gen.nl()
            gen('}').nl()
        gen('}').nl()
        # NOTE(review): presumably restores the generator's feature tracking
        # after per-item guards may have changed it — confirm against
        # _generate_feature_comment_nonconsecutive's bookkeeping.
        gen._last_feature = group.requiering_feature
    def _generate_enum_group_error_enum(self, group, gen):
        """Special-cases VkResult.

        Emits a VkError enum holding every non-SUCCESS code, impls for
        std::error::Error and Display, and the `VkResult<V>` Result alias.
        """
        gen('define_enum! {').nl()
        with gen.open_indention():
            self._generate_docs(group.enum_type, gen)
            gen('pub enum VkError {').nl()
            with gen.open_indention():
                for i, item in enumerate(group.enum_items):
                    if i == 0: # skip SUCCESS
                        continue
                    if i > 1:
                        gen(',').nl()
                    self._generate_enum_group_enum_item(group, item, gen)
                gen.nl()
            gen('}').nl()
        gen('}').nl()
        gen.nl()
        gen('impl ::std::error::Error for VkError {').nl()
        with gen.open_indention():
            gen('fn description(&self) -> &str {').nl()
            with gen.open_indention():
                for i, item in enumerate(group.enum_items):
                    if i == 0: # skip SUCCESS
                        continue
                    with_guard = item.requiering_feature is not None and item.requiering_feature is not group.requiering_feature
                    if with_guard:
                        # Extra { } block so the cfg-guard covers the branch.
                        self._generate_feature_protect(item.requiering_feature, gen)
                        gen('{').nl().i()
                    name = self.rust_enum_item_name(item)
                    # Description text: explicit comment, else first doc line,
                    # else the lower-cased short name without "error_" prefix;
                    # comments are truncated at the first sentence.
                    comment = item.comment
                    if not comment and item.docs and len(item.docs)>0:
                        comment = item.docs[0].strip()
                    if not comment:
                        comment = item.shortname.lower()
                        if comment.startswith('error_'):
                            comment = comment[6:]
                    else:
                        dot = comment.find('.')
                        if dot > 0:
                            comment = comment[:dot]
                    gen('if *self == VkError::', name, '{ return "', comment,'"; }').nl()
                    if with_guard:
                        gen.o()('}').nl()
                gen('"unknown"').nl()
            gen('}').nl()
        gen('}').nl()
        gen('impl ::std::fmt::Display for VkError {').nl()
        with gen.open_indention():
            gen('fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {').nl()
            with gen.open_indention():
                gen('write!(f, "{} ({})", ::std::error::Error::description(self), *self as i32)')
            gen('}').nl()
        gen('}').nl()
        self._generate_docs(group.enum_type, gen)
        gen('pub type VkResult<V=()> = Result<V,VkError>;')
    def _generate_command_signature(self, base_cmd, gen, paramnames=True, method=None, safe=False, with_return=True, **kwargs):
        """Emits `(param: Type, ...)` and optionally `-> Ret` for a command.

        :param paramnames: include `name:` before each parameter type
        :param method: type-formatting callable; defaults to rust_safe_type
            when safe else rust_raw_type
        :param safe: omit the out-param and length params (the safe wrapper
            derives them itself)
        :param with_return: append the return type unless the command
            returns void
        """
        if method is None:
            method = safe is True and self.rust_safe_type or self.rust_raw_type
        gen('(')
        i = 0
        for param in base_cmd.params:
            if safe and (param is base_cmd.out_param or param.len_for):
                continue
            # `i` counts *emitted* params so commas land correctly even
            # when some params are skipped.
            if i>0:
                gen(', ')
            i += 1
            if paramnames and param.name:
                gen(self.rust_param_name(param.name), ': ')
            gen(method(param, as_param=True, **kwargs))
        gen(')')
        if with_return and base_cmd.returns != TypeRef.VOID:
            gen(' -> ', method(base_cmd.returns, optional=True, **kwargs))
    def generate_protos(self, file=None):
        """Generates protos.rs: a PFN_* function-pointer type per command.

        :param file: output path; defaults to <target>/protos.rs
        """
        if file is None:
            file = os.path.join(self.target, 'protos.rs')
        reg = self.registry
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            gen('#![allow(non_camel_case_types)]').nl()
            gen.nl()
            with self.manage_imports(gen) as gen:
                self.add_import('platform::*')
                self.add_import('types::*')
                for feature in reg.features:
                    # open_nonempty suppresses the feature comment when the
                    # feature contributes no output.
                    with gen.open_nonempty() as nonempty_gen:
                        self._generate_feature_comment(feature, nonempty_gen)
                        for command in feature.commands:
                            self._generate_proto(command, gen)
    def _generate_proto(self, command, gen):
        """Emits `pub type PFN_<cmd> = extern "system" fn (...);`."""
        self._generate_feature_protect(command.requiering_feature, gen)
        gen('pub type PFN_', command.name, ' = extern "system" fn ' )
        # Raw signature: all params, no names.
        self._generate_command_signature(command, gen, paramnames=False, safe=False)
        gen(';').nl()
    def generate_dispatch_table(self, file=None):
        """Generates dispatch_table.rs: one table struct + loader per level.

        :param file: output path; defaults to <target>/dispatch_table.rs
        """
        if file is None:
            file = os.path.join(self.target, 'dispatch_table.rs')
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            gen('use protos::*;').nl()
            gen('use types::PFN_vkVoidFunction;').nl()
            gen.nl()
            # Structs first, then their load() constructors.
            for table in DispatchTable:
                self._generate_dispatch_table(table, gen)
                gen.nl()
            gen.nl()
            for table in DispatchTable:
                self._generate_dispatch_table_init(table, gen)
    def _generate_dispatch_table(self, table, gen):
        """Emits the Vk<Level>DispatchTable struct of Option<PFN_*> members."""
        gen('#[allow(non_snake_case)]').nl()
        gen('pub struct Vk', table.value.capitalize(), 'DispatchTable {').nl()
        with gen.open_indention():
            for feature in self.registry.features:
                with gen.open_nonempty() as nonempty_gen:
                    self._generate_feature_comment(feature, nonempty_gen)
                    for command in feature.commands:
                        # *ProcAddr loaders and ignored commands are not
                        # stored in the table.
                        if command.dispatch_table is not table \
                                or command.name.endswith('ProcAddr') \
                                or command.name in IGNORED:
                            continue
                        self._generate_feature_protect(feature, gen)
                        gen('pub ', command.name, ': Option<PFN_', command.name, '>,').nl()
        gen('}').nl()
        gen.nl()
    def _generate_dispatch_table_init(self, table, gen):
        """Emits the unsafe `load` constructor for one dispatch table.

        The generated fn resolves each command through a `gpa` callback;
        non-loader tables additionally take a `has_ext` predicate so that
        extension commands are only resolved when the extension is enabled.
        """
        table_name = table.value.capitalize()
        gen('impl Vk', table_name, 'DispatchTable {').nl()
        with gen.open_indention():
            # The loader table has no extensions, so no has_ext parameter.
            if table is DispatchTable.Loader:
                gen('pub unsafe fn load<R,F1>(gpa: F1)')
            else:
                gen('pub unsafe fn load<R,F1,F2>(gpa: F1, has_ext: F2)')
            gen(' -> Result<Vk', table_name, 'DispatchTable, R>').nl()
            with gen.open_indention():
                gen('where').nl()
                with gen.open_indention():
                    gen('F1: Fn(&str) -> Result<PFN_vkVoidFunction, R>,').nl()
                    if table is not DispatchTable.Loader:
                        gen('F2: Fn(&str) -> bool,').nl()
            gen('{').nl()
            with gen.open_indention():
                gen('use std::mem::transmute as tm;').nl()
                gen('let mut tab : Vk', table_name, 'DispatchTable = ::std::mem::zeroed();')
                for feature in self.registry.features:
                    with gen.open_nonempty() as nonempty_gen:
                        self._generate_feature_comment(feature, nonempty_gen)
                        if self._is_feature_protect(feature):
                            self._generate_feature_protect(feature, nonempty_gen)
                            nonempty_gen('{').nl()
                            nonempty_gen.i()
                        if feature.is_extension:
                            # Names passed to gpa/has_ext are NUL-terminated.
                            nonempty_gen('if has_ext("', feature.name, '\\0") {').nl()
                            nonempty_gen.i()
                        try:
                            for command in feature.commands:
                                if command.dispatch_table is not table \
                                        or command.name.endswith('ProcAddr') \
                                        or command.name in IGNORED:
                                    continue
                                gen('tab.', command.name, ' = tm(gpa("', command.name, '\\0")?);').nl()
                        finally:
                            # Always rebalance the blocks/indention opened above.
                            if feature.is_extension:
                                nonempty_gen.o()
                                nonempty_gen('}').nl()
                            if self._is_feature_protect(feature):
                                nonempty_gen.o()
                                nonempty_gen('}').nl()
                gen('Ok(tab)').nl()
            gen('}').nl()
        gen('}').nl()
        gen.nl()
    def generate_dispatch_commands(self, file=None):
        """Generates dispatch_commands.rs with a safe wrapper fn per command.

        :param file: output path; defaults to <target>/dispatch_commands.rs
        """
        if file is None:
            file = os.path.join(self.target, 'dispatch_commands.rs')
        reg = self.registry
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            gen('#![allow(non_snake_case)]').nl()
            gen.nl()
            with self.manage_imports(gen) as gen:
                self.add_import('AsRaw')
                self.add_import('platform::*')
                self.add_import('enums::{VkError,VkResult}')
                self.add_import('types::*')
                self.add_import('dispatch_table::*')
                gen.nl()
                for feature in reg.features:
                    with gen.open_nonempty() as nonempty_gen:
                        self._generate_feature_comment(feature, nonempty_gen)
                        for command in feature.commands:
                            # ProcAddr loaders are handled by the tables.
                            if command.name.endswith('ProcAddr') \
                                    or command.name in IGNORED:
                                continue
                            self._generate_dispatch_command(command, gen)
    def _generate_dispatch_command(self, command, gen):
        """Emits one safe Rust wrapper fn around a dispatch-table command.

        The wrapper hides length and out parameters, maps VkResult returns
        onto VkResult<T>, maps VkBool32 onto bool, implements the two-call
        "enumerate" pattern (query count, allocate Vec, fill, retry on
        VK_INCOMPLETE) and hooks Vk*DispatchTable::create/destroy for
        vkCreate*/vkDestroy* commands.
        """
        table_name = command.dispatch_table.value.capitalize()
        if command.dispatch_table is DispatchTable.Loader:
            handle_arg = ''
        else:
            # Non-loader commands dispatch on their first (handle) param.
            handle_arg = self.rust_param_name(next(iter(command.params))) + ', '
        # is_create holds the created table's name (or False).
        is_create = False
        for table2 in DispatchTable:
            table2_name = table2.name.capitalize()
            if command.name == 'vkCreate%s'%table2_name:
                is_create = table2_name
                break
        # is this the destroy command for the dispatch_table
        is_destroy = command.name == 'vkDestroy%s'%table_name
        # remove lifetime 'l, we only care about lifetime 'h (for handles)
        lifetimes = self.rust_composed_lifetimes(command) - set(['l'])
        def safe_dispatch_type(*args, **kwargs):
            # Like rust_safe_type but with the 'l lifetime elided/anonymized.
            tyname = self.rust_safe_type(*args, **kwargs)
            return tyname.replace('&\'l ', '&').replace('<\'l>', '').replace('\'l', '\'_')
        out_param = command.out_param
        out_typename = None
        out_typename_return = None
        out_convert = ''
        if out_param:
            if command.out_param.type.arg == TypeRef.BOOL:
                out_typename = 'VkBool32'
            else:
                out_typename = safe_dispatch_type(command.out_param.type.arg)
            if out_param.len:
                # Length-qualified out params become Vec<T> (void* -> Vec<u8>).
                if out_param.type.arg == TypeRef.VOID:
                    out_typename = 'u8'
                out_typename = 'Vec<%s>' % out_typename
            out_typename_return = out_typename
            if command.out_param.type.arg == TypeRef.BOOL:
                out_typename_return = 'bool'
                out_convert = ' != 0'
        self._generate_docs(command, gen)
        self._generate_feature_protect(command.requiering_feature, gen)
        gen('pub fn ', command.name, _lifetime_diamond(lifetimes))
        self._generate_command_signature(command, gen, method=safe_dispatch_type, safe=True, with_return=False)
        result_convert = ''
        if out_param and command.returns == TypeRef.RESULT:
            gen(' -> VkResult<', out_typename_return,'>')
        elif command.returns == TypeRef.BOOL:
            gen(' -> bool')
            result_convert = ' != 0'
        elif command.returns != TypeRef.VOID:
            gen(' -> ', safe_dispatch_type(command.returns, optional=True))
        elif out_param:
            gen(' -> ', out_typename_return)
        gen(' {').nl()
        with gen.open_indention():
            gen('unsafe {').nl()
            with gen.open_indention():
                # add length params
                enumerate_len_param = None
                enumerate_with_incomplete = False
                for param in command.params:
                    if param.len_for:
                        if param.is_out:
                            # Out-direction length: the enumerate count slot.
                            enumerate_len_param = param
                            gen('let mut ', self.rust_param_name(param), ': ', safe_dispatch_type(param.type.arg), ' = 0;').nl()
                        else:
                            # In-direction length: derive from the slice arg
                            # and assert all other slices agree.
                            gen('let ', self.rust_param_name(param), ' = ', self.rust_param_name(param.len_for[0]), '.len() as ', safe_dispatch_type(param), ';').nl()
                            for len_for_param in param.len_for[1:]:
                                gen('assert!(', self.rust_param_name(param), ' as usize == ', self.rust_param_name(len_for_param),'.len());').nl()
                if enumerate_len_param:
                    enumerate_with_incomplete = command.returns == TypeRef.RESULT and 'VK_INCOMPLETE' in command.successcodes
                # add return param
                out_param = command.out_param
                out_len_expr = None
                if out_param:
                    out_paramname = self.rust_param_name(out_param)
                    gen('let mut ', out_paramname, ': ', out_typename, ' = ')
                    if out_param.len:
                        if enumerate_len_param:
                            out_len_expr = self.rust_param_name(enumerate_len_param)
                            gen('Vec::new();').nl()
                        else:
                            # Length comes from a member chain like `info::size`.
                            out_len_elems = out_param.len.split('::')
                            out_len_expr = self.rust_param_name(out_len_elems[0]) + ''.join(['.%s()' % self.rust_member_function(p) for p in out_len_elems[1:]])
                            gen('Vec::with_capacity(', out_len_expr,' as usize);').nl()
                    else:
                        gen('::std::mem::zeroed();').nl()
                is_check_result = command.returns == TypeRef.RESULT
                if is_check_result and is_destroy:
                    gen('let _r = ')
                gen('Vk', table_name, 'DispatchTable::with(', handle_arg, '|_t|{').nl()
                with gen.open_indention():
                    if enumerate_with_incomplete:
                        gen('loop {').nl().i()
                    all_params_as_raw = [self.rust_param_as_raw(p) for p in command.params]
                    if enumerate_len_param:
                        # First call: null out-pointer, just query the count.
                        if is_check_result:
                            gen('let _r = ')
                        all_args = ', '.join(all_params_as_raw[:-1] + ['::std::ptr::null_mut()'])
                        gen('_t.', command.name, '.unwrap()(', all_args, ');').nl()
                        if enumerate_with_incomplete:
                            gen('if _r == Err(VkError::INCOMPLETE) { continue; }').nl()
                        if is_check_result:
                            gen('if let Err(_e) = _r { return Err(_e); }').nl()
                        gen('if ', self.rust_param_name(enumerate_len_param) ,' == 0 {').nl()
                        with gen.open_indention():
                            if is_check_result:
                                gen('return Ok(', out_paramname, out_convert, ');').nl()
                            else:
                                gen('return ', out_paramname, out_convert, ';').nl()
                        gen('}').nl()
                        gen(self.rust_param_name(out_param) ,' = Vec::with_capacity(', out_len_expr,' as usize);').nl()
                    if is_check_result and (is_create or out_param):
                        gen('let _r = ')
                    # The real call with all raw args.
                    all_args = ', '.join(all_params_as_raw)
                    gen('_t.', command.name, '.unwrap()(', all_args, ')')
                    if out_param:
                        gen(';').nl()
                        if enumerate_with_incomplete:
                            gen('if _r == Err(VkError::INCOMPLETE) { continue; }').nl()
                        if is_check_result:
                            gen('if let Err(_e) = _r { return Err(_e); }').nl()
                    elif is_create:
                        gen(';').nl()
                        if is_check_result:
                            gen('if let Err(_e) = _r { return Err(_e); }').nl()
                    else:
                        gen.nl()
                    if out_param and out_param.len:
                        # The native call filled the buffer; fix the Vec len.
                        gen(self.rust_param_name(out_param) ,'.set_len(', out_len_expr,' as usize);').nl()
                    if is_create:
                        all_args = ', '.join([self.rust_param_name(p.name) for p in command.params])
                        gen('Vk', is_create, 'DispatchTable::create(', all_args, ');').nl()
                    if out_param:
                        if is_check_result:
                            gen('return Ok(', out_paramname, out_convert, ');').nl()
                        else:
                            gen('return ', out_paramname, out_convert, ';').nl()
                    if enumerate_with_incomplete:
                        gen.o()
                        gen('}').nl()
                    elif is_create and is_check_result:
                        gen('_r').nl()
                gen('})', result_convert)
                if is_destroy:
                    # Tear down the table after the native destroy returned.
                    gen(';').nl()
                    gen('Vk', table_name, 'DispatchTable::destroy(', handle_arg, ');')
                    if is_check_result:
                        gen('return _r;')
                gen.nl()
            gen('}').nl()
        gen('}').nl()
    def generate_prelude(self, file=None):
        """Generates prelude.rs: per-feature modules re-exporting the API.

        Core features re-export with per-item cfg-guards; extension modules
        are guarded as a whole, so their items need no individual guard.
        """
        if file is None:
            file = os.path.join(self.target, 'prelude.rs')
        reg = self.registry
        with RustCodeGenerator(file) as gen:
            gen("/* GENERATED FILE */").nl()
            gen.nl()
            for f in reg.features:
                # protect==True -> guard each re-export individually.
                protect = True
                if f.is_extension:
                    protect = False
                self._generate_docs(f, gen)
                self._generate_feature_protect(f, gen)
                gen('pub mod ', f.name.lower(), '{').nl()
                gen.i()
                for enum_item in f.enum_items:
                    if enum_item.enum_group.type is not None \
                            or enum_item.name in IGNORED:
                        continue
                    if protect:
                        self._generate_feature_protect(f, gen)
                    gen('pub use enums::', enum_item.name, ';').nl()
                for ty in f.types:
                    if ty.name == 'VkResult':
                        gen('pub use enums::VkError;').nl()
                    if ty.name in PREDEFINED_UTILS:
                        # Hand-written replacement in utils:: takes precedence.
                        name, _ = PREDEFINED_UTILS[ty.name]
                        gen('pub use utils::', name, ';').nl()
                        continue
                    if ty.__class__ == BaseTypeElem.PROVIDED \
                            or ty.__class__ == BaseTypeElem.INCLUDE \
                            or ty.name in IGNORED:
                        continue
                    if ty.__class__ == BaseTypeElem.DEFINE:
                        if ty.is_deprecated or ty.value is None:
                            continue
                        if protect:
                            self._generate_feature_protect(f, gen)
                        gen('pub use enums::', ty.name, ';').nl()
                    else:
                        if protect:
                            self._generate_feature_protect(f, gen)
                        gen('pub use types::', ty.name, ';').nl()
                for cmd in f.commands:
                    if cmd.name in PREDEFINED_UTILS:
                        name, _ = PREDEFINED_UTILS[cmd.name]
                        gen('pub use utils::', name, ';').nl()
                        continue
                    if cmd.name.endswith('ProcAddr') \
                            or cmd.name in IGNORED:
                        continue
                    if protect:
                        self._generate_feature_protect(f, gen)
                    gen('pub use dispatch_commands::', cmd.name, ';').nl()
                # NOTE(review): gen.o() only runs for extensions although
                # gen.i() above runs for every feature — looks asymmetric;
                # confirm the intended indention balance for core features.
                if f.is_extension:
                    gen.o()
                gen('}').nl()
|
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""URL endpoint to allow Buildbot slaves to post data to the dashboard."""
import copy
import json
import logging
import math
import re
from google.appengine.api import datastore_errors
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from dashboard import datastore_hooks
from dashboard import math_utils
from dashboard import post_data_handler
from dashboard.models import graph_data
_TASK_QUEUE_NAME = 'add-point-queue'
# Number of rows to process per task queue task. This limits the task size
# and execution time (Limits: 100KB object size and 10 minutes execution time).
_TASK_QUEUE_SIZE = 64
# Max length for a Row property name.
_MAX_COLUMN_NAME_LENGTH = 25
# Maximum length of a value for a string property.
_STRING_COLUMN_MAX_LENGTH = 400
# Maximum number of properties for a Row.
_MAX_NUM_COLUMNS = 30
# Maximum length for a test path. This limit is required because the test path
# used as the string ID for TestContainer (the parent in the datastore for Row
# entities), and datastore imposes a maximum string ID length.
_MAX_TEST_PATH_LENGTH = 500
class BadRequestError(Exception):
  """An error indicating that a 400 response status should be returned.

  Raised by the row-validation and JSON-conversion helpers in this module;
  AddPointHandler.post catches it and reports the message to the sender.
  """
  pass
class AddPointHandler(post_data_handler.PostDataHandler):
  """URL endpoint to post data to the dashboard."""

  def post(self):
    """Validates data parameter and add task to queue to process points.

    The row data comes from a "data" parameter, which is a JSON encoding of a
    list of dictionaries, each of which represents one performance result
    (one point in a graph) and associated data.

      [
        {
          "master": "ChromiumPerf",
          "bot": "xp-release-dual-core",
          "test": "dromaeo/dom/modify",
          "revision": 123456789,
          "value": 24.66,
          "error": 2.33,
          "units": "ms",
          "supplemental_columns": {
            "d_median": 24234.12,
            "d_mean": 23.553,
            "r_webkit": 423340,
            ...
          },
          ...
        },
        ...
      ]

    In general, the required fields are "master", "bot", "test" (which together
    form the test path which identifies the series that this point belongs to),
    and "revision" and "value", which are the X and Y values for the point.

    This API also supports the Dashboard JSON v1.0 format (go/telemetry-json),
    the first producer of which is Telemetry. Telemetry provides lightweight
    serialization of values it produces, as JSON. If a dashboard JSON object is
    passed, it will be a single dict rather than a list, with the test,
    value, error, and units fields replaced by a chart_data field containing a
    Chart JSON dict (see design doc, and example below). Dashboard JSON v1.0 is
    processed by converting it into rows (which can be viewed as Dashboard JSON
    v0).

      {
        "master": "ChromiumPerf",
        <other row fields>,
        "chart_data": {
          "foo": {
            "bar": {
              "type": "scalar",
              "name": "foo.bar",
              "units": "ms",
              "value": 4.2,
            },
            "summary": {
              "type": "list_of_scalar_values",
              "name": "foo",
              "units": "ms",
              "values": [4.2, 5.7, 6.8],
              "std": 1.30512,
            },
          },
        }
      }

    Request parameters:
      data: JSON encoding of a list of dictionaries.

    Outputs:
      Empty 200 response with if successful,
      200 response with warning message if optional data is invalid,
      403 response with error message if sender IP is not white-listed,
      400 response with error message if required data is invalid.
      500 with error message otherwise.
    """
    datastore_hooks.SetPrivilegedRequest()
    # NOTE(review): _CheckIpAgainstWhitelist presumably writes the 403
    # response itself — confirm in post_data_handler.
    if not self._CheckIpAgainstWhitelist():
      # TODO(qyearsley): Add test coverage. See http://crbug.com/447432
      return
    data = self.request.get('data')
    if not data:
      # TODO(qyearsley): Add test coverage. See http://crbug.com/447432
      self.ReportError('Missing "data" parameter.', status=400)
      return
    try:
      data = json.loads(self.request.get('data'))
    except ValueError:
      self.ReportError('Invalid JSON string.', status=400)
      return
    logging.info('Received data: %s', data)
    try:
      if type(data) is dict:
        if data.get('chart_data'):
          # Dashboard JSON v1.0: convert the single dict into v0 row dicts.
          data = _DashboardJsonToRawRows(data)
          if not data:
            return  # No data to add, bail out.
        else:
          self.ReportError(
              'Data should be a list of rows or a Dashboard JSON v1.0 dict.',
              status=400)
          return
      test_map = _ConstructTestPathMap(data)
      for row_dict in data:
        _ValidateRowDict(row_dict, test_map)
      _AddTasks(data)
    except BadRequestError as error:
      # If any of the data was invalid, abort immediately and return an error.
      self.ReportError(error.message, status=400)
def _DashboardJsonToRawRows(dash_json_dict):
  """Formats a Dashboard JSON dict as a list of row dicts.

  For the dashboard to begin accepting the Telemetry Dashboard JSON format
  as per go/telemetry-json, this function chunks a Dashboard JSON literal
  into rows and passes the resulting list to _AddTasks.

  Args:
    dash_json_dict: A dashboard JSON v1.0 dict.

  Returns:
    A list of dicts, each of which represents a point.

  Raises:
    AssertionError: The given argument wasn't a dict.
    BadRequestError: The content of the input wasn't valid.
  """
  assert type(dash_json_dict) is dict
  # A Dashboard JSON dict should at least have all charts coming from the
  # same master, bot and rev. It can contain multiple charts, however.
  if not dash_json_dict.get('master'):
    raise BadRequestError('No master name given.')
  if not dash_json_dict.get('bot'):
    raise BadRequestError('No bot name given.')
  if not dash_json_dict.get('point_id'):
    raise BadRequestError('No point_id number given.')
  if not dash_json_dict.get('chart_data'):
    raise BadRequestError('No chart data given.')
  test_suite_name = _TestSuiteName(dash_json_dict)
  chart_data = dash_json_dict.get('chart_data', {})
  charts = chart_data.get('charts', {})
  if not charts:
    return []  # No charts implies no data to add.
  # Links to about:tracing traces are listed under 'trace'; if they
  # exist copy them to a separate dictionary and delete from the chartjson
  # so that we don't try to process them as data points.
  tracing_links = None
  if 'trace' in charts:
    tracing_links = charts['trace'].copy()
    del charts['trace']
  row_template = _MakeRowTemplate(dash_json_dict)
  benchmark_description = chart_data.get('benchmark_description', '')
  trace_rerun_options = dict(chart_data.get('trace_rerun_options', []))
  is_ref = bool(dash_json_dict.get('is_ref'))
  rows = []
  # One row per (chart, trace) pair, all sharing the template's metadata.
  for chart in charts:
    for trace in charts[chart]:
      # Need to do a deep copy here so we don't copy a_tracing_uri data.
      row = copy.deepcopy(row_template)
      specific_vals = _FlattenTrace(
          test_suite_name, chart, trace, charts[chart][trace], is_ref,
          tracing_links, benchmark_description)
      # Telemetry may validly produce rows that represent a value of NaN. To
      # avoid getting into messy situations with alerts, we do not add such
      # rows to be processed.
      if not (math.isnan(specific_vals['value']) or
              math.isnan(specific_vals['error'])):
        if specific_vals['tracing_uri']:
          row['supplemental_columns']['a_tracing_uri'] = specific_vals[
              'tracing_uri']
        if trace_rerun_options:
          row['supplemental_columns']['a_trace_rerun_options'] = (
              trace_rerun_options)
        row.update(specific_vals)
        rows.append(row)
  return rows
def _TestSuiteName(dash_json_dict):
"""Extracts a test suite name from Dashboard JSON.
The dashboard JSON may contain a field "test_suite_name". If this is not
present or it is None, the dashboard will fall back to using "benchmark_name"
in the "chart_data" dict.
"""
if dash_json_dict.get('test_suite_name'):
return dash_json_dict['test_suite_name']
try:
return dash_json_dict['chart_data']['benchmark_name']
except KeyError as e:
raise BadRequestError('Could not find test suite name. ' + e.message)
def _AddTasks(data):
  """Puts tasks on queue for adding data.

  Args:
    data: A list of dictionaries, each of which represents one point.
  """
  task_list = []
  # Split the rows so each task stays under the task queue's size and
  # execution-time limits (see _TASK_QUEUE_SIZE above).
  for data_sublist in _Chunk(data, _TASK_QUEUE_SIZE):
    task_list.append(taskqueue.Task(
        url='/add_point_queue',
        params={'data': json.dumps(data_sublist)}))
  queue = taskqueue.Queue(_TASK_QUEUE_NAME)
  # A single add() call accepts at most MAX_TASKS_PER_ADD tasks.
  for task_sublist in _Chunk(task_list, taskqueue.MAX_TASKS_PER_ADD):
    # Calling get_result waits for all tasks to be added. It's possible that
    # this is different, and maybe faster, than just calling queue.add.
    queue.add_async(task_sublist).get_result()
def _Chunk(items, chunk_size):
"""Breaks a long list into sub-lists of a particular size."""
chunks = []
for i in range(0, len(items), chunk_size):
chunks.append(items[i:i + chunk_size])
return chunks
def _MakeRowTemplate(dash_json_dict):
"""Produces a template for rows created from a Dashboard JSON v1.0 dict.
_DashboardJsonToRawRows adds metadata fields to every row that it creates.
These include things like master, bot, point ID, versions, and other
supplementary data. This method produces a dict containing this metadata
to which row-specific information (like value and error) can be added.
Some metadata needs to be transformed to conform to the v0 format, and this
method is also responsible for that transformation.
Some validation is deferred until after the input is converted to a list
of row dicts, since revision format correctness is checked on a per-point
basis.
Args:
dash_json_dict: A dashboard JSON v1.0 dict.
Returns:
A dict containing data to include in each row dict that is created from
|dash_json_dict|.
"""
row_template = dash_json_dict.copy()
del row_template['chart_data']
del row_template['point_id']
row_template['revision'] = dash_json_dict['point_id']
annotations = row_template['supplemental']
versions = row_template['versions']
del row_template['supplemental']
del row_template['versions']
row_template['supplemental_columns'] = {}
supplemental = row_template['supplemental_columns']
for annotation in annotations:
supplemental['a_' + annotation] = annotations[annotation]
for version in versions:
supplemental['r_' + version] = versions[version]
return row_template
def _FlattenTrace(test_suite_name, chart_name, trace_name, trace,
                  is_ref=False, tracing_links=None, benchmark_description=''):
  """Takes a trace dict from dashboard JSON and readies it for display.

  Traces can be either scalars or lists; if scalar we take the value directly;
  if list we average the values and compute their standard deviation. We also
  extract fields that are normally part of v0 row dicts that are uploaded
  using add_point but are actually part of traces in the v1.0 format.

  Args:
    test_suite_name: The name of the test suite (benchmark).
    chart_name: The name of the chart to which this trace belongs.
    trace_name: The name of the passed trace.
    trace: A trace dict extracted from a dashboard JSON chart.
    is_ref: A boolean which indicates whether this trace comes from a
        reference build.
    tracing_links: A dictionary mapping trace names to about:tracing trace
        urls in cloud storage
    benchmark_description: A string documenting the benchmark suite to which
        this trace belongs.

  Returns:
    A dict containing units, value, and error for this trace.

  Raises:
    BadRequestError: The data wasn't valid.
  """
  # "chart@@tir_label" chart names are stored as "chart/tir_label" paths.
  if '@@' in chart_name:
    tir_label, chart_name = chart_name.split('@@')
    chart_name = chart_name + '/' + tir_label
  trace_type = trace.get('type')
  if trace_type == 'scalar':
    value = trace.get('value')
    if value is None:
      if trace.get('none_value_reason'):
        # Deliberately-missing value: record NaN, filtered by the caller.
        value = float('nan')
      else:
        # TODO(qyearsley): Add test coverage. See http://crbug.com/447432
        raise BadRequestError('Expected scalar value, got: ' + value)
    error = 0
  elif trace_type == 'list_of_scalar_values':
    values = trace.get('values')
    if not values or None in values:
      if trace.get('none_value_reason'):
        value = float('nan')
        error = float('nan')
      else:
        raise BadRequestError('Expected list of scalar values, got: ' + values)
    else:
      value = math_utils.Mean(values)
      std = trace.get('std')
      if std is not None:
        # Prefer a producer-supplied standard deviation when present.
        error = std
      else:
        error = math_utils.StandardDeviation(values)
  elif trace_type == 'histogram':
    value, error = _GeomMeanAndStdDevFromHistogram(trace)
  elif trace_type is not None:
    raise BadRequestError('Invalid value type in chart object: ' + trace_type)
  else:
    raise BadRequestError('No trace type provided.')
  # If there is a link to an about:tracing trace in cloud storage for this
  # test trace_name, cache it.
  tracing_uri = None
  if (tracing_links and
      trace_name in tracing_links and
      'cloud_url' in tracing_links[trace_name]):
    tracing_uri = tracing_links[trace_name]['cloud_url'].replace('\\/', '/')
  trace_name = _EscapeName(trace_name)
  # A "summary" trace stands for the chart itself rather than a sub-test.
  if trace_name == 'summary':
    subtest_name = chart_name
  else:
    subtest_name = chart_name + '/' + trace_name
  name = test_suite_name + '/' + subtest_name
  if trace_name == 'summary' and is_ref:
    name += '/ref'
  elif trace_name != 'summary' and is_ref:
    name += '_ref'
  row_dict = {
      'test': name,
      'value': value,
      'error': error,
      'units': trace['units'],
      'tracing_uri': tracing_uri,
      'benchmark_description': benchmark_description,
  }
  if 'improvement_direction' in trace:
    improvement_direction_str = trace['improvement_direction']
    if improvement_direction_str is None:
      raise BadRequestError('improvement_direction must not be None')
    row_dict['higher_is_better'] = _ImprovementDirectionToHigherIsBetter(
        improvement_direction_str)
  return row_dict
def _EscapeName(name):
"""Escapes a trace name so it can be stored in a row.
Args:
name: A string representing a name.
Returns:
An escaped version of the name.
"""
return re.sub(r'[\:|=/#&,]', '_', name)
def _GeomMeanAndStdDevFromHistogram(histogram):
"""Generates the geom. mean and std. dev. for a histogram.
A histogram is a collection of numerical buckets with associated
counts; a bucket can either represent a number of instances of a single
value ('low'), or from within a range of values (in which case 'high' will
specify the upper bound). We compute the statistics by treating the
histogram analogously to a list of individual values, where the counts tell
us how many of each value there are.
Args:
histogram: A histogram dict with a list 'buckets' of buckets.
Returns:
The geometric mean and standard deviation of the given histogram.
"""
# Note: This code comes originally from
# build/scripts/common/chromium_utils.py and was used initially for
# processing histogram results on the buildbot side previously.
if 'buckets' not in histogram:
# TODO(qyearsley): Add test coverage. See http://crbug.com/447432
return 0.0, 0.0
count = 0
sum_of_logs = 0
for bucket in histogram['buckets']:
if 'high' in bucket:
bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
else:
# TODO(qyearsley): Add test coverage. See http://crbug.com/447432
bucket['mean'] = bucket['low']
if bucket['mean'] > 0:
sum_of_logs += math.log(bucket['mean']) * bucket['count']
count += bucket['count']
if count == 0:
return 0.0, 0.0
sum_of_squares = 0
geom_mean = math.exp(sum_of_logs / count)
for bucket in histogram['buckets']:
if bucket['mean'] > 0:
sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
return geom_mean, math.sqrt(sum_of_squares / count)
def _ImprovementDirectionToHigherIsBetter(improvement_direction_str):
"""Converts an improvement direction string to a higher_is_better boolean.
Args:
improvement_direction_str: a string, either 'up' or 'down'.
Returns:
A boolean expressing the appropriate higher_is_better value.
Raises:
BadRequestError: if improvement_direction_str is invalid.
"""
# If improvement_direction is provided, we want to use it. Otherwise, by not
# providing it we'll fall back to unit-info.json
# TODO(eakuefner): Fail instead of falling back after fixing crbug.com/459450.
if improvement_direction_str == 'up':
return True
elif improvement_direction_str == 'down':
return False
else:
raise BadRequestError('Invalid improvement direction string: ' +
improvement_direction_str)
def _ConstructTestPathMap(row_dicts):
  """Makes a mapping from test paths to last added revision."""
  keys = []
  for row in row_dicts:
    if 'master' not in row or 'bot' not in row or 'test' not in row:
      continue
    test_path = '%s/%s/%s' % (
        row['master'], row['bot'], row['test'].strip('/'))
    # Over-long paths can't be used as datastore key names anyway.
    if len(test_path) > _MAX_TEST_PATH_LENGTH:
      continue
    keys.append(ndb.Key('LastAddedRevision', test_path))
  try:
    entities = ndb.get_multi(keys)
  except datastore_errors.BadRequestError:
    # TODO(qyearsley): Add test coverage. See http://crbug.com/447432
    logging.warn('Datastore BadRequestError when getting %s',
                 repr(keys))
    return {}
  return {entity.key.string_id(): entity.revision
          for entity in entities if entity is not None}
def _ValidateRowDict(row, test_map):
  """Checks all fields in the input dictionary.

  Args:
    row: A dictionary which represents one point.
    test_map: A dictionary mapping test paths to last added revision.

  Raises:
    BadRequestError: The input was not valid.
  """
  # 'master', 'bot' and 'test' are mandatory; everything else is checked
  # by the helpers below.
  for field in ('master', 'bot', 'test'):
    if field not in row:
      raise BadRequestError('No "%s" field in row dict.' % field)
  _ValidateMasterBotTest(row['master'], row['bot'], row['test'])
  _ValidateRowId(row, test_map)
  GetAndValidateRowProperties(row)
def _ValidateMasterBotTest(master, bot, test):
  """Validates the master, bot, and test properties of a row dict."""
  # Trailing and leading slashes in the test name are ignored.
  stripped_test = test.strip('/')
  parts = stripped_test.split('/')
  # The test name must consist of at least a test suite plus sub-test.
  if len(parts) < 2:
    raise BadRequestError('Test name must have more than one part.')
  if len(parts) > graph_data.MAX_TEST_ANCESTORS:
    raise BadRequestError('Invalid test name: %s' % stripped_test)
  # The master and bot names have just one part.
  if '/' in master or '/' in bot:
    raise BadRequestError('Illegal slash in master or bot name.')
  _ValidateTestPath('%s/%s/%s' % (master, bot, stripped_test))
def _ValidateTestPath(test_path):
  """Checks whether all the parts of the test path are valid."""
  # The test path is embedded in TestContainer keys, so a path longer
  # than the max key length can't be stored at all.
  if len(test_path) > _MAX_TEST_PATH_LENGTH:
    raise BadRequestError('Test path too long: %s' % test_path)
  # Stars are reserved for test path patterns, so they can't be used in names.
  if '*' in test_path:
    raise BadRequestError('Illegal asterisk in test name.')
  for part in test_path.split('/'):
    _ValidateTestPathPartName(part)
def _ValidateTestPathPartName(name):
"""Checks whether a Master, Bot or Test name is OK."""
# NDB Datastore doesn't allow key names to start and with "__" and "__".
if name.startswith('__') and name.endswith('__'):
raise BadRequestError(
'Invalid name: "%s". Names cannot start and end with "__".' % name)
def _ValidateRowId(row_dict, test_map):
  """Checks whether the ID for a Row is OK.

  Args:
    row_dict: A dictionary with new point properties, including "revision".
    test_map: A dictionary mapping test paths to the last previously added
        revision for each test.

  Raises:
    BadRequestError: The revision is not acceptable for some reason.
  """
  row_id = GetAndValidateRowId(row_dict)
  # Look up the last added revision number for this test.
  test_path = '%s/%s/%s' % (
      row_dict['master'], row_dict['bot'], row_dict['test'])
  previous_row_id = test_map.get(test_path)
  if not previous_row_id:
    # Could be first point in test.
    logging.warning('Test %s has no last added revision entry.', test_path)
    return
  if not _IsAcceptableRowId(row_id, previous_row_id):
    raise BadRequestError(
        'Invalid ID (revision) %d; compared to previous ID %s, it was larger '
        'or smaller by too much.' % (row_id, previous_row_id))
def _IsAcceptableRowId(row_id, last_row_id):
"""Checks whether the given row id (aka revision) is not too large or small.
For each data series (i.e. Test entity), we assume that row IDs are
monotonically increasing. On a given chart, points are sorted by these
row IDs. This way, points can arrive out of order but still be shown
correctly in the chart.
However, sometimes a bot might start to use a different *type* of row ID;
for example it might change from revision numbers or build numbers to
timestamps, or from timestamps to build numbers. This causes a lot of
problems, including points being put out of order.
If a sender of data actually wants to switch to a different type of
row ID, it would be much cleaner for them to start sending it under a new
chart name.
Args:
row_id: The proposed Row entity id (usually sent as "revision")
last_row_id: The previous Row id, or None if there were none previous.
Returns:
True if acceptable, False otherwise.
"""
if last_row_id is None:
# TODO(qyearsley): Add test coverage. See http://crbug.com/447432
return True
if row_id <= 0:
# TODO(qyearsley): Add test coverage. See http://crbug.com/447432
return False
# Too big of a decrease.
if row_id < 0.5 * last_row_id:
return False
# Too big of an increase.
if row_id > 2 * last_row_id:
return False
return True
def GetAndValidateRowId(row_dict):
  """Returns the integer ID for a new Row.

  This method is also responsible for validating the input fields related
  to making the new row ID.

  Args:
    row_dict: A dictionary obtained from the input JSON.

  Returns:
    An integer row ID.

  Raises:
    BadRequestError: The input wasn't formatted properly.
  """
  try:
    revision = row_dict['revision']
  except KeyError:
    raise BadRequestError('Required field "revision" missing.')
  try:
    return int(revision)
  except (ValueError, TypeError):
    raise BadRequestError('Bad value for "revision", should be numerical.')
def GetAndValidateRowProperties(row):
  """From the object received, make a dictionary of properties for a Row.

  This includes the default "value" and "error" columns as well as all
  supplemental columns, but it doesn't include "revision", and it doesn't
  include input fields that are properties of the parent Test, such as
  "units".

  This method is responsible for validating all properties that are to be
  properties of the new Row.

  Args:
    row: A dictionary obtained from the input JSON.

  Returns:
    A dictionary of the properties and property values to set when creating
    a Row. This will include "value" and "error" as well as all supplemental
    columns.

  Raises:
    BadRequestError: The properties weren't formatted correctly.
  """
  # "value" is mandatory and must parse as a float.
  if 'value' not in row:
    raise BadRequestError('No "value" given.')
  try:
    columns = {'value': float(row['value'])}
  except (ValueError, TypeError):
    raise BadRequestError('Bad value for "value", should be numerical.')
  # "error" is optional; a malformed error is logged but not fatal.
  if 'error' in row:
    try:
      columns['error'] = float(row['error'])
    except (ValueError, TypeError):
      logging.warn('Bad value for "error".')
  columns.update(_GetSupplementalColumns(row))
  return columns
def _GetSupplementalColumns(row):
"""Gets a dict of supplemental columns.
If any columns are invalid, a warning is logged and they just aren't included,
but no exception is raised.
Individual rows may specify up to _MAX_NUM_COLUMNS extra data, revision,
and annotation columns. These columns must follow formatting rules for
their type. Invalid columns are dropped with an error log, but the valid
data will still be graphed.
Args:
row: A dict, possibly with the key "supplemental_columns", the value of
which should be a dict.
Returns:
A dict of valid supplemental columns.
"""
columns = {}
for (name, value) in row.get('supplemental_columns', {}).iteritems():
# Don't allow too many columns
if len(columns) == _MAX_NUM_COLUMNS:
logging.warn('Too many columns, some being dropped.')
break
value = _CheckSupplementalColumn(name, value)
if value:
columns[name] = value
return columns
def _CheckSupplementalColumn(name, value):
  """Returns a possibly modified value for a supplemental column, or None."""
  # Check length of column name.
  name = str(name)
  if len(name) > _MAX_COLUMN_NAME_LENGTH:
    logging.warn('Supplemental column name too long.')
    return None
  # The column name has a prefix which indicates type of value; the three
  # prefixes are mutually exclusive, so dispatch on the first two chars.
  prefix = name[:2]
  if prefix not in ('d_', 'r_', 'a_'):
    logging.warn('Bad column name "%s", invalid prefix.', name)
    return None
  if prefix == 'd_':
    # The d_ prefix means "data column", intended to hold numbers.
    try:
      value = float(value)
    except (ValueError, TypeError):
      logging.warn('Bad value for column "%s", should be numerical.', name)
      return None
  elif prefix == 'r_':
    # The r_ prefix means "revision", and the value should look like a
    # number, a version number, or a git commit hash.
    revision_patterns = [
        r'^\d+$',
        r'^\d+\.\d+\.\d+\.\d+$',
        r'^[A-Fa-f0-9]{40}$',
    ]
    value_str = str(value)
    if (not value or len(value_str) > _STRING_COLUMN_MAX_LENGTH or
        not any(re.match(p, value_str) for p in revision_patterns)):
      logging.warn('Bad value for revision column "%s".', name)
      return None
    value = value_str
  elif prefix == 'a_':
    # Annotation column, should be a short string.
    if len(str(value)) > _STRING_COLUMN_MAX_LENGTH:
      logging.warn('Value for "%s" too long, max length is %d.',
                   name, _STRING_COLUMN_MAX_LENGTH)
      return None
  return value
|
|
# -*- coding: utf-8 -*-
"""
Installs and configures puppet
"""
import sys
import logging
import os
import platform
import time
from packstack.installer import utils
from packstack.installer import basedefs, output_messages
from packstack.installer.exceptions import ScriptRuntimeError, PuppetError
from packstack.modules.common import filtered_hosts
from packstack.modules.ospluginutils import manifestfiles
from packstack.modules.puppet import scan_logfile, validate_logfile
#------------------ oVirt installer initialization ------------------
# Plugin identification used by the packstack installer framework.
PLUGIN_NAME = "Puppet"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
# Root of the Puppet tree on the installer host; the PACKSTACK_PUPPETDIR
# environment variable overrides the packaged default (useful for
# development / running from source).
PUPPET_DIR = os.environ.get('PACKSTACK_PUPPETDIR',
                            '/usr/share/openstack-puppet/')
# Directory holding the Puppet modules that get copied to managed hosts.
MODULE_DIR = os.path.join(PUPPET_DIR, 'modules')
def initConfig(controller):
    """Registers the (parameter-less) PUPPET group with the controller."""
    group = dict(
        GROUP_NAME="PUPPET",
        DESCRIPTION="Puppet Config parameters",
        PRE_CONDITION=lambda x: 'yes',
        PRE_CONDITION_MATCH="yes",
        POST_CONDITION=False,
        POST_CONDITION_MATCH=True,
    )
    # No individual parameters belong to this group.
    controller.addGroup(group, [])
def initSequences(controller):
    """Registers the cleanup pre-step and the main Puppet sequence."""
    # The cleanup step must run before everything else, hence index=0.
    cleanup_steps = [
        {'title': 'Clean Up', 'functions': [run_cleanup]},
    ]
    controller.insertSequence("Clean Up", [], [], cleanup_steps, index=0)
    puppet_sequence = [
        {'title': 'Installing Dependencies',
         'functions': [install_deps]},
        {'title': 'Copying Puppet modules and manifests',
         'functions': [copy_puppet_modules]},
        {'title': 'Applying Puppet manifests',
         'functions': [apply_puppet_manifest]},
        {'title': 'Finalizing',
         'functions': [finalize]}
    ]
    controller.addSequence("Puppet", [], [], puppet_sequence)
#------------------------- helper functions -------------------------
def wait_for_puppet(currently_running, messages):
    """Waits until all outstanding remote puppet runs have finished.

    Polls each host for the ".finished" marker file; once a run finishes,
    copies its log back locally, scans it for notable messages and
    validates it for errors.

    Args:
        currently_running: list of (hostname, finished_logfile) tuples for
            puppet runs that have not yet completed; finished entries are
            removed from this list in place.
        messages: list collecting notable messages scanned from the
            retrieved logs.

    Raises:
        PuppetError: if a retrieved puppet log contains errors
            (raised by validate_logfile).
    """
    log_len = 0
    twirl = ["-", "\\", "|", "/"]
    while currently_running:
        # Iterate over a snapshot of the list: entries are removed from
        # currently_running inside this loop, and mutating a list while
        # iterating it can silently skip elements.
        for hostname, finished_logfile in list(currently_running):
            log_file = os.path.splitext(os.path.basename(finished_logfile))[0]
            if len(log_file) > log_len:
                log_len = len(log_file)
            # Show a little twirling progress indicator on interactive runs.
            if hasattr(sys.stdout, "isatty") and sys.stdout.isatty():
                twirl = twirl[-1:] + twirl[:-1]
                sys.stdout.write(("\rTesting if puppet apply is finished: %s"
                                  % log_file).ljust(40 + log_len))
                sys.stdout.write("[ %s ]" % twirl[0])
                sys.stdout.flush()
            try:
                # Once a remote puppet run has finished, we retrieve the log
                # file and check it for errors
                local_server = utils.ScriptRunner()
                log = os.path.join(basedefs.PUPPET_MANIFEST_DIR,
                                   os.path.basename(finished_logfile))
                log = log.replace(".finished", ".log")
                local_server.append('scp -o StrictHostKeyChecking=no '
                                    '-o UserKnownHostsFile=/dev/null '
                                    'root@%s:%s %s'
                                    % (hostname, finished_logfile, log))
                # To not pollute logs we turn off logging of command execution
                local_server.execute(log=False)
                # If we got to this point the puppet apply has finished
                currently_running.remove((hostname, finished_logfile))
                # clean off the last "testing apply" msg
                if hasattr(sys.stdout, "isatty") and sys.stdout.isatty():
                    sys.stdout.write(("\r").ljust(45 + log_len))
            except ScriptRuntimeError:
                # The scp raises an exception if the file doesn't exist yet
                # TO-DO: We need to start testing 'e' for unexpected exceptions
                time.sleep(3)
                continue
            # check log file for relevant notices
            messages.extend(scan_logfile(log))
            # check the log file for errors
            sys.stdout.write('\r')
            try:
                validate_logfile(log)
                state = utils.state_message('%s:' % log_file, 'DONE', 'green')
                sys.stdout.write('%s\n' % state)
                sys.stdout.flush()
            except PuppetError:
                state = utils.state_message('%s:' % log_file, 'ERROR', 'red')
                sys.stdout.write('%s\n' % state)
                sys.stdout.flush()
                raise
#-------------------------- step functions --------------------------
def run_cleanup(config, messages):
    """Removes stale *.pp manifests left over from previous runs."""
    cleaner = utils.ScriptRunner()
    cleaner.append("rm -rf %s/*pp" % basedefs.PUPPET_MANIFEST_DIR)
    cleaner.execute()
def install_deps(config, messages):
    """Installs puppet and its package dependencies on every managed host."""
    deps = ["puppet", "openssh-clients", "tar", "nc"]
    modules_pkg = 'openstack-puppet-modules'
    local = utils.ScriptRunner()
    local.append('rpm -q --requires %s | egrep -v "^(rpmlib|\/|perl)"'
                 % modules_pkg)
    rc, modules_deps = local.execute()
    # Modules package might not be installed if we are running from source.
    # In this case we assume user knows what (s)he's doing and we don't
    # install modules dependencies
    if ('%s is not installed' % modules_pkg) not in modules_deps:
        deps.extend(dep.strip() for dep in modules_deps.split() if dep.strip())
    for hostname in filtered_hosts(config):
        server = utils.ScriptRunner(hostname)
        for package in deps:
            server.append("rpm -q --whatprovides %s || yum install -y %s"
                          % (package, package))
        server.execute()
def copy_puppet_modules(config, messages):
    """Copies Packstack manifests, resources and Puppet modules to all hosts.

    For every managed host, tars up the generated manifests and the required
    Puppet modules and pipes them over ssh into that host's temporary
    directory; individual resource files are copied with scp.
    """
    # Puppet modules required by Packstack that must exist on every host.
    os_modules = ' '.join(('apache', 'ceilometer', 'certmonger', 'cinder',
                           'concat', 'firewall', 'glance', 'heat', 'horizon',
                           'inifile', 'keystone', 'memcached', 'mongodb',
                           'mysql', 'neutron', 'nova', 'nssdb', 'openstack',
                           'packstack', 'qpid', 'rabbitmq', 'rsync', 'ssh',
                           'stdlib', 'swift', 'sysctl', 'tempest', 'vcsrepo',
                           'vlan', 'vswitch', 'xinetd'))
    # write puppet manifest to disk
    manifestfiles.writeManifests()
    server = utils.ScriptRunner()
    for hostname in filtered_hosts(config):
        host_dir = config['HOST_DETAILS'][hostname]['tmpdir']
        # copy Packstack manifests
        server.append("cd %s/puppet" % basedefs.DIR_PROJECT_DIR)
        server.append("cd %s" % basedefs.PUPPET_MANIFEST_DIR)
        # Stream the manifests directory as a tarball over ssh rather than
        # copying files one by one.
        server.append("tar --dereference -cpzf - ../manifests | "
                      "ssh -o StrictHostKeyChecking=no "
                      "-o UserKnownHostsFile=/dev/null "
                      "root@%s tar -C %s -xpzf -" % (hostname, host_dir))
        # copy resources
        resources = config.get('RESOURCES', {})
        for path, localname in resources.get(hostname, []):
            server.append("scp -o StrictHostKeyChecking=no "
                          "-o UserKnownHostsFile=/dev/null "
                          "%s root@%s:%s/resources/%s" %
                          (path, hostname, host_dir, localname))
        # copy Puppet modules required by Packstack
        server.append("cd %s" % MODULE_DIR)
        server.append("tar --dereference -cpzf - %s | "
                      "ssh -o StrictHostKeyChecking=no "
                      "-o UserKnownHostsFile=/dev/null "
                      "root@%s tar -C %s -xpzf -" %
                      (os_modules, hostname,
                       os.path.join(host_dir, 'modules')))
    server.execute()
def apply_puppet_manifest(config, messages):
    """Runs each generated puppet manifest on its target host.

    Manifests are grouped by marker; all runs within one marker group are
    started in parallel (in the background on each host) and the function
    waits for a group to finish before starting the next one. Each remote
    run writes a ".running" log that is renamed to ".finished" when done,
    which wait_for_puppet() polls for.
    """
    if config.get("DRY_RUN"):
        return
    currently_running = []
    lastmarker = None
    loglevel = ''
    logcmd = False
    # Propagate debug verbosity to the remote puppet runs.
    if logging.root.level <= logging.DEBUG:
        loglevel = '--debug'
        logcmd = True
    for manifest, marker in manifestfiles.getFiles():
        # if the marker has changed then we don't want to proceed until
        # all of the previous puppet runs have finished
        if lastmarker is not None and lastmarker != marker:
            wait_for_puppet(currently_running, messages)
        lastmarker = marker
        for hostname in filtered_hosts(config):
            # Manifest filenames are prefixed with their target hostname.
            if "%s_" % hostname not in manifest:
                continue
            host_dir = config['HOST_DETAILS'][hostname]['tmpdir']
            print "Applying %s" % manifest
            server = utils.ScriptRunner(hostname)
            man_path = os.path.join(config['HOST_DETAILS'][hostname]['tmpdir'],
                                    basedefs.PUPPET_MANIFEST_RELATIVE,
                                    manifest)
            running_logfile = "%s.running" % man_path
            finished_logfile = "%s.finished" % man_path
            currently_running.append((hostname, finished_logfile))
            server.append("touch %s" % running_logfile)
            server.append("chmod 600 %s" % running_logfile)
            server.append("export PACKSTACK_VAR_DIR=%s" % host_dir)
            # Run puppet in the background under an flock so only one apply
            # runs per host at a time; rename the log to ".finished" when
            # the run completes so wait_for_puppet() can detect it.
            cmd = ("( flock %s/ps.lock "
                   "puppet apply %s --modulepath %s/modules %s > %s "
                   "2>&1 < /dev/null ; "
                   "mv %s %s ) > /dev/null 2>&1 < /dev/null &"
                   % (host_dir, loglevel, host_dir, man_path, running_logfile,
                      running_logfile, finished_logfile))
            server.append(cmd)
            server.execute(log=logcmd)
    # wait for outstanding puppet runs before exiting
    wait_for_puppet(currently_running, messages)
def finalize(config, messages):
    """Warns about hosts that need a reboot because of a kernel update.

    On each managed host, compares the newest installed kernel package
    version with the currently running kernel and appends a warning to
    `messages` for hosts where they differ.
    """
    for hostname in filtered_hosts(config):
        server = utils.ScriptRunner(hostname)
        # Newest installed kernel version, extracted from the package name.
        server.append("installed=$(rpm -q kernel --last | head -n1 | "
                      "sed 's/kernel-\([a-z0-9\.\_\-]*\).*/\\1/g')")
        # Currently running kernel version.
        server.append("loaded=$(uname -r | head -n1)")
        # The test exits non-zero (raising ScriptRuntimeError) on mismatch.
        server.append('[ "$loaded" == "$installed" ]')
        try:
            rc, out = server.execute()
        except ScriptRuntimeError:
            messages.append('Because of the kernel update the host %s '
                            'requires reboot.' % hostname)
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ragged.to_tensor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
from absl.testing import parameterized
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.ops.ragged.ragged_tensor import RaggedTensor
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import googletest
from tensorflow.python.util import nest
def make_placeholder(t):
  """Returns `t` wrapped in a placeholder_with_default with unknown shape."""
  placeholder = array_ops.placeholder_with_default(t, None)
  return placeholder
def rebuild_ragged_tensor_with_value_rowids(rt, feed_dict=None, sess=None):
  """Returns a copy of `rt`, built using `from_value_rowids`.

  This ensures that RaggedTensor._cached_value_rowids is populated, which
  triggers a different code-path for converting ragged tensors to tensors.

  If `feed_dict` and `sess` are specified, then build the new `RaggedTensor`
  using placeholder tensors, and populate a feed dictionary that can be used
  to feed the placeholders.

  Args:
    rt: The RaggedTensor to copy.
    feed_dict: If specified, then build the new `RaggedTensor` using
      placeholders, and populate this dict with entries to feed those
      placeholders.
    sess: A session used to evaluate tensors; required if feed_dict is
      specified.

  Returns:
    A copy of `rt`, built using `from_value_rowids`.
  """
  # Base case: a non-ragged leaf tensor is returned as-is, or swapped for a
  # fed placeholder when feed_dict is requested.
  if not isinstance(rt, ragged_tensor.RaggedTensor):
    if feed_dict is None:
      return rt
    leaf_ph = make_placeholder(rt)
    feed_dict[leaf_ph] = sess.run(rt)
    return leaf_ph
  # Recursive case: rebuild the inner values first, then reassemble this
  # level from (values, value_rowids, nrows).
  values = rebuild_ragged_tensor_with_value_rowids(rt.values, feed_dict, sess)
  rowids = rt.value_rowids()
  nrows = rt.nrows()
  if feed_dict is not None:
    rowids_ph = make_placeholder(rowids)
    nrows_ph = make_placeholder(nrows)
    feed_dict[rowids_ph] = sess.run(rowids)
    feed_dict[nrows_ph] = sess.run(nrows)
    rowids, nrows = rowids_ph, nrows_ph
  return ragged_tensor.RaggedTensor.from_value_rowids(values, rowids, nrows)
@test_util.run_all_in_graph_and_eager_modes
class RaggedTensorToTensorOpTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
def testDocStringExamples(self):
"""Example from ragged_to_tensor.__doc__."""
rt = ragged_factory_ops.constant([[9, 8, 7], [], [6, 5], [4]])
dt = rt.to_tensor()
self.assertAllEqual(dt, [[9, 8, 7], [0, 0, 0], [6, 5, 0], [4, 0, 0]])
  @parameterized.named_parameters(
      # Simple 2D ragged tensors (with one ragged dimension)
      {
          'testcase_name': 'shape_2xN',
          'rt_input': [[0, 1, 2], [], [3]],
          'expected': [[0, 1, 2], [0, 0, 0], [3, 0, 0]]
      },
      {
          'testcase_name': 'shape_2xN_default_0D',
          'rt_input': [[0, 1, 2], [], [3]],
          'default': 5,
          'expected': [[0, 1, 2], [5, 5, 5], [3, 5, 5]]
      },
      {
          'testcase_name': 'empty_first_row',
          'rt_input': [[], [], [3, 4], []],
          'expected': [[0, 0], [0, 0], [3, 4], [0, 0]]
      },
      {
          'testcase_name': 'empty_last_row',
          'rt_input': [[0, 1, 2], [], [3], []],
          'expected': [[0, 1, 2], [0, 0, 0], [3, 0, 0], [0, 0, 0]]
      },
      {
          'testcase_name': 'shape_4xN',
          'rt_input': [[1, 2, 3], [], [4], [5, 6]],
          'expected': [[1, 2, 3], [0, 0, 0], [4, 0, 0], [5, 6, 0]]
      },
      {
          'testcase_name': 'shape_4xN_default_0D',
          'rt_input': [[1, 2, 3], [], [4], [5, 6]],
          'default': 9,
          'expected': [[1, 2, 3], [9, 9, 9], [4, 9, 9], [5, 6, 9]]
      },
      {
          'testcase_name': 'shape_2xN_already_dense',
          'rt_input': [[6, 7, 8], [9, 10, 11]],
          'expected': [[6, 7, 8], [9, 10, 11]],
      },
      {
          'testcase_name': 'shape_2xN_string_already_dense',
          'rt_input': [[b'a', b'b', b'c'],
                       [b'd', b'e', b'antidisestablishmentarianism']],
          'ragged_rank': 1,
          'expected': [[b'a', b'b', b'c'],
                       [b'd', b'e', b'antidisestablishmentarianism']],
      },
      # 3D ragged tensors with two ragged dimensions
      {
          'testcase_name': 'shape_4xNxM',
          'rt_input': [[[1, 2], [], [3, 4]], [], [[5]], [[6, 7], [8]]],
          'expected': [
              [[1, 2], [0, 0], [3, 4]],  #
              [[0, 0], [0, 0], [0, 0]],  #
              [[5, 0], [0, 0], [0, 0]],  #
              [[6, 7], [8, 0], [0, 0]],  #
          ]
      },
      {
          'testcase_name': 'shape_4xNxM_default_0D',
          'rt_input': [[[1, 2], [], [3, 4]], [], [[5]], [[6, 7], [8]]],
          'default': 9,
          'expected': [
              [[1, 2], [9, 9], [3, 4]],  #
              [[9, 9], [9, 9], [9, 9]],  #
              [[5, 9], [9, 9], [9, 9]],  #
              [[6, 7], [8, 9], [9, 9]],  #
          ]
      },
      {
          'testcase_name': 'shape_1xNx1_default_0D',
          'rt_input': [[[1], [2], [3]]],
          'ragged_rank': 1,
          'default': 0,
          'expected': [[[1], [2], [3]]],
      },
      {
          'testcase_name': 'shape_2xNx2_already_dense',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
          'ragged_rank': 1,
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
      },
      {
          'testcase_name': 'shape_2xNx2_already_dense_default_1D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
          'ragged_rank': 1,
          'default': [31, 32],
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [16, 17]]],
      },
      {
          'testcase_name': 'shape_2xNx2_string_already_dense',
          'rt_input': [[[b'a', b'b'], [b'c', b'd'], [b'e', b'f']],
                       [[b'g', b'jalapeno'], [b'kangaroo', b'llama'],
                        [b'manzana', b'nectar']]],
          'ragged_rank': 1,
          'expected': [[[b'a', b'b'], [b'c', b'd'], [b'e', b'f']],
                       [[b'g', b'jalapeno'], [b'kangaroo', b'llama'],
                        [b'manzana', b'nectar']]],
      },
      # 3D ragged tensors with one ragged dimension
      {
          'testcase_name': 'shape_4xNx1_default_1D',
          'rt_input': [[[1], [2], [3]], [], [[4]], [[5], [6]]],
          'ragged_rank': 1,
          'default': [9],
          'expected': [[[1], [2], [3]],
                       [[9], [9], [9]],
                       [[4], [9], [9]],
                       [[5], [6], [9]]]
      },
      {
          'testcase_name': 'shape_2xNx2_default_0D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15]]],
          'ragged_rank': 1,
          'default': 2,
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [2, 2]]],
      },
      {
          'testcase_name': 'shape_2xNx2_default_1D',
          'rt_input': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15]]],
          'ragged_rank': 1,
          'default': [2, 3],
          'expected': [[[6, 7], [8, 9], [10, 11]],
                       [[12, 13], [14, 15], [2, 3]]],
      },
      # 4D ragged tensors with 3 ragged dimensions
      {
          'testcase_name': 'shape_1xNxMxK_default_0D',
          'rt_input': [[[[1], [2]], [], [[3]]]],
          'default': 9,
          'expected': [[[[1], [2]], [[9], [9]], [[3], [9]]]],
      },
      # Broadcast default
      {
          'testcase_name': 'shape_2xNx2x2_default_2x1',
          'rt_input': [[[[1, 2], [3, 4]]], []],
          'ragged_rank': 1,
          'default': [[5], [6]],
          'expected': [[[[1, 2], [3, 4]]],
                       [[[5, 5], [6, 6]]]],
      },
      {
          'testcase_name': 'shape_2xNx2x2_default_1x2',
          'rt_input': [[[[1, 2], [3, 4]]], []],
          'ragged_rank': 1,
          'default': [[5, 6]],
          'expected': [[[[1, 2], [3, 4]]],
                       [[[5, 6], [5, 6]]]],
      },
      # Explicit shape
      {
          'testcase_name': 'shape_4xN_with_crop',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': [2, 3],
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_2xN_with_pad',
          'rt_input': [[1, 2], [3]],
          'shape': [3, 3],
          'expected': [[1, 2, 0], [3, 0, 0], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_crop_and_pad',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': [2, 8],
          'expected': [[0, 1, 2, 3, 0, 0, 0, 0],
                       [0, 0, 0, 0, 0, 0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_tuple_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': (2, 3),
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_tensorshape_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': tensor_shape.TensorShape([2, 3]),
          'expected': [[0, 1, 2], [0, 0, 0]],
      },
      {
          'testcase_name': 'shape_4xN_with_partial_shape',
          'rt_input': [[0, 1, 2, 3], [], [4], []],
          'shape': tensor_shape.TensorShape([2, None]),
          'expected': [[0, 1, 2, 3], [0, 0, 0, 0]],
      },
      # Empty tensors
      {
          'testcase_name': 'shape_0xN',
          'rt_input': [],
          'ragged_rank': 1,
          'expected': [],
          'expected_shape': [0, 0],
      },
      {
          'testcase_name': 'shape_0xNxM',
          'rt_input': [],
          'ragged_rank': 2,
          'expected': [],
          'expected_shape': [0, 0, 0],
      },
      # {
      #     'testcase_name': 'shape_0xNx2',
      #     'rt_input': [],
      #     'ragged_rank': 1,
      #     'inner_shape': [2],
      #     'expected': [],
      #     'expected_shape': [0, 0, 2],
      # },
      {
          'testcase_name': 'shape_2xN_empty',
          'rt_input': [[], []],
          'expected': [[], []],
          'expected_shape': [2, 0],
      },
  )  # pyformat: disable
  def testRaggedTensorToTensor(self,
                               rt_input,
                               expected,
                               ragged_rank=None,
                               inner_shape=None,
                               default=None,
                               shape=None,
                               expected_shape=None):
    """Checks that `rt_input.to_tensor(...)` produces `expected`.

    Args:
      rt_input: Nested list used to build the input RaggedTensor.
      expected: Nested list with the expected dense result.
      ragged_rank: Optional ragged_rank for constructing the input.
      inner_shape: Optional inner_shape for constructing the input.
      default: Optional default_value passed to to_tensor().
      shape: Optional shape passed to to_tensor().
      expected_shape: Optional explicit shape for `expected`, used when the
        nested list alone can't express the shape (empty tensors).
    """
    rt1 = ragged_factory_ops.constant(
        rt_input, ragged_rank=ragged_rank, inner_shape=inner_shape)
    # rt2 exercises the cached-value-rowids conversion code path.
    rt2 = rebuild_ragged_tensor_with_value_rowids(rt1)
    for rt in [rt1, rt2]:
      # Run each conversion both with concrete tensors and with
      # unknown-shape placeholders.
      for use_placeholder in [False, True]:
        if use_placeholder:
          if default is not None:
            default = make_placeholder(default)
          rt = nest.map_structure(make_placeholder, rt, expand_composites=True)
        dt = rt.to_tensor(default_value=default, shape=shape)
        self.assertIsInstance(dt, ops.Tensor)
        self.assertEqual(rt.dtype, dt.dtype)
        if shape is not None:
          self.assertTrue(dt.shape.is_compatible_with(shape))
        else:
          self.assertTrue(dt.shape.is_compatible_with(rt.shape))
        if expected_shape is not None:
          # NOTE(review): np.ndarray(shape, buffer=...) reshapes the flat
          # `expected` values to `expected_shape` — used for empty results
          # whose shape can't be inferred from the nested list alone.
          expected = np.ndarray(expected_shape, buffer=np.array(expected))
        self.assertAllEqual(dt, expected)
  @parameterized.parameters([
      {
          'rt_input': [[1, 2, 3]],
          'default': 'a',
          'error_type': TypeError,
          'error': r'Expected int32|Cannot convert',
      },
      {
          'rt_input': [[1, 2, 3]],
          'default': [0],
          'error': r'default_value\.shape=\[1\] and '
                   r'rt_input\.flat_values\.shape=\[3\] are incompatible: '
                   r'default_value\.rank = 1 must be less than '
                   r'rt_input\.flat_values\.rank = 1'
      },
      {
          'rt_input': [[[1, 2], [3, 4]], [[5, 6]]],
          'ragged_rank': 1,
          'default': [7, 8, 9],
          'error': r'default_value\.shape=\[3\] and '
                   r'rt_input\.flat_values\.shape=\[3,2\] are incompatible: '
                   r'default_value\.shape\[-1\] = 3 but '
                   r'rt_input\.flat_values\.shape\[-1\] = 2'
      },
      {
          'rt_input': [[1, 2, 3]],
          'shape': [3, 3, 3],
          'error': r'rt_input\.shape and shape=\[.,.,.\] are incompatible: '
                   r'rt_input\.rank = 2 but shape\.rank = 3'
      },
      {
          'rt_input': [[[1, 2, 3]]],
          'ragged_rank': 1,
          'shape': [1, 1, 4],
          'error': r'rt_input\.shape and shape=\[1,1,4\] are incompatible: '
                   r'rt_input\.shape\[2\] = 3 but shape\[2\] = 4'
      },
  ])
  def testError(self,
                rt_input,
                error,
                error_type=(ValueError, errors.InvalidArgumentError),
                default=None,
                ragged_rank=None,
                shape=None):
    """Checks that to_tensor() raises `error_type` with message `error`.

    Args:
      rt_input: Nested list used to build the input RaggedTensor.
      error: Regex that the raised error message must match.
      error_type: Exception type(s) expected to be raised.
      default: Optional default_value passed to to_tensor().
      ragged_rank: Optional ragged_rank for constructing the input.
      shape: Optional shape passed to to_tensor().
    """
    rt = ragged_factory_ops.constant(rt_input, ragged_rank=ragged_rank)
    with self.assertRaisesRegexp(error_type, error):
      self.evaluate(rt.to_tensor(default_value=default, shape=shape))
    # The same error must also surface when the inputs come in through
    # unknown-shape placeholders.
    rt_placeholder = nest.map_structure(
        make_placeholder, rt, expand_composites=True)
    with self.assertRaisesRegexp(error_type, error):
      self.evaluate(
          rt_placeholder.to_tensor(default_value=default, shape=shape))
def test_shape_limit_shape_is_tensor(self):
input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
actual = input_data.to_tensor(
shape=constant_op.constant([2, 3], dtype=dtypes.int64))
self.assertAllEqual(actual, [[0, 1, 2], [0, 0, 0]])
self.assertEqual(actual.shape.as_list(), [2, 3])
def test_shape_limit_shape_is_tensor_unknown_rank(self):
input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
actual = input_data.to_tensor(
shape=constant_op.constant(-1, dtype=dtypes.int64))
self.assertAllEqual(
actual, [[0, 1, 2, 3], [0, 0, 0, 0], [4, 0, 0, 0], [0, 0, 0, 0]])
self.assertTrue(actual.shape.is_compatible_with([4, 4]))
def test_shape_limit_shape_is_tensor_unknown_dim(self):
input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
actual = input_data.to_tensor(
shape=constant_op.constant([2, -1], dtype=dtypes.int64))
self.assertAllEqual(actual, [[0, 1, 2, 3], [0, 0, 0, 0]])
self.assertTrue(actual.shape.is_compatible_with([2, None]))
def test_shape_limit_shape_is_tensor_int32(self):
input_data = ragged_factory_ops.constant([[0, 1, 2, 3], [], [4], []])
actual = input_data.to_tensor(
shape=constant_op.constant([2, 3], dtype=dtypes.int32))
self.assertAllEqual(actual, [[0, 1, 2], [0, 0, 0]])
self.assertEqual(actual.shape.as_list(), [2, 3])
def test_shape_expand_first_dim(self):
input_data = ragged_factory_ops.constant([[0, 1, 2], [], [3]])
actual = input_data.to_tensor(shape=[4, 4])
self.assertAllEqual(
actual, [[0, 1, 2, 0], [0, 0, 0, 0], [3, 0, 0, 0], [0, 0, 0, 0]])
self.assertEqual(actual.shape.as_list(), [4, 4])
def test_value_transposed(self):
# Check that transposed data is not an issue.
my_value = array_ops.transpose(
constant_op.constant([[0, 1, 2, 3], [4, 5, 6, 7]]))
input_data = RaggedTensor.from_value_rowids(
values=my_value,
value_rowids=constant_op.constant([0, 1, 2, 3], dtype=dtypes.int64),
nrows=constant_op.constant(4, dtype=dtypes.int64),
validate=True)
self.assertAllEqual(input_data, [[[0, 4]], [[1, 5]], [[2, 6]], [[3, 7]]])
def test_broadcast_default(self):
# The dense dimension here is 2 x 2
input_data = ragged_factory_ops.constant([[[[1, 2], [3, 4]]], []],
ragged_rank=1)
# This placeholder has a 2 x 1 dimension.
default_value = make_placeholder([[5], [6]])
actual = input_data.to_tensor(default_value=default_value)
expected = [[[[1, 2], [3, 4]]], [[[5, 5], [6, 6]]]]
self.assertAllEqual(actual, expected)
def test_broadcast_default_no_placeholder(self):
input_data = ragged_factory_ops.constant([[[[1, 2], [3, 4]]], []],
ragged_rank=1)
# default_value has a 2 x 1 dimension.
default_value = constant_op.constant([[5], [6]], shape=None)
actual = input_data.to_tensor(default_value=default_value)
expected = [[[[1, 2], [3, 4]]], [[[5, 5], [6, 6]]]]
self.assertAllEqual(actual, expected)
def test_shape_expand_second_dim(self):
input_data = ragged_factory_ops.constant([[0, 1, 2], [], [3], []])
actual = input_data.to_tensor(shape=[3, 4])
self.assertAllEqual(actual, [[0, 1, 2, 0], [0, 0, 0, 0], [3, 0, 0, 0]])
def test_empty_tensor_with_shape(self):
input_data = RaggedTensor.from_value_rowids(
values=constant_op.constant([], dtype=dtypes.int64),
value_rowids=constant_op.constant([], dtype=dtypes.int64),
nrows=constant_op.constant(2, dtype=dtypes.int64),
validate=True)
actual = input_data.to_tensor(default_value=3, shape=[2, 3])
self.assertAllEqual(actual, [[3, 3, 3], [3, 3, 3]])
# pylint: disable=bad-whitespace
@parameterized.named_parameters([
dict(
testcase_name = '2d_default_shape',
shape = None,
rt_value = [[1, 2, 3], [4], [5, 6]],
rt_grad = [[9, 8, 7], [6], [3, 2]],
default_value = 0,
default_grad = sum([5, 4, 1]),
output_value = [[1, 2, 3], [4, 0, 0], [5, 6, 0]],
output_grad = [[9, 8, 7], [6, 5, 4], [3, 2, 1]]),
dict(
testcase_name = '2d_pad',
shape = [4, 4],
rt_value = [[1, 2, 3], [4], [5, 6]],
rt_grad = [[9, 8, 7], [5], [1, 0]],
default_value = 0,
default_grad = sum([6, 4, 3, 2, 1, 2, 3, 4, 5, 6]),
output_value = [
[1, 2, 3, 0], [4, 0, 0, 0], [5, 6, 0, 0], [0, 0, 0, 0]],
output_grad = [
[9, 8, 7, 6], [5, 4, 3, 2], [1, 0, 1, 2], [3, 4, 5, 6]]),
dict(
testcase_name = '2d_pad_and_crop',
shape = [5, 3],
rt_value = [[1, 2, 3], [4], [5, 6, 7, 8, 9], [8]],
rt_grad = [[9, 8, 7], [6], [3, 2, 1, 0, 0], [2]],
default_value = 0,
default_grad = sum([5, 4, 3, 4, 5, 6, 7]),
output_value = [
[1, 2, 3], [4, 0, 0], [5, 6, 7], [8, 0, 0], [0, 0, 0]],
output_grad = [
[9, 8, 7], [6, 5, 4], [3, 2, 1], [2, 3, 4], [5, 6, 7]]),
dict(
testcase_name = '3d_rrank_2',
shape = [2, 2, 2],
rt_value = [[[9, 8, 7], [6]], [[5, 4]]],
rt_grad = [[[1, 2, 0], [3]], [[5, 6]]],
default_value = 3,
default_grad = sum([4, 7, 8]),
output_value = [[[9, 8], [6, 3]], [[5, 4], [3, 3]]],
output_grad = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
dict(
testcase_name = '3d_rrank_1_with_0d_default',
ragged_rank = 1,
shape = [2, 2, 2],
rt_value = [[[9, 8], [7, 6]], [[5, 4]]],
rt_grad = [[[1, 2], [3, 4]], [[5, 6]]],
default_value = 3,
default_grad = sum([7, 8]),
output_value = [[[9, 8], [7, 6]], [[5, 4], [3, 3]]],
output_grad = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
dict(
testcase_name = '3d_rrank_1_with_1d_default',
ragged_rank = 1,
shape = [2, 2, 2],
rt_value = [[[9, 8], [7, 6]], [[5, 4]]],
rt_grad = [[[1, 2], [3, 4]], [[5, 6]]],
default_value = [3, 2],
default_grad = [7, 8],
output_value = [[[9, 8], [7, 6]], [[5, 4], [3, 2]]],
output_grad = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
dict(
testcase_name = '3d_rrank_1_with_1d_broadcast_default',
ragged_rank = 1,
shape = [2, 2, 2],
rt_value = [[[9, 8], [7, 6]], [[5, 4]]],
rt_grad = [[[1, 2], [3, 4]], [[5, 6]]],
default_value = [3],
default_grad = [7 + 8],
output_value = [[[9, 8], [7, 6]], [[5, 4], [3, 3]]],
output_grad = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]),
dict(
testcase_name = '4d_rrank_1_with_2d_default',
ragged_rank = 1,
shape = [3, 3, 2, 1],
rt_value = [[[[9], [8]], [[7], [6]]], [[[5], [4]]]],
rt_grad = [[[[1], [2]], [[3], [4]]], [[[7], [8]]]],
default_value = [[3], [2]],
default_grad = [[5 + 9 + 2 + 4 + 6 + 8], [6 + 1 + 3 + 5 + 7 + 9]],
output_value = [[[[9], [8]], [[7], [6]], [[3], [2]]],
[[[5], [4]], [[3], [2]], [[3], [2]]],
[[[3], [2]], [[3], [2]], [[3], [2]]]],
output_grad = [[[[1], [2]], [[3], [4]], [[5], [6]]],
[[[7], [8]], [[9], [1]], [[2], [3]]],
[[[4], [5]], [[6], [7]], [[8], [9]]]]),
dict(
testcase_name = '4d_rrank_1_with_with_0d_default',
ragged_rank = 1,
shape = [3, 3, 2, 1],
rt_value = [[[[9], [8]], [[7], [6]]], [[[5], [4]]]],
rt_grad = [[[[1], [2]], [[3], [4]]], [[[7], [8]]]],
default_value = 3,
default_grad = 5 + 9 + 2 + 4 + 6 + 8 + 6 + 1 + 3 + 5 + 7 + 9,
output_value = [[[[9], [8]], [[7], [6]], [[3], [3]]],
[[[5], [4]], [[3], [3]], [[3], [3]]],
[[[3], [3]], [[3], [3]], [[3], [3]]]],
output_grad = [[[[1], [2]], [[3], [4]], [[5], [6]]],
[[[7], [8]], [[9], [1]], [[2], [3]]],
[[[4], [5]], [[6], [7]], [[8], [9]]]]),
dict(
testcase_name = 'zero_size',
shape = [0, 0],
rt_value = [[9, 8], [7, 6, 5], [4]],
rt_grad = [[0, 0], [0, 0, 0], [0]],
default_value = 3,
default_grad = 0,
output_value = [],
output_grad = [])
]) # pyformat: disable
  @test_util.run_deprecated_v1
  def test_gradient(self,
                    shape,
                    rt_value,
                    rt_grad,
                    default_value,
                    default_grad,
                    output_value,
                    output_grad,
                    ragged_rank=None):
    """Tests that ragged_to_dense generates the right gradient.

    Args:
      shape: The `shape` arg for `ragged_to_dense`.
      rt_value: The `rt_input` arg for `ragged_to_dense`.
      rt_grad: The expected gradient for `rt_value`. Corresponds 1:1 with
        `rt_value`.
      default_value: The `default_value` arg for `ragged_to_dense`.
      default_grad: The expected gradient for `default_value`. Corresponds 1:1
        with `default_value`.
      output_value: The expected output of `ragged_to_dense`.
      output_grad: The gradient for the output (used to generate the gradients
        `rt_grad` and `default_grad`). Corresponds 1:1 with `output_value`.
      ragged_rank: Ragged rank for `rt_value`.
    """
    # tf.gradients needs a graph; skip under eager execution.
    if context.executing_eagerly():
      return
    rt_value = ragged_factory_ops.constant(
        rt_value, dtype=dtypes.float32, ragged_rank=ragged_rank)
    rt_grad = ragged_factory_ops.constant(
        rt_grad, dtype=dtypes.float32, ragged_rank=ragged_rank)
    default_value = constant_op.constant(default_value, dtype=dtypes.float32)
    default_grad = constant_op.constant(default_grad, dtype=dtypes.float32)
    output_value = constant_op.constant(
        output_value, dtype=dtypes.float32, shape=shape)
    output_grad = constant_op.constant(
        output_grad, dtype=dtypes.float32, shape=shape)
    shape = tensor_shape.as_shape(shape)
    # There are different code paths for ragged_to_dense, depending on whether
    # the RaggedTensor was created from row_splits or value_rowids. Make sure
    # that we test both.
    for partition_type in ['row_splits', 'value_rowids']:
      # There are different code paths when computing the gradient for
      # default_value, depending on whether shape info is statically available;
      # make sure that we test all code paths.
      for shape_info in ['known', 'unknown_dims', 'unknown_rank']:
        rt_val = self.rt_with_partition_type(rt_value, partition_type)
        rt_val = self.wrap_in_placeholder(rt_val, shape_info)
        default_val = self.wrap_in_placeholder(default_value, shape_info)
        shape_val = self.wrap_in_placeholder(shape, shape_info)
        out = rt_val.to_tensor(default_val, shape=shape_val)
        self.assertAllClose(out, output_value)
        actual_flat_values_grad, actual_default_grad = gradients_impl.gradients(
            ys=out,
            xs=(rt_value.flat_values, default_value),
            grad_ys=output_grad)
        # The flat_values gradient comes back as IndexedSlices; densify it so
        # it can be compared against the expected ragged gradient.
        self.assertIsInstance(actual_flat_values_grad,
                              indexed_slices.IndexedSlices)
        actual_flat_values_grad = ops.convert_to_tensor(actual_flat_values_grad)
        actual_values_grad = rt_value.with_flat_values(actual_flat_values_grad)
        self.assertAllClose(actual_values_grad, rt_grad)
        self.assertAllClose(actual_default_grad, default_grad)
def rt_with_partition_type(self, rt, partition_type):
if isinstance(rt, ops.Tensor):
return rt
if partition_type == 'row_splits':
return rt
if partition_type == 'value_rowids':
return ragged_tensor.RaggedTensor.from_value_rowids(
self.rt_with_partition_type(rt.values, partition_type),
rt.value_rowids(), rt.nrows())
raise AssertionError('Unexpected partition_type %r' % partition_type)
def wrap_in_placeholder(self, arg, shape_info):
"""Wraps `arg` in a placeholder to limit static shape info.
Args:
arg: The value to wrap. A Tensor, RaggedTensor, or TensorShape.
shape_info: One of ['known', 'unknown_dims', 'unknown_rank'].
Returns:
* If shape_info is 'known': returns `arg`.
* If shape_info is 'unknown_dims': returns a placeholder wrapping `arg`
where the dimension sizes are unknown. If `arg` is a TensorShape,
then convert it to a vector first. If `arg` is a RaggedTensor, then
wrap the flat_values.
* If shape_info is 'unknown_rank': returns a placeholder wrapping `arg`
where the rank is unknown. If `arg` is a TensorShape, then convert it
to a vector first. If `arg` is a RaggedTensor, then wrap the
flat_values.
"""
if shape_info == 'known':
return arg
if isinstance(arg, ragged_tensor.RaggedTensor):
return arg.with_flat_values(
self.wrap_in_placeholder(arg.flat_values, shape_info))
if isinstance(arg, tensor_shape.TensorShape):
if arg.ndims is None:
return arg
arg = constant_op.constant(arg.as_list())
if shape_info == 'unknown_rank':
return array_ops.placeholder_with_default(arg, None)
if shape_info == 'unknown_dims':
return array_ops.placeholder_with_default(arg, [None] * arg.shape.rank)
raise AssertionError('Unexpected shape_info %r' % shape_info)
class RaggedToDenseBenchmark(googletest.Benchmark):
  """Benchmarks `RaggedTensor.to_tensor` over a range of input shapes."""

  # Configurations to test. See `run_benchmark` for config param docs.
  CONFIGS = [
      {'shape': [10, 10]},
      {'shape': [10, 1000]},
      {'shape': [1000, 10]},
      {'shape': [1000, 10], 'fill': [1, 0.95]},  # Mostly full.
      {'shape': [1000, 10], 'fill': [1, 0.05]},  # Mostly empty.
      {'shape': [1000, 10], 'dtype': dtypes.string},
      {'shape': [1000, 10], 'dtype': dtypes.int64},
      {'shape': [100, 100]},
      {'shape': [50, 50, 32]},
      {'shape': [100, 100, 100], 'min_iters': 100},
      {'shape': [1000, 1000], 'min_iters': 100},
      {'shape': [10, 10, 10, 10, 10]},
      {'shape': [10, 10, 10, 10, 10], 'ragged_rank': 1},
      {'shape': [10, 10, 10, 10, 10], 'ragged_rank': 2},
      {'shape': [50, 50, 32], 'ragged_rank': 1, 'default_shape': [32]},
      {'shape': [200, 50, 32], 'ragged_rank': 1, 'default_shape': [32]}
  ]  # pyformat: disable

  def run_benchmark(self,
                    shape=(100, 100),
                    ragged_rank=None,
                    dtype=dtypes.float32,
                    fill=None,
                    default_shape=(),
                    output_shape=None,
                    min_iters=1000):
    """Run a benchmark with the specified configuration parameters.

    Args:
      shape: Bounding box for the input ragged tensor.
      ragged_rank: Ragged rank for the input ragged tensor. Defaults to
        `len(shape)-1`.
      dtype: Data type for the input ragged tensor.
      fill: How full each dimension should be (0-1). Corresponds 1:1 with
        `shape`. Defaults to 0.8 for each dimension.
      default_shape: Shape for the default (padding) value.
      output_shape: Output shape -- ragged tensor will be padded or cropped to
        this shape.
      min_iters: Minimum iterations for benchmark.
    """
    # NOTE(review): `output_shape` is accepted but never used below — confirm
    # whether it was meant to be forwarded to `to_tensor(shape=...)`.
    if ragged_rank is None:
      ragged_rank = len(shape) - 1
    if fill is None:
      fill = [0.8 for _ in shape]
    # Build the inputs for the op.
    rt_input = self._generateRaggedTensor(shape, ragged_rank, dtype, fill)
    default_value = constant_op.constant(
        self._generateRaggedTensor(default_shape, 0, dtype), dtype=dtype)
    # Approximate input size in MiB (element count / 2**20) for reporting.
    mbs = np.prod(shape) / (2**20)
    with session.Session(config=benchmark.benchmark_config()) as sess:
      extras = {
          'shape': shape,
          'ragged_rank': ragged_rank,
          'dtype': dtype,
          'fill': fill,
          'default_shape': default_shape
      }
      rt = ragged_factory_ops.constant(rt_input, dtype, ragged_rank=ragged_rank)
      # Inputs for with_splits:
      splits_rt_placeholder = ragged_factory_ops.placeholder(
          dtype, ragged_rank, shape[ragged_rank + 1:])
      splits_feed_dict = {splits_rt_placeholder: sess.run(rt)}
      # Inputs for with_rowids:
      rowids_feed_dict = {}
      rowids_rt_placeholder = rebuild_ragged_tensor_with_value_rowids(
          rt, rowids_feed_dict, sess)
      # Common arguments for benchmarks:
      run_op_benchmark_kwargs = dict(
          sess=sess,
          store_memory_usage=True,
          min_iters=min_iters,
          burn_iters=max(5, min_iters // 10),
          mbs=mbs,
          extras=extras)
      # Benchmark the row_splits-encoded input.
      ragged_to_tensor_with_splits = splits_rt_placeholder.to_tensor(
          default_value=default_value)
      self.run_op_benchmark(
          op_or_tensor=ragged_to_tensor_with_splits.op,
          name='ragged_to_tensor_with_splits',
          feed_dict=splits_feed_dict,
          **run_op_benchmark_kwargs)
      # Benchmark the value_rowids-encoded input.
      ragged_to_tensor_with_rowids = rowids_rt_placeholder.to_tensor(
          default_value=default_value)
      self.run_op_benchmark(
          op_or_tensor=ragged_to_tensor_with_rowids.op,
          name='ragged_to_tensor_with_rowids',
          feed_dict=rowids_feed_dict,
          **run_op_benchmark_kwargs)

  def _generateRaggedTensor(self, shape, ragged_rank, dtype, fill=None, axis=0):
    """Recursively builds a nested Python list with random leaf values."""
    if axis == len(shape):
      # Leaf level: produce a single random scalar of the requested dtype.
      value = random.random()
      if dtype == dtypes.string:
        value = str(value)
      if dtype.is_integer:
        value = int(value * 1000)
      return value
    if axis == 0 or axis > ragged_rank:
      # Uniform (non-ragged) dimension: always use the full size.
      slice_size = shape[axis]
    else:
      # Ragged dimension: draw the row length from the fill factor.
      # NOTE(review): `fill` must be non-None whenever 0 < axis <= ragged_rank.
      slice_size = (np.random.geometric(fill[axis], shape[axis]) == 1).sum()
    return [
        self._generateRaggedTensor(shape, ragged_rank, dtype, fill, axis + 1)
        for _ in range(slice_size)
    ]

  def benchmark_ragged_to_dense(self):
    random.seed(5)  # Deterministic inputs across benchmark runs.
    for config in self.CONFIGS:
      self.run_benchmark(**config)
if __name__ == '__main__':
  # Standard TensorFlow test entry point.
  googletest.main()
|
|
import json
import pytest
import krisk.plot as kk
import numpy as np
DATA_DIR = 'krisk/tests/data'


def read_option_tests(f):
    """Load a saved echarts option fixture (JSON) from DATA_DIR.

    Args:
        f: Fixture file name, relative to DATA_DIR.

    Returns:
        The deserialized JSON object.
    """
    # A `def` with a context manager replaces the original lambda, which
    # violated PEP 8 (E731) and leaked the file handle until GC.
    with open(DATA_DIR + '/' + f, 'r') as fp:
        return json.load(fp)
def assert_barline_data(plot, true_option, test_legend=True):
    """Compare a bar/line chart against a stored option fixture.

    Checks the first series, the x-axis categories, and (optionally) the
    legend entries.
    """
    actual = plot.option
    assert actual['series'][0] == true_option['series'][0]
    assert actual['xAxis']['data'] == true_option['xAxis']['data']
    if test_legend:
        assert actual['legend']['data'] == true_option['legend']['data']
def assert_scatter_data(plot, true_option):
    """Compare a scatter chart's first-series data and both axes."""
    actual = plot.option
    assert actual['series'][0]['data'] == true_option['series'][0]['data']
    assert actual['xAxis'] == true_option['xAxis']
    assert actual['yAxis'] == true_option['yAxis']
def test_bar(gapminder):
    """Bar charts: stacked+annotated, category-only, annotate='all', and a
    plain aggregated bar with an empty legend."""
    # Bar
    p1 = kk.bar(gapminder,
                'year',
                y='pop',
                c='continent',
                how='mean',
                stacked=True,
                annotate=True)
    opt1 = read_option_tests('bar.json')
    assert_barline_data(p1, opt1)
    # Bar with x-axis and category
    p2 = kk.bar(gapminder, 'year', c='continent', stacked=True)
    opt2 = read_option_tests('bar_x_c.json')
    assert_barline_data(p2, opt2)
    # Bar Annotate All
    p3 = kk.bar(gapminder,
                'year',
                y='pop',
                c='continent',
                how='mean',
                stacked=True,
                annotate='all')
    # Fixture name normalized: the old '/bar_ann_all.json' produced a double
    # slash ('krisk/tests/data//...') when joined with DATA_DIR, inconsistent
    # with every other fixture reference in this file.
    opt3 = read_option_tests('bar_ann_all.json')
    assert_barline_data(p3, opt3)
    # Aggregated bar without a category: legend is empty, so skip it.
    p4 = kk.bar(gapminder, 'continent', y='gdpPercap', how='mean')
    opt4 = {'legend': {'data': []},
            'series': [{'data': [4426.026, 8955.554, 802.675,
                                 3255.367, 19980.596],
                        'name': 'continent',
                        'type': 'bar'}],
            'title': {'text': ''},
            'tooltip': {'axisPointer': {'type': ''}},
            'xAxis': {'data': ['Africa', 'Americas', 'Asia',
                               'Europe', 'Oceania']},
            'yAxis': {}}
    assert_barline_data(p4, opt4, test_legend=False)
def test_trendline(gapminder):
    """Trendline overlay on aggregated bar charts."""
    p1 = kk.bar(gapminder, 'year', how='mean', y='pop', trendline=True)
    opt1 = read_option_tests('bar_year_pop_mean_trendline.json')
    assert_barline_data(p1, opt1, test_legend=False)
    # The trendline is appended as the last series: a black line.
    assert p1.option['series'][-1]['data'] == opt1['series'][-1]['data']
    assert p1.option['series'][-1]['name'] == 'trendline'
    assert p1.option['series'][-1]['type'] == 'line'
    assert p1.option['series'][-1]['lineStyle'] == {'normal': {'color': '#000'}}
    p2 = kk.bar(gapminder, 'year', how='mean', y='pop', trendline=True,
                c='continent', stacked=True)
    opt2 = read_option_tests('bar_year_pop_mean_continent_trendline.json')
    assert_barline_data(p2, opt2)
    assert p2.option['series'][-1]['data'] == opt2['series'][-1]['data']
    # trendline with a category but without stacked=True must raise.
    # (The previous try/except-pass silently succeeded even when no error
    # was raised; pytest.raises actually enforces it.)
    with pytest.raises(ValueError):
        kk.bar(gapminder, 'year', how='mean', y='pop', trendline=True,
               c='continent')
def test_line(gapminder):
    """Stacked area line chart with full annotation."""
    chart = kk.line(gapminder, 'year', y='lifeExp', c='continent',
                    how='mean', stacked=True, area=True, annotate='all')
    expected = read_option_tests('line.json')
    assert_barline_data(chart, expected)
    tooltip = chart.option['tooltip']
    assert tooltip['axisPointer']['type'] == 'shadow'
    assert tooltip['trigger'] == 'axis'
def test_smooth_line(gapminder):
    """smooth=True is forwarded into the generated line series."""
    single_year = gapminder[gapminder.year == 1952]
    chart = kk.line(single_year, 'continent', y='pop', how='mean', smooth=True)
    assert chart.option['series'][0]['smooth'] == True
def test_full_bar_line(gapminder):
    """full=True bar and line charts share the same normalized data."""
    shared_kwargs = dict(c='continent', y='pop', how='mean',
                         stacked=True, full=True, annotate='all')
    bar = kk.bar(gapminder, 'year', **shared_kwargs)
    line = kk.line(gapminder, 'year', **shared_kwargs)
    # Strip the keys that legitimately differ between the two chart types
    # ('type' and the annotation 'label') before comparing both against the
    # shared fixture.
    for idx in range(len(bar.option['series'])):
        for key in ('type', 'label'):
            bar.option['series'][idx].pop(key)
            line.option['series'][idx].pop(key)
    true_option = read_option_tests('full_bar_line.json')
    assert_barline_data(bar, true_option)
    assert_barline_data(line, true_option)
def test_sort_bar_line(gapminder):
    """Line chart with sort_on/sort_c_on: axis and legend order preserved."""
    p = kk.line(gapminder,'year', y='pop', how='mean',c='continent',
                sort_on= np.mean ,sort_c_on='Americas')
    assert p.option['xAxis']['data'] == [1952, 1957, 1962, 1967, 1972, 1977,
                                         1982, 1987, 1992, 1997, 2002, 2007]
    assert p.option['legend']['data'] == ['Africa', 'Americas', 'Asia',
                                          'Europe', 'Oceania']
    # NOTE(review): the series values straddle zero — presumably
    # sort_on=np.mean with sort_c_on='Americas' centers each series; confirm
    # against krisk's sort_c_on semantics before relying on this.
    assert p.option['series'][0] == {'data': [-10595881.167,
                                              -9604550.167,
                                              -8874458.167,
                                              -7114907.167,
                                              -5114619.167,
                                              -2722602.167,
                                              158346.833,
                                              3379549.833,
                                              6422966.833,
                                              9196608.833,
                                              11411735.833,
                                              13457809.833],
                                     'name': 'Africa',
                                     'type': 'line'}
def test_hist(gapminder):
    """Histograms: a simple one, and a grouped/normalized/stacked one."""
    simple = kk.hist(gapminder, 'lifeExp', bins=10)
    assert_barline_data(simple, read_option_tests('hist_x.json'))
    grouped = kk.hist(gapminder,
                      'lifeExp',
                      c='continent',
                      bins=20,
                      normed=True,
                      stacked=True)
    assert_barline_data(grouped, read_option_tests('hist.json'))
def test_density(gapminder):
    """Density curve overlay on histograms."""
    p1 = kk.hist(gapminder, 'lifeExp', density=True)
    assert p1.option['series'][0]['data'] == [0, 4, 2, 7, 2, 2, 3, 5, 13, 16, 6]
    # The density curve is appended last: a smooth black line whose data is
    # the histogram counts padded with zeros at both ends.
    assert p1.option['series'][-1] == {'data': [0, 4, 2, 7, 2, 2,
                                                3, 5, 13, 16, 6, 0],
                                       'lineStyle': {'normal': {'color': '#000'}},
                                       'name': 'density',
                                       'smooth': True,
                                       'type': 'line'}
    assert p1.option['xAxis']['boundaryGap'] == False
    assert p1.option['xAxis']['data'] == [0, 28, 34, 39, 44, 49, 55,
                                          60, 65, 70, 75, 81, 0]
    p2 = kk.hist(gapminder, 'lifeExp', bins=10, c='continent',
                 stacked=True, density=True)
    opt2 = read_option_tests('hist_lifeExp_b10_continent_density.json')
    assert_barline_data(p2, opt2)
    # density with a category but without stacked=True must raise.
    # (The previous try/except-pass silently succeeded even when no error
    # was raised; pytest.raises actually enforces it.)
    with pytest.raises(ValueError):
        kk.hist(gapminder, 'year', density=True, c='continent')
def test_scatter(gapminder):
    """Scatter charts: simple, grouped with size/color, and sized-only."""
    year_1952 = gapminder[gapminder.year == 1952]
    # Simple scatter.
    p1 = kk.scatter(year_1952, 'pop', 'lifeExp')
    assert_scatter_data(p1, read_option_tests('simple_scatter.json'))
    assert p1.option['title'] == {'text': ''}
    assert p1.option['tooltip'] == {'axisPointer': {'type': 'line'},
                                    'fontFamily': 'sans-serif',
                                    'fontSize': 14,
                                    'fontStyle': 'normal',
                                    'trigger': 'item',
                                    'triggerOn': 'mousemove'}
    # Grouped scatter: point size from 'pop', one series per continent.
    p2 = kk.scatter(year_1952, 'lifeExp', 'gdpPercap', s='pop', c='continent')
    opt2 = read_option_tests('scatter.json')
    assert_scatter_data(p2, opt2)
    assert p2.option['series'][0]['name'] == 'Africa'
    assert p2.option['series'][0]['type'] == 'scatter'
    assert p2.option['visualMap'][0] == opt2['visualMap'][0]
    # Scatter with point size only.
    p3 = kk.scatter(year_1952, 'lifeExp', 'gdpPercap', s='pop')
    assert_scatter_data(p3, read_option_tests('scatter_single.json'))
def test_bar_line(gapminder):
    """Combined bar+line chart with the line on a secondary y-axis."""
    p1 = kk.bar_line(gapminder, 'continent', 'lifeExp', 'gdpPercap')
    # First series: bar of the lifeExp values per continent.
    assert p1.option['series'][0] == {'data': [59.03, 69.06, 37.479,
                                               68.433, 74.663],
                                      'name': 'lifeExp',
                                      'type': 'bar'}
    # Last series: line of gdpPercap, attached to the secondary y-axis.
    assert p1.option['series'][-1] == {'data': [4426.026, 8955.554, 802.675,
                                                3255.367, 19980.596],
                                       'name': 'gdpPercap',
                                       'type': 'line',
                                       'yAxisIndex': 1}
    assert p1.option['xAxis']['data'] == ['Africa', 'Americas', 'Asia',
                                          'Europe', 'Oceania']
    # NOTE(review): is_distinct=True yields different (smaller) values —
    # presumably distinct/first rather than aggregated values; confirm
    # against krisk's bar_line documentation.
    p2 = kk.bar_line(gapminder, 'continent', 'lifeExp', 'gdpPercap',
                     is_distinct=True)
    assert p2.option['series'][0]['data'] == [43.077, 62.485, 28.801,
                                              55.23, 69.12]
    assert p2.option['series'][-1]['data'] == [2449.008, 5911.315, 779.445,
                                               1601.056, 10039.596]
    assert p2.option['xAxis']['data'] == ['Africa', 'Americas', 'Asia',
                                          'Europe', 'Oceania']
def test_waterfall():
    """Waterfall chart built from a seeded random series."""
    np.random.seed(0)
    import pandas as pd
    df = pd.DataFrame({'val': -1 + 10 * np.random.randn(10)})
    p1 = kk.waterfall(df['val'])
    # NOTE(review): this *assigns* the tooltip formatter rather than asserting
    # it — it looks like it was meant to be a comparison (== instead of =);
    # confirm the intended formatter text before changing.
    p1.option['tooltip']['formatter'] = """function (params) {
    var tar;
    if (params[1].value != '-') {
        tar = params[1];
    }
    else {
        tar = params[2];
    }
    return tar.name + '<br/>' + tar.seriesName + ' : ' + tar.value;
}"""
    assert p1.option['tooltip']['axisPointer'] == {'type': 'shadow'}
    assert p1.option['series'][0]['name'] == ''
    # Series 0 is the invisible "base" bar that offsets each step.
    p1_data_invis = p1.option['series'][0].pop('data')
    assert p1_data_invis == [0.0, 16.641, 19.642, 28.429, 49.838, 56.741,
                             56.741, 62.729, 60.696, 60.696]
    assert p1.option['series'][0] == {
        'name': '',
        'type': 'bar',
        'stack': 'stack',
        "itemStyle": {
            "normal": {
                "barBorderColor": 'rgba(0,0,0,0)',
                "color": 'rgba(0,0,0,0)'
            },
            "emphasis": {
                "barBorderColor": 'rgba(0,0,0,0)',
                "color": 'rgba(0,0,0,0)'
            }
        }}
    # Series 1 carries the visible per-step magnitudes.
    p1_data_val = p1.option['series'][1].pop('data')
    assert p1_data_val == [16.641, 3.002, 8.787, 21.409, 17.676, 10.773, 8.501,
                           2.514, 2.032, 3.106]
    assert p1.option['series'][1] == {'name': 'val',
                                      'stack': 'stack',
                                      'type': 'bar'}
    # color_coded=True splits positives and negatives into separate series,
    # using '-' as the echarts placeholder for absent values.
    p2 = kk.waterfall(df['val'], color_coded=True,
                      annotate="outside", up_name="up")
    p2_data_pos_val = p2.option['series'][1].pop('data')
    p2_data_neg_val = p2.option['series'][2].pop('data')
    assert p2_data_pos_val == [16.641, 3.002, 8.787, 21.409,
                               17.676, '-', 8.501, '-', '-', 3.106]
    assert p2_data_neg_val == ['-', '-', '-', '-', '-',
                               10.773, '-', 2.514, 2.032, '-']
    pos_series = p2.option['series'][1]
    neg_series = p2.option['series'][2]
    assert pos_series == {
        'label': {'normal': {'position': 'top', 'show': True}},
        'name': 'up', 'stack': 'stack', 'type': 'bar'
    }
    assert neg_series == {
        'label': {'normal': {'position': 'bottom', 'show': True}},
        'name': 'negative', 'stack': 'stack', 'type': 'bar'
    }
def test_tidy_plots(gapminder):
    """line_tidy / bar_tidy: plotting an already-pivoted (tidy) frame."""
    df1 = gapminder.pivot_table(values='lifeExp', index='year',
                                columns='continent', aggfunc='mean')
    p1_opt = kk.line_tidy(df1).option
    p2_opt = kk.bar_tidy(df1).option
    # Both chart types share the same x-axis: the pivot index, stringified.
    assert (p1_opt['xAxis']['data'] ==
            p2_opt['xAxis']['data'] ==
            df1.index.astype(str).tolist())
    # First series matches the first pivot column, rounded to 3 decimals.
    assert (p1_opt['series'][0]['data'] ==
            p2_opt['series'][0]['data'] ==
            df1.iloc[:,0].values.round(3).tolist())
    assert [e['name'] for e in p1_opt['series']] == p1_opt['legend']['data']
    assert [e['name'] for e in p2_opt['series']] == p2_opt['legend']['data']
    p3_opt = kk.bar_tidy(df1, stacked=True, trendline=True,
                         annotate=True).option
    # test trendline
    assert p3_opt['series'][-1]['data'] == [0] * df1.shape[0]
    assert p3_opt['series'][-1]['name'] == 'trendline'
    # test annotate
    assert p3_opt['series'][-2]['label'] == {'normal': {'position': 'top',
                                                        'show': True}}
    # test stacked
    assert ([e['stack']for e in p3_opt['series']] ==
            ['unnamed'] * (df1.shape[1] + 1))
    p4_opt = kk.line_tidy(df1,
                          area=True, full=True,
                          smooth=True, stacked=True).option
    assert ([e['smooth'] for e in p4_opt['series']] ==
            len(p4_opt['legend']['data']) * [True])
    # full=True normalizes each row to proportions; check the first column.
    africa_val = df1.div(df1.sum(1), axis=0).iloc[:,0].round(3).values.tolist()
    assert africa_val == p4_opt['series'][0]['data']
|
|
# Copyright 2013 OpenStack Foundation
# Copyright (C) 2013 Yahoo! Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import argparse
import json
import os
import subprocess
import tempfile
import testtools
from glanceclient import exc
from glanceclient import shell
import glanceclient.v1.client as client
import glanceclient.v1.images
import glanceclient.v1.shell as v1shell
from tests import utils
fixtures = {
'/v1/images/96d2c7e1-de4e-4612-8aa2-ba26610c804e': {
'PUT': (
{
'Location': 'http://fakeaddress.com:9292/v1/images/'
'96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'Etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'X-Openstack-Request-Id':
'req-b645039d-e1c7-43e5-b27b-2d18a173c42b',
'Date': 'Mon, 29 Apr 2013 10:24:32 GMT'
},
json.dumps({
'image': {
'status': 'active', 'name': 'testimagerename',
'deleted': False,
'container_format': 'ami',
'created_at': '2013-04-25T15:47:43',
'disk_format': 'ami',
'updated_at': '2013-04-29T10:24:32',
'id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'min_disk': 0,
'protected': False,
'min_ram': 0,
'checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'owner': '1310db0cce8f40b0987a5acbe139765a',
'is_public': True,
'deleted_at': None,
'properties': {
'kernel_id': '1b108400-65d8-4762-9ea4-1bf6c7be7568',
'ramdisk_id': 'b759bee9-0669-4394-a05c-fa2529b1c114'
},
'size': 25165824
}
})
),
'HEAD': (
{
'x-image-meta-id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-status': 'active'
},
None
),
'GET': (
{
'x-image-meta-status': 'active',
'x-image-meta-owner': '1310db0cce8f40b0987a5acbe139765a',
'x-image-meta-name': 'cirros-0.3.1-x86_64-uec',
'x-image-meta-container_format': 'ami',
'x-image-meta-created_at': '2013-04-25T15:47:43',
'etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'location': 'http://fakeaddress.com:9292/v1/images/'
'96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-min_ram': '0',
'x-image-meta-updated_at': '2013-04-25T15:47:43',
'x-image-meta-id': '96d2c7e1-de4e-4612-8aa2-ba26610c804e',
'x-image-meta-property-ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114',
'date': 'Mon, 29 Apr 2013 09:25:17 GMT',
'x-image-meta-property-kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'x-openstack-request-id':
'req-842735bf-77e8-44a7-bfd1-7d95c52cec7f',
'x-image-meta-deleted': 'False',
'x-image-meta-checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'x-image-meta-protected': 'False',
'x-image-meta-min_disk': '0',
'x-image-meta-size': '25165824',
'x-image-meta-is_public': 'True',
'content-type': 'text/html; charset=UTF-8',
'x-image-meta-disk_format': 'ami',
},
None
)
},
'/v1/images/44d2c7e1-de4e-4612-8aa2-ba26610c444f': {
'PUT': (
{
'Location': 'http://fakeaddress.com:9292/v1/images/'
'44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'Etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'X-Openstack-Request-Id':
'req-b645039d-e1c7-43e5-b27b-2d18a173c42b',
'Date': 'Mon, 29 Apr 2013 10:24:32 GMT'
},
json.dumps({
'image': {
'status': 'queued', 'name': 'testimagerename',
'deleted': False,
'container_format': 'ami',
'created_at': '2013-04-25T15:47:43',
'disk_format': 'ami',
'updated_at': '2013-04-29T10:24:32',
'id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'min_disk': 0,
'protected': False,
'min_ram': 0,
'checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'owner': '1310db0cce8f40b0987a5acbe139765a',
'is_public': True,
'deleted_at': None,
'properties': {
'kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114'
},
'size': 25165824
}
})
),
'HEAD': (
{
'x-image-meta-id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-status': 'queued'
},
None
),
'GET': (
{
'x-image-meta-status': 'queued',
'x-image-meta-owner': '1310db0cce8f40b0987a5acbe139765a',
'x-image-meta-name': 'cirros-0.3.1-x86_64-uec',
'x-image-meta-container_format': 'ami',
'x-image-meta-created_at': '2013-04-25T15:47:43',
'etag': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'location': 'http://fakeaddress.com:9292/v1/images/'
'44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-min_ram': '0',
'x-image-meta-updated_at': '2013-04-25T15:47:43',
'x-image-meta-id': '44d2c7e1-de4e-4612-8aa2-ba26610c444f',
'x-image-meta-property-ramdisk_id':
'b759bee9-0669-4394-a05c-fa2529b1c114',
'date': 'Mon, 29 Apr 2013 09:25:17 GMT',
'x-image-meta-property-kernel_id':
'1b108400-65d8-4762-9ea4-1bf6c7be7568',
'x-openstack-request-id':
'req-842735bf-77e8-44a7-bfd1-7d95c52cec7f',
'x-image-meta-deleted': 'False',
'x-image-meta-checksum': 'f8a2eeee2dc65b3d9b6e63678955bd83',
'x-image-meta-protected': 'False',
'x-image-meta-min_disk': '0',
'x-image-meta-size': '25165824',
'x-image-meta-is_public': 'True',
'content-type': 'text/html; charset=UTF-8',
'x-image-meta-disk_format': 'ami',
},
None
)
}
}
class ShellInvalidEndpointTest(utils.TestCase):
    """Every CLI command should raise InvalidEndpoint when OS_IMAGE_URL
    points at an unreachable endpoint."""

    # Patch os.environ to avoid required auth info.
    def setUp(self):
        """Run before each test."""
        super(ShellInvalidEndpointTest, self).setUp()
        self.old_environment = os.environ.copy()
        os.environ = {
            'OS_USERNAME': 'username',
            'OS_PASSWORD': 'password',
            'OS_TENANT_ID': 'tenant_id',
            'OS_TOKEN_ID': 'test',
            'OS_AUTH_URL': 'http://127.0.0.1:5000/v2.0/',
            'OS_AUTH_TOKEN': 'pass',
            'OS_IMAGE_API_VERSION': '1',
            'OS_REGION_NAME': 'test',
            'OS_IMAGE_URL': 'http://is.invalid'}
        self.shell = shell.OpenStackImagesShell()

    def tearDown(self):
        """Restore the pre-test environment."""
        super(ShellInvalidEndpointTest, self).tearDown()
        os.environ = self.old_environment

    def run_command(self, cmd):
        # Dispatch a whitespace-separated CLI command line through the shell.
        self.shell.main(cmd.split())

    def assert_called(self, method, url, body=None, **kwargs):
        return self.shell.cs.assert_called(method, url, body, **kwargs)

    def assert_called_anytime(self, method, url, body=None):
        return self.shell.cs.assert_called_anytime(method, url, body)

    def test_image_list_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint, self.run_command, 'image-list')

    def test_image_details_invalid_endpoint_legacy(self):
        self.assertRaises(
            exc.InvalidEndpoint, self.run_command, 'details')

    def test_image_update_invalid_endpoint_legacy(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'update {"name":""test}')

    def test_image_index_invalid_endpoint_legacy(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'index')

    def test_image_create_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'image-create')

    def test_image_delete_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'image-delete <fake>')

    def test_image_download_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'image-download <fake>')

    def test_image_members_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'image-members fake_id')

    def test_members_list_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'member-list --image-id fake')

    def test_member_replace_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'members-replace image_id member_id')

    def test_image_show_invalid_endpoint_legacy(self):
        self.assertRaises(
            exc.InvalidEndpoint, self.run_command, 'show image')

    def test_image_show_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'image-show --human-readable <IMAGE_ID>')

    def test_member_images_invalid_endpoint_legacy(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command, 'member-images member_id')

    def test_member_create_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command,
            'member-create --can-share <IMAGE_ID> <TENANT_ID>')

    def test_member_delete_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command,
            'member-delete <IMAGE_ID> <TENANT_ID>')

    def test_member_add_invalid_endpoint(self):
        self.assertRaises(
            exc.InvalidEndpoint,
            self.run_command,
            'member-add <IMAGE_ID> <TENANT_ID>')
class ShellStdinHandlingTests(testtools.TestCase):
    """Verify when `do_image_update` reads image data from stdin."""

    def _fake_update_func(self, *args, **kwargs):
        """Stand-in for glanceclient.images.update.

        Records the call arguments so tests can inspect the parameters that
        would have been sent with the update request.
        """
        # Store passed in args
        self.collected_args = (args, kwargs)
        # Return the first arg, which is an image,
        # as do_image_update expects this.
        return args[0]

    def setUp(self):
        super(ShellStdinHandlingTests, self).setUp()
        self.api = utils.FakeAPI(fixtures)
        self.gc = client.Client("http://fakeaddress.com")
        self.gc.images = glanceclient.v1.images.ImageManager(self.api)
        # Store real stdin, so it can be restored in tearDown.
        self.real_sys_stdin_fd = os.dup(0)
        # Replace stdin with a FD that points to /dev/null.
        dev_null = open('/dev/null')
        self.dev_null_fd = dev_null.fileno()
        os.dup2(dev_null.fileno(), 0)
        # Replace the image update function with a fake,
        # so that we can tell if the data field was set correctly.
        self.real_update_func = self.gc.images.update
        self.collected_args = []
        self.gc.images.update = self._fake_update_func

    def tearDown(self):
        """Restore stdin and gc.images.update to their pretest states."""
        super(ShellStdinHandlingTests, self).tearDown()

        def try_close(fd):
            try:
                os.close(fd)
            except OSError:
                # Already closed
                pass

        # Restore stdin
        os.dup2(self.real_sys_stdin_fd, 0)
        # Close duplicate stdin handle
        try_close(self.real_sys_stdin_fd)
        # Close /dev/null handle
        try_close(self.dev_null_fd)
        # Restore the real image update function
        self.gc.images.update = self.real_update_func

    def _do_update(self, image='96d2c7e1-de4e-4612-8aa2-ba26610c804e'):
        """call v1/shell's do_image_update function."""
        v1shell.do_image_update(
            self.gc, argparse.Namespace(
                image=image,
                name='testimagerename',
                property={},
                purge_props=False,
                human_readable=False,
                file=None,
                progress=False
            )
        )

    def test_image_update_closed_stdin(self):
        """Supply glanceclient with a closed stdin, and perform an image
        update to an active image. Glanceclient should not attempt to read
        stdin.
        """
        # NOTE(hughsaunders) Close stdin, which is repointed to /dev/null by
        # setUp()
        os.close(0)
        self._do_update()
        self.assertTrue(
            'data' not in self.collected_args[1]
            or self.collected_args[1]['data'] is None
        )

    def test_image_update_data_is_read_from_file(self):
        """Ensure that data is read from a file."""
        try:
            # NOTE(hughsaunders) Create a tmpfile, write some data to it and
            # set it as stdin.  NamedTemporaryFile replaces the race-prone,
            # deprecated tempfile.mktemp()+open() combination: the file is
            # created atomically.  delete=False so the explicit cleanup in
            # the finally block owns removal.
            f = tempfile.NamedTemporaryFile(mode='w+', delete=False)
            f.write('Some Data')
            f.flush()
            f.seek(0)
            os.dup2(f.fileno(), 0)
            self._do_update('44d2c7e1-de4e-4612-8aa2-ba26610c444f')
            self.assertTrue('data' in self.collected_args[1])
            self.assertIsInstance(self.collected_args[1]['data'], file)
            self.assertEqual(self.collected_args[1]['data'].read(),
                             'Some Data')
        finally:
            try:
                f.close()
                os.remove(f.name)
            except Exception:
                pass

    def test_image_update_data_is_read_from_pipe(self):
        """Ensure that data is read from a pipe."""
        try:
            # NOTE(hughsaunders): Setup a pipe, duplicate it to stdin
            # ensure it is read.
            process = subprocess.Popen(['/bin/echo', 'Some Data'],
                                       stdout=subprocess.PIPE)
            os.dup2(process.stdout.fileno(), 0)
            self._do_update('44d2c7e1-de4e-4612-8aa2-ba26610c444f')
            self.assertTrue('data' in self.collected_args[1])
            self.assertIsInstance(self.collected_args[1]['data'], file)
            self.assertEqual(self.collected_args[1]['data'].read(),
                             'Some Data\n')
        finally:
            try:
                process.stdout.close()
            except OSError:
                pass
|
|
#file calibration.py
#calibration with a convolutional neural net
import os
import cv2
import numpy as np
import Tkinter as tk
import time
from random import randint
import tensorflow as tf
import openface
import dlib
from six.moves import cPickle
printing=False
def scale(img):
    """Convert a BGR frame to an equalized grayscale image in [-1, 1]."""
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Spread intensities over the full range before normalizing.
    equalized = cv2.equalizeHist(gray)
    # Map 0..255 onto [-1, 1].
    return (np.float32(equalized) / 255. - .5) * 2.
def calibrate(sess, optimizer, dur, cam, alib, face_dim, X, Y, F, x_tf, y_tf, screen_width = None, screen_height = None, x = None, y = None, pred = None):
    """Run training steps for `dur` seconds on webcam frames.

    Grabs frames from `cam`, aligns the largest detected face with `alib`,
    and runs `optimizer` with the face crop (X), bounding box (F) and the
    normalized target gaze point (Y = [[x_tf, y_tf]]).  Frames without a
    detected face are skipped.  The trailing optional arguments are only
    used by the commented-out debug block below.
    """
    #capture for `dur` seconds
    t_end = time.time() + dur
    while time.time() < t_end:
        ret, frame = cam.read()
        face_box = alib.getLargestFaceBoundingBox(frame)
        #Display the resulting frame
        if face_box is not None:
            top_left = (face_box.left(), face_box.top())
            bot_right = (face_box.right(), face_box.bottom())
            aligned_face = alib.align(face_dim, frame)
            cv2.imshow('aligned', aligned_face)
            #rescale and center from 0-255 to [-1,1]
            gray_face = scale(aligned_face)
            gray_face = [np.reshape(gray_face, (face_dim, face_dim, 1))]
            f = [[face_box.left(), face_box.top(), face_box.right(), face_box.bottom()]]
            #Now go train!
            sess.run(optimizer, feed_dict={X: gray_face, F: f, Y: [[x_tf,y_tf]]})
    """
    if pred is not None and x is not None and y is not None and screen_width is not None and screen_height is not None:
        p = sess.run(pred, feed_dict={X: gray_face, F: f})
        print 'On cal reference: %d -> %f, %d -> %f' % (x,x_tf,y,y_tf)
        print 'On cal actual: %f -> %d, %f -> %d' % (p[0][0],p[0][0]*screen_width/2.+screen_width/2.,p[0][1],p[0][1]*screen_height/2.+screen_height/2.)
    """
def test(sess, pred, cam, alib, face_dim, X, F, screen_width, screen_height, x_tf, y_tf, x, y):
    """Grab one frame, run the `pred` op on the aligned face and print
    reference vs. predicted gaze coordinates.

    Returns the raw prediction (shape (1, 2)) or None if no face was found.
    """
    ret, frame = cam.read()
    face_box = alib.getLargestFaceBoundingBox(frame)
    #Display the resulting frame
    if face_box is not None:
        top_left = (face_box.left(), face_box.top())
        bot_right = (face_box.right(), face_box.bottom())
        aligned_face = alib.align(face_dim, frame)
        #rescale and center from 0-255 to [-1,1]
        gray_face = np.reshape(scale(aligned_face), (-1, face_dim, face_dim, 1))
        f = [[face_box.left(), face_box.top(), face_box.right(), face_box.bottom()]]
        p = sess.run(pred, feed_dict={X: gray_face, F: f})
        print 'Reference: %d -> %f, %d -> %f' % (x,x_tf,y,y_tf)
        print 'Actual: %f -> %d, %f -> %d' % (p[0][0],p[0][0]*screen_width/2.+screen_width/2.,p[0][1],p[0][1]*screen_height/2.+screen_height/2.)
        return p
def conv2d(x, W, b, strides=1):
    """2-D convolution ('SAME' padding) followed by bias add and ReLU."""
    conv = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')
    biased = tf.nn.bias_add(conv, b)
    return tf.nn.relu(biased)
def maxpool2d(x, k=2):
    """k x k max pooling with matching stride and 'SAME' padding."""
    window = [1, k, k, 1]
    return tf.nn.max_pool(x, ksize=window, strides=window, padding='SAME')
def mlp(x, f, weights, biases, conv_drop, hidden_drop, face_dim):
    """Convolutional gaze regressor.

    Three conv/pool/l2-normalize/dropout stages followed by one hidden
    dense layer and a linear output layer.

    Parameters
    ----------
    x : tensor of face crops, (batch, face_dim, face_dim, 1)
    f : tensor of face bounding-box features (currently unused; the concat
        below is commented out)
    weights, biases : dicts of tf.Variable ('wc1'..'wc3', 'wd1', 'out')
    conv_drop, hidden_drop : dropout keep-probabilities
    face_dim : side length of the square face crop

    Returns
    -------
    tensor (batch, n_out) of raw, unscaled predictions.
    """
    l1 = conv2d(x, weights['wc1'], biases['bc1'])
    l1 = maxpool2d(l1)
    l1 = tf.nn.l2_normalize(l1,0)
    l1 = tf.nn.dropout(l1, conv_drop)
    if printing: l1 = tf.Print(l1, [l1], 'l1: ')
    l2 = conv2d(l1, weights['wc2'], biases['bc2'])
    l2 = maxpool2d(l2)
    l2 = tf.nn.l2_normalize(l2,0)
    l2 = tf.nn.dropout(l2, conv_drop)
    if printing: l2 = tf.Print(l2, [l2], 'l2: ')
    l3 = conv2d(l2, weights['wc3'], biases['bc3'])
    l3 = maxpool2d(l3)
    l3 = tf.reshape(l3, [-1, weights['wd1'].get_shape().as_list()[0]]) #reshape to (?, 2048)
    l3 = tf.nn.l2_normalize(l3,0)
    l3 = tf.nn.dropout(l3, conv_drop)
    if printing: l3 = tf.Print(l3, [l3], 'l3: ')
    l4 = tf.add(tf.matmul(l3, weights['wd1']), biases['bd1'])
    l4 = tf.nn.relu(l4)
    l4 = tf.nn.l2_normalize(l4,0)
    l4 = tf.nn.dropout(l4, hidden_drop)
    # BUG FIX: this debug Print node used to be assigned to `out`, which the
    # unconditional `out = l4` below immediately overwrote, so the l4 print
    # could never fire.  Wrap l4 itself instead (no-op while printing=False).
    if printing: l4 = tf.Print(l4, [l4], 'l4: ')
    #append the bounding box locations
    #out = tf.concat(1, [l4, f])
    out = l4
    out = tf.add(tf.matmul(out, weights['out']), biases['out'])
    if printing: out = tf.Print(out, [out], 'out: ')
    return out
def mlp_dense(x, f, weights, biases, hidden_drop, face_dim):
    """Fully connected gaze regressor.

    The idea here is that the first two layers try to backproject the face
    image and the final (linear) layer maps the 512-dim hidden state to the
    gaze output.

    Parameters
    ----------
    x : tensor of face crops, reshaped internally to (batch, face_dim**2)
    f : tensor of face bounding-box features; currently UNUSED (see note)
    weights, biases : dicts of tf.Variable ('w1', 'w2', 'out')
    hidden_drop : dropout keep-probability for the hidden layers
    face_dim : side length of the square face crop

    Returns
    -------
    tensor (batch, n_out) of raw, unscaled predictions.
    """
    l1 = tf.reshape(x, [-1, face_dim**2]) #reshape to (?, face_dim**2)
    l1 = tf.add(tf.matmul(l1, weights['w1']), biases['b1'])
    l1 = tf.nn.l2_normalize(l1,0)
    l1 = tf.nn.relu(l1)
    l1 = tf.nn.dropout(l1, hidden_drop)
    l2 = tf.add(tf.matmul(l1, weights['w2']), biases['b2'])
    l2 = tf.nn.l2_normalize(l2,0)
    l2 = tf.nn.relu(l2)
    l2 = tf.nn.dropout(l2, hidden_drop)
    # BUG FIX: the original built `out = tf.concat(1, [l2, f])` here and then
    # immediately discarded it by computing the output from `l2`, so the
    # bounding-box features never reached the output layer.  Actually wiring
    # the concat in would not match weights['out'] (shaped [512, n_out]), so
    # the dead node is removed instead; `f` remains unused for now.
    out = tf.add(tf.matmul(l2, weights['out']), biases['out'])
    if printing: out = tf.Print(out, [out], 'out: ')
    return out
def main():
    """Interactive gaze-calibration demo.

    Shows a white dot, trains the network on webcam frames while the user
    looks at it, and overlays the predicted gaze point.  Loops until 'q'.
    """
    root = tk.Tk()
    #setup openface
    fileDir = os.path.dirname(os.path.realpath(__file__))
    modelDir = os.path.join(fileDir, '..', 'openface/models')
    dlibModelDir = os.path.join(modelDir, 'dlib')
    openfaceModelDir = os.path.join(modelDir, 'openface')
    # NOTE(review): the result of this join is discarded — dead statement?
    os.path.join(dlibModelDir,"shape_predictor_68_face_landmarks.dat")
    print dlibModelDir
    alib = openface.AlignDlib(dlibModelDir + "/shape_predictor_68_face_landmarks.dat")
    #use 1/2 screen width because I personally use dual monitors
    # NOTE(review): comment says 1/2 but the code divides by 4 — confirm.
    screen_width = root.winfo_screenwidth()/4
    screen_height = root.winfo_screenheight()/2
    img = np.zeros((screen_height, screen_width,3), np.uint8)
    print screen_width, screen_height
    print('starting video')
    cap = cv2.VideoCapture(0)
    #get video resolution
    ret, frame = cap.read()
    if ret == False: os.sys.exit("No camera detected")
    print frame.shape
    frame_h, frame_w, _ = frame.shape
    npxls = frame_h*frame_w
    print "Frame dims are '{0}' x '{1}'".format(frame_w,frame_h)
    print('Initializing neural net')
    learning_rate = .01
    conv_drop = .8
    hidden_drop = .8
    n_out = 2 #coordinates of where the user is gazing
    face_dim = 96 #size of face bounding box
    X = tf.placeholder(tf.float32, [None, face_dim, face_dim, 1])
    F = tf.placeholder(tf.float32, [None, 6]) #4 coordinates of a face + 2 for scaling
    Y = tf.placeholder(tf.float32, [None, n_out])
    # Store layers weight & bias (for the convolutional model `mlp`).
    weights = {
        'wc1': tf.Variable(tf.random_uniform([3, 3, 1, 32], minval=-.00001, maxval=.00001)), # 4x4 conv, 1 input, 32 outputs
        'wc2': tf.Variable(tf.random_uniform([3, 3, 32, 64], minval=-.00001, maxval=.00001)), # 4x4 conv, 32 input, 64 outputs
        'wc3': tf.Variable(tf.random_uniform([3, 3, 64, 128])), # 4x4 conv, 64 input, 128 outputs
        'wd1': tf.Variable(tf.random_uniform([128*4*4, 1024])), # fully connected. 128*4*4 inputs from conv layer
        #'out': tf.Variable(tf.random_uniform([1024+4, n_out])) #625 outputs from the conv layer + 4 inputs representing the location of the head within the original iamge
        'out': tf.Variable(tf.random_uniform([1024, n_out], minval=-.00001, maxval=.00001)) #625 outputs from the conv layer + 4 inputs representing the location of the head within the original iamge
    }
    biases = {
        'bc1': tf.Variable(tf.random_uniform([32], minval=-.00001, maxval=.00001)),
        'bc2': tf.Variable(tf.random_uniform([64], minval=-.00001, maxval=.00001)),
        'bc3': tf.Variable(tf.random_uniform([128], minval=-.00001, maxval=.00001)),
        'bd1': tf.Variable(tf.random_uniform([1024], minval=-.00001, maxval=.00001)),
        'out': tf.Variable(tf.random_uniform([n_out], minval=-.00001, maxval=.00001))
    }
    # Weights/biases for the dense-only model (`mlp_dense`), used below.
    weights_d = {
        'w1' : tf.Variable(tf.random_uniform([face_dim**2, 2048], minval=-.00001, maxval=.00001)),
        'w2' : tf.Variable(tf.random_uniform([2048, 512], minval=-.00001, maxval=.00001)),
        'out': tf.Variable(tf.random_uniform([512, n_out], minval=-.00001, maxval=.00001)),
    }
    biases_d = {
        'b1' : tf.Variable(tf.random_uniform([2048], minval=-.00001, maxval=.00001)),
        'b2' : tf.Variable(tf.random_uniform([512], minval=-.00001, maxval=.00001)),
        'out': tf.Variable(tf.random_uniform([n_out], minval=-.00001, maxval=.00001))
    }
    #create multilayer perceptron
    #pred = mlp(X, F, weights, biases, conv_drop, hidden_drop, face_dim)
    pred = mlp_dense(X, F, weights_d, biases_d, hidden_drop, face_dim)
    #define cost function
    #for now the cost function is the MSE
    #figure out how to do cross entropy with logits maybe? I mean this is a regression problem so...
    cost = tf.pow(pred-Y,2)
    cost = tf.reduce_mean(cost)
    train_op = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
    #train_op = tf.train.RMSPropOptimizer(.1, .9).minimize(cost)
    #train_op = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
    #pred_op = tf.argmax(pred, 1)
    init = tf.initialize_all_variables()
    with tf.Session() as sess:
        sess.run(init)
        print('Calibrating')
        """
        npts = 10
        print "generating '{0}' random points to choose from".format(npts)
        for _ in xrange(0,npts):
            img[:] = (0,0,0) # clear
            x = randint(0,screen_width-1)#/screen_width
            y = randint(0,screen_height-1)#/screen_height
            print x,y
            #create a white circle at the randomly selected point
            cv2.circle(img, (x,y), 10, (255,255,255), -1)
            cv2.waitKey(100)
            cv2.imshow('calibration', img)
            cv2.waitKey(100)
            calibrate(sess, train_op, cap, 2, n_input, X, Y, x/float(screen_width) , y/float(screen_height))
        ret, frame = cap.read()
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        gray_rs = np.reshape(gray,(1,n_input))
        print sess.run(pred, feed_dict={X: gray_rs/255.}).shape
        #alternative calibration
        for x in xrange(0,screen_width,100):
            for y in xrange(0,screen_height,100):
                #create a white circle at the randomly selected point
                img[:] = (0,0,0) # clear
                cv2.circle(img, (x,y), 10, (255,255,255), -1)
                cv2.waitKey(100)
                cv2.imshow('calibration', img)
                cv2.waitKey(100)
                x_tf = (float(x)/screen_width-.5)*2.
                y_tf = (float(y)/screen_height-.5)*2.
                calibrate(sess, train_op, .5, cap, alib, face_dim, X, Y, F, x_tf, y_tf)
        """
        #Lets see if it can figure out a dot....
        x = screen_width/4
        y = screen_height/4
        while True:
            img[:] = (0,0,0) # clear
            cv2.circle(img, (x,y), 10, (255,255,255), -1)
            cv2.waitKey(100)
            cv2.imshow('calibration', img)
            cv2.waitKey(100)
            #normalize x and y to be between -1 and 1
            x_tf = (float(x)/screen_width-.5)*2.
            y_tf = (float(y)/screen_height-.5)*2.
            calibrate(sess, train_op, .5, cap, alib, face_dim, X, Y, F, x_tf, y_tf, screen_width, screen_height, x, y, pred)
            p_tf = test(sess, pred, cap, alib, face_dim, X, F, float(screen_width), float(screen_height), x_tf, y_tf, x, y)
            if p_tf is not None:
                # Map the normalized prediction back to pixel coordinates.
                p = np.multiply(np.array(p_tf)/2.+.5, np.array([screen_width, screen_height])).astype(int)
                #draw
                cv2.circle(img, (p[0][0], p[0][1]), 10, (255,0,0), -1)
                cv2.imshow('Focus', img)
            #Display the resulting frame
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        cv2.destroyWindow('calibration')
        print('Now continuing onto testing')
        """
        img = np.zeros((screen_height, screen_width,3), np.uint8)
        while(True):
            #generate the dot
            x = randint(0,screen_width-1)#/screen_width
            y = randint(0,screen_height-1)#/screen_height
            x_tf = (float(x)/screen_width-.5)*2.
            y_tf = (float(y)/screen_height-.5)*2.
            #attempt to perform the backprojection
            p_tf = test(sess, pred, cap, alib, face_dim, X, F, float(screen_width), float(screen_height), x_tf, y_tf, x, y)
            if p_tf is not None:
                p = np.multiply(np.array(p_tf)/2.+.5, np.array([screen_width, screen_height])).astype(int)
                #draw
                img[:] = (0,0,0) # clear
                cv2.circle(img, (p[0][0], p[0][1]), 10, (255,255,255), -1)
                cv2.imshow('Focus', img)
            #Display the resulting frame
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        """
if __name__ == '__main__':
    main()
|
|
"""Module containing python functions, which generate second order RTD kernels."""
class KernelHandler(object):
    """Class responsible for inserting matrix elements into the various matrices used."""
    def __init__(self, si):
        # si: state-indexing helper; provides get_ind_dm0 and the counts below.
        self.si = si
        self.ndm0 = si.ndm0
        self.ndm0r = si.ndm0r
        self.npauli = si.npauli
        # Attached later via set_phi0/set_kern before any set_* call.
        self.phi0 = None
        self.kern = None
    def set_kern(self, kern):
        # Attach the kernel matrix that subsequent calls modify in place.
        self.kern = kern
    def set_phi0(self, phi0):
        # Attach the packed, real-valued density-matrix vector.
        self.phi0 = phi0
    def is_included(self, b, bp, bcharge):
        """ Checks if the density matrix entry :math:`|b><bp|` is included in the calculations.
        Parameters
        ----------
        b : int
            first state
        bp : int
            second state
        bcharge : int
            charge of the states b and bp
        Returns
        -------
        bool
            true if it's included
        """
        # get_ind_dm0 returns -1 for entries excluded from the calculation.
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        if bbp == -1:
            return False
        return True
    def is_unique(self, b, bp, bcharge):
        """ Check if the entry :math:`|b><bp|` is unique.
        Parameters
        ----------
        b : int
            first state
        bp : int
            second state
        bcharge : int
            charge of the states b and bp
        Returns
        -------
        bool
            true if unique
        """
        bbp_bool = self.si.get_ind_dm0(b, bp, bcharge, maptype=2)
        return bbp_bool
    def set_energy(self, energy, b, bp, bcharge):
        # Couple the real and imaginary components of the off-diagonal entry
        # |b><bp| with +/- energy.  Diagonal (Pauli) entries have no imaginary
        # partner index, so bbpi_bool filters them out.
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        bbpi = self.ndm0 + bbp - self.npauli
        bbpi_bool = True if bbpi >= self.ndm0 else False
        if bbpi_bool:
            self.kern[bbp, bbpi] = self.kern[bbp, bbpi] + energy
            self.kern[bbpi, bbp] = self.kern[bbpi, bbp] - energy
    def set_matrix_element(self, fct, b, bp, bcharge, a, ap, acharge):
        """ Adds a complex value to the matrix element connecting :math:`|a><ap|` and :math:`|b><bp|` in the kernel.
        Parameters
        ----------
        fct : complex
            value to be added
        b : int
            first state of :math:`|b><bp|`
        bp : int
            second state of :math:`|b><bp|`
        bcharge : int
            charge of states b and bp
        a : int
            first state of :math:`|a><ap|`
        ap : int
            second state of :math:`|a><ap|`
        acharge : int
            charge of the states a and ap
        self.kern : ndarray
            (modifies) the kernel
        """
        # Packed indices: [0, npauli) are diagonal entries, [npauli, ndm0)
        # the real parts of off-diagonals, [ndm0, ...) their imaginary parts.
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        bbpi = self.ndm0 + bbp - self.npauli
        bbpi_bool = True if bbpi >= self.ndm0 else False
        aap = self.si.get_ind_dm0(a, ap, acharge)
        aapi = self.ndm0 + aap - self.npauli
        # maptype=3 indicates whether |a><ap| is stored directly or as the
        # conjugate of |ap><a|; the sign flips the imaginary contribution.
        aap_sgn = +1 if self.si.get_ind_dm0(a, ap, acharge, maptype=3) else -1
        fct_imag = fct.imag
        fct_real = fct.real
        self.kern[bbp, aap] += fct_imag
        if aapi >= self.ndm0:
            self.kern[bbp, aapi] += fct_real*aap_sgn
            if bbpi_bool:
                self.kern[bbpi, aapi] += fct_imag*aap_sgn
        if bbpi_bool:
            self.kern[bbpi, aap] += -fct_real
    def set_matrix_element_pauli(self, fctm, fctp, bb, aa):
        """ Adds a real value (fctp) to the the matrix element connecting the states
        bb and aa in the Pauli kernel. In addition, adds another another real value (fctm)
        to the diagonal kern[bb, bb].
        Parameters
        ----------
        fctm : double
            value to be added to kern[bb, bb]
        fctp : double
            value to be added to kern[bb, aa]
        bb : int
            first state/index
        aa : int
            second state/index
        self.kern : ndarray
            (modifies) the kernel
        """
        self.kern[bb, bb] += fctm
        self.kern[bb, aa] += fctp
    def get_phi0_element(self, b, bp, bcharge):
        r""" Gets the entry of the density matrix given by :math:`|b><bp|`.
        Parameters
        ----------
        b : int
            first state
        bp : int
            second state
        bcharge : int
            charge of the states b and bp
        Returns
        -------
        complex
            the value :math:`<b|\phi_0|bp>`
        """
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        if bbp == -1:
            return 0.0
        bbpi = self.ndm0 + bbp - self.npauli
        bbpi_bool = True if bbpi >= self.ndm0 else False
        phi0_real = self.phi0[bbp]
        phi0_imag = 0
        if bbpi_bool:
            # Stored value is for the canonical orientation; conjugate if the
            # requested entry is the mirrored one (maptype=3 is falsy).
            bbp_conj = self.si.get_ind_dm0(b, bp, bcharge, maptype=3)
            phi0_imag = self.phi0[bbpi] if bbp_conj else -self.phi0[bbpi]
        return phi0_real + 1j*phi0_imag
class KernelHandlerMatrixFree(KernelHandler):
    """Class used for inserting matrix elements into vectors when using the matrix free
    solution method."""
    def __init__(self, si):
        KernelHandler.__init__(self, si)
        # Time derivative of phi0; attached via set_dphi0_dt before use.
        self.dphi0_dt = None
    def set_dphi0_dt(self, dphi0_dt):
        self.dphi0_dt = dphi0_dt
    def set_energy(self, energy, b, bp, bcharge):
        # Diagonal entries acquire no coherent (energy) contribution.
        if b == bp:
            return
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        bbpi = self.ndm0 + bbp - self.npauli
        phi0bbp = self.get_phi0_element(b, bp, bcharge)
        # d(phi0)/dt contribution: -i * energy * phi0, split into the packed
        # real (bbp) and imaginary (bbpi) slots.
        dphi0_dt_bbp = -1j*energy*phi0bbp
        self.dphi0_dt[bbp] += dphi0_dt_bbp.real
        self.dphi0_dt[bbpi] -= dphi0_dt_bbp.imag
    def set_matrix_element(self, fct, b, bp, bcharge, a, ap, acharge):
        r""" Adds a contribution to :math:`d\phi_o /dt` that stems from the matrix element
        connecting :math:`|b><bp|` and :math:`|a><ap|` in the full off-diagonal in the kernel.
        Parameters
        ----------
        fct : complex
            value to be added
        b : int
            first state of :math:`|b><bp|`
        bp : int
            second state of :math:`|b><bp|`
        bcharge : int
            charge for the states b and bp
        a : int
            first state of :math:`|a><ap|`
        ap : int
            second state of :math:`|a><ap|`
        acharge : int
            charge of the states a and ap
        self.dphi0_dt : ndarray
            (modifies) time derivative of the density matrix
        """
        bbp = self.si.get_ind_dm0(b, bp, bcharge)
        bbpi = self.ndm0 + bbp - self.npauli
        bbpi_bool = True if bbpi >= self.ndm0 else False
        aap = self.si.get_ind_dm0(a, ap, acharge)
        # Matrix-free: instead of storing fct in the kernel, apply it to the
        # current phi0 element directly.
        phi0aap = self.get_phi0_element(a, ap, acharge)
        dphi0_dt_bbp = -1j*fct*phi0aap
        self.dphi0_dt[bbp] += dphi0_dt_bbp.real
        if bbpi_bool:
            self.dphi0_dt[bbpi] -= dphi0_dt_bbp.imag
    def set_matrix_element_pauli(self, fctm, fctp, bb, aa):
        r""" Adds a contribution to :math:`d\phi_o /dt` that stems from the matrix element
        connecting :math:`|b><b|` and :math:`|a><a|` in the Pauli kernel.
        Parameters
        ----------
        fctm : double
            value from the diagonal of the kernel kern[bb, bb]
        fctp : double
            value from the off-diagonal of the kernel kern[bb, aa]
        b : int
            first state
        a : int
            second state
        self.dphi0_dt : ndarray
            (modifies) time derivative of the density matrix
        """
        self.dphi0_dt[bb] += fctm*self.phi0[bb] + fctp*self.phi0[aa]
    def get_phi0_norm(self):
        # Trace of the density matrix: sum of the diagonal (Pauli) entries.
        ncharge, statesdm = self.si.ncharge, self.si.statesdm
        norm = 0.0
        for bcharge in range(ncharge):
            for b in statesdm[bcharge]:
                bb = self.si.get_ind_dm0(b, b, bcharge)
                norm += self.phi0[bb]
        return norm
class KernelHandlerRTD(KernelHandler):
    """Class used for inserting matrix elements into the matrices used in the RTD approach."""
    def set_matrix_list(self):
        # NOTE(review): Wdd, WE1, WE2, ReWdn, ImWdn, ReWnd, ImWnd and Lnn are
        # expected to be attached to this handler elsewhere before this call;
        # they are not created in this class — confirm with the caller.
        self.mats = [self.Wdd, self.WE1, self.WE2, self.ReWdn, self.ImWdn, self.ReWnd, self.ImWnd, self.Lnn]
    def add_matrix_element(self, fct, l, b, bp, bcharge, a, ap, acharge, mi):
        r"""
        Adds a value to the lead-resolved ndarray (kernel) given by index mi. The indices are set by the entries
        :math:`|b><bp|` and :math:`|a><ap|` in the density matrix. Which matrix to add the value to is determined by
        mi as 0 = :math:`W_{dd}`, 1 = :math:`W_{E,1}`, 2 = :math:`W_{E,2}`, 3 = :math:`\Re(W_{dn}^{(1)})`,
        4 = :math:`\Im (W_{dn}^{(1)})`, 5 = :math:`\Re (W_{nd}^{(1)})`, 6 = :math:`\Im(W_{nd}^{(1)})`,
        7 = :math:`L_{nn}`.
        Parameters
        ----------
        fct : float
            the value to be added
        l : int
            lead index
        b : int
            first index for state 1
        bp : int
            second index for state 1
        bcharge : int
            charge of state 1
        a : int
            first index for state 2
        ap : int
            second index for state 2
        acharge : int
            charge of state 2
        mi : int
            index for selecting which matrix to insert into
        self.mats[mi] : ndarray
            (Modifies) the matrix selected by mi
        """
        indx1 = self.si.get_ind_dm0(b, bp, bcharge)
        indx2 = self.si.get_ind_dm0(a, ap, acharge)
        # Off-diagonal entries are re-based to start at 0; the b > bp
        # (mirrored) orientation is stored in a second block of the matrix.
        if b != bp:
            indx1 -= self.npauli
            if b > bp:
                indx1 += self.ndm0 - self.npauli
        if a != ap:
            indx2 -= self.npauli
            if a > ap:
                indx2 += self.ndm0 - self.npauli
        mat = self.mats[mi]
        mat[l, indx1, indx2] += fct
    def set_matrix_element_dd(self, l, fctm, fctp, bb, aa, mi):
        """
        Adds a value to the lead-resolved kernel connecting :math:`|b><b|` to :math:`|a><a|`,
        and uses the conservation law to add a second value to the diagonal (connecting :math:`|b><b|`
        to itself).
        Parameters
        ----------
        l : int
            lead index
        fctm : float
            value to be added to the diagonal (tunneling out)
        fctp : float
            value to be added to the off-diagonal (tunneling in)
        bb : int
            index for the entry :math:`|b><b|`
        aa : int
            index for the entry :math:`|a><a|`
        mi : int
            index for selecting which matrix to insert into
        self.mats[mi] : ndarray
            (Modifies) the matrix selected by mi
        """
        mat = self.mats[mi]
        mat[l, bb, bb] += fctm
        mat[l, bb, aa] += fctp
    def add_element_2nd_order(self, r, fct, indx0, indx1, a3, charge3, a4, charge4):
        """
        Adds a value to the lead-resolved kernel for the diagonal density matrix. Uses symmetries
        between second order diagrams in the RTD approach to add the value to four places in the matrix.
        Parameters
        ----------
        r : int
            lead index
        fct : float
            value to be added
        indx0 : int
            index for inital state
        indx1 : int
            index for intermidiate state 1
        a3 : int
            intermediate state 3 is given by :math:`|a3><a3|`
        charge3 : int
            charge of intermediate state 3
        a4 : int
            final state is given by :math:`|a4><a4|`
        charge4 : int
            charge of the final state
        self.Wdd : ndarray
            (Modifies) the lead-resolved kernel for the diagonal density matrix.
        """
        si = self.si
        indx3 = si.get_ind_dm0(a3, a3, charge3)
        indx4 = si.get_ind_dm0(a4, a4, charge4)
        # Factor 2 accounts for the summed diagram pair; signs below follow
        # from flipping the outermost vertices.
        fct = 2 * fct
        self.Wdd[r, indx4, indx0] += fct
        # Flipping left-most vertex p3 = -p3
        self.Wdd[r, indx3, indx0] += -fct
        # Flipping right-most vertex p0 = -p0
        self.Wdd[r, indx4, indx1] += fct
        # Flipping left-most and right-most vertices p0 = -p0 and p3 = -p3
        self.Wdd[r, indx3, indx1] += -fct
    def add_element_Lnn(self, a1, b1, charge, fct):
        """
        Adds a value to the part of :math:`L_{N,+}` connecting an off-diagonal component of the density matrix to
        itself.
        Parameters
        ----------
        a1 : int
            first part of the component :math:`|a1><b1|`
        b1 : int
            second part of the component :math:`|a1><b1|`
        charge : int
            charge of the states a1 and b1
        fct : float
            the value to be added
        self.Lnn : ndarray
            (Modifies) the anti-commutator Liouvillian connecting non-diagonal elements
        """
        # Same off-diagonal re-basing scheme as in add_matrix_element.
        indx = self.si.get_ind_dm0(a1, b1, charge) - self.npauli
        if a1 > b1:
            indx += self.ndm0 - self.npauli
        self.Lnn[indx, indx] += fct
|
|
import sys
import bisect
import copy
import contextlib
import pkg_resources
import numpy
from PyQt4.QtGui import (
QWidget, QButtonGroup, QGroupBox, QRadioButton, QSlider,
QDoubleSpinBox, QComboBox, QSpinBox, QListView,
QVBoxLayout, QHBoxLayout, QFormLayout, QSpacerItem, QSizePolicy,
QCursor, QIcon, QStandardItemModel, QStandardItem, QStyle,
QStylePainter, QStyleOptionFrame, QPixmap,
QApplication, QDrag
)
from PyQt4 import QtGui
from PyQt4.QtCore import Qt, QObject, QEvent, QSize, QModelIndex, QMimeData
from PyQt4.QtCore import pyqtSignal as Signal, pyqtSlot as Slot
import Orange.data
from Orange import preprocess
from Orange.statistics import distribution
from Orange.preprocess import Continuize
from Orange.widgets import widget, gui, settings
from .owimpute import RandomTransform
@contextlib.contextmanager
def blocked(qobj):
    """Temporarily block *qobj*'s signals, restoring the prior state on exit."""
    previous = qobj.signalsBlocked()
    qobj.blockSignals(True)
    try:
        yield qobj
    finally:
        # Restore whatever blocking state the object had before entry.
        qobj.blockSignals(previous)
class BaseEditor(QWidget):
    """
    Abstract base for widgets that edit a single preprocessor's parameters.

    Subclasses implement `setParameters`/`parameters` for (de)serializing
    the editor state, and `createinstance` for building the preprocessor.
    """
    #: Emitted when parameters have changed (by any means).
    changed = Signal()
    #: Emitted when parameters were edited/changed as a result of
    #: user interaction.
    edited = Signal()

    def setParameters(self, parameters):
        """
        Load the editor state from `parameters`.

        Parameters
        ----------
        parameters : dict
            State as a dictionary. It is up to subclasses to
            properly parse the contents.
        """
        raise NotImplementedError

    def parameters(self):
        """Return the current editor state as a dictionary."""
        raise NotImplementedError

    @staticmethod
    def createinstance(params):
        """
        Construct the Preprocessor instance from a stored parameters dict.

        Parameters
        ----------
        params : dict
            Parameters as returned by `parameters`.
        """
        raise NotImplementedError
class _NoneDisc(preprocess.discretize.Discretization):
    """Discretization strategy that maps every variable to ``None``.

    Plugged into ``preprocess.Discretize`` so that all continuous
    features are removed from the domain.
    """
    def __call__(self, data, variable):
        # Returning None makes Discretize drop the variable.
        return None
class DiscretizeEditor(BaseEditor):
    """
    Editor for preprocess.Discretize.
    """
    #: Discretize methods
    NoDisc, EqualWidth, EqualFreq, Drop, EntropyMDL = 0, 1, 2, 3, 4
    #: Maps a method id to (discretizer class, default constructor kwargs).
    Discretizers = {
        NoDisc: (None, {}),
        EqualWidth: (preprocess.discretize.EqualWidth, {"n": 4}),
        EqualFreq: (preprocess.discretize.EqualFreq, {"n": 4}),
        Drop: (_NoneDisc, {}),
        EntropyMDL: (preprocess.discretize.EntropyMDL, {"force": False})
    }
    Names = {
        NoDisc: "None",
        EqualWidth: "Equal width discretization",
        EqualFreq: "Equal frequency discretization",
        Drop: "Remove continuous attributes",
        EntropyMDL: "Entropy-MDL discretization"
    }

    def __init__(self, parent=None, **kwargs):
        BaseEditor.__init__(self, parent, **kwargs)
        self.__method = DiscretizeEditor.EqualFreq
        self.__nintervals = 4
        layout = QVBoxLayout()
        self.setLayout(layout)
        self.__group = group = QButtonGroup(self, exclusive=True)
        for method in [self.EntropyMDL, self.EqualFreq, self.EqualWidth,
                       self.Drop]:
            rb = QRadioButton(
                self, text=self.Names[method],
                checked=self.__method == method
            )
            layout.addWidget(rb)
            group.addButton(rb, method)
        group.buttonClicked.connect(self.__on_buttonClicked)
        # NOTE(review): the title string is missing its closing ")".
        self.__slbox = slbox = QGroupBox(
            title="Number of intervals (for equal width/frequency",
            flat=True
        )
        slbox.setLayout(QVBoxLayout())
        self.__slider = slider = QSlider(
            orientation=Qt.Horizontal,
            minimum=2, maximum=10, value=self.__nintervals,
            enabled=self.__method in [self.EqualFreq, self.EqualWidth],
        )
        slider.valueChanged.connect(self.__on_valueChanged)
        slbox.layout().addWidget(slider)
        container = QHBoxLayout()
        container.setContentsMargins(13, 0, 0, 0)
        container.addWidget(slbox)
        self.layout().insertLayout(3, container)
        self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Preferred)

    def setMethod(self, method):
        """Set the discretization method (programmatic change)."""
        if self.__method != method:
            self.__method = method
            b = self.__group.button(method)
            b.setChecked(True)
            # The interval slider only applies to equal width/frequency.
            self.__slider.setEnabled(
                method in [self.EqualFreq, self.EqualWidth]
            )
            self.changed.emit()

    def method(self):
        return self.__method

    def intervals(self):
        return self.__nintervals

    def setIntervals(self, n):
        """Set the number of intervals, clipped to the slider's range."""
        n = numpy.clip(n, self.__slider.minimum(), self.__slider.maximum())
        n = int(n)
        if self.__nintervals != n:
            self.__nintervals = n
            # blocking signals in order to differentiate between
            # changed by user (notified through __on_valueChanged) or
            # changed programmatically (this)
            with blocked(self.__slider):
                self.__slider.setValue(n)
            self.changed.emit()

    def setParameters(self, params):
        method = params.get("method", self.EqualFreq)
        # NOTE(review): fallback n is 5 here vs 4 in __init__ — kept as-is.
        nintervals = params.get("n", 5)
        self.setMethod(method)
        if method in [self.EqualFreq, self.EqualWidth]:
            self.setIntervals(nintervals)

    def parameters(self):
        # "n" is only meaningful for the equal width/frequency methods.
        if self.__method in [self.EqualFreq, self.EqualWidth]:
            return {"method": self.__method, "n": self.__nintervals}
        else:
            return {"method": self.__method}

    def __on_buttonClicked(self):
        # on user 'method' button click
        method = self.__group.checkedId()
        if method != self.__method:
            self.setMethod(self.__group.checkedId())
            self.edited.emit()

    def __on_valueChanged(self):
        # on user n intervals slider change.
        self.__nintervals = self.__slider.value()
        self.changed.emit()
        self.edited.emit()

    @staticmethod
    def createinstance(params):
        """Build the Discretize preprocessor described by `params`."""
        params = dict(params)
        method = params.pop("method", DiscretizeEditor.EqualFreq)
        method, defaults = DiscretizeEditor.Discretizers[method]
        if method is None:
            return None
        # Merge the stored parameters over the per-method defaults so that
        # missing keys (e.g. "n") fall back to sensible values.
        resolved = dict(defaults)
        resolved.update(params)
        # BUG FIX: previously `method(**params)` was called here, silently
        # discarding `resolved` and therefore the defaults merged above.
        return preprocess.Discretize(method(**resolved))
class ContinuizeEditor(BaseEditor):
    """Editor for the Continuize preprocessor (multinomial treatment)."""
    #: (label, Continuize treatment constant) pairs shown as radio buttons.
    Continuizers = [
        ("Most frequent is base", Continuize.FrequentAsBase),
        ("One attribute per value", Continuize.Indicators),
        ("Remove multinomial attributes", Continuize.RemoveMultinomial),
        ("Remove all discrete attributes", Continuize.Remove),
        ("Treat as ordinal", Continuize.AsOrdinal),
        ("Divide by number of values", Continuize.AsNormalizedOrdinal)]

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        self.setLayout(QVBoxLayout())
        self.__treatment = Continuize.Indicators
        self.__group = group = QButtonGroup(exclusive=True)
        group.buttonClicked.connect(self.__on_buttonClicked)
        # One radio button per treatment; the button group id is the
        # treatment constant itself.
        for text, treatment in ContinuizeEditor.Continuizers:
            rb = QRadioButton(
                text=text,
                checked=self.__treatment == treatment)
            group.addButton(rb, int(treatment))
            self.layout().addWidget(rb)
        self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)

    def setTreatment(self, treatment):
        # Programmatic change; emits `changed` but not `edited`.
        b = self.__group.button(treatment)
        if b is not None:
            b.setChecked(True)
            self.__treatment = treatment
            self.changed.emit()

    def treatment(self):
        return self.__treatment

    def setParameters(self, params):
        treatment = params.get("multinomial_treatment", Continuize.Indicators)
        self.setTreatment(treatment)

    def parameters(self):
        return {"multinomial_treatment": self.__treatment}

    def __on_buttonClicked(self):
        # User interaction; emits both `changed` and `edited`.
        self.__treatment = self.__group.checkedId()
        self.changed.emit()
        self.edited.emit()

    @staticmethod
    def createinstance(params):
        params = dict(params)
        treatment = params.pop("multinomial_treatment", Continuize.Indicators)
        return Continuize(multinomial_treatment=treatment,
                          normalize_continuous=Continuize.Leave)
class _ImputeRandom:
    """Per-variable imputation method replacing missing values via a
    RandomTransform built from the variable's distribution (presumably
    random draws — governed by RandomTransform, defined in owimpute)."""
    class ReplaceUnknownsSampleRandom(RandomTransform):
        # compute_value wrapper: fills only the NaN cells of a column.
        def transform(self, column):
            mask = numpy.isnan(column)
            c = column[mask]
            if not c.size:
                # Nothing missing — return the column untouched.
                return column
            else:
                # Transform only the masked cells, then write them back
                # into a copy of the column.
                c = super().transform(c)
                column = numpy.array(column)
                column[mask] = c
                return column
    def __call__(self, data, variable):
        dist = distribution.get_distribution(data, variable)
        # Return a copy of the variable whose compute_value performs the
        # replacement lazily.
        var = copy.copy(variable)
        var.compute_value = self.ReplaceUnknownsSampleRandom(variable, dist)
        return var
class _RemoveNaNRows(preprocess.preprocess.Preprocess):
    """Preprocessor that drops every row containing at least one NaN."""
    def __call__(self, data):
        # Keep only rows whose feature vector is entirely non-NaN.
        keep = ~numpy.any(numpy.isnan(data.X), axis=1)
        return data[keep]
class ImputeEditor(BaseEditor):
    """Editor for the imputation preprocessor."""
    #: Method ids.
    (NoImputation, Constant, Average,
     Model, Random, DropRows, DropColumns) = 0, 1, 2, 3, 4, 5, 6
    #: Maps a method id to (imputer instance, default kwargs).
    Imputers = {
        NoImputation: (None, {}),
        # Constant: (None, {"value": 0})
        Average: (preprocess.impute.Average(), {}),
        # Model: (preprocess.impute.Model, {}),
        Random: (_ImputeRandom(), {}),
        DropRows: (None, {})
    }
    Names = {
        NoImputation: "Don't impute.",
        Constant: "Replace with constant",
        Average: "Average/Most frequent",
        Model: "Model based imputer",
        Random: "Replace with random value",
        DropRows: "Remove rows with missing values.",
    }

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        self.setLayout(QVBoxLayout())
        self.__method = ImputeEditor.Average
        self.__group = group = QButtonGroup(self, exclusive=True)
        group.buttonClicked.connect(self.__on_buttonClicked)
        # Only a subset of methods is exposed in the UI.
        for methodid in [self.Average, self.Random, self.DropRows]:
            text = self.Names[methodid]
            rb = QRadioButton(text=text, checked=self.__method == methodid)
            group.addButton(rb, methodid)
            self.layout().addWidget(rb)

    def setMethod(self, method):
        # Programmatic change; emits `changed` but not `edited`.
        b = self.__group.button(method)
        if b is not None:
            b.setChecked(True)
            self.__method = method
            self.changed.emit()

    def setParameters(self, params):
        method = params.get("method", ImputeEditor.Average)
        self.setMethod(method)

    def parameters(self):
        return {"method": self.__method}

    def __on_buttonClicked(self):
        self.__method = self.__group.checkedId()
        self.changed.emit()
        self.edited.emit()

    @staticmethod
    def createinstance(params):
        params = dict(params)
        method = params.pop("method", ImputeEditor.Average)
        if method == ImputeEditor.NoImputation:
            return None
        elif method == ImputeEditor.Average:
            return preprocess.SklImpute()
        elif method == ImputeEditor.Model:
            return preprocess.Impute(method=preprocess.impute.Model())
        elif method == ImputeEditor.DropRows:
            return _RemoveNaNRows()
        elif method == ImputeEditor.DropColumns:
            return preprocess.RemoveNaNColumns()
        else:
            method, defaults = ImputeEditor.Imputers[method]
            defaults = dict(defaults)
            # NOTE(review): the merged `defaults` dict is never used —
            # preprocess.Impute below receives only `method`. Confirm
            # whether the merged kwargs were meant to be forwarded.
            defaults.update(params)
            return preprocess.Impute(method=method)
class UnivariateFeatureSelect(QWidget):
    """Compound widget for configuring univariate feature selection:
    a score (combo box) plus a selection strategy (radio buttons with
    associated spin boxes).
    """
    changed = Signal()  # State changed (programmatically or by the user).
    edited = Signal()   # State changed through direct user interaction.

    #: Strategy
    Fixed, Percentile, FDR, FPR, FWE = 1, 2, 3, 4, 5

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        self.setLayout(QVBoxLayout())

        # Current state: score index, strategy id, and per-strategy values.
        self.__scoreidx = 0
        self.__strategy = UnivariateFeatureSelect.Fixed
        self.__k = 10
        self.__p = 75.0

        box = QGroupBox(title="Score", flat=True)
        box.setLayout(QVBoxLayout())
        self.__cb = cb = QComboBox(self, )
        self.__cb.currentIndexChanged.connect(self.setScoreIndex)
        self.__cb.activated.connect(self.edited)
        box.layout().addWidget(cb)
        self.layout().addWidget(box)

        box = QGroupBox(title="Strategy", flat=True)
        self.__group = group = QButtonGroup(self, exclusive=True)
        # Strategy id -> spin box holding that strategy's parameter.
        self.__spins = {}

        form = QFormLayout()
        fixedrb = QRadioButton("Fixed", checked=True)
        group.addButton(fixedrb, UnivariateFeatureSelect.Fixed)
        kspin = QSpinBox(
            minimum=1, value=self.__k,
            enabled=self.__strategy == UnivariateFeatureSelect.Fixed
        )
        kspin.valueChanged[int].connect(self.setK)
        kspin.editingFinished.connect(self.edited)
        self.__spins[UnivariateFeatureSelect.Fixed] = kspin
        form.addRow(fixedrb, kspin)

        percrb = QRadioButton("Percentile")
        group.addButton(percrb, UnivariateFeatureSelect.Percentile)
        pspin = QDoubleSpinBox(
            minimum=0.0, maximum=100.0, singleStep=0.5,
            value=self.__p, suffix="%",
            enabled=self.__strategy == UnivariateFeatureSelect.Percentile
        )
        pspin.valueChanged[float].connect(self.setP)
        pspin.editingFinished.connect(self.edited)
        self.__spins[UnivariateFeatureSelect.Percentile] = pspin
        # Percentile controls disabled for now.
        pspin.setEnabled(False)
        percrb.setEnabled(False)
        form.addRow(percrb, pspin)

        # form.addRow(QRadioButton("FDR"), QDoubleSpinBox())
        # form.addRow(QRadioButton("FPR"), QDoubleSpinBox())
        # form.addRow(QRadioButton("FWE"), QDoubleSpinBox())

        self.__group.buttonClicked.connect(self.__on_buttonClicked)
        box.setLayout(form)
        self.layout().addWidget(box)

    def setScoreIndex(self, scoreindex):
        """Select the score at `scoreindex` in the combo box."""
        if self.__scoreidx != scoreindex:
            self.__scoreidx = scoreindex
            self.__cb.setCurrentIndex(scoreindex)
            self.changed.emit()

    def scoreIndex(self):
        """Return the index of the currently selected score."""
        return self.__scoreidx

    def setStrategy(self, strategy):
        """Select the selection `strategy` and enable its spin box."""
        if self.__strategy != strategy:
            self.__strategy = strategy
            b = self.__group.button(strategy)
            b.setChecked(True)
            # Enable only the spin box belonging to the active strategy.
            for st, rb in self.__spins.items():
                rb.setEnabled(st == strategy)
            self.changed.emit()

    def setK(self, k):
        """Set the number of features for the Fixed strategy."""
        if self.__k != k:
            self.__k = k
            spin = self.__spins[UnivariateFeatureSelect.Fixed]
            spin.setValue(k)
            # Only emit when the value is actually in effect.
            if self.__strategy == UnivariateFeatureSelect.Fixed:
                self.changed.emit()

    def setP(self, p):
        """Set the percentile for the Percentile strategy."""
        if self.__p != p:
            self.__p = p
            spin = self.__spins[UnivariateFeatureSelect.Percentile]
            spin.setValue(p)
            if self.__strategy == UnivariateFeatureSelect.Percentile:
                self.changed.emit()

    def setItems(self, itemlist):
        """Populate the score combo box; each item is a dict with "text"."""
        for item in itemlist:
            self.__cb.addItem(item["text"])

    def __on_buttonClicked(self):
        # User picked a strategy radio button.
        strategy = self.__group.checkedId()
        self.setStrategy(strategy)
        self.edited.emit()

    def setParameters(self, params):
        """Restore widget state from a parameters dict."""
        score = params.get("score", 0)
        strategy = params.get("strategy", UnivariateFeatureSelect.Fixed)
        self.setScoreIndex(score)
        self.setStrategy(strategy)
        if strategy == UnivariateFeatureSelect.Fixed:
            self.setK(params.get("k", 10))
        else:
            self.setP(params.get("p", 75))

    def parameters(self):
        """Return the full widget state as a parameters dict."""
        score = self.__scoreidx
        strategy = self.__strategy
        p = self.__p
        k = self.__k

        return {"score": score, "strategy": strategy, "p": p, "k": k}
class FeatureSelectEditor(BaseEditor):
    """Editor widget wrapping `UnivariateFeatureSelect` for feature
    selection configuration.
    """
    # (display name, score class) pairs; indexed by the "score" parameter.
    MEASURES = [
        ("Information Gain", preprocess.score.InfoGain),
        ("Gain ratio", preprocess.score.GainRatio),
        ("Gini index", preprocess.score.Gini),
    ]

    def __init__(self, parent=None):
        super().__init__(parent)

        self.setLayout(QVBoxLayout())
        self.layout().setContentsMargins(0, 0, 0, 0)
        self.__score = 0
        self.__selecionidx = 0

        self.__uni_fs = UnivariateFeatureSelect()
        # Items must parallel MEASURES (same order/indices).
        self.__uni_fs.setItems(
            [{"text": "Information gain", "tooltip": ""},
             {"text": "Gain ratio"},
             {"text": "Gini index"}
             ]
        )
        self.layout().addWidget(self.__uni_fs)
        self.__uni_fs.changed.connect(self.changed)
        self.__uni_fs.edited.connect(self.edited)

    def setParameters(self, params):
        """Delegate state restoration to the inner widget."""
        self.__uni_fs.setParameters(params)

    def parameters(self):
        """Delegate state retrieval to the inner widget."""
        return self.__uni_fs.parameters()

    @staticmethod
    def createinstance(params):
        """Create the feature-selection preprocessor from `params`."""
        params = dict(params)
        score = params.pop("score", 0)
        score = FeatureSelectEditor.MEASURES[score][1]
        strategy = params.get("strategy", UnivariateFeatureSelect.Fixed)
        k = params.get("k", 10)
        if strategy == UnivariateFeatureSelect.Fixed:
            return preprocess.fss.SelectBestFeatures(score, k=k)
        else:
            # TODO: implement top percentile selection
            raise NotImplementedError
# TODO: Model based FS (random forest variable importance, ...), RFE
# Unsupervised (min variance, constant, ...)??
class _Scaling(preprocess.preprocess.Preprocess):
    """
    Scale data preprocessor.

    Centers and scales every continuous attribute using the statistics
    computed by the `center` and `scale` callables from the attribute's
    (values, counts) distribution.
    """
    @staticmethod
    def mean(dist):
        # `dist` unpacks into (values, counts); weighted mean of values.
        values, counts = numpy.array(dist)
        return numpy.average(values, weights=counts)

    @staticmethod
    def median(dist):
        values, counts = numpy.array(dist)
        cumdist = numpy.cumsum(counts)
        if cumdist[-1] > 0:
            cumdist /= cumdist[-1]
        # Median = value at the 0.5 quantile of the cumulative distribution.
        return numpy.interp(0.5, cumdist, values)

    @staticmethod
    def span(dist):
        # Range (max - min) of observed values.
        values = numpy.array(dist[0])
        minval = numpy.min(values)
        maxval = numpy.max(values)
        return maxval - minval

    @staticmethod
    def std(dist):
        # Weighted standard deviation of the distribution.
        values, counts = numpy.array(dist)
        mean = numpy.average(values, weights=counts)
        diff = values - mean
        return numpy.sqrt(numpy.average(diff ** 2, weights=counts))

    # NOTE(review): the defaults below capture the staticmethod objects as
    # seen in the class body; whether calling them directly works depends
    # on the Python version — confirm against the supported versions.
    def __init__(self, center=mean, scale=std):
        self.center = center
        self.scale = scale

    def __call__(self, data):
        """Return `data` with continuous attributes replaced by centered
        and scaled variants (via Normalizer compute values)."""
        if self.center is None and self.scale is None:
            return data

        def transform(var):
            # Build a normalized copy of a single continuous variable.
            dist = distribution.get_distribution(data, var)
            if self.center:
                c = self.center(dist)
                # Shift the distribution so scale is computed post-centering.
                dist[0, :] -= c
            else:
                c = 0

            if self.scale:
                s = self.scale(dist)
                # Guard against division by (near-)zero spread.
                if s < 1e-15:
                    s = 1
            else:
                s = 1
            factor = 1 / s
            newvar = copy.copy(var)
            newvar.compute_value = \
                preprocess.transformation.Normalizer(var, c, factor)
            return newvar

        newvars = []
        for var in data.domain.attributes:
            if isinstance(var, Orange.data.ContinuousVariable):
                newvars.append(transform(var))
            else:
                # Non-continuous attributes pass through unchanged.
                newvars.append(var)
        domain = Orange.data.Domain(newvars, data.domain.class_vars,
                                    data.domain.metas)
        return data.from_table(domain, data)
class Scale(BaseEditor):
    """Editor widget for configuring the centering/scaling preprocessor."""
    # Centering method identifiers.
    NoCentering, CenterMean, CenterMedian = 0, 1, 2
    # Scaling method identifiers.
    NoScaling, ScaleBySD, ScaleBySpan = 0, 1, 2

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        self.setLayout(QVBoxLayout())
        form = QFormLayout()
        # Combo box indices correspond to the identifiers above.
        self.__centercb = QComboBox()
        self.__centercb.addItems(["No centering", "Center by mean",
                                  "Center by median"])

        self.__scalecb = QComboBox()
        self.__scalecb.addItems(["No scaling", "Scale by std",
                                 "Scale by span"])

        form.addRow("Center", self.__centercb)
        form.addRow("Scale", self.__scalecb)
        self.layout().addLayout(form)
        self.__centercb.currentIndexChanged.connect(self.changed)
        self.__scalecb.currentIndexChanged.connect(self.changed)
        self.__centercb.activated.connect(self.edited)
        self.__scalecb.activated.connect(self.edited)

    def setParameters(self, params):
        """Restore combo box selections from a parameters dict."""
        center = params.get("center", Scale.CenterMean)
        scale = params.get("scale", Scale.ScaleBySD)
        self.__centercb.setCurrentIndex(center)
        self.__scalecb.setCurrentIndex(scale)

    def parameters(self):
        """Return the current selections as a parameters dict."""
        return {"center": self.__centercb.currentIndex(),
                "scale": self.__scalecb.currentIndex()}

    @staticmethod
    def createinstance(params):
        """Create a `_Scaling` preprocessor from `params`, mapping the
        identifiers to the corresponding statistic functions."""
        center = params.get("center", Scale.CenterMean)
        scale = params.get("scale", Scale.ScaleBySD)

        if center == Scale.NoCentering:
            center = None
        elif center == Scale.CenterMean:
            center = _Scaling.mean
        elif center == Scale.CenterMedian:
            center = _Scaling.median
        else:
            assert False

        if scale == Scale.NoScaling:
            scale = None
        elif scale == Scale.ScaleBySD:
            scale = _Scaling.std
        elif scale == Scale.ScaleBySpan:
            scale = _Scaling.span
        else:
            assert False

        return _Scaling(center=center, scale=scale)
# This is intended for future improvements,
# i.e. it should be possible to add/register preprocessor actions
# through entry points (for use by add-ons). Maybe it should be a
# general framework; this is not the only place where such
# functionality is desired (for instance, the Orange v2.* Rank widget
# already defines its own entry point).
class Description(object):
    """
    A description of an action/function.

    Only `title` is required; every other field is optional metadata
    (icon path, summary text, input/output descriptions, ...).
    """
    def __init__(self, title, icon=None, summary=None, input=None, output=None,
                 requires=None, note=None, related=None, keywords=None,
                 helptopic=None):
        # Store every constructor argument as a same-named attribute.
        self.title, self.icon, self.summary = title, icon, summary
        self.input, self.output = input, output
        self.requires, self.note = requires, note
        self.related, self.keywords = related, keywords
        self.helptopic = helptopic
class PreprocessAction(object):
    """Bundle a preprocessor's identity (name, qualified name, category,
    description) with the editor widget class used to configure it."""
    def __init__(self, name, qualname, category, description, viewclass):
        # Plain value object: keep all arguments verbatim.
        self.name, self.qualname = name, qualname
        self.category = category
        self.description, self.viewclass = description, viewclass
def icon_path(basename):
    """Return the filesystem path of the packaged icon named `basename`."""
    # Resource paths always use forward slashes, regardless of platform.
    resource = "icons/" + basename
    return pkg_resources.resource_filename(__name__, resource)
# Registry of all preprocessor actions offered in the widget's source
# list (the drag sources). Each entry pairs a Description with the
# editor widget class used to configure that preprocessor.
PREPROCESSORS = [
    PreprocessAction(
        "Discretize", "orange.preprocess.discretize", "Discretization",
        Description("Discretize Continuous Variables",
                    icon_path("Discretize.svg")),
        DiscretizeEditor
    ),
    PreprocessAction(
        "Continuize", "orange.preprocess.continuize", "Continuization",
        Description("Continuize Discrete Variables",
                    icon_path("Continuize.svg")),
        ContinuizeEditor
    ),
    PreprocessAction(
        "Impute", "orange.preprocess.impute", "Impute",
        Description("Impute Missing Values",
                    icon_path("Impute.svg")),
        ImputeEditor
    ),
    PreprocessAction(
        "Feature Selection", "orange.preprocess.fss", "Feature Selection",
        Description("Select Relevant Features",
                    icon_path("SelectColumns.svg")),
        FeatureSelectEditor
    ),
    PreprocessAction(
        # NOTE(review): reuses the Continuize icon — confirm intentional.
        "Normalize", "orange.preprocess.scale", "Scaling",
        Description("Center and Scale Features",
                    icon_path("Continuize.svg")),
        Scale
    )
]
# TODO: Extend with entry points here
# PREPROCESSORS += iter_entry_points("Orange.widgets.data.owpreprocess")
# ####
# The actual owwidget (with helper classes)
# ####
# Note:
# The preprocessors are drag/dropped onto a sequence widget, where
# they can be reordered/removed/edited.
#
# Model <-> Adapter/Controller <-> View
#
# * `Model`: the current constructed preprocessor model.
# * the source (of drag/drop) is an item model displayed in a list
#   view (source list).
# * the drag/drop is controlled by the controller/adapter.
#: Qt.ItemRole holding the PreprocessAction instance
DescriptionRole = Qt.UserRole
#: Qt.ItemRole storing the preprocess parameters
ParametersRole = Qt.UserRole + 1
class Controller(QObject):
    """
    Controller for displaying/editing a QAbstractItemModel using SequenceFlow.

    It creates/deletes/updates the widgets in the view when the model
    changes, as well as interprets drop events (with appropriate mime data)
    onto the view, modifying the model appropriately.

    Parameters
    ----------
    view : SequenceFlow
        The view to control (required).
    model : QAbstractItemModel
        A list model
    parent : QObject
        The controller's parent.
    """
    MimeType = "application/x-qwidget-ref"

    def __init__(self, view, model=None, parent=None):
        super().__init__(parent)
        self._model = None

        self.view = view
        # Intercept the view's drag/drop events (see eventFilter).
        view.installEventFilter(self)
        view.widgetCloseRequested.connect(self._closeRequested)
        view.widgetMoved.connect(self._widgetMoved)

        # gruesome
        # Reach into the view's name-mangled private helpers.
        self._setDropIndicatorAt = view._SequenceFlow__setDropIndicatorAt
        self._insertIndexAt = view._SequenceFlow__insertIndexAt

        if model is not None:
            self.setModel(model)

    def __connect(self, model):
        # Subscribe to all model change notifications we mirror in the view.
        model.dataChanged.connect(self._dataChanged)
        model.rowsInserted.connect(self._rowsInserted)
        model.rowsRemoved.connect(self._rowsRemoved)
        model.rowsMoved.connect(self._rowsMoved)

    def __disconnect(self, model):
        model.dataChanged.disconnect(self._dataChanged)
        model.rowsInserted.disconnect(self._rowsInserted)
        model.rowsRemoved.disconnect(self._rowsRemoved)
        model.rowsMoved.disconnect(self._rowsMoved)

    def setModel(self, model):
        """Set the model for the view.

        :type model: QAbstractItemModel.
        """
        if self._model is model:
            return

        if self._model is not None:
            self.__disconnect(self._model)
        self._clear()

        self._model = model

        if self._model is not None:
            self._initialize(model)
            self.__connect(model)

    def model(self):
        """Return the model.
        """
        return self._model

    def _initialize(self, model):
        # Populate the view with one widget per model row.
        for i in range(model.rowCount()):
            index = model.index(i, 0)
            self._insertWidgetFor(i, index)

    def _clear(self):
        self.view.clear()

    def dragEnterEvent(self, event):
        """Accept drags carrying our mime type; return True when handled."""
        if event.mimeData().hasFormat(self.MimeType) and \
                self.model() is not None:
            event.setDropAction(Qt.CopyAction)
            event.accept()
            return True
        else:
            return False

    def dragMoveEvent(self, event):
        """Track the drag position with the view's drop indicator."""
        if event.mimeData().hasFormat(self.MimeType) and \
                self.model() is not None:
            event.accept()
            self._setDropIndicatorAt(event.pos())
            return True
        else:
            return False

    def dragLeaveEvent(self, event):
        return False
        # TODO: Remember if we have seen enter with the proper data
        # (leave event does not have mimeData)
        # if event.mimeData().hasFormat(self.MimeType) and \
        #         event.proposedAction() == Qt.CopyAction:
        #     event.accept()
        #     self._setDropIndicatorAt(None)
        #     return True
        # else:
        #     return False

    def dropEvent(self, event):
        """Translate a drop into a model.dropMimeData insertion."""
        if event.mimeData().hasFormat(self.MimeType) and \
                self.model() is not None:
            # Create and insert appropriate widget.
            self._setDropIndicatorAt(None)
            row = self._insertIndexAt(event.pos())
            model = self.model()

            diddrop = model.dropMimeData(
                event.mimeData(), Qt.CopyAction, row, 0, QModelIndex())

            if diddrop:
                event.accept()
            return True
        else:
            return False

    def eventFilter(self, view, event):
        # Route the view's drag/drop events through our handlers.
        if view is not self.view:
            return False

        if event.type() == QEvent.DragEnter:
            return self.dragEnterEvent(event)
        elif event.type() == QEvent.DragMove:
            return self.dragMoveEvent(event)
        elif event.type() == QEvent.DragLeave:
            return self.dragLeaveEvent(event)
        elif event.type() == QEvent.Drop:
            return self.dropEvent(event)
        else:
            return super().eventFilter(view, event)

    def _dataChanged(self, topleft, bottomright):
        # Refresh the widgets for every changed row (single column model).
        model = self.model()
        widgets = self.view.widgets()

        top, left = topleft.row(), topleft.column()
        bottom, right = bottomright.row(), bottomright.column()
        assert left == 0 and right == 0

        for row in range(top, bottom + 1):
            self.setWidgetData(widgets[row], model.index(row, 0))

    def _rowsInserted(self, parent, start, end):
        model = self.model()
        for row in range(start, end + 1):
            index = model.index(row, 0, parent)
            self._insertWidgetFor(row, index)

    def _rowsRemoved(self, parent, start, end):
        # Remove back-to-front so earlier row indices stay valid.
        for row in reversed(range(start, end + 1)):
            self._removeWidgetFor(row, None)

    def _rowsMoved(self, srcparetn, srcstart, srcend,
                   dstparent, dststart, dstend):
        raise NotImplementedError

    def _closeRequested(self, row):
        # The user clicked a frame's close button; drop the model row.
        model = self.model()
        assert 0 <= row < model.rowCount()
        model.removeRows(row, 1, QModelIndex())

    def _widgetMoved(self, from_, to):
        # The widgets in the view were already swapped, so
        # we must disconnect from the model when moving the rows.
        # It would be better if this class would also filter and
        # handle internal widget moves.
        model = self.model()
        self.__disconnect(model)
        try:
            model.moveRow
        except AttributeError:
            # Fallback for models without moveRow: remove + re-insert,
            # preserving the row's item data.
            data = model.itemData(model.index(from_, 0))
            removed = model.removeRow(from_, QModelIndex())
            inserted = model.insertRow(to, QModelIndex())
            model.setItemData(model.index(to, 0), data)
            assert model.rowCount() == len(self.view.widgets())
        else:
            model.moveRow(QModelIndex(), from_, QModelIndex(), to)
        finally:
            self.__connect(model)

    def _insertWidgetFor(self, row, index):
        widget = self.createWidgetFor(index)
        self.view.insertWidget(row, widget, title=index.data(Qt.DisplayRole))
        self.view.setIcon(row, index.data(Qt.DecorationRole))
        self.setWidgetData(widget, index)
        widget.edited.connect(self.__edited)

    def _removeWidgetFor(self, row, index):
        widget = self.view.widgets()[row]
        self.view.removeWidget(widget)
        widget.edited.disconnect(self.__edited)
        widget.deleteLater()

    def createWidgetFor(self, index):
        """
        Create a QWidget instance for the index (:class:`QModelIndex`)
        """
        definition = index.data(DescriptionRole)
        widget = definition.viewclass()
        return widget

    def setWidgetData(self, widget, index):
        """
        Set/update the widget state from the model at index.
        """
        params = index.data(ParametersRole)
        if not isinstance(params, dict):
            params = {}
        widget.setParameters(params)

    def setModelData(self, widget, index):
        """
        Get the data from the widget state and set/update the model at index.
        """
        params = widget.parameters()
        assert isinstance(params, dict)
        self._model.setData(index, params, ParametersRole)

    @Slot()
    def __edited(self,):
        # A view widget was edited by the user; push its state to the model.
        widget = self.sender()
        row = self.view.indexOf(widget)
        index = self.model().index(row, 0)
        self.setModelData(widget, index)
class SequenceFlow(QWidget):
    """
    A re-orderable list of widgets.

    Each child widget is wrapped in a dock-widget-style Frame with a
    title bar that supports dragging (to reorder) and closing.
    """
    #: Emitted when the user clicks the Close button in the header
    widgetCloseRequested = Signal(int)
    #: Emitted when the user moves/drags a widget to a new location.
    widgetMoved = Signal(int, int)

    class Frame(QtGui.QDockWidget):
        """
        Widget frame with a handle.
        """
        closeRequested = Signal()

        def __init__(self, parent=None, widget=None, title=None, **kwargs):
            super().__init__(parent, **kwargs)
            self.setFeatures(QtGui.QDockWidget.DockWidgetClosable)
            # Never actually dockable; used purely as a framed container.
            self.setAllowedAreas(Qt.NoDockWidgetArea)
            self.__title = ""
            self.__icon = ""
            self.__focusframe = None

            if widget is not None:
                self.setWidget(widget)

            self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
            if title:
                self.setTitle(title)

            self.setFocusPolicy(Qt.ClickFocus | Qt.TabFocus)

        def setTitle(self, title):
            if self.__title != title:
                self.__title = title
                self.setWindowTitle(title)
                self.update()

        def setIcon(self, icon):
            icon = QIcon(icon)
            if self.__icon != icon:
                self.__icon = icon
                self.setWindowIcon(icon)
                self.update()

        def paintEvent(self, event):
            # Draw the dock-widget frame primitive beneath the contents.
            painter = QStylePainter(self)
            opt = QStyleOptionFrame()
            opt.init(self)
            painter.drawPrimitive(QStyle.PE_FrameDockWidget, opt)
            painter.end()
            super().paintEvent(event)

        def focusInEvent(self, event):
            # Show a focus frame around the whole widget while focused.
            event.accept()
            self.__focusframe = QtGui.QFocusFrame(self)
            self.__focusframe.setWidget(self)

        def focusOutEvent(self, event):
            event.accept()
            self.__focusframe.deleteLater()
            self.__focusframe = None

        def closeEvent(self, event):
            # Veto the close; the owner decides what closing means.
            super().closeEvent(event)
            event.ignore()
            self.closeRequested.emit()

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        # Spacer item shown at the prospective drop position during drags.
        self.__dropindicator = QSpacerItem(
            16, 16, QSizePolicy.Expanding, QSizePolicy.Fixed
        )
        # (frame, index, press-position) of an in-progress internal drag.
        self.__dragstart = (None, None, None)

        layout = QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        self.__flowlayout = QVBoxLayout()
        layout.addLayout(self.__flowlayout)
        layout.addSpacerItem(
            QSpacerItem(1, 1, QSizePolicy.Expanding, QSizePolicy.Expanding))

        self.setLayout(layout)
        self.setAcceptDrops(True)

    def sizeHint(self):
        """Reimplemented."""
        if self.widgets():
            return super().sizeHint()
        else:
            # Reserve a visible drop target area even when empty.
            return QSize(150, 100)

    def addWidget(self, widget, title):
        """Add `widget` with `title` to list of widgets (in the last position).

        Parameters
        ----------
        widget : QWidget
            Widget instance.
        title : str
            Widget title.
        """
        index = len(self.widgets())
        self.insertWidget(index, widget, title)

    def insertWidget(self, index, widget, title):
        """Insert `widget` with `title` at `index`.

        Parameters
        ----------
        index : int
            Position at which the widget should be inserted.
        widget : QWidget
            Widget instance.
        title : str
            Widget title.
        """
        # TODO: Check if widget is already inserted.
        frame = SequenceFlow.Frame(widget=widget, title=title)
        frame.closeRequested.connect(self.__closeRequested)

        layout = self.__flowlayout

        # Map the logical index to a layout index (the layout may also
        # contain non-frame items such as the drop indicator spacer).
        frames = [item.widget() for item in self.layout_iter(layout)
                  if item.widget()]

        if 0 < index < len(frames):
            # find the layout index of a widget occupying the current
            # index'th slot.
            insert_index = layout.indexOf(frames[index])
        elif index == 0:
            insert_index = 0
        elif index < 0 or index >= len(frames):
            insert_index = layout.count()
        else:
            assert False

        layout.insertWidget(insert_index, frame)
        frame.installEventFilter(self)

    def removeWidget(self, widget):
        """Remove widget from the list.

        Parameters
        ----------
        widget : QWidget
            Widget instance to remove.
        """
        layout = self.__flowlayout
        frame = self.__widgetFrame(widget)
        if frame is not None:
            # Detach the widget from its frame before discarding the frame.
            frame.setWidget(None)
            widget.setVisible(False)
            widget.setParent(None)
            layout.takeAt(layout.indexOf(frame))
            frame.hide()
            frame.deleteLater()

    def clear(self):
        """Clear the list (remove all widgets).
        """
        for w in reversed(self.widgets()):
            self.removeWidget(w)

    def widgets(self):
        """Return a list of all `widgets`.
        """
        layout = self.__flowlayout
        items = (layout.itemAt(i) for i in range(layout.count()))
        # item.widget() is the Frame; .widget() again is the user's widget.
        return [item.widget().widget()
                for item in items if item.widget() is not None]

    def indexOf(self, widget):
        """Return the index (logical position) of `widget`
        """
        widgets = self.widgets()
        return widgets.index(widget)

    def setTitle(self, index, title):
        """Set title for `widget` at `index`.
        """
        widget = self.widgets()[index]
        frame = self.__widgetFrame(widget)
        frame.setTitle(title)

    def setIcon(self, index, icon):
        widget = self.widgets()[index]
        frame = self.__widgetFrame(widget)
        frame.setIcon(icon)

    def dropEvent(self, event):
        """Reimplemented."""
        layout = self.__flowlayout
        index = self.__insertIndexAt(self.mapFromGlobal(QCursor.pos()))

        if event.mimeData().hasFormat("application/x-internal-move") and \
                event.source() is self:
            # Complete the internal move
            frame, oldindex, _ = self.__dragstart
            # Remove the drop indicator spacer item before re-inserting
            # the frame
            self.__setDropIndicatorAt(None)

            if oldindex != index:
                layout.insertWidget(index, frame)
                if index > oldindex:
                    self.widgetMoved.emit(oldindex, index - 1)
                else:
                    self.widgetMoved.emit(oldindex, index)
                event.accept()

            self.__dragstart = None, None, None

    def dragEnterEvent(self, event):
        """Reimplemented."""
        if event.mimeData().hasFormat("application/x-internal-move") and \
                event.source() is self:
            assert self.__dragstart[0] is not None
            event.acceptProposedAction()

    def dragMoveEvent(self, event):
        """Reimplemented."""
        pos = self.mapFromGlobal(QCursor.pos())
        self.__setDropIndicatorAt(pos)

    def dragLeaveEvent(self, event):
        """Reimplemented."""
        self.__setDropIndicatorAt(None)

    def eventFilter(self, obj, event):
        """Reimplemented."""
        if isinstance(obj, SequenceFlow.Frame) and obj.parent() is self:
            etype = event.type()
            if etype == QEvent.MouseButtonPress and \
                    event.button() == Qt.LeftButton:
                # Is the mouse press on the dock title bar
                # (assume everything above obj.widget is a title bar)
                # TODO: Get the proper title bar geometry.
                if event.pos().y() < obj.widget().y():
                    index = self.indexOf(obj.widget())
                    self.__dragstart = (obj, index, event.pos())
            elif etype == QEvent.MouseMove and \
                    event.buttons() & Qt.LeftButton and \
                    obj is self.__dragstart[0]:
                _, _, down = self.__dragstart
                # Start a drag only after exceeding the platform threshold.
                if (down - event.pos()).manhattanLength() >= \
                        QApplication.startDragDistance():
                    self.__startInternalDrag(obj, event.pos())
                    self.__dragstart = None, None, None
                    return True
            elif etype == QEvent.MouseButtonRelease and \
                    event.button() == Qt.LeftButton and \
                    self.__dragstart[0] is obj:
                self.__dragstart = None, None, None

        return super().eventFilter(obj, event)

    def __setDropIndicatorAt(self, pos):
        # find the index where drop at pos would insert.
        index = -1
        layout = self.__flowlayout
        if pos is not None:
            index = self.__insertIndexAt(pos)
        spacer = self.__dropindicator
        currentindex = self.layout_index_of(layout, spacer)

        if currentindex != -1:
            # Remove the spacer and adjust the target index accordingly.
            item = layout.takeAt(currentindex)
            assert item is spacer
            if currentindex < index:
                index -= 1

        if index != -1:
            layout.insertItem(index, spacer)

    def __insertIndexAt(self, pos):
        # Insertion index: count of frames whose vertical midpoint is
        # above pos.y().
        y = pos.y()
        midpoints = [item.widget().geometry().center().y()
                     for item in self.layout_iter(self.__flowlayout)
                     if item.widget() is not None]
        index = bisect.bisect_left(midpoints, y)
        return index

    def __startInternalDrag(self, frame, hotSpot=None):
        drag = QDrag(self)
        pixmap = QPixmap(frame.size())
        frame.render(pixmap)

        # Use a semi-transparent snapshot of the frame as the drag pixmap.
        transparent = QPixmap(pixmap.size())
        transparent.fill(Qt.transparent)
        painter = QtGui.QPainter(transparent)
        painter.setOpacity(0.35)
        painter.drawPixmap(0, 0, pixmap.width(), pixmap.height(), pixmap)
        painter.end()

        drag.setPixmap(transparent)
        if hotSpot is not None:
            drag.setHotSpot(hotSpot)
        mime = QMimeData()
        mime.setData("application/x-internal-move", "")
        drag.setMimeData(mime)
        return drag.exec_(Qt.MoveAction)

    def __widgetFrame(self, widget):
        # Return the Frame wrapping `widget`, or None if not found.
        layout = self.__flowlayout
        for item in self.layout_iter(layout):
            if item.widget() is not None and \
                    isinstance(item.widget(), SequenceFlow.Frame) and \
                    item.widget().widget() is widget:
                return item.widget()
        else:
            return None

    def __closeRequested(self):
        frame = self.sender()
        index = self.indexOf(frame.widget())
        self.widgetCloseRequested.emit(index)

    @staticmethod
    def layout_iter(layout):
        # Iterate over all items in a QLayout.
        return (layout.itemAt(i) for i in range(layout.count()))

    @staticmethod
    def layout_index_of(layout, item):
        # Linear search for `item` in `layout`; -1 if absent.
        for i, item1 in enumerate(SequenceFlow.layout_iter(layout)):
            if item == item1:
                return i
        return -1
class OWPreprocess(widget.OWWidget):
    """Orange widget for constructing a sequence of preprocessors and
    applying it to the input data."""
    name = "Preprocess"
    description = """Construct and apply data preprocessors."""
    icon = "icons/Preprocess.svg"
    priority = 2105

    inputs = [("Data", Orange.data.Table, "set_data")]
    outputs = [("Preprocessor", preprocess.preprocess.Preprocess),
               ("Preprocessed Data", Orange.data.Table)]

    # Serialized preprocessor list (see save()/load()).
    storedsettings = settings.Setting({})
    autocommit = settings.Setting(False)

    def __init__(self, parent=None):
        super().__init__(parent)

        self.data = None
        self._invalidated = False

        # List of available preprocessors (DescriptionRole : Description)
        self.preprocessors = QStandardItemModel()

        def mimeData(indexlist):
            # Encode the dragged action's qualname as mime payload.
            assert len(indexlist) == 1
            index = indexlist[0]
            qname = index.data(DescriptionRole).qualname
            m = QMimeData()
            m.setData("application/x-qwidget-ref", qname)
            return m
        # TODO: Fix this (subclass even if just to pass a function
        # for mimeData delegate)
        self.preprocessors.mimeData = mimeData

        box = gui.widgetBox(self.controlArea, "Preprocessors")

        self.preprocessorsView = view = QListView(
            selectionMode=QListView.SingleSelection,
            dragEnabled=True,
            dragDropMode=QListView.DragOnly
        )
        view.setModel(self.preprocessors)
        view.activated.connect(self.__activated)

        box.layout().addWidget(view)

        ####
        self._qname2ppdef = {ppdef.qualname: ppdef for ppdef in PREPROCESSORS}

        # List of 'selected' preprocessors and their parameters.
        self.preprocessormodel = None

        self.flow_view = SequenceFlow()
        self.controler = Controller(self.flow_view, parent=self)

        self.scroll_area = QtGui.QScrollArea()
        self.scroll_area.viewport().setAcceptDrops(True)
        self.scroll_area.setWidget(self.flow_view)
        self.scroll_area.setWidgetResizable(True)
        self.mainArea.layout().addWidget(self.scroll_area)

        ####
        box = gui.widgetBox(self.controlArea, "Output")
        gui.auto_commit(box, self, "autocommit", "Commit", box=False)

        self._initialize()

    def _initialize(self):
        # Populate the source list from the static PREPROCESSORS registry.
        for pp_def in PREPROCESSORS:
            description = pp_def.description
            if description.icon:
                icon = QIcon(description.icon)
            else:
                icon = QIcon()
            item = QStandardItem(icon, description.title)
            item.setToolTip(description.summary or "")
            item.setData(pp_def, DescriptionRole)
            item.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable |
                          Qt.ItemIsDragEnabled)
            self.preprocessors.appendRow([item])

        # Restore the stored preprocessor sequence; fall back to empty
        # on any deserialization failure.
        try:
            model = self.load(self.storedsettings)
        except Exception:
            model = self.load({})

        self.set_model(model)

    def load(self, saved):
        """Load a preprocessor list from a dict."""
        name = saved.get("name", "")
        preprocessors = saved.get("preprocessors", [])
        model = QStandardItemModel()

        def dropMimeData(data, action, row, column, parent):
            # Accept drags from the source list: create an item for the
            # dropped preprocessor action with empty parameters.
            if data.hasFormat("application/x-qwidget-ref") and \
                    action == Qt.CopyAction:
                qname = bytes(data.data("application/x-qwidget-ref")).decode()

                ppdef = self._qname2ppdef[qname]
                item = QStandardItem(ppdef.description.title)
                item.setData({}, ParametersRole)
                item.setData(ppdef.description.title, Qt.DisplayRole)
                item.setData(ppdef, DescriptionRole)
                self.preprocessormodel.insertRow(row, [item])
                return True
            else:
                return False

        model.dropMimeData = dropMimeData

        for qualname, params in preprocessors:
            pp_def = self._qname2ppdef[qualname]
            description = pp_def.description
            item = QStandardItem(description.title)
            if description.icon:
                icon = QIcon(description.icon)
            else:
                icon = QIcon()
            item.setIcon(icon)
            item.setToolTip(description.summary)
            item.setData(pp_def, DescriptionRole)
            item.setData(params, ParametersRole)

            model.appendRow(item)
        return model

    def save(self, model):
        """Save the preprocessor list to a dict."""
        d = {"name": ""}
        preprocessors = []
        for i in range(model.rowCount()):
            item = model.item(i)
            pp_def = item.data(DescriptionRole)
            params = item.data(ParametersRole)
            preprocessors.append((pp_def.qualname, params))

        d["preprocessors"] = preprocessors
        return d

    def set_model(self, ppmodel):
        """Replace the current preprocessor model, rewiring the commit
        signals and the view controller."""
        if self.preprocessormodel:
            self.preprocessormodel.dataChanged.disconnect(self.commit)
            self.preprocessormodel.rowsInserted.disconnect(self.commit)
            self.preprocessormodel.rowsRemoved.disconnect(self.commit)
            self.preprocessormodel.deleteLater()

        self.preprocessormodel = ppmodel
        self.controler.setModel(ppmodel)
        if ppmodel is not None:
            self.preprocessormodel.dataChanged.connect(self.commit)
            self.preprocessormodel.rowsInserted.connect(self.commit)
            self.preprocessormodel.rowsRemoved.connect(self.commit)

    def set_data(self, data=None):
        """Set the input data set."""
        self.data = data

    def handleNewSignals(self):
        self.apply()

    def __activated(self, index):
        # Double-click/activate in the source list appends the action.
        item = self.preprocessors.itemFromIndex(index)
        action = item.data(DescriptionRole)
        item = QStandardItem()
        item.setData({}, ParametersRole)
        item.setData(action.description.title, Qt.DisplayRole)
        item.setData(action, DescriptionRole)
        self.preprocessormodel.appendRow([item])

    def buildpreproc(self):
        """Build the composite preprocessor from the current model."""
        plist = []
        for i in range(self.preprocessormodel.rowCount()):
            item = self.preprocessormodel.item(i)
            desc = item.data(DescriptionRole)
            params = item.data(ParametersRole)

            if not isinstance(params, dict):
                params = {}

            create = desc.viewclass.createinstance
            plist.append(create(params))

        if len(plist) == 1:
            return plist[0]
        else:
            return preprocess.preprocess.PreprocessorList(plist)

    def apply(self):
        """Apply the preprocessor sequence and send both outputs."""
        preprocessor = self.buildpreproc()

        if self.data is not None:
            data = preprocessor(self.data)
        else:
            data = None

        self.send("Preprocessor", preprocessor)
        self.send("Preprocessed Data", data)

    def commit(self):
        # Sync the model into storedsettings on every change commit.
        self.storeSpecificSettings()
        # Coalesce multiple change notifications into one deferred apply.
        if not self._invalidated:
            self._invalidated = True
            QApplication.postEvent(self, QEvent(QEvent.User))

    def customEvent(self, event):
        if self._invalidated:
            self._invalidated = False
            self.apply()

    def storeSpecificSettings(self):
        """Reimplemented."""
        self.storedsettings = self.save(self.preprocessormodel)
        super().storeSpecificSettings()

    def saveSettings(self):
        """Reimplemented."""
        self.storedsettings = self.save(self.preprocessormodel)
        super().saveSettings()

    def onDeleteWidget(self):
        self.data = None
        self.set_model(None)
        super().onDeleteWidget()
def test_main():
    """Run the widget standalone (manual testing); return the Qt exit code."""
    app = QtGui.QApplication(sys.argv)
    w = OWPreprocess()
    w.set_data(Orange.data.Table("brown-selected"))
    w.show()
    w.raise_()
    r = app.exec_()
    w.saveSettings()
    w.onDeleteWidget()
    return r


if __name__ == "__main__":
    sys.exit(test_main())
|
|
#!/usr/bin/python
# Copyright (c) 2009, Purdue University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# Neither the name of the Purdue University nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Regression test for dnsmassadd
Make sure you are running this against a database that can be destroyed.
DO NOT EVER RUN THIS TEST AGAINST A PRODUCTION DATABASE.
"""
__copyright__ = 'Copyright (C) 2009, Purdue University'
__license__ = 'BSD'
__version__ = '#TRUNK#'
import os
import sys
import socket
import subprocess
from subprocess import Popen, PIPE, STDOUT
import threading
import time
import getpass
import unittest
sys.path.append('../')
import roster_core
from roster_user_tools import roster_client_lib
import roster_server
# Fixture paths and credentials shared by the tests below.  All of the
# test_data files are expected to live relative to the test's working dir.
USER_CONFIG = 'test_data/roster_user_tools.conf'
CONFIG_FILE = 'test_data/roster.conf' # Example in test_data
DATA_FILE = 'test_data/test_data.sql'      # schema + seed rows loaded in setUp
TEST_FILE = 'test_data/test_massadd'       # ip/hostname pairs fed to dnsmassadd
HOST = u'localhost'
USERNAME = u'sharrell'
PASSWORD = u'test'
# TLS material for the throw-away XML-RPC server started by DaemonThread.
KEYFILE=('test_data/dnsmgmt.key.pem')
CERTFILE=('test_data/dnsmgmt.cert.pem')
# Credential cache written by roster_client_lib; removed again in tearDown.
CREDFILE='%s/.dnscred' % os.getcwd()
# The script under test, invoked as a subprocess by every test.
EXEC='../roster-user-tools/scripts/dnsmassadd'
class options(object):
  """Stand-in for an optparse options object.

  NOTE(review): nothing in this file references this class; it appears to be
  kept for parity with the other roster user-tool regression tests — confirm
  before removing.
  """
  password = u'test'       # password presented to the test server
  username = u'sharrell'   # user the records are created as
  server = None            # filled in per-test with the picked server URL
  ldap = u'ldaps://ldap.cs.university.edu:636'
  credfile = CREDFILE      # credential cache path (module constant above)
  view_name = None
  ip_address = None
  target = u'machine1'
  ttl = 64
class DaemonThread(threading.Thread):
  """Background thread that serves a roster server for the duration of a test.

  The server instance is created inside run(), i.e. only after the thread has
  been started; until then daemon_instance is None.
  """

  def __init__(self, config_instance, port):
    """Stores the server configuration and the port to listen on."""
    super(DaemonThread, self).__init__()
    self.daemon_instance = None  # created lazily in run()
    self.port = port
    self.config_instance = config_instance

  def run(self):
    """Builds the TLS-enabled roster server and serves until killed."""
    server = roster_server.Server(self.config_instance, KEYFILE, CERTFILE)
    self.daemon_instance = server
    server.Serve(port=self.port)
class TestDnsMassAdd(unittest.TestCase):
def setUp(self):
def PickUnusedPort():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, 0))
addr, port = s.getsockname()
s.close()
return port
self.config_instance = roster_core.Config(file_name=CONFIG_FILE)
db_instance = self.config_instance.GetDb()
db_instance.CreateRosterDatabase()
data = open(DATA_FILE, 'r').read()
db_instance.StartTransaction()
db_instance.cursor.execute(data)
db_instance.EndTransaction()
db_instance.close()
self.port = PickUnusedPort()
self.server_name = 'https://%s:%s' % (HOST, self.port)
self.daemon_thread = DaemonThread(self.config_instance, self.port)
self.daemon_thread.daemon = True
self.daemon_thread.start()
self.core_instance = roster_core.Core(USERNAME, self.config_instance)
self.password = 'test'
time.sleep(1)
roster_client_lib.GetCredentials(USERNAME, u'test', credfile=CREDFILE,
server_name=self.server_name)
self.core_instance.MakeView(u'test_view')
self.core_instance.MakeView(u'test_view2')
self.core_instance.MakeView(u'test_view3')
self.core_instance.MakeZone(u'reverse_zone', u'master',
u'1.168.192.in-addr.arpa.',
view_name=u'test_view')
self.core_instance.MakeZone(u'forward_zone', u'master',
u'university.edu.',
view_name=u'test_view')
self.core_instance.MakeZone(u'forward_zone', u'master',
u'university.edu.',
view_name=u'test_view3')
self.core_instance.MakeZone(u'foward_zone_ipv6', u'master',
u'university2.edu.',
view_name=u'test_view')
self.core_instance.MakeZone(
u'reverse_zone_ipv6', u'master',
u'0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.'
'0.0.0.1.2.3.4.ip6.arpa.', view_name=u'test_view')
self.core_instance.MakeReverseRangeZoneAssignment(u'reverse_zone_ipv6',
u'4321::/32')
self.core_instance.MakeZone(u'reverse_zone', u'master',
u'1.168.192.in-addr.arpa.',
view_name=u'test_view2')
self.core_instance.MakeReverseRangeZoneAssignment(u'reverse_zone',
u'192.168.1/24')
self.core_instance.MakeRecord(
u'soa', u'@', u'forward_zone',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view')
self.core_instance.MakeRecord(
u'soa', u'@', u'forward_zone',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view3')
self.core_instance.MakeRecord(
u'soa', u'@', u'reverse_zone',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view')
self.core_instance.MakeRecord(
u'soa', u'@', u'reverse_zone',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view2')
self.core_instance.MakeRecord(
u'soa', u'@', u'foward_zone_ipv6',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view')
self.core_instance.MakeRecord(
u'soa', u'@', u'reverse_zone_ipv6',
{u'name_server': u'ns1.university.edu.',
u'admin_email': u'admin.university.edu.',
u'serial_number': 1, u'refresh_seconds': 5,
u'retry_seconds': 5, u'expiry_seconds': 5,
u'minimum_seconds': 5}, view_name=u'test_view')
self.core_instance.MakeRecord(
u'aaaa', u'host2', u'foward_zone_ipv6', {u'assignment_ip':
u'4321:0000:0001:0002:0003:0004:0567:89ab'}, view_name=u'test_view')
self.core_instance.MakeRecord(u'a', u'host3', u'forward_zone',
{u'assignment_ip': u'192.168.1.5'},
view_name=u'test_view')
self.core_instance.MakeRecord(u'a', u'www.host3', u'forward_zone',
{u'assignment_ip': u'192.168.1.5'},
view_name=u'test_view')
self.core_instance.MakeRecord(u'ptr', u'5',
u'reverse_zone',
{u'assignment_host': u'host2.university.edu.'},
view_name=u'test_view')
self.core_instance.MakeRecord(u'ptr', u'4',
u'reverse_zone',
{u'assignment_host': u'host3.university.edu.'},
view_name=u'test_view')
def tearDown(self):
if( os.path.exists(CREDFILE) ):
os.remove(CREDFILE)
def testMassAddNoCommitFlag(self):
## Check initial records
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 2, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{'target': u'host2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'www.host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host2.university.edu.'},
{'target': u'4', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host3.university.edu.'}])
## Get test_file
handle = open(TEST_FILE, 'r')
try:
file_contents = handle.read()
finally:
handle.close()
self.assertEqual(file_contents,
'192.168.1.5 computer1\n'
'4321::1:2:3:4:567:89ab computer2\n'
'4321::1:2:3:4:567:89ac computer3\n')
## Run script against running database with no commit flag, and then
## simulating the user typing n when dnsmassadd prompts y or n to commit.
command = Popen(['python', EXEC, '-v', 'test_view', '-z', 'forward_zone',
'-f', TEST_FILE, '-s', self.server_name, '-u', USERNAME,
'-p', PASSWORD, '--config-file', USER_CONFIG],
stdout=PIPE, stdin=PIPE, stderr=STDOUT)
## Check output of replaced hosts
self.assertEqual(command.communicate(input='n')[0],
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'Do you want to commit these changes? (y/N): No changes made.\n')
## Ensure nothing got changed
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 2, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{'target': u'host2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'www.host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host2.university.edu.'},
{'target': u'4', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host3.university.edu.'}])
## Run script against running database with no commit flag, and then
## simulating the user typing y when dnsmassadd prompts y or n to commit.
command = Popen(['python', EXEC, '-v', 'test_view', '-z', 'forward_zone',
'-f', TEST_FILE, '-s', self.server_name, '-u', USERNAME,
'-p', PASSWORD, '--config-file', USER_CONFIG],
stdout=PIPE, stdin=PIPE, stderr=STDOUT)
## Check output of replaced hosts
self.assertEqual(command.communicate(input='y')[0],
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'Do you want to commit these changes? (y/N): '
'Records Committed.\n')
## Check output of replaced hosts
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell', 'zone_name':
u'foward_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5}, {u'serial_number': 3, u'refresh_seconds': 5,
'target': u'@', u'name_server': u'ns1.university.edu.',
u'retry_seconds': 5, 'ttl': 3600, u'minimum_seconds': 5,
'record_type': u'soa', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'reverse_zone_ipv6', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5}, {'target': u'4',
'ttl': 3600, 'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone',
u'assignment_host': u'host3.university.edu.'}, {'target':
u'computer1', 'ttl': 3600, 'record_type': u'a', 'view_name':
u'test_view', 'last_user': u'sharrell', 'zone_name':
u'forward_zone', u'assignment_ip': u'192.168.1.5'}, {'target':
u'5', 'ttl': 3600, 'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone',
u'assignment_host': u'computer1.university.edu.'}, {'target':
u'computer2', 'ttl': 3600, 'record_type': u'aaaa', 'view_name':
u'test_view', 'last_user': u'sharrell', 'zone_name': u'forward_zone',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'reverse_zone_ipv6', u'assignment_host':
u'computer2.university.edu.'}, {'target': u'computer3', 'ttl': 3600,
'record_type': u'aaaa', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'forward_zone', u'assignment_ip':
u'4321:0000:0001:0002:0003:0004:0567:89ac'},
{'target': u'c.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer3.university.edu.'}])
def testMassAdd(self):
## Check initial records
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 2, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{'target': u'host2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'www.host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host2.university.edu.'},
{'target': u'4', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host3.university.edu.'}])
## Get test_file
handle = open(TEST_FILE, 'r')
try:
file_contents = handle.read()
finally:
handle.close()
self.assertEqual(file_contents,
'192.168.1.5 computer1\n'
'4321::1:2:3:4:567:89ab computer2\n'
'4321::1:2:3:4:567:89ac computer3\n')
## Run script against running database with no-commit flag
command = os.popen(('python %s -v %s -z %s --no-commit '
'-f %s -s %s -u %s -p %s --config-file %s' % (
EXEC, 'test_view', 'forward_zone', TEST_FILE,
self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
## Check output of replaced hosts
self.assertEqual(command.read(), (
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'No changes made.\n'))
command.close()
## Ensure nothing got changed
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 4, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 2, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{'target': u'host2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'foward_zone_ipv6',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'www.host3', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host2.university.edu.'},
{'target': u'4', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host3.university.edu.'}])
## Run script against running database
command = os.popen(('python %s -v %s -z %s --commit '
'-f %s -s %s -u %s -p %s --config-file %s' % (
EXEC, u'test_view', u'forward_zone', TEST_FILE,
self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
self.assertEqual(command.read(), (
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'Records Committed.\n'))
command.close()
## Check output of replaced hosts
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell', 'zone_name':
u'foward_zone_ipv6', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5}, {u'serial_number': 3, u'refresh_seconds': 5,
'target': u'@', u'name_server': u'ns1.university.edu.',
u'retry_seconds': 5, 'ttl': 3600, u'minimum_seconds': 5,
'record_type': u'soa', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'reverse_zone_ipv6', u'admin_email':
u'admin.university.edu.', u'expiry_seconds': 5}, {'target': u'4',
'ttl': 3600, 'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone',
u'assignment_host': u'host3.university.edu.'}, {'target':
u'computer1', 'ttl': 3600, 'record_type': u'a', 'view_name':
u'test_view', 'last_user': u'sharrell', 'zone_name':
u'forward_zone', u'assignment_ip': u'192.168.1.5'}, {'target':
u'5', 'ttl': 3600, 'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone',
u'assignment_host': u'computer1.university.edu.'}, {'target':
u'computer2', 'ttl': 3600, 'record_type': u'aaaa', 'view_name':
u'test_view', 'last_user': u'sharrell', 'zone_name': u'forward_zone',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'reverse_zone_ipv6', u'assignment_host':
u'computer2.university.edu.'}, {'target': u'computer3', 'ttl': 3600,
'record_type': u'aaaa', 'view_name': u'test_view', 'last_user':
u'sharrell', 'zone_name': u'forward_zone', u'assignment_ip':
u'4321:0000:0001:0002:0003:0004:0567:89ac'},
{'target': u'c.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer3.university.edu.'}])
## Run script against running database, but fail
command = os.popen(('python %s -v %s -z %s --commit '
'-f %s -s %s -u %s -p %s --config-file %s' % (
EXEC, u'test_view', u'forward_zone', TEST_FILE,
self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
lines = command.read()
self.assertTrue(
'SERVER ERROR:' in lines and
'Duplicate record found' in lines)
## Check output of a failed run, make sure nothing changed.
self.assertEqual(
self.core_instance.ListRecords(view_name=u'test_view'),
[{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5,
'ttl': 3600, u'minimum_seconds': 5, 'record_type': u'soa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'admin_email': u'admin.university.edu.',
u'expiry_seconds': 5},
{u'serial_number': 5, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5, 'ttl': 3600,
u'minimum_seconds': 5, 'record_type': u'soa', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5, 'ttl': 3600,
u'minimum_seconds': 5, 'record_type': u'soa', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'foward_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{u'serial_number': 3, u'refresh_seconds': 5, 'target': u'@',
u'name_server': u'ns1.university.edu.', u'retry_seconds': 5, 'ttl': 3600,
u'minimum_seconds': 5, 'record_type': u'soa', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone_ipv6',
u'admin_email': u'admin.university.edu.', u'expiry_seconds': 5},
{'target': u'4', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'host3.university.edu.'},
{'target': u'computer1', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'reverse_zone', u'assignment_host': u'computer1.university.edu.'},
{'target': u'computer2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1',
'ttl': 3600, 'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer2.university.edu.'},
{'target': u'computer3', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ac'},
{'target': u'c.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'test_view',
'last_user': u'sharrell', 'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer3.university.edu.'}])
command.close()
def testMassAddWithAny(self):
## Check initial records
self.assertEqual(
self.core_instance.ListRecords(view_name=u'any'), [])
## Get test_file
handle = open(TEST_FILE, 'r')
try:
file_contents = handle.read()
finally:
handle.close()
self.assertEqual(file_contents,
'192.168.1.5 computer1\n'
'4321::1:2:3:4:567:89ab computer2\n'
'4321::1:2:3:4:567:89ac computer3\n')
## Run script against running database with no-commit flag
command = os.popen(('python %s -v %s -z %s --no-commit '
'-f %s -s %s -u %s -p %s --config-file %s' % (
EXEC, 'any', 'forward_zone', TEST_FILE,
self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
## Check output of replaced hosts
self.assertEqual(command.read(), (
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'No changes made.\n'))
command.close()
## Ensure nothing got changed
self.assertEqual(
self.core_instance.ListRecords(view_name=u'any'), [])
## Run script against running database
command = os.popen(('python %s -v %s -z %s --commit '
'-f %s -s %s -u %s -p %s --config-file %s' % (
EXEC, u'any', u'forward_zone', TEST_FILE,
self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
self.assertEqual(command.read(),
'Host: host2.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: www.host3.university.edu with ip address 192.168.1.5 will be REMOVED\n'
'Host: host2.university2.edu with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be REMOVED\n'
'Host: computer1.university.edu. with ip address 192.168.1.5 will be ADDED\n'
'Host: computer2.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ab will be ADDED\n'
'Host: computer3.university.edu. with ip address 4321:0000:0001:0002:0003:0004:0567:89ac will be ADDED\n'
'Records Committed.\n')
command.close()
## Check output of replaced hosts
self.assertEqual(
self.core_instance.ListRecords(view_name=u'any'),
[{'target': u'computer1', 'ttl': 3600, 'record_type': u'a',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'forward_zone', u'assignment_ip': u'192.168.1.5'},
{'target': u'5', 'ttl': 3600, 'record_type': u'ptr',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'reverse_zone',
u'assignment_host': u'computer1.university.edu.'},
{'target': u'computer2', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'forward_zone',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ab'},
{'target': u'b.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer2.university.edu.'},
{'target': u'computer3', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'forward_zone',
u'assignment_ip': u'4321:0000:0001:0002:0003:0004:0567:89ac'},
{'target': u'c.a.9.8.7.6.5.0.4.0.0.0.3.0.0.0.2.0.0.0.1', 'ttl': 3600,
'record_type': u'ptr', 'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'reverse_zone_ipv6',
u'assignment_host': u'computer3.university.edu.'}])
def testMassAddErrors(self):
    """Verify required-flag validation and error paths of the mass-add CLI.

    Each sub-case shells out to the tool via os.popen and compares the
    exact error line it prints on stdout.
    """
    ## Required flags
    command = os.popen(('python %s -v %s --commit '
                        '-f %s -s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'test_view', TEST_FILE,
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual throughout.
    self.assertEqual(command.read(),
                     'CLIENT ERROR: The -z/--zone-name flag is required.\n')
    command = os.popen(('python %s -z %s --commit '
                        '-f %s -s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'forward_zone', TEST_FILE,
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    self.assertEqual(command.read(),
                     'CLIENT ERROR: The -v/--view-name flag is required.\n')
    command = os.popen(('python %s -v %s -z %s --commit '
                        '-s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'test_view', u'forward_zone',
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    self.assertEqual(command.read(),
                     'CLIENT ERROR: The -f/--file flag is required.\n')
    ## Errors
    command = os.popen(('python %s -v %s -z %s --commit '
                        '-f %s -s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'bad_view', u'forward_zone', TEST_FILE,
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    self.assertEqual(command.read(),
                     'CLIENT ERROR: Zone "forward_zone" not found in "bad_view" view.\n')
    command = os.popen(('python %s -v %s -z %s --commit '
                        '-f %s -s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'test_view', u'bad_zone', TEST_FILE,
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    self.assertEqual(command.read(),
                     'CLIENT ERROR: Zone "bad_zone" does not exist.\n')
    command = os.popen(('python %s -v %s -z %s --commit '
                        '-f %s -s %s -u %s -p %s --config-file %s' % (
                            EXEC, u'test_view', u'test_zone', 'bad_file',
                            self.server_name, USERNAME, PASSWORD, USER_CONFIG)))
    self.assertEqual(command.read(),
                     'Specified file, bad_file, does not exist\n')
# Standard script entry point: run the test suite when invoked directly.
if __name__ == '__main__':
    unittest.main()
|
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
from __future__ import print_function
import unittest
import paddle
import numpy as np
import random
import paddle
import paddle.distributed as dist
import paddle.distributed.fleet as fleet
from paddle.fluid.dygraph.container import Sequential
from paddle.distributed.fleet.meta_parallel import PipelineLayer
from paddle.fluid.dygraph.layers import Layer
import paddle.nn as nn
import paddle.fluid as fluid
def set_random_seed(seed, dp_id, rank_id):
    """Seed the python, numpy and paddle RNGs for reproducibility.

    NOTE(review): rank_id is accepted but never used; seeds vary only
    with dp_id — confirm that this is intentional.
    """
    per_dp_seed = seed + dp_id
    random.seed(seed)
    np.random.seed(per_dp_seed)
    paddle.seed(per_dp_seed)
# Hyper-parameters shared by the reference model and the pipeline model.
batch_size = 16
micro_batch_size = 4  # batch_size must divide evenly: 16 // 4 = 4 accumulation steps
vocab_size = 128
hidden_size = 8
class SimpleNet(Layer):
    """Single-process reference model: embedding -> matmul -> bias -> softmax CE."""

    def __init__(self):
        super(SimpleNet, self).__init__()
        self.word_embeddings = nn.Embedding(vocab_size, hidden_size)
        self.softmax_weight = self.create_parameter(
            shape=[hidden_size, vocab_size])
        self.softmax_bias = self.create_parameter(
            shape=[vocab_size], is_bias=False)

    def forward(self, x1, x2, y1):
        # x2 is unused here; it exists so the signature mirrors the
        # pipeline model's inputs.
        embedded = self.word_embeddings(x1)
        logits = fluid.layers.matmul(embedded, self.softmax_weight)
        logits = fluid.layers.elementwise_add(logits, self.softmax_bias)
        projection = fluid.layers.reshape(logits, shape=[-1, vocab_size])
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=y1, soft_label=False)
        return loss.mean()
class EmbeddingNet(Layer):
    """Pipeline stage 1: look up embeddings; x2 is passed through untouched."""

    def __init__(self):
        super(EmbeddingNet, self).__init__()
        self.word_embeddings = nn.Embedding(vocab_size, hidden_size)

    def forward(self, args):
        tokens, passthrough = args
        embedded = self.word_embeddings(tokens)
        return embedded, passthrough
class MatmulNet(Layer):
    """Pipeline stage 2: project embeddings onto the vocabulary."""

    def __init__(self):
        super(MatmulNet, self).__init__()
        self.softmax_weight = self.create_parameter(
            shape=[hidden_size, vocab_size])

    def forward(self, args):
        embedded, passthrough = args
        projected = fluid.layers.matmul(embedded, self.softmax_weight)
        return projected, passthrough
class BiasNet(Layer):
    """Pipeline stage 3: add the bias and flatten logits to [-1, vocab_size]."""

    def __init__(self):
        super(BiasNet, self).__init__()
        self.softmax_bias = self.create_parameter(shape=[vocab_size])

    def forward(self, args):
        logits, passthrough = args
        biased = fluid.layers.elementwise_add(logits, self.softmax_bias)
        projection = fluid.layers.reshape(biased, shape=[-1, vocab_size])
        return projection, passthrough
class LossNet(Layer):
    """Pipeline loss head: softmax cross-entropy against the first label."""

    def __init__(self):
        super(LossNet, self).__init__()

    def forward(self, args, y1):
        projection, _unused = args
        loss = fluid.layers.softmax_with_cross_entropy(
            logits=projection, label=y1[0], soft_label=False)
        return loss.mean()
class SimpleNetPipe(Layer):
    """Holds the three pipeline stages and exposes them as a flat list."""

    def __init__(self):
        super(SimpleNetPipe, self).__init__()
        self.features = Sequential(EmbeddingNet(), MatmulNet(), BiasNet())

    def to_layers(self):
        layers = []
        for idx in range(len(self.features)):
            layers.append(self.features[idx])
        return layers
class TestDistEmbeddingTraning(unittest.TestCase):
    """Compare a 2-stage pipeline-parallel model against the single-process
    reference model, expecting identical losses when both start from the
    same weights and see the same data.
    """

    def setUp(self):
        # dp and mp degrees of 1 make this a pure pipeline-parallel setup
        # across 2 stages; 16 / 4 gives 4 accumulation steps per batch.
        strategy = fleet.DistributedStrategy()
        self.model_parallel_size = 1
        self.data_parallel_size = 1
        self.pipeline_parallel_size = 2
        strategy.hybrid_configs = {
            "dp_degree": self.data_parallel_size,
            "mp_degree": self.model_parallel_size,
            "pp_degree": self.pipeline_parallel_size,
        }
        strategy.pipeline_configs = {
            "accumulate_steps": batch_size // micro_batch_size,
            "micro_batch_size": micro_batch_size
        }
        fleet.init(is_collective=True, strategy=strategy)

    def test_pp_model(self):
        # Identify this process's position in the hybrid topology.
        hcg = fleet.get_hybrid_communicate_group()
        word_size = hcg.get_model_parallel_world_size()  # NOTE(review): unused
        dp_id = hcg.get_data_parallel_rank()
        pp_id = hcg.get_stage_id()
        rank_id = dist.get_rank()
        set_random_seed(1024, dp_id, rank_id)
        #construct model a
        model_a = SimpleNet()
        scheduler_a = paddle.optimizer.lr.PiecewiseDecay(
            boundaries=[2, 3, 4], values=[0.01, 0.02, 0.03, 0.04], verbose=True)
        optimizer_a = paddle.optimizer.SGD(learning_rate=scheduler_a,
                                           parameters=model_a.parameters())
        # Model B: the same network split into pipeline stages.
        init_net = SimpleNetPipe()
        model_b = PipelineLayer(
            layers=init_net.to_layers(),
            num_stages=self.pipeline_parallel_size,
            loss_fn=LossNet())
        scheduler_b = paddle.optimizer.lr.PiecewiseDecay(
            boundaries=[2, 3, 4], values=[0.01, 0.02, 0.03, 0.04], verbose=True)
        optimizer_b = paddle.optimizer.SGD(learning_rate=scheduler_b,
                                           parameters=model_b.parameters())
        model_b = fleet.distributed_model(model_b)
        optimizer_b = fleet.distributed_optimizer(optimizer_b)
        # Copy model A's weights into this rank's pipeline stage so both
        # models start identical.  NOTE(review): the index mapping below
        # assumes a specific ordering of model_a.parameters() — confirm
        # against the parameter names printed here.
        param_len = len(model_a.parameters())  # NOTE(review): unused
        parameters = []
        for param in model_a.parameters():
            print(param.name, param.shape)
            parameters.append(param.numpy())
        model_b_params = model_b.parameters()
        if pp_id == 0:
            model_b_params[0].set_value(parameters[2])
        else:
            model_b_params[0].set_value(parameters[0])
            model_b_params[1].set_value(parameters[1])
        for step in range(5):
            # Both models consume the same random batch (RNG seeded above).
            x1_data = np.random.randint(0, vocab_size, size=[batch_size, 1])
            x2_data = np.random.randint(0, vocab_size, size=[batch_size, 1])
            y1_data = np.random.randint(0, 10, size=[batch_size, 1])
            x1 = paddle.to_tensor(x1_data)
            x2 = paddle.to_tensor(x2_data)
            y1 = paddle.to_tensor(y1_data)
            x1.stop_gradient = True
            x2.stop_gradient = True
            y1.stop_gradient = True
            loss_a = model_a(x1, x2, y1)
            loss_a.backward()
            optimizer_a.step()
            optimizer_a.clear_grad()
            scheduler_a.step()
            # train_batch drives forward/backward/step across both stages.
            loss_b = model_b.train_batch([(x1, x2), (y1, )], optimizer_b,
                                         scheduler_b)
            print("loss", loss_a.numpy(), loss_b.numpy())
            np.testing.assert_allclose(loss_a.numpy(), loss_b.numpy())
# Launched by the paddle distributed test runner: one process per stage.
if __name__ == "__main__":
    unittest.main()
|
|
# Copyright (c) 2016 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import six
from cinder import context
from cinder.tests.unit.consistencygroup import fake_cgsnapshot
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_constants
from cinder.tests.unit import fake_group
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.dell_emc.vnx import fake_exception as \
lib_ex
from cinder.tests.unit.volume.drivers.dell_emc.vnx import fake_storops as \
storops
from cinder.tests.unit.volume.drivers.dell_emc.vnx import utils
from cinder.volume.drivers.dell_emc.vnx import adapter
from cinder.volume.drivers.dell_emc.vnx import client
from cinder.volume.drivers.dell_emc.vnx import common
from cinder.volume.drivers.dell_emc.vnx import driver
from cinder.volume.drivers.dell_emc.vnx import utils as vnx_utils
SYMBOL_TYPE = '_type'
SYMBOL_PROPERTIES = '_properties'
SYMBOL_METHODS = '_methods'
SYMBOL_SIDE_EFFECT = '_side_effect'
SYMBOL_RAISE = '_raise'
SYMBOL_CONTEXT = '_context'
UUID = '_uuid'
SYMBOL_ENUM = '_enum'
def _is_driver_object(obj_body):
return isinstance(obj_body, dict) and SYMBOL_PROPERTIES in obj_body
class DriverResourceMock(dict):
    """Loads per-test-case fake resources from a yaml file.

    Maps case name -> {object name -> parsed object}; dicts carrying
    '_properties' are materialized via fake_func_mapping.
    """
    # Subclasses map a '_type' value to a factory: func(*args, **kwargs).
    fake_func_mapping = {}

    def __init__(self, yaml_file):
        yaml_dict = utils.load_yaml(yaml_file)
        if not isinstance(yaml_dict, dict):
            return
        for case_name, case_res in yaml_dict.items():
            if not isinstance(case_res, dict):
                continue
            self[case_name] = {}
            for obj_name, obj_body in case_res.items():
                self[case_name][obj_name] = self._parse_driver_object(obj_body)

    def _parse_driver_object(self, obj_body):
        """Recursively convert a yaml node into test objects."""
        if isinstance(obj_body, dict):
            obj_body = {k: self._parse_driver_object(v)
                        for k, v in obj_body.items()}
            if _is_driver_object(obj_body):
                return self._create_object(obj_body)
            else:
                return obj_body
        elif isinstance(obj_body, list):
            # BUG FIX: on Python 3 map() returns a one-shot iterator, so the
            # parsed list could not be indexed or iterated more than once.
            return list(map(self._parse_driver_object, obj_body))
        else:
            return obj_body

    def _create_object(self, obj_body):
        """Build an object from its '_properties', applying any '_xxx'
        converter named by a single-key dict property value."""
        props = obj_body[SYMBOL_PROPERTIES]
        for prop_name, prop_value in props.items():
            if isinstance(prop_value, dict) and prop_value:
                # get the first key as the convert function
                func_name = list(prop_value.keys())[0]
                if func_name.startswith('_'):
                    func = getattr(self, func_name)
                    props[prop_name] = func(prop_value[func_name])
        if (SYMBOL_TYPE in obj_body and
                obj_body[SYMBOL_TYPE] in self.fake_func_mapping):
            return self.fake_func_mapping[obj_body[SYMBOL_TYPE]](**props)
        else:
            return props

    @staticmethod
    def _uuid(uuid_key):
        # '_uuid: volume_id' in yaml resolves to fake_constants.VOLUME_ID.
        uuid_key = uuid_key.upper()
        return getattr(fake_constants, uuid_key)
def _fake_volume_wrapper(*args, **kwargs):
    """Build a fake Volume object from yaml-provided kwargs.

    expected_attrs_key maps a kwarg name to the attribute that must be
    preloaded on the fake volume when that kwarg is present.
    """
    expected_attrs_key = {'volume_attachment': 'volume_attachment',
                          'volume_metadata': 'metadata'}
    if 'group' in kwargs:
        # NOTE(review): this stores the group *object* where the other
        # entries store attribute-name strings — confirm whether the
        # string 'group' was intended here.
        expected_attrs_key['group'] = kwargs['group']
    return fake_volume.fake_volume_obj(
        context.get_admin_context(),
        expected_attrs=[
            v for (k, v) in expected_attrs_key.items() if k in kwargs],
        **kwargs)
def _fake_cg_wrapper(*args, **kwargs):
    """Build a fake ConsistencyGroup object from yaml-provided kwargs."""
    ctxt = 'fake_context'
    return fake_consistencygroup.fake_consistencyobject_obj(ctxt, **kwargs)
def _fake_snapshot_wrapper(*args, **kwargs):
    """Build a fake Snapshot; preload the 'volume' attr only when supplied."""
    attrs = ['volume'] if 'volume' in kwargs else None
    return fake_snapshot.fake_snapshot_obj('fake_context',
                                           expected_attrs=attrs,
                                           **kwargs)
def _fake_cg_snapshot_wrapper(*args, **kwargs):
    """Build a fake CGSnapshot object from yaml-provided kwargs."""
    ctxt = None
    return fake_cgsnapshot.fake_cgsnapshot_obj(ctxt, **kwargs)
def _fake_group_wrapper(*args, **kwargs):
    """Build a fake Group object from yaml-provided kwargs."""
    ctxt = None
    return fake_group.fake_group_obj(ctxt, **kwargs)
class EnumBuilder(object):
    """Resolves {'_enum': {KlassName: value}} yaml into a storops enum."""

    def __init__(self, enum_dict):
        body = enum_dict[SYMBOL_ENUM]
        # The body is expected to contain a single klass -> value pair;
        # if more are present, the last one wins.
        for klazz, value in body.items():
            self.klazz = klazz
            self.value = value

    def __call__(self, *args, **kwargs):
        enum_cls = getattr(storops, self.klazz)
        return enum_cls.parse(self.value)
class CinderResourceMock(DriverResourceMock):
    """DriverResourceMock that materializes cinder objects (volume, cg,
    snapshot, cg_snapshot, group) via the fake_* wrapper factories."""
    # fake_func in the mapping should be like func(*args, **kwargs)
    fake_func_mapping = {'volume': _fake_volume_wrapper,
                         'cg': _fake_cg_wrapper,
                         'snapshot': _fake_snapshot_wrapper,
                         'cg_snapshot': _fake_cg_snapshot_wrapper,
                         'group': _fake_group_wrapper}

    def __init__(self, yaml_file):
        super(CinderResourceMock, self).__init__(yaml_file)

    @staticmethod
    def _build_provider_location(props):
        # Converter used by yaml property bodies: turns a dict of lun
        # fields into the provider_location string cinder persists.
        return vnx_utils.build_provider_location(
            props.get('system'), props.get('type'),
            six.text_type(props.get('id')),
            six.text_type(props.get('base_lun_name')),
            props.get('version'))
class ContextMock(object):
    """No-op context manager standing in for a mocked call's return value."""

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, exc_tb):
        return None
class MockBase(object):
    """Base object of all the Mocks.

    Converts parsed yaml dicts into mock objects: a dict carrying '_type'
    becomes a FakePort, '_enum' resolves to a storops enum value, and
    '_properties'/'_methods' becomes a StorageObjectMock; plain containers
    are converted recursively.
    """

    def _is_mock_object(self, yaml_info):
        if not isinstance(yaml_info, dict):
            return False
        return SYMBOL_PROPERTIES in yaml_info or SYMBOL_METHODS in yaml_info

    def _is_object_with_type(self, yaml_dict):
        return isinstance(yaml_dict, dict) and SYMBOL_TYPE in yaml_dict

    def _is_object_with_enum(self, yaml_dict):
        return isinstance(yaml_dict, dict) and SYMBOL_ENUM in yaml_dict

    def _build_mock_object(self, yaml_dict):
        build = self._build_mock_object
        if self._is_object_with_type(yaml_dict):
            return FakePort(yaml_dict)
        if self._is_object_with_enum(yaml_dict):
            return EnumBuilder(yaml_dict)()
        if self._is_mock_object(yaml_dict):
            return StorageObjectMock(yaml_dict)
        if isinstance(yaml_dict, dict):
            return {key: build(value) for key, value in yaml_dict.items()}
        if isinstance(yaml_dict, list):
            return [build(item) for item in yaml_dict]
        return yaml_dict
class StorageObjectMock(object):
    """Attribute-style access to a yaml-described storage object.

    All attribute writes are redirected into a single 'props' dict;
    reads fall back to that dict when normal lookup fails.
    """
    PROPS = 'props'

    def __init__(self, yaml_dict):
        self.__dict__[StorageObjectMock.PROPS] = {}
        props = yaml_dict.get(SYMBOL_PROPERTIES, None)
        if props:
            for k, v in props.items():
                setattr(self, k, StoragePropertyMock(k, v)())
        methods = yaml_dict.get(SYMBOL_METHODS, None)
        if methods:
            for k, v in methods.items():
                setattr(self, k, StorageMethodMock(k, v))

    def __setattr__(self, key, value):
        self.__dict__[StorageObjectMock.PROPS][key] = value

    def __getattr__(self, item):
        # Only called when normal attribute lookup fails.
        # BUG FIX: the original applied '%' to the KeyError *object*
        # (a TypeError if reached) and raised the dict's KeyError inside
        # an except-handler, so the informative message was never shown.
        try:
            return self.__dict__[StorageObjectMock.PROPS][item]
        except KeyError:
            raise KeyError('%(item)s not exist in mock object.'
                           % {'item': item})
class FakePort(StorageObjectMock):
    """Mocked port; equality/hash are defined by (sp, port_id, vport_id)."""

    def __eq__(self, other):
        return ((self.sp, self.port_id, self.vport_id) ==
                (other.sp, other.port_id, other.vport_id))

    def __hash__(self):
        return hash((self.sp, self.port_id, self.vport_id))
class StoragePropertyMock(mock.PropertyMock, MockBase):
    """PropertyMock built from a yaml property body.

    A dict body containing '_side_effect' selects the side-effect form;
    any other body is converted recursively and used as the return value.
    """
    def __init__(self, name, property_body):
        return_value = property_body
        side_effect = None
        # only support return_value and side_effect for property
        if (isinstance(property_body, dict) and
                SYMBOL_SIDE_EFFECT in property_body):
            side_effect = self._build_mock_object(
                property_body[SYMBOL_SIDE_EFFECT])
            return_value = None
        if side_effect is not None:
            super(StoragePropertyMock, self).__init__(
                name=name,
                side_effect=side_effect)
        else:
            return_value = self._build_mock_object(return_value)
            super(StoragePropertyMock, self).__init__(
                name=name,
                return_value=return_value)
class StorageMethodMock(mock.Mock, MockBase):
    """Mock method built from a yaml method body.

    A dict body may carry '_raise' (exception name -> message, looked up
    in lib_ex then common), '_side_effect', or be treated as a plain
    return value; the special value '_context' returns a ContextMock.
    """
    def __init__(self, name, method_body):
        return_value = method_body
        exception = None
        side_effect = None
        # support return_value, side_effect and exception for method
        if isinstance(method_body, dict):
            if (SYMBOL_SIDE_EFFECT in method_body or
                    SYMBOL_RAISE in method_body):
                exception = method_body.get(SYMBOL_RAISE, None)
                side_effect = method_body.get(SYMBOL_SIDE_EFFECT, None)
                return_value = None
        if exception is not None:
            ex = None
            if isinstance(exception, dict) and exception:
                # single-entry dict: {ExceptionClassName: message}
                ex_name = list(exception.keys())[0]
                ex_tmp = [getattr(ex_module, ex_name, None)
                          for ex_module in [lib_ex, common]]
                try:
                    # first module that defines the exception wins
                    ex = [each for each in ex_tmp if each is not None][0]
                    super(StorageMethodMock, self).__init__(
                        name=name,
                        side_effect=ex(exception[ex_name]))
                except IndexError:
                    raise KeyError('Exception %(ex_name)s not found.'
                                   % {'ex_name': ex_name})
            else:
                raise KeyError('Invalid Exception body, should be a dict.')
        elif side_effect is not None:
            super(StorageMethodMock, self).__init__(
                name=name,
                side_effect=self._build_mock_object(side_effect))
        elif return_value is not None:
            super(StorageMethodMock, self).__init__(
                name=name,
                return_value=(ContextMock() if return_value == SYMBOL_CONTEXT
                              else self._build_mock_object(return_value)))
        else:
            super(StorageMethodMock, self).__init__(
                name=name, return_value=None)
class StorageResourceMock(dict, MockBase):
    """dict of section -> {object name -> mock object} loaded from yaml."""

    def __init__(self, yaml_file):
        yaml_dict = utils.load_yaml(yaml_file)
        if not isinstance(yaml_dict, dict):
            return
        for section, sec_body in yaml_dict.items():
            if not isinstance(sec_body, dict):
                self[section] = {}
                continue
            self[section] = {obj_name: self._build_mock_object(obj_body)
                             for obj_name, obj_body in sec_body.items()}
# All cinder-side test classes share one resource set parsed from
# mocked_cinder.yaml; test method names key into the per-case dicts.
cinder_res = CinderResourceMock('mocked_cinder.yaml')
DRIVER_RES_MAPPING = {
    'TestResMock': cinder_res,
    'TestCommonAdapter': cinder_res,
    'TestReplicationAdapter': cinder_res,
    'TestISCSIAdapter': cinder_res,
    'TestFCAdapter': cinder_res,
    'TestUtils': cinder_res,
    'TestClient': cinder_res
}
def mock_driver_input(func):
    """Decorator: inject the cinder-side yaml resources for the test case
    as the first extra argument of the decorated test method."""
    @six.wraps(func)
    def wrapper(cls, *args, **kwargs):
        res = DRIVER_RES_MAPPING[cls.__class__.__name__][func.__name__]
        return func(cls, res, *args, **kwargs)
    return wrapper
# Storage-side (mocked VNX) resources; TestResMock uses its own fixture
# file, every other test class shares mocked_vnx.yaml.
vnx_res = StorageResourceMock('mocked_vnx.yaml')
STORAGE_RES_MAPPING = {
    'TestResMock': StorageResourceMock('test_res_mock.yaml'),
    'TestCondition': vnx_res,
    'TestClient': vnx_res,
    'TestCommonAdapter': vnx_res,
    'TestReplicationAdapter': vnx_res,
    'TestISCSIAdapter': vnx_res,
    'TestFCAdapter': vnx_res,
    'TestTaskflow': vnx_res,
    'TestExtraSpecs': vnx_res,
}
# Key under which a case stores the mocked VNX system object itself.
DEFAULT_STORAGE_RES = 'vnx'
def _build_client():
    """Create a vnx Client pointing at the faked backend credentials."""
    return client.Client(
        ip='192.168.1.2',
        username='sysadmin',
        password='sysadmin',
        scope='global',
        naviseccli=None,
        sec_file=None,
        queue_path='vnx-cinder')
def patch_client(func):
    """Decorator: run the test with a Client wired to the mocked VNX.

    Injects (client, storage_res) ahead of the test's own arguments.
    """
    @six.wraps(func)
    def decorated(cls, *args, **kwargs):
        storage_res = (
            STORAGE_RES_MAPPING[cls.__class__.__name__][func.__name__])
        # NOTE(review): patch_vnxsystem is used without being called, so it
        # is presumably already a context-manager object — confirm in utils.
        with utils.patch_vnxsystem as patched_vnx:
            if DEFAULT_STORAGE_RES in storage_res:
                patched_vnx.return_value = storage_res[DEFAULT_STORAGE_RES]
            # shadows the module-level `client` import inside this scope
            client = _build_client()
        return func(cls, client, storage_res, *args, **kwargs)
    return decorated
# Maps a protocol label to the adapter class under test; 'Common' selects
# the protocol-agnostic base adapter.
PROTOCOL_COMMON = 'Common'
PROTOCOL_MAPPING = {
    PROTOCOL_COMMON: adapter.CommonAdapter,
    common.PROTOCOL_ISCSI: adapter.ISCSIAdapter,
    common.PROTOCOL_FC: adapter.FCAdapter
}
def patch_adapter_init(protocol):
    """Decorator factory: run the test with a freshly constructed adapter
    for *protocol*, built while the VNX system is patched.

    Injects (adapter, storage_res) ahead of the test's own arguments.
    """
    def inner_patch_adapter(func):
        @six.wraps(func)
        def decorated(cls, *args, **kwargs):
            storage_res = (
                STORAGE_RES_MAPPING[cls.__class__.__name__][func.__name__])
            # NOTE(review): patch_vnxsystem used without call parens —
            # presumably already a context-manager object; confirm in utils.
            with utils.patch_vnxsystem as patched_vnx:
                if DEFAULT_STORAGE_RES in storage_res:
                    patched_vnx.return_value = storage_res[DEFAULT_STORAGE_RES]
                adapter = PROTOCOL_MAPPING[protocol](cls.configuration)
            return func(cls, adapter, storage_res, *args, **kwargs)
        return decorated
    return inner_patch_adapter
def _patch_adapter_prop(adapter, client):
    """Fill in adapter properties the mocked backend cannot provide."""
    try:
        adapter.serial_number = client.get_serial()
    except KeyError:
        # Mocked client has no serial configured; use a placeholder.
        adapter.serial_number = 'faked_serial_number'
    adapter.VERSION = driver.VNXDriver.VERSION
def patch_adapter(protocol):
    """Decorator factory: run the test with a *protocol* adapter whose
    client is wired to the mocked VNX system.

    Injects (adapter, storage_res) ahead of the test's own arguments.
    """
    def inner_patch_adapter(func):
        @six.wraps(func)
        def decorated(cls, *args, **kwargs):
            storage_res = (
                STORAGE_RES_MAPPING[cls.__class__.__name__][func.__name__])
            with utils.patch_vnxsystem:
                client = _build_client()
                adapter = PROTOCOL_MAPPING[protocol](cls.configuration, None)
            if DEFAULT_STORAGE_RES in storage_res:
                client.vnx = storage_res[DEFAULT_STORAGE_RES]
            adapter.client = client
            _patch_adapter_prop(adapter, client)
            return func(cls, adapter, storage_res, *args, **kwargs)
        return decorated
    return inner_patch_adapter
# Ready-made decorators for the three adapter flavors.
patch_common_adapter = patch_adapter(PROTOCOL_COMMON)
patch_iscsi_adapter = patch_adapter(common.PROTOCOL_ISCSI)
patch_fc_adapter = patch_adapter(common.PROTOCOL_FC)
def mock_storage_resources(func):
    """Decorator: inject the storage-side yaml resources for the test case
    as the first extra argument of the decorated test method."""
    @six.wraps(func)
    def wrapper(cls, *args, **kwargs):
        res = STORAGE_RES_MAPPING[cls.__class__.__name__][func.__name__]
        return func(cls, res, *args, **kwargs)
    return wrapper
|
|
from __future__ import absolute_import
from six import binary_type
from typing import Any, AnyStr, Callable, Dict, Iterable, List, MutableMapping, Optional, Text
from django.conf import settings
from django.core.exceptions import DisallowedHost
from django.utils.translation import ugettext as _
from zerver.lib.response import json_error
from zerver.lib.request import JsonableError
from django.db import connection
from django.http import HttpRequest, HttpResponse
from zerver.lib.utils import statsd, get_subdomain
from zerver.lib.queue import queue_json_publish
from zerver.lib.cache import get_remote_cache_time, get_remote_cache_requests
from zerver.lib.bugdown import get_bugdown_time, get_bugdown_requests
from zerver.models import flush_per_request_caches, get_realm
from zerver.exceptions import RateLimited
from django.contrib.sessions.middleware import SessionMiddleware
from django.views.csrf import csrf_failure as html_csrf_failure
from django.utils.cache import patch_vary_headers
from django.utils.http import cookie_date
from django.shortcuts import redirect, render
import logging
import time
import cProfile
import traceback
logger = logging.getLogger('zulip.requests')
def record_request_stop_data(log_data):
    # type: (MutableMapping[str, Any]) -> None
    """Snapshot timing/counter state when request processing is suspended."""
    log_data['time_stopped'] = time.time()
    for key, fetch in (('remote_cache_time_stopped', get_remote_cache_time),
                       ('remote_cache_requests_stopped', get_remote_cache_requests),
                       ('bugdown_time_stopped', get_bugdown_time),
                       ('bugdown_requests_stopped', get_bugdown_requests)):
        log_data[key] = fetch()
    if settings.PROFILE_ALL_REQUESTS:
        log_data["prof"].disable()
def async_request_stop(request):
    # type: (HttpRequest) -> None
    """Pause this request's timing while it waits asynchronously."""
    record_request_stop_data(request._log_data)
def record_request_restart_data(log_data):
    # type: (MutableMapping[str, Any]) -> None
    """Snapshot timing/counter state when a suspended request resumes."""
    if settings.PROFILE_ALL_REQUESTS:
        log_data["prof"].enable()
    log_data['time_restarted'] = time.time()
    for key, fetch in (('remote_cache_time_restarted', get_remote_cache_time),
                       ('remote_cache_requests_restarted', get_remote_cache_requests),
                       ('bugdown_time_restarted', get_bugdown_time),
                       ('bugdown_requests_restarted', get_bugdown_requests)):
        log_data[key] = fetch()
def async_request_restart(request):
    # type: (HttpRequest) -> None
    """Resume this request's timing after an asynchronous wait."""
    if "time_restarted" in request._log_data:
        # Don't destroy data when being called from
        # finish_current_handler
        return
    record_request_restart_data(request._log_data)
def record_request_start_data(log_data):
    # type: (MutableMapping[str, Any]) -> None
    """Initialize per-request timing/counter baselines (and the profiler)."""
    if settings.PROFILE_ALL_REQUESTS:
        profiler = cProfile.Profile()
        profiler.enable()
        log_data["prof"] = profiler
    log_data['time_started'] = time.time()
    for key, fetch in (('remote_cache_time_start', get_remote_cache_time),
                       ('remote_cache_requests_start', get_remote_cache_requests),
                       ('bugdown_time_start', get_bugdown_time),
                       ('bugdown_requests_start', get_bugdown_requests)):
        log_data[key] = fetch()
def timedelta_ms(timedelta):
    # type: (float) -> float
    """Convert a duration in seconds to milliseconds."""
    milliseconds = timedelta * 1000
    return milliseconds
def format_timedelta(timedelta):
    # type: (float) -> str
    """Render seconds as e.g. '2.0s', or sub-second values as '500ms'."""
    if timedelta >= 1:
        return "%.1fs" % (timedelta,)
    # inline of timedelta_ms: seconds -> milliseconds
    return "%.0fms" % (timedelta * 1000,)
def is_slow_query(time_delta, path):
    # type: (float, Text) -> bool
    """Decide whether a request was slow enough to report.

    Ordinary requests are slow past 1.2s; known-expensive activity pages
    get a 5s budget and webathena kerberos requests get 10s.
    """
    if time_delta < 1.2:
        return False
    exempt_paths = ["/activity", "/json/report_error",
                    "/api/v1/deployments/report_error"]
    exempt_prefixes = ("/realm_activity/", "/user_activity/")
    if path in exempt_paths or path.startswith(exempt_prefixes):
        return time_delta >= 5
    if 'webathena_kerberos' in path:
        return time_delta >= 10
    return True
def write_log_line(log_data, path, method, remote_ip, email, client_name,
                   status_code=200, error_content=None, error_content_iter=None):
    # type: (MutableMapping[str, Any], Text, str, str, Text, Text, int, Optional[AnyStr], Optional[Iterable[AnyStr]]) -> None
    """Emit the access-log line and statsd metrics for one request.

    The response body is passed either whole (error_content) or as an
    iterable of chunks (error_content_iter), never both; it is only
    inspected for the 40x error-detail log line at the end.
    """
    assert error_content is None or error_content_iter is None
    if error_content is not None:
        error_content_iter = (error_content,)
    # For statsd timer name
    if path == '/':
        statsd_path = u'webreq'
    else:
        statsd_path = u"webreq.%s" % (path[1:].replace('/', '.'),)
    # Remove non-ascii chars from path (there should be none, if there are it's
    # because someone manually entered a nonexistant path), as UTF-8 chars make
    # statsd sad when it sends the key name over the socket
    statsd_path = statsd_path.encode('ascii', errors='ignore').decode("ascii")
    # High-volume or uninteresting endpoints are kept out of statsd.
    blacklisted_requests = ['do_confirm', 'send_confirm',
                            'eventslast_event_id', 'webreq.content', 'avatar', 'user_uploads',
                            'password.reset', 'static', 'json.bots', 'json.users', 'json.streams',
                            'accounts.unsubscribe', 'apple-touch-icon', 'emoji', 'json.bots',
                            'upload_file', 'realm_activity', 'user_activity']
    suppress_statsd = any((blacklisted in statsd_path for blacklisted in blacklisted_requests))
    time_delta = -1
    # A time duration of -1 means the StartLogRequests middleware
    # didn't run for some reason
    optional_orig_delta = ""
    if 'time_started' in log_data:
        time_delta = time.time() - log_data['time_started']
    if 'time_stopped' in log_data:
        orig_time_delta = time_delta
        time_delta = ((log_data['time_stopped'] - log_data['time_started']) +
                      (time.time() - log_data['time_restarted']))
        optional_orig_delta = " (lp: %s)" % (format_timedelta(orig_time_delta),)
    remote_cache_output = ""
    if 'remote_cache_time_start' in log_data:
        remote_cache_time_delta = get_remote_cache_time() - log_data['remote_cache_time_start']
        remote_cache_count_delta = get_remote_cache_requests() - log_data['remote_cache_requests_start']
        if 'remote_cache_requests_stopped' in log_data:
            # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
            remote_cache_time_delta += (log_data['remote_cache_time_stopped'] -
                                        log_data['remote_cache_time_restarted'])
            remote_cache_count_delta += (log_data['remote_cache_requests_stopped'] -
                                         log_data['remote_cache_requests_restarted'])
        if (remote_cache_time_delta > 0.005):
            remote_cache_output = " (mem: %s/%s)" % (format_timedelta(remote_cache_time_delta),
                                                     remote_cache_count_delta)
        if not suppress_statsd:
            statsd.timing("%s.remote_cache.time" % (statsd_path,), timedelta_ms(remote_cache_time_delta))
            statsd.incr("%s.remote_cache.querycount" % (statsd_path,), remote_cache_count_delta)
    startup_output = ""
    if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005:
        startup_output = " (+start: %s)" % (format_timedelta(log_data["startup_time_delta"]))
    bugdown_output = ""
    if 'bugdown_time_start' in log_data:
        bugdown_time_delta = get_bugdown_time() - log_data['bugdown_time_start']
        bugdown_count_delta = get_bugdown_requests() - log_data['bugdown_requests_start']
        if 'bugdown_requests_stopped' in log_data:
            # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
            bugdown_time_delta += (log_data['bugdown_time_stopped'] -
                                   log_data['bugdown_time_restarted'])
            bugdown_count_delta += (log_data['bugdown_requests_stopped'] -
                                    log_data['bugdown_requests_restarted'])
        if (bugdown_time_delta > 0.005):
            bugdown_output = " (md: %s/%s)" % (format_timedelta(bugdown_time_delta),
                                               bugdown_count_delta)
        if not suppress_statsd:
            statsd.timing("%s.markdown.time" % (statsd_path,), timedelta_ms(bugdown_time_delta))
            statsd.incr("%s.markdown.count" % (statsd_path,), bugdown_count_delta)
    # Get the amount of time spent doing database queries
    db_time_output = ""
    queries = connection.connection.queries if connection.connection is not None else []
    if len(queries) > 0:
        query_time = sum(float(query.get('time', 0)) for query in queries)
        db_time_output = " (db: %s/%sq)" % (format_timedelta(query_time),
                                            len(queries))
        if not suppress_statsd:
            # Log ms, db ms, and num queries to statsd
            statsd.timing("%s.dbtime" % (statsd_path,), timedelta_ms(query_time))
            statsd.incr("%s.dbq" % (statsd_path,), len(queries))
            statsd.timing("%s.total" % (statsd_path,), timedelta_ms(time_delta))
    if 'extra' in log_data:
        extra_request_data = " %s" % (log_data['extra'],)
    else:
        extra_request_data = ""
    logger_client = "(%s via %s)" % (email, client_name)
    logger_timing = ('%5s%s%s%s%s%s %s' %
                     (format_timedelta(time_delta), optional_orig_delta,
                      remote_cache_output, bugdown_output,
                      db_time_output, startup_output, path))
    logger_line = ('%-15s %-7s %3d %s%s %s' %
                   (remote_ip, method, status_code,
                    logger_timing, extra_request_data, logger_client))
    if (status_code in [200, 304] and method == "GET" and path.startswith("/static")):
        logger.debug(logger_line)
    else:
        logger.info(logger_line)
    if (is_slow_query(time_delta, path)):
        queue_json_publish("slow_queries", "%s (%s)" % (logger_line, email), lambda e: None)
    if settings.PROFILE_ALL_REQUESTS:
        log_data["prof"].disable()
        profile_path = "/tmp/profile.data.%s.%s" % (path.split("/")[-1], int(time_delta * 1000),)
        log_data["prof"].dump_stats(profile_path)
    # Log some additional data whenever we return certain 40x errors
    if 400 <= status_code < 500 and status_code not in [401, 404, 405]:
        assert error_content_iter is not None
        error_content_list = list(error_content_iter)
        # BUG FIX: the condition below was inverted (`if error_content_list`),
        # which blanked real error bodies and raised IndexError on empty ones.
        if not error_content_list:
            error_data = u''
        elif isinstance(error_content_list[0], Text):
            error_data = u''.join(error_content_list)
        elif isinstance(error_content_list[0], binary_type):
            error_data = repr(b''.join(error_content_list))
        else:
            # Unknown chunk type; don't crash the logger over logging.
            error_data = u''
        if len(error_data) > 100:
            error_data = u"[content more than 100 characters]"
        logger.info('status=%3d, data=%s, uid=%s' % (status_code, error_data, email))
class LogRequests(object):
    # We primarily are doing logging using the process_view hook, but
    # for some views, process_view isn't run, so we call the start
    # method here too
    """Middleware that times every request and writes the access-log line."""

    def process_request(self, request):
        # type: (HttpRequest) -> None
        # Fresh timing state; also reset the DB query log so the
        # per-request query count is accurate.
        request._log_data = dict()
        record_request_start_data(request._log_data)
        if connection.connection is not None:
            connection.connection.queries = []

    def process_view(self, request, view_func, args, kwargs):
        # type: (HttpRequest, Callable[..., HttpResponse], List[str], Dict[str, Any]) -> None
        # process_request was already run; we save the initialization
        # time (i.e. the time between receiving the request and
        # figuring out which view function to call, which is primarily
        # importing modules on the first start)
        request._log_data["startup_time_delta"] = time.time() - request._log_data["time_started"]
        # And then completely reset our tracking to only cover work
        # done as part of this request
        record_request_start_data(request._log_data)
        if connection.connection is not None:
            connection.connection.queries = []

    def process_response(self, request, response):
        # type: (HttpRequest, HttpResponse) -> HttpResponse
        # The reverse proxy might have sent us the real external IP
        remote_ip = request.META.get('HTTP_X_REAL_IP')
        if remote_ip is None:
            remote_ip = request.META['REMOTE_ADDR']
        # Get the requestor's email address and client, if available.
        try:
            email = request._email
        except Exception:
            email = "unauth"
        try:
            client = request.client.name
        except Exception:
            client = "?"
        # Streaming responses expose chunks rather than a whole body.
        if response.streaming:
            content_iter = response.streaming_content
            content = None
        else:
            content = response.content
            content_iter = None
        write_log_line(request._log_data, request.path, request.method,
                       remote_ip, email, client, status_code=response.status_code,
                       error_content=content, error_content_iter=content_iter)
        return response
class JsonErrorHandler(object):
    """Middleware converting exceptions on API endpoints into JSON errors."""

    def process_exception(self, request, exception):
        # type: (HttpRequest, Any) -> Optional[HttpResponse]
        # Exceptions that know how to render themselves (JsonableError
        # style) provide to_json_error_msg and, optionally, status_code.
        if hasattr(exception, 'to_json_error_msg') and callable(exception.to_json_error_msg):
            try:
                status_code = exception.status_code
            except Exception:
                logging.warning("Jsonable exception %s missing status code!" % (exception,))
                status_code = 400
            return json_error(exception.to_json_error_msg(), status=status_code)
        if request.error_format == "JSON":
            logging.error(traceback.format_exc())
            return json_error(_("Internal server error"), status=500)
        # Returning None lets Django's normal 500 handling take over.
        return None
class TagRequests(object):
    """Tag each request with the error format its path implies:
    JSON for /api/ and /json/ endpoints, HTML for everything else."""

    def process_view(self, request, view_func, args, kwargs):
        # type: (HttpRequest, Callable[..., HttpResponse], List[str], Dict[str, Any]) -> None
        self.process_request(request)

    def process_request(self, request):
        # type: (HttpRequest) -> None
        json_prefixes = ("/api/", "/json/")
        if request.path.startswith(json_prefixes):
            request.error_format = "JSON"
        else:
            request.error_format = "HTML"
def csrf_failure(request, reason=""):
    # type: (HttpRequest, Optional[Text]) -> HttpResponse
    """Render a CSRF failure as JSON for API clients, HTML otherwise."""
    if request.error_format != "JSON":
        return html_csrf_failure(request, reason)
    return json_error(_("CSRF Error: %s") % (reason,), status=403)
class RateLimitMiddleware(object):
    # Surfaces rate-limiter state (attached to the request elsewhere) as
    # X-RateLimit-* response headers, and converts RateLimited exceptions
    # into HTTP 429 responses with a Retry-After header.
    def process_response(self, request, response):
        # type: (HttpRequest, HttpResponse) -> HttpResponse
        if not settings.RATE_LIMITING:
            return response
        # Imported lazily; presumably to avoid import cycles or to skip the
        # import entirely when rate limiting is disabled -- TODO confirm.
        from zerver.lib.rate_limiter import max_api_calls
        # Add X-RateLimit-*** headers
        if hasattr(request, '_ratelimit_applied_limits'):
            response['X-RateLimit-Limit'] = max_api_calls(request.user)
        if hasattr(request, '_ratelimit_secs_to_freedom'):
            response['X-RateLimit-Reset'] = int(time.time() + request._ratelimit_secs_to_freedom)
        if hasattr(request, '_ratelimit_remaining'):
            response['X-RateLimit-Remaining'] = request._ratelimit_remaining
        return response

    def process_exception(self, request, exception):
        # type: (HttpRequest, Exception) -> Optional[HttpResponse]
        # Non-RateLimited exceptions fall through (implicit None) so that
        # Django's regular exception handling takes over.
        if isinstance(exception, RateLimited):
            resp = json_error(_("API usage exceeded rate limit, try again in %s secs") % (
                request._ratelimit_secs_to_freedom,), status=429)
            resp['Retry-After'] = request._ratelimit_secs_to_freedom
            return resp
class FlushDisplayRecipientCache(object):
    """Clear all per-request caches once each response is finished."""

    def process_response(self, request, response):
        # type: (HttpRequest, HttpResponse) -> HttpResponse
        # Per-request caches must never be shared between requests, so
        # flush them unconditionally after every response.
        flush_per_request_caches()
        return response
class SessionHostDomainMiddleware(SessionMiddleware):
    # Variant of Django's SessionMiddleware: with subdomain-based realms
    # enabled, the session cookie domain is set to the request's host, and
    # requests to unknown realms get an error page.
    def process_response(self, request, response):
        # type: (HttpRequest, HttpResponse) -> HttpResponse
        try:
            request.get_host()
        except DisallowedHost:
            # If we get a DisallowedHost exception trying to access
            # the host, (1) the request is failed anyway and so the
            # below code will do nothing, and (2) the below will
            # trigger a recursive exception, breaking things, so we
            # just return here.
            return response
        if settings.REALMS_HAVE_SUBDOMAINS:
            # Realm checks only apply to user-facing pages, not static
            # assets or API/JSON endpoints.
            if (not request.path.startswith("/static/") and not request.path.startswith("/api/") and
                    not request.path.startswith("/json/")):
                subdomain = get_subdomain(request)
                # Development-server hosts redirect to the configured
                # external host.
                if (request.get_host() == "127.0.0.1:9991" or request.get_host() == "localhost:9991"):
                    return redirect("%s%s" % (settings.EXTERNAL_URI_SCHEME,
                                              settings.EXTERNAL_HOST))
                if subdomain != "":
                    realm = get_realm(subdomain)
                    if (realm is None):
                        return render(request, "zerver/invalid_realm.html")
        """
        If request.session was modified, or if the configuration is to save the
        session every time, save the changes and set a session cookie.
        """
        # The remainder mirrors Django's SessionMiddleware.process_response,
        # with the cookie domain overridden below.
        try:
            accessed = request.session.accessed
            modified = request.session.modified
        except AttributeError:
            pass
        else:
            if accessed:
                patch_vary_headers(response, ('Cookie',))
            if modified or settings.SESSION_SAVE_EVERY_REQUEST:
                if request.session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    max_age = request.session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = cookie_date(expires_time)
                # Save the session data and refresh the client cookie.
                # Skip session save for 500 responses, refs #3881.
                if response.status_code != 500:
                    request.session.save()
                    host = request.get_host().split(':')[0]
                    session_cookie_domain = settings.SESSION_COOKIE_DOMAIN
                    # The subdomains feature overrides the
                    # SESSION_COOKIE_DOMAIN setting, since the setting
                    # is a fixed value and with subdomains enabled,
                    # the session cookie domain has to vary with the
                    # subdomain.
                    if settings.REALMS_HAVE_SUBDOMAINS:
                        session_cookie_domain = host
                    response.set_cookie(settings.SESSION_COOKIE_NAME,
                                        request.session.session_key, max_age=max_age,
                                        expires=expires, domain=session_cookie_domain,
                                        path=settings.SESSION_COOKIE_PATH,
                                        secure=settings.SESSION_COOKIE_SECURE or None,
                                        httponly=settings.SESSION_COOKIE_HTTPONLY or None)
        return response
class SetRemoteAddrFromForwardedFor(object):
    """
    Middleware that sets REMOTE_ADDR based on the HTTP_X_FORWARDED_FOR.
    This middleware replicates Django's former SetRemoteAddrFromForwardedFor middleware.
    Because Zulip sits behind a NGINX reverse proxy, if the HTTP_X_FORWARDED_FOR
    is set in the request, then it has properly been set by NGINX.
    Therefore HTTP_X_FORWARDED_FOR's value is trusted.
    """

    def process_request(self, request):
        # type: (HttpRequest) -> None
        forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if forwarded_for is None:
            # No proxy header present; leave REMOTE_ADDR untouched.
            return None
        # HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
        # For NGINX reverse proxy servers, the client's IP will be the first one.
        client_ip = forwarded_for.split(",")[0].strip()
        request.META['REMOTE_ADDR'] = client_ip
|
|
import numba
from numba.tests.support import TestCase, unittest
from numba.core.registry import cpu_target
from numba.core.compiler import CompilerBase, Flags
from numba.core.compiler_machinery import PassManager
from numba.core import types, ir, bytecode, compiler, ir_utils, registry
from numba.core.untyped_passes import (ExtractByteCode, TranslateByteCode,
FixupArgs, IRProcessing,)
from numba.core.typed_passes import (NopythonTypeInference,
type_inference_stage, DeadCodeElimination)
from numba.experimental import jitclass
# global constant for testing find_const
GLOBAL_B = 11
# jitclass-compiled container used by the tests below; its typed attribute
# lets method calls on it (e.g. list.append) appear in typed Numba IR.
@jitclass([('val', numba.core.types.List(numba.intp))])
class Dummy(object):
    def __init__(self, val):
        # val: typed list of machine-sized integers
        self.val = val
class TestIrUtils(TestCase):
    """
    Tests ir handling utility functions like find_callname.
    """

    def test_obj_func_match(self):
        """Test matching of an object method (other than Array see #3449)
        """
        def test_func():
            d = Dummy([1])
            d.val.append(2)

        test_ir = compiler.run_frontend(test_func)
        typingctx = cpu_target.typing_context
        typing_res = type_inference_stage(
            typingctx, test_ir, (), None)
        # body[7] is the call expression for ``d.val.append(2)`` in the
        # untyped IR produced for test_func.
        matched_call = ir_utils.find_callname(
            test_ir, test_ir.blocks[0].body[7].value, typing_res.typemap)
        self.assertTrue(isinstance(matched_call, tuple) and
                        len(matched_call) == 2 and
                        matched_call[0] == 'append')

    def test_dead_code_elimination(self):

        class Tester(CompilerBase):

            @classmethod
            def mk_pipeline(cls, args, return_type=None, flags=None,
                            locals=None, library=None, typing_context=None,
                            target_context=None):
                # ``locals`` used to default to a shared mutable dict;
                # use None and create a fresh dict per call instead.
                if locals is None:
                    locals = {}
                if not flags:
                    flags = Flags()
                flags.nrt = True
                if typing_context is None:
                    typing_context = registry.cpu_target.typing_context
                if target_context is None:
                    target_context = registry.cpu_target.target_context
                return cls(typing_context, target_context, library, args,
                           return_type, flags, locals)

            def compile_to_ir(self, func, DCE=False):
                """
                Compile and return IR
                """
                func_id = bytecode.FunctionIdentity.from_function(func)
                self.state.func_id = func_id
                ExtractByteCode().run_pass(self.state)
                state = self.state

                # Minimal pipeline: frontend passes plus (optionally) DCE.
                name = "DCE_testing"
                pm = PassManager(name)
                pm.add_pass(TranslateByteCode, "analyzing bytecode")
                pm.add_pass(FixupArgs, "fix up args")
                pm.add_pass(IRProcessing, "processing IR")
                pm.add_pass(NopythonTypeInference, "nopython frontend")
                if DCE is True:
                    pm.add_pass(DeadCodeElimination, "DCE after typing")
                pm.finalize()
                pm.run(state)
                return state.func_ir

        def check_initial_ir(the_ir):
            # dead stuff:
            # a const int value 0xdead
            # an assign of above into to variable `dead`
            # a const int above 0xdeaddead
            # an assign of said int to variable `deaddead`
            # this is 2 statements to remove
            self.assertEqual(len(the_ir.blocks), 1)
            block = the_ir.blocks[0]
            deads = []
            for x in block.find_insts(ir.Assign):
                if isinstance(getattr(x, 'target', None), ir.Var):
                    if 'dead' in getattr(x.target, 'name', ''):
                        deads.append(x)
            self.assertEqual(len(deads), 2)
            for d in deads:
                # check the ir.Const is the definition and the value is expected
                const_val = the_ir.get_definition(d.value)
                # The variable name ("dead"/"deaddead") encodes its expected
                # hex constant. This was previously a two-argument
                # assertTrue(value, msg), which always passed; assertEqual
                # performs the intended comparison.
                self.assertEqual(int('0x%s' % d.target.name, 16),
                                 const_val.value)
            return deads

        def check_dce_ir(the_ir):
            # After DCE: no assignments to `dead*` vars remain, and no const
            # anywhere holds one of the dead sentinel values.
            self.assertEqual(len(the_ir.blocks), 1)
            block = the_ir.blocks[0]
            deads = []
            consts = []
            for x in block.find_insts(ir.Assign):
                if isinstance(getattr(x, 'target', None), ir.Var):
                    if 'dead' in getattr(x.target, 'name', ''):
                        deads.append(x)
                if isinstance(getattr(x, 'value', None), ir.Const):
                    consts.append(x)
            self.assertEqual(len(deads), 0)
            # check the consts to make sure there's no reference to 0xdead or
            # 0xdeaddead
            for x in consts:
                self.assertTrue(x.value.value not in [0xdead, 0xdeaddead])

        def foo(x):
            y = x + 1
            dead = 0xdead  # noqa
            z = y + 2
            deaddead = 0xdeaddead  # noqa
            ret = z * z
            return ret

        test_pipeline = Tester.mk_pipeline((types.intp,))
        no_dce = test_pipeline.compile_to_ir(foo)
        removed = check_initial_ir(no_dce)

        test_pipeline = Tester.mk_pipeline((types.intp,))
        w_dce = test_pipeline.compile_to_ir(foo, DCE=True)
        check_dce_ir(w_dce)

        # check that the count of initial - removed = dce
        self.assertEqual(len(no_dce.blocks[0].body) - len(removed),
                         len(w_dce.blocks[0].body))

    def test_find_const_global(self):
        """
        Test find_const() for values in globals (ir.Global) and freevars
        (ir.FreeVar) that are considered constants for compilation.
        """
        FREEVAR_C = 12

        def foo(a):
            b = GLOBAL_B
            c = FREEVAR_C
            return a + b + c

        f_ir = compiler.run_frontend(foo)
        block = f_ir.blocks[0]
        const_b = None
        const_c = None

        for inst in block.body:
            if isinstance(inst, ir.Assign) and inst.target.name == 'b':
                const_b = ir_utils.guard(
                    ir_utils.find_const, f_ir, inst.target)
            if isinstance(inst, ir.Assign) and inst.target.name == 'c':
                const_c = ir_utils.guard(
                    ir_utils.find_const, f_ir, inst.target)

        self.assertEqual(const_b, GLOBAL_B)
        self.assertEqual(const_c, FREEVAR_C)

    def test_flatten_labels(self):
        """ tests flatten_labels """
        # foo and bar have identical control flow (bar just adds extra
        # straight-line statements); baz has an extra branch.
        def foo(a):
            acc = 0
            if a > 3:
                acc += 1
                if a > 19:
                    return 53
            elif a < 1000:
                if a >= 12:
                    acc += 1
                    for x in range(10):
                        acc -= 1
                        if acc < 2:
                            break
                    else:
                        acc += 7
            else:
                raise ValueError("some string")
            return acc

        def bar(a):
            acc = 0
            z = 12
            if a > 3:
                acc += 1
                z += 12
                if a > 19:
                    z += 12
                    return 53
            elif a < 1000:
                if a >= 12:
                    z += 12
                    acc += 1
                    for x in range(10):
                        z += 12
                        acc -= 1
                        if acc < 2:
                            break
                    else:
                        z += 12
                        acc += 7
            else:
                raise ValueError("some string")
            return acc

        def baz(a):
            acc = 0
            if a > 3:
                acc += 1
                if a > 19:
                    return 53
                else:  # extra control flow in comparison to foo
                    return 55
            elif a < 1000:
                if a >= 12:
                    acc += 1
                    for x in range(10):
                        acc -= 1
                        if acc < 2:
                            break
                    else:
                        acc += 7
            else:
                raise ValueError("some string")
            return acc

        def get_flat_cfg(func):
            func_ir = ir_utils.compile_to_numba_ir(func, dict())
            flat_blocks = ir_utils.flatten_labels(func_ir.blocks)
            # flattened labels must be contiguous starting at 0
            self.assertEqual(max(flat_blocks.keys()) + 1, len(func_ir.blocks))
            return ir_utils.compute_cfg_from_blocks(flat_blocks)

        foo_cfg = get_flat_cfg(foo)
        bar_cfg = get_flat_cfg(bar)
        baz_cfg = get_flat_cfg(baz)

        self.assertEqual(foo_cfg, bar_cfg)
        self.assertNotEqual(foo_cfg, baz_cfg)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
|
"""
Action creates a new RDS instance and stores its data as an attribute on the
new deployed service.
"""
import json
from infrastructure.models import CustomField, Environment
from orders.models import CustomFieldValue
from resourcehandlers.aws.models import AWSHandler
from common.methods import set_progress
def create_custom_fields_as_needed():
    """Ensure the CustomFields used by the AWS Databases blueprint exist."""
    wanted = [
        ('aws_rh_id', 'AWS RH ID'),
        ('db_identifier', 'AWS database identifier'),
    ]
    for field_name, field_label in wanted:
        CustomField.objects.get_or_create(
            name=field_name,
            defaults={
                "label": field_label,
                "type": 'STR',
                "description": 'Used by the AWS Databases blueprint'
            }
        )
def run(resource, logger=None, **kwargs):
    """
    Create an AWS RDS instance from the action's inputs and store its data
    on the deployed resource. Returns a CloudBolt (status, output, errors)
    tuple.
    """
    create_custom_fields_as_needed()
    set_progress('Creating AWS RDS instance...')
    # AWS requires DB Name to have a certain format (only alphanumeric). To
    # have CB properly validate user input on this parameter, an admin should go
    # to the action's detail page, edit the 'DB Name' Action Input and set a
    # "Regex constraint" value of "^[a-zA-Z]\w+$".
    # The '{{ ... }}' tokens are CloudBolt action-input templates substituted
    # before this script runs; do not change them.
    db_name = '{{ db_name }}'
    db_username = '{{ db_username }}'
    db_password = '{{ db_password }}'
    db_identifier = '{{ db_identifier }}'
    instance_class = '{{ instance_class }}'
    engine = '{{ aws_rds_engine }}'
    allocated_storage = int('{{ allocated_storage }}')
    license_model = "{{ license_model }}"
    env = Environment.objects.get(id='{{ aws_environment }}')
    set_progress('Connecting to AWS RDS in region {0}.'.format(env.aws_region))
    client = connect_to_rds(env)
    # Build the settings with a placeholder password so they can be logged
    # safely; the real password is substituted just before the API call.
    rds_settings = dict(
        DBName=db_name,
        DBInstanceIdentifier=db_identifier,
        AllocatedStorage=allocated_storage,
        DBInstanceClass=instance_class,
        Engine=engine,
        MasterUsername=db_username,
        MasterUserPassword='********',
        LicenseModel=license_model,
    )
    if not license_model:
        rds_settings.pop('LicenseModel')
    # Log with password redacted, then update dict with actual password
    set_progress('RDS settings:\n{0}'.format(rds_settings))
    rds_settings.update(dict(MasterUserPassword=db_password))
    try:
        response = client.create_db_instance(**rds_settings)
    except Exception as err:
        # Duplicate identifiers are reported as a failure rather than a crash.
        if 'DBInstanceAlreadyExists' in str(err):
            return "FAILURE", "Database already exists", "DB instance {} exists already".format(db_identifier)
        raise
    # It takes awhile for the DB to be created and backed up.
    waiter = client.get_waiter('db_instance_available')
    waiter.config.max_attempts = 100  # default is 40 but oracle takes more time.
    waiter.wait(DBInstanceIdentifier=db_identifier)
    instance = boto_instance_to_dict(response['DBInstance'])
    store_instance_data_on_service(instance, resource)
    store_aws_environment_on_service(env, resource)
    resource.db_identifier = db_identifier
    resource.name = db_identifier
    resource.save()
    set_progress('RDS instance {0} created.'.format(instance['identifier']))
    return 'SUCCESS', '', ''
def connect_to_rds(env):
    """
    Return boto connection to the RDS in the specified environment's region.
    """
    handler = env.resource_handler.cast()
    wrapper = handler.get_api_wrapper()
    return wrapper.get_boto3_client(
        'rds',
        handler.serviceaccount,
        handler.servicepasswd,
        env.aws_region,
    )
def boto_instance_to_dict(boto_instance):
    """
    Create a pared-down representation of an RDS instance from the full boto
    dictionary.
    """
    # Endpoint may not be returned if networking is not set up yet
    endpoint = boto_instance.get('Endpoint', {})
    return {
        'identifier': boto_instance['DBInstanceIdentifier'],
        'engine': boto_instance['Engine'],
        'status': boto_instance['DBInstanceStatus'],
        'username': boto_instance['MasterUsername'],
        'address': endpoint.get('Address'),
        'port': endpoint.get('Port'),
    }
def store_instance_data_on_service(instance, resource):
    """
    Create parameter and CFV objects as needed to store the JSON-formatted
    instance data in CloudBolt. Used for ongoing management of databases and the
    RDS instance.
    """
    field, _ = CustomField.objects.get_or_create(
        name='rds_instance',
        defaults={
            'label': 'RDS Instance',
            'type': 'CODE',
            'description': 'JSON-formatted data about an AWS RDS instance.',
        },
    )
    value, _ = CustomFieldValue.objects.get_or_create(
        field=field, value=json.dumps(instance))
    resource.attributes.add(value)
def store_aws_environment_on_service(env, resource):
    """
    Create parameter and CFV objects as needed to store the chosen Environment's
    ID as an attribute on the deployed service. Used by the
    "Refresh RDS Instance Data" action.
    """
    field, _ = CustomField.objects.get_or_create(
        name='aws_environment',
        defaults={'label': 'AWS Environment', 'type': 'INT'},
    )
    value, _ = CustomFieldValue.objects.get_or_create(
        field=field, value=env.id)
    resource.attributes.add(value)
def generate_options_for_aws_environment(profile=None, **kwargs):
    """Return (id, name) choices for AWS environments the profile can view."""
    viewable = Environment.objects_for_profile(profile)
    handlers = AWSHandler.objects.all()
    return [
        (env.id, env.name)
        for env in viewable.filter(resource_handler_id__in=handlers)
    ]
def generate_options_for_aws_rds_engine(**kwargs):
    """Return (value, label) pairs for the RDS engines offered to the user."""
    engines = (
        'aurora',
        'aurora-mysql',
        'aurora-postgresql',
        'mariadb',
        'MySQL',
        'postgres',
        'oracle-se2',
        'oracle-se1',
        'oracle-se',
        'oracle-ee',
        'sqlserver-ee',
        'sqlserver-se',
        'sqlserver-ex',
        'sqlserver-web',
    )
    # Value and display label are identical for engines.
    return [(engine, engine) for engine in engines]
def generate_options_for_instance_class(**kwargs):
    """Return (value, label) choices for RDS instance classes, labeled by family."""
    families = [
        ('Burst Capable',
         ['db.t2.micro', 'db.t2.small', 'db.t2.medium', 'db.t2.large']),
        ('Standard',
         ['db.m4.large', 'db.m4.xlarge', 'db.m4.2xlarge', 'db.m4.4xlarge',
          'db.m4.10xlarge']),
        ('Memory Optimized',
         ['db.r3.large', 'db.r3.xlarge', 'db.r3.2xlarge', 'db.r3.4xlarge',
          'db.r3.8xlarge']),
    ]
    return [
        (size, '{0} - {1}'.format(family, size))
        for family, sizes in families
        for size in sizes
    ]
def generate_options_for_license_model(**kwargs):
    """Return the RDS license models a user may pick from."""
    models = (
        "license-included",
        "bring-your-own-license",
        "general-public-license",
    )
    return list(models)
|
|
#!/usr/bin/python3
import json
import os
import getopt
import sys, inspect
this_folder = os.path.realpath(os.path.abspath(os.path.split(inspect.getfile(inspect.currentframe()))[0]))
src_folder = os.path.realpath(os.path.abspath(os.path.join(this_folder,"..", "src")))
if src_folder not in sys.path:
sys.path.insert(0, src_folder)
# must first init loadenv
import tools, env
config = env.getenv("CONFIG")
tools.loadenv(config)
from webViews.log import initlogging
initlogging("docklet-web")
from webViews.log import logger
from flask import Flask, request, session, render_template, redirect, send_from_directory, make_response, url_for, abort
from webViews.dashboard import dashboardView
from webViews.user.userlist import userlistView, useraddView, usermodifyView, userdataView, userqueryView
from webViews.notification.notification import CreateNotificationView, NotificationView, QuerySelfNotificationsView, \
QueryNotificationView, ModifyNotificationView, DeleteNotificationView
from webViews.user.userinfo import userinfoView
from webViews.user.userActivate import userActivateView
from webViews.user.grouplist import grouplistView, groupqueryView, groupdetailView, groupmodifyView
from functools import wraps
from webViews.dockletrequest import dockletRequest
from webViews.cluster import *
from webViews.admin import *
from webViews.monitor import *
from webViews.beansapplication import *
from webViews.authenticate.auth import login_required, administration_required,activated_required
from webViews.authenticate.register import registerView
from webViews.authenticate.login import loginView, logoutView
import webViews.dockletrequest
from webViews import cookie_tool
# External-login support: default URLs are used unless the EXTERNAL_LOGIN
# env setting is the string 'True', in which case a site-specific plugin
# supplies the URLs and view classes.
external_login = env.getenv('EXTERNAL_LOGIN')
#default config
external_login_url = '/external_auth/'
external_login_callback_url = '/external_auth_callback/'
if (external_login == 'True'):
    sys.path.insert(0, os.path.realpath(os.path.abspath(os.path.join(this_folder,"../src", "plugin"))))
    import external_generate
    from webViews.authenticate.login import external_loginView, external_login_callbackView
    external_login_url = external_generate.external_login_url
    external_login_callback_url = external_generate.external_login_callback_url

app = Flask(__name__)
# ---- Home and authentication routes ----
# Each view function simply delegates to a class-based view's as_view().

@app.route("/", methods=['GET'])
def home():
    return render_template('home.html')

@app.route("/login/", methods=['GET', 'POST'])
def login():
    return loginView.as_view()

# The external login views only exist when EXTERNAL_LOGIN is enabled;
# otherwise referencing them raises and we 404.
@app.route(external_login_url, methods=['GET'])
def external_login_func():
    try:
        return external_loginView.as_view()
    except:
        abort(404)

@app.route(external_login_callback_url, methods=['GET'])
def external_login_callback():
    try:
        return external_login_callbackView.as_view()
    except:
        abort(404)

@app.route("/logout/", methods=["GET"])
@login_required
def logout():
    return logoutView.as_view()

@app.route("/register/", methods=['GET', 'POST'])
@administration_required
#now forbidden,only used by SEI & PKU Staffs and students.
#can be used by admin for testing
def register():
    return registerView.as_view()

@app.route("/activate/", methods=['GET', 'POST'])
@login_required
def activate():
    return userActivateView.as_view()
# ---- Dashboard, documentation, and workspace listing/creation routes ----

@app.route("/dashboard/", methods=['GET'])
@login_required
def dashboard():
    return dashboardView.as_view()

@app.route("/document/", methods=['GET'])
def redirect_dochome():
    return redirect("http://docklet.unias.org/userguide")

@app.route("/examples/", methods=['GET'])
def show_examples():
    return redirect("http://docklet.unias.org/docklet-book/userguide/_book/zh/notebook/docklet-bigdata-python.html")

@app.route("/config/", methods=['GET'])
@login_required
def config():
    return configView.as_view()

@app.route("/workspace/create/", methods=['GET'])
@activated_required
def addCluster():
    return addClusterView.as_view()

@app.route("/workspace/list/", methods=['GET'])
@login_required
def listCluster():
    return listClusterView.as_view()

# NOTE(review): request parameters are passed by assigning *class*
# attributes on the view class before as_view(); under a threaded server
# concurrent requests could race on these -- confirm whether as_view()
# snapshots them synchronously.
@app.route("/workspace/add/", methods=['POST'])
@login_required
def createCluster():
    createClusterView.clustername = request.form["clusterName"]
    createClusterView.image = request.form["image"]
    return createClusterView.as_view()
# ---- Cluster lifecycle routes (scale, start/stop, delete, detail, save) ----
# URL parameters are forwarded to the view classes as class attributes;
# see the concurrency note on createCluster above this group.

@app.route("/workspace/scaleout/<clustername>/", methods=['POST'])
@login_required
def scaleout(clustername):
    scaleoutView.image = request.form["image"]
    scaleoutView.clustername = clustername
    return scaleoutView.as_view()

@app.route("/workspace/scalein/<clustername>/<containername>/", methods=['GET'])
@login_required
def scalein(clustername,containername):
    scaleinView.clustername = clustername
    scaleinView.containername = containername
    return scaleinView.as_view()

@app.route("/workspace/start/<clustername>/", methods=['GET'])
@login_required
def startClustet(clustername):
    startClusterView.clustername = clustername
    return startClusterView.as_view()

@app.route("/workspace/stop/<clustername>/", methods=['GET'])
@login_required
def stopClustet(clustername):
    stopClusterView.clustername = clustername
    return stopClusterView.as_view()

@app.route("/workspace/delete/<clustername>/", methods=['GET'])
@login_required
def deleteClustet(clustername):
    deleteClusterView.clustername = clustername
    return deleteClusterView.as_view()

@app.route("/workspace/detail/<clustername>/", methods=['GET'])
@login_required
def detailCluster(clustername):
    detailClusterView.clustername = clustername
    return detailClusterView.as_view()

@app.route("/workspace/flush/<clustername>/<containername>/", methods=['GET'])
@login_required
def flushCluster(clustername,containername):
    flushClusterView.clustername = clustername
    flushClusterView.containername = containername
    return flushClusterView.as_view()

# Save a container as an image; the /force/ variant overwrites an
# existing image of the same name.
@app.route("/workspace/save/<clustername>/<containername>/", methods=['POST'])
@login_required
def saveImage(clustername,containername):
    saveImageView.clustername = clustername
    saveImageView.containername = containername
    saveImageView.isforce = "false"
    saveImageView.imagename = request.form['ImageName']
    saveImageView.description = request.form['description']
    return saveImageView.as_view()

@app.route("/workspace/save/<clustername>/<containername>/force/", methods=['POST'])
@login_required
def saveImage_force(clustername,containername):
    saveImageView.clustername = clustername
    saveImageView.containername = containername
    saveImageView.isforce = "true"
    saveImageView.imagename = request.form['ImageName']
    saveImageView.description = request.form['description']
    return saveImageView.as_view()
# ---- Proxy and image management routes ----

@app.route("/addproxy/<clustername>/", methods=['POST'])
@login_required
def addproxy(clustername):
    addproxyView.clustername = clustername
    addproxyView.ip = request.form['proxy_ip']
    addproxyView.port = request.form['proxy_port']
    return addproxyView.as_view()

@app.route("/deleteproxy/<clustername>/", methods=['GET'])
@login_required
def deleteproxy(clustername):
    deleteproxyView.clustername = clustername
    return deleteproxyView.as_view()

@app.route("/image/description/<image>/", methods=['GET'])
@login_required
def descriptionImage(image):
    descriptionImageView.image = image
    return descriptionImageView.as_view()

@app.route("/image/share/<image>/", methods=['GET'])
@login_required
def shareImage(image):
    shareImageView.image = image
    return shareImageView.as_view()

@app.route("/image/unshare/<image>/", methods=['GET'])
@login_required
def unshareImage(image):
    unshareImageView.image = image
    return unshareImageView.as_view()

@app.route("/image/delete/<image>/", methods=['GET'])
@login_required
def deleteImage(image):
    deleteImageView.image = image
    return deleteImageView.as_view()

@app.route("/image/updatebase/<image>/", methods=['GET'])
@login_required
def updatebaseImage(image):
    updatebaseImageView.image = image
    return updatebaseImageView.as_view()
# ---- Host / vcluster monitoring and history routes ----

@app.route("/hosts/", methods=['GET'])
@administration_required
def hosts():
    return hostsView.as_view()

@app.route("/hosts/<com_ip>/", methods=['GET'])
@administration_required
def hostsRealtime(com_ip):
    hostsRealtimeView.com_ip = com_ip
    return hostsRealtimeView.as_view()

@app.route("/hosts/<com_ip>/containers/", methods=['GET'])
@administration_required
def hostsConAll(com_ip):
    hostsConAllView.com_ip = com_ip
    return hostsConAllView.as_view()

@app.route("/vclusters/", methods=['GET'])
@login_required
def status():
    return statusView.as_view()

# NOTE(review): vcluster_name is accepted but never forwarded to the view;
# only node_name is used -- confirm this is intentional.
@app.route("/vclusters/<vcluster_name>/<node_name>/", methods=['GET'])
@login_required
def statusRealtime(vcluster_name,node_name):
    statusRealtimeView.node_name = node_name
    return statusRealtimeView.as_view()

@app.route("/history/", methods=['GET'])
#@login_required
def history():
    return historyView.as_view()

@app.route("/history/<vnode_name>/", methods=['GET'])
@login_required
def historyVNode(vnode_name):
    historyVNodeView.vnode_name = vnode_name
    return historyVNodeView.as_view()
# ---- Monitor proxy endpoint and beans (quota credits) routes ----

# Proxies a monitor query to the docklet master and returns its JSON reply.
@app.route("/monitor/hosts/<comid>/<infotype>/", methods=['POST'])
@app.route("/monitor/vnodes/<comid>/<infotype>/", methods=['POST'])
@login_required
def monitor_request(comid,infotype):
    data = {
        "user": session['username']
    }
    result = dockletRequest.post(request.path, data)
    return json.dumps(result)

@app.route("/beans/application/", methods=['GET'])
@login_required
def beansapplication():
    return beansapplicationView.as_view()

@app.route("/beans/apply/", methods=['POST'])
@login_required
def beansapply():
    return beansapplyView.as_view()

# Only "agree"/"reject" commands are honored; anything else bounces back
# to the user list.
@app.route("/beans/admin/<msgid>/<cmd>/", methods=['GET'])
@login_required
@administration_required
def beansadmin(msgid,cmd):
    beansadminView.msgid = msgid
    if cmd == "agree" or cmd == "reject":
        beansadminView.cmd = cmd
        return beansadminView.as_view()
    else:
        return redirect("/user/list/")

'''@app.route("/monitor/User/", methods=['GET'])
@administration_required
def monitorUserAll():
    return monitorUserAllView.as_view()
'''
# ---- User, group, and quota administration routes ----

@app.route("/user/list/", methods=['GET', 'POST'])
@administration_required
def userlist():
    return userlistView.as_view()

@app.route("/group/list/", methods=['POST'])
@administration_required
def grouplist():
    return grouplistView.as_view()

@app.route("/group/detail/", methods=['POST'])
@administration_required
def groupdetail():
    return groupdetailView.as_view()

@app.route("/group/query/", methods=['POST'])
@administration_required
def groupquery():
    return groupqueryView.as_view()

@app.route("/group/modify/<groupname>/", methods=['POST'])
@administration_required
def groupmodify(groupname):
    return groupmodifyView.as_view()

@app.route("/user/data/", methods=['GET', 'POST'])
@administration_required
def userdata():
    return userdataView.as_view()

@app.route("/user/add/", methods=['POST'])
@administration_required
def useradd():
    return useraddView.as_view()

# /user/modify/ and /user/change/ both delegate to usermodifyView.
@app.route("/user/modify/", methods=['POST'])
@administration_required
def usermodify():
    return usermodifyView.as_view()

@app.route("/user/change/", methods=['POST'])
@administration_required
def userchange():
    return usermodifyView.as_view()

@app.route("/quota/add/", methods=['POST'])
@administration_required
def quotaadd():
    return quotaaddView.as_view()

@app.route("/quota/chdefault/", methods=['POST'])
@administration_required
def chdefault():
    return chdefaultView.as_view()

@app.route("/quota/chlxcsetting/", methods=['POST'])
@administration_required
def chlxcsetting():
    return chlxcsettingView.as_view()

@app.route("/group/add/", methods=['POST'])
@administration_required
def groupadd():
    return groupaddView.as_view()

@app.route("/group/delete/<groupname>/", methods=['POST', 'GET'])
@administration_required
def groupdel(groupname):
    groupdelView.groupname = groupname
    return groupdelView.as_view()

@app.route("/user/info/", methods=['GET', 'POST'])
@login_required
def userinfo():
    return userinfoView.as_view()

@app.route("/user/query/", methods=['GET', 'POST'])
@administration_required
def userquery():
    return userqueryView.as_view()
# ---- Notification and system-settings routes ----

@app.route("/notification/", methods=['GET'])
@administration_required
def notification_list():
    return NotificationView.as_view()

@app.route("/notification/create/", methods=['GET', 'POST'])
@administration_required
def create_notification():
    return CreateNotificationView.as_view()

@app.route("/notification/modify/", methods=['POST'])
@administration_required
def modify_notification():
    return ModifyNotificationView.as_view()

@app.route("/notification/delete/", methods=['POST'])
@administration_required
def delete_notification():
    return DeleteNotificationView.as_view()

@app.route("/notification/query_self/", methods=['POST'])
@login_required
def query_self_notifications():
    return QuerySelfNotificationsView.as_view()

@app.route("/notification/detail/<notify_id>/", methods=['GET'])
@login_required
def query_notification_detail(notify_id):
    return QueryNotificationView.get_by_id(notify_id)

@app.route("/system/modify/", methods=['POST'])
@administration_required
def systemmodify():
    return systemmodifyView.as_view()

@app.route("/system/clear_history/", methods=['POST'])
@administration_required
def systemclearhistory():
    return systemclearView.as_view()

@app.route("/system/add/", methods=['POST'])
@administration_required
def systemadd():
    return systemaddView.as_view()

@app.route("/system/delete/", methods=['POST'])
@administration_required
def systemdelete():
    return systemdeleteView.as_view()

@app.route("/system/resetall/", methods=['POST'])
@administration_required
def systemresetall():
    return systemresetallView.as_view()

@app.route("/admin/", methods=['GET', 'POST'])
@administration_required
def adminpage():
    return adminView.as_view()
# ---- Jupyter integration routes ----
# These endpoints let a Jupyter instance delegate authentication to the
# docklet web session (login redirects and a cookie-based auth API).

@app.route('/index/', methods=['GET'])
def jupyter_control():
    return redirect('/dashboard/')

# for download basefs.tar.bz
# remove, not the function of docklet
# should download it from a http server
#@app.route('/download/basefs', methods=['GET'])
#def download():
    #fsdir = env.getenv("FS_PREFIX")
    #return send_from_directory(fsdir+'/local', 'basefs.tar.bz', as_attachment=True)

# jupyter auth APIs
@app.route('/jupyter/', methods=['GET'])
def jupyter_prefix():
    # Preserve the 'next' target through the login redirect, if present.
    path = request.args.get('next')
    if path == None:
        return redirect('/login/')
    return redirect('/login/'+'?next='+path)

@app.route('/jupyter/home/', methods=['GET'])
def jupyter_home():
    return redirect('/dashboard/')

@app.route('/jupyter/login/', methods=['GET', 'POST'])
def jupyter_login():
    return redirect('/login/')

@app.route('/jupyter/logout/', methods=['GET'])
def jupyter_logout():
    return redirect('/logout/')

# Validates a signed cookie and returns the username as JSON; 404 on
# failure.
@app.route('/jupyter/authorizations/cookie/<cookie_name>/<cookie_content>/', methods=['GET'])
def jupyter_auth(cookie_name, cookie_content):
    username = cookie_tool.parse_cookie(cookie_content, app.secret_key)
    if username == None:
        resp = make_response('cookie auth failed')
        resp.status_code = 404
        return resp
    return json.dumps({'name': username})
# ---- Error handlers ----

@app.errorhandler(401)
def not_authorized(error):
    # A '401' entry in the session carries a reason set by whoever raised
    # the error; an expired token forces a full logout.
    if "username" in session:
        if "401" in session:
            reason = session['401']
            session.pop('401', None)
            if (reason == 'Token Expired'):
                return redirect('/logout/')
        return render_template('error/401.html', mysession = session)
    else:
        return redirect('/login/')

@app.errorhandler(500)
def internal_server_error(error):
    # Optional '500'/'500_title' session entries allow callers to customize
    # the error page; both are consumed (popped) here.
    logger.error(error)
    if "username" in session:
        if "500" in session and "500_title" in session:
            reason = session['500']
            title = session['500_title']
            session.pop('500', None)
            session.pop('500_title', None)
        else:
            reason = '''The server encountered something unexpected that didn't allow it to complete the request. We apologize.You can go back to
                    <a href="/dashboard/">dashboard</a> or <a href="/logout">log out</a>'''
            title = 'Internal Server Error'
        return render_template('error/500.html', mysession = session, reason = reason, title = title)
    else:
        return redirect('/login/')
if __name__ == '__main__':
    '''
    to generate a secret_key
    from base64 import b64encode
    from os import urandom
    secret_key = urandom(24)
    secret_key = b64encode(secret_key).decode('utf-8')
    '''
    logger.info('Start Flask...:')
    # Load the persisted secret key; on any failure (first run, missing or
    # unreadable file) generate a fresh one and persist it for next time.
    # Fixes: file handles are now closed even on error (with-statements),
    # and the bare ``except:`` (which also swallowed SystemExit /
    # KeyboardInterrupt) is narrowed to ``except Exception``.
    try:
        with open(env.getenv('FS_PREFIX') + '/local/web_secret_key.txt') as secret_key_file:
            app.secret_key = secret_key_file.read()
    except Exception:
        from base64 import b64encode
        from os import urandom
        secret_key = b64encode(urandom(24)).decode('utf-8')
        app.secret_key = secret_key
        with open(env.getenv('FS_PREFIX') + '/local/web_secret_key.txt', 'w') as secret_key_file:
            secret_key_file.write(secret_key)
    os.environ['APP_KEY'] = app.secret_key
    runcmd = sys.argv[0]
    app.runpath = runcmd.rsplit('/', 1)[0]
    webip = "0.0.0.0"
    webport = env.getenv("WEB_PORT")
    # NOTE(review): '%d' requires MASTER_PORT to be an int — confirm that
    # env.getenv converts it; WEB_PORT is likewise assumed numeric unless
    # overridden by -p below.
    webViews.dockletrequest.endpoint = 'http://%s:%d' % (env.getenv('MASTER_IP'), env.getenv('MASTER_PORT'))
    try:
        opts, args = getopt.getopt(sys.argv[1:], "i:p:", ["ip=", "port="])
    except getopt.GetoptError:
        print("%s -i ip -p port" % sys.argv[0])
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-i", "--ip"):
            webip = arg
        elif opt in ("-p", "--port"):
            webport = int(arg)
    app.run(host=webip, port=webport, threaded=True)
|
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
# Visual studio steps
from buildbot.steps.shell import ShellCommand
from buildbot.process.buildstep import LogLineObserver
from buildbot import config
from buildbot.status.results import SUCCESS, WARNINGS, FAILURE
import re
def addEnvPath(env, name, value):
    """Append *value* to the ';'-separated path list stored at env[name].

    Both the existing value and the appended one are normalized to end
    with a ';' so further appends concatenate cleanly.
    """
    if name in env:
        prefix = env[name]
        if not prefix.endswith(';'):
            prefix += ';'
    else:
        prefix = ""
    suffix = value if value.endswith(';') else value + ';'
    env[name] = prefix + suffix
class MSLogLineObserver(LogLineObserver):
    """Parse Visual Studio build output line by line.

    Counts projects, files, warnings and errors, reports them as step
    progress, and copies warning/error lines into dedicated logs.
    """
    _re_delimiter = re.compile(r'^(\d+>)?-{5}.+-{5}$')
    _re_file = re.compile(r'^(\d+>)?[^ ]+\.(cpp|c)$')
    _re_warning = re.compile(r' ?: warning [A-Z]+[0-9]+:')
    _re_error = re.compile(r' ?error ([A-Z]+[0-9]+)?\s?: ')
    # Counters (class-level defaults; rebound per instance on first update).
    nbFiles = 0
    nbProjects = 0
    nbWarnings = 0
    nbErrors = 0
    logwarnings = None
    logerrors = None
    def __init__(self, logwarnings, logerrors, **kwargs):
        LogLineObserver.__init__(self, **kwargs)
        self.logwarnings = logwarnings
        self.logerrors = logerrors
        # MSVC tools emit CRLF line endings.
        self.stdoutParser.delimiter = "\r\n"
        self.stderrParser.delimiter = "\r\n"
    def outLineReceived(self, line):
        if self._re_delimiter.search(line):
            self.nbProjects += 1
            self.logwarnings.addStdout("%s\n" % line)
            self.logerrors.addStdout("%s\n" % line)
            self.step.setProgress('projects', self.nbProjects)
        elif self._re_file.search(line):
            self.nbFiles += 1
            self.step.setProgress('files', self.nbFiles)
        elif self._re_warning.search(line):
            self.nbWarnings += 1
            self.logwarnings.addStdout("%s\n" % line)
            self.step.setProgress('warnings', self.nbWarnings)
        elif self._re_error.search(line):
            # Fix: previously searched "%s\n" % line; the appended newline
            # added nothing (the pattern is unanchored) and was inconsistent
            # with every other branch.
            # error has no progress indication
            self.nbErrors += 1
            self.logerrors.addStderr("%s\n" % line)
class VisualStudio(ShellCommand):
    """Abstract base class for Visual Studio build steps.

    Not usable as a buildstep by itself; concrete subclasses (VC6, VC7,
    VC8, MsBuild, ...) provide the actual command line in ``start()``.
    """
    name = "compile"
    description = "compiling"
    descriptionDone = "compile"
    # Track per-project/file/warning progress on top of ShellCommand's metrics.
    progressMetrics = ( ShellCommand.progressMetrics +
                        ('projects', 'files','warnings',))
    logobserver = None
    installdir = None
    default_installdir = None
    # One of build, rebuild or clean.
    mode = "rebuild"
    projectfile = None
    config = None
    useenv = False
    project = None
    PATH = []
    INCLUDE = []
    LIB = []
    renderables = [ 'projectfile', 'config', 'project' ]
    def __init__(self,
                installdir = None,
                mode = "rebuild",
                projectfile = None,
                config = 'release',
                useenv = False,
                project = None,
                INCLUDE = None,
                LIB = None,
                PATH = None,
                **kwargs):
        """
        :param installdir: Visual Studio install root (defaults to the
            subclass's ``default_installdir``).
        :param mode: "build", "rebuild" or "clean".
        :param projectfile: solution/project file handed to the build tool.
        :param config: build configuration, e.g. 'release'.
        :param useenv: force the tool to honor INCLUDE/LIB/PATH from env.
        :param INCLUDE, LIB, PATH: extra path lists for the environment;
            a non-empty INCLUDE or LIB implies ``useenv``.
        """
        self.installdir = installdir
        self.mode = mode
        self.projectfile = projectfile
        self.config = config
        self.useenv = useenv
        self.project = project
        # Fix: INCLUDE/LIB/PATH previously defaulted to mutable lists ([]),
        # which Python shares across calls; None is the "not given" marker
        # and a falsy test preserves the old len()>0 behavior exactly.
        if INCLUDE:
            self.INCLUDE = INCLUDE
            self.useenv = True
        if LIB:
            self.LIB = LIB
            self.useenv = True
        if PATH:
            self.PATH = PATH
        # always upcall !
        ShellCommand.__init__(self, **kwargs)
    def setupLogfiles(self, cmd, logfiles):
        """Create 'warnings'/'errors' logs and attach the MSVC line parser."""
        logwarnings = self.addLog("warnings")
        logerrors = self.addLog("errors")
        self.logobserver = MSLogLineObserver(logwarnings, logerrors)
        self.addLogObserver('stdio', self.logobserver)
        ShellCommand.setupLogfiles(self, cmd, logfiles)
    def setupInstalldir(self):
        """Fall back to the per-class default installation directory."""
        if not self.installdir:
            self.installdir = self.default_installdir
    def setupEnvironment(self, cmd):
        """Seed PATH/INCLUDE/LIB in the command's environment."""
        ShellCommand.setupEnvironment(self, cmd)
        if cmd.args['env'] is None:
            cmd.args['env'] = {}
        # setup the custom one, those one goes first
        for path in self.PATH:
            addEnvPath(cmd.args['env'], "PATH", path)
        for path in self.INCLUDE:
            addEnvPath(cmd.args['env'], "INCLUDE", path)
        for path in self.LIB:
            addEnvPath(cmd.args['env'], "LIB", path)
        self.setupInstalldir()
    def describe(self, done=False):
        """Extend ShellCommand's description with project/file/warning/error counts."""
        description = ShellCommand.describe(self, done)
        if done:
            description.append('%d projects' % self.step_status.getStatistic('projects', 0))
            description.append('%d files' % self.step_status.getStatistic('files', 0))
            warnings = self.step_status.getStatistic('warnings', 0)
            if warnings > 0:
                description.append('%d warnings' % warnings)
            errors = self.step_status.getStatistic('errors', 0)
            if errors > 0:
                description.append('%d errors' % errors)
        return description
    def createSummary(self, log):
        """Publish the observer's counters as step statistics."""
        self.step_status.setStatistic('projects', self.logobserver.nbProjects)
        self.step_status.setStatistic('files', self.logobserver.nbFiles)
        self.step_status.setStatistic('warnings', self.logobserver.nbWarnings)
        self.step_status.setStatistic('errors', self.logobserver.nbErrors)
    def evaluateCommand(self, cmd):
        """FAILURE on non-zero exit or any error line; WARNINGS if only warnings."""
        if cmd.didFail():
            return FAILURE
        if self.logobserver.nbErrors > 0:
            return FAILURE
        if self.logobserver.nbWarnings > 0:
            return WARNINGS
        else:
            return SUCCESS
    def finished(self, result):
        """Close the extra logs before the normal step teardown."""
        self.getLog("warnings").finish()
        self.getLog("errors").finish()
        ShellCommand.finished(self, result)
class VC6(VisualStudio):
    """Build with Visual C++ 6 through the ``msdev`` command-line driver."""
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio'
    def setupEnvironment(self, cmd):
        VisualStudio.setupEnvironment(self, cmd)
        env = cmd.args['env']
        # Root of Visual Developer Studio Common files.
        common = self.installdir + '\\Common'
        msvc = self.installdir + '\\VC98'
        msdev = common + '\\msdev98'
        for p in (msdev + '\\BIN',
                  msvc + '\\BIN',
                  common + '\\TOOLS\\WINNT',
                  common + '\\TOOLS'):
            addEnvPath(env, "PATH", p)
        for p in (msvc + '\\INCLUDE',
                  msvc + '\\ATL\\INCLUDE',
                  msvc + '\\MFC\\INCLUDE'):
            addEnvPath(env, "INCLUDE", p)
        for p in (msvc + '\\LIB',
                  msvc + '\\MFC\\LIB'):
            addEnvPath(env, "LIB", p)
    def start(self):
        # msdev expects "<project> - <config>" as the /MAKE target.
        target = self.project if self.project is not None else "ALL"
        command = ["msdev", self.projectfile, "/MAKE", target + " - " + self.config]
        if self.mode == "rebuild":
            command.append("/REBUILD")
        elif self.mode == "clean":
            command.append("/CLEAN")
        else:
            command.append("/BUILD")
        if self.useenv:
            command.append("/USEENV")
        self.setCommand(command)
        return VisualStudio.start(self)
class VC7(VisualStudio):
    """Build with Visual Studio .NET 2003 through ``devenv.com``."""
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio .NET 2003'
    def setupEnvironment(self, cmd):
        VisualStudio.setupEnvironment(self, cmd)
        env = cmd.args['env']
        vs_ide = self.installdir + '\\Common7\\IDE'
        vc_root = self.installdir
        msvc = self.installdir + '\\VC7'
        for p in (vs_ide,
                  msvc + '\\BIN',
                  vc_root + '\\Common7\\Tools',
                  vc_root + '\\Common7\\Tools\\bin'):
            addEnvPath(env, "PATH", p)
        for p in (msvc + '\\INCLUDE',
                  msvc + '\\ATLMFC\\INCLUDE',
                  msvc + '\\PlatformSDK\\include',
                  vc_root + '\\SDK\\v1.1\\include'):
            addEnvPath(env, "INCLUDE", p)
        for p in (msvc + '\\LIB',
                  msvc + '\\ATLMFC\\LIB',
                  msvc + '\\PlatformSDK\\lib',
                  vc_root + '\\SDK\\v1.1\\lib'):
            addEnvPath(env, "LIB", p)
    def start(self):
        command = ["devenv.com", self.projectfile]
        # Map the step mode onto the matching devenv switch.
        command.append({"rebuild": "/Rebuild", "clean": "/Clean"}.get(self.mode, "/Build"))
        command.append(self.config)
        if self.useenv:
            command.append("/UseEnv")
        if self.project is not None:
            command.extend(["/Project", self.project])
        self.setCommand(command)
        return VisualStudio.start(self)
# VS2003 is simply an alias for the VC7 step.
VS2003 = VC7
class VC8(VC7):
    """Build with Visual Studio 2005 (VC8); supports x86 and x64."""
    # Our ones
    arch = None
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 8'
    # NOTE(review): this overrides VC7/VisualStudio.renderables with just
    # ['arch'] — presumably buildbot collects renderables across the class
    # hierarchy; confirm against the BuildStep implementation.
    renderables = ['arch']
    def __init__(self, arch = "x86", **kwargs):
        # arch: "x86" (default) or "x64"; selects cross-compiler and lib dirs.
        self.arch = arch
        # always upcall !
        VisualStudio.__init__(self, **kwargs)
    def setupEnvironment(self, cmd):
        """Populate PATH/INCLUDE/LIB for the VS2005 toolchain (arch-aware)."""
        VisualStudio.setupEnvironment(self, cmd)
        VSInstallDir = self.installdir
        VCInstallDir = self.installdir + '\\VC'
        addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\IDE')
        # x64 builds use the x86->amd64 cross tools, which must precede \BIN.
        if self.arch == "x64":
            addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\BIN\\x86_amd64')
        addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\BIN')
        addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\Tools')
        addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\Common7\\Tools\\bin')
        addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\PlatformSDK\\bin')
        addEnvPath(cmd.args['env'], "PATH", VSInstallDir + '\\SDK\\v2.0\\bin')
        addEnvPath(cmd.args['env'], "PATH", VCInstallDir + '\\VCPackages')
        # Keep whatever PATH the environment already provides (literal
        # ${PATH} is expanded on the worker side).
        addEnvPath(cmd.args['env'], "PATH", r'${PATH}')
        addEnvPath(cmd.args['env'], "INCLUDE", VCInstallDir + '\\INCLUDE')
        addEnvPath(cmd.args['env'], "INCLUDE", VCInstallDir + '\\ATLMFC\\include')
        addEnvPath(cmd.args['env'], "INCLUDE", VCInstallDir + '\\PlatformSDK\\include')
        # 64-bit libraries live in an \amd64 subdirectory.
        archsuffix = ''
        if self.arch == "x64":
            archsuffix = '\\amd64'
        addEnvPath(cmd.args['env'], "LIB", VCInstallDir + '\\LIB' + archsuffix)
        addEnvPath(cmd.args['env'], "LIB", VCInstallDir + '\\ATLMFC\\LIB' + archsuffix)
        addEnvPath(cmd.args['env'], "LIB", VCInstallDir + '\\PlatformSDK\\lib' + archsuffix)
        addEnvPath(cmd.args['env'], "LIB", VSInstallDir + '\\SDK\\v2.0\\lib' + archsuffix)
# VS2005 is simply an alias for the VC8 step.
VS2005 = VC8
class VCExpress9(VC8):
    """Visual C++ Express: same as VC8 but drives ``vcexpress``
    instead of ``devenv.com``."""
    def start(self):
        command = ["vcexpress", self.projectfile]
        if self.mode == "rebuild":
            switch = "/Rebuild"
        elif self.mode == "clean":
            switch = "/Clean"
        else:
            switch = "/Build"
        command.append(switch)
        command.append(self.config)
        if self.useenv:
            command.append("/UseEnv")
        if self.project is not None:
            command += ["/Project", self.project]
        self.setCommand(command)
        return VisualStudio.start(self)
# VC9 (VS2008): identical to VC8 apart from the default install directory.
class VC9(VC8):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 9.0'
VS2008 = VC9
# VC10 (VS2010): no extra setup needed beyond a new install directory.
class VC10(VC9):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 10.0'
VS2010 = VC10
# VC11 (VS2012): no extra setup needed beyond a new install directory.
class VC11(VC10):
    default_installdir = 'C:\\Program Files\\Microsoft Visual Studio 11.0'
VS2012 = VC11
class MsBuild(VisualStudio):
    """Build a solution or single project with msbuild, run inside a
    VS2012 (%VS110COMNTOOLS%) vcvarsall environment."""
    platform = None
    def __init__(self, platform, **kwargs):
        self.platform = platform
        VisualStudio.__init__(self, **kwargs)
    def setupEnvironment(self, cmd):
        VisualStudio.setupEnvironment(self, cmd)
        # Expanded by the shell on the worker before msbuild runs.
        cmd.args['env']['VCENV_BAT'] = "\"${VS110COMNTOOLS}..\\..\\VC\\vcvarsall.bat\""
    def describe(self, done=False):
        verb = "built" if done else "building"
        target = "%s for" % (self.project) if self.project is not None else "solution for"
        return [verb, target, "%s|%s" % (self.config, self.platform)]
    def start(self):
        if self.platform is None:
            config.error('platform is mandatory. Please specify a string such as "Win32"')
        command = ["%VCENV_BAT%",
                   "x86",
                   "&&",
                   "msbuild",
                   self.projectfile,
                   "/p:Configuration=%s" % (self.config),
                   "/p:Platform=%s" % (self.platform)]
        if self.project is not None:
            command.append("/t:%s" % (self.project))
        self.setCommand(command)
        return VisualStudio.start(self)
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
    vault_name: str,
    resource_group_name: str,
    subscription_id: str,
    resource_guard_proxy_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP GET request for a single ResourceGuardProxy resource."""
    api_version = "2021-12-01"
    accept = "application/json"
    # URL: substitute the serialized path parameters into the ARM template.
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}')
    url = _format_url_section(url, **{
        "vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGuardProxyName": _SERIALIZER.url("resource_guard_proxy_name", resource_guard_proxy_name, 'str'),
    })
    # Query string.
    query_parameters = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers.
    header_parameters = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="GET",
        url=url,
        params=query_parameters,
        headers=header_parameters,
        **kwargs
    )
def build_put_request(
    vault_name: str,
    resource_group_name: str,
    subscription_id: str,
    resource_guard_proxy_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP PUT request that creates/updates a ResourceGuardProxy."""
    api_version = "2021-12-01"
    accept = "application/json"
    # URL: substitute the serialized path parameters into the ARM template.
    path_args = {
        "vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGuardProxyName": _SERIALIZER.url("resource_guard_proxy_name", resource_guard_proxy_name, 'str'),
    }
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}')
    url = _format_url_section(url, **path_args)
    # Query string.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="PUT",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_delete_request(
    vault_name: str,
    resource_group_name: str,
    subscription_id: str,
    resource_guard_proxy_name: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP DELETE request for a ResourceGuardProxy resource."""
    api_version = "2021-12-01"
    accept = "application/json"
    # URL: substitute the serialized path parameters into the ARM template.
    path_args = {
        "vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGuardProxyName": _SERIALIZER.url("resource_guard_proxy_name", resource_guard_proxy_name, 'str'),
    }
    url = _format_url_section(
        kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}'),
        **path_args
    )
    # Query string.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="DELETE",
        url=url,
        params=params,
        headers=headers,
        **kwargs
    )
def build_unlock_delete_request(
    vault_name: str,
    resource_group_name: str,
    subscription_id: str,
    resource_guard_proxy_name: str,
    *,
    json: JSONType = None,
    content: Any = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the HTTP POST request that unlocks deletion of a ResourceGuardProxy."""
    content_type = kwargs.pop('content_type', None)  # type: Optional[str]
    api_version = "2021-12-01"
    accept = "application/json"
    # URL: substitute the serialized path parameters into the ARM template.
    path_args = {
        "vaultName": _SERIALIZER.url("vault_name", vault_name, 'str'),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
        "resourceGuardProxyName": _SERIALIZER.url("resource_guard_proxy_name", resource_guard_proxy_name, 'str'),
    }
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}/unlockDelete')
    url = _format_url_section(url, **path_args)
    # Query string.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers; Content-Type only when a body content type was supplied.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    if content_type is not None:
        headers['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(
        method="POST",
        url=url,
        params=params,
        headers=headers,
        json=json,
        content=content,
        **kwargs
    )
class ResourceGuardProxyOperations(object):
    """ResourceGuardProxyOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.recoveryservicesbackup.activestamp.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def get(
        self,
        vault_name: str,
        resource_group_name: str,
        resource_guard_proxy_name: str,
        **kwargs: Any
    ) -> "_models.ResourceGuardProxyBaseResource":
        """Returns ResourceGuardProxy under vault and with the name referenced in request.
        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present.
        :type resource_group_name: str
        :param resource_guard_proxy_name:
        :type resource_guard_proxy_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ResourceGuardProxyBaseResource, or the result of cls(response)
        :rtype: ~azure.mgmt.recoveryservicesbackup.activestamp.models.ResourceGuardProxyBaseResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGuardProxyBaseResource"]
        # Map well-known ARM status codes onto typed azure-core exceptions;
        # callers may extend/override via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_get_request(
            vault_name=vault_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            resource_guard_proxy_name=resource_guard_proxy_name,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        # Synchronous send through the client pipeline.
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ResourceGuardProxyBaseResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}'} # type: ignore
    @distributed_trace
    def put(
        self,
        vault_name: str,
        resource_group_name: str,
        resource_guard_proxy_name: str,
        **kwargs: Any
    ) -> "_models.ResourceGuardProxyBaseResource":
        """Add or Update ResourceGuardProxy under vault
        Secures vault critical operations.
        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present.
        :type resource_group_name: str
        :param resource_guard_proxy_name:
        :type resource_guard_proxy_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ResourceGuardProxyBaseResource, or the result of cls(response)
        :rtype: ~azure.mgmt.recoveryservicesbackup.activestamp.models.ResourceGuardProxyBaseResource
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ResourceGuardProxyBaseResource"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        # NOTE(review): the generated PUT carries no request body — confirm
        # this matches the service swagger for this api-version.
        request = build_put_request(
            vault_name=vault_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            resource_guard_proxy_name=resource_guard_proxy_name,
            template_url=self.put.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('ResourceGuardProxyBaseResource', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}'} # type: ignore
    @distributed_trace
    def delete(
        self,
        vault_name: str,
        resource_group_name: str,
        resource_guard_proxy_name: str,
        **kwargs: Any
    ) -> None:
        """Delete ResourceGuardProxy under vault.
        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present.
        :type resource_group_name: str
        :param resource_guard_proxy_name:
        :type resource_guard_proxy_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request(
            vault_name=vault_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            resource_guard_proxy_name=resource_guard_proxy_name,
            template_url=self.delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 204 is returned when the proxy was already gone.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}'} # type: ignore
    @distributed_trace
    def unlock_delete(
        self,
        vault_name: str,
        resource_group_name: str,
        resource_guard_proxy_name: str,
        parameters: "_models.UnlockDeleteRequest",
        **kwargs: Any
    ) -> "_models.UnlockDeleteResponse":
        """Secures delete ResourceGuardProxy operations.
        :param vault_name: The name of the recovery services vault.
        :type vault_name: str
        :param resource_group_name: The name of the resource group where the recovery services vault is
         present.
        :type resource_group_name: str
        :param resource_guard_proxy_name:
        :type resource_guard_proxy_name: str
        :param parameters: Request body for operation.
        :type parameters: ~azure.mgmt.recoveryservicesbackup.activestamp.models.UnlockDeleteRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: UnlockDeleteResponse, or the result of cls(response)
        :rtype: ~azure.mgmt.recoveryservicesbackup.activestamp.models.UnlockDeleteResponse
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.UnlockDeleteResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]
        # Serialize the typed request body into JSON for the POST.
        _json = self._serialize.body(parameters, 'UnlockDeleteRequest')
        request = build_unlock_delete_request(
            vault_name=vault_name,
            resource_group_name=resource_group_name,
            subscription_id=self._config.subscription_id,
            resource_guard_proxy_name=resource_guard_proxy_name,
            content_type=content_type,
            json=_json,
            template_url=self.unlock_delete.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('UnlockDeleteResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    unlock_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupResourceGuardProxies/{resourceGuardProxyName}/unlockDelete'} # type: ignore
|
|
import logging
log = logging.getLogger(__name__)
import random
import string
from zope.interface import implementer
from pyramid.interfaces import (
IAuthenticationPolicy,
IDebugLogger,
)
from pyramid.security import (
Authenticated,
Everyone,
)
from webob.cookies import SignedCookieProfile as CookieHelper
from ...models import (
DBSession,
User,
UserTickets,
)
def _clean_principal(princid):
    """Map Pyramid's pseudo-principals (Authenticated/Everyone) to ``None``.

    Prevents a crafted cookie from smuggling in one of the built-in
    principal markers as a real user id; can be extended to blacklist
    further usernames.
    """
    return None if princid in (Authenticated, Everyone) else princid
@implementer(IAuthenticationPolicy)
class AuthPolicy(object):
def _log(self, msg, methodname, request):
logger = request.registry.queryUtility(IDebugLogger)
if logger:
cls = self.__class__
classname = cls.__module__ + '.' + cls.__name__
methodname = classname + '.' + methodname
logger.debug(methodname + ': ' + msg)
    def __init__(self,
                 secret,
                 cookie_name='auth',
                 secure=False,
                 max_age=None,
                 httponly=False,
                 path="/",
                 domains=None,
                 timeout=None,
                 reissue_time=None,
                 debug=False,
                 hashalg='sha512',
                 ):
        """Cookie-backed authentication policy.

        :param secret: signing secret for the auth cookie.
        :param cookie_name: name of the cookie (default ``'auth'``).
        :param secure/max_age/httponly/path/domains: standard cookie options
            passed straight to the cookie helper.
        :param hashalg: digest used to sign the cookie.
        :param debug: when true, log decisions via ``_log``.

        NOTE(review): ``timeout`` and ``reissue_time`` are accepted but not
        used anywhere in this class — confirm whether expiry is handled
        elsewhere or these are dead parameters.
        """
        self.domains = domains
        self.cookie = CookieHelper(
            secret,
            'usingnamespace-auth',  # salt for the signed cookie
            cookie_name,
            secure=secure,
            max_age=max_age,
            httponly=httponly,
            path=path,
            domains=domains,
            hashalg=hashalg,
        )
        self.debug = debug
def unauthenticated_userid(self, request):
""" The userid key within the auth_tkt cookie."""
result = self.cookie.bind(request).get_value()
self.debug and self._log('Got result from cookie: %s' % (result,), 'unauthenticated_userid', request)
if result:
principal = result['principal']
if _clean_principal(principal) is None:
self.debug and self._log('use of principal %r is disallowed by any '
'built-in Pyramid security policy, returning None' %
principal)
return None
auth = {'principal': principal}
if 'tokens' in result:
auth['tokens'] = result['tokens']
if 'auth_ticket' in result:
auth['ticket'] = result['auth_ticket']
request.state['auth'] = auth
return principal
def authenticated_userid(self, request):
""" Return the authenticated userid or ``None``."""
userid = request.user.id
return userid
    def find_user_ticket(self, request):
        """ Return the user object if valid for the ticket or ``None``."""
        # NOTE(review): despite the docstring, this returns whatever
        # UserTickets.find_ticket_userid() yields — confirm whether that is
        # a user id, a ticket row, or a user object.
        auth = request.state.get('auth', {})
        ticket = auth.get('ticket', '')
        principal = auth.get('principal', '')
        if not ticket or not principal:
            return None
        ticket = UserTickets.find_ticket_userid(ticket, principal)
        if ticket is None:
            self.debug and self._log('No ticket found', 'find_user_ticket', request)
            # Unknown/expired ticket: clear the auth cookie on the response.
            self.cookie.set_cookies(request.response, '', max_age=0)
        return ticket
def effective_principals(self, request):
""" A list of effective principals derived from request.
This will return a list of principals including, at least,
:data:`pyramid.security.Everyone`. If there is no authenticated
userid, or the ``callback`` returns ``None``, this will be the
only principal:
.. code-block:: python
return [Everyone]
"""
debug = self.debug
effective_principals = [Everyone]
userid = self.authenticated_userid(request)
if userid is None:
debug and self._log(
'authenticated_userid returned %r; returning %r' % (
userid, effective_principals),
'effective_principals',
request
)
return effective_principals
groups = []
# Get the groups here ...
effective_principals.append(Authenticated)
effective_principals.append(userid)
effective_principals.extend(groups)
debug and self._log(
'returning effective principals: %r' % (
effective_principals,),
'effective_principals',
request
)
return effective_principals
def remember(self, request, principal, tokens=None, max_age=None):
""" Accepts the following kw args: ``max_age=<int-seconds>``
Return a list of headers which will set appropriate cookies on
the response.
"""
debug = self.debug
value = {}
value['principal'] = principal
value['auth_ticket'] = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for x in range(128))
value['tokens'] = tokens if tokens is not None else []
user = DBSession.query(User).filter(User.email == principal).first()
if user is None:
raise ValueError('Invalid principal provided')
debug and self._log('Remember user: %s, ticket: %s' % (user.email, value['auth_ticket']), 'remember', request)
ticket = value['auth_ticket']
remote_addr = request.environ['REMOTE_ADDR'] if 'REMOTE_ADDR' in request.environ else None
user.tickets.append(UserTickets(ticket=ticket, remote_addr=remote_addr))
if self.domains is None:
self.domains = []
self.domains.append(request.domain)
return self.cookie.get_headers(value, domains=self.domains)
def forget(self, request):
""" A list of headers which will delete appropriate cookies."""
debug = self.debug
user = request.user
if user.ticket:
debug and self._log('forgetting user: %s, removing ticket: %s' % (user.id, user.ticket.ticket), 'forget', request)
DBSession.delete(user.ticket)
return self.cookie.get_headers('', max_age=0)
|
|
from pymeta.grammar import OMeta
from pymeta.runtime import character, ParseError, EOFError
from terml.common import CommonParser
# OMeta grammar for the Term language.  The expressions to the right of
# each `=>` are semantic actions evaluated against this module's globals()
# (TermLiteral, makeTag, Term, ...).
# NOTE(review): the `uri` rule's action references `b` and `e`, which are
# not bound anywhere in that rule -- confirm against the original grammar.
termLGrammar = r"""
literal ::= (<string>:x => TermLiteral(".String.", x)
| <character>:x => TermLiteral(".char.", x)
| <number>:x => TermLiteral(numberType(x), x))
tag ::= (<segment>:seg1 (':' ':' <sos>)*:segs => makeTag(cons(seg1, segs))
| (':' ':' <sos>)+:segs => prefixedTag(segs))
sos ::= <segment> | (<string>:s => tagString(s))
segment ::= <ident> | <special> | <uri>
ident ::= <segStart>:i1 <segPart>*:ibits => join(cons(i1, ibits))
segStart ::= <letter> | '_' | '$'
segPart ::= <letterOrDigit> | '_' | '.' | '-' | '$'
special ::= '.':a <ident>:b => concat(a, b)
uri ::= '<' <uriBody>*:uriChars '>' => concat(b, uriChars, e)
functor ::= <spaces> (<literal> | <tag>:t (<functorHole>:h => taggedHole(t, h)
| => t)
| <functorHole>)
functorHole ::= ((<token "${"> <decdigits>:n '}' => ValueHole(n))
|(<token "$"> <decdigits>:n => ValueHole(n))
|(<token "$"> <tag>:t => NamedValueHole(t))
|(<token "@{"> <decdigits>:n '}' => PatternHole(n))
|(<token "@"> <decdigits>:n => PatternHole(n))
|(<token "@"> <tag>:t => NamedPatternHole(t)))
baseTerm ::= <functor>:f ('(' <argList>:a ')' => Term(f, a)
| => Term(f, emptyList()))
argList ::= ((<term>:t (',' <term>)*:ts ) => cons(t, ts)
| => emptyList())
tupleTerm ::= <token '['> <argList>:a <token ']'> => Tuple(a)
bagTerm ::= <token '{'> <argList>:a <token '}'> => Bag(a)
labelledBagTerm ::= <functor>:f <bagTerm>:b => LabelledBag(f, b)
extraTerm ::= <tupleTerm> | <labelledBagTerm> | <bagTerm> | <baseTerm>
attrTerm ::= <extraTerm>:k <token ':'> <extraTerm>:v => Attr(k, v)
term ::= <attrTerm> | <extraTerm>
"""
class _Term(object):
def __init__(self, functor, arglist):
self.functor = functor
self.arglist = arglist
assert len(arglist) >= 0
def __eq__(self, other):
return (self.functor, self.arglist) == (other.functor, other.arglist)
def __repr__(self):
return "Term(%r)" % (self._unparse())
def _unparse(self):
if len(self.arglist) == 0:
return self.functor._unparse()
args = ', '.join([a._unparse() for a in self.arglist])
if self.functor.name == '.tuple.':
return "[%s]" % (args,)
elif self.functor.name == '.attr.':
return "%s: %s" % (self.arglist[0]._unparse(), self.arglist[1]._unparse())
elif self.functor.name == '.bag.':
return "{%s}" % (args,)
elif len(self.arglist) == 1 and self.arglist[0].functor.name == '.bag.':
return "%s%s" % (self.functor._unparse(), args)
else:
return "%s(%s)" % (self.functor._unparse(), args)
class TermLiteral(object):
    """A leaf literal (string, char or number) used as a term functor."""

    def __init__(self, name, data):
        self.name = name
        self.data = data

    def __eq__(self, other):
        # NOTE: only the payload is compared, not the type tag `name`.
        if other.__class__ is not self.__class__:
            return False
        return self.data == other.data

    def __repr__(self):
        return "TermLiteral(%r)" % (self.data,)

    def _unparse(self):
        """Render the literal with quoting appropriate to its type tag."""
        if self.name == '.String.':
            return '"%s"' % self.data
        if self.name == '.char.':
            return "'%s'" % self.data
        return str(self.data)
class Tag(object):
    """A functor tag, possibly made of '::'-separated name segments."""

    def __init__(self, name):
        # BUG FIX: removed a leftover `import pdb; pdb.set_trace()` debug
        # hook that fired when name[0] == '' (and raised IndexError for an
        # empty name).  Production code must not drop into the debugger.
        self.name = name

    def __eq__(self, other):
        return other.__class__ == self.__class__ and self.name == other.name

    def __repr__(self):
        return "Tag(%r)" % (self.name,)

    def _unparse(self):
        """A tag renders as its own name."""
        return self.name
## Functions called from grammar actions
def Character(char):
    """Grammar action: wrap a single char in pymeta's character node."""
    return character(char)
def makeTag(nameSegs):
    """Join name segments with '::' into a single Tag."""
    joined = '::'.join(nameSegs)
    return Tag(joined)
def prefixedTag(tagnameSegs):
    """Build a Tag whose name starts with '::' (empty leading segment)."""
    segments = [''] + tagnameSegs
    return makeTag(segments)
def tagString(string):
    """Wrap a string in double quotes for use as a tag segment."""
    return ''.join(['"', string, '"'])
def emptyList():
    """Grammar action: return a fresh, empty argument list."""
    return list()
def Term(functor, argList):
    """Grammar action: build a _Term, rejecting literals with children."""
    if argList and isinstance(functor, TermLiteral):
        raise ValueError("Term %s can't have both data and children" % (functor.name,))
    return _Term(functor, argList)
def numberType(n):
    """Return the terml type tag ('.float64.' or '.int.') for a number."""
    if isinstance(n, float):
        return ".float64."
    try:
        int_types = (long, int)
    except NameError:
        # Python 3 has no `long`.
        int_types = (int,)
    if isinstance(n, int_types):
        return ".int."
    raise ValueError("wtf")
def Tuple(args):
    """Grammar action: a [...] term, tagged '.tuple.'."""
    tag = Tag(".tuple.")
    return _Term(tag, args)
def Bag(args):
    """Grammar action: a {...} term, tagged '.bag.'."""
    tag = Tag(".bag.")
    return _Term(tag, args)
def LabelledBag(f, arg):
    """Grammar action: a functor directly labelling a single bag child."""
    children = [arg]
    return _Term(f, children)
def Attr(k, v):
    """Grammar action: a `key: value` attribute term, tagged '.attr.'."""
    tag = Tag(".attr.")
    return _Term(tag, [k, v])
# Compile the grammar into a parser class; action expressions in the
# grammar are evaluated against this module's globals() (Tag, Term, ...).
BaseTermLParser = OMeta.makeGrammar(termLGrammar, globals(), "TermLParser")
class TermLParser(BaseTermLParser, CommonParser):
    pass
# Merge CommonParser's action helpers so rules inherited from it keep
# working on the combined parser class.
TermLParser.globals.update(CommonParser.globals)
def _parseTerm(termString):
    """
    Parser frontend for term strings.

    Parses one term and requires the whole input to be consumed;
    otherwise the parser's last error is raised.
    """
    parser = TermLParser(termString)
    result, error = parser.apply("term")
    # A complete parse must exhaust the input: hitting EOF here is success.
    try:
        parser.input.head()
    except EOFError:
        return result
    raise error
def parseTerm(termString):
    """
    Friendly interface for parsing.

    Prints a human-readable, position-annotated parse error to stdout
    before re-raising it, so interactive callers see where parsing failed.
    """
    try:
        return _parseTerm(termString)
    except ParseError as e:
        print(e.formatError(termString))
        raise
|
|
"""Scrapy Shell
See documentation in docs/topics/shell.rst
"""
import os
import signal
import warnings
from twisted.internet import threads, defer
from twisted.python import threadable
from w3lib.url import any_to_uri
from scrapy.crawler import Crawler
from scrapy.exceptions import IgnoreRequest, ScrapyDeprecationWarning
from scrapy.http import Request, Response
from scrapy.item import BaseItem
from scrapy.settings import Settings
from scrapy.spiders import Spider
from scrapy.utils.console import start_python_console
from scrapy.utils.datatypes import SequenceExclude
from scrapy.utils.misc import load_object
from scrapy.utils.response import open_in_browser
from scrapy.utils.conf import get_config
from scrapy.utils.console import DEFAULT_PYTHON_SHELLS
class Shell(object):
    """Interactive console for inspecting requests/responses in a crawl.

    Populates ``self.vars`` with crawler objects, then either evaluates
    the ``code`` string or opens the configured interactive Python shell.
    """

    # Only instances of these types are listed in the shell banner.
    relevant_classes = (Crawler, Spider, Request, Response, BaseItem,
                        Settings)

    def __init__(self, crawler, update_vars=None, code=None):
        # update_vars: optional hook that may mutate the shell namespace.
        # code: expression string evaluated instead of opening a console.
        self.crawler = crawler
        self.update_vars = update_vars or (lambda x: None)
        self.item_class = load_object(crawler.settings['DEFAULT_ITEM_CLASS'])
        self.spider = None
        # fetch()/view() only work when running outside the reactor thread.
        self.inthread = not threadable.isInIOThread()
        self.code = code
        self.vars = {}

    def start(self, url=None, request=None, response=None, spider=None, redirect=True):
        """Populate the namespace from url/request/response, then run."""
        # disable accidental Ctrl-C key press from shutting down the engine
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        if url:
            self.fetch(url, spider, redirect=redirect)
        elif request:
            self.fetch(request, spider)
        elif response:
            request = response.request
            self.populate_vars(response, request, spider)
        else:
            self.populate_vars()
        if self.code:
            # NOTE(review): eval of a caller-supplied string -- intentional
            # for a debugging shell; never feed it untrusted input.
            print(eval(self.code, globals(), self.vars))
        else:
            """
            Detect interactive shell setting in scrapy.cfg
            e.g.: ~/.config/scrapy.cfg or ~/.scrapy.cfg
            [settings]
            # shell can be one of ipython, bpython or python;
            # to be used as the interactive python console, if available.
            # (default is ipython, fallbacks in the order listed above)
            shell = python
            """
            cfg = get_config()
            section, option = 'settings', 'shell'
            # Env var wins over the config file setting.
            env = os.environ.get('SCRAPY_PYTHON_SHELL')
            shells = []
            if env:
                shells += env.strip().lower().split(',')
            elif cfg.has_option(section, option):
                shells += [cfg.get(section, option).strip().lower()]
            else: # try all by default
                shells += DEFAULT_PYTHON_SHELLS.keys()
            # always add standard shell as fallback
            shells += ['python']
            start_python_console(self.vars, shells=shells,
                                 banner=self.vars.pop('banner', ''))

    def _schedule(self, request, spider):
        # Runs in the reactor thread: hand the request to the engine and
        # return a deferred that fires with (response, spider).
        spider = self._open_spider(request, spider)
        d = _request_deferred(request)
        d.addCallback(lambda x: (x, spider))
        self.crawler.engine.crawl(request, spider)
        return d

    def _open_spider(self, request, spider):
        # Lazily create and open one spider, then reuse it for later fetches.
        if self.spider:
            return self.spider
        if spider is None:
            spider = self.crawler.spider or self.crawler._create_spider()
        self.crawler.spider = spider
        self.crawler.engine.open_spider(spider, close_if_idle=False)
        self.spider = spider
        return spider

    def fetch(self, request_or_url, spider=None, redirect=True, **kwargs):
        """Download a URL or Request and refresh the shell namespace."""
        from twisted.internet import reactor
        if isinstance(request_or_url, Request):
            request = request_or_url
        else:
            url = any_to_uri(request_or_url)
            request = Request(url, dont_filter=True, **kwargs)
            if redirect:
                # Let the redirect middleware handle 3xx only.
                request.meta['handle_httpstatus_list'] = SequenceExclude(range(300, 400))
            else:
                request.meta['handle_httpstatus_all'] = True
        response = None
        try:
            # Block this (non-reactor) thread until the download finishes.
            response, spider = threads.blockingCallFromThread(
                reactor, self._schedule, request, spider)
        except IgnoreRequest:
            pass
        self.populate_vars(response, request, spider)

    def populate_vars(self, response=None, request=None, spider=None):
        """(Re)build the namespace exposed to the interactive console."""
        import scrapy
        self.vars['scrapy'] = scrapy
        self.vars['crawler'] = self.crawler
        self.vars['item'] = self.item_class()
        self.vars['settings'] = self.crawler.settings
        self.vars['spider'] = spider
        self.vars['request'] = request
        self.vars['response'] = response
        self.vars['sel'] = _SelectorProxy(response)
        if self.inthread:
            self.vars['fetch'] = self.fetch
        self.vars['view'] = open_in_browser
        self.vars['shelp'] = self.print_help
        # Give the caller's hook a chance to adjust the namespace.
        self.update_vars(self.vars)
        if not self.code:
            self.vars['banner'] = self.get_help()

    def print_help(self):
        """Print the help banner to stdout (bound as shelp())."""
        print(self.get_help())

    def get_help(self):
        """Build the '[s] ...' banner listing objects and shortcuts."""
        b = []
        b.append("Available Scrapy objects:")
        b.append(" scrapy scrapy module (contains scrapy.Request, scrapy.Selector, etc)")
        for k, v in sorted(self.vars.items()):
            if self._is_relevant(v):
                b.append(" %-10s %s" % (k, v))
        b.append("Useful shortcuts:")
        if self.inthread:
            b.append(" fetch(url[, redirect=True]) "
                     "Fetch URL and update local objects "
                     "(by default, redirects are followed)")
            b.append(" fetch(req) "
                     "Fetch a scrapy.Request and update local objects ")
        b.append(" shelp() Shell help (print this help)")
        b.append(" view(response) View response in a browser")
        return "\n".join("[s] %s" % l for l in b)

    def _is_relevant(self, value):
        # Filter for get_help(): only show objects of the advertised types.
        return isinstance(value, self.relevant_classes)
def inspect_response(response, spider):
    """Open a shell to inspect the given response"""
    shell = Shell(spider.crawler)
    shell.start(response=response, spider=spider)
def _request_deferred(request):
    """Wrap a request inside a Deferred.

    This function is harmful, do not use it until you know what you are doing.
    The returned Deferred's first pair of callbacks are the request's own
    callback and errback, and it fires when the request is downloaded.
    WARNING: Do not call request.replace() until after the deferred is called.
    """
    saved_callback = request.callback
    saved_errback = request.errback

    def _restore_callbacks(result):
        # Put the request's original callbacks back once we fire.
        request.callback = saved_callback
        request.errback = saved_errback
        return result

    deferred = defer.Deferred()
    deferred.addBoth(_restore_callbacks)
    if request.callback:
        deferred.addCallbacks(request.callback, request.errback)
    # Redirect the request's callbacks into the deferred itself.
    request.callback, request.errback = deferred.callback, deferred.errback
    return deferred
class _SelectorProxy(object):
    """Deprecated ``sel`` shortcut: forwards everything to response.selector."""

    def __init__(self, response):
        self._proxiedresponse = response

    def __getattr__(self, name):
        message = ('"sel" shortcut is deprecated. Use "response.xpath()", '
                   '"response.css()" or "response.selector" instead')
        warnings.warn(message, category=ScrapyDeprecationWarning, stacklevel=2)
        return getattr(self._proxiedresponse.selector, name)
|
|
#! /usr/local/bin/python
"""
See LICENSE file for copyright and license details.
"""
from sqlalchemy import Table, MetaData, Column, Integer, or_, and_
from sqlalchemy.types import VARCHAR
#from sqlalchemy.sql import exisst
from sqlalchemy.orm import sessionmaker
from sqlalchemy import func
from decimal import Decimal
from datetime import datetime
from generic.modules.function import *
from generic.modules.function_sqlalchemy import row_to_dict
from generic.modules.messagehandler import *
from generic.modules.calculator_finance import *
from modules.constant import *
from modules.function import *
from meta import engine, Base
from database.mappings import *
from database.mappings_views import *
class DatabaseAccess():
"""
Connecting to the database.
"""
    def __init__(self, config):
        """
        Initialize object.

        Stores the configuration, builds a Session factory bound to the
        module-level engine and collects the names of the real tables
        (views are filtered out by is_a_table).
        """
        try:
            self.config = config
            self.Session = sessionmaker(bind=engine)
            self.metadata = Base.metadata
            #self.map_tables()
            #self.map_views()
            # Keep only genuine tables, not mapped views.
            self.tables = [x for x in self.metadata.tables.keys() if is_a_table(x)]
        except Exception as ex:
            print "Error in initialisation of DatabaseAccess: ", ex
def get_accounts(self):
"""
Get the accounts.
"""
values = []
try:
session = self.Session()
query = session.query(T_ACCOUNT)
for instance in query:
values.append(
{
"name": instance.name,
"account_id": instance.account_id
}
)
except Exception as ex:
print Error.GET_ACCOUNTS, ex
finally:
session.rollback()
session = None
return values
def get_account_list(self):
"""
Get the account_names in a list.
"""
values = []
try:
session = self.Session()
query = session.query(T_ACCOUNT)
for instance in query:
values.append(instance.name)
except Exception as ex:
print Error.GET_ACCOUNT_LIST, ex
finally:
session.rollback()
session = None
return values
def get_markets(self):
"""
Get the market codes.
"""
values = []
try:
session = self.Session()
query = session.query(T_MARKET).filter(
T_MARKET.active == 1)
for instance in query:
values.append(instance.code)
except Exception as ex:
print Error.GET_MARKETS, ex
finally:
session.rollback()
session = None
return values
def get_commodity_names(self, code):
"""
Get the commodity names.
"""
values = []
try:
session = self.Session()
query = session.query(V_COMMODITY_INFO).filter(
V_COMMODITY_INFO.market_code == code
)
for instance in query:
values.append(instance.commodity_name)
except Exception as ex:
print "Error in get_commodity_names: ", ex
finally:
session.rollback()
session = None
return values
def get_market_description(self, market):
"""
Get the market description.
"""
value = ''
try:
session = self.Session()
query = session.query(T_MARKET).filter(T_MARKET.code == market)
for instance in query:
value = instance.name
break
except Exception as ex:
print Error.GET_MARKET_DESCRIPTION, ex
finally:
session.rollback()
session = None
return value
def get_commodity_description(self, commodity):
"""
Get the commodity description.
"""
value = ''
try:
session = self.Session()
query = session.query(T_COMMODITY).filter(
T_COMMODITY.name == commodity
)
for instance in query:
value = instance.description
break
except Exception as ex:
print "Error in get_commodity_description: ", ex
finally:
session.rollback()
session = None
return value
def get_commodity_info(self, commodity_name):
"""
Get extra commodity info.
"""
values = []
try:
session = self.Session()
query = session.query(
T_COMMODITY.name.label("commodity_name"),
T_MARKET.name.label("marketname"),
T_MARKET.country
).join(
T_MARKET,
T_COMMODITY.market_id == T_MARKET.market_id
).filter(
T_COMMODITY.name == commodity_name
)
for instance in query:
values.append(instance.commodity_name)
values.append(instance.marketname)
values.append(instance.country)
except Exception as ex:
print Error.GET_COMMODITY_INFO, ex
finally:
session.rollback()
session = None
return values
def get_currencies(self):
"""
Get the currency codes.
"""
values = []
try:
session = self.Session()
query = session.query(T_CURRENCY)
for instance in query:
values.append(instance.code)
except Exception as ex:
print Error.GET_CURRENCIES, ex
finally:
session.rollback()
session = None
return values
def trade_closed(self, trade_record):
"""
Checks if a trade is closed.
"""
return (
(trade_record['date_buy'] != DEFAULT_DATE)
and (trade_record['date_sell'] != DEFAULT_DATE)
and (trade_record['shares_buy'] == trade_record['shares_sell'])
)
def get_long_flag_value(self, account_from, account_to, trade_record):
"""
Are we long?
"""
result = False
if trade_record == {}:
result = is_a_trading_account(account_from) and \
deals_with_commodities(account_to)
# NOTE: if account_from = broker and account_to is a commodity,
# we know we are trading + buying
# and that is going long if we don't have a trading record yet.
else:
result = (deals_with_commodities(account_from, account_to)
and trade_record['date_buy'] != DEFAULT_DATE)
return 1 if result else 0
def get_win_flag_value(self, price_buy, price_sell):
"""
Trade finished... did we win?
"""
result = (price_buy < price_sell)
return 1 if result else 0
def write_to_database(self, statements):
"""
Writes the records of a given statements list to the database.
"""
try:
if statements != []:
#insert
statements_insert = self.assemble_statement_list_insert(
statements, StatementType.INSERT)
self.write_statement_list_insert(
statements_insert, statements.table)
#update
statements_update = self.assemble_statement_list_update(
statements, StatementType.UPDATE)
print "test: statements_update = ", statements_update
self.write_statement_list_update(
statements_update, statements.table)
#delete
statements_delete = self.assemble_statement_list_delete(
statements, StatementType.DELETE)
self.write_statement_list_delete(
statements_delete, statements.table)
except Exception as ex:
print Error.WRITE_TO_DATABASE_SESSION, ex
    def write_statement_list_insert(self, final_statements, table):
        """
        Commit the insert statements to database.

        :param final_statements: list of mapped ORM objects to add.
        :param table: the mapped table class (unused here; kept for the
            common handler signature).
        """
        session = self.Session()
        try:
            if final_statements != []:
                #NOTE: The below 3 lines are this one-liner in python3:
                #print(table_name, end=': ')
                #splitnames = table.__name__.split(':')
                #for name in splitnames:
                # print name
                session.add_all(final_statements)
                session.commit()
                # TODO: {0} in python2.7?
                print "{0} records added.".format(str(len(final_statements)))
                print ''
        except Exception as ex:
            print Error.INSERT_DATABASE, ex
        finally:
            session.rollback()
            session = None
def write_statement_list_update(self, final_statements, table):
"""
Execute the update statements on the database.
"""
# TODO: this code needs the update instruction I've written somewhere
#in databaseaccess.py
#=> session.query(Supplier).filter_by(id=2).update({"name": u"Mayowa"})
session = self.Session()
try:
#import pdb; pdb.set_trace()
if final_statements != []:
#session.add_all(final_statements)
#session.commit()
for statement in final_statements:
print 'test:', statement
# TODO: make trade_id a var? Is that possible?
session.query(table).filter_by(
trade_id=statement[0]).update(statement[1])
print 'test: after query'
# TODO: commit/flush code in for or outside?
session.commit()
print 'test: after commit'
print "{0} records updated.".format(str(len(final_statements)))
print ''
except Exception as ex:
print Error.UPDATE_DATABASE, ex
finally:
session.rollback()
session = None
    def write_statement_list_delete(self, final_statements, table):
        """
        Write the delete statements to database.

        Intentionally a no-op: rows are deleted on the table object in
        the GUI before execute, so nothing must happen here (yet).
        """
        # TODO: finish this later. Low priority!
        pass
    def assemble_statement_list_insert(self, statements,
                                       insupdel=StatementType.INSERT):
        """
        Creates list of TABLE_NAME(..., ..., ...) records
        from new statements, that we can use to insert at once.

        Maps each plain record dict onto the matching mapped class
        constructor; unknown tables yield an empty list.
        """
        # TODO: find a way to refactor this piece of crap code.
        result = []
        inner_part_list = statements.get_statement_list(insupdel)
        if statements.table == T_CURRENCY_EXCHANGE:
            for record in inner_part_list:
                result.append(T_CURRENCY_EXCHANGE(
                    record['currency_exchange_id'],
                    record['currency_from_id'],
                    record['currency_to_id'],
                    record['exchange_rate'],
                    record['date_created'],
                    record['date_modified']))
        elif statements.table == T_RATE:
            for record in inner_part_list:
                result.append(T_RATE(
                    record['rate_id'],
                    record['commission'],
                    record['tax'],
                    record['automatic_flag'],
                    record['date_created'],
                    record['date_modified']))
        elif statements.table == T_FINANCE:
            for record in inner_part_list:
                result.append(T_FINANCE(
                    record['finance_id'],
                    record['date'],
                    record['year'],
                    record['month'],
                    record['day'],
                    record['account_from_id'],
                    record['account_to_id'],
                    record['amount'],
                    record['comment'],
                    record['currency_exchange_id'],
                    record['rate_id'],
                    record['active'],
                    record['date_created'],
                    record['date_modified']))
        elif statements.table == T_TRADE:
            # NOTE: positional arguments must stay in the exact order of
            # the T_TRADE constructor.
            for record in inner_part_list:
                result.append(T_TRADE(
                    record['trade_id'],
                    record['market_id'],
                    record['commodity_id'],
                    record['date_buy'],
                    record['year_buy'],
                    record['month_buy'],
                    record['day_buy'],
                    record['date_sell'],
                    record['year_sell'],
                    record['month_sell'],
                    record['day_sell'],
                    record['long_flag'],
                    record['price_buy'],
                    record['price_buy_orig'],
                    record['price_sell'],
                    record['price_sell_orig'],
                    record['shares_buy'],
                    record['shares_sell'],
                    record['commission_buy'],
                    record['commission_sell'],
                    record['tax_buy'],
                    record['tax_sell'],
                    record['amount_buy'],
                    record['amount_sell'],
                    record['amount_buy_simple'],
                    record['amount_sell_simple'],
                    record['risk_input'],
                    record['risk_input_percent'],
                    record['risk_initial'],
                    record['risk_initial_percent'],
                    record['risk_actual'],
                    record['risk_actual_percent'],
                    record['cost_total'],
                    record['cost_other'],
                    record['stoploss'],
                    record['stoploss_orig'],
                    record['profit_loss'],
                    record['profit_loss_orig'],
                    record['profit_loss_total'],
                    record['profit_loss_total_percent'],
                    record['r_multiple'],
                    record['win_flag'],
                    record['id_buy'],
                    record['id_sell'],
                    record['drawdown_id'],
                    record['pool_at_start'],
                    record['date_expiration'],
                    record['expired_flag'],
                    record['spread'],
                    record['active'],
                    record['date_created'],
                    record['date_modified']))
        return result
def assemble_statement_list_update(self, statements,
insupdel=StatementType.UPDATE):
"""
Creates list of update records from statements,
that we can use to update.
"""
result = []
#import pdb; pdb.set_trace()
inner_part_list = statements.get_statement_list(insupdel)
for record in inner_part_list:
if statements.table == T_TRADE:
var_id = record['trade_id']
else:
var_id = -1
result.append(
[
var_id,
{
"market_id": record["market_id"],
"commodity_id": record["commodity_id"],
"date_buy": record["date_buy"],
"year_buy": record["year_buy"],
"month_buy": record["month_buy"],
"day_buy": record["day_buy"],
"date_sell": record["date_sell"],
"year_sell": record["year_sell"],
"month_sell": record["month_sell"],
"day_sell": record["day_sell"],
"long_flag": record["long_flag"],
"price_buy": record["price_buy"],
"price_buy_orig": record["price_buy_orig"],
"price_sell": record["price_sell"],
"price_sell_orig": record["price_sell_orig"],
"shares_buy": record["shares_buy"],
"shares_sell": record["shares_sell"],
"commission_buy": record["commission_buy"],
"commission_sell": record["commission_sell"],
"tax_buy": record["tax_buy"],
"tax_sell": record["tax_sell"],
"amount_buy": record["amount_buy"],
"amount_sell": record["amount_sell"],
"amount_buy_simple": record["amount_buy_simple"],
"amount_sell_simple": record["amount_sell_simple"],
"risk_input": record["risk_input"],
"risk_input_percent": record["risk_input_percent"],
"risk_initial": record["risk_initial"],
"risk_initial_percent": record["risk_initial_percent"],
"risk_actual": record["risk_actual"],
"risk_actual_percent": record["risk_actual_percent"],
"cost_total": record["cost_total"],
"cost_other": record["cost_other"],
"stoploss": record["stoploss"],
"stoploss_orig": record["stoploss_orig"],
"profit_loss": record["profit_loss"],
"profit_loss_orig": record["profit_loss_orig"],
"profit_loss_total": record["profit_loss_total"],
"profit_loss_total_percent":
record["profit_loss_total_percent"],
"r_multiple": record["r_multiple"],
"win_flag": record["win_flag"],
"id_buy": record["id_buy"],
"id_sell": record["id_sell"],
"drawdown_id": record["drawdown_id"],
"pool_at_start": record["pool_at_start"],
"date_expiration": record["date_expiration"],
"expired_flag": record["expired_flag"],
"spread": record["spread"],
"active": record["active"],
"date_created": record["date_created"],
"date_modified": record["date_modified"]
}
]
)
return result
    def assemble_statement_list_delete(self, statements,
                                       insupdel=StatementType.DELETE):
        """
        Creates list of from delete statements,
        that we can use to delete at once.

        Returns the raw statement list unchanged.
        """
        #NOTE: deleting is not used, because we don't need it.
        # Deleting is done on the table object rows, before pressing execute.
        # When using the app, you need to make sure everything is ok before
        # you press execute.
        return statements.get_statement_list(insupdel)
def export_records(self, name):
"""
Return the records from the table or view, defined by name.
"""
records = None
session = self.Session()
try:
records = session.query(name).all()
except Exception as ex:
print "Error in export_records: ", ex
finally:
session.rollback()
session = None
return records
def account_id_from_account_name(self,
account_name, from_account=True):
"""
Get the account_id from an account.
"""
result = - 1
session = self.Session()
try:
date_created = current_date()
date_modified = current_date()
# Get account id, based on account name
# but first check if the account already exists
# in T_ACCOUNT. If not, add it to the t_account table.
obj = session.query(T_ACCOUNT).filter_by(
name=account_name
).first() is not None
if not obj:
if from_account:
description_list = self.gui.get_account_from().split(':')
description = description_list[len(descpription_list) - 1]
else:
description = self.gui.get_account_to().split(':')
description = description_list[len(descpription_list) - 1]
session.add(T_ACCOUNT(
account_name,
description,
date_created,
date_modified)
)
session.commit()
for instance in session.query(
func.max(T_ACCOUNT.account_id).label('account_id')
):
result = instance.account_id
else:
for instance in session.query(T_ACCOUNT).filter_by(
name=account_name
):
result = str(instance.account_id)
except Exception as ex:
print Error.ACCOUNT_ID_FROM_ACCOUNT, ex
finally:
session.rollback()
session = None
return result
def commodity_id_from_commodity_name(self, commodity_name, market_id):
"""
Get the commodity_id from T_COMMODITY.
"""
result = -1
session = self.Session()
try:
# Get commodity_id, based on commodity_name
# but first check if the commodity_name already exists
# in T_COMMODITY. If not, add it to the table.
first_obj = session.query(V_COMMODITY_INFO).filter_by(
commodity_name=commodity_name,
market_id=market_id).first()
if first_obj is not None:
result = str(first_obj.commodity_id)
except Exception as ex:
print "Error retrieving commodity_id: ", ex
finally:
session.rollback()
session = None
return result
    def market_id_from_market(self, code):
        """
        Get the market_id from T_MARKET.

        Creates a placeholder market row ('TBD'/'??') when the code is
        unknown, then returns the new id; -1 on failure.
        """
        result = -1
        session = self.Session()
        try:
            date_created = current_date()
            date_modified = current_date()
            # Does a market with this code already exist?
            obj = session.query(T_MARKET).filter_by(
                code=code
            ).first() is not None
            if not obj:
                # NOTE: this code means that when new market records
                # have been added during normal usage, a new
                # uninstall/install/import will not be able
                # to fill in the name and country of the market.
                # For now, assume no new ones are added. If there are,
                # add them to the init_tables script!
                # TODO: add extra field in gui for the country code
                # and country name + add this to the input_fields.
                # This way, we can also add new markets.
                # But: perhaps this makes the input too complex and a new
                # button with a dialog window behind it is needed?
                session.add(T_MARKET(
                    None,
                    code,
                    'TBD',
                    '??',
                    1,
                    date_created,
                    date_modified)
                )
                session.commit()
                # The freshly inserted row has the highest market_id.
                for instance in session.query(
                    func.max(T_MARKET.market_id).label('market_id')
                ):
                    result = instance.market_id
            else:
                for instance in session.query(T_MARKET).filter_by(code=code):
                    result = str(instance.market_id)
        except Exception as ex:
            print "Error retrieving market_id: ", ex
        finally:
            session.rollback()
            session = None
        return result
def account_name_from_account_id(self, account_id):
"""
Get the account_name for a given account_id
from the T_ACCOUNT table.
"""
result = ''
session = self.Session()
try:
for instance in session.query(V_ACCOUNT_NAME).filter_by(
account_id=account_id
):
result = instance.name
except Exception as ex:
print "Error retrieving accountname from account_id: ", ex
finally:
session.rollback()
session = None
return result
def currency_id_from_currency(self, currency):
"""
Get the currency_id from a currency string (e.g.'USD').
"""
result = -1
session = self.Session()
try:
first_obj = session.query(T_CURRENCY).filter_by(
code=currency
).first()
if first_obj is not None:
result = str(first_obj.currency_id)
else:
raise Exception("Error: currency {0} not found!" +
" -1 used as a currency_id.".format(currency))
except Exception as ex:
print Error.ACCOUNT_ID_FROM_ACCOUNT, ex
finally:
session.rollback()
session = None
return result
def get_latest_rate_id(self):
"""
Gets the latest rate_id.
"""
result = -1
session = self.Session()
try:
first_obj = session.query(T_RATE).order_by(
T_RATE.rate_id.desc()
).first()
if first_obj is not None:
result = first_obj.rate_id
else:
# We don't have one yet, so by making the last one 0,
# a get_latest_rate_id() + 1 would become 1
result = 0
except Exception as ex:
print "Error retrieving latest rate_id from T_RATE: ", ex
finally:
session.rollback()
session = None
return result
def get_parameter_value(self, parameter_id):
"""
Function to get the value that belongs to the given parameter.
"""
result = DEFAULT_DECIMAL
session = self.Session()
try:
for instance in session.query(T_PARAMETER).filter_by(
parameter_id=parameter_id):
result = str(instance.value)
except Exception as ex:
print "Error retrieving parameter value: ", ex
finally:
session.rollback()
session = None
return result
def first_currency_exchange_id_from_latest(self):
"""
Gets the first currency_exchange_id from the latest update
block, which is determined by examining the date_created column.
"""
result = -1
session = self.Session()
try:
currency_exchange_created = self.get_latest_date_created(
Table.CURRENCY_EXCHANGE)
first_obj = session.query(T_CURRENCY_EXCHANGE).filter_by(
date_created=currency_exchange_created).first()
if first_obj is not None:
result = first_obj.currency_exchange_id
except Exception as ex:
print "Error in first_currency_id_from_latest: ", ex
finally:
session.rollback()
session = None
return result
def first_rate_id_from_latest(self):
"""
Gets the first rate_id from the latest update
block, which is determined by examining the date_created column.
"""
result = -1
session = self.Session()
try:
rate_created = self.get_latest_date_created(Table.RATE)
obj = session.query(T_RATE).filter_by(date_created=rate_created)
for instance in obj:
result = instance.rate_id
except Exception as ex:
print "Error in first_rate_id_from_latest: ", ex
finally:
session.rollback()
session = None
return result
def first_finance_id_from_latest(self):
    """
    Get the first finance_id from the latest update block, which is
    determined by examining the date_created column.

    Returns:
        The finance_id, or -1 when nothing is found or the query fails.
    """
    result = -1
    session = self.Session()
    try:
        finance_created = self.get_latest_date_created(Table.FINANCE)
        obj = session.query(T_FINANCE).filter_by(
            date_created=finance_created)
        # NOTE(review): like first_rate_id_from_latest, this returns the
        # LAST matching row, not the first -- confirm intent.
        for instance in obj:
            result = instance.finance_id
    except Exception as ex:
        print "Error in first_finance_id_from_latest: ", ex
    finally:
        session.rollback()
        session = None
    return result
def get_latest_date_created(self, tablename):
"""
Get's the latest date_created value that was added.
"""
result = current_date()
session = self.Session()
try:
if tablename == Table.FINANCE:
first_obj = session.query(T_FINANCE).order_by(
T_FINANCE.finance_id.desc()).first()
elif tablename == Table.RATE:
first_obj = session.query(T_RATE).order_by(
T_RATE.rate_id.desc()).first()
elif tablename == Table.CURRENCY_EXCHANGE:
first_obj = session.query(T_CURRENCY_EXCHANGE).order_by(
T_CURRENCY_EXCHANGE.currency_exchange_id.desc()).first()
else:
first_obj = None
if first_obj is not None:
result = first_obj.date_created
except Exception as ex:
print 'Error in get_latest_date_created for table', tablename,
':', ex
finally:
session.rollback()
session = None
return result
def get_finance_record(self, afinance_id):
    """
    Get the finance record with the given finance_id.

    Returns:
        The row's attribute dict, or {} when not found or on error.
        NOTE(review): __dict__ of an ORM instance also carries the
        SQLAlchemy _sa_instance_state entry; the sibling get_trade_record
        uses row_to_dict() instead -- confirm callers tolerate the
        extra key.
    """
    result = {}
    session = self.Session()
    try:
        first_obj = session.query(T_FINANCE).filter(T_FINANCE.finance_id ==
            afinance_id).first()
        if first_obj is not None:
            result = first_obj.__dict__
    except Exception as ex:
        print "Error in get_finance_record: ", ex
    finally:
        session.rollback()
        session = None
    return result
def get_rep_check_total(self, check_totals):
    """
    Render the per-account check totals as a single string.

    Each (account_name, total) pair becomes "account_name|total"; the
    pairs are separated by single spaces.  An empty input yields "".
    """
    fragments = [pair[0] + '|' + str(pair[1]) for pair in check_totals]
    return ' '.join(fragments)
def get_rep_check_totals(self):
    """
    Return a list with the account name and totals.

    Currently disabled: the implementation below is commented out (it
    needs the V_REP_CHECK_TOTAL view, which is itself commented out),
    so this always returns None.
    """
    pass
    #values = []
    #session = self.Session()
    #try:
    #    #obj = session.query(V_REP_CHECK_TOTAL)
    #    #for instance in obj:
    #    #    values.append([instance.account_name,
    #    #        instance.account_total])
    #except Exception as ex:
    #    print "Error in get_rep_check_totals: ", ex
    #finally:
    #    session.rollback()
    #    session = None
    #return values
def new_drawdown_record(self):
    """
    Create a new record in T_DRAWDOWN with default values of 0.

    Returns:
        The drawdown_id of the newly created record, or -1 on error.
    """
    result = -1
    session = self.Session()
    try:
        date_created = current_date()
        date_modified = current_date()
        # NOTE(review): positional T_DRAWDOWN args assumed to be
        # (id, value, value, created, modified), with id=None so the
        # database assigns it -- confirm against the model definition.
        session.add(T_DRAWDOWN(None, 0, 0, date_created, date_modified))
        session.commit()
        # Read back the id that was just assigned.
        for instance in session.query(
                func.max(T_DRAWDOWN.drawdown_id).label(
                'drawdown_id')):
            result = instance.drawdown_id
    except Exception as ex:
        print Error.NEW_DRAWDOWN_RECORD, ex
    finally:
        # After a successful commit this rollback is a no-op; it undoes
        # the pending insert only when the commit itself failed.
        session.rollback()
        session = None
    return result
def get_pool(self):
    """
    Get the pool available for trading.

    Sums T_FINANCE.amount over rows whose account_from_id is the
    trading account.

    Returns:
        The sum as a Decimal, or DEFAULT_DECIMAL when there are no rows
        or the query fails.
    """
    result = DEFAULT_DECIMAL
    session = self.Session()
    try:
        first_obj = session.query(func.sum(T_FINANCE.amount).label('total')
            ).filter_by(account_from_id=TRADING_ACCOUNT_ID).first()
        # SUM over zero rows yields NULL/None, so guard before converting.
        if first_obj.total is not None:
            result = Decimal(first_obj.total)
    except Exception as ex:
        print "Error in get_pool: ", ex
    finally:
        session.rollback()
        session = None
    return result
def get_specific_finance_record(self, date, account_id, account_from_id,
account_to_id, amount, comment, commodity_id, shares, price,
tax, commission):
"""
Looks for a finance record with the given parameters.
"""
try:
session = self.Session()
result = session.query(T_FINANCE).filter_by(
date=date,
account_id=account_id,
account_from_id=account_from_id,
account_to_id=account_to_id,
amount=amount,
comment=comment,
commodity_id=commodity_id,
shares=shares,
price=price,
tax=tax,
commission=commission,
active=1
).first()
except Exception as ex:
print Error.GET_SPECIFIC_FINANCE_RECORD, ex
session.rollback()
result = None
finally:
session = None
return result
def open_trade_position(self, market_id, commodity_id, table_class):
    """
    Check if this trade or investment has already started
    and return its trade_id (or investment_id).

    Args:
        table_class: the ORM class to query (trade or investment table).

    Returns:
        -1 when not found, otherwise the id.
    """
    result = -1
    try:
        # NOTE(review): the session is created inside the try block here;
        # if self.Session() itself raises, the finally clause hits an
        # unbound `session` (NameError).  Sibling methods create it
        # before the try -- consider aligning.
        session = self.Session()
        #NOTE: id_buy or id_sell must be -1
        # but both can't be filled in (= finished trade)
        first_obj = session.query(table_class).filter(
            table_class.market_id == market_id,
            table_class.commodity_id == commodity_id,
            table_class.active == 1).filter(
            or_(
                table_class.id_buy == -1,
                table_class.id_sell == -1
            )).filter(
            table_class.id_buy != table_class.id_sell
        ).first()
        if first_obj is not None:
            result = first_obj.trade_id
    except Exception as ex:
        print Error.INVADE_ALREADY_STARTED, ex
    finally:
        session.rollback()
        session = None
    return result
def get_trade_record(self, atrade_id):
    """
    Get the trade record with the given trade_id as a plain dict.

    Returns:
        The row converted via row_to_dict(), or {} when not found or
        the query fails.
    """
    # TODO: this code can only deal with buying all and selling all for now!
    result = {}
    session = self.Session()
    try:
        first_obj = session.query(T_TRADE).filter(
            T_TRADE.trade_id == atrade_id,
        ).first()
        if first_obj is not None:
            result = row_to_dict(first_obj)
    except Exception as ex:
        print "Error in get_trade_record: ", ex
    finally:
        session.rollback()
        session = None
    return result
def get_tick_size_from_commodity_id(self, commodity_id):
"""
Get the tick size for a given commodity.
"""
result = DEFAULT_DECIMAL
try:
session = self.Session()
first_obj = session.query(T_TRADE.spread).filter_by(
commodity_id=commodity_id).first()
if first_obj is not None:
result = Decimal(first_obj.tick_size)
except Exception as ex:
print "Error in get_tick_size_from_commodity_id: ", ex
finally:
session.rollback()
session = None
return result
def get_tick_value_from_commodity_id(self, commodity_id):
"""
Get the tick value for a given commodity.
"""
result = DEFAULT_DECIMAL
try:
session = self.Session()
first_obj = session.query(T_TRADE.spread).filter_by(
commodity_id=commodity_id).first()
if first_obj is not None:
result = Decimal(first_obj.tick_value)
except Exception as ex:
print "Error in get_tick_value_from_commodity_id: ", ex
finally:
session.rollback()
session = None
return result
def get_margin_pool(self):
    """
    Get the margin to use on the pool.

    Reads parameter id 1 and converts its string value to a Decimal.
    """
    raw_value = self.get_parameter_value(1)
    return Decimal(raw_value)
|
|
from __future__ import absolute_import
import socket
import types
from collections import defaultdict
from itertools import count
from kombu import Connection, Exchange, Queue, Consumer, Producer
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.five import Empty, Queue as _Queue
from kombu.transport import virtual
from kombu.utils import eventio # patch poll
from kombu.utils.json import dumps
from kombu.tests.case import (
Case, ContextMock, Mock, call, module_exists, skip_if_not_module, patch, ANY,
)
class _poll(eventio._select):
    """Deterministic replacement for the eventio poller used in tests.

    Only READ interest is tracked, and a fd counts as readable whenever
    its fake socket has buffered data -- no real select()/poll() runs.
    """
    def register(self, fd, flags):
        # Writers/error interest are ignored on purpose.
        if flags & eventio.READ:
            self._rfd.add(fd)
    def poll(self, timeout):
        # The timeout is ignored; report every fd whose fake socket
        # recorded command data as readable.
        events = []
        for fd in self._rfd:
            if fd.data:
                events.append((fd.fileno(), eventio.READ))
        return events
# Monkeypatch the poller before kombu.transport.redis binds to it.
eventio.poll = _poll
# must import after poller patch, pep8 complains
from kombu.transport import redis  # noqa
class ResponseError(Exception):
    """Stand-in for redis.exceptions.ResponseError."""
    pass
class Client(object):
    """In-memory fake of the redis-py client.

    Class-level dicts act as the shared "server" state, so every Client
    instance (i.e. every fake connection) sees the same queues, sets and
    hashes.  Only the subset of commands the redis transport uses is
    implemented.
    """
    # Shared across all instances on purpose -- see class docstring.
    queues = {}
    sets = defaultdict(set)
    hashes = defaultdict(dict)
    shard_hint = None
    def __init__(self, db=None, port=None, connection_pool=None, **kwargs):
        self._called = []
        self._connection = None
        self.bgsave_raises_ResponseError = False
        self.connection = self._sconnection(self)
    def bgsave(self):
        self._called.append('BGSAVE')
        if self.bgsave_raises_ResponseError:
            raise ResponseError()
    def delete(self, key):
        self.queues.pop(key, None)
    def exists(self, key):
        return key in self.queues or key in self.sets
    def hset(self, key, k, v):
        self.hashes[key][k] = v
    def hget(self, key, k):
        return self.hashes[key].get(k)
    def hdel(self, key, k):
        self.hashes[key].pop(k, None)
    def sadd(self, key, member, *args):
        self.sets[key].add(member)
    def zadd(self, key, score1, member1, *args):
        # Scores are ignored; a plain set is close enough for these tests.
        self.sets[key].add(member1)
    def smembers(self, key):
        return self.sets.get(key, set())
    def srem(self, key, *args):
        # Unlike real SREM this drops the whole set, which lets tests
        # simulate a lost _kombu.binding key.
        self.sets.pop(key, None)
    zrem = srem
    def llen(self, key):
        try:
            return self.queues[key].qsize()
        except KeyError:
            return 0
    def lpush(self, key, value):
        self.queues[key].put_nowait(value)
    def parse_response(self, connection, type, **options):
        # Replay the last command recorded by _sconnection.send_command.
        cmd, queues = self.connection._sock.data.pop()
        queues = list(queues)
        assert cmd == type
        self.connection._sock.data = []
        if type == 'BRPOP':
            timeout = queues.pop()
            item = self.brpop(queues, timeout)
            if item:
                return item
            raise Empty()
    def brpop(self, keys, timeout=None):
        # Non-blocking sweep over the queues; returns None when all are
        # empty (the timeout argument is ignored).
        for key in keys:
            try:
                item = self.queues[key].get_nowait()
            except Empty:
                pass
            else:
                return key, item
    def rpop(self, key):
        try:
            return self.queues[key].get_nowait()
        except (KeyError, Empty):
            pass
    def __contains__(self, k):
        # `'BGSAVE' in client` checks the recorded command log.
        return k in self._called
    def pipeline(self):
        return Pipeline(self)
    def encode(self, value):
        return str(value)
    def _new_queue(self, key):
        self.queues[key] = _Queue()
    class _sconnection(object):
        """Fake low-level connection; records commands on its socket."""
        disconnected = False
        class _socket(object):
            # Fake socket with a unique fileno and a command buffer.
            blocking = True
            filenos = count(30)
            def __init__(self, *args):
                self._fileno = next(self.filenos)
                self.data = []
            def fileno(self):
                return self._fileno
            def setblocking(self, blocking):
                self.blocking = blocking
        def __init__(self, client):
            self.client = client
            self._sock = self._socket()
        def disconnect(self):
            self.disconnected = True
        def send_command(self, cmd, *args):
            # parse_response() later pops and replays this entry.
            self._sock.data.append((cmd, args))
    def info(self):
        return {'foo': 1}
    def pubsub(self, *args, **kwargs):
        # The client doubles as its own pubsub object; expose a pool
        # that always hands back the current connection.
        connection = self.connection
        class ConnectionPool(object):
            def get_connection(self, *args, **kwargs):
                return connection
        self.connection_pool = ConnectionPool()
        return self
class Pipeline(object):
    """Mimic a redis-py pipeline: buffer client calls, run on execute().

    Attribute access returns a deferring proxy that records the call
    (bound client method, args, kwargs) and returns the pipeline so
    calls can be chained; execute() drains the buffer and returns the
    list of results.
    """
    def __init__(self, client):
        self.client = client
        self.stack = []
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        pass
    def __getattr__(self, key):
        if key in self.__dict__:
            return self.__dict__[key]
        def defer(*args, **kwargs):
            self.stack.append((getattr(self.client, key), args, kwargs))
            return self
        return defer
    def execute(self):
        queued = self.stack[:]
        del self.stack[:]
        return [fun(*args, **kwargs) for fun, args, kwargs in queued]
class Channel(redis.Channel):
    """redis.Channel wired to the fake Client above."""
    def _get_client(self):
        return Client
    def _get_pool(self, async=False):
        # NOTE: `async` mirrors the upstream kombu signature of this era
        # (Python 2; it is a keyword from Python 3.7 on).
        return Mock()
    def _get_response_error(self):
        return ResponseError
    def _new_queue(self, queue, **kwargs):
        # One fake queue per priority level, mirroring the transport's
        # priority-suffixed key scheme.
        for pri in self.priority_steps:
            self.client._new_queue(self._q_for_pri(queue, pri))
    def pipeline(self):
        return Pipeline(Client())
class Transport(redis.Transport):
    """redis.Transport bound to the fake Channel; the connection/channel
    error tuples are simplified so tests can trigger them easily."""
    Channel = Channel
    def _get_errors(self):
        # (connection_errors, channel_errors)
        return ((KeyError,), (IndexError,))
class test_Channel(Case):
    """Unit tests for the redis virtual-transport Channel, run entirely
    against the in-memory fakes above -- no redis server is needed.
    """
    @skip_if_not_module('redis')
    def setup(self):
        self.connection = self.create_connection()
        self.channel = self.connection.default_channel
    def create_connection(self, **kwargs):
        kwargs.setdefault('transport_options', {'fanout_patterns': True})
        return Connection(transport=Transport, **kwargs)
    def _get_one_delivery_tag(self, n='test_uniq_tag'):
        # Publish one message through a fresh connection and return the
        # delivery tag it was assigned.
        with self.create_connection() as conn1:
            chan = conn1.default_channel
            chan.exchange_declare(n)
            chan.queue_declare(n)
            chan.queue_bind(n, n, n)
            msg = chan.prepare_message('quick brown fox')
            chan.basic_publish(msg, n, n)
            payload = chan._get(n)
            self.assertTrue(payload)
            pymsg = chan.message_to_python(payload)
            return pymsg.delivery_tag
    def test_delivery_tag_is_uuid(self):
        seen = set()
        for i in range(100):
            tag = self._get_one_delivery_tag()
            self.assertNotIn(tag, seen)
            seen.add(tag)
            with self.assertRaises(ValueError):
                int(tag)
            self.assertEqual(len(tag), 36)
    def test_disable_ack_emulation(self):
        conn = Connection(transport=Transport, transport_options={
            'ack_emulation': False,
        })
        chan = conn.channel()
        self.assertFalse(chan.ack_emulation)
        self.assertEqual(chan.QoS, virtual.QoS)
    def test_redis_info_raises(self):
        pool = Mock(name='pool')
        pool_at_init = [pool]
        client = Mock(name='client')
        class XChannel(Channel):
            def __init__(self, *args, **kwargs):
                self._pool = pool_at_init[0]
                super(XChannel, self).__init__(*args, **kwargs)
            def _get_client(self):
                return lambda *_, **__: client
        class XTransport(Transport):
            Channel = XChannel
        conn = Connection(transport=XTransport)
        client.info.side_effect = RuntimeError()
        with self.assertRaises(RuntimeError):
            conn.channel()
        pool.disconnect.assert_called_with()
        pool.disconnect.reset_mock()
        pool_at_init = [None]
        with self.assertRaises(RuntimeError):
            conn.channel()
        self.assertFalse(pool.disconnect.called)
    def test_after_fork(self):
        self.channel._pool = None
        self.channel._after_fork()
        pool = self.channel._pool = Mock(name='pool')
        self.channel._after_fork()
        pool.disconnect.assert_called_with()
    def test_next_delivery_tag(self):
        self.assertNotEqual(
            self.channel._next_delivery_tag(),
            self.channel._next_delivery_tag(),
        )
    def test_do_restore_message(self):
        client = Mock(name='client')
        pl1 = {'body': 'BODY'}
        spl1 = dumps(pl1)
        lookup = self.channel._lookup = Mock(name='_lookup')
        lookup.return_value = ['george', 'elaine']
        self.channel._do_restore_message(
            pl1, 'ex', 'rkey', client,
        )
        client.rpush.assert_has_calls([
            call('george', spl1), call('elaine', spl1),
        ])
        pl2 = {'body': 'BODY2', 'headers': {'x-funny': 1}}
        headers_after = dict(pl2['headers'], redelivered=True)
        spl2 = dumps(dict(pl2, headers=headers_after))
        self.channel._do_restore_message(
            pl2, 'ex', 'rkey', client,
        )
        client.rpush.assert_has_calls([
            call('george', spl2), call('elaine', spl2),
        ])
        client.rpush.side_effect = KeyError()
        with patch('kombu.transport.redis.crit') as crit:
            self.channel._do_restore_message(
                pl2, 'ex', 'rkey', client,
            )
            self.assertTrue(crit.called)
    def test_restore(self):
        message = Mock(name='message')
        with patch('kombu.transport.redis.loads') as loads:
            loads.return_value = 'M', 'EX', 'RK'
            client = self.channel._create_client = Mock(name='client')
            client = client()
            client.pipeline = ContextMock()
            restore = self.channel._do_restore_message = Mock(
                name='_do_restore_message',
            )
            pipe = client.pipeline.return_value
            pipe_hget = Mock(name='pipe.hget')
            pipe.hget.return_value = pipe_hget
            pipe_hget_hdel = Mock(name='pipe.hget.hdel')
            pipe_hget.hdel.return_value = pipe_hget_hdel
            result = Mock(name='result')
            pipe_hget_hdel.execute.return_value = None, None
            self.channel._restore(message)
            client.pipeline.assert_called_with()
            unacked_key = self.channel.unacked_key
            self.assertFalse(loads.called)
            tag = message.delivery_tag
            pipe.hget.assert_called_with(unacked_key, tag)
            pipe_hget.hdel.assert_called_with(unacked_key, tag)
            pipe_hget_hdel.execute.assert_called_with()
            pipe_hget_hdel.execute.return_value = result, None
            self.channel._restore(message)
            loads.assert_called_with(result)
            restore.assert_called_with('M', 'EX', 'RK', client, False)
    def test_qos_restore_visible(self):
        client = self.channel._create_client = Mock(name='client')
        client = client()
        def pipe(*args, **kwargs):
            return Pipeline(client)
        client.pipeline = pipe
        client.zrevrangebyscore.return_value = [
            (1, 10),
            (2, 20),
            (3, 30),
        ]
        qos = redis.QoS(self.channel)
        restore = qos.restore_by_tag = Mock(name='restore_by_tag')
        qos._vrestore_count = 1
        qos.restore_visible()
        self.assertFalse(client.zrevrangebyscore.called)
        self.assertEqual(qos._vrestore_count, 2)
        qos._vrestore_count = 0
        qos.restore_visible()
        restore.assert_has_calls([
            call(1, client), call(2, client), call(3, client),
        ])
        self.assertEqual(qos._vrestore_count, 1)
        qos._vrestore_count = 0
        restore.reset_mock()
        client.zrevrangebyscore.return_value = []
        qos.restore_visible()
        self.assertFalse(restore.called)
        self.assertEqual(qos._vrestore_count, 1)
        qos._vrestore_count = 0
        client.setnx.side_effect = redis.MutexHeld()
        qos.restore_visible()
    def test_basic_consume_when_fanout_queue(self):
        self.channel.exchange_declare(exchange='txconfan', type='fanout')
        self.channel.queue_declare(queue='txconfanq')
        self.channel.queue_bind(queue='txconfanq', exchange='txconfan')
        self.assertIn('txconfanq', self.channel._fanout_queues)
        self.channel.basic_consume('txconfanq', False, None, 1)
        self.assertIn('txconfanq', self.channel.active_fanout_queues)
        self.assertEqual(self.channel._fanout_to_queue.get('txconfan'),
                         'txconfanq')
    def test_basic_cancel_unknown_delivery_tag(self):
        self.assertIsNone(self.channel.basic_cancel('txaseqwewq'))
    def test_subscribe_no_queues(self):
        self.channel.subclient = Mock()
        self.channel.active_fanout_queues.clear()
        self.channel._subscribe()
        self.assertFalse(self.channel.subclient.subscribe.called)
    def test_subscribe(self):
        self.channel.subclient = Mock()
        self.channel.active_fanout_queues.add('a')
        self.channel.active_fanout_queues.add('b')
        self.channel._fanout_queues.update(a=('a', ''), b=('b', ''))
        self.channel._subscribe()
        self.assertTrue(self.channel.subclient.psubscribe.called)
        s_args, _ = self.channel.subclient.psubscribe.call_args
        self.assertItemsEqual(s_args[0], ['/{db}.a', '/{db}.b'])
        self.channel.subclient.connection._sock = None
        self.channel._subscribe()
        self.channel.subclient.connection.connect.assert_called_with()
    def test_handle_unsubscribe_message(self):
        s = self.channel.subclient
        s.subscribed = True
        self.channel._handle_message(s, ['unsubscribe', 'a', 0])
        self.assertFalse(s.subscribed)
    def test_handle_pmessage_message(self):
        self.assertDictEqual(
            self.channel._handle_message(
                self.channel.subclient,
                ['pmessage', 'pattern', 'channel', 'data'],
            ),
            {
                'type': 'pmessage',
                'pattern': 'pattern',
                'channel': 'channel',
                'data': 'data',
            },
        )
    def test_handle_message(self):
        self.assertDictEqual(
            self.channel._handle_message(
                self.channel.subclient,
                ['type', 'channel', 'data'],
            ),
            {
                'type': 'type',
                'pattern': None,
                'channel': 'channel',
                'data': 'data',
            },
        )
    def test_brpop_start_but_no_queues(self):
        self.assertIsNone(self.channel._brpop_start())
    def test_receive(self):
        s = self.channel.subclient = Mock()
        self.channel._fanout_to_queue['a'] = 'b'
        s.parse_response.return_value = ['message', 'a',
                                         dumps({'hello': 'world'})]
        payload, queue = self.channel._receive()
        self.assertDictEqual(payload, {'hello': 'world'})
        self.assertEqual(queue, 'b')
    def test_receive_raises_for_connection_error(self):
        self.channel._in_listen = True
        s = self.channel.subclient = Mock()
        s.parse_response.side_effect = KeyError('foo')
        with self.assertRaises(KeyError):
            self.channel._receive()
        self.assertFalse(self.channel._in_listen)
    def test_receive_empty(self):
        s = self.channel.subclient = Mock()
        s.parse_response.return_value = None
        with self.assertRaises(redis.Empty):
            self.channel._receive()
    def test_receive_different_message_Type(self):
        s = self.channel.subclient = Mock()
        s.parse_response.return_value = ['message', '/foo/', 0, 'data']
        with self.assertRaises(redis.Empty):
            self.channel._receive()
    def test_brpop_read_raises(self):
        c = self.channel.client = Mock()
        c.parse_response.side_effect = KeyError('foo')
        with self.assertRaises(KeyError):
            self.channel._brpop_read()
        c.connection.disconnect.assert_called_with()
    def test_brpop_read_gives_None(self):
        c = self.channel.client = Mock()
        c.parse_response.return_value = None
        with self.assertRaises(redis.Empty):
            self.channel._brpop_read()
    def test_poll_error(self):
        c = self.channel.client = Mock()
        c.parse_response = Mock()
        self.channel._poll_error('BRPOP')
        c.parse_response.assert_called_with(c.connection, 'BRPOP')
        c.parse_response.side_effect = KeyError('foo')
        with self.assertRaises(KeyError):
            self.channel._poll_error('BRPOP')
    def test_poll_error_on_type_LISTEN(self):
        c = self.channel.subclient = Mock()
        c.parse_response = Mock()
        self.channel._poll_error('LISTEN')
        c.parse_response.assert_called_with()
        c.parse_response.side_effect = KeyError('foo')
        with self.assertRaises(KeyError):
            self.channel._poll_error('LISTEN')
    def test_put_fanout(self):
        self.channel._in_poll = False
        c = self.channel._create_client = Mock()
        body = {'hello': 'world'}
        self.channel._put_fanout('exchange', body, '')
        c().publish.assert_called_with('/{db}.exchange', dumps(body))
    def test_put_priority(self):
        # Priorities are clamped into the transport's supported steps.
        client = self.channel._create_client = Mock(name='client')
        msg1 = {'properties': {'priority': 3}}
        self.channel._put('george', msg1)
        client().lpush.assert_called_with(
            self.channel._q_for_pri('george', 6), dumps(msg1),
        )
        msg2 = {'properties': {'priority': 313}}
        self.channel._put('george', msg2)
        client().lpush.assert_called_with(
            self.channel._q_for_pri('george', 0), dumps(msg2),
        )
        msg3 = {'properties': {}}
        self.channel._put('george', msg3)
        client().lpush.assert_called_with(
            self.channel._q_for_pri('george', 9), dumps(msg3),
        )
    def test_delete(self):
        x = self.channel
        x._create_client = Mock()
        x._create_client.return_value = x.client
        delete = x.client.delete = Mock()
        srem = x.client.srem = Mock()
        x._delete('queue', 'exchange', 'routing_key', None)
        delete.assert_has_calls([
            call(x._q_for_pri('queue', pri)) for pri in redis.PRIORITY_STEPS
        ])
        srem.assert_called_with(x.keyprefix_queue % ('exchange',),
                                x.sep.join(['routing_key', '', 'queue']))
    def test_has_queue(self):
        self.channel._create_client = Mock()
        self.channel._create_client.return_value = self.channel.client
        exists = self.channel.client.exists = Mock()
        exists.return_value = True
        self.assertTrue(self.channel._has_queue('foo'))
        exists.assert_has_calls([
            call(self.channel._q_for_pri('foo', pri))
            for pri in redis.PRIORITY_STEPS
        ])
        exists.return_value = False
        self.assertFalse(self.channel._has_queue('foo'))
    def test_close_when_closed(self):
        self.channel.closed = True
        self.channel.close()
    def test_close_deletes_autodelete_fanout_queues(self):
        self.channel._fanout_queues = {'foo': ('foo', ''), 'bar': ('bar', '')}
        self.channel.auto_delete_queues = ['foo']
        self.channel.queue_delete = Mock(name='queue_delete')
        self.channel.close()
        self.channel.queue_delete.assert_has_calls([call('foo')])
    def test_close_client_close_raises(self):
        c = self.channel.client = Mock()
        c.connection.disconnect.side_effect = self.channel.ResponseError()
        self.channel.close()
        c.connection.disconnect.assert_called_with()
    def test_invalid_database_raises_ValueError(self):
        with self.assertRaises(ValueError):
            self.channel.connection.client.virtual_host = 'dwqeq'
            self.channel._connparams()
    @skip_if_not_module('redis')
    def test_connparams_allows_slash_in_db(self):
        self.channel.connection.client.virtual_host = '/123'
        self.assertEqual(self.channel._connparams()['db'], 123)
    @skip_if_not_module('redis')
    def test_connparams_db_can_be_int(self):
        self.channel.connection.client.virtual_host = 124
        self.assertEqual(self.channel._connparams()['db'], 124)
    def test_new_queue_with_auto_delete(self):
        redis.Channel._new_queue(self.channel, 'george', auto_delete=False)
        self.assertNotIn('george', self.channel.auto_delete_queues)
        redis.Channel._new_queue(self.channel, 'elaine', auto_delete=True)
        self.assertIn('elaine', self.channel.auto_delete_queues)
    @skip_if_not_module('redis')
    def test_connparams_regular_hostname(self):
        self.channel.connection.client.hostname = 'george.vandelay.com'
        self.assertEqual(
            self.channel._connparams()['host'],
            'george.vandelay.com',
        )
    def test_rotate_cycle_ValueError(self):
        cycle = self.channel._queue_cycle
        cycle.update(['kramer', 'jerry'])
        cycle.rotate('kramer')
        self.assertEqual(cycle.items, ['jerry', 'kramer'])
        # Rotating an unknown item must not raise.
        cycle.rotate('elaine')
    @skip_if_not_module('redis')
    def test_get_client(self):
        import redis as R
        KombuRedis = redis.Channel._get_client(self.channel)
        self.assertTrue(KombuRedis)
        Rv = getattr(R, 'VERSION', None)
        try:
            R.VERSION = (2, 4, 0)
            with self.assertRaises(VersionMismatch):
                redis.Channel._get_client(self.channel)
        finally:
            if Rv is not None:
                R.VERSION = Rv
    @skip_if_not_module('redis')
    def test_get_response_error(self):
        from redis.exceptions import ResponseError
        self.assertIs(redis.Channel._get_response_error(self.channel),
                      ResponseError)
    def test_avail_client(self):
        self.channel._pool = Mock()
        cc = self.channel._create_client = Mock()
        client = cc.return_value = Mock()
        with self.channel.conn_or_acquire():
            pass
        self.channel.pool.release.assert_called_with(client.connection)
        cc.assert_called_with()
    def test_register_with_event_loop(self):
        transport = self.connection.transport
        transport.cycle = Mock(name='cycle')
        transport.cycle.fds = {12: 'LISTEN', 13: 'BRPOP'}
        conn = Mock(name='conn')
        loop = Mock(name='loop')
        redis.Transport.register_with_event_loop(transport, conn, loop)
        transport.cycle.on_poll_init.assert_called_with(loop.poller)
        loop.call_repeatedly.assert_called_with(
            10, transport.cycle.maybe_restore_messages,
        )
        self.assertTrue(loop.on_tick.add.called)
        on_poll_start = loop.on_tick.add.call_args[0][0]
        on_poll_start()
        transport.cycle.on_poll_start.assert_called_with()
        loop.add_reader.assert_has_calls([
            call(12, transport.on_readable, 12),
            call(13, transport.on_readable, 13),
        ])
    def test_transport_on_readable(self):
        transport = self.connection.transport
        cycle = transport.cycle = Mock(name='cyle')
        cycle.on_readable.return_value = None
        redis.Transport.on_readable(transport, 13)
        cycle.on_readable.assert_called_with(13)
        cycle.on_readable.reset_mock()
        queue = Mock(name='queue')
        ret = (Mock(name='message'), queue)
        cycle.on_readable.return_value = ret
        transport._reject_inbound_message = Mock(name='_reject_inbound')
        redis.Transport.on_readable(transport, 14)
        transport._reject_inbound_message.assert_called_with(ret[0])
        cb = transport._callbacks[queue] = Mock(name='callback')
        redis.Transport.on_readable(transport, 14)
        cb.assert_called_with(ret[0])
    @skip_if_not_module('redis')
    def test_transport_get_errors(self):
        self.assertTrue(redis.Transport._get_errors(self.connection.transport))
    @skip_if_not_module('redis')
    def test_transport_driver_version(self):
        self.assertTrue(
            redis.Transport.driver_version(self.connection.transport),
        )
    @skip_if_not_module('redis')
    def test_transport_get_errors_when_InvalidData_used(self):
        # Older redis-py exposed InvalidData instead of DataError.
        from redis import exceptions
        class ID(Exception):
            pass
        DataError = getattr(exceptions, 'DataError', None)
        InvalidData = getattr(exceptions, 'InvalidData', None)
        exceptions.InvalidData = ID
        exceptions.DataError = None
        try:
            errors = redis.Transport._get_errors(self.connection.transport)
            self.assertTrue(errors)
            self.assertIn(ID, errors[1])
        finally:
            if DataError is not None:
                exceptions.DataError = DataError
            if InvalidData is not None:
                exceptions.InvalidData = InvalidData
    def test_empty_queues_key(self):
        channel = self.channel
        channel._in_poll = False
        key = channel.keyprefix_queue % 'celery'
        # Everything is fine, there is a list of queues.
        channel.client.sadd(key, 'celery\x06\x16\x06\x16celery')
        self.assertListEqual(channel.get_table('celery'),
                             [('celery', '', 'celery')])
        # ... then for some reason, the _kombu.binding.celery key gets lost
        channel.client.srem(key)
        # which raises a channel error so that the consumer/publisher
        # can recover by redeclaring the required entities.
        with self.assertRaises(InconsistencyError):
            self.channel.get_table('celery')
    @skip_if_not_module('redis')
    def test_socket_connection(self):
        with patch('kombu.transport.redis.Channel._create_client'):
            with Connection('redis+socket:///tmp/redis.sock') as conn:
                connparams = conn.default_channel._connparams()
                self.assertTrue(issubclass(
                    connparams['connection_class'],
                    redis.redis.UnixDomainSocketConnection,
                ))
                self.assertEqual(connparams['path'], '/tmp/redis.sock')
    @skip_if_not_module('redis')
    def test_ssl_argument__dict(self):
        with patch('kombu.transport.redis.Channel._create_client'):
            with Connection('redis://', ssl={'ca_cert': '/foo'}) as conn:
                connparams = conn.default_channel._connparams()
                self.assertTrue(connparams['ssl'])
                self.assertEqual(connparams['ca_cert'], '/foo')
    @skip_if_not_module('redis')
    def test_ssl_argument__bool(self):
        with patch('kombu.transport.redis.Channel._create_client'):
            with Connection('redis://', ssl=True) as conn:
                connparams = conn.default_channel._connparams()
                self.assertTrue(connparams['ssl'])
class test_Redis(Case):
    """End-to-end publish/consume tests through the fake redis transport."""
    @skip_if_not_module('redis')
    def setup(self):
        self.connection = Connection(transport=Transport)
        self.exchange = Exchange('test_Redis', type='direct')
        self.queue = Queue('test_Redis', self.exchange, 'test_Redis')
    def teardown(self):
        self.connection.close()
    def test_publish__get(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()
        producer.publish({'hello': 'world'})
        self.assertDictEqual(self.queue(channel).get().payload,
                             {'hello': 'world'})
        # Queue is drained after the single message.
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
        self.assertIsNone(self.queue(channel).get())
    def test_publish__consume(self):
        connection = Connection(transport=Transport)
        channel = connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        consumer = Consumer(channel, queues=[self.queue])
        producer.publish({'hello2': 'world2'})
        _received = []
        def callback(message_data, message):
            _received.append(message_data)
            message.ack()
        consumer.register_callback(callback)
        consumer.consume()
        self.assertIn(channel, channel.connection.cycle._channels)
        try:
            connection.drain_events(timeout=1)
            self.assertTrue(_received)
            # A second drain on the now-empty queue must time out.
            with self.assertRaises(socket.timeout):
                connection.drain_events(timeout=0.01)
        finally:
            channel.close()
    def test_purge(self):
        channel = self.connection.channel()
        producer = Producer(channel, self.exchange, routing_key='test_Redis')
        self.queue(channel).declare()
        for i in range(10):
            producer.publish({'hello': 'world-%s' % (i,)})
        self.assertEqual(channel._size('test_Redis'), 10)
        self.assertEqual(self.queue(channel).purge(), 10)
        channel.close()
    def test_db_values(self):
        # int, numeric string and slash-prefixed vhosts are all accepted.
        Connection(virtual_host=1,
                   transport=Transport).channel()
        Connection(virtual_host='1',
                   transport=Transport).channel()
        Connection(virtual_host='/1',
                   transport=Transport).channel()
        with self.assertRaises(Exception):
            Connection('redis:///foo').channel()
    def test_db_port(self):
        c1 = Connection(port=None, transport=Transport).channel()
        c1.close()
        c2 = Connection(port=9999, transport=Transport).channel()
        c2.close()
    def test_close_poller_not_active(self):
        c = Connection(transport=Transport).channel()
        cycle = c.connection.cycle
        c.client.connection
        c.close()
        self.assertNotIn(c, cycle._channels)
    def test_close_ResponseError(self):
        c = Connection(transport=Transport).channel()
        c.client.bgsave_raises_ResponseError = True
        c.close()
    def test_close_disconnects(self):
        c = Connection(transport=Transport).channel()
        conn1 = c.client.connection
        conn2 = c.subclient.connection
        c.close()
        self.assertTrue(conn1.disconnected)
        self.assertTrue(conn2.disconnected)
    def test_get__Empty(self):
        channel = self.connection.channel()
        with self.assertRaises(Empty):
            channel._get('does-not-exist')
        channel.close()
    def test_get_client(self):
        # Run with the fake redis modules installed via module_exists.
        myredis, exceptions = _redis_modules()
        @module_exists(myredis, exceptions)
        def _do_test():
            conn = Connection(transport=Transport)
            chan = conn.channel()
            self.assertTrue(chan.Client)
            self.assertTrue(chan.ResponseError)
            self.assertTrue(conn.transport.connection_errors)
            self.assertTrue(conn.transport.channel_errors)
        _do_test()
    def test_check_at_least_we_try_to_connect_and_fail(self):
        # Uses the real redis transport against a dead port.
        import redis
        connection = Connection('redis://localhost:65534/')
        with self.assertRaises(redis.exceptions.ConnectionError):
            chan = connection.channel()
            chan._size('some_queue')
def _redis_modules():
class ConnectionError(Exception):
pass
class AuthenticationError(Exception):
pass
class InvalidData(Exception):
pass
class InvalidResponse(Exception):
pass
class ResponseError(Exception):
pass
exceptions = types.ModuleType('redis.exceptions')
exceptions.ConnectionError = ConnectionError
exceptions.AuthenticationError = AuthenticationError
exceptions.InvalidData = InvalidData
exceptions.InvalidResponse = InvalidResponse
exceptions.ResponseError = ResponseError
class Redis(object):
pass
myredis = types.ModuleType('redis')
myredis.exceptions = exceptions
myredis.Redis = Redis
return myredis, exceptions
class test_MultiChannelPoller(Case):
    """Unit tests for ``kombu.transport.redis.MultiChannelPoller``.

    The poller multiplexes BRPOP (direct queues) and LISTEN (fanout
    queues) sockets for several channels; these tests drive it with
    Mock channels and assert on registration/dispatch behaviour.
    """
    @skip_if_not_module('redis')
    def setup(self):
        self.Poller = redis.MultiChannelPoller
    def test_on_poll_start(self):
        # BRPOP is only registered when QoS allows consuming;
        # LISTEN is registered whenever fanout queues are active.
        p = self.Poller()
        p._channels = []
        p.on_poll_start()
        p._register_BRPOP = Mock(name='_register_BRPOP')
        p._register_LISTEN = Mock(name='_register_LISTEN')
        chan1 = Mock(name='chan1')
        p._channels = [chan1]
        chan1.active_queues = []
        chan1.active_fanout_queues = []
        p.on_poll_start()
        chan1.active_queues = ['q1']
        chan1.active_fanout_queues = ['q2']
        chan1.qos.can_consume.return_value = False
        p.on_poll_start()
        p._register_LISTEN.assert_called_with(chan1)
        self.assertFalse(p._register_BRPOP.called)
        chan1.qos.can_consume.return_value = True
        p._register_LISTEN.reset_mock()
        p.on_poll_start()
        p._register_BRPOP.assert_called_with(chan1)
        p._register_LISTEN.assert_called_with(chan1)
    def test_on_poll_init(self):
        # on_poll_init stores the poller and restores visible messages
        # for every registered channel.
        p = self.Poller()
        chan1 = Mock(name='chan1')
        p._channels = []
        poller = Mock(name='poller')
        p.on_poll_init(poller)
        self.assertIs(p.poller, poller)
        p._channels = [chan1]
        p.on_poll_init(poller)
        chan1.qos.restore_visible.assert_called_with(
            num=chan1.unacked_restore_limit,
        )
    def test_handle_event(self):
        # READ events dispatch to the channel handler only when QoS
        # allows; ERR events go to the channel's _poll_error.
        p = self.Poller()
        chan = Mock(name='chan')
        p._fd_to_chan[13] = chan, 'BRPOP'
        chan.handlers = {'BRPOP': Mock(name='BRPOP')}
        chan.qos.can_consume.return_value = False
        p.handle_event(13, redis.READ)
        self.assertFalse(chan.handlers['BRPOP'].called)
        chan.qos.can_consume.return_value = True
        p.handle_event(13, redis.READ)
        chan.handlers['BRPOP'].assert_called_with()
        p.handle_event(13, redis.ERR)
        chan._poll_error.assert_called_with('BRPOP')
        p.handle_event(13, ~(redis.READ | redis.ERR))  # unknown event type: ignored
    def test_fds(self):
        p = self.Poller()
        p._fd_to_chan = {1: 2}
        self.assertDictEqual(p.fds, p._fd_to_chan)
    def test_close_unregisters_fds(self):
        p = self.Poller()
        poller = p.poller = Mock()
        p._chan_to_sock.update({1: 1, 2: 2, 3: 3})
        p.close()
        self.assertEqual(poller.unregister.call_count, 3)
        u_args = poller.unregister.call_args_list
        self.assertItemsEqual(u_args, [((1,), {}),
                                       ((2,), {}),
                                       ((3,), {})])
    def test_close_when_unregister_raises_KeyError(self):
        # A socket already gone from the poller must not break close().
        p = self.Poller()
        p.poller = Mock()
        p._chan_to_sock.update({1: 1})
        p.poller.unregister.side_effect = KeyError(1)
        p.close()
    def test_close_resets_state(self):
        p = self.Poller()
        p.poller = Mock()
        p._channels = Mock()
        p._fd_to_chan = Mock()
        p._chan_to_sock = Mock()
        p._chan_to_sock.itervalues.return_value = []
        p._chan_to_sock.values.return_value = []  # py3k
        p.close()
        p._channels.clear.assert_called_with()
        p._fd_to_chan.clear.assert_called_with()
        p._chan_to_sock.clear.assert_called_with()
    def test_register_when_registered_reregisters(self):
        # Registering an already-registered (channel, client, type) must
        # first unregister the old socket, then register the new one.
        p = self.Poller()
        p.poller = Mock()
        channel, client, type = Mock(), Mock(), Mock()
        sock = client.connection._sock = Mock()
        sock.fileno.return_value = 10
        p._chan_to_sock = {(channel, client, type): 6}
        p._register(channel, client, type)
        p.poller.unregister.assert_called_with(6)
        self.assertTupleEqual(p._fd_to_chan[10], (channel, type))
        self.assertEqual(p._chan_to_sock[(channel, client, type)], sock)
        p.poller.register.assert_called_with(sock, p.eventflags)
        # when client not connected yet
        client.connection._sock = None
        def after_connected():
            client.connection._sock = Mock()
        client.connection.connect.side_effect = after_connected
        p._register(channel, client, type)
        client.connection.connect.assert_called_with()
    def test_register_BRPOP(self):
        # BRPOP is started and registered once; already in-poll and
        # already registered means no second call.
        p = self.Poller()
        channel = Mock()
        channel.client.connection._sock = None
        p._register = Mock()
        channel._in_poll = False
        p._register_BRPOP(channel)
        self.assertEqual(channel._brpop_start.call_count, 1)
        self.assertEqual(p._register.call_count, 1)
        channel.client.connection._sock = Mock()
        p._chan_to_sock[(channel, channel.client, 'BRPOP')] = True
        channel._in_poll = True
        p._register_BRPOP(channel)
        self.assertEqual(channel._brpop_start.call_count, 1)
        self.assertEqual(p._register.call_count, 1)
    def test_register_LISTEN(self):
        # LISTEN is subscribed and registered once; already listening and
        # already registered means no second call.
        p = self.Poller()
        channel = Mock()
        channel.subclient.connection._sock = None
        channel._in_listen = False
        p._register = Mock()
        p._register_LISTEN(channel)
        p._register.assert_called_with(channel, channel.subclient, 'LISTEN')
        self.assertEqual(p._register.call_count, 1)
        self.assertEqual(channel._subscribe.call_count, 1)
        channel._in_listen = True
        p._chan_to_sock[(channel, channel.subclient, 'LISTEN')] = 3
        channel.subclient.connection._sock = Mock()
        p._register_LISTEN(channel)
        self.assertEqual(p._register.call_count, 1)
        self.assertEqual(channel._subscribe.call_count, 1)
    def create_get(self, events=None, queues=None, fanouts=None):
        # Helper: a poller whose poll() returns `events`, attached to one
        # mock channel with the given active direct/fanout queues.
        _pr = [] if events is None else events
        _aq = [] if queues is None else queues
        _af = [] if fanouts is None else fanouts
        p = self.Poller()
        p.poller = Mock()
        p.poller.poll.return_value = _pr
        p._register_BRPOP = Mock()
        p._register_LISTEN = Mock()
        channel = Mock()
        p._channels = [channel]
        channel.active_queues = _aq
        channel.active_fanout_queues = _af
        return p, channel
    def test_get_no_actions(self):
        p, channel = self.create_get()
        with self.assertRaises(redis.Empty):
            p.get()
    def test_qos_reject(self):
        # reject() without requeue delegates to ack().
        p, channel = self.create_get()
        qos = redis.QoS(channel)
        qos.ack = Mock(name='Qos.ack')
        qos.reject(1234)
        qos.ack.assert_called_with(1234)
    def test_get_brpop_qos_allow(self):
        p, channel = self.create_get(queues=['a_queue'])
        channel.qos.can_consume.return_value = True
        with self.assertRaises(redis.Empty):
            p.get()
        p._register_BRPOP.assert_called_with(channel)
    def test_get_brpop_qos_disallow(self):
        p, channel = self.create_get(queues=['a_queue'])
        channel.qos.can_consume.return_value = False
        with self.assertRaises(redis.Empty):
            p.get()
        self.assertFalse(p._register_BRPOP.called)
    def test_get_listen(self):
        p, channel = self.create_get(fanouts=['f_queue'])
        with self.assertRaises(redis.Empty):
            p.get()
        p._register_LISTEN.assert_called_with(channel)
    def test_get_receives_ERR(self):
        p, channel = self.create_get(events=[(1, eventio.ERR)])
        p._fd_to_chan[1] = (channel, 'BRPOP')
        with self.assertRaises(redis.Empty):
            p.get()
        channel._poll_error.assert_called_with('BRPOP')
    def test_get_receives_multiple(self):
        p, channel = self.create_get(events=[(1, eventio.ERR),
                                             (1, eventio.ERR)])
        p._fd_to_chan[1] = (channel, 'BRPOP')
        with self.assertRaises(redis.Empty):
            p.get()
        channel._poll_error.assert_called_with('BRPOP')
class test_Mutex(Case):
    """Tests for the redis-backed ``Mutex`` context manager: winning the
    lock, losing it (MutexHeld), TTL repair, and WatchError tolerance."""
    @skip_if_not_module('redis')
    def test_mutex(self, lock_id='xxx'):
        client = Mock(name='client')
        with patch('kombu.transport.redis.uuid') as uuid:
            # Won
            uuid.return_value = lock_id
            client.setnx.return_value = True
            client.pipeline = ContextMock()
            pipe = client.pipeline.return_value
            pipe.get.return_value = lock_id
            held = False
            with redis.Mutex(client, 'foo1', 100):
                held = True
            self.assertTrue(held)
            client.setnx.assert_called_with('foo1', lock_id)
            # still enters the body when the stored value no longer
            # matches our lock_id (someone else holds it on release)
            pipe.get.return_value = 'yyy'
            held = False
            with redis.Mutex(client, 'foo1', 100):
                held = True
            self.assertTrue(held)
            # Did not win
            client.expire.reset_mock()
            pipe.get.return_value = lock_id
            client.setnx.return_value = False
            with self.assertRaises(redis.MutexHeld):
                held = False
                with redis.Mutex(client, 'foo1', '100'):
                    held = True
                self.assertFalse(held)
            # a lock with no TTL gets an expiry set before raising
            client.ttl.return_value = 0
            with self.assertRaises(redis.MutexHeld):
                held = False
                with redis.Mutex(client, 'foo1', '100'):
                    held = True
                self.assertFalse(held)
            self.assertTrue(client.expire.called)
            # Wins but raises WatchError (and that is ignored)
            client.setnx.return_value = True
            pipe.watch.side_effect = redis.redis.WatchError()
            held = False
            with redis.Mutex(client, 'foo1', 100):
                held = True
            self.assertTrue(held)
class test_RedisSentinel(Case):
    """Tests for the Sentinel-managed redis transport channel."""
    @skip_if_not_module('redis.sentinel')
    def setUp(self):
        super(test_RedisSentinel, self).setUp()
    def test_method_called(self):
        """Creating a channel on a sentinel:// URL uses the
        sentinel-managed connection pool."""
        from kombu.transport.redis import SentinelChannel
        with patch.object(SentinelChannel, '_sentinel_managed_pool') as patched:
            connection = Connection('sentinel://localhost:65534/', transport_options={
                'master_name': 'not_important'
            })
            connection.channel()
            self.assertTrue(patched.called)
    def test_getting_master_from_sentinel(self):
        """The channel asks Sentinel for the master named in
        transport_options and takes a connection from its pool."""
        from redis.sentinel import Sentinel
        with patch.object(Sentinel, '__new__') as patched:
            connection = Connection('sentinel://localhost:65534/', transport_options={
                'master_name': 'not_important'
            })
            connection.channel()
            self.assertTrue(patched)
            sentinel_obj = patched.return_value
            self.assertTrue(sentinel_obj.master_for.called, 'master_for was not called')
            sentinel_obj.master_for.assert_called_with('not_important', ANY)
            self.assertTrue(sentinel_obj.master_for().connection_pool.get_connection.called, 'get_connection on redis connection pool was not called')
    def test_can_create_connection(self):
        """Without a reachable sentinel, channel creation raises
        redis ConnectionError."""
        from redis.exceptions import ConnectionError
        with self.assertRaises(ConnectionError):
            connection = Connection('sentinel://localhost:65534/', transport_options={
                'master_name': 'not_important'
            })
            connection.channel()
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
from helpers import with_config, LuigiTestCase, parsing, in_parse, RunOnceTask
from datetime import timedelta
import enum
import mock
import luigi
import luigi.date_interval
import luigi.interface
import luigi.notifications
from luigi.mock import MockTarget
from luigi.parameter import ParameterException
from luigi import six
from worker_test import email_patch
luigi.notifications.DEBUG = True
class A(luigi.Task):
    """Task with a single required int parameter."""
    p = luigi.IntParameter()
class WithDefault(luigi.Task):
    """Task whose ``x`` parameter defaults to ``'xyz'``."""
    x = luigi.Parameter(default='xyz')
class WithDefaultTrue(luigi.Task):
    """Task with a bool parameter that defaults to True."""
    x = luigi.BoolParameter(default=True)
class Foo(luigi.Task):
    """Task mixing two parameters with a plain class attribute."""
    bar = luigi.Parameter()
    p2 = luigi.IntParameter()
    not_a_param = "lol"  # plain attribute; must NOT be registered as a parameter
class Baz(luigi.Task):
    """Records the parsed value of its bool flag in ``Baz._val``."""
    bool = luigi.BoolParameter()
    def run(self):
        Baz._val = self.bool
class ListFoo(luigi.Task):
    """Records the parsed value of its list parameter in ``ListFoo._val``."""
    my_list = luigi.ListParameter()
    def run(self):
        ListFoo._val = self.my_list
class TupleFoo(luigi.Task):
    """Records the parsed value of its tuple parameter in ``TupleFoo._val``."""
    my_tuple = luigi.TupleParameter()
    def run(self):
        TupleFoo._val = self.my_tuple
class ForgotParam(luigi.Task):
    """Task with a required parameter and a no-op run()."""
    param = luigi.Parameter()
    def run(self):
        pass
class ForgotParamDep(luigi.Task):
    """Depends on ForgotParam without supplying its required parameter,
    to trigger a programmatic MissingParameterException."""
    def requires(self):
        return ForgotParam()
    def run(self):
        pass
class BananaDep(luigi.Task):
    """Dependency task that writes a marker target named from ``x``/``y``."""
    x = luigi.Parameter()
    y = luigi.Parameter(default='def')
    def output(self):
        return MockTarget('banana-dep-%s-%s' % (self.x, self.y))
    def run(self):
        # Touch the target so the task counts as complete.
        self.output().open('w').close()
class Banana(luigi.Task):
    """Task whose dependency on :class:`BananaDep` is constructed in one
    of several styles, selected by the ``style`` parameter."""
    x = luigi.Parameter()
    y = luigi.Parameter()
    style = luigi.Parameter(default=None)

    def requires(self):
        # Guard-clause dispatch on the requested construction style.
        if self.style is None:
            return BananaDep()  # will fail
        if self.style == 'x-arg':
            return BananaDep(self.x)
        if self.style == 'y-kwarg':
            return BananaDep(y=self.y)
        if self.style == 'x-arg-y-arg':
            return BananaDep(self.x, self.y)
        raise Exception('unknown style')

    def output(self):
        target_name = 'banana-%s-%s' % (self.x, self.y)
        return MockTarget(target_name)

    def run(self):
        # Touch the target so the task counts as complete.
        self.output().open('w').close()
class MyConfig(luigi.Config):
    """Config class with one required and one defaulted int option."""
    mc_p = luigi.IntParameter()
    mc_q = luigi.IntParameter(default=73)
class MyConfigWithoutSection(luigi.Config):
    """Config whose options are parsed without a section/class prefix."""
    use_cmdline_section = False
    mc_r = luigi.IntParameter()
    mc_s = luigi.IntParameter(default=99)
class NoopTask(luigi.Task):
    """Parameterless task used as a command-line anchor in parsing tests."""
    pass
class MyEnum(enum.Enum):
    """Single-member enum used by the EnumParameter tests."""
    A = 1
def _value(parameter):
    """
    A hackish way to get the "value" of a parameter.
    Previously Parameter exposed ``param_obj._value``. This is replacement for
    that so I don't need to rewrite all test cases.
    """
    # Bind the parameter to a throwaway Task class and read the resolved
    # attribute off an instance -- this exercises the full default/config
    # resolution path.
    class DummyLuigiTask(luigi.Task):
        param = parameter
    return DummyLuigiTask().param
class ParameterTest(LuigiTestCase):
    """Core parameter behaviour: defaults, missing/unknown/duplicate
    arguments, positional vs significant parameters, enum/list/tuple
    parsing, batch methods, and Optional/None warnings."""
    def test_default_param(self):
        self.assertEqual(WithDefault().x, 'xyz')
    def test_missing_param(self):
        def create_a():
            return A()
        self.assertRaises(luigi.parameter.MissingParameterException, create_a)
    def test_unknown_param(self):
        def create_a():
            return A(p=5, q=4)
        self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
    def test_unknown_param_2(self):
        def create_a():
            return A(1, 2, 3)
        self.assertRaises(luigi.parameter.UnknownParameterException, create_a)
    def test_duplicated_param(self):
        def create_a():
            return A(5, p=7)
        self.assertRaises(luigi.parameter.DuplicateParameterException, create_a)
    def test_parameter_registration(self):
        self.assertEqual(len(Foo.get_params()), 2)
    def test_task_creation(self):
        f = Foo("barval", p2=5)
        self.assertEqual(len(f.get_params()), 2)
        self.assertEqual(f.bar, "barval")
        self.assertEqual(f.p2, 5)
        self.assertEqual(f.not_a_param, "lol")
    def test_bool_false(self):
        self.run_locally(['Baz'])
        self.assertEqual(Baz._val, False)
    def test_bool_true(self):
        self.run_locally(['Baz', '--bool'])
        self.assertEqual(Baz._val, True)
    def test_bool_default_true(self):
        self.assertTrue(WithDefaultTrue().x)
    def test_bool_coerce(self):
        self.assertEqual(True, WithDefaultTrue(x='yes').x)
    def test_bool_no_coerce_none(self):
        # None passes through BoolParameter without coercion.
        self.assertIsNone(WithDefaultTrue(x=None).x)
    def test_forgot_param(self):
        self.assertRaises(luigi.parameter.MissingParameterException, self.run_locally, ['ForgotParam'],)
    @email_patch
    def test_forgot_param_in_dep(self, emails):
        # A programmatic missing parameter will cause an error email to be sent
        self.run_locally(['ForgotParamDep'])
        self.assertNotEqual(emails, [])
    def test_default_param_cmdline(self):
        self.assertEqual(WithDefault().x, 'xyz')
    def test_default_param_cmdline_2(self):
        self.assertEqual(WithDefault().x, 'xyz')
    def test_insignificant_parameter(self):
        # Insignificant parameters are accepted positionally but are
        # excluded from the task's string representation.
        class InsignificantParameterTask(luigi.Task):
            foo = luigi.Parameter(significant=False, default='foo_default')
            bar = luigi.Parameter()
        t1 = InsignificantParameterTask(foo='x', bar='y')
        self.assertEqual(str(t1), 'InsignificantParameterTask(bar=y)')
        t2 = InsignificantParameterTask('u', 'z')
        self.assertEqual(t2.foo, 'u')
        self.assertEqual(t2.bar, 'z')
        self.assertEqual(str(t2), 'InsignificantParameterTask(bar=z)')
    def test_local_significant_param(self):
        """ Obviously, if anything should be positional, so should local
        significant parameters """
        class MyTask(luigi.Task):
            # This could typically be "--label-company=disney"
            x = luigi.Parameter(significant=True)
        MyTask('arg')
        self.assertRaises(luigi.parameter.MissingParameterException,
                          lambda: MyTask())
    def test_local_insignificant_param(self):
        """ Ensure we have the same behavior as in before a78338c """
        class MyTask(luigi.Task):
            # This could typically be "--num-threads=True"
            x = luigi.Parameter(significant=False)
        MyTask('arg')
        self.assertRaises(luigi.parameter.MissingParameterException,
                          lambda: MyTask())
    def test_nonpositional_param(self):
        """ Ensure we have the same behavior as in before a78338c """
        class MyTask(luigi.Task):
            # This could typically be "--num-threads=10"
            x = luigi.Parameter(significant=False, positional=False)
        MyTask(x='arg')
        self.assertRaises(luigi.parameter.UnknownParameterException,
                          lambda: MyTask('arg'))
    def test_enum_param_valid(self):
        p = luigi.parameter.EnumParameter(enum=MyEnum)
        self.assertEqual(MyEnum.A, p.parse('A'))
    def test_enum_param_invalid(self):
        p = luigi.parameter.EnumParameter(enum=MyEnum)
        self.assertRaises(ValueError, lambda: p.parse('B'))
    def test_enum_param_missing(self):
        # the enum= argument is mandatory
        self.assertRaises(ParameterException, lambda: luigi.parameter.EnumParameter())
    def test_list_serialize_parse(self):
        a = luigi.ListParameter()
        b_list = [1, 2, 3]
        self.assertEqual(b_list, a.parse(a.serialize(b_list)))
    def test_tuple_serialize_parse(self):
        a = luigi.TupleParameter()
        b_tuple = ((1, 2), (3, 4))
        self.assertEqual(b_tuple, a.parse(a.serialize(b_tuple)))
    def test_parse_list_without_batch_method(self):
        param = luigi.Parameter()
        for xs in [], ['x'], ['x', 'y']:
            self.assertRaises(NotImplementedError, param._parse_list, xs)
    def test_parse_empty_list_raises_value_error(self):
        for batch_method in (max, min, tuple, ','.join):
            param = luigi.Parameter(batch_method=batch_method)
            self.assertRaises(ValueError, param._parse_list, [])
    def test_parse_int_list_max(self):
        # values are parsed to int before the batch method is applied
        param = luigi.IntParameter(batch_method=max)
        self.assertEqual(17, param._parse_list(['7', '17', '5']))
    def test_parse_string_list_max(self):
        # string comparison: '7' > '17'
        param = luigi.Parameter(batch_method=max)
        self.assertEqual('7', param._parse_list(['7', '17', '5']))
    def test_parse_list_as_tuple(self):
        param = luigi.IntParameter(batch_method=tuple)
        self.assertEqual((7, 17, 5), param._parse_list(['7', '17', '5']))
    @mock.patch('luigi.parameter.warnings')
    def test_warn_on_default_none(self, warnings):
        class TestConfig(luigi.Config):
            param = luigi.Parameter(default=None)
        TestConfig()
        warnings.warn.assert_called_once_with('Parameter "param" with value "None" is not of type string.')
    @mock.patch('luigi.parameter.warnings')
    def test_no_warn_on_string(self, warnings):
        class TestConfig(luigi.Config):
            param = luigi.Parameter(default=None)
        TestConfig(param="str")
        warnings.warn.assert_not_called()
    @mock.patch('luigi.parameter.warnings')
    def test_no_warn_on_none_in_optional(self, warnings):
        class TestConfig(luigi.Config):
            param = luigi.OptionalParameter(default=None)
        TestConfig()
        warnings.warn.assert_not_called()
    @mock.patch('luigi.parameter.warnings')
    def test_no_warn_on_string_in_optional(self, warnings):
        class TestConfig(luigi.Config):
            param = luigi.OptionalParameter(default=None)
        TestConfig(param='value')
        warnings.warn.assert_not_called()
    @mock.patch('luigi.parameter.warnings')
    def test_warn_on_bad_type_in_optional(self, warnings):
        class TestConfig(luigi.Config):
            param = luigi.OptionalParameter()
        TestConfig(param=1)
        warnings.warn.assert_called_once_with('OptionalParameter "param" with value "1" is not of type string or None.')
    def test_optional_parameter_parse_none(self):
        self.assertIsNone(luigi.OptionalParameter().parse(''))
    def test_optional_parameter_parse_string(self):
        self.assertEqual('test', luigi.OptionalParameter().parse('test'))
    def test_optional_parameter_serialize_none(self):
        self.assertEqual('', luigi.OptionalParameter().serialize(None))
    def test_optional_parameter_serialize_string(self):
        self.assertEqual('test', luigi.OptionalParameter().serialize('test'))
class TestParametersHashability(LuigiTestCase):
    """For every parameter type: the value assigned through a Task must
    hash equal to the value produced by the parameter's ``parse()``
    (normalized first where the type requires it)."""
    def test_date(self):
        class Foo(luigi.Task):
            args = luigi.parameter.DateParameter()
        p = luigi.parameter.DateParameter()
        self.assertEqual(hash(Foo(args=datetime.date(2000, 1, 1)).args), hash(p.parse('2000-1-1')))
    def test_dateminute(self):
        class Foo(luigi.Task):
            args = luigi.parameter.DateMinuteParameter()
        p = luigi.parameter.DateMinuteParameter()
        self.assertEqual(hash(Foo(args=datetime.datetime(2000, 1, 1, 12, 0)).args), hash(p.parse('2000-1-1T1200')))
    def test_dateinterval(self):
        class Foo(luigi.Task):
            args = luigi.parameter.DateIntervalParameter()
        p = luigi.parameter.DateIntervalParameter()
        di = luigi.date_interval.Custom(datetime.date(2000, 1, 1), datetime.date(2000, 2, 12))
        self.assertEqual(hash(Foo(args=di).args), hash(p.parse('2000-01-01-2000-02-12')))
    def test_timedelta(self):
        class Foo(luigi.Task):
            args = luigi.parameter.TimeDeltaParameter()
        p = luigi.parameter.TimeDeltaParameter()
        self.assertEqual(hash(Foo(args=datetime.timedelta(days=2, hours=3, minutes=2)).args), hash(p.parse('P2DT3H2M')))
    def test_boolean(self):
        class Foo(luigi.Task):
            args = luigi.parameter.BoolParameter()
        p = luigi.parameter.BoolParameter()
        self.assertEqual(hash(Foo(args=True).args), hash(p.parse('true')))
    def test_int(self):
        class Foo(luigi.Task):
            args = luigi.parameter.IntParameter()
        p = luigi.parameter.IntParameter()
        self.assertEqual(hash(Foo(args=1).args), hash(p.parse('1')))
    def test_float(self):
        class Foo(luigi.Task):
            args = luigi.parameter.FloatParameter()
        p = luigi.parameter.FloatParameter()
        self.assertEqual(hash(Foo(args=1.0).args), hash(p.parse('1')))
    def test_enum(self):
        class Foo(luigi.Task):
            args = luigi.parameter.EnumParameter(enum=MyEnum)
        p = luigi.parameter.EnumParameter(enum=MyEnum)
        self.assertEqual(hash(Foo(args=MyEnum.A).args), hash(p.parse('A')))
    def test_dict(self):
        class Foo(luigi.Task):
            args = luigi.parameter.DictParameter()
        p = luigi.parameter.DictParameter()
        self.assertEqual(hash(Foo(args=dict(foo=1, bar="hello")).args), hash(p.parse('{"foo":1,"bar":"hello"}')))
    def test_list(self):
        # lists need normalize() before they are hashable
        class Foo(luigi.Task):
            args = luigi.parameter.ListParameter()
        p = luigi.parameter.ListParameter()
        self.assertEqual(hash(Foo(args=[1, "hello"]).args), hash(p.normalize(p.parse('[1,"hello"]'))))
    def test_list_dict(self):
        class Foo(luigi.Task):
            args = luigi.parameter.ListParameter()
        p = luigi.parameter.ListParameter()
        self.assertEqual(hash(Foo(args=[{'foo': 'bar'}, {'doge': 'wow'}]).args),
                         hash(p.normalize(p.parse('[{"foo": "bar"}, {"doge": "wow"}]'))))
    def test_list_nested(self):
        class Foo(luigi.Task):
            args = luigi.parameter.ListParameter()
        p = luigi.parameter.ListParameter()
        self.assertEqual(hash(Foo(args=[['foo', 'bar'], ['doge', 'wow']]).args),
                         hash(p.normalize(p.parse('[["foo", "bar"], ["doge", "wow"]]'))))
    def test_tuple(self):
        class Foo(luigi.Task):
            args = luigi.parameter.TupleParameter()
        p = luigi.parameter.TupleParameter()
        self.assertEqual(hash(Foo(args=(1, "hello")).args), hash(p.parse('(1,"hello")')))
    def test_tuple_dict(self):
        class Foo(luigi.Task):
            args = luigi.parameter.TupleParameter()
        p = luigi.parameter.TupleParameter()
        self.assertEqual(hash(Foo(args=({'foo': 'bar'}, {'doge': 'wow'})).args),
                         hash(p.normalize(p.parse('({"foo": "bar"}, {"doge": "wow"})'))))
    def test_tuple_nested(self):
        class Foo(luigi.Task):
            args = luigi.parameter.TupleParameter()
        p = luigi.parameter.TupleParameter()
        self.assertEqual(hash(Foo(args=(('foo', 'bar'), ('doge', 'wow'))).args),
                         hash(p.normalize(p.parse('(("foo", "bar"), ("doge", "wow"))'))))
    def test_task(self):
        class Bar(luigi.Task):
            pass
        class Foo(luigi.Task):
            args = luigi.parameter.TaskParameter()
        p = luigi.parameter.TaskParameter()
        self.assertEqual(hash(Foo(args=Bar).args), hash(p.parse('Bar')))
class TestNewStyleGlobalParameters(LuigiTestCase):
    """End-to-end runs of Banana/BananaDep, asserting which targets are
    created when dependency parameters come from args, kwargs, or
    per-task command-line overrides (``--BananaDep-x`` etc.)."""
    def setUp(self):
        super(TestNewStyleGlobalParameters, self).setUp()
        MockTarget.fs.clear()
    def expect_keys(self, expected):
        # Compare the set of targets written to the mock filesystem.
        self.assertEqual(set(MockTarget.fs.get_all_data().keys()), set(expected))
    def test_x_arg(self):
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
    def test_x_arg_override(self):
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-y', 'xyz'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-xyz'])
    def test_x_arg_override_stupid(self):
        # overriding a parameter that is passed positionally has no effect
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg', '--BananaDep-x', 'blabla'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-def'])
    def test_x_arg_y_arg(self):
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
    def test_x_arg_y_arg_override(self):
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y', 'xyz'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
    def test_x_arg_y_arg_override_all(self):
        self.run_locally(['Banana', '--x', 'foo',
                          '--y', 'bar', '--style', 'x-arg-y-arg', '--BananaDep-y',
                          'xyz', '--BananaDep-x', 'blabla'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-foo-bar'])
    def test_y_arg_override(self):
        self.run_locally(['Banana', '--x', 'foo', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
    def test_y_arg_override_both(self):
        self.run_locally(['Banana', '--x', 'foo',
                          '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz',
                          '--BananaDep-y', 'blah'])
        self.expect_keys(['banana-foo-bar', 'banana-dep-xyz-bar'])
    def test_y_arg_override_banana(self):
        self.run_locally(['Banana', '--y', 'bar', '--style', 'y-kwarg', '--BananaDep-x', 'xyz', '--Banana-x', 'baz'])
        self.expect_keys(['banana-baz-bar', 'banana-dep-xyz-bar'])
class TestRemoveGlobalParameters(LuigiTestCase):
    """Command-line parsing of Config-class options with and without a
    section prefix, their interaction with file configuration, and the
    deprecation of ``is_global``."""
    def run_and_check(self, args):
        run_exit_status = self.run_locally(args)
        self.assertTrue(run_exit_status)
        return run_exit_status
    @parsing(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask'])
    def test_use_config_class_1(self):
        self.assertEqual(MyConfig().mc_p, 99)
        self.assertEqual(MyConfig().mc_q, 73)
        self.assertEqual(MyConfigWithoutSection().mc_r, 55)
        self.assertEqual(MyConfigWithoutSection().mc_s, 99)
    @parsing(['NoopTask', '--MyConfig-mc-p', '99', '--mc-r', '55'])
    def test_use_config_class_2(self):
        # same options, given after the task name
        self.assertEqual(MyConfig().mc_p, 99)
        self.assertEqual(MyConfig().mc_q, 73)
        self.assertEqual(MyConfigWithoutSection().mc_r, 55)
        self.assertEqual(MyConfigWithoutSection().mc_s, 99)
    @parsing(['--MyConfig-mc-p', '99', '--mc-r', '55', 'NoopTask', '--mc-s', '123', '--MyConfig-mc-q', '42'])
    def test_use_config_class_more_args(self):
        self.assertEqual(MyConfig().mc_p, 99)
        self.assertEqual(MyConfig().mc_q, 42)
        self.assertEqual(MyConfigWithoutSection().mc_r, 55)
        self.assertEqual(MyConfigWithoutSection().mc_s, 123)
    @with_config({"MyConfig": {"mc_p": "666", "mc_q": "777"}})
    @parsing(['--mc-r', '555', 'NoopTask'])
    def test_use_config_class_with_configuration(self):
        self.assertEqual(MyConfig().mc_p, 666)
        self.assertEqual(MyConfig().mc_q, 777)
        self.assertEqual(MyConfigWithoutSection().mc_r, 555)
        self.assertEqual(MyConfigWithoutSection().mc_s, 99)
    @with_config({"MyConfigWithoutSection": {"mc_r": "999", "mc_s": "888"}})
    @parsing(['NoopTask', '--MyConfig-mc-p', '222', '--mc-r', '555'])
    def test_use_config_class_with_configuration_2(self):
        # command line wins over the config file for mc_r
        self.assertEqual(MyConfig().mc_p, 222)
        self.assertEqual(MyConfig().mc_q, 73)
        self.assertEqual(MyConfigWithoutSection().mc_r, 555)
        self.assertEqual(MyConfigWithoutSection().mc_s, 888)
    def test_misc_1(self):
        class Dogs(luigi.Config):
            n_dogs = luigi.IntParameter()
        class CatsWithoutSection(luigi.Config):
            use_cmdline_section = False
            n_cats = luigi.IntParameter()
        with luigi.cmdline_parser.CmdlineParser.global_instance(['--n-cats', '123', '--Dogs-n-dogs', '456', 'WithDefault'], allow_override=True):
            self.assertEqual(Dogs().n_dogs, 456)
            self.assertEqual(CatsWithoutSection().n_cats, 123)
        with luigi.cmdline_parser.CmdlineParser.global_instance(['WithDefault', '--n-cats', '321', '--Dogs-n-dogs', '654'], allow_override=True):
            self.assertEqual(Dogs().n_dogs, 654)
            self.assertEqual(CatsWithoutSection().n_cats, 321)
    # assertWarnsRegex only exists on Python 3's unittest, hence the guard.
    if six.PY3:
        def test_global_significant_param_warning(self):
            """ We don't want any kind of global param to be positional """
            with self.assertWarnsRegex(DeprecationWarning, 'is_global support is removed. Assuming positional=False'):
                class MyTask(luigi.Task):
                    # This could typically be called "--test-dry-run"
                    x_g1 = luigi.Parameter(default='y', is_global=True, significant=True)
            self.assertRaises(luigi.parameter.UnknownParameterException,
                              lambda: MyTask('arg'))
        def test_global_insignificant_param_warning(self):
            """ We don't want any kind of global param to be positional """
            with self.assertWarnsRegex(DeprecationWarning, 'is_global support is removed. Assuming positional=False'):
                class MyTask(luigi.Task):
                    # This could typically be "--yarn-pool=development"
                    x_g2 = luigi.Parameter(default='y', is_global=True, significant=False)
            self.assertRaises(luigi.parameter.UnknownParameterException,
                              lambda: MyTask('arg'))
class TestParamWithDefaultFromConfig(LuigiTestCase):
    """Parameters resolving their default via ``config_path``: every
    parameter type parses its value out of the configured section/name.

    NOTE(review): this class may continue beyond the visible portion of
    the file; only the methods shown here are documented.
    """
    def testNoSection(self):
        self.assertRaises(ParameterException, lambda: _value(luigi.Parameter(config_path=dict(section="foo", name="bar"))))
    @with_config({"foo": {}})
    def testNoValue(self):
        self.assertRaises(ParameterException, lambda: _value(luigi.Parameter(config_path=dict(section="foo", name="bar"))))
    @with_config({"foo": {"bar": "baz"}})
    def testDefault(self):
        # an explicit constructor argument still beats the config value
        class LocalA(luigi.Task):
            p = luigi.Parameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual("baz", LocalA().p)
        self.assertEqual("boo", LocalA(p="boo").p)
    @with_config({"foo": {"bar": "2001-02-03T04"}})
    def testDateHour(self):
        p = luigi.DateHourParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 0, 0), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T05"}})
    def testDateHourWithInterval(self):
        # interval=2 rounds the configured hour down to the interval start
        p = luigi.DateHourParameter(config_path=dict(section="foo", name="bar"), interval=2)
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 0, 0), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T0430"}})
    def testDateMinute(self):
        p = luigi.DateMinuteParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 30, 0), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T0431"}})
    def testDateWithMinuteInterval(self):
        p = luigi.DateMinuteParameter(config_path=dict(section="foo", name="bar"), interval=2)
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 30, 0), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T04H30"}})
    def testDateMinuteDeprecated(self):
        p = luigi.DateMinuteParameter(config_path=dict(section="foo", name="bar"))
        if six.PY3:
            with self.assertWarnsRegex(DeprecationWarning, 'Using "H" between hours and minutes is deprecated, omit it instead.'):
                self.assertEqual(datetime.datetime(2001, 2, 3, 4, 30, 0), _value(p))
        else:
            self.assertEqual(datetime.datetime(2001, 2, 3, 4, 30, 0), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T040506"}})
    def testDateSecond(self):
        p = luigi.DateSecondParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 5, 6), _value(p))
    @with_config({"foo": {"bar": "2001-02-03T040507"}})
    def testDateSecondWithInterval(self):
        p = luigi.DateSecondParameter(config_path=dict(section="foo", name="bar"), interval=2)
        self.assertEqual(datetime.datetime(2001, 2, 3, 4, 5, 6), _value(p))
    @with_config({"foo": {"bar": "2001-02-03"}})
    def testDate(self):
        p = luigi.DateParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.date(2001, 2, 3), _value(p))
    @with_config({"foo": {"bar": "2001-02-03"}})
    def testDateWithInterval(self):
        p = luigi.DateParameter(config_path=dict(section="foo", name="bar"),
                                interval=3, start=datetime.date(2001, 2, 1))
        self.assertEqual(datetime.date(2001, 2, 1), _value(p))
    @with_config({"foo": {"bar": "2015-07"}})
    def testMonthParameter(self):
        p = luigi.MonthParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.date(2015, 7, 1), _value(p))
    @with_config({"foo": {"bar": "2015-07"}})
    def testMonthWithIntervalParameter(self):
        p = luigi.MonthParameter(config_path=dict(section="foo", name="bar"),
                                 interval=13, start=datetime.date(2014, 1, 1))
        self.assertEqual(datetime.date(2015, 2, 1), _value(p))
    @with_config({"foo": {"bar": "2015"}})
    def testYearParameter(self):
        p = luigi.YearParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(datetime.date(2015, 1, 1), _value(p))
    @with_config({"foo": {"bar": "2015"}})
    def testYearWithIntervalParameter(self):
        p = luigi.YearParameter(config_path=dict(section="foo", name="bar"),
                                start=datetime.date(2011, 1, 1), interval=5)
        self.assertEqual(datetime.date(2011, 1, 1), _value(p))
    @with_config({"foo": {"bar": "123"}})
    def testInt(self):
        p = luigi.IntParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(123, _value(p))
    @with_config({"foo": {"bar": "true"}})
    def testBool(self):
        p = luigi.BoolParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(True, _value(p))
    @with_config({"foo": {"bar": "false"}})
    def testBoolConfigOutranksDefault(self):
        p = luigi.BoolParameter(default=True, config_path=dict(section="foo", name="bar"))
        self.assertEqual(False, _value(p))
    @with_config({"foo": {"bar": "2001-02-03-2001-02-28"}})
    def testDateInterval(self):
        p = luigi.DateIntervalParameter(config_path=dict(section="foo", name="bar"))
        expected = luigi.date_interval.Custom.parse("2001-02-03-2001-02-28")
        self.assertEqual(expected, _value(p))
    @with_config({"foo": {"bar": "0 seconds"}})
    def testTimeDeltaNoSeconds(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(seconds=0), _value(p))
    @with_config({"foo": {"bar": "0 d"}})
    def testTimeDeltaNoDays(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(days=0), _value(p))
    @with_config({"foo": {"bar": "1 day"}})
    def testTimeDelta(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(days=1), _value(p))
    @with_config({"foo": {"bar": "2 seconds"}})
    def testTimeDeltaPlural(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(seconds=2), _value(p))
    @with_config({"foo": {"bar": "3w 4h 5m"}})
    def testTimeDeltaMultiple(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(weeks=3, hours=4, minutes=5), _value(p))
    @with_config({"foo": {"bar": "P4DT12H30M5S"}})
    def testTimeDelta8601(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(days=4, hours=12, minutes=30, seconds=5), _value(p))
    @with_config({"foo": {"bar": "P5D"}})
    def testTimeDelta8601NoTimeComponent(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(days=5), _value(p))
    @with_config({"foo": {"bar": "P5W"}})
    def testTimeDelta8601Weeks(self):
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(weeks=5), _value(p))
    @with_config({"foo": {"bar": "P3Y6M4DT12H30M5S"}})
    def testTimeDelta8601YearMonthNotSupported(self):
        def f():
            return _value(luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar")))
        self.assertRaises(luigi.parameter.ParameterException, f)  # ISO 8601 durations with years or months are not supported
    @with_config({"foo": {"bar": "PT6M"}})
    def testTimeDelta8601MAfterT(self):
        # after 'T', 'M' means minutes
        p = luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual(timedelta(minutes=6), _value(p))
    @with_config({"foo": {"bar": "P6M"}})
    def testTimeDelta8601MBeforeT(self):
        def f():
            return _value(luigi.TimeDeltaParameter(config_path=dict(section="foo", name="bar")))
        self.assertRaises(luigi.parameter.ParameterException, f)  # ISO 8601 durations with months are not supported
def testHasDefaultNoSection(self):
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: _value(luigi.Parameter(config_path=dict(section="foo", name="bar"))))
@with_config({"foo": {}})
def testHasDefaultNoValue(self):
self.assertRaises(luigi.parameter.MissingParameterException,
lambda: _value(luigi.Parameter(config_path=dict(section="foo", name="bar"))))
@with_config({"foo": {"bar": "baz"}})
def testHasDefaultWithBoth(self):
self.assertTrue(_value(luigi.Parameter(config_path=dict(section="foo", name="bar"))))
@with_config({"foo": {"bar": "baz"}})
def testWithDefault(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), default='blah')
self.assertEqual('baz', _value(p)) # config overrides default
def testWithDefaultAndMissing(self):
p = luigi.Parameter(config_path=dict(section="foo", name="bar"), default='blah')
self.assertEqual('blah', _value(p))
    @with_config({"LocalA": {"p": "p_default"}})
    def testDefaultFromTaskName(self):
        # A parameter with no default falls back to the config section
        # named after its task class ("LocalA" here).
        class LocalA(luigi.Task):
            p = luigi.Parameter()
        self.assertEqual("p_default", LocalA().p)
        self.assertEqual("boo", LocalA(p="boo").p)
    @with_config({"LocalA": {"p": "999"}})
    def testDefaultFromTaskNameInt(self):
        # Task-section config values are parsed with the parameter's type.
        class LocalA(luigi.Task):
            p = luigi.IntParameter()
        self.assertEqual(999, LocalA().p)
        self.assertEqual(777, LocalA(p=777).p)
    @with_config({"LocalA": {"p": "p_default"}, "foo": {"bar": "baz"}})
    def testDefaultFromConfigWithTaskNameToo(self):
        # When both the task-name section and an explicit config_path have a
        # value, the task-name section wins.
        class LocalA(luigi.Task):
            p = luigi.Parameter(config_path=dict(section="foo", name="bar"))
        self.assertEqual("p_default", LocalA().p)
        self.assertEqual("boo", LocalA(p="boo").p)
    @with_config({"LocalA": {"p": "p_default_2"}})
    def testDefaultFromTaskNameWithDefault(self):
        # Task-section config also outranks a declared default.
        class LocalA(luigi.Task):
            p = luigi.Parameter(default="banana")
        self.assertEqual("p_default_2", LocalA().p)
        self.assertEqual("boo_2", LocalA(p="boo_2").p)
    @with_config({"MyClass": {"p_wohoo": "p_default_3"}})
    def testWithLongParameterName(self):
        # Multi-word (underscored) parameter names resolve from config too.
        class MyClass(luigi.Task):
            p_wohoo = luigi.Parameter(default="banana")
        self.assertEqual("p_default_3", MyClass().p_wohoo)
        self.assertEqual("boo_2", MyClass(p_wohoo="boo_2").p_wohoo)
    @with_config({"RangeDaily": {"days_back": "123"}})
    def testSettingOtherMember(self):
        # Config can also set parameters of wrapper tasks such as RangeDaily.
        class LocalA(luigi.Task):
            pass
        self.assertEqual(123, luigi.tools.range.RangeDaily(of=LocalA).days_back)
        self.assertEqual(70, luigi.tools.range.RangeDaily(of=LocalA, days_back=70).days_back)
    @with_config({"MyClass": {"p_not_global": "123"}})
    def testCommandLineWithDefault(self):
        """
        Verify that we also read from the config when we build tasks from the
        command line parsers.
        """
        class MyClass(luigi.Task):
            p_not_global = luigi.Parameter(default='banana')
            def complete(self):
                import sys
                luigi.configuration.get_config().write(sys.stdout)
                # complete() only succeeds when the config value was applied.
                if self.p_not_global != "123":
                    raise ValueError("The parameter didn't get set!!")
                return True
            def run(self):
                pass
        self.assertTrue(self.run_locally(['MyClass']))
        # Overriding on the command line beats the config value, so
        # complete() raises and the run fails.
        self.assertFalse(self.run_locally(['MyClass', '--p-not-global', '124']))
        self.assertFalse(self.run_locally(['MyClass', '--MyClass-p-not-global', '124']))
    @with_config({"MyClass2": {"p_not_global_no_default": "123"}})
    def testCommandLineNoDefault(self):
        """
        Verify that we also read from the config when we build tasks from the
        command line parsers.
        Same as testCommandLineWithDefault, but the parameter has no default.
        """
        class MyClass2(luigi.Task):
            """ TODO: Make luigi clean it's register for tests. Hate this 2 dance. """
            p_not_global_no_default = luigi.Parameter()
            def complete(self):
                import sys
                luigi.configuration.get_config().write(sys.stdout)
                luigi.configuration.get_config().write(sys.stdout)
                # complete() only succeeds when the config value was applied.
                if self.p_not_global_no_default != "123":
                    raise ValueError("The parameter didn't get set!!")
                return True
            def run(self):
                pass
        self.assertTrue(self.run_locally(['MyClass2']))
        # Command-line overrides make complete() raise -> run fails.
        self.assertFalse(self.run_locally(['MyClass2', '--p-not-global-no-default', '124']))
        self.assertFalse(self.run_locally(['MyClass2', '--MyClass2-p-not-global-no-default', '124']))
    @with_config({"mynamespace.A": {"p": "999"}})
    def testWithNamespaceConfig(self):
        # Config section names include the task namespace prefix.
        class A(luigi.Task):
            task_namespace = 'mynamespace'
            p = luigi.IntParameter()
        self.assertEqual(999, A().p)
        self.assertEqual(777, A(p=777).p)
    def testWithNamespaceCli(self):
        # Task-scoped CLI flags must include the namespace prefix:
        # "--mynamespace.A-p" works, "--A-p" does not.
        class A(luigi.Task):
            task_namespace = 'mynamespace'
            p = luigi.IntParameter(default=100)
            expected = luigi.IntParameter()
            def complete(self):
                if self.p != self.expected:
                    raise ValueError
                return True
        self.assertTrue(self.run_locally_split('mynamespace.A --expected 100'))
        # TODO(arash): Why is `--p 200` hanging with multiprocessing stuff?
        # self.assertTrue(self.run_locally_split('mynamespace.A --p 200 --expected 200'))
        self.assertTrue(self.run_locally_split('mynamespace.A --mynamespace.A-p 200 --expected 200'))
        self.assertFalse(self.run_locally_split('mynamespace.A --A-p 200 --expected 200'))
def testListWithNamespaceCli(self):
class A(luigi.Task):
task_namespace = 'mynamespace'
l_param = luigi.ListParameter(default=[1, 2, 3])
expected = luigi.ListParameter()
def complete(self):
if self.l_param != self.expected:
raise ValueError
return True
self.assertTrue(self.run_locally_split('mynamespace.A --expected [1,2,3]'))
self.assertTrue(self.run_locally_split('mynamespace.A --mynamespace.A-l [1,2,3] --expected [1,2,3]'))
    def testTupleWithNamespaceCli(self):
        # Tuple parameters parse their nested-parenthesis literal form from
        # the command line, with the namespaced task-scoped flag.
        class A(luigi.Task):
            task_namespace = 'mynamespace'
            t = luigi.TupleParameter(default=((1, 2), (3, 4)))
            expected = luigi.TupleParameter()
            def complete(self):
                if self.t != self.expected:
                    raise ValueError
                return True
        self.assertTrue(self.run_locally_split('mynamespace.A --expected ((1,2),(3,4))'))
        self.assertTrue(self.run_locally_split('mynamespace.A --mynamespace.A-t ((1,2),(3,4)) --expected ((1,2),(3,4))'))
@with_config({"foo": {"bar": "[1,2,3]"}})
def testListConfig(self):
self.assertTrue(_value(luigi.ListParameter(config_path=dict(section="foo", name="bar"))))
@with_config({"foo": {"bar": "((1,2),(3,4))"}})
def testTupleConfig(self):
self.assertTrue(_value(luigi.TupleParameter(config_path=dict(section="foo", name="bar"))))
@with_config({"foo": {"bar": "-3"}})
def testNumericalParameter(self):
p = luigi.NumericalParameter(min_value=-3, max_value=7, var_type=int, config_path=dict(section="foo", name="bar"))
self.assertEqual(-3, _value(p))
@with_config({"foo": {"bar": "3"}})
def testChoiceParameter(self):
p = luigi.ChoiceParameter(var_type=int, choices=[1, 2, 3], config_path=dict(section="foo", name="bar"))
self.assertEqual(3, _value(p))
class OverrideEnvStuff(LuigiTestCase):
    """Deprecated and dashed spellings of the scheduler port setting."""
    @with_config({"core": {"default-scheduler-port": '6543'}})
    def testOverrideSchedulerPort(self):
        # The legacy key still applies, but warns (checkable on py3 only).
        if six.PY3:
            with self.assertWarnsRegex(DeprecationWarning, r'default-scheduler-port is deprecated'):
                params = luigi.interface.core()
        else:
            params = luigi.interface.core()
        self.assertEqual(6543, params.scheduler_port)
    @with_config({"core": {"scheduler-port": '6544'}})
    def testOverrideSchedulerPort2(self):
        # Dashed spelling works but is discouraged.
        if six.PY3:
            with self.assertWarnsRegex(DeprecationWarning, r'scheduler_port \(with dashes\) should be avoided'):
                params = luigi.interface.core()
        else:
            params = luigi.interface.core()
        self.assertEqual(6544, params.scheduler_port)
    @with_config({"core": {"scheduler_port": '6545'}})
    def testOverrideSchedulerPort3(self):
        # Canonical underscore spelling applies without a deprecation path.
        params = luigi.interface.core()
        self.assertEqual(6545, params.scheduler_port)
class TestSerializeDateParameters(LuigiTestCase):
    """Serialization of date-based parameter types."""
    def testSerialize(self):
        # Each parameter type serializes only the precision it represents.
        sample_date = datetime.date(2013, 2, 3)
        self.assertEqual('2013-02-03', luigi.DateParameter().serialize(sample_date))
        self.assertEqual('2013', luigi.YearParameter().serialize(sample_date))
        self.assertEqual('2013-02', luigi.MonthParameter().serialize(sample_date))
        sample_dt = datetime.datetime(2013, 2, 3, 4, 5)
        self.assertEqual('2013-02-03T04', luigi.DateHourParameter().serialize(sample_dt))
class TestSerializeTimeDeltaParameters(LuigiTestCase):
    """Serialization of TimeDeltaParameter values."""
    def testSerialize(self):
        # All units are always spelled out, even when zero.
        full = timedelta(weeks=5, days=4, hours=3, minutes=2, seconds=1)
        self.assertEqual('5 w 4 d 3 h 2 m 1 s', luigi.TimeDeltaParameter().serialize(full))
        zero = timedelta(seconds=0)
        self.assertEqual('0 w 0 d 0 h 0 m 0 s', luigi.TimeDeltaParameter().serialize(zero))
class TestTaskParameter(LuigiTestCase):
    """Tests for TaskParameter: parameters whose value is a Task class."""
    def testUsage(self):
        class MetaTask(luigi.Task):
            task_namespace = "mynamespace"
            a = luigi.TaskParameter()
            def run(self):
                # Record which task class was passed, for the assertions below.
                self.__class__.saved_value = self.a
        class OtherTask(luigi.Task):
            task_namespace = "other_namespace"
        # Passing a task class directly simply stores it.
        self.assertEqual(MetaTask(a=MetaTask).a, MetaTask)
        self.assertEqual(MetaTask(a=OtherTask).a, OtherTask)
        # So I first thought this "should" work, but actually it should not,
        # because it should not need to parse values known at run-time
        self.assertRaises(AttributeError,
                          lambda: MetaTask(a="mynamespace.MetaTask"))
        # But it should be able to parse command line arguments
        self.assertRaises(luigi.task_register.TaskClassNotFoundException,
                          lambda: (self.run_locally_split('mynamespace.MetaTask --a blah')))
        self.assertRaises(luigi.task_register.TaskClassNotFoundException,
                          lambda: (self.run_locally_split('mynamespace.MetaTask --a Taskk')))
        self.assertTrue(self.run_locally_split('mynamespace.MetaTask --a mynamespace.MetaTask'))
        self.assertEqual(MetaTask.saved_value, MetaTask)
        self.assertTrue(self.run_locally_split('mynamespace.MetaTask --a other_namespace.OtherTask'))
        self.assertEqual(MetaTask.saved_value, OtherTask)
    def testSerialize(self):
        class OtherTask(luigi.Task):
            def complete(self):
                return True
        class DepTask(luigi.Task):
            dep = luigi.TaskParameter()
            ran = False
            def complete(self):
                return self.__class__.ran
            def requires(self):
                return self.dep()
            def run(self):
                self.__class__.ran = True
        class MainTask(luigi.Task):
            def run(self):
                yield DepTask(dep=OtherTask)
        # OtherTask is serialized because it is used as an argument for DepTask.
        self.assertTrue(self.run_locally(['MainTask']))
class NewStyleParameters822Test(LuigiTestCase):
    """
    I bet these tests created at 2015-03-08 are redundant by now (Oct 2015).
    But maintaining them anyway, just in case I have overlooked something.
    """
    # See https://github.com/spotify/luigi/issues/822
    def test_subclasses(self):
        class BarBaseClass(luigi.Task):
            x = luigi.Parameter(default='bar_base_default')
        class BarSubClass(BarBaseClass):
            pass
        # A task-scoped flag on the base class also applies to the subclass.
        in_parse(['BarSubClass', '--x', 'xyz', '--BarBaseClass-x', 'xyz'],
                 lambda task: self.assertEqual(task.x, 'xyz'))
        # https://github.com/spotify/luigi/issues/822#issuecomment-77782714
        in_parse(['BarBaseClass', '--BarBaseClass-x', 'xyz'],
                 lambda task: self.assertEqual(task.x, 'xyz'))
class LocalParameters1304Test(LuigiTestCase):
    """
    It was discussed and decided that local parameters (--x) should be
    semantically different from global parameters (--MyTask-x).
    The former sets only the parsed root task, and the latter sets the
    parameter for all the tasks.
    https://github.com/spotify/luigi/issues/1304#issuecomment-148402284
    """
    def test_local_params(self):
        class MyTask(RunOnceTask):
            param1 = luigi.IntParameter()
            param2 = luigi.BoolParameter(default=False)
            def requires(self):
                # Recurse down to param1 == 0; only the root gets param2=True.
                if self.param1 > 0:
                    yield MyTask(param1=(self.param1 - 1))
            def run(self):
                assert self.param1 == 1 or not self.param2
                self.comp = True
        self.assertTrue(self.run_locally_split('MyTask --param1 1 --param2'))
    def test_local_takes_precedence(self):
        class MyTask(luigi.Task):
            param = luigi.IntParameter()
            def complete(self):
                return False
            def run(self):
                # The local (--param 5) wins over the global (--MyTask-param 6).
                assert self.param == 5
        self.assertTrue(self.run_locally_split('MyTask --param 5 --MyTask-param 6'))
    def test_local_only_affects_root(self):
        class MyTask(RunOnceTask):
            param = luigi.IntParameter(default=3)
            def requires(self):
                # Dependencies see the global value (6), never the local (5).
                assert self.param != 3
                if self.param == 5:
                    yield MyTask()
        # It would be a cyclic dependency if local took precedence
        self.assertTrue(self.run_locally_split('MyTask --param 5 --MyTask-param 6'))
    def test_range_doesnt_propagate_args(self):
        """
        Ensure that ``--task Range --of Blah --blah-arg 123`` doesn't work.
        This will of course not work unless support is explicitly added for it.
        But being a bit paranoid here and adding this test case so that if
        somebody decides to add it in the future, they'll be redirected to the
        discussion in #1304
        """
        class Blah(RunOnceTask):
            date = luigi.DateParameter()
            blah_arg = luigi.IntParameter()
        # The SystemExit is assumed to be thrown by argparse
        self.assertRaises(SystemExit, self.run_locally_split, 'RangeDailyBase --of Blah --start 2015-01-01 --task-limit 1 --blah-arg 123')
        self.assertTrue(self.run_locally_split('RangeDailyBase --of Blah --start 2015-01-01 --task-limit 1 --Blah-blah-arg 123'))
class TaskAsParameterName1335Test(LuigiTestCase):
    """Regression test for #1335: a parameter may be named "task"."""
    def test_parameter_can_be_named_task(self):
        class MyTask(luigi.Task):
            # Indeed, this is not the most realistic example, but still ...
            task = luigi.IntParameter()
        self.assertTrue(self.run_locally_split('MyTask --task 5'))
|
|
import os
import random
import time
import WifiConnUtility
from NativeLog import NativeLog
from TCAction import TCActionBase
from Utility import Encoding
from Utility import MakeFolder
# Bit flags naming the steps one test iteration may run (see execute()).
STEPS = {"SCAN1": 0x01, "JAP": 0x02, "SCAN2": 0x04, "RECONNECT": 0x08}
# Keys of one AP-configuration dict (zipped with rows from the test case).
AP_PROP = ("ssid", "ssid_len", "pwd",
           "pwd_len", "channel", "enc", "apc")
# Supported join-AP test variants.
JAP_TEST_METHOD = ("Normal", "OFF_ON", "OFF", "WRONG_PROP")
# Supported reconnect test variants.
RECONNECT_TEST_METHOD = ("OFF_ON", "OFF")
# Performance logs are written below this folder (relative to log path).
LOG_FOLDER = os.path.join("Performance", "JAP")
SSID_LEN_RANGE = (1, 32)  # in bytes
ENC_TYPE = (0, 2, 3, 4)  # do not support WEP for 8266 soft AP
# Allowed password length [min, max] per encryption type (0 = open network).
PWD_RANGE = {0: [0, 0],
             1: [5, 5],
             2: [8, 63],
             3: [8, 63],
             4: [8, 63],
             }
class TestCase(TCActionBase.CommonTCActionBase):
    """JAP (join AP) performance test case.

    Repeatedly sets up an AP (randomly generated on the target, or a real AP
    from the configured list), optionally scans, joins it and optionally
    reconnects, logging timing statistics to a performance log file.
    """
    def __init__(self, test_case, test_env, timeout=30, log_path=TCActionBase.LOG_PATH):
        TCActionBase.CommonTCActionBase.__init__(self, test_case, test_env, timeout=timeout, log_path=log_path)
        # default value for optional configurable params
        self.performance_folder_path = log_path
        self.pwd_len = [8, 64]
        self.step_config = [0x03, 0x01, 0x02, 0x0B, 0x0F]
        self.join_test_method = ["Normal"]
        self.join_delay = [[1.5, 5], [1.5, 5]]
        self.reconnect_test_method = ["OFF_ON"]
        self.reconnect_delay = [[1.5, 5], [1.5, 6]]
        # load param from excel: each non-dummy cell is executed as a
        # "self.<attr> = <value>" assignment overriding a default above
        cmd_set = test_case["cmd set"]
        for i in range(1, len(cmd_set)):
            if cmd_set[i][0] != "dummy" and cmd_set[i][0] != "":
                cmd_string = "self." + cmd_set[i][0]
                exec cmd_string
        # read AP list: each non-empty cell is a tuple of AP_PROP values
        self.ap_list = []
        for i in range(1, len(cmd_set)):
            for j in range(len(cmd_set[i][1])):
                if cmd_set[i][1][j] != "":
                    cmd_string = "self.ap_list.append(dict(zip(AP_PROP, " + cmd_set[i][1][j] + ")))"
                    exec cmd_string
        folder_path = MakeFolder.make_folder(self.performance_folder_path + "\\" + LOG_FOLDER)
        file_name = "JAP_log_%s.log" % (time.strftime("%m%d%H%M%S", time.localtime()))
        self._performance_log_file = os.path.join(folder_path, file_name)
        # test statistics
        self._succeed_count = self._fail_count = self._time_cost_count = 0
        self._total_time = self._longest_time = 0
        self.result_cntx = TCActionBase.ResultCheckContext(self, test_env, self.tc_name)
        # get target type "SSC" or "AT" (per target, based on available ports)
        self.target_type = ["SSC" if test_env.get_port_by_name("AT1") is None else "AT"]
        self.target_type.append("SSC" if test_env.get_port_by_name("AT2") is None else "AT")
        self._utility = WifiConnUtility.WifiConnUtility(self)
        pass
    def _generate_random_ap_prop(self):
        """Generate one random AP property dict (ssid/pwd/channel/enc/...)."""
        ap_prop = dict.fromkeys(AP_PROP)
        # generate target ap_value
        ap_prop["ssid_len"] = random.randint(SSID_LEN_RANGE[0], SSID_LEN_RANGE[1])
        ap_prop["channel"] = random.choice(range(1, 14))
        ap_prop["enc"] = random.choice(ENC_TYPE)
        ap_prop["pwd_len"] = random.randint(PWD_RANGE[ap_prop["enc"]][0], PWD_RANGE[ap_prop["enc"]][1])
        # generate string; UTF-8 only when both targets speak AT
        if self.target_type[0] == self.target_type[1] == "AT":
            ap_prop["ssid"] = Encoding.generate_random_utf8_str(ap_prop["ssid_len"])
            ap_prop["pwd"] = Encoding.generate_random_utf8_str(ap_prop["pwd_len"])
            # NativeLog.add_trace_info("ssid hex is : %x" % ap_prop["ssid"])
            # NativeLog.add_trace_info("pwd hex is : %x" % ap_prop["pwd"])
        else:
            ap_prop["ssid"] = Encoding.generate_random_printable_str(ap_prop["ssid_len"])
            ap_prop["pwd"] = Encoding.generate_random_printable_str(ap_prop["pwd_len"])
        return ap_prop
    def _logging_performance(self, ssid, join_method="Normal", time_cost=0):
        """Append one join result (and timing stats) to the performance log.

        time_cost is False on failure; only "Normal" joins contribute to the
        average/longest time statistics.
        """
        # log performance to performance log file
        with open(self._performance_log_file, "ab+") as f:
            # log time and ssid
            f.write("\r\n[%s]:\r\n[AP name] %s\r\n" %
                    (time.strftime("%m-%d %H:%M:%S", time.localtime()), ssid))
            if join_method == "Normal" or join_method == "OFF_ON":
                if time_cost is not False:
                    self._succeed_count += 1
                    if join_method == "Normal":
                        f.write("[Succeed][%f]\r\n" % time_cost)
                        # py2 idiom for max(); keeps the longest join time
                        self._longest_time = (time_cost > self._longest_time and
                                              [time_cost] or [self._longest_time])[0]
                        self._time_cost_count += 1
                        self._total_time += time_cost
                    else:
                        f.write("[Succeed][%s]\r\n" % join_method)
                else:
                    self._fail_count += 1
                    f.write("[Fail][%s]\r\n" % join_method)
        pass
    def _logging_fail_step(self, ssid, step):
        """Log a failure of a non-join step (setup/scan/reconnect)."""
        with open(self._performance_log_file, "ab+") as f:
            f.write("\r\n[%s]:\r\n[AP name] %s\r\n" %
                    (time.strftime("%m-%d %H:%M:%S", time.localtime()), ssid))
            f.write("[Fail][%s]\r\n" % step)
        pass
    def _generate_performance_report(self):
        """Append summary statistics (pass rate, timings) to the log."""
        with open(self._performance_log_file, "ab+") as f:
            f.write("[Test report] Succeed: %d\r\n" % self._succeed_count)
            f.write("[Test report] Failed: %d\r\n" % self._fail_count)
            if self._succeed_count > 0 or self._fail_count > 0:
                # NOTE(review): integer division under py2 — pass rate is
                # truncated to 0 or 1; confirm whether a float was intended.
                f.write("[Test report] Pass Rate: %f\r\n" %
                        (self._succeed_count/(self._fail_count+self._succeed_count)))
            if self._time_cost_count > 0:
                f.write("[Test report] Average time: %f\r\n" % (self._total_time/self._time_cost_count))
                f.write("[Test report] Longest time: %f\r\n" % self._longest_time)
    def execute(self):
        """Run test_count random iterations of the configured JAP steps."""
        TCActionBase.TCActionBase.execute(self)
        self.result_cntx.start()
        # mandatory configurable params
        try:
            target_ap_num = self.target_ap_num
            test_count = self.test_count
        except StandardError, e:
            NativeLog.add_trace_critical("Error configuration for WifiJAP script, error is %s" % e)
            raise StandardError("Error configuration")
        # prepare ap list: target_ap_num random "target" slots + real APs
        _ap_list = [["target", None]] * target_ap_num
        for _ap_prop in self.ap_list:
            _ap_list.append(["AP", _ap_prop])
        # set to correct mode first
        self._utility.set_mode([1, 2])
        for i in xrange(test_count):
            _ap = random.choice(_ap_list)
            # arrange ap
            _ap_type = _ap[0]
            _ap_prop = _ap[1]
            if _ap_type == "target":
                # "target" slots get freshly randomized AP properties
                _ap_prop = self._generate_random_ap_prop()
                pass
            # step 1 : mandatory step, set up AP
            if self._utility.setup_ap(_ap_type, _ap_prop) is False:
                self._logging_fail_step(_ap_prop["ssid"], "Set AP")
                NativeLog.add_prompt_trace("[Step1] setup AP Fail")
                continue
            step_config = random.choice(self.step_config)
            NativeLog.add_prompt_trace("[Step1] setup AP succeed")
            # step 2 : optional step, do scan before connect
            if step_config & STEPS["SCAN1"] != 0:  # check option
                if self._utility.do_scan(_ap_prop) is False:
                    self._logging_fail_step(_ap_prop["ssid"], "Scan before JAP")
                NativeLog.add_prompt_trace("[Step2] Scan Done")
            # step 3 : mandatory step, join AP
            if step_config & STEPS["JAP"] != 0:  # check option
                _join_test_method = random.choice(self.join_test_method)
                time_cost = self._utility.join_ap(_join_test_method, _ap_type, _ap_prop, self.join_delay)
                # log performance to performance log file
                self._logging_performance(_ap_prop["ssid"], _join_test_method, time_cost)
                if time_cost is False:
                    # do scan once to check if AP exist
                    self._utility.do_scan(_ap_prop)
                    continue
                NativeLog.add_prompt_trace("[Step3] Join AP done")
            # step 4 : optional step, scan after join AP
            if step_config & STEPS["SCAN2"] != 0:  # check option
                if self._utility.do_scan(_ap_prop) is False:
                    self._logging_fail_step(_ap_prop["ssid"], "Scan after JAP")
                NativeLog.add_prompt_trace("[Step4] Scan done")
            # step 5 : optional step, reconnect test
            if step_config & STEPS["RECONNECT"] != 0:  # check option
                _reconnect_test_method = random.choice(self.reconnect_test_method)
                if self._utility.do_reconnect(_reconnect_test_method,
                                              _ap_type, _ap_prop, self.reconnect_delay) is False:
                    self._logging_fail_step(_ap_prop["ssid"], "Reconnect")
                NativeLog.add_prompt_trace("[Step5] Reconnect done")
            # continue to next loop
            NativeLog.add_prompt_trace("[WifiJAP] Test count %d done" % i)
        # generate report and cleanup
        self._generate_performance_report()
        self.result_cntx.set_result("Succeed")
    def result_check(self, port_name, data):
        """Forward port data into the result-check context."""
        TCActionBase.CommonTCActionBase.result_check(self, port_name, data)
        self.result_cntx.append_data(port_name, data)
def main():
    # Placeholder: this module is driven by the test framework, not the CLI.
    pass
if __name__ == '__main__':
    main()
|
|
""" Tests on the DAG implementation """
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import threading
import pytest
from stacker.dag import (
DAG,
DAGValidationError,
ThreadedWalker,
UnlimitedSemaphore
)
@pytest.fixture
def empty_dag():
    """Provide a fresh, empty DAG for each test."""
    return DAG()
@pytest.fixture
def basic_dag():
    """Diamond-shaped DAG: a -> (b, c) -> d."""
    graph = DAG()
    graph.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []})
    return graph
def test_add_node(empty_dag):
    """A freshly added node has an empty edge set."""
    d = empty_dag
    d.add_node('a')
    assert d.graph == {'a': set()}
def test_transpose(basic_dag):
    """Transposing reverses every edge of the DAG."""
    reversed_dag = basic_dag.transpose()
    assert reversed_dag.graph == {
        'a': set(),
        'b': {'a'},
        'c': {'a'},
        'd': {'b', 'c'},
    }
def test_add_edge(empty_dag):
    """An edge a->b appears in a's adjacency set only."""
    d = empty_dag
    for node in ('a', 'b'):
        d.add_node(node)
    d.add_edge('a', 'b')
    assert d.graph == {'a': {'b'}, 'b': set()}
def test_from_dict(empty_dag):
    """from_dict builds the adjacency map, converting edge lists to sets."""
    d = empty_dag
    d.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []})
    assert d.graph == {'a': {'b', 'c'}, 'b': {'d'}, 'c': {'d'}, 'd': set()}
def test_reset_graph(empty_dag):
    """reset_graph drops every node."""
    d = empty_dag
    d.add_node('a')
    assert d.graph == {'a': set()}
    d.reset_graph()
    assert d.graph == {}
def test_walk(empty_dag):
    """walk visits dependencies first; b and c may come in either order."""
    d = empty_dag
    d.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []})
    visited = []
    def record(node):
        visited.append(node)
        return True
    d.walk(record)
    assert visited in (['d', 'c', 'b', 'a'], ['d', 'b', 'c', 'a'])
def test_ind_nodes(basic_dag):
    """Only 'a' has no predecessors in the diamond DAG."""
    assert basic_dag.ind_nodes() == ['a']
def test_topological_sort(empty_dag):
    """A linear chain sorts dependents before their dependencies."""
    d = empty_dag
    d.from_dict({'a': [], 'b': ['a'], 'c': ['b']})
    assert d.topological_sort() == ['c', 'b', 'a']
def test_successful_validation(basic_dag):
    """A well-formed DAG reports success from validate()."""
    # Plain truth check instead of `== True` — removes the E712 noqa.
    assert basic_dag.validate()[0]
def test_failed_validation(empty_dag):
    """A two-node cycle is rejected while building from a dict."""
    with pytest.raises(DAGValidationError):
        empty_dag.from_dict({'a': ['b'], 'b': ['a']})
def test_downstream(basic_dag):
    """Direct successors of 'a' are b and c."""
    assert set(basic_dag.downstream('a')) == {'b', 'c'}
def test_all_downstreams(basic_dag):
    """all_downstreams returns every transitive successor, in order."""
    d = basic_dag
    assert d.all_downstreams('a') == ['b', 'c', 'd']
    assert d.all_downstreams('b') == ['d']
    assert d.all_downstreams('d') == []
def test_all_downstreams_pass_graph(empty_dag):
    """Transitive successors on a second, differently-shaped graph."""
    d = empty_dag
    d.from_dict({'a': ['c'], 'b': ['d'], 'c': ['d'], 'd': []})
    assert d.all_downstreams('a') == ['c', 'd']
    assert d.all_downstreams('b') == ['d']
    assert d.all_downstreams('d') == []
def test_predecessors(basic_dag):
    """predecessors returns each node's direct upstream set."""
    d = basic_dag
    expectations = {'a': set(), 'b': {'a'}, 'c': {'a'}, 'd': {'b', 'c'}}
    for node, parents in expectations.items():
        assert set(d.predecessors(node)) == parents
def test_filter(basic_dag):
    """Filtering to (b, c) keeps them and their downstream d, drops a."""
    filtered = basic_dag.filter(['b', 'c'])
    assert filtered.graph == {'b': {'d'}, 'c': {'d'}, 'd': set()}
def test_all_leaves(basic_dag):
    """Only 'd' has no successors in the diamond DAG."""
    assert basic_dag.all_leaves() == ['d']
def test_size(basic_dag):
    """size tracks node count; deleting a node shrinks it."""
    d = basic_dag
    assert d.size() == 4
    d.delete_node('a')
    assert d.size() == 3
def test_transitive_reduction_no_reduction(empty_dag):
    """A diamond has no redundant edges, so reduction is a no-op."""
    d = empty_dag
    d.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []})
    d.transitive_reduction()
    assert d.graph == {'a': {'b', 'c'}, 'b': {'d'}, 'c': {'d'}, 'd': set()}
def test_transitive_reduction(empty_dag):
    """Redundant shortcut edges are removed (Wikipedia Tred-G example)."""
    d = empty_dag
    # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-G.svg
    d.from_dict({
        'a': ['b', 'c', 'd', 'e'],
        'b': ['d'],
        'c': ['d', 'e'],
        'd': ['e'],
        'e': [],
    })
    d.transitive_reduction()
    # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-Gprime.svg
    assert d.graph == {
        'a': {'b', 'c'},
        'b': {'d'},
        'c': {'d'},
        'd': {'e'},
        'e': set(),
    }
def test_transitive_deep_reduction(empty_dag):
    """A shortcut over a multi-hop chain is removed."""
    d = empty_dag
    # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-G.svg
    d.from_dict({'a': ['b', 'd'], 'b': ['c'], 'c': ['d'], 'd': []})
    d.transitive_reduction()
    # https://en.wikipedia.org/wiki/Transitive_reduction#/media/File:Tred-Gprime.svg
    assert d.graph == {'a': {'b'}, 'b': {'c'}, 'c': {'d'}, 'd': set()}
def test_threaded_walker(empty_dag):
    """The threaded walker also visits dependencies before dependents."""
    walker = ThreadedWalker(UnlimitedSemaphore())
    d = empty_dag
    # b and c may execute concurrently.
    d.from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []})
    lock = threading.Lock()  # the visited list is appended from worker threads
    visited = []
    def record(node):
        with lock:
            visited.append(node)
        return True
    walker.walk(d, record)
    assert visited in (['d', 'c', 'b', 'a'], ['d', 'b', 'c', 'a'])
|
|
##
# Copyright (c) 2005-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
CardDAV XML Support.
This module provides XML utilities for use with CardDAV.
This API is considered private to static.py and is therefore subject to
change.
See RFC 6352 (CardDAV: vCard Extensions to WebDAV).
"""
from txdav.xml.element import registerElement, dav_namespace
from txdav.xml.element import WebDAVElement, PCDATAElement
from txdav.xml.element import WebDAVEmptyElement, WebDAVTextElement
from txdav.xml.element import ResourceType, Collection
from twistedcaldav.config import config
from twistedcaldav.vcard import Component
##
# CardDAV objects
##
carddav_namespace = "urn:ietf:params:xml:ns:carddav"
carddav_compliance = (
"addressbook",
)
class CardDAVElement (WebDAVElement):
    """
    CardDAV XML element.
    Base class placing an element in the CardDAV XML namespace.
    """
    namespace = carddav_namespace
class CardDAVEmptyElement (WebDAVEmptyElement):
    """
    CardDAV element with no contents.
    (Empty-element base class, in the CardDAV XML namespace.)
    """
    namespace = carddav_namespace
class CardDAVTextElement (WebDAVTextElement):
    """
    CardDAV element containing PCDATA.
    (Text-element base class, in the CardDAV XML namespace.)
    """
    namespace = carddav_namespace
class CardDAVDataMixin(object):
    """
    A mixin to support accept/returning data in various formats.
    Adds content-type/version attribute handling and helpers converting
    between vCard text and Component objects.
    """
    def __init__(self, *children, **attributes):
        # Default to plain vCard 3.0 when the attributes are absent.
        if "content-type" in attributes:
            self.content_type = attributes["content-type"]
        else:
            self.content_type = "text/vcard"
        if "version" in attributes:
            self.version = attributes["version"]
        else:
            self.version = "3.0"
        super(CardDAVDataMixin, self).__init__(*children, **attributes)
    def verifyTypeVersion(self):
        """
        Make sure any content-type and version matches at least one supported set.
        @return: C{True} if there is at least one match, C{False} otherwise.
        """
        allowedTypes = set()
        allowedTypes.add(("text/vcard", "3.0",))
        # JSON vCards are only accepted when enabled in the server config.
        if config.EnableJSONData:
            allowedTypes.add(("application/vcard+json", "3.0",))
        for format, version in allowedTypes:
            if (format == self.content_type) and (version == self.version):
                return True
        return False
    @classmethod
    def fromAddress(clazz, address, format=None):
        """
        Build an element from vCard text or a vCard Component.
        @param address: a string of vCard data, or a L{Component}.
        @param format: optional MIME type; recorded as "content-type" only
            when it differs from the default "text/vcard".
        @raise ValueError: for empty data or a non-address argument.
        """
        attrs = {}
        if format is not None and format != "text/vcard":
            attrs["content-type"] = format
        if isinstance(address, str):
            if not address:
                raise ValueError("Missing address data")
            return clazz(PCDATAElement(address), **attrs)
        elif isinstance(address, Component):
            assert address.name() == "VCARD", "Not a vCard: %r" % (address,)
            return clazz(PCDATAElement(address.getText(format)), **attrs)
        else:
            raise ValueError("Not an address: %s" % (address,))
    fromTextData = fromAddress
    fromComponent = fromAddress
    def address(self):
        """
        Returns an address component derived from this element.
        """
        data = self.addressData()
        if data:
            return Component.fromString(data, format=self.content_type)
        else:
            return None
    generateComponent = address
    def addressData(self):
        """
        Returns the address data derived from this element.
        Returns None when the single child is not PCDATA.
        """
        for data in self.children:
            if not isinstance(data, PCDATAElement):
                return None
            else:
                # We guaranteed in __init__() that there is only one child...
                break
        return str(data)
    textData = addressData
@registerElement
class AddressBookHomeSet (CardDAVElement):
    """
    The address book collections URLs for this principal.
    (CardDAV, RFC 6352 section 7.1.1)
    """
    name = "addressbook-home-set"
    hidden = True
    # Any number of DAV:href children, one per home collection URL.
    allowed_children = {(dav_namespace, "href"): (0, None)}
@registerElement
class AddressBookDescription (CardDAVTextElement):
    """
    Provides a human-readable description of what this address book collection
    represents.
    (CardDAV, RFC 6352 section 6.2.1)
    """
    name = "addressbook-description"
    hidden = True
    # May be protected; but we'll let the client set this if they like.
@registerElement
class SupportedAddressData (CardDAVElement):
    """
    Specifies restrictions on an address book collection.
    (CardDAV, RFC 6352 section 6.2.2)
    """
    name = "supported-address-data"
    hidden = True
    protected = True
    # Zero or more address-data-type children describing supported formats.
    allowed_children = {(carddav_namespace, "address-data-type"): (0, None)}
@registerElement
class MaxResourceSize (CardDAVTextElement):
    """
    Specifies restrictions on an address book collection.
    (CardDAV, RFC 6352 section 6.2.3: maximum resource size in octets)
    """
    name = "max-resource-size"
    hidden = True
    protected = True
@registerElement
class AddressBook (CardDAVEmptyElement):
    """
    Denotes an address book collection.
    (CardDAV, RFC 6352 sections 5.2, 10.1)
    """
    name = "addressbook"
@registerElement
class AddressBookQuery (CardDAVElement):
    """
    Defines a report for querying address book data.
    (CardDAV, RFC 6352 section 10.3)
    """
    name = "addressbook-query"
    allowed_children = {
        (dav_namespace, "allprop"): (0, None),
        (dav_namespace, "propname"): (0, None),
        (dav_namespace, "prop"): (0, None),
        (carddav_namespace, "filter"): (0, 1), # Actually (1, 1) unless element is empty
        (carddav_namespace, "limit"): (0, None),
    }
    def __init__(self, *children, **attributes):
        """
        Sort child elements into self.props / self.filter / self.limit,
        enforcing at most one property selector and requiring a filter
        whenever the element is non-empty.
        """
        super(AddressBookQuery, self).__init__(*children, **attributes)
        props = None
        filter = None
        limit = None
        for child in self.children:
            qname = child.qname()
            if qname in (
                (dav_namespace, "allprop"),
                (dav_namespace, "propname"),
                (dav_namespace, "prop"),
            ):
                # NOTE(review): these children are DAV:-namespace elements,
                # though the error text labels them CardDAV: — confirm wording.
                if props is not None:
                    raise ValueError("Only one of CardDAV:allprop, CardDAV:propname, CardDAV:prop allowed")
                props = child
            elif qname == (carddav_namespace, "filter"):
                filter = child
            elif qname == (carddav_namespace, "limit"):
                # type check (NResults is presumably defined elsewhere in
                # this module — not visible here)
                child.childOfType(NResults)
                limit = child
            else:
                raise AssertionError("We shouldn't be here")
        if len(self.children) > 0:
            if filter is None:
                raise ValueError("CARDDAV:filter required")
        self.props = props
        self.filter = filter
        self.limit = limit
@registerElement
class AddressDataType (CardDAVEmptyElement):
    """
    Defines which parts of a address component object should be returned by a
    report.
    (CardDAV, RFC 6352 section 6.2.2)
    """
    name = "address-data-type"

    # Both attributes are optional (False => not required).
    allowed_attributes = {
        "content-type": False,
        "version": False,
    }
@registerElement
class AddressData (CardDAVDataMixin, CardDAVElement):
    """
    Defines which parts of a address component object should be returned by a
    report.
    (CardDAV, RFC 6352 section 10.4)
    """
    name = "address-data"

    allowed_children = {
        (carddav_namespace, "allprop"): (0, 1),
        (carddav_namespace, "prop"): (0, None),
        PCDATAElement: (0, None),
    }
    allowed_attributes = {
        "content-type": False,
        "version": False,
    }

    def __init__(self, *children, **attributes):
        """
        Sort the children into a property selection (an AllProperties element
        or a list of Property elements, stored as ``self.properties``) and/or
        inline vCard text (PCDATA), folding all PCDATA children into one.

        @raise ValueError: if allprop is combined with prop, or if a property
            selection is combined with non-whitespace PCDATA.
        """
        super(AddressData, self).__init__(*children, **attributes)

        properties = None
        data = None

        for child in self.children:
            qname = child.qname()

            if qname == (carddav_namespace, "allprop"):
                if properties is not None:
                    # allprop after another allprop, or after any prop
                    raise ValueError(
                        "CardDAV:allprop and CardDAV:prop may not be combined"
                    )
                properties = child

            elif qname == (carddav_namespace, "prop"):
                try:
                    properties.append(child)
                except AttributeError:
                    # EAFP: properties is either None (start a new list) or
                    # an allprop element, which has no append().
                    if properties is None:
                        properties = [child]
                    else:
                        raise ValueError("CardDAV:allprop and CardDAV:prop may not be combined")

            elif isinstance(child, PCDATAElement):
                # Accumulate all PCDATA runs into a single element.
                if data is None:
                    data = child
                else:
                    data += child

            else:
                raise AssertionError("We shouldn't be here")

        self.properties = properties

        if data is not None:
            try:
                if properties is not None:
                    raise ValueError("Only one of allprop, prop (%r) or PCDATA (%r) allowed" % (properties, str(data)))
            except ValueError:
                # Whitespace-only PCDATA alongside a property selection is
                # just formatting; swallow the error in that case.
                if not data.isWhitespace():
                    raise
            else:
                # Since we've already combined PCDATA elements, we may as
                # well optimize the originals away
                self.children = (data,)
@registerElement
class AllProperties (CardDAVEmptyElement):
    """
    Specifies that all properties shall be returned.
    (CardDAV, RFC 6352 section 10.4.1)
    """
    # Empty marker element used inside CARDDAV:address-data.
    name = "allprop"
@registerElement
class Property (CardDAVEmptyElement):
    """
    Defines a property to return in a response.
    (CardDAV, RFC 6352 section 10.4.2)
    """
    name = "prop"

    allowed_attributes = {
        "name": True,
        "novalue": False,
    }

    def __init__(self, *children, **attributes):
        """
        Record the required 'name' attribute and the optional 'novalue'
        flag ("yes"/"no", defaulting to no-value-off).
        """
        super(Property, self).__init__(*children, **attributes)

        self.property_name = attributes["name"]

        # Absent novalue behaves exactly like novalue="no".
        novalue = attributes.get("novalue", "no")
        if novalue not in ("yes", "no"):
            raise ValueError("Invalid novalue: %r" % (novalue,))
        self.novalue = (novalue == "yes")
@registerElement
class Filter (CardDAVElement):
    """
    Determines which matching components are returned.
    (CardDAV, RFC 6352 section 10.5)
    """
    name = "filter"

    # Zero or more prop-filter children; optional "test" attribute
    # (anyof/allof combination of the child filters).
    allowed_children = {(carddav_namespace, "prop-filter"): (0, None)}
    allowed_attributes = {"test": False}
@registerElement
class PropertyFilter (CardDAVElement):
    """
    Limits a search to specific properties.
    (CardDAV, RFC 6352 section 10.5.1)
    """
    name = "prop-filter"

    allowed_children = {
        (carddav_namespace, "is-not-defined"): (0, 1),
        (carddav_namespace, "text-match"): (0, None),
        (carddav_namespace, "param-filter"): (0, None),
    }
    # "name" (the vCard property to test) is required; "test" is optional.
    allowed_attributes = {
        "name": True,
        "test": False,
    }
@registerElement
class ParameterFilter (CardDAVElement):
    """
    Limits a search to specific parameters.
    (CardDAV, RFC 6352 section 10.5.2)
    """
    name = "param-filter"

    allowed_children = {
        (carddav_namespace, "is-not-defined"): (0, 1),
        (carddav_namespace, "text-match"): (0, 1),
    }
    # The parameter name being tested is required.
    allowed_attributes = {"name": True}
@registerElement
class Limit (WebDAVElement):
    """
    Client supplied limit for reports.
    """
    # Base class is generic WebDAVElement, so the CardDAV namespace must be
    # set explicitly here.
    namespace = carddav_namespace
    name = "limit"

    # Exactly one CARDDAV:nresults child is required.
    allowed_children = {
        (carddav_namespace, "nresults"): (1, 1),
    }
@registerElement
class NResults (WebDAVTextElement):
    """
    Number of results limit.
    """
    # Base class is generic WebDAVTextElement, so the CardDAV namespace must
    # be set explicitly here.
    namespace = carddav_namespace
    name = "nresults"
@registerElement
class IsNotDefined (CardDAVEmptyElement):
    """
    Specifies that the named vCard item does not exist.
    (CardDAV, RFC 6352 section 10.5.3)
    """
    # Empty marker element used inside prop-filter/param-filter.
    name = "is-not-defined"
@registerElement
class TextMatch (CardDAVTextElement):
    """
    Specifies a substring match on a property or parameter value.
    (CardDAV, RFC 6352 section 10.5.4)
    """
    name = "text-match"

    def fromString(clazz, string): #@NoSelf
        """
        Build a TextMatch whose PCDATA content is the given value.

        A C{str} is used as-is; C{unicode} is UTF-8 encoded (NOTE(review):
        Python 2 only -- `unicode` is undefined on Python 3); anything else
        is coerced via str().
        """
        if type(string) is str:
            return clazz(PCDATAElement(string))
        elif type(string) is unicode:
            return clazz(PCDATAElement(string.encode("utf-8")))
        else:
            return clazz(PCDATAElement(str(string)))

    # Old-style classmethod registration (pre-decorator idiom).
    fromString = classmethod(fromString)

    allowed_attributes = {
        "collation": False,
        "negate-condition": False,
        "match-type": False
    }
@registerElement
class AddressBookMultiGet (CardDAVElement):
    """
    CardDAV report used to retrieve specific vCard items via their URIs.
    (CardDAV, RFC 6352 section 10.7)
    """
    name = "addressbook-multiget"

    # To allow for an empty element in a supported-report-set property we need
    # to relax the child restrictions
    allowed_children = {
        (dav_namespace, "allprop"): (0, 1),
        (dav_namespace, "propname"): (0, 1),
        (dav_namespace, "prop"): (0, 1),
        (dav_namespace, "href"): (0, None), # Actually ought to be (1, None)
    }

    def __init__(self, *children, **attributes):
        """
        Split the children into the single property-selection element
        (``self.property``) and the list of DAV:href resources to fetch
        (``self.resources``).
        """
        super(AddressBookMultiGet, self).__init__(*children, **attributes)

        selection_qnames = (
            (dav_namespace, "allprop"),
            (dav_namespace, "propname"),
            (dav_namespace, "prop"),
        )

        selected = None
        hrefs = []

        for child in self.children:
            qname = child.qname()
            if qname == (dav_namespace, "href"):
                hrefs.append(child)
            elif qname in selection_qnames:
                if selected is not None:
                    raise ValueError("Only one of DAV:allprop, DAV:propname, DAV:prop allowed")
                selected = child

        self.property = selected
        self.resources = hrefs
@registerElement
class NoUIDConflict(CardDAVElement):
    """
    CardDAV precondition used to indicate a UID conflict during PUT/COPY/MOVE.

    The conflicting resource href must be returned as a child.
    """
    name = "no-uid-conflict"

    # Exactly one DAV:href child: the resource that already owns the UID.
    allowed_children = {(dav_namespace, "href"): (1, 1)}
@registerElement
class SupportedFilter(CardDAVElement):
    """
    CardDAV precondition used to indicate an unsupported component type in a
    query filter.

    The conflicting filter elements are returned.
    """
    name = "supported-filter"

    allowed_children = {
        (carddav_namespace, "prop-filter"): (0, None),
        (carddav_namespace, "param-filter"): (0, None)
    }
@registerElement
class DirectoryGateway(CardDAVElement):
    """
    CardDAV property on a principal to indicate where the directory gateway resource is.
    """
    name = "directory-gateway"
    hidden = True
    protected = True

    # Zero or more DAV:href children, one per gateway URL.
    allowed_children = {(dav_namespace, "href"): (0, None)}
@registerElement
class Directory(CardDAVEmptyElement):
    """
    CardDAV property on a principal to indicate where the directory resource is.
    """
    # Empty marker element; used below in ResourceType.directory.
    name = "directory"
@registerElement
class DefaultAddressBookURL (CardDAVElement):
    """
    A single href indicating which addressbook is the default.
    """
    name = "default-addressbook-URL"

    # At most one DAV:href child.
    allowed_children = {(dav_namespace, "href"): (0, 1)}
##
# Extensions to ResourceType
##

def _isAddressBook(self):
    """
    Whether this DAV:resourcetype contains a CARDDAV:addressbook child.
    """
    return bool(self.childrenOfType(AddressBook))

# Monkey-patch ResourceType with an address book predicate and two
# pre-built resource-type instances for address book and directory
# collections.
ResourceType.isAddressBook = _isAddressBook

ResourceType.addressbook = ResourceType(Collection(), AddressBook())
ResourceType.directory = ResourceType(Collection(), AddressBook(), Directory())
|
|
# -*- coding: utf-8 -*-
# Copyright (c)2014 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from functools import wraps
import pyrax
from pyrax.object_storage import StorageObject
from pyrax.client import BaseClient
import pyrax.exceptions as exc
from pyrax.manager import BaseManager
from pyrax.resource import BaseResource
import pyrax.utils as utils
DEFAULT_FORMAT = "vhd"
def assure_image(fnc):
    """
    Converts a image ID passed as the 'image' parameter to a image object.
    """
    @wraps(fnc)
    def _wrapped(self, img, *args, **kwargs):
        if isinstance(img, Image):
            resolved = img
        else:
            # Must be the ID
            resolved = self._manager.get(img)
        return fnc(self, resolved, *args, **kwargs)
    return _wrapped
class Image(BaseResource):
    """
    This class represents an Image.
    """
    def __init__(self, manager, info, key=None, loaded=False,
            member_manager_class=None, tag_manager_class=None):
        super(Image, self).__init__(manager, info, key=key, loaded=loaded)
        member_manager_class = member_manager_class or ImageMemberManager
        tag_manager_class = tag_manager_class or ImageTagManager
        # Per-image sub-managers bound to the /images/<id>/members and
        # /images/<id>/tags endpoints.
        self._member_manager = member_manager_class(self.manager.api,
                resource_class=ImageMember, response_key="",
                plural_response_key="members", uri_base="images/%s/members" %
                self.id)
        self._tag_manager = tag_manager_class(self.manager.api,
                resource_class=ImageTag, response_key="",
                plural_response_key="tags", uri_base="images/%s/tags" %
                self.id)
        # Attribute names excluded from user-facing display.
        self._non_display = [
            "com.rackspace__1__build_core",
            "com.rackspace__1__build_managed",
            "com.rackspace__1__build_rackconnect",
            "com.rackspace__1__options",
            "com.rackspace__1__platform_target",
            "com.rackspace__1__release_build_date",
            "com.rackspace__1__release_id",
            "com.rackspace__1__release_version",
            "com.rackspace__1__source",
            "com.rackspace__1__visible_core",
            "com.rackspace__1__visible_managed",
            "com.rackspace__1__visible_rackconnect",
            "file",
            "instance_type_ephemeral_gb",
            "instance_type_flavorid",
            "instance_type_id",
            "instance_type_memory_mb",
            "instance_type_name",
            "instance_type_root_gb",
            "instance_type_rxtx_factor",
            "instance_type_swap",
            "instance_type_vcpu_weight",
            "instance_type_vcpus",
            "instance_uuid",
            "org.openstack__1__architecture",
            "org.openstack__1__os_distro",
            "org.openstack__1__os_version",
            "rax_activation_profile",
            "rax_managed",
            "rax_options",
            "schema",
            "self",
        ]

    def update(self, value_dict):
        """
        Accepts a dictionary of key/value pairs, where the key is an
        attribute of the image, and the value is the desired new value for
        that image.
        """
        return self.manager.update(self, value_dict)

    def change_name(self, newname):
        """
        Image name can be changed via the update() method. This is simply a
        convenience method.
        """
        return self.update({"name": newname})

    def list_members(self):
        """
        Returns a list of all Members for this image.
        """
        return self._member_manager.list()

    def get_member(self, member):
        """
        Returns the ImageMember object representing the specified member
        """
        return self._member_manager.get(member)

    def add_member(self, project_id):
        """
        Adds the project (tenant) represented by the project_id as a member of
        this image.
        """
        return self._member_manager.create(name=None, project_id=project_id)

    def delete_member(self, project_id):
        """
        Removes the project (tenant) represented by the project_id as a member
        of this image.
        """
        return self._member_manager.delete(project_id)

    def add_tag(self, tag):
        """
        Adds the tag to this image.
        """
        return self._tag_manager.add(tag)

    def delete_tag(self, tag):
        """
        Deletes the tag from this image.
        """
        return self._tag_manager.delete(tag)
class ImageMember(BaseResource):
    """
    This class represents a member (user) of an Image.
    """
    @property
    def id(self):
        # The members API identifies a member by 'member_id', not 'id'.
        return self.member_id
class ImageTag(BaseResource):
    """
    This class represents a tag for an Image.
    """
    # No tag-specific behavior; the base resource suffices.
    pass
class ImageTask(BaseResource):
    """
    This class represents a ImageTask.
    """
    # No task-specific behavior; the base resource suffices.
    pass
class ImageManager(BaseManager):
    """
    Manager class for an Image.
    """
    def _create_body(self, name, metadata=None):
        """
        Used to create the dict required to create a new image.
        """
        if metadata is None:
            body = {}
        else:
            body = {"metadata": metadata}
        return body

    def list(self, limit=None, marker=None, name=None, visibility=None,
            member_status=None, owner=None, tag=None, status=None,
            size_min=None, size_max=None, sort_key=None, sort_dir=None,
            return_raw=False):
        """
        Returns a list of resource objects. Pagination is supported through the
        optional 'marker' and 'limit' parameters. Filtering the returned value
        is possible by specifying values for any of the other parameters.
        """
        uri = "/%s" % self.uri_base
        # Only filters that were actually supplied end up in the query string.
        qs = utils.dict_to_qs(dict(limit=limit, marker=marker, name=name,
                visibility=visibility, member_status=member_status,
                owner=owner, tag=tag, status=status, size_min=size_min,
                size_max=size_max, sort_key=sort_key, sort_dir=sort_dir))
        if qs:
            uri = "%s?%s" % (uri, qs)
        return self._list(uri, return_raw=return_raw)

    def list_all(self, name=None, visibility=None, member_status=None,
            owner=None, tag=None, status=None, size_min=None, size_max=None,
            sort_key=None, sort_dir=None):
        """
        Returns all of the images in one call, rather than in paginated batches.
        """
        def strip_version(uri):
            """
            The 'next' uri contains a redundant version number. We need to
            strip it to use in the method_get() call.
            """
            pos = uri.find("/images")
            return uri[pos:]

        obj_class = self.resource_class
        resp, resp_body = self.list(name=name, visibility=visibility,
                member_status=member_status, owner=owner, tag=tag,
                status=status, size_min=size_min, size_max=size_max,
                sort_key=sort_key, sort_dir=sort_dir, return_raw=True)
        data = resp_body.get(self.plural_response_key, resp_body)
        next_uri = strip_version(resp_body.get("next", ""))
        ret = [obj_class(manager=self, info=res) for res in data if res]
        # Follow 'next' links until the service stops returning one.
        while next_uri:
            resp, resp_body = self.api.method_get(next_uri)
            data = resp_body.get(self.plural_response_key, resp_body)
            next_uri = strip_version(resp_body.get("next", ""))
            ret.extend([obj_class(manager=self, info=res)
                    for res in data if res])
        return ret

    def create(self, name, img_format=None, img_container_format=None,
            data=None, container=None, obj=None, metadata=None):
        """
        Creates a new image with the specified name. The image data can either
        be supplied directly in the 'data' parameter, or it can be an image
        stored in the object storage service. In the case of the latter, you
        can either supply the container and object names, or simply a
        StorageObject reference.

        You may specify the image and image container formats; if unspecified,
        the default of "vhd" for image format and "bare" for image container
        format will be used.

        Returns the response body from the POST.

        NOTE: This is blocking, and may take a while to complete.
        """
        if img_format is None:
            # Use the module-level default instead of duplicating the literal.
            img_format = DEFAULT_FORMAT
        if img_container_format is None:
            img_container_format = "bare"
        headers = {
            "X-Image-Meta-name": name,
            "X-Image-Meta-disk_format": img_format,
            "X-Image-Meta-container_format": img_container_format,
        }
        # NOTE(review): 'metadata' is accepted but never sent to the API --
        # confirm whether it should be folded into the request.
        if data:
            img_data = data
        else:
            # Fetch the image bytes out of object storage.
            ident = self.api.identity
            region = self.api.region_name
            clt = ident.get_client("object_store", region)
            if not isinstance(obj, StorageObject):
                obj = clt.get_object(container, obj)
            img_data = obj.fetch()
        uri = "%s/images" % self.uri_base
        resp, resp_body = self.api.method_post(uri, headers=headers,
                data=img_data)
        # Bug fix: the response was previously computed and then discarded.
        return resp_body

    def update(self, img, value_dict):
        """
        Accepts an image reference (object or ID) and dictionary of key/value
        pairs, where the key is an attribute of the image, and the value is the
        desired new value for that image. Returns the response body.

        NOTE: There is a bug in Glance where the 'add' operation returns a 409
        if the property already exists, which conflicts with the spec. So to
        get around this a fresh copy of the image must be retrieved, and the
        value of 'op' must be determined based on whether this attribute exists
        or not.
        """
        img = self.get(img)
        uri = "/%s/%s" % (self.uri_base, utils.get_id(img))
        body = []
        for key, val in value_dict.items():
            # 'replace' for existing attributes, 'add' for new ones (see the
            # Glance 409 workaround in the docstring).
            op = "replace" if key in img.__dict__ else "add"
            body.append({"op": op,
                    "path": "/%s" % key,
                    "value": val})
        headers = {"Content-Type":
                "application/openstack-images-v2.1-json-patch"}
        resp, resp_body = self.api.method_patch(uri, body=body, headers=headers)
        # Bug fix: return the updated representation instead of None.
        return resp_body

    def update_image_member(self, img_id, status):
        """
        Updates the image whose ID is given with the status specified. This
        must be called by the user whose project_id is in the members for the
        image. If called by the owner of the image, an InvalidImageMember
        exception will be raised.

        Valid values for 'status' include:
            pending
            accepted
            rejected
        Any other value will result in an InvalidImageMemberStatus exception
        being raised.
        """
        if status not in ("pending", "accepted", "rejected"):
            raise exc.InvalidImageMemberStatus("The status value must be one "
                    "of 'accepted', 'rejected', or 'pending'. Received: '%s'" %
                    status)
        api = self.api
        project_id = api.identity.tenant_id
        uri = "/%s/%s/members/%s" % (self.uri_base, img_id, project_id)
        body = {"status": status}
        try:
            resp, resp_body = self.api.method_put(uri, body=body)
        except exc.NotFound:
            raise exc.InvalidImageMember("The update member request could not "
                    "be completed. No member request for that image was found.")
        # Bug fix: surface the response instead of discarding it.
        return resp_body
class ImageMemberManager(BaseManager):
    """
    Manager class for members (users) of an Image.
    """
    def _create_body(self, name, project_id):
        """
        Used to create the dict required to add a member to this image.
        """
        body = {"member": project_id}
        return body

    def create(self, name, *args, **kwargs):
        """
        Need to wrap the default call to handle exceptions.
        """
        try:
            return super(ImageMemberManager, self).create(name, *args, **kwargs)
        except Exception as e:
            # Bug fix: only an HTTP 403 means "image is public". Exceptions
            # without an 'http_status' attribute used to raise AttributeError
            # here, masking the original error; use getattr so anything else
            # propagates unchanged.
            if getattr(e, "http_status", None) == 403:
                raise exc.UnsharableImage("You cannot share a public image.")
            else:
                raise
class ImageTagManager(BaseManager):
    """
    Manager class for Image tags.
    """
    def _create_body(self, name):
        """
        Not used; the add() method is used with a PUT request.
        """
        return {}

    def add(self, tag):
        """
        Adds the given tag to this manager's image via a PUT request and
        returns the response body.
        """
        uri = "/%s/%s" % (self.uri_base, tag)
        resp, resp_body = self.api.method_put(uri)
        # Bug fix: the response was previously computed and then discarded,
        # and the docstring was empty.
        return resp_body
class ImageTasksManager(BaseManager):
    """
    Manager class for ImageTasks.
    """
    def _create_body(self, name, img=None, cont=None, img_format=None,
            img_name=None):
        """
        Used to create a new task. Since tasks don't have names, the required
        'name' parameter is used for the type of task: 'import' or 'export'.
        """
        img = utils.get_id(img)
        cont = utils.get_name(cont)
        body = {"type": name}
        if name == "export":
            # Export: push the image into the given swift container.
            body["input"] = {
                    "image_uuid": img,
                    "receiving_swift_container": cont}
        else:
            # Import: pull "<container>/<object>" from swift, defaulting the
            # format to DEFAULT_FORMAT and the name to the image reference.
            nm = "%s/%s" % (cont, utils.get_name(img))
            body["input"] = {
                    "image_properties": {"name": img_name or img},
                    "import_from": nm,
                    "import_from_format": img_format or DEFAULT_FORMAT}
        return body

    def create(self, name, *args, **kwargs):
        """
        Standard task creation, but first check for the existence of the
        containers, and raise an exception if they don't exist.
        """
        cont = kwargs.get("cont")
        if cont:
            # Verify that it exists. If it doesn't, a NoSuchContainer exception
            # will be raised.
            api = self.api
            rgn = api.region_name
            cf = api.identity.object_store[rgn].client
            cf.get_container(cont)
        return super(ImageTasksManager, self).create(name, *args, **kwargs)
class JSONSchemaManager(BaseManager):
    """
    Manager class for retrieving JSON schemas.
    """
    def _create_body(self, name):
        """
        Not used.
        """
        pass

    def _get_schema(self, path):
        """
        Fetch and return the json-schema document at the given sub-path.
        (Decomposition: the six public methods below differed only in this
        path, so the GET logic lives here once.)
        """
        uri = "/%s/%s" % (self.uri_base, path)
        resp, resp_body = self.api.method_get(uri)
        return resp_body

    def images(self):
        """
        Returns a json-schema document that represents an image members entity,
        which is a container of image member entities.
        """
        return self._get_schema("images")

    def image(self):
        """
        Returns a json-schema document that represents a single image entity.
        """
        return self._get_schema("image")

    def image_members(self):
        """
        Returns a json-schema document that represents an image members entity
        (a container of member entities).
        """
        return self._get_schema("members")

    def image_member(self):
        """
        Returns a json-schema document that represents an image member entity.
        """
        return self._get_schema("member")

    def image_tasks(self):
        """
        Returns a json-schema document that represents a container of tasks
        entities.
        """
        return self._get_schema("tasks")

    def image_task(self):
        """
        Returns a json-schema document that represents an task entity.
        """
        return self._get_schema("task")
class ImageClient(BaseClient):
    """
    This is the primary class for interacting with Images.
    """
    name = "Images"

    def _configure_manager(self):
        """
        Create the managers for images, tasks and schemas.
        """
        self._manager = ImageManager(self, resource_class=Image,
                response_key="", plural_response_key="images",
                uri_base="images")
        self._tasks_manager = ImageTasksManager(self, resource_class=ImageTask,
                response_key="", plural_response_key="tasks",
                uri_base="tasks")
        self._schema_manager = JSONSchemaManager(self, resource_class=None,
                response_key="", plural_response_key="", uri_base="schemas")

    def list(self, limit=None, marker=None, name=None, visibility=None,
            member_status=None, owner=None, tag=None, status=None,
            size_min=None, size_max=None, sort_key=None, sort_dir=None):
        """
        Returns a list of resource objects. Pagination is supported through the
        optional 'marker' and 'limit' parameters. Filtering the returned value
        is possible by specifying values for any of the other parameters.
        """
        return self._manager.list(limit=limit, marker=marker, name=name,
                visibility=visibility, member_status=member_status,
                owner=owner, tag=tag, status=status, size_min=size_min,
                size_max=size_max, sort_key=sort_key, sort_dir=sort_dir)

    def list_all(self, name=None, visibility=None, member_status=None,
            owner=None, tag=None, status=None, size_min=None, size_max=None,
            sort_key=None, sort_dir=None):
        """
        Returns all of the images in one call, rather than in paginated batches.
        The same filtering options available in list() apply here, with the
        obvious exception of limit and marker.
        """
        return self._manager.list_all(name=name, visibility=visibility,
                member_status=member_status, owner=owner, tag=tag,
                status=status, size_min=size_min, size_max=size_max,
                sort_key=sort_key, sort_dir=sort_dir)

    def update(self, img, value_dict):
        """
        Accepts an image reference (object or ID) and dictionary of key/value
        pairs, where the key is an attribute of the image, and the value is the
        desired new value for that image.
        """
        return self._manager.update(img, value_dict)

    def create(self, name, img_format=None, data=None, container=None,
            obj=None, metadata=None):
        """
        Creates a new image with the specified name. The image data can either
        be supplied directly in the 'data' parameter, or it can be an image
        stored in the object storage service. In the case of the latter, you
        can either supply the container and object names, or simply a
        StorageObject reference.
        """
        # Bug fix: 'metadata' was accepted but silently dropped when
        # delegating to the manager; pass it through.
        return self._manager.create(name, img_format, data=data,
                container=container, obj=obj, metadata=metadata)

    def change_image_name(self, img, newname):
        """
        Image name can be changed via the update() method. This is simply a
        convenience method.
        """
        return self.update(img, {"name": newname})

    @assure_image
    def list_image_members(self, img):
        """
        Returns a list of members (users) of the specified image.
        """
        return img.list_members()

    @assure_image
    def get_image_member(self, img, member):
        """
        Returns the ImageMember object representing the specified member for the
        specified image.
        """
        return img.get_member(member)

    @assure_image
    def add_image_member(self, img, project_id):
        """
        Adds the project (tenant) represented by the project_id as a member of
        the specified image.
        """
        return img.add_member(project_id)

    @assure_image
    def delete_image_member(self, img, project_id):
        """
        Removes the project (tenant) represented by the project_id as a member
        of the specified image.
        """
        return img.delete_member(project_id)

    def update_image_member(self, img_id, status):
        """
        Updates the image whose ID is given with the status specified. This
        must be called by the user whose project_id is in the members for the
        image; that is, the user with whom the image is being shared. If called
        by the owner of the image, an `InvalidImageMember` exception will be
        raised.

        Valid values for 'status' include:
            pending
            accepted
            rejected
        Any other value will result in an `InvalidImageMemberStatus` exception
        being raised.
        """
        return self._manager.update_image_member(img_id, status)

    @assure_image
    def add_image_tag(self, img, tag):
        """
        Adds the tag to the specified image.
        """
        return img.add_tag(tag)

    @assure_image
    def delete_image_tag(self, img, tag):
        """
        Deletes the tag from the specified image.
        """
        return img.delete_tag(tag)

    def list_tasks(self):
        """
        Returns a list of all tasks.
        """
        return self._tasks_manager.list()

    def get_task(self, task):
        """
        Returns the ImageTask object for the supplied ID.
        """
        return self._tasks_manager.get(task)

    def export_task(self, img, cont):
        """
        Creates a task to export the specified image to the swift container
        named in the 'cont' parameter. If the container does not exist, a
        NoSuchContainer exception is raised.

        The 'img' parameter can be either an Image object or the ID of an
        image. If these do not correspond to a valid image, a NotFound
        exception is raised.
        """
        return self._tasks_manager.create("export", img=img, cont=cont)

    def import_task(self, img, cont, img_format=None, img_name=None):
        """
        Creates a task to import the specified image from the swift container
        named in the 'cont' parameter. The new image will be named the same as
        the object in the container unless you specify a value for the
        'img_name' parameter.

        By default it is assumed that the image is in 'vhd' format; if it is
        another format, you must specify that in the 'img_format' parameter.
        """
        return self._tasks_manager.create("import", img=img, cont=cont,
                img_format=img_format, img_name=img_name)

    def get_images_schema(self):
        """
        Returns a json-schema document that represents an image members entity,
        which is a container of image member entities.
        """
        return self._schema_manager.images()

    def get_image_schema(self):
        """
        Returns a json-schema document that represents a single image entity.
        """
        return self._schema_manager.image()

    def get_image_members_schema(self):
        """
        Returns a json-schema document that represents an image members entity
        (a container of member entities).
        """
        return self._schema_manager.image_members()

    def get_image_member_schema(self):
        """
        Returns a json-schema document that represents an image member entity.
        """
        return self._schema_manager.image_member()

    def get_image_tasks_schema(self):
        """
        Returns a json-schema document that represents a container of tasks
        entities.
        """
        return self._schema_manager.image_tasks()

    def get_image_task_schema(self):
        """
        Returns a json-schema document that represents an task entity.
        """
        return self._schema_manager.image_task()
|
|
# Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.orchestration.portable.importer_node_handler."""
import os
import tensorflow as tf
from tfx import version as tfx_version
from tfx.dsl.compiler import constants
from tfx.orchestration import metadata
from tfx.orchestration.portable import importer_node_handler
from tfx.orchestration.portable import runtime_parameter_utils
from tfx.proto.orchestration import pipeline_pb2
from tfx.utils import test_case_utils
class ImporterNodeHandlerTest(test_case_utils.TfxTest):
def setUp(self):
    """Builds an sqlite-backed MLMD connection and loads the test pipeline."""
    super().setUp()
    pipeline_root = os.path.join(
        os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
        self.id())

    # Makes sure multiple connections within a test always connect to the same
    # MLMD instance.
    metadata_path = os.path.join(pipeline_root, 'metadata', 'metadata.db')
    connection_config = metadata.sqlite_metadata_connection_config(
        metadata_path)
    # Mark the sqlite sub-message as explicitly present even though all its
    # fields are defaults.
    connection_config.sqlite.SetInParent()
    self._mlmd_connection = metadata.Metadata(
        connection_config=connection_config)
    self._testdata_dir = os.path.join(os.path.dirname(__file__), 'testdata')

    # Sets up pipelines
    pipeline = pipeline_pb2.Pipeline()
    self.load_proto_from_text(
        os.path.join(
            os.path.dirname(__file__), 'testdata',
            'pipeline_for_launcher_test.pbtxt'), pipeline)
    self._pipeline_info = pipeline.pipeline_info
    self._pipeline_runtime_spec = pipeline.runtime_spec
    runtime_parameter_utils.substitute_runtime_parameter(
        pipeline, {
            constants.PIPELINE_RUN_ID_PARAMETER_NAME: 'my_pipeline_run',
        })

    # Extracts components
    self._importer = pipeline.nodes[3].pipeline_node

    # Fake tfx_version for tests.
    tfx_version.__version__ = '0.123.4.dev'
def testLauncher_importer_mode_reimport_enabled(self):
    """With reimport enabled, each run publishes a fresh Schema artifact."""
    handler = importer_node_handler.ImporterNodeHandler()
    # First run: imports a new Schema artifact (id: 1) with a COMPLETE
    # execution.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)

    with self._mlmd_connection as m:
        [artifact] = m.store.get_artifacts_by_type('Schema')
        self.assertProtoPartiallyEquals(
            """
            id: 1
            uri: "my_url"
            custom_properties {
              key: "int_custom_property"
              value {
                int_value: 123
              }
            }
            custom_properties {
              key: "str_custom_property"
              value {
                string_value: "abc"
              }
            }
            custom_properties {
              key: "tfx_version"
              value {
                string_value: "0.123.4.dev"
              }
            }
            state: LIVE""",
            artifact,
            ignored_fields=[
                'type_id', 'create_time_since_epoch',
                'last_update_time_since_epoch'
            ])

        [execution] = m.store.get_executions_by_id([execution_info.execution_id])
        self.assertProtoPartiallyEquals(
            """
            id: 1
            last_known_state: COMPLETE
            custom_properties {
              key: "artifact_uri"
              value {
                string_value: "my_url"
              }
            }
            custom_properties {
              key: "reimport"
              value {
                int_value: 1
              }
            }
            """,
            execution,
            ignored_fields=[
                'type_id', 'create_time_since_epoch',
                'last_update_time_since_epoch', 'name'
            ])

    # Second run: because reimport is enabled, a second Schema artifact
    # (id: 2) is published with another COMPLETE execution.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)

    with self._mlmd_connection as m:
        new_artifact = m.store.get_artifacts_by_type('Schema')[1]
        self.assertProtoPartiallyEquals(
            """
            id: 2
            uri: "my_url"
            custom_properties {
              key: "int_custom_property"
              value {
                int_value: 123
              }
            }
            custom_properties {
              key: "str_custom_property"
              value {
                string_value: "abc"
              }
            }
            custom_properties {
              key: "tfx_version"
              value {
                string_value: "0.123.4.dev"
              }
            }
            state: LIVE""",
            new_artifact,
            ignored_fields=[
                'type_id', 'create_time_since_epoch',
                'last_update_time_since_epoch'
            ])

        [execution] = m.store.get_executions_by_id([execution_info.execution_id])
        self.assertProtoPartiallyEquals(
            """
            id: 2
            last_known_state: COMPLETE
            custom_properties {
              key: "artifact_uri"
              value {
                string_value: "my_url"
              }
            }
            custom_properties {
              key: "reimport"
              value {
                int_value: 1
              }
            }
            """,
            execution,
            ignored_fields=[
                'type_id', 'create_time_since_epoch',
                'last_update_time_since_epoch', 'name'
            ])
  def testLauncher_importer_mode_reimport_disabled(self):
    """With reimport disabled, a rerun reuses the artifact and is CACHED.

    First run imports the Schema artifact and publishes a COMPLETE execution;
    the second run must not create a new artifact and is published as CACHED.
    """
    # Disable reimport on the importer node under test.
    self._importer.parameters.parameters['reimport'].field_value.int_value = 0
    handler = importer_node_handler.ImporterNodeHandler()
    # First run: expect one imported Schema artifact and a COMPLETE execution.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      # Exactly one Schema artifact should exist after the first run.
      [artifact] = m.store.get_artifacts_by_type('Schema')
      self.assertProtoPartiallyEquals(
          """
          id: 1
          uri: "my_url"
          custom_properties {
            key: "int_custom_property"
            value {
              int_value: 123
            }
          }
          custom_properties {
            key: "str_custom_property"
            value {
              string_value: "abc"
            }
          }
          custom_properties {
            key: "tfx_version"
            value {
              string_value: "0.123.4.dev"
            }
          }
          state: LIVE""",
          artifact,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch'
          ])
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 1
          last_known_state: COMPLETE
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 0
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch', 'name'
          ])
    # Run the 2nd execution. Since the reimport is disabled, no new schema
    # is imported and the corresponding execution is published as CACHED.
    execution_info = handler.run(
        mlmd_connection=self._mlmd_connection,
        pipeline_node=self._importer,
        pipeline_info=self._pipeline_info,
        pipeline_runtime_spec=self._pipeline_runtime_spec)
    with self._mlmd_connection as m:
      # No new Schema is produced.
      self.assertLen(m.store.get_artifacts_by_type('Schema'), 1)
      [execution] = m.store.get_executions_by_id([execution_info.execution_id])
      self.assertProtoPartiallyEquals(
          """
          id: 2
          last_known_state: CACHED
          custom_properties {
            key: "artifact_uri"
            value {
              string_value: "my_url"
            }
          }
          custom_properties {
            key: "reimport"
            value {
              int_value: 0
            }
          }
          """,
          execution,
          ignored_fields=[
              'type_id', 'create_time_since_epoch',
              'last_update_time_since_epoch', 'name'
          ])
if __name__ == '__main__':
  # Entry point: run every test case in this module via TensorFlow's runner.
  tf.test.main()
|
|
#!/usr/bin/env python2
#
# ESP8266 luatool
# Author e-mail: 4ref0nt@gmail.com
# Site: http://esp8266.ru
# Contributions from: https://github.com/sej7278
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51 Franklin
# Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sys
import serial
from time import sleep
import socket
import argparse
from os.path import basename
version = "0.6.4"
class TransportError(Exception):
    """Custom exception to represent errors with a transport.

    Attributes:
        message: human-readable description of the transport failure.
    """
    def __init__(self, message):
        # Forward to Exception.__init__ so str(e) and e.args carry the
        # message; the original implementation left them empty, which made
        # uncaught TransportErrors print with no detail.
        super(TransportError, self).__init__(message)
        self.message = message
class AbstractTransport:
    """Base class defining the interface for talking to the MCU.

    Concrete transports (serial, TCP/telnet) must implement close(), read()
    and writeln(); writer() and performcheck() are shared helpers built on
    top of those primitives.
    """
    def __init__(self):
        raise NotImplementedError('abstract transports cannot be instantiated.')
    def close(self):
        # Release the underlying connection; implemented by subclasses.
        raise NotImplementedError('Function not implemented')
    def read(self, length):
        # Read up to `length` bytes from the device; implemented by subclasses.
        raise NotImplementedError('Function not implemented')
    def writeln(self, data, check=1):
        # Send one line to the device, optionally verifying the echo
        # via performcheck(); implemented by subclasses.
        raise NotImplementedError('Function not implemented')
    def writer(self, data):
        # Wrap `data` in a NodeMCU file.writeline() call using Lua long
        # brackets [==[...]==] so quotes in `data` need no escaping.
        self.writeln("file.writeline([==[" + data + "]==])\r")
    def performcheck(self, expected):
        """Read the MCU's echo until the '>' prompt and compare it to `expected`.

        Raises a generic Exception when the device stops answering, reports a
        Lua error, or echoes something other than the string we sent.
        """
        line = ''
        char = ''
        i = -1
        while char != chr(62): # '>' — the NodeMCU interactive prompt
            char = self.read(1)
            if char == '':
                # read() returned nothing within its timeout.
                raise Exception('No proper answer from MCU')
            if char == chr(13) or char == chr(10): # CR or LF: end of an echoed line
                if line != '':
                    line = line.strip()
                    if line+'\r' == expected:
                        sys.stdout.write(" -> ok")
                    else:
                        if line[:4] == "lua:":
                            # Device-side interpreter error, abort the upload.
                            sys.stdout.write("\r\n\r\nLua ERROR: %s" % line)
                            raise Exception('ERROR from Lua interpreter\r\n\r\n')
                        else:
                            # NOTE(review): 'send string' and 'expected echo'
                            # both print the truncated `expected`; the original
                            # sent data is not kept separately — confirm intent.
                            expected = expected.split("\r")[0]
                            sys.stdout.write("\r\n\r\nERROR")
                            sys.stdout.write("\r\n send string : '%s'" % expected)
                            sys.stdout.write("\r\n expected echo : '%s'" % expected)
                            sys.stdout.write("\r\n but got answer : '%s'" % line)
                            sys.stdout.write("\r\n\r\n")
                            raise Exception('Error sending data to MCU\r\n\r\n')
                line = ''
            else:
                line += char
                # NOTE(review): `i` starts at -1, so expected[i] first compares
                # against the LAST character of `expected`; this appears to
                # consume trailing '>' characters of the echo — verify.
                if char == chr(62) and expected[i] == char:
                    char = ''
                    i += 1
class SerialTransport(AbstractTransport):
    """Transport backed by a local serial (UART) connection to the MCU."""

    def __init__(self, port, baud, delay):
        self.port = port
        self.baud = baud
        self.delay = delay
        self.serial = None
        try:
            self.serial = serial.Serial(port, baud)
        except serial.SerialException as exc:
            raise TransportError(exc.strerror)
        # Bound every read so a silent device cannot hang the tool forever.
        self.serial.timeout = 3
        self.serial.interCharTimeout = 3

    def writeln(self, data, check=1):
        # Discard any stale bytes the MCU sent before this command.
        if self.serial.inWaiting() > 0:
            self.serial.flushInput()
        if data:
            sys.stdout.write("\r\n->")
            sys.stdout.write(data.split("\r")[0])
        self.serial.write(data)
        sleep(self.delay)
        if check > 0:
            self.performcheck(data)
            return
        sys.stdout.write(" -> send without check")

    def read(self, length):
        return self.serial.read(length)

    def close(self):
        # Push out buffered bytes before releasing the port.
        self.serial.flush()
        self.serial.close()
class TcpSocketTransport(AbstractTransport):
    """Transport that talks to a telnet server running on the device."""

    def __init__(self, host, port):
        self.host = host
        self.port = port
        self.socket = None
        try:
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except socket.error as exc:
            raise TransportError(exc.strerror)
        try:
            self.socket.connect((host, port))
        except socket.error as exc:
            raise TransportError(exc.strerror)
        # read intro from telnet server (see telnet_srv.lua)
        self.socket.recv(50)

    def writeln(self, data, check=1):
        if data:
            sys.stdout.write("\r\n->")
            sys.stdout.write(data.split("\r")[0])
        self.socket.sendall(data)
        if check > 0:
            self.performcheck(data)
            return
        sys.stdout.write(" -> send without check")

    def read(self, length):
        return self.socket.recv(length)

    def close(self):
        self.socket.close()
def decidetransport(cliargs):
    """Choose a transport from parsed CLI arguments.

    An --ip value selects the telnet (TCP) transport, defaulting to port 23
    when no ':port' suffix is present; otherwise the serial port is used.
    """
    if not cliargs.ip:
        return SerialTransport(cliargs.port, cliargs.baud, cliargs.delay)
    pieces = cliargs.ip.split(':')
    host = pieces[0]
    # Only an exact "host:port" pair overrides the default telnet port.
    port = int(pieces[1]) if len(pieces) == 2 else 23
    return TcpSocketTransport(host, port)
if __name__ == '__main__':
    # parse arguments or use defaults
    parser = argparse.ArgumentParser(description='ESP8266 Lua script uploader.')
    parser.add_argument('-p', '--port', default='/dev/ttyUSB0', help='Device name, default /dev/ttyUSB0')
    parser.add_argument('-b', '--baud', default=9600, help='Baudrate, default 9600')
    parser.add_argument('-f', '--src', default='main.lua', help='Source file on computer, default main.lua')
    parser.add_argument('-t', '--dest', default=None, help='Destination file on MCU, default to source file name')
    parser.add_argument('-c', '--compile', action='store_true', help='Compile lua to lc after upload')
    parser.add_argument('-r', '--restart', action='store_true', help='Restart MCU after upload')
    parser.add_argument('-d', '--dofile', action='store_true', help='Run the Lua script after upload')
    parser.add_argument('-v', '--verbose', action='store_true', help="Show progress messages.")
    parser.add_argument('-a', '--append', action='store_true', help='Append source file to destination file.')
    parser.add_argument('-l', '--list', action='store_true', help='List files on device')
    parser.add_argument('-w', '--wipe', action='store_true', help='Delete all lua/lc files on device.')
    parser.add_argument('-i', '--id', action='store_true', help='Query the modules chip id.')
    parser.add_argument('-e', '--echo', action='store_true', help='Echo output of MCU until script is terminated.')
    parser.add_argument('--delay', default=0.01, help='Delay in seconds between each write.', type=float)
    parser.add_argument('--delete', default=None, help='Delete a lua/lc file from device.')
    parser.add_argument('--ip', default=None, help='Connect to a telnet server on the device (--ip IP[:port])')
    args = parser.parse_args()

    transport = decidetransport(args)

    # --list: print the device's file listing and exit.
    if args.list:
        transport.writeln("local l = file.list();for k,v in pairs(l) do print('name:'..k..', size:'..v)end\r", 0)
        while True:
            char = transport.read(1)
            if char == '' or char == chr(62):
                break
            sys.stdout.write(char)
        sys.exit(0)

    # --id: query and print the chip id, then exit.
    if args.id:
        transport.writeln("=node.chipid()\r", 0)
        # Renamed from 'id' to avoid shadowing the builtin.
        chip_id = ""
        while True:
            char = transport.read(1)
            if char == '' or char == chr(62):
                break
            if char.isdigit():
                chip_id += char
        print("\n" + chip_id)
        sys.exit(0)

    # --wipe: delete every lua/lc file reported by the device, then exit.
    if args.wipe:
        transport.writeln("local l = file.list();for k,v in pairs(l) do print(k)end\r", 0)
        file_list = []
        fn = ""
        while True:
            char = transport.read(1)
            if char == '' or char == chr(62):
                break
            if char not in ['\r', '\n']:
                fn += char
            else:
                if fn:
                    file_list.append(fn.strip())
                fn = ''
        for fn in file_list[1:]:  # first line is the list command sent to device
            if args.verbose:
                sys.stderr.write("Delete file {} from device.\r\n".format(fn))
            transport.writeln("file.remove(\"" + fn + "\")\r")
        sys.exit(0)

    # --delete: remove a single named file, then exit.
    if args.delete:
        transport.writeln("file.remove(\"" + args.delete + "\")\r")
        sys.exit(0)

    if args.dest is None:
        args.dest = basename(args.src)

    # open source file for reading
    try:
        f = open(args.src, "rt")
    except IOError:  # was a bare 'except:'; only file-open failures belong here
        sys.stderr.write("Could not open input file \"%s\"\n" % args.src)
        sys.exit(1)

    # Verify the selected file will not exceed the size of the serial buffer.
    # The size of the buffer is 256. This script does not accept files with
    # lines longer than 230 characters to have some room for command overhead.
    for ln in f:
        if len(ln) > 230:
            # Message fixed to state the 230-character limit actually checked
            # above (it previously claimed 240).
            sys.stderr.write("File \"%s\" contains a line with more than 230 "
                             "characters. This exceeds the size of the serial buffer.\n"
                             % args.src)
            f.close()
            sys.exit(1)

    # Go back to the beginning of the file after verifying it has the correct
    # line length
    f.seek(0)

    # set serial timeout
    if args.verbose:
        sys.stderr.write("Upload starting\r\n")

    # remove existing file on device unless we are appending to it
    if not args.append:
        if args.verbose:
            sys.stderr.write("Stage 1. Deleting old file from flash memory")
        transport.writeln("file.open(\"" + args.dest + "\", \"w\")\r")
        transport.writeln("file.close()\r")
        transport.writeln("file.remove(\"" + args.dest + "\")\r")
    else:
        if args.verbose:
            sys.stderr.write("[SKIPPED] Stage 1. Deleting old file from flash memory [SKIPPED]")

    # read source file line by line and write to device
    if args.verbose:
        sys.stderr.write("\r\nStage 2. Creating file in flash memory and write first line")
    if args.append:
        transport.writeln("file.open(\"" + args.dest + "\", \"a+\")\r")
    else:
        transport.writeln("file.open(\"" + args.dest + "\", \"w+\")\r")
    line = f.readline()
    if args.verbose:
        sys.stderr.write("\r\nStage 3. Start writing data to flash memory...")
    while line != '':
        transport.writer(line.strip())
        line = f.readline()

    # close both files
    f.close()
    if args.verbose:
        sys.stderr.write("\r\nStage 4. Flush data and closing file")
    transport.writeln("file.flush()\r")
    transport.writeln("file.close()\r")

    # compile?
    if args.compile:
        if args.verbose:
            sys.stderr.write("\r\nStage 5. Compiling")
        transport.writeln("node.compile(\"" + args.dest + "\")\r")
        transport.writeln("file.remove(\"" + args.dest + "\")\r")

    # restart or dofile
    if args.restart:
        transport.writeln("node.restart()\r")
    if args.dofile:  # never exec if restart=1
        transport.writeln("dofile(\"" + args.dest + "\")\r", 0)

    if args.echo:
        if args.verbose:
            sys.stderr.write("\r\nEchoing MCU output, press Ctrl-C to exit")
        # Endless echo loop; the user exits with Ctrl-C, so the cleanup below
        # is only reached when --echo was not given.
        while True:
            sys.stdout.write(transport.read(1))

    # close serial port
    transport.close()

    # flush screen
    sys.stdout.flush()
    sys.stderr.flush()
    sys.stderr.write("\r\n--->>> All done <<<---\r\n")
|
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Forward migration: relax 'Note.title' to allow NULL.

        Alters catalog_note.title to CharField(max_length=140, null=True).
        """
        # Changing field 'Note.title'
        db.alter_column(u'catalog_note', 'title', self.gf('django.db.models.fields.CharField')(max_length=140, null=True))
    def backwards(self, orm):
        """Reverse migration: make 'Note.title' non-nullable again.

        Alters catalog_note.title back to CharField(default='', max_length=140);
        the '' default lets the column be restored to NOT NULL.
        """
        # Changing field 'Note.title'
        db.alter_column(u'catalog_note', 'title', self.gf('django.db.models.fields.CharField')(default='', max_length=140))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'catalog.cfistoreitem': {
'Meta': {'object_name': 'CfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['catalog.Product']", 'unique': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'cfi_store_item_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeCfiStoreItem']", 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.comment': {
'Meta': {'object_name': 'Comment'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.documentation': {
'Meta': {'object_name': 'Documentation'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.emailcollect': {
'Meta': {'object_name': 'EmailCollect'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '30'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.image': {
'Meta': {'object_name': 'Image'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'large_url': ('django.db.models.fields.URLField', [], {'max_length': '1000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'small_url': ('django.db.models.fields.URLField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'images'", 'null': 'True', 'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.like': {
'Meta': {'object_name': 'Like'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likecfistoreitem': {
'Meta': {'unique_together': "(('user', 'cfi_store_item'),)", 'object_name': 'LikeCfiStoreItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'cfi_store_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.CfiStoreItem']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'LikeMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likeproduct': {
'Meta': {'unique_together': "(('user', 'product'),)", 'object_name': 'LikeProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likeproductdescription': {
'Meta': {'unique_together': "(('user', 'product_description'),)", 'object_name': 'LikeProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product_description': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductDescription']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likeproductimage': {
'Meta': {'unique_together': "(('user', 'image'),)", 'object_name': 'LikeProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductImage']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likeproducttutorial': {
'Meta': {'unique_together': "(('user', 'tutorial', 'product'),)", 'object_name': 'LikeProductTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.likeshop': {
'Meta': {'unique_together': "(('user', 'shop'),)", 'object_name': 'LikeShop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.list': {
'Meta': {'object_name': 'List'},
'access': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'access'", 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_private': ('django.db.models.fields.BooleanField', [], {}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.ListItem']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'owner'", 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listgroup': {
'Meta': {'object_name': 'ListGroup'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'lists': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalog.List']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.listitem': {
'Meta': {'object_name': 'ListItem'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'createdby': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.location': {
'Meta': {'object_name': 'Location'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.logidenticalproduct': {
'Meta': {'object_name': 'LogIdenticalProduct'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product1'", 'to': "orm['catalog.Product']"}),
'product2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product2'", 'to': "orm['catalog.Product']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.makey': {
'Meta': {'object_name': 'Makey'},
'about': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaborators'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'comments': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeycomments'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Comment']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'documentations': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeydocumentations'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Documentation']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'new_users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeys'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.NewUser']"}),
'notes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'makeynotes'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Note']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'catalog.makeyimage': {
'Meta': {'object_name': 'MakeyImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey_id': ('django.db.models.fields.IntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.newuser': {
'Meta': {'object_name': 'NewUser'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.note': {
'Meta': {'object_name': 'Note'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'body': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.product': {
'Meta': {'object_name': 'Product'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identicalto': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']", 'null': 'True', 'blank': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likers': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'product_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeProduct']", 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'makeys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'partsused'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'makeys_as_tools': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'tools_used'", 'blank': 'True', 'to': "orm['catalog.Makey']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sku': ('django.db.models.fields.IntegerField', [], {}),
'tutorials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'products'", 'blank': 'True', 'to': "orm['catalog.Tutorial']"})
},
'catalog.productdescription': {
'Meta': {'object_name': 'ProductDescription'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productdescriptions'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'blank': 'True'}),
'user_or_shop': ('django.db.models.fields.BooleanField', [], {})
},
'catalog.productimage': {
'Meta': {'object_name': 'ProductImage'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productimages'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.productreview': {
'Meta': {'object_name': 'ProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'product_reviews'", 'to': "orm['catalog.Product']"}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.productshopurl': {
'Meta': {'object_name': 'ProductShopUrl'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'productshopurls'", 'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.searchlog': {
'Meta': {'object_name': 'SearchLog'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'term': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'time': ('django.db.models.fields.DateTimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'})
},
'catalog.shop': {
'Meta': {'object_name': 'Shop'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'shopimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shop_likes'", 'symmetrical': 'False', 'through': "orm['catalog.LikeShop']", 'to': u"orm['django_facebook.FacebookCustomUser']"}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.shopreview': {
'Meta': {'object_name': 'ShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rating': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'review': ('django.db.models.fields.CharField', [], {'max_length': '100000'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shop_reviews'", 'to': "orm['catalog.Shop']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.toindexstore': {
'Meta': {'object_name': 'ToIndexStore'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'catalog.topmakeys': {
'Meta': {'object_name': 'TopMakeys'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topproducts': {
'Meta': {'object_name': 'TopProducts'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.topshops': {
'Meta': {'object_name': 'TopShops'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Shop']"})
},
'catalog.toptutorials': {
'Meta': {'object_name': 'TopTutorials'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"})
},
'catalog.topusers': {
'Meta': {'object_name': 'TopUsers'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.tutorial': {
'Meta': {'object_name': 'Tutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'images': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'tutorialimages'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalog.Image']"}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']", 'null': 'True', 'blank': 'True'}),
'votes': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.userinteraction': {
'Meta': {'object_name': 'UserInteraction'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
'event': ('django.db.models.fields.IntegerField', [], {}),
'event_id': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"})
},
'catalog.votemakey': {
'Meta': {'unique_together': "(('user', 'makey'),)", 'object_name': 'VoteMakey'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'makey': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Makey']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteproductreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteProductReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ProductReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.voteshopreview': {
'Meta': {'unique_together': "(('user', 'review'),)", 'object_name': 'VoteShopReview'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.ShopReview']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalog.votetutorial': {
'Meta': {'unique_together': "(('user', 'tutorial'),)", 'object_name': 'VoteTutorial'},
'added_time': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'tutorial': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Tutorial']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_facebook.FacebookCustomUser']"}),
'vote': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'django_facebook.facebookcustomuser': {
'Meta': {'object_name': 'FacebookCustomUser'},
'about_me': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'access_token': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'blog_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'facebook_open_graph': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'facebook_profile_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'new_token_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'raw_data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['catalog']
|
|
"""Open ports in your router for Home Assistant and provide statistics."""
from __future__ import annotations
import asyncio
from collections.abc import Mapping
from datetime import timedelta
from ipaddress import ip_address
from typing import Any
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.components.network import async_get_source_ip
from homeassistant.components.network.const import PUBLIC_TARGET_IP
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from .const import (
CONF_LOCAL_IP,
CONFIG_ENTRY_HOSTNAME,
CONFIG_ENTRY_SCAN_INTERVAL,
CONFIG_ENTRY_ST,
CONFIG_ENTRY_UDN,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
DOMAIN_CONFIG,
DOMAIN_DEVICES,
DOMAIN_LOCAL_IP,
LOGGER,
)
from .device import Device
# Identifier/title for persistent notifications raised by this component.
NOTIFICATION_ID = "upnp_notification"
NOTIFICATION_TITLE = "UPnP/IGD Setup"
# Entity platforms forwarded when a config entry is set up.
PLATFORMS = ["binary_sensor", "sensor"]
# configuration.yaml schema: an optional local-IP override, validated as an
# IP address and then normalized back to a string.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_LOCAL_IP): vol.All(ip_address, cv.string),
            },
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the UPnP component from configuration.yaml."""
    LOGGER.debug("async_setup, config: %s", config)
    # Fall back to an empty, schema-validated domain config when absent.
    fallback = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN]
    domain_conf = config.get(DOMAIN, fallback)
    source_ip = await async_get_source_ip(hass, PUBLIC_TARGET_IP)
    shared = {
        DOMAIN_CONFIG: domain_conf,
        DOMAIN_DEVICES: {},
        DOMAIN_LOCAL_IP: domain_conf.get(CONF_LOCAL_IP, source_ip),
    }
    hass.data[DOMAIN] = shared
    # Kick off the import flow only when configured via configuration.yaml.
    if DOMAIN in config:
        flow = hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
        )
        hass.async_create_task(flow)
    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up UPnP/IGD device from a config entry.

    Waits (up to 10s) for the device to be re-discovered via SSDP — raising
    ConfigEntryNotReady on timeout so Home Assistant retries — then creates
    the Device wrapper, keeps the entry's unique_id and hostname current,
    registers the device, starts the polling coordinator and the platforms.
    """
    LOGGER.debug("Setting up config entry: %s", entry.unique_id)
    udn = entry.data[CONFIG_ENTRY_UDN]
    st = entry.data[CONFIG_ENTRY_ST]  # pylint: disable=invalid-name
    usn = f"{udn}::{st}"
    # Register device discovered-callback.
    device_discovered_event = asyncio.Event()
    discovery_info: Mapping[str, Any] | None = None

    @callback
    def device_discovered(info: Mapping[str, Any]) -> None:
        nonlocal discovery_info
        LOGGER.debug(
            "Device discovered: %s, at: %s", usn, info[ssdp.ATTR_SSDP_LOCATION]
        )
        discovery_info = info
        device_discovered_event.set()

    cancel_discovered_callback = ssdp.async_register_callback(
        hass,
        device_discovered,
        {
            "usn": usn,
        },
    )
    try:
        await asyncio.wait_for(device_discovered_event.wait(), timeout=10)
    except asyncio.TimeoutError as err:
        LOGGER.debug("Device not discovered: %s", usn)
        raise ConfigEntryNotReady from err
    finally:
        # Always deregister the SSDP callback, discovered or not.
        cancel_discovered_callback()
    # Create device.
    location = discovery_info[  # pylint: disable=unsubscriptable-object
        ssdp.ATTR_SSDP_LOCATION
    ]
    device = await Device.async_create_device(hass, location)
    # Ensure entry has a unique_id.
    if not entry.unique_id:
        LOGGER.debug(
            "Setting unique_id: %s, for config_entry: %s",
            device.unique_id,
            entry,
        )
        hass.config_entries.async_update_entry(
            entry=entry,
            unique_id=device.unique_id,
        )
    # Ensure entry has an up-to-date hostname, for older entries.
    if (
        CONFIG_ENTRY_HOSTNAME not in entry.data
        or entry.data[CONFIG_ENTRY_HOSTNAME] != device.hostname
    ):
        # BUG FIX: the fresh hostname must come *after* **entry.data in the
        # merge; previously the stale entry.data value was spread last and
        # silently won, so a changed hostname was never persisted.
        hass.config_entries.async_update_entry(
            entry=entry,
            data={**entry.data, CONFIG_ENTRY_HOSTNAME: device.hostname},
        )
    # Create device registry entry.
    device_registry = await dr.async_get_registry(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(dr.CONNECTION_UPNP, device.udn)},
        identifiers={(DOMAIN, device.udn)},
        name=device.name,
        manufacturer=device.manufacturer,
        model=device.model_name,
    )
    # Poll interval comes from entry options, falling back to the default.
    update_interval_sec = entry.options.get(
        CONFIG_ENTRY_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
    )
    update_interval = timedelta(seconds=update_interval_sec)
    LOGGER.debug("update_interval: %s", update_interval)
    coordinator = UpnpDataUpdateCoordinator(
        hass,
        device=device,
        update_interval=update_interval,
    )
    # Save coordinator.
    hass.data[DOMAIN][entry.entry_id] = coordinator
    await coordinator.async_config_entry_first_refresh()
    # Create sensors.
    LOGGER.debug("Enabling sensors")
    hass.config_entries.async_setup_platforms(entry, PLATFORMS)
    # Start device updater.
    await device.async_start()
    return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Tear down a UPnP/IGD config entry and stop its device updater."""
    LOGGER.debug("Unloading config entry: %s", config_entry.unique_id)
    # Drop the coordinator (if present) and stop the underlying device first.
    coordinator = hass.data[DOMAIN].pop(config_entry.entry_id, None)
    if coordinator:
        await coordinator.device.async_stop()
    LOGGER.debug("Deleting sensors")
    unloaded = await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
    return unloaded
class UpnpDataUpdateCoordinator(DataUpdateCoordinator):
    """Coordinator that polls traffic and status data from a UPnP/IGD device."""

    def __init__(
        self, hass: HomeAssistant, device: Device, update_interval: timedelta
    ) -> None:
        """Store the device and hand polling configuration to the base class."""
        self.device = device
        super().__init__(
            hass, LOGGER, name=device.name, update_interval=update_interval
        )

    async def _async_update_data(self) -> Mapping[str, Any]:
        """Fetch traffic and status concurrently and merge into one mapping."""
        traffic, status = await asyncio.gather(
            self.device.async_get_traffic_data(),
            self.device.async_get_status(),
        )
        # Status keys override traffic keys on collision, as before.
        return {**traffic, **status}
class UpnpEntity(CoordinatorEntity):
    """Common base for UPnP/IGD sensor and binary-sensor entities."""

    coordinator: UpnpDataUpdateCoordinator

    def __init__(self, coordinator: UpnpDataUpdateCoordinator) -> None:
        """Link the entity to its coordinator and describe the backing device."""
        super().__init__(coordinator)
        device = coordinator.device
        self._device = device
        self._attr_device_info = {
            "connections": {(dr.CONNECTION_UPNP, device.udn)},
            "name": device.name,
            "manufacturer": device.manufacturer,
            "model": device.model_name,
        }
|
|
# monitoring v3 for topology 2
# Jack Zhao
# s.zhao.j@gmail.com
from __future__ import division
from operator import attrgetter
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import MAIN_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import CONFIG_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.lib import hub
from ryu.lib.packet import packet
import os.path
import os
# from my_switch_v11_topo_2 import SimpleSwitch13
# Per-switch stats output files; '{0}' is filled with the switch hostname.
OFP_SWITCHES_FLOW_STATS = \
    './network-data2/ofp_switches_{0}_flow_stats.db'
OFP_SWITCHES_FLOW_STATS_PREVIOUS = \
    './network-data2/ofp_switches_{0}_flow_stats_prev.db'
OFP_SWITCHES_PORT_STATS = \
    './network-data2/ofp_switches_{0}_port_stats.db'
OFP_SWITCHES_PORT_STATS_PREVIOUS = \
    './network-data2/ofp_switches_{0}_port_stats_prev.db'
# hostname <-> datapath-id mapping files (read by _hostname_Check).
OFP_SWITCHES_LIST_PREVIOUS = \
    './network-data2/ofp_switches_list_prev.db'
OFP_SWITCHES_LIST = \
    './network-data2/ofp_switches_list.db'
# Flow priorities of interest: only ICMP/iperf flows are reported.
ICMP_PRIORITY = 3
IPERF_PRIORITY = 4
PRIORITY_LIST = [ICMP_PRIORITY, IPERF_PRIORITY]
# Seconds between successive stats requests issued by the monitor thread.
STATS_UPDATE_TIMER = 3
class MySimpleMonitor(app_manager.RyuApp):
    """Monitoring app for topology 2.

    Periodically requests flow and port statistics from every connected
    datapath, derives per-flow and per-port speeds from consecutive
    samples, and writes per-switch reports to the ./network-data2/*.db
    files.
    """

    def __init__(self, *args, **kwargs):
        super(MySimpleMonitor, self).__init__(*args, **kwargs)
        # Currently connected switches: datapath.id -> datapath.
        self.datapaths = {}
        self.monitor_thread = hub.spawn(self._monitor)
        # Rolling sample histories / derived speeds, keyed by tuples.
        self.port_stats = {}
        self.port_speed = {}
        self.flow_stats = {}
        self.flow_speed = {}
        # Decimal dpid -> hostname cache, loaded from the switch list file.
        self.hostname_list = {}
        # Assumed sampling period (seconds) used until two samples exist.
        # NOTE(review): _monitor actually polls every STATS_UPDATE_TIMER
        # (3s), not every self.sleep (10s) -- confirm which is intended.
        self.sleep = 10
        # Number of samples kept per key in the history dicts.
        self.state_len = 3

    @set_ev_cls(ofp_event.EventOFPStateChange,
                [MAIN_DISPATCHER, DEAD_DISPATCHER])
    def _state_change_handler(self, ev):
        """Register/unregister datapaths as they connect and disconnect."""
        datapath = ev.datapath
        if ev.state == MAIN_DISPATCHER:
            if datapath.id not in self.datapaths:
                self.logger.debug('register datapath: %016x', datapath.id)
                self.datapaths[datapath.id] = datapath
        elif ev.state == DEAD_DISPATCHER:
            if datapath.id in self.datapaths:
                self.logger.debug('unregister datapath: %016x', datapath.id)
                del self.datapaths[datapath.id]

    def _monitor(self):
        """Green-thread loop: poll stats from every switch, then sleep."""
        while True:
            for dp in self.datapaths.values():
                self._request_stats(dp)
            hub.sleep(STATS_UPDATE_TIMER)

    def _hostname_Check(self, datapath):
        """Given decimal datapath ID, return its hostname.

        Falls back to returning the ID itself when no mapping is known.
        """
        # Prefer the previous list file when it exists.
        if os.path.exists(os.path.abspath(OFP_SWITCHES_LIST_PREVIOUS)):
            f = os.path.abspath(OFP_SWITCHES_LIST_PREVIOUS)
        else:
            f = os.path.abspath(OFP_SWITCHES_LIST)
        with open(f, 'r') as iff:
            for line in iff:
                hostname, dpid = line.split()
                # The file stores hex dpids; cache them under decimal keys.
                self.hostname_list[int(dpid, 16)] = hostname
        # NEED add some datapath check later
        if datapath not in self.hostname_list:
            return datapath
        else:
            return self.hostname_list[datapath]

    def _request_stats(self, datapath):
        """Send flow-stats and port-stats requests to one datapath."""
        self.logger.debug('send stats request: %016x', datapath.id)
        ofproto = datapath.ofproto
        parser = datapath.ofproto_parser
        req = parser.OFPFlowStatsRequest(datapath)
        datapath.send_msg(req)
        req = parser.OFPPortStatsRequest(datapath, 0, ofproto.OFPP_ANY)
        datapath.send_msg(req)

    def _save_stats(self, dist, key, value, length):
        """
        save the `length` most recent stats/speed samples for each key;
        each dict value is a list of the most recent samples, oldest first
        """
        if key not in dist:
            dist[key] = []
        dist[key].append(value)
        if len(dist[key]) > length:
            dist[key].pop(0)

    def _get_speed(self, now, pre, period):
        """
        return speed in bytes/sec:
        ((most recent byte_count) - (previous byte_count)) / period
        Returns None when period == 0 (stats did not change).
        """
        if period == 0:
            # if flow stat does not change, return
            return
        return (now - pre) / period

    def _get_time(self, sec, nsec):
        """
        return combined seconds and nanoseconds as a float
        ex: sec = 2, nsec = 12000000
        return 2.012
        """
        return sec + nsec / (10**9)

    def _get_period(self, n_sec, n_nsec, p_sec, p_nsec):
        """
        return the time difference between two adjacent stats samples
        """
        return self._get_time(n_sec, n_nsec) - self._get_time(p_sec, p_nsec)

    @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER)
    def _flow_stats_reply_handler(self, ev):
        """Record ICMP/iperf-priority flow stats, compute each flow's speed
        from consecutive samples, and write the per-switch flow report."""
        self.logger.debug("simple_monitor.flow_stats:")
        body = ev.msg.body
        dpid = ev.msg.datapath.id
        self.logger.debug("Switch Flow States Msg reply Details")
        switch_name = self._hostname_Check(ev.msg.datapath.id)
        with open(OFP_SWITCHES_FLOW_STATS.format(switch_name), 'w') as iff:
            self.logger.debug("\n> Flow Stats:")
            self.logger.debug('datapath         '
                              'hostname '
                              'in-port duration_sec duration_nsec '
                              ' eth-dst out-port packets bytes speed(bits/sec)')
            self.logger.debug('---------------- '
                              '---------- '
                              '-------- ---------------- -------------- '
                              '------------------- -------- -------- -------- --------------')
            iff.write('datapath         '
                      'hostname '
                      'in-port duration_sec duration_nsec '
                      ' eth-dst out-port packets bytes speed(bits/sec) priority\n')
            iff.write('---------------- '
                      '---------- '
                      '-------- ------------------ -------------- '
                      '----------------------- -------- -------- -------- -------------- ----\n')
            # Only flows with the declared priorities of interest are kept;
            # use PRIORITY_LIST instead of repeating the two comparisons.
            for stat in sorted([flow for flow in body if flow.priority in PRIORITY_LIST],
                               key=lambda flow: (flow.match['in_port'],
                                                 flow.match['eth_dst'])):
                # Sample key: (dpid, in_port, dst_mac, output_port).
                key = (dpid,
                       stat.match['in_port'], stat.match['eth_dst'],
                       stat.instructions[0].actions[0].port,)
                # Sample value: (packet_count, byte_count, duration_sec, duration_nsec).
                value = (
                    stat.packet_count, stat.byte_count,
                    stat.duration_sec, stat.duration_nsec)
                self._save_stats(self.flow_stats, key, value, self.state_len)
                pre = 0
                period = self.sleep
                tmp = self.flow_stats[key]
                if len(tmp) > 1:
                    # Previous sample's byte_count.
                    pre = tmp[-2][1]
                    # Period between the two most recent samples, derived from
                    # their duration_sec/duration_nsec fields (0 when the flow
                    # stats did not change between polls).
                    period = self._get_period(
                        tmp[-1][2], tmp[-1][3],
                        tmp[-2][2], tmp[-2][3])
                    self.logger.debug("%s %s %s %s" %
                                      (tmp[-1][2], tmp[-1][3], tmp[-2][2], tmp[-2][3]))
                # Current speed (bytes/sec) over the sample period.
                speed = self._get_speed(
                    self.flow_stats[key][-1][1], pre, period)
                self.logger.debug("pre_byte=%s current_byte=%s period=%s speed=%s" % (pre, self.flow_stats[key][-1][1], period, speed))
                if speed is None:
                    self.logger.debug("Speed == None ----------------------------------------------------------")
                    speed = 0.0
                self._save_stats(self.flow_speed, key, speed, self.state_len)
                # Report speed in bits/sec, hence the * 8.
                iff.write('%16d %8s %8x %16d %16d %17s %8x %8d %8d %16d %10d' %
                          (ev.msg.datapath.id,
                           self._hostname_Check(ev.msg.datapath.id),
                           stat.match['in_port'], stat.duration_sec,
                           stat.duration_nsec, stat.match['eth_dst'],
                           stat.instructions[0].actions[0].port,
                           stat.packet_count, stat.byte_count, int(speed) * 8, stat.priority))
                iff.write("\n")
                self.logger.debug('%16d %8s %8x %16d %16d %17s %8x %8d %8d %16d',
                                  ev.msg.datapath.id,
                                  self._hostname_Check(ev.msg.datapath.id),
                                  stat.match['in_port'], stat.duration_sec,
                                  stat.duration_nsec, stat.match['eth_dst'],
                                  stat.instructions[0].actions[0].port,
                                  stat.packet_count, stat.byte_count,
                                  int(speed) * 8)

    @set_ev_cls(ofp_event.EventOFPPortStatsReply, MAIN_DISPATCHER)
    def _port_stats_reply_handler(self, ev):
        """Record port stats, compute each port's receive speed from
        consecutive samples, and write the per-switch port report."""
        self.logger.debug("simple_monitor.port_stats:")
        body = ev.msg.body
        dpid = ev.msg.datapath.id
        self.logger.debug("Switch Port States Msg reply Details")
        switch_name = self._hostname_Check(ev.msg.datapath.id)
        with open(OFP_SWITCHES_PORT_STATS.format(switch_name), 'w') as iff:
            self.logger.debug("\n> Port Stats:")
            self.logger.debug('datapath         '
                              'hostname '
                              'port duration_sec duration_nsec'
                              ' rx-pkts rx-bytes rx-error '
                              ' tx-pkts tx-bytes tx-error speed(bits/sec)')
            self.logger.debug('---------------- '
                              '-------------- '
                              '----- ------------- ---------------- '
                              '-------- -------- -------- '
                              '-------- -------- -------- --------------')
            iff.write('datapath         '
                      'hostname '
                      'port duration_sec duration_nsec'
                      ' rx-pkts rx-bytes rx-error '
                      ' tx-pkts tx-bytes tx-error speed(bits/sec)\n')
            iff.write('---------------- '
                      '-------------- '
                      '----- ------------- ---------------- '
                      '-------- -------- -------- '
                      '-------- -------- -------- --------------\n')
            for stat in sorted(body, key=attrgetter('port_no')):
                # Sample key: (datapath.id, switch_port_number).
                key = (dpid, stat.port_no)
                # Sample value appended to self.port_stats[key].
                value = (
                    stat.rx_packets, stat.rx_bytes, stat.rx_errors,
                    stat.duration_sec, stat.duration_nsec)
                self._save_stats(self.port_stats, key, value, self.state_len)
                # Get port speed.
                pre = 0
                period = self.sleep  # assumed period until two samples exist
                tmp = self.port_stats[key]
                if len(tmp) > 1:
                    # Previous sample's rx_bytes.
                    pre = tmp[-2][1]
                    # Period between the two most recent samples, derived from
                    # their duration_sec/duration_nsec fields (0 when the port
                    # stats did not change between polls).
                    period = self._get_period(
                        tmp[-1][3], tmp[-1][4],
                        tmp[-2][3], tmp[-2][4])
                # Receive speed (bytes/sec) over the sample period.
                speed = self._get_speed(
                    self.port_stats[key][-1][1], pre, period)
                if speed is None:
                    speed = 0.0
                self._save_stats(self.port_speed, key, speed, self.state_len)
                # Report speed in bits/sec, hence the * 8.
                self.logger.debug('%016x %8s %8x %16d %16d %8d %8d %8d %8d %8d %8d %16d',
                                  ev.msg.datapath.id,
                                  self._hostname_Check(ev.msg.datapath.id),
                                  stat.port_no, stat.duration_sec, stat.duration_nsec,
                                  stat.rx_packets, stat.rx_bytes,
                                  stat.rx_errors, stat.tx_packets,
                                  stat.tx_bytes, stat.tx_errors, int(speed) * 8)
                iff.write('%016x %8s %8x %16d %16d %8d %8d %8d %8d %8d %8d %16d' %
                          (ev.msg.datapath.id,
                           self._hostname_Check(ev.msg.datapath.id),
                           stat.port_no, stat.duration_sec, stat.duration_nsec,
                           stat.rx_packets, stat.rx_bytes, stat.rx_errors,
                           stat.tx_packets, stat.tx_bytes, stat.tx_errors, int(speed) * 8))
                iff.write("\n")
|
|
from crispy_forms.utils import TEMPLATE_PACK
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import PermissionDenied
from django.db import models
from django.db.models.query import QuerySet
from django.forms.models import model_to_dict
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from xadmin.layout import Field, render_field
from xadmin.plugins.inline import Inline
from xadmin.plugins.actions import BaseActionView
from xadmin.plugins.inline import InlineModelAdmin
from xadmin.sites import site
from xadmin.util import unquote, quote, model_format_dict, is_related_field2
from xadmin.views import BaseAdminPlugin, ModelAdminView, CreateAdminView, UpdateAdminView, DetailAdminView, ModelFormAdminView, DeleteAdminView, ListAdminView
from xadmin.views.base import csrf_protect_m, filter_hook
from xadmin.views.detail import DetailAdminUtil
from reversion.models import Revision, Version
from reversion.revisions import is_active, register, is_registered, set_comment, create_revision, set_user
from contextlib import contextmanager
from functools import partial
def _autoregister(admin, model, follow=None):
    """Registers a model with reversion, if required.

    Recursively registers concrete parent models first so multi-table
    inheritance chains are versioned together (each parent link field is
    added to ``follow``).
    """
    if model._meta.proxy:
        # NOTE(review): RegistrationError is not imported in this module, so
        # this line would raise NameError instead -- confirm the intended
        # import (django-reversion moved it between versions).
        raise RegistrationError("Proxy models cannot be used with django-reversion, register the parent class instead")
    if not is_registered(model):
        follow = follow or []
        for parent_cls, field in model._meta.parents.items():
            # Follow the implicit parent-link OneToOneField.
            follow.append(field.name)
            _autoregister(admin, parent_cls)
        register(model, follow=follow, format=admin.reversion_format)
def _register_model(admin, model):
    """Register *model* with reversion, following its inline relations.

    For each inline declared on the xadmin class, the related model is also
    registered and the reverse accessor is added to ``follow`` so inline rows
    are captured in the same revision.
    """
    if not hasattr(admin, 'reversion_format'):
        admin.reversion_format = 'json'
    if not is_registered(model):
        inline_fields = []
        for inline in getattr(admin, 'inlines', []):
            inline_model = inline.model
            if getattr(inline, 'generic_inline', False):
                # Generic (contenttypes) inline: find the matching
                # GenericRelation on the parent model, if declared.
                ct_field = getattr(inline, 'ct_field', 'content_type')
                ct_fk_field = getattr(inline, 'ct_fk_field', 'object_id')
                for field in model._meta.many_to_many:
                    if isinstance(field, GenericRelation) \
                        and field.rel.to == inline_model \
                        and field.object_id_field_name == ct_fk_field \
                        and field.content_type_field_name == ct_field:
                        inline_fields.append(field.name)
                _autoregister(admin, inline_model)
            else:
                # Ordinary FK inline: locate the FK pointing back at *model*.
                fk_name = getattr(inline, 'fk_name', None)
                if not fk_name:
                    for field in inline_model._meta.fields:
                        if isinstance(field, (models.ForeignKey, models.OneToOneField)) and issubclass(model, field.rel.to):
                            fk_name = field.name
                # NOTE(review): if no FK is found, fk_name stays None and the
                # get_field(fk_name) calls below would raise -- confirm every
                # registered inline has a resolvable FK.
                # NOTE(review): this mixes the old `.rel` and new
                # `.remote_field` attribute APIs; confirm the targeted Django
                # versions provide both.
                _autoregister(admin, inline_model, follow=[fk_name])
                if not inline_model._meta.get_field(fk_name).rel.is_hidden():
                    accessor = inline_model._meta.get_field(fk_name).remote_field.get_accessor_name()
                    inline_fields.append(accessor)
        _autoregister(admin, model, inline_fields)
def register_models(admin_site=None):
    """Register with django-reversion every model whose xadmin admin class
    opts in via ``reversion_enable = True``.

    :param admin_site: xadmin site whose registry is scanned; defaults to the
        global ``site``.
    """
    if admin_site is None:
        admin_site = site
    for model, model_admin in admin_site._registry.items():
        if getattr(model_admin, 'reversion_enable', False):
            _register_model(model_admin, model)
@contextmanager
def do_create_revision(request):
    """Context manager: run the wrapped block inside a single reversion
    revision attributed to ``request.user``."""
    with create_revision():
        set_user(request.user)
        yield
class ReversionPlugin(BaseAdminPlugin):
    """xadmin plugin that records a django-reversion revision (with user and
    an auto-generated comment) around create/update/revert/recover/delete
    POSTs, and adds history/recover links to the admin UI."""

    # The serialization format to use when registering models with reversion.
    reversion_format = "json"
    # Whether to ignore duplicate revision data.
    ignore_duplicate_revisions = False
    # Plugin is inert unless the admin class sets reversion_enable = True.
    reversion_enable = False

    def init_request(self, *args, **kwargs):
        # Returning False deactivates the plugin for this request.
        return self.reversion_enable

    def do_post(self, __):
        def _method():
            # NOTE(review): self.revision_context_manager is not defined
            # anywhere in this module -- presumably provided by another
            # plugin/mixin; confirm before relying on this code path.
            self.revision_context_manager.set_user(self.user)
            comment = ''
            admin_view = self.admin_view
            if isinstance(admin_view, CreateAdminView):
                comment = _(u"Initial version.")
            elif isinstance(admin_view, UpdateAdminView):
                comment = _(u"Change version.")
            elif isinstance(admin_view, RevisionView):
                comment = _(u"Revert version.")
            elif isinstance(admin_view, RecoverView):
                # Fixed typo in the user-facing message (was "Rercover").
                comment = _(u"Recover version.")
            elif isinstance(admin_view, DeleteAdminView):
                comment = _(u"Deleted %(verbose_name)s.") % {
                    "verbose_name": self.opts.verbose_name}
            self.revision_context_manager.set_comment(comment)
            return __()
        return _method

    def post(self, __, request, *args, **kwargs):
        # Wrap the entire POST handling in one revision owned by request.user.
        with do_create_revision(request):
            return __()

    # Block Views
    def block_top_toolbar(self, context, nodes):
        """Add a "Recover" button linking to the deleted-objects list."""
        recoverlist_url = self.admin_view.model_admin_url('recoverlist')
        nodes.append(mark_safe('<div class="btn-group"><a class="btn btn-default btn-sm" href="%s"><i class="fa fa-trash-o"></i> %s</a></div>' % (recoverlist_url, _(u"Recover"))))

    def block_nav_toggles(self, context, nodes):
        """Add a compact history toggle when viewing/editing an object."""
        obj = getattr(
            self.admin_view, 'org_obj', getattr(self.admin_view, 'obj', None))
        if obj:
            revisionlist_url = self.admin_view.model_admin_url(
                'revisionlist', quote(obj.pk))
            nodes.append(mark_safe('<a href="%s" class="navbar-toggle pull-right"><i class="fa fa-calendar"></i></a>' % revisionlist_url))

    def block_nav_btns(self, context, nodes):
        """Add a "History" button when viewing/editing an object."""
        obj = getattr(
            self.admin_view, 'org_obj', getattr(self.admin_view, 'obj', None))
        if obj:
            revisionlist_url = self.admin_view.model_admin_url(
                'revisionlist', quote(obj.pk))
            nodes.append(mark_safe('<a href="%s" class="btn btn-default"><i class="fa fa-calendar"></i> <span>%s</span></a>' % (revisionlist_url, _(u'History'))))
# action revision
class ActionRevisionPlugin(BaseAdminPlugin):
    """Wraps bulk admin actions in a reversion revision, mirroring what
    ReversionPlugin does for single-object POSTs."""
    # Inert unless the admin class sets reversion_enable = True.
    reversion_enable = False
    def init_request(self, *args, **kwargs):
        # Returning False deactivates the plugin for this request.
        return self.reversion_enable
    def do_action(self, __, queryset):
        # Record the whole action as one revision owned by the request user.
        with do_create_revision(self.request):
            return __()
class BaseReversionView(ModelAdminView):
    """Shared base for the reversion admin views (history and recover lists)."""

    # The serialization format to use when registering models with reversion.
    reversion_format = "json"
    # Whether to ignore duplicate revision data.
    ignore_duplicate_revisions = False
    # If True, object_history and recover lists are shown newest-first.
    history_latest_first = False
    reversion_enable = False

    def init_request(self, *args, **kwargs):
        # Viewing history requires either change or add permission.
        if not (self.has_change_permission() or self.has_add_permission()):
            raise PermissionDenied

    def _order_version_queryset(self, queryset):
        """Applies the correct ordering to the given version queryset."""
        ordering = "-pk" if self.history_latest_first else "pk"
        return queryset.order_by(ordering)
class RecoverListView(BaseReversionView):
    """Lists deleted objects of this model so one can be picked for recovery."""
    # Custom template override; falls back to views/recover_list.html.
    recover_list_template = None
    def get_context(self):
        """Extend the base context with the ordered deleted-version queryset."""
        context = super(RecoverListView, self).get_context()
        opts = self.opts
        deleted = self._order_version_queryset(Version.objects.get_deleted(self.model))
        context.update({
            "opts": opts,
            "app_label": opts.app_label,
            "model_name": capfirst(opts.verbose_name),
            "title": _("Recover deleted %(name)s") % {"name": force_text(opts.verbose_name_plural)},
            "deleted": deleted,
            "changelist_url": self.model_admin_url("changelist"),
        })
        return context
    @csrf_protect_m
    def get(self, request, *args, **kwargs):
        """Render the recover-list page."""
        context = self.get_context()
        return TemplateResponse(
            request, self.recover_list_template or self.get_template_list(
                "views/recover_list.html"),
            context)
class RevisionListView(BaseReversionView):
    """Shows an object's revision history (GET) and a field-by-field diff
    between two selected versions (POST)."""

    # Template overrides; fall back to views/model_history.html and
    # views/revision_diff.html respectively.
    object_history_template = None
    revision_diff_template = None

    def _reversion_order_version_queryset(self, queryset):
        """Applies the correct ordering to the given version queryset."""
        if not self.history_latest_first:
            queryset = queryset.order_by("pk")
        return queryset

    def get_context(self):
        """Build the context for the history-list page."""
        context = super(RevisionListView, self).get_context()
        opts = self.opts
        action_list = [
            {
                "revision": version.revision,
                "url": self.model_admin_url('revision', quote(version.object_id), version.id),
                "version": version
            }
            for version
            in self._reversion_order_version_queryset(Version.objects.get_for_object_reference(
                self.model,
                self.obj.pk,
            ).select_related("revision__user"))
        ]
        context.update({
            'title': _('Change history: %s') % force_text(self.obj),
            'action_list': action_list,
            'model_name': capfirst(force_text(opts.verbose_name_plural)),
            'object': self.obj,
            'app_label': opts.app_label,
            "changelist_url": self.model_admin_url("changelist"),
            "update_url": self.model_admin_url("change", self.obj.pk),
            'opts': opts,
        })
        return context

    def get(self, request, object_id, *args, **kwargs):
        """Render the revision list for the given object."""
        object_id = unquote(object_id)
        self.obj = self.get_object(object_id)
        if not self.has_change_permission(self.obj):
            raise PermissionDenied
        return self.get_response()

    def get_response(self):
        context = self.get_context()
        return TemplateResponse(self.request, self.object_history_template or
                                self.get_template_list('views/model_history.html'), context)

    def get_version_object(self, version):
        """Deserialize *version* into an unsaved model instance (with m2m data
        applied) plus a detail helper for rendering its field values."""
        obj_version = version._object_version
        obj = obj_version.object
        obj._state.db = self.obj._state.db
        for field_name, pks in obj_version.m2m_data.items():
            f = self.opts.get_field(field_name)
            if f.rel and isinstance(f.rel, models.ManyToManyRel):
                # NOTE(review): get_query_set is the pre-Django-1.6 manager
                # API -- confirm the supported Django versions still have it.
                setattr(obj, f.name, f.rel.to._default_manager.get_query_set(
                ).filter(pk__in=pks).all())
        detail = self.get_model_view(DetailAdminUtil, self.model, obj)
        return obj, detail

    def post(self, request, object_id, *args, **kwargs):
        """Handle the "compare two versions" form and render the diff page."""
        object_id = unquote(object_id)
        self.obj = self.get_object(object_id)
        if not self.has_change_permission(self.obj):
            raise PermissionDenied
        params = self.request.POST
        if 'version_a' not in params or 'version_b' not in params:
            self.message_user(_("Must select two versions."), 'error')
            return self.get_response()
        version_a_id = params['version_a']
        version_b_id = params['version_b']
        if version_a_id == version_b_id:
            self.message_user(
                _("Please select two different versions."), 'error')
            return self.get_response()
        version_a = get_object_or_404(Version, pk=version_a_id)
        version_b = get_object_or_404(Version, pk=version_b_id)
        diffs = []
        obj_a, detail_a = self.get_version_object(version_a)
        obj_b, detail_b = self.get_version_object(version_b)
        for f in (self.opts.fields + self.opts.many_to_many):
            if is_related_field2(f):
                label = f.opts.verbose_name
            else:
                label = f.verbose_name
            value_a = f.value_from_object(obj_a)
            value_b = f.value_from_object(obj_b)
            is_diff = value_a != value_b
            # Equal-length sequences are compared element-wise so that a
            # list/tuple container mismatch alone does not count as a change.
            if type(value_a) in (list, tuple) and type(value_b) in (list, tuple) \
                    and len(value_a) == len(value_b) and is_diff:
                is_diff = False
                # Bug fix: the original compared value_a[i] with itself, so
                # this loop could never flag a difference. Also use range()
                # instead of xrange(), which does not exist on Python 3.
                for i in range(len(value_a)):
                    if value_a[i] != value_b[i]:
                        is_diff = True
                        break
            if type(value_a) is QuerySet and type(value_b) is QuerySet:
                is_diff = list(value_a) != list(value_b)
            diffs.append((label, detail_a.get_field_result(
                f.name).val, detail_b.get_field_result(f.name).val, is_diff))
        context = super(RevisionListView, self).get_context()
        context.update({
            'object': self.obj,
            'opts': self.opts,
            'version_a': version_a,
            'version_b': version_b,
            'revision_a_url': self.model_admin_url('revision', quote(version_a.object_id), version_a.id),
            'revision_b_url': self.model_admin_url('revision', quote(version_b.object_id), version_b.id),
            'diffs': diffs
        })
        return TemplateResponse(
            self.request, self.revision_diff_template or self.get_template_list('views/revision_diff.html'),
            context)

    @filter_hook
    def get_media(self):
        # Extra assets for the two-version selection UI.
        return super(RevisionListView, self).get_media() + self.vendor('xadmin.plugin.revision.js', 'xadmin.form.css')
class BaseRevisionView(ModelFormAdminView):
    """Model-form view whose initial data comes from a stored revision.

    Subclasses must set ``self.version`` (and ``self.org_obj``) in
    ``init_request`` before the form is prepared.
    """
    @filter_hook
    def get_revision(self):
        """Return the field data captured by the version being shown."""
        return self.version.field_dict
    @filter_hook
    def get_form_datas(self):
        # Bind the form to the live object but seed initial values from the
        # revision so unchanged submits restore the old data.
        datas = {"instance": self.org_obj, "initial": self.get_revision()}
        if self.request_method == 'post':
            datas.update(
                {'data': self.request.POST, 'files': self.request.FILES})
        return datas
    @filter_hook
    def get_context(self):
        context = super(BaseRevisionView, self).get_context()
        context.update({
            'object': self.org_obj
        })
        return context
    @filter_hook
    def get_media(self):
        # JS powering the "Current: ..." diff tooltips on the form.
        return super(BaseRevisionView, self).get_media() + self.vendor('xadmin.plugin.revision.js')
class DiffField(Field):
    """Crispy-forms field wrapper that renders each field inside a tooltip
    container carrying the current (pre-revert) value for comparison."""

    def render(self, form, form_style, context, template_pack=TEMPLATE_PACK, **kwargs):
        pieces = []
        for field in self.fields:
            # pop() mirrors the original: 'orgdata' is consumed on the first
            # field and falls back to '' afterwards.
            current = _('Current: %s') % self.attrs.pop('orgdata', '')
            widget = render_field(field, form, form_style, context,
                                  template_pack=template_pack, attrs=self.attrs)
            pieces.append('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>' % (current, widget))
        return ''.join(pieces)
class RevisionView(BaseRevisionView):
    """Form view that reverts an object to a selected revision."""
    # Template override; falls back to views/revision_form.html.
    revision_form_template = None
    def init_request(self, object_id, version_id):
        """Load the live object and the requested version, then build the form."""
        self.detail = self.get_model_view(
            DetailAdminView, self.model, object_id)
        self.org_obj = self.detail.obj
        # Ensure the version actually belongs to this object.
        self.version = get_object_or_404(
            Version, pk=version_id, object_id=smart_text(self.org_obj.pk))
        self.prepare_form()
    def get_form_helper(self):
        """Wrap every field whose current value differs from the revision in a
        DiffField so the user can see what a revert will change."""
        helper = super(RevisionView, self).get_form_helper()
        diff_fields = {}
        version_data = self.version.field_dict
        for f in self.opts.fields:
            fvalue = f.value_from_object(self.org_obj)
            vvalue = version_data.get(f.name, None)
            # Treat empty-string in the revision as equivalent to None.
            if fvalue is None and vvalue == '':
                vvalue = None
            if is_related_field2(f):
                # FK values are serialized under "<name>_<target-field>".
                vvalue = version_data.get(f.name + '_' + f.rel.get_related_field().name, None)
            if fvalue != vvalue:
                diff_fields[f.name] = self.detail.get_field_result(f.name).val
        for k, v in diff_fields.items():
            helper[k].wrap(DiffField, orgdata=v)
        return helper
    @filter_hook
    def get_context(self):
        context = super(RevisionView, self).get_context()
        context["title"] = _(
            "Revert %s") % force_text(self.model._meta.verbose_name)
        return context
    @filter_hook
    def get_response(self):
        context = self.get_context()
        context.update(self.kwargs or {})
        form_template = self.revision_form_template
        return TemplateResponse(
            self.request, form_template or self.get_template_list(
                'views/revision_form.html'),
            context)
    @filter_hook
    def post_response(self):
        """After a successful revert, confirm and return to the change form."""
        self.message_user(_('The %(model)s "%(name)s" was reverted successfully. You may edit it again below.') %
                          {"model": force_text(self.opts.verbose_name), "name": smart_text(self.new_obj)}, 'success')
        return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk))
class RecoverView(BaseRevisionView):
    """Form view that recreates a deleted object from one of its versions."""
    # Template override; falls back to views/recover_form.html.
    recover_form_template = None
    def init_request(self, version_id):
        """Load the deleted object's version data and build the form."""
        if not self.has_change_permission() and not self.has_add_permission():
            raise PermissionDenied
        self.version = get_object_or_404(Version, pk=version_id)
        # The object no longer exists in the DB; rebuild it from the version.
        self.org_obj = self.version._object_version.object
        self.prepare_form()
    @filter_hook
    def get_context(self):
        context = super(RecoverView, self).get_context()
        context["title"] = _("Recover %s") % self.version.object_repr
        return context
    @filter_hook
    def get_response(self):
        context = self.get_context()
        context.update(self.kwargs or {})
        form_template = self.recover_form_template
        return TemplateResponse(
            self.request, form_template or self.get_template_list(
                'views/recover_form.html'),
            context)
    @filter_hook
    def post_response(self):
        """After a successful recovery, confirm and go to the change form."""
        self.message_user(_('The %(model)s "%(name)s" was recovered successfully. You may edit it again below.') %
                          {"model": force_text(self.opts.verbose_name), "name": smart_text(self.new_obj)}, 'success')
        return HttpResponseRedirect(self.model_admin_url('change', self.new_obj.pk))
class InlineDiffField(Field):
    """Crispy-forms field for inline revision forms: wraps each field whose
    live value differs from the revision's initial data in a tooltip showing
    the current (database) value."""
    def render(self, form, form_style, context, template_pack=TEMPLATE_PACK, **kwargs):
        html = ''
        instance = form.instance
        if not instance.pk:
            # New (unsaved) rows have nothing to diff against.
            # NOTE(review): template_pack is not forwarded here, unlike the
            # render_field call below -- confirm whether that is intentional.
            return super(InlineDiffField, self).render(form, form_style, context)
        initial = form.initial
        opts = instance._meta
        detail = form.detail
        for field in self.fields:
            f = opts.get_field(field)
            f_html = render_field(field, form, form_style, context,
                                  template_pack=template_pack, attrs=self.attrs)
            if f.value_from_object(instance) != initial.get(field, None):
                # Value changed since the revision: show the live value.
                current_val = detail.get_field_result(f.name).val
                html += ('<div class="diff_field" rel="tooltip"><textarea class="org-data" style="display:none;">%s</textarea>%s</div>'
                         % (_('Current: %s') % current_val, f_html))
            else:
                html += f_html
        return html
# inline hack plugin
class InlineRevisionPlugin(BaseAdminPlugin):
    """Patches inline formsets on the revert/recover forms so they are
    pre-filled from the revision data instead of the current database rows."""

    def get_related_versions(self, obj, version, formset):
        """Retrieves all the related Version objects for the given FormSet,
        keyed by serialized object id."""
        object_id = obj.pk
        # Get the fk name.
        try:
            fk_name = formset.fk.name + '_' + formset.fk.rel.get_related_field().name
        except AttributeError:
            # This is a GenericInlineFormset, or similar.
            fk_name = formset.ct_fk_field.name
        # Look up the revision data.
        revision_versions = version.revision.version_set.all()
        related_versions = dict([(related_version.object_id, related_version)
                                 for related_version in revision_versions
                                 if ContentType.objects.get_for_id(related_version.content_type_id).model_class() == formset.model
                                 and smart_text(related_version.field_dict[fk_name]) == smart_text(object_id)])
        return related_versions

    def _hack_inline_formset_initial(self, revision_view, formset):
        """Hacks the given formset to contain the correct initial data."""
        # Now we hack it to push in the data from the revision!
        initial = []
        related_versions = self.get_related_versions(
            revision_view.org_obj, revision_view.version, formset)
        formset.related_versions = related_versions
        for related_obj in formset.queryset:
            if smart_text(related_obj.pk) in related_versions:
                initial.append(
                    related_versions.pop(smart_text(related_obj.pk)).field_dict)
            else:
                # Row exists now but not in the revision: mark it for deletion.
                initial_data = model_to_dict(related_obj)
                initial_data["DELETE"] = True
                initial.append(initial_data)
        for related_version in related_versions.values():
            # Rows that existed in the revision but are gone now: re-add them
            # without a pk so they are recreated on save.
            initial_row = related_version.field_dict
            pk_name = ContentType.objects.get_for_id(
                related_version.content_type_id).model_class()._meta.pk.name
            del initial_row[pk_name]
            initial.append(initial_row)
        # Reconstruct the forms with the new revision data.
        formset.initial = initial
        # Bug fix: use range() -- xrange does not exist on Python 3, which
        # this module otherwise supports (see the six.PY3 check below).
        formset.forms = [formset._construct_form(
            n) for n in range(len(initial))]
        # Hack the formset to force a save of everything.

        def get_changed_data(form):
            return [field.name for field in form.fields]
        for form in formset.forms:
            form.has_changed = lambda: True
            # NOTE(review): partial(..., form=form) yields a zero-argument
            # callable; confirm callers invoke _get_changed_data() bare.
            form._get_changed_data = partial(get_changed_data, form=form)

        def total_form_count_hack(count):
            return lambda: count
        formset.total_form_count = total_form_count_hack(len(initial))
        if self.request.method == 'GET' and formset.helper and formset.helper.layout:
            # Wrap every plain field name in the helper layout with
            # InlineDiffField so changed values get the tooltip treatment.
            helper = formset.helper
            cls_str = str if six.PY3 else basestring
            helper.filter(cls_str).wrap(InlineDiffField)
            fake_admin_class = type(str('%s%sFakeAdmin' % (self.opts.app_label, self.opts.model_name)), (object, ), {'model': self.model})
            for form in formset.forms:
                instance = form.instance
                if instance.pk:
                    form.detail = self.get_view(
                        DetailAdminUtil, fake_admin_class, instance)

    def instance_form(self, formset, **kwargs):
        """Plugin hook: patch the formset when the outer view is a
        revision/recover form (i.e. it has version and org_obj)."""
        admin_view = self.admin_view.admin_view
        if hasattr(admin_view, 'version') and hasattr(admin_view, 'org_obj'):
            self._hack_inline_formset_initial(admin_view, formset)
        return formset
class VersionInline(object):
    """Inline showing a revision's Version rows on the Revision admin page."""
    model = Version
    extra = 0
    style = 'accordion'
class ReversionAdmin(object):
    """xadmin options for browsing Revision objects themselves."""
    model_icon = 'fa fa-exchange'
    list_display = ('__str__', 'date_created', 'user', 'comment')
    list_display_links = ('__str__',)
    list_filter = ('date_created', 'user')
    inlines = [VersionInline]
# Make Revision objects browsable in xadmin.
site.register(Revision, ReversionAdmin)
# Per-model URLs: recover list/form and revision list/revert form.
site.register_modelview(
    r'^recover/$', RecoverListView, name='%s_%s_recoverlist')
site.register_modelview(
    r'^recover/([^/]+)/$', RecoverView, name='%s_%s_recover')
site.register_modelview(
    r'^([^/]+)/revision/$', RevisionListView, name='%s_%s_revisionlist')
site.register_modelview(
    r'^([^/]+)/revision/([^/]+)/$', RevisionView, name='%s_%s_revision')
# Hook the revision-recording plugins into the relevant admin views.
site.register_plugin(ReversionPlugin, ListAdminView)
site.register_plugin(ReversionPlugin, ModelFormAdminView)
site.register_plugin(ReversionPlugin, DeleteAdminView)
site.register_plugin(InlineRevisionPlugin, InlineModelAdmin)
site.register_plugin(ActionRevisionPlugin, BaseActionView)
|
|
import datetime
from collections import OrderedDict
import warnings
import numpy as np
from numpy import array, nan
import pandas as pd
import pytest
from numpy.testing import assert_almost_equal, assert_allclose
from pvlib import irradiance
from .conftest import (
assert_frame_equal,
assert_series_equal,
requires_ephem,
requires_numba
)
# fixtures create realistic test input data
# test input data generated at Location(32.2, -111, 'US/Arizona', 700)
# test input data is hard coded to avoid dependencies on other parts of pvlib
@pytest.fixture
def times():
    """Four 6-hourly timestamps across one day in Tucson (tz-aware)."""
    # must include night values
    return pd.date_range(start='20140624', freq='6H', periods=4,
                         tz='US/Arizona')
@pytest.fixture
def irrad_data(times):
    """Hard-coded GHI/DNI/DHI (W/m^2) matching the `times` fixture."""
    return pd.DataFrame(np.array(
        [[ 0. , 0. , 0. ],
         [ 79.73860422, 316.1949056 , 40.46149818],
         [1042.48031487, 939.95469881, 118.45831879],
         [ 257.20751138, 646.22886049, 62.03376265]]),
        columns=['ghi', 'dni', 'dhi'], index=times)
@pytest.fixture
def ephem_data(times):
    """Hard-coded solar-position output matching the `times` fixture."""
    return pd.DataFrame(np.array(
        [[124.0390863 , 124.0390863 , -34.0390863 , -34.0390863 ,
          352.69550699, -2.36677158],
         [ 82.85457044, 82.97705621, 7.14542956, 7.02294379,
          66.71410338, -2.42072165],
         [ 10.56413562, 10.56725766, 79.43586438, 79.43274234,
          144.76567754, -2.47457321],
         [ 72.41687122, 72.46903556, 17.58312878, 17.53096444,
          287.04104128, -2.52831909]]),
        columns=['apparent_zenith', 'zenith', 'apparent_elevation',
                 'elevation', 'azimuth', 'equation_of_time'],
        index=times)
@pytest.fixture
def dni_et(times):
    """Extraterrestrial DNI (W/m^2), constant for the single test day."""
    return np.array(
        [1321.1655834833093, 1321.1655834833093, 1321.1655834833093,
         1321.1655834833093])
@pytest.fixture
def relative_airmass(times):
    """Relative airmass matching `times` (NaN for the night value)."""
    return pd.Series([np.nan, 7.58831596, 1.01688136, 3.27930443], times)
# Shared inputs for the extraterrestrial-radiation tests, kept at module
# level for readability: one calendar date (2016-10-26) expressed in every
# accepted input type, plus the expected dni_extra value.
timestamp = pd.Timestamp('2016-10-26')
dt_index = pd.DatetimeIndex([timestamp])
doy = timestamp.dayofyear
dt_date = timestamp.date()
dt_datetime = datetime.datetime(dt_date.year, dt_date.month, dt_date.day)
dt_np64 = np.datetime64(dt_datetime)
value = 1383.636203
@pytest.mark.parametrize('testval, expected', [
    (doy, value),
    (np.float64(doy), value),
    (dt_date, value),
    (dt_datetime, value),
    (dt_np64, value),
    (np.array([doy]), np.array([value])),
    (pd.Series([doy]), np.array([value])),
    (dt_index, pd.Series([value], index=dt_index)),
    (timestamp, value)
])
@pytest.mark.parametrize('method', [
    'asce', 'spencer', 'nrel', pytest.param('pyephem', marks=requires_ephem)])
def test_get_extra_radiation(testval, expected, method):
    """All accepted time/doy input types give the same dni_extra per method."""
    out = irradiance.get_extra_radiation(testval, method=method)
    assert_allclose(out, expected, atol=10)
def test_get_extra_radiation_epoch_year():
    """epoch_year shifts the nrel result for a bare day-of-year input."""
    out = irradiance.get_extra_radiation(doy, method='nrel', epoch_year=2012)
    assert_allclose(out, 1382.4926804890767, atol=0.1)
@requires_numba
def test_get_extra_radiation_nrel_numba(times):
    """The numba-accelerated nrel path matches the reference values."""
    with warnings.catch_warnings():
        # don't warn on method reload or num threads
        warnings.simplefilter("ignore")
        result = irradiance.get_extra_radiation(
            times, method='nrel', how='numba', numthreads=4)
        # and reset to no-numba state
        irradiance.get_extra_radiation(times, method='nrel')
    assert_allclose(result,
                    [1322.332316, 1322.296282, 1322.261205, 1322.227091])
def test_get_extra_radiation_invalid():
    """An unknown method name raises ValueError."""
    with pytest.raises(ValueError):
        irradiance.get_extra_radiation(300, method='invalid')
def test_grounddiffuse_simple_float():
    """Scalar tilt/GHI input returns the expected scalar."""
    result = irradiance.get_ground_diffuse(40, 900)
    assert_allclose(result, 26.32000014911496)
def test_grounddiffuse_simple_series(irrad_data):
    """Series input returns a named Series."""
    ground_irrad = irradiance.get_ground_diffuse(40, irrad_data['ghi'])
    assert ground_irrad.name == 'diffuse_ground'
def test_grounddiffuse_albedo_0(irrad_data):
    """Zero albedo yields zero ground-reflected irradiance everywhere."""
    ground_irrad = irradiance.get_ground_diffuse(
        40, irrad_data['ghi'], albedo=0)
    assert 0 == ground_irrad.all()
def test_grounddiffuse_albedo_invalid_surface(irrad_data):
    """An unknown surface_type raises KeyError."""
    with pytest.raises(KeyError):
        irradiance.get_ground_diffuse(
            40, irrad_data['ghi'], surface_type='invalid')
def test_grounddiffuse_albedo_surface(irrad_data):
    """A named surface_type looks up its albedo and scales the result."""
    result = irradiance.get_ground_diffuse(40, irrad_data['ghi'],
                                           surface_type='sand')
    assert_allclose(result, [0, 3.731058, 48.778813, 12.035025], atol=1e-4)
def test_isotropic_float():
    """Scalar input to the isotropic sky-diffuse model."""
    result = irradiance.isotropic(40, 100)
    assert_allclose(result, 88.30222215594891)
def test_isotropic_series(irrad_data):
    """Series input to the isotropic sky-diffuse model."""
    result = irradiance.isotropic(40, irrad_data['dhi'])
    assert_allclose(result, [0, 35.728402, 104.601328, 54.777191], atol=1e-4)
def test_klucher_series_float():
    """Float and pd.Series inputs give identical klucher results."""
    # klucher inputs
    surface_tilt, surface_azimuth = 40.0, 180.0
    dhi, ghi = 100.0, 900.0
    solar_zenith, solar_azimuth = 20.0, 180.0
    # expect same result for floats and pd.Series
    expected = irradiance.klucher(
        surface_tilt, surface_azimuth,
        pd.Series(dhi), pd.Series(ghi),
        pd.Series(solar_zenith), pd.Series(solar_azimuth)
    )  # 94.99429931664851
    result = irradiance.klucher(
        surface_tilt, surface_azimuth, dhi, ghi, solar_zenith, solar_azimuth
    )
    assert_allclose(result, expected[0])
def test_klucher_series(irrad_data, ephem_data):
    """klucher on Series matches reference values and the ndarray path."""
    result = irradiance.klucher(40, 180, irrad_data['dhi'], irrad_data['ghi'],
                                ephem_data['apparent_zenith'],
                                ephem_data['azimuth'])
    # pvlib matlab 1.4 does not contain the max(cos_tt, 0) correction
    # so, these values are different
    assert_allclose(result, [0., 36.789794, 109.209347, 56.965916], atol=1e-4)
    # expect same result for np.array and pd.Series
    expected = irradiance.klucher(
        40, 180, irrad_data['dhi'].values, irrad_data['ghi'].values,
        ephem_data['apparent_zenith'].values, ephem_data['azimuth'].values
    )
    assert_allclose(result, expected, atol=1e-4)
def test_haydavies(irrad_data, ephem_data, dni_et):
    """haydavies sky-diffuse matches the matlab 1.4 reference values."""
    result = irradiance.haydavies(
        40, 180, irrad_data['dhi'], irrad_data['dni'], dni_et,
        ephem_data['apparent_zenith'], ephem_data['azimuth'])
    # values from matlab 1.4 code
    assert_allclose(result, [0, 27.1775, 102.9949, 33.1909], atol=1e-4)
def test_reindl(irrad_data, ephem_data, dni_et):
    """reindl sky-diffuse matches the matlab 1.4 reference values."""
    result = irradiance.reindl(
        40, 180, irrad_data['dhi'], irrad_data['dni'], irrad_data['ghi'],
        dni_et, ephem_data['apparent_zenith'], ephem_data['azimuth'])
    # values from matlab 1.4 code
    assert_allclose(result, [0., 27.9412, 104.1317, 34.1663], atol=1e-4)
def test_king(irrad_data, ephem_data):
    """king sky-diffuse matches reference values."""
    result = irradiance.king(40, irrad_data['dhi'], irrad_data['ghi'],
                             ephem_data['apparent_zenith'])
    assert_allclose(result, [0, 44.629352, 115.182626, 79.719855], atol=1e-4)
def test_perez(irrad_data, ephem_data, dni_et, relative_airmass):
    """perez propagates NaN DNI and matches reference sky-diffuse values."""
    dni = irrad_data['dni'].copy()
    dni.iloc[2] = np.nan
    out = irradiance.perez(40, 180, irrad_data['dhi'], dni,
                           dni_et, ephem_data['apparent_zenith'],
                           ephem_data['azimuth'], relative_airmass)
    expected = pd.Series(np.array(
        [ 0. , 31.46046871, np.nan, 45.45539877]),
        index=irrad_data.index)
    assert_series_equal(out, expected, check_less_precise=2)
def test_perez_components(irrad_data, ephem_data, dni_et, relative_airmass):
    """return_components=True splits sky_diffuse into parts that sum back."""
    dni = irrad_data['dni'].copy()
    dni.iloc[2] = np.nan
    out = irradiance.perez(40, 180, irrad_data['dhi'], dni,
                           dni_et, ephem_data['apparent_zenith'],
                           ephem_data['azimuth'], relative_airmass,
                           return_components=True)
    expected = pd.DataFrame(np.array(
        [[ 0. , 31.46046871, np.nan, 45.45539877],
         [ 0. , 26.84138589, np.nan, 31.72696071],
         [ 0. , 0. , np.nan, 4.47966439],
         [ 0. , 4.62212181, np.nan, 9.25316454]]).T,
        columns=['sky_diffuse', 'isotropic', 'circumsolar', 'horizon'],
        index=irrad_data.index
    )
    expected_for_sum = expected['sky_diffuse'].copy()
    expected_for_sum.iloc[2] = 0
    sum_components = out.iloc[:, 1:].sum(axis=1)
    sum_components.name = 'sky_diffuse'
    assert_frame_equal(out, expected, check_less_precise=2)
    assert_series_equal(sum_components, expected_for_sum, check_less_precise=2)
def test_perez_negative_horizon():
    """sky_diffuse can drop below isotropic when the horizon term is negative."""
    times = pd.date_range(start='20190101 11:30:00', freq='1H',
                          periods=5, tz='US/Central')
    # Avoid test dependencies on functionality not being tested by hard-coding
    # the inputs. This data corresponds to Goodwin Creek in the afternoon on
    # 1/1/2019.
    # dni_e is slightly rounded from irradiance.get_extra_radiation
    # airmass from atmosphere.get_relative_airmass
    inputs = pd.DataFrame(np.array(
        [[ 158, 19, 1, 0, 0],
         [ 249, 165, 136, 93, 50],
         [ 57.746951, 57.564205, 60.813841, 66.989435, 75.353368],
         [ 171.003315, 187.346924, 202.974357, 216.725599, 228.317233],
         [1414, 1414, 1414, 1414, 1414],
         [ 1.869315, 1.859981, 2.044429, 2.544943, 3.900136]]).T,
        columns=['dni', 'dhi', 'solar_zenith',
                 'solar_azimuth', 'dni_extra', 'airmass'],
        index=times
    )
    out = irradiance.perez(34, 180, inputs['dhi'], inputs['dni'],
                           inputs['dni_extra'], inputs['solar_zenith'],
                           inputs['solar_azimuth'], inputs['airmass'],
                           model='allsitescomposite1990',
                           return_components=True)
    # sky_diffuse can be less than isotropic under certain conditions as
    # horizon goes negative
    expected = pd.DataFrame(np.array(
        [[281.410185, 152.20879, 123.867898, 82.836412, 43.517015],
         [166.785419, 142.24475, 119.173875, 83.525150, 45.725931],
         [113.548755, 16.09757, 9.956174, 3.142467, 0],
         [ 1.076010, -6.13353, -5.262151, -3.831230, -2.208923]]).T,
        columns=['sky_diffuse', 'isotropic', 'circumsolar', 'horizon'],
        index=times
    )
    expected_for_sum = expected['sky_diffuse'].copy()
    sum_components = out.iloc[:, 1:].sum(axis=1)
    sum_components.name = 'sky_diffuse'
    assert_frame_equal(out, expected, check_less_precise=2)
    assert_series_equal(sum_components, expected_for_sum, check_less_precise=2)
def test_perez_arrays(irrad_data, ephem_data, dni_et, relative_airmass):
    """ndarray inputs return an ndarray with the same values as Series."""
    dni = irrad_data['dni'].copy()
    dni.iloc[2] = np.nan
    out = irradiance.perez(40, 180, irrad_data['dhi'].values, dni.values,
                           dni_et, ephem_data['apparent_zenith'].values,
                           ephem_data['azimuth'].values,
                           relative_airmass.values)
    expected = np.array(
        [ 0. , 31.46046871, np.nan, 45.45539877])
    assert_allclose(out, expected, atol=1e-2)
    assert isinstance(out, np.ndarray)
def test_perez_scalar():
    """Scalar inputs produce the reference value (as a 0-d array)."""
    # copied values from fixtures
    out = irradiance.perez(40, 180, 118.45831879, 939.95469881,
                           1321.1655834833093, 10.56413562, 144.76567754,
                           1.01688136)
    # this will fail. out is ndarray with ndim == 0. fix in future version.
    # assert np.isscalar(out)
    assert_allclose(out, 109.084332)
@pytest.mark.parametrize('model', ['isotropic', 'klucher', 'haydavies',
                                   'reindl', 'king', 'perez'])
def test_sky_diffuse_zenith_close_to_90(model):
    """All models stay finite and bounded near zenith=90 (GH 432)."""
    # GH 432
    sky_diffuse = irradiance.get_sky_diffuse(
        30, 180, 89.999, 230,
        dni=10, ghi=51, dhi=50, dni_extra=1360, airmass=12, model=model)
    assert sky_diffuse < 100
def test_get_sky_diffuse_model_invalid():
    """An unknown model name raises ValueError."""
    with pytest.raises(ValueError):
        irradiance.get_sky_diffuse(
            30, 180, 0, 180, 1000, 1100, 100, dni_extra=1360, airmass=1,
            model='invalid')
def test_get_sky_diffuse_missing_dni_extra():
    """Models that need dni_extra raise a descriptive ValueError without it."""
    msg = 'dni_extra is required'
    with pytest.raises(ValueError, match=msg):
        irradiance.get_sky_diffuse(
            30, 180, 0, 180, 1000, 1100, 100, airmass=1,
            model='haydavies')
def test_get_sky_diffuse_missing_airmass(irrad_data, ephem_data, dni_et):
    """perez computes airmass internally when it is not supplied."""
    # test assumes location is Tucson, AZ
    # calculated airmass should be the equivalent to fixture airmass
    dni = irrad_data['dni'].copy()
    dni.iloc[2] = np.nan
    out = irradiance.get_sky_diffuse(
        40, 180, ephem_data['apparent_zenith'], ephem_data['azimuth'], dni,
        irrad_data['ghi'], irrad_data['dhi'], dni_et, model='perez')
    expected = pd.Series(np.array(
        [0., 31.46046871, np.nan, 45.45539877]),
        index=irrad_data.index)
    assert_series_equal(out, expected, check_less_precise=2)
def test_campbell_norman():
    """campbell_norman decomposition matches reference GHI/DNI/DHI."""
    expected = pd.DataFrame(np.array(
        [[863.859736967, 653.123094076, 220.65905025]]),
        columns=['ghi', 'dni', 'dhi'],
        index=[0])
    out = irradiance.campbell_norman(
        pd.Series([10]), pd.Series([0.5]), pd.Series([109764.21013135818]),
        dni_extra=1400)
    assert_frame_equal(out, expected)
def test_get_total_irradiance(irrad_data, ephem_data, dni_et,
                              relative_airmass):
    """Every transposition model returns the five standard POA columns."""
    models = ['isotropic', 'klucher',
              'haydavies', 'reindl', 'king', 'perez']
    for model in models:
        total = irradiance.get_total_irradiance(
            32, 180,
            ephem_data['apparent_zenith'], ephem_data['azimuth'],
            dni=irrad_data['dni'], ghi=irrad_data['ghi'],
            dhi=irrad_data['dhi'],
            dni_extra=dni_et, airmass=relative_airmass,
            model=model,
            surface_type='urban')
        assert total.columns.tolist() == ['poa_global', 'poa_direct',
                                          'poa_diffuse', 'poa_sky_diffuse',
                                          'poa_ground_diffuse']
@pytest.mark.parametrize('model', ['isotropic', 'klucher',
                                   'haydavies', 'reindl', 'king', 'perez'])
def test_get_total_irradiance_scalars(model):
    """Scalar inputs return the standard keys with no NaN values."""
    total = irradiance.get_total_irradiance(
        32, 180,
        10, 180,
        dni=1000, ghi=1100,
        dhi=100,
        dni_extra=1400, airmass=1,
        model=model,
        surface_type='urban')
    assert list(total.keys()) == ['poa_global', 'poa_direct',
                                  'poa_diffuse', 'poa_sky_diffuse',
                                  'poa_ground_diffuse']
    # test that none of the values are nan
    assert np.isnan(np.array(list(total.values()))).sum() == 0
def test_get_total_irradiance_missing_dni_extra():
    """haydavies without dni_extra raises a descriptive ValueError."""
    msg = 'dni_extra is required'
    with pytest.raises(ValueError, match=msg):
        irradiance.get_total_irradiance(
            32, 180,
            10, 180,
            dni=1000, ghi=1100,
            dhi=100,
            model='haydavies')
def test_get_total_irradiance_missing_airmass():
    """perez computes airmass internally when it is not supplied."""
    total = irradiance.get_total_irradiance(
        32, 180,
        10, 180,
        dni=1000, ghi=1100,
        dhi=100,
        dni_extra=1400,
        model='perez')
    assert list(total.keys()) == ['poa_global', 'poa_direct',
                                  'poa_diffuse', 'poa_sky_diffuse',
                                  'poa_ground_diffuse']
def test_poa_components(irrad_data, ephem_data, dni_et, relative_airmass):
    """poa_components assembles direct + sky-diffuse + ground-diffuse into
    the five standard POA columns."""
    aoi = irradiance.aoi(40, 180, ephem_data['apparent_zenith'],
                         ephem_data['azimuth'])
    gr_sand = irradiance.get_ground_diffuse(40, irrad_data['ghi'],
                                            surface_type='sand')
    diff_perez = irradiance.perez(
        40, 180, irrad_data['dhi'], irrad_data['dni'], dni_et,
        ephem_data['apparent_zenith'], ephem_data['azimuth'], relative_airmass)
    out = irradiance.poa_components(
        aoi, irrad_data['dni'], diff_perez, gr_sand)
    expected = pd.DataFrame(np.array(
        [[ 0. , -0. , 0. , 0. ,
           0. ],
         [ 35.19456561, 0. , 35.19456561, 31.4635077 ,
           3.73105791],
         [956.18253696, 798.31939281, 157.86314414, 109.08433162,
          48.77881252],
         [ 90.99624896, 33.50143401, 57.49481495, 45.45978964,
          12.03502531]]),
        columns=['poa_global', 'poa_direct', 'poa_diffuse', 'poa_sky_diffuse',
                 'poa_ground_diffuse'],
        index=irrad_data.index)
    assert_frame_equal(out, expected)
@pytest.mark.parametrize('pressure,expected', [
    (93193, [[830.46567, 0.79742, 0.93505],
             [676.09497, 0.63776, 3.02102]]),
    (None, [[868.72425, 0.79742, 1.01664],
            [680.66679, 0.63776, 3.28463]]),
    (101325, [[868.72425, 0.79742, 1.01664],
              [680.66679, 0.63776, 3.28463]])
])
def test_disc_value(pressure, expected):
    """DISC regression values at several pressures; pressure=None must
    reproduce the sea-level (101325 Pa) result."""
    # see GH 449 for pressure=None vs. 101325.
    columns = ['dni', 'kt', 'airmass']
    times = pd.DatetimeIndex(['2014-06-24T1200', '2014-06-24T1800'],
                             tz='America/Phoenix')
    ghi = pd.Series([1038.62, 254.53], index=times)
    zenith = pd.Series([10.567, 72.469], index=times)
    out = irradiance.disc(ghi, zenith, times, pressure=pressure)
    expected_values = np.array(expected)
    expected = pd.DataFrame(expected_values, columns=columns, index=times)
    # check the pandas dataframe. check_less_precise is weird
    assert_frame_equal(out, expected, check_less_precise=True)
    # use np.assert_allclose to check values more clearly
    assert_allclose(out.values, expected_values, atol=1e-5)
def test_disc_overirradiance():
    """Pinned DISC output when GHI far exceeds physically plausible levels
    (kt reaches its upper limit of 1)."""
    ghi = np.array([3000])
    zenith = np.full_like(ghi, 0)
    idx = pd.date_range(start='2016-07-19 12:00:00', freq='1s',
                        periods=len(ghi), tz='America/Phoenix')
    result = irradiance.disc(ghi=ghi, solar_zenith=zenith,
                             datetime_or_doy=idx)
    expected = pd.DataFrame(
        np.array([[8.72544336e+02, 1.00000000e+00, 9.99493933e-01]]),
        columns=['dni', 'kt', 'airmass'], index=idx)
    assert_frame_equal(result, expected)
def test_disc_min_cos_zenith_max_zenith():
    """Map DISC behavior near the horizon under the limiting kwargs
    min_cos_zenith, max_zenith and max_airmass."""
    # map out behavior under difficult conditions with various
    # limiting kwargs settings
    columns = ['dni', 'kt', 'airmass']
    times = pd.DatetimeIndex(['2016-07-19 06:11:00'], tz='America/Phoenix')
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times)
    expected = pd.DataFrame(np.array(
        [[0.00000000e+00, 1.16046346e-02, 12.0]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # max_zenith and/or max_airmass keep these results reasonable
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          min_cos_zenith=0)
    expected = pd.DataFrame(np.array(
        [[0.00000000e+00, 1.0, 12.0]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # still get reasonable values because of max_airmass=12 limit
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          max_zenith=100)
    expected = pd.DataFrame(np.array(
        [[0., 1.16046346e-02, 12.0]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # still get reasonable values because of max_airmass=12 limit
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          min_cos_zenith=0, max_zenith=100)
    expected = pd.DataFrame(np.array(
        [[277.50185968, 1.0, 12.0]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # max_zenith keeps this result reasonable
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          min_cos_zenith=0, max_airmass=100)
    expected = pd.DataFrame(np.array(
        [[0.00000000e+00, 1.0, 36.39544757]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # allow zenith to be close to 90 and airmass to be infinite
    # and we get crazy values
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          max_zenith=100, max_airmass=100)
    expected = pd.DataFrame(np.array(
        [[6.68577449e+03, 1.16046346e-02, 3.63954476e+01]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # allow min cos zenith to be 0, zenith to be close to 90,
    # and airmass to be very big and we get even higher DNI values
    out = irradiance.disc(ghi=1.0, solar_zenith=89.99, datetime_or_doy=times,
                          min_cos_zenith=0, max_zenith=100, max_airmass=100)
    expected = pd.DataFrame(np.array(
        [[7.21238390e+03, 1., 3.63954476e+01]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
def test_dirint_value():
    """dirint reproduces pinned DNI values for two daytime points."""
    idx = pd.DatetimeIndex(['2014-06-24T12-0700', '2014-06-24T18-0700'])
    ghi = pd.Series([1038.62, 254.53], index=idx)
    zenith = pd.Series([10.567, 72.469], index=idx)
    result = irradiance.dirint(ghi, zenith, idx, pressure=93193.)
    assert_almost_equal(result.values, np.array([868.8, 699.7]), 1)
def test_dirint_nans():
    """A NaN in any one input series makes the corresponding output NaN."""
    idx = pd.date_range(start='2014-06-24T12-0700', periods=5, freq='6H')
    ghi = pd.Series([np.nan, 1038.62, 1038.62, 1038.62, 1038.62], index=idx)
    zenith = pd.Series([10.567, np.nan, 10.567, 10.567, 10.567], index=idx)
    pressure = pd.Series([93193., 93193., np.nan, 93193., 93193.], index=idx)
    temp_dew = pd.Series([10, 10, 10, np.nan, 10], index=idx)
    result = irradiance.dirint(ghi, zenith, idx, pressure=pressure,
                               temp_dew=temp_dew)
    # only the fully-specified last point produces a number
    assert_almost_equal(result.values,
                        np.array([np.nan, np.nan, np.nan, np.nan, 893.1]), 1)
def test_dirint_tdew():
    """Supplying temp_dew shifts the pinned dirint DNI values."""
    idx = pd.DatetimeIndex(['2014-06-24T12-0700', '2014-06-24T18-0700'])
    ghi = pd.Series([1038.62, 254.53], index=idx)
    zenith = pd.Series([10.567, 72.469], index=idx)
    result = irradiance.dirint(ghi, zenith, idx, pressure=93193.,
                               temp_dew=10)
    assert_almost_equal(result.values, np.array([882.1, 672.6]), 1)
def test_dirint_no_delta_kt():
    """Disabling use_delta_kt_prime changes the pinned dirint DNI values."""
    idx = pd.DatetimeIndex(['2014-06-24T12-0700', '2014-06-24T18-0700'])
    ghi = pd.Series([1038.62, 254.53], index=idx)
    zenith = pd.Series([10.567, 72.469], index=idx)
    result = irradiance.dirint(ghi, zenith, idx, pressure=93193.,
                               use_delta_kt_prime=False)
    assert_almost_equal(result.values, np.array([861.9, 670.4]), 1)
def test_dirint_coeffs():
    """Spot-check individual entries of the DIRINT coefficient table."""
    coeffs = irradiance._get_dirint_coeffs()
    spot_checks = {(0, 0, 0, 0): 0.385230,
                   (0, 1, 2, 1): 0.229970,
                   (3, 2, 6, 3): 1.032260}
    for index, value in spot_checks.items():
        assert coeffs[index] == value
def test_dirint_min_cos_zenith_max_zenith():
    """Map dirint behavior near the horizon under various limiting kwargs."""
    # times don't have any physical relevance
    idx = pd.DatetimeIndex(['2014-06-24T12-0700', '2014-06-24T18-0700'])
    ghi = pd.Series([0, 1], index=idx)
    solar_zenith = pd.Series([90, 89.99], index=idx)
    zeros = pd.Series([0.0, 0.0], index=idx, name='dni')

    # defaults and single limits all clamp the output to zero
    out = irradiance.dirint(ghi, solar_zenith, idx)
    assert_series_equal(out, zeros)

    out = irradiance.dirint(ghi, solar_zenith, idx, min_cos_zenith=0)
    assert_series_equal(out, zeros)

    out = irradiance.dirint(ghi, solar_zenith, idx, max_zenith=90)
    assert_series_equal(out, zeros, check_less_precise=True)

    # relaxing both limits lets the second point produce nonzero DNI
    nonzero = pd.Series([0.0, 144.264507], index=idx, name='dni')

    out = irradiance.dirint(ghi, solar_zenith, idx, min_cos_zenith=0,
                            max_zenith=90)
    assert_series_equal(out, nonzero, check_less_precise=True)

    out = irradiance.dirint(ghi, solar_zenith, idx, min_cos_zenith=0,
                            max_zenith=100)
    assert_series_equal(out, nonzero, check_less_precise=True)
def test_gti_dirint():
    """gti_dirint regression values for defaults and for each optional
    input (calculate_gt_90, pressure, albedo, temp_dew)."""
    times = pd.DatetimeIndex(
        ['2014-06-24T06-0700', '2014-06-24T09-0700', '2014-06-24T12-0700'])
    poa_global = np.array([20, 300, 1000])
    aoi = np.array([100, 70, 10])
    zenith = np.array([80, 45, 20])
    azimuth = np.array([90, 135, 180])
    surface_tilt = 30
    surface_azimuth = 180
    # test defaults
    output = irradiance.gti_dirint(
        poa_global, aoi, zenith, azimuth, times, surface_tilt, surface_azimuth)
    expected_col_order = ['ghi', 'dni', 'dhi']
    expected = pd.DataFrame(array(
        [[21.05796198, 0., 21.05796198],
         [291.40037163, 63.41290679, 246.56067523],
         [931.04078010, 695.94965324, 277.06172442]]),
        columns=expected_col_order, index=times)
    assert_frame_equal(output, expected)
    # test ignore calculate_gt_90
    output = irradiance.gti_dirint(
        poa_global, aoi, zenith, azimuth, times, surface_tilt, surface_azimuth,
        calculate_gt_90=False)
    # the first time has aoi > 90, so its row becomes NaN when skipped
    expected_no_90 = expected.copy()
    expected_no_90.iloc[0, :] = np.nan
    assert_frame_equal(output, expected_no_90)
    # test pressure input
    pressure = 93193.
    output = irradiance.gti_dirint(
        poa_global, aoi, zenith, azimuth, times, surface_tilt, surface_azimuth,
        pressure=pressure)
    expected = pd.DataFrame(array(
        [[21.05796198, 0., 21.05796198],
         [293.21310935, 63.27500913, 248.47092131],
         [932.46756378, 648.05001357, 323.49974813]]),
        columns=expected_col_order, index=times)
    assert_frame_equal(output, expected)
    # test albedo input
    albedo = 0.05
    output = irradiance.gti_dirint(
        poa_global, aoi, zenith, azimuth, times, surface_tilt, surface_azimuth,
        albedo=albedo)
    expected = pd.DataFrame(array(
        [[21.3592591, 0., 21.3592591],
         [294.4985420, 66.25848451, 247.64671830],
         [941.7943404, 727.50552952, 258.16276278]]),
        columns=expected_col_order, index=times)
    assert_frame_equal(output, expected)
    # test temp_dew input
    temp_dew = np.array([70, 80, 20])
    output = irradiance.gti_dirint(
        poa_global, aoi, zenith, azimuth, times, surface_tilt, surface_azimuth,
        temp_dew=temp_dew)
    expected = pd.DataFrame(array(
        [[21.05796198, 0., 21.05796198],
         [295.06070190, 38.20346345, 268.0467738],
         [931.79627208, 689.81549269, 283.5817439]]),
        columns=expected_col_order, index=times)
    assert_frame_equal(output, expected)
def test_erbs():
    """Regression test of the Erbs GHI -> DNI/DHI/kt decomposition."""
    index = pd.DatetimeIndex(['20190101']*3 + ['20190620'])
    ghi = pd.Series([0, 50, 1000, 1000], index=index)
    zenith = pd.Series([120, 85, 10, 10], index=index)
    expected = pd.DataFrame(np.array(
        [[0.00000000e+00, 0.00000000e+00, 0.00000000e+00],
         [9.67192672e+01, 4.15703604e+01, 4.05723511e-01],
         [7.94205651e+02, 2.17860117e+02, 7.18132729e-01],
         [8.42001578e+02, 1.70790318e+02, 7.68214312e-01]]),
        columns=['dni', 'dhi', 'kt'], index=index)
    out = irradiance.erbs(ghi, zenith, index)
    # values are only pinned at the integer level, hence the rounding
    assert_frame_equal(np.round(out, 0), np.round(expected, 0))
def test_erbs_min_cos_zenith_max_zenith():
    """Map Erbs behavior near the horizon under the limiting kwargs
    min_cos_zenith and max_zenith."""
    # map out behavior under difficult conditions with various
    # limiting kwargs settings
    columns = ['dni', 'dhi', 'kt']
    times = pd.DatetimeIndex(['2016-07-19 06:11:00'], tz='America/Phoenix')
    # max_zenith keeps these results reasonable
    out = irradiance.erbs(ghi=1.0, zenith=89.99999,
                          datetime_or_doy=times, min_cos_zenith=0)
    expected = pd.DataFrame(np.array(
        [[0., 1., 1.]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # 4-5 9s will produce bad behavior without max_zenith limit
    out = irradiance.erbs(ghi=1.0, zenith=89.99999,
                          datetime_or_doy=times, max_zenith=100)
    expected = pd.DataFrame(np.array(
        [[6.00115286e+03, 9.98952601e-01, 1.16377640e-02]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # 1-2 9s will produce bad behavior without either limit
    out = irradiance.erbs(ghi=1.0, zenith=89.99, datetime_or_doy=times,
                          min_cos_zenith=0, max_zenith=100)
    expected = pd.DataFrame(np.array(
        [[4.78419761e+03, 1.65000000e-01, 1.00000000e+00]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
    # check default behavior under hardest condition
    out = irradiance.erbs(ghi=1.0, zenith=90, datetime_or_doy=times)
    expected = pd.DataFrame(np.array(
        [[0., 1., 0.01163776]]),
        columns=columns, index=times)
    assert_frame_equal(out, expected)
def test_erbs_all_scalar():
    """erbs accepts all-scalar inputs and returns a dict-like of scalars."""
    out = irradiance.erbs(1000, 10, 180)
    expected = {'dni': 8.42358014e+02,
                'dhi': 1.70439297e+02,
                'kt': 7.68919470e-01}
    for key, value in out.items():
        assert_allclose(value, expected[key], 5)
def test_dirindex(times):
    """dirindex regression values; the output should also stay within ~20%
    of the corresponding dirint values."""
    ghi = pd.Series([0, 0, 1038.62, 254.53], index=times)
    ghi_clearsky = pd.Series(
        np.array([0., 79.73860422, 1042.48031487, 257.20751138]),
        index=times
    )
    dni_clearsky = pd.Series(
        np.array([0., 316.1949056, 939.95469881, 646.22886049]),
        index=times
    )
    zenith = pd.Series(
        np.array([124.0390863, 82.85457044, 10.56413562, 72.41687122]),
        index=times
    )
    pressure = 93193.
    tdew = 10.
    out = irradiance.dirindex(ghi, ghi_clearsky, dni_clearsky,
                              zenith, times, pressure=pressure,
                              temp_dew=tdew)
    dirint_close_values = irradiance.dirint(ghi, zenith, times,
                                            pressure=pressure,
                                            use_delta_kt_prime=True,
                                            temp_dew=tdew).values
    expected_out = np.array([np.nan, 0., 748.31562753, 630.72592644])
    # exact regression comparison
    tolerance = 1e-8
    assert np.allclose(out, expected_out, rtol=tolerance, atol=0,
                       equal_nan=True)
    # loose comparison against dirint
    tol_dirint = 0.2
    assert np.allclose(out.values, dirint_close_values, rtol=tol_dirint, atol=0,
                       equal_nan=True)
def test_dirindex_min_cos_zenith_max_zenith():
    """Map dirindex behavior near the horizon under various limiting kwargs."""
    # times don't have any physical relevance
    idx = pd.DatetimeIndex(['2014-06-24T12-0700', '2014-06-24T18-0700'])
    ghi = pd.Series([0, 1], index=idx)
    ghi_clearsky = pd.Series([0, 1], index=idx)
    dni_clearsky = pd.Series([0, 5], index=idx)
    solar_zenith = pd.Series([90, 89.99], index=idx)
    all_nan = pd.Series([nan, nan], index=idx)

    # defaults and single limits give all-NaN output
    out = irradiance.dirindex(ghi, ghi_clearsky, dni_clearsky, solar_zenith,
                              idx)
    assert_series_equal(out, all_nan)

    out = irradiance.dirindex(ghi, ghi_clearsky, dni_clearsky, solar_zenith,
                              idx, min_cos_zenith=0)
    assert_series_equal(out, all_nan)

    out = irradiance.dirindex(ghi, ghi_clearsky, dni_clearsky, solar_zenith,
                              idx, max_zenith=90)
    assert_series_equal(out, all_nan)

    # relaxing both limits lets the second point through
    out = irradiance.dirindex(ghi, ghi_clearsky, dni_clearsky, solar_zenith,
                              idx, min_cos_zenith=0, max_zenith=100)
    assert_series_equal(out, pd.Series([nan, 5.], index=idx))
def test_dni():
    """dni from GHI/DHI/zenith, with and without clear-sky DNI limiting."""
    ghi = pd.Series([90, 100, 100, 100, 100])
    dhi = pd.Series([100, 90, 50, 50, 50])
    zenith = pd.Series([80, 100, 85, 70, 85])
    clearsky_dni = pd.Series([50, 50, 200, 50, 300])

    # with clearsky_dni, output is limited by clearsky_tolerance * clearsky_dni
    result = irradiance.dni(ghi, dhi, zenith,
                            clearsky_dni=clearsky_dni, clearsky_tolerance=2)
    expected = pd.Series([float('nan'), float('nan'), 400,
                          146.190220008, 573.685662283])
    assert_series_equal(result, expected)

    # without clearsky_dni, no limiting is applied
    result = irradiance.dni(ghi, dhi, zenith)
    expected = pd.Series([float('nan'), float('nan'), 573.685662283,
                          146.190220008, 573.685662283])
    assert_series_equal(result, expected)
@pytest.mark.parametrize(
    'surface_tilt,surface_azimuth,solar_zenith,' +
    'solar_azimuth,aoi_expected,aoi_proj_expected',
    [(0, 0, 0, 0, 0, 1),
     (30, 180, 30, 180, 0, 1),
     (30, 180, 150, 0, 180, -1),
     (90, 0, 30, 60, 75.5224878, 0.25),
     (90, 0, 30, 170, 119.4987042, -0.4924038)])
def test_aoi_and_aoi_projection(surface_tilt, surface_azimuth, solar_zenith,
                                solar_azimuth, aoi_expected,
                                aoi_proj_expected):
    """aoi and aoi_projection agree with hand-computed geometry cases."""
    computed_aoi = irradiance.aoi(surface_tilt, surface_azimuth,
                                  solar_zenith, solar_azimuth)
    assert_allclose(computed_aoi, aoi_expected, atol=1e-5)

    computed_projection = irradiance.aoi_projection(
        surface_tilt, surface_azimuth, solar_zenith, solar_azimuth)
    assert_allclose(computed_projection, aoi_proj_expected, atol=1e-6)
def test_aoi_projection_precision():
    # GH 1185 -- the projection must never exceed 1.0; identical sun and
    # surface orientations should give a value numerically equal to 1.0.
    zenith = 89.26778228223463
    azimuth = 60.932028605997004

    # scalar inputs
    projection = irradiance.aoi_projection(zenith, azimuth, zenith, azimuth)
    assert projection <= 1
    assert np.isclose(projection, 1)

    # array inputs
    zeniths = np.array([zenith])
    azimuths = np.array([azimuth])
    projections = irradiance.aoi_projection(zeniths, azimuths,
                                            zeniths, azimuths)
    assert all(projections <= 1)
    assert all(np.isclose(projections, 1))
    assert projections.dtype == np.dtype('float64')
@pytest.fixture
def airmass_kt():
    """Airmass values spanning the DISC validity range plus one beyond it."""
    # disc algorithm stopped at am=12. test am > 12 for out of range behavior
    return np.array([1, 5, 12, 20])
def test_kt_kt_prime_factor(airmass_kt):
    """Regression check of the kt -> kt' conversion factor vs. airmass."""
    result = irradiance._kt_kt_prime_factor(airmass_kt)
    expected = np.array([0.999971, 0.723088, 0.548811, 0.471068])
    assert_allclose(result, expected, atol=1e-5)
def test_clearsky_index():
    """Check clearsky_index over arrays, scalars and Series, with and
    without max_clearsky_index."""
    ghi = np.array([-1., 0., 1., 500., 1000., np.nan])
    ghi_measured, ghi_modeled = np.meshgrid(ghi, ghi)
    # default max_clearsky_index
    with np.errstate(invalid='ignore', divide='ignore'):
        out = irradiance.clearsky_index(ghi_measured, ghi_modeled)
    expected = np.array(
        [[1., 0., 0., 0., 0., np.nan],
         [0., 0., 0., 0., 0., np.nan],
         [0., 0., 1., 2., 2., np.nan],
         [0., 0., 0.002, 1., 2., np.nan],
         [0., 0., 0.001, 0.5, 1., np.nan],
         [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]])
    assert_allclose(out, expected, atol=0.001)
    # specify max_clearsky_index
    with np.errstate(invalid='ignore', divide='ignore'):
        out = irradiance.clearsky_index(ghi_measured, ghi_modeled,
                                        max_clearsky_index=1.5)
    expected = np.array(
        [[1., 0., 0., 0., 0., np.nan],
         [0., 0., 0., 0., 0., np.nan],
         [0., 0., 1., 1.5, 1.5, np.nan],
         [0., 0., 0.002, 1., 1.5, np.nan],
         [0., 0., 0.001, 0.5, 1., np.nan],
         [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]])
    assert_allclose(out, expected, atol=0.001)
    # scalars
    out = irradiance.clearsky_index(10, 1000)
    expected = 0.01
    assert_allclose(out, expected, atol=0.001)
    # series
    times = pd.date_range(start='20180601', periods=2, freq='12H')
    ghi_measured = pd.Series([100, 500], index=times)
    ghi_modeled = pd.Series([500, 1000], index=times)
    out = irradiance.clearsky_index(ghi_measured, ghi_modeled)
    expected = pd.Series([0.2, 0.5], index=times)
    assert_series_equal(out, expected)
def test_clearness_index():
    """Check clearness_index over arrays, scalars and Series, with and
    without min_cos_zenith and max_clearness_index."""
    ghi = np.array([-1, 0, 1, 1000])
    solar_zenith = np.array([180, 90, 89.999, 0])
    ghi, solar_zenith = np.meshgrid(ghi, solar_zenith)
    # default min_cos_zenith
    out = irradiance.clearness_index(ghi, solar_zenith, 1370)
    # np.set_printoptions(precision=3, floatmode='maxprec', suppress=True)
    expected = np.array(
        [[0., 0., 0.011, 2.],
         [0., 0., 0.011, 2.],
         [0., 0., 0.011, 2.],
         [0., 0., 0.001, 0.73]])
    assert_allclose(out, expected, atol=0.001)
    # specify min_cos_zenith
    with np.errstate(invalid='ignore', divide='ignore'):
        out = irradiance.clearness_index(ghi, solar_zenith, 1400,
                                         min_cos_zenith=0)
    expected = np.array(
        [[0., nan, 2., 2.],
         [0., 0., 2., 2.],
         [0., 0., 2., 2.],
         [0., 0., 0.001, 0.714]])
    assert_allclose(out, expected, atol=0.001)
    # specify max_clearness_index
    out = irradiance.clearness_index(ghi, solar_zenith, 1370,
                                     max_clearness_index=0.82)
    expected = np.array(
        [[0., 0., 0.011, 0.82],
         [0., 0., 0.011, 0.82],
         [0., 0., 0.011, 0.82],
         [0., 0., 0.001, 0.73]])
    assert_allclose(out, expected, atol=0.001)
    # specify min_cos_zenith and max_clearness_index
    with np.errstate(invalid='ignore', divide='ignore'):
        out = irradiance.clearness_index(ghi, solar_zenith, 1400,
                                         min_cos_zenith=0,
                                         max_clearness_index=0.82)
    expected = np.array(
        [[0., nan, 0.82, 0.82],
         [0., 0., 0.82, 0.82],
         [0., 0., 0.82, 0.82],
         [0., 0., 0.001, 0.714]])
    assert_allclose(out, expected, atol=0.001)
    # scalars
    out = irradiance.clearness_index(1000, 10, 1400)
    expected = 0.725
    assert_allclose(out, expected, atol=0.001)
    # series
    times = pd.date_range(start='20180601', periods=2, freq='12H')
    ghi = pd.Series([0, 1000], index=times)
    solar_zenith = pd.Series([90, 0], index=times)
    extra_radiation = pd.Series([1360, 1400], index=times)
    out = irradiance.clearness_index(ghi, solar_zenith, extra_radiation)
    expected = pd.Series([0, 0.714285714286], index=times)
    assert_series_equal(out, expected)
def test_clearness_index_zenith_independent(airmass_kt):
    """Check zenith-independent clearness index (kt') over arrays, scalars
    and Series, with and without max_clearness_index."""
    clearness_index = np.array([-1, 0, .1, 1])
    clearness_index, airmass_kt = np.meshgrid(clearness_index, airmass_kt)
    out = irradiance.clearness_index_zenith_independent(clearness_index,
                                                        airmass_kt)
    expected = np.array(
        [[0., 0., 0.1, 1.],
         [0., 0., 0.138, 1.383],
         [0., 0., 0.182, 1.822],
         [0., 0., 0.212, 2.]])
    assert_allclose(out, expected, atol=0.001)
    # test max_clearness_index
    out = irradiance.clearness_index_zenith_independent(
        clearness_index, airmass_kt, max_clearness_index=0.82)
    expected = np.array(
        [[0., 0., 0.1, 0.82],
         [0., 0., 0.138, 0.82],
         [0., 0., 0.182, 0.82],
         [0., 0., 0.212, 0.82]])
    assert_allclose(out, expected, atol=0.001)
    # scalars
    out = irradiance.clearness_index_zenith_independent(.4, 2)
    expected = 0.443
    assert_allclose(out, expected, atol=0.001)
    # series
    times = pd.date_range(start='20180601', periods=2, freq='12H')
    clearness_index = pd.Series([0, .5], index=times)
    airmass = pd.Series([np.nan, 2], index=times)
    out = irradiance.clearness_index_zenith_independent(clearness_index,
                                                        airmass)
    expected = pd.Series([np.nan, 0.553744437562], index=times)
    assert_series_equal(out, expected)
|
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUEventLogsFetcher
from bambou import NURESTObject
class NULicense(NURESTObject):
    """ Represents a License in the VSD

        Notes:
            Enables retrieval/modification and creation of license files. Most of the attributes are retrieved from the encrypted license. The create API simply provides the encrypted license that is in base64 format.
    """

    # REST resource naming used by bambou to build request URLs
    __rest_name__ = "license"
    __resource_name__ = "licenses"

    ## Constants

    # accepted values for `license_encryption` (see expose_attribute choices)
    CONST_LICENSE_ENCRYPTION_ENCRYPTION_ENABLED = "ENCRYPTION_ENABLED"

    # accepted values for `entity_scope`
    CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"

    CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"

    # accepted values for `license_type`
    CONST_LICENSE_TYPE_CLUSTERED = "CLUSTERED"

    CONST_LICENSE_TYPE_STANDARD = "STANDARD"

    CONST_LICENSE_ENCRYPTION_ENCRYPTION_DISABLED = "ENCRYPTION_DISABLED"
    def __init__(self, **kwargs):
        """ Initializes a License instance

            Notes:
                You can specify all parameters while calling this methods.
                A special argument named `data` will enable you to load the
                object from a Python dictionary

            Examples:
                >>> license = NULicense(id=u'xxxx-xxx-xxx-xxx', name=u'License')
                >>> license = NULicense(data=my_dict)
        """

        super(NULicense, self).__init__()

        # Read/Write Attributes
        # (private backing fields for the properties defined below)

        self._major_release = None
        self._last_updated_by = None
        self._additional_supported_versions = None
        self._request_id = None
        self._phone = None
        self._license = None
        self._license_encryption = None
        self._license_entities = None
        self._license_id = None
        self._license_type = None
        self._licensed_feature = None
        self._minor_release = None
        self._zip = None
        self._city = None
        self._allowed_avrsgs_count = None
        self._allowed_avrss_count = None
        self._allowed_cpes_count = None
        self._allowed_nics_count = None
        self._allowed_vdfgs_count = None
        self._allowed_vdfs_count = None
        self._allowed_vms_count = None
        self._allowed_vrsgs_count = None
        self._allowed_vrss_count = None
        self._email = None
        self._encryption_mode = None
        self._unique_license_identifier = None
        self._entity_scope = None
        self._company = None
        self._country = None
        self._product_version = None
        self._provider = None
        self._is_cluster_license = None
        self._user_name = None
        self._state = None
        self._street = None
        self._customer_key = None
        self._expiration_date = None
        self._expiry_timestamp = None
        self._external_id = None
        self._system = None

        # Register each attribute with the bambou REST machinery: maps the
        # local snake_case name to the VSD API remote name and declares its
        # type, requiredness, uniqueness and (where relevant) allowed choices.
        self.expose_attribute(local_name="major_release", remote_name="majorRelease", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="additional_supported_versions", remote_name="additionalSupportedVersions", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="request_id", remote_name="requestID", attribute_type=str, is_required=False, is_unique=True)
        self.expose_attribute(local_name="phone", remote_name="phone", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="license", remote_name="license", attribute_type=str, is_required=True, is_unique=False)
        self.expose_attribute(local_name="license_encryption", remote_name="licenseEncryption", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENCRYPTION_DISABLED', u'ENCRYPTION_ENABLED'])
        self.expose_attribute(local_name="license_entities", remote_name="licenseEntities", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="license_id", remote_name="licenseID", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="license_type", remote_name="licenseType", attribute_type=str, is_required=False, is_unique=False, choices=[u'CLUSTERED', u'STANDARD'])
        self.expose_attribute(local_name="licensed_feature", remote_name="licensedFeature", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="minor_release", remote_name="minorRelease", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="zip", remote_name="zip", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="city", remote_name="city", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_avrsgs_count", remote_name="allowedAVRSGsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_avrss_count", remote_name="allowedAVRSsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_cpes_count", remote_name="allowedCPEsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_nics_count", remote_name="allowedNICsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_vdfgs_count", remote_name="allowedVDFGsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_vdfs_count", remote_name="allowedVDFsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_vms_count", remote_name="allowedVMsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_vrsgs_count", remote_name="allowedVRSGsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="allowed_vrss_count", remote_name="allowedVRSsCount", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="email", remote_name="email", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="encryption_mode", remote_name="encryptionMode", attribute_type=bool, is_required=False, is_unique=False)
        self.expose_attribute(local_name="unique_license_identifier", remote_name="uniqueLicenseIdentifier", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
        self.expose_attribute(local_name="company", remote_name="company", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="country", remote_name="country", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="product_version", remote_name="productVersion", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="provider", remote_name="provider", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="is_cluster_license", remote_name="isClusterLicense", attribute_type=bool, is_required=False, is_unique=False)
        self.expose_attribute(local_name="user_name", remote_name="userName", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="state", remote_name="state", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="street", remote_name="street", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="customer_key", remote_name="customerKey", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="expiration_date", remote_name="expirationDate", attribute_type=float, is_required=False, is_unique=False)
        self.expose_attribute(local_name="expiry_timestamp", remote_name="expiryTimestamp", attribute_type=int, is_required=False, is_unique=False)
        self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
        self.expose_attribute(local_name="system", remote_name="system", attribute_type=str, is_required=False, is_unique=False)

        # Fetchers
        # lazily fetch child collections via the REST API

        self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.event_logs = NUEventLogsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)
# Properties
@property
def major_release(self):
""" Get major_release value.
Notes:
Major software release associated with this license
This attribute is named `majorRelease` in VSD API.
"""
return self._major_release
@major_release.setter
def major_release(self, value):
""" Set major_release value.
Notes:
Major software release associated with this license
This attribute is named `majorRelease` in VSD API.
"""
self._major_release = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def additional_supported_versions(self):
""" Get additional_supported_versions value.
Notes:
Indicates additional versions supported by the license.
This attribute is named `additionalSupportedVersions` in VSD API.
"""
return self._additional_supported_versions
@additional_supported_versions.setter
def additional_supported_versions(self, value):
""" Set additional_supported_versions value.
Notes:
Indicates additional versions supported by the license.
This attribute is named `additionalSupportedVersions` in VSD API.
"""
self._additional_supported_versions = value
@property
def request_id(self):
""" Get request_id value.
Notes:
Unique number generated by the License Management system used to identify each license.
This attribute is named `requestID` in VSD API.
"""
return self._request_id
@request_id.setter
def request_id(self, value):
""" Set request_id value.
Notes:
Unique number generated by the License Management system used to identify each license.
This attribute is named `requestID` in VSD API.
"""
self._request_id = value
@property
def phone(self):
""" Get phone value.
Notes:
Phone number of the owner associated with the license file
"""
return self._phone
@phone.setter
def phone(self, value):
""" Set phone value.
Notes:
Phone number of the owner associated with the license file
"""
self._phone = value
@property
def license(self):
""" Get license value.
Notes:
Base 64 value of the license
"""
return self._license
@license.setter
def license(self, value):
""" Set license value.
Notes:
Base 64 value of the license
"""
self._license = value
@property
def license_encryption(self):
""" Get license_encryption value.
Notes:
License encryption
This attribute is named `licenseEncryption` in VSD API.
"""
return self._license_encryption
@license_encryption.setter
def license_encryption(self, value):
""" Set license_encryption value.
Notes:
License encryption
This attribute is named `licenseEncryption` in VSD API.
"""
self._license_encryption = value
@property
def license_entities(self):
""" Get license_entities value.
Notes:
Indicates non enforceable entities associated with the license.
This attribute is named `licenseEntities` in VSD API.
"""
return self._license_entities
@license_entities.setter
def license_entities(self, value):
""" Set license_entities value.
Notes:
Indicates non enforceable entities associated with the license.
This attribute is named `licenseEntities` in VSD API.
"""
self._license_entities = value
@property
def license_id(self):
""" Get license_id value.
Notes:
Unique identifier of the license file
This attribute is named `licenseID` in VSD API.
"""
return self._license_id
@license_id.setter
def license_id(self, value):
""" Set license_id value.
Notes:
Unique identifier of the license file
This attribute is named `licenseID` in VSD API.
"""
self._license_id = value
@property
def license_type(self):
""" Get license_type value.
Notes:
None
This attribute is named `licenseType` in VSD API.
"""
return self._license_type
@license_type.setter
def license_type(self, value):
""" Set license_type value.
Notes:
None
This attribute is named `licenseType` in VSD API.
"""
self._license_type = value
@property
def licensed_feature(self):
""" Get licensed_feature value.
Notes:
Indicates the feature supported by the license. Possible value is "vss".
This attribute is named `licensedFeature` in VSD API.
"""
return self._licensed_feature
@licensed_feature.setter
def licensed_feature(self, value):
""" Set licensed_feature value.
Notes:
Indicates the feature supported by the license. Possible value is "vss".
This attribute is named `licensedFeature` in VSD API.
"""
self._licensed_feature = value
@property
def minor_release(self):
""" Get minor_release value.
Notes:
Minor software release for which this license has been issued
This attribute is named `minorRelease` in VSD API.
"""
return self._minor_release
@minor_release.setter
def minor_release(self, value):
""" Set minor_release value.
Notes:
Minor software release for which this license has been issued
This attribute is named `minorRelease` in VSD API.
"""
self._minor_release = value
@property
def zip(self):
""" Get zip value.
Notes:
Zipcode of the owner associated with the license file
"""
return self._zip
@zip.setter
def zip(self, value):
""" Set zip value.
Notes:
Zipcode of the owner associated with the license file
"""
self._zip = value
@property
def city(self):
""" Get city value.
Notes:
City of the owner associated with the license file
"""
return self._city
@city.setter
def city(self, value):
""" Set city value.
Notes:
City of the owner associated with the license file
"""
self._city = value
@property
def allowed_avrsgs_count(self):
""" Get allowed_avrsgs_count value.
Notes:
Maximum number of AVRSGs enabled with this license. A value of -1 indicates an unlimited number of AVRSGs
This attribute is named `allowedAVRSGsCount` in VSD API.
"""
return self._allowed_avrsgs_count
@allowed_avrsgs_count.setter
def allowed_avrsgs_count(self, value):
""" Set allowed_avrsgs_count value.
Notes:
Maximum number of AVRSGs enabled with this license. A value of -1 indicates an unlimited number of AVRSGs
This attribute is named `allowedAVRSGsCount` in VSD API.
"""
self._allowed_avrsgs_count = value
@property
def allowed_avrss_count(self):
""" Get allowed_avrss_count value.
Notes:
Maximum number of AVRSs enabled with this license. A value of -1 indicates an unlimited number of AVRSs
This attribute is named `allowedAVRSsCount` in VSD API.
"""
return self._allowed_avrss_count
@allowed_avrss_count.setter
def allowed_avrss_count(self, value):
""" Set allowed_avrss_count value.
Notes:
Maximum number of AVRSs enabled with this license. A value of -1 indicates an unlimited number of AVRSs
This attribute is named `allowedAVRSsCount` in VSD API.
"""
self._allowed_avrss_count = value
@property
def allowed_cpes_count(self):
""" Get allowed_cpes_count value.
Notes:
Maximum number of NSGs enabled with this license. A value of -1 indicates an unlimited number of NSGs
This attribute is named `allowedCPEsCount` in VSD API.
"""
return self._allowed_cpes_count
@allowed_cpes_count.setter
def allowed_cpes_count(self, value):
""" Set allowed_cpes_count value.
Notes:
Maximum number of NSGs enabled with this license. A value of -1 indicates an unlimited number of NSGs
This attribute is named `allowedCPEsCount` in VSD API.
"""
self._allowed_cpes_count = value
@property
def allowed_nics_count(self):
""" Get allowed_nics_count value.
Notes:
Maximum number of NICs allowed. A value of -1 indicates unlimited number of NICs
This attribute is named `allowedNICsCount` in VSD API.
"""
return self._allowed_nics_count
@allowed_nics_count.setter
def allowed_nics_count(self, value):
""" Set allowed_nics_count value.
Notes:
Maximum number of NICs allowed. A value of -1 indicates unlimited number of NICs
This attribute is named `allowedNICsCount` in VSD API.
"""
self._allowed_nics_count = value
@property
def allowed_vdfgs_count(self):
""" Get allowed_vdfgs_count value.
Notes:
Maximum number of VDFGs enabled with this license. A value of -1 indicates an unlimited number of VDFGs.
This attribute is named `allowedVDFGsCount` in VSD API.
"""
return self._allowed_vdfgs_count
@allowed_vdfgs_count.setter
def allowed_vdfgs_count(self, value):
""" Set allowed_vdfgs_count value.
Notes:
Maximum number of VDFGs enabled with this license. A value of -1 indicates an unlimited number of VDFGs.
This attribute is named `allowedVDFGsCount` in VSD API.
"""
self._allowed_vdfgs_count = value
@property
def allowed_vdfs_count(self):
""" Get allowed_vdfs_count value.
Notes:
Maximum number of VDFs enabled with this license. A value of -1 indicates an unlimited number of VDFs
This attribute is named `allowedVDFsCount` in VSD API.
"""
return self._allowed_vdfs_count
@allowed_vdfs_count.setter
def allowed_vdfs_count(self, value):
""" Set allowed_vdfs_count value.
Notes:
Maximum number of VDFs enabled with this license. A value of -1 indicates an unlimited number of VDFs
This attribute is named `allowedVDFsCount` in VSD API.
"""
self._allowed_vdfs_count = value
@property
def allowed_vms_count(self):
""" Get allowed_vms_count value.
Notes:
Maximum number of VMs enabled with this license. A value of -1 indicates an unlimited number of VMs
This attribute is named `allowedVMsCount` in VSD API.
"""
return self._allowed_vms_count
@allowed_vms_count.setter
def allowed_vms_count(self, value):
""" Set allowed_vms_count value.
Notes:
Maximum number of VMs enabled with this license. A value of -1 indicates an unlimited number of VMs
This attribute is named `allowedVMsCount` in VSD API.
"""
self._allowed_vms_count = value
@property
def allowed_vrsgs_count(self):
""" Get allowed_vrsgs_count value.
Notes:
Maximum number of VRSGs enabled with this license. A value of -1 indicates an unlimited number of VRSGs
This attribute is named `allowedVRSGsCount` in VSD API.
"""
return self._allowed_vrsgs_count
@allowed_vrsgs_count.setter
def allowed_vrsgs_count(self, value):
""" Set allowed_vrsgs_count value.
Notes:
Maximum number of VRSGs enabled with this license. A value of -1 indicates an unlimited number of VRSGs
This attribute is named `allowedVRSGsCount` in VSD API.
"""
self._allowed_vrsgs_count = value
@property
def allowed_vrss_count(self):
""" Get allowed_vrss_count value.
Notes:
Maximum number of VRSs enabled with this license. A value of -1 indicates an unlimited number of VRSs
This attribute is named `allowedVRSsCount` in VSD API.
"""
return self._allowed_vrss_count
@allowed_vrss_count.setter
def allowed_vrss_count(self, value):
""" Set allowed_vrss_count value.
Notes:
Maximum number of VRSs enabled with this license. A value of -1 indicates an unlimited number of VRSs
This attribute is named `allowedVRSsCount` in VSD API.
"""
self._allowed_vrss_count = value
@property
def email(self):
""" Get email value.
Notes:
Email of the owner associated with the license file
"""
return self._email
@email.setter
def email(self, value):
""" Set email value.
Notes:
Email of the owner associated with the license file
"""
self._email = value
@property
def encryption_mode(self):
""" Get encryption_mode value.
Notes:
Indicates if the system is associated with a license that allows encryption or not
This attribute is named `encryptionMode` in VSD API.
"""
return self._encryption_mode
@encryption_mode.setter
def encryption_mode(self, value):
""" Set encryption_mode value.
Notes:
Indicates if the system is associated with a license that allows encryption or not
This attribute is named `encryptionMode` in VSD API.
"""
self._encryption_mode = value
@property
def unique_license_identifier(self):
""" Get unique_license_identifier value.
Notes:
Indicates combined string of first 16 and last 16 characters of the license string to be shown in the API
This attribute is named `uniqueLicenseIdentifier` in VSD API.
"""
return self._unique_license_identifier
@unique_license_identifier.setter
def unique_license_identifier(self, value):
""" Set unique_license_identifier value.
Notes:
Indicates combined string of first 16 and last 16 characters of the license string to be shown in the API
This attribute is named `uniqueLicenseIdentifier` in VSD API.
"""
self._unique_license_identifier = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def company(self):
""" Get company value.
Notes:
Company of the owner associated with the license file
"""
return self._company
@company.setter
def company(self, value):
""" Set company value.
Notes:
Company of the owner associated with the license file
"""
self._company = value
@property
def country(self):
""" Get country value.
Notes:
Country of the owner associated with the license file
"""
return self._country
@country.setter
def country(self, value):
""" Set country value.
Notes:
Country of the owner associated with the license file
"""
self._country = value
@property
def product_version(self):
""" Get product_version value.
Notes:
Version of the product that this license applies to
This attribute is named `productVersion` in VSD API.
"""
return self._product_version
@product_version.setter
def product_version(self, value):
""" Set product_version value.
Notes:
Version of the product that this license applies to
This attribute is named `productVersion` in VSD API.
"""
self._product_version = value
@property
def provider(self):
""" Get provider value.
Notes:
Provider of the license file
"""
return self._provider
@provider.setter
def provider(self, value):
""" Set provider value.
Notes:
Provider of the license file
"""
self._provider = value
@property
def is_cluster_license(self):
    """ Get is_cluster_license value.

        Notes:
            Indicates if the license is associated with standalone or cluster setup of VSD

            This attribute is named `isClusterLicense` in VSD API.

    """
    return self._is_cluster_license
@is_cluster_license.setter
def is_cluster_license(self, value):
    """ Set is_cluster_license value.

        Notes:
            Indicates if the license is associated with standalone or cluster setup of VSD

            This attribute is named `isClusterLicense` in VSD API.

    """
    self._is_cluster_license = value
@property
def user_name(self):
""" Get user_name value.
Notes:
The name of the user associated with the license
This attribute is named `userName` in VSD API.
"""
return self._user_name
@user_name.setter
def user_name(self, value):
""" Set user_name value.
Notes:
The name of the user associated with the license
This attribute is named `userName` in VSD API.
"""
self._user_name = value
@property
def state(self):
""" Get state value.
Notes:
State of the owner associated with the license file
"""
return self._state
@state.setter
def state(self, value):
""" Set state value.
Notes:
State of the owner associated with the license file
"""
self._state = value
@property
def street(self):
""" Get street value.
Notes:
Address of the owner associated with the license file
"""
return self._street
@street.setter
def street(self, value):
""" Set street value.
Notes:
Address of the owner associated with the license file
"""
self._street = value
@property
def customer_key(self):
    """ Get customer_key value.

        Notes:
            Customer key associated with the license

            This attribute is named `customerKey` in VSD API.

    """
    return self._customer_key
@customer_key.setter
def customer_key(self, value):
    """ Set customer_key value.

        Notes:
            Customer key associated with the license

            This attribute is named `customerKey` in VSD API.

    """
    self._customer_key = value
@property
def expiration_date(self):
""" Get expiration_date value.
Notes:
Expiration date of this license
This attribute is named `expirationDate` in VSD API.
"""
return self._expiration_date
@expiration_date.setter
def expiration_date(self, value):
""" Set expiration_date value.
Notes:
Expiration date of this license
This attribute is named `expirationDate` in VSD API.
"""
self._expiration_date = value
@property
def expiry_timestamp(self):
""" Get expiry_timestamp value.
Notes:
The Timestamp value of the expiration date of this license
This attribute is named `expiryTimestamp` in VSD API.
"""
return self._expiry_timestamp
@expiry_timestamp.setter
def expiry_timestamp(self, value):
""" Set expiry_timestamp value.
Notes:
The Timestamp value of the expiration date of this license
This attribute is named `expiryTimestamp` in VSD API.
"""
self._expiry_timestamp = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
@property
def system(self):
""" Get system value.
Notes:
System name information provided by the License Management System.
"""
return self._system
@system.setter
def system(self, value):
""" Set system value.
Notes:
System name information provided by the License Management System.
"""
self._system = value
|
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import concurrent.futures
import inspect
import os.path
import tempfile
import unittest
import uuid
import qiime2.plugin
import qiime2.core.type
from qiime2.core.type import VisualizerSignature, Str
from qiime2.core.type.visualization import Visualization as VisualizationType
from qiime2.sdk import Artifact, Visualization, Visualizer, Results
from qiime2.core.testing.visualizer import (most_common_viz, mapping_viz,
params_only_viz, no_input_viz)
from qiime2.core.testing.type import IntSequence1, IntSequence2, Mapping
from qiime2.core.testing.util import get_dummy_plugin, ArchiveTestingMixin
class TestVisualizer(unittest.TestCase, ArchiveTestingMixin):
def setUp(self):
    """Create a per-test temp directory and load the dummy test plugin."""
    # TODO standardize temporary directories created by QIIME 2
    self.test_dir = tempfile.TemporaryDirectory(prefix='qiime2-test-temp-')
    self.plugin = get_dummy_plugin()

def tearDown(self):
    """Remove the temporary directory created in setUp."""
    self.test_dir.cleanup()
def test_private_constructor(self):
    """Direct instantiation of Visualizer must raise NotImplementedError."""
    expected_pattern = 'Visualizer constructor.*private'
    with self.assertRaisesRegex(NotImplementedError, expected_pattern):
        Visualizer()
def test_from_function_with_artifacts_and_parameters(self):
visualizer = self.plugin.visualizers['mapping_viz']
self.assertEqual(visualizer.id, 'mapping_viz')
exp_sig = VisualizerSignature(
mapping_viz,
inputs={
'mapping1': Mapping,
'mapping2': Mapping
},
parameters={
'key_label': qiime2.plugin.Str,
'value_label': qiime2.plugin.Str
},
)
self.assertEqual(visualizer.signature, exp_sig)
self.assertEqual(visualizer.name, 'Visualize two mappings')
self.assertTrue(
visualizer.description.startswith('This visualizer produces an '
'HTML visualization'))
self.assertTrue(
visualizer.source.startswith('\n```python\ndef mapping_viz('))
def test_from_function_without_parameters(self):
visualizer = self.plugin.visualizers['most_common_viz']
self.assertEqual(visualizer.id, 'most_common_viz')
exp_sig = VisualizerSignature(
most_common_viz,
inputs={
'ints': IntSequence1 | IntSequence2
},
parameters={}
)
self.assertEqual(visualizer.signature, exp_sig)
self.assertEqual(visualizer.name, 'Visualize most common integers')
self.assertTrue(
visualizer.description.startswith('This visualizer produces HTML '
'and TSV'))
self.assertTrue(
visualizer.source.startswith('\n```python\ndef most_common_viz('))
def test_from_function_with_parameters_only(self):
visualizer = self.plugin.visualizers['params_only_viz']
self.assertEqual(visualizer.id, 'params_only_viz')
exp_sig = VisualizerSignature(
params_only_viz,
inputs={},
parameters={
'name': qiime2.plugin.Str,
'age': qiime2.plugin.Int
}
)
self.assertEqual(visualizer.signature, exp_sig)
self.assertEqual(visualizer.name, 'Parameters only viz')
self.assertTrue(
visualizer.description.startswith('This visualizer only accepts '
'parameters.'))
self.assertTrue(
visualizer.source.startswith('\n```python\ndef params_only_viz('))
def test_from_function_without_inputs_or_parameters(self):
visualizer = self.plugin.visualizers['no_input_viz']
self.assertEqual(visualizer.id, 'no_input_viz')
exp_sig = VisualizerSignature(
no_input_viz,
inputs={},
parameters={}
)
self.assertEqual(visualizer.signature, exp_sig)
self.assertEqual(visualizer.name, 'No input viz')
self.assertTrue(
visualizer.description.startswith('This visualizer does not '
'accept any'))
self.assertTrue(
visualizer.source.startswith('\n```python\ndef no_input_viz('))
def test_is_callable(self):
    """Registered visualizers should be directly callable objects."""
    for viz_id in ('mapping_viz', 'most_common_viz'):
        self.assertTrue(callable(self.plugin.visualizers[viz_id]))
def test_callable_properties(self):
    """__call__ on each visualizer exposes the expected name/annotations."""
    viz_two_maps = self.plugin.visualizers['mapping_viz']
    viz_common = self.plugin.visualizers['most_common_viz']
    expectations = [
        (viz_two_maps, {
            'mapping1': Mapping, 'return': (VisualizationType,),
            'key_label': Str, 'mapping2': Mapping, 'value_label': Str}),
        (viz_common, {
            'ints': IntSequence1 | IntSequence2,
            'return': (VisualizationType,)}),
    ]
    for visualizer, expected in expectations:
        self.assertEqual(visualizer.__call__.__name__, '__call__')
        self.assertEqual(visualizer.__call__.__annotations__, expected)
        # The dynamic __call__ should not look like a functools wrapper.
        self.assertFalse(hasattr(visualizer.__call__, '__wrapped__'))
def test_async_properties(self):
    # NOTE(review): `async` became a reserved keyword in Python 3.7, so the
    # `visualizer.async` attribute accesses below are a SyntaxError on modern
    # interpreters; this test (and the SDK attribute it exercises) targets
    # Python <= 3.6. Fixing requires an SDK-side rename, not a test change.
    mapping_viz = self.plugin.visualizers['mapping_viz']
    most_common_viz = self.plugin.visualizers['most_common_viz']
    mapping_exp = {
        'mapping1': Mapping, 'return': (VisualizationType,),
        'key_label': Str, 'mapping2': Mapping, 'value_label': Str}
    most_common_exp = {
        'ints': IntSequence1 | IntSequence2,
        'return': (VisualizationType,)}
    mapper = {
        mapping_viz: mapping_exp,
        most_common_viz: most_common_exp}
    for visualizer, exp in mapper.items():
        self.assertEqual(visualizer.async.__name__, 'async')
        self.assertEqual(visualizer.async.__annotations__, exp)
        self.assertFalse(hasattr(visualizer.async, '__wrapped__'))
def test_callable_and_async_signature(self):
mapping_viz = self.plugin.visualizers['mapping_viz']
for callable_attr in '__call__', 'async':
signature = inspect.Signature.from_callable(
getattr(mapping_viz, callable_attr))
parameters = list(signature.parameters.items())
kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
exp_parameters = [
('mapping1', inspect.Parameter(
'mapping1', kind, annotation=Mapping)),
('mapping2', inspect.Parameter(
'mapping2', kind, annotation=Mapping)),
('key_label', inspect.Parameter(
'key_label', kind, annotation=Str)),
('value_label', inspect.Parameter(
'value_label', kind, annotation=Str))
]
self.assertEqual(parameters, exp_parameters)
def test_callable_and_async_different_signature(self):
# Test that a different Visualizer object has a different dynamic
# signature.
most_common_viz = self.plugin.visualizers['most_common_viz']
for callable_attr in '__call__', 'async':
signature = inspect.Signature.from_callable(
getattr(most_common_viz, callable_attr))
parameters = list(signature.parameters.items())
kind = inspect.Parameter.POSITIONAL_OR_KEYWORD
exp_parameters = [
('ints', inspect.Parameter(
'ints', kind, annotation=IntSequence1 | IntSequence2))
]
self.assertEqual(parameters, exp_parameters)
def test_call_with_artifacts_and_parameters(self):
mapping_viz = self.plugin.visualizers['mapping_viz']
artifact1 = Artifact.import_data(Mapping, {'foo': 'abc', 'bar': 'def'})
artifact2 = Artifact.import_data(
Mapping, {'baz': 'abc', 'bazz': 'ghi'})
result = mapping_viz(artifact1, artifact2, 'Key', 'Value')
# Test properties of the `Results` object.
self.assertIsInstance(result, tuple)
self.assertIsInstance(result, Results)
self.assertEqual(len(result), 1)
self.assertEqual(result.visualization, result[0])
result = result[0]
self.assertIsInstance(result, Visualization)
self.assertEqual(result.type, qiime2.core.type.Visualization)
self.assertIsInstance(result.uuid, uuid.UUID)
# TODO qiime2.sdk.Visualization doesn't have an API to access its
# contents yet. For now, save and assert the correct files are present.
filepath = os.path.join(self.test_dir.name, 'visualization.qzv')
result.save(filepath)
root_dir = str(result.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/index.html',
'data/css/style.css',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml',
'provenance/artifacts/%s/metadata.yaml' % artifact1.uuid,
'provenance/artifacts/%s/VERSION' % artifact1.uuid,
'provenance/artifacts/%s/action/action.yaml' % artifact1.uuid,
'provenance/artifacts/%s/metadata.yaml' % artifact2.uuid,
'provenance/artifacts/%s/VERSION' % artifact2.uuid,
'provenance/artifacts/%s/action/action.yaml' % artifact2.uuid
}
self.assertArchiveMembers(filepath, root_dir, expected)
def test_call_with_no_parameters(self):
most_common_viz = self.plugin.visualizers['most_common_viz']
artifact = Artifact.import_data(
IntSequence1, [42, 42, 10, 0, 42, 5, 0])
result = most_common_viz(artifact)
# Test properties of the `Results` object.
self.assertIsInstance(result, tuple)
self.assertIsInstance(result, Results)
self.assertEqual(len(result), 1)
self.assertEqual(result.visualization, result[0])
result = result[0]
self.assertIsInstance(result, Visualization)
self.assertEqual(result.type, qiime2.core.type.Visualization)
self.assertIsInstance(result.uuid, uuid.UUID)
# TODO qiime2.sdk.Visualization doesn't have an API to access its
# contents yet. For now, save and assert the correct files are present.
filepath = os.path.join(self.test_dir.name, 'visualization.qzv')
result.save(filepath)
root_dir = str(result.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/index.html',
'data/index.tsv',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml',
'provenance/artifacts/%s/metadata.yaml' % artifact.uuid,
'provenance/artifacts/%s/VERSION' % artifact.uuid,
'provenance/artifacts/%s/action/action.yaml' % artifact.uuid
}
self.assertArchiveMembers(filepath, root_dir, expected)
def test_call_with_parameters_only(self):
params_only_viz = self.plugin.visualizers['params_only_viz']
# Parameters all have default values.
result, = params_only_viz()
self.assertIsInstance(result, Visualization)
self.assertEqual(result.type, qiime2.core.type.Visualization)
self.assertIsInstance(result.uuid, uuid.UUID)
filepath = os.path.join(self.test_dir.name, 'visualization.qzv')
result.save(filepath)
root_dir = str(result.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/index.html',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertArchiveMembers(filepath, root_dir, expected)
def test_call_without_inputs_or_parameters(self):
no_input_viz = self.plugin.visualizers['no_input_viz']
result, = no_input_viz()
self.assertIsInstance(result, Visualization)
self.assertEqual(result.type, qiime2.core.type.Visualization)
self.assertIsInstance(result.uuid, uuid.UUID)
filepath = os.path.join(self.test_dir.name, 'visualization.qzv')
result.save(filepath)
root_dir = str(result.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/index.html',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml'
}
self.assertArchiveMembers(filepath, root_dir, expected)
def test_async(self):
mapping_viz = self.plugin.visualizers['mapping_viz']
artifact1 = Artifact.import_data(Mapping, {'foo': 'abc', 'bar': 'def'})
artifact2 = Artifact.import_data(
Mapping, {'baz': 'abc', 'bazz': 'ghi'})
future = mapping_viz.async(artifact1, artifact2, 'Key', 'Value')
self.assertIsInstance(future, concurrent.futures.Future)
result = future.result()
# Test properties of the `Results` object.
self.assertIsInstance(result, tuple)
self.assertIsInstance(result, Results)
self.assertEqual(len(result), 1)
self.assertEqual(result.visualization, result[0])
result = result[0]
self.assertIsInstance(result, Visualization)
self.assertEqual(result.type, qiime2.core.type.Visualization)
self.assertIsInstance(result.uuid, uuid.UUID)
# TODO qiime2.sdk.Visualization doesn't have an API to access its
# contents yet. For now, save and assert the correct files are present.
filepath = os.path.join(self.test_dir.name, 'visualization.qzv')
result.save(filepath)
root_dir = str(result.uuid)
expected = {
'VERSION',
'metadata.yaml',
'data/index.html',
'data/css/style.css',
'provenance/metadata.yaml',
'provenance/VERSION',
'provenance/action/action.yaml',
'provenance/artifacts/%s/metadata.yaml' % artifact1.uuid,
'provenance/artifacts/%s/VERSION' % artifact1.uuid,
'provenance/artifacts/%s/action/action.yaml' % artifact1.uuid,
'provenance/artifacts/%s/metadata.yaml' % artifact2.uuid,
'provenance/artifacts/%s/VERSION' % artifact2.uuid,
'provenance/artifacts/%s/action/action.yaml' % artifact2.uuid
}
self.assertArchiveMembers(filepath, root_dir, expected)
def test_visualizer_callable_output(self):
artifact = Artifact.import_data(Mapping, {'foo': 'abc', 'bar': 'def'})
# Callable returns a value from `return_vals`
return_vals = (True, False, [], {}, '', 0, 0.0)
for return_val in return_vals:
def func(output_dir: str, foo: dict) -> None:
return return_val
self.plugin.visualizers.register_function(
func, {'foo': Mapping}, {}, '', ''
)
visualizer = self.plugin.visualizers['func']
with self.assertRaisesRegex(TypeError, "should not return"):
visualizer(foo=artifact)
# Callable returns None (default function return)
def func(output_dir: str, foo: dict) -> None:
return None
self.plugin.visualizers.register_function(
func, {'foo': Mapping}, {}, '', ''
)
visualizer = self.plugin.visualizers['func']
# Should not raise an exception
output = visualizer(foo=artifact)
self.assertIsInstance(output, Results)
self.assertIsInstance(output.visualization, Visualization)
def test_docstring(self):
mapping_viz = self.plugin.visualizers['mapping_viz']
common_viz = self.plugin.visualizers['most_common_viz']
params_only_viz = self.plugin.visualizers['params_only_viz']
no_input_viz = self.plugin.visualizers['no_input_viz']
obs = mapping_viz.__call__.__doc__
self.assertEqual(obs, exp_mapping_viz)
obs = common_viz.__call__.__doc__
self.assertEqual(obs, exp_common_viz)
obs = params_only_viz.__call__.__doc__
self.assertEqual(obs, exp_params_only_viz)
obs = no_input_viz.__call__.__doc__
self.assertEqual(obs, exp_no_input_viz)
exp_mapping_viz = """\
Visualize two mappings
This visualizer produces an HTML visualization of two key-value mappings,
each sorted in alphabetical order by key.
Parameters
----------
mapping1 : Mapping
mapping2 : Mapping
key_label : Str
value_label : Str
Returns
-------
visualization : Visualization
"""
exp_common_viz = """\
Visualize most common integers
This visualizer produces HTML and TSV outputs containing the input sequence
of integers ordered from most- to least-frequently occurring, along with
their respective frequencies.
Parameters
----------
ints : IntSequence1 | IntSequence2
Returns
-------
visualization : Visualization
"""
exp_params_only_viz = """\
Parameters only viz
This visualizer only accepts parameters.
Parameters
----------
name : Str, optional
age : Int, optional
Returns
-------
visualization : Visualization
"""
exp_no_input_viz = """\
No input viz
This visualizer does not accept any type of input.
Returns
-------
visualization : Visualization
"""
# Allow running this test module directly (e.g. `python test_visualizer.py`).
if __name__ == '__main__':
    unittest.main()
|
|
"""Test the Enphase Envoy config flow."""
from unittest.mock import MagicMock, patch
import httpx
from homeassistant import config_entries, setup
from homeassistant.components.enphase_envoy.const import DOMAIN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def test_form(hass: HomeAssistant) -> None:
    """Test we get the form."""
    await setup.async_setup_component(hass, "persistent_notification", {})

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    assert result["errors"] == {}

    user_input = {
        "host": "1.1.1.1",
        "username": "test-username",
        "password": "test-password",
    }
    reader_patch = patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        return_value=True,
    )
    setup_patch = patch(
        "homeassistant.components.enphase_envoy.async_setup_entry",
        return_value=True,
    )
    with reader_patch, setup_patch as mock_setup_entry:
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input
        )
        await hass.async_block_till_done()

    assert result2["type"] == "create_entry"
    assert result2["title"] == "Envoy"
    assert result2["data"] == {
        "host": "1.1.1.1",
        "name": "Envoy",
        "username": "test-username",
        "password": "test-password",
    }
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_invalid_auth(hass: HomeAssistant) -> None:
    """Test we handle invalid auth."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    auth_error = httpx.HTTPStatusError(
        "any", request=MagicMock(), response=MagicMock()
    )
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        side_effect=auth_error,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "host": "1.1.1.1",
                "username": "test-username",
                "password": "test-password",
            },
        )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "invalid_auth"}
async def test_form_cannot_connect(hass: HomeAssistant) -> None:
    """Test we handle cannot connect error."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    connect_error = httpx.HTTPError("any", request=MagicMock())
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        side_effect=connect_error,
    ):
        result2 = await hass.config_entries.flow.async_configure(
            result["flow_id"],
            {
                "host": "1.1.1.1",
                "username": "test-username",
                "password": "test-password",
            },
        )

    assert result2["type"] == "form"
    assert result2["errors"] == {"base": "cannot_connect"}
async def test_form_unknown_error(hass: HomeAssistant) -> None:
    """Test an unexpected exception maps to the generic unknown form error."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        side_effect=ValueError,
    ):
        result = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            {
                "host": "1.1.1.1",
                "username": "test-username",
                "password": "test-password",
            },
        )
    assert result["type"] == "form"
    assert result["errors"] == {"base": "unknown"}
async def test_import(hass: HomeAssistant) -> None:
    """Test importing a YAML config creates an entry from ip_address/name."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        return_value=True,
    ), patch(
        "homeassistant.components.enphase_envoy.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": "import"},
            data={
                "ip_address": "1.1.1.1",
                "name": "Pool Envoy",
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()
    expected_data = {
        "host": "1.1.1.1",
        "name": "Pool Envoy",
        "username": "test-username",
        "password": "test-password",
    }
    assert result["type"] == "create_entry"
    assert result["title"] == "Pool Envoy"
    assert result["data"] == expected_data
    assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf(hass: HomeAssistant) -> None:
    """Test a zeroconf discovery leads to the user form and a serial-keyed entry."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    discovery = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": "zeroconf"},
        data={
            "properties": {"serialnum": "1234"},
            "host": "1.1.1.1",
        },
    )
    await hass.async_block_till_done()
    assert discovery["type"] == "form"
    assert discovery["step_id"] == "user"
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        return_value=True,
    ), patch(
        "homeassistant.components.enphase_envoy.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_configure(
            discovery["flow_id"],
            {
                "host": "1.1.1.1",
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()
    assert result["type"] == "create_entry"
    assert result["title"] == "Envoy 1234"
    assert result["result"].unique_id == "1234"
    assert result["data"] == {
        "host": "1.1.1.1",
        "name": "Envoy 1234",
        "username": "test-username",
        "password": "test-password",
    }
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_host_already_exists(hass: HomeAssistant) -> None:
    """Test configuring a host that is already set up aborts the flow."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    existing_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.1.1.1",
            "name": "Envoy",
            "username": "test-username",
            "password": "test-password",
        },
        title="Envoy",
    )
    existing_entry.add_to_hass(hass)
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == "form"
    assert flow["errors"] == {}
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        return_value=True,
    ):
        result = await hass.config_entries.flow.async_configure(
            flow["flow_id"],
            {
                "host": "1.1.1.1",
                "username": "test-username",
                "password": "test-password",
            },
        )
        await hass.async_block_till_done()
    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
async def test_zeroconf_serial_already_exists(hass: HomeAssistant) -> None:
    """Test a zeroconf discovery for an already-configured serial aborts."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    existing_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.1.1.1",
            "name": "Envoy",
            "username": "test-username",
            "password": "test-password",
        },
        unique_id="1234",
        title="Envoy",
    )
    existing_entry.add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": "zeroconf"},
        data={
            "properties": {"serialnum": "1234"},
            "host": "1.1.1.1",
        },
    )
    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
async def test_zeroconf_host_already_exists(hass: HomeAssistant) -> None:
    """Test zeroconf discovery of a known host aborts and backfills serial/title."""
    await setup.async_setup_component(hass, "persistent_notification", {})
    existing_entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            "host": "1.1.1.1",
            "name": "Envoy",
            "username": "test-username",
            "password": "test-password",
        },
        title="Envoy",
    )
    existing_entry.add_to_hass(hass)
    with patch(
        "homeassistant.components.enphase_envoy.config_flow.EnvoyReader.getData",
        return_value=True,
    ), patch(
        "homeassistant.components.enphase_envoy.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": "zeroconf"},
            data={
                "properties": {"serialnum": "1234"},
                "host": "1.1.1.1",
            },
        )
        await hass.async_block_till_done()
    assert result["type"] == "abort"
    assert result["reason"] == "already_configured"
    assert existing_entry.unique_id == "1234"
    assert existing_entry.title == "Envoy 1234"
    assert len(mock_setup_entry.mock_calls) == 1
|
|
"""A module to help manage coordinate frames and objects attached to them.
Similar to the tf module in ROS.
You may attach points / vectors to frames and determine relative or world
coordinates in a straightforward, object-oriented way.
The ``coordinates`` module is set up with a default coordinate manager so that
if you call ``coordinates.[X]``, where ``[X]`` is a method of
:class:`klampt.model.coordinates.Manager`, such as ``setWorldModel()``,
``addPoint()``, ``addFrame()``, etc., then the default ``Manager``
instance gets called.
Advanced users might create their own ``Manager``, or swap top-level managers
in/out using :meth:`setManager`.
"""
from ..math import so3,se3,vectorops
from ..robotsim import RobotModelLink,RigidObjectModel
import ik
from collections import defaultdict
class Frame:
    """A named coordinate frame in space, optionally expressed relative to a
    parent Frame.  Stores both the world transform and the transform relative
    to the parent; whichever of the two is not supplied at construction time
    is derived from the other."""
    def __init__(self,name,worldCoordinates=se3.identity(),
        parent=None,relativeCoordinates=None):
        self._name = name
        self._parent = parent
        self._worldCoordinates = worldCoordinates
        #_data is an optional payload (e.g. a robot link) set by Group helpers
        self._data = None
        if relativeCoordinates is None:
            #derive the relative transform from the world transform
            if worldCoordinates is None:
                raise ValueError("One of relativeCoordinates or worldCoordinates must be provided")
            if parent is None:
                self._relativeCoordinates = worldCoordinates
            else:
                self._relativeCoordinates = se3.mul(se3.inv(parent.worldCoordinates()),worldCoordinates)
        else:
            self._relativeCoordinates = relativeCoordinates
            if worldCoordinates is None:
                #derive the world transform from the relative transform
                if parent is None:
                    self._worldCoordinates = relativeCoordinates
                else:
                    self._worldCoordinates = se3.mul(parent.worldCoordinates(),relativeCoordinates)
    def name(self):
        """Returns the name of this frame"""
        return self._name
    def data(self):
        """Returns the data attached to this frame, if any"""
        return self._data
    def parent(self):
        """Returns the parent Frame, or None if this frame is expressed
        directly in world coordinates."""
        return self._parent
    def worldCoordinates(self):
        """Returns the SE(3) transform from this frame to world coordinates"""
        return self._worldCoordinates
    def relativeCoordinates(self):
        """Returns the SE(3) transform from this frame to its parent"""
        return self._relativeCoordinates
    def worldOrigin(self):
        """Returns the R^3 translation of this frame's origin in world
        coordinates"""
        return self._worldCoordinates[1]
    def relativeOrigin(self):
        """Returns the R^3 translation of this frame's origin relative to its
        parent"""
        return self._relativeCoordinates[1]
    def worldRotation(self):
        """Returns the SO(3) rotation from this frame to world coordinates"""
        return self._worldCoordinates[0]
    def relativeRotation(self):
        """Returns the SO(3) rotation from this frame to its parent"""
        return self._relativeCoordinates[0]
class Transform:
    """A transform from one Frame (source) to another (destination). The
    destination may be None, in which case the transform is the world transform
    of the source.

    The difference between a Transform and a relative Frame (i.e., one with
    a parent) is that a Transform is a sort of "read-only" structure whose
    coordinates change as the frames' coordinates change."""
    def __init__(self,source,destination=None):
        assert isinstance(source,Frame)
        if destination is not None:
            assert isinstance(destination,Frame)
        self._name = None
        self._source = source
        self._destination = destination
    def source(self):
        """Returns the source Frame"""
        return self._source
    def destination(self):
        """Returns the destination Frame (None means the world frame)"""
        return self._destination
    def coordinates(self):
        """Returns the SE(3) coordinates that transform elements from the
        source to the destination Frame."""
        if self._destination==None:
            return self._source.worldCoordinates()
        return se3.mul(se3.inv(self._destination.worldCoordinates()),self._source.worldCoordinates())
    def translationCoordinates(self):
        """Returns the coordinates of the origin of this frame in R^3, relative
        to its destination"""
        if self._destination==None:
            return self._source.worldOrigin()
        return se3.apply(se3.inv(self._destination.worldCoordinates()),self._source.worldOrigin())
    def rotationCoordinates(self):
        """Returns the SO(3) coordinates that rotate elements from the source
        to the destination Frame"""
        if self._destination==None:
            return self._source.worldRotation()
        return so3.mul(so3.inv(self._destination.worldRotation()),self._source.worldRotation())
    def toWorld(self):
        """Returns a Transform designating the transformation from the
        source frame to the world frame."""
        #fix: was self.source (the bound method), which fails the
        #isinstance(source,Frame) assertion in the constructor
        return Transform(self._source,None)
    def to(self,frame):
        """Returns a Transform designating the transformation from the
        source frame to the given frame."""
        #fix: was self.source (the bound method), same as toWorld
        return Transform(self._source,frame)
class Point:
    """Represents a point in 3D space. It is attached to a frame, so if the
    frame is moved then its world coordinates will also change."""
    def __init__(self,localCoordinates=[0,0,0],frame=None):
        if frame is not None:
            assert isinstance(frame,Frame)
        self._name = None
        self._localCoordinates = localCoordinates
        self._frame = frame
    def localCoordinates(self):
        """Returns a copy of this point's coordinates in its parent Frame"""
        return self._localCoordinates[:]
    def worldCoordinates(self):
        """Returns the coordinates of this point in the world Frame"""
        if self._frame ==None:
            return self._localCoordinates[:]
        return se3.apply(self._frame.worldCoordinates(),self._localCoordinates)
    def frame(self):
        """Returns the frame to which this Point is attached"""
        return self._frame
    def toWorld(self):
        """Returns a Point representing the same point in space, but
        in the world reference frame"""
        return Point(self.worldCoordinates(),None)
    def to(self,newframe):
        """Returns a Point representing the same point in space, but
        in a different reference frame"""
        if newframe == None or newframe=='world':
            return self.toWorld()
        newlocal = se3.apply(se3.inv(newframe.worldCoordinates()),self.worldCoordinates())
        return Point(newlocal,newframe)
    def localOffset(self,dir):
        """Offsets this point by a vector given in local coordinates"""
        self._localCoordinates = vectorops.add(self._localCoordinates,dir)
    def worldOffset(self,dir):
        """Offsets this point by a vector given in world coordinates"""
        if self._frame == None:
            self._localCoordinates = vectorops.add(self._localCoordinates,dir)
        else:
            #fix: rotate the world-space offset into the local frame and add it
            #to the local coordinates (previously the local coordinates were
            #rotated instead of the offset, producing a wrong result)
            self._localCoordinates = vectorops.add(self._localCoordinates,so3.apply(so3.inv(self._frame.worldCoordinates()[0]),dir))
class Direction:
    """Represents a directional quantity in 3D space. It is attached to a
    frame, so if the frame is rotated then its world coordinates will also
    change.  Only the rotation part of the frame affects a Direction."""
    def __init__(self,localCoordinates=[0,0,0],frame=None):
        if frame is not None:
            assert isinstance(frame,Frame)
        self._name = None
        self._localCoordinates = localCoordinates
        self._frame = frame
    def localCoordinates(self):
        """Returns a copy of this direction's coordinates in its parent Frame"""
        return self._localCoordinates[:]
    def worldCoordinates(self):
        """Returns the coordinates of this direction in the world Frame"""
        if self._frame ==None:
            return self._localCoordinates[:]
        return so3.apply(self._frame.worldCoordinates()[0],self._localCoordinates)
    def frame(self):
        """Returns the frame to which this Direction is attached"""
        return self._frame
    def toWorld(self):
        """Returns a Direction representing the same direction in space, but
        in the world reference frame"""
        return Direction(self.worldCoordinates(),None)
    def to(self,newframe):
        """Returns a Direction representing the same direction in space, but
        in a different reference frame"""
        if newframe == None or newframe=='world':
            return self.toWorld()
        newlocal = so3.apply(so3.inv(newframe.worldCoordinates()[0]),self.worldCoordinates())
        return Direction(newlocal,newframe)
    def scale(self,amount):
        """Scales this direction by a scalar amount"""
        self._localCoordinates = vectorops.mul(self._localCoordinates,amount)
    def localOffset(self,dir):
        """Offsets this direction by a vector given in local coordinates"""
        self._localCoordinates = vectorops.add(self._localCoordinates,dir)
    def worldOffset(self,dir):
        """Offsets this direction by a vector given in world coordinates"""
        if self._frame == None:
            self._localCoordinates = vectorops.add(self._localCoordinates,dir)
        else:
            #fix: rotate the world-space offset into the local frame and add it
            #to the local coordinates (previously the local coordinates were
            #rotated instead of the offset, producing a wrong result)
            self._localCoordinates = vectorops.add(self._localCoordinates,so3.apply(so3.inv(self._frame.worldCoordinates()[0]),dir))
class Group:
"""A collection of Frames, Points, Directions, and sub-Groups.
All groups have a privileged frame called 'root'.
The default manager is a Group with a privileged frame called 'world'
which is just an alias for 'root'.
Subgroup items can be accessed using the syntax [group]:[itemname].
Subgroups can also be nested.
Attributes:
frames (dict): a map from frame names to Frame objects
childLists (dict): a map from frame names to lists of children
points (dict): a map from point names to Point objects
directions (dict): a map from direction names to Direction objects
subgroups (dict): a map from subgroup names to Group objects
"""
def __init__(self):
self._name = None
self.destroy()
def rootFrame(self):
return self.frames.get('root',None)
def destroy(self):
"""Call this to destroy a group cleanly"""
self.frames = {}
self.childLists = defaultdict(list)
self.frames['root'] = Frame('root')
self.points = {}
self.directions = {}
self.subgroups = {}
def setWorldModel(self,worldModel):
"""Sets this group to contain all entities of a world model"""
for i in xrange(worldModel.numRobots()):
rgroup = self.addGroup(worldModel.robot(i).getName())
rgroup.setRobotModel(worldModel.robot(i))
for i in xrange(worldModel.numRigidObjects()):
try:
f = self.addFrame(worldModel.rigidObject(i).getName(),worldCoordinates=worldModel.rigidObject(i).getTransform())
f._data = worldModel.rigidObject(i)
except ValueError:
f = self.addFrame("%s[%d]"%(worldModel.rigidObject(i).getName(),i),worldCoordinates=worldModel.rigidObject(i).getTransform())
f._data = worldModel.rigidObject(i)
for i in xrange(worldModel.numTerrains()):
try:
f = self.addFrame(worldModel.terrain(i).getName(),worldCoordinates=se3.identity())
f._data = worldModel.terrain(i)
except ValueError:
f = self.addFrame("%s[%d]"%(worldModel.terrain(i).getName(),i),worldCoordinates=se3.identity())
f._data = worldModel.terrain(i)
return
def setRobotModel(self,robotModel):
"""Sets this group to contain all links of a robot model"""
root = self.frames['root']
for i in xrange(robotModel.numLinks()):
p = robotModel.link(i).getParent()
if p >= 0:
Fp = self.frames[robotModel.link(p).getName()]
else:
Fp = root
f = self.addFrame(robotModel.link(i).getName(),worldCoordinates=robotModel.link(i).getTransform(),parent=Fp)
f._data = robotModel.link(i)
return
def setController(self,controller):
"""Given a robotController, sets this group to contain all sensed
and commanded frames."""
root = self.frames['root']
robot = controller.robot()
robot.setConfig(controller.getCommandedConfig())
for i in xrange(robot.numLinks()):
if p >= 0:
Fp = self.frames[robotModel.link(p).getName()+"_commanded"]
else:
Fp = root
f = self.addFrame(robot.link(i).getName()+"_commanded",worldCoordinates=robot.link(i).getTransform(),parent=Fp)
f._data = (controller,i,'commanded')
robot.setConfig(controller.getSensedConfig())
for i in xrange(robot.numLinks()):
if p >= 0:
Fp = self.frames[robotModel.link(p).getName()+"_commanded"]
else:
Fp = root
f = self.addFrame(robot.link(i).getName()+"_sensed",worldCoordinates=robot.link(i).getTransform(),parent=Fp)
f._data = (controller,i,'sensed')
return
def setSimBody(self,name,simBody):
"""Sets this group to be attached to a simBody"""
f = self.addFrame(name,worldCoordinates=simBody.getTransform())
f._data = simBody
return
def updateFromWorld(self):
"""For any frames with associated world elements, updates the
transforms from the world elements."""
for (n,f) in self.frames.iteritems():
if f._data == None:
continue
if hasattr(f._data,'getTransform'):
worldCoordinates = f._data.getTransform()
if hasattr(f._data,'getParent'):
p = f._data.getParent()
if p >= 0:
plink = f._data.robot().link(p)
parentCoordinates = plink.getTransform()
f._relativeCoordinates = se3.mul(se3.inv(parentCoordinates),worldCoordinates)
else:
f._relativeCoordinates = worldCoordinates
else:
f._relativeCoordinates = worldCoordinates
f._worldCoordinates = worldCoordinates
#update downstream non-link items
for c in self.childLists[f._name]:
if c._data == None or not hasattr(c._data,'getTransform'):
c._worldCoordinates = se3.mul(f._worldCoordinates,c._relativeCoordinates)
self.updateDependentFrames(c)
if isinstance(f._data,tuple) and isinstance(f._data[0],SimRobotController):
controller,index,itemtype = f._data
#TODO: update the frame from the controller data
for (n,g) in self.subgroups.iteritems():
g.updateFromWorld()
def updateToWorld(self):
"""For any frames with associated world elements, updates the
transforms of the world elements. Note: this does NOT perform inverse
kinematics!"""
for (n,f) in self.frames.iteritems():
if f.data == None: continue
if hasattr(f.data,'setTransform'):
f.data.setTransform(*f.worldCoordinates())
for (n,g) in self.subgroups.iteritems():
g.updateToWorld()
def addFrame(self,name,worldCoordinates=None,parent=None,relativeCoordinates=None):
"""Adds a new named Frame, possibly with a parent. 'parent' may either be a string
identifying another named Frame in this Group, or it can be a Frame object. (Warning:
unknown behavior may result from specifying a Frame not in this Group).
Either worldCoordinates or relativeCoordinates must be given. If worldCoordinates is given,
then the frame's initial relative transform is determined by the current coordinates of the
parent. If all parameters are left as default, the frame is placed directly at the origin
of the parent"""
if name in self.frames:
raise ValueError("Frame "+name+" already exists")
if parent==None:
parent = 'root'
if isinstance(parent,str):
parent = self.frames[parent]
if worldCoordinates == None and relativeCoordinates == None:
relativeCoordinates = se3.identity()
self.frames[name] = Frame(name,worldCoordinates=worldCoordinates,parent=parent,relativeCoordinates=relativeCoordinates)
self.childLists[parent._name].append(self.frames[name])
return self.frames[name]
def addPoint(self,name,coordinates=[0,0,0],frame='root'):
if name in self.points:
raise ValueError("Point "+name+" already exists")
res = self.point(coordinates,frame)
res._name = name
self.points[name] = res
return res
def addDirection(self,name,coordinates=[0,0,0],frame='root'):
if name in self.direction:
raise ValueError("Direction "+name+" already exists")
res = self.direction(coordinates,frame)
res._name = name
self.directions[name] = res
return res
def addGroup(self,name,group=None,parentFrame='root'):
"""Adds a subgroup to this group. If parentFrame is given,
then the group is attached relative to the given frame.
Otherwise, it is assumed attached to the root frame. """
if group==None:
group = Group()
if name in self.subgroups:
raise ValueError("Subgroup "+name+" already exists")
group._name = name
self.subgroups[name] = group
group.frames['root']._parent = self.frame(parentFrame)
return group
def deleteFrame(self,name):
"""Deletes the named frame. All items that refer to this frame
will be automatically converted to be relative to the root coordinate
system"""
assert name != 'root',"Root frame may not be deleted"
if name not in self.frames:
raise ValueError("Invalid frame to delete")
f = self.frames[name]
f._parent = None
if f._parent != None:
self.childLists[f._parent._name].remove(f)
for (n,p) in self.points.iteritems():
if p._parent == f:
p._localCoordinates = p.worldCoordinates()
p._parent = self.frames['root']
for (n,p) in self.directions.iteritems():
if p._parent == f:
p._localCoordinates = p.worldCoordinates()
p._parent = self.frames['root']
for c in self.childLists[name]:
p._relativeCoordinates = p._worldCoordinates
p._parent = self.frames['root']
del self.frames[name]
del self.childLists[name]
def deletePoint(self,name):
del self.points[name]
def deleteDirection(self,name):
del self.directions[name]
def deleteGroup(self,name):
del self.subgroups[name]
def setFrameCoordinates(self,name,coordinates,parent='relative'):
"""Sets the coordinates of the frame, given as an se3 element.
The coordinates can be given either in 'relative' mode, where the
coordinates are the natural coordinates of the frame relative to
its parent, or in 'world' mode, where the coordinates are the
global world coordinates, or they can be given relative to any
other frame in this coordinate Group. If None, this defaults
to the root frame of this Group."""
f = self.frame(name)
if parent==None:
parent = 'root'
if isinstance(parent,str):
if parent=='relative':
parent = f._parent
elif parent=='world':
parent = None
else:
parent = self.frames[parent]
if parent:
worldCoordinates = se3.mul(parent._worldCoordinates,coordinates)
else:
worldCoordinates = coordinates
if parent == f._parent:
f._relativeCoordinates = coordinates
else:
f._relativeCoordinates = se3.mul(se3.inv(f._parent._worldCoordinates),worldCoordinates)
f._worldCoordinates = worldCoordinates
self.updateDependentFrames(f)
def updateDependentFrames(self,frame):
"""Whenever Frame's world coordinates are updated, call this to update
the downstream frames. This will be called automatically via
setFrameCoordinates but not if you change a Frame's coordinates
manually."""
for c in self.childLists[frame._name]:
c._worldCoordinates = se3.mul(frame.worldCoordinates(),c._relativeCoordinates)
self.updateDependentFrames(c)
def frame(self,name):
"""Retrieves a named Frame."""
if isinstance(name,Frame): return name
try:
return self.frames[name]
except KeyError:
#try looking through groups
splits = name.split(":",1)
if len(splits)==1:
raise ValueError("Frame "+name+" does not exist")
if splits[0] not in self.subgroups:
raise ValueError("Frame "+name+" or subgroup "+splits[0]+" do not exist")
return self.subgroups[splits[0]].frame(splits[1])
def getPoint(self,name):
"""Retrieves a named Point."""
if isinstance(name,Point): return name
try:
return self.points[name]
except KeyError:
#try looking through groups
splits = name.split(":",1)
if len(splits)==1:
raise ValueError("Point "+name+" does not exist")
if splits[0] not in self.subgroups:
raise ValueError("Point "+name+" or subgroup "+splits[0]+" do not exist")
return self.subgroups[splits[0]].getPoint(splits[1])
def getDirection(self,name):
"""Retrieves a named Direction."""
if isinstance(name,Direction): return name
try:
return self.directions[name]
except KeyError:
#try looking through groups
splits = name.split(":",1)
if len(splits)==1:
raise ValueError("Direction "+name+" does not exist")
if splits[0] not in self.subgroups:
raise ValueError("Direction "+name+" or subgroup "+splits[0]+" do not exist")
return self.subgroups[splits[0]].getDirection(splits[1])
def toWorld(self,object):
"""Converts a Transform, Point, or Direction to have coordinates
relative to the world frame."""
return object.toWorld()
def to(self,object,frame):
"""Converts a Transform, Point, or Direction to have coordinates
relative to the given frame 'frame'."""
return object.to(self.frame(frame))
def transform(self,sourceFrame,destFrame='root'):
"""Makes a Transform object from the source frame to the destination
frame. """
return Transform(self.frame(sourceFrame),self.frame(testFrame))
def point(self,coordinates=[0,0,0],frame='root'):
"""Makes a Point object with the given local coordinates in the given
frame. Does not add it to the list of managed points."""
return Point(coordinates,self.frame(frame))
def direction(self,coordinates=[0,0,0],frame='root'):
"""Makes a Direction object with the given local coordinates in the
given frame. Does not add it to the list of managed points."""
return Direction(coordinates,self.frame(frame))
def pointFromWorld(self,worldCoordinates=[0,0,0],frame='root'):
"""Alias for to(point(worldCoordinates,'root'),frame)"""
f = self.frame(frame)
local = se3.apply(se3.inv(f._worldCoordinates),worldCoordinates)
return Point(local,f)
def directionFromWorld(self,worldCoordinates=[0,0,0],frame='world'):
"""Alias for to(direction(worldCoordinates,'root'),frame)"""
f = self.frame(frame)
local = so3.apply(so3.inv(f._worldCoordinates[0]),worldCoordinates)
return Direction(local,f)
def listFrames(self,indent=0):
"""Prints all the frames in this group and subgroups"""
for k,f in self.frames.iteritems():
if indent > 0:
print " "*(indent-1),
if f._parent == None:
print k
else:
print k,"(%s)"%(f._parent._name,)
for n,g in self.subgroups.iteritems():
if indent > 0:
print " "*(indent-1),
print n,":"
g.listFrames(indent+2)
def listItems(self,indent=0):
"""Prints all the items in this group"""
if len(self.frames) > 0:
if indent > 0:
print " "*(indent-1),
print "Frames:"
for k,f in self.frames.iteritems():
if indent > 0:
print " "*(indent+1),
if f._parent == None:
print k
else:
print k,"(%s)"%(f._parent._name,)
if len(self.points) > 0:
if indent > 0:
print " "*(indent-1),
print "Points:"
for k in self.points.iterkeys():
if indent > 0:
print " "*(indent+1),
print k
if len(self.directions) > 0:
if indent > 0:
print " "*(indent-1),
print "Directions:"
for k in self.directions.iterkeys():
if indent > 0:
print " "*(indent+1),
print k
if len(self.subgroups) > 0:
if indent > 0:
print " "*(indent-1),
print "Subgroups:"
for n,g in self.subgroups.iteritems():
if indent > 0:
print " "*(indent+1),
print n,":"
g.listItems(indent+2)
class Manager(Group):
    """A manager of coordinate frames.  Adds a privileged 'world' frame that
    aliases the group's root frame and must remain fixed at the identity."""
    def __init__(self):
        Group.__init__(self)
        self._name = "world_group"
        self.frames['world'] = self.frames['root']
    def worldFrame(self):
        """Returns the privileged world frame"""
        return self.frames.get('world',None)
    def destroy(self):
        Group.destroy(self)
        #fix: re-establish the 'world' alias after the base class resets the
        #frame map, otherwise the alias is lost on destroy()
        self.frames['world'] = self.frames['root']
    def deleteFrame(self,name):
        assert name != 'world',"World frame may not be deleted"
        #fix: the deletion was never delegated to the base class, so no frame
        #could ever actually be deleted through a Manager
        Group.deleteFrame(self,name)
    def setFrameCoordinates(self,name,coordinates,parent='relative'):
        assert name != 'world',"World frame must stay fixed at identity"
        Group.setFrameCoordinates(self,name,coordinates,parent)
#create defaults so you can just call coordinates.addFrame() etc.
_defaultManager = Manager()
def _callfn(name):
    """Returns a forwarder that calls the named method on the CURRENT default
    manager.  The lookup of _defaultManager happens inside the lambda at call
    time, so swapping managers via setManager() takes effect immediately."""
    global _defaultManager
    return lambda *args,**kwargs:getattr(_defaultManager,name)(*args,**kwargs)
def manager():
    """Retrieves the default top-level manager"""
    global _defaultManager
    return _defaultManager
def setManager(manager):
    """Sets the new top-level manager to a new Manager instance, and
    returns the old top-level manager."""
    assert isinstance(manager,Manager),"setManager must be called with a Manager instance"
    global _defaultManager
    res = _defaultManager
    _defaultManager = manager
    return res
#module-level aliases: coordinates.addFrame(...) etc. forward to the default
#Manager instance, mirroring the Group/Manager API
destroy = _callfn("destroy")
setWorldModel = _callfn("setWorldModel")
setRobotModel = _callfn("setRobotModel")
setController = _callfn("setController")
setSimBody = _callfn("setSimBody")
updateFromWorld = _callfn("updateFromWorld")
updateToWorld = _callfn("updateToWorld")
addFrame = _callfn("addFrame")
addPoint = _callfn("addPoint")
addDirection = _callfn("addDirection")
addGroup = _callfn("addGroup")
deleteFrame = _callfn("deleteFrame")
deletePoint = _callfn("deletePoint")
deleteDirection = _callfn("deleteDirection")
deleteGroup = _callfn("deleteGroup")
setFrameCoordinates = _callfn("setFrameCoordinates")
frame = _callfn("frame")
getPoint = _callfn("getPoint")
getDirection = _callfn("getDirection")
toWorld = _callfn("toWorld")
to = _callfn("to")
transform = _callfn("transform")
point = _callfn("point")
direction = _callfn("direction")
pointFromWorld = _callfn("pointFromWorld")
directionFromWorld = _callfn("directionFromWorld")
listFrames = _callfn("listFrames")
listItems = _callfn("listItems")
def _ancestor_with_link(frame):
    """Returns the nearest ancestor of the given frame (including the frame
    itself) whose data is a robot link or rigid object, or None if no such
    ancestor exists."""
    ancestor = frame
    while ancestor is not None:
        if ancestor._data is not None and isinstance(ancestor._data,(RobotModelLink,RigidObjectModel)):
            break
        ancestor = ancestor._parent
    return ancestor
def ik_objective(obj,target):
"""Returns an IK objective that attempts to fix the given
klampt.coordinates object 'obj' at given target object 'target'.
Arguments:
obj: An instance of one of the {Point,Direction,Transform,Frame} classes.
target: If 'obj' is a Point, Direction, or Frame objects, this
must be an object of the same type of 'obj' denoting the target to
which 'obj' should be fixed. In other words, the local coordinates
of 'obj' relative to 'target's parent frame will be equal to 'target's
local coordinates.
If obj is a Transform object, this element is an se3 object.
Returns:
IKObjective: An IK objective to be used with the klampt.ik module.
Since the klampt.ik module is not aware about custom frames, an
ancestor of the object must be attached to a RobotModelLink or a
RigidObjectModel, or else None will be returned. The same goes for target,
if provided.
TODO: support lists of objects to fix.
TODO: support Direction constraints.
"""
body = None
coords = None
ref = None
if isinstance(obj,Frame):
assert isinstance(target,Frame),"ik_objective: target must be of same type as obj"
body = obj
ref = target.parent()
coords = target.relativeCoordinates()
elif isinstance(obj,Transform):
if ref != None: print "ik_objective: Warning, ref argument passed with Transform object, ignoring"
body = obj.source()
ref = obj.destination()
coords = target
elif isinstance(obj,(Point,Direction)):
assert type(target)==type(obj),"ik_objective: target must be of same type as obj"
body = obj.frame()
ref = target.frame()
coords = target.localCoordinates()
else:
raise ValueError("Argument to ik_objective must be an object from the coordinates module")
linkframe = _ancestor_with_link(body)
if linkframe == None:
print "Warning: object provided to ik_objective is not attached to a robot link or rigid object, returning None"
return None
linkbody = linkframe._data
#find the movable frame attached to ref
refframe = _ancestor_with_link(ref) if ref != None else None
refbody = (refframe._data if refframe!=None else None)
if isinstance(obj,(Frame,Transform)):
#figure out the desired transform T[linkbody->refbody], given
#coords = T[obj->ref], T[obj->linkbody], T[ref->refbody]
#result = (T[ref->refbody] * coords * T[obj->linkbody]^-1)
if linkframe != body: coords = se3.mul(coords,Transform(linkframe,body).coordinates())
if refframe != ref: coords = se3.mul(Transform(ref,refframe).coordinates(),coords)
return ik.objective(linkbody,ref=refbody,R=coords[0],t=coords[1])
elif isinstance(obj,Point):
#figure out the local and world points
local = obj.to(linkframe).localCoordinates()
world = target.to(refframe).localCoordinates()
return ik.objective(linkbody,local=[local],world=[world])
elif isinstance(obj,Direction):
raise ValueError("Axis constraints are not yet supported in the klampt.ik module")
return None
def ik_fixed_objective(obj,ref=None):
    """Returns an IK objective that attempts to fix the given
    klampt.coordinates object at its current pose.  If ref=None,
    its pose is fixed in world coordinates.  Otherwise, its pose is fixed
    relative to the reference frame ref.

    Arguments:
        obj: An instance of one of the {Point,Direction,Transform,Frame} classes.
        ref (optional): either None, or a Frame object denoting the reference
            frame to which the object should be fixed.  (If obj is a Transform
            object, its destination frame is used as the reference frame, and
            this argument is ignored.)

    Returns:
        IKObjective: An IK objective to be used with the klampt.ik module.  For
        Point, Direction, and Frame objects this objective fixes the
        object coordinates relative to the ref frame, or the world if no
        frame is provided.  For Transform objects the source frame is fixed
        relative to the destination frame.

    Since the klampt.ik module is not aware of custom frames, an
    ancestor of the object must be attached to a RobotModelLink or a
    RigidObjectModel, or else None will be returned.  The same goes for ref,
    if provided.

    TODO: support lists of objects to fix.
    TODO: support Direction constraints.
    """
    if isinstance(obj,(Point,Direction)):
        # Fix the point/direction at its current coordinates expressed in ref.
        return ik_objective(obj,obj.to(ref))
    elif isinstance(obj,Frame):
        # A frame is fixed by constraining the transform from obj to ref.
        return ik_fixed_objective(Transform(obj,ref))
    elif isinstance(obj,Transform):
        # Transform objects carry their own reference (destination) frame,
        # so an explicit ref is meaningless here.
        if ref is not None:
            # print() call form works under both Python 2 and 3, unlike the
            # old print statement.
            print("ik_fixed_objective: Warning, ref argument passed with Transform object, ignoring")
        return ik_objective(obj,obj.coordinates())
    else:
        raise ValueError("Argument to ik_fixed_objective must be an object from the coordinates module")
|
|
from pandac.PandaModules import *
from toontown.toonbase import ToontownGlobals
from direct.showbase import DirectObject
from direct.fsm import StateData
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from toontown.toonbase import TTLocalizer
from toontown.effects import DistributedFireworkShow
from toontown.parties import DistributedPartyFireworksActivity
from direct.directnotify import DirectNotifyGlobal
class ShtikerBook(DirectFrame, StateData.StateData):
    """The paged in-game "Shticker Book" GUI.

    Hosts the options/map/inventory/quest/etc. pages, draws the column of
    page tabs and the prev/next arrows, and follows the StateData
    protocol: enter() hides the 3D scene and shows the current page,
    exit() restores the scene.  The doneEvent passed to __init__ is sent
    when the book requests to close.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('ShtikerBook')

    def __init__(self, doneEvent):
        # doneEvent: messenger event name fired when the book wants to close.
        DirectFrame.__init__(self, relief=None, sortOrder=DGG.BACKGROUND_SORT_INDEX)
        self.initialiseoptions(ShtikerBook)
        StateData.StateData.__init__(self, doneEvent)
        self.pages = []
        self.pageTabs = []
        self.currPageTabIndex = None
        self.pageTabFrame = DirectFrame(parent=self, relief=None, pos=(0.93, 1, 0.575), scale=1.25)
        self.pageTabFrame.hide()
        self.currPageIndex = None
        # Page to restore when leaving the News page (see setPage/goToNewsPage).
        self.pageBeforeNews = None
        self.entered = 0
        self.safeMode = 0
        self.__obscured = 0
        self.__shown = 0
        self.__isOpen = 0
        self.hide()
        self.setPos(0, 0, 0.1)
        # Canonical page ordering; addPage() rejects pages not listed here.
        self.pageOrder = [TTLocalizer.OptionsPageTitle,
         TTLocalizer.ShardPageTitle,
         TTLocalizer.MapPageTitle,
         TTLocalizer.InventoryPageTitle,
         TTLocalizer.QuestPageToonTasks,
         TTLocalizer.TrackPageShortTitle,
         TTLocalizer.SuitPageTitle,
         TTLocalizer.FishPageTitle,
         TTLocalizer.KartPageTitle,
         TTLocalizer.DisguisePageTitle,
         TTLocalizer.NPCFriendPageTitle,
         TTLocalizer.GardenPageTitle,
         TTLocalizer.GolfPageTitle,
         TTLocalizer.EventsPageName,
         TTLocalizer.NewsPageName]
        return

    def setSafeMode(self, setting):
        # In safe mode enter() skips hotkey/tab wiring (presumably used
        # during tutorials/cutscenes -- TODO confirm against callers).
        self.safeMode = setting

    def enter(self):
        """Open the book: hide the 3D scene and enter the current page."""
        if base.config.GetBool('want-qa-regression', 0):
            self.notify.info('QA-REGRESSION: SHTICKERBOOK: Open')
        if self.entered:
            return
        self.entered = 1
        messenger.send('releaseDirector')
        messenger.send('stickerBookEntered')
        base.playSfx(self.openSound)
        base.disableMouse()
        base.render.hide()
        base.setBackgroundColor(0.05, 0.15, 0.4)
        base.setCellsAvailable([base.rightCells[0]], 0)
        # Boost nametag opacity while the 3D scene is hidden, restoring
        # the saved values in exit().
        self.oldMin2dAlpha = NametagGlobals.getMin2dAlpha()
        self.oldMax2dAlpha = NametagGlobals.getMax2dAlpha()
        NametagGlobals.setMin2dAlpha(0.8)
        NametagGlobals.setMax2dAlpha(1.0)
        self.__isOpen = 1
        self.__setButtonVisibility()
        self.show()
        self.showPageArrows()
        if not self.safeMode:
            self.accept('shtiker-page-done', self.__pageDone)
            self.accept(ToontownGlobals.StickerBookHotkey, self.__close)
            self.accept(ToontownGlobals.OptionsPageHotkey, self.__close)
            self.pageTabFrame.show()
        self.pages[self.currPageIndex].enter()
        if hasattr(localAvatar, 'newsButtonMgr') and localAvatar.newsButtonMgr:
            localAvatar.newsButtonMgr.hideNewIssueButton()

    def exit(self):
        """Close the book: exit the current page and restore the 3D scene."""
        if not self.entered:
            return
        self.entered = 0
        messenger.send('stickerBookExited')
        base.playSfx(self.closeSound)
        self.pages[self.currPageIndex].exit()
        base.render.show()
        # Firework shows want a black sky; otherwise restore the default.
        setBlackBackground = 0
        for obj in base.cr.doId2do.values():
            if isinstance(obj, DistributedFireworkShow.DistributedFireworkShow) or isinstance(obj, DistributedPartyFireworksActivity.DistributedPartyFireworksActivity):
                setBlackBackground = 1
        if setBlackBackground:
            base.setBackgroundColor(Vec4(0, 0, 0, 1))
        else:
            base.setBackgroundColor(ToontownGlobals.DefaultBackgroundColor)
        gsg = base.win.getGsg()
        if gsg:
            # Re-prepare the scene graph for rendering after it was hidden.
            base.render.prepareScene(gsg)
        NametagGlobals.setMin2dAlpha(self.oldMin2dAlpha)
        NametagGlobals.setMax2dAlpha(self.oldMax2dAlpha)
        base.setCellsAvailable([base.rightCells[0]], 1)
        self.__isOpen = 0
        self.hide()
        self.hideButton()
        cleanupDialog('globalDialog')
        self.pageTabFrame.hide()
        self.ignore('shtiker-page-done')
        self.ignore(ToontownGlobals.StickerBookHotkey)
        self.ignore(ToontownGlobals.OptionsPageHotkey)
        self.ignore('arrow_right')
        self.ignore('arrow_left')
        if base.config.GetBool('want-qa-regression', 0):
            self.notify.info('QA-REGRESSION: SHTICKERBOOK: Close')

    def load(self):
        """Load the book art, open/close buttons, page arrows and sounds."""
        self.checkGardenStarted = localAvatar.getGardenStarted()
        bookModel = loader.loadModel('phase_3.5/models/gui/stickerbook_gui')
        self['image'] = bookModel.find('**/big_book')
        self['image_scale'] = (2, 1, 1.5)
        self.resetFrameSize()
        self.bookOpenButton = DirectButton(image=(bookModel.find('**/BookIcon_CLSD'), bookModel.find('**/BookIcon_OPEN'), bookModel.find('**/BookIcon_RLVR')), relief=None, pos=(1.175, 0, -0.83), scale=0.305, command=self.__open)
        self.bookCloseButton = DirectButton(image=(bookModel.find('**/BookIcon_OPEN'), bookModel.find('**/BookIcon_CLSD'), bookModel.find('**/BookIcon_RLVR2')), relief=None, pos=(1.175, 0, -0.83), scale=0.305, command=self.__close)
        self.bookOpenButton.hide()
        self.bookCloseButton.hide()
        self.nextArrow = DirectButton(parent=self, relief=None, image=(bookModel.find('**/arrow_button'), bookModel.find('**/arrow_down'), bookModel.find('**/arrow_rollover')), scale=(0.1, 0.1, 0.1), pos=(0.838, 0, -0.661), command=self.__pageChange, extraArgs=[1])
        # Negative x scale mirrors the same arrow art for the previous-page button.
        self.prevArrow = DirectButton(parent=self, relief=None, image=(bookModel.find('**/arrow_button'), bookModel.find('**/arrow_down'), bookModel.find('**/arrow_rollover')), scale=(-0.1, 0.1, 0.1), pos=(-0.838, 0, -0.661), command=self.__pageChange, extraArgs=[-1])
        bookModel.removeNode()
        self.openSound = base.loadSfx('phase_3.5/audio/sfx/GUI_stickerbook_open.mp3')
        self.closeSound = base.loadSfx('phase_3.5/audio/sfx/GUI_stickerbook_delete.mp3')
        self.pageSound = base.loadSfx('phase_3.5/audio/sfx/GUI_stickerbook_turn.mp3')
        return

    def unload(self):
        """Tear down all GUI elements, pages, tabs and sounds."""
        loader.unloadModel('phase_3.5/models/gui/stickerbook_gui')
        self.destroy()
        self.bookOpenButton.destroy()
        del self.bookOpenButton
        self.bookCloseButton.destroy()
        del self.bookCloseButton
        self.nextArrow.destroy()
        del self.nextArrow
        self.prevArrow.destroy()
        del self.prevArrow
        for page in self.pages:
            page.unload()
        del self.pages
        for pageTab in self.pageTabs:
            pageTab.destroy()
        del self.pageTabs
        del self.currPageTabIndex
        del self.openSound
        del self.closeSound
        del self.pageSound

    def addPage(self, page, pageName = 'Page'):
        """Insert a page (keeping the News page last) and create its tab."""
        if pageName not in self.pageOrder:
            self.notify.error('Trying to add page %s in the ShtickerBook. Page not listed in the order.' % pageName)
            return
        pageIndex = 0
        if len(self.pages):
            newIndex = len(self.pages)
            prevIndex = newIndex - 1
            if self.pages[prevIndex].pageName == TTLocalizer.NewsPageName:
                # Keep the News page as the final page: insert before it.
                self.pages.insert(prevIndex, page)
                pageIndex = prevIndex
                if self.currPageIndex >= pageIndex:
                    self.currPageIndex += 1
            else:
                self.pages.append(page)
                pageIndex = len(self.pages) - 1
        else:
            self.pages.append(page)
            pageIndex = len(self.pages) - 1
        page.setBook(self)
        page.setPageName(pageName)
        page.reparentTo(self)
        self.addPageTab(page, pageIndex, pageName)
        from toontown.shtiker import MapPage
        if isinstance(page, MapPage.MapPage):
            self.pageBeforeNews = page

    def addPageTab(self, page, pageIndex, pageName = 'Page'):
        """Create the clickable tab button for a page, with per-page icon art."""
        tabIndex = len(self.pageTabs)

        def goToPage():
            # Tab click handler: switch pages and let the news button
            # manager update its state.
            messenger.send('wakeup')
            base.playSfx(self.pageSound)
            self.setPage(page)
            if base.config.GetBool('want-qa-regression', 0):
                self.notify.info('QA-REGRESSION: SHTICKERBOOK: Browse tabs %s' % page.pageName)
            localAvatar.newsButtonMgr.setGoingToNewsPageFromStickerBook(False)
            localAvatar.newsButtonMgr.showAppropriateButton()

        # Tabs are stacked vertically, one slot per page index.
        yOffset = 0.07 * pageIndex
        iconGeom = None
        iconImage = None
        iconScale = 1
        iconColor = Vec4(1)
        buttonPressedCommand = goToPage
        extraArgs = []
        if pageName == TTLocalizer.OptionsPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/switch')
            iconModels.detachNode()
        elif pageName == TTLocalizer.ShardPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/district')
            iconModels.detachNode()
        elif pageName == TTLocalizer.MapPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/teleportIcon')
            iconModels.detachNode()
        elif pageName == TTLocalizer.InventoryPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/inventory_icons')
            iconGeom = iconModels.find('**/inventory_tart')
            iconScale = 7
            iconModels.detachNode()
        elif pageName == TTLocalizer.QuestPageToonTasks:
            iconModels = loader.loadModel('phase_3.5/models/gui/stickerbook_gui')
            iconGeom = iconModels.find('**/questCard')
            iconScale = 0.9
            iconModels.detachNode()
        elif pageName == TTLocalizer.TrackPageShortTitle:
            # The whole filmstrip model is used directly as the tab geometry.
            iconGeom = iconModels = loader.loadModel('phase_3.5/models/gui/filmstrip')
            iconScale = 1.1
            iconColor = Vec4(0.7, 0.7, 0.7, 1)
            iconModels.detachNode()
        elif pageName == TTLocalizer.SuitPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/gui_gear')
            iconModels.detachNode()
        elif pageName == TTLocalizer.FishPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/fish')
            iconModels.detachNode()
        elif pageName == TTLocalizer.GardenPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/gardenIcon')
            iconModels.detachNode()
        elif pageName == TTLocalizer.DisguisePageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/disguise2')
            iconColor = Vec4(0.7, 0.7, 0.7, 1)
            iconModels.detachNode()
        elif pageName == TTLocalizer.NPCFriendPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/playingCard')
            iconImage = iconModels.find('**/card_back')
            iconGeom = iconModels.find('**/logo')
            iconScale = 0.22
            iconModels.detachNode()
        elif pageName == TTLocalizer.KartPageTitle:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/kartIcon')
            iconModels.detachNode()
        elif pageName == TTLocalizer.GolfPageTitle:
            iconModels = loader.loadModel('phase_6/models/golf/golf_gui')
            iconGeom = iconModels.find('**/score_card_icon')
            iconModels.detachNode()
        elif pageName == TTLocalizer.EventsPageName:
            iconModels = loader.loadModel('phase_4/models/parties/partyStickerbook')
            iconGeom = iconModels.find('**/Stickerbook_PartyIcon')
            iconModels.detachNode()
        elif pageName == TTLocalizer.NewsPageName:
            iconModels = loader.loadModel('phase_3.5/models/gui/sos_textures')
            iconGeom = iconModels.find('**/tt_t_gui_sbk_newsPageTab')
            iconModels.detachNode()
            # News page uses a special handler that also disables hotkeys.
            buttonPressedCommand = self.goToNewsPage
            extraArgs = [page]
        if pageName == TTLocalizer.OptionsPageTitle:
            # The tab shows a shorter label than the page title.
            pageName = TTLocalizer.OptionsTabTitle
        pageTab = DirectButton(parent=self.pageTabFrame, relief=DGG.RAISED, frameSize=(-0.575,
         0.575,
         -0.575,
         0.575), borderWidth=(0.05, 0.05), text=('',
         '',
         pageName,
         ''), text_align=TextNode.ALeft, text_pos=(1, -0.2), text_scale=TTLocalizer.SBpageTab, text_fg=(1, 1, 1, 1), text_shadow=(0, 0, 0, 1), image=iconImage, image_scale=iconScale, geom=iconGeom, geom_scale=iconScale, geom_color=iconColor, pos=(0, 0, -yOffset), scale=0.06, command=buttonPressedCommand, extraArgs=extraArgs)
        self.pageTabs.insert(pageIndex, pageTab)
        return

    def setPage(self, page, enterPage = True):
        """Make `page` current, exiting the previous page first."""
        if self.currPageIndex is not None:
            self.pages[self.currPageIndex].exit()
        self.currPageIndex = self.pages.index(page)
        self.setPageTabIndex(self.currPageIndex)
        if enterPage:
            self.showPageArrows()
            page.enter()
        from toontown.shtiker import NewsPage
        if not isinstance(page, NewsPage.NewsPage):
            # Remember the last non-News page so the News page can return to it.
            self.pageBeforeNews = page
        return

    def setPageBeforeNews(self, enterPage = True):
        """Return from the News page and re-enable the close hotkeys."""
        self.setPage(self.pageBeforeNews, enterPage)
        self.accept(ToontownGlobals.StickerBookHotkey, self.__close)
        self.accept(ToontownGlobals.OptionsPageHotkey, self.__close)

    def setPageTabIndex(self, pageTabIndex):
        """Visually sink the selected tab and raise the previously selected one."""
        if self.currPageTabIndex is not None and pageTabIndex != self.currPageTabIndex:
            self.pageTabs[self.currPageTabIndex]['relief'] = DGG.RAISED
        self.currPageTabIndex = pageTabIndex
        self.pageTabs[self.currPageTabIndex]['relief'] = DGG.SUNKEN
        return

    def isOnPage(self, page):
        # True if `page` is the currently displayed page.
        result = False
        if self.currPageIndex is not None:
            curPage = self.pages[self.currPageIndex]
            if curPage == page:
                result = True
        return result

    def obscureButton(self, obscured):
        # Hide the open-book button while some other UI covers it.
        self.__obscured = obscured
        self.__setButtonVisibility()

    def isObscured(self):
        return self.__obscured

    def showButton(self):
        self.__shown = 1
        self.__setButtonVisibility()
        localAvatar.newsButtonMgr.showAppropriateButton()

    def hideButton(self):
        self.__shown = 0
        self.__setButtonVisibility()
        localAvatar.newsButtonMgr.request('Hidden')

    def __setButtonVisibility(self):
        # Exactly one of open/close buttons is visible while usable:
        # close button when the book is open, open button when allowed.
        if self.__isOpen:
            self.bookOpenButton.hide()
            self.bookCloseButton.show()
        elif self.__shown and not self.__obscured:
            self.bookOpenButton.show()
            self.bookCloseButton.hide()
        else:
            self.bookOpenButton.hide()
            self.bookCloseButton.hide()

    def shouldBookButtonBeHidden(self):
        # Mirrors __setButtonVisibility: hidden only when neither button
        # would be shown.
        result = False
        if self.__isOpen:
            pass
        elif self.__shown and not self.__obscured:
            pass
        else:
            result = True
        return result

    def __open(self):
        """Open-button handler; also hides the garden tab until the garden starts."""
        messenger.send('enterStickerBook')
        if not localAvatar.getGardenStarted():
            for tab in self.pageTabs:
                if tab['text'][2] == TTLocalizer.GardenPageTitle:
                    tab.hide()

    def __close(self):
        """Close-button/hotkey handler: report done status via doneEvent."""
        base.playSfx(self.closeSound)
        self.doneStatus = {'mode': 'close'}
        messenger.send('exitStickerBook')
        messenger.send(self.doneEvent)

    def closeBook(self):
        self.__close()

    def __pageDone(self):
        # A page signalled completion; either close the book or forward
        # the page's done status to our owner.
        page = self.pages[self.currPageIndex]
        pageDoneStatus = page.getDoneStatus()
        if pageDoneStatus:
            if pageDoneStatus['mode'] == 'close':
                self.__close()
            else:
                self.doneStatus = pageDoneStatus
                messenger.send(self.doneEvent)

    def __pageChange(self, offset):
        """Arrow handler: move offset pages (clamped to the valid range)."""
        messenger.send('wakeup')
        base.playSfx(self.pageSound)
        self.pages[self.currPageIndex].exit()
        self.currPageIndex = self.currPageIndex + offset
        # NOTE(review): this event fires before the index is clamped below,
        # so at the edges it can carry an out-of-range index -- confirm
        # listeners tolerate that.
        messenger.send('stickerBookPageChange-' + str(self.currPageIndex))
        self.currPageIndex = max(self.currPageIndex, 0)
        self.currPageIndex = min(self.currPageIndex, len(self.pages) - 1)
        self.setPageTabIndex(self.currPageIndex)
        self.showPageArrows()
        page = self.pages[self.currPageIndex]
        from toontown.shtiker import NewsPage
        if isinstance(page, NewsPage.NewsPage):
            self.goToNewsPage(page)
        else:
            page.enter()
            self.pageBeforeNews = page

    def showPageArrows(self):
        """Show/hide prev/next arrows at the first and last pages."""
        if self.currPageIndex == len(self.pages) - 1:
            self.prevArrow.show()
            self.nextArrow.hide()
        else:
            self.prevArrow.show()
            self.nextArrow.show()
        self.__checkForNewsPage()
        if self.currPageIndex == 0:
            self.prevArrow.hide()
            self.nextArrow.show()

    def __checkForNewsPage(self):
        # Keyboard page turning is disabled while on the News page.
        from toontown.shtiker import NewsPage
        self.ignore('arrow_left')
        self.ignore('arrow_right')
        if isinstance(self.pages[self.currPageIndex], NewsPage.NewsPage):
            self.ignore('arrow_left')
            self.ignore('arrow_right')
        else:
            self.accept('arrow_right', self.__pageChange, [1])
            self.accept('arrow_left', self.__pageChange, [-1])

    def goToNewsPage(self, page):
        """Switch to the News page; close hotkeys hand off to the news button mgr."""
        messenger.send('wakeup')
        base.playSfx(self.pageSound)
        localAvatar.newsButtonMgr.setGoingToNewsPageFromStickerBook(True)
        localAvatar.newsButtonMgr.showAppropriateButton()
        self.setPage(page)
        if base.config.GetBool('want-qa-regression', 0):
            self.notify.info('QA-REGRESSION: SHTICKERBOOK: Browse tabs %s' % page.pageName)
        self.ignore(ToontownGlobals.StickerBookHotkey)
        self.ignore(ToontownGlobals.OptionsPageHotkey)
        localAvatar.newsButtonMgr.acceptEscapeKeyPress()

    def disableBookCloseButton(self):
        if self.bookCloseButton:
            self.bookCloseButton['command'] = None
        return

    def enableBookCloseButton(self):
        if self.bookCloseButton:
            self.bookCloseButton['command'] = self.__close

    def disableAllPageTabs(self):
        for button in self.pageTabs:
            button['state'] = DGG.DISABLED

    def enableAllPageTabs(self):
        for button in self.pageTabs:
            button['state'] = DGG.NORMAL
|
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.gather.chrome_html'''
import os
import re
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import unittest
from grit import lazy_re
from grit import util
from grit.gather import chrome_html
_NEW_LINE = lazy_re.compile('(\r\n|\r|\n)', re.MULTILINE)


def StandardizeHtml(text):
  '''Standardizes the newline format and png mime type in Html text.'''
  unified_newlines = _NEW_LINE.sub('\n', text)
  return unified_newlines.replace('data:image/x-png;', 'data:image/png;')
class ChromeHtmlUnittest(unittest.TestCase):
  '''Unit tests for ChromeHtml.

  Each test writes a small tree of HTML/CSS/PNG fixtures to a temp dir,
  runs the ChromeHtml gatherer over it, and compares the flattened
  output.  Uses assertEqual (failUnlessEqual is a deprecated alias that
  was removed in Python 3.12).
  '''

  def testFileResources(self):
    '''Tests inlined image file resources with available high DPI assets.'''
    tmp_dir = util.TempDir({
      'index.html': '''
      <!DOCTYPE HTML>
      <html>
      <head>
      <link rel="stylesheet" href="test.css">
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      ''',
      'test.css': '''
      .image {
        background: url('test.png');
      }
      ''',
      'test.png': 'PNG DATA',
      '1.4x/test.png': '1.4x PNG DATA',
      '1.8x/test.png': '1.8x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
    html.SetDefines({'scale_factors': '1.4x,1.8x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      <!DOCTYPE HTML>
      <html>
      <head>
      <style>
      .image {
        background: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x, url('data:image/png;base64,MS40eCBQTkcgREFUQQ==') 1.4x, url('data:image/png;base64,MS44eCBQTkcgREFUQQ==') 1.8x);
      }
      </style>
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      '''))
    tmp_dir.CleanUp()

  def testFileResourcesImageTag(self):
    '''Tests inlined image file resources with available high DPI assets on
    an image tag.'''
    tmp_dir = util.TempDir({
      'index.html': '''
      <!DOCTYPE HTML>
      <html>
      <body>
      <img id="foo" src="test.png">
      </body>
      </html>
      ''',
      'test.png': 'PNG DATA',
      '2x/test.png': '2x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
    html.SetDefines({'scale_factors': '2x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      <!DOCTYPE HTML>
      <html>
      <body>
      <img id="foo" src="data:image/png;base64,UE5HIERBVEE=" style="content: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x, url('data:image/png;base64,MnggUE5HIERBVEE=') 2x);">
      </body>
      </html>
      '''))
    tmp_dir.CleanUp()

  def testFileResourcesNoFlatten(self):
    '''Tests non-inlined image file resources with available high DPI assets.'''
    tmp_dir = util.TempDir({
      'test.css': '''
      .image {
        background: url('test.png');
      }
      ''',
      'test.png': 'PNG DATA',
      '1.4x/test.png': '1.4x PNG DATA',
      '1.8x/test.png': '1.8x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
    html.SetDefines({'scale_factors': '1.4x,1.8x'})
    html.SetAttributes({'flattenhtml': 'false'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      .image {
        background: -webkit-image-set(url('test.png') 1x, url('1.4x/test.png') 1.4x, url('1.8x/test.png') 1.8x);
      }
      '''))
    tmp_dir.CleanUp()

  def testFileResourcesDoubleQuotes(self):
    '''Tests inlined image file resources if url() filename is double quoted.'''
    tmp_dir = util.TempDir({
      'test.css': '''
      .image {
        background: url("test.png");
      }
      ''',
      'test.png': 'PNG DATA',
      '2x/test.png': '2x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
    html.SetDefines({'scale_factors': '2x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      .image {
        background: -webkit-image-set(url("data:image/png;base64,UE5HIERBVEE=") 1x, url("data:image/png;base64,MnggUE5HIERBVEE=") 2x);
      }
      '''))
    tmp_dir.CleanUp()

  def testFileResourcesNoQuotes(self):
    '''Tests inlined image file resources when url() filename is unquoted.'''
    tmp_dir = util.TempDir({
      'test.css': '''
      .image {
        background: url(test.png);
      }
      ''',
      'test.png': 'PNG DATA',
      '2x/test.png': '2x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('test.css'))
    html.SetDefines({'scale_factors': '2x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      .image {
        background: -webkit-image-set(url(data:image/png;base64,UE5HIERBVEE=) 1x, url(data:image/png;base64,MnggUE5HIERBVEE=) 2x);
      }
      '''))
    tmp_dir.CleanUp()

  def testFileResourcesNoFile(self):
    '''Tests inlined image file resources without available high DPI assets.'''
    tmp_dir = util.TempDir({
      'index.html': '''
      <!DOCTYPE HTML>
      <html>
      <head>
      <link rel="stylesheet" href="test.css">
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      ''',
      'test.css': '''
      .image {
        background: url('test.png');
      }
      ''',
      'test.png': 'PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
    html.SetDefines({'scale_factors': '2x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      <!DOCTYPE HTML>
      <html>
      <head>
      <style>
      .image {
        background: url('data:image/png;base64,UE5HIERBVEE=');
      }
      </style>
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      '''))
    tmp_dir.CleanUp()

  def testThemeResources(self):
    '''Tests inserting high DPI chrome://theme references.'''
    tmp_dir = util.TempDir({
      'index.html': '''
      <!DOCTYPE HTML>
      <html>
      <head>
      <link rel="stylesheet" href="test.css">
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      ''',
      'test.css': '''
      .image {
        background: url('chrome://theme/IDR_RESOURCE_NAME');
      }
      ''',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
    html.SetDefines({'scale_factors': '2x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      <!DOCTYPE HTML>
      <html>
      <head>
      <style>
      .image {
        background: -webkit-image-set(url('chrome://theme/IDR_RESOURCE_NAME') 1x, url('chrome://theme/IDR_RESOURCE_NAME@2x') 2x);
      }
      </style>
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      '''))
    tmp_dir.CleanUp()

  def testRemoveUnsupportedScale(self):
    '''Tests removing an unsupported scale factor from an explicit image-set.'''
    tmp_dir = util.TempDir({
      'index.html': '''
      <!DOCTYPE HTML>
      <html>
      <head>
      <link rel="stylesheet" href="test.css">
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      ''',
      'test.css': '''
      .image {
        background: -webkit-image-set(url('test.png') 1x,
                                      url('test1.4.png') 1.4x,
                                      url('test1.8.png') 1.8x);
      }
      ''',
      'test.png': 'PNG DATA',
      'test1.4.png': '1.4x PNG DATA',
      'test1.8.png': '1.8x PNG DATA',
    })
    html = chrome_html.ChromeHtml(tmp_dir.GetPath('index.html'))
    html.SetDefines({'scale_factors': '1.8x'})
    html.SetAttributes({'flattenhtml': 'true'})
    html.Parse()
    self.assertEqual(StandardizeHtml(html.GetData('en', 'utf-8')),
                     StandardizeHtml('''
      <!DOCTYPE HTML>
      <html>
      <head>
      <style>
      .image {
        background: -webkit-image-set(url('data:image/png;base64,UE5HIERBVEE=') 1x,
                                      url('data:image/png;base64,MS44eCBQTkcgREFUQQ==') 1.8x);
      }
      </style>
      </head>
      <body>
      <!-- Don't need a body. -->
      </body>
      </html>
      '''))
    tmp_dir.CleanUp()
if __name__ == '__main__':
  unittest.main()  # Run this module's tests when executed directly.
|
|
import time
import json
import random
from snapchat_agent import SnapchatAgent
from snapchat_cache import SnapchatCache
from pprint import pprint
class Snapchat(SnapchatAgent):
    """Client for the (unofficial) Snapchat HTTP API.

    Builds on SnapchatAgent's signed-POST helper to log in, pull account
    updates/snaps, download snap media, and report view events.  The
    /all_updates response is cached in a SnapchatCache.  Methods marked
    TODO are not implemented yet.

    NOTE(review): several endpoints (logout, sendEvents, clearFeed) treat
    an empty/None response body from post() as success and return
    ``result is None`` -- presumably the API responds with an empty body
    on success; verify against SnapchatAgent.post().
    """
    # Media type flags (the 'm' field of a snap).
    MEDIA_IMAGE = 0
    MEDIA_VIDEO = 1
    MEDIA_VIDEO_NOAUDIO = 2
    MEDIA_FRIEND_REQUEST = 3
    MEDIA_FRIEND_REQUEST_IMAGE = 4
    MEDIA_FRIEND_REQUEST_VIDEO = 5
    MEDIA_FRIEND_REQUEST_VIDEO_NOAUDIO = 6
    # Snap status flags (the 'st' field of a snap).
    STATUS_NONE = -1
    STATUS_SENT = 0
    STATUS_DELIVERED = 1
    STATUS_OPENED = 2
    STATUS_SCREENSHOT = 3
    # Friend relationship states.
    FRIEND_CONFIRMED = 0
    FRIEND_UNCONFIRMED = 1
    FRIEND_BLOCKED = 2
    FRIEND_DELETED = 3
    # Account privacy settings.
    PRIVACY_EVERYONE = 0
    PRIVACY_FRIENDS = 1

    def __init__(self, username=None, password=None, auth_token=None):
        """Create a client.

        With a password, performs a fresh login (raising Exception on
        failure).  With only an auth_token, reuses that existing session.
        """
        super(Snapchat, self).__init__()
        self.auth_token = None
        self.username = None
        if password is not None:
            if self.login(username, password) is False:
                raise Exception("Login Failure")
        elif auth_token is not None:
            self.auth_token = auth_token
            self.username = username
        self.cache = SnapchatCache()

    def _empty(self, dictionary, key):
        """Return True if `key` is absent from `dictionary`."""
        return key not in dictionary

    def login(self, username, password):
        """Log in; on success stores auth_token/username and seeds the cache.

        Returns True on success, False otherwise.
        """
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/login',
            [
                ('username', username),
                ('password', password),
                ('timestamp', str(timestamp))
            ],
            [
                # No session yet: the request is signed with the static token.
                super(Snapchat, self).STATIC_TOKEN,
                str(timestamp)
            ]
        )
        if result is None:
            return False
        if 'logged' in result and result['logged']:
            self.auth_token = result['auth_token']
            self.username = result['username']
            self.cache = SnapchatCache()
            # The login response doubles as the first updates payload.
            self.cache.set('updates', result)
            return True
        else:
            return False

    def logout(self):
        """End the session.  Returns True when the server reply is empty
        (treated as success), False when not logged in."""
        if self.auth_token is None or self.username is None:
            return False
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/logout',
            [
                ('timestamp', str(timestamp)),
                ('username', self.username)
            ],
            [
                self.auth_token,
                str(timestamp)
            ]
        )
        self.cache = None
        return result is None

    def register(self, username, password, email, birthday):
        #TODO: implement
        return

    def getUpdates(self, force=False):
        """Fetch account updates (cached unless force=True).

        Returns the updates_response dict on success (also refreshing the
        auth token and cache), the raw response if it has no
        'updates_response' key, or None on failure.
        """
        if not force:
            result = self.cache.get('updates')
            if result:
                return result
        if self.auth_token is None or self.username is None:
            return None
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/all_updates',
            [
                ('timestamp', str(timestamp)),
                ('username', self.username)
            ],
            [
                self.auth_token,
                str(timestamp)
            ]
        )
        if result is None:
            # post() failed; without this guard the membership test below
            # raises TypeError ("argument of type 'NoneType' is not iterable").
            return None
        if 'updates_response' in result:
            # The server rotates the auth token on every updates call.
            self.auth_token = result['updates_response']['auth_token']
            self.cache.set('updates', result['updates_response'])
            return result['updates_response']
        return result

    def getSnaps(self):
        """Return the snap list from the updates payload, with the server's
        terse field names expanded; None if updates are unavailable."""
        updates = self.getUpdates()
        if updates is None:
            return None
        snaps = []
        for snap in updates['snaps']:
            snaps.append({
                'id': snap['id'],
                'media_id': None if self._empty(snap, 'm') else snap['m'],
                'time': None if self._empty(snap, 't') else snap['t'],
                'sender': None if self._empty(snap, 'sn') else snap['sn'],
                'recipient': None if self._empty(snap, 'rp') else snap['rp'],
                'status': snap['st'],
                'screenshot_count': None if self._empty(snap, 'c') else snap['c'],
                'sent': snap['sts'],
                'opened': snap['ts'],
                'broadcast': None if self._empty(snap, 'broadcast') else {
                    'url': snap['broadcast_url'],
                    'action_text': snap['broadcast_action_text'],
                    'hide_timer': snap['broadcast_hide_timer']
                }
            })
        return snaps

    def getImages(self):
        """Return only the image snaps from getSnaps().

        NOTE(review): 'media_id' is populated from the snap's 'm' field,
        which appears to be a media *type* code (hence comparing against
        MEDIA_IMAGE) -- confirm the field semantics against the API.
        """
        images = [s for s in self.getSnaps() if s['media_id'] == self.MEDIA_IMAGE]
        return images

    def getFriendStories(self, force=False):
        #TODO: Implement
        return

    def findFriends(self, numbers, country='US'):
        #TODO: Implement
        return

    def getFriends(self):
        #TODO: :(
        return

    def getAddedFriends(self):
        #TODO: Implement
        return

    def addFriend(self, username):
        #TODO: Implement
        return

    def addFriends(self, usernames):
        #TODO: Implement
        return

    def deleteFriend(self, username):
        #TODO: Implement
        return

    def setDisplayName(self, username, display):
        #TODO: Implement
        return

    def block(self, username):
        #TODO: Implement
        return

    def unblock(self, username):
        #TODO: Implement
        return

    def getMedia(self, id):
        """Download the blob for snap `id`, decrypting/decompressing as
        needed.  Returns raw media bytes, or None on failure."""
        if self.auth_token is None or self.username is None:
            return None
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/blob',
            [
                ('id', id),
                ('timestamp', str(timestamp)),
                ('username', self.username)
            ],
            [
                self.auth_token,
                str(timestamp)
            ]
        )
        if result is None:
            return None
        if super(Snapchat, self).isMedia(result[:2]):  # not encrypted
            return result
        else:  # must decrypt
            result = super(Snapchat, self).decryptECB(result)
            if super(Snapchat, self).isMedia(result[:2]):
                return result
            if super(Snapchat, self).isCompressed(result[:2]):
                result = super(Snapchat, self).unCompress(result)
                return result
        return None

    def sendEvents(self, events, snap_info=None):
        """Post analytics events (and optional per-snap info) to the server.
        Returns True when the server reply is empty (success)."""
        if self.auth_token is None or self.username is None:
            return False
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/update_snaps',
            [
                ('events', json.dumps(events)),
                ('json', json.dumps(snap_info)),
                ('timestamp', str(timestamp)),
                ('username', self.username)
            ],
            [
                self.auth_token,
                str(timestamp)
            ]
        )
        return result is None

    def markSnapViewed(self, id, time_s=1):
        """Report snap `id` as viewed for roughly `time_s` seconds.

        NOTE(review): snap_info uses the literal key 'id' rather than the
        snap id value; comparable clients key this dict by the snap id --
        confirm against the API before relying on it.
        """
        snap_info = {
            'id': {
                't': str(time.time() * 1000),
                # Small random jitter so view times look organic.
                'sv': str(time_s + (random.random() / 10))
            }
        }
        events = [
            {
                'eventName': 'SNAP_VIEW',
                'params': {'id': id},
                'ts': str((time.time() * 1000) - time_s)
            },
            {
                'eventName': 'SNAP_EXPIRED',
                'params': {'id': id},
                'ts': str(time.time() * 1000)
            }
        ]
        return self.sendEvents(events, snap_info)

    def markSnapShot(self, id, time_s=1):
        #TODO: Implement
        return

    def upload(self, type, data):
        #TODO: Implement
        return

    def send(self, media_id, recipients, time_s=3):
        #TODO: Implement
        return

    def setStory(self, media_id, media_type, time_s=3):
        #TODO: Implement
        return

    def getStory(self, media_id, key, iv):
        #TODO: Implement
        return

    def getStoryThumb(self, media_id, key, iv):
        #TODO: Implement
        return

    def markStoryViewed(self, id, screenshot_count=0):
        #TODO: Implement
        return

    def getBests(self, friends):
        #TODO: Implement
        return

    def clearFeed(self):
        """
        Sends clear feed. Returns False on failure.
        :return: bool
        """
        if self.auth_token is None or self.username is None:
            return False
        timestamp = super(Snapchat, self).timestamp()
        result = super(Snapchat, self).post(
            '/clear',
            [
                ('timestamp', str(timestamp)),
                ('username', self.username)
            ],
            [
                self.auth_token,
                str(timestamp)
            ]
        )
        return result is None

    def updatePrivacy(self, setting):
        #TODO: Implement
        return

    def updateEmail(self, email):
        #TODO: Implement
        return
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ops which manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np # pylint: disable=unused-import
from tensorflow.python.client import session
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import test
from tensorflow.python.training import server_lib
def scalar_shape():
  """Return the element-shape tensor denoting scalar list elements.

  An empty int32 vector is the TensorList convention for "shape []".
  """
  empty_dims = []
  return ops.convert_to_tensor(empty_dims, dtype=dtypes.int32)
@test_util.with_c_shapes
class ListOpsTest(test_util.TensorFlowTestCase):
  """Tests for TensorList ops.

  Covers push/pop, stack, gather/scatter (with gradients), item get/set,
  CPU<->GPU copies, graph-mode while loops, cross-device serialization,
  batched push_back/concat, and zeros_like on variant-element lists.
  """
  @test_util.run_in_graph_and_eager_modes
  def testPushPop(self):
    # Push one scalar, pop it back: the element must round-trip.
    l = list_ops.empty_tensor_list(element_dtype=dtypes.float32,
                                   element_shape=scalar_shape())
    l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
    l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e), 1.0)
  @test_util.run_in_graph_and_eager_modes
  def testPushPopGPU(self):
    # Re-run the push/pop test with ops placed on a GPU, if one exists.
    if not context.num_gpus():
      return
    with context.device("gpu:0"):
      self.testPushPop()
  @test_util.run_in_graph_and_eager_modes
  def testStack(self):
    l = list_ops.empty_tensor_list(element_dtype=dtypes.float32,
                                   element_shape=scalar_shape())
    l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
    l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
    t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t), [1.0, 2.0])
  @test_util.run_in_graph_and_eager_modes
  def testGatherGrad(self):
    # Gradient flows through tensor_list_gather back to the pushed tensor.
    with backprop.GradientTape() as tape:
      l = list_ops.empty_tensor_list(element_dtype=dtypes.float32,
                                     element_shape=scalar_shape())
      c0 = constant_op.constant(1.0)
      tape.watch(c0)
      l = list_ops.tensor_list_push_back(l, c0)
      l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
      t = list_ops.tensor_list_gather(l, [1, 0], element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t), [2.0, 1.0])
      s = (t[0] + t[1]) * (t[0] + t[1])
    dt = tape.gradient(s, c0)
    # d/dc0 of (c0 + 2)^2 at c0 = 1 is 2 * 3 = 6.
    self.assertAllEqual(self.evaluate(dt), 6.0)
  @test_util.run_in_graph_and_eager_modes
  def testScatterGrad(self):
    # Gradient flows through tensor_list_scatter; indices [1, 0] reverse
    # the element order inside the list.
    with backprop.GradientTape() as tape:
      c0 = constant_op.constant([1.0, 2.0])
      tape.watch(c0)
      l = list_ops.tensor_list_scatter(
          c0, [1, 0], ops.convert_to_tensor([], dtype=dtypes.int32))
      t0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
      t1 = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t0), 2.0)
      self.assertAllEqual(self.evaluate(t1), 1.0)
      loss = t0 * t0 + t1 * t1
    dt = tape.gradient(loss, c0)
    self.assertAllEqual(self.evaluate(dt), [2., 4.])
  @test_util.run_in_graph_and_eager_modes
  def testStackGPU(self):
    if not context.num_gpus():
      return
    with context.device("gpu:0"):
      self.testStack()
  @test_util.run_in_graph_and_eager_modes
  def testTensorListFromTensor(self):
    # Converting a tensor to a list preserves order; popping empties it.
    t = constant_op.constant([1.0, 2.0])
    l = list_ops.tensor_list_from_tensor(t, element_shape=scalar_shape())
    l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e), 2.0)
    l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e), 1.0)
    self.assertAllEqual(self.evaluate(list_ops.tensor_list_length(l)), 0)
  @test_util.run_in_graph_and_eager_modes
  def testFromTensorGPU(self):
    if not context.num_gpus():
      return
    with context.device("gpu:0"):
      self.testTensorListFromTensor()
  @test_util.run_in_graph_and_eager_modes
  def testGetSetItem(self):
    t = constant_op.constant([1.0, 2.0])
    l = list_ops.tensor_list_from_tensor(t, element_shape=scalar_shape())
    e0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e0), 1.0)
    l = list_ops.tensor_list_set_item(l, 0, 3.0)
    t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t), [3.0, 2.0])
  @test_util.run_in_graph_and_eager_modes
  def testGetSetGPU(self):
    if not context.num_gpus():
      return
    with context.device("gpu:0"):
      self.testGetSetItem()
  @test_util.run_in_graph_and_eager_modes
  def testUnknownShape(self):
    # element_shape=-1 means "unknown": elements of differing shapes may
    # coexist in the same list.
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=-1)
    l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
    l = list_ops.tensor_list_push_back(l, constant_op.constant([1.0, 2.0]))
    l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e), [1.0, 2.0])
    l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(e), 1.0)
  @test_util.run_in_graph_and_eager_modes
  def testCPUGPUCopy(self):
    # A list copied CPU->GPU and back must keep its contents intact.
    if not context.num_gpus():
      return
    t = constant_op.constant([1.0, 2.0])
    l = list_ops.tensor_list_from_tensor(t, element_shape=scalar_shape())
    with context.device("gpu:0"):
      l_gpu = array_ops.identity(l)
      self.assertAllEqual(
          self.evaluate(
              list_ops.tensor_list_pop_back(
                  l_gpu, element_dtype=dtypes.float32)[1]), 2.0)
    l_cpu = array_ops.identity(l_gpu)
    self.assertAllEqual(
        self.evaluate(
            list_ops.tensor_list_pop_back(
                l_cpu, element_dtype=dtypes.float32)[1]), 2.0)
  def testGraphStack(self):
    # Graph-mode only: push then stack a single [1]-shaped element.
    with self.cached_session():
      tl = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([1], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      tl = list_ops.tensor_list_push_back(tl, [1])
      self.assertAllEqual(
          self.evaluate(
              list_ops.tensor_list_stack(tl, element_dtype=dtypes.int32)),
          [[1]])
  def testGraphStackInLoop(self):
    # The list variant tensor threads correctly through a while_loop.
    with self.cached_session():
      t1 = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      i = constant_op.constant(0, dtype=dtypes.int32)
      def body(i, t1):
        t1 = list_ops.tensor_list_push_back(t1, i)
        i += 1
        return i, t1
      i, t1 = control_flow_ops.while_loop(lambda i, t1: math_ops.less(i, 4),
                                          body, [i, t1])
      s1 = list_ops.tensor_list_stack(t1, element_dtype=dtypes.int32)
      self.assertAllEqual(self.evaluate(s1), [0, 1, 2, 3])
  def testGraphStackSwitchDtype(self):
    # The cond swaps the initially-int32 empty list for a float32 one on
    # first use, so later pushes of float32 tensors succeed.
    with self.cached_session():
      list_ = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      m = constant_op.constant([1, 2, 3], dtype=dtypes.float32)
      def body(list_, m):
        list_ = control_flow_ops.cond(
            math_ops.equal(list_ops.tensor_list_length(list_), 0),
            lambda: list_ops.empty_tensor_list(m.shape, m.dtype), lambda: list_)
        list_ = list_ops.tensor_list_push_back(list_, m)
        return list_, m
      for _ in range(2):
        list_, m = body(list_, m)
      s1 = list_ops.tensor_list_stack(list_, element_dtype=dtypes.float32)
      np_s1 = np.array([[1, 2, 3], [1, 2, 3]], dtype=np.float32)
      self.assertAllEqual(self.evaluate(s1), np_s1)
  def testGraphStackInLoopSwitchDtype(self):
    # Same dtype-switch trick as above, but inside a while_loop.
    with self.cached_session():
      t1 = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      i = constant_op.constant(0, dtype=dtypes.float32)
      m = constant_op.constant([1, 2, 3], dtype=dtypes.float32)
      def body(i, m, t1):
        t1 = control_flow_ops.cond(
            math_ops.equal(list_ops.tensor_list_length(t1), 0),
            lambda: list_ops.empty_tensor_list(m.shape, m.dtype), lambda: t1)
        t1 = list_ops.tensor_list_push_back(t1, m * i)
        i += 1.0
        return i, m, t1
      i, m, t1 = control_flow_ops.while_loop(
          lambda i, m, t1: math_ops.less(i, 4), body, [i, m, t1])
      s1 = list_ops.tensor_list_stack(t1, element_dtype=dtypes.float32)
      np_s1 = np.vstack([np.arange(1, 4) * i for i in range(4)])
      self.assertAllEqual(self.evaluate(s1), np_s1)
  @test_util.run_in_graph_and_eager_modes
  def testSerialize(self):
    # Moves a list between a worker and a ps job over gRPC, verifying that
    # variant tensors serialize across devices. Skipped when the optional
    # portpicker dependency is unavailable.
    # pylint: disable=g-import-not-at-top
    try:
      import portpicker
    except ImportError:
      return
    with context.graph_mode():
      worker_port = portpicker.pick_unused_port()
      ps_port = portpicker.pick_unused_port()
      cluster_dict = {
          "worker": ["localhost:%s" % worker_port],
          "ps": ["localhost:%s" % ps_port]
      }
      cs = server_lib.ClusterSpec(cluster_dict)
      worker = server_lib.Server(
          cs, job_name="worker", protocol="grpc", task_index=0, start=True)
      unused_ps = server_lib.Server(
          cs, job_name="ps", protocol="grpc", task_index=0, start=True)
      with ops.Graph().as_default(), session.Session(target=worker.target):
        with ops.device("/job:worker"):
          t = constant_op.constant([[1.0], [2.0]])
          l = list_ops.tensor_list_from_tensor(t, element_shape=[1])
        with ops.device("/job:ps"):
          l_ps = array_ops.identity(l)
          l_ps, e = list_ops.tensor_list_pop_back(
              l_ps, element_dtype=dtypes.float32)
        with ops.device("/job:worker"):
          worker_e = array_ops.identity(e)
        self.assertAllEqual(self.evaluate(worker_e), [2.0])
  @test_util.run_in_graph_and_eager_modes
  def testPushPopGradients(self):
    # Gradient of 2*e w.r.t. the pushed constant is 2.
    with backprop.GradientTape() as tape:
      l = list_ops.empty_tensor_list(element_dtype=dtypes.float32,
                                     element_shape=scalar_shape())
      c = constant_op.constant(1.0)
      tape.watch(c)
      l = list_ops.tensor_list_push_back(l, c)
      l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
      e = 2 * e
    self.assertAllEqual(self.evaluate(tape.gradient(e, [c])[0]), 2.0)
  @test_util.run_in_graph_and_eager_modes
  def testStackFromTensorGradients(self):
    with backprop.GradientTape() as tape:
      c = constant_op.constant([1.0, 2.0])
      tape.watch(c)
      l = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
      c2 = list_ops.tensor_list_stack(
          l, element_dtype=dtypes.float32, num_elements=2)
      result = c2 * 2.0
    grad = tape.gradient(result, [c])[0]
    self.assertAllEqual(self.evaluate(grad), [2.0, 2.0])
  @test_util.run_in_graph_and_eager_modes
  def testGetSetGradients(self):
    # set_item overwrites index 0, so c's gradient there is zero and the
    # overwriting tensor c2 picks up the gradient instead.
    with backprop.GradientTape() as tape:
      c = constant_op.constant([1.0, 2.0])
      tape.watch(c)
      l = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
      c2 = constant_op.constant(3.0)
      tape.watch(c2)
      l = list_ops.tensor_list_set_item(l, 0, c2)
      e = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
      ee = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      y = e * e + ee * ee
    grad_c, grad_c2 = tape.gradient(y, [c, c2])
    self.assertAllEqual(self.evaluate(grad_c), [0.0, 4.0])
    self.assertAllEqual(self.evaluate(grad_c2), 6.0)
  @test_util.run_in_graph_and_eager_modes
  def testSetOutOfBounds(self):
    c = constant_op.constant([1.0, 2.0])
    l = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
    with self.assertRaises(errors.InvalidArgumentError):
      self.evaluate(list_ops.tensor_list_set_item(l, 20, 3.0))
  @test_util.run_in_graph_and_eager_modes
  def testResourceVariableScatterGather(self):
    # Lists stored inside a resource variable survive gather, sparse_read
    # and scatter_update.
    c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
    l = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
    v = vs.get_variable("var", initializer=[l] * 10, use_resource=True)
    v_r_0_stacked = list_ops.tensor_list_stack(v[0], dtypes.float32)
    self.evaluate(v.initializer)
    self.assertAllEqual([1.0, 2.0], self.evaluate(v_r_0_stacked))
    v_r_sparse_stacked = list_ops.tensor_list_stack(
        v.sparse_read(0), dtypes.float32)
    self.assertAllEqual([1.0, 2.0], self.evaluate(v_r_sparse_stacked))
    l_new_0 = list_ops.tensor_list_from_tensor(
        [3.0, 4.0], element_shape=scalar_shape())
    l_new_1 = list_ops.tensor_list_from_tensor(
        [5.0, 6.0], element_shape=scalar_shape())
    updated_v = state_ops.scatter_update(v, [3, 5], [l_new_0, l_new_1])
    updated_v_elems = array_ops.unstack(updated_v)
    updated_v_stacked = [
        list_ops.tensor_list_stack(el, dtypes.float32) for el in updated_v_elems
    ]
    expected = ([[1.0, 2.0]] * 3 + [[3.0, 4.0], [1.0, 2.0], [5.0, 6.0]] +
                [[1.0, 2.0]] * 4)
    self.assertAllEqual(self.evaluate(updated_v_stacked), expected)
  @test_util.run_in_graph_and_eager_modes
  def testConcat(self):
    # Pairwise concatenation of batched lists, plus the error cases:
    # mismatched batch size, mismatched element shape, mismatched dtype.
    c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
    l0 = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
    l1 = list_ops.tensor_list_from_tensor([-1.0], element_shape=scalar_shape())
    l_batch_0 = array_ops.stack([l0, l1])
    l_batch_1 = array_ops.stack([l1, l0])
    l_concat_01 = list_ops.tensor_list_concat_lists(
        l_batch_0, l_batch_1, element_dtype=dtypes.float32)
    l_concat_10 = list_ops.tensor_list_concat_lists(
        l_batch_1, l_batch_0, element_dtype=dtypes.float32)
    l_concat_00 = list_ops.tensor_list_concat_lists(
        l_batch_0, l_batch_0, element_dtype=dtypes.float32)
    l_concat_11 = list_ops.tensor_list_concat_lists(
        l_batch_1, l_batch_1, element_dtype=dtypes.float32)
    expected_00 = [[1.0, 2.0, 1.0, 2.0], [-1.0, -1.0]]
    expected_01 = [[1.0, 2.0, -1.0], [-1.0, 1.0, 2.0]]
    expected_10 = [[-1.0, 1.0, 2.0], [1.0, 2.0, -1.0]]
    expected_11 = [[-1.0, -1.0], [1.0, 2.0, 1.0, 2.0]]
    for i, (concat, expected) in enumerate(zip(
        [l_concat_00, l_concat_01, l_concat_10, l_concat_11],
        [expected_00, expected_01, expected_10, expected_11])):
      splitted = array_ops.unstack(concat)
      splitted_stacked_ret = self.evaluate(
          (list_ops.tensor_list_stack(splitted[0], dtypes.float32),
           list_ops.tensor_list_stack(splitted[1], dtypes.float32)))
      print("Test concat %d: %s, %s, %s, %s"
            % (i, expected[0], splitted_stacked_ret[0],
               expected[1], splitted_stacked_ret[1]))
      self.assertAllClose(expected[0], splitted_stacked_ret[0])
      self.assertAllClose(expected[1], splitted_stacked_ret[1])
    # Concatenating mismatched shapes fails.
    with self.assertRaises((errors.InvalidArgumentError, ValueError)):
      self.evaluate(
          list_ops.tensor_list_concat_lists(
              l_batch_0,
              list_ops.empty_tensor_list(scalar_shape(), dtypes.float32),
              element_dtype=dtypes.float32))
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "element shapes are not identical at index 0"):
      l_batch_of_vec_tls = array_ops.stack(
          [list_ops.tensor_list_from_tensor([[1.0]], element_shape=[1])] * 2)
      self.evaluate(
          list_ops.tensor_list_concat_lists(l_batch_0, l_batch_of_vec_tls,
                                            element_dtype=dtypes.float32))
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 r"input_b\[0\].dtype != element_dtype."):
      l_batch_of_int_tls = array_ops.stack(
          [list_ops.tensor_list_from_tensor([1], element_shape=scalar_shape())]
          * 2)
      self.evaluate(
          list_ops.tensor_list_concat_lists(l_batch_0, l_batch_of_int_tls,
                                            element_dtype=dtypes.float32))
  @test_util.run_in_graph_and_eager_modes
  def testPushBackBatch(self):
    c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
    l0 = list_ops.tensor_list_from_tensor(c, element_shape=scalar_shape())
    l1 = list_ops.tensor_list_from_tensor([-1.0], element_shape=scalar_shape())
    l_batch = array_ops.stack([l0, l1])
    l_push = list_ops.tensor_list_push_back_batch(l_batch, [3.0, 4.0])
    l_unstack = array_ops.unstack(l_push)
    l0_ret = list_ops.tensor_list_stack(l_unstack[0], dtypes.float32)
    l1_ret = list_ops.tensor_list_stack(l_unstack[1], dtypes.float32)
    self.assertAllClose([1.0, 2.0, 3.0], self.evaluate(l0_ret))
    self.assertAllClose([-1.0, 4.0], self.evaluate(l1_ret))
    with ops.control_dependencies([l_push]):
      l_unstack_orig = array_ops.unstack(l_batch)
      l0_orig_ret = list_ops.tensor_list_stack(l_unstack_orig[0],
                                               dtypes.float32)
      l1_orig_ret = list_ops.tensor_list_stack(l_unstack_orig[1],
                                               dtypes.float32)
    # Check that without aliasing, push_back_batch still works; and
    # that it doesn't modify the input.
    l0_r_v, l1_r_v, l0_orig_v, l1_orig_v = self.evaluate(
        (l0_ret, l1_ret, l0_orig_ret, l1_orig_ret))
    self.assertAllClose([1.0, 2.0, 3.0], l0_r_v)
    self.assertAllClose([-1.0, 4.0], l1_r_v)
    self.assertAllClose([1.0, 2.0], l0_orig_v)
    self.assertAllClose([-1.0], l1_orig_v)
    # Pushing back mismatched shapes fails.
    with self.assertRaises((errors.InvalidArgumentError, ValueError)):
      self.evaluate(list_ops.tensor_list_push_back_batch(l_batch, []))
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "incompatible shape to a list at index 0"):
      self.evaluate(
          list_ops.tensor_list_push_back_batch(l_batch, [[3.0], [4.0]]))
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "Invalid data type at index 0"):
      self.evaluate(list_ops.tensor_list_push_back_batch(l_batch, [3, 4]))
  @test_util.run_in_graph_and_eager_modes
  def testZerosLike(self):
    # zeros_like on a list yields a list of equal length filled with zeros,
    # across every supported element dtype.
    for dtype in (dtypes.uint8, dtypes.uint16, dtypes.int8, dtypes.int16,
                  dtypes.int32, dtypes.int64, dtypes.float16, dtypes.float32,
                  dtypes.float64, dtypes.complex64, dtypes.complex128,
                  dtypes.bool):
      l_empty = list_ops.empty_tensor_list(
          element_dtype=dtype, element_shape=scalar_shape())
      l_empty_zeros = array_ops.zeros_like(l_empty)
      t_empty_zeros = list_ops.tensor_list_stack(
          l_empty_zeros, element_dtype=dtype)
      l_full = list_ops.tensor_list_push_back(l_empty,
                                              math_ops.cast(0, dtype=dtype))
      l_full = list_ops.tensor_list_push_back(l_full,
                                              math_ops.cast(1, dtype=dtype))
      l_full_zeros = array_ops.zeros_like(l_full)
      t_full_zeros = list_ops.tensor_list_stack(
          l_full_zeros, element_dtype=dtype)
      self.assertAllEqual(self.evaluate(t_empty_zeros), [])
      self.assertAllEqual(
          self.evaluate(t_full_zeros), np.zeros(
              (2,), dtype=dtype.as_numpy_dtype))
  @test_util.run_in_graph_and_eager_modes
  def testZerosLikeVariant(self):
    # zeros_like recurses into nested (variant-element) lists.
    for dtype in (dtypes.uint8, dtypes.uint16, dtypes.int8, dtypes.int16,
                  dtypes.int32, dtypes.int64, dtypes.float16, dtypes.float32,
                  dtypes.float64, dtypes.complex64, dtypes.complex128,
                  dtypes.bool):
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.variant, element_shape=scalar_shape())
      sub_l = list_ops.empty_tensor_list(
          element_dtype=dtype, element_shape=scalar_shape())
      l = list_ops.tensor_list_push_back(l, sub_l)
      sub_l = list_ops.tensor_list_push_back(sub_l, math_ops.cast(
          1, dtype=dtype))
      l = list_ops.tensor_list_push_back(l, sub_l)
      sub_l = list_ops.tensor_list_push_back(sub_l, math_ops.cast(
          2, dtype=dtype))
      l = list_ops.tensor_list_push_back(l, sub_l)
      # l : [[],
      #      [1],
      #      [1, 2]]
      #
      # l_zeros : [[],
      #            [0],
      #            [0, 0]]
      l_zeros = array_ops.zeros_like(l)
      outputs = []
      for _ in range(3):
        l_zeros, out = list_ops.tensor_list_pop_back(
            l_zeros, element_dtype=dtypes.variant)
        outputs.append(list_ops.tensor_list_stack(out, element_dtype=dtype))
      # Note: `outputs` contains popped values so the order is reversed.
      self.assertAllEqual(self.evaluate(outputs[2]), [])
      self.assertAllEqual(
          self.evaluate(outputs[1]), np.zeros((1,), dtype=dtype.as_numpy_dtype))
      self.assertAllEqual(
          self.evaluate(outputs[0]), np.zeros((2,), dtype=dtype.as_numpy_dtype))
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
|
|
# Copyright 2012 Grid Dynamics
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import base64
import contextlib
import functools
import os
import shutil
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import excutils
from oslo_utils import fileutils
from oslo_utils import strutils
from oslo_utils import units
import six
import nova.conf
from nova import exception
from nova.i18n import _
from nova.i18n import _LE, _LI, _LW
from nova import image
from nova import keymgr
from nova import utils
from nova.virt.disk import api as disk
from nova.virt.image import model as imgmodel
from nova.virt import images
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt.storage import dmcrypt
from nova.virt.libvirt.storage import lvm
from nova.virt.libvirt.storage import rbd_utils
from nova.virt.libvirt import utils as libvirt_utils
# Global nova configuration object, module-level logger, and the image
# (glance) API handle shared by all backends in this module.
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
IMAGE_API = image.API()
@six.add_metaclass(abc.ABCMeta)
class Image(object):
    """Abstract base class for libvirt disk image backends.

    Subclasses implement create_image()/resize_image() for a specific
    storage type (flat file, qcow2, LVM, RBD, ...).
    """
    # Whether the backend can clone() directly from glance locations.
    SUPPORTS_CLONE = False
    def __init__(self, path, source_type, driver_format, is_block_dev=False):
        """Image initialization.
        :param path: libvirt's representation of the path of this disk.
        :param source_type: block or file
        :param driver_format: raw or qcow2
        :param is_block_dev: whether the image lives on a block device
        """
        # Ephemeral-storage encryption is only available on backends that
        # opt in via _supports_encryption() (LVM); fail fast otherwise.
        if (CONF.ephemeral_storage_encryption.enabled and
            not self._supports_encryption()):
            raise exception.NovaException(_('Incompatible settings: '
                                  'ephemeral storage encryption is supported '
                                  'only for LVM images.'))
        self.path = path
        self.source_type = source_type
        self.driver_format = driver_format
        self.driver_io = None
        self.discard_mode = CONF.libvirt.hw_disk_discard
        self.is_block_dev = is_block_dev
        self.preallocate = False
        # NOTE(dripton): We store lines of json (path, disk_format) in this
        # file, for some image types, to prevent attacks based on changing the
        # disk_format.
        self.disk_info_path = None
        # NOTE(mikal): We need a lock directory which is shared along with
        # instance files, to cover the scenario where multiple compute nodes
        # are trying to create a base file at the same time
        self.lock_path = os.path.join(CONF.instances_path, 'locks')
    def _supports_encryption(self):
        """Used to test that the backend supports encryption.
        Override in the subclass if backend supports encryption.
        """
        return False
    @abc.abstractmethod
    def create_image(self, prepare_template, base, size, *args, **kwargs):
        """Create image from template.
        Contains specific behavior for each image type.
        :prepare_template: function, that creates template.
            Should accept `target` argument.
        :base: Template name
        :size: Size of created image in bytes
        """
        pass
    @abc.abstractmethod
    def resize_image(self, size):
        """Resize image to size (in bytes).
        :size: Desired size of image in bytes
        """
        pass
    def libvirt_info(self, disk_bus, disk_dev, device_type, cache_mode,
                     extra_specs, hypervisor_version, boot_order=None):
        """Get `LibvirtConfigGuestDisk` filled for this image.
        :disk_dev: Disk bus device name
        :disk_bus: Disk bus type
        :device_type: Device type for this image.
        :cache_mode: Caching mode for this image
        :extra_specs: Instance type extra specs dict.
        :hypervisor_version: the hypervisor version
        :boot_order: Disk device boot order
        """
        info = vconfig.LibvirtConfigGuestDisk()
        info.source_type = self.source_type
        info.source_device = device_type
        info.target_bus = disk_bus
        info.target_dev = disk_dev
        info.driver_cache = cache_mode
        info.driver_discard = self.discard_mode
        info.driver_io = self.driver_io
        info.driver_format = self.driver_format
        driver_name = libvirt_utils.pick_disk_driver_name(hypervisor_version,
                                                          self.is_block_dev)
        info.driver_name = driver_name
        info.source_path = self.path
        info.boot_order = boot_order
        # Apply any quota:disk_* tuning from the flavor extra specs.
        self.disk_qos(info, extra_specs)
        return info
    def disk_qos(self, info, extra_specs):
        """Copy 'quota:disk_*' extra-spec values onto the disk config.

        Only the known tune_items keys are honoured; other quota scopes
        are ignored.
        """
        tune_items = ['disk_read_bytes_sec', 'disk_read_iops_sec',
            'disk_write_bytes_sec', 'disk_write_iops_sec',
            'disk_total_bytes_sec', 'disk_total_iops_sec']
        for key, value in six.iteritems(extra_specs):
            scope = key.split(':')
            if len(scope) > 1 and scope[0] == 'quota':
                if scope[1] in tune_items:
                    setattr(info, scope[1], value)
    def libvirt_fs_info(self, target, driver_type=None):
        """Get `LibvirtConfigGuestFilesys` filled for this image.
        :target: target directory inside a container.
        :driver_type: filesystem driver type, can be loop
                      nbd or ploop.
        """
        info = vconfig.LibvirtConfigGuestFilesys()
        info.target_dir = target
        if self.is_block_dev:
            info.source_type = "block"
            info.source_dev = self.path
        else:
            info.source_type = "file"
            info.source_file = self.path
        info.driver_format = self.driver_format
        if driver_type:
            info.driver_type = driver_type
        else:
            # Default the driver from the image format: raw maps to loop,
            # anything else falls back to nbd.
            if self.driver_format == "raw":
                info.driver_type = "loop"
            else:
                info.driver_type = "nbd"
        return info
    def exists(self):
        # True when the image path is present on the local filesystem.
        return os.path.exists(self.path)
    def cache(self, fetch_func, filename, size=None, *args, **kwargs):
        """Creates image from template.
        Ensures that template and image not already exists.
        Ensures that base directory exists.
        Synchronizes on template fetching.
        :fetch_func: Function that creates the base image
                     Should accept `target` argument.
        :filename: Name of the file in the image directory
        :size: Size of created image in bytes (optional)
        """
        @utils.synchronized(filename, external=True, lock_path=self.lock_path)
        def fetch_func_sync(target, *args, **kwargs):
            # The image may have been fetched while a subsequent
            # call was waiting to obtain the lock.
            if not os.path.exists(target):
                fetch_func(target=target, *args, **kwargs)
        base_dir = os.path.join(CONF.instances_path,
                                CONF.image_cache_subdirectory_name)
        if not os.path.exists(base_dir):
            fileutils.ensure_tree(base_dir)
        base = os.path.join(base_dir, filename)
        if not self.exists() or not os.path.exists(base):
            self.create_image(fetch_func_sync, base, size,
                              *args, **kwargs)
        if size:
            # create_image() only creates the base image if needed, so
            # we cannot rely on it to exist here
            if os.path.exists(base) and size > self.get_disk_size(base):
                self.resize_image(size)
            if (self.preallocate and self._can_fallocate() and
                    os.access(self.path, os.W_OK)):
                utils.execute('fallocate', '-n', '-l', size, self.path)
    def _can_fallocate(self):
        """Check once per class, whether fallocate(1) is available,
        and that the instances directory supports fallocate(2).
        """
        # Result is cached as a class attribute so the probe runs at most
        # once per backend class per process.
        can_fallocate = getattr(self.__class__, 'can_fallocate', None)
        if can_fallocate is None:
            test_path = self.path + '.fallocate_test'
            _out, err = utils.trycmd('fallocate', '-l', '1', test_path)
            fileutils.delete_if_exists(test_path)
            can_fallocate = not err
            self.__class__.can_fallocate = can_fallocate
        if not can_fallocate:
            LOG.warning(_LW('Unable to preallocate image at path: '
                            '%(path)s'), {'path': self.path})
        return can_fallocate
    def verify_base_size(self, base, size, base_size=0):
        """Check that the base image is not larger than size.
           Since images can't be generally shrunk, enforce this
           constraint taking account of virtual image size.
        """
        # Note(pbrady): The size and min_disk parameters of a glance
        #  image are checked against the instance size before the image
        #  is even downloaded from glance, but currently min_disk is
        #  adjustable and doesn't currently account for virtual disk size,
        #  so we need this extra check here.
        # NOTE(cfb): Having a flavor that sets the root size to 0 and having
        #  nova effectively ignore that size and use the size of the
        #  image is considered a feature at this time, not a bug.
        if size is None:
            return
        if size and not base_size:
            base_size = self.get_disk_size(base)
        if size < base_size:
            msg = _LE('%(base)s virtual size %(base_size)s '
                      'larger than flavor root disk size %(size)s')
            LOG.error(msg % {'base': base,
                             'base_size': base_size,
                             'size': size})
            raise exception.FlavorDiskSmallerThanImage(
                flavor_size=size, image_size=base_size)
    def get_disk_size(self, name):
        # Delegates to the disk api; returns the virtual size in bytes.
        return disk.get_disk_size(name)
    def snapshot_extract(self, target, out_format):
        # Backends that support snapshot extraction override this.
        raise NotImplementedError()
    def _get_driver_format(self):
        return self.driver_format
    def resolve_driver_format(self):
        """Return the driver format for self.path.
        First checks self.disk_info_path for an entry.
        If it's not there, calls self._get_driver_format(), and then
        stores the result in self.disk_info_path
        See https://bugs.launchpad.net/nova/+bug/1221190
        """
        def _dict_from_line(line):
            # Parse one json line from disk.info; empty line -> empty dict.
            if not line:
                return {}
            try:
                return jsonutils.loads(line)
            except (TypeError, ValueError) as e:
                msg = (_("Could not load line %(line)s, got error "
                        "%(error)s") %
                        {'line': line, 'error': e})
                raise exception.InvalidDiskInfo(reason=msg)
        @utils.synchronized(self.disk_info_path, external=False,
                            lock_path=self.lock_path)
        def write_to_disk_info_file():
            # Use os.open to create it without group or world write permission.
            fd = os.open(self.disk_info_path, os.O_RDONLY | os.O_CREAT, 0o644)
            with os.fdopen(fd, "r") as disk_info_file:
                line = disk_info_file.read().rstrip()
                dct = _dict_from_line(line)
            if self.path in dct:
                msg = _("Attempted overwrite of an existing value.")
                raise exception.InvalidDiskInfo(reason=msg)
            dct.update({self.path: driver_format})
            # Write to a temp file then rename for an atomic replacement.
            tmp_path = self.disk_info_path + ".tmp"
            fd = os.open(tmp_path, os.O_WRONLY | os.O_CREAT, 0o644)
            with os.fdopen(fd, "w") as tmp_file:
                tmp_file.write('%s\n' % jsonutils.dumps(dct))
            os.rename(tmp_path, self.disk_info_path)
        try:
            if (self.disk_info_path is not None and
                        os.path.exists(self.disk_info_path)):
                with open(self.disk_info_path) as disk_info_file:
                    line = disk_info_file.read().rstrip()
                    dct = _dict_from_line(line)
                    for path, driver_format in six.iteritems(dct):
                        if path == self.path:
                            return driver_format
            driver_format = self._get_driver_format()
            if self.disk_info_path is not None:
                fileutils.ensure_tree(os.path.dirname(self.disk_info_path))
                write_to_disk_info_file()
        except OSError as e:
            raise exception.DiskInfoReadWriteFail(reason=six.text_type(e))
        return driver_format
    @staticmethod
    def is_shared_block_storage():
        """True if the backend puts images on a shared block storage."""
        return False
    @staticmethod
    def is_file_in_instance_path():
        """True if the backend stores images in files under instance path."""
        return False
    def clone(self, context, image_id_or_uri):
        """Clone an image.
        Note that clone operation is backend-dependent. The backend may ask
        the image API for a list of image "locations" and select one or more
        of those locations to clone an image from.
        :param image_id_or_uri: The ID or URI of an image to clone.
        :raises: exception.ImageUnacceptable if it cannot be cloned
        """
        reason = _('clone() is not implemented')
        raise exception.ImageUnacceptable(image_id=image_id_or_uri,
                                          reason=reason)
    def direct_snapshot(self, context, snapshot_name, image_format, image_id,
                        base_image_id):
        """Prepare a snapshot for direct reference from glance
        :raises: exception.ImageUnacceptable if it cannot be
                 referenced directly in the specified image format
        :returns: URL to be given to glance
        """
        raise NotImplementedError(_('direct_snapshot() is not implemented'))
    def cleanup_direct_snapshot(self, location, also_destroy_volume=False,
                                ignore_errors=False):
        """Performs any cleanup actions required after calling
        direct_snapshot(), for graceful exception handling and the like.
        This should be a no-op on any backend where it is not implemented.
        """
        pass
    def _get_lock_name(self, base):
        """Get an image's name of a base file."""
        return os.path.split(base)[-1]
    def get_model(self, connection):
        """Get the image information model
        :returns: an instance of nova.virt.image.model.Image
        """
        raise NotImplementedError()
    def import_file(self, instance, local_file, remote_name):
        """Import an image from local storage into this backend.
        Import a local file into the store used by this image type. Note that
        this is a noop for stores using local disk (the local file is
        considered "in the store").
        If the image already exists it will be overridden by the new file
        :param local_file: path to the file to import
        :param remote_name: the name for the file in the store
        """
        # NOTE(mikal): this is a noop for now for all stores except RBD, but
        # we should talk about if we want this functionality for everything.
        pass
    def create_snap(self, name):
        """Create a snapshot on the image.  A noop on backends that don't
        support snapshots.
        :param name: name of the snapshot
        """
        pass
    def remove_snap(self, name, ignore_errors=False):
        """Remove a snapshot on the image.  A noop on backends that don't
        support snapshots.
        :param name: name of the snapshot
        :param ignore_errors: don't log errors if the snapshot does not exist
        """
        pass
    def rollback_to_snap(self, name):
        """Rollback the image to the named snapshot. A noop on backends that
        don't support snapshots.
        :param name: name of the snapshot
        """
        pass
class Flat(Image):
    """The Flat backend uses either raw or qcow2 storage. It never uses
    a backing store, so when using qcow2 it copies an image rather than
    creating an overlay. By default it creates raw files, but will use qcow2
    when creating a disk from a qcow2 if force_raw_images is not set in config.
    """
    def __init__(self, instance=None, disk_name=None, path=None):
        """:param instance: instance whose directory holds the disk
                            (used only when `path` is not given)
        :param disk_name: file name of the disk within the instance path
        :param path: explicit path to the disk; overrides instance/disk_name
        """
        self.disk_name = disk_name
        path = (path or os.path.join(libvirt_utils.get_instance_path(instance),
                                     disk_name))
        super(Flat, self).__init__(path, "file", "raw", is_block_dev=False)
        # 'space' preallocation implies native I/O for better performance.
        self.preallocate = (
            strutils.to_slug(CONF.preallocate_images) == 'space')
        if self.preallocate:
            self.driver_io = "native"
        self.disk_info_path = os.path.join(os.path.dirname(path), 'disk.info')
        self.correct_format()
    def _get_driver_format(self):
        """Probe the on-disk format via qemu-img; fall back to 'raw'."""
        try:
            data = images.qemu_img_info(self.path)
            return data.file_format
        except exception.InvalidDiskInfo as e:
            LOG.info(_LI('Failed to get image info from path %(path)s; '
                         'error: %(error)s'),
                     {'path': self.path,
                      'error': e})
            return 'raw'
    def _supports_encryption(self):
        # NOTE(dgenin): Kernel, ramdisk and disk.config are fetched using
        # the Flat backend regardless of which backend is configured for
        # ephemeral storage. Encryption for the Flat backend is not yet
        # implemented so this loophole is necessary to allow other
        # backends already supporting encryption to function. This can
        # be removed once encryption for Flat is implemented.
        if self.disk_name not in ['kernel', 'ramdisk', 'disk.config']:
            return False
        else:
            return True
    def correct_format(self):
        # Re-resolve the driver format from disk.info / qemu-img when the
        # image file actually exists.
        if os.path.exists(self.path):
            self.driver_format = self.resolve_driver_format()
    def create_image(self, prepare_template, base, size, *args, **kwargs):
        filename = self._get_lock_name(base)
        @utils.synchronized(filename, external=True, lock_path=self.lock_path)
        def copy_raw_image(base, target, size):
            libvirt_utils.copy_image(base, target)
            if size:
                image = imgmodel.LocalFileImage(target,
                                                self.driver_format)
                disk.extend(image, size)
        # No image_id means the template is generated (e.g. ephemeral or
        # swap disk) rather than fetched from glance.
        generating = 'image_id' not in kwargs
        if generating:
            if not self.exists():
                # Generating image in place
                prepare_template(target=self.path, *args, **kwargs)
        else:
            if not os.path.exists(base):
                prepare_template(target=base, *args, **kwargs)
            # NOTE(mikal): Update the mtime of the base file so the image
            # cache manager knows it is in use.
            libvirt_utils.update_mtime(base)
            self.verify_base_size(base, size)
            if not os.path.exists(self.path):
                with fileutils.remove_path_on_error(self.path):
                    copy_raw_image(base, self.path, size)
        self.correct_format()
    def resize_image(self, size):
        image = imgmodel.LocalFileImage(self.path, self.driver_format)
        disk.extend(image, size)
    def snapshot_extract(self, target, out_format):
        images.convert_image(self.path, target, self.driver_format, out_format)
    @staticmethod
    def is_file_in_instance_path():
        return True
    def get_model(self, connection):
        return imgmodel.LocalFileImage(self.path,
                                       imgmodel.FORMAT_RAW)
class Qcow2(Image):
    """qcow2 file backend: each instance disk is a copy-on-write overlay
    whose backing file is the cached base image.
    """

    def __init__(self, instance=None, disk_name=None, path=None):
        # Default to <instance directory>/<disk_name> when no explicit path
        # is supplied.
        path = (path or os.path.join(libvirt_utils.get_instance_path(instance),
                                     disk_name))
        super(Qcow2, self).__init__(path, "file", "qcow2", is_block_dev=False)

        self.preallocate = (
            strutils.to_slug(CONF.preallocate_images) == 'space')
        if self.preallocate:
            self.driver_io = "native"
        self.disk_info_path = os.path.join(os.path.dirname(path), 'disk.info')
        self.resolve_driver_format()

    def create_image(self, prepare_template, base, size, *args, **kwargs):
        filename = self._get_lock_name(base)

        # Serialized across processes so concurrent builds do not race on
        # the same cached base image.
        @utils.synchronized(filename, external=True, lock_path=self.lock_path)
        def copy_qcow2_image(base, target, size):
            # TODO(pbrady): Consider copying the cow image here
            # with preallocation=metadata set for performance reasons.
            # This would be keyed on a 'preallocate_images' setting.
            libvirt_utils.create_cow_image(base, target)
            if size:
                image = imgmodel.LocalFileImage(target, imgmodel.FORMAT_QCOW2)
                disk.extend(image, size)

        # Download the unmodified base image unless we already have a copy.
        if not os.path.exists(base):
            prepare_template(target=base, *args, **kwargs)

        # NOTE(ankit): Update the mtime of the base file so the image
        # cache manager knows it is in use.
        libvirt_utils.update_mtime(base)
        self.verify_base_size(base, size)

        legacy_backing_size = None
        legacy_base = base

        # Determine whether an existing qcow2 disk uses a legacy backing by
        # actually looking at the image itself and parsing the output of the
        # backing file it expects to be using.
        if os.path.exists(self.path):
            backing_path = libvirt_utils.get_disk_backing_file(self.path)
            if backing_path is not None:
                backing_file = os.path.basename(backing_path)
                backing_parts = backing_file.rpartition('_')
                # Legacy backing files are named <base>_<size-in-GB>.
                if backing_file != backing_parts[-1] and \
                        backing_parts[-1].isdigit():
                    legacy_backing_size = int(backing_parts[-1])
                    legacy_base += '_%d' % legacy_backing_size
                    legacy_backing_size *= units.Gi

        # Create the legacy backing file if necessary.
        if legacy_backing_size:
            if not os.path.exists(legacy_base):
                with fileutils.remove_path_on_error(legacy_base):
                    libvirt_utils.copy_image(base, legacy_base)
                    image = imgmodel.LocalFileImage(legacy_base,
                                                    imgmodel.FORMAT_QCOW2)
                    disk.extend(image, legacy_backing_size)

        if not os.path.exists(self.path):
            with fileutils.remove_path_on_error(self.path):
                copy_qcow2_image(base, self.path, size)

    def resize_image(self, size):
        image = imgmodel.LocalFileImage(self.path, imgmodel.FORMAT_QCOW2)
        disk.extend(image, size)

    def snapshot_extract(self, target, out_format):
        libvirt_utils.extract_snapshot(self.path, 'qcow2',
                                       target,
                                       out_format)

    @staticmethod
    def is_file_in_instance_path():
        return True

    def get_model(self, connection):
        return imgmodel.LocalFileImage(self.path,
                                       imgmodel.FORMAT_QCOW2)
class Lvm(Image):
    """LVM logical-volume backend, with optional dm-crypt encryption of
    ephemeral disks.
    """

    @staticmethod
    def escape(filename):
        # Double any underscores so a disk_name containing '_' cannot be
        # ambiguous with the '<uuid>_<disk_name>' separator used for LV
        # names below.
        return filename.replace('_', '__')

    def __init__(self, instance=None, disk_name=None, path=None):
        self.ephemeral_key_uuid = instance.get('ephemeral_key_uuid')

        if self.ephemeral_key_uuid is not None:
            self.key_manager = keymgr.API(CONF)
        else:
            self.key_manager = None

        if path:
            if self.ephemeral_key_uuid is None:
                # Derive VG/LV from the existing volume.
                info = lvm.volume_info(path)
                self.vg = info['VG']
                self.lv = info['LV']
            else:
                self.vg = CONF.libvirt.images_volume_group
        else:
            if not CONF.libvirt.images_volume_group:
                raise RuntimeError(_('You should specify'
                                     ' images_volume_group'
                                     ' flag to use LVM images.'))
            self.vg = CONF.libvirt.images_volume_group
            self.lv = '%s_%s' % (instance.uuid,
                                 self.escape(disk_name))
            if self.ephemeral_key_uuid is None:
                path = os.path.join('/dev', self.vg, self.lv)
            else:
                # Encrypted: the guest-visible device is the dm-crypt
                # mapping; lv_path keeps the underlying LV for cleanup.
                self.lv_path = os.path.join('/dev', self.vg, self.lv)
                path = '/dev/mapper/' + dmcrypt.volume_name(self.lv)

        super(Lvm, self).__init__(path, "block", "raw", is_block_dev=True)

        # TODO(pbrady): possibly deprecate libvirt.sparse_logical_volumes
        # for the more general preallocate_images
        self.sparse = CONF.libvirt.sparse_logical_volumes
        self.preallocate = not self.sparse

        if not self.sparse:
            self.driver_io = "native"

    def _supports_encryption(self):
        return True

    def _can_fallocate(self):
        # Space is allocated via lvm.create_volume; fallocate is not used.
        return False

    def create_image(self, prepare_template, base, size, *args, **kwargs):
        def encrypt_lvm_image():
            # NOTE: 'key' is resolved lazily from the enclosing scope; it
            # is assigned below (encrypted path) before either call site
            # can run.
            dmcrypt.create_volume(self.path.rpartition('/')[2],
                                  self.lv_path,
                                  CONF.ephemeral_storage_encryption.cipher,
                                  CONF.ephemeral_storage_encryption.key_size,
                                  key)

        filename = self._get_lock_name(base)

        @utils.synchronized(filename, external=True, lock_path=self.lock_path)
        def create_lvm_image(base, size):
            base_size = disk.get_disk_size(base)
            self.verify_base_size(base, size, base_size=base_size)
            # Never shrink below the base image's size.
            resize = size > base_size
            size = size if resize else base_size
            lvm.create_volume(self.vg, self.lv,
                              size, sparse=self.sparse)
            if self.ephemeral_key_uuid is not None:
                encrypt_lvm_image()
            # NOTE: by calling convert_image_unsafe here we're
            # telling qemu-img convert to do format detection on the input,
            # because we don't know what the format is. For example,
            # we might have downloaded a qcow2 image, or created an
            # ephemeral filesystem locally, we just don't know here. Having
            # audited this, all current sources have been sanity checked,
            # either because they're locally generated, or because they have
            # come from images.fetch_to_raw. However, this is major code smell.
            images.convert_image_unsafe(base, self.path, self.driver_format,
                                        run_as_root=True)
            if resize:
                disk.resize2fs(self.path, run_as_root=True)

        generated = 'ephemeral_size' in kwargs
        if self.ephemeral_key_uuid is not None:
            if 'context' in kwargs:
                try:
                    # NOTE(dgenin): Key manager corresponding to the
                    # specific backend catches and reraises an
                    # exception if key retrieval fails.
                    key = self.key_manager.get(kwargs['context'],
                                               self.ephemeral_key_uuid).get_encoded()
                except Exception:
                    with excutils.save_and_reraise_exception():
                        LOG.error(_LE("Failed to retrieve ephemeral encryption"
                                      " key"))
            else:
                raise exception.NovaException(
                    _("Instance disk to be encrypted but no context provided"))
        # Generate images with specified size right on volume
        if generated and size:
            lvm.create_volume(self.vg, self.lv,
                              size, sparse=self.sparse)
            with self.remove_volume_on_error(self.path):
                if self.ephemeral_key_uuid is not None:
                    encrypt_lvm_image()
                prepare_template(target=self.path, *args, **kwargs)
        else:
            if not os.path.exists(base):
                prepare_template(target=base, *args, **kwargs)
            with self.remove_volume_on_error(self.path):
                create_lvm_image(base, size)

    # NOTE(nic): Resizing the image is already handled in create_image(),
    # and migrate/resize is not supported with LVM yet, so this is a no-op
    def resize_image(self, size):
        pass

    @contextlib.contextmanager
    def remove_volume_on_error(self, path):
        # Clean up the (possibly encrypted) volume if the guarded block
        # fails, then re-raise the original exception.
        try:
            yield
        except Exception:
            with excutils.save_and_reraise_exception():
                if self.ephemeral_key_uuid is None:
                    lvm.remove_volumes([path])
                else:
                    dmcrypt.delete_volume(path.rpartition('/')[2])
                    lvm.remove_volumes([self.lv_path])

    def snapshot_extract(self, target, out_format):
        images.convert_image(self.path, target, self.driver_format,
                             out_format, run_as_root=True)

    def get_model(self, connection):
        return imgmodel.LocalBlockImage(self.path)
class Rbd(Image):
    """Ceph RBD backend: instance disks are RBD volumes in a configured
    pool, attached to the guest over the network.
    """

    SUPPORTS_CLONE = True

    def __init__(self, instance=None, disk_name=None, path=None, **kwargs):
        if not CONF.libvirt.images_rbd_pool:
            raise RuntimeError(_('You should specify'
                                 ' images_rbd_pool'
                                 ' flag to use rbd images.'))

        if path:
            # path looks like 'rbd:<pool>/<volume>[:opt=...]'; the volume
            # name is the second '/'-separated component.
            try:
                self.rbd_name = path.split('/')[1]
            except IndexError:
                raise exception.InvalidDevicePath(path=path)
        else:
            self.rbd_name = '%s_%s' % (instance.uuid, disk_name)

        self.pool = CONF.libvirt.images_rbd_pool
        self.rbd_user = CONF.libvirt.rbd_user
        self.ceph_conf = CONF.libvirt.images_rbd_ceph_conf

        path = 'rbd:%s/%s' % (self.pool, self.rbd_name)
        if self.rbd_user:
            path += ':id=' + self.rbd_user
        if self.ceph_conf:
            path += ':conf=' + self.ceph_conf

        super(Rbd, self).__init__(path, "block", "rbd", is_block_dev=False)

        self.driver = rbd_utils.RBDDriver(
            pool=self.pool,
            ceph_conf=self.ceph_conf,
            rbd_user=self.rbd_user)

        self.discard_mode = CONF.libvirt.hw_disk_discard

    def libvirt_info(self, disk_bus, disk_dev, device_type, cache_mode,
                     extra_specs, hypervisor_version, boot_order=None):
        """Get `LibvirtConfigGuestDisk` filled for this image.

        :disk_dev: Disk bus device name
        :disk_bus: Disk bus type
        :device_type: Device type for this image.
        :cache_mode: Caching mode for this image
        :extra_specs: Instance type extra specs dict.
        :hypervisor_version: the hypervisor version
        :boot_order: Disk device boot order
        """
        info = vconfig.LibvirtConfigGuestDisk()

        hosts, ports = self.driver.get_mon_addrs()

        info.source_device = device_type
        info.driver_format = 'raw'
        info.driver_cache = cache_mode
        info.driver_discard = self.discard_mode
        info.target_bus = disk_bus
        info.target_dev = disk_dev
        # Attach over the network ('rbd' protocol) pointing libvirt at the
        # Ceph monitor addresses, rather than at a local file.
        info.source_type = 'network'
        info.source_protocol = 'rbd'
        info.source_name = '%s/%s' % (self.pool, self.rbd_name)
        info.source_hosts = hosts
        info.source_ports = ports
        info.boot_order = boot_order
        auth_enabled = (CONF.libvirt.rbd_user is not None)
        if CONF.libvirt.rbd_secret_uuid:
            info.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid
            auth_enabled = True  # Force authentication locally
            if CONF.libvirt.rbd_user:
                info.auth_username = CONF.libvirt.rbd_user
        if auth_enabled:
            info.auth_secret_type = 'ceph'
            info.auth_secret_uuid = CONF.libvirt.rbd_secret_uuid

        self.disk_qos(info, extra_specs)

        return info

    def _can_fallocate(self):
        # RBD volumes are not local files; fallocate does not apply.
        return False

    def exists(self):
        return self.driver.exists(self.rbd_name)

    def get_disk_size(self, name):
        """Returns the size of the virtual disk in bytes.

        The name argument is ignored since this backend already knows
        its name, and callers may pass a non-existent local file path.
        """
        return self.driver.size(self.rbd_name)

    def create_image(self, prepare_template, base, size, *args, **kwargs):
        if not self.exists():
            prepare_template(target=base, *args, **kwargs)

        # prepare_template() may have cloned the image into a new rbd
        # image already instead of downloading it locally
        if not self.exists():
            self.driver.import_image(base, self.rbd_name)
        self.verify_base_size(base, size)

        if size and size > self.get_disk_size(self.rbd_name):
            self.driver.resize(self.rbd_name, size)

    def resize_image(self, size):
        self.driver.resize(self.rbd_name, size)

    def snapshot_extract(self, target, out_format):
        images.convert_image(self.path, target, 'raw', out_format)

    @staticmethod
    def is_shared_block_storage():
        return True

    def clone(self, context, image_id_or_uri):
        # Clone the glance image's RBD location into this disk instead of
        # downloading it, when a cloneable location exists.
        image_meta = IMAGE_API.get(context, image_id_or_uri,
                                   include_locations=True)
        locations = image_meta['locations']

        LOG.debug('Image locations are: %(locs)s' % {'locs': locations})

        if image_meta.get('disk_format') not in ['raw', 'iso']:
            reason = _('Image is not raw format')
            raise exception.ImageUnacceptable(image_id=image_id_or_uri,
                                              reason=reason)

        for location in locations:
            if self.driver.is_cloneable(location, image_meta):
                LOG.debug('Selected location: %(loc)s', {'loc': location})
                return self.driver.clone(location, self.rbd_name)

        reason = _('No image locations are accessible')
        raise exception.ImageUnacceptable(image_id=image_id_or_uri,
                                          reason=reason)

    def get_model(self, connection):
        secret = None
        if CONF.libvirt.rbd_secret_uuid:
            secretobj = connection.secretLookupByUUIDString(
                CONF.libvirt.rbd_secret_uuid)
            secret = base64.b64encode(secretobj.value())

        hosts, ports = self.driver.get_mon_addrs()
        servers = [str(':'.join(k)) for k in zip(hosts, ports)]

        return imgmodel.RBDImage(self.rbd_name,
                                 self.pool,
                                 self.rbd_user,
                                 secret,
                                 servers)

    def import_file(self, instance, local_file, remote_name):
        name = '%s_%s' % (instance.uuid, remote_name)
        # NOTE(review): this checks whether *this* image (self.rbd_name)
        # exists but removes/imports the image called 'name'; presumably the
        # two coincide for the intended callers -- confirm before reusing.
        if self.exists():
            self.driver.remove_image(name)
        self.driver.import_image(local_file, name)

    def create_snap(self, name):
        return self.driver.create_snap(self.rbd_name, name)

    def remove_snap(self, name, ignore_errors=False):
        return self.driver.remove_snap(self.rbd_name, name, ignore_errors)

    def rollback_to_snap(self, name):
        return self.driver.rollback_to_snap(self.rbd_name, name)

    def _get_parent_pool(self, context, base_image_id, fsid):
        """Return the RBD pool holding the parent (base) image."""
        parent_pool = None
        try:
            # The easy way -- the image is an RBD clone, so use the parent
            # images' storage pool
            parent_pool, _im, _snap = self.driver.parent_info(self.rbd_name)
        except exception.ImageUnacceptable:
            # The hard way -- the image is itself a parent, so ask Glance
            # where it came from
            LOG.debug('No parent info for %s; asking the Image API where its '
                      'store is', base_image_id)
            try:
                image_meta = IMAGE_API.get(context, base_image_id,
                                           include_locations=True)
            except Exception as e:
                LOG.debug('Unable to get image %(image_id)s; error: %(error)s',
                          {'image_id': base_image_id, 'error': e})
                image_meta = {}

            # Find the first location that is in the same RBD cluster
            for location in image_meta.get('locations', []):
                try:
                    parent_fsid, parent_pool, _im, _snap = \
                        self.driver.parse_url(location['url'])
                    if parent_fsid == fsid:
                        break
                    else:
                        parent_pool = None
                except exception.ImageUnacceptable:
                    continue

        if not parent_pool:
            raise exception.ImageUnacceptable(
                _('Cannot determine the parent storage pool for %s; '
                  'cannot determine where to store images') %
                base_image_id)

        return parent_pool

    def direct_snapshot(self, context, snapshot_name, image_format,
                        image_id, base_image_id):
        """Creates an RBD snapshot directly.
        """
        fsid = self.driver.get_fsid()
        # NOTE(nic): Nova has zero comprehension of how Glance's image store
        # is configured, but we can infer what storage pool Glance is using
        # by looking at the parent image.  If using authx, write access should
        # be enabled on that pool for the Nova user
        parent_pool = self._get_parent_pool(context, base_image_id, fsid)

        # Snapshot the disk and clone it into Glance's storage pool.  librbd
        # requires that snapshots be set to "protected" in order to clone them
        self.driver.create_snap(self.rbd_name, snapshot_name, protect=True)
        location = {'url': 'rbd://%(fsid)s/%(pool)s/%(image)s/%(snap)s' %
                           dict(fsid=fsid,
                                pool=self.pool,
                                image=self.rbd_name,
                                snap=snapshot_name)}
        try:
            self.driver.clone(location, image_id, dest_pool=parent_pool)
            # Flatten the image, which detaches it from the source snapshot
            self.driver.flatten(image_id, pool=parent_pool)
        finally:
            # all done with the source snapshot, clean it up
            self.cleanup_direct_snapshot(location)

        # Glance makes a protected snapshot called 'snap' on uploaded
        # images and hands it out, so we'll do that too.  The name of
        # the snapshot doesn't really matter, this just uses what the
        # glance-store rbd backend sets (which is not configurable).
        self.driver.create_snap(image_id, 'snap', pool=parent_pool,
                                protect=True)
        return ('rbd://%(fsid)s/%(pool)s/%(image)s/snap' %
                dict(fsid=fsid, pool=parent_pool, image=image_id))

    def cleanup_direct_snapshot(self, location, also_destroy_volume=False,
                                ignore_errors=False):
        """Unprotects and destroys the named snapshot.

        With also_destroy_volume=True, it will also cleanup/destroy the parent
        volume.  This is useful for cleaning up when the target volume fails
        to snapshot properly.
        """
        if location:
            _fsid, _pool, _im, _snap = self.driver.parse_url(location['url'])
            self.driver.remove_snap(_im, _snap, pool=_pool, force=True,
                                    ignore_errors=ignore_errors)
            if also_destroy_volume:
                self.driver.destroy_volume(_im, pool=_pool)
class Ploop(Image):
    """Parallels ploop disk backend.

    Each disk is a directory holding a `root.hds` payload plus the ploop
    descriptor generated by `ploop restore-descriptor`.
    """

    def __init__(self, instance=None, disk_name=None, path=None):
        # Default to <instance directory>/<disk_name> when no explicit path
        # is supplied.
        path = (path or os.path.join(libvirt_utils.get_instance_path(instance),
                                     disk_name))
        super(Ploop, self).__init__(path, "file", "ploop", is_block_dev=False)

        self.resolve_driver_format()

    def create_image(self, prepare_template, base, size, *args, **kwargs):
        # Use the shared lock-name helper for consistency with the other
        # file backends (Flat, Qcow2) so that all backends serialize on the
        # same lock for a given cached base image.
        filename = self._get_lock_name(base)

        @utils.synchronized(filename, external=True, lock_path=self.lock_path)
        def create_ploop_image(base, target, size):
            # A ploop "disk" is a directory: copy the payload in as
            # root.hds, then generate the descriptor for it.
            image_path = os.path.join(target, "root.hds")
            libvirt_utils.copy_image(base, image_path)
            utils.execute('ploop', 'restore-descriptor', '-f', self.pcs_format,
                          target, image_path)
            if size:
                self.resize_image(size)

        # Decide the ploop format before building the disk for the first
        # time; only 'raw' and 'ploop' glance images are supported.
        if not os.path.exists(self.path):
            if CONF.force_raw_images:
                self.pcs_format = "raw"
            else:
                image_meta = IMAGE_API.get(kwargs["context"],
                                           kwargs["image_id"])
                format = image_meta.get("disk_format")
                if format == "ploop":
                    self.pcs_format = "expanded"
                elif format == "raw":
                    self.pcs_format = "raw"
                else:
                    reason = _("PCS doesn't support images in %s format."
                               " You should either set force_raw_images=True"
                               " in config or upload an image in ploop"
                               " or raw format.") % format
                    raise exception.ImageUnacceptable(
                        image_id=kwargs["image_id"],
                        reason=reason)

        if not os.path.exists(base):
            prepare_template(target=base, *args, **kwargs)
        self.verify_base_size(base, size)

        if os.path.exists(self.path):
            return

        fileutils.ensure_tree(self.path)

        # On failure remove the whole disk directory, not just a file.
        remove_func = functools.partial(fileutils.delete_if_exists,
                                        remove=shutil.rmtree)
        with fileutils.remove_path_on_error(self.path, remove=remove_func):
            create_ploop_image(base, self.path, size)

    def resize_image(self, size):
        image = imgmodel.LocalFileImage(self.path, imgmodel.FORMAT_PLOOP)
        disk.extend(image, size)

    def snapshot_extract(self, target, out_format):
        img_path = os.path.join(self.path, "root.hds")
        libvirt_utils.extract_snapshot(img_path,
                                       'parallels',
                                       target,
                                       out_format)
class Backend(object):
    """Registry that maps image_type names to backend classes."""

    def __init__(self, use_cow):
        # 'default' honours the deployment's copy-on-write preference.
        self.BACKEND = {
            'raw': Flat,
            'flat': Flat,
            'qcow2': Qcow2,
            'lvm': Lvm,
            'rbd': Rbd,
            'ploop': Ploop,
            'default': Qcow2 if use_cow else Flat
        }

    def backend(self, image_type=None):
        """Return the backend class registered for image_type.

        Falls back to CONF.libvirt.images_type when image_type is falsy.
        """
        selected = image_type or CONF.libvirt.images_type
        backend_cls = self.BACKEND.get(selected)
        if not backend_cls:
            raise RuntimeError(_('Unknown image_type=%s') % selected)
        return backend_cls

    def image(self, instance, disk_name, image_type=None):
        """Constructs image for selected backend

        :instance: Instance name.
        :name: Image name.
        :image_type: Image type.
                     Optional, is CONF.libvirt.images_type by default.
        """
        return self.backend(image_type)(instance=instance,
                                        disk_name=disk_name)

    def snapshot(self, instance, disk_path, image_type=None):
        """Returns snapshot for given image

        :path: path to image
        :image_type: type of image
        """
        return self.backend(image_type)(instance=instance, path=disk_path)
|
|
"""
# Copyright 2016-2022 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
from intern.remote import Remote
from intern.resource.boss.resource import *
from intern.service.boss.project import ProjectService
from intern.service.boss.metadata import MetadataService
from intern.service.boss.volume import VolumeService
from intern.service.boss.v1.volume import CacheMode
import warnings
# Names of the per-service sections looked up in the intern INI config file.
CONFIG_PROJECT_SECTION = 'Project Service'
CONFIG_METADATA_SECTION = 'Metadata Service'
CONFIG_VOLUME_SECTION = 'Volume Service'

# Keys expected inside each config section.
CONFIG_PROTOCOL = 'protocol'
# CONFIG_HOST example: api.theboss.io
CONFIG_HOST = 'host'
CONFIG_TOKEN = 'token'

# Boss API version used when the caller does not specify one.
LATEST_VERSION = 'v1'
class BossRemote(Remote):
"""
Remote provides an SDK to the Boss API.
Attributes:
_token_project (string): Django Framework token for auth to the
project service.
_token_metadata (string): Django Framework token for auth to the
metadata service.
_token_volume (string): Django Framework token for auth to the
volume service.
"""
    def __init__(self, cfg_file_or_dict=None, version=None):
        """
        Constructor.

        If no config argument is passed in, ~/.intern/intern.cfg is read by
        default.  Config data is in INI format.

        Args:
            version (optional[string]): Version of Boss API to use.
                Defaults to LATEST_VERSION.
            cfg_file_or_dict (optional[string|dict]): Path to config file in
                INI format or a dict of config parameters.

        Raises:
            (FileNotFoundError): if can't load given config file.
            (KeyError): if given invalid version.
        """
        Remote.__init__(self, cfg_file_or_dict)

        if version is None:
            version = LATEST_VERSION

        # Init the services
        self._init_project_service(version)
        self._init_metadata_service(version)
        self._init_volume_service(version)
def __repr__(self):
"""
Stringify the Remote.
Returns a representation of the BossRemote that lists the host.
"""
return "<intern.remote.BossRemote [" + self._config['Default']['host'] + "]>"
def _init_project_service(self, version):
"""
Method to initialize the Project Service from the config data
Args:
version (string): Version of Boss API to use.
Returns:
None
Raises:
(KeyError): if given invalid version.
"""
project_cfg = self._load_config_section(CONFIG_PROJECT_SECTION)
self._token_project = project_cfg[CONFIG_TOKEN]
proto = project_cfg[CONFIG_PROTOCOL]
host = project_cfg[CONFIG_HOST]
self._project = ProjectService(host, version)
self._project.base_protocol = proto
self._project.set_auth(self._token_project)
def _init_metadata_service(self, version):
"""
Method to initialize the Metadata Service from the config data
Args:
version (string): Version of Boss API to use.
Returns:
None
Raises:
(KeyError): if given invalid version.
"""
metadata_cfg = self._load_config_section(CONFIG_METADATA_SECTION)
self._token_metadata = metadata_cfg[CONFIG_TOKEN]
proto = metadata_cfg[CONFIG_PROTOCOL]
host = metadata_cfg[CONFIG_HOST]
self._metadata = MetadataService(host, version)
self._metadata.base_protocol = proto
self._metadata.set_auth(self._token_metadata)
def _init_volume_service(self, version):
"""
Method to initialize the Volume Service from the config data
Args:
version (string): Version of Boss API to use.
Returns:
None
Raises:
(KeyError): if given invalid version.
"""
volume_cfg = self._load_config_section(CONFIG_VOLUME_SECTION)
self._token_volume = volume_cfg[CONFIG_TOKEN]
proto = volume_cfg[CONFIG_PROTOCOL]
host = volume_cfg[CONFIG_HOST]
self._volume = VolumeService(host, version)
self._volume.base_protocol = proto
self._volume.set_auth(self._token_volume)
def _load_config_section(self, section_name):
"""
Method to load the specific Service section from the config file if it
exists, or fall back to the default
Args:
section_name (str): The desired service section name
Returns:
(dict): the section parameters
"""
if self._config.has_section(section_name):
# Load specific section
section = dict(self._config.items(section_name))
elif self._config.has_section("Default"):
# Load Default section
section = dict(self._config.items("Default"))
else:
raise KeyError((
"'{}' was not found in the configuration file and no default " +
"configuration was provided."
).format(section_name))
# Make sure section is valid
if "protocol" in section and "host" in section and "token" in section:
return section
else:
raise KeyError(
"Missing values in configuration data. " +
"Must contain: protocol, host, token"
)
    @property
    def token_project(self):
        """
        Returns the auth token used for the project service.
        """
        return self._token_project
    @token_project.setter
    def token_project(self, value):
        # Re-authenticate the project service with the new token.
        self._token_project = value
        self.project_service.set_auth(self._token_project)
    @property
    def token_metadata(self):
        """
        Returns the auth token used for the metadata service.
        """
        return self._token_metadata
    @token_metadata.setter
    def token_metadata(self, value):
        # Re-authenticate the metadata service with the new token.
        self._token_metadata = value
        self.metadata_service.set_auth(self._token_metadata)
    @property
    def token_volume(self):
        """
        Returns the auth token used for the volume service.
        """
        return self._token_volume
    @token_volume.setter
    def token_volume(self, value):
        # Re-authenticate the volume service with the new token.
        self._token_volume = value
        self.volume_service.set_auth(self._token_volume)
def list_groups(self, filtr=None):
"""
Get the groups the logged in user is a member of.
Optionally filter by 'member' or 'maintainer'.
Args:
filtr (optional[string|None]): ['member'|'maintainer']
Returns:
(list[string]): List of group names.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.list_groups(filtr)
def get_group(self, name, user_name=None):
"""
Get information on the given group or whether or not a user is a member
of the group.
Args:
name (string): Name of group to query.
user_name (optional[string]): Supply None if not interested in
determining if user is a member of the given group.
Returns:
(mixed): Dictionary if getting group information or bool if a user
name is supplied.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.get_group(name, user_name)
def create_group(self, name):
"""
Create a new group.
Args:
name (string): Name of the group to create.
Returns:
(bool): True on success.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.create_group(name)
def delete_group(self, name):
"""
Delete given group.
Args:
name (string): Name of group.
Returns:
(bool): True on success.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.delete_group(name)
def list_group_members(self, name):
"""
Get the members of a group.
Args:
name (string): Name of group to query.
Returns:
(list[string]): List of member names.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.list_group_members(name)
    def add_group_member(self, grp_name, user):
        """
        Add the given user to the named group.

        Both group and user must already exist for this to succeed.

        Args:
            grp_name (string): Name of group.
            user (string): User to add to group.

        Raises:
            requests.HTTPError on failure.
        """
        self.project_service.set_auth(self._token_project)
        self.project_service.add_group_member(grp_name, user)
    def delete_group_member(self, grp_name, user):
        """
        Remove the given user from the named group.

        Both group and user must already exist for this to succeed.

        Args:
            grp_name (string): Name of group.
            user (string): User to delete from the group.

        Raises:
            requests.HTTPError on failure.
        """
        self.project_service.set_auth(self._token_project)
        self.project_service.delete_group_member(grp_name, user)
    def get_is_group_member(self, grp_name, user):
        """
        Check if the given user is a member of the named group.

        Note that a group maintainer is not considered a member unless the
        user is also explicitly added as a member.

        Args:
            grp_name (string): Name of group.
            user (string): User of interest.

        Returns:
            (bool): False if user not a member.
        """
        self.project_service.set_auth(self._token_project)
        return self.project_service.get_is_group_member(grp_name, user)
def list_group_maintainers(self, name):
"""
Get the maintainers of a group.
Args:
name (string): Name of group to query.
Returns:
(list[string]): List of maintainer names.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.list_group_maintainers(name)
    def add_group_maintainer(self, name, user):
        """
        Add the given user as a maintainer of the named group.

        Both group and user must already exist for this to succeed.

        Args:
            name (string): Name of group.
            user (string): User to add as maintainer.

        Raises:
            requests.HTTPError on failure.
        """
        self.project_service.set_auth(self._token_project)
        self.project_service.add_group_maintainer(name, user)
    def delete_group_maintainer(self, grp_name, user):
        """
        Remove the given user as a maintainer of the named group.

        Both group and user must already exist for this to succeed.

        Args:
            grp_name (string): Name of group.
            user (string): User to remove as maintainer.

        Raises:
            requests.HTTPError on failure.
        """
        self.project_service.set_auth(self._token_project)
        self.project_service.delete_group_maintainer(grp_name, user)
    def get_is_group_maintainer(self, grp_name, user):
        """
        Check if the given user is a maintainer of the named group.

        Args:
            grp_name (string): Name of group.
            user (string): User of interest.

        Returns:
            (bool): False if user not a maintainer.
        """
        self.project_service.set_auth(self._token_project)
        return self.project_service.get_is_group_maintainer(grp_name, user)
def list_permissions(self, group_name=None, resource=None):
"""
List permission sets associated filtering by group and/or resource.
Args:
group_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on.
Returns:
(list): List of permissions.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.list_permissions(group_name, resource)
def get_permissions(self, grp_name, resource):
"""
Get permissions associated the group has with the given resource.
Args:
grp_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on.
Returns:
(list): List of permissions.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
return self.project_service.get_permissions(grp_name, resource)
def add_permissions(self, grp_name, resource, permissions):
"""
Add additional permissions for the group associated with the resource.
Args:
grp_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on.
permissions (list): List of permissions to add to the given resource
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
self.project_service.add_permissions(grp_name, resource, permissions)
def update_permissions(self, grp_name, resource, permissions):
"""
Update permissions for the group associated with the given resource.
Args:
grp_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on
permissions (list): List of permissions to add to the given resource
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
self.project_service.update_permissions(grp_name, resource, permissions)
def delete_permissions(self, grp_name, resource):
"""
Removes permissions from the group for the given resource.
Args:
grp_name (string): Name of group.
resource (intern.resource.boss.Resource): Identifies which data
model object to operate on.
Raises:
requests.HTTPError on failure.
"""
self.project_service.set_auth(self._token_project)
self.project_service.delete_permissions(grp_name, resource)
def get_user_roles(self, user):
    """
    Return all roles held by the given user.

    Args:
        user (string): User name.

    Returns:
        (list): List of roles that user has.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.get_user_roles(user)
def add_user_role(self, user, role):
    """
    Assign a role to the given user.

    Args:
        user (string): User name.
        role (string): Role to assign.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    svc.add_user_role(user, role)
def delete_user_role(self, user, role):
    """
    Strip a role from the given user.

    Args:
        user (string): User name.
        role (string): Role to remove.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    svc.delete_user_role(user, role)
def get_user(self, user):
    """
    Fetch the user's profile data (first and last name, email, etc).

    Args:
        user (string): User name.

    Returns:
        (dictionary): User's data encoded in a dictionary.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.get_user(user)
def get_user_groups(self, user):
    """
    Fetch the groups the given user belongs to.

    Args:
        user (string): User name.

    Returns:
        (list): User's groups.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.get_user_groups(user)
def add_user(self, user,
             first_name=None, last_name=None,
             email=None, password=None):
    """
    Create a new user account.

    Args:
        user (string): User name.
        first_name (optional[string]): User's first name. Defaults to None.
        last_name (optional[string]): User's last name. Defaults to None.
        email: (optional[string]): User's email address. Defaults to None.
        password: (optional[string]): User's password. Defaults to None.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    svc.add_user(user, first_name, last_name, email, password)
def delete_user(self, user):
    """
    Remove the given user account.

    Args:
        user (string): User name.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    svc.delete_user(user)
def _list_resource(self, resource):
    """
    List all instances of the given resource type.

    Internal helper shared by the public listing methods; use those
    instead:
        list_collections()
        list_experiments()
        list_channels()
        list_coordinate_frames()

    Args:
        resource (intern.resource.boss.BossResource): resource.name may be
            an empty string.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # Authenticate first, then defer to the base class's generic listing.
    self.project_service.set_auth(self._token_project)
    return super(BossRemote, self).list_project(resource=resource)
def list_collections(self):
    """
    List every collection visible to this remote.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # An empty name lists all collections.
    return self._list_resource(CollectionResource(name=''))
def list_experiments(self, collection_name):
    """
    List every experiment under the named collection.

    Args:
        collection_name (string): Name of the parent collection.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # The coordinate frame is irrelevant for a listing, so any
    # placeholder value works here.
    placeholder = ExperimentResource(
        name='', collection_name=collection_name, coord_frame='foo')
    return self._list_resource(placeholder)
def list_channels(self, collection_name, experiment_name):
    """
    List every channel in the named experiment of the named collection.

    Args:
        collection_name (string): Name of the parent collection.
        experiment_name (string): Name of the parent experiment.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # The channel type is irrelevant for a listing; 'image' is just a
    # placeholder to satisfy the resource constructor.
    placeholder = ChannelResource(
        name='', collection_name=collection_name,
        experiment_name=experiment_name, type='image')
    return self._list_resource(placeholder)
def list_coordinate_frames(self):
    """
    List every coordinate frame visible to this remote.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # An empty name lists all coordinate frames.
    return self._list_resource(CoordinateFrameResource(name=''))
def get_channel(self, chan_name, coll_name, exp_name):
    """
    Helper that gets a fully initialized ChannelResource for an *existing* channel.

    Args:
        chan_name (str): Name of channel.
        coll_name (str): Name of channel's collection.
        exp_name (str): Name of channel's experiment.

    Returns:
        (intern.resource.boss.ChannelResource)
    """
    return self.get_project(
        ChannelResource(chan_name, coll_name, exp_name))
def create_project(self, resource):
    """
    Create the data model entity described by the given resource.

    Args:
        resource (intern.resource.boss.BossResource)

    Returns:
        (intern.resource.boss.BossResource): Returns resource of type
            requested on success.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.create(resource)
def get_project(self, resource):
    """
    Fetch attributes of the data model object named by the given resource.

    Args:
        resource (intern.resource.boss.BossResource): resource.name as well
            as any parents must be identified to succeed.

    Returns:
        (intern.resource.boss.BossResource): Returns resource of type
            requested on success.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.get(resource)
def update_project(self, resource_name, resource):
    """
    Update an entity in the data model using the given resource.

    Args:
        resource_name (string): Current name of the resource (in case the
            resource is getting its name changed).
        resource (intern.resource.boss.BossResource): New attributes for
            the resource.

    Returns:
        (intern.resource.boss.BossResource): Returns updated resource of
            given type on success.

    Raises:
        requests.HTTPError on failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    return svc.update(resource_name, resource)
def delete_project(self, resource):
    """
    Delete the entity described by the given resource.

    Args:
        resource (intern.resource.boss.BossResource)

    Raises:
        requests.HTTPError on a failure.
    """
    svc = self.project_service
    svc.set_auth(self._token_project)
    svc.delete(resource)
def list_metadata(self, resource):
    """
    List all metadata keys attached to the given resource.

    Args:
        resource (intern.resource.boss.BossResource)

    Returns:
        (list)

    Raises:
        requests.HTTPError on a failure.
    """
    svc = self.metadata_service
    svc.set_auth(self._token_metadata)
    return svc.list(resource)
def create_metadata(self, resource, keys_vals):
    """
    Attach new key-value pairs to the given resource.

    Will attempt to add all key-value pairs even if some fail.

    Args:
        resource (intern.resource.boss.BossResource)
        keys_vals (dictionary): Collection of key-value pairs to assign to
            given resource.

    Raises:
        HTTPErrorList on failure.
    """
    svc = self.metadata_service
    svc.set_auth(self._token_metadata)
    svc.create(resource, keys_vals)
def get_metadata(self, resource, keys):
    """
    Fetch values for the given keys attached to the given resource.

    Args:
        resource (intern.resource.boss.BossResource)
        keys (list)

    Returns:
        (dictionary)

    Raises:
        HTTPErrorList on failure.
    """
    svc = self.metadata_service
    svc.set_auth(self._token_metadata)
    return svc.get(resource, keys)
def update_metadata(self, resource, keys_vals):
    """
    Update existing key-value pairs on the given resource.

    Will attempt to update all key-value pairs even if some fail.
    Keys must already exist.

    Args:
        resource (intern.resource.boss.BossResource)
        keys_vals (dictionary): Collection of key-value pairs to update on
            the given resource.

    Raises:
        HTTPErrorList on failure.
    """
    svc = self.metadata_service
    svc.set_auth(self._token_metadata)
    svc.update(resource, keys_vals)
def delete_metadata(self, resource, keys):
    """
    Delete the given keys from the given resource's metadata.

    Will attempt to delete all key-value pairs even if some fail.

    Args:
        resource (intern.resource.boss.BossResource)
        keys (list)

    Raises:
        HTTPErrorList on failure.
    """
    svc = self.metadata_service
    svc.set_auth(self._token_metadata)
    svc.delete(resource, keys)
def parse_bossURI(self, uri):  # type: (str) -> Resource
    """
    Parse a bossDB URI and handle malformed input gracefully.

    Args:
        uri (str): URI of the form bossdb://<collection>/<experiment>/<channel>

    Returns:
        Resource

    Raises:
        ValueError: if the URI lacks a scheme separator or does not have
            exactly collection/experiment/channel path segments.
    """
    # Bug fix: uri.split("://")[1] raised IndexError for a URI with no
    # "://" separator even though this method promises to handle malformed
    # URIs; partition lets us detect that case and raise ValueError.
    _, sep, rest = uri.partition("://")
    if sep:
        segments = rest.split("/")
        if len(segments) == 3:
            # Segments arrive as collection/experiment/channel;
            # get_channel wants channel first.
            return self.get_channel(segments[2], segments[0], segments[1])
    raise ValueError("Cannot parse URI " + uri + ".")
def get_cutout(self, resource, resolution, x_range, y_range, z_range,
               time_range=None, id_list=None, no_cache=None,
               access_mode=CacheMode.no_cache, parallel: bool = True,
               **kwargs):
    """Get a cutout from the volume service.

    Note that access_mode=no_cache is desirable when reading large amounts of
    data at once. In these cases, the data is not first read into the
    cache, but instead, is sent directly from the data store to the
    requester.

    Args:
        resource (intern.resource.boss.resource.ChannelResource | str): Channel or layer Resource. If a
            string is provided instead, BossRemote.parse_bossURI is called instead on a URI-formatted
            string of the form `bossdb://collection/experiment/channel`.
        resolution (int): 0 indicates native resolution.
        x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20.
        y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20.
        z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20.
        time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40.
        id_list (optional [list[int]]): list of object ids to filter the cutout by.
            Defaults to None, treated as an empty list.
        no_cache (optional [boolean or None]): Deprecated way to specify the use of cache to be True or False.
            access_mode should be used instead
        access_mode (optional [Enum]): Identifies one of three cache access options:
            cache = Will check both cache and for dirty keys
            no_cache = Will skip cache check but check for dirty keys
            raw = Will skip both the cache and dirty keys check
        parallel (bool: True): Whether downloads should be parallelized using multiprocessing

    Returns:
        (numpy.array): A 3D or 4D (time) numpy matrix in (time)ZYX order.

    Raises:
        requests.HTTPError on error.
    """
    # Bug fix: the default was a mutable list ([]) shared across every
    # call; normalize a None default to a fresh list instead.
    if id_list is None:
        id_list = []
    # Legacy no_cache flag: warn, then translate it into access_mode.
    if no_cache is not None:
        warnings.warn("The no-cache option has been deprecated and will not be used in future versions of intern.")
        warnings.warn("Please from intern.service.boss.volume import CacheMode and use access_mode=CacheMode.[cache,no-cache,raw] instead.")
    if no_cache and access_mode != CacheMode.no_cache:
        warnings.warn("Both no_cache and access_mode were used, please use access_mode only. As no_cache has been deprecated. ")
        warnings.warn("Your request will be made using the default mode no_cache.")
        access_mode = CacheMode.no_cache
    if no_cache:
        access_mode = CacheMode.no_cache
    elif no_cache is False:
        # Bug fix: was `no_cache == False`; identity test makes the intent
        # (an explicit False, not merely falsy/None) unambiguous.
        access_mode = CacheMode.cache
    return self._volume.get_cutout(
        resource, resolution,
        x_range, y_range, z_range, time_range,
        id_list, access_mode,
        parallel=parallel, **kwargs
    )
def create_cutout_to_black(self, resource, resolution, x_range, y_range, z_range, time_range=None):
    """Post a black (zeroed) cutout to the volume service.

    Args:
        resource (intern.resource.Resource): Resource compatible with cutout operations.
        resolution (int): 0 indicates native resolution.
        x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20.
        y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20.
        z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20.
        time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40.

    Returns:
        (): Return type depends on volume service's implementation.

    Raises:
        RuntimeError when given invalid resource.
        Other exceptions may be raised depending on the volume service's implementation.
    """
    if resource.valid_volume():
        return self._volume.create_cutout_to_black(
            resource, resolution, x_range, y_range, z_range, time_range)
    raise RuntimeError('Resource incompatible with the volume service.')
def get_experiment(self, coll_name, exp_name):
    """
    Convenience method that fetches an experiment resource.

    Args:
        coll_name (str): Collection name
        exp_name (str): Experiment name

    Returns:
        (ExperimentResource)
    """
    return self.get_project(ExperimentResource(exp_name, coll_name))
def get_coordinate_frame(self, name):
    """
    Convenience method that fetches a coordinate frame resource.

    Args:
        name (str): Name of the coordinate frame

    Returns:
        (CoordinateFrameResource)
    """
    return self.get_project(CoordinateFrameResource(name))
def get_neuroglancer_link(self, resource, resolution, x_range, y_range, z_range, **kwargs):
    """
    Build a neuroglancer link for the specified cutout, using the host
    configured during the remote configuration step.

    Args:
        resource (intern.resource.Resource): Resource compatible with cutout operations.
        resolution (int): 0 indicates native resolution.
        x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20.
        y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20.
        z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20.

    Returns:
        (string): Return neuroglancer link.

    Raises:
        RuntimeError when given invalid resource.
        Other exceptions may be raised depending on the volume service's implementation.
    """
    volume = self._volume
    return volume.get_neuroglancer_link(
        resource, resolution, x_range, y_range, z_range, **kwargs)
def get_extents(self, resource):
    """Get the coordinate-frame extents of the resource.

    Args:
        resource (intern.resource.boss.BossResource.ExperimentResource)

    Returns:
        extents (array): [[x-min, max-x], [y-min, max-y], [z-min, max-z]]

    Raises:
        requests.HTTPError on failure.
    """
    frame = self.get_coordinate_frame(resource.coord_frame)
    return [
        [frame.x_start, frame.x_stop],
        [frame.y_start, frame.y_stop],
        [frame.z_start, frame.z_stop],
    ]
|
|
# Authentication tests based on airmozilla
# https://github.com/mozilla/airmozilla/blob/master/airmozilla/\
# auth/tests/test_views.py
import json
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.test import RequestFactory
from django.test.client import Client
from django.test.utils import override_settings
import mock
from nose.tools import eq_, ok_
from remo.base.tests import RemoTestCase, requires_login, requires_permission
from remo.base.views import robots_txt
from remo.profiles.models import FunctionalArea
from remo.profiles.tasks import check_mozillian_username
from remo.profiles.tests import (FunctionalAreaFactory, UserFactory,
UserStatusFactory)
from remo.reports.models import Activity, Campaign
from remo.reports.tests import ActivityFactory, CampaignFactory
class MozilliansTest(RemoTestCase):
    """Tests for syncing user data from the mozillians.org API.

    Each test patches MozilliansClient.lookup_user so no network call is
    made; the check_mozillian_username task is then run synchronously via
    .apply().
    """

    @override_settings(MOZILLIANS_API_KEY='key')
    @override_settings(MOZILLIANS_API_URL='https://example.com/api/v2/')
    @mock.patch('remo.profiles.tasks.MozilliansClient.lookup_user')
    def test_mozillian_username_exists(self, mocked_lookup):
        """Test that if an Anonymous Mozillians changes his
        settings in the mozillians.org, we update his username
        on our portal.
        """
        mozillian = UserFactory.create(groups=['Mozillians'])
        # Public full_name privacy means the name is visible to us.
        mocked_lookup.return_value = {
            'is_vouched': True,
            'email': mozillian.email,
            'username': 'Mozillian',
            'full_name': {
                'privacy': 'Public',
                'value': 'Awesome Mozillian'
            }
        }
        check_mozillian_username.apply()
        user = User.objects.get(email=mozillian.email)
        eq_(user.userprofile.mozillian_username, u'Mozillian')
        eq_(user.get_full_name(), u'Awesome Mozillian')

    @override_settings(MOZILLIANS_API_KEY='key')
    @override_settings(MOZILLIANS_API_URL='https://example.com/api/v2/')
    @mock.patch('remo.profiles.tasks.MozilliansClient.lookup_user')
    def test_mozillian_username_missing(self, mocked_lookup):
        """Test that if a Mozillian changes his
        settings in the mozillians.org, we update his username
        on our portal.
        """
        mozillian = UserFactory.create(
            groups=['Mozillians'], first_name='Awesome',
            last_name='Mozillian',
            userprofile__mozillian_username='Mozillian')
        # 'Mozillians' privacy (not Public) hides the name from us, so the
        # stored username/full name should be reset to anonymous values.
        mocked_lookup.return_value = {
            'is_vouched': True,
            'email': mozillian.email,
            'username': 'Mozillian',
            'full_name': {
                'privacy': 'Mozillians',
                'value': 'Awesome Mozillian'
            }
        }
        check_mozillian_username.apply()
        user = User.objects.get(email=mozillian.email)
        eq_(user.userprofile.mozillian_username, '')
        eq_(user.get_full_name(), u'Anonymous Mozillian')
class ViewsTest(RemoTestCase):
    """Tests for the base remo views (main page, robots.txt, settings)."""

    def setUp(self):
        # Shared fixtures: the settings payload posted in tests and the
        # edit-settings URL.
        self.settings_data = {'receive_email_on_add_comment': True}
        self.user_edit_settings_url = reverse('edit_settings')

    def test_view_main_page(self):
        """Get main page."""
        c = Client()
        response = c.get(reverse('main'))
        eq_(response.status_code, 200)
        self.assertJinja2TemplateUsed(response, 'main.jinja')

    @override_settings(ENGAGE_ROBOTS=True)
    def test_robots_allowed(self):
        """Test robots.txt generation when crawling allowed."""
        # Include a user who's not Rep
        UserFactory.create(userprofile__display_name='foo', groups=['Mozillian'])
        rep = UserFactory.create(groups=['Rep'])
        factory = RequestFactory()
        request = factory.get('/robots.txt')
        response = robots_txt(request)
        # Only the Rep's profile reports path should be disallowed; the
        # plain Mozillian user is not listed.
        eq_(response.content,
            ('User-agent: *\nDisallow: /reports/\n'
             'Disallow: /u/{0}/r/\n'.format(rep.userprofile.display_name)))

    @override_settings(ENGAGE_ROBOTS=False)
    def test_robots_disallowed(self):
        """Test robots.txt generation when crawling disallowed."""
        factory = RequestFactory()
        request = factory.get('/robots.txt')
        response = robots_txt(request)
        eq_(response.content, 'User-agent: *\nDisallow: /\n')

    def test_view_edit_settings_page(self):
        """Get edit settings page."""
        # NOTE(review): relies on a pre-existing 'mentor' user fixture;
        # Client.login silently returns False if it does not exist -- confirm.
        c = Client()
        c.login(username='mentor', password='passwd')
        response = c.get(self.user_edit_settings_url)
        self.assertJinja2TemplateUsed(response, 'settings.jinja')

    def test_edit_settings_rep(self):
        """Test correct edit settings mail preferences as rep."""
        user = UserFactory.create()
        with self.login(user) as client:
            response = client.post(self.user_edit_settings_url,
                                   self.settings_data, follow=True)
        eq_(response.request['PATH_INFO'], reverse('dashboard'))
        # Ensure that settings data were saved
        user = User.objects.get(username=user.username)
        eq_(user.userprofile.receive_email_on_add_comment,
            self.settings_data['receive_email_on_add_comment'])
class TestContribute(RemoTestCase):
    """Exercise the /contribute.json endpoint."""

    def test_base(self):
        response = Client().get('/contribute.json')
        eq_(response.status_code, 200)
        # should be valid JSON
        # NOTE(review): .next() on the streaming_content iterator is
        # Python 2 style; Python 3 would need next(...).
        ok_(json.loads(response.streaming_content.next()))
        eq_(response['Content-Type'], 'application/json')
class EditUserStatusTests(RemoTestCase):
    """Tests related to the User status edit View."""

    @requires_login()
    def test_get_as_anonymous(self):
        # Anonymous access must be rejected; the decorator asserts the
        # login requirement.
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        display_name = user.userprofile.display_name
        UserStatusFactory.create(user=user)
        client = Client()
        client.get(reverse('edit_availability',
                           kwargs={'display_name': display_name}))

    def test_get_as_owner(self):
        # The owner of the profile may view the edit-availability page.
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        display_name = user.userprofile.display_name
        UserStatusFactory.create(user=user)
        url = reverse('edit_availability',
                      kwargs={'display_name': display_name})
        with self.login(user) as client:
            response = client.get(url, user=user)
        self.assertJinja2TemplateUsed(response, 'edit_availability.jinja')

    @requires_permission()
    def test_get_as_other_rep(self):
        # A different Rep lacks permission; the decorator asserts that.
        mentor = UserFactory.create()
        user = UserFactory.create(userprofile__mentor=mentor)
        rep = UserFactory.create()
        display_name = user.userprofile.display_name
        UserStatusFactory.create(user=user)
        url = reverse('edit_availability',
                      kwargs={'display_name': display_name})
        with self.login(rep) as client:
            client.get(url, user=rep)

    @mock.patch('remo.base.views.messages.success')
    @mock.patch('remo.base.views.redirect', wraps=redirect)
    @mock.patch('remo.base.views.UserStatusForm')
    def test_add_unavailability_status(self, form_mock, redirect_mock,
                                       messages_mock):
        # Patch the form to always validate so the view's success path
        # (message + redirect + form.save) is exercised.
        form_mock.is_valid.return_value = True
        user = UserFactory.create()
        display_name = user.userprofile.display_name
        with self.login(user) as client:
            response = client.post(reverse('edit_availability',
                                           kwargs={'display_name': display_name}),
                                   user=user, follow=True)
        eq_(response.status_code, 200)
        messages_mock.assert_called_with(
            mock.ANY, 'Request submitted successfully.')
        redirect_mock.assert_called_with('dashboard')
        ok_(form_mock().save.called)
class BaseListViewTest(RemoTestCase):
    """Test generic BaseListView class."""

    def test_base_content_activities_list(self):
        """Test list activities."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('list_activities'), follow=True)
        eq_(response.status_code, 200)
        eq_(response.context['verbose_name'], 'activity')
        eq_(response.context['verbose_name_plural'], 'activities')
        eq_(response.context['create_object_url'], reverse('create_activity'))
        self.assertJinja2TemplateUsed(response, 'base_content_list.jinja')

    def test_base_content_campaigns_list(self):
        """Test list campaigns."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('list_campaigns'), follow=True)
        eq_(response.status_code, 200)
        # Campaigns are labelled "initiatives" in the UI.
        eq_(response.context['verbose_name'], 'initiative')
        eq_(response.context['verbose_name_plural'], 'initiatives')
        eq_(response.context['create_object_url'], reverse('create_campaign'))
        self.assertJinja2TemplateUsed(response, 'base_content_list.jinja')

    def test_base_content_functional_areas_list(self):
        """Test list functional areas."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('list_functional_areas'), follow=True)
        eq_(response.status_code, 200)
        eq_(response.context['verbose_name'], 'functional area')
        eq_(response.context['verbose_name_plural'], 'functional areas')
        eq_(response.context['create_object_url'],
            reverse('create_functional_area'))
        self.assertJinja2TemplateUsed(response, 'base_content_list.jinja')

    @requires_permission()
    def test_base_content_list_unauthed(self):
        """Test list base content unauthorized."""
        user = UserFactory.create(groups=['Rep'])
        with self.login(user) as client:
            client.get(reverse('list_activities'), follow=True)
class BaseCreateViewTest(RemoTestCase):
    """Test generic BaseCreateView class."""

    def test_base_content_activity_create_get(self):
        """Test get create activity."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('create_activity'), follow=True)
        eq_(response.status_code, 200)
        eq_(response.context['verbose_name'], 'activity')
        eq_(response.context['creating'], True)
        self.assertJinja2TemplateUsed(response, 'base_content_edit.jinja')

    def test_base_content_campaign_create_get(self):
        """Test get create campaign."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('create_campaign'), follow=True)
        eq_(response.status_code, 200)
        # Campaigns are labelled "initiatives" in the UI.
        eq_(response.context['verbose_name'], 'initiative')
        eq_(response.context['creating'], True)
        self.assertJinja2TemplateUsed(response, 'base_content_edit.jinja')

    def test_base_content_functional_area_create_get(self):
        """Test get create functional area."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.get(reverse('create_functional_area'), follow=True)
        eq_(response.status_code, 200)
        eq_(response.context['verbose_name'], 'functional area')
        eq_(response.context['creating'], True)
        self.assertJinja2TemplateUsed(response, 'base_content_edit.jinja')

    def test_base_content_activity_create_post(self):
        """Test post create activity."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.post(reverse('create_activity'),
                                   data={'name': 'test activity'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = Activity.objects.filter(name='test activity')
        eq_(query.exists(), True)

    def test_base_content_campaign_create_post(self):
        """Test post create campaign."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.post(reverse('create_campaign'),
                                   data={'name': 'test campaign'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = Campaign.objects.filter(name='test campaign')
        eq_(query.exists(), True)

    def test_base_content_functional_area_create_post(self):
        """Test post create functional area."""
        admin = UserFactory.create(groups=['Admin'])
        with self.login(admin) as client:
            response = client.post(reverse('create_functional_area'),
                                   data={'name': 'test functional area'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = FunctionalArea.objects.filter(name='test functional area')
        eq_(query.exists(), True)

    @requires_permission()
    def test_base_content_create_unauthed(self):
        """Test create base content unauthorized."""
        user = UserFactory.create(groups=['Rep'])
        with self.login(user) as client:
            client.post(reverse('create_functional_area'),
                        data={'name': 'test functional area'},
                        follow=True)
class BaseUpdateViewTest(RemoTestCase):
    """Test generic BaseUpdateView class."""

    def test_base_content_activity_edit_post(self):
        """Test post edit activity."""
        admin = UserFactory.create(groups=['Admin'])
        activity = ActivityFactory.create(name='test activity')
        with self.login(admin) as client:
            response = client.post(reverse('edit_activity', kwargs={'pk': activity.id}),
                                   data={'name': 'edit activity'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = Activity.objects.filter(name='edit activity')
        eq_(query.exists(), True)

    def test_base_content_campaign_edit_post(self):
        """Test post edit campaign."""
        admin = UserFactory.create(groups=['Admin'])
        campaign = CampaignFactory.create(name='test campaign')
        with self.login(admin) as client:
            response = client.post(reverse('edit_campaign', kwargs={'pk': campaign.id}),
                                   data={'name': 'edit campaign'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = Campaign.objects.filter(name='edit campaign')
        eq_(query.exists(), True)

    def test_base_content_functional_area_edit_post(self):
        """Test post edit functional area."""
        admin = UserFactory.create(groups=['Admin'])
        area = FunctionalAreaFactory.create(name='test functional area')
        with self.login(admin) as client:
            response = client.post(reverse('edit_functional_area',
                                           kwargs={'pk': area.id}),
                                   data={'name': 'edit functional area'},
                                   follow=True)
        eq_(response.status_code, 200)
        query = FunctionalArea.objects.filter(name='edit functional area')
        eq_(query.exists(), True)

    @requires_permission()
    def test_base_content_update_unauthed(self):
        """Test update base content unauthorized."""
        user = UserFactory.create(groups=['Rep'])
        campaign = CampaignFactory.create(name='test campaign')
        with self.login(user) as client:
            client.post(reverse('edit_campaign', kwargs={'pk': campaign.id}),
                        data={'name': 'edit campaign'},
                        follow=True)
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010 OpenStack Foundation
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of an S3-like storage server based on local files.
Useful to test features that will eventually run on S3, or if you want to
run something locally that was once running on S3.
We don't support all the features of S3, but it does work with the
standard S3 client for the most basic semantics. To use the standard
S3 client with this module::
c = S3.AWSAuthConnection("", "", server="localhost", port=8888,
is_secure=False)
c.create_bucket("mybucket")
c.put("mybucket", "mykey", "a value")
print c.get("mybucket", "mykey").body
"""
import bisect
import datetime
import os
import os.path
import urllib
from oslo.config import cfg
import routes
import six
import webob
from nova.openstack.common import fileutils
from nova import paths
from nova import utils
from nova import wsgi
# Configuration options for the local S3-like objectstore service:
# where buckets live on disk and which address/port the API binds to.
s3_opts = [
    cfg.StrOpt('buckets_path',
               default=paths.state_path_def('buckets'),
               help='Path to S3 buckets'),
    cfg.StrOpt('s3_listen',
               default="0.0.0.0",
               help='IP address for S3 API to listen'),
    cfg.IntOpt('s3_listen_port',
               default=3333,
               help='Port for S3 API to listen'),
]

CONF = cfg.CONF
# Register at import time so the options are available to get_wsgi_server().
CONF.register_opts(s3_opts)
def get_wsgi_server():
    """Build the WSGI server hosting the S3-like objectstore application.

    Reads the bucket path and listen address/port from CONF.
    """
    app = S3Application(CONF.buckets_path)
    return wsgi.Server("S3 Objectstore", app,
                       port=CONF.s3_listen_port,
                       host=CONF.s3_listen)
class S3Application(wsgi.Router):
    """Implementation of an S3-like storage server based on local files.

    If bucket depth is given, we break files up into multiple directories
    to prevent hitting file system limits for number of files in each
    directories. 1 means one level of directories, 2 means 2, etc.
    """

    def __init__(self, root_directory, bucket_depth=0, mapper=None):
        if mapper is None:
            mapper = routes.Mapper()

        # NOTE: route registration order matters -- the object route
        # ('/{bucket}/{object_name}') is connected before the bucket route
        # ('/{bucket_name}/') so object URLs are matched first.  Each
        # controller lambda builds a fresh handler per request so handlers
        # get clean per-request state (see BaseRequestHandler docstring).
        mapper.connect('/',
                       controller=lambda *a, **kw: RootHandler(self)(*a, **kw))
        mapper.connect('/{bucket}/{object_name}',
                       controller=lambda *a, **kw: ObjectHandler(self)(*a, **kw))
        mapper.connect('/{bucket_name}/',
                       controller=lambda *a, **kw: BucketHandler(self)(*a, **kw))
        self.directory = os.path.abspath(root_directory)
        # Create the bucket root directory if it does not exist yet.
        fileutils.ensure_tree(self.directory)
        self.bucket_depth = bucket_depth
        super(S3Application, self).__init__(mapper)
class BaseRequestHandler(object):
    """Base class emulating Tornado's web framework pattern in WSGI.

    This is a direct port of Tornado's implementation, so some key decisions
    about how the code interacts have already been chosen.

    The two most common ways of designing web frameworks can be
    classified as async object-oriented and sync functional.

    Tornado's is on the OO side because a response is built up in and using
    the shared state of an object and one of the object's methods will
    eventually trigger the "finishing" of the response asynchronously.

    Most WSGI stuff is in the functional side, we pass a request object to
    every call down a chain and the eventual return value will be a response.

    Part of the function of the routing code in S3Application as well as the
    code in BaseRequestHandler's __call__ method is to merge those two styles
    together enough that the Tornado code can work without extensive
    modifications.

    To do that it needs to give the Tornado-style code clean objects that it
    can modify the state of for each request that is processed, so we use a
    very simple factory lambda to create new state for each request, that's
    the stuff in the router, and when we let the Tornado code modify that
    object to handle the request, then we return the response it generated.

    This wouldn't work the same if Tornado was being more async'y and doing
    other callbacks throughout the process, but since Tornado is being
    relatively simple here we can be satisfied that the response will be
    complete by the end of the get/post method.

    NOTE: Python 2 idioms (long, iteritems, keys()[0]) are intentional; the
    surrounding module targets Python 2.
    """

    def __init__(self, application):
        self.application = application

    @webob.dec.wsgify
    def __call__(self, request):
        # Dispatch on the lowercased HTTP verb; subclasses define get()/
        # put()/delete() etc., and unknown verbs fall through to invalid().
        method = request.method.lower()
        f = getattr(self, method, self.invalid)
        self.request = request
        self.response = webob.Response()
        params = request.environ['wsgiorg.routing_args'][1]
        del params['controller']
        f(**params)
        return self.response

    def get_argument(self, arg, default):
        """Return a query/form parameter, or `default` if absent."""
        return self.request.params.get(arg, default)

    def set_header(self, header, value):
        """Set an HTTP response header."""
        self.response.headers[header] = value

    def set_status(self, status_code):
        """Set the HTTP response status code."""
        self.response.status = status_code

    def set_404(self):
        """Render the standard S3 NoSuchKey error body and set 404."""
        self.render_xml({"Error": {
            "Code": "NoSuchKey",
            "Message": "The resource you requested does not exist"
        }})
        self.set_status(404)

    def finish(self, body=''):
        """Finalize the response with the given (UTF-8 encoded) body."""
        self.response.body = utils.utf8(body)

    def invalid(self, **kwargs):
        # Fallback for unsupported HTTP methods: empty 200 response.
        pass

    def render_xml(self, value):
        """Serialize a single-rooted dict as the XML response body.

        `value` must be a dict with exactly one key; that key becomes the
        root element, namespaced for the 2006-03-01 S3 API.
        """
        assert isinstance(value, dict) and len(value) == 1
        self.set_header("Content-Type", "application/xml; charset=UTF-8")
        name = value.keys()[0]
        parts = []
        parts.append('<' + utils.utf8(name) +
                     ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
        self._render_parts(value.values()[0], parts)
        parts.append('</' + utils.utf8(name) + '>')
        self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
                    ''.join(parts))

    def _render_parts(self, value, parts=None):
        """Recursively append the XML rendering of `value` to `parts`.

        Strings are escaped, ints/longs stringified, datetimes formatted
        as S3 timestamps, and dicts expanded into nested elements (list
        values produce one element per item).
        """
        if not parts:
            parts = []
        if isinstance(value, six.string_types):
            parts.append(utils.xhtml_escape(value))
        elif isinstance(value, int) or isinstance(value, long):
            parts.append(str(value))
        elif isinstance(value, datetime.datetime):
            parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
        elif isinstance(value, dict):
            for name, subvalue in value.iteritems():
                if not isinstance(subvalue, list):
                    subvalue = [subvalue]
                for subsubvalue in subvalue:
                    parts.append('<' + utils.utf8(name) + '>')
                    self._render_parts(subsubvalue, parts)
                    parts.append('</' + utils.utf8(name) + '>')
        else:
            # Bug fix: value was previously passed as a second Exception
            # argument ("...%r", value), so it was never interpolated into
            # the message text.
            raise Exception("Unknown S3 value type %r" % value)

    def _object_path(self, bucket, object_name):
        """Return the absolute filesystem path that backs an object.

        With bucket_depth > 0, objects fan out into nested hash-prefix
        directories to avoid too many files in one directory.
        """
        if self.application.bucket_depth < 1:
            return os.path.abspath(os.path.join(
                self.application.directory, bucket, object_name))
        hash = utils.get_hash_str(object_name)
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        for i in range(self.application.bucket_depth):
            path = os.path.join(path, hash[:2 * (i + 1)])
        return os.path.join(path, object_name)
class RootHandler(BaseRequestHandler):
    """Handles GET on the service root: list every bucket."""

    def get(self):
        """Render a ListAllMyBucketsResult for all bucket directories."""
        buckets = []
        for name in os.listdir(self.application.directory):
            info = os.stat(os.path.join(self.application.directory, name))
            buckets.append({
                "Name": name,
                "CreationDate": datetime.datetime.utcfromtimestamp(
                    info.st_ctime),
            })
        self.render_xml({"ListAllMyBucketsResult": {
            "Buckets": {"Bucket": buckets},
        }})
class BucketHandler(BaseRequestHandler):
    """Handles GET/PUT/DELETE on a bucket (listing, creation, removal)."""
    def get(self, bucket_name):
        """List the bucket's objects as an S3 ListBucketResult document.

        Honors the standard ``prefix``, ``marker`` and ``max-keys`` query
        arguments plus a non-standard ``terse`` flag that skips per-object
        stat() metadata.
        """
        prefix = self.get_argument("prefix", u"")
        marker = self.get_argument("marker", u"")
        max_keys = int(self.get_argument("max-keys", 50000))
        path = os.path.abspath(os.path.join(self.application.directory,
                                            bucket_name))
        terse = int(self.get_argument("terse", 0))
        # Reject path-traversal escapes out of the storage root.
        if (not path.startswith(self.application.directory) or
                not os.path.isdir(path)):
            self.set_404()
            return
        object_names = []
        for root, dirs, files in os.walk(path):
            for file_name in files:
                object_names.append(os.path.join(root, file_name))
        # Strip the bucket path plus the hash-shard directories (2 hex chars
        # per depth level plus a separator) to recover bare object names.
        skip = len(path) + 1
        for i in range(self.application.bucket_depth):
            skip += 2 * (i + 1) + 1
        object_names = [n[skip:] for n in object_names]
        object_names.sort()
        contents = []
        start_pos = 0
        # Resume strictly after the marker, then jump ahead to the prefix.
        if marker:
            start_pos = bisect.bisect_right(object_names, marker, start_pos)
        if prefix:
            start_pos = bisect.bisect_left(object_names, prefix, start_pos)
        truncated = False
        for object_name in object_names[start_pos:]:
            if not object_name.startswith(prefix):
                break
            if len(contents) >= max_keys:
                truncated = True
                break
            object_path = self._object_path(bucket_name, object_name)
            c = {"Key": object_name}
            if not terse:
                info = os.stat(object_path)
                c.update({
                    "LastModified": datetime.datetime.utcfromtimestamp(
                        info.st_mtime),
                    "Size": info.st_size,
                })
            contents.append(c)
            # The echoed marker is the last key actually included.
            marker = object_name
        self.render_xml({"ListBucketResult": {
            "Name": bucket_name,
            "Prefix": prefix,
            "Marker": marker,
            "MaxKeys": max_keys,
            "IsTruncated": truncated,
            "Contents": contents,
        }})
    def put(self, bucket_name):
        """Create the bucket directory; 403 if it exists or escapes the root."""
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if (not path.startswith(self.application.directory) or
                os.path.exists(path)):
            self.set_status(403)
            return
        fileutils.ensure_tree(path)
        self.finish()
    def delete(self, bucket_name):
        """Remove an empty bucket: 404 if missing, 403 if non-empty."""
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if (not path.startswith(self.application.directory) or
                not os.path.isdir(path)):
            self.set_404()
            return
        if len(os.listdir(path)) > 0:
            self.set_status(403)
            return
        os.rmdir(path)
        self.set_status(204)
        self.finish()
class ObjectHandler(BaseRequestHandler):
    """Handles GET/PUT/DELETE on individual objects within a bucket."""

    def get(self, bucket, object_name):
        """Return the stored object's bytes, or 404 if it does not exist."""
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if (not path.startswith(self.application.directory) or
                not os.path.isfile(path)):
            self.set_404()
            return
        info = os.stat(path)
        self.set_header("Content-Type", "application/unknown")
        self.set_header("Last-Modified", datetime.datetime.utcfromtimestamp(
            info.st_mtime))
        # Binary mode: object payloads are raw bytes, and ``with`` guarantees
        # the handle is closed even if finish() raises.
        with open(path, "rb") as object_file:
            self.finish(object_file.read())

    def put(self, bucket, object_name):
        """Store the request body as the object (creating shard directories)."""
        object_name = urllib.unquote(object_name)
        bucket_dir = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        if (not bucket_dir.startswith(self.application.directory) or
                not os.path.isdir(bucket_dir)):
            self.set_404()
            return
        path = self._object_path(bucket, object_name)
        if not path.startswith(bucket_dir) or os.path.isdir(path):
            self.set_status(403)
            return
        directory = os.path.dirname(path)
        fileutils.ensure_tree(directory)
        # The original leaked the file handle if write() raised; the context
        # manager closes it on any exit path. Binary mode matches the raw
        # request body.
        with open(path, "wb") as object_file:
            object_file.write(self.request.body)
        self.set_header('ETag',
                        '"%s"' % utils.get_hash_str(self.request.body))
        self.finish()

    def delete(self, bucket, object_name):
        """Remove the object and answer 204 No Content (404 if missing)."""
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if (not path.startswith(self.application.directory) or
                not os.path.isfile(path)):
            self.set_404()
            return
        os.unlink(path)
        self.set_status(204)
        self.finish()
|
|
"""Kernelized Incremental Feature Dependency Discovery"""
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import super
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from past.utils import old_div
from builtins import object
import numpy as np
from .Representation import Representation
from itertools import combinations
from rlpy.Tools import addNewElementForAllActions, PriorityQueueWithNovelty
import matplotlib.pyplot as plt
__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["Alborz Geramifard", "Robert H. Klein", "Christoph Dann",
"William Dabney", "Jonathan P. How"]
__license__ = "BSD 3-Clause"
__author__ = "Christoph Dann <cdann@mit.edu>"
class KernelizedFeature(object):
    """A single (possibly multi-dimensional) kernel feature.

    The feature's activation for a state ``s`` is
    ``kernel(s, center, dim, *kernel_args)``; it peaks when ``s`` coincides
    with ``center`` on the dimensions listed in ``dim``.
    """

    # feature index, -1 for non-discovered ones
    index = -1
    # relevance used to decide when to discover
    relevance = 0.
    # list of dimensions that are regarded by this feature
    dim = []
    # center = data point used to generate the feature;
    # center gives the highest output of this feature
    center = None

    def __init__(self, center, dim, kernel, index=-1,
                 base_ids=None, kernel_args=()):
        # kernel_args defaulted to a mutable list ([]); an immutable tuple
        # avoids the shared-mutable-default pitfall and is interchangeable
        # because it is only ever unpacked with ``*``.
        self.index = index
        self.kernel_args = kernel_args
        self.center = center
        self.dim = dim
        self.kernel = kernel
        if base_ids is None:
            # A base (1-dimensional) feature refines only itself.
            self.base_ids = frozenset([self.index])
        else:
            self.base_ids = base_ids

    def __str__(self):
        res = "{" + ", ".join(sorted([str(i) for i in self.base_ids])) + "} "
        res += ", ".join(["s{}={:.3g}".format(d + 1, self.center[d])
                          for d in self.dim])
        return res

    def output(self, s):
        """Evaluate the kernel for state *s* on this feature's dimensions."""
        return self.kernel(s, self.center, self.dim, *self.kernel_args)
class Candidate(object):
    """A potential new feature formed by combining two existing features.

    Tracks the running statistics (TD-error sum and activation count) from
    which the candidate's relevance is derived.
    """

    activation_count = 0.
    td_error_sum = 0.
    relevance = 0.
    idx1 = -1
    idx2 = -1

    def __init__(self, idx1, idx2):
        # Indices of the two parent features; callers keep idx1 <= idx2.
        self.idx1 = idx1
        self.idx2 = idx2
class KernelizediFDD(Representation):

    """
    Kernelized version of iFDD: incrementally discovers new features as
    combinations of existing kernel features whenever a candidate's
    estimated relevance to the TD-error exceeds the discovery threshold.
    """
    # The class attributes below only document the expected instance
    # attributes; each is re-assigned per instance in __init__.
    features = []
    candidates = {}
    # contains a set for each feature indicating the ids of
    base_id_sets = set()
    # 1-dim features it refines
    base_feature_ids = []
    max_relevance = 0.

    def __init__(self, domain, kernel, active_threshold, discover_threshold,
                 kernel_args=[], normalization=True, sparsify=True,
                 max_active_base_feat=2, max_base_feat_sim=0.7):
        super(KernelizediFDD, self).__init__(domain)
        self.kernel = kernel
        self.kernel_args = kernel_args
        self.active_threshold = active_threshold
        self.discover_threshold = discover_threshold
        self.normalization = normalization
        self.sparsify = sparsify
        self.sorted_ids = PriorityQueueWithNovelty()
        self.max_active_base_feat = max_active_base_feat
        self.max_base_feat_sim = max_base_feat_sim
        self.candidates = {}
        self.features = []
        # BUG FIX: base_id_sets previously remained the *class* attribute, so
        # two KernelizediFDD instances silently shared their discovered id
        # sets; give every instance its own set.
        self.base_id_sets = set()
        self.base_feature_ids = []
        # The original assigned a misspelled attribute; keep it as an alias
        # for backward compatibility with any external readers.
        self.base_features_ids = self.base_feature_ids
        self.max_relevance = 0.
def show_features(self):
l = self.sorted_ids.toList()[:]
key = lambda x: (
len(self.features[x].base_ids),
tuple(self.features[x].dim),
tuple(self.features[x].center[self.features[x].dim]))
l.sort(key=key)
for i in l:
f = self.features[i]
print("{:>5} {:>20}".format(i, f))
    def plot_1d_features(self, dimension_idx=None):
        """Creates a plot for each specified dimension of the state space and
        shows all 1-dimensional features in this dimension.
        If no indices are passed, all continuous dimensions are plotted.

        dimension_idx: either a single dimension index (int) or a list of
            indices.
        """
        idx = dimension_idx
        if isinstance(idx, int):
            idx = [idx]
        elif idx is None:
            idx = self.domain.continuous_dims
        # Sort features by complexity, then dimension, then center location,
        # so all 1-dim features come first, grouped per dimension.
        feat_list = list(range(self.features_num))
        key = lambda x: (
            len(self.features[x].base_ids),
            tuple(self.features[x].dim),
            tuple(self.features[x].center[self.features[x].dim]))
        feat_list.sort(key=key)
        last_i = -1
        for k in feat_list:
            # Features are sorted by dimensionality; stop at the first
            # multi-dimensional one.
            if len(self.features[k].dim) > 1:
                break
            cur_i = self.features[k].dim[0]
            if cur_i != last_i:
                # Entering a new dimension: flush the previous figure and
                # set up the evaluation grid for the new one.
                if last_i in idx:
                    plt.draw()
                if cur_i in idx:
                    xi = np.linspace(
                        self.domain.statespace_limits[
                            cur_i,
                            0],
                        self.domain.statespace_limits[
                            cur_i,
                            1],
                        200)
                    x = np.zeros((200, self.domain.statespace_limits.shape[0]))
                    x[:, cur_i] = xi
                    plt.figure("Feature Dimension {}".format(cur_i))
            if cur_i in idx:
                y = [self.features[k].output(xk) for xk in x]
                plt.plot(x, y, label="id {}".format(k))
            last_i = cur_i
        plt.draw()
    def plot_2d_features(self, d1=None, d2=None, n_lines=3):
        """
        plot contours of all 2-dimensional features covering
        dimension d1 and d2. For each feature, n_lines number of lines
        are shown.
        If no dimensions are specified, the first two continuous dimensions
        are shown.
        d1, d2: indices of dimensions to show
        n_lines: number of countour lines per feature (default: 3)
        """
        if d1 is None and d2 is None:
            # just take the first two dimensions
            idx = self.domain.continuous_dims[:2]
        else:
            idx = [d1, d2]
        idx.sort()
        # Sort features by complexity, then dimensions, then center, so the
        # 2-dim features appear contiguously, grouped per dimension pair.
        feat_list = list(range(self.features_num))
        key = lambda x: (
            len(self.features[x].base_ids),
            tuple(self.features[x].dim),
            tuple(self.features[x].center[self.features[x].dim]))
        feat_list.sort(key=key)
        last_i = -1
        last_j = -1
        for k in feat_list:
            # Skip 1-dim features; stop once 3-dim (or higher) begin.
            if len(self.features[k].dim) < 2:
                continue
            elif len(self.features[k].dim) > 2:
                break
            cur_i = self.features[k].dim[0]
            cur_j = self.features[k].dim[1]
            if cur_i != last_i or cur_j != last_j:
                # Entering a new dimension pair: flush the previous figure
                # and build a fresh 100x100 evaluation grid.
                if last_i in idx and last_j in idx:
                    plt.draw()
                if cur_i in idx and cur_j in idx:
                    xi = np.linspace(
                        self.domain.statespace_limits[
                            cur_i,
                            0],
                        self.domain.statespace_limits[
                            cur_i,
                            1],
                        100)
                    xj = np.linspace(
                        self.domain.statespace_limits[
                            cur_j,
                            0],
                        self.domain.statespace_limits[
                            cur_j,
                            1],
                        100)
                    X, Y = np.meshgrid(xi, xj)
                    plt.figure(
                        "Feature Dimensions {} and {}".format(cur_i, cur_j))
            if cur_i in idx and cur_j in idx:
                # Evaluate the feature on every grid point and contour it.
                Z = np.zeros_like(X)
                for m in range(100):
                    for n in range(100):
                        x = np.zeros(self.domain.statespace_limits.shape[0])
                        x[cur_i] = X[m, n]
                        x[cur_j] = Y[m, n]
                        Z[m, n] = self.features[k].output(x)
                plt.contour(X, Y, Z, n_lines)
            last_i = cur_i
            last_j = cur_j
        plt.draw()
    def plot_2d_feature_centers(self, d1=None, d2=None):
        """
        plot the centers of all 2-dimensional features covering
        dimension d1 and d2.
        If no dimensions are specified, the first two continuous dimensions
        are shown.
        d1, d2: indices of dimensions to show
        """
        if d1 is None and d2 is None:
            # just take the first two dimensions
            idx = self.domain.continuous_dims[:2]
        else:
            idx = [d1, d2]
        idx.sort()
        # Sort features so the 2-dim ones appear contiguously per dim pair.
        feat_list = list(range(self.features_num))
        key = lambda x: (
            len(self.features[x].base_ids),
            tuple(self.features[x].dim),
            tuple(self.features[x].center[self.features[x].dim]))
        feat_list.sort(key=key)
        last_i = -1
        last_j = -1
        for k in feat_list:
            # Skip 1-dim features; stop once 3-dim (or higher) begin.
            if len(self.features[k].dim) < 2:
                continue
            elif len(self.features[k].dim) > 2:
                break
            cur_i = self.features[k].dim[0]
            cur_j = self.features[k].dim[1]
            if cur_i != last_i or cur_j != last_j:
                # Entering a new dimension pair: flush and open a new figure.
                if last_i in idx and last_j in idx:
                    plt.draw()
                if cur_i in idx and cur_j in idx:
                    plt.figure(
                        "Feature Dimensions {} and {}".format(cur_i, cur_j))
            if cur_i in idx and cur_j in idx:
                # Mark the feature's center with a red cross.
                plt.plot(
                    [self.features[k].center[cur_i]],
                    [self.features[k].center[cur_j]],
                    "r",
                    marker="x")
            last_i = cur_i
            last_j = cur_j
        plt.draw()
    def phi_nonTerminal(self, s):
        """Compute the feature vector for non-terminal state *s*.

        Without sparsification every feature is evaluated directly. With
        sparsification, compound features suppress the base features they
        refine, yielding a sparser activation vector. Optionally the result
        is normalized to sum to one.
        """
        out = np.zeros(self.features_num)
        if not self.sparsify:
            for i in range(self.features_num):
                out[i] = self.features[i].output(s)
        else:
            # get all base feature values and check if they are activated
            # (sorted_ids is ordered most-complex first, so base features —
            # with a single base id — sit at the reversed front).
            active_bases = set([])
            for i in self.sorted_ids.toList()[::-1]:
                if len(self.features[i].base_ids) > 1:
                    break
                if self.features[i].output(s) >= self.active_threshold:
                    active_bases.add(i)
            base_vals = {k: 1. for k in active_bases}
            # iterate over the remaining compound features
            for i in self.sorted_ids.toList():
                # A compound feature fires only if all of its constituent
                # base features are still active (not yet claimed).
                if active_bases.issuperset(self.features[i].base_ids):
                    if self.sparsify > 1:
                        out[i] = self.features[i].output(s)
                        if self.sparsify > 2 or out[i] >= self.active_threshold:
                            # Claim the base ids so coarser features stay off.
                            active_bases -= self.features[i].base_ids
                    else:
                        # Soft sparsification: scale by the largest remaining
                        # base budget and deduct the activation from each base.
                        u = 0
                        for k in self.features[i].base_ids:
                            u = max(u, base_vals[k])
                        out[i] = self.features[i].output(s) * u
                        for k in self.features[i].base_ids:
                            base_vals[k] -= out[i]
                            if base_vals[k] < 0:
                                active_bases.remove(k)
        if self.normalization:
            summ = out.sum()
            if summ != 0:
                out /= out.sum()
        return out
def phi_raw(self, s, terminal):
assert(terminal is False)
out = np.zeros(self.features_num)
for i in range(self.features_num):
out[i] = self.features[i].output(s)
return out
    #@profile
    def post_discover(self, s, terminal, a, td_error, phi_s=None):
        """Run feature discovery after a TD update; return #features added.

        Two mechanisms: (1) add a new 1-dim base feature for any dimension
        insufficiently covered by currently active features; (2) update the
        relevance statistics of candidate feature combinations and promote
        any candidate whose relevance exceeds the discovery threshold.
        """
        if phi_s is None:
            phi_s = self.phi(s, terminal)
        phi_s_unnorm = self.phi_raw(s, terminal)
        discovered = 0
        Q = self.Qs(s, terminal, phi_s=phi_s).reshape(-1, 1)
        # indices of active features
        active_indices = list(
            np.where(phi_s_unnorm > self.active_threshold)[0])
        # "active indices", active_indices
        # gather all dimensions regarded by active features
        active_dimensions = np.zeros((len(s)), dtype="int")
        closest_neighbor = np.zeros((len(s)))
        for i in active_indices:
            for j in self.features[i].dim:
                active_dimensions[j] += 1
                closest_neighbor[j] = max(closest_neighbor[j], phi_s_unnorm[i])
        # add new base features for all dimension not regarded
        for j in range(len(s)):
            if active_dimensions[j] < self.max_active_base_feat and (closest_neighbor[j] < self.max_base_feat_sim or active_dimensions[j] < 1):
                active_indices.append(self.add_base_feature(s, j, Q=Q))
                discovered += 1
        # update relevance statistics of all feature candidates
        if discovered:
            # Feature vector changed; recompute before updating statistics.
            phi_s = self.phi(s, terminal)
        la = len(active_indices)
        # Choose the cheaper iteration order: all candidates, or all pairs
        # of active features (the two branches can differ in result — the
        # pair branch only updates combinations considered active).
        if la * (la - 1) < len(self.candidates):
            for ind, cand in list(self.candidates.items()):
                g, h = ind
                rel = self.update_relevance_stat(
                    cand,
                    g,
                    h,
                    td_error,
                    s,
                    a,
                    phi_s)
                self.max_relevance = max(rel, self.max_relevance)
                # add if relevance is high enough
                if rel > self.discover_threshold:
                    self.add_refined_feature(g, h, Q=Q)
                    discovered += 1
        else:
            # the result of both branches can be very different as this one
            # updates only combinations which are considered active.
            for g, h in combinations(active_indices, 2):
                # note: g, h are ordered as active_indices are ordered
                cand = self.candidates.get((g, h))
                if cand is None:
                    continue
                rel = self.update_relevance_stat(
                    cand,
                    g,
                    h,
                    td_error,
                    s,
                    a,
                    phi_s)
                self.max_relevance = max(rel, self.max_relevance)
                # add if relevance is high enough
                if rel > self.discover_threshold:
                    self.add_refined_feature(g, h, Q=Q)
                    discovered += 1
        if discovered:
            # Reset the tracked maximum once the feature set has changed.
            self.max_relevance = 0.
        return discovered
def update_relevance_stat(
self, candidate, index1, index2, td_error, s, a, phi_s):
"""
make sure that inputs are ordered, i.e.,index1 <= index2!
returns the relevance of a potential feature combination
"""
candidate.td_error_sum += phi_s[index1] * phi_s[index2] * td_error
candidate.activation_count += phi_s[index1] ** 2 * phi_s[index2] ** 2
if candidate.activation_count == 0.:
return 0.
rel = old_div(np.abs(candidate.td_error_sum), \
np.sqrt(candidate.activation_count))
return rel
    def add_base_feature(self, center, dim, Q):
        """
        adds a new 1-dimensional feature and returns its index
        """
        new_f = KernelizedFeature(
            center=center, dim=[dim], kernel_args=self.kernel_args,
            kernel=self.kernel, index=self.features_num)
        self.features.append(new_f)
        self.base_id_sets.add(new_f.base_ids)
        # Base features get the highest priority (-1).
        self.sorted_ids.push(-1, self.features_num)
        self.logger.debug(
            "Added Feature {} {}".format(
                self.features_num,
                new_f))
        # add combinations with all existing features as candidates
        # (only with features that do not already cover this dimension)
        new_cand = {(f, self.features_num): Candidate(f, self.features_num)
                    for f in range(self.features_num) if dim not in self.features[f].dim}
        self.candidates.update(new_cand)
        for f, _ in list(new_cand.keys()):
            self.base_id_sets.add(new_f.base_ids | self.features[f].base_ids)
        self.features_num += 1
        # add parameter dimension
        if self.normalization:
            self.weight_vec = addNewElementForAllActions(
                self.weight_vec,
                self.domain.actions_num,
                Q)
        else:
            self.weight_vec = addNewElementForAllActions(
                self.weight_vec,
                self.domain.actions_num)
        return self.features_num - 1
    def add_refined_feature(self, index1, index2, Q):
        """
        adds the combination of 2 existing features to the representation
        """
        f1 = self.features[index1]
        f2 = self.features[index2]
        # The new center averages the parents' centers on the dimensions
        # each covers (cnt is 1 or 2 per dimension; untouched dims stay 0).
        new_center = np.zeros_like(f1.center)
        cnt = np.zeros_like(f1.center)
        cnt[f1.dim] += 1
        cnt[f2.dim] += 1
        cnt[cnt == 0] = 1.
        new_center[f1.dim] += f1.center[f1.dim]
        new_center[f2.dim] += f2.center[f2.dim]
        new_center /= cnt
        new_dim = list(frozenset(f1.dim) | frozenset(f2.dim))
        new_base_ids = f1.base_ids | f2.base_ids
        new_dim.sort()
        new_f = KernelizedFeature(center=new_center, dim=new_dim,
                                  kernel_args=self.kernel_args,
                                  kernel=self.kernel, index=self.features_num,
                                  base_ids=new_base_ids)
        self.features.append(new_f)
        # Priority is the negative number of base ids
        self.sorted_ids.push(-len(new_f.base_ids), self.features_num)
        #assert(len(self.sorted_ids.toList()) == self.features_num + 1)
        self.base_id_sets.add(new_f.base_ids)
        del self.candidates[(index1, index2)]
        # add new candidates
        # (skip combinations already known and features sharing a dimension)
        new_cand = {(f, self.features_num): Candidate(f, self.features_num) for f in range(self.features_num)
                    if (self.features[f].base_ids | new_base_ids) not in self.base_id_sets
                    and len(frozenset(self.features[f].dim) & frozenset(new_dim)) == 0}
        for c, _ in list(new_cand.keys()):
            self.base_id_sets.add(new_base_ids | self.features[c].base_ids)
        self.candidates.update(new_cand)
        self.logger.debug(
            "Added refined feature {} {}".format(
                self.features_num,
                new_f))
        self.logger.debug("{} candidates".format(len(self.candidates)))
        self.features_num += 1
        # Grow the weight vector by one dimension per action.
        if self.normalization:
            self.weight_vec = addNewElementForAllActions(
                self.weight_vec,
                self.domain.actions_num,
                Q)
        else:
            self.weight_vec = addNewElementForAllActions(
                self.weight_vec,
                self.domain.actions_num)
        return self.features_num - 1
# Prefer the compiled kernel implementations; fall back to the pure-Python
# versions (same API, slower) when the C extension is not built.
try:
    from .kernels import *
except ImportError:
    print("C-Extension for kernels not available, expect slow runtime")
    from .slow_kernels import *
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from eventlet import greenthread
import netaddr
from oslo.config import cfg
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.common import constants as const
from neutron.common import exceptions as ntn_exc
from neutron.common import rpc as n_rpc
from neutron.db import agents_db
from neutron.db import db_base_plugin_v2
from neutron.db import dhcp_rpc_base
from neutron.db import l3_db
from neutron.db import models_v2
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.api_client import exception as api_exc
from neutron.plugins.vmware.common import config
from neutron.plugins.vmware.common import exceptions as nsx_exc
LOG = logging.getLogger(__name__)
# Link-local addressing for the metadata access network: a tiny /30 subnet
# with a fixed gateway, plus a host route to the canonical 169.254.169.254
# metadata address.
METADATA_DEFAULT_PREFIX = 30
METADATA_SUBNET_CIDR = '169.254.169.252/%d' % METADATA_DEFAULT_PREFIX
METADATA_GATEWAY_IP = '169.254.169.253'
METADATA_DHCP_ROUTE = '169.254.169.254/32'
class NSXRpcCallbacks(dhcp_rpc_base.DhcpRpcCallbackMixin):
    """DHCP RPC callbacks for the NSX plugin, plus agent-extension RPC."""
    RPC_API_VERSION = '1.1'
    def create_rpc_dispatcher(self):
        '''Get the rpc dispatcher for this manager.
        If a manager would like to set an rpc API version, or support more than
        one class as the target of rpc messages, override this method.
        '''
        return n_rpc.PluginRpcDispatcher([self,
                                          agents_db.AgentExtRpcCallback()])
def handle_network_dhcp_access(plugin, context, network, action):
    """No-op: network-level DHCP events need no extra handling here."""
    pass
def handle_port_dhcp_access(plugin, context, port_data, action):
    """Re-notify the DHCP agent when a DHCP port changes (indirect mode).

    Fires a subnet.update.end notification for the subnet the DHCP port
    lives on, so the agent refreshes its configuration.
    """
    is_dhcp_port = (
        cfg.CONF.NSX.metadata_mode == config.MetadataModes.INDIRECT and
        port_data.get('device_owner') == const.DEVICE_OWNER_DHCP)
    if is_dhcp_port and port_data.get('fixed_ips', []):
        subnet = plugin.get_subnet(
            context, port_data['fixed_ips'][0]['subnet_id'])
        _notify_rpc_agent(context, {'subnet': subnet}, 'subnet.update.end')
def handle_port_metadata_access(plugin, context, port, is_delete=False):
    """Add, update or remove the metadata host route on a DHCP port's subnet.

    In 'indirect' metadata mode the metadata service is reached through a
    host route (METADATA_DHCP_ROUTE) whose next hop is the DHCP port's IP:
    creating/updating the port installs or refreshes that route; deleting
    it removes the route.
    """
    if not (cfg.CONF.NSX.metadata_mode == config.MetadataModes.INDIRECT and
            port.get('device_owner') == const.DEVICE_OWNER_DHCP):
        return
    fixed_ips = port.get('fixed_ips', [])
    if not fixed_ips:
        # BUG FIX: the original condition was ``fixed_ips or is_delete`` and
        # then indexed fixed_ips[0], so deleting an address-less DHCP port
        # raised IndexError. There is nothing to do without an address.
        return
    fixed_ip = fixed_ips[0]
    query = context.session.query(models_v2.Subnet)
    subnet = query.filter(
        models_v2.Subnet.id == fixed_ip['subnet_id']).one()
    # If subnet does not have a gateway do not create metadata
    # route. This is done via the enable_isolated_metadata
    # option if desired.
    if not subnet.get('gateway_ip'):
        LOG.info(_('Subnet %s does not have a gateway, the metadata '
                   'route will not be created'), subnet['id'])
        return
    metadata_routes = [r for r in subnet.routes
                       if r['destination'] == METADATA_DHCP_ROUTE]
    if metadata_routes:
        # We should have only a single metadata route at any time
        # because the route logic forbids two routes with the same
        # destination. Update next hop with the provided IP address
        if not is_delete:
            metadata_routes[0].nexthop = fixed_ip['ip_address']
        else:
            context.session.delete(metadata_routes[0])
    else:
        # add the metadata route
        route = models_v2.SubnetRoute(
            subnet_id=subnet.id,
            destination=METADATA_DHCP_ROUTE,
            nexthop=fixed_ip['ip_address'])
        context.session.add(route)
def handle_router_metadata_access(plugin, context, router_id, interface=None):
    """Create or destroy the metadata access network for *router_id*.

    In 'direct' metadata mode, a dedicated link-local network attached to
    the router provides metadata access. Adding a router interface creates
    it (if missing); removing interfaces tears it down once only the
    metadata port itself remains.
    """
    if cfg.CONF.NSX.metadata_mode != config.MetadataModes.DIRECT:
        LOG.debug(_("Metadata access network is disabled"))
        return
    if not cfg.CONF.allow_overlapping_ips:
        LOG.warn(_("Overlapping IPs must be enabled in order to setup "
                   "the metadata access network"))
        return
    ctx_elevated = context.elevated()
    device_filter = {'device_id': [router_id],
                     'device_owner': [l3_db.DEVICE_OWNER_ROUTER_INTF]}
    # Retrieve ports calling database plugin
    ports = db_base_plugin_v2.NeutronDbPluginV2.get_ports(
        plugin, ctx_elevated, filters=device_filter)
    try:
        if ports:
            if (interface and
                    not _find_metadata_port(plugin, ctx_elevated, ports)):
                _create_metadata_access_network(
                    plugin, ctx_elevated, router_id)
            elif len(ports) == 1:
                # The only port left might be the metadata port
                _destroy_metadata_access_network(
                    plugin, ctx_elevated, router_id, ports)
        else:
            LOG.debug(_("No router interface found for router '%s'. "
                        "No metadata access network should be "
                        "created or destroyed"), router_id)
    # TODO(salvatore-orlando): A better exception handling in the
    # NSX plugin would allow us to improve error handling here
    except (ntn_exc.NeutronException, nsx_exc.NsxPluginException,
            api_exc.NsxApiException):
        # Any exception here should be regarded as non-fatal
        LOG.exception(_("An error occurred while operating on the "
                        "metadata access network for router:'%s'"),
                      router_id)
def _find_metadata_port(plugin, context, ports):
    """Return the first port with an IP in the metadata subnet, else None."""
    meta_cidr = netaddr.IPNetwork(METADATA_SUBNET_CIDR)
    for port in ports:
        for fixed_ip in port['fixed_ips']:
            subnet = plugin.get_subnet(context, fixed_ip['subnet_id'])
            if netaddr.IPNetwork(subnet['cidr']) in meta_cidr:
                return port
def _create_metadata_access_network(plugin, context, router_id):
    """Build the per-router metadata network, subnet and router interface.

    Creates a dedicated network and link-local subnet (METADATA_SUBNET_CIDR),
    attaches it to the router, and notifies the DHCP agent so the metadata
    proxy is started.
    """
    # Add network
    # Network name is likely to be truncated on NSX
    net_data = {'name': 'meta-%s' % router_id,
                'tenant_id': '',  # intentionally not set
                'admin_state_up': True,
                'port_security_enabled': False,
                'shared': False,
                'status': const.NET_STATUS_ACTIVE}
    meta_net = plugin.create_network(context,
                                     {'network': net_data})
    greenthread.sleep(0)  # yield
    # From this point on there will be resources to garbage-collect
    # in case of failures
    meta_sub = None
    try:
        # Add subnet
        subnet_data = {'network_id': meta_net['id'],
                       'tenant_id': '',  # intentionally not set
                       'name': 'meta-%s' % router_id,
                       'ip_version': 4,
                       'shared': False,
                       'cidr': METADATA_SUBNET_CIDR,
                       'enable_dhcp': True,
                       # Ensure default allocation pool is generated
                       'allocation_pools': attributes.ATTR_NOT_SPECIFIED,
                       'gateway_ip': METADATA_GATEWAY_IP,
                       'dns_nameservers': [],
                       'host_routes': []}
        meta_sub = plugin.create_subnet(context,
                                        {'subnet': subnet_data})
        greenthread.sleep(0)  # yield
        plugin.add_router_interface(context, router_id,
                                    {'subnet_id': meta_sub['id']})
        greenthread.sleep(0)  # yield
    except (ntn_exc.NeutronException,
            nsx_exc.NsxPluginException,
            api_exc.NsxApiException):
        # It is not necessary to explicitly delete the subnet
        # as it will be removed with the network
        # NOTE(review): the exception is swallowed here and the notification
        # below still fires for the deleted network — confirm intended.
        plugin.delete_network(context, meta_net['id'])
    # Tell to start the metadata agent proxy
    _notify_rpc_agent(context, {'network': meta_net}, 'network.create.end')
def _destroy_metadata_access_network(plugin, context, router_id, ports):
    """Tear down the router's metadata network, if one is attached.

    Detaches the metadata port from the router and deletes the metadata
    network (which removes its subnet), then notifies the DHCP agent so
    the metadata proxy is stopped. Re-attaches the interface if the
    network deletion fails.
    """
    if not ports:
        return
    meta_port = _find_metadata_port(plugin, context, ports)
    if not meta_port:
        return
    meta_net_id = meta_port['network_id']
    meta_sub_id = meta_port['fixed_ips'][0]['subnet_id']
    plugin.remove_router_interface(
        context, router_id, {'port_id': meta_port['id']})
    greenthread.sleep(0)  # yield
    # Drop stale ORM state before deleting the network.
    context.session.expunge_all()
    try:
        # Remove network (this will remove the subnet too)
        plugin.delete_network(context, meta_net_id)
        greenthread.sleep(0)  # yield
    except (ntn_exc.NeutronException, nsx_exc.NsxPluginException,
            api_exc.NsxApiException):
        # must re-add the router interface
        plugin.add_router_interface(context, router_id,
                                    {'subnet_id': meta_sub_id})
    # Tell to stop the metadata agent proxy
    _notify_rpc_agent(
        context, {'network': {'id': meta_net_id}}, 'network.delete.end')
def _notify_rpc_agent(context, payload, event):
    """Forward *payload* to the DHCP agents iff notifications are enabled."""
    if not cfg.CONF.dhcp_agent_notification:
        return
    notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
    notifier.notify(context, payload, event)
|
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.forms import ValidationError # noqa
from django import http
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables # noqa
from horizon_lib import exceptions
from horizon_lib import forms
from horizon_lib import messages
from horizon_lib.utils import validators
from openstack_horizon import api
LOG = logging.getLogger(__name__)
# Keystone v3 makes the primary project optional when creating a user.
PROJECT_REQUIRED = api.keystone.VERSIONS.active < 3
class BaseUserForm(forms.SelfHandlingForm):
    """Common base for user create/update forms: project choices and
    password confirmation."""
    def __init__(self, request, *args, **kwargs):
        super(BaseUserForm, self).__init__(request, *args, **kwargs)
        # Populate project choices
        project_choices = []
        # If the user is already set (update action), list only projects which
        # the user has access to.
        user_id = kwargs['initial'].get('id', None)
        domain_id = kwargs['initial'].get('domain_id', None)
        projects, has_more = api.keystone.tenant_list(request,
                                                      domain=domain_id,
                                                      user=user_id)
        for project in projects:
            if project.enabled:
                project_choices.append((project.id, project.name))
        if not project_choices:
            project_choices.insert(0, ('', _("No available projects")))
        elif len(project_choices) > 1:
            project_choices.insert(0, ('', _("Select a project")))
        self.fields['project'].choices = project_choices
    def clean(self):
        '''Check to make sure password fields match.'''
        # NOTE(review): super(forms.Form, ...) deliberately skips ahead of
        # forms.Form in the MRO rather than calling this class's parent —
        # confirm this is intentional before changing it.
        data = super(forms.Form, self).clean()
        if 'password' in data:
            if data['password'] != data.get('confirm_password', None):
                raise ValidationError(_('Passwords do not match.'))
        return data
# URL name of the view used by DynamicChoiceField's "add project" link.
ADD_PROJECT_URL = "horizon:identity:projects:create"
class CreateUserForm(BaseUserForm):
    """Form for creating a keystone user with an optional primary
    project/role assignment."""
    # Hide the domain_id and domain_name by default
    domain_id = forms.CharField(label=_("Domain ID"),
                                required=False,
                                widget=forms.HiddenInput())
    domain_name = forms.CharField(label=_("Domain Name"),
                                  required=False,
                                  widget=forms.HiddenInput())
    name = forms.CharField(max_length=255, label=_("User Name"))
    email = forms.EmailField(
        label=_("Email"),
        required=False)
    password = forms.RegexField(
        label=_("Password"),
        widget=forms.PasswordInput(render_value=False),
        regex=validators.password_validator(),
        error_messages={'invalid': validators.password_validator_msg()})
    confirm_password = forms.CharField(
        label=_("Confirm Password"),
        widget=forms.PasswordInput(render_value=False))
    project = forms.DynamicChoiceField(label=_("Primary Project"),
                                       required=PROJECT_REQUIRED,
                                       add_item_link=ADD_PROJECT_URL)
    role_id = forms.ChoiceField(label=_("Role"),
                                required=PROJECT_REQUIRED)
    no_autocomplete = True
    def __init__(self, *args, **kwargs):
        """Populate role choices; lock the domain fields on keystone v3."""
        roles = kwargs.pop('roles')
        super(CreateUserForm, self).__init__(*args, **kwargs)
        role_choices = [(role.id, role.name) for role in roles]
        self.fields['role_id'].choices = role_choices
        # For keystone V3, display the two fields in read-only
        if api.keystone.VERSIONS.active >= 3:
            readonlyInput = forms.TextInput(attrs={'readonly': 'readonly'})
            self.fields["domain_id"].widget = readonlyInput
            self.fields["domain_name"].widget = readonlyInput
    # We have to protect the entire "data" dict because it contains the
    # password and confirm_password strings.
    @sensitive_variables('data')
    def handle(self, request, data):
        """Create the user, then grant the chosen role on the primary project."""
        domain = api.keystone.get_default_domain(self.request)
        try:
            LOG.info('Creating user with name "%s"' % data['name'])
            # Normalize an empty email string to None.
            if "email" in data:
                data['email'] = data['email'] or None
            new_user = api.keystone.user_create(request,
                                                name=data['name'],
                                                email=data['email'],
                                                password=data['password'],
                                                project=data['project'],
                                                enabled=True,
                                                domain=domain.id)
            messages.success(request,
                             _('User "%s" was successfully created.')
                             % data['name'])
            if data['project'] and data['role_id']:
                # Only add the role if it is not already assigned.
                roles = api.keystone.roles_for_user(request,
                                                    new_user.id,
                                                    data['project']) or []
                assigned = [role for role in roles if role.id == str(
                    data['role_id'])]
                if not assigned:
                    try:
                        api.keystone.add_tenant_user_role(request,
                                                          data['project'],
                                                          new_user.id,
                                                          data['role_id'])
                    except Exception:
                        exceptions.handle(request,
                                          _('Unable to add user '
                                            'to primary project.'))
            return new_user
        except Exception:
            exceptions.handle(request, _('Unable to create user.'))
class UpdateUserForm(BaseUserForm):
    """Form for editing an existing Keystone user."""

    # Hide the domain_id and domain_name by default
    domain_id = forms.CharField(label=_("Domain ID"),
                                required=False,
                                widget=forms.HiddenInput())
    domain_name = forms.CharField(label=_("Domain Name"),
                                  required=False,
                                  widget=forms.HiddenInput())
    # ID of the user being edited, carried through as a hidden field.
    id = forms.CharField(label=_("ID"), widget=forms.HiddenInput)
    name = forms.CharField(max_length=255, label=_("User Name"))
    email = forms.EmailField(
        label=_("Email"),
        required=False)
    # Password fields are optional on update (required=False).
    password = forms.RegexField(
        label=_("Password"),
        widget=forms.PasswordInput(render_value=False),
        regex=validators.password_validator(),
        required=False,
        error_messages={'invalid': validators.password_validator_msg()})
    confirm_password = forms.CharField(
        label=_("Confirm Password"),
        widget=forms.PasswordInput(render_value=False),
        required=False)
    project = forms.ChoiceField(label=_("Primary Project"),
                                required=PROJECT_REQUIRED)
    # Flag consumed outside this class (presumably by the form template) to
    # disable browser autocompletion -- TODO confirm.
    no_autocomplete = True

    def __init__(self, request, *args, **kwargs):
        """Drop non-editable fields and lock domain fields on keystone v3."""
        super(UpdateUserForm, self).__init__(request, *args, **kwargs)
        # If the backend cannot edit users, remove the editable fields
        # entirely rather than rendering them.
        if api.keystone.keystone_can_edit_user() is False:
            for field in ('name', 'email', 'password', 'confirm_password'):
                self.fields.pop(field)
        # For keystone V3, display the two fields in read-only
        if api.keystone.VERSIONS.active >= 3:
            readonlyInput = forms.TextInput(attrs={'readonly': 'readonly'})
            self.fields["domain_id"].widget = readonlyInput
            self.fields["domain_name"].widget = readonlyInput

    # We have to protect the entire "data" dict because it contains the
    # password and confirm_password strings.
    @sensitive_variables('data', 'password')
    def handle(self, request, data):
        """Push the edited fields to Keystone.

        Returns the HttpResponse produced by error handling when one is
        raised, otherwise True.
        """
        user = data.pop('id')

        # Throw away the password confirmation, we're done with it.
        data.pop('confirm_password', None)
        # Domain fields are informational only and must not be sent to the
        # update call.
        data.pop('domain_id')
        data.pop('domain_name')

        try:
            # Treat an empty email string as unset (None).
            if "email" in data:
                data['email'] = data['email'] or None
            response = api.keystone.user_update(request, user, **data)
            messages.success(request,
                             _('User has been updated successfully.'))
        except Exception:
            # exceptions.handle may return an HttpResponse (e.g. a redirect);
            # pass it through to the caller below.
            response = exceptions.handle(request, ignore=True)
            messages.error(request, _('Unable to update the user.'))

        if isinstance(response, http.HttpResponse):
            return response
        else:
            return True
|
|
import os
import sys
import re
import platform
import time
import requests
def strike(string):
    """
    Return *string* rendered with a strikethrough effect, in a form that can
    safely be printed on the current platform.

    Args:
        string (str): string to apply strikethrough to.
    """
    if platform.system() == "Windows":
        # Windows consoles often cannot render combining characters, so fall
        # back to a plain-text marker instead.
        return "X " + string

    # Interleave the combining long-stroke-overlay character between the
    # characters, appending one for the final character.
    return "\u0336".join(string) + "\u0336"
def safe_get(url):
    """
    Perform an HTTP GET request with campdown's default set of headers.

    Args:
        url (str): URL to make the request to.

    Returns:
        The ``requests.Response`` for the completed request.
    """
    headers = requests.utils.default_headers()
    # BUG FIX: the original code assigned a dict to ``headers.update``
    # (``headers.update = {...}``), which silently replaced the bound method
    # and never merged the custom headers. Call update() instead.
    headers.update({
        "User-Agent": "campdown/1.49 (+https://github.com/catlinman/campdown)",
        "Accept-Encoding": ", ".join(("gzip", "deflate")),
        "Accept": "*/*",
        "Connection": "keep-alive",
    })

    # Make a request to the track URL.
    r = requests.get(url, headers=headers)

    return r
def safe_print(string):
    """
    Print to the console while avoiding encoding errors.

    Args:
        string (str): string to print to the console without encoding errors.
    """
    try:
        print(string)
    except UnicodeEncodeError:
        # Re-encode with replacement characters for anything the console's
        # encoding cannot represent.
        degraded = string.encode(sys.stdout.encoding, errors="replace")
        try:
            print(degraded.decode())
        except UnicodeDecodeError:
            # As a last resort, print the raw bytes object.
            print(degraded)
def safe_filename(string):
    """
    Convert a string into one without illegal characters for the given
    filesystem.

    Args:
        string (str): the path to remove illegal characters from.

    Returns:
        new path string without illegal characters.
    """
    # Forward slashes are swapped for ampersands; backslashes are dropped.
    cleaned = string.replace('/', '&').replace('\\', '')

    # Windows forbids a handful of additional characters in filenames.
    if platform.system() == "Windows":
        cleaned = re.sub('[":*?<>|]', "", cleaned)

    return cleaned
def string_between(string, start, end):
    """
    Returns a new string between the start and end range.

    Args:
        string (str): the string to split.
        start (str): string to start the split at.
        end (str): string to stop the split at.

    Returns:
        new string between start and end, or "" when *start* is absent.
    """
    text = str(string)
    _, found, tail = text.partition(str(start))

    # Mirror the original IndexError fallback: no start marker means "".
    if not found:
        return ""

    # When *end* is absent, everything after the start marker is returned.
    return tail.split(str(end))[0]
def format_information(title, artist, album="", index=0):
    """
    Takes in track information and returns everything as a formatted String.

    Args:
        title (str): track title string
        artist (str): track artist string
        album (str): optional track album string
        index (str): optional track number string

    Returns:
        A formatted string of all track information.
    """
    # A title that already embeds "artist - track" supplies its own prefix;
    # otherwise the supplied artist is used.
    if " - " in title:
        prefix, track = str(title).split(" - ", 1)
    else:
        prefix, track = artist, title

    # Optional middle segments collapse to "" when absent.
    album_part = "{} - ".format(album) if album else ""
    index_part = "{} ".format(index) if index else ""

    return "{} - {}{}{}".format(prefix, album_part, index_part, track)
def short_information(title, index=0):
    """
    Takes in track information and returns everything as a short formatted
    String.

    Args:
        title (str): track title string
        index (str): optional track number string

    Returns:
        A short formatted string of all track information.
    """
    # Strip an embedded "artist - " prefix from the title when present.
    if " - " in title:
        name = str(title).split(" - ", 1)[1]
    else:
        name = title

    return "{} {}".format(index, name) if index else name
def valid_url(url):
    """
    Validate a URL and make sure that it has the correct URL syntax.

    Args:
        url (str): URL string to be evaluated.

    Returns:
        True if the URL is valid. False if it is invalid.
    """
    # BUG FIX: the original membership test ('"http://" not in url') accepted
    # any string merely *containing* a scheme, e.g. "foo#http://bar". A URL
    # only has valid syntax here if it actually starts with the scheme.
    return url.startswith(("http://", "https://"))
def page_type(content):
    """
    Evaluate the request content and identify the type of the page.

    Args:
        content (str): page content to analyse.

    Returns:
        "album" if a track list was detected.
        "discography" if a set of albums and tracks was found.
        "track" if the above do not apply but a Bandcamp page was still identified.
        "none" if the supplied page is not a Bandcamp page.
    """
    if "bandcamp.com" not in content:
        return "none"

    # BUG FIX: the original condition ('"Digital Album" and "track_list" in
    # content') only evaluated the truthiness of the literal "Digital Album"
    # and never tested its presence. Both markers must actually appear.
    if "Digital Album" in content and "track_list" in content:
        return "album"

    # BUG FIX: the discography branch was inverted ('not in content'), which
    # classified every non-album Bandcamp page as a discography.
    if 'id="discography"' in content:
        return "discography"

    return "track"
def find_string_indices(content, search):
    """
    Generate a list that contains all the indices just past each occurrence
    of *search* within *content*.

    Args:
        content (str): string to search within.
        search (str): string to find occurrences of.

    Returns:
        List containing all found indices after the search string.
    """
    # BUG FIX: the docstring and the original inline comment ("Add the length
    # of the search to the index like before") both promise indices *after*
    # the search string, but len(search) was never actually added, so callers
    # slicing from these indices would include the marker itself.
    return [
        i + len(search)
        for i in range(len(content))
        if content.startswith(search, i)
    ]
def calculate_confidence(inspected, expected, percentage):
    """
    Generate a confidence value possibly confirming or denying data parity.

    A positive return value indicates the amount of bytes past the confidence
    interval. A negative value indicates the missing amount required to pass.

    Args:
        inspected (number): value to calculate the percentage of compared to the expected value.
        expected (number): value that should be confirmed against.
        percentage (number): percentage amount that can be ignored and is still confident.

    Returns:
        Calculated confidence total value.
    """
    # The inspected size may fall short of the expected size by up to
    # ``expected * percentage`` bytes and still count as confident.
    margin = expected * percentage

    return -(expected - (inspected + margin))
def download_file(url, output, name, force=False, verbose=False, silent=False, sleep=30, timeout=3, max_retries=2):
    """
    Downloads and saves a file from the supplied URL and prints progress
    to the console. Can use ranged requests to make downloads from Bandcamp faster
    in some cases. Returns 0 if the download failed, 1 if the download was successful
    and 2 if the download file was already found and has the same file size.

    Args:
        url (str): URL to make the request to.
        output (str): absolute folder path to write to.
        name (str): filename with extension to write the content to.
        force (bool): ignores checking if the file already exists.
        verbose (bool): prints status messages as well as download progress.
        silent (bool): if error messages should be ignored and not printed.
        sleep (number): Seconds to sleep between failed requests.
        timeout (number): The maximum time before a request is timed out.
        max_retries (number): The amount of request retries that should be attempted.

    Returns:
        0 if there was an error in this function
        1 if the download and write is successful
        2 if the file already exists
        r.status_code if a connection error occurred
    """
    if verbose:
        safe_print("\nDownloading: {}".format(name))

    # Status variables.
    success = False
    retries = 0

    headers = requests.utils.default_headers()
    # BUG FIX: the original code assigned a dict to ``headers.update``
    # (``headers.update = {...}``), which silently replaced the bound method
    # and never merged the custom headers. Call update() instead.
    headers.update({
        "User-Agent": "campdown/1.49 (+https://github.com/catlinman/campdown)",
        "Accept-Encoding": ", ".join(("gzip", "deflate")),
        "Accept": "*/*",
        "Connection": "keep-alive",
    })

    # Initialize our response variable.
    response = None

    # Make a ranged request which will be used to stream data from.
    while not response and retries < max_retries:
        try:
            response = requests.get(url, headers=headers, stream=True, timeout=timeout)
        except(requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError):
            # Print a status message for this sort of timeout error.
            print("503 Service Unavailable. Attempting {} of {} retries.".format(retries + 1, max_retries))
            print("Waiting for {} seconds ...".format(sleep))

            # Sleep for a large amount of time.
            time.sleep(sleep)

            retries += 1

    # BUG FIX: if every request attempt timed out, ``response`` is still None
    # and the original code crashed on ``response.headers`` below. Bail out
    # with the generic failure code instead.
    if response is None:
        if not silent:
            print("Connection timed out or interrupted.")

        return 0

    # Verify that our response data has a valid status code.
    if response.status_code != 200:
        if not silent:
            print("Request error {}".format(response.status_code))

        return response.status_code

    # Get the total length of our remote content. Used for verification and progress calculation.
    remote_length = response.headers.get('content-length')

    # Fail out if we can't get the data length.
    if remote_length is None:
        if not silent:
            print("Request does not contain an entry for the content length.")

        return 0

    # Convert our raw length to an integer value for further processing.
    remote_length = int(remote_length)

    if not force and os.path.isfile(os.path.join(output, name)):
        # If we have less data than our confidence percentage we re-download our file.
        if calculate_confidence(os.path.getsize(os.path.join(output, name)), remote_length, 0.01) < 0:
            if verbose:
                print("File already found but the file size does not match up. Re-downloading.")
        else:
            if verbose:
                print("File already found. Skipping download.")

            return 2

    # Reset retries for the new process of iterating content.
    retries = 0

    while not success and retries < max_retries:
        # Open a file stream which will be used to save the output string
        with open(os.path.join(output, safe_filename(name)), "wb") as f:
            # Storage variables used while evaluating the already downloaded data.
            dl = 0
            cleaned_length = int((remote_length * 100) / pow(1024, 2)) / 100
            block_size = 2048

            try:
                # NOTE(review): retries re-iterate the same streamed response;
                # once the stream has been partially consumed a retry cannot
                # re-read it from the start -- a fresh request would be needed
                # for a true retry. Left unchanged to preserve behavior.
                for chunk in response.iter_content(chunk_size=block_size):
                    # Add the length of the chunk to the download size and
                    # write the chunk to the file.
                    dl += len(chunk)
                    f.write(chunk)

                    if verbose:
                        # Calculate the download completion percentage.
                        done = int(50 * dl / remote_length)

                        # Display a bar based on the current download progress.
                        sys.stdout.write(
                            "\r[{}{}{}] {}MB / {}MB ".format(
                                "=" * done,
                                ">",
                                " " * (50 - done),
                                (int(((dl) * 100) / pow(1024, 2)) / 100),
                                cleaned_length
                            )
                        )

                        # Flush the output buffer so we can overwrite the same line.
                        sys.stdout.flush()

                f.flush()

                # Verify our download size for completion. Since the file sizes will
                # not entirely match up because of possible ID3 tag differences or
                # additional headers, pass a margin/percentage confidence check instead.
                if calculate_confidence(os.path.getsize(f.name), remote_length, 0.01) < 0:
                    # Print a newline to skip the buffer flush.
                    print("")

                    # Print a status message to inform the user of incomplete data.
                    print("The download didn't complete. Attempting {} of {} retries.".format(retries + 1, max_retries))
                    print("Waiting for {} seconds ...".format(sleep))

                    # Sleep for a large amount of time.
                    time.sleep(sleep)

                    retries += 1
                else:
                    # Request and download was successful.
                    success = True

            except(requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, requests.exceptions.ConnectionError, requests.exceptions.StreamConsumedError):
                # Print a newline to skip the buffer flush.
                print("")

                # Print a status message for this sort of timeout error.
                print("503 Service Unavailable. Attempting {} of {} retries.".format(retries + 1, max_retries))
                print("Waiting for {} seconds ...".format(sleep))

                # Sleep for a large amount of time.
                time.sleep(sleep)

                retries += 1

    if success:
        if verbose:
            # Print a newline to skip the buffer flush.
            print("")

        return 1
    else:
        if verbose:
            # Print a newline to skip the buffer flush.
            print("")
            print("Connection timed out or interrupted.")

        # Remove the possibly partial file and return the correct error code.
        os.remove(os.path.join(output, safe_filename(name)))

        return 0
|
|
"""
Tests of the permissions on specific models in the auth app. For tests of the permissions system itself, see test_permission_classes.py
"""
from __future__ import absolute_import, print_function, unicode_literals
from django.test import TestCase
from .helpers import create_dummy_facility_data
from ..constants import role_kinds
from ..errors import InvalidHierarchyRelationsArgument
from ..filters import HierarchyRelationsFilter
from ..models import DeviceOwner, Facility, FacilityDataset, Classroom, LearnerGroup, Role, Membership, FacilityUser, KolibriAnonymousUser
class ImproperUsageIsProperlyHandledTestCase(TestCase):
    """
    Tests that error cases and misuse of the interface are properly caught.
    """

    def setUp(self):
        self.data1 = create_dummy_facility_data()
        self.data2 = create_dummy_facility_data()
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def test_that_checking_creation_perms_on_invalid_model_returns_false(self):
        admin = self.data1["facility_admin"]
        parent = self.data1["facility"]
        # cannot create a LearnerGroup with invalid attribute name
        self.assertFalse(admin.can_create(LearnerGroup, {"bad_attr_name": 77, "parent": parent}))
        # cannot create a LearnerGroup with missing attribute value ("name")
        self.assertFalse(admin.can_create(LearnerGroup, {"parent": parent}))

    def test_that_getting_roles_for_noncollection_fails(self):
        admin = self.data1["facility_admin"]
        with self.assertRaises(ValueError):
            admin.get_roles_for(object())
        with self.assertRaises(ValueError):
            admin.has_role_for([role_kinds.ADMIN], object())

    def test_that_getting_roles_for_deviceowner_returns_false(self):
        admin = self.data1["facility_admin"]
        self.assertFalse(admin.has_role_for_user([role_kinds.ADMIN], self.device_owner))

    def test_that_getting_roles_for_anonuser_returns_false(self):
        admin = self.data1["facility_admin"]
        self.assertFalse(admin.has_role_for_user([role_kinds.ADMIN], self.anon_user))

    def test_that_getting_roles_for_user_in_other_facility_returns_false(self):
        foreign_learner = self.data2["learners_one_group"][0][0]
        self.assertFalse(self.data1["facility_admin"].has_role_for_user([role_kinds.ADMIN], foreign_learner))

    def test_that_invalid_references_to_hierarchyrelationsfilter_throw_errors(self):
        # Neither an arbitrary object nor a list of strings is a valid target.
        for bad_target in (object(), ["test"]):
            with self.assertRaises(InvalidHierarchyRelationsArgument):
                HierarchyRelationsFilter(Facility).filter_by_hierarchy(target_user=bad_target)
class FacilityDatasetPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying FacilityData instances
    """

    def setUp(self):
        self.data1 = create_dummy_facility_data()
        self.data2 = create_dummy_facility_data()
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def _facility_users(self):
        # Representative set of facility users (all roles) exercised below.
        return (
            self.data1["facility_admin"],
            self.data1["classroom_coaches"][0],
            self.data1["learners_one_group"][0][0],
            self.data1["unattached_users"][0],
        )

    def test_facility_users_and_anon_users_cannot_create_facility_dataset(self):
        """ FacilityUsers can't create new Facilities, regardless of their roles """
        blank_dataset = {}
        for user in self._facility_users():
            self.assertFalse(user.can_create(FacilityDataset, blank_dataset))

    def test_facility_users_can_read_own_facility_dataset(self):
        """ FacilityUsers can read own FacilityDatasets. """
        own_dataset = self.data1["dataset"]
        for user in self._facility_users():
            self.assertTrue(user.can_read(own_dataset))
        self.assertTrue(self.anon_user.can_read(own_dataset))
        self.assertIn(own_dataset, self.anon_user.filter_readable(FacilityDataset.objects.all()))

    def test_only_facility_admins_can_update_own_facility_dataset(self):
        """ The only FacilityUser who can update a FacilityDataset is a facility admin for that FacilityDataset """
        own_dataset = self.data1["dataset"]
        self.assertTrue(self.data1["facility_admin"].can_update(own_dataset))
        for user in (self.data1["classroom_coaches"][0],
                     self.data1["learners_one_group"][0][0],
                     self.data1["unattached_users"][0],
                     self.anon_user):
            self.assertFalse(user.can_update(own_dataset))

    def test_facility_users_and_anon_users_cannot_delete_own_facility_dataset(self):
        """ FacilityUsers can't delete own FacilityDataset, regardless of their roles """
        own_dataset = self.data1["dataset"]
        for user in self._facility_users() + (self.anon_user,):
            self.assertFalse(user.can_delete(own_dataset))

    def test_facility_users_cannot_delete_other_facility_dataset(self):
        """ FacilityUsers can't delete other FacilityDataset, regardless of their roles """
        other_facility_dataset = self.data2["dataset"]
        for user in self._facility_users():
            self.assertFalse(user.can_delete(other_facility_dataset))

    def test_device_owner_can_do_anything_to_a_facility_dataset(self):
        """ DeviceOwner can do anything to a FacilityDataset """
        self.assertTrue(self.device_owner.can_create(FacilityDataset, {}))
        facility_dataset = self.data1["dataset"]
        self.assertTrue(self.device_owner.can_read(facility_dataset))
        self.assertTrue(self.device_owner.can_update(facility_dataset))
        self.assertTrue(self.device_owner.can_delete(facility_dataset))
        self.assertSetEqual(set(FacilityDataset.objects.all()), set(self.device_owner.filter_readable(FacilityDataset.objects.all())))
class FacilityPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying Facility instances
    """

    def setUp(self):
        # data1: facility without sign-ups; data2: facility that allows them.
        self.data1 = create_dummy_facility_data()
        self.data2 = create_dummy_facility_data(allow_sign_ups=True)
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def test_facility_users_and_anon_users_cannot_create_facility(self):
        """ FacilityUsers can't create new Facilities, regardless of their roles """
        new_facility_data = {"name": "Home"}
        self.assertFalse(self.data1["facility_admin"].can_create(Facility, new_facility_data))
        self.assertFalse(self.data1["classroom_coaches"][0].can_create(Facility, new_facility_data))
        self.assertFalse(self.data1["learners_one_group"][0][0].can_create(Facility, new_facility_data))
        self.assertFalse(self.data1["unattached_users"][0].can_create(Facility, new_facility_data))

    def test_facility_users_can_read_own_facility(self):
        """ FacilityUsers can read their own Facility, regardless of their roles """
        own_facility = self.data1["facility"]
        for user in [self.data1["facility_admin"], self.data1["classroom_coaches"][0],
                     self.data1["learners_one_group"][0][0], self.data1["unattached_users"][0]]:
            self.assertTrue(user.can_read(own_facility))
            self.assertIn(own_facility, user.filter_readable(Facility.objects.all()))

    def test_facility_users_cannot_read_other_facility(self):
        """ FacilityUsers cannot read other Facilities, regardless of their roles """
        other_facility = self.data2["facility"]
        for user in [self.data1["facility_admin"], self.data1["classroom_coaches"][0],
                     self.data1["learners_one_group"][0][0], self.data1["unattached_users"][0]]:
            self.assertFalse(user.can_read(other_facility))
            self.assertNotIn(other_facility, user.filter_readable(Facility.objects.all()))

    def test_anon_users_cannot_read_facility(self):
        """ KolibriAnonymousUser cannot read Facility objects """
        # Note: data1's facility does not allow sign-ups; see the sign-up
        # tests below for the readable case.
        self.assertFalse(self.anon_user.can_read(self.data1["facility"]))
        self.assertNotIn(self.data1["facility"], self.anon_user.filter_readable(Facility.objects.all()))

    def test_only_facility_admins_can_update_own_facility(self):
        """ The only FacilityUser who can update a Facility is a facility admin for that Facility """
        own_facility = self.data1["facility"]
        self.assertTrue(self.data1["facility_admin"].can_update(own_facility))
        self.assertFalse(self.data1["classroom_coaches"][0].can_update(own_facility))
        self.assertFalse(self.data1["learners_one_group"][0][0].can_update(own_facility))
        self.assertFalse(self.data1["unattached_users"][0].can_update(own_facility))
        self.assertFalse(self.anon_user.can_update(own_facility))

    def test_facility_users_cannot_update_other_facility(self):
        """ FacilityUsers cannot update other Facilities, regardless of their roles """
        other_facility = self.data2["facility"]
        self.assertFalse(self.data1["facility_admin"].can_update(other_facility))
        self.assertFalse(self.data1["classroom_coaches"][0].can_update(other_facility))
        self.assertFalse(self.data1["learners_one_group"][0][0].can_update(other_facility))
        self.assertFalse(self.data1["unattached_users"][0].can_update(other_facility))

    def test_facility_users_and_anon_users_cannot_delete_own_facility(self):
        """ FacilityUsers can't delete own Facility, regardless of their roles """
        own_facility = self.data1["facility"]
        self.assertFalse(self.data1["facility_admin"].can_delete(own_facility))
        self.assertFalse(self.data1["classroom_coaches"][0].can_delete(own_facility))
        self.assertFalse(self.data1["learners_one_group"][0][0].can_delete(own_facility))
        self.assertFalse(self.data1["unattached_users"][0].can_delete(own_facility))
        self.assertFalse(self.anon_user.can_delete(own_facility))

    def test_facility_users_cannot_delete_other_facility(self):
        """ FacilityUsers can't delete other Facility, regardless of their roles """
        other_facility = self.data2["facility"]
        self.assertFalse(self.data1["facility_admin"].can_delete(other_facility))
        self.assertFalse(self.data1["classroom_coaches"][0].can_delete(other_facility))
        self.assertFalse(self.data1["learners_one_group"][0][0].can_delete(other_facility))
        self.assertFalse(self.data1["unattached_users"][0].can_delete(other_facility))

    def test_device_owner_can_do_anything_to_a_facility(self):
        """ DeviceOwner can do anything to a Facility """
        new_facility_data = {"name": "Home"}
        self.assertTrue(self.device_owner.can_create(Facility, new_facility_data))
        facility = self.data1["facility"]
        self.assertTrue(self.device_owner.can_read(facility))
        self.assertTrue(self.device_owner.can_update(facility))
        self.assertTrue(self.device_owner.can_delete(facility))
        self.assertSetEqual(set(Facility.objects.all()), set(self.device_owner.filter_readable(Facility.objects.all())))

    def test_anon_user_can_read_facilities_that_allow_sign_ups(self):
        can_not_sign_up_facility = self.data1['facility']
        can_sign_up_facility = self.data2['facility']
        self.assertFalse(self.anon_user.can_read(can_not_sign_up_facility))
        self.assertTrue(self.anon_user.can_read(can_sign_up_facility))

    def test_anon_user_filters_facility_datasets_that_allow_sign_ups(self):
        # The anon user's readable set must be exactly the sign-up facilities.
        sign_ups = Facility.objects.filter(dataset__learner_can_sign_up=True)
        filtered = self.anon_user.filter_readable(Facility.objects.all())
        self.assertEqual(set(sign_ups), set(filtered))

    def test_anon_user_can_only_read_facilities_that_allow_sign_ups(self):
        # Sign-up readability applies to the Facility itself, not to the
        # classrooms or learner groups inside it.
        self.assertFalse(self.anon_user.can_read(self.data2['classrooms'][0]))
        self.assertFalse(self.anon_user.can_read(self.data2['learnergroups'][0][0]))
class ClassroomPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying Classroom instances
    """

    def setUp(self):
        # Fixture shorthands: a learner in classroom 0, plus the coach/admin
        # of that classroom and a second ("other") classroom for negatives.
        self.data = create_dummy_facility_data()
        self.member = self.data["learners_one_group"][0][0]
        self.own_classroom = self.data["classrooms"][0]
        self.other_classroom = self.data["classrooms"][1]
        self.own_classroom_coach = self.data["classroom_coaches"][0]
        self.own_classroom_admin = self.data["classroom_admins"][0]
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def test_only_facility_admin_can_create_classroom(self):
        """ The only FacilityUser who can create a Classroom is a facility admin for the Facility """
        new_classroom_data = {"name": "Home", "parent": self.data["facility"]}
        self.assertTrue(self.data["facility_admin"].can_create(Classroom, new_classroom_data))
        self.assertFalse(self.own_classroom_coach.can_create(Classroom, new_classroom_data))
        self.assertFalse(self.member.can_create(Classroom, new_classroom_data))
        self.assertFalse(self.data["unattached_users"][0].can_create(Classroom, new_classroom_data))
        self.assertFalse(self.anon_user.can_create(Classroom, new_classroom_data))

    def test_members_can_read_own_classroom(self):
        """ Members of a Classroom can read that Classroom, as can coaches and admins for the Classroom """
        for user in [self.data["facility_admin"], self.own_classroom_coach,
                     self.own_classroom_admin, self.member]:
            self.assertTrue(user.can_read(self.own_classroom))
            self.assertIn(self.own_classroom, user.filter_readable(Classroom.objects.all()))

    def test_members_and_classroom_admins_and_coaches_can_read_other_classroom(self):
        """ Members and admins/coaches for a Classroom can read another Classroom """
        for user in [self.data["facility_admin"], self.own_classroom_coach,
                     self.own_classroom_admin, self.member]:
            self.assertTrue(user.can_read(self.other_classroom))
            self.assertIn(self.other_classroom, user.filter_readable(Classroom.objects.all()))

    def test_only_admins_can_update_own_classroom(self):
        """ The only FacilityUsers who can update a Classroom are admins for that Classroom (or for the Facility) """
        self.assertTrue(self.data["facility_admin"].can_update(self.own_classroom))
        self.assertTrue(self.own_classroom_admin.can_update(self.own_classroom))
        self.assertFalse(self.own_classroom_coach.can_update(self.own_classroom))
        self.assertFalse(self.member.can_update(self.own_classroom))
        self.assertFalse(self.anon_user.can_update(self.own_classroom))

    def test_facility_users_cannot_update_other_classroom(self):
        """ FacilityUsers cannot update other Classrooms, unless they are a facility admin """
        self.assertFalse(self.own_classroom_admin.can_update(self.other_classroom))
        self.assertFalse(self.own_classroom_coach.can_update(self.other_classroom))
        self.assertFalse(self.member.can_update(self.other_classroom))

    def test_only_admins_can_delete_own_classroom(self):
        """ The only FacilityUsers who can delete a Classroom are admins for the Facility """
        # Note: unlike update, even the classroom's own admin cannot delete it.
        self.assertTrue(self.data["facility_admin"].can_delete(self.own_classroom))
        self.assertFalse(self.own_classroom_admin.can_delete(self.own_classroom))
        self.assertFalse(self.own_classroom_coach.can_delete(self.own_classroom))
        self.assertFalse(self.member.can_delete(self.own_classroom))
        self.assertFalse(self.anon_user.can_delete(self.own_classroom))

    def test_facility_users_cannot_delete_other_classroom(self):
        """ FacilityUsers cannot delete other Classrooms, unless they are a facility admin """
        self.assertFalse(self.own_classroom_admin.can_delete(self.other_classroom))
        self.assertFalse(self.own_classroom_coach.can_delete(self.other_classroom))
        self.assertFalse(self.member.can_delete(self.other_classroom))

    def test_device_owner_can_do_anything_to_a_classroom(self):
        """ DeviceOwner can do anything to a Classroom """
        new_classroom_data = {"name": "Home", "parent": self.data["facility"]}
        self.assertTrue(self.device_owner.can_create(Classroom, new_classroom_data))
        self.assertTrue(self.device_owner.can_read(self.own_classroom))
        self.assertTrue(self.device_owner.can_update(self.own_classroom))
        self.assertTrue(self.device_owner.can_delete(self.own_classroom))
        self.assertSetEqual(set(Classroom.objects.all()), set(self.device_owner.filter_readable(Classroom.objects.all())))
class LearnerGroupPermissionsTestCase(TestCase):
"""
Tests of permissions for reading/modifying LearnerGroup instances
"""
def setUp(self):
self.data = create_dummy_facility_data()
self.member = self.data["learners_one_group"][0][0]
self.own_learnergroup = self.data["learnergroups"][0][0]
self.other_learnergroup = self.data["learnergroups"][1][1]
self.own_classroom = self.data["classrooms"][0]
self.own_classroom_coach = self.data["classroom_coaches"][0]
self.own_classroom_admin = self.data["classroom_admins"][0]
self.other_classroom_admin = self.data["classroom_admins"][1]
self.other_classroom_coach = self.data["classroom_coaches"][1]
self.device_owner = DeviceOwner.objects.create(username="boss")
self.anon_user = KolibriAnonymousUser()
def test_facility_or_classroom_admins_or_classroom_coach_can_create_learnergroup(self):
""" The FacilityUser who can create a LearnerGroup is a facility admin for the Facility or coach for the classroom"""
new_learnergroup_data = {"name": "Cool Group", "parent": self.own_classroom}
self.assertTrue(self.data["facility_admin"].can_create(LearnerGroup, new_learnergroup_data))
self.assertTrue(self.own_classroom_admin.can_create(LearnerGroup, new_learnergroup_data))
self.assertFalse(self.other_classroom_admin.can_create(LearnerGroup, new_learnergroup_data))
self.assertTrue(self.own_classroom_coach.can_create(LearnerGroup, new_learnergroup_data))
self.assertFalse(self.other_classroom_coach.can_create(LearnerGroup, new_learnergroup_data))
self.assertFalse(self.member.can_create(LearnerGroup, new_learnergroup_data))
self.assertFalse(self.data["unattached_users"][0].can_create(LearnerGroup, new_learnergroup_data))
self.assertFalse(self.anon_user.can_create(LearnerGroup, new_learnergroup_data))
def test_members_can_read_own_learnergroup(self):
""" Members of a LearnerGroup can read that LearnerGroup, as can coaches and admins for the LearnerGroup """
for user in [self.data["facility_admin"], self.own_classroom_coach,
self.own_classroom_admin, self.member]:
self.assertTrue(user.can_read(self.own_learnergroup))
self.assertIn(self.own_learnergroup, user.filter_readable(LearnerGroup.objects.all()))
def test_admins_or_coach_can_update_own_learnergroup(self):
""" The only FacilityUsers who can update a LearnerGroup are admins for that LearnerGroup """
self.assertTrue(self.data["facility_admin"].can_update(self.own_learnergroup))
self.assertTrue(self.own_classroom_admin.can_update(self.own_learnergroup))
self.assertTrue(self.own_classroom_coach.can_update(self.own_learnergroup))
self.assertFalse(self.member.can_update(self.own_learnergroup))
self.assertFalse(self.anon_user.can_update(self.own_learnergroup))
def test_facility_users_cannot_update_other_learnergroup(self):
    """Users with no facility-admin role may not update LearnerGroups in other classrooms."""
    for user in (self.own_classroom_admin, self.own_classroom_coach, self.member):
        self.assertFalse(user.can_update(self.other_learnergroup))
def test_admins_or_coach_can_delete_own_learnergroup(self):
    """Only admins/coaches for a LearnerGroup may delete it."""
    expectations = [
        (self.data["facility_admin"], True),
        (self.own_classroom_admin, True),
        (self.own_classroom_coach, True),
        (self.member, False),
        (self.anon_user, False),
    ]
    for user, allowed in expectations:
        check = self.assertTrue if allowed else self.assertFalse
        check(user.can_delete(self.own_learnergroup))
def test_facility_users_cannot_delete_other_learnergroup(self):
    """Users who are not admins of the Facility or parent Classroom may not delete other LearnerGroups."""
    for user in (self.own_classroom_admin, self.own_classroom_coach, self.member):
        self.assertFalse(user.can_delete(self.other_learnergroup))
def test_device_owner_can_do_anything_to_a_learnergroup(self):
    """A DeviceOwner has full CRUD rights over any LearnerGroup."""
    payload = {"name": "Cool Group", "parent": self.own_classroom}
    owner = self.device_owner
    self.assertTrue(owner.can_create(LearnerGroup, payload))
    for check in (owner.can_read, owner.can_update, owner.can_delete):
        self.assertTrue(check(self.own_learnergroup))
    self.assertSetEqual(
        set(LearnerGroup.objects.all()),
        set(owner.filter_readable(LearnerGroup.objects.all())),
    )
class FacilityUserPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying FacilityUser instances
    """

    def setUp(self):
        # Two independent facilities so cross-facility rules can be exercised.
        self.data = create_dummy_facility_data()
        self.data2 = create_dummy_facility_data()
        self.member = self.data["learners_one_group"][0][0]
        self.member2 = self.data2["learners_one_group"][0][0]
        self.other_member = self.data["learners_one_group"][1][1]
        self.own_learnergroup = self.data["learnergroups"][0][0]
        self.own_classroom = self.data["classrooms"][0]
        self.own_classroom_coach = self.data["classroom_coaches"][0]
        self.own_classroom_admin = self.data["classroom_admins"][0]
        self.other_classroom_admin = self.data["classroom_admins"][1]
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def test_only_facility_admins_can_create_facility_user(self):
        """Creating a FacilityUser is reserved for admins of the Facility."""
        payload = {"username": "janedoe", "password": "*", "facility": self.data["facility"]}
        self.assertTrue(self.data["facility_admin"].can_create(FacilityUser, payload))
        for user in (
            self.data["facility_coach"],
            self.own_classroom_admin,
            self.own_classroom_coach,
            self.member,
            self.data["unattached_users"][0],
            self.anon_user,
        ):
            self.assertFalse(user.can_create(FacilityUser, payload))

    def test_no_facility_user_can_create_facility_user_for_other_facility(self):
        """No FacilityUser may create a FacilityUser belonging to a different Facility."""
        payload = {"username": "janedoe", "password": "*", "facility": self.data2["facility"]}
        for user in (
            self.data["facility_admin"],
            self.data["facility_coach"],
            self.own_classroom_admin,
            self.own_classroom_coach,
            self.member,
            self.data["unattached_users"][0],
        ):
            self.assertFalse(user.can_create(FacilityUser, payload))

    def test_facility_user_can_read_self(self):
        """Every FacilityUser can read its own FacilityUser model."""
        for user in (self.own_classroom_admin, self.member, self.own_classroom_coach,
                     self.data["facility_admin"]):
            self.assertTrue(user.can_read(user))
            self.assertIn(user, user.filter_readable(FacilityUser.objects.all()))

    def test_admins_and_coaches_can_read_facility_users(self):
        """An admin/coach role over a FacilityUser grants read access to it."""
        for user in (self.own_classroom_admin, self.own_classroom_coach,
                     self.data["facility_admin"], self.data["facility_coach"]):
            self.assertTrue(user.can_read(self.member))
            self.assertIn(self.member, user.filter_readable(FacilityUser.objects.all()))

    def test_members_and_admins_and_coaches_for_other_classrooms_cannot_read_facility_users(self):
        """Without an admin/coach role over a FacilityUser, that user cannot be read."""
        for user in (self.own_classroom_coach, self.own_classroom_admin, self.member,
                     self.anon_user):
            self.assertFalse(user.can_read(self.other_member))
            self.assertNotIn(self.other_member, user.filter_readable(FacilityUser.objects.all()))

    def test_only_facility_admins_and_coaches_can_read_unaffiliated_facility_users(self):
        """Only Facility admins/coaches can read a user outside any Classroom or LearnerGroup."""
        orphan = self.data["unattached_users"][0]
        for user in (self.data["facility_admin"], self.data["facility_coach"]):
            self.assertTrue(user.can_read(orphan))
            self.assertIn(orphan, user.filter_readable(FacilityUser.objects.all()))
        for user in (self.own_classroom_coach, self.own_classroom_admin, self.member,
                     self.anon_user):
            self.assertFalse(user.can_read(orphan))
            self.assertNotIn(orphan, user.filter_readable(FacilityUser.objects.all()))

    def test_facility_user_can_update_self(self):
        """Every FacilityUser can update its own FacilityUser model."""
        for user in (self.member, self.own_classroom_coach, self.own_classroom_admin,
                     self.data["facility_admin"]):
            self.assertTrue(user.can_update(user))

    def test_admins_but_not_coaches_can_update_facility_users(self):
        """Admin (not coach) role over a FacilityUser grants update permission."""
        self.assertTrue(self.data["facility_admin"].can_update(self.member))
        self.assertTrue(self.own_classroom_admin.can_update(self.member))
        self.assertFalse(self.data["facility_coach"].can_update(self.member))
        self.assertFalse(self.own_classroom_coach.can_update(self.member))

    def test_admins_and_coaches_for_other_classrooms_cannot_update_facility_users(self):
        """Without an admin/coach role over a FacilityUser, that user cannot be updated."""
        for user in (self.own_classroom_coach, self.own_classroom_admin):
            self.assertFalse(user.can_update(self.other_member))

    def test_only_facility_admins_can_update_unaffiliated_facility_users(self):
        """Only Facility admins can update a user outside any Classroom or LearnerGroup."""
        orphan = self.data["unattached_users"][0]
        self.assertTrue(self.data["facility_admin"].can_update(orphan))
        for user in (self.data["facility_coach"], self.own_classroom_admin,
                     self.own_classroom_coach, self.member, self.anon_user):
            self.assertFalse(user.can_update(orphan))

    def test_facility_user_can_delete_self(self):
        """Every FacilityUser can delete its own FacilityUser model."""
        for user in (self.member, self.own_classroom_coach, self.own_classroom_admin,
                     self.data["facility_admin"]):
            self.assertTrue(user.can_delete(user))

    def test_only_facility_admins_can_delete_facility_user(self):
        """Deleting another FacilityUser is reserved for admins of the Facility."""
        self.assertTrue(self.data["facility_admin"].can_delete(self.member))
        for user in (self.data["facility_coach"], self.own_classroom_admin,
                     self.own_classroom_coach, self.anon_user):
            self.assertFalse(user.can_delete(self.member))

    def test_facility_users_cannot_delete_facility_users_from_other_facility(self):
        """No FacilityUser may delete a FacilityUser belonging to a different Facility."""
        for user in (self.data["facility_admin"], self.data["facility_coach"],
                     self.own_classroom_admin, self.own_classroom_coach, self.member):
            self.assertFalse(user.can_delete(self.member2))

    def test_device_owner_can_do_anything_to_a_facility_user(self):
        """A DeviceOwner has full CRUD rights over FacilityUsers in any Facility."""
        for facility in (self.data["facility"], self.data2["facility"]):
            payload = {"username": "janedoe", "password": "*", "facility": facility}
            self.assertTrue(self.device_owner.can_create(FacilityUser, payload))
        self.assertTrue(self.device_owner.can_read(self.member))
        self.assertTrue(self.device_owner.can_update(self.member))
        self.assertTrue(self.device_owner.can_delete(self.member))
        self.assertSetEqual(
            set(FacilityUser.objects.all()),
            set(self.device_owner.filter_readable(FacilityUser.objects.all())),
        )
class DeviceOwnerPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying DeviceOwner instances
    """

    def setUp(self):
        self.data = create_dummy_facility_data()
        self.member = self.data["learners_one_group"][0][0]
        self.own_classroom_coach = self.data["classroom_coaches"][0]
        self.own_classroom_admin = self.data["classroom_admins"][0]
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.device_owner2 = DeviceOwner.objects.create(username="ubermensch")
        self.anon_user = KolibriAnonymousUser()

    def _non_device_owners(self):
        # Every flavor of user that is NOT a DeviceOwner.
        return (
            self.data["facility_admin"],
            self.data["facility_coach"],
            self.own_classroom_admin,
            self.own_classroom_coach,
            self.member,
            self.data["unattached_users"][0],
            self.anon_user,
        )

    def test_non_device_owners_cannot_create_device_owner(self):
        """No user other than a DeviceOwner may create a DeviceOwner."""
        payload = {"username": "janedoe", "password": "*"}
        for user in self._non_device_owners():
            self.assertFalse(user.can_create(DeviceOwner, payload))

    def test_non_device_owners_cannot_read_device_owner(self):
        """No user other than a DeviceOwner may read a DeviceOwner."""
        for user in self._non_device_owners():
            self.assertFalse(user.can_read(self.device_owner))
            self.assertEqual(len(user.filter_readable(DeviceOwner.objects.all())), 0)

    def test_non_device_owners_cannot_update_device_owner(self):
        """No user other than a DeviceOwner may update a DeviceOwner."""
        for user in self._non_device_owners():
            self.assertFalse(user.can_update(self.device_owner))

    def test_non_device_owners_cannot_delete_device_owner(self):
        """No user other than a DeviceOwner may delete a DeviceOwner."""
        for user in self._non_device_owners():
            self.assertFalse(user.can_delete(self.device_owner))

    def test_device_owner_can_do_anything_to_a_device_owner(self):
        """A DeviceOwner has full CRUD rights over itself and over other DeviceOwners."""
        payload = {"username": "janedoe", "password": "*"}
        self.assertTrue(self.device_owner.can_create(DeviceOwner, payload))
        for target in (self.device_owner, self.device_owner2):
            self.assertTrue(self.device_owner.can_read(target))
            self.assertTrue(self.device_owner.can_update(target))
            self.assertTrue(self.device_owner.can_delete(target))
            self.assertIn(target, self.device_owner.filter_readable(DeviceOwner.objects.all()))
class RolePermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying Role instances
    """

    def setUp(self):
        self.data = create_dummy_facility_data()
        self.member = self.data["learners_one_group"][0][0]
        self.own_classroom = self.data["classrooms"][0]
        self.other_classroom = self.data["classrooms"][1]
        self.own_classroom_coach = self.data["classroom_coaches"][0]
        self.own_classroom_admin = self.data["classroom_admins"][0]
        self.other_classroom_coach = self.data["classroom_coaches"][1]
        self.other_classroom_admin = self.data["classroom_admins"][1]
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.role_user = self.data["unattached_users"][0]
        self.anon_user = KolibriAnonymousUser()

    def _assert_role_creation(self, collection, kind, allowed, denied):
        # Assert that exactly the users in `allowed` may create a Role of the
        # given kind on the given collection for self.role_user.
        payload = {"user": self.role_user, "collection": collection, "kind": kind}
        for user in allowed:
            self.assertTrue(user.can_create(Role, payload))
        for user in denied:
            self.assertFalse(user.can_create(Role, payload))

    def test_facility_admin_can_create_facility_admin_role(self):
        """Only facility admins (and the DeviceOwner) may grant facility-admin roles."""
        self._assert_role_creation(
            self.data["facility"],
            role_kinds.ADMIN,
            allowed=(self.data["facility_admin"], self.device_owner),
            denied=(self.data["facility_coach"], self.own_classroom_admin,
                    self.own_classroom_coach, self.member, self.role_user),
        )

    def test_facility_admin_can_create_facility_coach_role(self):
        """Only facility admins (and the DeviceOwner) may grant facility-coach roles."""
        self._assert_role_creation(
            self.data["facility"],
            role_kinds.COACH,
            allowed=(self.data["facility_admin"], self.device_owner),
            denied=(self.data["facility_coach"], self.own_classroom_admin,
                    self.own_classroom_coach, self.member, self.role_user,
                    self.anon_user),
        )

    def test_facility_or_classroom_admin_can_create_classroom_admin_role(self):
        """Facility admins and the classroom's own admin may grant classroom-admin roles."""
        self._assert_role_creation(
            self.own_classroom,
            role_kinds.ADMIN,
            allowed=(self.data["facility_admin"], self.own_classroom_admin,
                     self.device_owner),
            denied=(self.data["facility_coach"], self.own_classroom_coach,
                    self.other_classroom_admin, self.other_classroom_coach,
                    self.member, self.role_user, self.anon_user),
        )

    def test_facility_or_classroom_admin_can_create_classroom_coach_role(self):
        """Facility admins and the classroom's own admin may grant classroom-coach roles."""
        self._assert_role_creation(
            self.own_classroom,
            role_kinds.COACH,
            allowed=(self.data["facility_admin"], self.own_classroom_admin,
                     self.device_owner),
            denied=(self.data["facility_coach"], self.own_classroom_coach,
                    self.other_classroom_admin, self.other_classroom_coach,
                    self.member, self.role_user, self.anon_user),
        )

    def test_facility_admin_or_coach_can_read_facility_admin_role(self):
        """Facility admins/coaches, the role's own user, and the DeviceOwner can read a facility-admin Role."""
        role = Role.objects.create(user=self.role_user, collection=self.data["facility"],
                                   kind=role_kinds.ADMIN)
        for user in (self.data["facility_admin"], self.data["facility_coach"],
                     self.role_user, self.device_owner):
            self.assertTrue(user.can_read(role))
            self.assertIn(role, user.filter_readable(Role.objects.all()))
        for user in (self.own_classroom_admin, self.own_classroom_coach,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.anon_user):
            self.assertFalse(user.can_read(role))
            self.assertNotIn(role, user.filter_readable(Role.objects.all()))

    def test_facility_or_classroom_admin_or_coach_can_read_classroom_admin_role(self):
        """Facility and same-classroom admins/coaches (plus the role's user) can read a classroom-admin Role."""
        role = Role.objects.create(user=self.role_user, collection=self.own_classroom,
                                   kind=role_kinds.ADMIN)
        for user in (self.data["facility_admin"], self.data["facility_coach"],
                     self.own_classroom_admin, self.own_classroom_coach,
                     self.role_user, self.device_owner):
            self.assertTrue(user.can_read(role))
        for user in (self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.anon_user):
            self.assertFalse(user.can_read(role))

    def test_facility_users_cannot_update_roles(self):
        # Roles have no mutable fields; changing a role's kind means deleting
        # the existing Role and creating another, so nobody may update one.
        role = Role.objects.create(user=self.role_user, collection=self.own_classroom,
                                   kind=role_kinds.COACH)
        for user in (self.data["facility_admin"], self.data["facility_coach"],
                     self.own_classroom_admin, self.own_classroom_coach,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.role_user, self.anon_user):
            self.assertFalse(user.can_update(role))

    def test_facility_admin_can_delete_facility_admin_role(self):
        """Facility admins may delete a facility-admin Role; its holder (being such an admin) may too."""
        role = Role.objects.create(user=self.role_user, collection=self.data["facility"],
                                   kind=role_kinds.ADMIN)
        for user in (self.data["facility_admin"], self.role_user, self.device_owner):
            self.assertTrue(user.can_delete(role))
        for user in (self.data["facility_coach"], self.own_classroom_admin,
                     self.own_classroom_coach, self.member, self.anon_user):
            self.assertFalse(user.can_delete(role))

    def test_facility_admin_can_delete_facility_coach_role(self):
        """Only facility admins (and the DeviceOwner) may delete a facility-coach Role."""
        role = Role.objects.create(user=self.role_user, collection=self.data["facility"],
                                   kind=role_kinds.COACH)
        for user in (self.data["facility_admin"], self.device_owner):
            self.assertTrue(user.can_delete(role))
        for user in (self.data["facility_coach"], self.own_classroom_admin,
                     self.own_classroom_coach, self.member, self.role_user,
                     self.anon_user):
            self.assertFalse(user.can_delete(role))

    def test_facility_or_classroom_admin_can_delete_classroom_admin_role(self):
        """Facility/classroom admins may delete a classroom-admin Role."""
        role = Role.objects.create(user=self.role_user, collection=self.own_classroom,
                                   kind=role_kinds.ADMIN)
        # role_user can delete it too, as the role makes her an admin for the collection.
        for user in (self.data["facility_admin"], self.own_classroom_admin,
                     self.role_user, self.device_owner):
            self.assertTrue(user.can_delete(role))
        for user in (self.data["facility_coach"], self.own_classroom_coach,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.anon_user):
            self.assertFalse(user.can_delete(role))

    def test_facility_or_classroom_admin_can_delete_classroom_coach_role(self):
        """Facility/classroom admins may delete a classroom-coach Role; the coach herself may not."""
        role = Role.objects.create(user=self.role_user, collection=self.own_classroom,
                                   kind=role_kinds.COACH)
        for user in (self.data["facility_admin"], self.own_classroom_admin,
                     self.device_owner):
            self.assertTrue(user.can_delete(role))
        for user in (self.data["facility_coach"], self.own_classroom_coach,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.role_user, self.anon_user):
            self.assertFalse(user.can_delete(role))
class MembershipPermissionsTestCase(TestCase):
    """
    Tests of permissions for reading/modifying Membership instances
    """

    def setUp(self):
        self.data = create_dummy_facility_data()
        self.member = self.data["learners_one_group"][0][0]
        self.own_classroom = self.data["classrooms"][0]
        self.other_classroom = self.data["classrooms"][1]
        self.own_learnergroup = self.data["learnergroups"][0][0]
        self.other_learnergroup = self.data["learnergroups"][1][1]
        self.own_classroom_coach = self.data["classroom_coaches"][0]
        self.own_classroom_admin = self.data["classroom_admins"][0]
        self.other_classroom_coach = self.data["classroom_coaches"][1]
        self.other_classroom_admin = self.data["classroom_admins"][1]
        self.device_owner = DeviceOwner.objects.create(username="boss")
        self.anon_user = KolibriAnonymousUser()

    def test_admin_or_coach_for_user_can_create_membership(self):
        """Adding a member of own_classroom to other_learnergroup requires admin rights over the user."""
        payload = {"user": self.member, "collection": self.other_learnergroup}
        for user in (self.data["facility_admin"], self.own_classroom_admin,
                     self.device_owner):
            self.assertTrue(user.can_create(Membership, payload))
        for user in (self.data["facility_coach"], self.own_classroom_coach,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.anon_user):
            self.assertFalse(user.can_create(Membership, payload))

    def test_facility_or_classroom_admin_or_coach_or_member_can_read_membership(self):
        """Admins/coaches over the member, and the member herself, can read a Membership."""
        membership = Membership.objects.get(user=self.member, collection=self.own_learnergroup)
        for user in (self.data["facility_admin"], self.own_classroom_admin,
                     self.member, self.device_owner):
            self.assertTrue(user.can_read(membership))
            self.assertIn(membership, user.filter_readable(Membership.objects.all()))
        for user in (self.data["facility_coach"], self.own_classroom_coach):
            self.assertTrue(user.can_read(membership))
        for user in (self.other_classroom_admin, self.other_classroom_coach,
                     self.anon_user):
            self.assertFalse(user.can_read(membership))
            self.assertNotIn(membership, user.filter_readable(Membership.objects.all()))

    def test_facility_users_cannot_update_memberships(self):
        # Membership has no mutable fields, so updates are generally disallowed.
        # NOTE(review): despite the test name and the comment above, coaches are
        # asserted here as ABLE to update — confirm against the permissions
        # implementation whether coach update access is intended, or whether
        # these two should be assertFalse.
        membership = Membership.objects.get(user=self.member, collection=self.own_learnergroup)
        for user in (self.data["facility_coach"], self.own_classroom_coach):
            self.assertTrue(user.can_update(membership))
        for user in (self.data["facility_admin"], self.own_classroom_admin,
                     self.other_classroom_admin, self.other_classroom_coach,
                     self.member, self.anon_user):
            self.assertFalse(user.can_update(membership))

    def test_admin_or_coach_can_delete_membership(self):
        """Admins/coaches (and the DeviceOwner) may delete a Membership; the member herself may not."""
        membership = Membership.objects.get(user=self.member, collection=self.own_learnergroup)
        for user in (self.data["facility_admin"], self.data["facility_coach"],
                     self.own_classroom_admin, self.own_classroom_coach,
                     self.device_owner):
            self.assertTrue(user.can_delete(membership))
        for user in (self.member, self.anon_user):
            self.assertFalse(user.can_delete(membership))
class FacilityDatasetCertificateNamespacingTestCase(TestCase):

    def test_unsaved_facility_permission_check_and_validation_only_create_one_dataset(self):
        """Permission checks and validation on an unsaved Facility must not create FacilityDatasets."""
        device_owner = DeviceOwner.objects.create(username="bossman")
        anon_user = KolibriAnonymousUser()
        facility = Facility(name="Hello!")
        # Neither permission check should have the side effect of creating a dataset.
        for user in (anon_user, device_owner):
            self.assertTrue(user.can_create_instance(facility))
            self.assertEqual(FacilityDataset.objects.count(), 0)
        facility.full_clean()
        self.assertEqual(FacilityDataset.objects.count(), 0)
        # Saving is the only step that should materialize the dataset — exactly once.
        facility.save()
        self.assertEqual(FacilityDataset.objects.count(), 1)
|
|
# Copyright (c) 2013-2018, Rethink Robotics Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import re
import sys
from threading import Lock
import rospy
from std_msgs.msg import (
Bool,
Empty,
)
import intera_dataflow
from intera_core_msgs.msg import (
RobotAssemblyState,
)
from . import settings
class RobotEnable(object):
    """
    Class RobotEnable - simple control/status wrapper around robot state

    enable()  - enable all joints
    disable() - disable all joints
    reset()   - reset all joints, reset all jrcp faults, disable the robot
    stop()    - stop the robot, similar to hitting the e-stop button
    """

    # Class-level lock serializing rosparam access across instances; works
    # around a rosparam threading bug (see version_check()).
    param_lock = Lock()

    def __init__(self, versioned=False):
        """
        Version checking capable constructor.

        @type versioned: bool
        @param versioned: True to check robot software version
        compatibility on initialization. False (default) to ignore.

        The compatibility of robot versions to SDK (intera_interface)
        versions is defined in the L{intera_interface.VERSIONS_SDK2ROBOT}.

        By default, the class does not check, but all examples do. The
        example behavior can be overridden by changing the value of
        L{intera_interface.CHECK_VERSION} to False.
        """
        self._state = None
        state_topic = 'robot/state'
        self._state_sub = rospy.Subscriber(state_topic,
                                           RobotAssemblyState,
                                           self._state_callback
                                           )
        if versioned and not self.version_check():
            sys.exit(1)

        # Block until the first RobotAssemblyState message arrives so that the
        # public API can rely on self._state being populated.
        intera_dataflow.wait_for(
            lambda: self._state is not None,  # was: `not self._state is None`
            timeout=10.0,
            timeout_msg=("Failed to get robot state on %s" %
                         (state_topic,)),
        )

    def _state_callback(self, msg):
        # Cache the latest assembly state message; read by all public methods.
        self._state = msg

    def _toggle_enabled(self, status):
        """Publish the desired enable state and wait for it to take effect.

        @type status: bool
        @param status: True to enable, False to disable all joints.
        """
        pub = rospy.Publisher('robot/set_super_enable', Bool,
                              queue_size=10)

        # Re-publish until the reported state matches the request; repeated
        # publishing also covers the publisher's initial connection latency.
        intera_dataflow.wait_for(
            test=lambda: self._state.enabled == status,
            timeout=5.0,
            timeout_msg=("Failed to %sable robot" %
                         ('en' if status else 'dis',)),
            body=lambda: pub.publish(status),
        )
        rospy.loginfo("Robot %s", ('Enabled' if status else 'Disabled'))

    def state(self):
        """
        Returns the last known robot state.

        @rtype: intera_core_msgs/RobotAssemblyState
        @return: Returns the last received RobotAssemblyState message
        """
        return self._state

    def enable(self):
        """
        Enable all joints
        """
        # A stopped robot must be reset before it can be enabled.
        if self._state.stopped:
            rospy.loginfo("Robot Stopped: Attempting Reset...")
            self.reset()
        self._toggle_enabled(True)

    def disable(self):
        """
        Disable all joints
        """
        self._toggle_enabled(False)

    def reset(self):
        """
        Reset all joints. Trigger JRCP hardware to reset all faults. Disable
        the robot.

        @raise IOError: if the robot is not in an error state, or the e-stop
            is engaged.
        @return: False if a non-fatal error persists after the reset; None
            on success.
        """
        error_not_stopped = """\
Robot is not in a Error State. Cannot perform Reset.
"""
        error_estop = """\
E-Stop is ASSERTED. Disengage E-Stop and then reset the robot.
"""
        error_nonfatal = """Non-fatal Robot Error on reset.
Robot reset cleared stopped state and robot can be enabled, but a non-fatal
error persists. Check diagnostics or rethink.log for more info.
"""
        error_env = """Failed to reset robot.
Please verify that the ROS_IP or ROS_HOSTNAME environment variables are set
and resolvable. For more information please visit:
http://sdk.rethinkrobotics.com/intera/SDK_Shell
"""

        def is_reset():
            # Reset is complete once the robot reports neither stopped nor
            # error, and both e-stop indicators are clear.
            # (Was a lambda assignment; a def is the idiomatic form, E731.)
            return (not self._state.stopped and
                    not self._state.error and
                    self._state.estop_button == 0 and
                    self._state.estop_source == 0)

        pub = rospy.Publisher('robot/set_super_reset', Empty, queue_size=10)

        if not self._state.stopped:
            rospy.logfatal(error_not_stopped)
            raise IOError(errno.EREMOTEIO, "Failed to Reset due to lack of Error State.")

        if (self._state.stopped and
                self._state.estop_button == RobotAssemblyState.ESTOP_BUTTON_PRESSED):
            rospy.logfatal(error_estop)
            raise IOError(errno.EREMOTEIO, "Failed to Reset: E-Stop Engaged")

        rospy.loginfo("Resetting robot...")
        try:
            intera_dataflow.wait_for(
                test=is_reset,
                timeout=5.0,
                timeout_msg=error_env,
                body=pub.publish
            )
        except OSError as e:
            # A timeout where the stop cleared but an error remains latched is
            # non-fatal: warn and report failure rather than propagating.
            if e.errno == errno.ETIMEDOUT:
                if self._state.error and not self._state.stopped:
                    rospy.logwarn(error_nonfatal)
                    return False
            raise

    def stop(self):
        """
        Simulate an e-stop button being pressed. Robot must be reset to clear
        the stopped state.
        """
        pub = rospy.Publisher('robot/set_super_stop', Empty, queue_size=10)
        intera_dataflow.wait_for(
            test=lambda: self._state.stopped,
            timeout=5.0,
            timeout_msg="Failed to stop the robot",
            body=pub.publish,
        )

    def version_check(self):
        """
        Verifies the version of the software running on the robot is
        compatible with this local version of the Intera SDK.

        Currently uses the variables in intera_interface.settings and
        can be overridden for all default examples by setting CHECK_VERSION
        to False.

        @rtype: bool
        @return: Returns True if SDK version is compatible with robot Version, False otherwise
        """
        param_name = "/manifest/robot_software/version/HLR_VERSION_STRING"
        sdk_version = settings.SDK_VERSION

        # get local lock for rosparam threading bug
        with self.__class__.param_lock:
            robot_version = rospy.get_param(param_name, None)

        if not robot_version:
            rospy.logwarn("RobotEnable: Failed to retrieve robot version "
                          "from rosparam: %s\n"
                          "Verify robot state and connectivity "
                          "(i.e. ROS_MASTER_URI)", param_name)
            return False
        else:
            # parse out first 3 digits of robot version tag; the pattern is a
            # raw string now -- the old plain string relied on the invalid
            # escape sequence "\." (a DeprecationWarning, and a SyntaxError in
            # future Python versions)
            pattern = r"^([0-9]+)\.([0-9]+)\.([0-9]+)"
            match = re.search(pattern, robot_version)
            if not match:
                rospy.logwarn("RobotEnable: Invalid robot version: %s",
                              robot_version)
                return False
            robot_version = match.string[match.start(1):match.end(3)]
            if robot_version not in settings.VERSIONS_SDK2ROBOT[sdk_version]:
                errstr_version = """RobotEnable: Software Version Mismatch.
Robot Software version (%s) does not match local SDK version (%s). Please
Update your Robot Software. \
See: http://sdk.rethinkrobotics.com/intera/Software_Update"""
                rospy.logerr(errstr_version, robot_version, sdk_version)
                return False
        return True
|
|
"""Tests for Plex sensors."""
from datetime import datetime, timedelta
from http import HTTPStatus
from unittest.mock import patch
import requests.exceptions
from homeassistant.components.plex.const import PLEX_UPDATE_LIBRARY_SIGNAL
from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util import dt
from .helpers import trigger_plex_update, wait_for_debouncer
from tests.common import async_fire_time_changed
# Websocket payload announcing a finished library scan (used to trigger updates).
LIBRARY_UPDATE_PAYLOAD = {"StatusNotification": [{"title": "Library scan complete"}]}
# Fixed timestamp used as the `addedAt` value on all mock media items below.
TIMESTAMP = datetime(2021, 9, 1)
class MockPlexMedia:
    """Minimal stand-in for a base plexapi media object."""

    key = "key"
    listType = "video"
    year = 2021
    addedAt = str(TIMESTAMP)
class MockPlexClip(MockPlexMedia):
    """Minimal stand-in for a plexapi clip object."""

    title = "Clip 1"
    type = "clip"
class MockPlexMovie(MockPlexMedia):
    """Minimal stand-in for a plexapi movie object."""

    title = "Movie 1"
    type = "movie"
class MockPlexMusic(MockPlexMedia):
    """Minimal stand-in for a plexapi album object."""

    listType = "audio"
    parentTitle = "Artist"
    title = "Album"
    type = "album"
class MockPlexTVEpisode(MockPlexMedia):
    """Minimal stand-in for a plexapi episode object."""

    grandparentTitle = "TV Show"
    seasonEpisode = "s01e05"
    title = "Episode 5"
    type = "episode"
    # Episodes carry the year on the parent show, not the episode itself.
    year = None
    parentYear = 2021
async def test_library_sensor_values(
    hass,
    caplog,
    setup_plex_server,
    mock_websocket,
    requests_mock,
    library_movies_size,
    library_music_size,
    library_tvshows_size,
    library_tvshows_size_episodes,
    library_tvshows_size_seasons,
):
    """Test the library sensors.

    Exercises disabled-by-default creation, per-library counts and
    "last added" attributes, a transient request failure and recovery,
    and library deletion for the TV shows / movies / music sensors.
    """
    # Register canned library-size responses for each Plex library section.
    requests_mock.get(
        "/library/sections/1/all?includeCollections=0",
        text=library_movies_size,
    )
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=2",
        text=library_tvshows_size,
    )
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=3",
        text=library_tvshows_size_seasons,
    )
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=4",
        text=library_tvshows_size_episodes,
    )
    requests_mock.get(
        "/library/sections/3/all?includeCollections=0",
        text=library_music_size,
    )
    mock_plex_server = await setup_plex_server()
    await wait_for_debouncer(hass)
    activity_sensor = hass.states.get("sensor.plex_plex_server_1")
    assert activity_sensor.state == "1"
    # Ensure sensor is created as disabled
    assert hass.states.get("sensor.plex_server_1_library_tv_shows") is None
    # Enable sensor and validate values
    entity_registry = er.async_get(hass)
    entity_registry.async_update_entity(
        entity_id="sensor.plex_server_1_library_tv_shows", disabled_by=None
    )
    await hass.async_block_till_done()
    # Advance time past the reload delay so the re-enabled entity is set up.
    async_fire_time_changed(
        hass,
        dt.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
    )
    media = [MockPlexTVEpisode()]
    with patch("plexapi.library.LibrarySection.recentlyAdded", return_value=media):
        await hass.async_block_till_done()
    library_tv_sensor = hass.states.get("sensor.plex_server_1_library_tv_shows")
    assert library_tv_sensor.state == "10"
    assert library_tv_sensor.attributes["seasons"] == 1
    assert library_tv_sensor.attributes["shows"] == 1
    assert (
        library_tv_sensor.attributes["last_added_item"]
        == "TV Show - S01E05 - Episode 5"
    )
    assert library_tv_sensor.attributes["last_added_timestamp"] == str(TIMESTAMP)
    # Handle `requests` exception
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=2",
        exc=requests.exceptions.ReadTimeout,
    )
    trigger_plex_update(
        mock_websocket, msgtype="status", payload=LIBRARY_UPDATE_PAYLOAD
    )
    await hass.async_block_till_done()
    # A failed refresh marks the sensor unavailable and logs the error.
    library_tv_sensor = hass.states.get("sensor.plex_server_1_library_tv_shows")
    assert library_tv_sensor.state == STATE_UNAVAILABLE
    assert "Could not update library sensor" in caplog.text
    # Ensure sensor updates properly when it recovers
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=2",
        text=library_tvshows_size,
    )
    trigger_plex_update(
        mock_websocket, msgtype="status", payload=LIBRARY_UPDATE_PAYLOAD
    )
    with patch("plexapi.library.LibrarySection.recentlyAdded", return_value=media):
        await hass.async_block_till_done()
    library_tv_sensor = hass.states.get("sensor.plex_server_1_library_tv_shows")
    assert library_tv_sensor.state == "10"
    # Handle library deletion
    requests_mock.get(
        "/library/sections/2/all?includeCollections=0&type=2",
        status_code=HTTPStatus.NOT_FOUND,
    )
    trigger_plex_update(
        mock_websocket, msgtype="status", payload=LIBRARY_UPDATE_PAYLOAD
    )
    await hass.async_block_till_done()
    library_tv_sensor = hass.states.get("sensor.plex_server_1_library_tv_shows")
    assert library_tv_sensor.state == STATE_UNAVAILABLE
    # Test movie library sensor
    entity_registry.async_update_entity(
        entity_id="sensor.plex_server_1_library_tv_shows", disabled_by="user"
    )
    entity_registry.async_update_entity(
        entity_id="sensor.plex_server_1_library_movies", disabled_by=None
    )
    await hass.async_block_till_done()
    async_fire_time_changed(
        hass,
        dt.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
    )
    media = [MockPlexMovie()]
    with patch("plexapi.library.LibrarySection.recentlyAdded", return_value=media):
        await hass.async_block_till_done()
    library_movies_sensor = hass.states.get("sensor.plex_server_1_library_movies")
    assert library_movies_sensor.state == "1"
    assert library_movies_sensor.attributes["last_added_item"] == "Movie 1 (2021)"
    assert library_movies_sensor.attributes["last_added_timestamp"] == str(TIMESTAMP)
    # Test with clip
    media = [MockPlexClip()]
    with patch("plexapi.library.LibrarySection.recentlyAdded", return_value=media):
        async_dispatcher_send(
            hass, PLEX_UPDATE_LIBRARY_SIGNAL.format(mock_plex_server.machine_identifier)
        )
        async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=3))
        await hass.async_block_till_done()
    library_movies_sensor = hass.states.get("sensor.plex_server_1_library_movies")
    # Clips have no year, so only the title is shown.
    assert library_movies_sensor.attributes["last_added_item"] == "Clip 1"
    # Test music library sensor
    entity_registry.async_update_entity(
        entity_id="sensor.plex_server_1_library_movies", disabled_by="user"
    )
    entity_registry.async_update_entity(
        entity_id="sensor.plex_server_1_library_music", disabled_by=None
    )
    await hass.async_block_till_done()
    async_fire_time_changed(
        hass,
        dt.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
    )
    media = [MockPlexMusic()]
    with patch("plexapi.library.LibrarySection.recentlyAdded", return_value=media):
        await hass.async_block_till_done()
    library_music_sensor = hass.states.get("sensor.plex_server_1_library_music")
    assert library_music_sensor.state == "1"
    assert library_music_sensor.attributes["artists"] == 1
    assert library_music_sensor.attributes["albums"] == 1
    assert library_music_sensor.attributes["last_added_item"] == "Artist - Album (2021)"
    assert library_music_sensor.attributes["last_added_timestamp"] == str(TIMESTAMP)
|
|
# Python
import collections
import urlparse
# Django
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework import serializers
# Tower
from awx.conf import register, register_validate
from awx.sso import fields
from awx.main.validators import validate_private_key, validate_certificate
from awx.sso.validators import * # noqa
class SocialAuthCallbackURL(object):
    """Lazily build the absolute social-auth completion URL for a provider."""

    def __init__(self, provider):
        self.provider = provider

    def __call__(self):
        relative_path = reverse('social:complete', args=(self.provider,))
        return urlparse.urljoin(settings.TOWER_URL_BASE, relative_path)
# Shared help text for every social-auth organization map setting.
# FIX: removed the stray "'" that preceded the line continuation and leaked
# into the rendered help text ("...Tower documentation.'").
SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _('''\
Mapping to organization admins/users from social auth accounts. This setting
controls which users are placed into which Tower organizations based on their
username and email address. Configuration details are available in the Ansible
Tower documentation.\
''')
# FIXME: /regex/gim (flags)
# Example value shown in the UI for the social-auth organization map.
SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER = collections.OrderedDict(
    (org_name, collections.OrderedDict(entries))
    for org_name, entries in [
        ('Default', [
            ('users', True),
        ]),
        ('Test Org', [
            ('admins', ['admin@example.com']),
            ('users', True),
        ]),
        ('Test Org 2', [
            ('admins', ['admin@example.com', r'/^tower-[^@]+*?@.*$/']),
            ('remove_admins', True),
            ('users', r'/^[^@].*?@example\.com$/i'),
            ('remove_users', True),
        ]),
    ]
)
# Shared help text for every social-auth team map setting.
SOCIAL_AUTH_TEAM_MAP_HELP_TEXT = _('''\
Mapping of team members (users) from social auth accounts. Configuration
details are available in Tower documentation.\
''')
# Example value shown in the UI for the social-auth team map.
SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER = collections.OrderedDict(
    (team_name, collections.OrderedDict(entries))
    for team_name, entries in [
        ('My Team', [
            ('organization', 'Test Org'),
            ('users', [r'/^[^@]+?@test\.example\.com$/']),
            ('remove', True),
        ]),
        ('Other Team', [
            ('organization', 'Test Org 2'),
            ('users', r'/^[^@]+?@test2\.example\.com$/i'),
            ('remove', False),
        ]),
    ]
)
###############################################################################
# AUTHENTICATION BACKENDS DYNAMIC SETTING
###############################################################################
# Read-only, computed list of enabled auth backends.
register(
    'AUTHENTICATION_BACKENDS',
    field_class=fields.AuthenticationBackendsField,
    label=_('Authentication Backends'),
    help_text=_('List of authentication backends that are enabled based on '
                'license features and other authentication settings.'),
    read_only=True,
    depends_on=fields.AuthenticationBackendsField.get_all_required_settings(),
    category=_('Authentication'),
    category_slug='authentication',
)
# Global (provider-independent) social-auth organization mapping.
register(
    'SOCIAL_AUTH_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('Social Auth Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('Authentication'),
    category_slug='authentication',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
# Global (provider-independent) social-auth team mapping.
register(
    'SOCIAL_AUTH_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('Social Auth Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('Authentication'),
    category_slug='authentication',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
# Controls which user fields social auth may populate; [] disables new accounts.
register(
    'SOCIAL_AUTH_USER_FIELDS',
    field_class=fields.StringListField,
    allow_null=True,
    default=None,
    label=_('Social Auth User Fields'),
    help_text=_('When set to an empty list `[]`, this setting prevents new user '
                'accounts from being created. Only users who have previously '
                'logged in using social auth or have a user account with a '
                'matching email address will be able to login.'),
    category=_('Authentication'),
    category_slug='authentication',
    placeholder=['username', 'email'],
)
###############################################################################
# LDAP AUTHENTICATION SETTINGS
###############################################################################
# Empty URI disables LDAP authentication entirely.
register(
    'AUTH_LDAP_SERVER_URI',
    field_class=fields.LDAPServerURIField,
    allow_blank=True,
    default='',
    label=_('LDAP Server URI'),
    help_text=_('URI to connect to LDAP server, such as "ldap://ldap.example.com:389" '
                '(non-SSL) or "ldaps://ldap.example.com:636" (SSL). Multiple LDAP '
                'servers may be specified by separating with spaces or commas. LDAP '
                'authentication is disabled if this parameter is empty.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder='ldaps://ldap.example.com:636',
    feature_required='ldap',
)
# Service account DN used for all LDAP search queries.
register(
    'AUTH_LDAP_BIND_DN',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    validators=[validate_ldap_bind_dn],
    label=_('LDAP Bind DN'),
    help_text=_('DN (Distinguished Name) of user to bind for all search queries. This'
                ' is the system user account we will use to login to query LDAP for other'
                ' user information. Refer to the Ansible Tower documentation for example syntax.'),
    category=_('LDAP'),
    category_slug='ldap',
    feature_required='ldap',
)
# Stored encrypted at rest (encrypted=True).
register(
    'AUTH_LDAP_BIND_PASSWORD',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('LDAP Bind Password'),
    help_text=_('Password used to bind LDAP user account.'),
    category=_('LDAP'),
    category_slug='ldap',
    feature_required='ldap',
    encrypted=True,
)
register(
    'AUTH_LDAP_START_TLS',
    field_class=fields.BooleanField,
    default=False,
    label=_('LDAP Start TLS'),
    help_text=_('Whether to enable TLS when the LDAP connection is not using SSL.'),
    category=_('LDAP'),
    category_slug='ldap',
    feature_required='ldap',
)
# Default disables referrals and sets a 30s network timeout.
register(
    'AUTH_LDAP_CONNECTION_OPTIONS',
    field_class=fields.LDAPConnectionOptionsField,
    default={'OPT_REFERRALS': 0, 'OPT_NETWORK_TIMEOUT': 30},
    label=_('LDAP Connection Options'),
    help_text=_('Additional options to set for the LDAP connection. LDAP '
                'referrals are disabled by default (to prevent certain LDAP '
                'queries from hanging with AD). Option names should be strings '
                '(e.g. "OPT_REFERRALS"). Refer to '
                'https://www.python-ldap.org/doc/html/ldap.html#options for '
                'possible options and values that can be set.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=collections.OrderedDict([
        ('OPT_REFERRALS', 0),
        ('OPT_NETWORK_TIMEOUT', 30)
    ]),
    feature_required='ldap',
)
# Search query (base DN, scope, filter) used to locate users.
register(
    'AUTH_LDAP_USER_SEARCH',
    field_class=fields.LDAPSearchUnionField,
    default=[],
    label=_('LDAP User Search'),
    help_text=_('LDAP search query to find users. Any user that matches the given '
                'pattern will be able to login to Tower. The user should also be '
                'mapped into a Tower organization (as defined in the '
                'AUTH_LDAP_ORGANIZATION_MAP setting). If multiple search queries '
                'need to be supported use of "LDAPUnion" is possible. See '
                'Tower documentation for details.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=(
        'OU=Users,DC=example,DC=com',
        'SCOPE_SUBTREE',
        '(sAMAccountName=%(user)s)',
    ),
    feature_required='ldap',
)
# When set, takes precedence over AUTH_LDAP_USER_SEARCH.
register(
    'AUTH_LDAP_USER_DN_TEMPLATE',
    field_class=fields.LDAPDNWithUserField,
    allow_blank=True,
    allow_null=True,
    default=None,
    label=_('LDAP User DN Template'),
    help_text=_('Alternative to user search, if user DNs are all of the same '
                'format. This approach is more efficient for user lookups than '
                'searching if it is usable in your organizational environment. If '
                'this setting has a value it will be used instead of '
                'AUTH_LDAP_USER_SEARCH.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder='uid=%(user)s,OU=Users,DC=example,DC=com',
    feature_required='ldap',
)
# Maps LDAP attributes onto Tower user fields; default matches ActiveDirectory.
# FIX: corrected "additonal" -> "additional" in the user-visible help text.
register(
    'AUTH_LDAP_USER_ATTR_MAP',
    field_class=fields.LDAPUserAttrMapField,
    default={},
    label=_('LDAP User Attribute Map'),
    help_text=_('Mapping of LDAP user schema to Tower API user attributes. The default'
                ' setting is valid for ActiveDirectory but users with other LDAP'
                ' configurations may need to change the values. Refer to the Ansible'
                ' Tower documentation for additional details.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=collections.OrderedDict([
        ('first_name', 'givenName'),
        ('last_name', 'sn'),
        ('email', 'mail'),
    ]),
    feature_required='ldap',
)
# Search query used to locate groups for organization/team mapping.
register(
    'AUTH_LDAP_GROUP_SEARCH',
    field_class=fields.LDAPSearchField,
    default=[],
    label=_('LDAP Group Search'),
    help_text=_('Users are mapped to organizations based on their membership in LDAP'
                ' groups. This setting defines the LDAP search query to find groups. '
                'Unlike the user search, group search does not support LDAPSearchUnion.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=(
        'DC=example,DC=com',
        'SCOPE_SUBTREE',
        '(objectClass=group)',
    ),
    feature_required='ldap',
)
register(
    'AUTH_LDAP_GROUP_TYPE',
    field_class=fields.LDAPGroupTypeField,
    label=_('LDAP Group Type'),
    help_text=_('The group type may need to be changed based on the type of the '
                'LDAP server. Values are listed at: '
                'http://pythonhosted.org/django-auth-ldap/groups.html#types-of-groups'),
    category=_('LDAP'),
    category_slug='ldap',
    feature_required='ldap',
    default='MemberDNGroupType',
)
# Optional allow-list group: membership required for login when set.
register(
    'AUTH_LDAP_REQUIRE_GROUP',
    field_class=fields.LDAPDNField,
    allow_blank=True,
    allow_null=True,
    default=None,
    label=_('LDAP Require Group'),
    help_text=_('Group DN required to login. If specified, user must be a member '
                'of this group to login via LDAP. If not set, everyone in LDAP '
                'that matches the user search will be able to login via Tower. '
                'Only one require group is supported.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder='CN=Tower Users,OU=Users,DC=example,DC=com',
    feature_required='ldap',
)
# Optional deny-list group: membership blocks login when set.
register(
    'AUTH_LDAP_DENY_GROUP',
    field_class=fields.LDAPDNField,
    allow_blank=True,
    allow_null=True,
    default=None,
    label=_('LDAP Deny Group'),
    help_text=_('Group DN denied from login. If specified, user will not be '
                'allowed to login if a member of this group. Only one deny group '
                'is supported.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder='CN=Disabled Users,OU=Users,DC=example,DC=com',
    feature_required='ldap',
)
# Maps group membership to user flags (superuser / system auditor).
register(
    'AUTH_LDAP_USER_FLAGS_BY_GROUP',
    field_class=fields.LDAPUserFlagsField,
    default={},
    label=_('LDAP User Flags By Group'),
    help_text=_('Retrieve users from a given group. At this time, superuser and system'
                ' auditors are the only groups supported. Refer to the Ansible Tower'
                ' documentation for more detail.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=collections.OrderedDict([
        ('is_superuser', 'CN=Domain Admins,CN=Users,DC=example,DC=com'),
    ]),
    feature_required='ldap',
)
# LDAP-group-driven organization membership mapping.
register(
    'AUTH_LDAP_ORGANIZATION_MAP',
    field_class=fields.LDAPOrganizationMapField,
    default={},
    label=_('LDAP Organization Map'),
    help_text=_('Mapping between organization admins/users and LDAP groups. This '
                'controls which users are placed into which Tower organizations '
                'relative to their LDAP group memberships. Configuration details '
                'are available in the Ansible Tower documentation.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=collections.OrderedDict([
        ('Test Org', collections.OrderedDict([
            ('admins', 'CN=Domain Admins,CN=Users,DC=example,DC=com'),
            ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']),
            ('remove_users', True),
            ('remove_admins', True),
        ])),
        ('Test Org 2', collections.OrderedDict([
            ('admins', 'CN=Administrators,CN=Builtin,DC=example,DC=com'),
            ('users', True),
            ('remove_users', True),
            ('remove_admins', True),
        ])),
    ]),
    feature_required='ldap',
)
# LDAP-group-driven team membership mapping.
register(
    'AUTH_LDAP_TEAM_MAP',
    field_class=fields.LDAPTeamMapField,
    default={},
    label=_('LDAP Team Map'),
    help_text=_('Mapping between team members (users) and LDAP groups. Configuration'
                ' details are available in the Ansible Tower documentation.'),
    category=_('LDAP'),
    category_slug='ldap',
    placeholder=collections.OrderedDict([
        ('My Team', collections.OrderedDict([
            ('organization', 'Test Org'),
            ('users', ['CN=Domain Users,CN=Users,DC=example,DC=com']),
            ('remove', True),
        ])),
        ('Other Team', collections.OrderedDict([
            ('organization', 'Test Org 2'),
            ('users', 'CN=Other Users,CN=Users,DC=example,DC=com'),
            ('remove', False),
        ])),
    ]),
    feature_required='ldap',
)
###############################################################################
# RADIUS AUTHENTICATION SETTINGS
###############################################################################
# Empty server disables RADIUS authentication.
register(
    'RADIUS_SERVER',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('RADIUS Server'),
    help_text=_('Hostname/IP of RADIUS server. RADIUS authentication is '
                'disabled if this setting is empty.'),
    category=_('RADIUS'),
    category_slug='radius',
    placeholder='radius.example.com',
    feature_required='enterprise_auth',
)
register(
    'RADIUS_PORT',
    field_class=fields.IntegerField,
    min_value=1,
    max_value=65535,
    default=1812,
    label=_('RADIUS Port'),
    help_text=_('Port of RADIUS server.'),
    category=_('RADIUS'),
    category_slug='radius',
    feature_required='enterprise_auth',
)
# Stored encrypted at rest (encrypted=True).
register(
    'RADIUS_SECRET',
    field_class=fields.RADIUSSecretField,
    allow_blank=True,
    default='',
    label=_('RADIUS Secret'),
    help_text=_('Shared secret for authenticating to RADIUS server.'),
    category=_('RADIUS'),
    category_slug='radius',
    feature_required='enterprise_auth',
    encrypted=True,
)
###############################################################################
# TACACSPLUS AUTHENTICATION SETTINGS
###############################################################################
register(
    'TACACSPLUS_HOST',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('TACACS+ Server'),
    help_text=_('Hostname of TACACS+ server.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
    feature_required='enterprise_auth',
)
register(
    'TACACSPLUS_PORT',
    field_class=fields.IntegerField,
    min_value=1,
    max_value=65535,
    default=49,
    label=_('TACACS+ Port'),
    help_text=_('Port number of TACACS+ server.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
    feature_required='enterprise_auth',
)
# Secret is validated to be ASCII-only and stored encrypted.
register(
    'TACACSPLUS_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    validators=[validate_tacacsplus_disallow_nonascii],
    label=_('TACACS+ Secret'),
    help_text=_('Shared secret for authenticating to TACACS+ server.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
    feature_required='enterprise_auth',
    encrypted=True,
)
# 0 disables the session timeout.
register(
    'TACACSPLUS_SESSION_TIMEOUT',
    field_class=fields.IntegerField,
    min_value=0,
    default=5,
    label=_('TACACS+ Auth Session Timeout'),
    help_text=_('TACACS+ session timeout value in seconds, 0 disables timeout.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
    feature_required='enterprise_auth',
)
register(
    'TACACSPLUS_AUTH_PROTOCOL',
    field_class=fields.ChoiceField,
    choices=['ascii', 'pap'],
    default='ascii',
    label=_('TACACS+ Authentication Protocol'),
    help_text=_('Choose the authentication protocol used by TACACS+ client.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
    feature_required='enterprise_auth',
)
###############################################################################
# GOOGLE OAUTH2 AUTHENTICATION SETTINGS
###############################################################################
# Read-only callback URL, recomputed whenever TOWER_URL_BASE changes.
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('google-oauth2'),
    label=_('Google OAuth2 Callback URL'),
    help_text=_('Provide this URL as the callback URL for your application as part '
                'of your registration process. Refer to the Ansible Tower '
                'documentation for more detail.'),
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    depends_on=['TOWER_URL_BASE'],
)
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('Google OAuth2 Key'),
    help_text=_('The OAuth2 key from your web application.'),
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder='528620852399-gm2dt4hrl2tsj67fqamk09k1e0ad6gd8.apps.googleusercontent.com',
)
# Stored encrypted at rest (encrypted=True).
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('Google OAuth2 Secret'),
    help_text=_('The OAuth2 secret from your web application.'),
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder='q2fMVCmEregbg-drvebPp8OW',
    encrypted=True,
)
# Empty list means no domain restriction.
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS',
    field_class=fields.StringListField,
    default=[],
    label=_('Google OAuth2 Whitelisted Domains'),
    help_text=_('Update this setting to restrict the domains who are allowed to '
                'login using Google OAuth2.'),
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder=['example.com'],
)
# Extra provider arguments, e.g. {'hd': 'example.com'} to pin a hosted domain.
# FIX: corrected "multple" -> "multiple" in the user-visible help text.
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_AUTH_EXTRA_ARGUMENTS',
    field_class=fields.DictField,
    default={},
    label=_('Google OAuth2 Extra Arguments'),
    help_text=_('Extra arguments for Google OAuth2 login. You can restrict it to'
                ' only allow a single domain to authenticate, even if the user is'
                ' logged in with multiple Google accounts. Refer to the Ansible Tower'
                ' documentation for more detail.'),
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder={'hd': 'example.com'},
)
# Per-provider organization map; shares the common help text/placeholder.
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('Google OAuth2 Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
# Per-provider team map; shares the common help text/placeholder.
register(
    'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('Google OAuth2 Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('Google OAuth2'),
    category_slug='google-oauth2',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
###############################################################################
# GITHUB OAUTH2 AUTHENTICATION SETTINGS
###############################################################################
# Read-only callback URL, recomputed whenever TOWER_URL_BASE changes.
register(
    'SOCIAL_AUTH_GITHUB_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('github'),
    label=_('GitHub OAuth2 Callback URL'),
    help_text=_('Provide this URL as the callback URL for your application as part '
                'of your registration process. Refer to the Ansible Tower '
                'documentation for more detail.'),
    category=_('GitHub OAuth2'),
    category_slug='github',
    depends_on=['TOWER_URL_BASE'],
)
register(
    'SOCIAL_AUTH_GITHUB_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub OAuth2 Key'),
    help_text=_('The OAuth2 key (Client ID) from your GitHub developer application.'),
    category=_('GitHub OAuth2'),
    category_slug='github',
)
# Stored encrypted at rest (encrypted=True).
register(
    'SOCIAL_AUTH_GITHUB_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub OAuth2 Secret'),
    help_text=_('The OAuth2 secret (Client Secret) from your GitHub developer application.'),
    category=_('GitHub OAuth2'),
    category_slug='github',
    encrypted=True,
)
register(
    'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('GitHub OAuth2 Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('GitHub OAuth2'),
    category_slug='github',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
register(
    'SOCIAL_AUTH_GITHUB_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('GitHub OAuth2 Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('GitHub OAuth2'),
    category_slug='github',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
###############################################################################
# GITHUB ORG OAUTH2 AUTHENTICATION SETTINGS
###############################################################################
register(
    'SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('github-org'),
    label=_('GitHub Organization OAuth2 Callback URL'),
    help_text=_('Provide this URL as the callback URL for your application as part '
                'of your registration process. Refer to the Ansible Tower '
                'documentation for more detail.'),
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
    depends_on=['TOWER_URL_BASE'],
)
register(
    'SOCIAL_AUTH_GITHUB_ORG_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Organization OAuth2 Key'),
    help_text=_('The OAuth2 key (Client ID) from your GitHub organization application.'),
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
)
register(
    'SOCIAL_AUTH_GITHUB_ORG_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Organization OAuth2 Secret'),
    help_text=_('The OAuth2 secret (Client Secret) from your GitHub organization application.'),
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
    encrypted=True,
)
# Restricts login to members of the named GitHub organization.
register(
    'SOCIAL_AUTH_GITHUB_ORG_NAME',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Organization Name'),
    help_text=_('The name of your GitHub organization, as used in your '
                'organization\'s URL: https://github.com/<yourorg>/.'),
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
)
register(
    'SOCIAL_AUTH_GITHUB_ORG_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('GitHub Organization OAuth2 Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
register(
    'SOCIAL_AUTH_GITHUB_ORG_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('GitHub Organization OAuth2 Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('GitHub Organization OAuth2'),
    category_slug='github-org',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
###############################################################################
# GITHUB TEAM OAUTH2 AUTHENTICATION SETTINGS
###############################################################################
register(
    'SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('github-team'),
    label=_('GitHub Team OAuth2 Callback URL'),
    help_text=_('Create an organization-owned application at '
                'https://github.com/organizations/<yourorg>/settings/applications '
                'and obtain an OAuth2 key (Client ID) and secret (Client Secret). '
                'Provide this URL as the callback URL for your application.'),
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
    depends_on=['TOWER_URL_BASE'],
)
register(
    'SOCIAL_AUTH_GITHUB_TEAM_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Team OAuth2 Key'),
    help_text=_('The OAuth2 key (Client ID) from your GitHub organization application.'),
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
)
register(
    'SOCIAL_AUTH_GITHUB_TEAM_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Team OAuth2 Secret'),
    help_text=_('The OAuth2 secret (Client Secret) from your GitHub organization application.'),
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
    encrypted=True,
)
# Restricts login to members of the numeric GitHub team ID.
register(
    'SOCIAL_AUTH_GITHUB_TEAM_ID',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('GitHub Team ID'),
    help_text=_('Find the numeric team ID using the Github API: '
                'http://fabian-kostadinov.github.io/2015/01/16/how-to-find-a-github-team-id/.'),
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
)
register(
    'SOCIAL_AUTH_GITHUB_TEAM_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('GitHub Team OAuth2 Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
register(
    'SOCIAL_AUTH_GITHUB_TEAM_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('GitHub Team OAuth2 Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('GitHub Team OAuth2'),
    category_slug='github-team',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
###############################################################################
# MICROSOFT AZURE ACTIVE DIRECTORY SETTINGS
###############################################################################
register(
    'SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('azuread-oauth2'),
    label=_('Azure AD OAuth2 Callback URL'),
    help_text=_('Provide this URL as the callback URL for your application as part'
                ' of your registration process. Refer to the Ansible Tower'
                ' documentation for more detail. '),
    category=_('Azure AD OAuth2'),
    category_slug='azuread-oauth2',
    depends_on=['TOWER_URL_BASE'],
)
register(
    'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('Azure AD OAuth2 Key'),
    help_text=_('The OAuth2 key (Client ID) from your Azure AD application.'),
    category=_('Azure AD OAuth2'),
    category_slug='azuread-oauth2',
)
# Stored encrypted at rest (encrypted=True).
register(
    'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    label=_('Azure AD OAuth2 Secret'),
    help_text=_('The OAuth2 secret (Client Secret) from your Azure AD application.'),
    category=_('Azure AD OAuth2'),
    category_slug='azuread-oauth2',
    encrypted=True,
)
register(
    'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('Azure AD OAuth2 Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('Azure AD OAuth2'),
    category_slug='azuread-oauth2',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
)
register(
    'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('Azure AD OAuth2 Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('Azure AD OAuth2'),
    category_slug='azuread-oauth2',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
)
###############################################################################
# SAML AUTHENTICATION SETTINGS
###############################################################################
def get_saml_metadata_url():
    """Return the absolute URL of the SAML SP metadata view.

    Resolves the ``sso:saml_metadata`` view and joins it onto the
    configured Tower base URL.
    """
    metadata_path = reverse('sso:saml_metadata')
    return urlparse.urljoin(settings.TOWER_URL_BASE, metadata_path)
def get_saml_entity_id():
    """Return the SAML SP entity ID, which is just the Tower base URL."""
    entity_id = settings.TOWER_URL_BASE
    return entity_id
# SAML backend settings.  Tower acts as a SAML service provider (SP); the
# ACS/metadata URLs and entity ID are derived from TOWER_URL_BASE, while
# the SP keypair, contacts, and per-IdP configuration must be supplied by
# the admin.  All SAML settings require the enterprise_auth feature.
register(
    'SOCIAL_AUTH_SAML_CALLBACK_URL',
    field_class=fields.CharField,
    read_only=True,
    default=SocialAuthCallbackURL('saml'),
    label=_('SAML Assertion Consumer Service (ACS) URL'),
    help_text=_('Register Tower as a service provider (SP) with each identity '
                'provider (IdP) you have configured. Provide your SP Entity ID '
                'and this ACS URL for your application.'),
    category=_('SAML'),
    category_slug='saml',
    depends_on=['TOWER_URL_BASE'],
    feature_required='enterprise_auth',
)
# Read-only URL where the IdP can download Tower's SP metadata XML.
register(
    'SOCIAL_AUTH_SAML_METADATA_URL',
    field_class=fields.CharField,
    read_only=True,
    default=get_saml_metadata_url,
    label=_('SAML Service Provider Metadata URL'),
    help_text=_('If your identity provider (IdP) allows uploading an XML '
                'metadata file, you can download one from this URL.'),
    category=_('SAML'),
    category_slug='saml',
    feature_required='enterprise_auth',
)
# SP entity ID; defaults to the Tower base URL via get_saml_entity_id.
register(
    'SOCIAL_AUTH_SAML_SP_ENTITY_ID',
    field_class=fields.CharField,
    allow_blank=True,
    default=get_saml_entity_id,
    label=_('SAML Service Provider Entity ID'),
    help_text=_('The application-defined unique identifier used as the '
                'audience of the SAML service provider (SP) configuration. '
                'This is usually the URL for Tower.'),
    category=_('SAML'),
    category_slug='saml',
    feature_required='enterprise_auth',
    depends_on=['TOWER_URL_BASE'],
)
# SP certificate half of the keypair; content is validated.
register(
    'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    validators=[validate_certificate],
    label=_('SAML Service Provider Public Certificate'),
    help_text=_('Create a keypair for Tower to use as a service provider (SP) '
                'and include the certificate content here.'),
    category=_('SAML'),
    category_slug='saml',
    feature_required='enterprise_auth',
)
# SP private key half of the keypair; validated and stored encrypted.
register(
    'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    default='',
    validators=[validate_private_key],
    label=_('SAML Service Provider Private Key'),
    help_text=_('Create a keypair for Tower to use as a service provider (SP) '
                'and include the private key content here.'),
    category=_('SAML'),
    category_slug='saml',
    feature_required='enterprise_auth',
    encrypted=True,
)
# Organization info advertised in the SP metadata (required).
register(
    'SOCIAL_AUTH_SAML_ORG_INFO',
    field_class=fields.SAMLOrgInfoField,
    required=True,
    label=_('SAML Service Provider Organization Info'),
    help_text=_('Provide the URL, display name, and the name of your app. Refer to'
                ' the Ansible Tower documentation for example syntax.'),
    category=_('SAML'),
    category_slug='saml',
    placeholder=collections.OrderedDict([
        ('en-US', collections.OrderedDict([
            ('name', 'example'),
            ('displayname', 'Example'),
            ('url', 'http://www.example.com'),
        ])),
    ]),
    feature_required='enterprise_auth',
)
# Technical contact advertised in the SP metadata (required).
register(
    'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
    field_class=fields.SAMLContactField,
    allow_blank=True,
    required=True,
    label=_('SAML Service Provider Technical Contact'),
    help_text=_('Provide the name and email address of the technical contact for'
                ' your service provider. Refer to the Ansible Tower documentation'
                ' for example syntax.'),
    category=_('SAML'),
    category_slug='saml',
    placeholder=collections.OrderedDict([
        ('givenName', 'Technical Contact'),
        ('emailAddress', 'techsup@example.com'),
    ]),
    feature_required='enterprise_auth',
)
# Support contact advertised in the SP metadata (required).
register(
    'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
    field_class=fields.SAMLContactField,
    allow_blank=True,
    required=True,
    label=_('SAML Service Provider Support Contact'),
    help_text=_('Provide the name and email address of the support contact for your'
                ' service provider. Refer to the Ansible Tower documentation for'
                ' example syntax.'),
    category=_('SAML'),
    category_slug='saml',
    placeholder=collections.OrderedDict([
        ('givenName', 'Support Contact'),
        ('emailAddress', 'support@example.com'),
    ]),
    feature_required='enterprise_auth',
)
# Per-IdP configuration (entity ID, SSO URL, cert, attribute overrides);
# multiple IdPs are supported, keyed by a short IdP name.
register(
    'SOCIAL_AUTH_SAML_ENABLED_IDPS',
    field_class=fields.SAMLEnabledIdPsField,
    default={},
    label=_('SAML Enabled Identity Providers'),
    help_text=_('Configure the Entity ID, SSO URL and certificate for each identity'
                ' provider (IdP) in use. Multiple SAML IdPs are supported. Some IdPs'
                ' may provide user data using attribute names that differ from the'
                ' default OIDs. Attribute names may be overridden for each IdP. Refer'
                ' to the Ansible documentation for additional details and syntax.'),
    category=_('SAML'),
    category_slug='saml',
    placeholder=collections.OrderedDict([
        ('Okta', collections.OrderedDict([
            ('entity_id', 'http://www.okta.com/HHniyLkaxk9e76wD0Thh'),
            ('url', 'https://dev-123456.oktapreview.com/app/ansibletower/HHniyLkaxk9e76wD0Thh/sso/saml'),
            ('x509cert', 'MIIDpDCCAoygAwIBAgIGAVVZ4rPzMA0GCSqGSIb3...'),
            ('attr_user_permanent_id', 'username'),
            ('attr_first_name', 'first_name'),
            ('attr_last_name', 'last_name'),
            ('attr_username', 'username'),
            ('attr_email', 'email'),
        ])),
        ('OneLogin', collections.OrderedDict([
            ('entity_id', 'https://app.onelogin.com/saml/metadata/123456'),
            ('url', 'https://example.onelogin.com/trust/saml2/http-post/sso/123456'),
            ('x509cert', 'MIIEJjCCAw6gAwIBAgIUfuSD54OPSBhndDHh3gZo...'),
            ('attr_user_permanent_id', 'name_id'),
            ('attr_first_name', 'User.FirstName'),
            ('attr_last_name', 'User.LastName'),
            ('attr_username', 'User.email'),
            ('attr_email', 'User.email'),
        ])),
    ]),
    feature_required='enterprise_auth',
)
# Mapping of SAML-authenticated users into Tower organizations.
register(
    'SOCIAL_AUTH_SAML_ORGANIZATION_MAP',
    field_class=fields.SocialOrganizationMapField,
    allow_null=True,
    default=None,
    label=_('SAML Organization Map'),
    help_text=SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT,
    category=_('SAML'),
    category_slug='saml',
    placeholder=SOCIAL_AUTH_ORGANIZATION_MAP_PLACEHOLDER,
    feature_required='enterprise_auth',
)
# Mapping of SAML-authenticated users into Tower teams.
register(
    'SOCIAL_AUTH_SAML_TEAM_MAP',
    field_class=fields.SocialTeamMapField,
    allow_null=True,
    default=None,
    label=_('SAML Team Map'),
    help_text=SOCIAL_AUTH_TEAM_MAP_HELP_TEXT,
    category=_('SAML'),
    category_slug='saml',
    placeholder=SOCIAL_AUTH_TEAM_MAP_PLACEHOLDER,
    feature_required='enterprise_auth',
)
def tacacs_validate(serializer, attrs):
    """Cross-field validation for TACACS+ settings.

    A TACACS+ host without a shared secret is a misconfiguration, so this
    rejects any effective combination (posted attrs overlaid on the stored
    instance values) where TACACSPLUS_HOST is set but TACACSPLUS_SECRET is
    empty.  Returns attrs unchanged when valid; raises ValidationError
    otherwise.
    """
    # Only applicable when editing an existing settings instance that
    # actually exposes the TACACS+ fields.
    if not serializer.instance or \
            not hasattr(serializer.instance, 'TACACSPLUS_HOST') or \
            not hasattr(serializer.instance, 'TACACSPLUS_SECRET'):
        return attrs
    # Posted values take precedence over stored instance values; dict.get
    # with the instance value as the default replaces the original
    # "check membership then index" pattern.
    host = attrs.get('TACACSPLUS_HOST', serializer.instance.TACACSPLUS_HOST)
    secret = attrs.get('TACACSPLUS_SECRET', serializer.instance.TACACSPLUS_SECRET)
    if host and not secret:
        # Same message the original produced via its single-element errors list.
        raise serializers.ValidationError(
            _('TACACSPLUS_SECRET is required when TACACSPLUS_HOST is provided.'))
    return attrs
register_validate('tacacsplus', tacacs_validate)
|
|
import glcommon
import glinit
import visualization
from ..math import vectorops,so3,se3
from ..robotsim import WidgetSet,RobotPoser,ObjectPoser,TransformPoser,PointPoser,WorldModel,RobotModelLink,RigidObjectModel,IKObjective
from ..model.subrobot import SubRobotModel
from ..model import collide
from OpenGL.GL import *
class VisualEditorBase(glcommon.GLWidgetPlugin):
    """Base class for visual resource editors.

    Subclasses override instructions(), display(), mousefunc(), and
    addDialogItems() to implement editing of a particular resource type.
    The edited resource is held in self.value.
    """
    def __init__(self,name,value,description,world):
        glcommon.GLWidgetPlugin.__init__(self)
        self.name = name
        self.value = value
        self.description = description
        self.world = world
    def instructions(self):
        """Returns a one-line usage string, or None if there is none."""
        return None
    def display(self):
        """Default display: draw the world (if any), then all widgets."""
        if self.world: self.world.drawGL()
        self.klamptwidgetmaster.drawGL(self.viewport())
        return True
    def addDialogItems(self,parent,ui='qt'):
        """Hook for subclasses to populate the editor dialog; no-op here."""
        return
    def display_screen(self):
        """Screen-space overlay; text drawing is currently disabled below."""
        glDisable(GL_LIGHTING)
        glColor3f(0,0,0)
        h = 30
        """
        if self.instructions() != None:
            glRasterPos(20,h)
            gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_12,"Instructions: "+self.instructions())
            h += 20
        if self.description != None:
            glRasterPos(20,h)
            gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_12,"Description: "+self.description)
            h += 20
        glRasterPos(20,h)
        gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_12,"Press 'x' to exit without saving, 'q' to save+exit")
        """
        return True
class ConfigEditor(VisualEditorBase):
    """Edits a single robot configuration using a RobotPoser widget."""
    def __init__(self,name,value,description,world,robot=None):
        VisualEditorBase.__init__(self,name,value,description,world)
        if robot is None:
            robot = world.robot(0)
        robot.setConfig(value)
        self.robot = robot
        # A SubRobotModel poses its parent robot restricted to its own DOFs.
        if isinstance(robot,SubRobotModel):
            poser = RobotPoser(robot._robot)
            poser.setActiveDofs(robot._links)
        else:
            poser = RobotPoser(robot)
        self.robotposer = poser
        self.addWidget(poser)
    def instructions(self):
        return 'Right-click and drag on the robot links to pose the robot'
    def mousefunc(self,button,state,x,y):
        # Capture the posed configuration whenever the poser has focus.
        if self.robotposer.hasFocus():
            self.value = self.robotposer.get()
        return VisualEditorBase.mousefunc(self,button,state,x,y)
    def display(self):
        # The poser widget draws the edited robot itself, so here we draw
        # only the rest of the scene: terrains, rigid objects, and any
        # robots other than the edited one.
        if self.world:
            for t in xrange(self.world.numTerrains()):
                self.world.terrain(t).drawGL()
            for o in xrange(self.world.numRigidObjects()):
                self.world.rigidObject(o).drawGL()
            for r in xrange(self.world.numRobots()):
                if r != self.robot.index:
                    self.world.robot(r).drawGL()
        # This draws the robot at the posed configuration.
        self.klamptwidgetmaster.drawGL(self.viewport())
        return False
class ConfigsEditor(VisualEditorBase):
def __init__(self,name,value,description,world,robot=None):
VisualEditorBase.__init__(self,name,value,description,world)
if robot is None:
robot = world.robot(0)
if len(value) > 0:
robot.setConfig(value[0])
self.robot = robot
self.editingIndex = len(value)-1
self.robotposer = RobotPoser(robot)
self.addWidget(self.robotposer)
def instructions(self):
return 'Right-click and drag on the robot links to pose the robot.\nKeyboard i: insert, d: delete, < to select previous, > to select next'
def addDialogItems(self,parent,ui='qt'):
self.indexSpinBox = QSpinBox()
self.indexSpinBox.setRange(0,len(self.value)-1)
layout = QHBoxLayout(parent)
label = QLabel("Index")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
layout.addWidget(label)
layout.addWidget(self.indexSpinBox)
self.insertButton = QPushButton("Insert")
self.deleteButton = QPushButton("Delete")
layout.addWidget(self.insertButton)
layout.addWidget(self.deleteButton)
self.insertButton.clicked.connect(self.insert)
self.deleteButton.clicked.connect(self.delete)
self.indexSpinBox.valueChanged.connect(self.indexChanged)
def insert(self):
if self.editingIndex < 0:
self.value.append(self.robotposer.get())
self.editingIndex = len(self.value)-1
else:
self.value.insert(self.editingIndex+1,self.robotposer.get())
self.editingIndex += 1
if hasattr(self,'indexSpinBox'):
self.indexSpinBox.setRange(0,len(self.value)-1)
self.indexSpinBox.setValue(self.editingIndex)
self.refresh()
def delete(self):
if self.editingIndex >= 0:
del self.value[self.editingIndex]
if self.editingIndex >= len(self.value):
self.editingIndex = len(self.value)-1
if self.editingIndex >= 0:
self.robotposer.set(self.value[self.editingIndex])
print "Now has",len(self.value),"configs, editing index",self.editingIndex
if hasattr(self,'indexSpinBox'):
self.indexSpinBox.setRange(0,len(self.value)-1)
self.indexSpinBox.setValue(self.editingIndex)
self.refresh()
def indexChanged(self,index):
self.editingIndex = index
if index >= 0 and index < len(self.value):
self.robotposer.set(self.value[self.editingIndex])
self.refresh()
def mousefunc(self,button,state,x,y):
if self.editingIndex >= 0 and self.robotposer.hasFocus():
#mouse release
self.value[self.editingIndex] = self.robotposer.get()
return VisualEditorBase.mousefunc(self,button,state,x,y)
def keyboardfunc(self,c,x,y):
if c=='i':
self.insert()
return True
elif c=='d':
self.delete()
return True
elif c==',' or c=='<':
self.editingIndex -= 1
if self.editingIndex < 0:
self.editingIndex = min(len(self.durations)-1,0)
self.indexEditBox.setValue(self.editingIndex)
self.indexChanged(self.editingIndex)
return True
elif c=='.' or c=='>':
self.editingIndex += 1
self.editingIndex = min(len(self.durations)-1,self.editingIndex)
self.indexEditBox.setValue(self.editingIndex)
self.indexChanged(self.editingIndex)
return True
def display(self):
#Override display handler since the widget draws the robot
#the next few lines draw everything but the robot
if self.world != None:
for i in xrange(self.world.numTerrains()):
self.world.terrain(i).drawGL()
for i in xrange(self.world.numRigidObjects()):
self.world.rigidObject(i).drawGL()
for i in xrange(self.world.numRobots()):
if i != self.robot.index:
self.world.robot(i).drawGL()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
#draw most opaque first
order = []
if self.editingIndex < 0:
order = range(len(self.value))
else:
order = [self.editingIndex]
n = max(self.editingIndex,len(self.value)-self.editingIndex)
for i in range(1,n+1):
if self.editingIndex + i < len(self.value): order.append(self.editingIndex +i)
if self.editingIndex - i >= 0: order.append(self.editingIndex -i)
for i in order:
#draw transparent
opacity = pow(0.5,abs(i-self.editingIndex))
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(0.5,0.5,0.5,opacity)
if i == self.editingIndex:
#this line will draw the robot at the current editing config
self.klamptwidgetmaster.drawGL(self.viewport())
else:
self.robot.setConfig(self.value[i])
self.robot.drawGL()
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(0.5,0.5,0.5,1)
glDisable(GL_BLEND)
class TrajectoryEditor(VisualEditorBase):
def __init__(self,name,value,description,world,robot=None):
VisualEditorBase.__init__(self,name,value,description,world)
if robot is None:
robot = world.robot(0)
if len(value.milestones) > 0:
robot.setConfig(value.milestones[0])
self.robot = robot
self.editingIndex = len(value.milestones)-1
self.durations = []
if len(value.times) > 0:
self.durations.append(value.times[0])
for i in xrange(len(value.times)-1):
self.durations.append(value.times[i+1]-value.times[i])
self.animTrajectory = None
self.animTrajectoryTime = 0.0
self.animating = False
self.animSelectorValue = 0
self.lastAnimTrajectoryTime = None
self.robotposer = RobotPoser(robot)
self.addWidget(self.robotposer)
self.updateAnimTrajectory()
def instructions(self):
return 'Right-click and drag on the robot links to pose the robot.\nKeyboard i: insert, d: delete, < to select previous, > to select next'
def addDialogItems(self,parent,ui='qt'):
vlayout = QVBoxLayout(parent)
#adding and editing keyframes
self.indexSpinBox = QSpinBox()
self.indexSpinBox.setRange(0,len(self.durations)-1)
self.durationSpinBox = QDoubleSpinBox()
self.durationSpinBox.setRange(0,10.0)
self.durationSpinBox.setSingleStep(0.01)
self.insertButton = QPushButton("Insert")
self.deleteButton = QPushButton("Delete")
self.indexSpinBox.valueChanged.connect(self.indexChanged)
self.durationSpinBox.valueChanged.connect(self.durationChanged)
self.insertButton.clicked.connect(self.insert)
self.deleteButton.clicked.connect(self.delete)
layout = QHBoxLayout()
vlayout.addLayout(layout)
label = QLabel("Index")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
layout.addWidget(label)
layout.addWidget(self.indexSpinBox)
label = QLabel("Duration")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
layout.addWidget(label)
layout.addWidget(self.durationSpinBox)
layout.addWidget(self.insertButton)
layout.addWidget(self.deleteButton)
#playback
self.timeDriver = QSlider()
self.timeDriver.setOrientation(Qt.Horizontal)
self.timeDriver.setRange(0,1000)
self.timeDriver.valueChanged.connect(self.timeDriverChanged)
self.playButton = QPushButton("Play")
self.playButton.setCheckable(True)
self.playButton.toggled.connect(self.togglePlay)
layout = QHBoxLayout()
vlayout.addLayout(layout)
self.animSelector = QComboBox()
self.animSelector.addItem("Linear")
self.animSelector.addItem("Spline")
#self.animSelector.addItem("Retimed")
#self.animSelector.addItem("Retimed-spline")
self.animSelector.currentIndexChanged.connect(self.animSelectorChanged)
label = QLabel("Time")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
layout.addWidget(label)
layout.addWidget(self.timeDriver)
layout.addWidget(self.playButton)
label = QLabel("Interp.")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
layout.addWidget(label)
layout.addWidget(self.animSelector)
def insert(self):
if self.editingIndex < 0:
self.value.times.append(0.0)
self.value.milestones.append(self.robotposer.get())
self.editingIndex = len(self.durations)-1
else:
newdur = 1.0
if self.editingIndex+1 == len(self.durations):
#extrapolate from previous
if self.editingIndex > 0:
newdur = self.value.times[self.editingIndex] - self.value.times[self.editingIndex-1]
elif self.editingIndex == 0:
#shift everything else
if len(self.durations) > 1:
newdur = self.value.times[1]-self.value.times[0]
else:
#subdivide time between milesones
newdur = self.value.times[self.editingIndex]-self.value.times[self.editingIndex-1]
self.durations.insert(self.editingIndex+1,newdur)
self.value.milestones.insert(self.editingIndex+1,self.robotposer.get())
self.onDurationsChanged()
self.editingIndex += 1
if hasattr(self,'indexSpinBox'):
self.indexSpinBox.setRange(0,len(self.durations)-1)
self.indexSpinBox.setValue(self.editingIndex)
self.refresh()
def delete(self):
if self.editingIndex >= 0:
del self.durations[self.editingIndex]
del self.value.milestones[self.editingIndex]
if self.editingIndex >= len(self.durations):
self.editingIndex = len(self.durations)-1
if self.editingIndex >= 0:
self.robotposer.set(self.value.milestones[self.editingIndex])
self.onDurationsChanged()
print "Now has",len(self.durations),"configs, editing index",self.editingIndex
if hasattr(self,'indexSpinBox'):
self.indexSpinBox.setRange(0,len(self.durations)-1)
self.indexSpinBox.setValue(self.editingIndex)
if self.editingIndex >= 0:
self.durationSpinBox.setValue(self.durations[self.editingIndex])
self.refresh()
def indexChanged(self,index):
self.editingIndex = index
if index >= 0 and index < len(self.durations):
self.durationSpinBox.setValue(self.durations[self.editingIndex])
self.robotposer.set(self.value.milestones[self.editingIndex])
if not self.animating:
self.animTrajectoryTime = self.value.times[index]
self.timeDriver.setValue(int(1000*(self.animTrajectoryTime - self.value.times[0])/self.value.duration()))
self.refresh()
def durationChanged(self,value):
if self.editingIndex >= 0 and self.editingIndex < len(self.durations):
self.durations[self.editingIndex] = max(value,0.0)
self.onDurationsChanged()
self.refresh()
def timeDriverChanged(self,value):
u = value * 0.001
self.animTrajectoryTime = self.animTrajectory.times[0] + u*self.animTrajectory.duration()
self.refresh()
def animSelectorChanged(self,value):
self.animSelectorValue = value
self.updateAnimTrajectory()
self.refresh()
def togglePlay(self,value):
self.animating = value
self.refresh()
if value:
self.idlesleep(0)
else:
self.idlesleep(float('inf'))
def onDurationsChanged(self):
"""Update the trajectory times"""
if len(self.durations)==0:
self.value.times = []
else:
self.value.times = [self.durations[0]]
for i in range(1,len(self.durations)):
self.value.times.append(self.value.times[-1] + self.durations[i])
self.updateAnimTrajectory()
if not self.animating:
if hasattr(self,'timeDriver'):
self.timeDriver.setValue(int(1000*(self.animTrajectoryTime - self.value.times[0])/self.value.duration()))
def updateAnimTrajectory(self):
from ..model import trajectory
if self.animSelectorValue == 1:
traj = trajectory.HermiteTrajectory()
traj.makeSpline(self.value)
self.animTrajectory = traj.configTrajectory()
else:
#TODO: other selections
self.animTrajectory = self.value
def mousefunc(self,button,state,x,y):
if self.editingIndex >= 0 and self.robotposer.hasFocus():
self.value.milestones[self.editingIndex] = self.robotposer.get()
return VisualEditorBase.mousefunc(self,button,state,x,y)
def keyboardfunc(self,c,x,y):
if c=='i':
self.insert()
return True
elif c=='d':
self.delete()
return True
elif c==',' or c=='<':
self.editingIndex -= 1
if self.editingIndex < 0:
self.editingIndex = min(len(self.durations)-1,0)
if hasattr(self,'indexEditBox'):
self.indexEditBox.setValue(self.editingIndex)
self.indexChanged(self.editingIndex)
return True
elif c=='.' or c=='>':
self.editingIndex += 1
self.editingIndex = min(len(self.durations)-1,self.editingIndex)
if hasattr(self,'indexEditBox'):
self.indexEditBox.setValue(self.editingIndex)
self.indexChanged(self.editingIndex)
return True
def display(self):
#Override display handler since the widget draws the robot
#the next few lines draw everything but the robot
if self.world != None:
for i in xrange(self.world.numTerrains()):
self.world.terrain(i).drawGL()
for i in xrange(self.world.numRigidObjects()):
self.world.rigidObject(i).drawGL()
for i in xrange(self.world.numRobots()):
if i != self.robot.index:
self.world.robot(i).drawGL()
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA)
#draw most opaque first
order = []
if self.editingIndex < 0:
order = range(len(self.durations))
else:
order = [self.editingIndex]
n = max(self.editingIndex,len(self.durations)-self.editingIndex)
for i in range(1,n+1):
if self.editingIndex + i < len(self.durations): order.append(self.editingIndex +i)
if self.editingIndex - i >= 0: order.append(self.editingIndex -i)
for i in order:
#draw transparent
opacity = pow(0.5,abs(i-self.editingIndex))
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(0.5,0.5,0.5,opacity)
if i == self.editingIndex:
#this line will draw the robot at the current editing config
self.klamptwidgetmaster.drawGL(self.viewport())
else:
self.robot.setConfig(self.value.milestones[i])
self.robot.drawGL()
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(0.5,0.5,0.5,1)
#draw animation, if available
if self.animTrajectoryTime is not None:
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(1.0,1.0,0,0.5)
q = self.animTrajectory.eval(self.animTrajectoryTime,'loop')
self.robot.setConfig(q)
self.robot.drawGL()
for j in xrange(self.robot.numLinks()):
self.robot.link(j).appearance().setColor(0.5,0.5,0.5,1)
glDisable(GL_BLEND)
def idle(self):
import time
t = time.time()
if self.animating:
self.animTrajectoryTime += t - self.lastAnimTrajectoryTime
if self.animTrajectoryTime > self.value.times[-1]:
self.animTrajectoryTime -= self.value.duration()
self.timeDriver.setValue(int(1000*(self.animTrajectoryTime - self.value.times[0])/self.value.duration()))
self.refresh()
self.lastAnimTrajectoryTime = t
return False
class SelectionEditor(VisualEditorBase):
    """Edits a list of selected indices: robot links if a robot is given,
    otherwise world object IDs.  Items can be toggled by right-clicking in
    the GL view or via the dialog's list widget; selected items are drawn
    highlighted.
    """
    def __init__(self,name,value,description,world,robot=None):
        VisualEditorBase.__init__(self,name,value,description,world)
        self.robot = robot
        self.lastClicked = -1
        # saved/highlight appearances, keyed by link index or world ID
        self.oldAppearances = {}
        self.newAppearances = {}
    def instructions(self):
        return 'Right-click to toggle selection of robot links / objects in the world.\nKeyboard: < to deselect previous, > to select next'
    def addDialogItems(self,parent,ui='qt'):
        """Builds the clear/select-all buttons and the multi-select list."""
        layout = QHBoxLayout(parent)
        self.clearButton = QPushButton("Clear")
        self.selectAllButton = QPushButton("Select all")
        self.selectionList = QListWidget()
        self.selectionList.setSelectionMode(QAbstractItemView.MultiSelection)
        if self.robot != None:
            for i in xrange(self.robot.numLinks()):
                self.selectionList.addItem(self.robot.link(i).getName())
        elif self.world != None:
            for i in xrange(self.world.numIDs()):
                self.selectionList.addItem(self.world.getName(i))
        for i in self.value:
            self.selectionList.setCurrentItem(self.selectionList.item(i),QItemSelectionModel.Select)
        layout.addWidget(self.clearButton)
        layout.addWidget(self.selectAllButton)
        layout.addWidget(self.selectionList)
        self.clearButton.clicked.connect(self.clear)
        self.selectAllButton.clicked.connect(self.selectAll)
        self.selectionList.itemSelectionChanged.connect(self.selectionListChanged)
        # guards selectionListChanged against re-entrant updates while we
        # modify the list selection programmatically
        self.selectionListChangeFlag = False
    def clear(self):
        self.value = []
        self.selectionList.clearSelection()
        self.refresh()
    def selectAll(self):
        if self.robot == None:
            #select all ids in the world
            pass
        else:
            self.value = [l for l in range(self.robot.numLinks())]
            self.selectionListChangeFlag = True
            for i in self.value:
                self.selectionList.setCurrentItem(self.selectionList.item(i),QItemSelectionModel.Select)
            self.selectionListChangeFlag = False
        self.refresh()
    def click_world(self,x,y):
        """Helper: ray-casts the world at (x,y) and toggles the selection
        of the first object hit, if any."""
        #get the viewport ray
        (s,d) = self.click_ray(x,y)
        geoms = [self.world.geometry(i) for i in range(self.world.numIDs())]
        res = collide.ray_cast(geoms,s,d)
        if not res:
            return
        id,geom = res
        self.toggle_selection(id)
        self.lastClicked = id
        self.refresh()
    def click_robot(self,x,y):
        """Helper: ray-casts the robot's links at (x,y) and toggles the
        selection of the first link hit, if any."""
        #get the viewport ray
        (s,d) = self.click_ray(x,y)
        geoms = [self.robot.link(i).geometry() for i in range(self.robot.numLinks())]
        # refresh link transforms before ray casting
        self.robot.setConfig(self.robot.getConfig())
        res = collide.ray_cast(geoms,s,d)
        if not res:
            return
        id,geom = res
        self.toggle_selection(id)
        self.lastClicked = id
        self.refresh()
    def selectionListChanged(self):
        #if this editor changed the selection programmatically then don't
        #read it back from the list widget
        if self.selectionListChangeFlag: return
        self.value = []
        for item in self.selectionList.selectedItems():
            row = self.selectionList.row(item)
            self.value.append(row)
        self.refresh()
    def add_selection(self,id):
        self.selectionListChangeFlag = True
        if id not in self.value:
            self.selectionList.setCurrentItem(self.selectionList.item(id),QItemSelectionModel.Select)
            self.value.append(id)
        self.selectionListChangeFlag = False
    def remove_selection(self,id):
        # BUGFIX: the flag order was inverted (False before the update, True
        # after), which disabled the re-entrancy guard during the update and
        # then left it permanently stuck on, freezing selectionListChanged.
        self.selectionListChangeFlag = True
        if id in self.value:
            self.value.remove(id)
            self.selectionList.setCurrentItem(self.selectionList.item(id),QItemSelectionModel.Deselect)
        self.selectionListChangeFlag = False
    def toggle_selection(self,id):
        self.selectionListChangeFlag = True
        if id in self.value:
            self.value.remove(id)
            self.selectionList.setCurrentItem(self.selectionList.item(id),QItemSelectionModel.Deselect)
        else:
            self.selectionList.setCurrentItem(self.selectionList.item(id),QItemSelectionModel.Select)
            self.value.append(id)
        self.selectionListChangeFlag = False
    def mousefunc(self,button,state,x,y):
        #right-click press toggles the selection under the cursor
        if button==2 and state==0:
            if self.robot == None:
                self.click_world(x,y)
            else:
                self.click_robot(x,y)
            return True
        return VisualEditorBase.mousefunc(self,button,state,x,y)
    def keyboardfunc(self,c,x,y):
        if c==',' or c=='<':
            if self.lastClicked >= 0:
                self.remove_selection(self.lastClicked)
            if self.lastClicked >= 0:
                self.lastClicked -= 1
            self.refresh()
            return True
        elif c=='.' or c=='>':
            Nmax = (self.robot.numLinks() if self.robot else self.world.numIDs())
            if self.lastClicked < Nmax:
                self.lastClicked += 1
            if self.lastClicked < Nmax:
                self.add_selection(self.lastClicked)
            self.refresh()
            return True
    def display(self):
        #Override display handler to highlight selected links
        #save old appearance and set new appearance
        apps = {}
        if self.robot != None:
            for i in xrange(self.robot.numLinks()):
                apps[i] = self.robot.link(i).appearance()
        elif self.world != None:
            for i in xrange(self.world.numIDs()):
                apps[i] = self.world.appearance(i)
        changed = self.value[:]
        if self.lastClicked >= 0: changed.append(self.lastClicked)
        for i in changed:
            if i not in self.oldAppearances:
                self.oldAppearances[i] = apps[i].clone()
                self.newAppearances[i] = apps[i].clone()
            #last-clicked item is orange if selected, red if deselected;
            #other selected items are yellow
            if i == self.lastClicked:
                if i in self.value:
                    self.newAppearances[i].setColor(1,0.5,0)
                else:
                    self.newAppearances[i].setColor(1,0,0)
            else:
                self.newAppearances[i].setColor(1,1,0)
            apps[i].set(self.newAppearances[i])
        #draw
        self.world.drawGL()
        #restore old appearance
        for i in changed:
            apps[i].set(self.oldAppearances[i])
        glDisable(GL_BLEND)
class PointEditor(VisualEditorBase):
    """Edits a 3D point, optionally expressed in a reference frame."""
    def __init__(self,name,value,description,world,frame=None):
        VisualEditorBase.__init__(self,name,value,description,world)
        self.frame = se3.identity() if frame==None else frame
        self.pointposer = PointPoser()
        # The widget works in world coordinates; map the local value into
        # the world and align the axes with the reference frame's rotation.
        worldpt = se3.apply(self.frame,value)
        self.pointposer.set(worldpt)
        self.pointposer.setAxes(self.frame[0])
        self.addWidget(self.pointposer)
    def instructions(self):
        return 'Right-click and drag on the widget to pose the point'
    def mousefunc(self,button,state,x,y):
        if self.pointposer.hasFocus():
            # Map the posed world point back into the reference frame.
            frame_inv = se3.inv(self.frame)
            self.value = se3.apply(frame_inv,self.pointposer.get())
        return VisualEditorBase.mousefunc(self,button,state,x,y)
class RigidTransformEditor(VisualEditorBase):
    """Edits a rigid transform (an se3 (R,t) pair), optionally expressed
    relative to a reference frame.  Other objects may be attach()ed so
    they are dragged along rigidly with the edited transform.
    """
    def __init__(self,name,value,description,world,frame=None):
        VisualEditorBase.__init__(self,name,value,description,world)
        # frame is the reference frame in which value is expressed; the
        # poser widget itself operates in world coordinates
        self.frame = se3.identity() if frame==None else frame
        self.xformposer = TransformPoser()
        self.xformposer.set(*se3.mul(self.frame,value))
        self.xformposer.enableRotation(True)
        self.xformposer.enableTranslation(True)
        self.addWidget(self.xformposer)
        # objects that follow the transform, and their poses relative to
        # the posed transform (filled in lazily in mousefunc)
        self.attachedObjects = []
        self.attachedRelativePoses = []
        self.rotationEnabled = True
        self.translationEnabled = True
    def disableTranslation(self):
        # Restrict the widget to rotation-only editing.
        self.translationEnabled = False
        self.xformposer.enableTranslation(False)
    def disableRotation(self):
        # Restrict the widget to translation-only editing.
        self.rotationEnabled = False
        self.xformposer.enableRotation(False)
    def attach(self,object):
        """Attaches an object (anything with get/setTransform) so that it
        moves rigidly along with the edited transform."""
        assert hasattr(object,'setTransform'),"Can only attach objects with setTransform and getTransform methods"
        assert hasattr(object,'getTransform'),"Can only attach objects with setTransform and getTransform methods"
        self.attachedObjects.append(object)
    def instructions(self):
        if self.rotationEnabled and self.translationEnabled:
            return 'Right-click and drag on the widget to pose the transform'
        elif self.rotationEnabled:
            return 'Right-click and drag on the widget to pose the rotation'
        else:
            return 'Right-click and drag on the widget to change the translation'
    def mousefunc(self,button,state,x,y):
        if VisualEditorBase.mousefunc(self,button,state,x,y):
            # store the value in the reference frame's coordinates
            self.value = se3.mul(se3.inv(self.frame),self.xformposer.get())
            # lazily record relative poses for any newly attached objects
            if len(self.attachedRelativePoses) < len(self.attachedObjects):
                for o in self.attachedObjects[len(self.attachedRelativePoses):]:
                    self.attachedRelativePoses.append(se3.mul(se3.inv(self.xformposer.get()),o.getTransform()))
            return True
        return False
    def motionfunc(self,x,y,dx,dy):
        if self.xformposer.hasFocus():
            self.value = se3.mul(se3.inv(self.frame),self.xformposer.get())
            # drag attached objects along with the posed transform
            for o,p in zip(self.attachedObjects,self.attachedRelativePoses):
                o.setTransform(*se3.mul(self.xformposer.get(),p))
        return VisualEditorBase.motionfunc(self,x,y,dx,dy)
    def display(self):
        VisualEditorBase.display(self)
        return True
class ObjectTransformEditor(VisualEditorBase):
    """Edits the world-space transform of a rigid object via ObjectPoser."""
    def __init__(self,name,value,description,world,object):
        VisualEditorBase.__init__(self,name,value,description,world)
        self.object = object
        poser = ObjectPoser(object)
        self.objposer = poser
        self.addWidget(poser)
    def instructions(self):
        return 'Right-click and drag on the widget to pose the object'
    def mousefunc(self,button,state,x,y):
        # Sync the edited value whenever the poser owns the mouse.
        if self.objposer.hasFocus():
            self.value = self.objposer.get()
        return VisualEditorBase.mousefunc(self,button,state,x,y)
#Qt stuff
if glinit._PyQtAvailable:
from PyQt4.QtCore import *
from PyQt4.QtGui import *
_vis_id = None
_my_dialog_res = None
_my_dialog_retval = None
_doexit = False
class _EditDialog(QDialog):
def __init__(self,glwidget):
QDialog.__init__(self)
self.glwidget = glwidget
glwidget.setMinimumSize(glwidget.width,glwidget.height)
glwidget.setMaximumSize(4000,4000)
glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum))
self.instructions = QLabel()
self.description = QLabel()
self.description2 = QLabel("Press OK to save, Cancel to continue without saving")
self.topBox = QFrame()
self.topBoxLayout = QVBoxLayout(self.topBox)
self.topBoxLayout.addWidget(self.description)
self.topBoxLayout.addWidget(self.instructions)
self.extraDialog = QFrame()
self.extraDialog.setSizePolicy(QSizePolicy(QSizePolicy.Minimum,QSizePolicy.Minimum))
self.topBoxLayout.addWidget(self.extraDialog)
self.layout = QVBoxLayout(self)
self.layout.addWidget(self.topBox)
self.layout.addWidget(glwidget)
self.layout.addWidget(self.description2)
self.layout.setStretchFactor(glwidget,10)
self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel,Qt.Horizontal, self)
self.buttons.accepted.connect(self.accept)
self.buttons.rejected.connect(self.reject)
self.layout.addWidget(self.buttons)
def setEditor(self,editorObject):
self.editorObject = editorObject
self.setWindowTitle("Editing "+editorObject.name)
if editorObject.description==None:
self.description.setText("")
else:
self.description.setText(editorObject.description)
self.instructions.setText(editorObject.instructions())
editorObject.addDialogItems(self.extraDialog,ui='qt')
def closeEvent(self,event):
reply = QMessageBox.question(self, 'Message',
"Are you sure to quit the program?", QMessageBox.Yes |
QMessageBox.No, QMessageBox.No)
if reply == QMessageBox.Yes:
_doexit = True
event.accept()
else:
event.ignore()
def accept(self):
global _my_dialog_res
_my_dialog_res = True
print "Calling GLWidget.close"
self.glwidget.close()
print "#########################################"
print "klampt.vis: Dialog accept"
print "#########################################"
return QDialog.accept(self)
def reject(self):
global _my_dialog_res
_my_dialog_res = False
print "Calling GLWidget.close"
self.glwidget.close()
print "#########################################"
print "klampt.vis: Dialog reject"
print "#########################################"
return QDialog.reject(self)
def run(editorObject):
"""Returns a pair (res,value) where res is True / False if OK / Cancel was pressed, respectively,
and value is the return value of the editor object
"""
assert isinstance(editorObject,VisualEditorBase),"Must provide a VisualEditorBase instance to run()"
global _vis_id, _my_dialog_res, _my_dialog_retval
old_vis_window = visualization.getWindow()
if _vis_id == None:
_vis_id = visualization.createWindow("Resource Editor")
else:
visualization.setWindow(_vis_id)
visualization.setPlugin(editorObject)
def makefunc(gl_backend):
res = _EditDialog(gl_backend)
res.setEditor(editorObject)
visualization._checkWindowCurrent(editorObject.world)
return res
visualization.customUI(makefunc)
visualization.dialog()
res,retVal = _my_dialog_res,editorObject.value
if _doexit:
visualization.kill()
print "Exiting program."
exit(0)
visualization.setWindow(old_vis_window)
visualization.setPlugin(None)
visualization.customUI(None)
print "Result",res,"return value",retVal
return res,retVal
else:
def run(editorObject):
raise ValueError("Unable to perform visual editing without PyQt")
|
|
"""
Django settings for bughouse-rankings project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import excavator
from dj_database_url import config as dj_config
# Project root: two levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# excavator.env_* read values from environment variables; with
# required=True a missing variable presumably raises at import time —
# deliberate, so misconfigured deployments fail fast.
SECRET_KEY = excavator.env_string('DJANGO_SECRET_KEY', required=True)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = excavator.env_bool('DJANGO_DEBUG', required=True)
ALLOWED_HOSTS = excavator.env_list("DJANGO_ALLOWED_HOSTS", required=True)
# Template debug mirrors the global debug flag.
TEMPLATE_DEBUG = DEBUG
# Application definition
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # local
    'bughouse',
    # 3rd party
    'pipeline',
    'argonauts',
    'rest_framework',
    'sorl.thumbnail',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'bughouse.urls'
WSGI_APPLICATION = 'bughouse.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
# dj_database_url.config() builds the settings dict from the DATABASE_URL
# environment variable.
DATABASES = {
    'default': dj_config(),
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images) and Media
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = excavator.env_string(
    'DJANGO_STATIC_URL', default='/static/', required=False,
)
# Filesystem destination for collectstatic; must be provided by the
# environment.
STATIC_ROOT = os.path.abspath(
    excavator.env_string('DJANGO_STATIC_ROOT', required=True),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'pipeline.finders.CachedFileFinder',
    'pipeline.finders.PipelineFinder',
)
MEDIA_URL = excavator.env_string(
    'DJANGO_MEDIA_URL', default='/media/', required=False,
)
MEDIA_ROOT = os.path.abspath(
    excavator.env_string('DJANGO_MEDIA_ROOT', required=True),
)
# NOTE(review): no default is supplied for these two — they presumably
# resolve to None/'' when the env vars are unset; confirm excavator's
# behavior before relying on Django's fallback storage classes.
DEFAULT_FILE_STORAGE = excavator.env_string('DEFAULT_FILE_STORAGE')
STATICFILES_STORAGE = excavator.env_string('STATICFILES_STORAGE')
# Pipeline
# Asset compilation is on in production (i.e. whenever DEBUG is off)
# unless explicitly overridden via the environment.
PIPELINE_ENABLED = excavator.env_bool('DJANGO_PIPELINE_ENABLED', default=not DEBUG)
PIPELINE_DISABLE_WRAPPER = True
# Bundles: each entry maps a bundle name to its source files and the
# combined output file served to clients.
PIPELINE_CSS = {
    'base': {
        'source_filenames': (
            "css/bootstrap.css",
            "css/bootstrap-theme.css",
            "css/custom.css",
        ),
        'output_filename': 'css/base.css',
    },
}
PIPELINE_JS = {
    'base': {
        'source_filenames': (
            "js/jquery.js",
            "js/d3.js",
            "js/bootstrap.js",
            "js/handlebars.js",
            "js/underscore.js",
            "js/backbone.js",
            "js/backbone.wreqr.js",
            "js/backbone.babysitter.js",
            "js/backbone.marionette.js",
            "js/backbone.marionette.export.js",
            # Config
            "js/config.js",
        ),
        'output_filename': 'js/base.js',
    },
    'player-roster-templates': {
        'source_filenames': (
            "js/player-roster/templates/**.handlebars",
        ),
        'output_filename': 'js/player-roster-templates.js',
    },
    'player-roster': {
        'source_filenames': (
            "js/player-roster/templates/**.handlebars",
            "js/player-roster/models.js",
            "js/player-roster/collections.js",
            "js/player-roster/views.js",
            "js/player-roster/layouts.js",
            "js/player-roster/app.js",
        ),
        'output_filename': 'js/player-roster.js',
    },
    'report-game-templates': {
        'source_filenames': (
            "js/report-game/templates/**.handlebars",
        ),
        'output_filename': 'js/report-game-templates.js',
    },
    'report-game': {
        'source_filenames': (
            "js/report-game/templates/**.handlebars",
            "js/report-game/models.js",
            "js/report-game/collections.js",
            "js/report-game/views.js",
            "js/report-game/layouts.js",
            "js/report-game/app.js",
        ),
        'output_filename': 'js/report-game.js',
    },
    'player-rating-visualizations-templates': {
        'source_filenames': (
            "js/player-rating-visualizations/templates/**.handlebars",
        ),
        'output_filename': 'js/player-rating-visualizations-templates.js',
    },
    'player-rating-visualizations': {
        'source_filenames': (
            "js/player-rating-visualizations/app.js",
            "js/player-rating-visualizations/layouts.js",
            "js/player-rating-visualizations/views.js",
            "js/player-rating-visualizations/models.js",
            "js/player-rating-visualizations/collections.js",
        ),
        'output_filename': 'js/player-rating-visualizations.js',
    },
}
# No minification is applied; files are only concatenated.
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.NoopCompressor'
PIPELINE_JS_COMPRESSOR = 'pipeline.compressors.NoopCompressor'
# Handlebars templates are precompiled into the Handlebars.templates
# namespace.
PIPELINE_TEMPLATE_EXT = '.handlebars'
PIPELINE_TEMPLATE_FUNC = 'Handlebars.compile'
PIPELINE_TEMPLATE_NAMESPACE = 'Handlebars.templates'
# Ratings Engines
ELO_RATING_ENGINES = (
    'bughouse.ratings.engines.overall.OverallPlayerRatings',
    'bughouse.ratings.engines.overall.OverallPlayerRatingsAsWhite',
    'bughouse.ratings.engines.overall.OverallPlayerRatingsAsBlack',
    'bughouse.ratings.engines.overall.OverallTeamRatings',
    # Experimental
    'bughouse.ratings.engines.batman.BatmanRatings',
)
PRIMARY_RATING_KEY = excavator.env_string(
    'PRIMARY_RATING_KEY', default='experimental:batman',
)
# ELO constants
# Point deltas are expressed in units of ELO_K; since ELO_K is a float,
# all of the divisions below produce floats (no integer truncation).
ELO_K = 4.0
ELO_WIN_TEAM = 50.0 / ELO_K
ELO_LOSE_TEAM = - ELO_WIN_TEAM
ELO_WIN_SELF = 55 / ELO_K
ELO_WIN_PARTNER = 45 / ELO_K
# Losses mirror the corresponding wins.
ELO_LOSE_SELF = (-1) * ELO_WIN_SELF
ELO_LOSE_PARTNER = (-1) * ELO_WIN_PARTNER
# A player's rating change weighs their own result 4:1 over their
# partner's.
ELO_PARTNER_WEIGHT = 1.0 / 5
ELO_SELF_WEIGHT = 1 - ELO_PARTNER_WEIGHT
# Ratings move faster during a player's first games.
ELO_PROVISIONAL_GAME_LIMIT = 10
ELO_PROVISIONAL_GAME_MODIFIER = 4
# Sorl Thumbnailer
THUMBNAIL_FORMAT = "PNG"
|
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""A utility script for automating the beets release process.
"""
import click
import os
import re
import subprocess
from contextlib import contextmanager
import datetime
# Repository root: two levels above this script's location.
BASE = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# The changelog whose first section describes the in-progress release.
CHANGELOG = os.path.join(BASE, 'docs', 'changelog.rst')
@contextmanager
def chdir(d):
    """A context manager that temporarily changes the working directory.

    The original directory is restored even if the managed block raises
    (the original implementation skipped the restore on exceptions).
    """
    olddir = os.getcwd()
    os.chdir(d)
    try:
        yield
    finally:
        os.chdir(olddir)
# Top-level Click command group; subcommands register themselves below
# via @release.command().
@click.group()
def release():
    pass
# Locations (filenames and patterns) of the version number.
# Each entry is (filename, [(regex, replacement_template), ...]).  The
# regex's first group captures the old version; the template is formatted
# with {version} (full X.Y.Z), {minor} (X.Y) and {major} (X) by
# bump_version() below.
VERSION_LOCS = [
    (
        os.path.join(BASE, 'beets', '__init__.py'),
        [
            (
                r'__version__\s*=\s*[\'"]([0-9\.]+)[\'"]',
                "__version__ = '{version}'",
            )
        ]
    ),
    (
        os.path.join(BASE, 'docs', 'conf.py'),
        [
            (
                r'version\s*=\s*[\'"]([0-9\.]+)[\'"]',
                "version = '{minor}'",
            ),
            (
                r'release\s*=\s*[\'"]([0-9\.]+)[\'"]',
                "release = '{version}'",
            ),
        ]
    ),
    (
        os.path.join(BASE, 'setup.py'),
        [
            (
                r'\s*version\s*=\s*[\'"]([0-9\.]+)[\'"]',
                " version='{version}',",
            )
        ]
    ),
]
def bump_version(version):
    """Update the version number in setup.py, docs config, changelog,
    and root module.

    ``version`` must be a three-part dotted string (e.g. '1.3.9') that
    compares strictly newer (component-wise) than the version currently
    recorded in each file; otherwise an AssertionError is raised.
    """
    version_parts = [int(p) for p in version.split('.')]
    assert len(version_parts) == 3, "invalid version number"
    # Derived forms for the templates: 'X.Y' and 'X'.
    minor = '{}.{}'.format(*version_parts)
    major = '{}'.format(*version_parts)
    # Replace the version each place where it lives.
    for filename, locations in VERSION_LOCS:
        # Read and transform the file.
        out_lines = []
        with open(filename) as f:
            found = False
            for line in f:
                for pattern, template in locations:
                    match = re.match(pattern, line)
                    if match:
                        # Check that this version is actually newer.
                        old_version = match.group(1)
                        old_parts = [int(p) for p in old_version.split('.')]
                        assert version_parts > old_parts, \
                            "version must be newer than {}".format(
                                old_version
                            )
                        # Insert the new version.
                        out_lines.append(template.format(
                            version=version,
                            major=major,
                            minor=minor,
                        ) + '\n')
                        found = True
                        break
                else:
                    # Normal line (for-else: no pattern matched).
                    out_lines.append(line)
        if not found:
            print("No pattern found in {}".format(filename))
        # Write the file back.
        with open(filename, 'w') as f:
            f.write(''.join(out_lines))
    # Generate bits to insert into changelog.
    header_line = '{} (in development)'.format(version)
    header = '\n\n' + header_line + '\n' + '-' * len(header_line) + '\n\n'
    header += 'Changelog goes here!\n'
    # Insert into the right place.
    with open(CHANGELOG) as f:
        contents = f.read()
    location = contents.find('\n\n') # First blank line.
    contents = contents[:location] + header + contents[location:]
    # Write back.
    with open(CHANGELOG, 'w') as f:
        f.write(contents)
# CLI wrapper: `release bump VERSION`.
@release.command()
@click.argument('version')
def bump(version):
    """Bump the version number.
    """
    bump_version(version)
def get_latest_changelog():
    """Extract the first section of the changelog.

    Sections are delimited by reStructuredText underlines (lines of
    dashes).  Returns the body text of the newest section, stripped.
    """
    started = False
    lines = []
    with open(CHANGELOG) as f:
        for line in f:
            if re.match(r'^--+$', line.strip()):
                # Section boundary. Start or end.
                if started:
                    # Remove last line, which is the header of the next
                    # section.
                    del lines[-1]
                    break
                else:
                    started = True
            elif started:
                lines.append(line)
    return ''.join(lines).strip()
def rst2md(text):
    """Use Pandoc to convert text from ReST to Markdown.

    Requires the `pandoc` executable on the PATH.  The converted text is
    returned with list bullets normalized to a single trailing space.
    """
    pandoc = subprocess.Popen(
        ['pandoc', '--from=rst', '--to=markdown', '--no-wrap'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    stdout, _ = pandoc.communicate(text.encode('utf8'))
    md = stdout.decode('utf8').strip()
    # Fix up odd spacing in lists: Pandoc pads bullets with extra spaces
    # to align list bodies; collapse the run of whitespace after a leading
    # '-' to a single space.  (The previous pattern r'^- ' replaced '- '
    # with itself and was therefore a no-op.)
    return re.sub(r'^-[ \t]+', '- ', md, flags=re.M)
def changelog_as_markdown():
    """Get the latest changelog entry as hacked up Markdown.

    Applies a series of regex passes that strip Sphinx-specific roles
    from the ReST source before handing it to Pandoc; the order of the
    substitutions matters.
    """
    rst = get_latest_changelog()
    # Replace plugin links with plugin names.
    rst = re.sub(r':doc:`/plugins/(\w+)`', r'``\1``', rst)
    # References with text.
    rst = re.sub(r':ref:`([^<]+)(<[^>]+>)`', r'\1', rst)
    # Other backslashes with verbatim ranges.
    rst = re.sub(r'(\s)`([^`]+)`([^_])', r'\1``\2``\3', rst)
    # Command links with command names.
    rst = re.sub(r':ref:`(\w+)-cmd`', r'``\1``', rst)
    # Bug numbers.
    rst = re.sub(r':bug:`(\d+)`', r'#\1', rst)
    # Users.
    rst = re.sub(r':user:`(\w+)`', r'@\1', rst)
    # Convert with Pandoc.
    md = rst2md(rst)
    # Restore escaped issue numbers.
    md = re.sub(r'\\#(\d+)\b', r'#\1', md)
    return md
# CLI wrapper: `release changelog` prints the newest entry as Markdown.
@release.command()
def changelog():
    """Get the most recent version's changelog as Markdown.
    """
    print(changelog_as_markdown())
def get_version(index=0):
    """Read the current version from the changelog.

    Version headers start a line with 'X.Y.Z'; ``index`` selects which
    occurrence to return (0 = newest).  Returns None implicitly when the
    changelog has fewer than ``index + 1`` version headers.
    """
    header_re = re.compile(r'^\d+\.\d+\.\d+')
    seen = 0
    with open(CHANGELOG) as f:
        for line in f:
            found = header_re.search(line)
            if found is None:
                continue
            if seen == index:
                return found.group(0)
            seen += 1
# CLI wrapper: `release version` prints the newest changelog version.
@release.command()
def version():
    """Display the current version.
    """
    print(get_version())
@release.command()
def datestamp():
    """Enter today's date as the release date in the changelog.

    Replaces the '(in development)' marker in the newest section header
    with e.g. '(January 5, 2016)' and rewrites the underline to match
    the new header length.
    """
    dt = datetime.datetime.now()
    stamp = '({} {}, {})'.format(dt.strftime('%B'), dt.day, dt.year)
    marker = '(in development)'
    lines = []
    # Length of the rewritten header; non-None only on the line
    # immediately following the header.
    underline_length = None
    with open(CHANGELOG) as f:
        for line in f:
            if marker in line:
                # The header line.
                line = line.replace(marker, stamp)
                lines.append(line)
                underline_length = len(line.strip())
            elif underline_length:
                # This is the line after the header. Rewrite the dashes.
                lines.append('-' * underline_length + '\n')
                underline_length = None
            else:
                lines.append(line)
    with open(CHANGELOG, 'w') as f:
        for line in lines:
            f.write(line)
@release.command()
def prep():
    """Run all steps to prepare a release.
    - Tag the commit.
    - Build the sdist package.
    - Generate the Markdown changelog to ``changelog.md``.
    - Bump the version number to the next version.
    """
    cur_version = get_version()
    # Tag.
    subprocess.check_output(['git', 'tag', 'v{}'.format(cur_version)])
    # Build.
    with chdir(BASE):
        subprocess.check_call(['python2', 'setup.py', 'sdist'])
    # Generate Markdown changelog.
    cl = changelog_as_markdown()
    with open(os.path.join(BASE, 'changelog.md'), 'w') as f:
        f.write(cl)
    # Version number bump.
    # FIXME It should be possible to specify this as an argument.
    # Default: increment the patch component of the current version.
    version_parts = [int(n) for n in cur_version.split('.')]
    version_parts[-1] += 1
    next_version = u'.'.join(map(str, version_parts))
    bump_version(next_version)
@release.command()
def publish():
    """Unleash a release unto the world.
    - Push the tag to GitHub.
    - Upload to PyPI.
    """
    # Index 1, not 0: after `prep`, the newest changelog entry is the
    # next development version, so the release being published is the
    # second version header.
    version = get_version(1)
    # Push to GitHub.
    with chdir(BASE):
        subprocess.check_call(['git', 'push'])
        subprocess.check_call(['git', 'push', '--tags'])
    # Upload to PyPI.
    path = os.path.join(BASE, 'dist', 'beets-{}.tar.gz'.format(version))
    subprocess.check_call(['twine', 'upload', path])
if __name__ == '__main__':
    release()
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse # noqa
from django import http
from horizon.workflows import views
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks import tests
from openstack_dashboard.test import helpers as test
# Admin networks index page; most tests below redirect back here.
INDEX_URL = reverse('horizon:admin:networks:index')
class NetworkTests(test.BaseAdminViewTests):
    """Tests for the admin Networks panel views.

    Each test records the expected Neutron/Keystone API calls with mox
    stubs, replays them, drives the view through the test client, and
    verifies the rendered template / redirect.  The record order must
    match the order the view issues the calls.
    """
    @test.create_stubs({api.neutron: ('network_list',),
                        api.keystone: ('tenant_list',)})
    def test_index(self):
        tenants = self.tenants.list()
        api.neutron.network_list(IsA(http.HttpRequest)) \
            .AndReturn(self.networks.list())
        api.keystone.tenant_list(IsA(http.HttpRequest))\
            .AndReturn([tenants, False])
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'admin/networks/index.html')
        networks = res.context['networks_table'].data
        self.assertItemsEqual(networks, self.networks.list())
    @test.create_stubs({api.neutron: ('network_list',)})
    def test_index_network_list_exception(self):
        # A Neutron failure should still render the index with an empty
        # table and a single error message.
        api.neutron.network_list(IsA(http.HttpRequest)) \
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        res = self.client.get(INDEX_URL)
        self.assertTemplateUsed(res, 'admin/networks/index.html')
        self.assertEqual(len(res.context['networks_table'].data), 0)
        self.assertMessageCount(res, error=1)
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_list',
                                      'port_list',)})
    def test_network_detail(self):
        network_id = self.networks.first().id
        api.neutron.network_get(IsA(http.HttpRequest), network_id)\
            .AndReturn(self.networks.first())
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.subnets.first()])
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.ports.first()])
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:admin:networks:detail',
                                      args=[network_id]))
        self.assertTemplateUsed(res, 'project/networks/detail.html')
        subnets = res.context['subnets_table'].data
        ports = res.context['ports_table'].data
        self.assertItemsEqual(subnets, [self.subnets.first()])
        self.assertItemsEqual(ports, [self.ports.first()])
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_list',
                                      'port_list',)})
    def test_network_detail_network_exception(self):
        # network_get failing redirects back to the index.
        network_id = self.networks.first().id
        api.neutron.network_get(IsA(http.HttpRequest), network_id)\
            .AndRaise(self.exceptions.neutron)
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.subnets.first()])
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.ports.first()])
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:detail', args=[network_id])
        res = self.client.get(url)
        redir_url = INDEX_URL
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_list',
                                      'port_list',)})
    def test_network_detail_subnet_exception(self):
        # subnet_list failing leaves an empty subnet table.
        network_id = self.networks.first().id
        api.neutron.network_get(IsA(http.HttpRequest), network_id).\
            AndReturn(self.networks.first())
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id).\
            AndRaise(self.exceptions.neutron)
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id).\
            AndReturn([self.ports.first()])
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:admin:networks:detail',
                                      args=[network_id]))
        self.assertTemplateUsed(res, 'project/networks/detail.html')
        subnets = res.context['subnets_table'].data
        ports = res.context['ports_table'].data
        self.assertEqual(len(subnets), 0)
        self.assertItemsEqual(ports, [self.ports.first()])
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_list',
                                      'port_list',)})
    def test_network_detail_port_exception(self):
        # port_list failing leaves an empty port table.
        network_id = self.networks.first().id
        api.neutron.network_get(IsA(http.HttpRequest), network_id).\
            AndReturn(self.networks.first())
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id).\
            AndReturn([self.subnets.first()])
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id).\
            AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        res = self.client.get(reverse('horizon:admin:networks:detail',
                                      args=[network_id]))
        self.assertTemplateUsed(res, 'project/networks/detail.html')
        subnets = res.context['subnets_table'].data
        ports = res.context['ports_table'].data
        self.assertItemsEqual(subnets, [self.subnets.first()])
        self.assertEqual(len(ports), 0)
    @test.create_stubs({api.neutron: ('profile_list',),
                        api.keystone: ('tenant_list',)})
    def test_network_create_get(self):
        tenants = self.tenants.list()
        api.keystone.tenant_list(IsA(
            http.HttpRequest)).AndReturn([tenants, False])
        # TODO(absubram): Remove if clause and create separate
        # test stubs for when profile_support is being used.
        # Additionally ensure those are always run even in default setting
        if api.neutron.is_port_profiles_supported():
            net_profiles = self.net_profiles.list()
            api.neutron.profile_list(IsA(http.HttpRequest),
                                     'network').AndReturn(net_profiles)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:create')
        res = self.client.get(url)
        self.assertTemplateUsed(res, 'admin/networks/create.html')
    @test.create_stubs({api.neutron: ('network_create',
                                      'profile_list',),
                        api.keystone: ('tenant_list',)})
    def test_network_create_post(self):
        tenants = self.tenants.list()
        tenant_id = self.tenants.first().id
        network = self.networks.first()
        api.keystone.tenant_list(IsA(http.HttpRequest))\
            .AndReturn([tenants, False])
        # Expected kwargs passed through to network_create.
        params = {'name': network.name,
                  'tenant_id': tenant_id,
                  'admin_state_up': network.admin_state_up,
                  'router:external': True,
                  'shared': True}
        # TODO(absubram): Remove if clause and create separate
        # test stubs for when profile_support is being used.
        # Additionally ensure those are always run even in default setting
        if api.neutron.is_port_profiles_supported():
            net_profiles = self.net_profiles.list()
            net_profile_id = self.net_profiles.first().id
            api.neutron.profile_list(IsA(http.HttpRequest),
                                     'network').AndReturn(net_profiles)
            params['net_profile_id'] = net_profile_id
        api.neutron.network_create(IsA(http.HttpRequest), **params)\
            .AndReturn(network)
        self.mox.ReplayAll()
        form_data = {'tenant_id': tenant_id,
                     'name': network.name,
                     'admin_state': network.admin_state_up,
                     'external': True,
                     'shared': True}
        if api.neutron.is_port_profiles_supported():
            form_data['net_profile_id'] = net_profile_id
        url = reverse('horizon:admin:networks:create')
        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('network_create',
                                      'profile_list',),
                        api.keystone: ('tenant_list',)})
    def test_network_create_post_network_exception(self):
        # network_create failing still redirects to the index (the error
        # is reported via messages, not form errors).
        tenants = self.tenants.list()
        tenant_id = self.tenants.first().id
        network = self.networks.first()
        api.keystone.tenant_list(IsA(http.HttpRequest))\
            .AndReturn([tenants, False])
        params = {'name': network.name,
                  'tenant_id': tenant_id,
                  'admin_state_up': network.admin_state_up,
                  'router:external': True,
                  'shared': False}
        # TODO(absubram): Remove if clause and create separate
        # test stubs for when profile_support is being used.
        # Additionally ensure those are always run even in default setting
        if api.neutron.is_port_profiles_supported():
            net_profiles = self.net_profiles.list()
            net_profile_id = self.net_profiles.first().id
            api.neutron.profile_list(IsA(http.HttpRequest),
                                     'network').AndReturn(net_profiles)
            params['net_profile_id'] = net_profile_id
        api.neutron.network_create(IsA(http.HttpRequest), **params)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        form_data = {'tenant_id': tenant_id,
                     'name': network.name,
                     'admin_state': network.admin_state_up,
                     'external': True,
                     'shared': False}
        if api.neutron.is_port_profiles_supported():
            form_data['net_profile_id'] = net_profile_id
        url = reverse('horizon:admin:networks:create')
        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('network_get',)})
    def test_network_update_get(self):
        network = self.networks.first()
        api.neutron.network_get(IsA(http.HttpRequest), network.id)\
            .AndReturn(network)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:update', args=[network.id])
        res = self.client.get(url)
        self.assertTemplateUsed(res, 'admin/networks/update.html')
    @test.create_stubs({api.neutron: ('network_get',)})
    def test_network_update_get_exception(self):
        network = self.networks.first()
        api.neutron.network_get(IsA(http.HttpRequest), network.id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:update', args=[network.id])
        res = self.client.get(url)
        redir_url = INDEX_URL
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_modify',
                                      'network_get',)})
    def test_network_update_post(self):
        network = self.networks.first()
        params = {'name': network.name,
                  'shared': True,
                  'admin_state_up': network.admin_state_up,
                  'router:external': True}
        api.neutron.network_modify(IsA(http.HttpRequest), network.id,
                                   **params)\
            .AndReturn(network)
        api.neutron.network_get(IsA(http.HttpRequest), network.id)\
            .AndReturn(network)
        self.mox.ReplayAll()
        form_data = {'network_id': network.id,
                     'name': network.name,
                     'tenant_id': network.tenant_id,
                     'admin_state': network.admin_state_up,
                     'shared': True,
                     'external': True}
        url = reverse('horizon:admin:networks:update', args=[network.id])
        res = self.client.post(url, form_data)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('network_modify',
                                      'network_get',)})
    def test_network_update_post_exception(self):
        network = self.networks.first()
        params = {'name': network.name,
                  'shared': False,
                  'admin_state_up': network.admin_state_up,
                  'router:external': False}
        api.neutron.network_modify(IsA(http.HttpRequest), network.id,
                                   **params)\
            .AndRaise(self.exceptions.neutron)
        api.neutron.network_get(IsA(http.HttpRequest), network.id)\
            .AndReturn(network)
        self.mox.ReplayAll()
        form_data = {'network_id': network.id,
                     'name': network.name,
                     'tenant_id': network.tenant_id,
                     'admin_state': network.admin_state_up,
                     'shared': False,
                     'external': False}
        url = reverse('horizon:admin:networks:update', args=[network.id])
        res = self.client.post(url, form_data)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('network_list',
                                      'network_delete'),
                        api.keystone: ('tenant_list',)})
    def test_delete_network(self):
        tenants = self.tenants.list()
        network = self.networks.first()
        api.keystone.tenant_list(IsA(http.HttpRequest))\
            .AndReturn([tenants, False])
        api.neutron.network_list(IsA(http.HttpRequest))\
            .AndReturn([network])
        api.neutron.network_delete(IsA(http.HttpRequest), network.id)
        self.mox.ReplayAll()
        # Table-action POST payload: delete the row for this network.
        form_data = {'action': 'networks__delete__%s' % network.id}
        res = self.client.post(INDEX_URL, form_data)
        self.assertRedirectsNoFollow(res, INDEX_URL)
    @test.create_stubs({api.neutron: ('network_list',
                                      'network_delete'),
                        api.keystone: ('tenant_list',)})
    def test_delete_network_exception(self):
        tenants = self.tenants.list()
        network = self.networks.first()
        api.keystone.tenant_list(IsA(http.HttpRequest))\
            .AndReturn([tenants, False])
        api.neutron.network_list(IsA(http.HttpRequest))\
            .AndReturn([network])
        api.neutron.network_delete(IsA(http.HttpRequest), network.id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        form_data = {'action': 'networks__delete__%s' % network.id}
        res = self.client.post(INDEX_URL, form_data)
        self.assertRedirectsNoFollow(res, INDEX_URL)
class NetworkSubnetTests(test.BaseAdminViewTests):
    """Tests for the admin subnet detail / create / update views,
    driven through mox stubs of the Neutron API."""
    @test.create_stubs({api.neutron: ('subnet_get',)})
    def test_subnet_detail(self):
        subnet = self.subnets.first()
        api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
            .AndReturn(self.subnets.first())
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:subnets:detail',
                      args=[subnet.id])
        res = self.client.get(url)
        self.assertTemplateUsed(res, 'project/networks/subnets/detail.html')
        self.assertEqual(res.context['subnet'].id, subnet.id)
    @test.create_stubs({api.neutron: ('subnet_get',)})
    def test_subnet_detail_exception(self):
        subnet = self.subnets.first()
        api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:subnets:detail',
                      args=[subnet.id])
        res = self.client.get(url)
        # admin DetailView is shared with userpanel one, so
        # redirection URL on error is userpanel index.
        redir_url = reverse('horizon:project:networks:index')
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_get',)})
    def test_subnet_create_get(self):
        network = self.networks.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        self.mox.ReplayAll()
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[network.id])
        res = self.client.get(url)
        # The add-subnet view is a workflow, not a plain template.
        self.assertTemplateUsed(res, views.WorkflowView.template_name)
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_create',)})
    def test_subnet_create_post(self):
        network = self.networks.first()
        subnet = self.subnets.first()
        # The workflow fetches the network twice before creating.
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.subnet_create(IsA(http.HttpRequest),
                                  network_id=network.id,
                                  name=subnet.name,
                                  cidr=subnet.cidr,
                                  ip_version=subnet.ip_version,
                                  gateway_ip=subnet.gateway_ip,
                                  enable_dhcp=subnet.enable_dhcp,
                                  allocation_pools=subnet.allocation_pools,
                                  tenant_id=subnet.tenant_id)\
            .AndReturn(subnet)
        self.mox.ReplayAll()
        form_data = tests.form_data_subnet(subnet)
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[subnet.network_id])
        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)
        redir_url = reverse('horizon:admin:networks:detail',
                            args=[subnet.network_id])
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_create',)})
    def test_subnet_create_post_network_exception(self):
        network = self.networks.first()
        subnet = self.subnets.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        form_data = tests.form_data_subnet(subnet, allocation_pools=[])
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[subnet.network_id])
        res = self.client.post(url, form_data)
        self.assertNoFormErrors(res)
        # admin DetailView is shared with userpanel one, so
        # redirection URL on error is userpanel index.
        redir_url = reverse('horizon:project:networks:index')
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_get',
                                      'subnet_create',)})
    def test_subnet_create_post_subnet_exception(self):
        network = self.networks.first()
        subnet = self.subnets.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        # subnet_create failing redirects back to the network detail.
        api.neutron.subnet_create(IsA(http.HttpRequest),
                                  network_id=network.id,
                                  name=subnet.name,
                                  cidr=subnet.cidr,
                                  ip_version=subnet.ip_version,
                                  gateway_ip=subnet.gateway_ip,
                                  enable_dhcp=subnet.enable_dhcp,
                                  tenant_id=subnet.tenant_id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()
        form_data = tests.form_data_subnet(subnet, allocation_pools=[])
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[subnet.network_id])
        res = self.client.post(url, form_data)
        redir_url = reverse('horizon:admin:networks:detail',
                            args=[subnet.network_id])
        self.assertRedirectsNoFollow(res, redir_url)
    @test.create_stubs({api.neutron: ('network_get',)})
    def test_subnet_create_post_cidr_inconsistent(self):
        network = self.networks.first()
        subnet = self.subnets.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        self.mox.ReplayAll()
        # dummy IPv6 address: mismatches the form's IPv4 version field, so
        # validation should fail before any subnet_create call.
        cidr = '2001:0DB8:0:CD30:123:4567:89AB:CDEF/60'
        form_data = tests.form_data_subnet(
            subnet, cidr=cidr, allocation_pools=[])
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[subnet.network_id])
        res = self.client.post(url, form_data)
        expected_msg = 'Network Address and IP version are inconsistent.'
        self.assertContains(res, expected_msg)
    @test.create_stubs({api.neutron: ('network_get',)})
    def test_subnet_create_post_gw_inconsistent(self):
        network = self.networks.first()
        subnet = self.subnets.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        self.mox.ReplayAll()
        # dummy IPv6 address: gateway version mismatches the subnet's IP
        # version, so form validation should reject it.
        gateway_ip = '2001:0DB8:0:CD30:123:4567:89AB:CDEF'
        form_data = tests.form_data_subnet(subnet, gateway_ip=gateway_ip,
                                           allocation_pools=[])
        url = reverse('horizon:admin:networks:addsubnet',
                      args=[subnet.network_id])
        res = self.client.post(url, form_data)
        self.assertContains(res, 'Gateway IP and IP version are inconsistent.')
@test.create_stubs({api.neutron: ('subnet_modify',
                                  'subnet_get',)})
def test_subnet_update_post(self):
    # Successful subnet update redirects back to the network detail page.
    subnet = self.subnets.first()
    # mox recordings: subnet_get is called twice (form init + submit),
    # then subnet_modify performs the actual update.
    api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
        .AndReturn(subnet)
    api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
        .AndReturn(subnet)
    api.neutron.subnet_modify(IsA(http.HttpRequest), subnet.id,
                              name=subnet.name,
                              enable_dhcp=subnet.enable_dhcp,
                              dns_nameservers=[],
                              host_routes=[])\
        .AndReturn(subnet)
    self.mox.ReplayAll()

    form_data = tests.form_data_subnet(subnet, allocation_pools=[])
    url = reverse('horizon:admin:networks:editsubnet',
                  args=[subnet.network_id, subnet.id])
    res = self.client.post(url, form_data)

    redir_url = reverse('horizon:admin:networks:detail',
                        args=[subnet.network_id])
    self.assertRedirectsNoFollow(res, redir_url)
@test.create_stubs({api.neutron: ('subnet_modify',
                                  'subnet_get',)})
def test_subnet_update_post_gw_inconsistent(self):
    # An IP-version-inconsistent gateway fails validation, so only the
    # initial subnet_get is recorded; subnet_modify is stubbed but
    # intentionally never expected to be called.
    subnet = self.subnets.first()
    api.neutron.subnet_get(IsA(http.HttpRequest), subnet.id)\
        .AndReturn(subnet)
    self.mox.ReplayAll()

    # dummy IPv6 address used as gateway for an IPv4 subnet
    gateway_ip = '2001:0DB8:0:CD30:123:4567:89AB:CDEF'
    form_data = tests.form_data_subnet(subnet, gateway_ip=gateway_ip,
                                       allocation_pools=[])
    url = reverse('horizon:admin:networks:editsubnet',
                  args=[subnet.network_id, subnet.id])
    res = self.client.post(url, form_data)

    self.assertContains(res, 'Gateway IP and IP version are inconsistent.')
@test.create_stubs({api.neutron: ('subnet_delete',
                                  'subnet_list',
                                  'port_list',)})
def test_subnet_delete(self):
    # Deleting a subnet from the network detail table redirects back to
    # the same detail page, which re-lists subnets and ports.
    subnet = self.subnets.first()
    network_id = subnet.network_id
    api.neutron.subnet_delete(IsA(http.HttpRequest), subnet.id)
    # Recorded re-population of the detail page tables after the delete.
    api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
        .AndReturn([self.subnets.first()])
    api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
        .AndReturn([self.ports.first()])
    self.mox.ReplayAll()

    # Table-action POST payload: "<table>__<action>__<row id>".
    form_data = {'action': 'subnets__delete__%s' % subnet.id}
    url = reverse('horizon:admin:networks:detail',
                  args=[network_id])
    res = self.client.post(url, form_data)

    self.assertRedirectsNoFollow(res, url)
@test.create_stubs({api.neutron: ('subnet_delete',
                                  'subnet_list',
                                  'port_list',)})
def test_subnet_delete_exception(self):
    # A neutron error during delete is handled gracefully: the view
    # still redirects back to the network detail page.
    subnet = self.subnets.first()
    network_id = subnet.network_id
    api.neutron.subnet_delete(IsA(http.HttpRequest), subnet.id)\
        .AndRaise(self.exceptions.neutron)
    # The detail page is still re-rendered after the failed delete.
    api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
        .AndReturn([self.subnets.first()])
    api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
        .AndReturn([self.ports.first()])
    self.mox.ReplayAll()

    form_data = {'action': 'subnets__delete__%s' % subnet.id}
    url = reverse('horizon:admin:networks:detail',
                  args=[network_id])
    res = self.client.post(url, form_data)

    self.assertRedirectsNoFollow(res, url)
class NetworkPortTests(test.BaseAdminViewTests):
    # Admin-panel tests for the network port detail/create/update/delete
    # views.  All tests follow the mox record/replay pattern: every
    # api.neutron call is recorded (in order) before ReplayAll(), then the
    # view is exercised through the Django test client.

    @test.create_stubs({api.neutron: ('port_get',)})
    def test_port_detail(self):
        port = self.ports.first()
        api.neutron.port_get(IsA(http.HttpRequest), port.id)\
            .AndReturn(self.ports.first())
        self.mox.ReplayAll()

        res = self.client.get(reverse('horizon:admin:networks:ports:detail',
                                      args=[port.id]))

        # The admin detail view reuses the project-panel template.
        self.assertTemplateUsed(res, 'project/networks/ports/detail.html')
        self.assertEqual(res.context['port'].id, port.id)

    @test.create_stubs({api.neutron: ('port_get',)})
    def test_port_detail_exception(self):
        port = self.ports.first()
        api.neutron.port_get(IsA(http.HttpRequest), port.id)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()

        res = self.client.get(reverse('horizon:admin:networks:ports:detail',
                                      args=[port.id]))

        # admin DetailView is shared with userpanel one, so
        # redirection URL on error is userpanel index.
        redir_url = reverse('horizon:project:networks:index')
        self.assertRedirectsNoFollow(res, redir_url)

    @test.create_stubs({api.neutron: ('network_get',)})
    def test_port_create_get(self):
        network = self.networks.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        self.mox.ReplayAll()

        url = reverse('horizon:admin:networks:addport',
                      args=[network.id])
        res = self.client.get(url)

        self.assertTemplateUsed(res, 'admin/networks/ports/create.html')

    @test.create_stubs({api.neutron: ('network_get',
                                      'port_create')})
    def test_port_create_post(self):
        network = self.networks.first()
        port = self.ports.first()
        # network_get is recorded twice: once for form init, once on POST.
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.port_create(IsA(http.HttpRequest),
                                tenant_id=network.tenant_id,
                                network_id=network.id,
                                name=port.name,
                                admin_state_up=port.admin_state_up,
                                device_id=port.device_id,
                                device_owner=port.device_owner)\
            .AndReturn(port)
        self.mox.ReplayAll()

        form_data = {'network_id': port.network_id,
                     'network_name': network.name,
                     'name': port.name,
                     'admin_state': port.admin_state_up,
                     'device_id': port.device_id,
                     'device_owner': port.device_owner}
        url = reverse('horizon:admin:networks:addport',
                      args=[port.network_id])
        res = self.client.post(url, form_data)

        self.assertNoFormErrors(res)
        redir_url = reverse('horizon:admin:networks:detail',
                            args=[port.network_id])
        self.assertRedirectsNoFollow(res, redir_url)

    @test.create_stubs({api.neutron: ('network_get',
                                      'port_create')})
    def test_port_create_post_exception(self):
        # A neutron failure in port_create is reported via messages, not a
        # form error, and still redirects to the network detail page.
        network = self.networks.first()
        port = self.ports.first()
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.network_get(IsA(http.HttpRequest),
                                network.id)\
            .AndReturn(self.networks.first())
        api.neutron.port_create(IsA(http.HttpRequest),
                                tenant_id=network.tenant_id,
                                network_id=network.id,
                                name=port.name,
                                admin_state_up=port.admin_state_up,
                                device_id=port.device_id,
                                device_owner=port.device_owner)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()

        form_data = {'network_id': port.network_id,
                     'network_name': network.name,
                     'name': port.name,
                     'admin_state': port.admin_state_up,
                     'device_id': port.device_id,
                     'device_owner': port.device_owner}
        url = reverse('horizon:admin:networks:addport',
                      args=[port.network_id])
        res = self.client.post(url, form_data)

        self.assertNoFormErrors(res)
        redir_url = reverse('horizon:admin:networks:detail',
                            args=[port.network_id])
        self.assertRedirectsNoFollow(res, redir_url)

    @test.create_stubs({api.neutron: ('port_get',)})
    def test_port_update_get(self):
        port = self.ports.first()
        api.neutron.port_get(IsA(http.HttpRequest),
                             port.id)\
            .AndReturn(port)
        self.mox.ReplayAll()

        url = reverse('horizon:admin:networks:editport',
                      args=[port.network_id, port.id])
        res = self.client.get(url)

        self.assertTemplateUsed(res, 'admin/networks/ports/update.html')

    @test.create_stubs({api.neutron: ('port_get',
                                      'port_modify')})
    def test_port_update_post(self):
        port = self.ports.first()
        api.neutron.port_get(IsA(http.HttpRequest), port.id)\
            .AndReturn(port)
        api.neutron.port_modify(IsA(http.HttpRequest), port.id,
                                name=port.name,
                                admin_state_up=port.admin_state_up,
                                device_id=port.device_id,
                                device_owner=port.device_owner)\
            .AndReturn(port)
        self.mox.ReplayAll()

        form_data = {'network_id': port.network_id,
                     'port_id': port.id,
                     'name': port.name,
                     'admin_state': port.admin_state_up,
                     'device_id': port.device_id,
                     'device_owner': port.device_owner}
        url = reverse('horizon:admin:networks:editport',
                      args=[port.network_id, port.id])
        res = self.client.post(url, form_data)

        redir_url = reverse('horizon:admin:networks:detail',
                            args=[port.network_id])
        self.assertRedirectsNoFollow(res, redir_url)

    @test.create_stubs({api.neutron: ('port_get',
                                      'port_modify')})
    def test_port_update_post_exception(self):
        # port_modify failure is handled and still redirects to detail.
        port = self.ports.first()
        api.neutron.port_get(IsA(http.HttpRequest), port.id)\
            .AndReturn(port)
        api.neutron.port_modify(IsA(http.HttpRequest), port.id,
                                name=port.name,
                                admin_state_up=port.admin_state_up,
                                device_id=port.device_id,
                                device_owner=port.device_owner)\
            .AndRaise(self.exceptions.neutron)
        self.mox.ReplayAll()

        form_data = {'network_id': port.network_id,
                     'port_id': port.id,
                     'name': port.name,
                     'admin_state': port.admin_state_up,
                     'device_id': port.device_id,
                     'device_owner': port.device_owner}
        url = reverse('horizon:admin:networks:editport',
                      args=[port.network_id, port.id])
        res = self.client.post(url, form_data)

        redir_url = reverse('horizon:admin:networks:detail',
                            args=[port.network_id])
        self.assertRedirectsNoFollow(res, redir_url)

    @test.create_stubs({api.neutron: ('port_delete',
                                      'subnet_list',
                                      'port_list',)})
    def test_port_delete(self):
        port = self.ports.first()
        network_id = port.network_id
        api.neutron.port_delete(IsA(http.HttpRequest), port.id)
        # Recorded re-population of the detail page tables after delete.
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.subnets.first()])
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.ports.first()])
        self.mox.ReplayAll()

        # Table-action POST payload: "<table>__<action>__<row id>".
        form_data = {'action': 'ports__delete__%s' % port.id}
        url = reverse('horizon:admin:networks:detail',
                      args=[network_id])
        res = self.client.post(url, form_data)

        self.assertRedirectsNoFollow(res, url)

    @test.create_stubs({api.neutron: ('port_delete',
                                      'subnet_list',
                                      'port_list',)})
    def test_port_delete_exception(self):
        # A neutron error during port delete is handled gracefully.
        port = self.ports.first()
        network_id = port.network_id
        api.neutron.port_delete(IsA(http.HttpRequest), port.id)\
            .AndRaise(self.exceptions.neutron)
        api.neutron.subnet_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.subnets.first()])
        api.neutron.port_list(IsA(http.HttpRequest), network_id=network_id)\
            .AndReturn([self.ports.first()])
        self.mox.ReplayAll()

        form_data = {'action': 'ports__delete__%s' % port.id}
        url = reverse('horizon:admin:networks:detail',
                      args=[network_id])
        res = self.client.post(url, form_data)

        self.assertRedirectsNoFollow(res, url)
|
|
"""Support for an exposed aREST RESTful API of a device."""
from datetime import timedelta
import logging
import requests
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_MONITORED_VARIABLES,
CONF_NAME,
CONF_RESOURCE,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
HTTP_OK,
)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)

# Minimum interval between device polls; enforced by the @Throttle
# decorator on ArestData.update below.
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)

CONF_FUNCTIONS = "functions"
CONF_PINS = "pins"

DEFAULT_NAME = "aREST sensor"

# Per-entry options shared by both "pins" and "monitored_variables":
# an optional friendly name, unit, and value template.
PIN_VARIABLE_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
        vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
    }
)

# Platform configuration: a required device URL plus optional maps of
# pins and exported variables to monitor (both keyed by string).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_RESOURCE): cv.url,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_PINS, default={}): vol.Schema(
            {cv.string: PIN_VARIABLE_SCHEMA}
        ),
        vol.Optional(CONF_MONITORED_VARIABLES, default={}): vol.Schema(
            {cv.string: PIN_VARIABLE_SCHEMA}
        ),
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the aREST sensor.

    Probes the device once to discover its exported variables, then
    creates one ArestSensor per monitored variable and per configured
    pin.  Returns False if the device cannot be reached.
    """
    resource = config[CONF_RESOURCE]
    monitored_variables = config[CONF_MONITORED_VARIABLES]
    pin_config = config[CONF_PINS]

    try:
        response = requests.get(resource, timeout=10).json()
    except requests.exceptions.MissingSchema:
        _LOGGER.error(
            "Missing resource or schema in configuration. Add http:// to your URL"
        )
        return False
    except requests.exceptions.ConnectionError:
        _LOGGER.error("No route to device at %s", resource)
        return False

    # Shared poller for all variable-based sensors (pins get their own).
    arest = ArestData(resource)

    def make_renderer(value_template):
        """Create a renderer based on variable_template value."""
        if value_template is None:
            return lambda value: value

        value_template.hass = hass

        def _render(value):
            try:
                return value_template.async_render(
                    {"value": value}, parse_result=False
                )
            except TemplateError:
                # Fall back to the raw value on a bad template.
                _LOGGER.exception("Error parsing value")
                return value

        return _render

    entities = []

    if monitored_variables:
        for variable, var_data in monitored_variables.items():
            if variable not in response["variables"]:
                _LOGGER.error("Variable: %s does not exist", variable)
                continue
            entities.append(
                ArestSensor(
                    arest,
                    resource,
                    config.get(CONF_NAME, response[CONF_NAME]),
                    var_data.get(CONF_NAME, variable),
                    variable=variable,
                    unit_of_measurement=var_data.get(CONF_UNIT_OF_MEASUREMENT),
                    renderer=make_renderer(var_data.get(CONF_VALUE_TEMPLATE)),
                )
            )

    if pin_config:
        for pin_number, pin_data in pin_config.items():
            entities.append(
                ArestSensor(
                    ArestData(resource, pin_number),
                    resource,
                    config.get(CONF_NAME, response[CONF_NAME]),
                    pin_data.get(CONF_NAME),
                    pin=pin_number,
                    unit_of_measurement=pin_data.get(CONF_UNIT_OF_MEASUREMENT),
                    renderer=make_renderer(pin_data.get(CONF_VALUE_TEMPLATE)),
                )
            )

    add_entities(entities, True)
class ArestSensor(Entity):
    """Sensor entity exposing one aREST variable or pin reading."""

    def __init__(
        self,
        arest,
        resource,
        location,
        name,
        variable=None,
        pin=None,
        unit_of_measurement=None,
        renderer=None,
    ):
        """Initialize the sensor."""
        self.arest = arest
        self._resource = resource
        self._name = f"{location.title()} {name.title()}"
        self._variable = variable
        self._pin = pin
        self._state = None
        self._unit_of_measurement = unit_of_measurement
        self._renderer = renderer

        if self._pin is not None:
            # Put the pin into input ("i") mode before the first read.
            response = requests.get(
                f"{self._resource}/mode/{self._pin}/i", timeout=10
            )
            if response.status_code != HTTP_OK:
                _LOGGER.error("Can't set mode of %s", self._resource)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the state of the sensor."""
        data = self.arest.data
        if "error" in data:
            return data["error"]
        # Pin readings arrive under "value"; variables under their name.
        raw = data.get("value", data.get(self._variable, None))
        return self._renderer(raw)

    def update(self):
        """Get the latest data from aREST API."""
        self.arest.update()

    @property
    def available(self):
        """Could the device be accessed during the last update call."""
        return self.arest.available
class ArestData:
    """The Class for handling the data retrieval for variables.

    Polls either the device's exported variables (pin is None) or a
    single analog/digital pin, storing the result in ``self.data`` and
    the reachability flag in ``self.available``.
    """

    def __init__(self, resource, pin=None):
        """Initialize the data object.

        Keyword arguments:
        resource -- base URL of the aREST device
        pin -- optional pin identifier; None polls exported variables
        """
        self._resource = resource
        self._pin = pin
        self.data = {}
        self.available = True

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from aREST device."""
        try:
            if self._pin is None:
                response = requests.get(self._resource, timeout=10)
                self.data = response.json()["variables"]
            else:
                # Normalize so both int pins (previously routed to the
                # digital branch via a TypeError on subscripting) and
                # string pins are handled uniformly.  Bug fix: a string
                # pin not starting with "A" (e.g. "14") previously
                # matched neither branch, so the pin was silently never
                # polled and self.data stayed stale.
                pin = str(self._pin)
                if pin.startswith("A"):
                    # Analog pins are addressed without their "A" prefix.
                    response = requests.get(
                        f"{self._resource}/analog/{pin[1:]}", timeout=10
                    )
                else:
                    response = requests.get(
                        f"{self._resource}/digital/{pin}", timeout=10
                    )
                self.data = {"value": response.json()["return_value"]}
            self.available = True
        except requests.exceptions.ConnectionError:
            _LOGGER.error("No route to device %s", self._resource)
            self.available = False
|
|
try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
import processout
import json
from processout.networking.request import Request
from processout.networking.response import Response
# The content of this file was automatically generated
class Transaction(object):
def __init__(self, client, prefill=None):
    """Initialize the Transaction object.

    Keyword arguments:
    client -- ProcessOut client instance used by nested objects
    prefill -- optional dict of attribute values loaded via
               fill_with_data
    """
    self._client = client
    # All attributes default to None and are populated either from
    # prefill below or through the generated property setters.
    self._id = None
    self._project = None
    self._project_id = None
    self._invoice = None
    self._invoice_id = None
    self._customer = None
    self._customer_id = None
    self._subscription = None
    self._subscription_id = None
    self._token = None
    self._token_id = None
    self._card = None
    self._card_id = None
    self._gateway_configuration = None
    self._gateway_configuration_id = None
    self._operations = None
    self._refunds = None
    self._name = None
    self._amount = None
    self._amount_local = None
    self._authorized_amount = None
    self._authorized_amount_local = None
    self._captured_amount = None
    self._captured_amount_local = None
    self._refunded_amount = None
    self._refunded_amount_local = None
    self._available_amount = None
    self._available_amount_local = None
    self._currency = None
    self._error_code = None
    self._error_message = None
    self._gateway_name = None
    self._three_d_s_status = None
    self._status = None
    self._authorized = None
    self._captured = None
    self._voided = None
    self._refunded = None
    self._chargedback = None
    self._received_fraud_notification = None
    self._received_retrieval_request = None
    self._processout_fee = None
    self._estimated_fee = None
    self._gateway_fee = None
    self._gateway_fee_local = None
    self._currency_fee = None
    self._metadata = None
    self._sandbox = None
    self._created_at = None
    self._chargedback_at = None
    self._refunded_at = None
    self._three_d_s = None
    self._cvc_check = None
    self._avs_check = None
    if prefill is not None:
        self.fill_with_data(prefill)
# -- Auto-generated accessors: linked resources and their ids. --
# Object-valued setters accept either an instance or a raw dict; dicts
# are coerced into the matching processout model.  Setters return self
# so calls can be chained.
@property
def id(self):
    """Get id"""
    return self._id

@id.setter
def id(self, val):
    """Set id
    Keyword argument:
    val -- New id value"""
    self._id = val
    return self

@property
def project(self):
    """Get project"""
    return self._project

@project.setter
def project(self, val):
    """Set project
    Keyword argument:
    val -- New project value"""
    if val is None:
        self._project = val
        return self

    if isinstance(val, dict):
        obj = processout.Project(self._client)
        obj.fill_with_data(val)
        self._project = obj
    else:
        self._project = val
    return self

@property
def project_id(self):
    """Get project_id"""
    return self._project_id

@project_id.setter
def project_id(self, val):
    """Set project_id
    Keyword argument:
    val -- New project_id value"""
    self._project_id = val
    return self

@property
def invoice(self):
    """Get invoice"""
    return self._invoice

@invoice.setter
def invoice(self, val):
    """Set invoice
    Keyword argument:
    val -- New invoice value"""
    if val is None:
        self._invoice = val
        return self

    if isinstance(val, dict):
        obj = processout.Invoice(self._client)
        obj.fill_with_data(val)
        self._invoice = obj
    else:
        self._invoice = val
    return self

@property
def invoice_id(self):
    """Get invoice_id"""
    return self._invoice_id

@invoice_id.setter
def invoice_id(self, val):
    """Set invoice_id
    Keyword argument:
    val -- New invoice_id value"""
    self._invoice_id = val
    return self

@property
def customer(self):
    """Get customer"""
    return self._customer

@customer.setter
def customer(self, val):
    """Set customer
    Keyword argument:
    val -- New customer value"""
    if val is None:
        self._customer = val
        return self

    if isinstance(val, dict):
        obj = processout.Customer(self._client)
        obj.fill_with_data(val)
        self._customer = obj
    else:
        self._customer = val
    return self

@property
def customer_id(self):
    """Get customer_id"""
    return self._customer_id

@customer_id.setter
def customer_id(self, val):
    """Set customer_id
    Keyword argument:
    val -- New customer_id value"""
    self._customer_id = val
    return self

@property
def subscription(self):
    """Get subscription"""
    return self._subscription

@subscription.setter
def subscription(self, val):
    """Set subscription
    Keyword argument:
    val -- New subscription value"""
    if val is None:
        self._subscription = val
        return self

    if isinstance(val, dict):
        obj = processout.Subscription(self._client)
        obj.fill_with_data(val)
        self._subscription = obj
    else:
        self._subscription = val
    return self

@property
def subscription_id(self):
    """Get subscription_id"""
    return self._subscription_id

@subscription_id.setter
def subscription_id(self, val):
    """Set subscription_id
    Keyword argument:
    val -- New subscription_id value"""
    self._subscription_id = val
    return self

@property
def token(self):
    """Get token"""
    return self._token

@token.setter
def token(self, val):
    """Set token
    Keyword argument:
    val -- New token value"""
    if val is None:
        self._token = val
        return self

    if isinstance(val, dict):
        obj = processout.Token(self._client)
        obj.fill_with_data(val)
        self._token = obj
    else:
        self._token = val
    return self

@property
def token_id(self):
    """Get token_id"""
    return self._token_id

@token_id.setter
def token_id(self, val):
    """Set token_id
    Keyword argument:
    val -- New token_id value"""
    self._token_id = val
    return self

@property
def card(self):
    """Get card"""
    return self._card

@card.setter
def card(self, val):
    """Set card
    Keyword argument:
    val -- New card value"""
    if val is None:
        self._card = val
        return self

    if isinstance(val, dict):
        obj = processout.Card(self._client)
        obj.fill_with_data(val)
        self._card = obj
    else:
        self._card = val
    return self

@property
def card_id(self):
    """Get card_id"""
    return self._card_id

@card_id.setter
def card_id(self, val):
    """Set card_id
    Keyword argument:
    val -- New card_id value"""
    self._card_id = val
    return self
# -- Auto-generated accessors: gateway configuration and list fields. --
# operations/refunds coerce lists of dicts into model instances; a None
# value resets the list to empty rather than storing None.
@property
def gateway_configuration(self):
    """Get gateway_configuration"""
    return self._gateway_configuration

@gateway_configuration.setter
def gateway_configuration(self, val):
    """Set gateway_configuration
    Keyword argument:
    val -- New gateway_configuration value"""
    if val is None:
        self._gateway_configuration = val
        return self

    if isinstance(val, dict):
        obj = processout.GatewayConfiguration(self._client)
        obj.fill_with_data(val)
        self._gateway_configuration = obj
    else:
        self._gateway_configuration = val
    return self

@property
def gateway_configuration_id(self):
    """Get gateway_configuration_id"""
    return self._gateway_configuration_id

@gateway_configuration_id.setter
def gateway_configuration_id(self, val):
    """Set gateway_configuration_id
    Keyword argument:
    val -- New gateway_configuration_id value"""
    self._gateway_configuration_id = val
    return self

@property
def operations(self):
    """Get operations"""
    return self._operations

@operations.setter
def operations(self, val):
    """Set operations
    Keyword argument:
    val -- New operations value"""
    if val is None:
        self._operations = []
        return self

    # If the first element is already a model instance, assume the
    # whole list is; otherwise coerce every element from a dict.
    if len(val) > 0 and isinstance(
            val[0], processout.TransactionOperation):
        self._operations = val
    else:
        l = []
        for v in val:
            obj = processout.TransactionOperation(self._client)
            obj.fill_with_data(v)
            l.append(obj)
        self._operations = l
    return self

@property
def refunds(self):
    """Get refunds"""
    return self._refunds

@refunds.setter
def refunds(self, val):
    """Set refunds
    Keyword argument:
    val -- New refunds value"""
    if val is None:
        self._refunds = []
        return self

    if len(val) > 0 and isinstance(val[0], processout.Refund):
        self._refunds = val
    else:
        l = []
        for v in val:
            obj = processout.Refund(self._client)
            obj.fill_with_data(v)
            l.append(obj)
        self._refunds = l
    return self
# -- Auto-generated accessors: scalar fields (amounts, flags, status,
# fees, timestamps).  Setters store the value as-is and return self. --
@property
def name(self):
    """Get name"""
    return self._name

@name.setter
def name(self, val):
    """Set name
    Keyword argument:
    val -- New name value"""
    self._name = val
    return self

@property
def amount(self):
    """Get amount"""
    return self._amount

@amount.setter
def amount(self, val):
    """Set amount
    Keyword argument:
    val -- New amount value"""
    self._amount = val
    return self

@property
def amount_local(self):
    """Get amount_local"""
    return self._amount_local

@amount_local.setter
def amount_local(self, val):
    """Set amount_local
    Keyword argument:
    val -- New amount_local value"""
    self._amount_local = val
    return self

@property
def authorized_amount(self):
    """Get authorized_amount"""
    return self._authorized_amount

@authorized_amount.setter
def authorized_amount(self, val):
    """Set authorized_amount
    Keyword argument:
    val -- New authorized_amount value"""
    self._authorized_amount = val
    return self

@property
def authorized_amount_local(self):
    """Get authorized_amount_local"""
    return self._authorized_amount_local

@authorized_amount_local.setter
def authorized_amount_local(self, val):
    """Set authorized_amount_local
    Keyword argument:
    val -- New authorized_amount_local value"""
    self._authorized_amount_local = val
    return self

@property
def captured_amount(self):
    """Get captured_amount"""
    return self._captured_amount

@captured_amount.setter
def captured_amount(self, val):
    """Set captured_amount
    Keyword argument:
    val -- New captured_amount value"""
    self._captured_amount = val
    return self

@property
def captured_amount_local(self):
    """Get captured_amount_local"""
    return self._captured_amount_local

@captured_amount_local.setter
def captured_amount_local(self, val):
    """Set captured_amount_local
    Keyword argument:
    val -- New captured_amount_local value"""
    self._captured_amount_local = val
    return self

@property
def refunded_amount(self):
    """Get refunded_amount"""
    return self._refunded_amount

@refunded_amount.setter
def refunded_amount(self, val):
    """Set refunded_amount
    Keyword argument:
    val -- New refunded_amount value"""
    self._refunded_amount = val
    return self

@property
def refunded_amount_local(self):
    """Get refunded_amount_local"""
    return self._refunded_amount_local

@refunded_amount_local.setter
def refunded_amount_local(self, val):
    """Set refunded_amount_local
    Keyword argument:
    val -- New refunded_amount_local value"""
    self._refunded_amount_local = val
    return self

@property
def available_amount(self):
    """Get available_amount"""
    return self._available_amount

@available_amount.setter
def available_amount(self, val):
    """Set available_amount
    Keyword argument:
    val -- New available_amount value"""
    self._available_amount = val
    return self

@property
def available_amount_local(self):
    """Get available_amount_local"""
    return self._available_amount_local

@available_amount_local.setter
def available_amount_local(self, val):
    """Set available_amount_local
    Keyword argument:
    val -- New available_amount_local value"""
    self._available_amount_local = val
    return self

@property
def currency(self):
    """Get currency"""
    return self._currency

@currency.setter
def currency(self, val):
    """Set currency
    Keyword argument:
    val -- New currency value"""
    self._currency = val
    return self

@property
def error_code(self):
    """Get error_code"""
    return self._error_code

@error_code.setter
def error_code(self, val):
    """Set error_code
    Keyword argument:
    val -- New error_code value"""
    self._error_code = val
    return self

@property
def error_message(self):
    """Get error_message"""
    return self._error_message

@error_message.setter
def error_message(self, val):
    """Set error_message
    Keyword argument:
    val -- New error_message value"""
    self._error_message = val
    return self

@property
def gateway_name(self):
    """Get gateway_name"""
    return self._gateway_name

@gateway_name.setter
def gateway_name(self, val):
    """Set gateway_name
    Keyword argument:
    val -- New gateway_name value"""
    self._gateway_name = val
    return self

@property
def three_d_s_status(self):
    """Get three_d_s_status"""
    return self._three_d_s_status

@three_d_s_status.setter
def three_d_s_status(self, val):
    """Set three_d_s_status
    Keyword argument:
    val -- New three_d_s_status value"""
    self._three_d_s_status = val
    return self

@property
def status(self):
    """Get status"""
    return self._status

@status.setter
def status(self, val):
    """Set status
    Keyword argument:
    val -- New status value"""
    self._status = val
    return self

@property
def authorized(self):
    """Get authorized"""
    return self._authorized

@authorized.setter
def authorized(self, val):
    """Set authorized
    Keyword argument:
    val -- New authorized value"""
    self._authorized = val
    return self

@property
def captured(self):
    """Get captured"""
    return self._captured

@captured.setter
def captured(self, val):
    """Set captured
    Keyword argument:
    val -- New captured value"""
    self._captured = val
    return self

@property
def voided(self):
    """Get voided"""
    return self._voided

@voided.setter
def voided(self, val):
    """Set voided
    Keyword argument:
    val -- New voided value"""
    self._voided = val
    return self

@property
def refunded(self):
    """Get refunded"""
    return self._refunded

@refunded.setter
def refunded(self, val):
    """Set refunded
    Keyword argument:
    val -- New refunded value"""
    self._refunded = val
    return self

@property
def chargedback(self):
    """Get chargedback"""
    return self._chargedback

@chargedback.setter
def chargedback(self, val):
    """Set chargedback
    Keyword argument:
    val -- New chargedback value"""
    self._chargedback = val
    return self

@property
def received_fraud_notification(self):
    """Get received_fraud_notification"""
    return self._received_fraud_notification

@received_fraud_notification.setter
def received_fraud_notification(self, val):
    """Set received_fraud_notification
    Keyword argument:
    val -- New received_fraud_notification value"""
    self._received_fraud_notification = val
    return self

@property
def received_retrieval_request(self):
    """Get received_retrieval_request"""
    return self._received_retrieval_request

@received_retrieval_request.setter
def received_retrieval_request(self, val):
    """Set received_retrieval_request
    Keyword argument:
    val -- New received_retrieval_request value"""
    self._received_retrieval_request = val
    return self

@property
def processout_fee(self):
    """Get processout_fee"""
    return self._processout_fee

@processout_fee.setter
def processout_fee(self, val):
    """Set processout_fee
    Keyword argument:
    val -- New processout_fee value"""
    self._processout_fee = val
    return self

@property
def estimated_fee(self):
    """Get estimated_fee"""
    return self._estimated_fee

@estimated_fee.setter
def estimated_fee(self, val):
    """Set estimated_fee
    Keyword argument:
    val -- New estimated_fee value"""
    self._estimated_fee = val
    return self

@property
def gateway_fee(self):
    """Get gateway_fee"""
    return self._gateway_fee

@gateway_fee.setter
def gateway_fee(self, val):
    """Set gateway_fee
    Keyword argument:
    val -- New gateway_fee value"""
    self._gateway_fee = val
    return self

@property
def gateway_fee_local(self):
    """Get gateway_fee_local"""
    return self._gateway_fee_local

@gateway_fee_local.setter
def gateway_fee_local(self, val):
    """Set gateway_fee_local
    Keyword argument:
    val -- New gateway_fee_local value"""
    self._gateway_fee_local = val
    return self

@property
def currency_fee(self):
    """Get currency_fee"""
    return self._currency_fee

@currency_fee.setter
def currency_fee(self, val):
    """Set currency_fee
    Keyword argument:
    val -- New currency_fee value"""
    self._currency_fee = val
    return self

@property
def metadata(self):
    """Get metadata"""
    return self._metadata

@metadata.setter
def metadata(self, val):
    """Set metadata
    Keyword argument:
    val -- New metadata value"""
    self._metadata = val
    return self

@property
def sandbox(self):
    """Get sandbox"""
    return self._sandbox

@sandbox.setter
def sandbox(self, val):
    """Set sandbox
    Keyword argument:
    val -- New sandbox value"""
    self._sandbox = val
    return self

@property
def created_at(self):
    """Get created_at"""
    return self._created_at

@created_at.setter
def created_at(self, val):
    """Set created_at
    Keyword argument:
    val -- New created_at value"""
    self._created_at = val
    return self

@property
def chargedback_at(self):
    """Get chargedback_at"""
    return self._chargedback_at

@chargedback_at.setter
def chargedback_at(self, val):
    """Set chargedback_at
    Keyword argument:
    val -- New chargedback_at value"""
    self._chargedback_at = val
    return self

@property
def refunded_at(self):
    """Get refunded_at"""
    return self._refunded_at

@refunded_at.setter
def refunded_at(self, val):
    """Set refunded_at
    Keyword argument:
    val -- New refunded_at value"""
    self._refunded_at = val
    return self
# -- Auto-generated accessors: 3-D Secure object and card checks. --
@property
def three_d_s(self):
    """Get three_d_s"""
    return self._three_d_s

@three_d_s.setter
def three_d_s(self, val):
    """Set three_d_s
    Keyword argument:
    val -- New three_d_s value"""
    if val is None:
        self._three_d_s = val
        return self

    if isinstance(val, dict):
        obj = processout.ThreeDS(self._client)
        obj.fill_with_data(val)
        self._three_d_s = obj
    else:
        self._three_d_s = val
    return self

@property
def cvc_check(self):
    """Get cvc_check"""
    return self._cvc_check

@cvc_check.setter
def cvc_check(self, val):
    """Set cvc_check
    Keyword argument:
    val -- New cvc_check value"""
    self._cvc_check = val
    return self

@property
def avs_check(self):
    """Get avs_check"""
    return self._avs_check

@avs_check.setter
def avs_check(self, val):
    """Set avs_check
    Keyword argument:
    val -- New avs_check value"""
    self._avs_check = val
    return self
def fill_with_data(self, data):
"""Fill the current object with the new values pulled from data
Keyword argument:
data -- The data from which to pull the new values"""
if "id" in data.keys():
self.id = data["id"]
if "project" in data.keys():
self.project = data["project"]
if "project_id" in data.keys():
self.project_id = data["project_id"]
if "invoice" in data.keys():
self.invoice = data["invoice"]
if "invoice_id" in data.keys():
self.invoice_id = data["invoice_id"]
if "customer" in data.keys():
self.customer = data["customer"]
if "customer_id" in data.keys():
self.customer_id = data["customer_id"]
if "subscription" in data.keys():
self.subscription = data["subscription"]
if "subscription_id" in data.keys():
self.subscription_id = data["subscription_id"]
if "token" in data.keys():
self.token = data["token"]
if "token_id" in data.keys():
self.token_id = data["token_id"]
if "card" in data.keys():
self.card = data["card"]
if "card_id" in data.keys():
self.card_id = data["card_id"]
if "gateway_configuration" in data.keys():
self.gateway_configuration = data["gateway_configuration"]
if "gateway_configuration_id" in data.keys():
self.gateway_configuration_id = data["gateway_configuration_id"]
if "operations" in data.keys():
self.operations = data["operations"]
if "refunds" in data.keys():
self.refunds = data["refunds"]
if "name" in data.keys():
self.name = data["name"]
if "amount" in data.keys():
self.amount = data["amount"]
if "amount_local" in data.keys():
self.amount_local = data["amount_local"]
if "authorized_amount" in data.keys():
self.authorized_amount = data["authorized_amount"]
if "authorized_amount_local" in data.keys():
self.authorized_amount_local = data["authorized_amount_local"]
if "captured_amount" in data.keys():
self.captured_amount = data["captured_amount"]
if "captured_amount_local" in data.keys():
self.captured_amount_local = data["captured_amount_local"]
if "refunded_amount" in data.keys():
self.refunded_amount = data["refunded_amount"]
if "refunded_amount_local" in data.keys():
self.refunded_amount_local = data["refunded_amount_local"]
if "available_amount" in data.keys():
self.available_amount = data["available_amount"]
if "available_amount_local" in data.keys():
self.available_amount_local = data["available_amount_local"]
if "currency" in data.keys():
self.currency = data["currency"]
if "error_code" in data.keys():
self.error_code = data["error_code"]
if "error_message" in data.keys():
self.error_message = data["error_message"]
if "gateway_name" in data.keys():
self.gateway_name = data["gateway_name"]
if "three_d_s_status" in data.keys():
self.three_d_s_status = data["three_d_s_status"]
if "status" in data.keys():
self.status = data["status"]
if "authorized" in data.keys():
self.authorized = data["authorized"]
if "captured" in data.keys():
self.captured = data["captured"]
if "voided" in data.keys():
self.voided = data["voided"]
if "refunded" in data.keys():
self.refunded = data["refunded"]
if "chargedback" in data.keys():
self.chargedback = data["chargedback"]
if "received_fraud_notification" in data.keys():
self.received_fraud_notification = data["received_fraud_notification"]
if "received_retrieval_request" in data.keys():
self.received_retrieval_request = data["received_retrieval_request"]
if "processout_fee" in data.keys():
self.processout_fee = data["processout_fee"]
if "estimated_fee" in data.keys():
self.estimated_fee = data["estimated_fee"]
if "gateway_fee" in data.keys():
self.gateway_fee = data["gateway_fee"]
if "gateway_fee_local" in data.keys():
self.gateway_fee_local = data["gateway_fee_local"]
if "currency_fee" in data.keys():
self.currency_fee = data["currency_fee"]
if "metadata" in data.keys():
self.metadata = data["metadata"]
if "sandbox" in data.keys():
self.sandbox = data["sandbox"]
if "created_at" in data.keys():
self.created_at = data["created_at"]
if "chargedback_at" in data.keys():
self.chargedback_at = data["chargedback_at"]
if "refunded_at" in data.keys():
self.refunded_at = data["refunded_at"]
if "three_d_s" in data.keys():
self.three_d_s = data["three_d_s"]
if "cvc_check" in data.keys():
self.cvc_check = data["cvc_check"]
if "avs_check" in data.keys():
self.avs_check = data["avs_check"]
return self
def to_json(self):
return {
"id": self.id,
"project": self.project,
"project_id": self.project_id,
"invoice": self.invoice,
"invoice_id": self.invoice_id,
"customer": self.customer,
"customer_id": self.customer_id,
"subscription": self.subscription,
"subscription_id": self.subscription_id,
"token": self.token,
"token_id": self.token_id,
"card": self.card,
"card_id": self.card_id,
"gateway_configuration": self.gateway_configuration,
"gateway_configuration_id": self.gateway_configuration_id,
"operations": self.operations,
"refunds": self.refunds,
"name": self.name,
"amount": self.amount,
"amount_local": self.amount_local,
"authorized_amount": self.authorized_amount,
"authorized_amount_local": self.authorized_amount_local,
"captured_amount": self.captured_amount,
"captured_amount_local": self.captured_amount_local,
"refunded_amount": self.refunded_amount,
"refunded_amount_local": self.refunded_amount_local,
"available_amount": self.available_amount,
"available_amount_local": self.available_amount_local,
"currency": self.currency,
"error_code": self.error_code,
"error_message": self.error_message,
"gateway_name": self.gateway_name,
"three_d_s_status": self.three_d_s_status,
"status": self.status,
"authorized": self.authorized,
"captured": self.captured,
"voided": self.voided,
"refunded": self.refunded,
"chargedback": self.chargedback,
"received_fraud_notification": self.received_fraud_notification,
"received_retrieval_request": self.received_retrieval_request,
"processout_fee": self.processout_fee,
"estimated_fee": self.estimated_fee,
"gateway_fee": self.gateway_fee,
"gateway_fee_local": self.gateway_fee_local,
"currency_fee": self.currency_fee,
"metadata": self.metadata,
"sandbox": self.sandbox,
"created_at": self.created_at,
"chargedback_at": self.chargedback_at,
"refunded_at": self.refunded_at,
"three_d_s": self.three_d_s,
"cvc_check": self.cvc_check,
"avs_check": self.avs_check,
}
def fetch_refunds(self, options={}):
"""Get the transaction's refunds.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/transactions/" + quote_plus(self.id) + "/refunds"
data = {
}
response = Response(request.get(path, data, options))
return_values = []
a = []
body = response.body
for v in body['refunds']:
tmp = processout.Refund(self._client)
tmp.fill_with_data(v)
a.append(tmp)
return_values.append(a)
return return_values[0]
def find_refund(self, refund_id, options={}):
"""Find a transaction's refund by its ID.
Keyword argument:
refund_id -- ID of the refund
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/transactions/" + \
quote_plus(self.id) + "/refunds/" + quote_plus(refund_id) + ""
data = {
}
response = Response(request.get(path, data, options))
return_values = []
body = response.body
body = body["refund"]
refund = processout.Refund(self._client)
return_values.append(refund.fill_with_data(body))
return return_values[0]
def all(self, options={}):
"""Get all the transactions.
Keyword argument:
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/transactions"
data = {
}
response = Response(request.get(path, data, options))
return_values = []
a = []
body = response.body
for v in body['transactions']:
tmp = processout.Transaction(self._client)
tmp.fill_with_data(v)
a.append(tmp)
return_values.append(a)
return return_values[0]
def find(self, transaction_id, options={}):
"""Find a transaction by its ID.
Keyword argument:
transaction_id -- ID of the transaction
options -- Options for the request"""
self.fill_with_data(options)
request = Request(self._client)
path = "/transactions/" + quote_plus(transaction_id) + ""
data = {
}
response = Response(request.get(path, data, options))
return_values = []
body = response.body
body = body["transaction"]
obj = processout.Transaction(self._client)
return_values.append(obj.fill_with_data(body))
return return_values[0]
|
|
# coding=utf-8
# Copyright 2022 The Balloon Learning Environment Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Standard atmosphere computations.
These methods compute:
- Temperature [K]
- Pressure [Pa]
- Density [kg/m^3]
- Water vapor pressure [Pa]
- Water vapor density [kg/m^3]
- Radio refractivity index [.]
given geopotential height [m].
Temperature, pressure, and density based on U.S. Standard Atmosphere 1976
described at:
http://en.wikipedia.org/wiki/US_Standard_Atmosphere
http://www.digitaldutch.com/atmoscalc/table.htm
Water vapor pressure and density based on ITU-R P.835-5: "Reference Standard
Atmospheres" available here:
http://www.itu.int/rec/R-REC-P.835-5-201202-I
Global radio refractivity index based on ITU-R P.453-10: "The radio
refractive index: its formula and refractivity data" available here:
http://www.itu.int/rec/R-REC-P.453-10-201202-I
"""
# TODO(joshgreavs): Maybe move this to balloon_learning_environment/env.
import dataclasses
from balloon_learning_environment.utils import constants
from balloon_learning_environment.utils import units
import jax
import jax.numpy as jnp
import numpy as np
@dataclasses.dataclass
class AtmosphericValues(object):
  """Atmospheric state at one geopotential height.

  Returned by Atmosphere.at_height / Atmosphere.at_pressure.
  """
  height: units.Distance
  temperature: float  # In Kelvin
  pressure: float  # In Pascals
  density: float  # In kg/m^3
class Atmosphere:
  """Atmospheric conditions for a variety of geographical locations.
  This is built on top of standard atmosphere, but includes lapse rates
  to simulate a broader range of atmospheric conditions.
  """
  # Geopotential heights [m] bounding each atmospheric layer; the i-th layer
  # spans [_HEIGHT_TRANSITIONS[i], _HEIGHT_TRANSITIONS[i + 1]).
  _HEIGHT_TRANSITIONS = np.array(
      [-610.0, 17000.0, 21000.0, 32000.0, 47000.0, 51000.0, 71000.0, 85000.0])
  # Two bracketing sets of per-layer lapse rates [K/m] (one entry per layer);
  # reset() samples a convex combination of the two.
  _LAPSE_RATES_LOW = np.array(
      [-0.007, 0.006, 0.001, 0.0028, 0.0, -0.0028, -0.002])
  _LAPSE_RATES_HIGH = np.array(
      [-0.0058, 0.005, 0.001, 0.0028, 0.0, -0.0028, -0.002])
  def __init__(self, key: jnp.ndarray):
    # All sampling happens in reset(), so an Atmosphere can be re-sampled.
    self.reset(key)
  def reset(self, key: jnp.ndarray) -> None:
    """Resets and samples a new atmosphere.
    Args:
      key: A PRNG key to use for sampling a new atmosphere.
    """
    # alpha in [0, 1) blends the low and high lapse-rate profiles.
    alpha = jax.random.uniform(key).item()
    self._lapse_rates = ((1 - alpha) * self._LAPSE_RATES_LOW +
                         alpha * self._LAPSE_RATES_HIGH)
    # Order matters: the pressure transitions read the temperature ones.
    self._initialize_temperature_transitions()
    self._initialize_pressure_transitions()
  def at_height(self, height: units.Distance) -> AtmosphericValues:
    """Computes atmosphere values at a specific height."""
    # We "unwrap" height into meters for legibility in the function.
    height = height.meters
    # Check that height is within expected range, specified by first and last
    # height transitions.
    assert height >= self._HEIGHT_TRANSITIONS[0]
    assert height < self._HEIGHT_TRANSITIONS[-1]
    # Compute standard atmosphere temperature and pressure given height.
    temperature = 0.0
    pressure = 0.0
    for i in range(len(self._lapse_rates)):
      # Check if height is within this range.
      if height < self._HEIGHT_TRANSITIONS[i + 1]:
        # Propagate temperature linearly from the layer's base value.
        temperature = self._temperature_transitions[i] + self._lapse_rates[i] * (
            height - self._HEIGHT_TRANSITIONS[i])
        # Propagate pressure using the profile matching this layer's shape.
        if self._lapse_rates[i] == 0.0:
          # Isothermal layer: exponential pressure decay with height.
          pressure = self._pressure_for_constant_temperature(
              height - self._HEIGHT_TRANSITIONS[i], temperature,
              self._pressure_transitions[i])
        else:
          pressure = self._pressure_for_linear_temperature(
              temperature / self._temperature_transitions[i],
              self._lapse_rates[i], self._pressure_transitions[i])
        break
    # Ideal gas law: rho = p / (R_specific * T).
    density = pressure / (constants.DRY_AIR_SPECIFIC_GAS_CONSTANT * temperature)
    return AtmosphericValues(
        units.Distance(meters=height), temperature, pressure, density)
  def at_pressure(self, pressure: float) -> AtmosphericValues:
    """Computes atmosphere values at a specific pressure."""
    # Check that pressure is within expected range, specified by last and first
    # pressure transitions (pressure decreases monotonically with height).
    assert pressure > self._pressure_transitions[-1]
    assert pressure <= self._pressure_transitions[0]
    # Compute standard atmosphere temperature and height given pressure.
    temperature = 0.0
    height = 0.0
    for i in range(len(self._lapse_rates)):
      # Check if pressure is within this range.
      if pressure > self._pressure_transitions[i + 1]:
        # Compute height by inverting this layer's pressure profile
        # (the inverse of the two _pressure_for_* helpers below).
        if self._lapse_rates[i] == 0.0:
          height = ((-constants.DRY_AIR_SPECIFIC_GAS_CONSTANT *
                     self._temperature_transitions[i] / constants.GRAVITY) *
                    np.log(pressure / self._pressure_transitions[i]) +
                    self._HEIGHT_TRANSITIONS[i])
        else:
          height = (((pressure / self._pressure_transitions[i])**
                     (-constants.DRY_AIR_SPECIFIC_GAS_CONSTANT *
                      self._lapse_rates[i] / constants.GRAVITY) - 1) *
                    self._temperature_transitions[i] / self._lapse_rates[i] +
                    self._HEIGHT_TRANSITIONS[i])
        # Propagate temperature.
        temperature = self._temperature_transitions[i] + self._lapse_rates[i] * (
            height - self._HEIGHT_TRANSITIONS[i])
        break
    # Ideal gas law: rho = p / (R_specific * T).
    density = pressure / (constants.DRY_AIR_SPECIFIC_GAS_CONSTANT * temperature)
    return AtmosphericValues(
        units.Distance(meters=height), temperature, pressure, density)
  def _initialize_temperature_transitions(self) -> None:
    """Precomputes the temperature at the base of each layer boundary."""
    self._temperature_transitions = [300.0]  # Base temperature (in K).
    for i in range(len(self._lapse_rates)):
      # Each boundary temperature extends the previous one by this layer's
      # lapse rate over the layer's thickness.
      self._temperature_transitions.append(
          self._temperature_transitions[-1] + self._lapse_rates[i] *
          (self._HEIGHT_TRANSITIONS[i + 1] - self._HEIGHT_TRANSITIONS[i]))
    self._temperature_transitions = np.array(self._temperature_transitions)
  def _initialize_pressure_transitions(self) -> None:
    """Initializes pressure transitions."""
    # We need temperature transitions initialized.
    self._pressure_transitions = [108870.8213]  # Base pressure (in Pa).
    for i in range(len(self._lapse_rates)):
      if self._lapse_rates[i] == 0.0:
        # Use constant temperature equation.
        self._pressure_transitions.append(
            self._pressure_for_constant_temperature(
                self._HEIGHT_TRANSITIONS[i + 1] - self._HEIGHT_TRANSITIONS[i],
                self._temperature_transitions[i + 1],
                self._pressure_transitions[-1]))
      else:
        # Use linear temperature equation.
        self._pressure_transitions.append(
            self._pressure_for_linear_temperature(
                self._temperature_transitions[i + 1] /
                self._temperature_transitions[i], self._lapse_rates[i],
                self._pressure_transitions[-1]))
    self._pressure_transitions = np.array(self._pressure_transitions)
  @staticmethod
  def _pressure_for_constant_temperature(delta_height: float,
                                         temperature: float,
                                         pressure_init: float) -> float:
    """Compute pressure for regions of constant temperature.

    Implements p = p0 * exp(-g * dh / (R * T)).
    """
    return pressure_init * np.exp(
        -(constants.GRAVITY * delta_height) /
        (constants.DRY_AIR_SPECIFIC_GAS_CONSTANT * temperature))
  @staticmethod
  def _pressure_for_linear_temperature(temperature_ratio: float,
                                       lapse_rate: float,
                                       pressure_init: float) -> float:
    """Compute pressure for regions of linearly changing temperature.

    Implements p = p0 * (T / T0) ** (-g / (R * L)).
    """
    return pressure_init * (
        temperature_ratio
        **(-constants.GRAVITY /
           (constants.DRY_AIR_SPECIFIC_GAS_CONSTANT * lapse_rate)))
|
|
"""
vtk window short cuts:
- j: joystick (continuous) mode
- t: trackball mode
- c: camera move mode
- a: actor move mode
- left mouse: rotate x,y
- ctrl_left mouse: rotate z
- middle mouse: pan
- right mouse: zoom
- r: reset camera
- s/w: surface/wireframe
- u: command window
- e: exit
RETURNS
- window with animation of stl file, points, or surfaces. All of them can be combined
using the different geometry to actor functions following the examples given.
INPUT / OUTPUT FILES
-stl files
-point list in csv format columns are: sen_x, sen_y and sen_z
-surface list with 9 columns --> three points with 3 coordinates.
SIDE EFFECTS
- if no data file is given, the loaded geometry will not change color
"""
import math
import os
import matplotlib.cm as cm
import numpy as np
import pandas as pd
import vtk
import cea.config
import cea.inputlocator
# Module attribution metadata (CEA project convention).
__author__ = "Paul Neitzel"
__copyright__ = "Copyright 2016, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Paul Neitzel"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
def stl2actor(ageometry_path, ageometry_name, ageometry_color):
    """Read <ageometry_path>/<ageometry_name>.stl into a colored vtkActor.

    Returns (actor, polydata); polydata is the raw mesh before cleaning.
    NOTE(review): GetProducerPort/SetInput-style calls are the legacy
    VTK 5 pipeline API; this module appears to require an old VTK build
    -- confirm the pinned dependency before upgrading.
    """
    appendfilter = vtk.vtkAppendPolyData()
    render_lib = vtk.vtkSTLReader()
    polydata = vtk.vtkPolyData()
    render_lib.SetFileName(os.path.join(ageometry_path, ageometry_name + ".stl"))
    render_lib.Update()
    polydata.ShallowCopy(render_lib.GetOutput())
    appendfilter.AddInputConnection(polydata.GetProducerPort())
    appendfilter.Update()
    # Remove any duplicate points.
    cleanfilter = vtk.vtkCleanPolyData()
    cleanfilter.SetInputConnection(appendfilter.GetOutputPort())
    cleanfilter.Update()
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(cleanfilter.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(ageometry_color)
    return actor, polydata
def points2actor(xyz, apoint_size):
    """Build a point-cloud actor from a DataFrame of x/y/z coordinates.

    Keyword arguments:
    xyz -- pandas DataFrame with one row per point (three coordinate
           columns); rows are read with .loc[i], so a default 0..n-1
           index is assumed
    apoint_size -- rendered point size in pixels
    Returns (actor, polydata).
    """
    # The redundant function-local `import vtk` was removed; vtk is already
    # imported at module level.
    points = vtk.vtkPoints()
    # Create the topology of the point (a vertex)
    vertices = vtk.vtkCellArray()
    # Add points
    for i in range(len(xyz)):
        p = xyz.loc[i].values.tolist()
        point_id = points.InsertNextPoint(p)
        vertices.InsertNextCell(1)
        vertices.InsertCellPoint(point_id)
    # Create a poly data object
    polydata = vtk.vtkPolyData()
    # Set the points and vertices we created as the geometry and topology of the polydata
    polydata.SetPoints(points)
    polydata.SetVerts(vertices)
    polydata.Modified()
    # Mapper for points (SetInput is the legacy VTK 5 API used file-wide).
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInput(polydata)
    # ACTOR for points
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetPointSize(apoint_size)
    return actor, polydata
def face_points2actor(fps_df):
    """Build a triangle-mesh actor from a DataFrame of face points.

    Keyword argument:
    fps_df -- DataFrame with 9 columns per row, laid out as
              [x0, y0, z0, x1, y1, z1, x2, y2, z2] (one triangle per row)
    Returns (actor, polydata).
    """
    cell_array = vtk.vtkCellArray()
    points = vtk.vtkPoints()
    point_id = 0
    for i in range(fps_df.shape[0]):
        polygon = vtk.vtkPolygon()
        polygon.GetPointIds().SetNumberOfIds(3)
        # One vtk point per vertex; ids increase globally across all faces.
        for n in range(3):
            points.InsertNextPoint(fps_df.iloc[i, 0 + 3 * n], fps_df.iloc[i, 1 + 3 * n], fps_df.iloc[i, 2 + 3 * n])
            polygon.GetPointIds().SetId(n, point_id)
            point_id += 1
        cell_array.InsertNextCell(polygon)
    polydata = vtk.vtkPolyData()
    polydata.SetPoints(points)
    polydata.SetPolys(cell_array)
    # mapper (legacy VTK 5 SetInput API, consistent with the rest of the file)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInput(polydata)
    # actor
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    return actor, polydata
def xy_axis(apoints_path):
    """Draw an x-positive (red) and y-positive (green) axis line at the centre.

    Keyword argument:
    apoints_path -- CSV file with sen_x, sen_y and sen_z columns
    Returns (actor, mid_point); mid_point is the sensor centroid, used by
    visualize() as the camera focal point.
    NOTE(review): the mean()/min()/max() calls below return one-element
    pandas Series rather than scalars; the pinned pandas/vtk versions
    apparently tolerate this -- confirm before upgrading either library.
    """
    # print a green y-positive and a x-positive line in the centre
    sensor = pd.read_csv(apoints_path)
    mid_x = sensor[['sen_x']].mean(axis=0)
    mid_y = sensor[['sen_y']].mean(axis=0)
    mid_z = sensor[['sen_z']].mean(axis=0)
    min_z = sensor[['sen_z']].min(axis=0)
    max_x = sensor[['sen_x']].max(axis=0)
    max_y = sensor[['sen_y']].max(axis=0)
    # Three anchor points: centre, +x extreme, +y extreme (all on the floor).
    pts = vtk.vtkPoints()
    pts.InsertNextPoint([mid_x, mid_y, min_z])
    pts.InsertNextPoint([max_x, mid_y, min_z])
    pts.InsertNextPoint([mid_x, max_y, min_z])
    # Setup two colors - one for each line
    red = [255, 0, 0]
    green = [0, 255, 0]
    colors = vtk.vtkUnsignedCharArray()
    colors.SetNumberOfComponents(3)
    colors.SetName("Colors")
    colors.InsertNextTupleValue(red)
    colors.InsertNextTupleValue(green)
    # Line 0: centre -> +x (red); line 1: centre -> +y (green).
    line0 = vtk.vtkLine()
    line0.GetPointIds().SetId(0, 0)
    line0.GetPointIds().SetId(1, 1)
    line1 = vtk.vtkLine()
    line1.GetPointIds().SetId(0, 0)
    line1.GetPointIds().SetId(1, 2)
    lines = vtk.vtkCellArray()
    lines.InsertNextCell(line0)
    lines.InsertNextCell(line1)
    linespolydata = vtk.vtkPolyData()
    linespolydata.SetPoints(pts)
    linespolydata.SetLines(lines)
    linespolydata.GetCellData().SetScalars(colors)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInput(linespolydata)
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    mid_point = [mid_x, mid_y, mid_z]
    return actor, mid_point
def get_colors(irradiance_array, min_irr, max_irr):
    """Map irradiance values to 0-255 RGB rows via the 'inferno' colormap.

    Keyword arguments:
    irradiance_array -- 1-D array-like of irradiance values
    min_irr -- unused; kept for backward compatibility with existing callers
    max_irr -- value that maps to the top of the color scale
    Returns an (n, 3) float array with RGB channels scaled to [0, 255].
    """
    # Hoist the colormap lookup out of the loop (it was re-fetched for every
    # element) and let matplotlib map the whole array in one vectorized call;
    # Colormap(array) returns an (n, 4) RGBA array, so drop the alpha column.
    col_map = cm.get_cmap('inferno')
    n_col = np.asarray(irradiance_array) / max_irr
    rgb = np.asarray(col_map(n_col))[:, :3] * 255
    return rgb
def visualize(arepeating_timer):
    '''
    Open the render window and animate the prepared actors, recoloring
    them from data_lib on every timer tick.

    arepeating_timer: timer period in milliseconds between animation frames.
    NOTE(review): this function reads many module-level globals prepared in
    the __main__ block (actor_lib, polydata_lib, data_lib, count_lib, axes,
    mid_point, move_cam_bool, camera and text settings) -- it cannot be
    called standalone.
    '''
    # =============================== timer event =============================== #
    class VtkTimerCallback:
        def __init__(self):
            self.timer_count = 0  # index of the current animation frame
        def execute(self, obj, event):
            # Called on every TimerEvent: recolor each polydata for this frame.
            print("\r", self.timer_count, end=' ')
            for name in data_lib:
                data_array = data_lib[name][self.timer_count]
                # NOTE(review): `data` is the module-level global left over
                # from the __main__ loading loop, not this frame's
                # `data_array`; this looks like it was meant to be
                # np.max(data_array) -- confirm intent before changing.
                data_max = np.max(data)
                rgb = get_colors(data_array, 0, data_max)
                colors = vtk.vtkUnsignedCharArray()
                colors.SetNumberOfComponents(3)
                colors.SetName("Colors")
                # Repeat each color count_lib[name] times so every cell of
                # the geometry gets a scalar tuple.
                for l in range(0, len(rgb)):
                    for j in range(count_lib[name]):
                        colors.InsertNextTuple(rgb[l])
                polydata_lib[name].GetCellData().SetScalars(colors)
                polydata_lib[name].GetCellData().Update()
            if move_cam_bool is True:
                # Orbit the camera around the model centre, one step per frame.
                camera.SetPosition(camera_xyz[self.timer_count % 360])
                camera.SetViewUp(0, 0, 1)
                camera.SetFocalPoint(mid_point[0], mid_point[1], mid_point[2])
            txt.SetInput("time: " + str(self.timer_count))
            render_window.Render()
            iren = obj
            iren.GetRenderWindow().Render()
            imageFilter.Modified()
            # Advance the frame counter, looping back to 0 after the last frame.
            if self.timer_count < data_lib[name].shape[0] - 1:
                self.timer_count += 1
            else:
                self.timer_count = 0
    # ============================== camera ============================== #
    camera = vtk.vtkCamera()
    if move_cam_bool is True:
        # Precompute one orbit position per angle step around mid_point.
        camera_xyz = []
        for i in range(start_angle, end_angle):
            phi = i * step_size
            camera_xyz.append(
                (mid_point[0] + radius * 6 * math.sin(phi), mid_point[1] + radius * 6 * math.cos(phi),
                 cam_height + mid_point[2]))
    # ============================== text ============================== #
    txt = vtk.vtkTextActor()
    txtprop = txt.GetTextProperty()
    txtprop.SetFontFamilyToArial()
    txtprop.SetFontSize(txt_font_size)
    txtprop.SetColor(txt_color)
    txt.SetDisplayPosition(2, 2)
    # =============================== initialize visualization =============================== #
    renderer = vtk.vtkRenderer()
    renderer.SetActiveCamera(camera)
    render_window = vtk.vtkRenderWindow()
    render_window_interactor = vtk.vtkRenderWindowInteractor()
    # Add actor to the scene
    renderer.AddActor(txt)
    cb = VtkTimerCallback()
    renderer.AddActor(axes)
    cb.actor = axes
    for name in actor_lib:
        renderer.AddActor(actor_lib[name])
        cb.actor = actor_lib[name]
    # Background
    renderer.SetBackground(background_color)
    # Reset camera
    renderer.ResetCamera()
    # Render window
    render_window.AddRenderer(renderer)
    # Interactor
    render_window_interactor.SetRenderWindow(render_window)
    # Begin interaction
    render_window.Render()
    # Initialize must be called prior to creating timer events.
    render_window_interactor.Initialize()
    # Sign up to receive TimerEvent
    render_window_interactor.AddObserver('TimerEvent', cb.execute)
    render_window_interactor.CreateRepeatingTimer(arepeating_timer)
    # Setup filter (grabs frames from the render window; Modified() per tick)
    imageFilter = vtk.vtkWindowToImageFilter()
    imageFilter.SetInput(render_window)
    imageFilter.SetInputBufferTypeToRGB()
    imageFilter.ReadFrontBufferOff()
    imageFilter.Update()
    # Blocks until the window is closed.
    render_window_interactor.Start()
if __name__ == '__main__':
    # All names assigned below are module-level globals read by visualize()
    # and its timer callback.
    move_cam_bool = False
    background_color = [1, 1, 1]
    # camera properties
    cam_height = 200
    radius = 50
    start_angle = 0
    end_angle = 360
    step_size = math.pi / 90
    repeating_timer = 20  # milliseconds between animation frames
    # text properties
    txt_font_size = 18
    txt_color = (0.4, 0.4, 0.4)
    # file paths
    config = cea.config.Configuration()
    scenario_path = config.scenario
    locator = cea.inputlocator.InputLocator(scenario=scenario_path)
    in_path = locator.get_3D_geometry_folder()
    out_path = locator.get_solar_radiation_folder()
    geo_list = pd.read_csv(os.path.join(in_path, 'background_geometries.csv'))['name']
    sen_list = pd.read_csv(os.path.join(in_path, 'sensor_geometries.csv'))['name']
    # create actors to show including actors, polydata, data (for coloring) and count
    start_h = 3000  # first hour of the year to animate
    nr_hours = 200  # number of hourly frames to load
    actor_lib = {}
    polydata_lib = {}
    data_lib = {}
    count_lib = {}
    # stl example
    for name in geo_list:
        actor_lib[name], polydata_lib[name] = stl2actor(in_path, name, [0.7, .7, .7])
        count_lib[name] = polydata_lib[name].GetNumberOfCells()
        path = os.path.join(out_path, name, 'res')
        # data = pd.read_csv(os.path.join(path, name+'.csv'), sep=',', skiprows=start_h, nrows=nr_hours, header=None)
        # data = data.astype(float)
        # data = data.as_matrix()
        # data_lib[name] = data
    # point example
    for name in sen_list:
        xyz = pd.read_csv(os.path.join(out_path, name + '_sen_df.csv'))[['sen_x', 'sen_y', 'sen_z']]
        actor_lib['pt_' + name], polydata_lib['pt_' + name] = points2actor(xyz, apoint_size=8)
        count_lib['pt_' + name] = 1
        path = os.path.join(out_path, name, 'res')
        # NOTE(review): DataFrame.as_matrix() only exists in old pandas
        # releases; this script expects a pinned legacy pandas -- confirm.
        data = pd.read_csv(os.path.join(path, name + '.ill'),
                           sep=' ', skiprows=start_h, nrows=nr_hours, header=None).iloc[:, 4:]
        data = data.astype(float)
        data = data.as_matrix()
        data_lib['pt_' + name] = data
    '''
    # face example
    for name in sen_list:
        fps_df = pd.read_csv(os.path.join(out_path,name+'_fps_df.csv'))
        actor_lib['f_'+name], polydata_lib['f_'+name] = face_points2actor(fps_df)
        count_lib['f_'+name] = 1
        path = os.path.join(out_path, name, 'res')
        data = pd.read_csv(os.path.join(path, name+'.ill'), sep=' ', skiprows=start_h, nrows=nr_hours,header=None).ix[:, 4:]
        data = data.astype(float)
        data = data.T
        data.reset_index(inplace=True)
        data.drop('index', axis=1, inplace=True)
        sensor = pd.read_csv(os.path.join(out_path, name+'_sen_df.csv'))['fac_int']
        data['fac_int'] = sensor
        data = data.groupby(['fac_int']).mean()
        data = data.as_matrix()
        data_lib['f_'+name] = data.T
    '''
    # give a path with "sen_x,..." columns to calculate where the camera focuses on
    axes, mid_point = xy_axis(os.path.join(out_path, sen_list[0] + '_sen_df.csv'))
    point_size = 5
    visualize(repeating_timer)
|
|
#!/usr/bin/env python
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import getpass
import inspect
import atom.mock_http_core
import gdata.gauth
"""Loads configuration for tests which connect to Google servers.
Settings used in tests are stored in a ConfigCollection instance in this
module called options. If your test needs to get a test related setting,
use
import gdata.test_config
option_value = gdata.test_config.options.get_value('x')
The above will check the command line for an '--x' argument, and if not
found will either use the default value for 'x' or prompt the user to enter
one.
Your test can override the value specified by the user by performing:
gdata.test_config.options.set_value('x', 'y')
If your test uses a new option which you would like to allow the user to
specify on the command line or via a prompt, you can use the register_option
method as follows:
gdata.test_config.options.register(
    'option_name', 'Prompt shown to the user',
    description='The description of the option, shown when help is requested.',
    default='Default value; provide only if you do not want the user prompted.',
    secret=False)  # Pass secret=True for passwords so input is not echoed.
"""
class Option(object):
  """A named test setting resolved from argv, a default, or a user prompt.

  Attributes:
    name: str Option name; looked up as --name on the command line.
    prompt: str Text shown when asking the user for a value.
    secret: bool If True the value is read with getpass (input not echoed).
    description: str Help text used by ConfigCollection.render_usage.
    default: Value used when the option is absent from the command line;
        if None, the user is prompted interactively.
  """

  def __init__(self, name, prompt, secret=False, description=None, default=None):
    self.name = name
    self.prompt = prompt
    self.secret = secret
    self.description = description
    self.default = default

  def get(self):
    """Returns the option value from argv, the default, or a user prompt."""
    value = self.default
    # Check for a command line parameter (--name=value or --name value).
    for i in range(len(sys.argv)):
      if sys.argv[i].startswith('--%s=' % self.name):
        # split('=', 1) keeps values which themselves contain '=' intact;
        # the original split('=')[1] truncated them at the second '='.
        value = sys.argv[i].split('=', 1)[1]
      elif sys.argv[i] == '--%s' % self.name and i + 1 < len(sys.argv):
        # Guard i + 1 so a trailing bare --name no longer raises IndexError;
        # it is now treated as if the option were absent.
        value = sys.argv[i + 1]
    # If the param was not on the command line, ask the user to input the
    # value. In order for this to prompt the user, the default value for
    # the option must be None.
    if value is None:
      prompt = '%s: ' % self.prompt
      if self.secret:
        value = getpass.getpass(prompt)
      else:
        print('You can specify this on the command line using --%s' % self.name)
        value = input(prompt)
    return value
class ConfigCollection(object):
  """Registry of Option objects plus a cache of resolved values."""

  def __init__(self, options=None):
    # `or {}` (rather than an `is None` test) is kept deliberately: a
    # falsy argument such as an empty dict is replaced by a fresh dict,
    # exactly as before.
    self.options = options or {}
    self.values = {}

  def register_option(self, option):
    """Adds *option* to the registry, keyed by its name."""
    self.options[option.name] = option

  def register(self, *args, **kwargs):
    """Builds an Option from the given arguments and registers it."""
    self.register_option(Option(*args, **kwargs))

  def get_value(self, option_name):
    """Returns the value, resolving it through its Option on first use.

    Non-None resolved values are cached; None results are re-resolved on
    every call (matching the original behavior).
    """
    try:
      return self.values[option_name]
    except KeyError:
      pass
    resolved = self.options[option_name].get()
    if resolved is not None:
      self.values[option_name] = resolved
    return resolved

  def set_value(self, option_name, value):
    """Overrides the stored value, bypassing the Option's resolution."""
    self.values[option_name] = value

  def render_usage(self):
    """Returns one '--name: description' line per registered option."""
    return '\n'.join('--%s: %s' % (name, opt.description)
                     for name, opt in self.options.items())
# Shared, module-wide registry consulted by every test via
# gdata.test_config.options.get_value(...).
options = ConfigCollection()
# Register the default options. Options without a `default` prompt the
# user interactively when first read (see Option.get).
options.register(
    'username',
    'Please enter the email address of your test account',
    description=('The email address you want to sign in with. '
                 'Make sure this is a test account as these tests may edit'
                 ' or delete data.'))
options.register(
    'password',
    'Please enter the password for your test account',
    secret=True, description='The test account password.')
options.register(
    'clearcache',
    'Delete cached data? (enter true or false)',
    description=('If set to true, any temporary files which cache test'
                 ' requests and responses will be deleted.'),
    default='true')
options.register(
    'savecache',
    'Save requests and responses in a temporary file? (enter true or false)',
    description=('If set to true, requests to the server and responses will'
                 ' be saved in temporary files.'),
    default='false')
options.register(
    'runlive',
    'Run the live tests which contact the server? (enter true or false)',
    description=('If set to true, the tests will make real HTTP requests to'
                 ' the servers. This slows down test execution and may'
                 ' modify the users data, be sure to use a test account.'),
    default='true')
options.register(
    'host',
    'Run the live tests against the given host',
    description='Examples: docs.google.com, spreadsheets.google.com, etc.',
    default='')
options.register(
    'ssl',
    'Run the live tests over SSL (enter true or false)',
    description='If set to true, all tests will be performed over HTTPS (SSL)',
    default='false')
options.register(
    'clean',
    'Clean ALL data first before and after each test (enter true or false)',
    description='If set to true, all tests will remove all data (DANGEROUS)',
    default='false')
options.register(
    'appsusername',
    'Please enter the email address of your test Apps domain account',
    description=('The email address you want to sign in with. '
                 'Make sure this is a test account on your Apps domain as '
                 'these tests may edit or delete data.'))
options.register(
    'appspassword',
    'Please enter the password for your test Apps domain account',
    secret=True, description='The test Apps account password.')
# Other options which may be used if needed.
# These Option instances are not registered by default; individual test
# suites register the ones they need (values then come from argv, the
# default, or an interactive prompt).
BLOG_ID_OPTION = Option(
    'blogid',
    'Please enter the ID of your test blog',
    description=('The blog ID for the blog which should have test posts added'
                 ' to it. Example 7682659670455539811'))
TEST_IMAGE_LOCATION_OPTION = Option(
    'imgpath',
    'Please enter the full path to a test image to upload',
    description=('This test image will be uploaded to a service which'
                 ' accepts a media file, it must be a jpeg.'))
SPREADSHEET_ID_OPTION = Option(
    'spreadsheetid',
    'Please enter the ID of a spreadsheet to use in these tests',
    description=('The spreadsheet ID for the spreadsheet which should be'
                 ' modified by theses tests.'))
APPS_DOMAIN_OPTION = Option(
    'appsdomain',
    'Please enter your Google Apps domain',
    description=('The domain the Google Apps is hosted on or leave blank'
                 ' if n/a'))
SITES_NAME_OPTION = Option(
    'sitename',
    'Please enter name of your Google Site',
    description='The webspace name of the Site found in its URL.')
PROJECT_NAME_OPTION = Option(
    'project_name',
    'Please enter the name of your project hosting project',
    description=('The name of the project which should have test issues added'
                 ' to it. Example gdata-python-client'))
ISSUE_ASSIGNEE_OPTION = Option(
    'issue_assignee',
    'Enter the email address of the target owner of the updated issue.',
    description=('The email address of the user a created issue\'s owner will '
                 ' become. Example testuser2@gmail.com'))
GA_TABLE_ID = Option(
    'table_id',
    'Enter the Table ID of the Google Analytics profile to test',
    description=('The Table ID of the Google Analytics profile to test.'
                 ' Example ga:1174'))
TARGET_USERNAME_OPTION = Option(
    'targetusername',
    'Please enter the username (without domain) of the user which will be'
    ' affected by the tests',
    description=('The username of the user to be tested'))
YT_DEVELOPER_KEY_OPTION = Option(
    'developerkey',
    'Please enter your YouTube developer key',
    description=('The YouTube developer key for your account'))
YT_CLIENT_ID_OPTION = Option(
    'clientid',
    'Please enter your YouTube client ID',
    description=('The YouTube client ID for your account'))
YT_VIDEO_ID_OPTION= Option(
    'videoid',
    'Please enter the ID of a YouTube video you uploaded',
    description=('The video ID of a YouTube video uploaded to your account'))
# Functions to inject a cachable HTTP client into a service client.
def configure_client(client, case_name, service_name, use_apps_auth=False):
    """Sets up a mock client which will reuse a saved session.

    Should be called during setUp of each unit test.

    Handles authentication to allow the GDClient to make requests which
    require an auth header.

    Args:
      client: a gdata.GDClient whose http_client member should be replaced
              with a atom.mock_http_core.MockHttpClient so that repeated
              executions can used cached responses instead of contacting
              the server.
      case_name: str The name of the test case class. Examples: 'BloggerTest',
                 'ContactsTest'. Used to save a session
                 for the ClientLogin auth token request, so the case_name
                 should be reused if and only if the same username, password,
                 and service are being used.
      service_name: str The service name as used for ClientLogin to identify
                    the Google Data API being accessed. Example: 'blogger',
                    'wise', etc.
      use_apps_auth: bool (optional) If set to True, use appsusername and
                     appspassword command-line args instead of username and
                     password respectively.
    """
    # Use a mock HTTP client which will record and replay the HTTP traffic
    # from these tests.
    client.http_client = atom.mock_http_core.MockHttpClient()
    client.http_client.cache_case_name = case_name
    # Getting the auth token only needs to be done once in the course of test
    # runs.  The token blob is cached in the module-level `options.values`
    # under a per-service key so later tests in the same run reuse it.
    auth_token_key = '%s_auth_token' % service_name
    if (auth_token_key not in options.values
            and options.get_value('runlive') == 'true'):
        # Record (or replay) the ClientLogin exchange under a dedicated
        # 'client_login' session name.
        client.http_client.cache_test_name = 'client_login'
        cache_name = client.http_client.get_cache_file_name()
        if options.get_value('clearcache') == 'true':
            client.http_client.delete_session(cache_name)
        client.http_client.use_cached_session(cache_name)
        if not use_apps_auth:
            username = options.get_value('username')
            password = options.get_value('password')
        else:
            username = options.get_value('appsusername')
            password = options.get_value('appspassword')
        auth_token = client.client_login(username, password, case_name,
                                         service=service_name)
        # Serialize the token so it can be shared across test cases.
        options.values[auth_token_key] = gdata.gauth.token_to_blob(auth_token)
        # Some clients carry a secondary auth service/token pair; cache that
        # blob too, keyed by the alt service name.
        if client.alt_auth_service is not None:
            options.values[client.alt_auth_service] = gdata.gauth.token_to_blob(
                client.alt_auth_token)
        client.http_client.close_session()
    # Allow a config auth_token of False to prevent the client's auth header
    # from being modified.
    if auth_token_key in options.values:
        client.auth_token = gdata.gauth.token_from_blob(
            options.values[auth_token_key])
        if client.alt_auth_service is not None:
            client.alt_auth_token = gdata.gauth.token_from_blob(
                options.values[client.alt_auth_service])
    # Optionally redirect all requests at an alternate host (e.g. a staging
    # server) specified on the command line.
    if options.get_value('host'):
        client.host = options.get_value('host')
def configure_cache(client, test_name):
    """Loads or begins a cached session to record HTTP traffic.

    Should be called at the beginning of each test method.

    Args:
      client: a gdata.GDClient whose http_client member has been replaced
              with a atom.mock_http_core.MockHttpClient so that repeated
              executions can used cached responses instead of contacting
              the server.
      test_name: str The name of this test method. Examples:
                 'TestClass.test_x_works', 'TestClass.test_crud_operations'.
                 This is used to name the recording of the HTTP requests and
                 responses, so it should be unique to each test method in the
                 test case.
    """
    # The auth token was already obtained in configure_client during setUp;
    # here we only select (and possibly reset) the per-test recording.
    mock_client = client.http_client
    mock_client.cache_test_name = test_name
    session_file = mock_client.get_cache_file_name()
    if options.get_value('clearcache') == 'true':
        mock_client.delete_session(session_file)
    mock_client.use_cached_session(session_file)
def close_client(client):
    """Saves the recoded responses to a temp file if the config file allows.

    This should be called in the unit test's tearDown method.

    Checks to see if the 'savecache' option is set to 'true', to make sure we
    only save sessions to repeat if the user desires.
    """
    if not client:
        return
    if options.get_value('savecache') == 'true':
        # This was a live request, so persist the recording for replay.
        client.http_client.close_session()
def configure_service(service, case_name, service_name):
    """Sets up a mock GDataService v1 client to reuse recorded sessions.

    Should be called during setUp of each unit test. This is a duplicate of
    configure_client, modified to handle old v1 service classes.

    Args:
      service: a v1 GDataService whose http_client.v2_http_client is replaced
               with a MockHttpClient for record/replay.
      case_name: str name of the test case class; used as the ClientLogin
                 source and to name the recorded session.
      service_name: str the ClientLogin service identifier (e.g. 'wise').
    """
    service.http_client.v2_http_client = atom.mock_http_core.MockHttpClient()
    service.http_client.v2_http_client.cache_case_name = case_name
    # Getting the auth token only needs to be done once in the course of test
    # runs.  Cached in options.values under a 'service_'-prefixed key so it
    # does not collide with v2 client tokens for the same service.
    auth_token_key = 'service_%s_auth_token' % service_name
    if (auth_token_key not in options.values
            and options.get_value('runlive') == 'true'):
        # Record/replay the ClientLogin exchange under its own session name.
        service.http_client.v2_http_client.cache_test_name = 'client_login'
        cache_name = service.http_client.v2_http_client.get_cache_file_name()
        if options.get_value('clearcache') == 'true':
            service.http_client.v2_http_client.delete_session(cache_name)
        service.http_client.v2_http_client.use_cached_session(cache_name)
        service.ClientLogin(options.get_value('username'),
                            options.get_value('password'),
                            service=service_name, source=case_name)
        options.values[auth_token_key] = service.GetClientLoginToken()
        service.http_client.v2_http_client.close_session()
    # Reuse the cached token if one exists (from this call or a prior test).
    if auth_token_key in options.values:
        service.SetClientLoginToken(options.values[auth_token_key])
def configure_service_cache(service, test_name):
    """Loads or starts a session recording for a v1 Service object.

    Duplicates the behavior of configure_cache, but the target for this
    function is a v1 Service object instead of a v2 Client.
    """
    mock_client = service.http_client.v2_http_client
    mock_client.cache_test_name = test_name
    session_file = mock_client.get_cache_file_name()
    if options.get_value('clearcache') == 'true':
        mock_client.delete_session(session_file)
    mock_client.use_cached_session(session_file)
def close_service(service):
    """Persists the recorded HTTP session for a v1 Service if config allows."""
    if not service:
        return
    if options.get_value('savecache') == 'true':
        # This was a live request, so persist the recording for replay.
        service.http_client.v2_http_client.close_session()
def build_suite(classes):
    """Creates a TestSuite for all unit test classes in the list.

    Assumes that each of the classes in the list has unit test methods which
    begin with 'test'.  Uses unittest.TestLoader rather than the deprecated
    unittest.makeSuite (removed in Python 3.13); the default loader collects
    the same 'test'-prefixed methods.

    Args:
      classes: list of unittest.TestCase subclasses to collect tests from.

    Returns:
      A new unittest.TestSuite containing a test suite for all classes.
    """
    loader = unittest.TestLoader()
    suites = [loader.loadTestsFromTestCase(a_class) for a_class in classes]
    return unittest.TestSuite(suites)
def check_data_classes(test, classes):
    """Asserts structural conventions for atom/gdata XML data classes.

    For every class in `classes`, verifies (via the provided TestCase
    `test`) that: the class has a docstring; any `_qname` is a string (or a
    tuple of per-version strings) that is more than just a namespace; and
    every public class attribute is a string, function/method, property,
    XmlElement subclass, or list of XmlElement subclasses.

    Args:
      test: a unittest.TestCase used for its assertTrue/fail methods.
      classes: iterable of data classes to validate.
    """
    import inspect
    for data_class in classes:
        test.assertTrue(data_class.__doc__ is not None,
                        'The class %s should have a docstring' % data_class)
        if hasattr(data_class, '_qname'):
            # _qname may be a single tag name or a tuple of names, one per
            # wire-protocol version; normalize to a tuple before checking.
            qname_versions = None
            if isinstance(data_class._qname, tuple):
                qname_versions = data_class._qname
            else:
                qname_versions = (data_class._qname,)
            for versioned_qname in qname_versions:
                test.assertTrue(isinstance(versioned_qname, str),
                                'The class %s has a non-string _qname' % data_class)
                # A _qname ending in '}' is only '{namespace}' with no tag.
                test.assertTrue(not versioned_qname.endswith('}'),
                                'The _qname for class %s is only a namespace' % (
                                    data_class))
        for attribute_name, value in data_class.__dict__.items():
            # Ignore all elements that start with _ (private members)
            if not attribute_name.startswith('_'):
                try:
                    if not (isinstance(value, str) or inspect.isfunction(value)
                            or (isinstance(value, list)
                                and issubclass(value[0], atom.core.XmlElement))
                            or type(value) == property  # Allow properties.
                            or inspect.ismethod(value)  # Allow methods.
                            or inspect.ismethoddescriptor(value)  # Allow method descriptors.
                            # staticmethod et al.
                            or issubclass(value, atom.core.XmlElement)):
                        test.fail(
                            'XmlElement member should have an attribute, XML class,'
                            ' or list of XML classes as attributes.')
                except TypeError:
                    # issubclass raises TypeError when `value` is not a class
                    # (e.g. an int); report it as an invalid member type.
                    test.fail('Element %s in %s was of type %s' % (
                        attribute_name, data_class._qname, type(value)))
def check_clients_with_auth(test, classes):
    """Asserts each client class declares the attributes required for auth.

    Fixes two issues in the original: `auth_service` was accessed (via
    isinstance) *before* the hasattr assertion that guards it, and the loose
    `isinstance(..., (str, int))` check was redundant with the stricter
    `isinstance(..., str)` check that followed.

    Args:
      test: a unittest.TestCase used for its assertTrue method.
      classes: iterable of client classes to validate.
    """
    for client_class in classes:
        test.assertTrue(hasattr(client_class, 'api_version'))
        # Assert presence before touching the attribute.
        test.assertTrue(hasattr(client_class, 'auth_service'))
        test.assertTrue(isinstance(client_class.auth_service, str))
        test.assertTrue(hasattr(client_class, 'auth_scopes'))
        test.assertTrue(isinstance(client_class.auth_scopes, (list, tuple)))
|
|
""" Module: writer
Provide an API for storing data in a graph database, including create,
update, and delete actions on nodes and edges.
Provides:
def create_node
def update_node
def delete_node
def create_edge
def update_edge
def delete_edge
"""
from time import time
from model.constants import NODE_PROPERTY, EDGE_PROPERTY
from model.data import database_manager
from model.data.data_errors import DbInputError, DbWriteError
from constants import GRAPH_PROPERTY
from model.graph import GraphEdge, GraphNode, GraphInputError
# TODO: change functions and definitions to use GraphProto*
def database():
    """ Return a database handle obtained from the data layer's
    database_manager. """
    db = database_manager.database()
    return db
def create_node(prototype_node):
    """ Create a node in a graph database.

    Required:
    GraphProtoNode  prototype_node  unwritten version of GraphNode

    Returns:
    GraphNode       newly created GraphNode, or None on data layer errors

    Raises:
    GraphInputError bad input

    """
    graph_node = None

    try:
        # isolate the GraphProtoNode members we need
        properties = prototype_node.properties()

        # TODO: move this error checking into GraphPrototype subclasses

        # make sure callers don't usurp power over data input
        bad_properties = [
            NODE_PROPERTY.ID,
            NODE_PROPERTY.TYPE,
            GRAPH_PROPERTY.CREATED_TS,
            GRAPH_PROPERTY.UPDATED_TS,
            GRAPH_PROPERTY.DELETED_TS
        ]

        input_errors = set(bad_properties).intersection(set(properties))

        if input_errors:
            raise GraphInputError(
                input_errors,
                "Invalid input supplied to create_node().")

        # initialize some required properties
        current_ts = int(time())
        properties[GRAPH_PROPERTY.CREATED_TS] = current_ts
        properties[GRAPH_PROPERTY.UPDATED_TS] = current_ts
        properties[GRAPH_PROPERTY.DELETED_TS] = False

        # issue a call to the data layer
        node = database().create_node(prototype_node.type(), properties)

        graph_node = GraphNode(
            node[NODE_PROPERTY.ID],
            node[NODE_PROPERTY.TYPE],
            node[NODE_PROPERTY.PROPERTIES],
            node[NODE_PROPERTY.EDGES])

    except DbInputError as e:
        # fixed: was a bare Python 2 print statement, inconsistent with the
        # parenthesized print() calls used by every sibling function here
        #logger.debug(e.reason)
        print(e.reason)
        graph_node = None

    except DbWriteError as e:
        #logger.debug(e.reason)
        print(e.reason)
        graph_node = None

    return graph_node
# TODO: replace new_properties with GraphProtoEdge
def update_node(node_id, new_properties):
    """ Update a node in a graph database.

    Required:
    id      node_id         id of the node to update
    dict    new_properties  dict of optional GraphNode properties

    Returns:
    GraphNode   updated GraphNode, or None on data layer errors

    Raises:
    GraphInputError bad input

    """
    try:
        # reject caller-supplied values for fields this layer controls
        reserved_fields = [
            NODE_PROPERTY.ID,
            NODE_PROPERTY.TYPE,
            GRAPH_PROPERTY.CREATED_TS,
            GRAPH_PROPERTY.UPDATED_TS,
            GRAPH_PROPERTY.DELETED_TS
        ]
        conflicts = set(reserved_fields).intersection(new_properties)
        if conflicts:
            raise GraphInputError(
                conflicts,
                "Invalid input supplied to update_node().")

        # stamp the modification time this layer owns
        new_properties[GRAPH_PROPERTY.UPDATED_TS] = int(time())

        # hand the write off to the data layer
        node = database().update_node(node_id, new_properties)
        return GraphNode(
            node[NODE_PROPERTY.ID],
            node[NODE_PROPERTY.TYPE],
            node[NODE_PROPERTY.PROPERTIES],
            node[NODE_PROPERTY.EDGES])

    except (DbInputError, DbWriteError) as e:
        # TODO: route through a logger instead of stdout
        print(e.reason)
        return None
def delete_node(node_id):
    """ Delete a node in a graph database.

    Required:
    id  node_id     id of the node to update

    Returns:
    GraphNode   deleted GraphNode, or None on data layer errors

    """
    graph_node = None

    try:
        # issue a call to the data layer with the required changes.
        # fixed: use the GRAPH_PROPERTY.DELETED_TS constant instead of the
        # hard-coded "deleted_ts" literal, consistent with delete_edge()
        node = database().delete_node(
            node_id,
            {GRAPH_PROPERTY.DELETED_TS: int(time())})

        graph_node = GraphNode(
            node[NODE_PROPERTY.ID],
            node[NODE_PROPERTY.TYPE],
            node[NODE_PROPERTY.PROPERTIES],
            node[NODE_PROPERTY.EDGES])

    except DbInputError as e:
        #logger.debug(e.reason)
        print(e.reason)
        graph_node = None

    except DbWriteError as e:
        #logger.debug(e.reason)
        print(e.reason)
        graph_node = None

    return graph_node
def create_edge(prototype_edge):
    """ Create an edge connecting two nodes in a graph database.

    Required:
    GraphProtoEdge  prototype_edge  unwritten version of GraphEdge

    Returns:
    GraphEdge   newly created GraphEdge, or None on data layer errors

    Raises:
    GraphInputError bad input

    """
    try:
        edge_properties = prototype_edge.properties()

        # reject caller-supplied values for fields this layer controls
        reserved_fields = [
            EDGE_PROPERTY.ID,
            EDGE_PROPERTY.FROM_NODE_ID,
            EDGE_PROPERTY.TO_NODE_ID,
            EDGE_PROPERTY.TYPE,
            #GRAPH_PROPERTY.IS_ONE_WAY,
            #GRAPH_PROPERTY.IS_UNIQUE,
            GRAPH_PROPERTY.CREATED_TS,
            GRAPH_PROPERTY.UPDATED_TS,
            GRAPH_PROPERTY.DELETED_TS
        ]
        conflicts = set(reserved_fields).intersection(edge_properties)
        if conflicts:
            raise GraphInputError(
                conflicts,
                "Invalid input supplied to create_edge().")

        # stamp the bookkeeping fields this layer owns
        now = int(time())
        edge_properties[GRAPH_PROPERTY.CREATED_TS] = now
        edge_properties[GRAPH_PROPERTY.UPDATED_TS] = now
        edge_properties[GRAPH_PROPERTY.DELETED_TS] = False

        # hand the write off to the data layer
        edge = database().create_edge(
            prototype_edge.from_node_id(),
            prototype_edge.to_node_id(),
            prototype_edge.type(),
            edge_properties)

        return GraphEdge(
            edge[EDGE_PROPERTY.ID],
            edge[EDGE_PROPERTY.TYPE],
            edge[EDGE_PROPERTY.PROPERTIES],
            edge[EDGE_PROPERTY.FROM_NODE_ID],
            edge[EDGE_PROPERTY.TO_NODE_ID])

    except (DbInputError, DbWriteError) as e:
        # TODO: route through a logger instead of stdout
        print(e.reason)
        return None
# TODO: replace new_properties with GraphProtoEdge
def update_edge(edge_id, new_properties):
    """ Update an edge connecting two nodes in a graph database.

    Required:
    id      edge_id         id of the edge to update
    dict    new_properties  dict of optional GraphEdge properties

    Returns:
    GraphEdge   updated GraphEdge, or None on data layer errors

    Raises:
    GraphInputError bad input

    """
    try:
        # reject caller-supplied values for fields this layer controls
        reserved_fields = [
            EDGE_PROPERTY.ID,
            EDGE_PROPERTY.FROM_NODE_ID,
            EDGE_PROPERTY.TO_NODE_ID,
            EDGE_PROPERTY.TYPE,
            #GRAPH_PROPERTY.IS_ONE_WAY,
            #GRAPH_PROPERTY.IS_UNIQUE,
            GRAPH_PROPERTY.CREATED_TS,
            GRAPH_PROPERTY.UPDATED_TS,
            GRAPH_PROPERTY.DELETED_TS
        ]
        conflicts = set(reserved_fields).intersection(new_properties)
        if conflicts:
            raise GraphInputError(
                conflicts,
                "Invalid input supplied to update_edge().")

        # stamp the modification time this layer owns
        new_properties[GRAPH_PROPERTY.UPDATED_TS] = int(time())

        # hand the write off to the data layer
        edge = database().update_edge(edge_id, new_properties)
        return GraphEdge(
            edge[EDGE_PROPERTY.ID],
            edge[EDGE_PROPERTY.TYPE],
            edge[EDGE_PROPERTY.PROPERTIES],
            edge[EDGE_PROPERTY.FROM_NODE_ID],
            edge[EDGE_PROPERTY.TO_NODE_ID])

    except (DbInputError, DbWriteError) as e:
        # TODO: route through a logger instead of stdout
        print(e.reason)
        return None
def delete_edge(edge_id):
    """ Delete an edge connecting two nodes in a graph database.

    Required:
    id  edge_id     id of the edge to update

    Returns:
    GraphEdge   deleted GraphEdge, or None on data layer errors

    """
    try:
        # the data layer call carries the deletion timestamp it requires
        edge = database().delete_edge(
            edge_id,
            {GRAPH_PROPERTY.DELETED_TS: int(time())})

        return GraphEdge(
            edge[EDGE_PROPERTY.ID],
            edge[EDGE_PROPERTY.TYPE],
            edge[EDGE_PROPERTY.PROPERTIES],
            edge[EDGE_PROPERTY.FROM_NODE_ID],
            edge[EDGE_PROPERTY.TO_NODE_ID])

    except (DbInputError, DbWriteError) as e:
        # TODO: route through a logger instead of stdout
        print(e.reason)
        return None
|
|
'''
Created on 21.01.2016
@author: fabian
'''
import unittest
import logging
import os
import datetime
from pymongo import MongoClient
import uuid
from pyvcsshark.config import Config
from pyvcsshark.datastores.mongostore import MongoStore
from pyvcsshark.dbmodels.models import CommitModel, BranchModel, TagModel,\
PeopleModel, FileModel, Hunk
class Test(unittest.TestCase):
    """Integration tests for MongoStore.

    Verifies that commits and their related documents (tags, files,
    file actions, hunks, people, project) are written to MongoDB correctly
    and that deleteAll removes everything except people.
    Fixed: deprecated assertEquals alias replaced with assertEqual.
    """

    # Shared fixtures, populated in setUpClass / setUp.
    config = None
    mongostore = None
    projectUrl = None
    projectName = None
    mongoClient = None

    @classmethod
    def setUpClass(cls):
        # Setup logging
        logging.basicConfig(level=logging.ERROR)

        # Read testconfig
        cls.config = Config()
        cls.config.load_from_file(os.path.dirname(os.path.realpath(__file__))+"/data/used_test_config.cfg")

        # Initialize mongoclient
        cls.mongoClient = MongoClient(cls.config.db_hostname, cls.config.db_port)
        cls.mongoClient.admin.authenticate(cls.config.db_user, cls.config.db_password, mechanism='SCRAM-SHA-1')

    def setUp(self):
        # Drop database so every test starts from a clean slate.
        self.mongoClient.drop_database(self.config.db_database)

        # Initialize mongostore with a fresh random project name/url so runs
        # cannot collide.
        self.mongostore = MongoStore()
        self.projectName = str(uuid.uuid4())
        self.projectUrl = "local/"+self.projectName
        self.mongostore.initialize(self.config.db_database, self.config.db_hostname, self.config.db_port,
                                   self.config.db_user, self.config.db_password, self.projectName, self.projectUrl, "git")

    def test_storeIdentifier(self):
        """The store must identify itself as the 'mongo' backend."""
        self.assertEqual("mongo", self.mongostore.storeIdentifier)

    def addingCommit(self):
        """Helper: builds one rather complex commit and stores it."""
        ## Create author/committer/tagger
        people = PeopleModel("Fabian Trautsch", "ftrautsch@googlemail.com")

        ## Create branches
        branch1 = BranchModel('refs/heads/master')
        branch2 = BranchModel('refs/heads/testbranch1')

        ## Create tag
        tag = TagModel("release1", "tag release 1", people, 1453380457, 60)

        ## ChangedFile
        hunks = []
        hunks.append(Hunk(old_start=1, old_lines=1, new_start=0, new_lines=0, content='-line1\n'))
        hunks.append(Hunk(old_start=20, old_lines=1, new_start=19, new_lines=1, content='-line20\n+\n'))
        hunks.append(Hunk(old_start=40, old_lines=0, new_start=40, new_lines=1, content='+line41\n'))
        #hunks.append("@@ -1,4 +1,3 @@ \n-line1\n line2\n line3\n line4\n")
        #hunks.append("@@ -17,7 +16,7 @@ \n line17\n line18\n line19\n-line20\n+\n line21\n line22\n line23\n")
        #hunks.append("@@ -38,3 +37,4 @@ \n line38\n line39\n line40\n+line41\n")
        testFile = FileModel("lib/lib.txt", 266, 2, 2, False, "M", hunks, None)

        commit = CommitModel("830c29f111f261e26897d42e94c15960a512c0e4", set([branch1, branch2]), [tag],
                             ['204d306b10e123f2474612a297b83be6ac79e519'], people, people, "testCommit", [testFile]
                             , 1453380157, 60, 1453380357, 60)

        self.mongostore.addCommit(commit)

        # Wait till mongostore finalized
        self.mongostore.finalize()

    def test_deleteAll(self):
        """deleteAll must purge everything except people documents."""
        self.addingCommit()

        # Check if it was inserted
        db = self.mongoClient[self.config.db_database]
        self.assertEqual(1, db.commit.find().count())
        self.assertEqual(1, db.tag.find().count())
        self.assertEqual(1, db.file.find().count())
        self.assertEqual(1, db.file_action.find().count())
        self.assertEqual(1, db.project.find().count())
        self.assertEqual(1, db.people.find().count())
        self.assertEqual(3, db.hunk.find().count())

        # Delete everything, EXCEPT people, because they can be associated to something else
        self.mongostore.deleteAll()
        self.assertEqual(0, db.commit.find().count())
        self.assertEqual(0, db.tag.find().count())
        self.assertEqual(0, db.file.find().count())
        self.assertEqual(0, db.file_action.find().count())
        self.assertEqual(0, db.project.find().count())
        self.assertEqual(1, db.people.find().count())
        self.assertEqual(0, db.hunk.find().count())

    def test_addCommit(self):
        """A stored commit must round-trip with all of its linked documents."""
        self.addingCommit()

        # Check if it was inserted
        db = self.mongoClient[self.config.db_database]

        # check if only inserted once
        commits = db.commit.find()
        self.assertEqual(1, commits.count())
        commit = commits[0]

        # Check commit data
        tags = db.tag.find()
        self.assertEqual(1, tags.count())
        tag = tags[0]

        # File
        files = db.file.find()
        self.assertEqual(1, files.count())
        file = files[0]

        # file_action
        fileActions = db.file_action.find()
        self.assertEqual(1, fileActions.count())
        fileAction = fileActions[0]

        # Project
        projects = db.project.find()
        self.assertEqual(1, projects.count())
        project = projects[0]

        # People
        people = db.people.find()
        self.assertEqual(1, people.count())
        ppl = people[0]

        # Hunks
        hunks = db.hunk.find()
        self.assertEqual(3, hunks.count())
        hunk1 = hunks[0]
        hunk2 = hunks[1]
        hunk3 = hunks[2]

        # Check Commit
        self.assertEqual(commit['projectId'], project['_id'])
        self.assertEqual('830c29f111f261e26897d42e94c15960a512c0e4', commit['revisionHash'])
        self.assertEqual(2, len(commit['branches']))
        self.assertIn('refs/heads/master', commit['branches'])
        self.assertIn('refs/heads/testbranch1', commit['branches'])
        self.assertEqual(1, len(commit['tagIds']))
        self.assertEqual(tag['_id'], commit['tagIds'][0])
        self.assertEqual(1, len(commit['parents']))
        self.assertIn('204d306b10e123f2474612a297b83be6ac79e519', commit['parents'])
        self.assertEqual(ppl['_id'], commit['authorId'])
        self.assertEqual(datetime.datetime.utcfromtimestamp(1453380157), commit['authorDate'])
        self.assertEqual(60, commit['authorOffset'])
        self.assertEqual(ppl['_id'], commit['committerId'])
        self.assertEqual(datetime.datetime.utcfromtimestamp(1453380357), commit['committerDate'])
        self.assertEqual(60, commit['committerOffset'])
        self.assertEqual('testCommit', commit['message'])
        self.assertEqual(1, len(commit['fileActionIds']))
        self.assertEqual(fileAction['_id'], commit['fileActionIds'][0])

        # Check file
        self.assertEqual(file['_id'], fileAction['fileId'])
        self.assertEqual('lib.txt', file['name'])
        self.assertEqual('lib/lib.txt', file['path'])
        self.assertEqual(project['_id'], file['projectId'])

        # Check file action
        self.assertEqual(project['_id'], fileAction['projectId'])
        self.assertEqual('830c29f111f261e26897d42e94c15960a512c0e4', fileAction['revisionHash'])
        self.assertEqual('M', fileAction['mode'])
        self.assertEqual(266, fileAction['sizeAtCommit'])
        self.assertEqual(2, fileAction['linesAdded'])
        self.assertEqual(2, fileAction['linesDeleted'])
        self.assertEqual(False, fileAction['isBinary'])
        # fixed: was the deprecated assertEquals alias
        self.assertEqual(3, len(fileAction['hunkIds']))
        self.assertIn(hunk1['_id'], fileAction['hunkIds'])
        self.assertIn(hunk2['_id'], fileAction['hunkIds'])
        self.assertIn(hunk3['_id'], fileAction['hunkIds'])

        # Check hunks
        self.assertEqual(0, hunk1['new_lines'])
        self.assertEqual(0, hunk1['new_start'])
        self.assertEqual(1, hunk1['old_start'])
        self.assertEqual(1, hunk1['old_lines'])
        self.assertEqual("-line1\n", hunk1['content'])

        self.assertEqual(1, hunk2['new_lines'])
        self.assertEqual(19, hunk2['new_start'])
        self.assertEqual(20, hunk2['old_start'])
        self.assertEqual(1, hunk2['old_lines'])
        self.assertEqual("-line20\n+\n", hunk2['content'])

        self.assertEqual(1, hunk3['new_lines'])
        self.assertEqual(40, hunk3['new_start'])
        self.assertEqual(40, hunk3['old_start'])
        self.assertEqual(0, hunk3['old_lines'])
        self.assertEqual("+line41\n", hunk3['content'])

        # Check people
        self.assertEqual("Fabian Trautsch", ppl['name'])
        self.assertEqual("ftrautsch@googlemail.com", ppl['email'])

        # Check project
        self.assertEqual(self.projectUrl, project['url'])
        self.assertEqual(self.projectName, project['name'])
        self.assertEqual('git', project['repositoryType'])

        # Check tag
        self.assertEqual('release1', tag['name'])
        self.assertEqual(project['_id'], tag['projectId'])
        self.assertEqual(datetime.datetime.utcfromtimestamp(1453380457), tag['date'])
        self.assertEqual(60, tag['offset'])
        self.assertEqual('tag release 1', tag['message'])
        self.assertEqual(ppl['_id'], tag['taggerId'])
if __name__ == "__main__":
    # Run all tests in this module when executed directly.
    #import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
|
|
# -*- coding: utf-8 -*-
"""
flask.ext.security.datastore
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains an user datastore classes.
:copyright: (c) 2012 by Matt Wright.
:license: MIT, see LICENSE for more details.
"""
from .utils import get_identity_attributes, string_types
class Datastore(object):
    """Minimal persistence interface around a database handle.

    Backends subclass this and implement ``put``/``delete`` (and optionally
    ``commit``) for their particular ORM/ODM.
    """

    def __init__(self, db):
        self.db = db

    def commit(self):
        """Flush pending changes; a no-op by default."""
        pass

    def put(self, model):
        """Persist *model*; must be provided by a backend."""
        raise NotImplementedError

    def delete(self, model):
        """Remove *model*; must be provided by a backend."""
        raise NotImplementedError
class SQLAlchemyDatastore(Datastore):
    """Datastore backend backed by a Flask-SQLAlchemy ``db`` object."""

    def commit(self):
        self.db.session.commit()

    def put(self, model):
        # Stage the model on the session; it is persisted on commit().
        self.db.session.add(model)
        return model

    def delete(self, model):
        self.db.session.delete(model)
class MongoEngineDatastore(Datastore):
    """Datastore backend for MongoEngine documents; saves are immediate."""

    def put(self, model):
        model.save()
        return model

    def delete(self, model):
        model.delete()
class PeeweeDatastore(Datastore):
    """Datastore backend for peewee models; saves are immediate."""

    def put(self, model):
        model.save()
        return model

    def delete(self, model):
        model.delete_instance()
class UserDatastore(object):
    """Abstracted user datastore.

    :param user_model: A user model class definition
    :param role_model: A role model class definition
    """

    def __init__(self, user_model, role_model):
        self.user_model = user_model
        self.role_model = role_model

    def _prepare_role_modify_args(self, user, role):
        # Accept either model instances or lookup keys (user email,
        # role name) and resolve the strings to instances.
        if isinstance(user, string_types):
            user = self.find_user(email=user)
        if isinstance(role, string_types):
            role = self.find_role(role)
        return user, role

    def _prepare_create_user_args(self, **kwargs):
        kwargs.setdefault('active', True)
        roles = kwargs.get('roles', [])
        # Resolve every entry to a role instance, whether it arrived as a
        # role object or as a role name.
        for index, role in enumerate(roles):
            role_name = role.name if isinstance(role, self.role_model) else role
            roles[index] = self.find_role(role_name)
        kwargs['roles'] = roles
        return kwargs

    def get_user(self, id_or_email):
        """Returns a user matching the specified ID or email address"""
        raise NotImplementedError

    def find_user(self, *args, **kwargs):
        """Returns a user matching the provided parameters."""
        raise NotImplementedError

    def find_role(self, *args, **kwargs):
        """Returns a role matching the provided name."""
        raise NotImplementedError

    def add_role_to_user(self, user, role):
        """Adds a role to a user

        :param user: The user to manipulate
        :param role: The role to add to the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        if role in user.roles:
            return False
        user.roles.append(role)
        self.put(user)
        return True

    def remove_role_from_user(self, user, role):
        """Removes a role from a user

        :param user: The user to manipulate
        :param role: The role to remove from the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        if role not in user.roles:
            return False
        user.roles.remove(role)
        return True

    def toggle_active(self, user):
        """Toggles a user's active status. Always returns True."""
        user.active = not user.active
        return True

    def deactivate_user(self, user):
        """Deactivates a specified user. Returns `True` if a change was made.

        :param user: The user to deactivate
        """
        if not user.active:
            return False
        user.active = False
        return True

    def activate_user(self, user):
        """Activates a specified user. Returns `True` if a change was made.

        :param user: The user to activate
        """
        if user.active:
            return False
        user.active = True
        return True

    def create_role(self, **kwargs):
        """Creates and returns a new role from the given parameters."""
        return self.put(self.role_model(**kwargs))

    def find_or_create_role(self, name, **kwargs):
        """Returns a role matching the given name or creates it with any
        additionally provided parameters
        """
        kwargs["name"] = name
        return self.find_role(name) or self.create_role(**kwargs)

    def create_user(self, **kwargs):
        """Creates and returns a new user from the given parameters."""
        prepared = self._prepare_create_user_args(**kwargs)
        return self.put(self.user_model(**prepared))

    def delete_user(self, user):
        """Delete the specified user

        :param user: The user to delete
        """
        self.delete(user)
class SQLAlchemyUserDatastore(SQLAlchemyDatastore, UserDatastore):
    """A SQLAlchemy datastore implementation for Flask-Security that assumes the
    use of the Flask-SQLAlchemy extension.
    """

    def __init__(self, db, user_model, role_model):
        SQLAlchemyDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)

    def get_user(self, identifier):
        # Try the primary key first, then fall back to a case-insensitive
        # match against each configured identity attribute.
        if self._is_numeric(identifier):
            return self.user_model.query.get(identifier)
        for attr in get_identity_attributes():
            condition = getattr(self.user_model, attr).ilike(identifier)
            match = self.user_model.query.filter(condition).first()
            if match is not None:
                return match

    def _is_numeric(self, value):
        # True when the value can be parsed as an int (candidate primary key).
        try:
            int(value)
            return True
        except ValueError:
            return False

    def find_user(self, **kwargs):
        return self.user_model.query.filter_by(**kwargs).first()

    def find_role(self, role):
        return self.role_model.query.filter_by(name=role).first()
class MongoEngineUserDatastore(MongoEngineDatastore, UserDatastore):
    """A MongoEngine datastore implementation for Flask-Security that assumes
    the use of the Flask-MongoEngine extension.
    """

    def __init__(self, db, user_model, role_model):
        MongoEngineDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)

    def get_user(self, identifier):
        from mongoengine import ValidationError
        # A ValidationError means the identifier is not a valid ObjectId;
        # fall through to the identity-attribute lookups.
        try:
            return self.user_model.objects(id=identifier).first()
        except ValidationError:
            pass
        for attr in get_identity_attributes():
            lookup = {'%s__iexact' % attr: identifier}
            match = self.user_model.objects(**lookup).first()
            if match is not None:
                return match

    def find_user(self, **kwargs):
        # Q/QCombination moved between mongoengine versions.
        try:
            from mongoengine.queryset import Q, QCombination
        except ImportError:
            from mongoengine.queryset.visitor import Q, QCombination
        from mongoengine.errors import ValidationError

        terms = [Q(**{field: value}) for field, value in kwargs.items()]
        combined = QCombination(QCombination.AND, terms)
        try:
            return self.user_model.objects(combined).first()
        except ValidationError:  # pragma: no cover
            return None

    def find_role(self, role):
        return self.role_model.objects(name=role).first()

    # TODO: Not sure why this was added but tests pass without it
    # def add_role_to_user(self, user, role):
    #     rv = super(MongoEngineUserDatastore, self).add_role_to_user(user, role)
    #     if rv:
    #         self.put(user)
    #     return rv
class PeeweeUserDatastore(PeeweeDatastore, UserDatastore):
    """A Peewee datastore implementation for Flask-Security that assumes
    the use of the Flask-Peewee extension.

    :param user_model: A user model class definition
    :param role_model: A role model class definition
    :param role_link: A model implementing the many-to-many user-role relation
    """

    def __init__(self, db, user_model, role_model, role_link):
        PeeweeDatastore.__init__(self, db)
        UserDatastore.__init__(self, user_model, role_model)
        self.UserRole = role_link

    def get_user(self, identifier):
        # Try the primary key first; a ValueError means the identifier is not
        # key-shaped, so fall through to the identity-attribute lookups.
        try:
            return self.user_model.get(self.user_model.id == identifier)
        except ValueError:
            pass
        for attr in get_identity_attributes():
            column = getattr(self.user_model, attr)
            try:
                # ``**`` is peewee's case-insensitive LIKE operator.
                return self.user_model.get(column ** identifier)
            except self.user_model.DoesNotExist:
                pass

    def find_user(self, **kwargs):
        try:
            return self.user_model.filter(**kwargs).get()
        except self.user_model.DoesNotExist:
            return None

    def find_role(self, role):
        try:
            return self.role_model.filter(name=role).get()
        except self.role_model.DoesNotExist:
            return None

    def create_user(self, **kwargs):
        """Creates and returns a new user from the given parameters."""
        roles = kwargs.pop('roles', [])
        user = self.user_model(**self._prepare_create_user_args(**kwargs))
        user = self.put(user)
        # Roles are linked through the association model, one row per role.
        for role in roles:
            self.add_role_to_user(user, role)
        self.put(user)
        return user

    def add_role_to_user(self, user, role):
        """Adds a role to a user

        :param user: The user to manipulate
        :param role: The role to add to the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        existing = self.UserRole.select() \
            .where(self.UserRole.user == user.id, self.UserRole.role == role.id)
        if existing.count():
            return False
        self.put(self.UserRole.create(user=user.id, role=role.id))
        return True

    def remove_role_from_user(self, user, role):
        """Removes a role from a user

        :param user: The user to manipulate
        :param role: The role to remove from the user
        """
        user, role = self._prepare_role_modify_args(user, role)
        matches = self.UserRole.select() \
            .where(self.UserRole.user == user, self.UserRole.role == role)
        if not matches.count():
            return False
        self.UserRole.delete().where(
            self.UserRole.user == user, self.UserRole.role == role).execute()
        return True
|
|
import datetime
import decimal
import itertools
import sys
import time
from wtforms import widgets
from wtforms.validators import StopValidation, u, unicode, next
# Public API of this fields module.
__all__ = (
    'BooleanField', 'DecimalField', 'DateField', 'DateTimeField', 'FieldList',
    'FileField', 'FloatField', 'FormField', 'HiddenField', 'IntegerField',
    'PasswordField', 'RadioField', 'SelectField', 'SelectMultipleField',
    'SubmitField', 'TextField', 'TextAreaField',
)

# Sentinel meaning "no value was provided"; distinct from None so that None
# remains a legitimate field value.
_unset_value = object()
class DummyTranslations(object):
    """Fallback translation provider that performs no translation at all."""

    def gettext(self, string):
        # Identity translation: return the message untouched.
        return string

    def ngettext(self, singular, plural, n):
        # English-style pluralization: singular only when n is exactly 1.
        return singular if n == 1 else plural
class Field(object):
    """
    Field base class.

    Holds the value, label, validators and rendering widget for one form
    input, and drives the process -> validate lifecycle for it.
    """
    # Rendering widget; concrete subclasses override this with an instance.
    widget = None
    # Errors collected by the most recent validate() call.
    errors = tuple()
    # Errors raised while ingesting data in process().
    process_errors = tuple()
    _formfield = True
    _translations = DummyTranslations()
    def __new__(cls, *args, **kwargs):
        # Fields declared on a form class body are constructed without
        # _form/_name; defer real construction by returning an UnboundField
        # which the enclosing form binds later.
        if '_form' in kwargs and '_name' in kwargs:
            return super(Field, cls).__new__(cls)
        else:
            return UnboundField(cls, *args, **kwargs)
    def __init__(self, label=u(''), validators=None, filters=tuple(),
                 description=u(''), id=None, default=None, widget=None,
                 _form=None, _name=None, _prefix='', _translations=None):
        """
        Construct a new field.
        :param label:
            The label of the field. Available after construction through the
            `label` property.
        :param validators:
            A sequence of validators to call when `validate` is called.
        :param filters:
            A sequence of filters which are run on input data by `process`.
        :param description:
            A description for the field, typically used for help text.
        :param id:
            An id to use for the field. A reasonable default is set by the form,
            and you shouldn't need to set this manually.
        :param default:
            The default value to assign to the field, if no form or object
            input is provided. May be a callable.
        :param widget:
            If provided, overrides the widget used to render the field.
        :param _form:
            The form holding this field. It is passed by the form itself during
            construction. You should never pass this value yourself.
        :param _name:
            The name of this field, passed by the enclosing form during its
            construction. You should never pass this value yourself.
        :param _prefix:
            The prefix to prepend to the form name of this field, passed by
            the enclosing form during construction.
        If `_form` and `_name` isn't provided, an :class:`UnboundField` will be
        returned instead. Call its :func:`bind` method with a form instance and
        a name to construct the field.
        """
        self.short_name = _name
        self.name = _prefix + _name
        if _translations is not None:
            self._translations = _translations
        self.id = id or self.name
        self.label = Label(self.id, label or _name.replace('_', ' ').title())
        if validators is None:
            validators = []
        self.validators = validators
        self.filters = filters
        self.description = description
        self.type = type(self).__name__
        self.default = default
        self.raw_data = None
        if widget:
            self.widget = widget
        self.flags = Flags()
        # Validators may advertise flags (e.g. 'required') through
        # field_flags; expose them on self.flags for widgets/templates.
        for v in validators:
            flags = getattr(v, 'field_flags', ())
            for f in flags:
                setattr(self.flags, f, True)
    def __unicode__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __str__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __html__(self):
        """
        Returns a HTML representation of the field. For more powerful rendering,
        see the `__call__` method.
        """
        return self()
    def __call__(self, **kwargs):
        """
        Render this field as HTML, using keyword args as additional attributes.
        Any HTML attribute passed to the method will be added to the tag
        and entity-escaped properly.
        """
        return self.widget(self, **kwargs)
    def gettext(self, string):
        # Translate a message via the configured translations provider.
        return self._translations.gettext(string)
    def ngettext(self, singular, plural, n):
        # Translate a pluralizable message via the translations provider.
        return self._translations.ngettext(singular, plural, n)
    def validate(self, form, extra_validators=tuple()):
        """
        Validates the field and returns True or False. `self.errors` will
        contain any errors raised during validation. This is usually only
        called by `Form.validate`.
        Subfields shouldn't override this, but rather override either
        `pre_validate`, `post_validate` or both, depending on needs.
        :param form: The form the field belongs to.
        :param extra_validators: A list of extra validators to run.
        """
        # Start from any errors already recorded while processing input.
        self.errors = list(self.process_errors)
        stop_validation = False
        # Call pre_validate
        try:
            self.pre_validate(form)
        except StopValidation:
            e = sys.exc_info()[1]
            if e.args and e.args[0]:
                self.errors.append(e.args[0])
            stop_validation = True
        except ValueError:
            e = sys.exc_info()[1]
            self.errors.append(e.args[0])
        # Run validators
        if not stop_validation:
            for validator in itertools.chain(self.validators, extra_validators):
                try:
                    validator(form, self)
                except StopValidation:
                    # StopValidation halts the chain; a message is optional.
                    e = sys.exc_info()[1]
                    if e.args and e.args[0]:
                        self.errors.append(e.args[0])
                    stop_validation = True
                    break
                except ValueError:
                    e = sys.exc_info()[1]
                    self.errors.append(e.args[0])
        # Call post_validate
        try:
            self.post_validate(form, stop_validation)
        except ValueError:
            e = sys.exc_info()[1]
            self.errors.append(e.args[0])
        return len(self.errors) == 0
    def pre_validate(self, form):
        """
        Override if you need field-level validation. Runs before any other
        validators.
        :param form: The form the field belongs to.
        """
        pass
    def post_validate(self, form, validation_stopped):
        """
        Override if you need to run any field-level validation tasks after
        normal validation. This shouldn't be needed in most cases.
        :param form: The form the field belongs to.
        :param validation_stopped:
            `True` if any validator raised StopValidation.
        """
        pass
    def process(self, formdata, data=_unset_value):
        """
        Process incoming data, calling process_data, process_formdata as needed,
        and run filters.
        If `data` is not provided, process_data will be called on the field's
        default.
        Field subclasses usually won't override this, instead overriding the
        process_formdata and process_data methods. Only override this for
        special advanced processing, such as when a field encapsulates many
        inputs.
        """
        self.process_errors = []
        if data is _unset_value:
            # The default may be a callable; fall back to the raw value.
            try:
                data = self.default()
            except TypeError:
                data = self.default
        try:
            self.process_data(data)
        except ValueError:
            e = sys.exc_info()[1]
            self.process_errors.append(e.args[0])
        if formdata:
            try:
                if self.name in formdata:
                    self.raw_data = formdata.getlist(self.name)
                else:
                    self.raw_data = []
                self.process_formdata(self.raw_data)
            except ValueError:
                e = sys.exc_info()[1]
                self.process_errors.append(e.args[0])
        # Filters run last, on whichever data source won above.
        for filter in self.filters:
            try:
                self.data = filter(self.data)
            except ValueError:
                e = sys.exc_info()[1]
                self.process_errors.append(e.args[0])
    def process_data(self, value):
        """
        Process the Python data applied to this field and store the result.
        This will be called during form construction by the form's `kwargs` or
        `obj` argument.
        :param value: The python object containing the value to process.
        """
        self.data = value
    def process_formdata(self, valuelist):
        """
        Process data received over the wire from a form.
        This will be called during form construction with data supplied
        through the `formdata` argument.
        :param valuelist: A list of strings to process.
        """
        if valuelist:
            self.data = valuelist[0]
    def populate_obj(self, obj, name):
        """
        Populates `obj.<name>` with the field's data.
        :note: This is a destructive operation. If `obj.<name>` already exists,
        it will be overridden. Use with caution.
        """
        setattr(obj, name, self.data)
class UnboundField(object):
    """Stand-in produced when a Field is declared without a form binding.

    Records the field class and its construction arguments so the real
    field can be built later via :meth:`bind`.  The per-instance
    ``creation_counter`` preserves declaration order of fields on a form.
    """
    _formfield = True
    creation_counter = 0

    def __init__(self, field_class, *args, **kwargs):
        UnboundField.creation_counter += 1
        self.field_class = field_class
        self.args = args
        self.kwargs = kwargs
        self.creation_counter = UnboundField.creation_counter

    def bind(self, form, name, prefix='', translations=None, **kwargs):
        # Per-bind kwargs override those captured at declaration time.
        merged_kwargs = dict(self.kwargs, **kwargs)
        return self.field_class(
            _form=form, _prefix=prefix, _name=name,
            _translations=translations, *self.args, **merged_kwargs)

    def __repr__(self):
        return '<UnboundField(%s, %r, %r)>' % (
            self.field_class.__name__, self.args, self.kwargs)
class Flags(object):
    """
    Holds a set of boolean flags as attributes.
    Accessing a non-existing attribute returns False for its value.
    """
    def __getattr__(self, name):
        # Invoked only for attributes never set on the instance.
        return False

    def __contains__(self, name):
        return getattr(self, name)

    def __repr__(self):
        set_flags = ', '.join(
            attr for attr in dir(self) if not attr.startswith('_'))
        return '<wtforms.fields.Flags: {%s}>' % set_flags
class Label(object):
    """
    An HTML form label tied to a specific field id.
    """
    def __init__(self, field_id, text):
        self.field_id = field_id
        self.text = text

    def __str__(self):
        return self()

    def __unicode__(self):
        return self()

    def __html__(self):
        return self()

    def __call__(self, text=None, **kwargs):
        # The rendered label always references its field via `for`.
        kwargs['for'] = self.field_id
        attributes = widgets.html_params(**kwargs)
        label_text = text or self.text
        return widgets.HTMLString(u('<label %s>%s</label>') % (attributes, label_text))

    def __repr__(self):
        return 'Label(%r, %r)' % (self.field_id, self.text)
class SelectFieldBase(Field):
    """
    Base class for fields which can be iterated to produce options.

    This isn't a field, but an abstract base class for fields which want to
    provide this functionality.
    """
    # NOTE: in the original code this docstring appeared *after* the
    # option_widget assignment, making it a no-op string statement instead
    # of the class __doc__; it has been moved to the top of the class body.
    option_widget = widgets.Option()

    def __init__(self, label=u(''), validators=None, option_widget=None, **kwargs):
        super(SelectFieldBase, self).__init__(label, validators, **kwargs)
        if option_widget is not None:
            self.option_widget = option_widget

    def iter_choices(self):
        """
        Provides data for choice widget rendering. Must return a sequence or
        iterable of (value, label, selected) tuples.
        """
        raise NotImplementedError()

    def __iter__(self):
        # Yield a throwaway _Option field per choice so templates can render
        # each option (label, value, checked state) individually.
        opts = dict(widget=self.option_widget, _name=self.name, _form=None)
        for i, (value, label, checked) in enumerate(self.iter_choices()):
            opt = self._Option(label=label, id=u('%s-%d') % (self.id, i), **opts)
            opt.process(None, value)
            opt.checked = checked
            yield opt

    class _Option(Field):
        # Whether this option is the currently-selected one.
        checked = False

        def _value(self):
            return self.data
class SelectField(SelectFieldBase):
    """
    A drop-down list whose submitted value is coerced via *coerce* and
    checked against *choices* during validation.
    """
    widget = widgets.Select()

    def __init__(self, label=u(''), validators=None, coerce=unicode, choices=None, **kwargs):
        super(SelectField, self).__init__(label, validators, **kwargs)
        self.coerce = coerce
        self.choices = choices

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, self.coerce(value) == self.data)

    def process_data(self, value):
        try:
            self.data = self.coerce(value)
        except (ValueError, TypeError):
            # Uncoercible object data simply leaves the field empty.
            self.data = None

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        try:
            self.data = self.coerce(valuelist[0])
        except ValueError:
            raise ValueError(self.gettext(u('Invalid Choice: could not coerce')))

    def pre_validate(self, form):
        # Accept only values present in the configured choices.
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext(u('Not a valid choice')))
class SelectMultipleField(SelectField):
    """
    No different from a normal select field, except this one can take (and
    validate) multiple choices. You'll need to specify the HTML `rows`
    attribute to the select field when rendering.
    """
    widget = widgets.Select(multiple=True)

    def iter_choices(self):
        for value, label in self.choices:
            is_selected = self.data is not None and self.coerce(value) in self.data
            yield (value, label, is_selected)

    def process_data(self, value):
        try:
            self.data = [self.coerce(v) for v in value]
        except (ValueError, TypeError):
            # Non-iterable or uncoercible object data leaves the field empty.
            self.data = None

    def process_formdata(self, valuelist):
        try:
            self.data = [self.coerce(x) for x in valuelist]
        except ValueError:
            raise ValueError(self.gettext(u('Invalid choice(s): one or more data inputs could not be coerced')))

    def pre_validate(self, form):
        if not self.data:
            return
        allowed = [choice[0] for choice in self.choices]
        for d in self.data:
            if d not in allowed:
                raise ValueError(self.gettext(u("'%(value)s' is not a valid choice for this field")) % dict(value=d))
class RadioField(SelectField):
    """
    Like a SelectField, but rendered as a list of radio buttons.

    Iterating the field yields subfields (each with its own label) so the
    individual radio inputs can be rendered with custom markup.
    """
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.RadioInput()
class TextField(Field):
    """
    This field is the base for most of the more complicated fields, and
    represents an ``<input type="text">``.
    """
    widget = widgets.TextInput()

    def process_formdata(self, valuelist):
        # Absent form input becomes the empty string rather than None.
        self.data = valuelist[0] if valuelist else u('')

    def _value(self):
        return unicode(self.data) if self.data is not None else u('')
class HiddenField(TextField):
    """
    A text field rendered invisibly as an ``<input type="hidden">``.
    """
    widget = widgets.HiddenInput()
class TextAreaField(TextField):
    """
    Represents an HTML ``<textarea>`` for multi-line text input.
    """
    widget = widgets.TextArea()
class PasswordField(TextField):
    """
    A text field rendered as an ``<input type="password">``.
    """
    widget = widgets.PasswordInput()
class FileField(TextField):
    """
    Renders a file-upload input and captures any filename the browser sends
    in the post params. It does NOT handle the actual file upload itself:
    wtforms stays agnostic of each framework's file-handling machinery.
    """
    widget = widgets.FileInput()
class IntegerField(TextField):
    """
    A text field, except all input is coerced to an integer. Erroneous input
    is ignored and will not be accepted as a value.
    """
    def __init__(self, label=u(''), validators=None, **kwargs):
        super(IntegerField, self).__init__(label, validators, **kwargs)

    def _value(self):
        # Echo the raw submission when present so invalid input survives
        # round-trips for the user to correct.
        if self.raw_data:
            return self.raw_data[0]
        if self.data is not None:
            return unicode(self.data)
        return u('')

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        try:
            self.data = int(valuelist[0])
        except ValueError:
            raise ValueError(self.gettext(u('Not a valid integer value')))
class DecimalField(TextField):
    """
    A text field which displays and coerces data of the `decimal.Decimal` type.
    :param places:
        How many decimal places to quantize the value to for display on form.
        If None, does not quantize value.
    :param rounding:
        How to round the value during quantize, for example
        `decimal.ROUND_UP`. If unset, uses the rounding value from the
        current thread's context.
    """
    def __init__(self, label=u(''), validators=None, places=2, rounding=None, **kwargs):
        super(DecimalField, self).__init__(label, validators, **kwargs)
        self.places = places
        self.rounding = rounding
    def _value(self):
        # Prefer echoing the raw submission so invalid input round-trips.
        if self.raw_data:
            return self.raw_data[0]
        elif self.data is not None:
            if self.places is not None:
                if hasattr(self.data, 'quantize'):
                    # Decimal('.1') ** places yields the quantization target,
                    # e.g. places=2 -> Decimal('0.01').
                    exp = decimal.Decimal('.1') ** self.places
                    quantized = self.data.quantize(exp, rounding=self.rounding)
                    return unicode(quantized)
                else:
                    # If for some reason, data is a float or int, then format
                    # as we would for floats using string formatting.
                    format = u('%%0.%df') % self.places
                    return format % self.data
            else:
                return unicode(self.data)
        else:
            return u('')
    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = decimal.Decimal(valuelist[0])
            except (decimal.InvalidOperation, ValueError):
                raise ValueError(self.gettext(u('Not a valid decimal value')))
class FloatField(TextField):
    """
    A text field, except all input is coerced to an float. Erroneous input
    is ignored and will not be accepted as a value.
    """
    def __init__(self, label=u(''), validators=None, **kwargs):
        super(FloatField, self).__init__(label, validators, **kwargs)

    def _value(self):
        # Echo the raw submission when present so invalid input survives
        # round-trips for the user to correct.
        if self.raw_data:
            return self.raw_data[0]
        if self.data is not None:
            return unicode(self.data)
        return u('')

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        try:
            self.data = float(valuelist[0])
        except ValueError:
            raise ValueError(self.gettext(u('Not a valid float value')))
class BooleanField(Field):
    """
    Represents an ``<input type="checkbox">``.
    """
    widget = widgets.CheckboxInput()

    def __init__(self, label=u(''), validators=None, **kwargs):
        super(BooleanField, self).__init__(label, validators, **kwargs)

    def process_data(self, value):
        self.data = bool(value)

    def process_formdata(self, valuelist):
        # Browsers omit unchecked checkboxes entirely, so the mere presence
        # of any submitted value means "checked".
        self.data = bool(valuelist)

    def _value(self):
        if self.raw_data:
            return unicode(self.raw_data[0])
        return u('y')
class DateTimeField(Field):
    """
    A text field which stores a `datetime.datetime` matching a format.
    """
    widget = widgets.TextInput()

    def __init__(self, label=u(''), validators=None, format='%Y-%m-%d %H:%M:%S', **kwargs):
        super(DateTimeField, self).__init__(label, validators, **kwargs)
        self.format = format

    def _value(self):
        if self.raw_data:
            return u(' ').join(self.raw_data)
        return self.data and self.data.strftime(self.format) or u('')

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        # Multiple inputs (e.g. separate date and time boxes) are joined
        # with spaces before parsing.
        date_str = u(' ').join(valuelist)
        try:
            timetuple = time.strptime(date_str, self.format)
            self.data = datetime.datetime(*timetuple[:6])
        except ValueError:
            self.data = None
            raise
class DateField(DateTimeField):
    """
    Same as DateTimeField, except stores a `datetime.date`.
    """
    def __init__(self, label=u(''), validators=None, format='%Y-%m-%d', **kwargs):
        super(DateField, self).__init__(label, validators, format, **kwargs)

    def process_formdata(self, valuelist):
        if not valuelist:
            return
        date_str = u(' ').join(valuelist)
        try:
            timetuple = time.strptime(date_str, self.format)
            self.data = datetime.date(*timetuple[:3])
        except ValueError:
            self.data = None
            raise
class SubmitField(BooleanField):
    """
    Represents an ``<input type="submit">``, letting a form check whether a
    particular submit button was the one pressed.
    """
    widget = widgets.SubmitInput()
class FormField(Field):
    """
    Encapsulate a form as a field in another form.
    :param form_class:
        A subclass of Form that will be encapsulated.
    :param separator:
        A string which will be suffixed to this field's name to create the
        prefix to enclosed fields. The default is fine for most uses.
    """
    widget = widgets.TableWidget()
    def __init__(self, form_class, label=u(''), validators=None, separator='-', **kwargs):
        super(FormField, self).__init__(label, validators, **kwargs)
        self.form_class = form_class
        self.separator = separator
        # Object the enclosed form was last populated from; used as the
        # fallback target in populate_obj.
        self._obj = None
        if self.filters:
            raise TypeError('FormField cannot take filters, as the encapsulated data is not mutable.')
        if validators:
            raise TypeError('FormField does not accept any validators. Instead, define them on the enclosed form.')
    def process(self, formdata, data=_unset_value):
        # Instantiate the enclosed form, namespacing its fields with our
        # name plus the separator so submissions round-trip correctly.
        if data is _unset_value:
            try:
                data = self.default()
            except TypeError:
                data = self.default
            self._obj = data
        prefix = self.name + self.separator
        if isinstance(data, dict):
            self.form = self.form_class(formdata=formdata, prefix=prefix, **data)
        else:
            self.form = self.form_class(formdata=formdata, obj=data, prefix=prefix)
    def validate(self, form, extra_validators=tuple()):
        # Validation is delegated entirely to the enclosed form.
        if extra_validators:
            raise TypeError('FormField does not accept in-line validators, as it gets errors from the enclosed form.')
        return self.form.validate()
    def populate_obj(self, obj, name):
        candidate = getattr(obj, name, None)
        if candidate is None:
            if self._obj is None:
                raise TypeError('populate_obj: cannot find a value to populate from the provided obj or input data/defaults')
            candidate = self._obj
        setattr(obj, name, candidate)
        self.form.populate_obj(candidate)
    def __iter__(self):
        return iter(self.form)
    def __getitem__(self, name):
        return self.form[name]
    def __getattr__(self, name):
        # Unknown attributes fall through to the enclosed form.
        return getattr(self.form, name)
    @property
    def data(self):
        return self.form.data
    @property
    def errors(self):
        return self.form.errors
class FieldList(Field):
    """
    Encapsulate an ordered list of multiple instances of the same field type,
    keeping data as a list.
    >>> authors = FieldList(TextField('Name', [validators.required()]))
    :param unbound_field:
        A partially-instantiated field definition, just like that would be
        defined on a form directly.
    :param min_entries:
        if provided, always have at least this many entries on the field,
        creating blank ones if the provided input does not specify a sufficient
        amount.
    :param max_entries:
        accept no more than this many entries as input, even if more exist in
        formdata.
    """
    widget = widgets.ListWidget()
    def __init__(self, unbound_field, label=u(''), validators=None, min_entries=0,
                 max_entries=None, default=tuple(), **kwargs):
        super(FieldList, self).__init__(label, validators, default=default, **kwargs)
        if self.filters:
            raise TypeError('FieldList does not accept any filters. Instead, define them on the enclosed field.')
        if validators:
            raise TypeError('FieldList does not accept any validators. Instead, define them on the enclosed field.')
        assert isinstance(unbound_field, UnboundField), 'Field must be unbound, not a field class'
        self.unbound_field = unbound_field
        self.min_entries = min_entries
        self.max_entries = max_entries
        # Index of the most recently added entry; -1 means "no entries yet".
        self.last_index = -1
        self._prefix = kwargs.get('_prefix', '')
    def process(self, formdata, data=_unset_value):
        """Build self.entries from formdata and/or object data."""
        self.entries = []
        if data is _unset_value or not data:
            try:
                data = self.default()
            except TypeError:
                data = self.default
        if formdata:
            # Entry indices are recovered from the submitted field names, so
            # gaps and arbitrary ordering in the form data are tolerated.
            indices = sorted(set(self._extract_indices(self.name, formdata)))
            if self.max_entries:
                indices = indices[:self.max_entries]
            idata = iter(data)
            for index in indices:
                try:
                    obj_data = next(idata)
                except StopIteration:
                    obj_data = _unset_value
                self._add_entry(formdata, obj_data, index=index)
        else:
            for obj_data in data:
                self._add_entry(formdata, obj_data)
        # Pad with blank entries up to the configured minimum.
        while len(self.entries) < self.min_entries:
            self._add_entry(formdata)
    def _extract_indices(self, prefix, formdata):
        """
        Yield indices of any keys with given prefix.
        formdata must be an object which will produce keys when iterated. For
        example, if field 'foo' contains keys 'foo-0-bar', 'foo-1-baz', then
        the numbers 0 and 1 will be yielded, but not neccesarily in order.
        """
        offset = len(prefix) + 1
        for k in formdata:
            if k.startswith(prefix):
                k = k[offset:].split('-', 1)[0]
                if k.isdigit():
                    yield int(k)
    def validate(self, form, extra_validators=tuple()):
        """Validate every enclosed entry; collect their error lists."""
        self.errors = []
        success = True
        for subfield in self.entries:
            if not subfield.validate(form):
                success = False
                self.errors.append(subfield.errors)
        return success
    def populate_obj(self, obj, name):
        values = getattr(obj, name, None)
        try:
            ivalues = iter(values)
        except TypeError:
            ivalues = iter([])
        # Pair each entry with an existing value, padding with None.
        candidates = itertools.chain(ivalues, itertools.repeat(None))
        _fake = type('_fake', (object, ), {})
        output = []
        for field, data in zip(self.entries, candidates):
            fake_obj = _fake()
            fake_obj.data = data
            field.populate_obj(fake_obj, 'data')
            output.append(fake_obj.data)
        setattr(obj, name, output)
    def _add_entry(self, formdata=None, data=_unset_value, index=None):
        """Bind, process and append one enclosed field at *index*."""
        assert not self.max_entries or len(self.entries) < self.max_entries, \
            'You cannot have more than max_entries entries in this FieldList'
        # Bug fix: the original `index or (self.last_index + 1)` treated a
        # legitimate index of 0 as "unset", mis-numbering the entry whenever
        # last_index was not -1. Only auto-assign when no index was given.
        if index is None:
            index = self.last_index + 1
        self.last_index = index
        name = '%s-%d' % (self.short_name, index)
        id = '%s-%d' % (self.id, index)
        field = self.unbound_field.bind(form=None, name=name, prefix=self._prefix, id=id)
        field.process(formdata, data)
        self.entries.append(field)
        return field
    def append_entry(self, data=_unset_value):
        """
        Create a new entry with optional default data.
        Entries added in this way will *not* receive formdata however, and can
        only receive object data.
        """
        return self._add_entry(data=data)
    def pop_entry(self):
        """ Removes the last entry from the list and returns it. """
        entry = self.entries.pop()
        self.last_index -= 1
        return entry
    def __iter__(self):
        return iter(self.entries)
    def __len__(self):
        return len(self.entries)
    def __getitem__(self, index):
        return self.entries[index]
    @property
    def data(self):
        return [f.data for f in self.entries]
|
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tink.python.tink.jwt._jwt_mac_wrapper."""
import io
from absl.testing import absltest
from absl.testing import parameterized
from tink.proto import jwt_hmac_pb2
from tink.proto import tink_pb2
import tink
from tink import cleartext_keyset_handle
from tink import jwt
from tink.jwt import _json_util
from tink.jwt import _jwt_format
from tink.testing import keyset_builder
def setUpModule():
  """Registers the JWT MAC primitive wrappers once for all tests."""
  jwt.register_jwt_mac()
def _set_custom_kid(keyset_handle: tink.KeysetHandle,
                    custom_kid: str) -> tink.KeysetHandle:
  """Set the custom_kid field of the first key."""
  # Round-trip through binary serialization so the key proto can be edited.
  buffer = io.BytesIO()
  cleartext_keyset_handle.write(
      tink.BinaryKeysetWriter(buffer), keyset_handle)
  keyset = tink_pb2.Keyset.FromString(buffer.getvalue())
  hmac_key = jwt_hmac_pb2.JwtHmacKey.FromString(keyset.key[0].key_data.value)
  hmac_key.custom_kid.value = custom_kid
  keyset.key[0].key_data.value = hmac_key.SerializeToString()
  return cleartext_keyset_handle.from_keyset(keyset)
def _change_key_id(keyset_handle: tink.KeysetHandle) -> tink.KeysetHandle:
  """Changes the key id of the first key and sets it primary."""
  # Round-trip through binary serialization so the keyset proto can be edited.
  buffer = io.BytesIO()
  cleartext_keyset_handle.write(
      tink.BinaryKeysetWriter(buffer), keyset_handle)
  keyset = tink_pb2.Keyset.FromString(buffer.getvalue())
  # XOR the key id with an arbitrary 32-bit string to get a new key id.
  new_key_id = keyset.key[0].key_id ^ 0xdeadbeef
  keyset.key[0].key_id = new_key_id
  keyset.primary_key_id = new_key_id
  return cleartext_keyset_handle.from_keyset(keyset)
def _change_output_prefix_to_tink(
    keyset_handle: tink.KeysetHandle) -> tink.KeysetHandle:
  """Changes the output prefix type of the first key to TINK."""
  # Round-trip through binary serialization so the keyset proto can be edited.
  buffer = io.BytesIO()
  cleartext_keyset_handle.write(
      tink.BinaryKeysetWriter(buffer), keyset_handle)
  keyset = tink_pb2.Keyset.FromString(buffer.getvalue())
  keyset.key[0].output_prefix_type = tink_pb2.TINK
  return cleartext_keyset_handle.from_keyset(keyset)
class JwtMacWrapperTest(parameterized.TestCase):
  """Tests the keyset-level (wrapped) JwtMac behavior: key rotation and kid headers."""

  @parameterized.parameters([
      (jwt.raw_jwt_hs256_template(), jwt.raw_jwt_hs256_template()),
      (jwt.raw_jwt_hs256_template(), jwt.jwt_hs256_template()),
      (jwt.jwt_hs256_template(), jwt.raw_jwt_hs256_template()),
      (jwt.jwt_hs256_template(), jwt.jwt_hs256_template()),
  ])
  def test_key_rotation(self, old_key_tmpl, new_key_tmpl):
    # Four primitives capture the four stages of a rotation:
    # 1: old key only; 2: new key added, old key primary;
    # 3: new key primary; 4: old key disabled.
    builder = keyset_builder.new_keyset_builder()
    older_key_id = builder.add_new_key(old_key_tmpl)
    builder.set_primary_key(older_key_id)
    jwtmac1 = builder.keyset_handle().primitive(jwt.JwtMac)
    newer_key_id = builder.add_new_key(new_key_tmpl)
    jwtmac2 = builder.keyset_handle().primitive(jwt.JwtMac)
    builder.set_primary_key(newer_key_id)
    jwtmac3 = builder.keyset_handle().primitive(jwt.JwtMac)
    builder.disable_key(older_key_id)
    jwtmac4 = builder.keyset_handle().primitive(jwt.JwtMac)
    raw_jwt = jwt.new_raw_jwt(issuer='a', without_expiration=True)
    validator = jwt.new_validator(
        expected_issuer='a', allow_missing_expiration=True)
    self.assertNotEqual(older_key_id, newer_key_id)
    # 1 uses the older key. So 1, 2 and 3 can verify the mac, but not 4.
    compact1 = jwtmac1.compute_mac_and_encode(raw_jwt)
    self.assertEqual(
        jwtmac1.verify_mac_and_decode(compact1, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac2.verify_mac_and_decode(compact1, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac3.verify_mac_and_decode(compact1, validator).issuer(), 'a')
    with self.assertRaises(tink.TinkError):
      jwtmac4.verify_mac_and_decode(compact1, validator)
    # 2 uses the older key. So 1, 2 and 3 can verify the mac, but not 4.
    compact2 = jwtmac2.compute_mac_and_encode(raw_jwt)
    self.assertEqual(
        jwtmac1.verify_mac_and_decode(compact2, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac2.verify_mac_and_decode(compact2, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac3.verify_mac_and_decode(compact2, validator).issuer(), 'a')
    with self.assertRaises(tink.TinkError):
      jwtmac4.verify_mac_and_decode(compact2, validator)
    # 3 uses the newer key. So 2, 3 and 4 can verify the mac, but not 1.
    compact3 = jwtmac3.compute_mac_and_encode(raw_jwt)
    with self.assertRaises(tink.TinkError):
      jwtmac1.verify_mac_and_decode(compact3, validator)
    self.assertEqual(
        jwtmac2.verify_mac_and_decode(compact3, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac3.verify_mac_and_decode(compact3, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac4.verify_mac_and_decode(compact3, validator).issuer(), 'a')
    # 4 uses the newer key. So 2, 3 and 4 can verify the mac, but not 1.
    compact4 = jwtmac4.compute_mac_and_encode(raw_jwt)
    with self.assertRaises(tink.TinkError):
      jwtmac1.verify_mac_and_decode(compact4, validator)
    self.assertEqual(
        jwtmac2.verify_mac_and_decode(compact4, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac3.verify_mac_and_decode(compact4, validator).issuer(), 'a')
    self.assertEqual(
        jwtmac4.verify_mac_and_decode(compact4, validator).issuer(), 'a')

  def test_only_tink_output_prefix_type_encodes_a_kid_header(self):
    """RAW keys emit no kid header; TINK keys emit and require one."""
    handle = tink.new_keyset_handle(jwt.raw_jwt_hs256_template())
    jwt_mac = handle.primitive(jwt.JwtMac)
    tink_handle = _change_output_prefix_to_tink(handle)
    tink_jwt_mac = tink_handle.primitive(jwt.JwtMac)
    raw_jwt = jwt.new_raw_jwt(issuer='issuer', without_expiration=True)
    token = jwt_mac.compute_mac_and_encode(raw_jwt)
    token_with_kid = tink_jwt_mac.compute_mac_and_encode(raw_jwt)
    _, header, _, _ = _jwt_format.split_signed_compact(token)
    self.assertNotIn('kid', _json_util.json_loads(header))
    _, header_with_kid, _, _ = _jwt_format.split_signed_compact(token_with_kid)
    self.assertIn('kid', _json_util.json_loads(header_with_kid))
    validator = jwt.new_validator(
        expected_issuer='issuer', allow_missing_expiration=True)
    jwt_mac.verify_mac_and_decode(token, validator)
    tink_jwt_mac.verify_mac_and_decode(token_with_kid, validator)
    # With output prefix type RAW, a kid header is ignored
    jwt_mac.verify_mac_and_decode(token_with_kid, validator)
    # With output prefix type TINK, a kid header is required.
    with self.assertRaises(tink.TinkError):
      tink_jwt_mac.verify_mac_and_decode(token, validator)
    other_handle = _change_key_id(tink_handle)
    other_jwt_mac = other_handle.primitive(jwt.JwtMac)
    # A token with a wrong kid is rejected, even if the signature is ok.
    with self.assertRaises(tink.TinkError):
      other_jwt_mac.verify_mac_and_decode(token_with_kid, validator)

  def test_raw_output_prefix_type_encodes_a_custom_kid_header(self):
    """A RAW key with custom_kid emits that kid; TINK + custom_kid is invalid."""
    # normal HMAC jwt_mac with output prefix RAW
    handle = tink.new_keyset_handle(jwt.raw_jwt_hs256_template())
    raw_jwt = jwt.new_raw_jwt(issuer='issuer', without_expiration=True)
    validator = jwt.new_validator(
        expected_issuer='issuer', allow_missing_expiration=True)
    jwt_mac = handle.primitive(jwt.JwtMac)
    token = jwt_mac.compute_mac_and_encode(raw_jwt)
    jwt_mac.verify_mac_and_decode(token, validator)
    _, json_header, _, _ = _jwt_format.split_signed_compact(token)
    self.assertNotIn('kid', _json_util.json_loads(json_header))
    # HMAC jwt_mac with a custom_kid set
    custom_kid_handle = _set_custom_kid(handle, custom_kid='my kid')
    custom_kid_jwt_mac = custom_kid_handle.primitive(jwt.JwtMac)
    token_with_kid = custom_kid_jwt_mac.compute_mac_and_encode(raw_jwt)
    custom_kid_jwt_mac.verify_mac_and_decode(token_with_kid, validator)
    _, header_with_kid, _, _ = _jwt_format.split_signed_compact(token_with_kid)
    self.assertEqual(_json_util.json_loads(header_with_kid)['kid'], 'my kid')
    # Even when custom_kid is set, its not required to be set in the header.
    custom_kid_jwt_mac.verify_mac_and_decode(token, validator)
    # An additional kid header is ignored.
    jwt_mac.verify_mac_and_decode(token_with_kid, validator)
    other_handle = _set_custom_kid(handle, custom_kid='other kid')
    other_jwt_mac = other_handle.primitive(jwt.JwtMac)
    with self.assertRaises(tink.TinkError):
      # The custom_kid does not match the kid header.
      other_jwt_mac.verify_mac_and_decode(
          token_with_kid, validator)
    tink_handle = _change_output_prefix_to_tink(custom_kid_handle)
    tink_jwt_mac = tink_handle.primitive(jwt.JwtMac)
    # having custom_kid set with output prefix TINK is not allowed
    with self.assertRaises(tink.TinkError):
      tink_jwt_mac.compute_mac_and_encode(raw_jwt)
    with self.assertRaises(tink.TinkError):
      tink_jwt_mac.verify_mac_and_decode(token, validator)
    with self.assertRaises(tink.TinkError):
      tink_jwt_mac.verify_mac_and_decode(token_with_kid, validator)

  def test_legacy_key_fails(self):
    """A LEGACY-prefix primary key cannot produce a JwtMac primitive."""
    template = keyset_builder.legacy_template(jwt.raw_jwt_hs256_template())
    builder = keyset_builder.new_keyset_builder()
    key_id = builder.add_new_key(template)
    builder.set_primary_key(key_id)
    handle = builder.keyset_handle()
    with self.assertRaises(tink.TinkError):
      handle.primitive(jwt.JwtMac)

  def test_legacy_non_primary_key_fails(self):
    """Even a non-primary LEGACY key makes the whole keyset unusable."""
    builder = keyset_builder.new_keyset_builder()
    old_template = keyset_builder.legacy_template(jwt.raw_jwt_hs256_template())
    _ = builder.add_new_key(old_template)
    current_key_id = builder.add_new_key(jwt.jwt_hs256_template())
    builder.set_primary_key(current_key_id)
    handle = builder.keyset_handle()
    with self.assertRaises(tink.TinkError):
      handle.primitive(jwt.JwtMac)

  def test_jwt_mac_from_keyset_without_primary_fails(self):
    """A keyset with no primary key cannot even build a handle."""
    builder = keyset_builder.new_keyset_builder()
    builder.add_new_key(jwt.raw_jwt_hs256_template())
    with self.assertRaises(tink.TinkError):
      builder.keyset_handle()
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  absltest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.