hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f734ea9b8bb5e64eb1c72b0e9195f75e0147dfe3 | 2,673 | py | Python | benchmark.py | lucas-sio-rosa/bigquery-benchmark | b37e029fffcb516818efa74e338d95293549499c | [
"WTFPL"
] | null | null | null | benchmark.py | lucas-sio-rosa/bigquery-benchmark | b37e029fffcb516818efa74e338d95293549499c | [
"WTFPL"
] | null | null | null | benchmark.py | lucas-sio-rosa/bigquery-benchmark | b37e029fffcb516818efa74e338d95293549499c | [
"WTFPL"
] | null | null | null | from concurrent.futures import ThreadPoolExecutor, as_completed
from google.cloud import bigquery as bq
from datetime import datetime as dt
import argparse
import json
import logging
import time

if __name__ == "__main__":
    start_time = time.time()
    threads = []
    results = []

    parser = argparse.ArgumentParser()
    parser.add_argument('job_prefix', help='The job prefix to be added to the BQ jobs')
    parser.add_argument('query_file', help='The json file with a list of queries to be executed simultaneously')
    parser.add_argument('--query_param_list', help='The json file with a list of parameters to be supplied to the query in round-robin fashion')
    parser.add_argument('--credential_file', help='The path to a json credential file to authenticate the client')
    # bug fix: the original help text was a copy-paste of the log-level help
    parser.add_argument('--pool_size', default=50, type=int, help='Number of worker threads used to wait on query jobs (default 50)')
    parser.add_argument('--log_level', default=20, type=int, choices=(0, 10, 20, 30, 40, 50), help='Log level')
    args = parser.parse_args()

    logging.basicConfig(level=args.log_level)
    executor = ThreadPoolExecutor(args.pool_size)
    # use explicit credentials when given, otherwise application-default credentials
    client = bq.Client.from_service_account_json(args.credential_file) if args.credential_file else bq.Client()
    # cache disabled so every run actually executes the query
    job_config = bq.job.QueryJobConfig(use_legacy_sql=False, use_query_cache=False)

    with open(args.query_file, 'r') as q:
        query_list = json.loads(q.read())
    param_list = None
    if args.query_param_list:
        with open(args.query_param_list, 'r') as p:
            param_list = json.loads(p.read())
    setup_time = time.time()

    # submit all queries; parameters are applied round-robin over param_list
    param_index = 0
    param_reset = len(param_list) - 1 if param_list else 0
    for q in query_list:
        if param_list:
            query = q['query'].format(**param_list[param_index])
        else:
            query = q['query']
        logging.debug(query)
        job = client.query(query, job_id_prefix=args.job_prefix, job_config=job_config)
        # job.result() blocks until the job finishes, so run it in the pool
        threads.append(executor.submit(job.result))
        param_index = param_index + 1 if param_index < param_reset else 0
    sent_time = time.time()

    # wait for every job and collect its rows
    for future in as_completed(threads):
        results.append(list(future.result()))
    logging.debug('Execution results: {}'.format(results))
    logging.info('Start time: {}'.format(dt.utcfromtimestamp(start_time).isoformat()))
    logging.info('Time spent in setup: {}, {}s'.format(dt.utcfromtimestamp(setup_time).isoformat(), setup_time - start_time))
logging.info('Time spent in execution: {}, {}s'.format(dt.utcfromtimestamp(sent_time).isoformat(), sent_time - setup_time)) | 41.765625 | 144 | 0.693603 | from concurrent.futures import ThreadPoolExecutor, as_completed
# Duplicate copy of the benchmark script (comment-stripped dataset column).
from google.cloud import bigquery as bq
from datetime import datetime as dt
import argparse
import json
import logging
import time

if __name__ == "__main__":
    start_time = time.time()
    threads = []
    results = []
    parser = argparse.ArgumentParser()
    parser.add_argument('job_prefix', help='The job prefix to be added to the BQ jobs')
    parser.add_argument('query_file', help='The json file with a list of queries to be executed simultaneously')
    parser.add_argument('--query_param_list', help='The json file with a list of parameters to be supplied to the query in round-robin fashion')
    parser.add_argument('--credential_file', help='The path to a json credential file to authenticate the client')
    parser.add_argument('--pool_size', default=50, type=int, help='Sets the logging level (default INFO)')
    parser.add_argument('--log_level', default=20, type=int, choices=(0, 10, 20, 30, 40, 50), help='Log level')
    args = parser.parse_args()
    logging.basicConfig(level=args.log_level)
    executor = ThreadPoolExecutor(args.pool_size)
    # explicit service-account credentials when given, default credentials otherwise
    client = bq.Client.from_service_account_json(args.credential_file) if args.credential_file else bq.Client()
    job_config = bq.job.QueryJobConfig(use_legacy_sql=False, use_query_cache=False)
    with open(args.query_file, 'r') as q:
        query_list = json.loads(q.read())
    param_list = None
    if args.query_param_list:
        with open(args.query_param_list, 'r') as p:
            param_list = json.loads(p.read())
    setup_time = time.time()
    job_list = []
    # submit all queries; parameters applied round-robin over param_list
    param_index = 0
    param_reset = len(param_list) - 1 if param_list else 0
    for q in query_list:
        if param_list:
            query = q['query'].format(**param_list[param_index])
            logging.debug(query)
        else:
            query = q['query']
            logging.debug(query)
        job = client.query(query, job_id_prefix=args.job_prefix, job_config=job_config)
        # job.result() blocks until completion, so wait in the thread pool
        threads.append(executor.submit(job.result))
        param_index = param_index + 1 if param_index < param_reset else 0
    sent_time = time.time()
    # collect rows from every finished job
    for future in as_completed(threads):
        results.append(list(future.result()))
    logging.debug('Execution results: {}'.format(results))
    logging.info('Start time: {}'.format(dt.utcfromtimestamp(start_time).isoformat()))
    logging.info('Time spent in setup: {}, {}s'.format(dt.utcfromtimestamp(setup_time).isoformat(), setup_time - start_time))
logging.info('Time spent in execution: {}, {}s'.format(dt.utcfromtimestamp(sent_time).isoformat(), sent_time - setup_time)) | true | true |
f734eaa92f6550c0ce4abd1c44bdd7c44006debf | 1,640 | py | Python | python/oneflow/test/modules/test_TripletMarginLoss.py | butterluo/oneflow | cf2ce575d80f89642b71bee2248e69b09213007d | [
"Apache-2.0"
] | null | null | null | python/oneflow/test/modules/test_TripletMarginLoss.py | butterluo/oneflow | cf2ce575d80f89642b71bee2248e69b09213007d | [
"Apache-2.0"
] | null | null | null | python/oneflow/test/modules/test_TripletMarginLoss.py | butterluo/oneflow | cf2ce575d80f89642b71bee2248e69b09213007d | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
import numpy as np
from test_util import GenArgList
from oneflow.test_utils.automated_test_util import *
import oneflow as flow
@flow.unittest.skip_unless_1n1d()
class TestTripletMarginLoss(flow.unittest.TestCase):
    # Consistency test: @autotest runs the body against both oneflow and
    # pytorch (the `torch` name is the automated-test-util proxy) and compares
    # the returned results over 10 randomized trials.
    @autotest(n=10)
    def test_triplet_marginloss_with_random_data(test_case):
        # randomize every hyperparameter of the loss
        margin = random().to(float)
        p = random().to(float)
        swap = random_bool()
        reduction = oneof("none", "sum", "mean", nothing())
        m = torch.nn.TripletMarginLoss(
            margin=margin, p=p, swap=swap, reduction=reduction
        )
        # randomly toggle train/eval mode
        m.train(random())
        device = random_device()
        m.to(device)
        # anchor / positive / negative share one random 2-D shape
        shape = random_tensor(ndim=2, dim0=random(1, 8)).pytorch.shape
        anchor = random_tensor(len(shape), *shape).to(device)
        pos = random_tensor(len(shape), *shape).to(device)
        neg = random_tensor(len(shape), *shape).to(device)
        y = m(anchor, pos, neg)
        return y
# Run the test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
| 32.8 | 72 | 0.702439 | import unittest
from collections import OrderedDict
import numpy as np
from test_util import GenArgList
from oneflow.test_utils.automated_test_util import *
import oneflow as flow
# Duplicate copy of the test class (comment-stripped dataset column).
@flow.unittest.skip_unless_1n1d()
class TestTripletMarginLoss(flow.unittest.TestCase):
    # @autotest compares oneflow against pytorch over 10 randomized trials
    @autotest(n=10)
    def test_triplet_marginloss_with_random_data(test_case):
        margin = random().to(float)
        p = random().to(float)
        swap = random_bool()
        reduction = oneof("none", "sum", "mean", nothing())
        m = torch.nn.TripletMarginLoss(
            margin=margin, p=p, swap=swap, reduction=reduction
        )
        m.train(random())
        device = random_device()
        m.to(device)
        shape = random_tensor(ndim=2, dim0=random(1, 8)).pytorch.shape
        anchor = random_tensor(len(shape), *shape).to(device)
        pos = random_tensor(len(shape), *shape).to(device)
        neg = random_tensor(len(shape), *shape).to(device)
        y = m(anchor, pos, neg)
        return y
if __name__ == "__main__":
unittest.main()
| true | true |
f734ece49fb38670af0abb06b808cf1656a36159 | 55,538 | py | Python | python/graphvite/application/application.py | adrenadine33/graphvite | 34fc203f96ff13095073c605ecfcae32213e7f6a | [
"Apache-2.0"
] | null | null | null | python/graphvite/application/application.py | adrenadine33/graphvite | 34fc203f96ff13095073c605ecfcae32213e7f6a | [
"Apache-2.0"
] | null | null | null | python/graphvite/application/application.py | adrenadine33/graphvite | 34fc203f96ff13095073c605ecfcae32213e7f6a | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 MilaGraph. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Zhaocheng Zhu
"""Implementation of applications"""
from __future__ import print_function, absolute_import, unicode_literals, division
import os
import re
import pickle
import logging
import multiprocessing
from collections import defaultdict
from future.builtins import str, map, range
from easydict import EasyDict
import numpy as np
from .. import lib, cfg, auto
from .. import graph, solver
from ..util import assert_in, monitor, SharedNDArray
logger = logging.getLogger(__name__)
class ApplicationMixin(object):
    """
    General interface of graph applications.

    Parameters:
        dim (int): dimension of embeddings
        gpus (list of int, optional): GPU ids, default is all GPUs
        cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
        gpu_memory_limit (int, optional): memory limit per GPU in bytes, default is all memory
        float_type (dtype, optional): type of parameters
        index_type (dtype, optional): type of graph indexes
    """

    def __init__(self, dim, gpus=None, cpu_per_gpu=auto, gpu_memory_limit=auto,
                 float_type=cfg.float_type, index_type=cfg.index_type):
        self.dim = dim
        # the original used a mutable default (`gpus=[]`); keep the same falsy
        # empty-list semantics without sharing the default across instances
        self.gpus = gpus if gpus is not None else []
        self.cpu_per_gpu = cpu_per_gpu
        self.gpu_memory_limit = gpu_memory_limit
        self.float_type = float_type
        self.index_type = index_type
        self.set_format()

    def get_graph(self, **kwargs):
        """Return a new graph instance. Implemented by subclasses."""
        raise NotImplementedError

    def get_solver(self, **kwargs):
        """Return a new solver instance. Implemented by subclasses."""
        raise NotImplementedError

    def set_format(self, delimiters=" \t\r\n", comment="#"):
        """
        Set the format for parsing input data.

        Parameters:
            delimiters (str, optional): string of delimiter characters
            comment (str, optional): prefix of comment strings
        """
        self.delimiters = delimiters
        self.comment = comment
        # pre-compile a character-class pattern that splits on any delimiter
        self.pattern = re.compile("[%s]" % self.delimiters)

    @monitor.time
    def load(self, **kwargs):
        """load(**kwargs)
        Load a graph from file or Python object.

        Arguments depend on the underlying graph type.
        """
        self.graph = self.get_graph(**kwargs)
        # bug fix: the original tested `"vector_file" in "kwargs"` (membership
        # in the string literal), which is always False, so loads driven by a
        # vector file never received the configured delimiters / comment prefix
        if "file_name" in kwargs or "vector_file" in kwargs:
            self.graph.load(delimiters=self.delimiters, comment=self.comment, **kwargs)
        else:
            self.graph.load(**kwargs)

    @monitor.time
    def build(self, **kwargs):
        """build(**kwargs)
        Build the solver from the graph.

        Arguments depend on the underlying solver type.
        """
        self.solver = self.get_solver(**kwargs)
        self.solver.build(self.graph, **kwargs)

    @monitor.time
    def train(self, **kwargs):
        """train(**kwargs)
        Train embeddings with the solver.

        Arguments depend on the underlying solver type.
        """
        self.solver.train(**kwargs)

    @monitor.time
    def evaluate(self, task, **kwargs):
        """evaluate(task, **kwargs)
        Evaluate the learned embeddings on a downstream task.

        Arguments depend on the underlying graph type and the task.

        Parameters:
            task (str): name of task; dispatched to the method of the same
                name with spaces replaced by underscores

        Returns:
            dict: metrics and their values
        """
        func_name = task.replace(" ", "_")
        if not hasattr(self, func_name):
            raise ValueError("Unknown task `%s`" % task)

        logger.info(lib.io.header(task))
        result = getattr(self, func_name)(**kwargs)
        if isinstance(result, dict):
            for metric, value in sorted(result.items()):
                logger.warning("%s: %g" % (metric, value))
        return result

    @monitor.time
    def load_model(self, file_name):
        """
        Load model in pickle format.

        Parameters:
            file_name (str): file name
        """
        logger.warning("load model from `%s`" % file_name)
        with open(file_name, "rb") as fin:
            model = pickle.load(fin)
        self.set_parameters(model)

    @monitor.time
    def save_model(self, file_name, save_hyperparameter=False):
        """
        Save model in pickle format.

        Parameters:
            file_name (str): file name
            save_hyperparameter (bool, optional): save hyperparameters or not, default is false
        """
        # mappings are attributes whose name contains "2" (name2id, id2name, ...)
        def is_mapping(name, attribute):
            return "2" in name

        def is_embedding(name, attribute):
            if name[0] == "_":
                return False
            return isinstance(attribute, np.ndarray)

        def is_hyperparameter(name, attribute):
            if name[0] == "_":
                return False
            return isinstance(attribute, (int, float, str))

        # collect all public attributes of `obj` accepted by `predicate`
        # (renamed from the original `object` / `filter`, which shadowed builtins)
        def get_attributes(obj, predicate):
            attributes = EasyDict()
            for name in dir(obj):
                attribute = getattr(obj, name)
                if predicate(name, attribute):
                    attributes[name] = attribute
            return attributes

        logger.warning("save model to `%s`" % file_name)
        model = EasyDict()
        model.graph = get_attributes(self.graph, is_mapping)
        model.solver = get_attributes(self.solver, is_embedding)
        if save_hyperparameter:
            model.graph.update(get_attributes(self.graph, is_hyperparameter))
            model.solver.update(get_attributes(self.solver, is_hyperparameter))
            model.solver.optimizer = get_attributes(self.solver.optimizer, is_hyperparameter)
            model.solver.optimizer.schedule = self.solver.optimizer.schedule.type
        with open(file_name, "wb") as fout:
            pickle.dump(model, fout, protocol=pickle.HIGHEST_PROTOCOL)

    def get_mapping(self, id2name, name2id):
        """Map each name in `id2name` to its index in `name2id`; raise if any name is missing."""
        mapping = []
        for name in id2name:
            if name not in name2id:
                raise ValueError("Can't find the embedding for `%s`" % name)
            mapping.append(name2id[name])
        return mapping

    def tokenize(self, str):
        """Strip delimiters and trailing comments, then split `str` into tokens."""
        # note: the parameter name `str` shadows the builtin; it is kept for
        # backward compatibility with keyword callers
        line = str.strip(self.delimiters)
        comment_start = line.find(self.comment)
        if comment_start != -1:
            line = line[:comment_start]
        return self.pattern.split(line)

    def name_map(self, dicts, names):
        """
        Translate parallel name columns into index columns via their dictionaries.
        Samples with any unmapped name are dropped from all columns.
        """
        assert len(dicts) == len(names), "The number of dictionaries and names must be equal"
        indexes = [[] for _ in range(len(names))]
        num_param = len(names)
        num_sample = len(names[0])
        for i in range(num_sample):
            # keep the sample only if every column can be mapped
            valid = all(names[j][i] in dicts[j] for j in range(num_param))
            if valid:
                for j in range(num_param):
                    indexes[j].append(dicts[j][names[j][i]])
        return indexes

    def gpu_map(self, func, settings):
        """Run `func` over `settings`, one process per GPU, appending a GPU id to each setting."""
        import torch

        gpus = self.gpus if self.gpus else range(torch.cuda.device_count())
        # assign GPUs round-robin by appending the id to each setting tuple
        new_settings = []
        for i, setting in enumerate(settings):
            new_settings.append(setting + (gpus[i % len(gpus)],))
        settings = new_settings

        try:
            start_method = multiprocessing.get_start_method()
            # if there are other running processes, this could cause leakage of semaphores
            multiprocessing.set_start_method("spawn", force=True)
            pool = multiprocessing.Pool(len(gpus))
            try:
                results = pool.map(func, settings, chunksize=1)
            finally:
                # bug fix: the original never closed the pool, leaking worker processes
                pool.close()
                pool.join()
            multiprocessing.set_start_method(start_method, force=True)
        except AttributeError:
            logger.info("Spawn mode is not supported by multiprocessing. Switch to serial execution.")
            results = list(map(func, settings))
        return results
class GraphApplication(ApplicationMixin):
    """
    Node embedding application.

    Given a graph, it embeds each node into a continuous vector representation.
    The learned embeddings can be used for many downstream tasks.
    e.g. **node classification**, **link prediction**, **node analogy**.
    The similarity between node embeddings can be measured by cosine distance.

    Supported Models:
        - DeepWalk (`DeepWalk: Online Learning of Social Representations`_)
        - LINE (`LINE: Large-scale Information Network Embedding`_)
        - node2vec (`node2vec: Scalable Feature Learning for Networks`_)

    .. _DeepWalk\: Online Learning of Social Representations:
        https://arxiv.org/pdf/1403.6652.pdf
    .. _LINE\: Large-scale Information Network Embedding:
        https://arxiv.org/pdf/1503.03578.pdf
    .. _node2vec\: Scalable Feature Learning for Networks:
        https://www.kdd.org/kdd2016/papers/files/rfp0218-groverA.pdf

    Parameters:
        dim (int): dimension of embeddings
        gpus (list of int, optional): GPU ids, default is all GPUs
        cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
        float_type (dtype, optional): type of parameters
        index_type (dtype, optional): type of graph indexes

    See also:
        :class:`Graph <graphvite.graph.Graph>`,
        :class:`GraphSolver <graphvite.solver.GraphSolver>`
    """

    def get_graph(self, **kwargs):
        # a plain homogeneous graph
        return graph.Graph(self.index_type)

    def get_solver(self, **kwargs):
        # one thread per GPU worker is reserved for the worker itself,
        # the rest of the CPU budget goes to edge samplers
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.GraphSolver(self.dim, self.float_type, self.index_type, self.gpus, num_sampler_per_worker,
                                  self.gpu_memory_limit)

    def set_parameters(self, model):
        # copy embeddings from a loaded model, re-ordered to this graph's node ids
        mapping = self.get_mapping(self.graph.id2name, model.graph.name2id)
        self.solver.vertex_embeddings[:] = model.solver.vertex_embeddings[mapping]
        self.solver.context_embeddings[:] = model.solver.context_embeddings[mapping]

    def node_classification(self, X=None, Y=None, file_name=None, portions=(0.02,), normalization=False, times=1,
                            patience=100):
        """
        Evaluate node embeddings on node classification task.

        Parameters:
            X (list of str, optional): names of nodes
            Y (list, optional): labels of nodes
            file_name (str, optional): file of nodes & labels
            portions (tuple of float, optional): how much data for training
            normalization (bool, optional): normalize the embeddings or not
            times (int, optional): number of trials
            patience (int, optional): patience on loss convergence

        Returns:
            dict: macro-F1 & micro-F1 averaged over all trials
        """
        import scipy.sparse as sp

        self.solver.clear()

        # either (X, Y) in memory or a two-column file, never both
        if file_name:
            if not (X is None and Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            X = []
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    x, y = tokens
                    X.append(x)
                    Y.append(y)
        if X is None or Y is None:
            raise ValueError("Either evaluataion data (X, Y) or a file name should be provided")

        # translate node names and class labels into integer ids;
        # samples with unknown nodes are dropped
        name2id = self.graph.name2id
        class2id = {c: i for i, c in enumerate(np.unique(Y))}
        new_X, new_Y = self.name_map((name2id, class2id), (X, Y))
        logger.info("effective labels: %d / %d" % (len(new_X), len(X)))
        X = np.asarray(new_X)
        Y = np.asarray(new_Y)

        # dense multi-hot label matrix: labels[node, class] = 1
        labels = sp.coo_matrix((np.ones_like(X), (X, Y)), dtype=np.int32).todense()
        indexes, _ = np.where(np.sum(labels, axis=1) > 0)
        # discard non-labeled nodes
        labels = labels[indexes]
        # shared array so worker processes can read the embeddings without copying
        vertex_embeddings = SharedNDArray(self.solver.vertex_embeddings[indexes])

        # one classification run per training portion, distributed over GPUs
        settings = []
        for portion in portions:
            settings.append((vertex_embeddings, labels, portion, normalization, times, patience))
        results = self.gpu_map(linear_classification, settings)

        metrics = {}
        for result in results:
            metrics.update(result)
        return metrics

    def link_prediction(self, H=None, T=None, Y=None, file_name=None, filter_H=None, filter_T=None, filter_file=None):
        """
        Evaluate node embeddings on link prediction task.

        Parameters:
            H (list of str, optional): names of head nodes
            T (list of str, optional): names of tail nodes
            Y (list of int, optional): labels of edges (1 = positive, 0 = negative)
            file_name (str, optional): file of edges and labels (e.g. validation set)
            filter_H (list of str, optional): names of head nodes to filter out
            filter_T (list of str, optional): names of tail nodes to filter out
            filter_file (str, optional): file of edges to filter out (e.g. training set)

        Returns:
            dict: AUC of link prediction
        """
        import torch
        from .network import LinkPredictor

        self.solver.clear()

        # either (H, T, Y) in memory or a three-column file, never both
        if file_name:
            if not (H is None and T is None and Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            T = []
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    h, t, y = tokens
                    H.append(h)
                    T.append(t)
                    Y.append(y)
        if H is None or T is None or Y is None:
            raise ValueError("Either evaluation data or file should be provided")
        # optional filter edges (typically the training set)
        if filter_file:
            if not (filter_H is None and filter_T is None):
                raise ValueError("Filter data and file should not be provided at the same time")
            filter_H = []
            filter_T = []
            with open(filter_file, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    h, t = tokens
                    filter_H.append(h)
                    filter_T.append(t)
        elif filter_H is None:
            filter_H = []
            filter_T = []

        # map names to ids; unknown nodes / labels are dropped
        name2id = self.graph.name2id
        Y = [int(y) for y in Y]
        new_H, new_T, new_Y = self.name_map((name2id, name2id, {0: 0, 1: 1}), (H, T, Y))
        logger.info("effective edges: %d / %d" % (len(new_H), len(H)))
        H = new_H
        T = new_T
        Y = new_Y
        new_H, new_T = self.name_map((name2id, name2id), (filter_H, filter_T))
        logger.info("effective filter edges: %d / %d" % (len(new_H), len(filter_H)))
        filters = set(zip(new_H, new_T))

        # drop evaluation edges that appear in the filter set
        new_H = []
        new_T = []
        new_Y = []
        for h, t, y in zip(H, T, Y):
            if (h, t) not in filters:
                new_H.append(h)
                new_T.append(t)
                new_Y.append(y)
        logger.info("remaining edges: %d / %d" % (len(new_H), len(H)))
        H = np.asarray(new_H)
        T = np.asarray(new_T)
        Y = np.asarray(new_Y)

        # score every remaining edge with the learned embeddings on GPU
        vertex_embeddings = self.solver.vertex_embeddings
        context_embeddings = self.solver.context_embeddings
        model = LinkPredictor(self.solver.model, vertex_embeddings, context_embeddings)
        model = model.cuda()
        H = torch.as_tensor(H)
        T = torch.as_tensor(T)
        Y = torch.as_tensor(Y)
        H = H.cuda()
        T = T.cuda()
        Y = Y.cuda()

        # AUC = fraction of (negative, positive) pairs ranked correctly:
        # sort by score, count positives above each negative
        score = model(H, T)
        order = torch.argsort(score, descending=True)
        Y = Y[order]
        hit = torch.cumsum(Y, dim=0)
        # `all` shadows the builtin here; total number of (neg, pos) pairs
        all = torch.sum(Y == 0) * torch.sum(Y == 1)
        auc = torch.sum(hit[Y == 0]).item() / all.item()
        return {
            "AUC": auc
        }
def linear_classification(args):
    """
    Train and evaluate a one-vs-rest linear classifier on node embeddings.

    Designed to run in a worker process spawned by ``gpu_map``, so all
    arguments arrive packed in a single tuple.

    Parameters:
        args (tuple): (embeddings, labels, portion, normalization, times,
            patience, gpu) where `embeddings` is a SharedNDArray of node
            embeddings, `labels` a dense multi-hot label matrix, `portion`
            the training fraction, and `gpu` the CUDA device id

    Returns:
        dict: macro-F1 and micro-F1 averaged over `times` random splits
    """
    import torch
    from torch import optim
    from torch.nn import functional as F
    from .network import NodeClassifier

    def generate_one_vs_rest(indexes, labels):
        # expand each multi-labeled sample into one single-label sample per
        # positive class, for one-vs-rest training
        new_indexes = []
        new_labels = []
        num_class = labels.shape[1]
        for index, sample_labels in zip(indexes, labels):
            for cls in np.where(sample_labels)[0]:
                new_indexes.append(index)
                # bug fix: `np.int` was removed in NumPy 1.24; the builtin
                # `int` is the same dtype the alias referred to
                new_label = np.zeros(num_class, dtype=int)
                new_label[cls] = 1
                new_labels.append(new_label)
        return torch.as_tensor(new_indexes), torch.as_tensor(new_labels)

    embeddings, labels, portion, normalization, times, patience, gpu = args
    embeddings = np.asarray(embeddings)
    num_sample, num_class = labels.shape
    num_train = int(num_sample * portion)

    macro_f1s = []
    micro_f1s = []
    for t in range(times):
        # fresh random train/test split per trial
        samples = np.random.permutation(num_sample)
        train_samples = samples[:num_train]
        train_labels = np.asarray(labels[train_samples])
        train_samples, train_labels = generate_one_vs_rest(train_samples, train_labels)
        test_samples = torch.as_tensor(samples[num_train:])
        test_labels = torch.as_tensor(labels[test_samples])

        model = NodeClassifier(embeddings, num_class, normalization=normalization)
        train_samples = train_samples.cuda(gpu)
        train_labels = train_labels.cuda(gpu)
        test_samples = test_samples.cuda(gpu)
        test_labels = test_labels.cuda(gpu)
        model = model.cuda(gpu)

        # train until the loss stops improving for `patience` epochs
        optimizer = optim.SGD(model.parameters(), lr=1, weight_decay=2e-5, momentum=0.9)
        best_loss = float("inf")
        best_epoch = -1
        for epoch in range(100000):
            optimizer.zero_grad()
            logits = model(train_samples)
            loss = F.binary_cross_entropy_with_logits(logits, train_labels.float())
            loss.backward()
            optimizer.step()

            loss = loss.item()
            if loss < best_loss:
                best_epoch = epoch
                best_loss = loss
            if epoch == best_epoch + patience:
                break

        # test: predict exactly as many labels per sample as the ground truth
        # has, by thresholding at the k-th largest logit
        logits = model(test_samples)
        num_labels = test_labels.sum(dim=1, keepdim=True)
        # renamed from `sorted`, which shadowed the builtin
        sorted_logits, _ = logits.sort(dim=1, descending=True)
        thresholds = sorted_logits.gather(dim=1, index=num_labels - 1)
        predictions = (logits >= thresholds).int()

        # macro-F1: F1 per class, then averaged over classes
        num_TP_per_class = (predictions & test_labels).sum(dim=0).float()
        num_T_per_class = test_labels.sum(dim=0).float()
        num_P_per_class = predictions.sum(dim=0).float()
        macro_f1s.append((2 * num_TP_per_class / (num_T_per_class + num_P_per_class)).mean().item())
        # micro-F1: F1 over all (sample, class) decisions pooled together
        num_TP = (predictions & test_labels).sum().float()
        num_T = test_labels.sum().float()
        num_P = predictions.sum().float()
        micro_f1s.append((2 * num_TP / (num_T + num_P)).item())

    return {
        "macro-F1@%g%%" % (portion * 100): np.mean(macro_f1s),
        "micro-F1@%g%%" % (portion * 100): np.mean(micro_f1s)
    }
class WordGraphApplication(ApplicationMixin):
    """
    Word node embedding application.

    Given a corpus, it embeds each word into a continuous vector representation.
    The learned embeddings can be used for natural language processing tasks.
    This can be viewed as a variant of the word2vec algorithm, with random walk augmentation support.
    The similarity between node embeddings can be measured by cosine distance.

    Supported Models:
        - LINE (`LINE: Large-scale Information Network Embedding`_)

    Parameters:
        dim (int): dimension of embeddings
        gpus (list of int, optional): GPU ids, default is all GPUs
        cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
        float_type (dtype, optional): type of parameters
        index_type (dtype, optional): type of graph indexes

    See also:
        :class:`WordGraph <graphvite.graph.WordGraph>`,
        :class:`GraphSolver <graphvite.solver.GraphSolver>`
    """

    def get_graph(self, **kwargs):
        # a co-occurrence graph built from a text corpus
        return graph.WordGraph(self.index_type)

    def get_solver(self, **kwargs):
        # one thread per GPU worker is reserved for the worker itself,
        # the rest of the CPU budget goes to edge samplers
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.GraphSolver(self.dim, self.float_type, self.index_type, self.gpus, num_sampler_per_worker,
                                  self.gpu_memory_limit)

    def set_parameters(self, model):
        # copy embeddings from a loaded model, re-ordered to this graph's word ids
        mapping = self.get_mapping(self.graph.id2name, model.graph.name2id)
        self.solver.vertex_embeddings[:] = model.solver.vertex_embeddings[mapping]
        self.solver.context_embeddings[:] = model.solver.context_embeddings[mapping]
class KnowledgeGraphApplication(ApplicationMixin):
"""
Knowledge graph embedding application.
Given a knowledge graph, it embeds each entity and relation into a continuous vector representation respectively.
The learned embeddings can be used for analysis of knowledge graphs.
e.g. **entity prediction**, **link prediction**.
The likelihood of edges can be predicted by computing the score function over embeddings of triplets.
Supported Models:
- TransE (`Translating Embeddings for Modeling Multi-relational Data`_)
- DistMult (`Embedding Entities and Relations for Learning and Inference in Knowledge Bases`_)
- ComplEx (`Complex Embeddings for Simple Link Prediction`_)
- SimplE (`SimplE Embedding for Link Prediction in Knowledge Graphs`_)
- RotatE (`RotatE: Knowledge Graph Embedding by Relational Rotation in Complex Space`_)
.. _Translating Embeddings for Modeling Multi-relational Data:
http://papers.nips.cc/paper/5071-translating-embeddings-for-modeling-multi-relational-data.pdf
.. _Embedding Entities and Relations for Learning and Inference in Knowledge Bases:
https://arxiv.org/pdf/1412.6575.pdf
.. _Complex Embeddings for Simple Link Prediction:
http://proceedings.mlr.press/v48/trouillon16.pdf
.. _SimplE Embedding for Link Prediction in Knowledge Graphs:
https://papers.nips.cc/paper/7682-simple-embedding-for-link-prediction-in-knowledge-graphs.pdf
.. _RotatE\: Knowledge Graph Embedding by Relational Rotation in Complex Space:
https://arxiv.org/pdf/1902.10197.pdf
Parameters:
dim (int): dimension of embeddings
gpus (list of int, optional): GPU ids, default is all GPUs
cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
float_type (dtype, optional): type of parameters
index_type (dtype, optional): type of graph indexes
Note:
The implementation of TransE, DistMult and ComplEx, SimplE are slightly different from their original papers.
The loss function and the regularization term generally follow `this repo`_.
Self-adversarial negative sampling is also adopted in these models like RotatE.
.. _this repo: https://github.com/DeepGraphLearning/KnowledgeGraphEmbedding
See also:
:class:`KnowledgeGraph <graphvite.graph.KnowledgeGraph>`,
:class:`KnowledgeGraphSolver <graphvite.solver.KnowledgeGraphSolver>`
"""
    # heuristics used when sizing evaluation batches against GPU memory
    # (consumed by later methods of this class — TODO confirm against full file)
    SAMPLE_PER_DIMENSION = 7
    MEMORY_SCALE_FACTOR = 1.5

    def get_graph(self, **kwargs):
        # a knowledge graph of (head, relation, tail) triplets
        return graph.KnowledgeGraph(self.index_type)

    def get_solver(self, **kwargs):
        # one thread per GPU worker is reserved for the worker itself,
        # the rest of the CPU budget goes to triplet samplers
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.KnowledgeGraphSolver(self.dim, self.float_type, self.index_type, self.gpus, num_sampler_per_worker,
                                           self.gpu_memory_limit)

    def set_parameters(self, model):
        # copy entity & relation embeddings from a loaded model,
        # re-ordered to this graph's entity / relation ids
        entity_mapping = self.get_mapping(self.graph.id2entity, model.graph.entity2id)
        relation_mapping = self.get_mapping(self.graph.id2relation, model.graph.relation2id)
        self.solver.entity_embeddings[:] = model.solver.entity_embeddings[entity_mapping]
        self.solver.relation_embeddings[:] = model.solver.relation_embeddings[relation_mapping]
    def entity_prediction(self, H=None, R=None, T=None, file_name=None, save_file=None, target="tail", k=10,
                          backend=cfg.backend):
        """
        Predict the distribution of missing entity or relation for triplets.
        Parameters:
            H (list of str, optional): names of head entities
            R (list of str, optional): names of relations
            T (list of str, optional): names of tail entities
            file_name (str, optional): file of triplets (e.g. validation set)
            save_file (str, optional): ``txt`` or ``pkl`` file to save predictions
            target (str, optional): 'head' or 'tail'
            k (int, optional): top-k recalls will be returned
            backend (str, optional): 'graphvite' or 'torch'
        Return:
            list of list of tuple: top-k recalls (entity name, score) for each triplet,
            if save file is not provided
        """
        def torch_predict():
            # PyTorch backend: shard the triplets evenly across GPUs; each worker
            # scores its shard via triplet_prediction (k set -> returns recalls)
            import torch
            entity_embeddings = SharedNDArray(self.solver.entity_embeddings)
            relation_embeddings = SharedNDArray(self.solver.relation_embeddings)
            num_gpu = len(self.gpus) if self.gpus else torch.cuda.device_count()
            work_load = (num_sample + num_gpu - 1) // num_gpu  # ceil division
            settings = []
            for i in range(num_gpu):
                work_H = H[work_load * i: work_load * (i+1)]
                work_R = R[work_load * i: work_load * (i+1)]
                work_T = T[work_load * i: work_load * (i+1)]
                settings.append((entity_embeddings, relation_embeddings, work_H, work_R, work_T,
                                 None, None, target, k, self.solver.model, self.solver.margin))
            results = self.gpu_map(triplet_prediction, settings)
            # each worker returns a list of per-triplet recalls; flatten them
            return sum(results, [])
        def graphvite_predict():
            # native backend: score each triplet against every entity, in batches
            num_entity = len(entity2id)
            batch_size = self.get_batch_size(num_entity)
            recalls = []
            for i in range(0, num_sample, batch_size):
                batch_h = H[i: i + batch_size]
                batch_r = R[i: i + batch_size]
                batch_t = T[i: i + batch_size]
                batch = self.generate_one_vs_rest(batch_h, batch_r, batch_t, num_entity, target)
                scores = self.solver.predict(batch)
                scores = scores.reshape(-1, num_entity)  # one row per triplet
                # partial sort: the k best entities end up in the last k columns
                indexes = np.argpartition(scores, num_entity - k, axis=-1)
                for index, score in zip(indexes, scores):
                    index = index[-k:]
                    score = score[index]
                    order = np.argsort(score)[::-1]  # descending by score
                    recall = list(zip(index[order], score[order]))
                    recalls.append(recall)
            return recalls
        assert_in(["head", "tail"], target=target)
        assert_in(["graphvite", "torch"], backend=backend)
        if backend == "torch":
            # flush pending solver updates so PyTorch sees the final embeddings
            self.solver.clear()
        if file_name:
            if not (H is None and R is None and T is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            R = []
            T = []
            with open(file_name, "r") as fin:
                for i, line in enumerate(fin):
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    if 3 <= len(tokens) <= 4:
                        # full triplet; an optional 4th column is ignored
                        h, r, t = tokens[:3]
                    elif len(tokens) == 2:
                        # incomplete triplet: the missing side is the prediction target
                        if target == "head":
                            r, t = tokens
                            h = None
                        else:
                            h, r = tokens
                            t = None
                    else:
                        raise ValueError("Invalid line format at line %d in %s" % (i + 1, file_name))
                    H.append(h)
                    R.append(r)
                    T.append(t)
        if (H is None and T is None) or R is None:
            raise ValueError("Either evaluation data or file should be provided")
        # infer the target side when one side of the triplets is entirely absent
        if H is None:
            target = "head"
        if T is None:
            target = "tail"
        entity2id = self.graph.entity2id
        relation2id = self.graph.relation2id
        num_sample = len(R)
        # zero placeholders for the side that will be predicted
        new_H = np.zeros(num_sample, dtype=np.uint32)
        new_T = np.zeros(num_sample, dtype=np.uint32)
        if target == "head":
            new_R, new_T = self.name_map((relation2id, entity2id), (R, T))
        if target == "tail":
            new_H, new_R = self.name_map((entity2id, relation2id), (H, R))
        # name_map silently drops unknown names; a length mismatch means bad input
        assert len(new_R) == len(R), "Can't recognize some entities or relations"
        H = np.asarray(new_H, dtype=np.uint32)
        R = np.asarray(new_R, dtype=np.uint32)
        T = np.asarray(new_T, dtype=np.uint32)
        if backend == "graphvite":
            recalls = graphvite_predict()
        else:
            recalls = torch_predict()
        # translate predicted entity ids back into names
        id2entity = self.graph.id2entity
        new_recalls = []
        for recall in recalls:
            new_recall = [(id2entity[e], s) for e, s in recall]
            new_recalls.append(new_recall)
        recalls = new_recalls
        if save_file:
            logger.warning("save entity predictions to `%s`" % save_file)
            extension = os.path.splitext(save_file)[1]
            if extension == ".txt":
                with open(save_file, "w") as fout:
                    for recall in recalls:
                        tokens = ["%s: %g" % x for x in recall]
                        fout.write("%s\n" % "\t".join(tokens))
            elif extension == ".pkl":
                with open(save_file, "wb") as fout:
                    pickle.dump(recalls, fout, protocol=pickle.HIGHEST_PROTOCOL)
            else:
                raise ValueError("Unknown file extension `%s`" % extension)
        else:
            return recalls
    def link_prediction(self, H=None, R=None, T=None, filter_H=None, filter_R=None, filter_T=None, file_name=None,
                        filter_files=None, target="both", fast_mode=None, backend=cfg.backend):
        """
        Evaluate knowledge graph embeddings on link prediction task.
        Parameters:
            H (list of str, optional): names of head entities
            R (list of str, optional): names of relations
            T (list of str, optional): names of tail entities
            file_name (str, optional): file of triplets (e.g. validation set)
            filter_H (list of str, optional): names of head entities to filter out
            filter_R (list of str, optional): names of relations to filter out
            filter_T (list of str, optional): names of tail entities to filter out
            filter_files (str, optional): files of triplets to filter out (e.g. training / validation / test set)
            target (str, optional): 'head', 'tail' or 'both'
            fast_mode (int, optional): if specified, only that number of samples will be evaluated
            backend (str, optional): 'graphvite' or 'torch'
        Returns:
            dict: MR, MRR, HITS\@1, HITS\@3 & HITS\@10 of link prediction
        """
        def torch_predict():
            # PyTorch backend: shard triplets across GPUs; workers return filtered
            # ranks of the true entity (k=None -> triplet_prediction returns ranks)
            import torch
            entity_embeddings = SharedNDArray(self.solver.entity_embeddings)
            relation_embeddings = SharedNDArray(self.solver.relation_embeddings)
            num_gpu = len(self.gpus) if self.gpus else torch.cuda.device_count()
            work_load = (fast_mode + num_gpu - 1) // num_gpu  # ceil division
            settings = []
            for i in range(num_gpu):
                work_H = H[work_load * i: work_load * (i+1)]
                work_R = R[work_load * i: work_load * (i+1)]
                work_T = T[work_load * i: work_load * (i+1)]
                settings.append((entity_embeddings, relation_embeddings, work_H, work_R, work_T,
                                 exclude_H, exclude_T, target, None, self.solver.model, self.solver.margin))
            results = self.gpu_map(triplet_prediction, settings)
            return np.concatenate(results)
        def graphvite_predict():
            num_entity = len(entity2id)
            # 'both' scores head & tail corruption for every triplet, so the
            # per-sample footprint doubles
            if target == "both":
                batch_size = self.get_batch_size(num_entity * 2)
            else:
                batch_size = self.get_batch_size(num_entity)
            rankings = []
            for i in range(0, fast_mode, batch_size):
                batch_h = H[i: i + batch_size]
                batch_r = R[i: i + batch_size]
                batch_t = T[i: i + batch_size]
                batch = self.generate_one_vs_rest(batch_h, batch_r, batch_t, num_entity, target)
                masks = self.generate_mask(batch_h, batch_r, batch_t, exclude_H, exclude_T, num_entity, target)
                # the true entity id for each one-vs-rest row
                if target == "head":
                    positives = batch_h
                if target == "tail":
                    positives = batch_t
                if target == "both":
                    positives = np.asarray([batch_h, batch_t]).transpose()
                    positives = positives.ravel()
                scores = self.solver.predict(batch)
                scores = scores.reshape(-1, num_entity)
                truths = scores[range(len(positives)), positives]
                # filtered ranking: count non-excluded entities scoring >= the truth
                ranking = np.sum((scores >= truths[:, np.newaxis]) * masks, axis=1)
                rankings.append(ranking)
            return np.concatenate(rankings)
        assert_in(["head", "tail", "both"], target=target)
        assert_in(["graphvite", "torch"], backend=backend)
        if backend == "torch":
            # flush pending solver updates so PyTorch sees the final embeddings
            self.solver.clear()
        if file_name:
            if not (H is None and R is None and T is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            R = []
            T = []
            with open(file_name, "r") as fin:
                for i, line in enumerate(fin):
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    if 3 <= len(tokens) <= 4:
                        # full triplet; an optional 4th column is ignored
                        h, r, t = tokens[:3]
                    else:
                        raise ValueError("Invalid line format at line %d in %s" % (i + 1, file_name))
                    H.append(h)
                    R.append(r)
                    T.append(t)
        if H is None or R is None or T is None:
            raise ValueError("Either evaluation data or file should be provided")
        if filter_files:
            if not (filter_H is None and filter_R is None and filter_T is None):
                raise ValueError("Filter data and file should not be provided at the same time")
            filter_H = []
            filter_R = []
            filter_T = []
            for filter_file in filter_files:
                with open(filter_file, "r") as fin:
                    for i, line in enumerate(fin):
                        tokens = self.tokenize(line)
                        if len(tokens) == 0:
                            continue
                        if 3 <= len(tokens) <= 4:
                            h, r, t = tokens[:3]
                        else:
                            raise ValueError("Invalid line format at line %d in %s" % (i + 1, filter_file))
                        filter_H.append(h)
                        filter_R.append(r)
                        filter_T.append(t)
        elif filter_H is None:
            # no filter provided -> raw (unfiltered) ranking
            filter_H = []
            filter_R = []
            filter_T = []
        entity2id = self.graph.entity2id
        relation2id = self.graph.relation2id
        new_H, new_R, new_T = self.name_map((entity2id, relation2id, entity2id), (H, R, T))
        logger.info("effective triplets: %d / %d" % (len(new_H), len(H)))
        H = np.asarray(new_H, dtype=np.uint32)
        R = np.asarray(new_R, dtype=np.uint32)
        T = np.asarray(new_T, dtype=np.uint32)
        new_H, new_R, new_T = self.name_map((entity2id, relation2id, entity2id), (filter_H, filter_R, filter_T))
        logger.info("effective filter triplets: %d / %d" % (len(new_H), len(filter_H)))
        filter_H = np.asarray(new_H, dtype=np.uint32)
        filter_R = np.asarray(new_R, dtype=np.uint32)
        filter_T = np.asarray(new_T, dtype=np.uint32)
        # exclusion sets for filtered ranking: known true heads per (t, r) and
        # known true tails per (h, r)
        exclude_H = defaultdict(set)
        exclude_T = defaultdict(set)
        for h, r, t in zip(filter_H, filter_R, filter_T):
            exclude_H[(t, r)].add(h)
            exclude_T[(h, r)].add(t)
        num_sample = len(H)
        fast_mode = fast_mode or num_sample
        # random subsample when fast_mode < num_sample
        indexes = np.random.permutation(num_sample)[:fast_mode]
        H = H[indexes]
        R = R[indexes]
        T = T[indexes]
        if backend == "graphvite":
            rankings = graphvite_predict()
        elif backend == "torch":
            rankings = torch_predict()
        return {
            "MR": np.mean(rankings),
            "MRR": np.mean(1 / rankings),
            "HITS@1": np.mean(rankings <= 1),
            "HITS@3": np.mean(rankings <= 3),
            "HITS@10": np.mean(rankings <= 10)
        }
def get_batch_size(self, sample_size):
import psutil
memory = psutil.virtual_memory()
batch_size = int(self.SAMPLE_PER_DIMENSION * self.dim * self.graph.num_vertex
* self.solver.num_partition / self.solver.num_worker / sample_size)
# 2 triplet (Python, C++ sample pool) + 1 sample index
mem_per_sample = sample_size * (2 * 3 * np.uint32().itemsize + 1 * np.uint64().itemsize)
max_batch_size = int(memory.available / mem_per_sample / self.MEMORY_SCALE_FACTOR)
if max_batch_size < batch_size:
logger.info("Memory is not enough for optimal prediction batch size. "
"Use the maximal possible size instead.")
batch_size = max_batch_size
return batch_size
def generate_one_vs_rest(self, H, R, T, num_entity, target="both"):
one = np.ones(num_entity, dtype=np.bool)
all = np.arange(num_entity, dtype=np.uint32)
batches = []
for h, r, t in zip(H, R, T):
if target == "head" or target == "both":
batch = np.asarray([all, t * one, r * one]).transpose()
batches.append(batch)
if target == "tail" or target == "both":
batch = np.asarray([h * one, all, r * one]).transpose()
batches.append(batch)
batches = np.concatenate(batches)
return batches
def generate_mask(self, H, R, T, exclude_H, exclude_T, num_entity, target="both"):
one = np.ones(num_entity, dtype=np.bool)
masks = []
for h, r, t in zip(H, R, T):
if target == "head" or target == "both":
mask = one.copy()
mask[list(exclude_H[(t, r)])] = 0
mask[h] = 1
masks.append(mask)
if target == "tail" or target == "both":
mask = one.copy()
mask[list(exclude_T[(h, r)])] = 0
mask[t] = 1
masks.append(mask)
masks = np.asarray(masks)
return masks
def triplet_prediction(args):
    """
    Single-GPU worker for knowledge graph evaluation with the PyTorch backend.
    `args` is a tuple of (entity_embeddings, relation_embeddings, H, R, T,
    exclude_H, exclude_T, target, k, model_name, margin, device).
    Returns top-k recalls (list of list of (id, score)) when k is set,
    otherwise filtered rankings as an ndarray.
    """
    import torch
    from .network import LinkPredictor
    torch.set_grad_enabled(False)
    # keep the raw model name in its own variable: the CPU fallback below must
    # rebuild the predictor from it
    entity_embeddings, relation_embeddings, H, R, T, \
        exclude_H, exclude_T, target, k, model, margin, device = args
    entity_embeddings = np.asarray(entity_embeddings)
    relation_embeddings = np.asarray(relation_embeddings)
    num_entity = len(entity_embeddings)
    score_function = LinkPredictor(model, entity_embeddings, relation_embeddings, entity_embeddings,
                                   margin=margin)
    if device != "cpu":
        try:
            score_function = score_function.to(device)
        except RuntimeError:
            logger.info("Model is too large for GPU evaluation with PyTorch. Switch to CPU evaluation.")
            device = "cpu"
    if device == "cpu":
        # release any partially-moved GPU tensors before rebuilding on CPU
        del score_function
        torch.cuda.empty_cache()
        # bug fix: the original rebuilt the predictor from the just-deleted
        # predictor object (`LinkPredictor(score_function, ...)` after
        # `del score_function`), raising NameError on this fallback path;
        # rebuild from the preserved model name instead
        score_function = LinkPredictor(model, entity_embeddings, relation_embeddings, entity_embeddings,
                                       margin=margin)
    one = torch.ones(num_entity, dtype=torch.long, device=device)
    all = torch.arange(num_entity, dtype=torch.long, device=device)
    results = []  # rankings or top-k recalls
    for h, r, t in zip(H, R, T):
        if target == "head" or target == "both":
            # score every entity as head against fixed (r, t)
            batch_h = all
            batch_r = r * one
            batch_t = t * one
            score = score_function(batch_h, batch_r, batch_t)
            if k:  # top-k recalls
                score, index = torch.topk(score, k)
                score = score.cpu().numpy()
                index = index.cpu().numpy()
                recall = list(zip(index, score))
                results.append(recall)
            else:  # filtered ranking of the true head
                mask = torch.ones(num_entity, dtype=torch.uint8, device=device)
                index = torch.tensor(list(exclude_H[(t, r)]), dtype=torch.long, device=device)
                mask[index] = 0
                mask[h] = 1  # never mask the ground truth itself
                ranking = torch.sum((score >= score[h]) * mask).item()
                results.append(ranking)
        if target == "tail" or target == "both":
            # score every entity as tail against fixed (h, r)
            batch_h = h * one
            batch_r = r * one
            batch_t = all
            score = score_function(batch_h, batch_r, batch_t)
            if k:  # top-k recalls
                score, index = torch.topk(score, k)
                score = score.cpu().numpy()
                index = index.cpu().numpy()
                recall = list(zip(index, score))
                results.append(recall)
            else:  # filtered ranking of the true tail
                mask = torch.ones(num_entity, dtype=torch.uint8, device=device)
                index = torch.tensor(list(exclude_T[(h, r)]), dtype=torch.long, device=device)
                mask[index] = 0
                mask[t] = 1
                ranking = torch.sum((score >= score[t]) * mask).item()
                results.append(ranking)
    if not k:  # ranking
        results = np.asarray(results)
    return results
class VisualizationApplication(ApplicationMixin):
    """
    Graph & high-dimensional data visualization.
    Given a graph or high-dimensional vectors, it maps each node to 2D or 3D coordinates to
    facilitate visualization. The learned coordinates preserve most local similarity information
    of the original input, and may shed some light on the structure of the graph or the
    high-dimensional space.
    Supported Models:
        - LargeVis (`Visualizing Large-scale and High-dimensional Data`_)
    .. _Visualizing Large-scale and High-dimensional Data: https://arxiv.org/pdf/1602.00370.pdf
    Parameters:
        dim (int): dimension of embeddings
        gpus (list of int, optional): GPU ids, default is all GPUs
        cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
        float_type (dtype, optional): type of parameters
        index_type (dtype, optional): type of graph indexes
    See also:
        :class:`Graph <graphvite.graph.Graph>`,
        :class:`KNNGraph <graphvite.graph.KNNGraph>`,
        :class:`VisualizationSolver <graphvite.solver.VisualizationSolver>`
    """
    # points farther than this many standard deviations from the mean are
    # treated as outliers and dropped before plotting
    OUTLIER_THRESHOLD = 5
    def get_graph(self, **kwargs):
        # an explicit edge list yields a plain graph; otherwise a KNN graph is
        # built from the input vectors
        if "file_name" in kwargs or "edge_list" in kwargs:
            return graph.Graph(self.index_type)
        else:
            return graph.KNNGraph(self.index_type, self.gpus, self.cpu_per_gpu)
    def get_solver(self, **kwargs):
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            # one CPU thread per GPU is reserved for the worker itself
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.VisualizationSolver(self.dim, self.float_type, self.index_type, self.gpus, num_sampler_per_worker,
                                          self.gpu_memory_limit)
    def set_parameters(self, model):
        # coordinates have no name mapping, so shapes must match exactly
        if self.solver.coordinates.shape != model.solver.coordinates.shape:
            raise ValueError("Expect coordinates with shape %s, but %s is found" %
                             (self.solver.coordinates.shape, model.solver.coordinates.shape))
        self.solver.coordinates[:] = model.solver.coordinates
    def visualization(self, Y=None, file_name=None, save_file=None, figure_size=10, scale=2):
        """
        Visualize learned 2D or 3D coordinates.
        Parameters:
            Y (list of str, optional): labels of vectors
            file_name (str, optional): file of labels
            save_file (str, optional): ``png`` or ``pdf`` file to save visualization,
                if not provided, show the figure in window
            figure_size (int, optional): size of figure
            scale (int, optional): size of points
        """
        from matplotlib import pyplot as plt
        plt.switch_backend("agg")  # for compatibility
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if not (dim == 2 or dim == 3):
            raise ValueError("Can't visualize %dD data" % dim)
        if file_name:
            if not (Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    y, = tokens
                    Y.append(y)
        elif Y is None:
            Y = ["unknown"] * self.graph.num_vertex
        Y = np.asarray(Y)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        Y = Y[indexes]
        classes = sorted(np.unique(Y))
        fig = plt.figure(figsize=(figure_size, figure_size))
        if dim == 2:
            ax = fig.gca()
        elif dim == 3:
            from mpl_toolkits.mplot3d import Axes3D
            # NOTE(review): Figure.gca(projection=...) was removed in
            # matplotlib 3.7; use fig.add_subplot(projection="3d") if upgrading
            ax = fig.gca(projection="3d")
        for cls in classes:
            indexes, = np.where(Y == cls)
            ax.scatter(*coordinates[indexes].T, s=scale)
        ax.set_xticks([])
        ax.set_yticks([])
        if dim == 3:
            ax.set_zticks([])
        if len(classes) > 1:
            ax.legend(classes, markerscale=6, loc="upper right")
        if save_file:
            logger.warning("save visualization to `%s`" % save_file)
            plt.savefig(save_file)
        else:
            plt.show()
        return {}
    def hierarchy(self, HY=None, file_name=None, target=None, save_file=None, figure_size=10, scale=2, duration=3):
        """
        Visualize learned 2D coordinates with hierarchical labels.
        Parameters:
            HY (list of list of str, optional): hierarchical labels of vectors
            file_name (str, optional): file of hierarchical labels
            target (str): target class
            save_file (str): ``gif`` file to save visualization
            figure_size (int, optional): size of figure
            scale (int, optional): size of points
            duration (float, optional): duration of each frame in seconds
        """
        import imageio
        from matplotlib import pyplot as plt
        plt.switch_backend("agg")  # for compatibility
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if dim != 2:
            # bug fix: was `raise ValuerError`, a NameError at raise time
            raise ValueError("Can't visualize the hierarchy of %dD data" % dim)
        if file_name:
            if not (HY is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            HY = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) > 0:
                        HY.append(tokens)
        elif HY is None:
            raise ValueError("No label is provided for hierarchy")
        HY = np.asarray(HY)
        # widen the string dtype so the literal "else" fits when frames relabel
        # out-of-class points in render_hierarchy
        min_type = "S%d" % len("else")
        if HY.dtype < min_type:
            HY = HY.astype(min_type)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        HY = HY[indexes].T
        if target is None:
            raise ValueError("Target class is not provided")
        # find the shallowest level that contains the target and one sample of it
        # NOTE(review): with a bytes ('S') dtype, `Y == target` against a str
        # target may never match on Python 3 — verify
        for depth, Y in enumerate(HY):
            indexes, = np.where(Y == target)
            if len(indexes) > 0:
                sample = indexes[0]
                break
        else:
            raise ValueError("Can't find target `%s` in the hierarchy" % target)
        settings = [(coordinates, None, HY[0], sample, figure_size, scale, 0)]
        for i in range(depth):
            settings.append((coordinates, HY[i], HY[i + 1], sample, figure_size, scale, i+1))
        pool = multiprocessing.Pool(self.solver.num_worker + self.solver.num_sampler)
        try:
            frames = pool.map(render_hierarchy, settings)
        finally:
            # bug fix: the pool was never shut down, leaking worker processes
            pool.close()
            pool.join()
        logger.warning("save hierarchy to `%s`" % save_file)
        imageio.mimsave(save_file, frames, fps=1 / duration, subrectangles=True)
        return {}
    def animation(self, Y=None, file_name=None, save_file=None, figure_size=5, scale=1, elevation=30, num_frame=700):
        """
        Rotate learned 3D coordinates as an animation.
        Parameters:
            Y (list of str, optional): labels of vectors
            file_name (str, optional): file of labels
            save_file (str): ``gif`` file to save visualization
            figure_size (int, optional): size of figure
            scale (int, optional): size of points
            elevation (float, optional): elevation angle
            num_frame (int, optional): number of frames
        """
        import imageio
        from matplotlib import pyplot as plt
        from mpl_toolkits.mplot3d import Axes3D
        plt.switch_backend("agg")  # for compatibility
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if dim != 3:
            raise ValueError("Can't animate %dD data" % dim)
        if file_name:
            if not (Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    y, = tokens
                    Y.append(y)
        elif Y is None:
            Y = ["unknown"] * self.graph.num_vertex
        Y = np.asarray(Y)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        Y = Y[indexes]
        settings = []
        degrees = np.linspace(0, 360, num_frame, endpoint=False)
        for degree in degrees:
            settings.append((coordinates, Y, degree, figure_size, scale, elevation))
        pool = multiprocessing.Pool(self.solver.num_worker + self.solver.num_sampler)
        try:
            frames = pool.map(render_animation, settings)
        finally:
            # bug fix: the pool was never shut down, leaking worker processes
            pool.close()
            pool.join()
        logger.warning("save animation to `%s`" % save_file)
        imageio.mimsave(save_file, frames, fps=num_frame / 70, subrectangles=True)  # 70 seconds
        return {}
def render_hierarchy(args):
    """
    Render one frame of the label hierarchy as a 2D scatter plot.
    `args` is (coordinates, H, Y, sample, figure_size, scale, depth); `depth`
    is unused but kept for tuple arity with the caller's settings.
    Returns the rendered frame as an RGBA ndarray.
    """
    from matplotlib import pyplot as plt
    plt.switch_backend("agg")
    coordinates, H, Y, sample, figure_size, scale, depth = args
    fig = plt.figure(figsize=(figure_size, figure_size))
    ax = fig.gca()
    if H is not None:
        # collapse everything outside the sample's parent class into "else"
        for i in range(len(Y)):
            if H[i] != H[sample]:
                Y[i] = "else"
    classes = set(Y)
    classes.discard(Y[sample])
    classes.discard("else")
    # sample's own class first, grey "else" catch-all last
    classes = [Y[sample]] + sorted(classes) + ["else"]
    for i, cls in enumerate(classes):
        indexes, = np.where(Y == cls)
        color = "lightgrey" if cls == "else" else None
        # bug fix: point size was hard-coded to 2, silently ignoring the
        # `scale` setting passed down from hierarchy()
        ax.scatter(*coordinates[indexes].T, s=scale, c=color, zorder=-i)
    ax.set_xticks([])
    ax.set_yticks([])
    ax.legend(classes, markerscale=6, loc="upper right")
    fig.canvas.draw()
    # NOTE(review): relies on a private Agg renderer attribute; prefer
    # fig.canvas.buffer_rgba() on modern matplotlib
    frame = np.asarray(fig.canvas.renderer._renderer)
    return frame
def render_animation(args):
    """
    Render a single rotated 3D scatter frame.
    `args` is (coordinates, labels, azimuth, figure_size, point_size, elevation).
    Returns the rendered frame as an RGBA ndarray.
    """
    from matplotlib import pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D  # registers the "3d" projection
    plt.switch_backend("agg")
    points, labels, azimuth, fig_size, point_size, elev_angle = args
    label_set = sorted(np.unique(labels))
    figure = plt.figure(figsize=(fig_size, fig_size))
    axes = figure.gca(projection="3d")
    # one scatter call per class so the legend gets distinct colors
    for label in label_set:
        members, = np.where(labels == label)
        axes.scatter(*points[members].T, s=point_size)
    axes.view_init(elev=elev_angle, azim=azimuth)
    axes.set_xticks([])
    axes.set_yticks([])
    axes.set_zticks([])
    if len(label_set) > 1:
        axes.legend(label_set, markerscale=6)
    figure.canvas.draw()
    return np.asarray(figure.canvas.renderer._renderer)
class Application(object):
    """
    Application(type, *args, **kwargs)
    Create an application instance of any type.
    Parameters:
        type (str): application type,
            can be 'graph', 'word graph', 'knowledge graph' or 'visualization'
    """
    # dispatch table from type string to concrete application class
    application = {
        "graph": GraphApplication,
        "word graph": WordGraphApplication,
        "knowledge graph": KnowledgeGraphApplication,
        "visualization": VisualizationApplication
    }
    def __new__(cls, type, *args, **kwargs):
        # guard clause: fail fast on unknown types
        if type not in cls.application:
            raise ValueError("Unknown application `%s`" % type)
        # __new__ returns an instance of the concrete class, not of Application
        return cls.application[type](*args, **kwargs)
# names exported by `from <this module> import *`
__all__ = [
    "Application",
    "GraphApplication", "WordGraphApplication", "KnowledgeGraphApplication", "VisualizationApplication"
]
from __future__ import print_function, absolute_import, unicode_literals, division
import os
import re
import pickle
import logging
import multiprocessing
from collections import defaultdict
from future.builtins import str, map, range
from easydict import EasyDict
import numpy as np
from .. import lib, cfg, auto
from .. import graph, solver
from ..util import assert_in, monitor, SharedNDArray
logger = logging.getLogger(__name__)
class ApplicationMixin(object):
    """
    General interface shared by all graph applications: loading data, building
    a solver, training, evaluation and model (de)serialization.
    Parameters:
        dim (int): dimension of embeddings
        gpus (list of int, optional): GPU ids, default is all GPUs
        cpu_per_gpu (int, optional): number of CPU threads per GPU, default is all CPUs
        gpu_memory_limit (int, optional): memory limit per GPU
        float_type (dtype, optional): type of parameters
        index_type (dtype, optional): type of graph indexes
    """
    def __init__(self, dim, gpus=[], cpu_per_gpu=auto, gpu_memory_limit=auto,
                 float_type=cfg.float_type, index_type=cfg.index_type):
        # NOTE: the mutable default `gpus=[]` is safe here because it is only
        # ever read, never mutated
        self.dim = dim
        self.gpus = gpus
        self.cpu_per_gpu = cpu_per_gpu
        self.gpu_memory_limit = gpu_memory_limit
        self.float_type = float_type
        self.index_type = index_type
        self.set_format()
    def get_graph(self, **kwargs):
        # subclasses must return the graph type they operate on
        raise NotImplementedError
    def get_solver(self, **kwargs):
        # subclasses must return their solver type
        raise NotImplementedError
    def set_format(self, delimiters=" \t\r\n", comment="#"):
        """Set the delimiters and comment marker used when parsing text files."""
        self.delimiters = delimiters
        self.comment = comment
        self.pattern = re.compile("[%s]" % self.delimiters)
    @monitor.time
    def load(self, **kwargs):
        """Build and load the graph; file-based loads also receive the parsing format."""
        self.graph = self.get_graph(**kwargs)
        # bug fix: the original tested `"vector_file" in "kwargs"` — a
        # substring test against the literal string "kwargs", always False —
        # so vector-file loads never received the configured format
        if "file_name" in kwargs or "vector_file" in kwargs:
            self.graph.load(delimiters=self.delimiters, comment=self.comment, **kwargs)
        else:
            self.graph.load(**kwargs)
    @monitor.time
    def build(self, **kwargs):
        """Build the solver over the loaded graph."""
        self.solver = self.get_solver(**kwargs)
        self.solver.build(self.graph, **kwargs)
    @monitor.time
    def train(self, **kwargs):
        """Train the embeddings."""
        self.solver.train(**kwargs)
    @monitor.time
    def evaluate(self, task, **kwargs):
        """Run an evaluation task by name (spaces map to underscores) and log its metrics."""
        func_name = task.replace(" ", "_")
        if not hasattr(self, func_name):
            raise ValueError("Unknown task `%s`" % task)
        logger.info(lib.io.header(task))
        result = getattr(self, func_name)(**kwargs)
        if isinstance(result, dict):
            for metric, value in sorted(result.items()):
                logger.warning("%s: %g" % (metric, value))
        return result
    @monitor.time
    def load_model(self, file_name):
        """Unpickle a saved model and copy its parameters into this application."""
        logger.warning("load model from `%s`" % file_name)
        with open(file_name, "rb") as fin:
            model = pickle.load(fin)
        self.set_parameters(model)
    @monitor.time
    def save_model(self, file_name, save_hyperparameter=False):
        """Pickle the graph's name mappings and the solver's embeddings
        (optionally the hyperparameters too)."""
        def is_mapping(name, attribute):
            # mapping attributes follow the `x2y` naming convention
            return "2" in name
        def is_embedding(name, attribute):
            if name[0] == "_":
                return False
            return isinstance(attribute, np.ndarray)
        def is_hyperparameter(name, attribute):
            if name[0] == "_":
                return False
            return isinstance(attribute, int) or isinstance(attribute, float) or isinstance(attribute, str)
        def get_attributes(object, filter):
            attributes = EasyDict()
            for name in dir(object):
                attribute = getattr(object, name)
                if filter(name, attribute):
                    attributes[name] = attribute
            return attributes
        logger.warning("save model to `%s`" % file_name)
        model = EasyDict()
        model.graph = get_attributes(self.graph, is_mapping)
        model.solver = get_attributes(self.solver, is_embedding)
        if save_hyperparameter:
            model.graph.update(get_attributes(self.graph, is_hyperparameter))
            model.solver.update(get_attributes(self.solver, is_hyperparameter))
            model.solver.optimizer = get_attributes(self.solver.optimizer, is_hyperparameter)
            model.solver.optimizer.schedule = self.solver.optimizer.schedule.type
        with open(file_name, "wb") as fout:
            pickle.dump(model, fout, protocol=pickle.HIGHEST_PROTOCOL)
    def get_mapping(self, id2name, name2id):
        """Map this graph's id order onto another model's ids; raise on missing names."""
        mapping = []
        for name in id2name:
            if name not in name2id:
                raise ValueError("Can't find the embedding for `%s`" % name)
            mapping.append(name2id[name])
        return mapping
    def tokenize(self, str):
        """Split a line into tokens, stripping any trailing comment."""
        str = str.strip(self.delimiters)
        comment_start = str.find(self.comment)
        if comment_start != -1:
            str = str[:comment_start]
        return self.pattern.split(str)
    def name_map(self, dicts, names):
        """Translate parallel name lists through their dictionaries, dropping
        every sample that has at least one unknown name."""
        assert len(dicts) == len(names), "The number of dictionaries and names must be equal"
        indexes = [[] for _ in range(len(names))]
        num_param = len(names)
        num_sample = len(names[0])
        for i in range(num_sample):
            valid = True
            for j in range(num_param):
                if names[j][i] not in dicts[j]:
                    valid = False
                    break
            if valid:
                for j in range(num_param):
                    indexes[j].append(dicts[j][names[j][i]])
        return indexes
    def gpu_map(self, func, settings):
        """Append a GPU id to each setting tuple and run `func` over them,
        one spawned process per GPU (serial fallback when spawn is unavailable)."""
        import torch
        gpus = self.gpus if self.gpus else range(torch.cuda.device_count())
        new_settings = []
        for i, setting in enumerate(settings):
            # round-robin assignment of settings to GPUs
            new_settings.append(setting + (gpus[i % len(gpus)],))
        settings = new_settings
        try:
            start_method = multiprocessing.get_start_method()
            # if there are other running processes, this could cause leakage of semaphores
            multiprocessing.set_start_method("spawn", force=True)
            pool = multiprocessing.Pool(len(gpus))
            results = pool.map(func, settings, chunksize=1)
            multiprocessing.set_start_method(start_method, force=True)
        except AttributeError:
            logger.info("Spawn mode is not supported by multiprocessing. Switch to serial execution.")
            results = list(map(func, settings))
        return results
class GraphApplication(ApplicationMixin):
    """
    Node embedding application: learns vertex & context embeddings for a graph,
    evaluated by node classification and link prediction.
    """
    def get_graph(self, **kwargs):
        return graph.Graph(self.index_type)
    def get_solver(self, **kwargs):
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            # one CPU thread per GPU is reserved for the worker itself
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.GraphSolver(self.dim, self.float_type, self.index_type, self.gpus, num_sampler_per_worker,
                                  self.gpu_memory_limit)
    def set_parameters(self, model):
        # re-order the loaded embeddings to this graph's id order
        mapping = self.get_mapping(self.graph.id2name, model.graph.name2id)
        self.solver.vertex_embeddings[:] = model.solver.vertex_embeddings[mapping]
        self.solver.context_embeddings[:] = model.solver.context_embeddings[mapping]
    def node_classification(self, X=None, Y=None, file_name=None, portions=(0.02,), normalization=False, times=1,
                            patience=100):
        """
        Evaluate node embeddings by multi-label node classification.
        Parameters:
            X (list of str, optional): node names
            Y (list of str, optional): class labels, parallel to X
            file_name (str, optional): file of (node, label) pairs
            portions (tuple of float, optional): training portions to evaluate
            normalization (bool, optional): whether to normalize the embeddings
            times (int, optional): number of repeated trials per portion
            patience (int, optional): early-stopping patience (epochs)
        Returns:
            dict: classification metrics for each portion
        """
        import scipy.sparse as sp
        self.solver.clear()
        if file_name:
            if not (X is None and Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            X = []
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    x, y = tokens
                    X.append(x)
                    Y.append(y)
        if X is None or Y is None:
            # typo fix in user-facing message: "evaluataion" -> "evaluation"
            raise ValueError("Either evaluation data (X, Y) or a file name should be provided")
        name2id = self.graph.name2id
        class2id = {c: i for i, c in enumerate(np.unique(Y))}
        new_X, new_Y = self.name_map((name2id, class2id), (X, Y))
        logger.info("effective labels: %d / %d" % (len(new_X), len(X)))
        X = np.asarray(new_X)
        Y = np.asarray(new_Y)
        # dense (num_node, num_class) multi-hot label matrix
        labels = sp.coo_matrix((np.ones_like(X), (X, Y)), dtype=np.int32).todense()
        indexes, _ = np.where(np.sum(labels, axis=1) > 0)
        # discard non-labeled nodes
        labels = labels[indexes]
        vertex_embeddings = SharedNDArray(self.solver.vertex_embeddings[indexes])
        settings = []
        for portion in portions:
            settings.append((vertex_embeddings, labels, portion, normalization, times, patience))
        results = self.gpu_map(linear_classification, settings)
        metrics = {}
        for result in results:
            metrics.update(result)
        return metrics
    def link_prediction(self, H=None, T=None, Y=None, file_name=None, filter_H=None, filter_T=None, filter_file=None):
        """
        Evaluate node embeddings by link prediction (AUC).
        Parameters:
            H (list of str, optional): head node names
            T (list of str, optional): tail node names
            Y (list of str, optional): 0/1 labels, parallel to H & T
            file_name (str, optional): file of (head, tail, label) triplets
            filter_H (list of str, optional): heads of edges to filter out
            filter_T (list of str, optional): tails of edges to filter out
            filter_file (str, optional): file of edges to filter out (e.g. training set)
        Returns:
            dict: AUC of link prediction
        """
        import torch
        from .network import LinkPredictor
        self.solver.clear()
        if file_name:
            if not (H is None and T is None and Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            T = []
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    h, t, y = tokens
                    H.append(h)
                    T.append(t)
                    Y.append(y)
        if H is None or T is None or Y is None:
            raise ValueError("Either evaluation data or file should be provided")
        if filter_file:
            if not (filter_H is None and filter_T is None):
                raise ValueError("Filter data and file should not be provided at the same time")
            filter_H = []
            filter_T = []
            with open(filter_file, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    h, t = tokens
                    filter_H.append(h)
                    filter_T.append(t)
        elif filter_H is None:
            filter_H = []
            filter_T = []
        name2id = self.graph.name2id
        Y = [int(y) for y in Y]
        # {0:0, 1:1} doubles as a validity check on the labels
        new_H, new_T, new_Y = self.name_map((name2id, name2id, {0: 0, 1: 1}), (H, T, Y))
        logger.info("effective edges: %d / %d" % (len(new_H), len(H)))
        H = new_H
        T = new_T
        Y = new_Y
        new_H, new_T = self.name_map((name2id, name2id), (filter_H, filter_T))
        logger.info("effective filter edges: %d / %d" % (len(new_H), len(filter_H)))
        filters = set(zip(new_H, new_T))
        # drop evaluation edges that appear in the filter set
        new_H = []
        new_T = []
        new_Y = []
        for h, t, y in zip(H, T, Y):
            if (h, t) not in filters:
                new_H.append(h)
                new_T.append(t)
                new_Y.append(y)
        logger.info("remaining edges: %d / %d" % (len(new_H), len(H)))
        H = np.asarray(new_H)
        T = np.asarray(new_T)
        Y = np.asarray(new_Y)
        vertex_embeddings = self.solver.vertex_embeddings
        context_embeddings = self.solver.context_embeddings
        model = LinkPredictor(self.solver.model, vertex_embeddings, context_embeddings)
        model = model.cuda()
        H = torch.as_tensor(H)
        T = torch.as_tensor(T)
        Y = torch.as_tensor(Y)
        H = H.cuda()
        T = T.cuda()
        Y = Y.cuda()
        score = model(H, T)
        order = torch.argsort(score, descending=True)
        Y = Y[order]
        # AUC = fraction of (negative, positive) pairs ranked correctly
        hit = torch.cumsum(Y, dim=0)
        all = torch.sum(Y == 0) * torch.sum(Y == 1)
        auc = torch.sum(hit[Y == 0]).item() / all.item()
        return {
            "AUC": auc
        }
def linear_classification(args):
    """Evaluate node embeddings with a linear one-vs-rest classifier.

    Arguments are packed in a single tuple so the function can be dispatched
    through a process pool / ``gpu_map``:

    args:
        embeddings: node embedding matrix, indexable as (num_sample, dim)
        labels: binary multi-label matrix, shape (num_sample, num_class)
        portion: fraction of samples used for training
        normalization: forwarded to NodeClassifier (embedding normalization)
        times: number of random train/test splits to average over
        patience: early-stopping patience in epochs without loss improvement
        gpu: CUDA device id used for this evaluation

    Returns a dict with macro-F1 and micro-F1 averaged over all splits.
    """
    import torch
    from torch import optim
    from torch.nn import functional as F
    from .network import NodeClassifier

    def generate_one_vs_rest(indexes, labels):
        # Expand each multi-label sample into one binary one-hot training
        # sample per positive class (one-vs-rest scheme).
        new_indexes = []
        new_labels = []
        num_class = labels.shape[1]
        for index, sample_labels in zip(indexes, labels):
            for cls in np.where(sample_labels)[0]:
                new_indexes.append(index)
                # `np.int` was an alias of the builtin and was removed in
                # NumPy 1.24; `int` is byte-for-byte the same dtype.
                new_label = np.zeros(num_class, dtype=int)
                new_label[cls] = 1
                new_labels.append(new_label)
        return torch.as_tensor(new_indexes), torch.as_tensor(new_labels)

    embeddings, labels, portion, normalization, times, patience, gpu = args
    embeddings = np.asarray(embeddings)
    num_sample, num_class = labels.shape
    num_train = int(num_sample * portion)
    macro_f1s = []
    micro_f1s = []
    for _ in range(times):
        # fresh random train/test split for every repetition
        samples = np.random.permutation(num_sample)
        train_samples = samples[:num_train]
        train_labels = np.asarray(labels[train_samples])
        train_samples, train_labels = generate_one_vs_rest(train_samples, train_labels)
        test_samples = torch.as_tensor(samples[num_train:])
        test_labels = torch.as_tensor(labels[test_samples])
        model = NodeClassifier(embeddings, num_class, normalization=normalization)
        train_samples = train_samples.cuda(gpu)
        train_labels = train_labels.cuda(gpu)
        test_samples = test_samples.cuda(gpu)
        test_labels = test_labels.cuda(gpu)
        model = model.cuda(gpu)
        # train: plain SGD until the loss stops improving for `patience` epochs
        optimizer = optim.SGD(model.parameters(), lr=1, weight_decay=2e-5, momentum=0.9)
        best_loss = float("inf")
        best_epoch = -1
        for epoch in range(100000):
            optimizer.zero_grad()
            logits = model(train_samples)
            loss = F.binary_cross_entropy_with_logits(logits, train_labels.float())
            loss.backward()
            optimizer.step()
            loss = loss.item()
            if loss < best_loss:
                best_epoch = epoch
                best_loss = loss
            if epoch == best_epoch + patience:
                break
        # test: predict exactly as many labels per sample as the ground truth
        # has, by thresholding at each sample's |labels|-th largest logit
        logits = model(test_samples)
        num_labels = test_labels.sum(dim=1, keepdim=True)
        sorted_logits, _ = logits.sort(dim=1, descending=True)
        thresholds = sorted_logits.gather(dim=1, index=num_labels-1)
        predictions = (logits >= thresholds).int()
        # compute metric
        num_TP_per_class = (predictions & test_labels).sum(dim=0).float()
        num_T_per_class = test_labels.sum(dim=0).float()
        num_P_per_class = predictions.sum(dim=0).float()
        macro_f1s.append((2 * num_TP_per_class / (num_T_per_class + num_P_per_class)).mean().item())
        num_TP = (predictions & test_labels).sum().float()
        num_T = test_labels.sum().float()
        num_P = predictions.sum().float()
        micro_f1s.append((2 * num_TP / (num_T + num_P)).item())
    return {
        "macro-F1@%g%%" % (portion * 100): np.mean(macro_f1s),
        "micro-F1@%g%%" % (portion * 100): np.mean(micro_f1s)
    }
class WordGraphApplication(ApplicationMixin):
    """Node embedding application over word co-occurrence graphs."""

    def get_graph(self, **kwargs):
        """Build the word graph backing this application."""
        return graph.WordGraph(self.index_type)

    def get_solver(self, **kwargs):
        """Build a graph solver; one CPU per GPU is reserved for the worker."""
        samplers = auto if self.cpu_per_gpu == auto else self.cpu_per_gpu - 1
        return solver.GraphSolver(self.dim, self.float_type, self.index_type,
                                  self.gpus, samplers, self.gpu_memory_limit)

    def set_parameters(self, model):
        """Copy embeddings from `model`, realigned to this graph's vertex names."""
        index = self.get_mapping(self.graph.id2name, model.graph.name2id)
        for attr in ("vertex_embeddings", "context_embeddings"):
            getattr(self.solver, attr)[:] = getattr(model.solver, attr)[index]
class KnowledgeGraphApplication(ApplicationMixin):
    """Knowledge graph embedding application.

    Provides two evaluation routines over the trained embeddings:

    * :meth:`entity_prediction` -- top-k recall of missing head/tail entities
    * :meth:`link_prediction` -- filtered ranking metrics (MR, MRR, HITS@k)

    Both can run either on the native graphvite backend or on PyTorch.
    """
    # heuristic: samples per embedding dimension used when deriving the
    # prediction batch size in get_batch_size()
    SAMPLE_PER_DIMENSION = 7
    # safety factor applied when bounding the batch size by available memory
    MEMORY_SCALE_FACTOR = 1.5

    def get_graph(self, **kwargs):
        """Return a new knowledge graph using this application's index type."""
        return graph.KnowledgeGraph(self.index_type)

    def get_solver(self, **kwargs):
        """Return a knowledge graph solver; one CPU per GPU is kept for the worker."""
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.KnowledgeGraphSolver(self.dim, self.float_type, self.index_type, self.gpus,
                                           num_sampler_per_worker, self.gpu_memory_limit)

    def set_parameters(self, model):
        """Copy entity & relation embeddings from `model`, realigned by name."""
        entity_mapping = self.get_mapping(self.graph.id2entity, model.graph.entity2id)
        relation_mapping = self.get_mapping(self.graph.id2relation, model.graph.relation2id)
        self.solver.entity_embeddings[:] = model.solver.entity_embeddings[entity_mapping]
        self.solver.relation_embeddings[:] = model.solver.relation_embeddings[relation_mapping]

    def entity_prediction(self, H=None, R=None, T=None, file_name=None, save_file=None, target="tail", k=10,
                          backend=cfg.backend):
        """Predict the top-`k` entities completing (h, r, ?) or (?, r, t).

        Queries may be given as parallel lists ``H``/``R``/``T`` or loaded
        from ``file_name`` (one triplet per line, or a 2-token line when the
        target column is omitted). The result is one ``[(entity, score), ...]``
        list per query, returned directly or written to ``save_file``
        (``.txt`` or ``.pkl``).
        """
        def torch_predict():
            # shard the queries evenly over all visible GPUs
            import torch
            entity_embeddings = SharedNDArray(self.solver.entity_embeddings)
            relation_embeddings = SharedNDArray(self.solver.relation_embeddings)
            num_gpu = len(self.gpus) if self.gpus else torch.cuda.device_count()
            work_load = (num_sample + num_gpu - 1) // num_gpu
            settings = []
            for i in range(num_gpu):
                work_H = H[work_load * i: work_load * (i+1)]
                work_R = R[work_load * i: work_load * (i+1)]
                work_T = T[work_load * i: work_load * (i+1)]
                settings.append((entity_embeddings, relation_embeddings, work_H, work_R, work_T,
                                 None, None, target, k, self.solver.model, self.solver.margin))
            results = self.gpu_map(triplet_prediction, settings)
            # each worker returns a list of recalls; flatten in query order
            return sum(results, [])

        def graphvite_predict():
            num_entity = len(entity2id)
            batch_size = self.get_batch_size(num_entity)
            recalls = []
            for i in range(0, num_sample, batch_size):
                batch_h = H[i: i + batch_size]
                batch_r = R[i: i + batch_size]
                batch_t = T[i: i + batch_size]
                # score every entity substituted into the target slot
                batch = self.generate_one_vs_rest(batch_h, batch_r, batch_t, num_entity, target)
                scores = self.solver.predict(batch)
                scores = scores.reshape(-1, num_entity)
                # argpartition: only the k best candidates need exact ordering
                indexes = np.argpartition(scores, num_entity - k, axis=-1)
                for index, score in zip(indexes, scores):
                    index = index[-k:]
                    score = score[index]
                    order = np.argsort(score)[::-1]
                    recall = list(zip(index[order], score[order]))
                    recalls.append(recall)
            return recalls

        assert_in(["head", "tail"], target=target)
        assert_in(["graphvite", "torch"], backend=backend)
        if backend == "torch":
            # release graphvite's GPU memory before PyTorch takes over
            self.solver.clear()
        if file_name:
            if not (H is None and R is None and T is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            R = []
            T = []
            with open(file_name, "r") as fin:
                for i, line in enumerate(fin):
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    if 3 <= len(tokens) <= 4:
                        # full triplet; a 4th column is ignored
                        h, r, t = tokens[:3]
                    elif len(tokens) == 2:
                        # 2-token line: the missing slot is the prediction target
                        if target == "head":
                            r, t = tokens
                            h = None
                        else:
                            h, r = tokens
                            t = None
                    else:
                        raise ValueError("Invalid line format at line %d in %s" % (i + 1, file_name))
                    H.append(h)
                    R.append(r)
                    T.append(t)
        if (H is None and T is None) or R is None:
            raise ValueError("Either evaluation data or file should be provided")
        # a completely missing side overrides the requested target
        if H is None:
            target = "head"
        if T is None:
            target = "tail"
        entity2id = self.graph.entity2id
        relation2id = self.graph.relation2id
        num_sample = len(R)
        # the target column is a placeholder; only the known columns get mapped
        new_H = np.zeros(num_sample, dtype=np.uint32)
        new_T = np.zeros(num_sample, dtype=np.uint32)
        if target == "head":
            new_R, new_T = self.name_map((relation2id, entity2id), (R, T))
        if target == "tail":
            new_H, new_R = self.name_map((entity2id, relation2id), (H, R))
        assert len(new_R) == len(R), "Can't recognize some entities or relations"
        H = np.asarray(new_H, dtype=np.uint32)
        R = np.asarray(new_R, dtype=np.uint32)
        T = np.asarray(new_T, dtype=np.uint32)
        if backend == "graphvite":
            recalls = graphvite_predict()
        else:
            recalls = torch_predict()
        # translate entity ids back into names for the caller
        id2entity = self.graph.id2entity
        new_recalls = []
        for recall in recalls:
            new_recall = [(id2entity[e], s) for e, s in recall]
            new_recalls.append(new_recall)
        recalls = new_recalls
        if save_file:
            logger.warning("save entity predictions to `%s`" % save_file)
            extension = os.path.splitext(save_file)[1]
            if extension == ".txt":
                with open(save_file, "w") as fout:
                    for recall in recalls:
                        tokens = ["%s: %g" % x for x in recall]
                        fout.write("%s\n" % "\t".join(tokens))
            elif extension == ".pkl":
                with open(save_file, "wb") as fout:
                    pickle.dump(recalls, fout, protocol=pickle.HIGHEST_PROTOCOL)
            else:
                raise ValueError("Unknown file extension `%s`" % extension)
        else:
            return recalls

    def link_prediction(self, H=None, R=None, T=None, filter_H=None, filter_R=None, filter_T=None, file_name=None,
                        filter_files=None, target="both", fast_mode=None, backend=cfg.backend):
        """Compute filtered ranking metrics (MR, MRR, HITS@1/3/10).

        Test triplets come from ``H``/``R``/``T`` or ``file_name``. Known
        true triplets passed via ``filter_*`` or ``filter_files`` are masked
        out of the candidate ranking (the "filtered" protocol). ``fast_mode``
        limits evaluation to that many randomly chosen triplets; ``target``
        ranks heads, tails, or both.
        """
        def torch_predict():
            import torch
            entity_embeddings = SharedNDArray(self.solver.entity_embeddings)
            relation_embeddings = SharedNDArray(self.solver.relation_embeddings)
            num_gpu = len(self.gpus) if self.gpus else torch.cuda.device_count()
            work_load = (fast_mode + num_gpu - 1) // num_gpu
            settings = []
            for i in range(num_gpu):
                work_H = H[work_load * i: work_load * (i+1)]
                work_R = R[work_load * i: work_load * (i+1)]
                work_T = T[work_load * i: work_load * (i+1)]
                settings.append((entity_embeddings, relation_embeddings, work_H, work_R, work_T,
                                 exclude_H, exclude_T, target, None, self.solver.model, self.solver.margin))
            results = self.gpu_map(triplet_prediction, settings)
            return np.concatenate(results)

        def graphvite_predict():
            num_entity = len(entity2id)
            # "both" scores head & tail candidates, i.e. twice the samples
            if target == "both":
                batch_size = self.get_batch_size(num_entity * 2)
            else:
                batch_size = self.get_batch_size(num_entity)
            rankings = []
            for i in range(0, fast_mode, batch_size):
                batch_h = H[i: i + batch_size]
                batch_r = R[i: i + batch_size]
                batch_t = T[i: i + batch_size]
                batch = self.generate_one_vs_rest(batch_h, batch_r, batch_t, num_entity, target)
                masks = self.generate_mask(batch_h, batch_r, batch_t, exclude_H, exclude_T, num_entity, target)
                if target == "head":
                    positives = batch_h
                if target == "tail":
                    positives = batch_t
                if target == "both":
                    # interleave (head, tail) positives to match the batch layout
                    positives = np.asarray([batch_h, batch_t]).transpose()
                    positives = positives.ravel()
                scores = self.solver.predict(batch)
                scores = scores.reshape(-1, num_entity)
                truths = scores[range(len(positives)), positives]
                # rank = #candidates scoring >= the true entity, with
                # filtered (known true) candidates masked out
                ranking = np.sum((scores >= truths[:, np.newaxis]) * masks, axis=1)
                rankings.append(ranking)
            return np.concatenate(rankings)

        assert_in(["head", "tail", "both"], target=target)
        assert_in(["graphvite", "torch"], backend=backend)
        if backend == "torch":
            # release graphvite's GPU memory before PyTorch takes over
            self.solver.clear()
        if file_name:
            if not (H is None and R is None and T is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            H = []
            R = []
            T = []
            with open(file_name, "r") as fin:
                for i, line in enumerate(fin):
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    if 3 <= len(tokens) <= 4:
                        h, r, t = tokens[:3]
                    else:
                        raise ValueError("Invalid line format at line %d in %s" % (i + 1, file_name))
                    H.append(h)
                    R.append(r)
                    T.append(t)
        if H is None or R is None or T is None:
            raise ValueError("Either evaluation data or file should be provided")
        if filter_files:
            if not (filter_H is None and filter_R is None and filter_T is None):
                raise ValueError("Filter data and file should not be provided at the same time")
            filter_H = []
            filter_R = []
            filter_T = []
            for filter_file in filter_files:
                with open(filter_file, "r") as fin:
                    for i, line in enumerate(fin):
                        tokens = self.tokenize(line)
                        if len(tokens) == 0:
                            continue
                        if 3 <= len(tokens) <= 4:
                            h, r, t = tokens[:3]
                        else:
                            raise ValueError("Invalid line format at line %d in %s" % (i + 1, filter_file))
                        filter_H.append(h)
                        filter_R.append(r)
                        filter_T.append(t)
        elif filter_H is None:
            # no filters given: fall back to the unfiltered ("raw") protocol
            filter_H = []
            filter_R = []
            filter_T = []
        entity2id = self.graph.entity2id
        relation2id = self.graph.relation2id
        new_H, new_R, new_T = self.name_map((entity2id, relation2id, entity2id), (H, R, T))
        logger.info("effective triplets: %d / %d" % (len(new_H), len(H)))
        H = np.asarray(new_H, dtype=np.uint32)
        R = np.asarray(new_R, dtype=np.uint32)
        T = np.asarray(new_T, dtype=np.uint32)
        new_H, new_R, new_T = self.name_map((entity2id, relation2id, entity2id), (filter_H, filter_R, filter_T))
        logger.info("effective filter triplets: %d / %d" % (len(new_H), len(filter_H)))
        filter_H = np.asarray(new_H, dtype=np.uint32)
        filter_R = np.asarray(new_R, dtype=np.uint32)
        filter_T = np.asarray(new_T, dtype=np.uint32)
        # index the filter triplets by their two fixed slots for O(1) lookup
        exclude_H = defaultdict(set)
        exclude_T = defaultdict(set)
        for h, r, t in zip(filter_H, filter_R, filter_T):
            exclude_H[(t, r)].add(h)
            exclude_T[(h, r)].add(t)
        num_sample = len(H)
        fast_mode = fast_mode or num_sample
        # fast mode: evaluate a random subset of the test triplets
        indexes = np.random.permutation(num_sample)[:fast_mode]
        H = H[indexes]
        R = R[indexes]
        T = T[indexes]
        if backend == "graphvite":
            rankings = graphvite_predict()
        elif backend == "torch":
            rankings = torch_predict()
        return {
            "MR": np.mean(rankings),
            "MRR": np.mean(1 / rankings),
            "HITS@1": np.mean(rankings <= 1),
            "HITS@3": np.mean(rankings <= 3),
            "HITS@10": np.mean(rankings <= 10)
        }

    def get_batch_size(self, sample_size):
        """Heuristic prediction batch size, capped by available system memory."""
        import psutil
        memory = psutil.virtual_memory()
        batch_size = int(self.SAMPLE_PER_DIMENSION * self.dim * self.graph.num_vertex
                         * self.solver.num_partition / self.solver.num_worker / sample_size)
        # per query: `sample_size` candidate rows of 3 uint32 in two index
        # arrays plus one uint64 each
        mem_per_sample = sample_size * (2 * 3 * np.uint32().itemsize + 1 * np.uint64().itemsize)
        max_batch_size = int(memory.available / mem_per_sample / self.MEMORY_SCALE_FACTOR)
        if max_batch_size < batch_size:
            logger.info("Memory is not enough for optimal prediction batch size. "
                        "Use the maximal possible size instead.")
            batch_size = max_batch_size
        return batch_size

    def generate_one_vs_rest(self, H, R, T, num_entity, target="both"):
        """Expand each triplet into `num_entity` candidate rows.

        For every (h, r, t), emit one (head, tail, relation) row per
        candidate entity substituted into the target slot (head, tail, or
        both in sequence), matching the layout consumed by solver.predict().
        """
        # fix: `np.bool` (an alias of the builtin) was removed in NumPy 1.24;
        # the builtin `bool` is the identical dtype.
        one = np.ones(num_entity, dtype=bool)
        all = np.arange(num_entity, dtype=np.uint32)
        batches = []
        for h, r, t in zip(H, R, T):
            if target == "head" or target == "both":
                batch = np.asarray([all, t * one, r * one]).transpose()
                batches.append(batch)
            if target == "tail" or target == "both":
                batch = np.asarray([h * one, all, r * one]).transpose()
                batches.append(batch)
        batches = np.concatenate(batches)
        return batches

    def generate_mask(self, H, R, T, exclude_H, exclude_T, num_entity, target="both"):
        """Build 0/1 masks hiding filtered (known true) candidate entities.

        The ground-truth entity of each triplet is always re-enabled so its
        own rank can still be computed.
        """
        one = np.ones(num_entity, dtype=bool)  # np.bool removed in NumPy 1.24
        masks = []
        for h, r, t in zip(H, R, T):
            if target == "head" or target == "both":
                mask = one.copy()
                mask[list(exclude_H[(t, r)])] = 0
                mask[h] = 1  # never mask the ground-truth head
                masks.append(mask)
            if target == "tail" or target == "both":
                mask = one.copy()
                mask[list(exclude_T[(h, r)])] = 0
                mask[t] = 1  # never mask the ground-truth tail
                masks.append(mask)
        masks = np.asarray(masks)
        return masks
def triplet_prediction(args):
    """Worker for PyTorch-based entity/link prediction on one device.

    args (tuple):
        entity_embeddings, relation_embeddings: shared embedding arrays
        H, R, T: query triplet columns (ids)
        exclude_H, exclude_T: filter sets keyed by the two fixed slots
            (unused when k is set)
        target: "head", "tail" or "both"
        k: if truthy, return top-k (index, score) recalls per query;
           otherwise return filtered rankings
        model: score function name understood by LinkPredictor
        margin: margin hyperparameter of the score function
        device: torch device ("cpu" or a CUDA device)

    Returns a list of recalls (k set) or an ndarray of rankings (k unset).
    """
    import torch
    from .network import LinkPredictor
    torch.set_grad_enabled(False)
    entity_embeddings, relation_embeddings, H, R, T, \
        exclude_H, exclude_T, target, k, model, margin, device = args
    entity_embeddings = np.asarray(entity_embeddings)
    relation_embeddings = np.asarray(relation_embeddings)
    num_entity = len(entity_embeddings)
    score_function = LinkPredictor(model, entity_embeddings, relation_embeddings, entity_embeddings,
                                   margin=margin)
    if device != "cpu":
        try:
            score_function = score_function.to(device)
        except RuntimeError:
            logger.info("Model is too large for GPU evaluation with PyTorch. Switch to CPU evaluation.")
            device = "cpu"
    if device == "cpu":
        # drop the (possibly partially transferred) predictor and rebuild on
        # CPU. Fix: the original re-used `score_function` on the right-hand
        # side after `del`eting it, which raised UnboundLocalError on this
        # fallback path; the model name is kept in `model` instead.
        del score_function
        torch.cuda.empty_cache()
        score_function = LinkPredictor(model, entity_embeddings, relation_embeddings, entity_embeddings,
                                       margin=margin)
    one = torch.ones(num_entity, dtype=torch.long, device=device)
    all = torch.arange(num_entity, dtype=torch.long, device=device)
    results = []
    for h, r, t in zip(H, R, T):
        if target == "head" or target == "both":
            # score every entity as a candidate head for (?, r, t)
            batch_h = all
            batch_r = r * one
            batch_t = t * one
            score = score_function(batch_h, batch_r, batch_t)
            if k:
                score, index = torch.topk(score, k)
                score = score.cpu().numpy()
                index = index.cpu().numpy()
                recall = list(zip(index, score))
                results.append(recall)
            else:
                # filtered ranking: mask known true heads, keep the truth
                mask = torch.ones(num_entity, dtype=torch.uint8, device=device)
                index = torch.tensor(list(exclude_H[(t, r)]), dtype=torch.long, device=device)
                mask[index] = 0
                mask[h] = 1
                ranking = torch.sum((score >= score[h]) * mask).item()
                results.append(ranking)
        if target == "tail" or target == "both":
            # score every entity as a candidate tail for (h, r, ?)
            batch_h = h * one
            batch_r = r * one
            batch_t = all
            score = score_function(batch_h, batch_r, batch_t)
            if k:
                score, index = torch.topk(score, k)
                score = score.cpu().numpy()
                index = index.cpu().numpy()
                recall = list(zip(index, score))
                results.append(recall)
            else:
                mask = torch.ones(num_entity, dtype=torch.uint8, device=device)
                index = torch.tensor(list(exclude_T[(h, r)]), dtype=torch.long, device=device)
                mask[index] = 0
                mask[t] = 1
                ranking = torch.sum((score >= score[t]) * mask).item()
                results.append(ranking)
    if not k:
        results = np.asarray(results)
    return results
class VisualizationApplication(ApplicationMixin):
    """High-dimensional data visualization application.

    Renders the learned 2D/3D coordinates as static scatter plots,
    animated hierarchy zooms, or rotating 3D animations.
    """
    # points farther than this many standard deviations from the mean are
    # treated as outliers and dropped before plotting
    OUTLIER_THRESHOLD = 5

    def get_graph(self, **kwargs):
        """Return a plain graph for edge-list input, otherwise a KNN graph."""
        if "file_name" in kwargs or "edge_list" in kwargs:
            return graph.Graph(self.index_type)
        else:
            return graph.KNNGraph(self.index_type, self.gpus, self.cpu_per_gpu)

    def get_solver(self, **kwargs):
        """Return a visualization solver; one CPU per GPU is kept for the worker."""
        if self.cpu_per_gpu == auto:
            num_sampler_per_worker = auto
        else:
            num_sampler_per_worker = self.cpu_per_gpu - 1
        return solver.VisualizationSolver(self.dim, self.float_type, self.index_type, self.gpus,
                                          num_sampler_per_worker, self.gpu_memory_limit)

    def set_parameters(self, model):
        """Copy coordinates from `model`; shapes must match exactly."""
        if self.solver.coordinates.shape != model.solver.coordinates.shape:
            raise ValueError("Expect coordinates with shape %s, but %s is found" %
                             (self.solver.coordinates.shape, model.solver.coordinates.shape))
        self.solver.coordinates[:] = model.solver.coordinates

    def visualization(self, Y=None, file_name=None, save_file=None, figure_size=10, scale=2):
        """Scatter-plot the 2D/3D coordinates, colored by labels.

        Labels come from ``Y`` or one-per-line from ``file_name``; with
        neither, every point shares a single "unknown" class. The figure is
        saved to ``save_file`` if given, otherwise shown interactively.
        """
        from matplotlib import pyplot as plt
        plt.switch_backend("agg")  # headless-safe rendering backend
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if not (dim == 2 or dim == 3):
            raise ValueError("Can't visualize %dD data" % dim)
        if file_name:
            if not (Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    y, = tokens
                    Y.append(y)
        elif Y is None:
            Y = ["unknown"] * self.graph.num_vertex
        Y = np.asarray(Y)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        Y = Y[indexes]
        classes = sorted(np.unique(Y))
        fig = plt.figure(figsize=(figure_size, figure_size))
        if dim == 2:
            ax = fig.gca()
        elif dim == 3:
            from mpl_toolkits.mplot3d import Axes3D
            # NOTE(review): gca(projection=...) was removed in matplotlib 3.6;
            # fig.add_subplot(projection="3d") is the modern equivalent
            ax = fig.gca(projection="3d")
        for cls in classes:
            indexes, = np.where(Y == cls)
            ax.scatter(*coordinates[indexes].T, s=scale)
        ax.set_xticks([])
        ax.set_yticks([])
        if dim == 3:
            ax.set_zticks([])
        if len(classes) > 1:
            ax.legend(classes, markerscale=6, loc="upper right")
        if save_file:
            logger.warning("save visualization to `%s`" % save_file)
            plt.savefig(save_file)
        else:
            plt.show()
        return {}

    def hierarchy(self, HY=None, file_name=None, target=None, save_file=None, figure_size=10, scale=2, duration=3):
        """Render an animated zoom through a label hierarchy as a GIF.

        ``HY`` (or ``file_name``) holds one row of hierarchical labels per
        vertex, coarsest level first. ``target`` selects the class whose
        lineage is highlighted; each frame drills one level deeper, staying
        on screen for ``duration`` seconds.
        """
        import imageio
        from matplotlib import pyplot as plt
        plt.switch_backend("agg")  # for compatibility
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if dim != 2:
            # fix: this used to raise the non-existent `ValuerError`, which
            # surfaced as a NameError instead of the intended message
            raise ValueError("Can't visualize the hierarchy of %dD data" % dim)
        if file_name:
            if not (HY is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            HY = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) > 0:
                        HY.append(tokens)
        elif HY is None:
            raise ValueError("No label is provided for hierarchy")
        HY = np.asarray(HY)
        # widen the string dtype so "else" can later be written into the array
        min_type = "S%d" % len("else")
        if HY.dtype < min_type:
            HY = HY.astype(min_type)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        HY = HY[indexes].T  # one row per hierarchy level after transpose
        if target is None:
            raise ValueError("Target class is not provided")
        # find the coarsest level that contains the target class, and one
        # representative sample inside it
        for depth, Y in enumerate(HY):
            indexes, = np.where(Y == target)
            if len(indexes) > 0:
                sample = indexes[0]
                break
        else:
            raise ValueError("Can't find target `%s` in the hierarchy" % target)
        settings = [(coordinates, None, HY[0], sample, figure_size, scale, 0)]
        for i in range(depth):
            settings.append((coordinates, HY[i], HY[i + 1], sample, figure_size, scale, i+1))
        # rendering is CPU-bound; draw the frames in parallel
        pool = multiprocessing.Pool(self.solver.num_worker + self.solver.num_sampler)
        frames = pool.map(render_hierarchy, settings)
        logger.warning("save hierarchy to `%s`" % save_file)
        imageio.mimsave(save_file, frames, fps=1 / duration, subrectangles=True)
        return {}

    def animation(self, Y=None, file_name=None, save_file=None, figure_size=5, scale=1, elevation=30, num_frame=700):
        """Render a rotating 3D scatter animation and save it as a GIF."""
        import imageio
        from matplotlib import pyplot as plt, animation
        from mpl_toolkits.mplot3d import Axes3D
        plt.switch_backend("agg")  # for compatibility
        self.solver.clear()
        coordinates = self.solver.coordinates
        dim = coordinates.shape[1]
        if dim != 3:
            raise ValueError("Can't animate %dD data" % dim)
        if file_name:
            if not (Y is None):
                raise ValueError("Evaluation data and file should not be provided at the same time")
            Y = []
            with open(file_name, "r") as fin:
                for line in fin:
                    tokens = self.tokenize(line)
                    if len(tokens) == 0:
                        continue
                    y, = tokens
                    Y.append(y)
        elif Y is None:
            Y = ["unknown"] * self.graph.num_vertex
        Y = np.asarray(Y)
        mean = np.mean(coordinates, axis=0)
        std = np.std(coordinates, axis=0)
        inside = np.abs(coordinates - mean) < self.OUTLIER_THRESHOLD * std
        indexes, = np.where(np.all(inside, axis=1))
        # discard outliers
        coordinates = coordinates[indexes]
        Y = Y[indexes]
        # one frame per azimuth angle, evenly spread over a full turn
        settings = []
        degrees = np.linspace(0, 360, num_frame, endpoint=False)
        for degree in degrees:
            settings.append((coordinates, Y, degree, figure_size, scale, elevation))
        pool = multiprocessing.Pool(self.solver.num_worker + self.solver.num_sampler)
        frames = pool.map(render_animation, settings)
        logger.warning("save animation to `%s`" % save_file)
        imageio.mimsave(save_file, frames, fps=num_frame / 70, subrectangles=True)
        return {}
def render_hierarchy(args):
    """Render a single frame of the hierarchy animation.

    Points sharing the highlighted sample's parent class (per ``H``) keep
    their labels from ``Y``; everything else is collapsed into a grey
    "else" class drawn behind the colored classes. ``H`` is None for the
    top-level frame. Returns the rendered frame as an RGBA ndarray.
    """
    from matplotlib import pyplot as plt
    plt.switch_backend("agg")
    coordinates, H, Y, sample, figure_size, scale, depth = args
    fig = plt.figure(figsize=(figure_size, figure_size))
    ax = fig.gca()
    if H is not None:
        # keep only the subtree containing `sample`; relabel the rest "else"
        for i in range(len(Y)):
            if H[i] != H[sample]:
                Y[i] = "else"
    classes = set(Y)
    classes.discard(Y[sample])
    classes.discard("else")
    # target's own class first (drawn on top), "else" last (background)
    classes = [Y[sample]] + sorted(classes) + ["else"]
    for i, cls in enumerate(classes):
        indexes, = np.where(Y == cls)
        color = "lightgrey" if cls == "else" else None
        # fix: the marker size was hard-coded to 2, silently ignoring the
        # `scale` argument threaded through by the hierarchy() caller
        # (whose default, scale=2, keeps the default output unchanged)
        ax.scatter(*coordinates[indexes].T, s=scale, c=color, zorder=-i)
    ax.set_xticks([])
    ax.set_yticks([])
    ax.legend(classes, markerscale=6, loc="upper right")
    fig.canvas.draw()
    # private matplotlib API; valid for the agg backend selected above
    frame = np.asarray(fig.canvas.renderer._renderer)
    return frame
def render_animation(args):
    """Render one frame of the rotating 3D scatter animation.

    Returns the frame as an ndarray taken from the agg canvas buffer.
    """
    from matplotlib import pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    plt.switch_backend("agg")
    points, labels, azimuth, fig_size, marker_size, elev = args
    label_set = sorted(np.unique(labels))
    figure = plt.figure(figsize=(fig_size, fig_size))
    axes = figure.gca(projection="3d")
    # one scatter call per class so matplotlib assigns distinct colors
    for label in label_set:
        members, = np.where(labels == label)
        axes.scatter(*points[members].T, s=marker_size)
    axes.view_init(elev=elev, azim=azimuth)
    for hide_ticks in (axes.set_xticks, axes.set_yticks, axes.set_zticks):
        hide_ticks([])
    if len(label_set) > 1:
        axes.legend(label_set, markerscale=6)
    figure.canvas.draw()
    return np.asarray(figure.canvas.renderer._renderer)
class Application(object):
    """Factory dispatching to a concrete application class by name."""

    # registry of application names understood by the factory
    application = {
        "graph": GraphApplication,
        "word graph": WordGraphApplication,
        "knowledge graph": KnowledgeGraphApplication,
        "visualization": VisualizationApplication
    }

    def __new__(cls, type, *args, **kwargs):
        """Instantiate the application registered under `type`."""
        if type not in cls.application:
            raise ValueError("Unknown application `%s`" % type)
        return cls.application[type](*args, **kwargs)
# Public names exported by `from <module> import *`.
__all__ = [
    "Application",
    "GraphApplication", "WordGraphApplication", "KnowledgeGraphApplication", "VisualizationApplication"
]
f734ed8bac4aef42dc6a6161d52892cad0940c1e | 5,894 | py | Python | atlas/measure_baseclass.py | USC-NSL/ripe-atlas | 9c512b0660923779031ec62909bc13bccace5890 | [
"MIT"
] | 4 | 2015-09-17T18:22:36.000Z | 2016-03-11T21:00:57.000Z | atlas/measure_baseclass.py | USC-NSL/ripe-atlas | 9c512b0660923779031ec62909bc13bccace5890 | [
"MIT"
] | null | null | null | atlas/measure_baseclass.py | USC-NSL/ripe-atlas | 9c512b0660923779031ec62909bc13bccace5890 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import json
import sys
import traceback
import os
import requests
import argparse
SLEEP_TIME = 60*5
debug = False
key_loc = '~/.atlas/auth'
class MeasurementBase(object):
def __init__(self, target, key, probe_list=None, sess=None):
self.target = target
self.description = ''
self.start_time = None
self.stop_time = None
self.af = 4
self.is_oneoff = True
self.is_public = True
self.resolve_on_probe = True
self.interval = 86400 #1 day
self.key = key
self.sess = sess if sess else requests
if probe_list:
self.num_probes = len(probe_list)
self.probe_type = 'probes'
self.probe_value = setup_probe_value('probes', probe_list)
def setup_definitions(self):
definitions = {}
definitions['target'] = self.target
definitions['description'] = self.description
definitions['af'] = self.af #set ip version
definitions['type'] = self.measurement_type
definitions['is_oneoff'] = str(self.is_oneoff).lower()
definitions['interval'] = self.interval
definitions['resolve_on_probe'] = str(self.resolve_on_probe).lower()
definitions['is_public'] = str(self.is_public).lower()
return definitions
def setup_probes(self):
probes = {}
probes['requested'] = self.num_probes
probes['type'] = self.probe_type
probes['value'] = self.probe_value
return probes
def run(self):
key = self.key
definitions = self.setup_definitions()
probes = self.setup_probes()
data = {'definitions': [definitions], 'probes': [probes]}
if self.start_time is not None:
data['start_time'] = self.start_time
if self.stop_time is not None:
data['stop_time'] = self.stop_time
data_str = json.dumps(data)
headers = {'content-type': 'application/json', 'accept': 'application/json'}
response = self.sess.post('https://atlas.ripe.net/api/v1/measurement/?key='+key, data_str, headers=headers)
response_str = response.text
return json.loads(response_str)
def readkey(keyfile=key_loc):
    """Read the RIPE Atlas API key from ``keyfile``.

    ``~`` in the path is expanded. When the file is empty a warning is
    printed to stderr, but the (empty) key is still returned so the caller
    decides how to fail.
    """
    auth_file = os.path.expanduser(keyfile)
    # context manager guarantees the handle is closed even if read() raises
    # (the original leaked the handle on error)
    with open(auth_file) as f:
        key = f.read().strip()
    if not key:
        # fix: message previously misspelled "Meaurement"
        sys.stderr.write('Measurement key is too short!\n')
    return key
def setup_probe_value(type, arg_values):
    """Convert user-supplied probe arguments into the API 'value' field.

    type: probe selector type ('asn', 'msm', 'probes', or any other)
    arg_values: list of argument values passed in by the user
    """
    if type in ('asn', 'msm'):
        # these selectors take a single integer id
        return int(arg_values[0])
    if type == 'probes':
        # comma-separated list of probe ids
        return ','.join(str(v) for v in arg_values)
    # every other selector takes a single literal value
    return arg_values[0]
def load_input(inputfile):
    """Parse a target-list file into ``{target_ip: [node_id, ...]}``.

    Each non-empty line must start with ``<node_id> <target_ip>``; further
    columns are ignored. Node ids are grouped per target, preserving file
    order.
    """
    target_dict = {}
    # context manager closes the file even on a parse error
    with open(inputfile) as f:
        for line in f:
            line = line.strip()
            if not line:  # skip blank lines
                continue
            # split() (not split(' ')) tolerates tabs and runs of spaces
            chunks = line.split()
            nodeid = chunks[0]
            targetip = chunks[1]
            target_dict.setdefault(targetip, []).append(nodeid)
    return target_dict
def process_response(response):
    """Interpret a parsed measurement-API response.

    Returns ('ok', [measurement ids]) on success, or ('error', message)
    when the API reported an error or the response is unrecognized.
    """
    if 'error' in response:
        details = response['error']
        # surface the API's message together with its numeric code
        return 'error', '%s code: %d' % (details['message'], details['code'])
    if 'measurements' in response:
        return 'ok', response['measurements']
    return 'error', 'Unknown response: %s' % str(response)
def format_response(response):
    """Render a measurement-API response as a human-readable string."""
    if 'error' in response:
        details = response['error']
        return details['message'] + ' code: ' + str(details['code'])
    if 'measurements' in response:
        # one measurement id per line
        return '\n'.join(str(m) for m in response['measurements'])
    return 'Error processing response: ' + str(response)
def config_argparser():
    """Build the shared command-line parser for measurement scripts.

    Note: option defaults are wrapped in one-element lists because every
    value-taking option is declared with nargs=1, so callers uniformly
    index the first element.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--description', default=[''], nargs=1, help='measurement description (default: empty)')
    parser.add_argument('-k', '--key-file', default=[key_loc], nargs=1, help='Path to RIPE Atlas API key (default: '+key_loc+')')
    parser.add_argument('-r', '--dont-resolve-on-probe', action='store_true',
            help='Do DNS resolution on probe? (default: on)')
    parser.add_argument('--ipv6', action='store_true', help='Use IPv6 instead of IPv4 (default: IPv4)')
    parser.add_argument('--repeats', nargs=1, default=[0],
            help='Is a one-off measurement. Non-zero is the repeating interval in seconds (default: 0)')
    parser.add_argument('--private', action='store_true',
            help='Sets this measurement to be private. Other people will not see the results. (default: public)')
    parser.add_argument('--start-time', default=[None], nargs=1, help='Specify a Unix timestamp for this measurement to begin (default: immediately)')
    parser.add_argument('--stop-time', default=[None], nargs=1, help='Specify a Unix timestamp for this measurement to stop')
    # positional arguments
    parser.add_argument('target_list', nargs=1, help='Path to target-list')
    parser.add_argument('meas_id_output', nargs=1, help='Path to file where measurement ids will be written')
    return parser
| 34.069364 | 150 | 0.619444 |
import json
import sys
import traceback
import os
import requests
import argparse
# Polling interval in seconds; not referenced in this module -- presumably
# consumed by scripts importing it (TODO confirm).
SLEEP_TIME = 60*5
# Global debug toggle; not referenced in this module.
debug = False
# Default path of the RIPE Atlas API key file (expanded by readkey()).
key_loc = '~/.atlas/auth'
class MeasurementBase(object):
    """Base class for RIPE Atlas measurement requests.

    Subclasses are expected to provide ``measurement_type`` (used by
    setup_definitions); this class assembles the JSON payload and POSTs it
    to the RIPE Atlas v1 measurement API.
    """
    def __init__(self, target, key, probe_list=None, sess=None):
        """
        target: measurement target (hostname or IP)
        key: RIPE Atlas API key string
        probe_list: optional list of probe ids to run the measurement from
        sess: optional requests.Session; falls back to the requests module
        """
        self.target = target
        self.description = ''
        self.start_time = None   # optional Unix timestamp
        self.stop_time = None    # optional Unix timestamp
        self.af = 4              # IP version: 4 or 6
        self.is_oneoff = True
        self.is_public = True
        self.resolve_on_probe = True
        self.interval = 86400    # repeat interval in seconds (1 day)
        self.key = key
        # a Session allows connection reuse; the bare module works the same
        self.sess = sess if sess else requests
        if probe_list:
            self.num_probes = len(probe_list)
            self.probe_type = 'probes'
            self.probe_value = setup_probe_value('probes', probe_list)
    def setup_definitions(self):
        """Build the 'definitions' entry of the measurement request body."""
        definitions = {}
        definitions['target'] = self.target
        definitions['description'] = self.description
        definitions['af'] = self.af  # IP version
        definitions['type'] = self.measurement_type
        # NOTE(review): the booleans are serialized as the strings
        # "true"/"false" rather than JSON booleans; presumably the v1 API
        # accepted this -- confirm before changing.
        definitions['is_oneoff'] = str(self.is_oneoff).lower()
        definitions['interval'] = self.interval
        definitions['resolve_on_probe'] = str(self.resolve_on_probe).lower()
        definitions['is_public'] = str(self.is_public).lower()
        return definitions
    def setup_probes(self):
        """Build the 'probes' entry of the measurement request body."""
        probes = {}
        probes['requested'] = self.num_probes
        probes['type'] = self.probe_type
        probes['value'] = self.probe_value
        return probes
    def run(self):
        """POST the measurement request; return the parsed JSON response."""
        key = self.key
        definitions = self.setup_definitions()
        probes = self.setup_probes()
        data = {'definitions': [definitions], 'probes': [probes]}
        # start/stop times (when set) go at the top level of the payload
        if self.start_time is not None:
            data['start_time'] = self.start_time
        if self.stop_time is not None:
            data['stop_time'] = self.stop_time
        data_str = json.dumps(data)
        headers = {'content-type': 'application/json', 'accept': 'application/json'}
        response = self.sess.post('https://atlas.ripe.net/api/v1/measurement/?key='+key, data_str, headers=headers)
        response_str = response.text
        return json.loads(response_str)
def readkey(keyfile=key_loc):
    """Read the RIPE Atlas API key from ``keyfile``.

    ``~`` in the path is expanded. When the file is empty a warning is
    printed to stderr, but the (empty) key is still returned so the caller
    decides how to fail.
    """
    auth_file = os.path.expanduser(keyfile)
    # context manager guarantees the handle is closed even if read() raises
    # (the original leaked the handle on error)
    with open(auth_file) as f:
        key = f.read().strip()
    if not key:
        # fix: message previously misspelled "Meaurement"
        sys.stderr.write('Measurement key is too short!\n')
    return key
def setup_probe_value(type, arg_values):
    """Convert user-supplied probe arguments into the API 'value' field.

    type: probe selector type ('asn', 'msm', 'probes', or any other)
    arg_values: list of argument values passed in by the user
    """
    if type in ('asn', 'msm'):
        # these selectors take a single integer id
        return int(arg_values[0])
    if type == 'probes':
        # comma-separated list of probe ids
        return ','.join(str(v) for v in arg_values)
    # every other selector takes a single literal value
    return arg_values[0]
def load_input(inputfile):
    """Parse a target-list file into ``{target_ip: [node_id, ...]}``.

    Each non-empty line must start with ``<node_id> <target_ip>``; further
    columns are ignored. Node ids are grouped per target, preserving file
    order.
    """
    target_dict = {}
    # context manager closes the file even on a parse error
    with open(inputfile) as f:
        for line in f:
            line = line.strip()
            if not line:  # skip blank lines
                continue
            # split() (not split(' ')) tolerates tabs and runs of spaces
            chunks = line.split()
            nodeid = chunks[0]
            targetip = chunks[1]
            target_dict.setdefault(targetip, []).append(nodeid)
    return target_dict
def process_response(response):
    """Interpret a parsed measurement-API response.

    Returns ('ok', [measurement ids]) on success, or ('error', message)
    when the API reported an error or the response is unrecognized.
    """
    if 'error' in response:
        details = response['error']
        # surface the API's message together with its numeric code
        return 'error', '%s code: %d' % (details['message'], details['code'])
    if 'measurements' in response:
        return 'ok', response['measurements']
    return 'error', 'Unknown response: %s' % str(response)
def format_response(response):
    """Render a measurement-API response as a human-readable string."""
    if 'error' in response:
        details = response['error']
        return details['message'] + ' code: ' + str(details['code'])
    if 'measurements' in response:
        # one measurement id per line
        return '\n'.join(str(m) for m in response['measurements'])
    return 'Error processing response: ' + str(response)
def config_argparser():
    """Assemble the command line interface for launching measurements."""
    argparser = argparse.ArgumentParser()
    # Optional knobs first, positional arguments last (matches help order).
    argparser.add_argument('-d', '--description', default=[''], nargs=1,
                           help='measurement description (default: empty)')
    argparser.add_argument('-k', '--key-file', default=[key_loc], nargs=1,
                           help='Path to RIPE Atlas API key (default: '
                                + key_loc + ')')
    argparser.add_argument('-r', '--dont-resolve-on-probe',
                           action='store_true',
                           help='Do DNS resolution on probe? (default: on)')
    argparser.add_argument('--ipv6', action='store_true',
                           help='Use IPv6 instead of IPv4 (default: IPv4)')
    argparser.add_argument('--repeats', nargs=1, default=[0],
                           help='Is a one-off measurement. Non-zero is the '
                                'repeating interval in seconds (default: 0)')
    argparser.add_argument('--private', action='store_true',
                           help='Sets this measurement to be private. Other '
                                'people will not see the results. '
                                '(default: public)')
    argparser.add_argument('--start-time', default=[None], nargs=1,
                           help='Specify a Unix timestamp for this '
                                'measurement to begin (default: immediately)')
    argparser.add_argument('--stop-time', default=[None], nargs=1,
                           help='Specify a Unix timestamp for this '
                                'measurement to stop')
    argparser.add_argument('target_list', nargs=1,
                           help='Path to target-list')
    argparser.add_argument('meas_id_output', nargs=1,
                           help='Path to file where measurement ids will '
                                'be written')
    return argparser
| true | true |
f734edb79c58022e50f9363b5ab9395deb5cbc15 | 138,531 | py | Python | nova/tests/api/openstack/compute/plugins/v3/test_servers.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | null | null | null | nova/tests/api/openstack/compute/plugins/v3/test_servers.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | null | null | null | nova/tests/api/openstack/compute/plugins/v3/test_servers.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | null | null | null | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import contextlib
import copy
import datetime
import uuid
import iso8601
import mock
import mox
from oslo.config import cfg
from oslo.utils import timeutils
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import access_ips
from nova.api.openstack.compute.plugins.v3 import ips
from nova.api.openstack.compute.plugins.v3 import keypairs
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.schemas.v3 import keypairs as keypairs_schema
from nova.api.openstack.compute.schemas.v3 import servers as servers_schema
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _
from nova.image import glance
from nova.network import manager
from nova.network.neutronv2 import api as neutron_api
from nova import objects
from nova.objects import instance as instance_obj
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils as nova_utils
# Global oslo.config handle; password_length is imported for use by other
# tests in this module (not visible in this chunk).
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')

# Canonical fake server UUID shared by the fixtures below.
FAKE_UUID = fakes.FAKE_UUID
INSTANCE_IDS = {FAKE_UUID: 1}
# Default Instance fields used when building InstanceList objects in stubs.
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def fake_gen_uuid():
    """Stand-in for uuid generation that always yields the fixture UUID."""
    return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
    """DB-layer stub that simulates an empty instance listing."""
    return []
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """DB-layer stub: merge *values* into a stubbed instance record.

    Mirrors the real API's (old, new) return shape, though here both
    elements are the same merged dict.
    """
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    merged = dict(stub, **values)
    return (merged, merged)
def instance_update(context, instance_uuid, values, update_cells=True):
    """DB-layer stub: return a stubbed instance record updated with *values*."""
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    return dict(stub, **values)
def fake_compute_api(cls, req, id):
    """Generic compute-API action stub that simply reports success."""
    return True
def fake_start_stop_not_ready(self, context, instance):
    """Stub start/stop that fails as if the instance were not ready yet."""
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
    """Stub start/stop that fails as if the instance were in a bad state."""
    raise exception.InstanceInvalidState(
        instance_uuid=instance['uuid'], attr='fake_attr',
        method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
                                        columns_to_join, use_slave=False):
    """DB-layer stub that reports every instance lookup as missing."""
    raise exception.InstanceNotFound(instance_id=uuid)
class MockSetAdminPassword(object):
    """Callable stub that records the most recent set-password request."""

    def __init__(self):
        # Nothing captured until the stub is invoked.
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        # Remember what the caller asked for so tests can assert on it.
        self.instance_id = instance_id
        self.password = password
class Base64ValidationTest(test.TestCase):
    """Exercise the server controller's lenient base64 decoding helper."""

    def setUp(self):
        super(Base64ValidationTest, self).setUp()
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

    def test_decode_base64(self):
        plaintext = "A random string"
        decoded = self.controller._decode_base64(base64.b64encode(plaintext))
        self.assertEqual(decoded, plaintext)

    def test_decode_base64_binary(self):
        plaintext = "\x00\x12\x75\x99"
        decoded = self.controller._decode_base64(base64.b64encode(plaintext))
        self.assertEqual(decoded, plaintext)

    def test_decode_base64_whitespace(self):
        plaintext = "A random string"
        encoded = base64.b64encode(plaintext)
        # Whitespace sprinkled through the payload must be ignored.
        padded = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
        decoded = self.controller._decode_base64(padded)
        self.assertEqual(decoded, plaintext)

    def test_decode_base64_invalid(self):
        # Raw (unencoded) text is not valid base64 and must yield None.
        plaintext = "A random string"
        decoded = self.controller._decode_base64(plaintext)
        self.assertIsNone(decoded)

    def test_decode_base64_illegal_bytes(self):
        plaintext = "A random string"
        encoded = base64.b64encode(plaintext)
        # Characters outside the base64 alphabet invalidate the payload.
        mangled = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
        decoded = self.controller._decode_base64(mangled)
        self.assertIsNone(decoded)
class NeutronV2Subclass(neutron_api.API):
    """Used to ensure that API handles subclasses properly."""
    # Intentionally empty: only the subclass relationship matters.
    pass
class ControllerTest(test.TestCase):
    """Shared fixture wiring for the servers / ips controller tests."""

    def setUp(self):
        super(ControllerTest, self).setUp()
        self.flags(verbose=True, use_ipv6=False)
        # Neutralise rate limiting, keypairs and the image service.
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        # Route all instance DB access through canned fixtures.
        instance_get = fakes.fake_instance_get()
        instance_get_all = fakes.fake_instance_get_all_by_filters()
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       instance_get_all)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       instance_get)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.ips_controller = ips.IPsController()
        # Reinitialise policy so each test starts from the default rules.
        policy.reset()
        policy.init()
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
class ServersControllerTest(ControllerTest):
    def setUp(self):
        super(ServersControllerTest, self).setUp()
        # Pin the glance host so generated image bookmark links are stable.
        CONF.set_override('host', 'localhost', group='glance')
    # The tests below feed "requested_networks" payloads into the controller
    # helper and check the (network, fixed_ip, port, pci) tuples it produces
    # under the various nova-network / neutron configurations.

    def test_requested_networks_prefix(self):
        # A "br-" prefixed uuid is accepted and passed through unchanged.
        uuid = 'br-00000000-0000-0000-0000-000000000000'
        requested_networks = [{'uuid': uuid}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertIn((uuid, None), res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_port(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_network(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None, None)], res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
        # When both are given, the port wins and the network uuid is dropped.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_neutronv2_enabled_conflict_on_fixed_ip(self):
        # A fixed ip combined with a port is rejected as a bad request.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        addr = '10.0.0.1'
        requested_networks = [{'uuid': network,
                               'fixed_ip': addr,
                               'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)

    def test_requested_networks_neutronv2_disabled_with_port(self):
        # Ports are a neutron concept; without neutron they are rejected.
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)

    def test_requested_networks_api_enabled_with_v2_subclass(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_neutronv2_subclass_with_port(self):
        # NOTE(review): this path points at "compute.test_servers", not the
        # v3 plugin test module this code lives in — verify the class path
        # still resolves to NeutronV2Subclass.
        cls = 'nova.tests.api.openstack.compute.test_servers.NeutronV2Subclass'
        self.flags(network_api_class=cls)
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_get_server_by_uuid(self):
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)

    def test_get_server_joins_pci_devices(self):
        # show() must ask the compute API to eagerly load pci_devices.
        self.expected_attrs = None

        def fake_get(_self, *args, **kwargs):
            self.expected_attrs = kwargs['expected_attrs']
            ctxt = context.RequestContext('fake', 'fake')
            return fake_instance.fake_instance_obj(ctxt)

        self.stubs.Set(compute_api.API, 'get', fake_get)

        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        self.controller.show(req, FAKE_UUID)

        self.assertIn('pci_devices', self.expected_attrs)

    def test_unique_host_id(self):
        """Create two servers with the same host and different
        project_ids and check that the host_id's are unique.
        """
        def return_instance_with_host(self, *args, **kwargs):
            # A fresh random project_id per lookup forces distinct hostIds.
            project_id = str(uuid.uuid4())
            return fakes.stub_instance(id=1, uuid=FAKE_UUID,
                                       project_id=project_id,
                                       host='fake_host')

        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_instance_with_host)
        self.stubs.Set(db, 'instance_get',
                       return_instance_with_host)

        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        server1 = self.controller.show(req, FAKE_UUID)
        server2 = self.controller.show(req, FAKE_UUID)

        self.assertNotEqual(server1['server']['hostId'],
                            server2['server']['hostId'])
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Build the full expected 'server' view used by the show() tests."""
        return {
            "server": {
                "id": uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": progress,
                "name": "server1",
                "status": status,
                "hostId": '',
                "image": {
                    "id": "10",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                # Address entries mirror what the fixture network cache
                # yields, including the OS-EXT-IPS extension attributes.
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {
                    "seq": "1",
                },
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v3/servers/%s" % uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/servers/%s" % uuid,
                    },
                ],
            }
        }
    def test_get_server_by_id(self):
        self.flags(use_ipv6=True)
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"

        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)

        # Default fixture instance is still building: status BUILD, 0%.
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0)

        self.assertThat(res_dict, matchers.DictMatches(expected_server))

    def test_get_server_with_active_status_by_id(self):
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"

        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)

        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))

    def test_get_server_with_id_image_ref_by_id(self):
        image_ref = "10"
        image_bookmark = "http://localhost/images/10"
        flavor_id = "1"
        flavor_bookmark = "http://localhost/flavors/1"

        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, image_ref=image_ref,
            flavor_id=flavor_id, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)

        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_addresses_from_cache(self):
        # Two networks in the cache: "public" with three subnets and
        # "private" with one; the ips controller must flatten them.
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)

        def _ip(ip):
            return {'address': ip, 'type': 'fixed'}

        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                     {'cidr': '1.2.3.0/16',
                                      'ips': [_ip(ip) for ip in pub1]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return_server = fakes.fake_instance_get(nw_cache=nw_cache)
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)

        req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % FAKE_UUID)
        res_dict = self.ips_controller.index(req, FAKE_UUID)

        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                    {'version': 4, 'addr': '192.168.0.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '172.19.0.2',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '1.2.3.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                ],
            },
        }
        self.assertThat(res_dict, matchers.DictMatches(expected))

    def test_get_server_addresses_nonexistent_network(self):
        url = '/v3/servers/%s/ips/network_0' % FAKE_UUID
        req = fakes.HTTPRequestV3.blank(url)
        self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
                          req, FAKE_UUID, 'network_0')

    def test_get_server_addresses_nonexistent_server(self):
        def fake_instance_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)

        server_id = str(uuid.uuid4())
        req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % server_id)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.ips_controller.index, req, server_id)
    def test_get_server_list_empty(self):
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_empty)

        req = fakes.HTTPRequestV3.blank('/servers')
        res_dict = self.controller.index(req)

        num_servers = len(res_dict['servers'])
        self.assertEqual(0, num_servers)

    def test_get_server_list_with_reservation_id(self):
        req = fakes.HTTPRequestV3.blank('/servers?reservation_id=foo')
        res_dict = self.controller.index(req)

        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1

    def test_get_server_list_with_reservation_id_empty(self):
        req = fakes.HTTPRequestV3.blank('/servers/detail?'
                                        'reservation_id=foo')
        res_dict = self.controller.detail(req)

        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1

    def test_get_server_list_with_reservation_id_details(self):
        req = fakes.HTTPRequestV3.blank('/servers/detail?'
                                        'reservation_id=foo')
        res_dict = self.controller.detail(req)

        i = 0
        for s in res_dict['servers']:
            self.assertEqual(s.get('name'), 'server%d' % (i + 1))
            i += 1

    def test_get_server_list(self):
        req = fakes.HTTPRequestV3.blank('/servers')
        res_dict = self.controller.index(req)

        # The summary view has ids, names and links but no image details.
        self.assertEqual(len(res_dict['servers']), 5)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertIsNone(s.get('image', None))

            expected_links = [
                {
                    "rel": "self",
                    "href": "http://localhost/v3/servers/%s" % s['id'],
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/servers/%s" % s['id'],
                },
            ]

            self.assertEqual(s['links'], expected_links)

    def test_get_servers_with_limit(self):
        req = fakes.HTTPRequestV3.blank('/servers?limit=3')
        res_dict = self.controller.index(req)

        servers = res_dict['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in xrange(len(servers))])

        # A "next" link carries the limit and a marker at the last result.
        servers_links = res_dict['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected_params = {'limit': ['3'],
                           'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected_params))

    def test_get_servers_with_limit_bad_value(self):
        req = fakes.HTTPRequestV3.blank('/servers?limit=aaa')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_server_details_empty(self):
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_empty)

        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)

        num_servers = len(res_dict['servers'])
        self.assertEqual(0, num_servers)

    def test_get_server_details_with_limit(self):
        req = fakes.HTTPRequestV3.blank('/servers/detail?limit=3')
        res = self.controller.detail(req)

        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in xrange(len(servers))])

        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')

        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))

    def test_get_server_details_with_limit_bad_value(self):
        req = fakes.HTTPRequestV3.blank('/servers/detail?limit=aaa')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.detail, req)

    def test_get_server_details_with_limit_and_other_params(self):
        # Unknown query params must be propagated into the "next" link.
        req = fakes.HTTPRequestV3.blank('/servers/detail'
                                        '?limit=3&blah=2:t')
        res = self.controller.detail(req)

        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                         [fakes.get_fake_uuid(i) for i in xrange(len(servers))])

        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')

        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'blah': ['2:t'],
                    'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))

    def test_get_servers_with_too_big_limit(self):
        # A limit beyond the result count produces no pagination links.
        req = fakes.HTTPRequestV3.blank('/servers?limit=30')
        res_dict = self.controller.index(req)
        self.assertNotIn('servers_links', res_dict)

    def test_get_servers_with_bad_limit(self):
        req = fakes.HTTPRequestV3.blank('/servers?limit=asdf')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_get_servers_with_marker(self):
        url = '/v3/servers?marker=%s' % fakes.get_fake_uuid(2)
        req = fakes.HTTPRequestV3.blank(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ["server4", "server5"])

    def test_get_servers_with_limit_and_marker(self):
        url = '/v3/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
        req = fakes.HTTPRequestV3.blank(url)
        servers = self.controller.index(req)['servers']
        self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])

    def test_get_servers_with_bad_marker(self):
        req = fakes.HTTPRequestV3.blank('/servers?limit=2&marker=asdf')
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)

    def test_get_servers_with_bad_option(self):
        # Unrecognised search options are silently ignored for users.
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?unknownoption=whee')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_image(self):
        # ?image=... must be passed through as a search option.
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('image', search_opts)
            self.assertEqual(search_opts['image'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?image=12345')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    # The tests below verify how ?tenant_id= and ?all_tenants= translate
    # into DB-layer filters for admin and non-admin requests.

    def test_tenant_id_filter_converts_to_project_id_for_admin(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers'
                                        '?all_tenants=1&tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_tenant_id_filter_no_admin_context(self):
        # Non-admins cannot filter by another tenant; their own project
        # id is enforced regardless of the query string.
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotEqual(filters, None)
            self.assertEqual(filters['project_id'], 'fake')
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake')
        res = self.controller.index(req)
        self.assertIn('servers', res)

    def test_tenant_id_filter_implies_all_tenants(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotEqual(filters, None)
            # The project_id assertion checks that the project_id
            # filter is set to that specified in the request url and
            # not that of the context, verifying that the all_tenants
            # flag was enabled
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)

    def test_all_tenants_param_normal(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_param_one(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_param_zero(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=0',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_param_false(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=false',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_param_invalid(self):
        # NOTE(review): this stub's signature omits use_slave unlike its
        # siblings; it appears unused because the request is rejected
        # before any DB call — confirm.
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=xxx',
                                        use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.index, req)
    def test_admin_restricted_tenant(self):
        # Without all_tenants even an admin only sees its own project.
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'fake')
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers',
                                        use_admin_context=True)
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_pass_policy(self):
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]

        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        # Grant get_all_tenants to this project so the listing succeeds.
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }

        policy.set_rules(rules)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
        res = self.controller.index(req)

        self.assertIn('servers', res)

    def test_all_tenants_fail_policy(self):
        # NOTE(review): signature is shorter than the sibling stubs; the
        # stub is never reached because policy denies the request first.
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None):
            self.assertIsNotNone(filters)
            return [fakes.stub_instance(100)]

        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:non_fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }

        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.index, req)
    def test_get_servers_allows_flavor(self):
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID
            self.assertEqual(search_opts['flavor'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?flavor=12345')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)

    def test_get_servers_with_bad_flavor(self):
        # A non-numeric flavor matches nothing rather than erroring.
        req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 0)

    def test_get_server_details_with_bad_flavor(self):
        req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
        servers = self.controller.detail(req)['servers']

        self.assertThat(servers, testtools.matchers.HasLength(0))

    def test_get_servers_allows_status(self):
        # ?status=active is translated into a vm_state filter.
        server_uuid = str(uuid.uuid4())

        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)

        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)

        req = fakes.HTTPRequestV3.blank('/servers?status=active')
        servers = self.controller.index(req)['servers']

        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_task_status(self):
server_uuid = str(uuid.uuid4())
task_state = task_states.REBOOTING
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None, want_objects=False,
expected_attrs=None):
self.assertIsNotNone(search_opts)
self.assertIn('task_state', search_opts)
self.assertEqual([task_states.REBOOT_PENDING,
task_states.REBOOT_STARTED,
task_states.REBOOTING],
search_opts['task_state'])
db_list = [fakes.stub_instance(100, uuid=server_uuid,
task_state=task_state)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequestV3.blank('/servers?status=reboot')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_resize_status(self):
        """?status=resize must map to both ACTIVE and STOPPED vm states."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'],
                             [vm_states.ACTIVE, vm_states.STOPPED])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=resize')
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = fakes.HTTPRequestV3.blank('/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
    def test_get_servers_deleted_status_as_user(self):
        """Non-admin requests may not filter by status=deleted (403)."""
        req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
                                        use_admin_context=False)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins may filter by status=deleted; it maps to a 'deleted'
        vm_state filter in compute_api.get_all().
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
                                        use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_name(self):
        """The ?name regex filter must be passed through verbatim."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_changes_since(self):
        """An ISO-8601 changes-since value is parsed into an aware
        datetime, and it must NOT imply a 'deleted' search option.
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
    def test_get_servers_admin_filters_as_user(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin. Make sure the admin and unknown options
        are stripped before they get to compute_api.get_all()
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('ip', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertNotIn('unknown_option', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        # NOTE(review): this is the only test in the group built with the
        # v2 fakes.HTTPRequest rather than fakes.HTTPRequestV3 -- confirm
        # whether that is intentional or a copy/paste slip.
        req = fakes.HTTPRequest.blank('/servers?%s' % query_str)
        res = self.controller.index(req)
        servers = res['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            self.assertIn('unknown_option', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequestV3.blank('/servers?%s' % query_str,
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_ip(self):
"""Test getting servers by ip."""
server_uuid = str(uuid.uuid4())
def fake_get_all(compute_self, context, search_opts=None,
sort_key=None, sort_dir='desc',
limit=None, marker=None, want_objects=False,
expected_attrs=None):
self.assertIsNotNone(search_opts)
self.assertIn('ip', search_opts)
self.assertEqual(search_opts['ip'], '10\..*')
db_list = [fakes.stub_instance(100, uuid=server_uuid)]
return instance_obj._make_instance_list(
context, objects.InstanceList(), db_list, FIELDS)
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequestV3.blank('/servers?ip=10\..*')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            # The ip6 filter must be passed through untouched for admins.
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?ip6=ffff.*',
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_all_server_details(self):
        """detail() must render flavor/image bookmark links, status,
        and per-server metadata for every stubbed instance.
        """
        expected_flavor = {
            "id": "1",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/flavors/1',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/images/10',
                },
            ],
        }
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        # The fakes module seeds instances with sequential uuids/names.
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'BUILD')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostIds. In this test,
        there are 5 instances - 2 on one host and 3 on another.
        """
        def return_servers_with_host(context, *args, **kwargs):
            # Alternate instances between two fake hosts (host=0 / host=1).
            return [fakes.stub_instance(i + 1, 'fake', 'fake', host=i % 2,
                                        uuid=fakes.get_fake_uuid(i))
                    for i in xrange(5)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_with_host)
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        # hostId is an opaque hash: non-empty and distinct per host.
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
    def test_get_servers_joins_pci_devices(self):
        """index() must ask compute_api.get_all() to join pci_devices."""
        # Captured from inside the stub for inspection after the call.
        self.expected_attrs = None
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.expected_attrs = expected_attrs
            return []
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers', use_admin_context=True)
        self.assertIn('servers', self.controller.index(req))
        self.assertIn('pci_devices', self.expected_attrs)
class ServersControllerDeleteTest(ControllerTest):
    """Tests for DELETE /servers/{id}.

    ``db.instance_destroy`` is stubbed so each test can observe whether
    the delete path actually reached the database layer via
    ``self.server_delete_called``.
    """
    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        self.server_delete_called = False
        def instance_destroy_mock(*args, **kwargs):
            # Record the destroy call and return a plausible deleted record.
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
    def _create_delete_request(self, uuid):
        """Build a DELETE request for the given server uuid."""
        fakes.stub_out_instance_quota(self.stubs, 0, 10)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        req.method = 'DELETE'
        return req
    def _delete_server_instance(self, uuid=FAKE_UUID):
        """Issue a delete against an ACTIVE instance with that uuid."""
        req = self._create_delete_request(uuid)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.controller.delete(req, uuid)
    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)
    def test_delete_server_instance_not_found(self):
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid='non-existent-uuid')
    def test_delete_server_instance_while_building(self):
        req = self._create_delete_request(FAKE_UUID)
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)
    def test_delete_locked_server(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(compute_api.API, 'soft_delete',
                       fakes.fake_actions_to_locked_server)
        self.stubs.Set(compute_api.API, 'delete',
                       fakes.fake_actions_to_locked_server)
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)
    def test_delete_server_instance_while_resize(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
                            task_state=task_states.RESIZE_PREP))
        self.controller.delete(req, FAKE_UUID)
        # Delete should be allowed in any case, even during resizing,
        # because it may get stuck.
        self.assertTrue(self.server_delete_called)
    def test_delete_server_instance_if_not_launched(self):
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'DELETE'
        self.server_delete_called = False
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(launched_at=None))
        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)
        self.controller.delete(req, FAKE_UUID)
        # delete() should be called for instance which has never been active,
        # even if reclaim_instance_interval has been set.
        # (assertTrue replaces the non-idiomatic assertEqual(x, True),
        # matching the sibling tests above.)
        self.assertTrue(self.server_delete_called)
class ServersControllerRebuildInstanceTest(ControllerTest):
    """Tests for the rebuild action plus the start/stop server actions.

    A shared rebuild body and request are built in setUp(); individual
    tests mutate ``self.body`` before dispatching.
    """
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v3/fake/images/%s' % image_uuid
    def setUp(self):
        super(ServersControllerRebuildInstanceTest, self).setUp()
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_href,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
    def test_rebuild_instance_with_blank_metadata_key(self):
        """An empty metadata key must fail schema validation."""
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_key_too_long(self):
        """Metadata keys longer than 255 chars must fail validation."""
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_too_long(self):
        """Metadata values longer than 255 chars must fail validation."""
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_not_string(self):
        """Non-string metadata values must fail validation."""
        self.body['rebuild']['metadata']['key1'] = 1
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_fails_when_min_ram_too_small(self):
        # make min_ram larger than our instance ram size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="4096", min_disk="10")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_fails_when_min_disk_too_small(self):
        # make min_disk larger than our instance disk size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="128", min_disk="100000")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_image_too_large(self):
        # make image size larger than our instance disk size
        size = str(1000 * (1024 ** 3))
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', size=size)
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_name_all_blank(self):
        """A whitespace-only name must fail schema validation."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.body['rebuild']['name'] = '     '
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_deleted_image(self):
        """Rebuilding from a DELETED image must yield a 400."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='DELETED')
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_onset_file_limit_over_quota(self):
        """An injected-file quota failure surfaces as 403 Forbidden."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        with contextlib.nested(
            mock.patch.object(fake._FakeImageService, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dumps(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)
    def test_start(self):
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)
    def test_start_policy_failed(self):
        rules = {
            "compute:v3:servers:start":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._start_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:start", exc.format_message())
    def test_start_not_ready(self):
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    def test_start_locked_server(self):
        self.stubs.Set(compute_api.API, 'start',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    def test_start_invalid(self):
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, req, FAKE_UUID, body)
    def test_stop(self):
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)
    def test_stop_policy_failed(self):
        rules = {
            "compute:v3:servers:stop":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop='')
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._stop_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:stop", exc.format_message())
    def test_stop_not_ready(self):
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    def test_stop_locked_server(self):
        self.stubs.Set(compute_api.API, 'stop',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    def test_stop_invalid_state(self):
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        # Bug fix: this stop test previously built a 'start' body,
        # copy-pasted from test_start_invalid.
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, req, FAKE_UUID, body)
    def test_start_with_bogus_id(self):
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._start_server, req, 'test_inst', body)
    def test_stop_with_bogus_id(self):
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._stop_server, req, 'test_inst', body)
class ServersControllerUpdateTest(ControllerTest):
    """Tests for PUT /servers/{id} (server update)."""
    def _get_request(self, body=None, options=None):
        """Build a PUT request; ``options`` primes db.instance_get."""
        if options:
            self.stubs.Set(db, 'instance_get',
                           fakes.fake_instance_get(**options))
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dumps(body)
        return req
    def test_update_server_all_attributes(self):
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_name(self):
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_name_too_long(self):
        # Names over 255 characters must fail schema validation.
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_name_all_blank_spaces(self):
        # A whitespace-only name must fail schema validation.
        self.stubs.Set(db, 'instance_get',
                fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/v3/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dumps(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_admin_password_ignored(self):
        # admin_password is not an updatable field; it must be dropped
        # before the values reach db.instance_update.
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)
        def server_update(context, id, params):
            filtered_dict = {
                'display_name': 'server_test',
            }
            self.assertEqual(params, filtered_dict)
            filtered_dict['uuid'] = id
            return filtered_dict
        self.stubs.Set(db, 'instance_update', server_update)
        # FIXME (comstud)
        #        self.stubs.Set(db, 'instance_get',
        #                return_server_with_attributes(name='server_test'))
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dumps(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_not_found(self):
        def fake_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(compute_api.API, 'get', fake_get)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_not_found_on_update(self):
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(db, 'instance_update_and_get_original', fake_update)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_policy_fail(self):
        rule = {'compute:update': common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller.update, req, FAKE_UUID, body=body)
class ServerStatusTest(test.TestCase):
    """Verify the mapping from internal (vm_state, task_state) pairs to
    the API-level server status string, plus policy checks on the
    related actions.
    """
    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self.stubs)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
    def _get_with_state(self, vm_state, task_state=None):
        """show() a server stubbed into the given state pair."""
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_state,
                                               task_state=task_state))
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        return self.controller.show(req, FAKE_UUID)
    def test_active(self):
        res = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual('ACTIVE', res['server']['status'])
    def test_reboot(self):
        res = self._get_with_state(vm_states.ACTIVE,
                                   task_states.REBOOTING)
        self.assertEqual('REBOOT', res['server']['status'])
    def test_reboot_hard(self):
        res = self._get_with_state(vm_states.ACTIVE,
                                   task_states.REBOOTING_HARD)
        self.assertEqual('HARD_REBOOT', res['server']['status'])
    def test_reboot_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        policy.set_rules(
            {'compute:reboot': common_policy.parse_rule('role:admin')})
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_reboot, req, '1234',
                          {'reboot': {'type': 'HARD'}})
    def test_rebuild(self):
        res = self._get_with_state(vm_states.ACTIVE,
                                   task_states.REBUILDING)
        self.assertEqual('REBUILD', res['server']['status'])
    def test_rebuild_error(self):
        res = self._get_with_state(vm_states.ERROR)
        self.assertEqual('ERROR', res['server']['status'])
    def test_resize(self):
        res = self._get_with_state(vm_states.ACTIVE,
                                   task_states.RESIZE_PREP)
        self.assertEqual('RESIZE', res['server']['status'])
    def test_confirm_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        policy.set_rules(
            {'compute:confirm_resize':
                common_policy.parse_rule('role:admin')})
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
            self.controller._action_confirm_resize, req, '1234', {})
    def test_verify_resize(self):
        res = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual('VERIFY_RESIZE', res['server']['status'])
    def test_revert_resize(self):
        res = self._get_with_state(vm_states.RESIZED,
                                   task_states.RESIZE_REVERTING)
        self.assertEqual('REVERT_RESIZE', res['server']['status'])
    def test_revert_resize_policy_fail(self):
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        policy.set_rules(
            {'compute:revert_resize':
                common_policy.parse_rule('role:admin')})
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
            self.controller._action_revert_resize, req, '1234', {})
    def test_password_update(self):
        res = self._get_with_state(vm_states.ACTIVE,
                                   task_states.UPDATING_PASSWORD)
        self.assertEqual('PASSWORD', res['server']['status'])
    def test_stopped(self):
        res = self._get_with_state(vm_states.STOPPED)
        self.assertEqual('SHUTOFF', res['server']['status'])
class ServersControllerCreateTest(test.TestCase):
    """Tests for POST /servers (instance creation)."""
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTest, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        # Created instances are cached by id and by uuid so the db-layer
        # stubs below can serve them back to the compute API.
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        fakes.stub_out_nw_api(self.stubs)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        def instance_create(context, inst):
            # Stub for db.instance_create: fabricate a DB record and
            # remember it in the caches above.
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
            })
            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance
        def instance_get(context, instance_id):
            """Stub for compute/api create() pulling in instance after
            scheduling
            """
            return self.instance_cache_by_id[instance_id]
        def instance_update(context, uuid, values):
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance
        def server_update(context, instance_uuid, params, update_cells=True):
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return inst
        def server_update_and_get_original(
                context, instance_uuid, params, update_cells=False,
                columns_to_join=None):
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)
        def fake_method(*args, **kwargs):
            # No-op stand-in for db/network calls we don't care about.
            pass
        def project_get_networks(context, user_id):
            return dict(id='1', host='localhost')
        def queue_get_for(context, *args):
            return 'network_topic'
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        # Deterministic uuid generation so tests can assert on FAKE_UUID.
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stubs.Set(db, 'project_get_networks',
                       project_get_networks)
        self.stubs.Set(db, 'instance_create', instance_create)
        self.stubs.Set(db, 'instance_system_metadata_update',
                       fake_method)
        self.stubs.Set(db, 'instance_get', instance_get)
        self.stubs.Set(db, 'instance_update', instance_update)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       server_update_and_get_original)
        self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
                       fake_method)
        # Baseline request body; individual tests tweak it in place.
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self.bdm = [{'delete_on_termination': 1,
                     'device_name': 123,
                     'volume_size': 1,
                     'volume_id': '11111111-1111-1111-1111-111111111111'}]
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
def _check_admin_password_len(self, server_dict):
"""utility function - check server_dict for admin_password length."""
self.assertEqual(CONF.password_length,
len(server_dict["adminPass"]))
def _check_admin_password_missing(self, server_dict):
"""utility function - check server_dict for admin_password absence."""
self.assertNotIn("adminPass", server_dict)
    def _test_create_instance(self, flavor=2):
        """Create a server with the given flavor and verify the response
        carries a generated adminPass and the expected uuid.
        """
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        self.body['server']['imageRef'] = image_uuid
        self.body['server']['flavorRef'] = flavor
        self.req.body = jsonutils.dumps(self.body)
        server = self.controller.create(self.req, body=self.body).obj['server']
        self._check_admin_password_len(server)
        self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_private_flavor(self):
values = {
'name': 'fake_name',
'memory_mb': 512,
'vcpus': 1,
'root_gb': 10,
'ephemeral_gb': 10,
'flavorid': '1324',
'swap': 0,
'rxtx_factor': 0.5,
'vcpu_weight': 1,
'disabled': False,
'is_public': False,
}
db.flavor_create(context.get_admin_context(), values)
self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_instance,
flavor=1324)
def test_create_server_bad_image_href(self):
image_href = 1
self.body['server']['min_count'] = 1
self.body['server']['imageRef'] = image_href,
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create,
self.req, body=self.body)
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-networks extension tests
# def test_create_server_with_invalid_networks_parameter(self):
# self.ext_mgr.extensions = {'os-networks': 'fake'}
# image_href = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
# flavor_ref = 'http://localhost/123/flavors/3'
# body = {
# 'server': {
# 'name': 'server_test',
# 'imageRef': image_href,
# 'flavorRef': flavor_ref,
# 'networks': {'uuid': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'},
# }
# }
# req = fakes.HTTPRequest.blank('/v2/fake/servers')
# req.method = 'POST'
# req.body = jsonutils.dumps(body)
# req.headers["content-type"] = "application/json"
# self.assertRaises(webob.exc.HTTPBadRequest,
# self.controller.create,
# req,
# body)
    def test_create_server_with_deleted_image(self):
        """Booting from an image in DELETED status must yield 400."""
        # Get the fake image service so we can set the status to deleted
        (image_service, image_id) = glance.get_remote_image_service(
            context, '')
        image_service.update(context, self.image_uuid, {'status': 'DELETED'})
        # Restore the shared fake image's status after the test.
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'status': 'active'})
        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dumps(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
            self.controller.create(self.req, body=self.body)
def test_create_server_image_too_large(self):
# Get the fake image service so we can set the status to deleted
(image_service, image_id) = glance.get_remote_image_service(
context, self.image_uuid)
image = image_service.show(context, image_id)
orig_size = image['size']
new_size = str(1000 * (1024 ** 3))
image_service.update(context, self.image_uuid, {'size': new_size})
self.addCleanup(image_service.update, context, self.image_uuid,
{'size': orig_size})
self.body['server']['flavorRef'] = 2
self.req.body = jsonutils.dumps(self.body)
with testtools.ExpectedException(
webob.exc.HTTPBadRequest,
"Flavor's disk is too small for requested image."):
self.controller.create(self.req, body=self.body)
def test_create_instance_image_ref_is_bookmark(self):
image_href = 'http://localhost/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_image_ref_is_invalid(self):
image_uuid = 'this_is_not_a_valid_uuid'
image_href = 'http://localhost/images/%s' % image_uuid
flavor_ref = 'http://localhost/flavors/3'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
self.req, body=self.body)
def test_create_instance_no_key_pair(self):
fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
self._test_create_instance()
def _test_create_extra(self, params, no_image=False):
self.body['server']['flavorRef'] = 2
if no_image:
self.body['server'].pop('imageRef', None)
self.body['server'].update(params)
self.req.body = jsonutils.dumps(self.body)
self.req.headers["content-type"] = "application/json"
self.controller.create(self.req, body=self.body).obj['server']
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-keypairs extension tests
# def test_create_instance_with_keypairs_enabled(self):
# self.ext_mgr.extensions = {'os-keypairs': 'fake'}
# key_name = 'green'
#
# params = {'key_name': key_name}
# old_create = compute_api.API.create
#
# # NOTE(sdague): key pair goes back to the database,
# # so we need to stub it out for tests
# def key_pair_get(context, user_id, name):
# return {'public_key': 'FAKE_KEY',
# 'fingerprint': 'FAKE_FINGERPRINT',
# 'name': name}
#
# def create(*args, **kwargs):
# self.assertEqual(kwargs['key_name'], key_name)
# return old_create(*args, **kwargs)
#
# self.stubs.Set(db, 'key_pair_get', key_pair_get)
# self.stubs.Set(compute_api.API, 'create', create)
# self._test_create_extra(params)
#
# TODO(cyeoh): bp-v3-api-unittests
# This needs to be ported to the os-networks extension tests
# def test_create_instance_with_networks_enabled(self):
# self.ext_mgr.extensions = {'os-networks': 'fake'}
# net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
# requested_networks = [{'uuid': net_uuid}]
# params = {'networks': requested_networks}
# old_create = compute_api.API.create
# def create(*args, **kwargs):
# result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
# self.assertEqual(kwargs['requested_networks'], result)
# return old_create(*args, **kwargs)
# self.stubs.Set(compute_api.API, 'create', create)
# self._test_create_extra(params)
def test_create_instance_with_port_with_no_fixed_ips(self):
port_id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port_id}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortRequiresFixedIP(port_id=port_id)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_raise_user_data_too_large(self, mock_create):
mock_create.side_effect = exception.InstanceUserDataTooLarge(
maxsize=1, length=2)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
self.req, body=self.body)
def test_create_instance_with_network_with_no_subnet(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.NetworkRequiresSubnet(network_uuid=network)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_non_unique_secgroup_name(self):
network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks,
'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}
def fake_create(*args, **kwargs):
raise exception.NoUniqueMatch("No Unique match found for ...")
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
    def test_create_instance_with_networks_disabled_neutronv2(self):
        # With the neutronv2 network API the requested network UUID must be
        # passed through to compute_api.API.create; the wrapper checks its
        # tuple form before delegating to the real create().
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create
        def create(*args, **kwargs):
            # (uuid, fixed_ip, port, pci_request_id) per network entry --
            # only the uuid is expected to be set here.
            result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
                       None, None)]
            self.assertEqual(result, kwargs['requested_networks'].as_tuples())
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)
def test_create_instance_with_networks_disabled(self):
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsNone(kwargs['requested_networks'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_pass_disabled(self):
# test with admin passwords disabled See lp bug 921814
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v3/'
self.flags(enable_instance_password=False)
image_href = 'http://localhost/v2/fake/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_name_too_long(self):
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['name'] = 'X' * 256
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError, self.controller.create,
self.req, body=self.body)
    def test_create_instance_name_all_blank_spaces(self):
        # A name consisting only of whitespace must fail schema validation.
        # proper local hrefs must start with 'http://localhost/v2/'
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        image_href = 'http://localhost/v3/images/%s' % image_uuid
        flavor_ref = 'http://localhost/flavors/3'
        body = {
            'server': {
                'name': ' ' * 64,
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        req = fakes.HTTPRequest.blank('/v3/servers')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
def test_create_instance(self):
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_len(server)
self.assertEqual(FAKE_UUID, server['id'])
    def test_create_instance_extension_create_exception(self):
        # An unexpected exception (KeyError) raised by an extension's
        # server_create hook must surface as a 500 Internal Server Error.
        def fake_keypair_server_create(self, server_dict,
                                       create_kwargs):
            raise KeyError
        self.stubs.Set(keypairs.Keypairs, 'server_create',
                       fake_keypair_server_create)
        # proper local hrefs must start with 'http://localhost/v3/'
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        image_href = 'http://localhost/v3/images/%s' % image_uuid
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        req = fakes.HTTPRequestV3.blank('/servers')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.create, req, body=body)
def test_create_instance_pass_disabled(self):
self.flags(enable_instance_password=False)
# proper local hrefs must start with 'http://localhost/v3/'
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self._check_admin_password_missing(server)
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_too_much_metadata(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata']['vote'] = 'fiddletown'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_too_long(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {('a' * 260): '12345'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_too_long(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'key1': ('a' * 260)}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_blank(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'': 'abcd'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_not_dict(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = 'string'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_key_not_string(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {1: 'test'}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_instance_metadata_value_not_string(self):
self.flags(quota_metadata_items=1)
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.body['server']['metadata'] = {'test': ['a', 'list']}
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(exception.ValidationError,
self.controller.create, self.req, body=self.body)
def test_create_user_data_malformed_bad_request(self):
params = {'user_data': 'u1234'}
self.assertRaises(exception.ValidationError,
self._test_create_extra, params)
def test_create_instance_invalid_key_name(self):
image_href = 'http://localhost/v2/images/2'
self.body['server']['imageRef'] = image_href
self.body['server']['key_name'] = 'nonexistentkey'
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_valid_key_name(self):
self.body['server']['key_name'] = 'key'
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
self.assertEqual(FAKE_UUID, res["server"]["id"])
self._check_admin_password_len(res["server"])
def test_create_instance_invalid_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/asdf'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_invalid_flavor_id_int(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = -1
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_flavor_href(self):
image_href = 'http://localhost/v2/images/2'
flavor_ref = 'http://localhost/v2/flavors/17'
self.body['server']['imageRef'] = image_href
self.body['server']['flavorRef'] = flavor_ref
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_bad_href(self):
image_href = 'asdf'
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, body=self.body)
def test_create_instance_local_href(self):
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_admin_password(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = 'testpass'
self.req.body = jsonutils.dumps(self.body)
res = self.controller.create(self.req, body=self.body).obj
server = res['server']
self.assertEqual(server['adminPass'],
self.body['server']['adminPass'])
    def test_create_instance_admin_password_pass_disabled(self):
        # NOTE(review): this only checks that the *request* body still has
        # adminPass and that a server was returned; it never asserts that
        # the *response* omits adminPass -- confirm whether that was the
        # intent of the "pass_disabled" case.
        self.flags(enable_instance_password=False)
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = 'testpass'
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        self.assertIn('server', res)
        self.assertIn('adminPass', self.body['server'])
def test_create_instance_admin_password_empty(self):
self.body['server']['flavorRef'] = 3
self.body['server']['adminPass'] = ''
self.req.body = jsonutils.dumps(self.body)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, body=self.body)
def test_create_location(self):
selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
self.req.body = jsonutils.dumps(self.body)
robj = self.controller.create(self.req, body=self.body)
self.assertEqual(robj['Location'], selfhref)
def _do_test_create_instance_above_quota(self, resource, allowed, quota,
expected_msg):
fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
self.body['server']['flavorRef'] = 3
self.req.body = jsonutils.dumps(self.body)
try:
self.controller.create(self.req, body=self.body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
def test_create_instance_above_quota_instances(self):
msg = _('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = _('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = _('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
    def test_create_instance_above_quota_server_group_members(self):
        # Exceeding the server_group_members quota must yield 403 with a
        # specific explanation when booting into a scheduler-hint group.
        ctxt = context.get_admin_context()
        fake_group = objects.InstanceGroup(ctxt)
        fake_group.create()
        # Verify the quota count is requested for this group and user.
        def fake_count(context, name, group, user_id):
            self.assertEqual(name, "server_group_members")
            self.assertEqual(group.uuid, fake_group.uuid)
            self.assertEqual(user_id,
                             self.req.environ['nova.context'].user_id)
            return 10
        # Simulate the members limit check failing.
        def fake_limit_check(context, **kwargs):
            if 'server_group_members' in kwargs:
                raise exception.OverQuota(overs={})
        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stubs.Set(fakes.QUOTAS, 'count', fake_count)
        self.stubs.Set(fakes.QUOTAS, 'limit_check', fake_limit_check)
        self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': fake_group.uuid}
        self.req.body = jsonutils.dumps(self.body)
        expected_msg = "Quota exceeded, too many servers in group"
        try:
            self.controller.create(self.req, body=self.body).obj
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)
    def test_create_instance_above_quota_server_groups(self):
        # Exceeding the server_groups quota must yield 403 with a specific
        # explanation when the scheduler hint names a new group.
        def fake_reserve(contex, **deltas):
            if 'server_groups' in deltas:
                raise exception.OverQuota(overs={})
        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stubs.Set(fakes.QUOTAS, 'reserve', fake_reserve)
        self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': 'fake_group'}
        self.req.body = jsonutils.dumps(self.body)
        expected_msg = "Quota exceeded, too many server groups."
        try:
            self.controller.create(self.req, body=self.body).obj
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)
def test_create_instance_with_neutronv2_port_in_use(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortInUse(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_public_network_non_admin(self, mock_create):
public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
params = {'networks': [{'uuid': public_network_uuid}]}
self.req.body = jsonutils.dumps(self.body)
mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
network_uuid=public_network_uuid)
self.assertRaises(webob.exc.HTTPForbidden,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_multiple_instance_with_specified_ip_neutronv2(self,
_api_mock):
_api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
reason="")
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
address = '10.0.0.1'
requested_networks = [{'uuid': network, 'fixed_ip': address,
'port': port}]
params = {'networks': requested_networks}
self.body['server']['max_count'] = 2
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
    def test_create_multiple_instance_with_neutronv2_port(self):
        # Booting multiple instances against a single pre-created port must
        # map MultiplePortsNotApplicable to 400.
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        params = {'networks': requested_networks}
        self.body['server']['max_count'] = 2
        def fake_create(*args, **kwargs):
            msg = _("Unable to launch multiple instances with"
                    " a single configured port ID. Please launch your"
                    " instance one by one with different ports.")
            raise exception.MultiplePortsNotApplicable(reason=msg)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
    def test_create_instance_with_neturonv2_not_found_network(self):
        # NetworkNotFound from the compute API must map to 400.
        # NOTE(review): "neturonv2" in the method name is a typo for
        # "neutronv2"; kept as-is to avoid renaming a discovered test.
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        params = {'networks': requested_networks}
        def fake_create(*args, **kwargs):
            raise exception.NetworkNotFound(network_id=network)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
def test_create_instance_with_neutronv2_port_not_found(self):
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortNotFound(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_instance_with_network_ambiguous(self, mock_create):
mock_create.side_effect = exception.NetworkAmbiguous()
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, {})
@mock.patch.object(compute_api.API, 'create',
side_effect=exception.InstanceExists(
name='instance-name'))
def test_create_instance_raise_instance_exists(self, mock_create):
self.assertRaises(webob.exc.HTTPConflict,
self.controller.create,
self.req, body=self.body)
class ServersControllerCreateTestWithMock(test.TestCase):
    """Server-create tests that stub compute_api.API.create with mock."""
    # Shared fixture values used to build the request body.
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'
    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
    def _test_create_extra(self, params, no_image=False):
        # Merge *params* into the server body and POST it to the controller.
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dumps(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']
    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_fixed_ip_already_in_use(self,
            create_mock):
        # FixedIpAlreadyInUse from the compute API must map to 400, and the
        # compute API must have been called exactly once.
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        address = '10.0.2.3'
        requested_networks = [{'uuid': network, 'fixed_ip': address}]
        params = {'networks': requested_networks}
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=address,
            instance_uuid=network)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
        self.assertEqual(1, len(create_mock.call_args_list))
    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_create_instance_with_invalid_volume_error(self, create_mock):
        # Tests that InvalidVolume is translated to a 400 error.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
    def setUp(self):
        """Build a stubbed instance and a ViewBuilderV3 for the tests."""
        super(ServersViewBuilderTest, self).setUp()
        CONF.set_override('host', 'localhost', group='glance')
        self.flags(use_ipv6=True)
        db_inst = fakes.stub_instance(
            id=1,
            image_ref="5",
            uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
            display_name="test_server",
            include_fake_metadata=False)
        privates = ['172.19.0.1']
        publics = ['192.168.0.3']
        public6s = ['b33f::fdee:ddff:fecc:bbaa']
        # Fake network info: one public net (v4 + v6) and one private net.
        def nw_info(*args, **kwargs):
            return [(None, {'label': 'public',
                            'ips': [dict(ip=ip) for ip in publics],
                            'ip6s': [dict(ip=ip) for ip in public6s]}),
                    (None, {'label': 'private',
                            'ips': [dict(ip=ip) for ip in privates]})]
        # No floating IPs are associated in these tests.
        def floaters(*args, **kwargs):
            return []
        fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
        fakes.stub_out_nw_api_get_floating_ips_by_fixed_address(self.stubs,
                                                                floaters)
        self.uuid = db_inst['uuid']
        self.view_builder = views.servers.ViewBuilderV3()
        self.request = fakes.HTTPRequestV3.blank("")
        self.request.context = context.RequestContext('fake', 'fake')
        self.instance = fake_instance.fake_instance_obj(
            self.request.context,
            expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
            **db_inst)
def test_get_flavor_valid_instance_type(self):
flavor_bookmark = "http://localhost/flavors/1"
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, expected)
    def test_build_server(self):
        # The basic() view contains only id, name and self/bookmark links.
        self_link = "http://localhost/v3/servers/%s" % self.uuid
        bookmark_link = "http://localhost/servers/%s" % self.uuid
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_with_project_id(self):
        # NOTE(review): this asserts the same basic() output as
        # test_build_server; presumably the request fixture is meant to
        # carry a project id in its path -- confirm the intent.
        expected_server = {
            "server": {
                "id": self.uuid,
                "name": "test_server",
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v3/servers/%s" %
                                self.uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/servers/%s" % self.uuid,
                    },
                ],
            }
        }
        output = self.view_builder.basic(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail(self):
        # The show() view contains full detail: status, image and flavor
        # bookmarks, addresses, metadata and self/bookmark links.
        image_bookmark = "http://localhost/images/5"
        flavor_bookmark = "http://localhost/flavors/1"
        self_link = "http://localhost/v3/servers/%s" % self.uuid
        bookmark_link = "http://localhost/servers/%s" % self.uuid
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "BUILD",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault(self):
        # An instance in ERROR state exposes a 'fault' section; here the
        # fault details are visible to this (non-admin) request context.
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context, self.uuid)
        image_bookmark = "http://localhost/images/5"
        flavor_bookmark = "http://localhost/flavors/1"
        self_link = "http://localhost/v3/servers/%s" % self.uuid
        bookmark_link = "http://localhost/servers/%s" % self.uuid
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "name": "test_server",
                "status": "ERROR",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": bookmark_link,
                    },
                ],
                "fault": {
                    "code": 404,
                    "created": "2010-10-10T12:00:00Z",
                    "message": "HTTPNotFound",
                    "details": "Stock details for test",
                },
            }
        }
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_fault_that_has_been_deleted(self):
        # A deleted instance must report status DELETED while its fault is
        # still exposed.
        self.instance['deleted'] = 1
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "No valid host was found"}
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        # Regardless of vm_state deleted servers should be DELETED
        self.assertEqual("DELETED", output['server']['status'])
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
    def test_build_server_detail_with_fault_no_details_not_admin(self):
        # A code-500 fault shown to a non-admin omits the 'details' key.
        self.instance['vm_state'] = vm_states.ERROR
        self.instance['fault'] = fake_instance.fake_fault_obj(
            self.request.context,
            self.uuid,
            code=500,
            message='Error')
        expected_fault = {"code": 500,
                          "created": "2010-10-10T12:00:00Z",
                          "message": "Error"}
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output['server']['fault'],
                        matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error',
details='')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
output = self.view_builder.show(self.request, self.instance)
self.assertNotIn('fault', output['server'])
    def test_build_server_detail_active_status(self):
        """show() for a running instance reports ACTIVE and progress 100."""
        # set the power state of the instance to running
        self.instance['vm_state'] = vm_states.ACTIVE
        self.instance['progress'] = 100
        image_bookmark = "http://localhost/images/5"
        flavor_bookmark = "http://localhost/flavors/1"
        self_link = "http://localhost/v3/servers/%s" % self.uuid
        bookmark_link = "http://localhost/servers/%s" % self.uuid
        # Full expected response body, including image/flavor bookmark
        # links and the cached network addresses.
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 100,
                "name": "test_server",
                "status": "ACTIVE",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {},
                "links": [
                    {
                        "rel": "self",
                        "href": self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
    def test_build_server_detail_with_metadata(self):
        """Instance metadata rows are flattened into the 'metadata' dict."""
        metadata = []
        metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
        metadata = nova_utils.metadata_to_dict(metadata)
        self.instance['metadata'] = metadata
        image_bookmark = "http://localhost/images/5"
        flavor_bookmark = "http://localhost/flavors/1"
        self_link = "http://localhost/v3/servers/%s" % self.uuid
        bookmark_link = "http://localhost/servers/%s" % self.uuid
        # Same body as the other detail tests except for 'metadata' (and
        # the default BUILD status / progress 0 of the stub instance).
        expected_server = {
            "server": {
                "id": self.uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": 0,
                "name": "test_server",
                "status": "BUILD",
                "hostId": '',
                "image": {
                    "id": "5",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    ]
                },
                "metadata": {"Open": "Stack"},
                "links": [
                    {
                        "rel": "self",
                        "href": self_link,
                    },
                    {
                        "rel": "bookmark",
                        "href": bookmark_link,
                    },
                ],
            }
        }
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output, matchers.DictMatches(expected_server))
class ServersAllExtensionsTestCase(test.TestCase):
    """Servers tests run through the default API router with every
    extension enabled.
    The intent is to catch extensions that end up throwing an exception
    because of a malformed request before the core API gets a chance to
    validate the request and return a 422 response.
    For example, AccessIPsController extends servers.Controller::
        | @wsgi.extends
        | def create(self, req, resp_obj, body):
        |     context = req.environ['nova.context']
        |     if authorize(context) and 'server' in resp_obj.obj:
        |         resp_obj.attach(xml=AccessIPTemplate())
        |         server = resp_obj.obj['server']
        |         self._extend_server(req, server)
    We want to ensure the extension isn't barfing on an invalid body.
    """
    def setUp(self):
        super(ServersAllExtensionsTestCase, self).setUp()
        self.app = compute.APIRouterV3()
    def _assert_malformed_body_rejected(self, method, url, api_method):
        """Send a malformed body; the router must answer 400 without the
        compute API ever being reached."""
        def fail_if_called(*args, **kwargs):
            raise test.TestingException("Should not reach the compute API.")
        self.stubs.Set(compute_api.API, api_method, fail_if_called)
        req = fakes.HTTPRequestV3.blank(url)
        req.method = method
        req.content_type = 'application/json'
        req.body = jsonutils.dumps({'foo': {'a': 'b'}})
        res = req.get_response(self.app)
        self.assertEqual(400, res.status_int)
    def test_create_missing_server(self):
        # Create with a body that lacks the 'server' key.
        self._assert_malformed_body_rejected('POST', '/servers', 'create')
    def test_update_missing_server(self):
        # Update with a body that lacks the 'server' key.
        self._assert_malformed_body_rejected('PUT', '/servers/1', 'update')
class ServersInvalidRequestTestCase(test.TestCase):
    """Tests of places we throw 400 Bad Request from."""
    def setUp(self):
        super(ServersInvalidRequestTestCase, self).setUp()
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
    def _invalid_server_create(self, body):
        # POST a body that must be rejected by the create schema.
        req = fakes.HTTPRequestV3.blank('/servers')
        req.method = 'POST'
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)
    def test_create_server_no_body(self):
        self._invalid_server_create(body=None)
    def test_create_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._invalid_server_create(body=body)
    def test_create_server_malformed_entity(self):
        body = {'server': 'string'}
        self._invalid_server_create(body=body)
    def _unprocessable_server_update(self, body):
        # PUT a body that must be rejected by the update schema.  The
        # controller's update is schema-validated (see base_update in
        # servers_schema), so ValidationError is the expected failure,
        # matching _invalid_server_create above.
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        self.assertRaises(exception.ValidationError,
                          self.controller.update, req, FAKE_UUID, body=body)
    def test_update_server_no_body(self):
        # BUG FIX: the three update tests previously called
        # _invalid_server_create, duplicating the create tests and leaving
        # _unprocessable_server_update dead code; route them through the
        # PUT helper so the update path is actually exercised.
        self._unprocessable_server_update(body=None)
    def test_update_server_missing_server(self):
        body = {'foo': {'a': 'b'}}
        self._unprocessable_server_update(body=body)
    def test_create_update_malformed_entity(self):
        body = {'server': 'string'}
        self._unprocessable_server_update(body=body)
class FakeExt(extensions.V3APIExtensionBase):
    """Minimal stand-in extension, substituted for AccessIPs in tests."""
    name = "AccessIPs"
    alias = 'os-access-ips'
    version = 1
    def fake_extension_point(self, *args, **kwargs):
        # No-op; tests bind this under server_<name> /
        # get_server_<name>_schema attribute names via setattr.
        pass
    def get_controller_extensions(self):
        # This fake contributes no controller extensions.
        return []
    def get_resources(self):
        # This fake contributes no new resources.
        return []
class TestServersExtensionPoint(test.NoDBTestCase):
    """Verify ServersController discovers whitelisted extension points."""
    def setUp(self):
        super(TestServersExtensionPoint, self).setUp()
        CONF.set_override('extensions_whitelist', ['os-access-ips'],
                          'osapi_v3')
        self.stubs.Set(access_ips, 'AccessIPs', FakeExt)
    def _test_load_extension_point(self, name):
        # Temporarily give FakeExt a server_<name> hook so the
        # controller's <name>_extension_manager picks it up during
        # construction.
        setattr(FakeExt, 'server_%s' % name,
                FakeExt.fake_extension_point)
        ext_info = plugins.LoadedExtensionInfo()
        controller = servers.ServersController(extension_info=ext_info)
        self.assertEqual(
            'os-access-ips',
            list(getattr(controller,
                         '%s_extension_manager' % name))[0].obj.alias)
        # Remove the class-level attribute so other tests are unaffected.
        delattr(FakeExt, 'server_%s' % name)
    def test_load_update_extension_point(self):
        self._test_load_extension_point('update')
    def test_load_rebuild_extension_point(self):
        self._test_load_extension_point('rebuild')
    def test_load_create_extension_point(self):
        self._test_load_extension_point('create')
class TestServersExtensionSchema(test.NoDBTestCase):
    """Verify extension-contributed JSON schemas are merged in."""
    def setUp(self):
        super(TestServersExtensionSchema, self).setUp()
        CONF.set_override('extensions_whitelist', ['keypairs'], 'osapi_v3')
    def _test_load_extension_schema(self, name):
        # Temporarily give FakeExt a schema hook, build the controller,
        # then return the combined schema it assembled.
        setattr(FakeExt, 'get_server_%s_schema' % name,
                FakeExt.fake_extension_point)
        ext_info = plugins.LoadedExtensionInfo()
        controller = servers.ServersController(extension_info=ext_info)
        self.assertTrue(hasattr(controller, '%s_schema_manager' % name))
        # Clean up the class-level attribute before returning.
        delattr(FakeExt, 'get_server_%s_schema' % name)
        return getattr(controller, 'schema_server_%s' % name)
    def test_load_create_extension_point(self):
        # The expected is the schema combination of base and keypairs
        # because of the above extensions_whitelist.
        expected_schema = copy.deepcopy(servers_schema.base_create)
        expected_schema['properties']['server']['properties'].update(
            keypairs_schema.server_create)
        actual_schema = self._test_load_extension_schema('create')
        self.assertEqual(expected_schema, actual_schema)
    def test_load_update_extension_point(self):
        # keypair extension does not contain update_server() and
        # here checks that any extension is not added to the schema.
        expected_schema = copy.deepcopy(servers_schema.base_update)
        actual_schema = self._test_load_extension_schema('update')
        self.assertEqual(expected_schema, actual_schema)
    def test_load_rebuild_extension_point(self):
        # keypair extension does not contain rebuild_server() and
        # here checks that any extension is not added to the schema.
        expected_schema = copy.deepcopy(servers_schema.base_rebuild)
        actual_schema = self._test_load_extension_schema('rebuild')
        self.assertEqual(expected_schema, actual_schema)
| 41.663459 | 79 | 0.585652 |
import base64
import contextlib
import copy
import datetime
import uuid
import iso8601
import mock
import mox
from oslo.config import cfg
from oslo.utils import timeutils
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack import compute
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import access_ips
from nova.api.openstack.compute.plugins.v3 import ips
from nova.api.openstack.compute.plugins.v3 import keypairs
from nova.api.openstack.compute.plugins.v3 import servers
from nova.api.openstack.compute.schemas.v3 import keypairs as keypairs_schema
from nova.api.openstack.compute.schemas.v3 import servers as servers_schema
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.i18n import _
from nova.image import glance
from nova.network import manager
from nova.network.neutronv2 import api as neutron_api
from nova import objects
from nova.objects import instance as instance_obj
from nova.openstack.common import jsonutils
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests.image import fake
from nova.tests import matchers
from nova import utils as nova_utils
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
FAKE_UUID = fakes.FAKE_UUID
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def fake_gen_uuid():
    # Deterministic stand-in for uuid generation in the stubs below.
    return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
    # db-layer stub: no instances match any filter.
    return []
def instance_update_and_get_original(context, instance_uuid, values,
                                     update_cells=True,
                                     columns_to_join=None,
                                     ):
    """db-layer stub: overlay *values* on a stubbed instance.
    Returns the same dict twice, standing in for the real API's
    (old, updated) pair.
    """
    stub = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    updated = dict(stub, **values)
    return (updated, updated)
def instance_update(context, instance_uuid, values, update_cells=True):
    """db-layer stub: return a stubbed instance overlaid with *values*."""
    base = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                               name=values.get('display_name'))
    return dict(base, **values)
def fake_compute_api(cls, req, id):
    # Generic compute-API action stub; always reports success.
    return True
def fake_start_stop_not_ready(self, context, instance):
    # start/stop stub: the instance is not yet ready for the action.
    raise exception.InstanceNotReady(instance_id=instance["uuid"])
def fake_start_stop_invalid_state(self, context, instance):
    # start/stop stub: the instance is in a state that conflicts with
    # the requested action.
    raise exception.InstanceInvalidState(
        instance_uuid=instance['uuid'], attr='fake_attr',
        method='fake_method', state='fake_state')
def fake_instance_get_by_uuid_not_found(context, uuid,
                                        columns_to_join, use_slave=False):
    # db-layer stub: every instance lookup misses.
    raise exception.InstanceNotFound(instance_id=uuid)
class MockSetAdminPassword(object):
    """Callable spy recording the (instance_id, password) it was last
    invoked with."""
    def __init__(self):
        # Nothing recorded until the first call.
        self.instance_id = self.password = None
    def __call__(self, context, instance_id, password):
        # Keep the most recent call's arguments for later assertions.
        self.instance_id = instance_id
        self.password = password
class Base64ValidationTest(test.TestCase):
    """Tests for ServersController._decode_base64 input validation."""
    # NOTE(review): the str-based base64 usage here is Python 2 semantics
    # (the file also uses xrange elsewhere) — confirm before porting.
    def setUp(self):
        super(Base64ValidationTest, self).setUp()
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
    def test_decode_base64(self):
        value = "A random string"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)
    def test_decode_base64_binary(self):
        value = "\x00\x12\x75\x99"
        result = self.controller._decode_base64(base64.b64encode(value))
        self.assertEqual(result, value)
    def test_decode_base64_whitespace(self):
        # Whitespace embedded in the encoded payload must be tolerated.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertEqual(result, value)
    def test_decode_base64_invalid(self):
        # A plain (non-base64) string must yield None, not raise.
        invalid = "A random string"
        result = self.controller._decode_base64(invalid)
        self.assertIsNone(result)
    def test_decode_base64_illegal_bytes(self):
        # Characters outside the base64 alphabet invalidate the payload.
        value = "A random string"
        encoded = base64.b64encode(value)
        white = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
        result = self.controller._decode_base64(white)
        self.assertIsNone(result)
class NeutronV2Subclass(neutron_api.API):
    """Empty subclass used to check neutronv2 subclass detection."""
    pass
class ControllerTest(test.TestCase):
    """Shared fixture: stubs out rate limiting, key pairs, the image
    service and the instance db calls, then builds the v3 servers and
    ips controllers."""
    def setUp(self):
        super(ControllerTest, self).setUp()
        self.flags(verbose=True, use_ipv6=False)
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        return_server = fakes.fake_instance_get()
        return_servers = fakes.fake_instance_get_all_by_filters()
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_server)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.ips_controller = ips.IPsController()
        policy.reset()
        policy.init()
        # Network info comes from the stubbed nw cache, not a live network.
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
class ServersControllerTest(ControllerTest):
    def setUp(self):
        super(ServersControllerTest, self).setUp()
        # Image bookmark links in the tests below are built against the
        # glance host.
        CONF.set_override('host', 'localhost', group='glance')
def test_requested_networks_prefix(self):
uuid = 'br-00000000-0000-0000-0000-000000000000'
requested_networks = [{'uuid': uuid}]
res = self.controller._get_requested_networks(requested_networks)
self.assertIn((uuid, None), res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(network, None, None, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
res = self.controller._get_requested_networks(requested_networks)
self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_requested_networks_neutronv2_enabled_conflict_on_fixed_ip(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
addr = '10.0.0.1'
requested_networks = [{'uuid': network,
'fixed_ip': addr,
'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
def test_requested_networks_neutronv2_disabled_with_port(self):
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'port': port}]
self.assertRaises(
webob.exc.HTTPBadRequest,
self.controller._get_requested_networks,
requested_networks)
    def test_requested_networks_api_enabled_with_v2_subclass(self):
        # NOTE(review): despite the name, this flags the base neutronv2
        # API class; the subclass variant is covered by
        # test_requested_networks_neutronv2_subclass_with_port below.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        # With a port supplied, the network uuid is dropped from the tuple.
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_requested_networks_neutronv2_subclass_with_port(self):
        # A subclass of the neutronv2 API must be treated like neutron
        # when parsing requested networks.
        cls = 'nova.tests.api.openstack.compute.test_servers.NeutronV2Subclass'
        self.flags(network_api_class=cls)
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
def test_get_server_by_uuid(self):
req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
res_dict = self.controller.show(req, FAKE_UUID)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
    def test_get_server_joins_pci_devices(self):
        # show() must include 'pci_devices' in the expected_attrs it
        # passes to compute_api.API.get.
        self.expected_attrs = None
        def fake_get(_self, *args, **kwargs):
            # Capture the expected_attrs the controller requested.
            self.expected_attrs = kwargs['expected_attrs']
            ctxt = context.RequestContext('fake', 'fake')
            return fake_instance.fake_instance_obj(ctxt)
        self.stubs.Set(compute_api.API, 'get', fake_get)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        self.controller.show(req, FAKE_UUID)
        self.assertIn('pci_devices', self.expected_attrs)
    def test_unique_host_id(self):
        """hostId must differ across projects on the same host."""
        def return_instance_with_host(self, *args, **kwargs):
            # A fresh random project_id on every fetch, same host.
            project_id = str(uuid.uuid4())
            return fakes.stub_instance(id=1, uuid=FAKE_UUID,
                                       project_id=project_id,
                                       host='fake_host')
        self.stubs.Set(db, 'instance_get_by_uuid',
                       return_instance_with_host)
        self.stubs.Set(db, 'instance_get',
                       return_instance_with_host)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        server1 = self.controller.show(req, FAKE_UUID)
        server2 = self.controller.show(req, FAKE_UUID)
        # Same host, different project_id per call -> hostIds must differ.
        self.assertNotEqual(server1['server']['hostId'],
                            server2['server']['hostId'])
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        """Build the full expected 'server' response body for *uuid*."""
        return {
            "server": {
                "id": uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": progress,
                "name": "server1",
                "status": status,
                "hostId": '',
                "image": {
                    "id": "10",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "1",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                        {'version': 6, 'addr': '2001:db8:0:1::1',
                         'OS-EXT-IPS:type': 'fixed',
                         'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
                    ]
                },
                "metadata": {
                    "seq": "1",
                },
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v3/servers/%s" % uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/servers/%s" % uuid,
                    },
                ],
            }
        }
    def test_get_server_by_id(self):
        self.flags(use_ipv6=True)
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        # The default stub instance is still building.
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_with_active_status_by_id(self):
        image_bookmark = "http://localhost/images/10"
        flavor_bookmark = "http://localhost/flavors/1"
        # Swap in a stub instance that is already running.
        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_with_id_image_ref_by_id(self):
        # Explicit image_ref/flavor_id on the stub instance must flow
        # through to the image and flavor bookmark links.
        image_ref = "10"
        image_bookmark = "http://localhost/images/10"
        flavor_id = "1"
        flavor_bookmark = "http://localhost/flavors/1"
        new_return_server = fakes.fake_instance_get(
            vm_state=vm_states.ACTIVE, image_ref=image_ref,
            flavor_id=flavor_id, progress=100)
        self.stubs.Set(db, 'instance_get_by_uuid', new_return_server)
        uuid = FAKE_UUID
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_addresses_from_cache(self):
        """The ips index flattens the cached network info into
        per-network lists of version/addr/type/mac entries."""
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)
        def _ip(ip):
            # Shape of a single cached fixed-IP entry.
            return {'address': ip, 'type': 'fixed'}
        # Two cached vifs: 'public' with three subnets, 'private' with one.
        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                     {'cidr': '1.2.3.0/16',
                                      'ips': [_ip(ip) for ip in pub1]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return_server = fakes.fake_instance_get(nw_cache=nw_cache)
        self.stubs.Set(db, 'instance_get_by_uuid', return_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % FAKE_UUID)
        res_dict = self.ips_controller.index(req, FAKE_UUID)
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                    {'version': 4, 'addr': '192.168.0.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'bb:bb:bb:bb:bb:bb'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '172.19.0.2',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 4, 'addr': '1.2.3.4',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa',
                     'OS-EXT-IPS:type': 'fixed',
                     'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
                ],
            },
        }
        self.assertThat(res_dict, matchers.DictMatches(expected))
def test_get_server_addresses_nonexistent_network(self):
url = '/v3/servers/%s/ips/network_0' % FAKE_UUID
req = fakes.HTTPRequestV3.blank(url)
self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
req, FAKE_UUID, 'network_0')
def test_get_server_addresses_nonexistent_server(self):
def fake_instance_get(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
server_id = str(uuid.uuid4())
req = fakes.HTTPRequestV3.blank('/servers/%s/ips' % server_id)
self.assertRaises(webob.exc.HTTPNotFound,
self.ips_controller.index, req, server_id)
def test_get_server_list_empty(self):
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_empty)
req = fakes.HTTPRequestV3.blank('/servers')
res_dict = self.controller.index(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
def test_get_server_list_with_reservation_id(self):
req = fakes.HTTPRequestV3.blank('/servers?reservation_id=foo')
res_dict = self.controller.index(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_empty(self):
req = fakes.HTTPRequestV3.blank('/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_details(self):
req = fakes.HTTPRequestV3.blank('/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
    def test_get_server_list(self):
        """index() returns all five stubbed servers with id, name and
        self/bookmark links, but no image detail."""
        req = fakes.HTTPRequestV3.blank('/servers')
        res_dict = self.controller.index(req)
        self.assertEqual(len(res_dict['servers']), 5)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertIsNone(s.get('image', None))
            expected_links = [
                {
                    "rel": "self",
                    "href": "http://localhost/v3/servers/%s" % s['id'],
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/servers/%s" % s['id'],
                },
            ]
            self.assertEqual(s['links'], expected_links)
    def test_get_servers_with_limit(self):
        """A limited index carries a 'next' link with limit and marker."""
        req = fakes.HTTPRequestV3.blank('/servers?limit=3')
        res_dict = self.controller.index(req)
        servers = res_dict['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res_dict['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        # The next link must point at the same route, with the marker set
        # to the last returned server.
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected_params = {'limit': ['3'],
                           'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = fakes.HTTPRequestV3.blank('/servers?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_server_details_empty(self):
self.stubs.Set(db, 'instance_get_all_by_filters',
return_servers_empty)
req = fakes.HTTPRequestV3.blank('/servers/detail')
res_dict = self.controller.detail(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
    def test_get_server_details_with_limit(self):
        """A limited detail() carries a 'next' link on the detail route."""
        req = fakes.HTTPRequestV3.blank('/servers/detail?limit=3')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = fakes.HTTPRequestV3.blank('/servers/detail?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
    def test_get_server_details_with_limit_and_other_params(self):
        """Extra query parameters are preserved in the 'next' link."""
        req = fakes.HTTPRequestV3.blank('/servers/detail'
                                        '?limit=3&blah=2:t')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in xrange(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v3/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'blah': ['2:t'],
                    'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = fakes.HTTPRequestV3.blank('/servers?limit=30')
res_dict = self.controller.index(req)
self.assertNotIn('servers_links', res_dict)
def test_get_servers_with_bad_limit(self):
req = fakes.HTTPRequestV3.blank('/servers?limit=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '/v3/servers?marker=%s' % fakes.get_fake_uuid(2)
req = fakes.HTTPRequestV3.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = '/v3/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
req = fakes.HTTPRequestV3.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = fakes.HTTPRequestV3.blank('/servers?limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
    def test_get_servers_with_bad_option(self):
        # Unknown query options are ignored; the listing still succeeds.
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?unknownoption=whee')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_image(self):
        # An 'image' query option must be forwarded to get_all as a
        # search option.
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('image', search_opts)
            self.assertEqual(search_opts['image'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?image=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_tenant_id_filter_converts_to_project_id_for_admin(self):
        # For an admin, tenant_id in the query string must be mapped to a
        # project_id db filter and not passed through verbatim.
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers'
                                        '?all_tenants=1&tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
def test_tenant_id_filter_no_admin_context(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None,
columns_to_join=None, use_slave=False,
expected_attrs=None):
self.assertNotEqual(filters, None)
self.assertEqual(filters['project_id'], 'fake')
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake')
res = self.controller.index(req)
self.assertIn('servers', res)
    def test_tenant_id_filter_implies_all_tenants(self):
        """For an admin, tenant_id alone implies an all-tenants listing
        scoped to that project."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotEqual(filters, None)
            self.assertEqual(filters['project_id'], 'newfake')
            self.assertFalse(filters.get('tenant_id'))
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?tenant_id=newfake',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_normal(self):
        """A bare ?all_tenants (no value) drops the project_id filter."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_one(self):
        """?all_tenants=1 drops the project_id filter for an admin."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_zero(self):
        """?all_tenants=0 is falsy: the option never reaches the DB."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=0',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_param_false(self):
        """?all_tenants=false is falsy: the option never reaches the DB."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertNotIn('all_tenants', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=false',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
def test_all_tenants_param_invalid(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None,
columns_to_join=None,
expected_attrs=None):
self.assertNotIn('all_tenants', filters)
return [fakes.stub_instance(100)]
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequestV3.blank('/servers?all_tenants=xxx',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
    def test_admin_restricted_tenant(self):
        """An admin without all_tenants is still scoped to own project."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertEqual(filters['project_id'], 'fake')
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers',
                                        use_admin_context=True)
        res = self.controller.index(req)
        self.assertIn('servers', res)
    def test_all_tenants_pass_policy(self):
        """all_tenants succeeds when policy allows it; project scoping is
        dropped from the DB filters."""
        def fake_get_all(context, filters=None, sort_key=None,
                         sort_dir='desc', limit=None, marker=None,
                         columns_to_join=None, use_slave=False,
                         expected_attrs=None):
            self.assertIsNotNone(filters)
            self.assertNotIn('project_id', filters)
            return [fakes.stub_instance(100)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       fake_get_all)
        # Grant both the all-tenants and plain listing policies to the
        # request's own project so the non-admin request passes.
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
        res = self.controller.index(req)
        self.assertIn('servers', res)
def test_all_tenants_fail_policy(self):
def fake_get_all(context, filters=None, sort_key=None,
sort_dir='desc', limit=None, marker=None,
columns_to_join=None):
self.assertIsNotNone(filters)
return [fakes.stub_instance(100)]
rules = {
"compute:get_all_tenants":
common_policy.parse_rule("project_id:non_fake"),
"compute:get_all":
common_policy.parse_rule("project_id:fake"),
}
policy.set_rules(rules)
self.stubs.Set(db, 'instance_get_all_by_filters',
fake_get_all)
req = fakes.HTTPRequestV3.blank('/servers?all_tenants=1')
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
    def test_get_servers_allows_flavor(self):
        """index() forwards ?flavor= as the 'flavor' search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            self.assertEqual(search_opts['flavor'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?flavor=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_with_bad_flavor(self):
req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_server_details_with_bad_flavor(self):
req = fakes.HTTPRequestV3.blank('/servers?flavor=abcde')
servers = self.controller.detail(req)['servers']
self.assertThat(servers, testtools.matchers.HasLength(0))
    def test_get_servers_allows_status(self):
        """?status=active is translated to the ACTIVE vm_state filter."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=active')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_task_status(self):
        """?status=reboot expands into the full set of reboot task states."""
        server_uuid = str(uuid.uuid4())
        task_state = task_states.REBOOTING
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            # One user-visible 'reboot' status maps to several task states.
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            db_list = [fakes.stub_instance(100, uuid=server_uuid,
                                           task_state=task_state)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=reboot')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_resize_status(self):
        """?status=resize maps to the ACTIVE and STOPPED vm_states."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'],
                             [vm_states.ACTIVE, vm_states.STOPPED])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=resize')
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_invalid_status(self):
req = fakes.HTTPRequestV3.blank('/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins may filter by status=deleted; it maps to vm_state
        'deleted'."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?status=deleted',
                                        use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_name(self):
        """index() forwards ?name= (a regex) as the 'name' search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_changes_since(self):
        """?changes-since= is parsed into a tz-aware datetime option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            # The ISO-8601 string in the query must arrive as a UTC-aware
            # datetime object.
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = fakes.HTTPRequestV3.blank('/servers?%s' % params)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
    def test_get_servers_admin_filters_as_user(self):
        """Admin-only search options are stripped for non-admin callers."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Generic filters survive for a normal user...
            self.assertIn('name', search_opts)
            self.assertIn('ip', search_opts)
            self.assertIn('vm_state', search_opts)
            # ...but unrecognized/admin-only options must be dropped.
            self.assertNotIn('unknown_option', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        # NOTE(review): sibling tests use fakes.HTTPRequestV3 here; this one
        # uses fakes.HTTPRequest -- confirm whether that is intentional.
        req = fakes.HTTPRequest.blank('/servers?%s' % query_str)
        res = self.controller.index(req)
        servers = res['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_options_as_admin(self):
        """Admins keep every search option, including unknown ones."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertIn('ip', search_opts)
            # Unlike the non-admin case, unknown options are passed through.
            self.assertIn('unknown_option', search_opts)
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequestV3.blank('/servers?%s' % query_str,
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_ip(self):
        """index() forwards ?ip= (a regex) as the 'ip' search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip', search_opts)
            self.assertEqual(search_opts['ip'], '10\..*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?ip=10\..*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_allows_ip6(self):
        """Admins may filter by the ip6 (IPv6 regex) search option."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers?ip6=ffff.*',
                                        use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_all_server_details(self):
        """detail() renders id/hostId/name/image/flavor/status/metadata for
        every server returned by the default fakes."""
        # Bookmark links built by the view layer for the fake flavor/image.
        expected_flavor = {
            "id": "1",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/flavors/1',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/images/10',
                },
            ],
        }
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            # No host is set on the fakes, so hostId renders empty.
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'BUILD')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """hostId is an opaque per-host hash: equal for servers on the same
        host, different across hosts, never the raw host name."""
        def return_servers_with_host(context, *args, **kwargs):
            # Alternate instances between two hosts (0 and 1).
            return [fakes.stub_instance(i + 1, 'fake', 'fake', host=i % 2,
                                        uuid=fakes.get_fake_uuid(i))
                    for i in xrange(5)]
        self.stubs.Set(db, 'instance_get_all_by_filters',
                       return_servers_with_host)
        req = fakes.HTTPRequestV3.blank('/servers/detail')
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
    def test_get_servers_joins_pci_devices(self):
        """index() asks compute_api to eager-load 'pci_devices'."""
        # Captured from the stub so it can be asserted after the request.
        self.expected_attrs = None
        def fake_get_all(compute_self, context, search_opts=None,
                         sort_key=None, sort_dir='desc',
                         limit=None, marker=None, want_objects=False,
                         expected_attrs=None):
            self.expected_attrs = expected_attrs
            return []
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequestV3.blank('/servers', use_admin_context=True)
        self.assertIn('servers', self.controller.index(req))
        self.assertIn('pci_devices', self.expected_attrs)
class ServersControllerDeleteTest(ControllerTest):
    """Tests for DELETE /servers/<uuid> in the v3 servers API.

    Fixes over the previous revision: ``assertEqual(x, True)`` replaced by
    the idiomatic ``assertTrue(x)``, and the redundant re-stubbing of
    ``db.instance_destroy`` in ``test_delete_server_instance_if_not_launched``
    removed -- ``setUp`` already installs an identical stub for every test.
    """

    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        self.server_delete_called = False

        def instance_destroy_mock(*args, **kwargs):
            # Record that the DB-level destroy was reached and return a
            # plausible deleted instance record.
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)

    def _create_delete_request(self, uuid):
        """Build a DELETE request for the given server uuid."""
        fakes.stub_out_instance_quota(self.stubs, 0, 10)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % uuid)
        req.method = 'DELETE'
        return req

    def _delete_server_instance(self, uuid=FAKE_UUID):
        """Delete an ACTIVE instance identified by uuid."""
        req = self._create_delete_request(uuid)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        self.controller.delete(req, uuid)

    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_not_found(self):
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid='non-existent-uuid')

    def test_delete_server_instance_while_building(self):
        req = self._create_delete_request(FAKE_UUID)
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)

    def test_delete_locked_server(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(compute_api.API, 'soft_delete',
                       fakes.fake_actions_to_locked_server)
        self.stubs.Set(compute_api.API, 'delete',
                       fakes.fake_actions_to_locked_server)
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)

    def test_delete_server_instance_while_resize(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(
                           vm_state=vm_states.ACTIVE,
                           task_state=task_states.RESIZE_PREP))
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_if_not_launched(self):
        # With a reclaim interval configured, deleting a never-launched
        # instance must still destroy it immediately (no soft-delete).
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'DELETE'
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(launched_at=None))
        # setUp() already stubbed db.instance_destroy to flip
        # self.server_delete_called, so no extra stubbing is needed here.
        self.controller.delete(req, FAKE_UUID)
        self.assertTrue(self.server_delete_called)
class ServersControllerRebuildInstanceTest(ControllerTest):
    """Tests for the rebuild server action and start/stop server actions.

    The stub/mock setup in these tests is order-sensitive (stubs.Set and
    mox Replay must precede the controller call), so the bodies are kept
    verbatim with documentation added only.
    """
    # Fixed image used as the rebuild target throughout the class.
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v3/fake/images/%s' % image_uuid
    def setUp(self):
        super(ServersControllerRebuildInstanceTest, self).setUp()
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE))
        # Baseline valid rebuild body; individual tests mutate it.
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_href,
                'metadata': {
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"
    def test_rebuild_instance_with_blank_metadata_key(self):
        """An empty metadata key fails schema validation."""
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_key_too_long(self):
        """A metadata key over 255 characters fails schema validation."""
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_too_long(self):
        """A metadata value over 255 characters fails schema validation."""
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_metadata_value_not_string(self):
        """A non-string metadata value fails schema validation."""
        self.body['rebuild']['metadata']['key1'] = 1
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_fails_when_min_ram_too_small(self):
        """Rebuild is a 400 when the image's min_ram exceeds the flavor."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="4096", min_disk="10")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_fails_when_min_disk_too_small(self):
        """Rebuild is a 400 when the image's min_disk exceeds the flavor."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="128", min_disk="100000")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, body=self.body)
    def test_rebuild_instance_image_too_large(self):
        """Rebuild is a 400 when the image doesn't fit the flavor's disk."""
        # 1000 GB, larger than any fake flavor's root disk.
        size = str(1000 * (1024 ** 3))
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', size=size)
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_name_all_blank(self):
        """A whitespace-only server name fails schema validation."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.body['rebuild']['name'] = '     '
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_with_deleted_image(self):
        """Rebuilding from a DELETED image is a 400."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='DELETED')
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, body=self.body)
    def test_rebuild_instance_onset_file_limit_over_quota(self):
        """OnsetFileLimitExceeded from compute maps to HTTP 403."""
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        with contextlib.nested(
            mock.patch.object(fake._FakeImageService, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dumps(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)
    def test_start(self):
        """The start action invokes compute_api.API.start."""
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.controller._start_server(req, FAKE_UUID, body)
    def test_start_policy_failed(self):
        """Policy denial of start raises PolicyNotAuthorized naming the
        rule."""
        rules = {
            "compute:v3:servers:start":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._start_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:start", exc.format_message())
    def test_start_not_ready(self):
        """InstanceNotReady from start maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    def test_start_locked_server(self):
        """Starting a locked server maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'start',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    def test_start_invalid(self):
        """InstanceInvalidState from start maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._start_server, req, FAKE_UUID, body)
    def test_stop(self):
        """The stop action invokes compute_api.API.stop."""
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.controller._stop_server(req, FAKE_UUID, body)
    def test_stop_policy_failed(self):
        """Policy denial of stop raises PolicyNotAuthorized naming the
        rule."""
        rules = {
            "compute:v3:servers:stop":
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop='')
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._stop_server,
                                req, FAKE_UUID, body)
        self.assertIn("compute:v3:servers:stop", exc.format_message())
    def test_stop_not_ready(self):
        """InstanceNotReady from stop maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    def test_stop_locked_server(self):
        """Stopping a locked server maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'stop',
                       fakes.fake_actions_to_locked_server)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    def test_stop_invalid_state(self):
        """InstanceInvalidState from stop maps to HTTP 409."""
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_invalid_state)
        req = fakes.HTTPRequestV3.blank('/servers/%s/action' % FAKE_UUID)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller._stop_server, req, FAKE_UUID, body)
    def test_start_with_bogus_id(self):
        """Starting an unknown instance id maps to HTTP 404."""
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._start_server, req, 'test_inst', body)
    def test_stop_with_bogus_id(self):
        """Stopping an unknown instance id maps to HTTP 404."""
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fake_instance_get_by_uuid_not_found)
        req = fakes.HTTPRequestV3.blank('/servers/test_inst/action')
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller._stop_server, req, 'test_inst', body)
class ServersControllerUpdateTest(ControllerTest):
    """Tests for PUT /servers/<uuid> (server update) in the v3 API."""
    def _get_request(self, body=None, options=None):
        """Build a JSON PUT request; optionally stub db.instance_get with
        the given instance attribute overrides."""
        if options:
            self.stubs.Set(db, 'instance_get',
                           fakes.fake_instance_get(**options))
        req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        req.body = jsonutils.dumps(body)
        return req
    def test_update_server_all_attributes(self):
        """Updating every mutable attribute (only name in v3) succeeds."""
        body = {'server': {
                  'name': 'server_test',
               }}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_name(self):
        """Updating the server name is reflected in the response."""
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_name_too_long(self):
        """A name over 255 characters fails schema validation."""
        body = {'server': {'name': 'x' * 256}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_name_all_blank_spaces(self):
        """A whitespace-only name fails schema validation."""
        self.stubs.Set(db, 'instance_get',
                fakes.fake_instance_get(name='server_test'))
        req = fakes.HTTPRequest.blank('/v3/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = 'application/json'
        body = {'server': {'name': ' ' * 64}}
        req.body = jsonutils.dumps(body)
        self.assertRaises(exception.ValidationError, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_admin_password_ignored(self):
        """admin_password in the update body is silently dropped."""
        inst_dict = dict(name='server_test', admin_password='bacon')
        body = dict(server=inst_dict)
        def server_update(context, id, params):
            # Only display_name may reach the DB; the password must not.
            filtered_dict = {
                'display_name': 'server_test',
            }
            self.assertEqual(params, filtered_dict)
            filtered_dict['uuid'] = id
            return filtered_dict
        self.stubs.Set(db, 'instance_update', server_update)
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'PUT'
        req.content_type = "application/json"
        req.body = jsonutils.dumps(body)
        res_dict = self.controller.update(req, FAKE_UUID, body=body)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)
        self.assertEqual(res_dict['server']['name'], 'server_test')
    def test_update_server_not_found(self):
        """Updating a missing instance maps to HTTP 404."""
        def fake_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(compute_api.API, 'get', fake_get)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_not_found_on_update(self):
        """Instance vanishing during the update also maps to HTTP 404."""
        def fake_update(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')
        self.stubs.Set(db, 'instance_update_and_get_original', fake_update)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                          req, FAKE_UUID, body=body)
    def test_update_server_policy_fail(self):
        """Policy denial of compute:update raises PolicyNotAuthorized."""
        rule = {'compute:update': common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        body = {'server': {'name': 'server_test'}}
        req = self._get_request(body, {'name': 'server_test'})
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.update, req, FAKE_UUID, body=body)
class ServerStatusTest(test.TestCase):
    """Tests mapping of (vm_state, task_state) to the API 'status' field,
    plus policy checks on the reboot/resize confirm/revert actions."""
    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self.stubs)
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
    def _get_with_state(self, vm_state, task_state=None):
        """Show the fake server with the given vm/task state stubbed in."""
        self.stubs.Set(db, 'instance_get_by_uuid',
                fakes.fake_instance_get(vm_state=vm_state,
                                        task_state=task_state))
        request = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
        return self.controller.show(request, FAKE_UUID)
    def test_active(self):
        response = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual(response['server']['status'], 'ACTIVE')
    def test_reboot(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING)
        self.assertEqual(response['server']['status'], 'REBOOT')
    def test_reboot_hard(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING_HARD)
        self.assertEqual(response['server']['status'], 'HARD_REBOOT')
    def test_reboot_resize_policy_fail(self):
        """Policy denial of compute:reboot raises PolicyNotAuthorized."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        rule = {'compute:reboot':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_reboot, req, '1234',
                {'reboot': {'type': 'HARD'}})
    def test_rebuild(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBUILDING)
        self.assertEqual(response['server']['status'], 'REBUILD')
    def test_rebuild_error(self):
        response = self._get_with_state(vm_states.ERROR)
        self.assertEqual(response['server']['status'], 'ERROR')
    def test_resize(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.RESIZE_PREP)
        self.assertEqual(response['server']['status'], 'RESIZE')
    def test_confirm_resize_policy_fail(self):
        """Policy denial of compute:confirm_resize raises
        PolicyNotAuthorized."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        rule = {'compute:confirm_resize':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_confirm_resize, req, '1234', {})
    def test_verify_resize(self):
        response = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')
    def test_revert_resize(self):
        response = self._get_with_state(vm_states.RESIZED,
                                        task_states.RESIZE_REVERTING)
        self.assertEqual(response['server']['status'], 'REVERT_RESIZE')
    def test_revert_resize_policy_fail(self):
        """Policy denial of compute:revert_resize raises
        PolicyNotAuthorized."""
        def fake_get_server(context, req, id):
            return fakes.stub_instance(id)
        self.stubs.Set(self.controller, '_get_server', fake_get_server)
        rule = {'compute:revert_resize':
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        req = fakes.HTTPRequestV3.blank('/servers/1234/action')
        self.assertRaises(exception.PolicyNotAuthorized,
                self.controller._action_revert_resize, req, '1234', {})
    def test_password_update(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.UPDATING_PASSWORD)
        self.assertEqual(response['server']['status'], 'PASSWORD')
    def test_stopped(self):
        response = self._get_with_state(vm_states.STOPPED)
        self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
    """Tests for POST /servers (instance create) on the ServersController.

    setUp stubs the db/network layers with in-memory fakes and prepares a
    default, valid boot request in ``self.body``/``self.req``; individual
    tests mutate the body and assert either a successful create or the
    specific error the API must raise.
    """

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Install fake db/network/image layers and a default boot body."""
        super(ServersControllerCreateTest, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        # Created instances are cached here so the get/update stubs below
        # can find and mutate them.
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}

        fakes.stub_out_nw_api(self.stubs)

        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)

        def instance_create(context, inst):
            """Fake db.instance_create: build and cache an instance dict."""
            inst_type = flavors.get_flavor_by_flavor_id(3)
            image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
            def_image_ref = 'http://localhost/images/%s' % image_uuid
            self.instance_cache_num += 1
            instance = fake_instance.fake_db_instance(**{
                'id': self.instance_cache_num,
                'display_name': inst['display_name'] or 'test',
                'uuid': FAKE_UUID,
                'instance_type': inst_type,
                'image_ref': inst.get('image_ref', def_image_ref),
                'user_id': 'fake',
                'project_id': 'fake',
                'reservation_id': inst['reservation_id'],
                "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
                "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
                "config_drive": None,
                "progress": 0,
                "fixed_ips": [],
                "task_state": "",
                "vm_state": "",
                "root_device_name": inst.get('root_device_name', 'vda'),
            })
            self.instance_cache_by_id[instance['id']] = instance
            self.instance_cache_by_uuid[instance['uuid']] = instance
            return instance

        def instance_get(context, instance_id):
            return self.instance_cache_by_id[instance_id]

        def instance_update(context, uuid, values):
            instance = self.instance_cache_by_uuid[uuid]
            instance.update(values)
            return instance

        def server_update_and_get_original(
                context, instance_uuid, params, update_cells=False,
                columns_to_join=None):
            # Returns (old, new); the fake mutates in place so both are
            # the same object, which is sufficient for these tests.
            inst = self.instance_cache_by_uuid[instance_uuid]
            inst.update(params)
            return (inst, inst)

        def fake_method(*args, **kwargs):
            pass

        def project_get_networks(context, user_id):
            return dict(id='1', host='localhost')

        # NOTE(review): the original also defined server_update() and
        # queue_get_for() here but never registered them; removed as dead
        # code.
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
        self.stubs.Set(db, 'project_get_networks',
                       project_get_networks)
        self.stubs.Set(db, 'instance_create', instance_create)
        self.stubs.Set(db, 'instance_system_metadata_update',
                       fake_method)
        self.stubs.Set(db, 'instance_get', instance_get)
        self.stubs.Set(db, 'instance_update', instance_update)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       server_update_and_get_original)
        self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
                       fake_method)

        # Default, valid create request; tests mutate this as needed.
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self.bdm = [{'delete_on_termination': 1,
                     'device_name': 123,
                     'volume_size': 1,
                     'volume_id': '11111111-1111-1111-1111-111111111111'}]
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _check_admin_password_len(self, server_dict):
        """Assert the generated adminPass honors CONF.password_length."""
        self.assertEqual(CONF.password_length,
                         len(server_dict["adminPass"]))

    def _check_admin_password_missing(self, server_dict):
        """Assert no adminPass is returned (passwords disabled)."""
        self.assertNotIn("adminPass", server_dict)

    def _test_create_instance(self, flavor=2):
        """Create a server with the given flavor and verify the response."""
        image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        self.body['server']['imageRef'] = image_uuid
        self.body['server']['flavorRef'] = flavor
        self.req.body = jsonutils.dumps(self.body)
        server = self.controller.create(self.req, body=self.body).obj['server']
        self._check_admin_password_len(server)
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_private_flavor(self):
        """Booting with a private (is_public=False) flavor is rejected."""
        values = {
            'name': 'fake_name',
            'memory_mb': 512,
            'vcpus': 1,
            'root_gb': 10,
            'ephemeral_gb': 10,
            'flavorid': '1324',
            'swap': 0,
            'rxtx_factor': 0.5,
            'vcpu_weight': 1,
            'disabled': False,
            'is_public': False,
        }
        db.flavor_create(context.get_admin_context(), values)
        self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_instance,
                          flavor=1324)

    def test_create_server_bad_image_href(self):
        """A non-string imageRef fails schema validation."""
        image_href = 1
        self.body['server']['min_count'] = 1
        # NOTE(review): original had a stray trailing comma here that made
        # imageRef a tuple (1,) rather than the intended bad integer href.
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create,
                          self.req, body=self.body)

    def test_create_server_with_deleted_image(self):
        """Booting from a DELETED image returns 400 with a clear message."""
        (image_service, image_id) = glance.get_remote_image_service(
            context, '')
        image_service.update(context, self.image_uuid, {'status': 'DELETED'})
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'status': 'active'})
        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dumps(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
            self.controller.create(self.req, body=self.body)

    def test_create_server_image_too_large(self):
        """An image larger than the flavor's disk returns 400."""
        (image_service, image_id) = glance.get_remote_image_service(
            context, self.image_uuid)
        image = image_service.show(context, image_id)
        orig_size = image['size']
        new_size = str(1000 * (1024 ** 3))  # 1000 GiB, far beyond flavor 2
        image_service.update(context, self.image_uuid, {'size': new_size})
        self.addCleanup(image_service.update, context, self.image_uuid,
                        {'size': orig_size})
        self.body['server']['flavorRef'] = 2
        self.req.body = jsonutils.dumps(self.body)
        with testtools.ExpectedException(
                webob.exc.HTTPBadRequest,
                "Flavor's disk is too small for requested image."):
            self.controller.create(self.req, body=self.body)

    def test_create_instance_image_ref_is_bookmark(self):
        """A bookmark-style image href (no version prefix) is accepted."""
        image_href = 'http://localhost/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_image_ref_is_invalid(self):
        """An image href whose id is not a UUID returns 400."""
        image_uuid = 'this_is_not_a_valid_uuid'
        image_href = 'http://localhost/images/%s' % image_uuid
        flavor_ref = 'http://localhost/flavors/3'
        self.body['server']['imageRef'] = image_href
        self.body['server']['flavorRef'] = flavor_ref
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, body=self.body)

    def test_create_instance_no_key_pair(self):
        """Create still succeeds when the user has no keypairs."""
        fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
        self._test_create_instance()

    def _test_create_extra(self, params, no_image=False):
        """Merge ``params`` into the default body and issue the create."""
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dumps(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']

    # TODO(cyeoh): bp-v3-api-unittests
    # This needs to be ported to the os-keypairs extension tests
    # def test_create_instance_with_keypairs_enabled(self):
    #     self.ext_mgr.extensions = {'os-keypairs': 'fake'}
    #     key_name = 'green'
    #
    #     params = {'key_name': key_name}
    #     old_create = compute_api.API.create
    #
    #     # NOTE(sdague): key pair goes back to the database,
    #     # so we need to stub it out for tests
    #     def key_pair_get(context, user_id, name):
    #         return {'public_key': 'FAKE_KEY',
    #                 'fingerprint': 'FAKE_FINGERPRINT',
    #                 'name': name}
    #
    #     def create(*args, **kwargs):
    #         self.assertEqual(kwargs['key_name'], key_name)
    #         return old_create(*args, **kwargs)
    #
    #     self.stubs.Set(db, 'key_pair_get', key_pair_get)
    #     self.stubs.Set(compute_api.API, 'create', create)
    #     self._test_create_extra(params)
    #
    # TODO(cyeoh): bp-v3-api-unittests
    # This needs to be ported to the os-networks extension tests
    # def test_create_instance_with_networks_enabled(self):
    #     self.ext_mgr.extensions = {'os-networks': 'fake'}
    #     net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    #     requested_networks = [{'uuid': net_uuid}]
    #     params = {'networks': requested_networks}
    #     old_create = compute_api.API.create
    #     def create(*args, **kwargs):
    #         result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
    #         self.assertEqual(kwargs['requested_networks'], result)
    #         return old_create(*args, **kwargs)
    #     self.stubs.Set(compute_api.API, 'create', create)
    #     self._test_create_extra(params)

    def test_create_instance_with_port_with_no_fixed_ips(self):
        """PortRequiresFixedIP from compute API maps to 400."""
        port_id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port_id}]
        params = {'networks': requested_networks}

        def fake_create(*args, **kwargs):
            raise exception.PortRequiresFixedIP(port_id=port_id)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_raise_user_data_too_large(self, mock_create):
        """InstanceUserDataTooLarge from compute API maps to 400."""
        mock_create.side_effect = exception.InstanceUserDataTooLarge(
            maxsize=1, length=2)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create,
                          self.req, body=self.body)

    def test_create_instance_with_network_with_no_subnet(self):
        """NetworkRequiresSubnet from compute API maps to 400."""
        network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network}]
        params = {'networks': requested_networks}

        def fake_create(*args, **kwargs):
            raise exception.NetworkRequiresSubnet(network_uuid=network)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    def test_create_instance_with_non_unique_secgroup_name(self):
        """NoUniqueMatch on duplicate security-group names maps to 409."""
        network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network}]
        params = {'networks': requested_networks,
                  'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}

        def fake_create(*args, **kwargs):
            raise exception.NoUniqueMatch("No Unique match found for ...")
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, params)

    def test_create_instance_with_networks_disabled_neutronv2(self):
        """With neutron, requested networks pass through as tuples."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create

        def create(*args, **kwargs):
            result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
                       None, None)]
            self.assertEqual(result, kwargs['requested_networks'].as_tuples())
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)

    def test_create_instance_with_networks_disabled(self):
        """With nova-network, 'networks' in the body is ignored (None)."""
        net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        requested_networks = [{'uuid': net_uuid}]
        params = {'networks': requested_networks}
        old_create = compute_api.API.create

        def create(*args, **kwargs):
            self.assertIsNone(kwargs['requested_networks'])
            return old_create(*args, **kwargs)
        self.stubs.Set(compute_api.API, 'create', create)
        self._test_create_extra(params)

    def test_create_instance_with_pass_disabled(self):
        """No adminPass is returned when instance passwords are disabled.

        See lp bug 921814.
        """
        # NOTE(review): the original set this flag twice; once is enough.
        self.flags(enable_instance_password=False)
        image_href = 'http://localhost/v2/fake/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self._check_admin_password_missing(server)
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_name_too_long(self):
        """A server name over 255 characters fails schema validation."""
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['name'] = 'X' * 256
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError, self.controller.create,
                          self.req, body=self.body)

    def test_create_instance_name_all_blank_spaces(self):
        """A server name of only spaces fails schema validation."""
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        image_href = 'http://localhost/v3/images/%s' % image_uuid
        flavor_ref = 'http://localhost/flavors/3'
        body = {
            'server': {
                'name': ' ' * 64,
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        req = fakes.HTTPRequest.blank('/v3/servers')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(exception.ValidationError,
                          self.controller.create, req, body=body)

    def test_create_instance(self):
        """Happy path: create returns id and a generated adminPass."""
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self._check_admin_password_len(server)
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_extension_create_exception(self):
        """An unexpected error in an extension hook surfaces as 500."""
        def fake_keypair_server_create(self, server_dict,
                                       create_kwargs):
            raise KeyError
        self.stubs.Set(keypairs.Keypairs, 'server_create',
                       fake_keypair_server_create)
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        image_href = 'http://localhost/v3/images/%s' % image_uuid
        flavor_ref = 'http://localhost/123/flavors/3'
        body = {
            'server': {
                'name': 'server_test',
                'imageRef': image_href,
                'flavorRef': flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        req = fakes.HTTPRequestV3.blank('/servers')
        req.method = 'POST'
        req.body = jsonutils.dumps(body)
        req.headers["content-type"] = "application/json"
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.controller.create, req, body=body)

    def test_create_instance_pass_disabled(self):
        """adminPass is omitted when enable_instance_password=False."""
        self.flags(enable_instance_password=False)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self._check_admin_password_missing(server)
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_too_much_metadata(self):
        """Exceeding quota_metadata_items returns 403."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata']['vote'] = 'fiddletown'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_key_too_long(self):
        """A metadata key over 255 characters fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = {('a' * 260): '12345'}
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_value_too_long(self):
        """A metadata value over 255 characters fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = {'key1': ('a' * 260)}
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_key_blank(self):
        """An empty metadata key fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = {'': 'abcd'}
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_not_dict(self):
        """A non-dict metadata value fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = 'string'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_key_not_string(self):
        """A non-string metadata key fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = {1: 'test'}
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_metadata_value_not_string(self):
        """A non-string metadata value fails validation."""
        self.flags(quota_metadata_items=1)
        image_href = 'http://localhost/v2/images/%s' % self.image_uuid
        self.body['server']['imageRef'] = image_href
        self.body['server']['metadata'] = {'test': ['a', 'list']}
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(exception.ValidationError,
                          self.controller.create, self.req, body=self.body)

    def test_create_user_data_malformed_bad_request(self):
        """Non-base64 user_data fails validation."""
        params = {'user_data': 'u1234'}
        self.assertRaises(exception.ValidationError,
                          self._test_create_extra, params)

    def test_create_instance_invalid_key_name(self):
        """A nonexistent keypair name returns 400."""
        image_href = 'http://localhost/v2/images/2'
        self.body['server']['imageRef'] = image_href
        self.body['server']['key_name'] = 'nonexistentkey'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_valid_key_name(self):
        """A valid keypair name is accepted."""
        self.body['server']['key_name'] = 'key'
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        self.assertEqual(FAKE_UUID, res["server"]["id"])
        self._check_admin_password_len(res["server"])

    def test_create_instance_invalid_flavor_href(self):
        """A flavor href with a non-numeric id returns 400."""
        image_href = 'http://localhost/v2/images/2'
        flavor_ref = 'http://localhost/v2/flavors/asdf'
        self.body['server']['imageRef'] = image_href
        self.body['server']['flavorRef'] = flavor_ref
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_invalid_flavor_id_int(self):
        """A negative flavor id returns 400."""
        image_href = 'http://localhost/v2/images/2'
        flavor_ref = -1
        self.body['server']['imageRef'] = image_href
        self.body['server']['flavorRef'] = flavor_ref
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_bad_flavor_href(self):
        """A flavor href pointing at a nonexistent flavor returns 400."""
        image_href = 'http://localhost/v2/images/2'
        flavor_ref = 'http://localhost/v2/flavors/17'
        self.body['server']['imageRef'] = image_href
        self.body['server']['flavorRef'] = flavor_ref
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_bad_href(self):
        """A non-URL, non-UUID image ref returns 400."""
        image_href = 'asdf'
        self.body['server']['imageRef'] = image_href
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller.create, self.req, body=self.body)

    def test_create_instance_local_href(self):
        """A bare image UUID (local href) is accepted."""
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self.assertEqual(FAKE_UUID, server['id'])

    def test_create_instance_admin_password(self):
        """A user-supplied adminPass is echoed back."""
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = 'testpass'
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        server = res['server']
        self.assertEqual(server['adminPass'],
                         self.body['server']['adminPass'])

    def test_create_instance_admin_password_pass_disabled(self):
        """A supplied adminPass is accepted but not returned when disabled."""
        self.flags(enable_instance_password=False)
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = 'testpass'
        self.req.body = jsonutils.dumps(self.body)
        res = self.controller.create(self.req, body=self.body).obj
        self.assertIn('server', res)
        self.assertIn('adminPass', self.body['server'])

    def test_create_instance_admin_password_empty(self):
        """An empty adminPass does not raise."""
        self.body['server']['flavorRef'] = 3
        self.body['server']['adminPass'] = ''
        self.req.body = jsonutils.dumps(self.body)
        # The fact that the action doesn't raise is enough validation
        self.controller.create(self.req, body=self.body)

    def test_create_location(self):
        """The Location response header is the server's self href."""
        selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
        self.req.body = jsonutils.dumps(self.body)
        robj = self.controller.create(self.req, body=self.body)
        self.assertEqual(robj['Location'], selfhref)

    def _do_test_create_instance_above_quota(self, resource, allowed, quota,
                                             expected_msg):
        """Stub the quota for ``resource`` and expect 403 with the message."""
        fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
        self.body['server']['flavorRef'] = 3
        self.req.body = jsonutils.dumps(self.body)
        try:
            self.controller.create(self.req, body=self.body).obj['server']
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)

    def test_create_instance_above_quota_instances(self):
        msg = _('Quota exceeded for instances: Requested 1, but'
                ' already used 10 of 10 instances')
        self._do_test_create_instance_above_quota('instances', 0, 10, msg)

    def test_create_instance_above_quota_ram(self):
        msg = _('Quota exceeded for ram: Requested 4096, but'
                ' already used 8192 of 10240 ram')
        self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)

    def test_create_instance_above_quota_cores(self):
        msg = _('Quota exceeded for cores: Requested 2, but'
                ' already used 9 of 10 cores')
        self._do_test_create_instance_above_quota('cores', 1, 10, msg)

    def test_create_instance_above_quota_server_group_members(self):
        """server_group_members over quota maps to 403."""
        ctxt = context.get_admin_context()
        fake_group = objects.InstanceGroup(ctxt)
        fake_group.create()

        def fake_count(context, name, group, user_id):
            self.assertEqual(name, "server_group_members")
            self.assertEqual(group.uuid, fake_group.uuid)
            self.assertEqual(user_id,
                             self.req.environ['nova.context'].user_id)
            return 10

        def fake_limit_check(context, **kwargs):
            if 'server_group_members' in kwargs:
                raise exception.OverQuota(overs={})

        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stubs.Set(fakes.QUOTAS, 'count', fake_count)
        self.stubs.Set(fakes.QUOTAS, 'limit_check', fake_limit_check)
        self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': fake_group.uuid}
        self.req.body = jsonutils.dumps(self.body)
        expected_msg = "Quota exceeded, too many servers in group"
        try:
            self.controller.create(self.req, body=self.body).obj
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)

    def test_create_instance_above_quota_server_groups(self):
        """server_groups over quota (on reserve) maps to 403."""
        def fake_reserve(contex, **deltas):
            if 'server_groups' in deltas:
                raise exception.OverQuota(overs={})

        def fake_instance_destroy(context, uuid, constraint):
            return fakes.stub_instance(1)
        self.stubs.Set(fakes.QUOTAS, 'reserve', fake_reserve)
        self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
        self.body['os:scheduler_hints'] = {'group': 'fake_group'}
        self.req.body = jsonutils.dumps(self.body)
        expected_msg = "Quota exceeded, too many server groups."
        try:
            self.controller.create(self.req, body=self.body).obj
            self.fail('expected quota to be exceeded')
        except webob.exc.HTTPForbidden as e:
            self.assertEqual(e.explanation, expected_msg)

    def test_create_instance_with_neutronv2_port_in_use(self):
        """PortInUse from compute API maps to 409."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        params = {'networks': requested_networks}

        def fake_create(*args, **kwargs):
            raise exception.PortInUse(port_id=port)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, params)

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_public_network_non_admin(self, mock_create):
        """ExternalNetworkAttachForbidden maps to 403."""
        public_network_uuid = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        params = {'networks': [{'uuid': public_network_uuid}]}
        self.req.body = jsonutils.dumps(self.body)
        mock_create.side_effect = exception.ExternalNetworkAttachForbidden(
            network_uuid=public_network_uuid)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self._test_create_extra, params)

    @mock.patch.object(compute_api.API, 'create')
    def test_create_multiple_instance_with_specified_ip_neutronv2(self,
                                                                  _api_mock):
        """A fixed IP plus max_count > 1 maps to 400."""
        _api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
            reason="")
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        address = '10.0.0.1'
        requested_networks = [{'uuid': network, 'fixed_ip': address,
                               'port': port}]
        params = {'networks': requested_networks}
        self.body['server']['max_count'] = 2
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    def test_create_multiple_instance_with_neutronv2_port(self):
        """A single port plus max_count > 1 maps to 400."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        params = {'networks': requested_networks}
        self.body['server']['max_count'] = 2

        def fake_create(*args, **kwargs):
            msg = _("Unable to launch multiple instances with"
                    " a single configured port ID. Please launch your"
                    " instance one by one with different ports.")
            raise exception.MultiplePortsNotApplicable(reason=msg)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    def test_create_instance_with_neturonv2_not_found_network(self):
        # NOTE(review): method name typo ("neturonv2") kept so existing
        # test selectors keep working.
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        params = {'networks': requested_networks}

        def fake_create(*args, **kwargs):
            raise exception.NetworkNotFound(network_id=network)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    def test_create_instance_with_neutronv2_port_not_found(self):
        """PortNotFound from compute API maps to 400."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        params = {'networks': requested_networks}

        def fake_create(*args, **kwargs):
            raise exception.PortNotFound(port_id=port)
        self.stubs.Set(compute_api.API, 'create', fake_create)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_network_ambiguous(self, mock_create):
        """NetworkAmbiguous from compute API maps to 409."""
        mock_create.side_effect = exception.NetworkAmbiguous()
        self.assertRaises(webob.exc.HTTPConflict,
                          self._test_create_extra, {})

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InstanceExists(
                           name='instance-name'))
    def test_create_instance_raise_instance_exists(self, mock_create):
        """InstanceExists from compute API maps to 409."""
        self.assertRaises(webob.exc.HTTPConflict,
                          self.controller.create,
                          self.req, body=self.body)
class ServersControllerCreateTestWithMock(test.TestCase):
    """Server-create tests that patch compute_api.API.create with mock
    rather than the stubs fixture used by ServersControllerCreateTest.
    """

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Build the controller and a default, valid boot request body."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = servers.ServersController(extension_info=ext_info)
        self.body = {
            'server': {
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _test_create_extra(self, params, no_image=False):
        """Merge ``params`` into the default body and issue the create."""
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dumps(self.body)
        self.req.headers["content-type"] = "application/json"
        self.controller.create(self.req, body=self.body).obj['server']

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_fixed_ip_already_in_use(self,
            create_mock):
        """FixedIpAlreadyInUse maps to 400 and create is called once."""
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        address = '10.0.2.3'
        requested_networks = [{'uuid': network, 'fixed_ip': address}]
        params = {'networks': requested_networks}
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=address,
            instance_uuid=network)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
        self.assertEqual(1, len(create_mock.call_args_list))

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_create_instance_with_invalid_volume_error(self, create_mock):
        """InvalidVolume from compute API maps to 400."""
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
    def setUp(self):
        """Build a stubbed instance object and a v3 view builder.

        Network info is faked with a 'public' network (one v4 + one v6
        address) and a 'private' network (one v4 address), and the
        floating-ip lookup returns nothing.
        """
        super(ServersViewBuilderTest, self).setUp()
        CONF.set_override('host', 'localhost', group='glance')
        self.flags(use_ipv6=True)
        db_inst = fakes.stub_instance(
            id=1,
            image_ref="5",
            uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
            display_name="test_server",
            include_fake_metadata=False)
        privates = ['172.19.0.1']
        publics = ['192.168.0.3']
        public6s = ['b33f::fdee:ddff:fecc:bbaa']

        def nw_info(*args, **kwargs):
            # (network, info) pairs consumed by the view layer.
            return [(None, {'label': 'public',
                            'ips': [dict(ip=ip) for ip in publics],
                            'ip6s': [dict(ip=ip) for ip in public6s]}),
                    (None, {'label': 'private',
                            'ips': [dict(ip=ip) for ip in privates]})]

        def floaters(*args, **kwargs):
            return []

        fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
        fakes.stub_out_nw_api_get_floating_ips_by_fixed_address(self.stubs,
                                                                floaters)
        self.uuid = db_inst['uuid']
        self.view_builder = views.servers.ViewBuilderV3()
        self.request = fakes.HTTPRequestV3.blank("")
        self.request.context = context.RequestContext('fake', 'fake')
        self.instance = fake_instance.fake_instance_obj(
            self.request.context,
            expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
            **db_inst)
def test_get_flavor_valid_instance_type(self):
flavor_bookmark = "http://localhost/flavors/1"
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, expected)
def test_build_server(self):
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_with_project_id(self):
expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": "http://localhost/v3/servers/%s" %
self.uuid,
},
{
"rel": "bookmark",
"href": "http://localhost/servers/%s" % self.uuid,
},
],
}
}
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail(self):
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"name": "test_server",
"status": "ERROR",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
"fault": {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
},
}
}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_fault_that_has_been_deleted(self):
self.instance['deleted'] = 1
self.instance['vm_state'] = vm_states.ERROR
fault = fake_instance.fake_fault_obj(self.request.context,
self.uuid, code=500,
message="No valid host was found")
self.instance['fault'] = fault
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "No valid host was found"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertEqual("DELETED", output['server']['status'])
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error',
details='')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
output = self.view_builder.show(self.request, self.instance)
self.assertNotIn('fault', output['server'])
def test_build_server_detail_active_status(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 100,
"name": "test_server",
"status": "ACTIVE",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
metadata = nova_utils.metadata_to_dict(metadata)
self.instance['metadata'] = metadata
image_bookmark = "http://localhost/images/5"
flavor_bookmark = "http://localhost/flavors/1"
self_link = "http://localhost/v3/servers/%s" % self.uuid
bookmark_link = "http://localhost/servers/%s" % self.uuid
expected_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
{'version': 6, 'addr': '2001:db8:0:1::1',
'OS-EXT-IPS:type': 'fixed',
'OS-EXT-IPS-MAC:mac_addr': 'aa:aa:aa:aa:aa:aa'},
]
},
"metadata": {"Open": "Stack"},
"links": [
{
"rel": "self",
"href": self_link,
},
{
"rel": "bookmark",
"href": bookmark_link,
},
],
}
}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output, matchers.DictMatches(expected_server))
class ServersAllExtensionsTestCase(test.TestCase):
def setUp(self):
super(ServersAllExtensionsTestCase, self).setUp()
self.app = compute.APIRouterV3()
def test_create_missing_server(self):
def fake_create(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
self.stubs.Set(compute_api.API, 'create', fake_create)
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(400, res.status_int)
def test_update_missing_server(self):
def fake_update(*args, **kwargs):
raise test.TestingException("Should not reach the compute API.")
self.stubs.Set(compute_api.API, 'update', fake_update)
req = fakes.HTTPRequestV3.blank('/servers/1')
req.method = 'PUT'
req.content_type = 'application/json'
body = {'foo': {'a': 'b'}}
req.body = jsonutils.dumps(body)
res = req.get_response(self.app)
self.assertEqual(400, res.status_int)
class ServersInvalidRequestTestCase(test.TestCase):
def setUp(self):
super(ServersInvalidRequestTestCase, self).setUp()
ext_info = plugins.LoadedExtensionInfo()
self.controller = servers.ServersController(extension_info=ext_info)
def _invalid_server_create(self, body):
req = fakes.HTTPRequestV3.blank('/servers')
req.method = 'POST'
self.assertRaises(exception.ValidationError,
self.controller.create, req, body=body)
def test_create_server_no_body(self):
self._invalid_server_create(body=None)
def test_create_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._invalid_server_create(body=body)
def test_create_server_malformed_entity(self):
body = {'server': 'string'}
self._invalid_server_create(body=body)
def _unprocessable_server_update(self, body):
req = fakes.HTTPRequestV3.blank('/servers/%s' % FAKE_UUID)
req.method = 'PUT'
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, FAKE_UUID, body=body)
def test_update_server_no_body(self):
self._invalid_server_create(body=None)
def test_update_server_missing_server(self):
body = {'foo': {'a': 'b'}}
self._invalid_server_create(body=body)
def test_create_update_malformed_entity(self):
body = {'server': 'string'}
self._invalid_server_create(body=body)
class FakeExt(extensions.V3APIExtensionBase):
name = "AccessIPs"
alias = 'os-access-ips'
version = 1
def fake_extension_point(self, *args, **kwargs):
pass
def get_controller_extensions(self):
return []
def get_resources(self):
return []
class TestServersExtensionPoint(test.NoDBTestCase):
def setUp(self):
super(TestServersExtensionPoint, self).setUp()
CONF.set_override('extensions_whitelist', ['os-access-ips'],
'osapi_v3')
self.stubs.Set(access_ips, 'AccessIPs', FakeExt)
def _test_load_extension_point(self, name):
setattr(FakeExt, 'server_%s' % name,
FakeExt.fake_extension_point)
ext_info = plugins.LoadedExtensionInfo()
controller = servers.ServersController(extension_info=ext_info)
self.assertEqual(
'os-access-ips',
list(getattr(controller,
'%s_extension_manager' % name))[0].obj.alias)
delattr(FakeExt, 'server_%s' % name)
def test_load_update_extension_point(self):
self._test_load_extension_point('update')
def test_load_rebuild_extension_point(self):
self._test_load_extension_point('rebuild')
def test_load_create_extension_point(self):
self._test_load_extension_point('create')
class TestServersExtensionSchema(test.NoDBTestCase):
def setUp(self):
super(TestServersExtensionSchema, self).setUp()
CONF.set_override('extensions_whitelist', ['keypairs'], 'osapi_v3')
def _test_load_extension_schema(self, name):
setattr(FakeExt, 'get_server_%s_schema' % name,
FakeExt.fake_extension_point)
ext_info = plugins.LoadedExtensionInfo()
controller = servers.ServersController(extension_info=ext_info)
self.assertTrue(hasattr(controller, '%s_schema_manager' % name))
delattr(FakeExt, 'get_server_%s_schema' % name)
return getattr(controller, 'schema_server_%s' % name)
def test_load_create_extension_point(self):
expected_schema = copy.deepcopy(servers_schema.base_create)
expected_schema['properties']['server']['properties'].update(
keypairs_schema.server_create)
actual_schema = self._test_load_extension_schema('create')
self.assertEqual(expected_schema, actual_schema)
def test_load_update_extension_point(self):
expected_schema = copy.deepcopy(servers_schema.base_update)
actual_schema = self._test_load_extension_schema('update')
self.assertEqual(expected_schema, actual_schema)
def test_load_rebuild_extension_point(self):
expected_schema = copy.deepcopy(servers_schema.base_rebuild)
actual_schema = self._test_load_extension_schema('rebuild')
self.assertEqual(expected_schema, actual_schema)
| true | true |
f734f159910ff4649f30f99b0dfdab7ad3c0bb6c | 744 | py | Python | 2019/Python/day02/part2.py | tymscar/Advent-Of-Code | cd7b96b0253191e236bd704b0d8b5540fb3e8ef6 | [
"MIT"
] | 4 | 2019-12-08T08:20:53.000Z | 2021-12-17T12:04:11.000Z | 2019/Python/day02/part2.py | tymscar/AdventOfCode2018 | 9742ddb6bbbc917062baad87d6b6de75375f1ae8 | [
"MIT"
] | null | null | null | 2019/Python/day02/part2.py | tymscar/AdventOfCode2018 | 9742ddb6bbbc917062baad87d6b6de75375f1ae8 | [
"MIT"
] | 4 | 2020-12-11T22:10:24.000Z | 2021-12-25T22:39:05.000Z | import math
file = open('input.txt','r')
for line in file:
initialMemory = line.split(',')
bruteforce = -1
result = 0
while result != 19690720:
memory = initialMemory.copy()
bruteforce = bruteforce + 1
opcode = 0
pc = 0
memory[1] = int(bruteforce/100)
memory[2] = int(bruteforce%100)
while True:
opcode = memory[pc]
if int(opcode) == 99:
break
operandOne = int(memory[pc+1])
operandTwo = int(memory[pc+2])
if int(opcode) == 1:
memory[int(memory[pc+3])] = int(memory[operandOne]) + int(memory[operandTwo])
if int(opcode) == 2:
memory[int(memory[pc+3])] = int(memory [operandOne]) * int(memory[operandTwo])
pc = pc + 4
opcode = int(memory[pc])
result = memory[0]
print(bruteforce)
| 17.302326 | 81 | 0.63172 | import math
file = open('input.txt','r')
for line in file:
initialMemory = line.split(',')
bruteforce = -1
result = 0
while result != 19690720:
memory = initialMemory.copy()
bruteforce = bruteforce + 1
opcode = 0
pc = 0
memory[1] = int(bruteforce/100)
memory[2] = int(bruteforce%100)
while True:
opcode = memory[pc]
if int(opcode) == 99:
break
operandOne = int(memory[pc+1])
operandTwo = int(memory[pc+2])
if int(opcode) == 1:
memory[int(memory[pc+3])] = int(memory[operandOne]) + int(memory[operandTwo])
if int(opcode) == 2:
memory[int(memory[pc+3])] = int(memory [operandOne]) * int(memory[operandTwo])
pc = pc + 4
opcode = int(memory[pc])
result = memory[0]
print(bruteforce)
| true | true |
f734f40c8b6fc327694f4ecc097b36a67858d578 | 998 | py | Python | Python/unique-word-abbreviation.py | bssrdf/LeetCode-5 | 746df5cff523361145a74d9d429dc541a7b99910 | [
"MIT"
] | 68 | 2018-01-13T07:15:37.000Z | 2022-02-20T12:58:24.000Z | Python/unique-word-abbreviation.py | bssrdf/LeetCode-5 | 746df5cff523361145a74d9d429dc541a7b99910 | [
"MIT"
] | null | null | null | Python/unique-word-abbreviation.py | bssrdf/LeetCode-5 | 746df5cff523361145a74d9d429dc541a7b99910 | [
"MIT"
] | 63 | 2017-04-10T03:38:25.000Z | 2022-03-17T23:24:51.000Z | # Time: ctor: O(n), n is number of words in the dictionary.
# lookup: O(1)
# Space: O(k), k is number of unique words.
class ValidWordAbbr(object):
def __init__(self, dictionary):
"""
initialize your data structure here.
:type dictionary: List[str]
"""
self.lookup_ = collections.defaultdict(set)
for word in dictionary:
abbr = self.abbreviation(word)
self.lookup_[abbr].add(word)
def isUnique(self, word):
"""
check if a word is unique.
:type word: str
:rtype: bool
"""
abbr = self.abbreviation(word)
return self.lookup_[abbr] <= {word}
def abbreviation(self, word):
if len(word) <= 2:
return word
return word[0] + str(len(word)-2) + word[-1]
# Your ValidWordAbbr object will be instantiated and called as such:
# vwa = ValidWordAbbr(dictionary)
# vwa.isUnique("word")
# vwa.isUnique("anotherWord")
| 26.972973 | 68 | 0.578156 |
class ValidWordAbbr(object):
def __init__(self, dictionary):
self.lookup_ = collections.defaultdict(set)
for word in dictionary:
abbr = self.abbreviation(word)
self.lookup_[abbr].add(word)
def isUnique(self, word):
abbr = self.abbreviation(word)
return self.lookup_[abbr] <= {word}
def abbreviation(self, word):
if len(word) <= 2:
return word
return word[0] + str(len(word)-2) + word[-1]
| true | true |
f734f41506c92e5b58aa8af5f798f467d21f6f9e | 1,345 | py | Python | tests/distributed/test_workspaces/test_nonblocking.py | vishalbelsare/jina | ae72cc5ce1f7e7f4c662e72e96ea21dddc28bf43 | [
"Apache-2.0"
] | 15,179 | 2020-04-28T10:23:56.000Z | 2022-03-31T14:35:25.000Z | tests/distributed/test_workspaces/test_nonblocking.py | manavshah123/jina | f18b04eb82d18a3c554e2892bbae4b95fc0cb13e | [
"Apache-2.0"
] | 3,912 | 2020-04-28T13:01:29.000Z | 2022-03-31T14:36:46.000Z | tests/distributed/test_workspaces/test_nonblocking.py | manavshah123/jina | f18b04eb82d18a3c554e2892bbae4b95fc0cb13e | [
"Apache-2.0"
] | 1,955 | 2020-04-28T10:50:49.000Z | 2022-03-31T12:28:34.000Z | import os
import pytest
import asyncio
from jina import __default_host__
from daemon.clients import AsyncJinaDClient
cur_dir = os.path.dirname(os.path.abspath(__file__))
CLOUD_HOST = 'localhost:8000' # consider it as the staged version
success = 0
failure = 0
client = AsyncJinaDClient(host=__default_host__, port=8000)
async def get_alive():
global success, failure
while True:
is_alive = await client.alive
if is_alive:
success += 1
else:
failure += 1
@pytest.mark.asyncio
async def test_nonblocking_server():
workspace_id = await client.workspaces.create(
paths=[os.path.join(cur_dir, 'delayed_flow')]
)
alive_task = asyncio.create_task(get_alive())
create_flow_task = asyncio.create_task(
client.flows.create(workspace_id=workspace_id, filename='delayed_flow.yml')
)
done, pending = await asyncio.wait(
{alive_task, create_flow_task}, return_when=asyncio.FIRST_COMPLETED
)
assert create_flow_task in done
flow_id = create_flow_task.result()
assert alive_task in pending
alive_task.cancel()
await client.flows.delete(flow_id)
await client.workspaces.delete(workspace_id)
assert success > 0, f'#success is {success} (expected >0)'
assert failure == 0, f'#failure is {failure} (expected =0)'
| 26.9 | 83 | 0.704833 | import os
import pytest
import asyncio
from jina import __default_host__
from daemon.clients import AsyncJinaDClient
cur_dir = os.path.dirname(os.path.abspath(__file__))
CLOUD_HOST = 'localhost:8000'
success = 0
failure = 0
client = AsyncJinaDClient(host=__default_host__, port=8000)
async def get_alive():
global success, failure
while True:
is_alive = await client.alive
if is_alive:
success += 1
else:
failure += 1
@pytest.mark.asyncio
async def test_nonblocking_server():
workspace_id = await client.workspaces.create(
paths=[os.path.join(cur_dir, 'delayed_flow')]
)
alive_task = asyncio.create_task(get_alive())
create_flow_task = asyncio.create_task(
client.flows.create(workspace_id=workspace_id, filename='delayed_flow.yml')
)
done, pending = await asyncio.wait(
{alive_task, create_flow_task}, return_when=asyncio.FIRST_COMPLETED
)
assert create_flow_task in done
flow_id = create_flow_task.result()
assert alive_task in pending
alive_task.cancel()
await client.flows.delete(flow_id)
await client.workspaces.delete(workspace_id)
assert success > 0, f'#success is {success} (expected >0)'
assert failure == 0, f'#failure is {failure} (expected =0)'
| true | true |
f734f4c59aaf5760f233bff98b0f4f64a6485f4a | 3,922 | py | Python | core/controllers/beam_jobs.py | tonadev/oppia | ba55bb58763ed01c21017e3c34b72e25302f3bd8 | [
"Apache-2.0"
] | null | null | null | core/controllers/beam_jobs.py | tonadev/oppia | ba55bb58763ed01c21017e3c34b72e25302f3bd8 | [
"Apache-2.0"
] | null | null | null | core/controllers/beam_jobs.py | tonadev/oppia | ba55bb58763ed01c21017e3c34b72e25302f3bd8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers responsible for managing Apache Beam jobs."""
from __future__ import absolute_import
from __future__ import unicode_literals
from core.controllers import acl_decorators
from core.controllers import base
from core.domain import beam_job_services
import feconf
from typing import Any, Dict # isort: skip
class BeamJobHandler(base.BaseHandler):
"""Handler for getting the definitions of Apache Beam jobs."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {}
}
@acl_decorators.can_run_any_job
def get(self) -> None:
sorted_beam_jobs = sorted(
beam_job_services.get_beam_jobs(),
key=lambda j: j.name)
self.render_json({'jobs': [j.to_dict() for j in sorted_beam_jobs]})
class BeamJobRunHandler(base.BaseHandler):
"""Handler for managing the execution of Apache Beam jobs."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {},
'PUT': {
'job_name': {
'schema': {
'type': 'unicode'
}
},
},
'DELETE': {
'job_id': {
'schema': {
'type': 'unicode',
'validators': [{
'id': 'is_regex_matched',
'regex_pattern': r'[A-Za-z0-9]{22}'
}]
}
}
},
}
@acl_decorators.can_run_any_job
def get(self) -> None:
sorted_beam_job_runs = sorted(
beam_job_services.get_beam_job_runs(),
key=lambda j: j.job_updated_on,
reverse=True)
self.render_json({'runs': [r.to_dict() for r in sorted_beam_job_runs]})
@acl_decorators.can_run_any_job
def put(self) -> None:
job_name: str = (
self.normalized_payload.get('job_name')
if self.normalized_payload else '')
beam_job_run = beam_job_services.run_beam_job(job_name)
self.render_json(beam_job_run.to_dict())
@acl_decorators.can_run_any_job
def delete(self) -> None:
job_id = self.request.get('job_id')
beam_job_run = beam_job_services.cancel_beam_job(job_id)
self.render_json(beam_job_run.to_dict())
class BeamJobRunResultHandler(base.BaseHandler):
"""Handler for getting the result of Apache Beam jobs."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {
'job_id': {
'schema': {
'type': 'unicode',
'validators': [{
'id': 'is_regex_matched',
'regex_pattern': r'[A-Za-z0-9]{22}'
}]
}
}
}
}
@acl_decorators.can_run_any_job
def get(self) -> None:
job_id = self.request.get('job_id')
beam_job_run_result = beam_job_services.get_beam_job_run_result(job_id)
self.render_json(beam_job_run_result.to_dict())
| 32.413223 | 79 | 0.610658 |
from __future__ import absolute_import
from __future__ import unicode_literals
from core.controllers import acl_decorators
from core.controllers import base
from core.domain import beam_job_services
import feconf
from typing import Any, Dict
class BeamJobHandler(base.BaseHandler):
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {}
}
@acl_decorators.can_run_any_job
def get(self) -> None:
sorted_beam_jobs = sorted(
beam_job_services.get_beam_jobs(),
key=lambda j: j.name)
self.render_json({'jobs': [j.to_dict() for j in sorted_beam_jobs]})
class BeamJobRunHandler(base.BaseHandler):
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {},
'PUT': {
'job_name': {
'schema': {
'type': 'unicode'
}
},
},
'DELETE': {
'job_id': {
'schema': {
'type': 'unicode',
'validators': [{
'id': 'is_regex_matched',
'regex_pattern': r'[A-Za-z0-9]{22}'
}]
}
}
},
}
@acl_decorators.can_run_any_job
def get(self) -> None:
sorted_beam_job_runs = sorted(
beam_job_services.get_beam_job_runs(),
key=lambda j: j.job_updated_on,
reverse=True)
self.render_json({'runs': [r.to_dict() for r in sorted_beam_job_runs]})
@acl_decorators.can_run_any_job
def put(self) -> None:
job_name: str = (
self.normalized_payload.get('job_name')
if self.normalized_payload else '')
beam_job_run = beam_job_services.run_beam_job(job_name)
self.render_json(beam_job_run.to_dict())
@acl_decorators.can_run_any_job
def delete(self) -> None:
job_id = self.request.get('job_id')
beam_job_run = beam_job_services.cancel_beam_job(job_id)
self.render_json(beam_job_run.to_dict())
class BeamJobRunResultHandler(base.BaseHandler):
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
URL_PATH_ARGS_SCHEMAS: Dict[str, Any] = {}
HANDLER_ARGS_SCHEMAS: Dict[str, Any] = {
'GET': {
'job_id': {
'schema': {
'type': 'unicode',
'validators': [{
'id': 'is_regex_matched',
'regex_pattern': r'[A-Za-z0-9]{22}'
}]
}
}
}
}
@acl_decorators.can_run_any_job
def get(self) -> None:
job_id = self.request.get('job_id')
beam_job_run_result = beam_job_services.get_beam_job_run_result(job_id)
self.render_json(beam_job_run_result.to_dict())
| true | true |
f734f51521e82853a72828b8b24473e38736128d | 736 | py | Python | core/migrations/0008_auto_20210426_0828.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | core/migrations/0008_auto_20210426_0828.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | core/migrations/0008_auto_20210426_0828.py | honno/ascii-forever | 8364219db115229fa9eb0b059e9c0611dcb689cf | [
"MIT"
] | null | null | null | # Generated by Django 3.1.7 on 2021-04-26 08:28
import uuid
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("core", "0007_art_thumb_render_squashed_0008_art_uuid"),
]
operations = [
migrations.AlterField(
model_name="art",
name="thumb_render",
field=models.ImageField(
default="/home/honno/gdrive/GitHub/ascii-world/core/static/core/thumb.png",
upload_to="thumbs",
),
),
migrations.AlterField(
model_name="art",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True),
),
]
| 24.533333 | 91 | 0.585598 |
import uuid
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("core", "0007_art_thumb_render_squashed_0008_art_uuid"),
]
operations = [
migrations.AlterField(
model_name="art",
name="thumb_render",
field=models.ImageField(
default="/home/honno/gdrive/GitHub/ascii-world/core/static/core/thumb.png",
upload_to="thumbs",
),
),
migrations.AlterField(
model_name="art",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True),
),
]
| true | true |
f734f5164fe1306a6e83df0cf7c8142a2d2b6ab5 | 4,266 | py | Python | nativepython/tests/alternative_compilation_test.py | mjwoolf/nativepython | 3f469f6d3c8c0f03cb9f51eb2a851d68310c7f90 | [
"Apache-2.0"
] | null | null | null | nativepython/tests/alternative_compilation_test.py | mjwoolf/nativepython | 3f469f6d3c8c0f03cb9f51eb2a851d68310c7f90 | [
"Apache-2.0"
] | null | null | null | nativepython/tests/alternative_compilation_test.py | mjwoolf/nativepython | 3f469f6d3c8c0f03cb9f51eb2a851d68310c7f90 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Braxton Mckee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typed_python import *
import typed_python._types as _types
from nativepython.runtime import Runtime
import unittest
import time
def Compiled(f):
f = Function(f)
return Runtime.singleton().compile(f)
class TestAlternativeCompilation(unittest.TestCase):
def test_simple_alternative_passing(self):
Simple = Alternative("Simple", A={}, B={}, C={})
@Compiled
def f(s: Simple):
y = s
return y
self.assertEqual(f(Simple.A()), Simple.A())
self.assertEqual(f(Simple.B()), Simple.B())
self.assertEqual(f(Simple.C()), Simple.C())
def test_complex_alternative_passing(self):
Complex = Alternative(
"Complex",
A={'a': str, 'b': int},
B={'a': str, 'c': int},
C={'a': str, 'd': lambda: Complex}
)
c = Complex.A(a="hi", b=20)
c2 = Complex.C(a="hi", d=c)
@Compiled
def f(c: Complex):
y = c
return y
self.assertEqual(f(c), c)
self.assertEqual(f(c2), c2)
self.assertEqual(_types.refcount(c), 2)
self.assertEqual(_types.refcount(c2), 1)
def test_construct_alternative(self):
A = Alternative("A", X={'x': int})
@Compiled
def f():
return A.X(x=10)
self.assertTrue(f().matches.X)
self.assertEqual(f().x, 10)
def test_alternative_matches(self):
A = Alternative("A", X={'x': int}, Y={'x': int})
@Compiled
def f(x: A):
return x.matches.X
self.assertTrue(f(A.X()))
self.assertFalse(f(A.Y()))
def test_alternative_member_homogenous(self):
A = Alternative("A", X={'x': int}, Y={'x': int})
@Compiled
def f(x: A):
return x.x
self.assertEqual(f(A.X(x=10)), 10)
self.assertEqual(f(A.Y(x=10)), 10)
def test_alternative_member_diverse(self):
A = Alternative("A", X={'x': int}, Y={'x': float})
@Compiled
def f(x: A):
return x.x
self.assertEqual(f(A.X(x=10)), 10)
self.assertEqual(f(A.Y(x=10.5)), 10.5)
def test_alternative_member_distinct(self):
A = Alternative("A", X={'x': int}, Y={'y': float})
@Compiled
def f(x: A):
if x.matches.X:
return x.x
if x.matches.Y:
return x.y
self.assertEqual(f(A.X(x=10)), 10)
self.assertEqual(f(A.Y(y=10.5)), 10.5)
def test_matching_recursively(self):
@TypeFunction
def Tree(T):
return Alternative(
"Tree",
Leaf={'value': T},
Node={'left': Tree(T), 'right': Tree(T)}
)
def treeSum(x: Tree(int)):
matches = x.matches.Leaf
if matches:
return x.value
if x.matches.Node:
return treeSum(x.left) + treeSum(x.right)
return 0
def buildTree(depth: int, offset: int) -> Tree(int):
if depth > 0:
return Tree(int).Node(
left=buildTree(depth-1, offset),
right=buildTree(depth-1, offset+1),
)
return Tree(int).Leaf(value=offset)
aTree = Compiled(buildTree)(15, 0)
treeSumCompiled = Compiled(treeSum)
t0 = time.time()
sum = treeSum(aTree)
t1 = time.time()
sumCompiled = treeSumCompiled(aTree)
t2 = time.time()
self.assertEqual(sum, sumCompiled)
speedup = (t1-t0)/(t2-t1)
self.assertGreater(speedup, 20) # I get about 50
| 28.065789 | 76 | 0.545945 |
from typed_python import *
import typed_python._types as _types
from nativepython.runtime import Runtime
import unittest
import time
def Compiled(f):
f = Function(f)
return Runtime.singleton().compile(f)
class TestAlternativeCompilation(unittest.TestCase):
    """Exercise typed_python Alternative values through the native compiler."""

    def test_simple_alternative_passing(self):
        Simple = Alternative("Simple", A={}, B={}, C={})

        @Compiled
        def identity(s: Simple):
            y = s
            return y

        self.assertEqual(identity(Simple.A()), Simple.A())
        self.assertEqual(identity(Simple.B()), Simple.B())
        self.assertEqual(identity(Simple.C()), Simple.C())

    def test_complex_alternative_passing(self):
        Complex = Alternative(
            "Complex",
            A={'a': str, 'b': int},
            B={'a': str, 'c': int},
            C={'a': str, 'd': lambda: Complex}
        )
        c = Complex.A(a="hi", b=20)
        c2 = Complex.C(a="hi", d=c)

        @Compiled
        def identity(c: Complex):
            y = c
            return y

        self.assertEqual(identity(c), c)
        self.assertEqual(identity(c2), c2)
        # c is referenced both by the local and by c2.d; c2 only by the local.
        self.assertEqual(_types.refcount(c), 2)
        self.assertEqual(_types.refcount(c2), 1)

    def test_construct_alternative(self):
        A = Alternative("A", X={'x': int})

        @Compiled
        def make():
            return A.X(x=10)

        self.assertTrue(make().matches.X)
        self.assertEqual(make().x, 10)

    def test_alternative_matches(self):
        A = Alternative("A", X={'x': int}, Y={'x': int})

        @Compiled
        def is_x(value: A):
            return value.matches.X

        self.assertTrue(is_x(A.X()))
        self.assertFalse(is_x(A.Y()))

    def test_alternative_member_homogenous(self):
        A = Alternative("A", X={'x': int}, Y={'x': int})

        @Compiled
        def read_x(value: A):
            return value.x

        self.assertEqual(read_x(A.X(x=10)), 10)
        self.assertEqual(read_x(A.Y(x=10)), 10)

    def test_alternative_member_diverse(self):
        A = Alternative("A", X={'x': int}, Y={'x': float})

        @Compiled
        def read_x(value: A):
            return value.x

        self.assertEqual(read_x(A.X(x=10)), 10)
        self.assertEqual(read_x(A.Y(x=10.5)), 10.5)

    def test_alternative_member_distinct(self):
        A = Alternative("A", X={'x': int}, Y={'y': float})

        @Compiled
        def read(value: A):
            if value.matches.X:
                return value.x
            if value.matches.Y:
                return value.y

        self.assertEqual(read(A.X(x=10)), 10)
        self.assertEqual(read(A.Y(y=10.5)), 10.5)

    def test_matching_recursively(self):
        @TypeFunction
        def Tree(T):
            return Alternative(
                "Tree",
                Leaf={'value': T},
                Node={'left': Tree(T), 'right': Tree(T)}
            )

        def treeSum(x: Tree(int)):
            matches = x.matches.Leaf
            if matches:
                return x.value
            if x.matches.Node:
                return treeSum(x.left) + treeSum(x.right)
            return 0

        def buildTree(depth: int, offset: int) -> Tree(int):
            if depth > 0:
                return Tree(int).Node(
                    left=buildTree(depth - 1, offset),
                    right=buildTree(depth - 1, offset + 1),
                )
            return Tree(int).Leaf(value=offset)

        aTree = Compiled(buildTree)(15, 0)
        treeSumCompiled = Compiled(treeSum)

        t0 = time.time()
        interpreted_sum = treeSum(aTree)
        t1 = time.time()
        compiled_sum = treeSumCompiled(aTree)
        t2 = time.time()

        self.assertEqual(interpreted_sum, compiled_sum)
        # Timing-based assertion; inherently load-sensitive.
        speedup = (t1 - t0) / (t2 - t1)
        self.assertGreater(speedup, 20)
| true | true |
f734f6c742d13b78096f4a33478f826a3c320dcd | 1,175 | py | Python | tests/utest/test_thresholds.py | wagnerd/robotframework-robocop | a52d5843e953544da61e26df3521b219ccfc344c | [
"Apache-2.0"
] | 2 | 2021-12-22T01:50:52.000Z | 2022-01-05T06:32:27.000Z | tests/utest/test_thresholds.py | wagnerd/robotframework-robocop | a52d5843e953544da61e26df3521b219ccfc344c | [
"Apache-2.0"
] | null | null | null | tests/utest/test_thresholds.py | wagnerd/robotframework-robocop | a52d5843e953544da61e26df3521b219ccfc344c | [
"Apache-2.0"
] | 1 | 2021-06-30T11:01:51.000Z | 2021-06-30T11:01:51.000Z | import pytest
from robocop.rules import RuleSeverity, Rule
def get_severity_enum(value):
for sev in RuleSeverity:
if sev.value == value:
return sev
return RuleSeverity.INFO
def get_message_with_id_sev(rule_id, sev):
for char in RuleSeverity:
rule_id = rule_id.replace(char.value, '')
sev = get_severity_enum(sev)
msg = (
f"some-message-{rule_id}",
"Some description",
sev
)
return Rule(rule_id, msg)
class TestThresholds:
@pytest.mark.parametrize('threshold, included, excluded', [
('E', ['E'], ['I', 'W']),
('W', ['E', 'W'], ['I']),
('I', ['E', 'W', 'I'], []),
])
def test_disable_rules_below_threshold(self, threshold, included, excluded, robocop_pre_load):
robocop_pre_load.config.threshold = get_severity_enum(threshold)
for severity in included:
msg = get_message_with_id_sev('0101', severity)
assert robocop_pre_load.config.is_rule_enabled(msg)
for severity in excluded:
msg = get_message_with_id_sev('0101', severity)
assert not robocop_pre_load.config.is_rule_enabled(msg)
| 30.921053 | 98 | 0.629787 | import pytest
from robocop.rules import RuleSeverity, Rule
def get_severity_enum(value):
for sev in RuleSeverity:
if sev.value == value:
return sev
return RuleSeverity.INFO
def get_message_with_id_sev(rule_id, sev):
for char in RuleSeverity:
rule_id = rule_id.replace(char.value, '')
sev = get_severity_enum(sev)
msg = (
f"some-message-{rule_id}",
"Some description",
sev
)
return Rule(rule_id, msg)
class TestThresholds:
@pytest.mark.parametrize('threshold, included, excluded', [
('E', ['E'], ['I', 'W']),
('W', ['E', 'W'], ['I']),
('I', ['E', 'W', 'I'], []),
])
def test_disable_rules_below_threshold(self, threshold, included, excluded, robocop_pre_load):
robocop_pre_load.config.threshold = get_severity_enum(threshold)
for severity in included:
msg = get_message_with_id_sev('0101', severity)
assert robocop_pre_load.config.is_rule_enabled(msg)
for severity in excluded:
msg = get_message_with_id_sev('0101', severity)
assert not robocop_pre_load.config.is_rule_enabled(msg)
| true | true |
f734f7bd14892873b678912da3d43da15dc8efca | 654 | py | Python | deliravision/torch/models/backbones/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | 4 | 2019-08-03T09:56:50.000Z | 2019-09-05T09:32:06.000Z | deliravision/torch/models/backbones/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | 23 | 2019-08-03T14:16:47.000Z | 2019-10-22T10:15:10.000Z | deliravision/torch/models/backbones/__init__.py | delira-dev/vision_torch | d944aa67d319bd63a2add5cb89e8308413943de6 | [
"BSD-2-Clause"
] | null | null | null | __all__ = []
from .resnet import ResNetTorch
from .vgg import VGGTorch
from .alexnet import AlexNetTorch
from .squeezenet import SqueezeNetTorch
from .densenet import DenseNetTorch
from .mobilenet import MobileNetV2Torch
from .resnext import ResNeXtTorch
from .seblocks import SEBasicBlockTorch, SEBottleneckTorch, \
SEBottleneckXTorch
from .unet import UNetTorch, LinkNetTorch
__all__ += [
"AlexNetTorch",
"DenseNetTorch",
"LinkNetTorch",
"MobileNetV2Torch",
"ResNetTorch",
"ResNeXtTorch",
"SEBasicBlockTorch",
"SEBottleneckTorch",
"SEBottleneckXTorch",
"SqueezeNetTorch",
"UNetTorch",
"VGGTorch",
] | 24.222222 | 61 | 0.743119 | __all__ = []
from .resnet import ResNetTorch
from .vgg import VGGTorch
from .alexnet import AlexNetTorch
from .squeezenet import SqueezeNetTorch
from .densenet import DenseNetTorch
from .mobilenet import MobileNetV2Torch
from .resnext import ResNeXtTorch
from .seblocks import SEBasicBlockTorch, SEBottleneckTorch, \
SEBottleneckXTorch
from .unet import UNetTorch, LinkNetTorch
__all__ += [
"AlexNetTorch",
"DenseNetTorch",
"LinkNetTorch",
"MobileNetV2Torch",
"ResNetTorch",
"ResNeXtTorch",
"SEBasicBlockTorch",
"SEBottleneckTorch",
"SEBottleneckXTorch",
"SqueezeNetTorch",
"UNetTorch",
"VGGTorch",
] | true | true |
f734f824d58a2eac60206f7d5648be51ce1cd66b | 1,018 | py | Python | app/models/user.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | 1 | 2021-09-14T18:40:33.000Z | 2021-09-14T18:40:33.000Z | app/models/user.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | 7 | 2021-04-23T13:41:39.000Z | 2021-08-12T09:33:10.000Z | app/models/user.py | qtdemo1/ibm-ops | 29f3d4ba406a1c39a007468977784d6c39f056bb | [
"Apache-2.0"
] | 5 | 2020-12-10T14:27:23.000Z | 2022-03-29T08:44:22.000Z | #!/usr/bin/env python3
#
# Copyright 2020 IBM
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.IBM Confidential
#
import sqlalchemy as sql
import app.db.base_class as base_class
class User(base_class.Base):
    """ORM model for an application user account."""

    # Surrogate integer primary key.
    id = sql.Column('id', sql.Integer, primary_key=True, unique=True, index=True, nullable=False)
    # Login name; fixed-width NCHAR(32), unique across users.
    username = sql.Column(sql.NCHAR(32), unique=True, index=True, nullable=False)
    # Stored password hash (string form, up to 128 chars); never plaintext.
    hashed_password = sql.Column(sql.String(128), nullable=False)
import sqlalchemy as sql
import app.db.base_class as base_class
class User(base_class.Base):
    """Persistent user record."""

    # Auto-indexed integer primary key.
    id = sql.Column('id', sql.Integer, nullable=False, unique=True, index=True, primary_key=True)
    # Unique, indexed login name (NCHAR 32).
    username = sql.Column(sql.NCHAR(32), nullable=False, index=True, unique=True)
    # Password hash as a string column of length 128.
    hashed_password = sql.Column(sql.String(128), nullable=False)
f734f8778bf4b8e5ff157ae685a7c022d954b83a | 1,502 | py | Python | heat/engine/clients/os/aodh.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 1 | 2018-07-04T07:59:26.000Z | 2018-07-04T07:59:26.000Z | heat/engine/clients/os/aodh.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | heat/engine/clients/os/aodh.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 3 | 2018-07-19T17:43:37.000Z | 2019-11-15T22:13:30.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from aodhclient import client as ac
from aodhclient import exceptions
from heat.engine.clients import client_plugin
CLIENT_NAME = 'aodh'
class AodhClientPlugin(client_plugin.ClientPlugin):
    """Heat client plugin for the Aodh (alarming) service."""

    exceptions_module = exceptions

    # Chained unpacking: ALARMING = 'alarming' and service_types = ['alarming'].
    service_types = [ALARMING] = ['alarming']
    supported_versions = [V2] = ['2']
    default_version = V2

    def _create(self, version=None):
        """Build an aodhclient Client bound to this context's Keystone session."""
        endpoint_interface = self._get_client_option(CLIENT_NAME, 'endpoint_type')
        return ac.Client(
            version,
            session=self.context.keystone_session,
            interface=endpoint_interface,
            service_type=self.ALARMING,
            region_name=self._get_region_name())

    def is_not_found(self, ex):
        """True when *ex* is the Aodh NotFound exception."""
        return isinstance(ex, exceptions.NotFound)

    def is_over_limit(self, ex):
        """True when *ex* is the Aodh OverLimit exception."""
        return isinstance(ex, exceptions.OverLimit)

    def is_conflict(self, ex):
        """True when *ex* is the Aodh Conflict exception."""
        return isinstance(ex, exceptions.Conflict)
from aodhclient import client as ac
from aodhclient import exceptions
from heat.engine.clients import client_plugin
CLIENT_NAME = 'aodh'
class AodhClientPlugin(client_plugin.ClientPlugin):
    """Client plugin exposing the telemetry-alarming (Aodh) API to Heat."""

    exceptions_module = exceptions

    # [NAME] = [value] assigns both the list attribute and the unpacked constant.
    service_types = [ALARMING] = ['alarming']
    supported_versions = [V2] = ['2']
    default_version = V2

    def _create(self, version=None):
        """Create the underlying aodhclient Client for this stack context."""
        return ac.Client(
            version,
            session=self.context.keystone_session,
            interface=self._get_client_option(CLIENT_NAME, 'endpoint_type'),
            service_type=self.ALARMING,
            region_name=self._get_region_name())

    def is_not_found(self, ex):
        """Classify *ex* as a missing-resource error."""
        return isinstance(ex, exceptions.NotFound)

    def is_over_limit(self, ex):
        """Classify *ex* as an over-limit error."""
        return isinstance(ex, exceptions.OverLimit)

    def is_conflict(self, ex):
        """Classify *ex* as a conflict error."""
        return isinstance(ex, exceptions.Conflict)
f734f997599f10b45b5a2ef8338f9a454cceefe8 | 28,143 | py | Python | sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py | dmarx/azure-sdk-for-python | 86ac35b947c0ed3d5edb1cac03f5ad20a34a6fda | [
"MIT"
] | 1 | 2021-09-07T18:43:20.000Z | 2021-09-07T18:43:20.000Z | sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py | dmarx/azure-sdk-for-python | 86ac35b947c0ed3d5edb1cac03f5ad20a34a6fda | [
"MIT"
] | 2 | 2021-11-03T06:10:36.000Z | 2021-12-01T06:29:39.000Z | sdk/textanalytics/azure-ai-textanalytics/tests/test_recognize_pii_entities_async.py | msyyc/azure-sdk-for-python | e2dba75181f8b4336ae57e75aa391322c12c3123 | [
"MIT"
] | null | null | null | # coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import os
import pytest
import platform
import functools
from azure.core.exceptions import HttpResponseError, ClientAuthenticationError
from azure.core.credentials import AzureKeyCredential
from asynctestcase import AsyncTextAnalyticsTest
from testcase import GlobalTextAnalyticsAccountPreparer
from testcase import TextAnalyticsClientPreparer as _TextAnalyticsClientPreparer
from azure.ai.textanalytics.aio import TextAnalyticsClient
from azure.ai.textanalytics import (
TextDocumentInput,
VERSION,
TextAnalyticsApiVersion,
PiiEntityDomainType,
)
# Pre-apply the client_cls positional argument so the decorators below
# only need to pass per-test overrides (e.g. client_kwargs).
TextAnalyticsClientPreparer = functools.partial(_TextAnalyticsClientPreparer, TextAnalyticsClient)
class TestRecognizePIIEntities(AsyncTextAnalyticsTest):
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_no_single_input(self, client):
    """A bare string is rejected: the endpoint accepts only a list of documents."""
    with self.assertRaises(TypeError):
        await client.recognize_pii_entities("hello world")
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_dict(self, client):
    """Dict-shaped documents are recognized; each result carries id, stats and entities."""
    documents = [
        {"id": "1", "text": "My SSN is 859-98-0987."},
        {"id": "2", "text": "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."},
        {"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"},
    ]

    results = await client.recognize_pii_entities(documents, show_stats=True)

    self.assertEqual(results[0].entities[0].text, "859-98-0987")
    self.assertEqual(results[0].entities[0].category, "U.S. Social Security Number (SSN)")
    self.assertEqual(results[1].entities[0].text, "111000025")
    # Category for the ABA number is not asserted: the service currently labels it PhoneNumber.
    self.assertEqual(results[2].entities[0].text, "998.214.865-68")
    self.assertEqual(results[2].entities[0].category, "Brazil CPF Number")
    for doc_result in results:
        self.assertIsNotNone(doc_result.id)
        self.assertIsNotNone(doc_result.statistics)
        for entity in doc_result.entities:
            self.assertIsNotNone(entity.text)
            self.assertIsNotNone(entity.category)
            self.assertIsNotNone(entity.offset)
            self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_text_document_input(self, client):
    """TextDocumentInput objects are recognized with the same results as dict documents."""
    documents = [
        TextDocumentInput(id="1", text="My SSN is 859-98-0987."),
        TextDocumentInput(id="2", text="Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."),
        TextDocumentInput(id="3", text="Is 998.214.865-68 your Brazilian CPF number?"),
    ]

    results = await client.recognize_pii_entities(documents, show_stats=True)

    self.assertEqual(results[0].entities[0].text, "859-98-0987")
    self.assertEqual(results[0].entities[0].category, "U.S. Social Security Number (SSN)")
    self.assertEqual(results[1].entities[0].text, "111000025")
    # ABA category not asserted: the service currently returns PhoneNumber here.
    self.assertEqual(results[2].entities[0].text, "998.214.865-68")
    self.assertEqual(results[2].entities[0].category, "Brazil CPF Number")
    for doc_result in results:
        self.assertIsNotNone(doc_result.id)
        self.assertIsNotNone(doc_result.statistics)
        for entity in doc_result.entities:
            self.assertIsNotNone(entity.text)
            self.assertIsNotNone(entity.category)
            self.assertIsNotNone(entity.offset)
            self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_only_string(self, client):
    """Plain strings are accepted; an empty string yields an error result in place."""
    documents = [
        u"My SSN is 859-98-0987.",
        u"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.",
        u"Is 998.214.865-68 your Brazilian CPF number?",
        u"",
    ]

    results = await client.recognize_pii_entities(documents, show_stats=True)

    self.assertEqual(results[0].entities[0].text, "859-98-0987")
    self.assertEqual(results[0].entities[0].category, "U.S. Social Security Number (SSN)")
    self.assertEqual(results[1].entities[0].text, "111000025")
    # ABA category not asserted: the service currently returns PhoneNumber here.
    self.assertEqual(results[2].entities[0].text, "998.214.865-68")
    self.assertEqual(results[2].entities[0].category, "Brazil CPF Number")
    self.assertTrue(results[3].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_some_errors(self, client):
    """Per-document errors are isolated: valid documents still succeed."""
    documents = [
        {"id": "1", "language": "es", "text": "hola"},
        {"id": "2", "text": ""},
        {"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"},
    ]

    results = await client.recognize_pii_entities(documents)

    self.assertTrue(results[0].is_error)
    self.assertTrue(results[1].is_error)
    self.assertFalse(results[2].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_all_errors(self, client):
    """Every document failing still yields a full list of error results."""
    documents = [
        {"id": "1", "text": ""},
        {"id": "2", "language": "Spanish", "text": "Hola"},
        {"id": "3", "language": "de", "text": ""},
    ]

    results = await client.recognize_pii_entities(documents)

    for doc_result in results:
        self.assertTrue(doc_result.is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_too_many_documents(self, client):
    """Exceeding the per-batch document limit raises a 400 InvalidDocumentBatch."""
    documents = ["One", "Two", "Three", "Four", "Five", "Six"]

    with pytest.raises(HttpResponseError) as excinfo:
        await client.recognize_pii_entities(documents)
    assert excinfo.value.status_code == 400
    assert excinfo.value.error.code == "InvalidDocumentBatch"
    assert "Batch request contains too many records" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_output_same_order_as_input(self, client):
    """Results come back in the same order the documents were submitted."""
    documents = [
        TextDocumentInput(id="1", text="one"),
        TextDocumentInput(id="2", text="two"),
        TextDocumentInput(id="3", text="three"),
        TextDocumentInput(id="4", text="four"),
        TextDocumentInput(id="5", text="five"),
    ]

    results = await client.recognize_pii_entities(documents)

    for expected_id, doc_result in enumerate(results, start=1):
        self.assertEqual(str(expected_id), doc_result.id)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": ""})
async def test_empty_credential_class(self, client):
    """An empty account key must raise ClientAuthenticationError."""
    with self.assertRaises(ClientAuthenticationError):
        await client.recognize_pii_entities(
            ["This is written in English."]
        )
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": "xxxxxxxxxxxx"})
async def test_bad_credentials(self, client):
    """A wrong account key must raise ClientAuthenticationError."""
    with self.assertRaises(ClientAuthenticationError):
        await client.recognize_pii_entities(
            ["This is written in English."]
        )
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_document_input(self, client):
    """Passing a single string instead of a list raises TypeError."""
    with self.assertRaises(TypeError):
        await client.recognize_pii_entities("This is the wrong type")
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_mixing_inputs(self, client):
    """Mixing dicts, TextDocumentInput and raw strings in one batch raises TypeError."""
    documents = [
        {"id": "1", "text": "Microsoft was founded by Bill Gates and Paul Allen."},
        TextDocumentInput(id="2", text="I did not like the hotel we stayed at. It was too expensive."),
        u"You cannot mix string input with the above inputs",
    ]
    with self.assertRaises(TypeError):
        await client.recognize_pii_entities(documents)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_out_of_order_ids(self, client):
    """Non-sequential ids are echoed back in submission order, errors included."""
    documents = [
        {"id": "56", "text": ":)"},
        {"id": "0", "text": ":("},
        {"id": "22", "text": ""},
        {"id": "19", "text": ":P"},
        {"id": "1", "text": ":D"},
    ]

    results = await client.recognize_pii_entities(documents)

    for doc_result, expected_id in zip(results, ["56", "0", "22", "19", "1"]):
        self.assertEqual(doc_result.id, expected_id)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_show_stats_and_model_version(self, client):
    """show_stats=True populates batch statistics; the empty doc counts as erroneous."""
    def check_stats(response):
        self.assertIsNotNone(response)
        self.assertIsNotNone(response.model_version, msg=response.raw_response)
        self.assertIsNotNone(response.raw_response)
        self.assertEqual(response.statistics.document_count, 5)
        self.assertEqual(response.statistics.transaction_count, 4)
        self.assertEqual(response.statistics.valid_document_count, 4)
        self.assertEqual(response.statistics.erroneous_document_count, 1)

    documents = [
        {"id": "56", "text": ":)"},
        {"id": "0", "text": ":("},
        {"id": "22", "text": ""},
        {"id": "19", "text": ":P"},
        {"id": "1", "text": ":D"},
    ]

    await client.recognize_pii_entities(
        documents,
        show_stats=True,
        model_version="latest",
        raw_response_hook=check_stats
    )
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit(self, client):
    """A batch of 1050 documents exceeds the service limit and raises."""
    documents = [u"hello world"] * 1050
    with self.assertRaises(HttpResponseError):
        await client.recognize_pii_entities(documents)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint(self, client):
    """A batch-level language hint is applied to every document in the request body."""
    def check_request(resp):
        occurrences = resp.http_request.body.count('"language": "fr"')
        self.assertEqual(occurrences, 3)

    documents = [
        u"This was the best day of my life.",
        u"I did not like the hotel we stayed at. It was too expensive.",
        u"The restaurant was not as good as I hoped.",
    ]

    await client.recognize_pii_entities(documents, language="fr", raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_dont_use_language_hint(self, client):
    """language='' clears the hint: every document is sent with an empty language."""
    def check_request(resp):
        occurrences = resp.http_request.body.count('"language": ""')
        self.assertEqual(occurrences, 3)

    documents = [
        u"This was the best day of my life.",
        u"I did not like the hotel we stayed at. It was too expensive.",
        u"The restaurant was not as good as I hoped.",
    ]

    await client.recognize_pii_entities(documents, language="", raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_per_item_dont_use_language_hint(self, client):
    """Per-document empty language overrides; the unhinted doc falls back to 'en'."""
    def check_request(resp):
        self.assertEqual(resp.http_request.body.count('"language": ""'), 2)
        self.assertEqual(resp.http_request.body.count('"language": "en"'), 1)

    documents = [
        {"id": "1", "language": "", "text": "I will go to the park."},
        {"id": "2", "language": "", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": "The restaurant had really good food."},
    ]

    await client.recognize_pii_entities(documents, raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_input(self, client):
    """The batch hint applies to TextDocumentInput objects that set no language."""
    def check_request(resp):
        self.assertEqual(resp.http_request.body.count('"language": "de"'), 3)

    documents = [
        TextDocumentInput(id="1", text="I should take my cat to the veterinarian."),
        TextDocumentInput(id="4", text="Este es un document escrito en Español."),
        TextDocumentInput(id="3", text="猫は幸せ"),
    ]

    await client.recognize_pii_entities(documents, language="de", raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_per_item_hints(self, client):
    """Per-object language wins over the batch hint; unhinted objects use the batch hint."""
    def check_request(resp):
        self.assertEqual(resp.http_request.body.count('"language": "es"'), 2)
        self.assertEqual(resp.http_request.body.count('"language": "en"'), 1)

    documents = [
        TextDocumentInput(id="1", text="I should take my cat to the veterinarian.", language="es"),
        TextDocumentInput(id="2", text="Este es un document escrito en Español.", language="es"),
        TextDocumentInput(id="3", text="猫は幸せ"),
    ]

    await client.recognize_pii_entities(documents, language="en", raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_dict_per_item_hints(self, client):
    """Per-dict language wins over the batch hint; unhinted dicts use the batch hint."""
    def check_request(resp):
        self.assertEqual(resp.http_request.body.count('"language": "es"'), 2)
        self.assertEqual(resp.http_request.body.count('"language": "en"'), 1)

    documents = [
        {"id": "1", "language": "es", "text": "I will go to the park."},
        {"id": "2", "language": "es", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": "The restaurant had really good food."},
    ]

    await client.recognize_pii_entities(documents, language="en", raw_response_hook=check_request)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"default_language": "es"})
async def test_client_passed_default_language_hint(self, client):
    """The client-level default language applies unless a call-level hint overrides it."""
    def expect_es(resp):
        self.assertEqual(resp.http_request.body.count('"language": "es"'), 3)

    def expect_en(resp):
        self.assertEqual(resp.http_request.body.count('"language": "en"'), 3)

    documents = [
        {"id": "1", "text": "I will go to the park."},
        {"id": "2", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": "The restaurant had really good food."},
    ]

    await client.recognize_pii_entities(documents, raw_response_hook=expect_es)
    await client.recognize_pii_entities(documents, language="en", raw_response_hook=expect_en)
    # Default is restored once the per-call override is gone.
    await client.recognize_pii_entities(documents, raw_response_hook=expect_es)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_method(self, client):
    """An invalid call-level language hint produces an UnsupportedLanguageCode error result."""
    results = await client.recognize_pii_entities(
        ["This should fail because we're passing in an invalid language hint"], language="notalanguage"
    )
    self.assertEqual(results[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_docs(self, client):
    """An invalid per-document language produces an UnsupportedLanguageCode error result."""
    results = await client.recognize_pii_entities(
        [{"id": "1", "language": "notalanguage", "text": "This should fail because we're passing in an invalid language hint"}]
    )
    self.assertEqual(results[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
async def test_rotate_subscription_key(self, resource_group, location, text_analytics_account, text_analytics_account_key):
    """AzureKeyCredential.update rotates the key in place on a live client."""
    credential = AzureKeyCredential(text_analytics_account_key)
    client = TextAnalyticsClient(text_analytics_account, credential)

    documents = [
        {"id": "1", "text": "I will go to the park."},
        {"id": "2", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": "The restaurant had really good food."},
    ]

    self.assertIsNotNone(await client.recognize_pii_entities(documents))

    credential.update("xxx")  # make authentication fail
    with self.assertRaises(ClientAuthenticationError):
        await client.recognize_pii_entities(documents)

    credential.update(text_analytics_account_key)  # authenticate successfully again
    self.assertIsNotNone(await client.recognize_pii_entities(documents))
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_user_agent(self, client):
    """The SDK stamps its versioned User-Agent on every request."""
    def check_user_agent(resp):
        expected = "azsdk-python-ai-textanalytics/{} Python/{} ({})".format(
            VERSION, platform.python_version(), platform.platform())
        self.assertIn(expected, resp.http_request.headers["User-Agent"])

    documents = [
        {"id": "1", "text": "I will go to the park."},
        {"id": "2", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": "The restaurant had really good food."},
    ]

    await client.recognize_pii_entities(documents, raw_response_hook=check_user_agent)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_no_result_attribute(self, client):
    """Accessing a result-only attribute on a DocumentError raises a descriptive AttributeError."""
    results = await client.recognize_pii_entities([{"id": "1", "text": ""}])

    # DocumentError exposes id and error.
    self.assertTrue(results[0].is_error)
    self.assertEqual(results[0].id, "1")
    self.assertIsNotNone(results[0].error)

    # Accessing .entities triggers the custom, service-aware error message.
    try:
        results[0].entities
    except AttributeError as custom_error:
        self.assertEqual(
            custom_error.args[0],
            '\'DocumentError\' object has no attribute \'entities\'. '
            'The service was unable to process this document:\nDocument Id: 1\nError: '
            'InvalidDocument - Document text is empty.\n'
        )
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_nonexistent_attribute(self, client):
    """Unknown attributes on a DocumentError raise the default AttributeError message."""
    results = await client.recognize_pii_entities([{"id": "1", "text": ""}])

    try:
        results[0].attribute_not_on_result_or_error
    except AttributeError as default_behavior:
        self.assertEqual(
            default_behavior.args[0],
            '\'DocumentError\' object has no attribute \'attribute_not_on_result_or_error\''
        )
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_model_version_error(self, client):
    """An unknown model_version is rejected with ModelVersionIncorrect."""
    documents = [{"id": "1", "language": "english", "text": "I did not like the hotel we stayed at."}]
    try:
        await client.recognize_pii_entities(documents, model_version="bad")
    except HttpResponseError as err:
        self.assertEqual(err.error.code, "ModelVersionIncorrect")
        self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_errors(self, client):
    """Empty text, bad language code and over-long text each yield a per-document error.

    Fix: the 5121-character over-limit text was previously built one character
    at a time in a loop (quadratic string concatenation); replaced with string
    repetition, producing the identical string.
    """
    over_limit_text = "x" * 5121  # one char past the per-document size limit

    documents = [
        {"id": "1", "text": ""},
        {"id": "2", "language": "english", "text": "I did not like the hotel we stayed at."},
        {"id": "3", "text": over_limit_text},
    ]

    doc_errors = await client.recognize_pii_entities(documents)

    self.assertEqual(doc_errors[0].error.code, "InvalidDocument")
    self.assertIsNotNone(doc_errors[0].error.message)
    self.assertEqual(doc_errors[1].error.code, "UnsupportedLanguageCode")
    self.assertIsNotNone(doc_errors[1].error.message)
    self.assertEqual(doc_errors[2].error.code, "InvalidDocument")
    self.assertIsNotNone(doc_errors[2].error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_warnings(self, client):
    """recognize_pii_entities currently returns no warnings; update when the service adds them."""
    documents = [
        {"id": "1", "text": "This won't actually create a warning :'("},
    ]

    results = await client.recognize_pii_entities(documents)
    for doc_result in results:
        self.assertEqual(len(doc_result.warnings), 0)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_not_passing_list_for_docs(self, client):
    """A single dict (instead of a list of dicts) is rejected with a clear TypeError."""
    with pytest.raises(TypeError) as excinfo:
        await client.recognize_pii_entities({"id": "1", "text": "hello world"})
    assert "Input documents cannot be a dict" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_missing_input_records_error(self, client):
    """An empty document list is rejected client-side with ValueError."""
    with pytest.raises(ValueError) as excinfo:
        await client.recognize_pii_entities([])
    assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_none_docs(self, client):
    """None as the document collection is rejected client-side with ValueError."""
    with pytest.raises(ValueError) as excinfo:
        await client.recognize_pii_entities(None)
    assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_duplicate_ids_error(self, client):
    """Duplicate document ids in one batch are rejected with InvalidDocument."""
    documents = [
        {"id": "1", "text": "hello world"},
        {"id": "1", "text": "I did not like the hotel we stayed at."},
    ]
    try:
        await client.recognize_pii_entities(documents)
    except HttpResponseError as err:
        self.assertEqual(err.error.code, "InvalidDocument")
        self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit_error(self, client):
    """A 1001-document batch is rejected with InvalidDocumentBatch."""
    documents = [u"hello world"] * 1001
    try:
        await client.recognize_pii_entities(documents)
    except HttpResponseError as err:
        self.assertEqual(err.error.code, "InvalidDocumentBatch")
        self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_pass_cls(self, client):
def callback(pipeline_response, deserialized, _):
return "cls result"
res = await client.recognize_pii_entities(
documents=["Test passing cls to endpoint"],
cls=callback
)
assert res == "cls result"
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_language_kwarg_english(self, client):
def callback(response):
language_str = "\"language\": \"en\""
self.assertEqual(response.http_request.body.count(language_str), 1)
self.assertIsNotNone(response.model_version)
self.assertIsNotNone(response.statistics)
res = await client.recognize_pii_entities(
documents=["Bill Gates is the CEO of Microsoft."],
model_version="latest",
show_stats=True,
language="en",
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_recognize_pii_entities_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"])
assert "'recognize_pii_entities' endpoint is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_redacted_text(self, client):
result = await client.recognize_pii_entities(["My SSN is 859-98-0987."])
self.assertEqual("My SSN is ***********.", result[0].redacted_text)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_phi_domain_filter(self, client):
# without the domain filter, this should return two entities: Microsoft as an org,
# and the phone number. With the domain filter, it should only return one.
result = await client.recognize_pii_entities(
["I work at Microsoft and my phone number is 333-333-3333"],
domain_filter=PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION
)
self.assertEqual(len(result[0].entities), 1)
self.assertEqual(result[0].entities[0].text, '333-333-3333')
self.assertEqual(result[0].entities[0].category, 'Phone Number')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_string_index_type_explicit_fails_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"], string_index_type="UnicodeCodePoint")
assert "'string_index_type' is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_default_string_index_type_is_UnicodeCodePoint(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "UnicodeCodePoint")
res = await client.recognize_pii_entities(
documents=["Hello world"],
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_explicit_set_string_index_type(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "TextElements_v8")
res = await client.recognize_pii_entities(
documents=["Hello world"],
string_index_type="TextElements_v8",
raw_response_hook=callback
) | 45.318841 | 152 | 0.656291 |
import os
import pytest
import platform
import functools
from azure.core.exceptions import HttpResponseError, ClientAuthenticationError
from azure.core.credentials import AzureKeyCredential
from asynctestcase import AsyncTextAnalyticsTest
from testcase import GlobalTextAnalyticsAccountPreparer
from testcase import TextAnalyticsClientPreparer as _TextAnalyticsClientPreparer
from azure.ai.textanalytics.aio import TextAnalyticsClient
from azure.ai.textanalytics import (
TextDocumentInput,
VERSION,
TextAnalyticsApiVersion,
PiiEntityDomainType,
)
# the first one
TextAnalyticsClientPreparer = functools.partial(_TextAnalyticsClientPreparer, TextAnalyticsClient)
class TestRecognizePIIEntities(AsyncTextAnalyticsTest):
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_no_single_input(self, client):
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities("hello world")
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_dict(self, client):
docs = [{"id": "1", "text": "My SSN is 859-98-0987."},
{"id": "2", "text": "Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."},
{"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"}]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
for doc in response:
self.assertIsNotNone(doc.id)
self.assertIsNotNone(doc.statistics)
for entity in doc.entities:
self.assertIsNotNone(entity.text)
self.assertIsNotNone(entity.category)
self.assertIsNotNone(entity.offset)
self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_all_successful_passing_text_document_input(self, client):
docs = [
TextDocumentInput(id="1", text="My SSN is 859-98-0987."),
TextDocumentInput(id="2", text="Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check."),
TextDocumentInput(id="3", text="Is 998.214.865-68 your Brazilian CPF number?")
]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
for doc in response:
self.assertIsNotNone(doc.id)
self.assertIsNotNone(doc.statistics)
for entity in doc.entities:
self.assertIsNotNone(entity.text)
self.assertIsNotNone(entity.category)
self.assertIsNotNone(entity.offset)
self.assertIsNotNone(entity.confidence_score)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_only_string(self, client):
docs = [
u"My SSN is 859-98-0987.",
u"Your ABA number - 111000025 - is the first 9 digits in the lower left hand corner of your personal check.",
u"Is 998.214.865-68 your Brazilian CPF number?",
u""
]
response = await client.recognize_pii_entities(docs, show_stats=True)
self.assertEqual(response[0].entities[0].text, "859-98-0987")
self.assertEqual(response[0].entities[0].category, "U.S. Social Security Number (SSN)")
self.assertEqual(response[1].entities[0].text, "111000025")
# self.assertEqual(response[1].entities[0].category, "ABA Routing Number") # Service is currently returning PhoneNumber here
self.assertEqual(response[2].entities[0].text, "998.214.865-68")
self.assertEqual(response[2].entities[0].category, "Brazil CPF Number")
self.assertTrue(response[3].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_some_errors(self, client):
docs = [{"id": "1", "language": "es", "text": "hola"},
{"id": "2", "text": ""},
{"id": "3", "text": "Is 998.214.865-68 your Brazilian CPF number?"}]
response = await client.recognize_pii_entities(docs)
self.assertTrue(response[0].is_error)
self.assertTrue(response[1].is_error)
self.assertFalse(response[2].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_input_with_all_errors(self, client):
docs = [{"id": "1", "text": ""},
{"id": "2", "language": "Spanish", "text": "Hola"},
{"id": "3", "language": "de", "text": ""}]
response = await client.recognize_pii_entities(docs)
self.assertTrue(response[0].is_error)
self.assertTrue(response[1].is_error)
self.assertTrue(response[2].is_error)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_too_many_documents(self, client):
docs = ["One", "Two", "Three", "Four", "Five", "Six"]
with pytest.raises(HttpResponseError) as excinfo:
await client.recognize_pii_entities(docs)
assert excinfo.value.status_code == 400
assert excinfo.value.error.code == "InvalidDocumentBatch"
assert "Batch request contains too many records" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_output_same_order_as_input(self, client):
docs = [
TextDocumentInput(id="1", text="one"),
TextDocumentInput(id="2", text="two"),
TextDocumentInput(id="3", text="three"),
TextDocumentInput(id="4", text="four"),
TextDocumentInput(id="5", text="five")
]
response = await client.recognize_pii_entities(docs)
for idx, doc in enumerate(response):
self.assertEqual(str(idx + 1), doc.id)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": ""})
async def test_empty_credential_class(self, client):
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(
["This is written in English."]
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"text_analytics_account_key": "xxxxxxxxxxxx"})
async def test_bad_credentials(self, client):
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(
["This is written in English."]
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_document_input(self, client):
docs = "This is the wrong type"
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_mixing_inputs(self, client):
docs = [
{"id": "1", "text": "Microsoft was founded by Bill Gates and Paul Allen."},
TextDocumentInput(id="2", text="I did not like the hotel we stayed at. It was too expensive."),
u"You cannot mix string input with the above inputs"
]
with self.assertRaises(TypeError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_out_of_order_ids(self, client):
docs = [{"id": "56", "text": ":)"},
{"id": "0", "text": ":("},
{"id": "22", "text": ""},
{"id": "19", "text": ":P"},
{"id": "1", "text": ":D"}]
response = await client.recognize_pii_entities(docs)
in_order = ["56", "0", "22", "19", "1"]
for idx, resp in enumerate(response):
self.assertEqual(resp.id, in_order[idx])
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_show_stats_and_model_version(self, client):
def callback(response):
self.assertIsNotNone(response)
self.assertIsNotNone(response.model_version, msg=response.raw_response)
self.assertIsNotNone(response.raw_response)
self.assertEqual(response.statistics.document_count, 5)
self.assertEqual(response.statistics.transaction_count, 4)
self.assertEqual(response.statistics.valid_document_count, 4)
self.assertEqual(response.statistics.erroneous_document_count, 1)
docs = [{"id": "56", "text": ":)"},
{"id": "0", "text": ":("},
{"id": "22", "text": ""},
{"id": "19", "text": ":P"},
{"id": "1", "text": ":D"}]
response = await client.recognize_pii_entities(
docs,
show_stats=True,
model_version="latest",
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit(self, client):
docs = [u"hello world"] * 1050
with self.assertRaises(HttpResponseError):
response = await client.recognize_pii_entities(docs)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"fr\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
u"This was the best day of my life.",
u"I did not like the hotel we stayed at. It was too expensive.",
u"The restaurant was not as good as I hoped."
]
response = await client.recognize_pii_entities(docs, language="fr", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_dont_use_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
u"This was the best day of my life.",
u"I did not like the hotel we stayed at. It was too expensive.",
u"The restaurant was not as good as I hoped."
]
response = await client.recognize_pii_entities(docs, language="", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_per_item_dont_use_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [{"id": "1", "language": "", "text": "I will go to the park."},
{"id": "2", "language": "", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_input(self, client):
def callback(resp):
language_str = "\"language\": \"de\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [
TextDocumentInput(id="1", text="I should take my cat to the veterinarian."),
TextDocumentInput(id="4", text="Este es un document escrito en Español."),
TextDocumentInput(id="3", text="猫は幸せ"),
]
response = await client.recognize_pii_entities(docs, language="de", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_obj_per_item_hints(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [
TextDocumentInput(id="1", text="I should take my cat to the veterinarian.", language="es"),
TextDocumentInput(id="2", text="Este es un document escrito en Español.", language="es"),
TextDocumentInput(id="3", text="猫は幸せ"),
]
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_whole_batch_language_hint_and_dict_per_item_hints(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 2)
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 1)
docs = [{"id": "1", "language": "es", "text": "I will go to the park."},
{"id": "2", "language": "es", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"default_language": "es"})
async def test_client_passed_default_language_hint(self, client):
def callback(resp):
language_str = "\"language\": \"es\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
def callback_2(resp):
language_str = "\"language\": \"en\""
language = resp.http_request.body.count(language_str)
self.assertEqual(language, 3)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
response = await client.recognize_pii_entities(docs, language="en", raw_response_hook=callback_2)
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_method(self, client):
response = await client.recognize_pii_entities(
["This should fail because we're passing in an invalid language hint"], language="notalanguage"
)
self.assertEqual(response[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_invalid_language_hint_docs(self, client):
response = await client.recognize_pii_entities(
[{"id": "1", "language": "notalanguage", "text": "This should fail because we're passing in an invalid language hint"}]
)
self.assertEqual(response[0].error.code, 'UnsupportedLanguageCode')
@GlobalTextAnalyticsAccountPreparer()
async def test_rotate_subscription_key(self, resource_group, location, text_analytics_account, text_analytics_account_key):
credential = AzureKeyCredential(text_analytics_account_key)
client = TextAnalyticsClient(text_analytics_account, credential)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs)
self.assertIsNotNone(response)
credential.update("xxx") # Make authentication fail
with self.assertRaises(ClientAuthenticationError):
response = await client.recognize_pii_entities(docs)
credential.update(text_analytics_account_key) # Authenticate successfully again
response = await client.recognize_pii_entities(docs)
self.assertIsNotNone(response)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_user_agent(self, client):
def callback(resp):
self.assertIn("azsdk-python-ai-textanalytics/{} Python/{} ({})".format(
VERSION, platform.python_version(), platform.platform()),
resp.http_request.headers["User-Agent"]
)
docs = [{"id": "1", "text": "I will go to the park."},
{"id": "2", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": "The restaurant had really good food."}]
response = await client.recognize_pii_entities(docs, raw_response_hook=callback)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_no_result_attribute(self, client):
docs = [{"id": "1", "text": ""}]
response = await client.recognize_pii_entities(docs)
# Attributes on DocumentError
self.assertTrue(response[0].is_error)
self.assertEqual(response[0].id, "1")
self.assertIsNotNone(response[0].error)
# Result attribute not on DocumentError, custom error message
try:
entities = response[0].entities
except AttributeError as custom_error:
self.assertEqual(
custom_error.args[0],
'\'DocumentError\' object has no attribute \'entities\'. '
'The service was unable to process this document:\nDocument Id: 1\nError: '
'InvalidDocument - Document text is empty.\n'
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_attribute_error_nonexistent_attribute(self, client):
docs = [{"id": "1", "text": ""}]
response = await client.recognize_pii_entities(docs)
# Attribute not found on DocumentError or result obj, default behavior/message
try:
entities = response[0].attribute_not_on_result_or_error
except AttributeError as default_behavior:
self.assertEqual(
default_behavior.args[0],
'\'DocumentError\' object has no attribute \'attribute_not_on_result_or_error\''
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_bad_model_version_error(self, client):
docs = [{"id": "1", "language": "english", "text": "I did not like the hotel we stayed at."}]
try:
result = await client.recognize_pii_entities(docs, model_version="bad")
except HttpResponseError as err:
self.assertEqual(err.error.code, "ModelVersionIncorrect")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_errors(self, client):
text = ""
for _ in range(5121):
text += "x"
docs = [{"id": "1", "text": ""},
{"id": "2", "language": "english", "text": "I did not like the hotel we stayed at."},
{"id": "3", "text": text}]
doc_errors = await client.recognize_pii_entities(docs)
self.assertEqual(doc_errors[0].error.code, "InvalidDocument")
self.assertIsNotNone(doc_errors[0].error.message)
self.assertEqual(doc_errors[1].error.code, "UnsupportedLanguageCode")
self.assertIsNotNone(doc_errors[1].error.message)
self.assertEqual(doc_errors[2].error.code, "InvalidDocument")
self.assertIsNotNone(doc_errors[2].error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_document_warnings(self, client):
# No warnings actually returned for recognize_pii_entities. Will update when they add
docs = [
{"id": "1", "text": "This won't actually create a warning :'("},
]
result = await client.recognize_pii_entities(docs)
for doc in result:
doc_warnings = doc.warnings
self.assertEqual(len(doc_warnings), 0)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_not_passing_list_for_docs(self, client):
docs = {"id": "1", "text": "hello world"}
with pytest.raises(TypeError) as excinfo:
await client.recognize_pii_entities(docs)
assert "Input documents cannot be a dict" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_missing_input_records_error(self, client):
docs = []
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(docs)
assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_passing_none_docs(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(None)
assert "Input documents can not be empty or None" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_duplicate_ids_error(self, client):
# Duplicate Ids
docs = [{"id": "1", "text": "hello world"},
{"id": "1", "text": "I did not like the hotel we stayed at."}]
try:
result = await client.recognize_pii_entities(docs)
except HttpResponseError as err:
self.assertEqual(err.error.code, "InvalidDocument")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_batch_size_over_limit_error(self, client):
# Batch size over limit
docs = [u"hello world"] * 1001
try:
response = await client.recognize_pii_entities(docs)
except HttpResponseError as err:
self.assertEqual(err.error.code, "InvalidDocumentBatch")
self.assertIsNotNone(err.error.message)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_pass_cls(self, client):
def callback(pipeline_response, deserialized, _):
return "cls result"
res = await client.recognize_pii_entities(
documents=["Test passing cls to endpoint"],
cls=callback
)
assert res == "cls result"
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_language_kwarg_english(self, client):
def callback(response):
language_str = "\"language\": \"en\""
self.assertEqual(response.http_request.body.count(language_str), 1)
self.assertIsNotNone(response.model_version)
self.assertIsNotNone(response.statistics)
res = await client.recognize_pii_entities(
documents=["Bill Gates is the CEO of Microsoft."],
model_version="latest",
show_stats=True,
language="en",
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_recognize_pii_entities_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"])
assert "'recognize_pii_entities' endpoint is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_redacted_text(self, client):
result = await client.recognize_pii_entities(["My SSN is 859-98-0987."])
self.assertEqual("My SSN is ***********.", result[0].redacted_text)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_phi_domain_filter(self, client):
# without the domain filter, this should return two entities: Microsoft as an org,
# and the phone number. With the domain filter, it should only return one.
result = await client.recognize_pii_entities(
["I work at Microsoft and my phone number is 333-333-3333"],
domain_filter=PiiEntityDomainType.PROTECTED_HEALTH_INFORMATION
)
self.assertEqual(len(result[0].entities), 1)
self.assertEqual(result[0].entities[0].text, '333-333-3333')
self.assertEqual(result[0].entities[0].category, 'Phone Number')
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer(client_kwargs={"api_version": TextAnalyticsApiVersion.V3_0})
async def test_string_index_type_explicit_fails_v3(self, client):
with pytest.raises(ValueError) as excinfo:
await client.recognize_pii_entities(["this should fail"], string_index_type="UnicodeCodePoint")
assert "'string_index_type' is only available for API version v3.1-preview and up" in str(excinfo.value)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_default_string_index_type_is_UnicodeCodePoint(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "UnicodeCodePoint")
res = await client.recognize_pii_entities(
documents=["Hello world"],
raw_response_hook=callback
)
@GlobalTextAnalyticsAccountPreparer()
@TextAnalyticsClientPreparer()
async def test_explicit_set_string_index_type(self, client):
def callback(response):
self.assertEqual(response.http_request.query["stringIndexType"], "TextElements_v8")
res = await client.recognize_pii_entities(
documents=["Hello world"],
string_index_type="TextElements_v8",
raw_response_hook=callback
) | true | true |
f734fad6c40629440b9272bed1e078ca97ba0134 | 2,276 | py | Python | lingvo/core/test_utils_test.py | zhoudoufu/lingvo | bd0f89809942fd0508ff43bd4b6bca1b598220cb | [
"Apache-2.0"
] | null | null | null | lingvo/core/test_utils_test.py | zhoudoufu/lingvo | bd0f89809942fd0508ff43bd4b6bca1b598220cb | [
"Apache-2.0"
] | null | null | null | lingvo/core/test_utils_test.py | zhoudoufu/lingvo | bd0f89809942fd0508ff43bd4b6bca1b598220cb | [
"Apache-2.0"
] | null | null | null | # Lint as: python2, python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for test_utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from lingvo.core import test_utils
class TestUtilsTest(test_utils.TestCase):
def testReplaceGoldenSingleFloat(self):
old_line = ' CompareToGoldenSingleFloat(self, 1.489712, vs[0])\n'
expected = ' CompareToGoldenSingleFloat(self, 1.000000, vs[0])\n'
actual = test_utils.ReplaceGoldenSingleFloat(old_line, 1.0)
self.assertEqual(expected, actual)
old_line = ('test_utils.CompareToGoldenSingleFloat(self, -2.e-3, vs[0])'
' # pylint: disable=line-too-long\n')
expected = ('test_utils.CompareToGoldenSingleFloat(self, 1.000000, vs[0])'
' # pylint: disable=line-too-long\n')
actual = test_utils.ReplaceGoldenSingleFloat(old_line, 1.0)
self.assertEqual(expected, actual)
def CompareToGoldenSingleFloat(self, unused_v1, v2):
return test_utils.ReplaceGoldenStackAnalysis(v2)
def testReplaceGoldenStackAnalysis(self):
v2 = 2.0
result = TestUtilsTest.CompareToGoldenSingleFloat(self, 1.0, v2)
self.assertTrue(result[0].endswith('test_utils_test.py'))
old_line = (' result = TestUtilsTest.CompareToGoldenSingleFloat('
'self, 1.0, v2)\n')
new_line = (' result = TestUtilsTest.CompareToGoldenSingleFloat('
'self, 2.000000, v2)\n')
self.assertEqual(old_line, result[2])
self.assertEqual(new_line, result[3])
if __name__ == '__main__':
tf.test.main()
| 39.241379 | 80 | 0.696397 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from lingvo.core import test_utils
class TestUtilsTest(test_utils.TestCase):
def testReplaceGoldenSingleFloat(self):
old_line = ' CompareToGoldenSingleFloat(self, 1.489712, vs[0])\n'
expected = ' CompareToGoldenSingleFloat(self, 1.000000, vs[0])\n'
actual = test_utils.ReplaceGoldenSingleFloat(old_line, 1.0)
self.assertEqual(expected, actual)
old_line = ('test_utils.CompareToGoldenSingleFloat(self, -2.e-3, vs[0])'
' # pylint: disable=line-too-long\n')
expected = ('test_utils.CompareToGoldenSingleFloat(self, 1.000000, vs[0])'
' # pylint: disable=line-too-long\n')
actual = test_utils.ReplaceGoldenSingleFloat(old_line, 1.0)
self.assertEqual(expected, actual)
def CompareToGoldenSingleFloat(self, unused_v1, v2):
return test_utils.ReplaceGoldenStackAnalysis(v2)
def testReplaceGoldenStackAnalysis(self):
v2 = 2.0
result = TestUtilsTest.CompareToGoldenSingleFloat(self, 1.0, v2)
self.assertTrue(result[0].endswith('test_utils_test.py'))
old_line = (' result = TestUtilsTest.CompareToGoldenSingleFloat('
'self, 1.0, v2)\n')
new_line = (' result = TestUtilsTest.CompareToGoldenSingleFloat('
'self, 2.000000, v2)\n')
self.assertEqual(old_line, result[2])
self.assertEqual(new_line, result[3])
if __name__ == '__main__':
tf.test.main()
| true | true |
f734fb6ee9073916872738ef1d419bf8d564a7d0 | 3,161 | py | Python | tests/agents/test_agent_interface.py | garaytc/reinforcement | e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f | [
"Apache-2.0"
] | 12 | 2020-05-19T18:58:55.000Z | 2021-02-21T20:26:46.000Z | tests/agents/test_agent_interface.py | garaytc/reinforcement | e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f | [
"Apache-2.0"
] | 39 | 2020-05-19T18:41:42.000Z | 2021-01-16T08:31:06.000Z | tests/agents/test_agent_interface.py | garaytc/reinforcement | e6af258bf2ac3b45c20e0ed3d2f58ca7bc2b232f | [
"Apache-2.0"
] | 2 | 2020-05-19T15:15:04.000Z | 2020-05-21T08:45:59.000Z | import pytest
import torch
from gym.spaces import Discrete, MultiDiscrete, MultiBinary, Dict, Tuple, Box
from blobrl.agents import AgentInterface
class MOCKAgentInterface(AgentInterface):
    """Minimal concrete subclass of AgentInterface.

    Every abstract method is implemented as a no-op so the base class's
    constructor validation (observation/action space and device checks) can
    be exercised in isolation.
    """

    def __init__(self, observation_space, action_space, device):
        super().__init__(observation_space, action_space, device)

    def get_action(self, observation):
        """No-op: a real agent would return an action for *observation*."""
        pass

    def enable_exploration(self):
        pass

    def disable_exploration(self):
        pass

    def learn(self, observation, action, reward, next_observation, done) -> None:
        pass

    def episode_finished(self) -> None:
        pass

    def save(self, file_name, dire_name="."):
        pass

    @classmethod
    def load(cls, file_name, dire_name=".", device=None):
        pass

    def __str__(self):
        return ""
class TestAgentInterface:
    """Exercises AgentInterface's constructor validation via a mock subclass."""

    __test__ = True

    agent = MOCKAgentInterface

    # Pairs of (observation_space, action_space) that are structurally valid.
    list_work = [
        [Discrete(3), Discrete(1)],
        [Discrete(3), Discrete(3)],
        [Discrete(10), Discrete(50)],
        [MultiDiscrete([3]), MultiDiscrete([1])],
        [MultiDiscrete([3, 3]), MultiDiscrete([3, 3])],
        [MultiDiscrete([4, 4, 4]), MultiDiscrete([50, 4, 4])],
        [MultiDiscrete([[100, 3], [3, 5]]), MultiDiscrete([[100, 3], [3, 5]])],
        [MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]]),
         MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]])],
        [MultiBinary(1), MultiBinary(1)],
        [MultiBinary(3), MultiBinary(3)],
        # gym does not support multi-dimensional MultiBinary spaces yet,
        # e.g. MultiBinary([3, 2]).
        [Box(low=0, high=10, shape=[1]), Box(low=0, high=10, shape=[1])],
        [Box(low=0, high=10, shape=[2, 2]), Box(low=0, high=10, shape=[2, 2])],
        [Box(low=0, high=10, shape=[2, 2, 2]), Box(low=0, high=10, shape=[2, 2, 2])],
        [Tuple([Discrete(1), MultiDiscrete([1, 1])]), Tuple([Discrete(1), MultiDiscrete([1, 1])])],
        [Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])}),
         Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])})],
    ]

    # Pairs that are not gym spaces at all and must be rejected.
    list_fail = [
        [None, None],
        ["dedrfe", "qdzq"],
        [1215.4154, 157.48],
        ["zdzd", (Discrete(1))],
        [Discrete(1), "zdzd"],
        ["zdzd", (1, 4, 7)],
        [(1, 4, 7), "zdzd"],
        [152, 485]
    ]

    def test_init(self):
        # A plain string is not an accepted device, so construction must raise
        # TypeError for every pair — valid and invalid spaces alike.
        for obs_space, act_space in self.list_work + self.list_fail:
            with pytest.raises(TypeError):
                self.agent(obs_space, act_space, "cpu")

    def test_device(self):
        for obs_space, act_space in self.list_work:
            # An explicit torch.device and None both resolve to the CPU device.
            assert torch.device("cpu") == self.agent(obs_space, act_space, torch.device("cpu")).device
            assert torch.device("cpu") == self.agent(obs_space, act_space, None).device

            # Anything that is not a torch.device (or None) is rejected.
            for bad_device in ["dzeqdzqd", 1512, object(), 151.515]:
                with pytest.raises(TypeError):
                    self.agent(obs_space, act_space, bad_device)

            if torch.cuda.is_available():
                self.agent(obs_space, act_space, torch.device("cuda"))

    def test__str__(self):
        pass
| 30.68932 | 105 | 0.548561 | import pytest
import torch
from gym.spaces import Discrete, MultiDiscrete, MultiBinary, Dict, Tuple, Box
from blobrl.agents import AgentInterface
class MOCKAgentInterface(AgentInterface):
def __init__(self, observation_space, action_space, device):
super().__init__(observation_space, action_space, device)
def get_action(self, observation):
pass
def enable_exploration(self):
pass
def disable_exploration(self):
pass
def learn(self, observation, action, reward, next_observation, done) -> None:
pass
def episode_finished(self) -> None:
pass
def save(self, file_name, dire_name="."):
pass
@classmethod
def load(cls, file_name, dire_name=".", device=None):
pass
def __str__(self):
return ""
class TestAgentInterface:
__test__ = True
agent = MOCKAgentInterface
list_work = [
[Discrete(3), Discrete(1)],
[Discrete(3), Discrete(3)],
[Discrete(10), Discrete(50)],
[MultiDiscrete([3]), MultiDiscrete([1])],
[MultiDiscrete([3, 3]), MultiDiscrete([3, 3])],
[MultiDiscrete([4, 4, 4]), MultiDiscrete([50, 4, 4])],
[MultiDiscrete([[100, 3], [3, 5]]), MultiDiscrete([[100, 3], [3, 5]])],
[MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]]),
MultiDiscrete([[[100, 3], [3, 5]], [[100, 3], [3, 5]]])],
[MultiBinary(1), MultiBinary(1)],
[MultiBinary(3), MultiBinary(3)],
, high=10, shape=[1])],
[Box(low=0, high=10, shape=[2, 2]), Box(low=0, high=10, shape=[2, 2])],
[Box(low=0, high=10, shape=[2, 2, 2]), Box(low=0, high=10, shape=[2, 2, 2])],
[Tuple([Discrete(1), MultiDiscrete([1, 1])]), Tuple([Discrete(1), MultiDiscrete([1, 1])])],
[Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])}),
Dict({"first": Discrete(1), "second": MultiDiscrete([1, 1])})],
]
list_fail = [
[None, None],
["dedrfe", "qdzq"],
[1215.4154, 157.48],
["zdzd", (Discrete(1))],
[Discrete(1), "zdzd"],
["zdzd", (1, 4, 7)],
[(1, 4, 7), "zdzd"],
[152, 485]
]
def test_init(self):
for o, a in self.list_work:
with pytest.raises(TypeError):
self.agent(o, a, "cpu")
for o, a in self.list_fail:
with pytest.raises(TypeError):
self.agent(o, a, "cpu")
def test_device(self):
for o, a in self.list_work:
device = torch.device("cpu")
assert device == self.agent(o, a, device).device
device = None
assert torch.device("cpu") == self.agent(o, a, device).device
for device in ["dzeqdzqd", 1512, object(), 151.515]:
with pytest.raises(TypeError):
self.agent(o, a, device)
if torch.cuda.is_available():
self.agent(o, a, torch.device("cuda"))
def test__str__(self):
pass
| true | true |
f734fc94c2982f19f46721f2e95b37368f72bc2d | 385 | py | Python | apps/wsgi.py | reimibeta/django-store-item-models | 0be5fad0df0b3ebc7283fc6369f0e769a4743987 | [
"Apache-2.0"
] | null | null | null | apps/wsgi.py | reimibeta/django-store-item-models | 0be5fad0df0b3ebc7283fc6369f0e769a4743987 | [
"Apache-2.0"
] | 35 | 2020-10-24T22:14:41.000Z | 2022-03-07T10:20:25.000Z | apps/wsgi.py | reimibeta/django-store-item-models | 0be5fad0df0b3ebc7283fc6369f0e769a4743987 | [
"Apache-2.0"
] | null | null | null | """
WSGI config for apps project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'apps.settings')
application = get_wsgi_application()
| 22.647059 | 78 | 0.781818 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'apps.settings')
application = get_wsgi_application()
| true | true |
f734fd0b04be2065e704ed64eb67bd4f6f83c3c6 | 1,745 | py | Python | oleander/google.py | honzajavorek/oleander | d4a0dc4c2da1cf2394a5a1b206e5398ad6900bee | [
"ISC"
] | null | null | null | oleander/google.py | honzajavorek/oleander | d4a0dc4c2da1cf2394a5a1b206e5398ad6900bee | [
"ISC"
] | null | null | null | oleander/google.py | honzajavorek/oleander | d4a0dc4c2da1cf2394a5a1b206e5398ad6900bee | [
"ISC"
] | null | null | null | # -*- coding: utf-8 -*-
from flask import url_for, request, session
from oleander import app
from gdata.client import Unauthorized as UnauthorizedError
from gdata.gauth import OAuth2Token, Error as ConnectionError, token_to_blob, token_from_blob
from gdata.contacts.client import ContactsClient
from gdata.calendar.client import CalendarClient
# https://developers.google.com/gdata/faq#AuthScopes
# http://googleappsdeveloper.blogspot.com/2011/09/python-oauth-20-google-data-apis.html
# http://stackoverflow.com/questions/10188768/google-contacts-import-using-oauth2-0
# http://stackoverflow.com/questions/4263888/how-to-detect-if-an-email-is-a-google-account
def create_oauth_handler(scope=''):
    """Return a gdata ``OAuth2Token`` configured for this application.

    The token is built from the app's Google credentials and the requested
    *scope*.  ``generate_authorize_url`` is invoked once before returning;
    its return value is intentionally discarded.

    NOTE(review): the call appears to be kept only for its side effect of
    recording the redirect URI on the token — confirm against the gdata
    OAuth2Token implementation.
    """
    token = OAuth2Token(
        client_id=app.config['GOOGLE_APP_ID'],
        client_secret=app.config['GOOGLE_APP_SECRET'],
        scope=scope,
        user_agent=''
    )
    redirect = url_for(
        'google_connected',
        _external=True
    )
    token.generate_authorize_url(redirect_uri=redirect)
    return token
def create_authorize_url(action_url, error_url, scope=''):
    """Build the Google OAuth2 authorization URL for the given *scope*.

    Stores *action_url* and *error_url* in the session so the
    ``google_connected`` webhook knows where to redirect afterwards.
    """
    handler = create_oauth_handler(scope)

    session['action_url'] = action_url
    session['error_url'] = error_url

    redirect = url_for(
        'google_connected',
        _external=True
    )
    return handler.generate_authorize_url(redirect_uri=redirect)
def create_api(cls):
    """Instantiate a gdata client *cls* authorized with session credentials.

    Raises ``ConnectionError`` when no serialized token is stored in the
    session under ``google_credentials``.
    """
    blob = session.get('google_credentials', None)
    if not blob:
        raise ConnectionError('No credentials.')

    token = token_from_blob(blob)
    client = cls(source='')  # 'source' doubles as the user-agent string
    token.authorize(client)
    return client
| 31.160714 | 93 | 0.731232 |
from flask import url_for, request, session
from oleander import app
from gdata.client import Unauthorized as UnauthorizedError
from gdata.gauth import OAuth2Token, Error as ConnectionError, token_to_blob, token_from_blob
from gdata.contacts.client import ContactsClient
from gdata.calendar.client import CalendarClient
reate_oauth_handler(scope=''):
oauth2_handler = OAuth2Token(
client_id=app.config['GOOGLE_APP_ID'],
client_secret=app.config['GOOGLE_APP_SECRET'],
scope=scope,
user_agent=''
)
web_hook_url = url_for(
'google_connected',
_external=True
)
oauth2_handler.generate_authorize_url(
redirect_uri=web_hook_url
)
return oauth2_handler
def create_authorize_url(action_url, error_url, scope=''):
oauth2_handler = create_oauth_handler(scope)
session['action_url'] = action_url
session['error_url'] = error_url
web_hook_url = url_for(
'google_connected',
_external=True
)
return oauth2_handler.generate_authorize_url(
redirect_uri=web_hook_url
)
def create_api(cls):
credentials = session.get('google_credentials', None)
if not credentials:
raise ConnectionError('No credentials.')
credentials = token_from_blob(credentials)
client = cls(source='')
credentials.authorize(client)
return client
| true | true |
f734fd169d9fe2785174d14db380bf2f369d6039 | 25,499 | py | Python | tests/schemas/test_openapi.py | mrtaalebi/django-rest-framework | d22daf4e05bc670f4ff96d97da5d2a9cf83df6c1 | [
"BSD-3-Clause"
] | 4 | 2019-02-11T13:01:34.000Z | 2020-10-22T08:39:57.000Z | tests/schemas/test_openapi.py | mrtaalebi/django-rest-framework | d22daf4e05bc670f4ff96d97da5d2a9cf83df6c1 | [
"BSD-3-Clause"
] | null | null | null | tests/schemas/test_openapi.py | mrtaalebi/django-rest-framework | d22daf4e05bc670f4ff96d97da5d2a9cf83df6c1 | [
"BSD-3-Clause"
] | 2 | 2020-04-24T01:54:08.000Z | 2020-06-05T18:37:03.000Z | import pytest
from django.conf.urls import url
from django.test import RequestFactory, TestCase, override_settings
from django.utils.translation import gettext_lazy as _
from rest_framework import filters, generics, pagination, routers, serializers
from rest_framework.compat import uritemplate
from rest_framework.parsers import JSONParser, MultiPartParser
from rest_framework.renderers import JSONRenderer
from rest_framework.request import Request
from rest_framework.schemas.openapi import AutoSchema, SchemaGenerator
from . import views
def create_request(path):
    """Wrap a Django test-client GET request for *path* in a DRF Request."""
    return Request(RequestFactory().get(path))
def create_view(view_cls, method, request):
    """Instantiate *view_cls* the same way SchemaGenerator does internally."""
    return SchemaGenerator().create_view(view_cls.as_view(), method, request)
class TestBasics(TestCase):
    """Smoke tests: filters and paginators expose schema operation parameters."""

    def dummy_view(request):
        # Used below via ``self.dummy_view`` purely as a view stand-in.
        pass

    def test_filters(self):
        for filter_cls in (filters.SearchFilter, filters.OrderingFilter):
            assert filter_cls().get_schema_operation_parameters(self.dummy_view)

    def test_pagination(self):
        paginator_classes = (
            pagination.PageNumberPagination,
            pagination.LimitOffsetPagination,
            pagination.CursorPagination,
        )
        for paginator_cls in paginator_classes:
            assert paginator_cls().get_schema_operation_parameters(self.dummy_view)
class TestFieldMapping(TestCase):
    """Checks AutoSchema's mapping of individual serializer fields to OpenAPI."""

    def test_list_field_mapping(self):
        inspector = AutoSchema()
        # (serializer field, expected OpenAPI schema fragment) pairs.
        cases = [
            (serializers.ListField(), {'items': {}, 'type': 'array'}),
            (serializers.ListField(child=serializers.BooleanField()), {'items': {'type': 'boolean'}, 'type': 'array'}),
            (serializers.ListField(child=serializers.FloatField()), {'items': {'type': 'number'}, 'type': 'array'}),
            (serializers.ListField(child=serializers.CharField()), {'items': {'type': 'string'}, 'type': 'array'}),
            # Bounds above the int32 range must be emitted as format: int64.
            (serializers.ListField(child=serializers.IntegerField(max_value=4294967295)),
             {'items': {'type': 'integer', 'maximum': 4294967295, 'format': 'int64'}, 'type': 'array'}),
            (serializers.ListField(child=serializers.ChoiceField(choices=[('a', 'Choice A'), ('b', 'Choice B')])),
             {'items': {'enum': ['a', 'b']}, 'type': 'array'}),
            (serializers.IntegerField(min_value=2147483648),
             {'type': 'integer', 'minimum': 2147483648, 'format': 'int64'}),
        ]
        for field, mapping in cases:
            with self.subTest(field=field):
                assert inspector._map_field(field) == mapping

    def test_lazy_string_field(self):
        class Serializer(serializers.Serializer):
            text = serializers.CharField(help_text=_('lazy string'))

        inspector = AutoSchema()

        data = inspector._map_serializer(Serializer())
        # Lazy translation proxies must be forced to plain str in the schema.
        assert isinstance(data['properties']['text']['description'], str), "description must be str"
@pytest.mark.skipif(uritemplate is None, reason='uritemplate not installed.')
class TestOperationIntrospection(TestCase):
    """Per-operation checks of AutoSchema: operation ids, parameters,
    request bodies and response schemas for a variety of view shapes."""

    def test_path_without_parameters(self):
        path = '/example/'
        method = 'GET'

        view = create_view(
            views.DocStringExampleListView,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        operation = inspector.get_operation(path, method)
        # The description is taken from the view's docstring.
        assert operation == {
            'operationId': 'listDocStringExamples',
            'description': 'A description of my GET operation.',
            'parameters': [],
            'responses': {
                '200': {
                    'description': '',
                    'content': {
                        'application/json': {
                            'schema': {
                                'type': 'array',
                                'items': {},
                            },
                        },
                    },
                },
            },
        }

    def test_path_with_id_parameter(self):
        path = '/example/{id}/'
        method = 'GET'

        view = create_view(
            views.DocStringExampleDetailView,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        operation = inspector.get_operation(path, method)
        # The {id} template variable becomes a required path parameter.
        assert operation == {
            'operationId': 'RetrieveDocStringExampleDetail',
            'description': 'A description of my GET operation.',
            'parameters': [{
                'description': '',
                'in': 'path',
                'name': 'id',
                'required': True,
                'schema': {
                    'type': 'string',
                },
            }],
            'responses': {
                '200': {
                    'description': '',
                    'content': {
                        'application/json': {
                            'schema': {
                            },
                        },
                    },
                },
            },
        }

    def test_request_body(self):
        path = '/'
        method = 'POST'

        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            read_only = serializers.CharField(read_only=True)

        class View(generics.GenericAPIView):
            serializer_class = Serializer

        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        request_body = inspector._get_request_body(path, method)
        # Read-only fields must be excluded from the request body schema.
        assert request_body['content']['application/json']['schema']['required'] == ['text']
        assert list(request_body['content']['application/json']['schema']['properties'].keys()) == ['text']

    def test_empty_required(self):
        path = '/'
        method = 'POST'

        class Serializer(serializers.Serializer):
            read_only = serializers.CharField(read_only=True)
            write_only = serializers.CharField(write_only=True, required=False)

        class View(generics.GenericAPIView):
            serializer_class = Serializer

        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        request_body = inspector._get_request_body(path, method)
        # there should be no empty 'required' property, see #6834
        assert 'required' not in request_body['content']['application/json']['schema']

        for response in inspector._get_responses(path, method).values():
            assert 'required' not in response['content']['application/json']['schema']

    def test_empty_required_with_patch_method(self):
        path = '/'
        method = 'PATCH'

        class Serializer(serializers.Serializer):
            read_only = serializers.CharField(read_only=True)
            write_only = serializers.CharField(write_only=True, required=False)

        class View(generics.GenericAPIView):
            serializer_class = Serializer

        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        request_body = inspector._get_request_body(path, method)
        # there should be no empty 'required' property, see #6834
        assert 'required' not in request_body['content']['application/json']['schema']

        for response in inspector._get_responses(path, method).values():
            assert 'required' not in response['content']['application/json']['schema']

    def test_response_body_generation(self):
        path = '/'
        method = 'POST'

        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            write_only = serializers.CharField(write_only=True)

        class View(generics.GenericAPIView):
            serializer_class = Serializer

        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        # Write-only fields must be excluded from the response schema.
        assert responses['200']['content']['application/json']['schema']['required'] == ['text']
        assert list(responses['200']['content']['application/json']['schema']['properties'].keys()) == ['text']
        assert 'description' in responses['200']

    def test_response_body_nested_serializer(self):
        path = '/'
        method = 'POST'

        class NestedSerializer(serializers.Serializer):
            number = serializers.IntegerField()

        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            nested = NestedSerializer()

        class View(generics.GenericAPIView):
            serializer_class = Serializer

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        schema = responses['200']['content']['application/json']['schema']
        # A nested serializer maps to an inline object schema.
        assert sorted(schema['required']) == ['nested', 'text']
        assert sorted(list(schema['properties'].keys())) == ['nested', 'text']
        assert schema['properties']['nested']['type'] == 'object'
        assert list(schema['properties']['nested']['properties'].keys()) == ['number']
        assert schema['properties']['nested']['required'] == ['number']

    def test_list_response_body_generation(self):
        """Test that an array schema is returned for list views."""
        path = '/'
        method = 'GET'

        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()

        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'type': 'array',
                            'items': {
                                'properties': {
                                    'text': {
                                        'type': 'string',
                                    },
                                },
                                'required': ['text'],
                            },
                        },
                    },
                },
            },
        }

    def test_paginated_list_response_body_generation(self):
        """Test that pagination properties are added for a paginated list view."""
        path = '/'
        method = 'GET'

        class Pagination(pagination.BasePagination):
            def get_paginated_response_schema(self, schema):
                # Wrap the item schema, as a real paginator would.
                return {
                    'type': 'object',
                    'item': schema,
                }

        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()

        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer
            pagination_class = Pagination

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'type': 'object',
                            'item': {
                                'type': 'array',
                                'items': {
                                    'properties': {
                                        'text': {
                                            'type': 'string',
                                        },
                                    },
                                    'required': ['text'],
                                },
                            },
                        },
                    },
                },
            },
        }

    def test_delete_response_body_generation(self):
        """Test that a view's delete method generates a proper response body schema."""
        path = '/{id}/'
        method = 'DELETE'

        class View(generics.DestroyAPIView):
            serializer_class = views.ExampleSerializer

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        # DELETE yields a bodiless 204 response.
        assert responses == {
            '204': {
                'description': '',
            },
        }

    def test_parser_mapping(self):
        """Test that view's parsers are mapped to OA media types"""
        path = '/{id}/'
        method = 'POST'

        class View(generics.CreateAPIView):
            serializer_class = views.ExampleSerializer
            parser_classes = [JSONParser, MultiPartParser]

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        request_body = inspector._get_request_body(path, method)

        assert len(request_body['content'].keys()) == 2
        assert 'multipart/form-data' in request_body['content']
        assert 'application/json' in request_body['content']

    def test_renderer_mapping(self):
        """Test that view's renderers are mapped to OA media types"""
        path = '/{id}/'
        method = 'GET'

        class View(generics.CreateAPIView):
            serializer_class = views.ExampleSerializer
            renderer_classes = [JSONRenderer]

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        # TODO this should be changed once the multiple response
        # schema support is there
        success_response = responses['200']

        assert len(success_response['content'].keys()) == 1
        assert 'application/json' in success_response['content']

    def test_serializer_filefield(self):
        path = '/{id}/'
        method = 'POST'

        class ItemSerializer(serializers.Serializer):
            attachment = serializers.FileField()

        class View(generics.CreateAPIView):
            serializer_class = ItemSerializer

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        request_body = inspector._get_request_body(path, method)
        mp_media = request_body['content']['multipart/form-data']
        attachment = mp_media['schema']['properties']['attachment']
        # File uploads map to string/binary in OpenAPI.
        assert attachment['format'] == 'binary'

    def test_retrieve_response_body_generation(self):
        """
        Test that a list of properties is returned for retrieve item views.

        Pagination properties should not be added as the view represents a single item.
        """
        path = '/{id}/'
        method = 'GET'

        class Pagination(pagination.BasePagination):
            def get_paginated_response_schema(self, schema):
                return {
                    'type': 'object',
                    'item': schema,
                }

        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()

        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer
            pagination_class = Pagination

        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'properties': {
                                'text': {
                                    'type': 'string',
                                },
                            },
                            'required': ['text'],
                        },
                    },
                },
            },
        }

    def test_operation_id_generation(self):
        path = '/'
        method = 'GET'

        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        operationId = inspector._get_operation_id(path, method)
        assert operationId == 'listExamples'

    def test_repeat_operation_ids(self):
        router = routers.SimpleRouter()
        router.register('account', views.ExampleGenericViewSet, basename="account")
        urlpatterns = router.urls

        generator = SchemaGenerator(patterns=urlpatterns)
        request = create_request('/')
        schema = generator.get_schema(request=request)
        schema_str = str(schema)
        print(schema_str)
        # Each operation id must appear exactly once in the rendered schema.
        assert schema_str.count("operationId") == 2
        assert schema_str.count("newExample") == 1
        assert schema_str.count("oldExample") == 1

    def test_serializer_datefield(self):
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        # Dates map to string with the appropriate OpenAPI format.
        assert properties['date']['type'] == properties['datetime']['type'] == 'string'
        assert properties['date']['format'] == 'date'
        assert properties['datetime']['format'] == 'date-time'

    def test_serializer_hstorefield(self):
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        assert properties['hstore']['type'] == 'object'

    def test_serializer_callable_default(self):
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        # Callable defaults cannot be serialized into the schema.
        assert 'default' not in properties['uuid_field']

    def test_serializer_validators(self):
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleValidatedAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view

        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']

        # Validator constraints must surface as OpenAPI keywords.
        assert properties['integer']['type'] == 'integer'
        assert properties['integer']['maximum'] == 99
        assert properties['integer']['minimum'] == -11

        assert properties['string']['minLength'] == 2
        assert properties['string']['maxLength'] == 10

        assert properties['lst']['minItems'] == 2
        assert properties['lst']['maxItems'] == 10

        assert properties['regex']['pattern'] == r'[ABC]12{3}'
        assert properties['regex']['description'] == 'must have an A, B, or C followed by 1222'

        assert properties['decimal1']['type'] == 'number'
        assert properties['decimal1']['multipleOf'] == .01
        assert properties['decimal1']['maximum'] == 10000
        assert properties['decimal1']['minimum'] == -10000

        assert properties['decimal2']['type'] == 'number'
        assert properties['decimal2']['multipleOf'] == .0001

        assert properties['email']['type'] == 'string'
        assert properties['email']['format'] == 'email'
        assert properties['email']['default'] == 'foo@bar.com'

        assert properties['url']['type'] == 'string'
        assert properties['url']['nullable'] is True
        assert properties['url']['default'] == 'http://www.example.com'

        assert properties['uuid']['type'] == 'string'
        assert properties['uuid']['format'] == 'uuid'

        assert properties['ip4']['type'] == 'string'
        assert properties['ip4']['format'] == 'ipv4'

        assert properties['ip6']['type'] == 'string'
        assert properties['ip6']['format'] == 'ipv6'

        assert properties['ip']['type'] == 'string'
        assert 'format' not in properties['ip']
@pytest.mark.skipif(uritemplate is None, reason='uritemplate not installed.')
@override_settings(REST_FRAMEWORK={'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.openapi.AutoSchema'})
class TestGenerator(TestCase):
    """End-to-end checks of SchemaGenerator's top-level schema document."""

    def test_override_settings(self):
        # DEFAULT_SCHEMA_CLASS from settings must be picked up by views.
        assert isinstance(views.ExampleListView.schema, AutoSchema)

    def test_paths_construction(self):
        """Construction of the `paths` key."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        generator._initialise_endpoints()

        paths = generator.get_schema()["paths"]

        assert '/example/' in paths
        example_operations = paths['/example/']
        assert len(example_operations) == 2
        assert 'get' in example_operations
        assert 'post' in example_operations

    def test_prefixed_paths_construction(self):
        """Construction of the `paths` key maintains a common prefix."""
        patterns = [
            url(r'^v1/example/?$', views.ExampleListView.as_view()),
            url(r'^v1/example/{pk}/?$', views.ExampleDetailView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        generator._initialise_endpoints()

        paths = generator.get_schema()["paths"]

        assert '/v1/example/' in paths
        assert '/v1/example/{id}/' in paths

    def test_mount_url_prefixed_to_paths(self):
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
            url(r'^example/{pk}/?$', views.ExampleDetailView.as_view()),
        ]
        # The mount `url` argument must be prepended to every path.
        generator = SchemaGenerator(patterns=patterns, url='/api')
        generator._initialise_endpoints()

        paths = generator.get_schema()["paths"]

        assert '/api/example/' in paths
        assert '/api/example/{id}/' in paths

    def test_schema_construction(self):
        """Construction of the top level dictionary."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)

        request = create_request('/')
        schema = generator.get_schema(request=request)

        assert 'openapi' in schema
        assert 'paths' in schema

    def test_schema_with_no_paths(self):
        patterns = []
        generator = SchemaGenerator(patterns=patterns)

        request = create_request('/')
        schema = generator.get_schema(request=request)

        assert schema['paths'] == {}

    def test_schema_information(self):
        """Construction of the top level dictionary."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns, title='My title', version='1.2.3', description='My description')

        request = create_request('/')
        schema = generator.get_schema(request=request)

        assert schema['info']['title'] == 'My title'
        assert schema['info']['version'] == '1.2.3'
        assert schema['info']['description'] == 'My description'

    def test_schema_information_empty(self):
        """Construction of the top level dictionary."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)

        request = create_request('/')
        schema = generator.get_schema(request=request)

        # Title and version default to empty strings when not supplied.
        assert schema['info']['title'] == ''
        assert schema['info']['version'] == ''
| 34.135207 | 119 | 0.553512 | import pytest
from django.conf.urls import url
from django.test import RequestFactory, TestCase, override_settings
from django.utils.translation import gettext_lazy as _
from rest_framework import filters, generics, pagination, routers, serializers
from rest_framework.compat import uritemplate
from rest_framework.parsers import JSONParser, MultiPartParser
from rest_framework.renderers import JSONRenderer
from rest_framework.request import Request
from rest_framework.schemas.openapi import AutoSchema, SchemaGenerator
from . import views
def create_request(path):
factory = RequestFactory()
request = Request(factory.get(path))
return request
def create_view(view_cls, method, request):
generator = SchemaGenerator()
view = generator.create_view(view_cls.as_view(), method, request)
return view
class TestBasics(TestCase):
def dummy_view(request):
pass
def test_filters(self):
classes = [filters.SearchFilter, filters.OrderingFilter]
for c in classes:
f = c()
assert f.get_schema_operation_parameters(self.dummy_view)
def test_pagination(self):
classes = [pagination.PageNumberPagination, pagination.LimitOffsetPagination, pagination.CursorPagination]
for c in classes:
f = c()
assert f.get_schema_operation_parameters(self.dummy_view)
class TestFieldMapping(TestCase):
def test_list_field_mapping(self):
inspector = AutoSchema()
cases = [
(serializers.ListField(), {'items': {}, 'type': 'array'}),
(serializers.ListField(child=serializers.BooleanField()), {'items': {'type': 'boolean'}, 'type': 'array'}),
(serializers.ListField(child=serializers.FloatField()), {'items': {'type': 'number'}, 'type': 'array'}),
(serializers.ListField(child=serializers.CharField()), {'items': {'type': 'string'}, 'type': 'array'}),
(serializers.ListField(child=serializers.IntegerField(max_value=4294967295)),
{'items': {'type': 'integer', 'maximum': 4294967295, 'format': 'int64'}, 'type': 'array'}),
(serializers.ListField(child=serializers.ChoiceField(choices=[('a', 'Choice A'), ('b', 'Choice B')])),
{'items': {'enum': ['a', 'b']}, 'type': 'array'}),
(serializers.IntegerField(min_value=2147483648),
{'type': 'integer', 'minimum': 2147483648, 'format': 'int64'}),
]
for field, mapping in cases:
with self.subTest(field=field):
assert inspector._map_field(field) == mapping
def test_lazy_string_field(self):
class Serializer(serializers.Serializer):
text = serializers.CharField(help_text=_('lazy string'))
inspector = AutoSchema()
data = inspector._map_serializer(Serializer())
assert isinstance(data['properties']['text']['description'], str), "description must be str"
@pytest.mark.skipif(uritemplate is None, reason='uritemplate not installed.')
class TestOperationIntrospection(TestCase):
    """Tests for AutoSchema's per-operation OpenAPI output.

    Each test hand-wires a view to an AutoSchema inspector and checks the
    operation / request-body / response fragments it emits.
    """
    def test_path_without_parameters(self):
        """A GET list endpoint produces an operation with an empty parameter list."""
        path = '/example/'
        method = 'GET'
        view = create_view(
            views.DocStringExampleListView,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        operation = inspector.get_operation(path, method)
        assert operation == {
            'operationId': 'listDocStringExamples',
            'description': 'A description of my GET operation.',
            'parameters': [],
            'responses': {
                '200': {
                    'description': '',
                    'content': {
                        'application/json': {
                            'schema': {
                                'type': 'array',
                                'items': {},
                            },
                        },
                    },
                },
            },
        }
    def test_path_with_id_parameter(self):
        """A templated {id} path becomes a required string path parameter."""
        path = '/example/{id}/'
        method = 'GET'
        view = create_view(
            views.DocStringExampleDetailView,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        operation = inspector.get_operation(path, method)
        assert operation == {
            'operationId': 'RetrieveDocStringExampleDetail',
            'description': 'A description of my GET operation.',
            'parameters': [{
                'description': '',
                'in': 'path',
                'name': 'id',
                'required': True,
                'schema': {
                    'type': 'string',
                },
            }],
            'responses': {
                '200': {
                    'description': '',
                    'content': {
                        'application/json': {
                            'schema': {
                            },
                        },
                    },
                },
            },
        }
    def test_request_body(self):
        """Read-only fields are excluded from the generated request body."""
        path = '/'
        method = 'POST'
        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            read_only = serializers.CharField(read_only=True)
        class View(generics.GenericAPIView):
            serializer_class = Serializer
        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        request_body = inspector._get_request_body(path, method)
        assert request_body['content']['application/json']['schema']['required'] == ['text']
        assert list(request_body['content']['application/json']['schema']['properties'].keys()) == ['text']
    def test_empty_required(self):
        """No 'required' key is emitted when no writable field is required."""
        path = '/'
        method = 'POST'
        class Serializer(serializers.Serializer):
            read_only = serializers.CharField(read_only=True)
            write_only = serializers.CharField(write_only=True, required=False)
        class View(generics.GenericAPIView):
            serializer_class = Serializer
        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        request_body = inspector._get_request_body(path, method)
        assert 'required' not in request_body['content']['application/json']['schema']
        for response in inspector._get_responses(path, method).values():
            assert 'required' not in response['content']['application/json']['schema']
    def test_empty_required_with_patch_method(self):
        """PATCH requests likewise omit 'required' when nothing is mandatory."""
        path = '/'
        method = 'PATCH'
        class Serializer(serializers.Serializer):
            read_only = serializers.CharField(read_only=True)
            write_only = serializers.CharField(write_only=True, required=False)
        class View(generics.GenericAPIView):
            serializer_class = Serializer
        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        request_body = inspector._get_request_body(path, method)
        assert 'required' not in request_body['content']['application/json']['schema']
        for response in inspector._get_responses(path, method).values():
            assert 'required' not in response['content']['application/json']['schema']
    def test_response_body_generation(self):
        """Write-only fields are excluded from the response schema."""
        path = '/'
        method = 'POST'
        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            write_only = serializers.CharField(write_only=True)
        class View(generics.GenericAPIView):
            serializer_class = Serializer
        view = create_view(
            View,
            method,
            create_request(path)
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        assert responses['200']['content']['application/json']['schema']['required'] == ['text']
        assert list(responses['200']['content']['application/json']['schema']['properties'].keys()) == ['text']
        assert 'description' in responses['200']
    def test_response_body_nested_serializer(self):
        """Nested serializers map to nested 'object' schemas with their own required lists."""
        path = '/'
        method = 'POST'
        class NestedSerializer(serializers.Serializer):
            number = serializers.IntegerField()
        class Serializer(serializers.Serializer):
            text = serializers.CharField()
            nested = NestedSerializer()
        class View(generics.GenericAPIView):
            serializer_class = Serializer
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        schema = responses['200']['content']['application/json']['schema']
        assert sorted(schema['required']) == ['nested', 'text']
        assert sorted(list(schema['properties'].keys())) == ['nested', 'text']
        assert schema['properties']['nested']['type'] == 'object'
        assert list(schema['properties']['nested']['properties'].keys()) == ['number']
        assert schema['properties']['nested']['required'] == ['number']
    def test_list_response_body_generation(self):
        """GET on a list view wraps the item schema in an array."""
        path = '/'
        method = 'GET'
        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()
        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'type': 'array',
                            'items': {
                                'properties': {
                                    'text': {
                                        'type': 'string',
                                    },
                                },
                                'required': ['text'],
                            },
                        },
                    },
                },
            },
        }
    def test_paginated_list_response_body_generation(self):
        """A pagination class can wrap the array schema via get_paginated_response_schema."""
        path = '/'
        method = 'GET'
        class Pagination(pagination.BasePagination):
            def get_paginated_response_schema(self, schema):
                return {
                    'type': 'object',
                    'item': schema,
                }
        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()
        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer
            pagination_class = Pagination
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'type': 'object',
                            'item': {
                                'type': 'array',
                                'items': {
                                    'properties': {
                                        'text': {
                                            'type': 'string',
                                        },
                                    },
                                    'required': ['text'],
                                },
                            },
                        },
                    },
                },
            },
        }
    def test_delete_response_body_generation(self):
        """DELETE yields a body-less 204 response."""
        path = '/{id}/'
        method = 'DELETE'
        class View(generics.DestroyAPIView):
            serializer_class = views.ExampleSerializer
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        assert responses == {
            '204': {
                'description': '',
            },
        }
    def test_parser_mapping(self):
        """Each configured parser class contributes a request-body media type."""
        path = '/{id}/'
        method = 'POST'
        class View(generics.CreateAPIView):
            serializer_class = views.ExampleSerializer
            parser_classes = [JSONParser, MultiPartParser]
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        request_body = inspector._get_request_body(path, method)
        assert len(request_body['content'].keys()) == 2
        assert 'multipart/form-data' in request_body['content']
        assert 'application/json' in request_body['content']
    def test_renderer_mapping(self):
        """Each configured renderer class contributes a response media type."""
        path = '/{id}/'
        method = 'GET'
        class View(generics.CreateAPIView):
            serializer_class = views.ExampleSerializer
            renderer_classes = [JSONRenderer]
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        # TODO: Add example to view to make this less brittle.
        success_response = responses['200']
        assert len(success_response['content'].keys()) == 1
        assert 'application/json' in success_response['content']
    def test_serializer_filefield(self):
        """FileField maps to a 'binary' format in the multipart request body."""
        path = '/{id}/'
        method = 'POST'
        class ItemSerializer(serializers.Serializer):
            attachment = serializers.FileField()
        class View(generics.CreateAPIView):
            serializer_class = ItemSerializer
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        request_body = inspector._get_request_body(path, method)
        mp_media = request_body['content']['multipart/form-data']
        attachment = mp_media['schema']['properties']['attachment']
        assert attachment['format'] == 'binary'
    def test_retrieve_response_body_generation(self):
        """Detail (retrieve) responses are not wrapped by the pagination schema."""
        path = '/{id}/'
        method = 'GET'
        class Pagination(pagination.BasePagination):
            def get_paginated_response_schema(self, schema):
                return {
                    'type': 'object',
                    'item': schema,
                }
        class ItemSerializer(serializers.Serializer):
            text = serializers.CharField()
        class View(generics.GenericAPIView):
            serializer_class = ItemSerializer
            pagination_class = Pagination
        view = create_view(
            View,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        assert responses == {
            '200': {
                'description': '',
                'content': {
                    'application/json': {
                        'schema': {
                            'properties': {
                                'text': {
                                    'type': 'string',
                                },
                            },
                            'required': ['text'],
                        },
                    },
                },
            },
        }
    def test_operation_id_generation(self):
        """List views get a 'list<Model>s'-style operationId."""
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        operationId = inspector._get_operation_id(path, method)
        assert operationId == 'listExamples'
    def test_repeat_operation_ids(self):
        """Custom action names keep operationIds unique across a router."""
        router = routers.SimpleRouter()
        router.register('account', views.ExampleGenericViewSet, basename="account")
        urlpatterns = router.urls
        generator = SchemaGenerator(patterns=urlpatterns)
        request = create_request('/')
        schema = generator.get_schema(request=request)
        schema_str = str(schema)
        # NOTE(review): leftover debug print — consider removing.
        print(schema_str)
        assert schema_str.count("operationId") == 2
        assert schema_str.count("newExample") == 1
        assert schema_str.count("oldExample") == 1
    def test_serializer_datefield(self):
        """Date/DateTime fields map to string with 'date' / 'date-time' formats."""
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        assert properties['date']['type'] == properties['datetime']['type'] == 'string'
        assert properties['date']['format'] == 'date'
        assert properties['datetime']['format'] == 'date-time'
    def test_serializer_hstorefield(self):
        """HStoreField maps to an 'object' schema."""
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        assert properties['hstore']['type'] == 'object'
    def test_serializer_callable_default(self):
        """Callable defaults are not serialized into the schema."""
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleGenericAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        assert 'default' not in properties['uuid_field']
    def test_serializer_validators(self):
        """Field validators surface as OpenAPI constraints (min/max, length, pattern, …)."""
        path = '/'
        method = 'GET'
        view = create_view(
            views.ExampleValidatedAPIView,
            method,
            create_request(path),
        )
        inspector = AutoSchema()
        inspector.view = view
        responses = inspector._get_responses(path, method)
        response_schema = responses['200']['content']['application/json']['schema']
        properties = response_schema['items']['properties']
        assert properties['integer']['type'] == 'integer'
        assert properties['integer']['maximum'] == 99
        assert properties['integer']['minimum'] == -11
        assert properties['string']['minLength'] == 2
        assert properties['string']['maxLength'] == 10
        assert properties['lst']['minItems'] == 2
        assert properties['lst']['maxItems'] == 10
        assert properties['regex']['pattern'] == r'[ABC]12{3}'
        assert properties['regex']['description'] == 'must have an A, B, or C followed by 1222'
        assert properties['decimal1']['type'] == 'number'
        assert properties['decimal1']['multipleOf'] == .01
        assert properties['decimal1']['maximum'] == 10000
        assert properties['decimal1']['minimum'] == -10000
        assert properties['decimal2']['type'] == 'number'
        assert properties['decimal2']['multipleOf'] == .0001
        assert properties['email']['type'] == 'string'
        assert properties['email']['format'] == 'email'
        assert properties['email']['default'] == 'foo@bar.com'
        assert properties['url']['type'] == 'string'
        assert properties['url']['nullable'] is True
        assert properties['url']['default'] == 'http://www.example.com'
        assert properties['uuid']['type'] == 'string'
        assert properties['uuid']['format'] == 'uuid'
        assert properties['ip4']['type'] == 'string'
        assert properties['ip4']['format'] == 'ipv4'
        assert properties['ip6']['type'] == 'string'
        assert properties['ip6']['format'] == 'ipv6'
        assert properties['ip']['type'] == 'string'
        assert 'format' not in properties['ip']
@pytest.mark.skipif(uritemplate is None, reason='uritemplate not installed.')
@override_settings(REST_FRAMEWORK={'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.openapi.AutoSchema'})
class TestGenerator(TestCase):
    """Tests for SchemaGenerator's top-level OpenAPI document assembly."""
    def test_override_settings(self):
        """DEFAULT_SCHEMA_CLASS setting installs AutoSchema on views."""
        assert isinstance(views.ExampleListView.schema, AutoSchema)
    def test_paths_construction(self):
        """Each endpoint contributes per-HTTP-method operations under its path."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        generator._initialise_endpoints()
        paths = generator.get_schema()["paths"]
        assert '/example/' in paths
        example_operations = paths['/example/']
        assert len(example_operations) == 2
        assert 'get' in example_operations
        assert 'post' in example_operations
    def test_prefixed_paths_construction(self):
        """Common URL prefixes (e.g. /v1/) are preserved in the emitted paths."""
        patterns = [
            url(r'^v1/example/?$', views.ExampleListView.as_view()),
            url(r'^v1/example/{pk}/?$', views.ExampleDetailView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        generator._initialise_endpoints()
        paths = generator.get_schema()["paths"]
        assert '/v1/example/' in paths
        assert '/v1/example/{id}/' in paths
    def test_mount_url_prefixed_to_paths(self):
        """An explicit url= mount point is prepended to every path."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
            url(r'^example/{pk}/?$', views.ExampleDetailView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns, url='/api')
        generator._initialise_endpoints()
        paths = generator.get_schema()["paths"]
        assert '/api/example/' in paths
        assert '/api/example/{id}/' in paths
    def test_schema_construction(self):
        """The generated document carries top-level 'openapi' and 'paths' keys."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        request = create_request('/')
        schema = generator.get_schema(request=request)
        assert 'openapi' in schema
        assert 'paths' in schema
    def test_schema_with_no_paths(self):
        """No URL patterns yields an empty 'paths' mapping."""
        patterns = []
        generator = SchemaGenerator(patterns=patterns)
        request = create_request('/')
        schema = generator.get_schema(request=request)
        assert schema['paths'] == {}
    def test_schema_information(self):
        """title/version/description propagate into the OpenAPI info object."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns, title='My title', version='1.2.3', description='My description')
        request = create_request('/')
        schema = generator.get_schema(request=request)
        assert schema['info']['title'] == 'My title'
        assert schema['info']['version'] == '1.2.3'
        assert schema['info']['description'] == 'My description'
    def test_schema_information_empty(self):
        """info defaults to empty title and version when none are supplied."""
        patterns = [
            url(r'^example/?$', views.ExampleListView.as_view()),
        ]
        generator = SchemaGenerator(patterns=patterns)
        request = create_request('/')
        schema = generator.get_schema(request=request)
        assert schema['info']['title'] == ''
        assert schema['info']['version'] == ''
| true | true |
f734fe34b1e61e45b4b8a3552db35388a603ad0c | 376 | py | Python | trackme/utils/messages/auth_messages.py | j4l13n/trackMe | aab64060dfed7147a4604b80fe861f990d95a161 | [
"MIT"
] | null | null | null | trackme/utils/messages/auth_messages.py | j4l13n/trackMe | aab64060dfed7147a4604b80fe861f990d95a161 | [
"MIT"
] | 7 | 2020-08-10T08:20:20.000Z | 2020-08-18T07:58:22.000Z | trackme/utils/messages/auth_messages.py | j4l13n/trackMe | aab64060dfed7147a4604b80fe861f990d95a161 | [
"MIT"
] | null | null | null | AUTH_SUCCESS_RESPONSES = {
"register_success": "You have successfully registered to TrackMe",
"login_success": "User logged in successfully"
}
AUTH_ERROR_RESPONSES = {
"register_error": "Something went wrong while creating an account: {}",
"invalid_credentials": "Incorrect email or password",
"not_active": "Your Email address has not been verified. "
}
| 34.181818 | 75 | 0.728723 | AUTH_SUCCESS_RESPONSES = {
"register_success": "You have successfully registered to TrackMe",
"login_success": "User logged in successfully"
}
AUTH_ERROR_RESPONSES = {
"register_error": "Something went wrong while creating an account: {}",
"invalid_credentials": "Incorrect email or password",
"not_active": "Your Email address has not been verified. "
}
| true | true |
f734fe7fd58f4a3a26a042d8b339ae8dbff991e2 | 12,657 | py | Python | pandas/tests/series/test_alter_axes.py | ivary43/pandas | 46adc5b1c2aacb312d72729af72bc0ad600917c0 | [
"BSD-3-Clause"
] | 1 | 2020-04-26T17:14:25.000Z | 2020-04-26T17:14:25.000Z | pandas/tests/series/test_alter_axes.py | ivary43/pandas | 46adc5b1c2aacb312d72729af72bc0ad600917c0 | [
"BSD-3-Clause"
] | null | null | null | pandas/tests/series/test_alter_axes.py | ivary43/pandas | 46adc5b1c2aacb312d72729af72bc0ad600917c0 | [
"BSD-3-Clause"
] | 1 | 2020-01-02T14:28:17.000Z | 2020-01-02T14:28:17.000Z | from datetime import datetime
import numpy as np
import pytest
from pandas import DataFrame, Index, MultiIndex, RangeIndex, Series
import pandas.util.testing as tm
class TestSeriesAlterAxes:
def test_setindex(self, string_series):
# wrong type
msg = (r"Index\(\.\.\.\) must be called with a collection of some"
r" kind, None was passed")
with pytest.raises(TypeError, match=msg):
string_series.index = None
# wrong length
msg = ("Length mismatch: Expected axis has 30 elements, new"
" values have 29 elements")
with pytest.raises(ValueError, match=msg):
string_series.index = np.arange(len(string_series) - 1)
# works
string_series.index = np.arange(len(string_series))
assert isinstance(string_series.index, Index)
# Renaming
def test_rename(self, datetime_series):
ts = datetime_series
renamer = lambda x: x.strftime('%Y%m%d')
renamed = ts.rename(renamer)
assert renamed.index[0] == renamer(ts.index[0])
# dict
rename_dict = dict(zip(ts.index, renamed.index))
renamed2 = ts.rename(rename_dict)
tm.assert_series_equal(renamed, renamed2)
# partial dict
s = Series(np.arange(4), index=['a', 'b', 'c', 'd'], dtype='int64')
renamed = s.rename({'b': 'foo', 'd': 'bar'})
tm.assert_index_equal(renamed.index, Index(['a', 'foo', 'c', 'bar']))
# index with name
renamer = Series(np.arange(4),
index=Index(['a', 'b', 'c', 'd'], name='name'),
dtype='int64')
renamed = renamer.rename({})
assert renamed.index.name == renamer.index.name
def test_rename_by_series(self):
s = Series(range(5), name='foo')
renamer = Series({1: 10, 2: 20})
result = s.rename(renamer)
expected = Series(range(5), index=[0, 10, 20, 3, 4], name='foo')
tm.assert_series_equal(result, expected)
def test_rename_set_name(self):
s = Series(range(4), index=list('abcd'))
for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]:
result = s.rename(name)
assert result.name == name
tm.assert_numpy_array_equal(result.index.values, s.index.values)
assert s.name is None
def test_rename_set_name_inplace(self):
s = Series(range(3), index=list('abc'))
for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]:
s.rename(name, inplace=True)
assert s.name == name
exp = np.array(['a', 'b', 'c'], dtype=np.object_)
tm.assert_numpy_array_equal(s.index.values, exp)
def test_rename_axis_supported(self):
# Supporting axis for compatibility, detailed in GH-18589
s = Series(range(5))
s.rename({}, axis=0)
s.rename({}, axis='index')
with pytest.raises(ValueError, match='No axis named 5'):
s.rename({}, axis=5)
def test_set_name_attribute(self):
s = Series([1, 2, 3])
s2 = Series([1, 2, 3], name='bar')
for name in [7, 7., 'name', datetime(2001, 1, 1), (1,), "\u05D0"]:
s.name = name
assert s.name == name
s2.name = name
assert s2.name == name
def test_set_name(self):
s = Series([1, 2, 3])
s2 = s._set_name('foo')
assert s2.name == 'foo'
assert s.name is None
assert s is not s2
def test_rename_inplace(self, datetime_series):
renamer = lambda x: x.strftime('%Y%m%d')
expected = renamer(datetime_series.index[0])
datetime_series.rename(renamer, inplace=True)
assert datetime_series.index[0] == expected
def test_set_index_makes_timeseries(self):
idx = tm.makeDateIndex(10)
s = Series(range(10))
s.index = idx
assert s.index.is_all_dates
def test_reset_index(self):
df = tm.makeDataFrame()[:5]
ser = df.stack()
ser.index.names = ['hash', 'category']
ser.name = 'value'
df = ser.reset_index()
assert 'value' in df
df = ser.reset_index(name='value2')
assert 'value2' in df
# check inplace
s = ser.reset_index(drop=True)
s2 = ser
s2.reset_index(drop=True, inplace=True)
tm.assert_series_equal(s, s2)
# level
index = MultiIndex(levels=[['bar'], ['one', 'two', 'three'], [0, 1]],
codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2],
[0, 1, 0, 1, 0, 1]])
s = Series(np.random.randn(6), index=index)
rs = s.reset_index(level=1)
assert len(rs.columns) == 2
rs = s.reset_index(level=[0, 2], drop=True)
tm.assert_index_equal(rs.index, Index(index.get_level_values(1)))
assert isinstance(rs, Series)
def test_reset_index_name(self):
s = Series([1, 2, 3], index=Index(range(3), name='x'))
assert s.reset_index().index.name is None
assert s.reset_index(drop=True).index.name is None
def test_reset_index_level(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]],
columns=['A', 'B', 'C'])
for levels in ['A', 'B'], [0, 1]:
# With MultiIndex
s = df.set_index(['A', 'B'])['C']
result = s.reset_index(level=levels[0])
tm.assert_frame_equal(result, df.set_index('B'))
result = s.reset_index(level=levels[:1])
tm.assert_frame_equal(result, df.set_index('B'))
result = s.reset_index(level=levels)
tm.assert_frame_equal(result, df)
result = df.set_index(['A', 'B']).reset_index(level=levels,
drop=True)
tm.assert_frame_equal(result, df[['C']])
with pytest.raises(KeyError, match='Level E '):
s.reset_index(level=['A', 'E'])
# With single-level Index
s = df.set_index('A')['B']
result = s.reset_index(level=levels[0])
tm.assert_frame_equal(result, df[['A', 'B']])
result = s.reset_index(level=levels[:1])
tm.assert_frame_equal(result, df[['A', 'B']])
result = s.reset_index(level=levels[0], drop=True)
tm.assert_series_equal(result, df['B'])
with pytest.raises(IndexError, match='Too many levels'):
s.reset_index(level=[0, 1, 2])
# Check that .reset_index([],drop=True) doesn't fail
result = Series(range(4)).reset_index([], drop=True)
expected = Series(range(4))
tm.assert_series_equal(result, expected)
def test_reset_index_range(self):
# GH 12071
s = Series(range(2), name='A', dtype='int64')
series_result = s.reset_index()
assert isinstance(series_result.index, RangeIndex)
series_expected = DataFrame([[0, 0], [1, 1]],
columns=['index', 'A'],
index=RangeIndex(stop=2))
tm.assert_frame_equal(series_result, series_expected)
def test_reorder_levels(self):
index = MultiIndex(levels=[['bar'], ['one', 'two', 'three'], [0, 1]],
codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2],
[0, 1, 0, 1, 0, 1]],
names=['L0', 'L1', 'L2'])
s = Series(np.arange(6), index=index)
# no change, position
result = s.reorder_levels([0, 1, 2])
tm.assert_series_equal(s, result)
# no change, labels
result = s.reorder_levels(['L0', 'L1', 'L2'])
tm.assert_series_equal(s, result)
# rotate, position
result = s.reorder_levels([1, 2, 0])
e_idx = MultiIndex(levels=[['one', 'two', 'three'], [0, 1], ['bar']],
codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1],
[0, 0, 0, 0, 0, 0]],
names=['L1', 'L2', 'L0'])
expected = Series(np.arange(6), index=e_idx)
tm.assert_series_equal(result, expected)
def test_rename_axis_mapper(self):
# GH 19978
mi = MultiIndex.from_product([['a', 'b', 'c'], [1, 2]],
names=['ll', 'nn'])
s = Series([i for i in range(len(mi))], index=mi)
result = s.rename_axis(index={'ll': 'foo'})
assert result.index.names == ['foo', 'nn']
result = s.rename_axis(index=str.upper, axis=0)
assert result.index.names == ['LL', 'NN']
result = s.rename_axis(index=['foo', 'goo'])
assert result.index.names == ['foo', 'goo']
with pytest.raises(TypeError, match='unexpected'):
s.rename_axis(columns='wrong')
def test_rename_axis_inplace(self, datetime_series):
# GH 15704
expected = datetime_series.rename_axis('foo')
result = datetime_series
no_return = result.rename_axis('foo', inplace=True)
assert no_return is None
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize('kwargs', [{'mapper': None}, {'index': None}, {}])
def test_rename_axis_none(self, kwargs):
# GH 25034
index = Index(list('abc'), name='foo')
df = Series([1, 2, 3], index=index)
result = df.rename_axis(**kwargs)
expected_index = index.rename(None) if kwargs else index
expected = Series([1, 2, 3], index=expected_index)
tm.assert_series_equal(result, expected)
def test_set_axis_inplace_axes(self, axis_series):
# GH14636
ser = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
expected = ser.copy()
expected.index = list('abcd')
# inplace=True
# The FutureWarning comes from the fact that we would like to have
# inplace default to False some day
for inplace, warn in [(None, FutureWarning), (True, None)]:
result = ser.copy()
kwargs = {'inplace': inplace}
with tm.assert_produces_warning(warn):
result.set_axis(list('abcd'), axis=axis_series, **kwargs)
tm.assert_series_equal(result, expected)
def test_set_axis_inplace(self):
# GH14636
s = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
expected = s.copy()
expected.index = list('abcd')
# inplace=False
result = s.set_axis(list('abcd'), axis=0, inplace=False)
tm.assert_series_equal(expected, result)
# omitting the "axis" parameter
with tm.assert_produces_warning(None):
result = s.set_axis(list('abcd'), inplace=False)
tm.assert_series_equal(result, expected)
# wrong values for the "axis" parameter
for axis in [2, 'foo']:
with pytest.raises(ValueError, match='No axis named'):
s.set_axis(list('abcd'), axis=axis, inplace=False)
def test_set_axis_prior_to_deprecation_signature(self):
s = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
expected = s.copy()
expected.index = list('abcd')
for axis in [0, 'index']:
with tm.assert_produces_warning(FutureWarning):
result = s.set_axis(0, list('abcd'), inplace=False)
tm.assert_series_equal(result, expected)
def test_reset_index_drop_errors(self):
# GH 20925
# KeyError raised for series index when passed level name is missing
s = Series(range(4))
with pytest.raises(KeyError, match='must be same as name'):
s.reset_index('wrong', drop=True)
with pytest.raises(KeyError, match='must be same as name'):
s.reset_index('wrong')
# KeyError raised for series when level to be dropped is missing
s = Series(range(4), index=MultiIndex.from_product([[1, 2]] * 2))
with pytest.raises(KeyError, match='not found'):
s.reset_index('wrong', drop=True)
def test_droplevel(self):
# GH20342
ser = Series([1, 2, 3, 4])
ser.index = MultiIndex.from_arrays([(1, 2, 3, 4), (5, 6, 7, 8)],
names=['a', 'b'])
expected = ser.reset_index('b', drop=True)
result = ser.droplevel('b', axis='index')
tm.assert_series_equal(result, expected)
# test that droplevel raises ValueError on axis != 0
with pytest.raises(ValueError):
ser.droplevel(1, axis='columns')
| 36.900875 | 79 | 0.555187 | from datetime import datetime
import numpy as np
import pytest
from pandas import DataFrame, Index, MultiIndex, RangeIndex, Series
import pandas.util.testing as tm
class TestSeriesAlterAxes:
def test_setindex(self, string_series):
msg = (r"Index\(\.\.\.\) must be called with a collection of some"
r" kind, None was passed")
with pytest.raises(TypeError, match=msg):
string_series.index = None
msg = ("Length mismatch: Expected axis has 30 elements, new"
" values have 29 elements")
with pytest.raises(ValueError, match=msg):
string_series.index = np.arange(len(string_series) - 1)
string_series.index = np.arange(len(string_series))
assert isinstance(string_series.index, Index)
def test_rename(self, datetime_series):
ts = datetime_series
renamer = lambda x: x.strftime('%Y%m%d')
renamed = ts.rename(renamer)
assert renamed.index[0] == renamer(ts.index[0])
rename_dict = dict(zip(ts.index, renamed.index))
renamed2 = ts.rename(rename_dict)
tm.assert_series_equal(renamed, renamed2)
s = Series(np.arange(4), index=['a', 'b', 'c', 'd'], dtype='int64')
renamed = s.rename({'b': 'foo', 'd': 'bar'})
tm.assert_index_equal(renamed.index, Index(['a', 'foo', 'c', 'bar']))
renamer = Series(np.arange(4),
index=Index(['a', 'b', 'c', 'd'], name='name'),
dtype='int64')
renamed = renamer.rename({})
assert renamed.index.name == renamer.index.name
def test_rename_by_series(self):
s = Series(range(5), name='foo')
renamer = Series({1: 10, 2: 20})
result = s.rename(renamer)
expected = Series(range(5), index=[0, 10, 20, 3, 4], name='foo')
tm.assert_series_equal(result, expected)
def test_rename_set_name(self):
s = Series(range(4), index=list('abcd'))
for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]:
result = s.rename(name)
assert result.name == name
tm.assert_numpy_array_equal(result.index.values, s.index.values)
assert s.name is None
def test_rename_set_name_inplace(self):
s = Series(range(3), index=list('abc'))
for name in ['foo', 123, 123., datetime(2001, 11, 11), ('foo',)]:
s.rename(name, inplace=True)
assert s.name == name
exp = np.array(['a', 'b', 'c'], dtype=np.object_)
tm.assert_numpy_array_equal(s.index.values, exp)
def test_rename_axis_supported(self):
s = Series(range(5))
s.rename({}, axis=0)
s.rename({}, axis='index')
with pytest.raises(ValueError, match='No axis named 5'):
s.rename({}, axis=5)
def test_set_name_attribute(self):
s = Series([1, 2, 3])
s2 = Series([1, 2, 3], name='bar')
for name in [7, 7., 'name', datetime(2001, 1, 1), (1,), "\u05D0"]:
s.name = name
assert s.name == name
s2.name = name
assert s2.name == name
def test_set_name(self):
s = Series([1, 2, 3])
s2 = s._set_name('foo')
assert s2.name == 'foo'
assert s.name is None
assert s is not s2
def test_rename_inplace(self, datetime_series):
renamer = lambda x: x.strftime('%Y%m%d')
expected = renamer(datetime_series.index[0])
datetime_series.rename(renamer, inplace=True)
assert datetime_series.index[0] == expected
def test_set_index_makes_timeseries(self):
idx = tm.makeDateIndex(10)
s = Series(range(10))
s.index = idx
assert s.index.is_all_dates
def test_reset_index(self):
df = tm.makeDataFrame()[:5]
ser = df.stack()
ser.index.names = ['hash', 'category']
ser.name = 'value'
df = ser.reset_index()
assert 'value' in df
df = ser.reset_index(name='value2')
assert 'value2' in df
s = ser.reset_index(drop=True)
s2 = ser
s2.reset_index(drop=True, inplace=True)
tm.assert_series_equal(s, s2)
index = MultiIndex(levels=[['bar'], ['one', 'two', 'three'], [0, 1]],
codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2],
[0, 1, 0, 1, 0, 1]])
s = Series(np.random.randn(6), index=index)
rs = s.reset_index(level=1)
assert len(rs.columns) == 2
rs = s.reset_index(level=[0, 2], drop=True)
tm.assert_index_equal(rs.index, Index(index.get_level_values(1)))
assert isinstance(rs, Series)
def test_reset_index_name(self):
s = Series([1, 2, 3], index=Index(range(3), name='x'))
assert s.reset_index().index.name is None
assert s.reset_index(drop=True).index.name is None
def test_reset_index_level(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]],
columns=['A', 'B', 'C'])
for levels in ['A', 'B'], [0, 1]:
s = df.set_index(['A', 'B'])['C']
result = s.reset_index(level=levels[0])
tm.assert_frame_equal(result, df.set_index('B'))
result = s.reset_index(level=levels[:1])
tm.assert_frame_equal(result, df.set_index('B'))
result = s.reset_index(level=levels)
tm.assert_frame_equal(result, df)
result = df.set_index(['A', 'B']).reset_index(level=levels,
drop=True)
tm.assert_frame_equal(result, df[['C']])
with pytest.raises(KeyError, match='Level E '):
s.reset_index(level=['A', 'E'])
s = df.set_index('A')['B']
result = s.reset_index(level=levels[0])
tm.assert_frame_equal(result, df[['A', 'B']])
result = s.reset_index(level=levels[:1])
tm.assert_frame_equal(result, df[['A', 'B']])
result = s.reset_index(level=levels[0], drop=True)
tm.assert_series_equal(result, df['B'])
with pytest.raises(IndexError, match='Too many levels'):
s.reset_index(level=[0, 1, 2])
result = Series(range(4)).reset_index([], drop=True)
expected = Series(range(4))
tm.assert_series_equal(result, expected)
def test_reset_index_range(self):
# GH 12071
s = Series(range(2), name='A', dtype='int64')
series_result = s.reset_index()
assert isinstance(series_result.index, RangeIndex)
series_expected = DataFrame([[0, 0], [1, 1]],
columns=['index', 'A'],
index=RangeIndex(stop=2))
tm.assert_frame_equal(series_result, series_expected)
    def test_reorder_levels(self):
        """Series.reorder_levels permutes MultiIndex levels, accepting
        either positional indices or level names."""
        index = MultiIndex(levels=[['bar'], ['one', 'two', 'three'], [0, 1]],
                           codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2],
                                  [0, 1, 0, 1, 0, 1]],
                           names=['L0', 'L1', 'L2'])
        s = Series(np.arange(6), index=index)
        # no change, position
        result = s.reorder_levels([0, 1, 2])
        tm.assert_series_equal(s, result)
        # no change, labels
        result = s.reorder_levels(['L0', 'L1', 'L2'])
        tm.assert_series_equal(s, result)
        # rotate, position
        result = s.reorder_levels([1, 2, 0])
        e_idx = MultiIndex(levels=[['one', 'two', 'three'], [0, 1], ['bar']],
                           codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1],
                                  [0, 0, 0, 0, 0, 0]],
                           names=['L1', 'L2', 'L0'])
        expected = Series(np.arange(6), index=e_idx)
        tm.assert_series_equal(result, expected)
def test_rename_axis_mapper(self):
# GH 19978
mi = MultiIndex.from_product([['a', 'b', 'c'], [1, 2]],
names=['ll', 'nn'])
s = Series([i for i in range(len(mi))], index=mi)
result = s.rename_axis(index={'ll': 'foo'})
assert result.index.names == ['foo', 'nn']
result = s.rename_axis(index=str.upper, axis=0)
assert result.index.names == ['LL', 'NN']
result = s.rename_axis(index=['foo', 'goo'])
assert result.index.names == ['foo', 'goo']
with pytest.raises(TypeError, match='unexpected'):
s.rename_axis(columns='wrong')
def test_rename_axis_inplace(self, datetime_series):
# GH 15704
expected = datetime_series.rename_axis('foo')
result = datetime_series
no_return = result.rename_axis('foo', inplace=True)
assert no_return is None
tm.assert_series_equal(result, expected)
@pytest.mark.parametrize('kwargs', [{'mapper': None}, {'index': None}, {}])
def test_rename_axis_none(self, kwargs):
# GH 25034
index = Index(list('abc'), name='foo')
df = Series([1, 2, 3], index=index)
result = df.rename_axis(**kwargs)
expected_index = index.rename(None) if kwargs else index
expected = Series([1, 2, 3], index=expected_index)
tm.assert_series_equal(result, expected)
    def test_set_axis_inplace_axes(self, axis_series):
        """set_axis with an explicit axis spelling (``axis_series`` fixture).

        ``inplace=None`` must emit a FutureWarning (the default is slated
        to become False); ``inplace=True`` mutates silently. Both leave
        the Series equal to the expected relabelled copy.
        """
        # GH14636
        ser = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
        expected = ser.copy()
        expected.index = list('abcd')
        # inplace=True
        # The FutureWarning comes from the fact that we would like to have
        # inplace default to False some day
        for inplace, warn in [(None, FutureWarning), (True, None)]:
            result = ser.copy()
            kwargs = {'inplace': inplace}
            with tm.assert_produces_warning(warn):
                result.set_axis(list('abcd'), axis=axis_series, **kwargs)
            tm.assert_series_equal(result, expected)
    def test_set_axis_inplace(self):
        """set_axis out-of-place returns a relabelled copy; the axis may
        be omitted, and invalid axis values raise ValueError."""
        # GH14636
        s = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
        expected = s.copy()
        expected.index = list('abcd')
        # inplace=False
        result = s.set_axis(list('abcd'), axis=0, inplace=False)
        tm.assert_series_equal(expected, result)
        # omitting the "axis" parameter
        with tm.assert_produces_warning(None):
            result = s.set_axis(list('abcd'), inplace=False)
        tm.assert_series_equal(result, expected)
        # wrong values for the "axis" parameter
        for axis in [2, 'foo']:
            with pytest.raises(ValueError, match='No axis named'):
                s.set_axis(list('abcd'), axis=axis, inplace=False)
def test_set_axis_prior_to_deprecation_signature(self):
s = Series(np.arange(4), index=[1, 3, 5, 7], dtype='int64')
expected = s.copy()
expected.index = list('abcd')
for axis in [0, 'index']:
with tm.assert_produces_warning(FutureWarning):
result = s.set_axis(0, list('abcd'), inplace=False)
tm.assert_series_equal(result, expected)
def test_reset_index_drop_errors(self):
# GH 20925
# KeyError raised for series index when passed level name is missing
s = Series(range(4))
with pytest.raises(KeyError, match='must be same as name'):
s.reset_index('wrong', drop=True)
with pytest.raises(KeyError, match='must be same as name'):
s.reset_index('wrong')
# KeyError raised for series when level to be dropped is missing
s = Series(range(4), index=MultiIndex.from_product([[1, 2]] * 2))
with pytest.raises(KeyError, match='not found'):
s.reset_index('wrong', drop=True)
def test_droplevel(self):
# GH20342
ser = Series([1, 2, 3, 4])
ser.index = MultiIndex.from_arrays([(1, 2, 3, 4), (5, 6, 7, 8)],
names=['a', 'b'])
expected = ser.reset_index('b', drop=True)
result = ser.droplevel('b', axis='index')
tm.assert_series_equal(result, expected)
# test that droplevel raises ValueError on axis != 0
with pytest.raises(ValueError):
ser.droplevel(1, axis='columns')
| true | true |
f734fee836f525ac79f83d059794cdc10b43ae4a | 6,835 | py | Python | beagle/transformers/darpa_tc_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 1,139 | 2019-03-24T09:09:05.000Z | 2022-03-27T14:54:38.000Z | beagle/transformers/darpa_tc_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 78 | 2019-03-24T16:56:06.000Z | 2022-02-27T21:31:38.000Z | beagle/transformers/darpa_tc_transformer.py | limkokhian/beagle | 791e83db94e5a8ab1965b155bb79d32bb259d2b3 | [
"MIT"
] | 149 | 2019-03-24T16:44:45.000Z | 2022-03-11T12:20:51.000Z | from typing import List, Optional, Tuple, Union
from beagle.common import logger, split_path, split_reg_path
from beagle.nodes import File, Process, RegistryKey, IPAddress
from beagle.transformers.base_transformer import Transformer
# Custom Node classes to use the UUID in TC
class TCProcess(Process):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCFile(File):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCRegistryKey(RegistryKey):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCIPAddress(IPAddress):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class DRAPATCTransformer(Transformer):
name = "DARPA TC"
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
logger.info("Created Darpa Transperant Computing Transformer.")
def transform(self, event: dict) -> Optional[Tuple]:
event_type = event["event_type"]
if event_type == "subject" and event["type"] == "SUBJECT_PROCESS":
return self.make_process(event)
elif event_type == "fileobject" and event["type"] in [
"FILE_OBJECT_BLOCK",
"FILE_OBJECT_PEFILE",
]:
return self.make_file(event)
elif event_type == "registrykeyobject":
return self.make_registrykey(event)
elif event_type == "netflowobject":
return self.make_addr(event)
elif event_type == "event" and event["type"] in [
"EVENT_READ",
"EVENT_OPEN",
"EVENT_WRITE",
"EVENT_WRITE_APPEND",
"EVENT_MODIFY_FILE_ATTRIBUTES",
"EVENT_CREATE_OBJECT",
"EVENT_LOAD_LIBRARY",
]:
return self.file_events(event)
elif event_type == "event" and event["type"] == "EVENT_EXECUTE":
return self.execute_events(event)
elif event_type == "event" and event["type"] in ["EVENT_CONNECT"]:
return self.conn_events(event)
return tuple()
def make_process(self, event: dict) -> Union[Tuple[TCProcess], Tuple[TCProcess, TCProcess]]:
if event.get("cmdLine"):
proc_cmdline = event["cmdLine"]["string"]
else:
proc_cmdline = None
path = None
image = None
if event.get("properties"):
path = event["properties"]["map"].get("path")
if "/" in path:
# Swap the path directions
path = path.replace("/", "\\")
image, path = split_path(path)
proc = TCProcess(
uuid=event["uuid"],
process_image=image or proc_cmdline,
process_image_path=path or proc_cmdline,
command_line=proc_cmdline,
host=event["hostId"],
)
if event.get("parentSubject"):
parent = TCProcess(
uuid=event["parentSubject"]["com.bbn.tc.schema.avro.cdm18.UUID"],
host=event["hostId"],
)
parent.launched[proc]
return (proc, parent)
else:
return (proc,)
def make_file(self, event: dict) -> Tuple[TCFile]:
base_obj = event["baseObject"]
file_node = TCFile(uuid=event["uuid"], host=base_obj["hostId"])
# Since not everything has a full path, and this is multiple different systems,
# this is the best try for this.
if base_obj.get("properties"):
full_path = base_obj["properties"]["map"].get("filename", "")
full_path = full_path.replace("/", "\\")
file_name, file_path = split_path(full_path)
file_node.full_path = full_path
file_node.file_path = file_path
file_node.file_name = file_name
return (file_node,)
def make_registrykey(self, event: dict) -> Tuple[TCRegistryKey]:
if event["key"].startswith("\\REGISTRY\\"):
event["key"] = event["key"].replace("\\REGISTRY\\", "", 1)
hive, key, path = split_reg_path(event["key"])
base_obj = event["baseObject"]
value = event["value"]["com.bbn.tc.schema.avro.cdm18.Value"]
regkey = TCRegistryKey(
uuid=event["uuid"],
host=base_obj["hostId"],
value_type=value["valueDataType"],
value=value["name"],
hive=hive,
key_path=path,
key=key,
)
return (regkey,)
def make_addr(self, event: dict) -> Tuple[TCIPAddress]:
addr = TCIPAddress(uuid=event["uuid"], ip_address=event["remoteAddress"])
# TODO: Add port data somehow
return (addr,)
def file_events(self, event: dict) -> Tuple[TCProcess, TCFile]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
target = TCFile(uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
if event["type"] in ["EVENT_READ", "EVENT_MODIFY_FILE_ATTRIBUTES", "EVENT_OPEN"]:
proc.accessed[target].append(timestamp=event["timestampNanos"])
elif event["type"] in ["EVENT_WRITE", "EVENT_WRITE_APPEND", "EVENT_CREATE_OBJECT"]:
proc.wrote[target].append(timestamp=event["timestampNanos"])
elif event["type"] in ["EVENT_LOAD_LIBRARY"]:
proc.loaded[target].append(timestamp=event["timestampNanos"])
return (proc, target)
def execute_events(self, event: dict) -> Tuple[TCProcess, TCProcess]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
target = TCProcess(
uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"],
process_image=event.get("predicateObjectPath", {}).get("string"),
)
proc.launched[target].append(timestamp=event["timestampNanos"])
return (proc, target)
def conn_events(self, event: dict) -> Tuple[TCProcess, TCIPAddress]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
addr = TCIPAddress(uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
# TODO: Need to add the port data on the edge somehow
proc.connected_to[addr].append(timestamp=event["timestampNanos"])
return (proc, addr)
| 34.004975 | 96 | 0.597952 | from typing import List, Optional, Tuple, Union
from beagle.common import logger, split_path, split_reg_path
from beagle.nodes import File, Process, RegistryKey, IPAddress
from beagle.transformers.base_transformer import Transformer
class TCProcess(Process):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCFile(File):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCRegistryKey(RegistryKey):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class TCIPAddress(IPAddress):
key_fields: List[str] = ["uuid"]
uuid: Optional[str]
def __init__(self, uuid: str = None, *args, **kwargs) -> None:
self.uuid = uuid
super().__init__(*args, **kwargs)
class DRAPATCTransformer(Transformer):
name = "DARPA TC"
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
logger.info("Created Darpa Transperant Computing Transformer.")
def transform(self, event: dict) -> Optional[Tuple]:
event_type = event["event_type"]
if event_type == "subject" and event["type"] == "SUBJECT_PROCESS":
return self.make_process(event)
elif event_type == "fileobject" and event["type"] in [
"FILE_OBJECT_BLOCK",
"FILE_OBJECT_PEFILE",
]:
return self.make_file(event)
elif event_type == "registrykeyobject":
return self.make_registrykey(event)
elif event_type == "netflowobject":
return self.make_addr(event)
elif event_type == "event" and event["type"] in [
"EVENT_READ",
"EVENT_OPEN",
"EVENT_WRITE",
"EVENT_WRITE_APPEND",
"EVENT_MODIFY_FILE_ATTRIBUTES",
"EVENT_CREATE_OBJECT",
"EVENT_LOAD_LIBRARY",
]:
return self.file_events(event)
elif event_type == "event" and event["type"] == "EVENT_EXECUTE":
return self.execute_events(event)
elif event_type == "event" and event["type"] in ["EVENT_CONNECT"]:
return self.conn_events(event)
return tuple()
def make_process(self, event: dict) -> Union[Tuple[TCProcess], Tuple[TCProcess, TCProcess]]:
if event.get("cmdLine"):
proc_cmdline = event["cmdLine"]["string"]
else:
proc_cmdline = None
path = None
image = None
if event.get("properties"):
path = event["properties"]["map"].get("path")
if "/" in path:
path = path.replace("/", "\\")
image, path = split_path(path)
proc = TCProcess(
uuid=event["uuid"],
process_image=image or proc_cmdline,
process_image_path=path or proc_cmdline,
command_line=proc_cmdline,
host=event["hostId"],
)
if event.get("parentSubject"):
parent = TCProcess(
uuid=event["parentSubject"]["com.bbn.tc.schema.avro.cdm18.UUID"],
host=event["hostId"],
)
parent.launched[proc]
return (proc, parent)
else:
return (proc,)
def make_file(self, event: dict) -> Tuple[TCFile]:
base_obj = event["baseObject"]
file_node = TCFile(uuid=event["uuid"], host=base_obj["hostId"])
if base_obj.get("properties"):
full_path = base_obj["properties"]["map"].get("filename", "")
full_path = full_path.replace("/", "\\")
file_name, file_path = split_path(full_path)
file_node.full_path = full_path
file_node.file_path = file_path
file_node.file_name = file_name
return (file_node,)
def make_registrykey(self, event: dict) -> Tuple[TCRegistryKey]:
if event["key"].startswith("\\REGISTRY\\"):
event["key"] = event["key"].replace("\\REGISTRY\\", "", 1)
hive, key, path = split_reg_path(event["key"])
base_obj = event["baseObject"]
value = event["value"]["com.bbn.tc.schema.avro.cdm18.Value"]
regkey = TCRegistryKey(
uuid=event["uuid"],
host=base_obj["hostId"],
value_type=value["valueDataType"],
value=value["name"],
hive=hive,
key_path=path,
key=key,
)
return (regkey,)
def make_addr(self, event: dict) -> Tuple[TCIPAddress]:
addr = TCIPAddress(uuid=event["uuid"], ip_address=event["remoteAddress"])
return (addr,)
def file_events(self, event: dict) -> Tuple[TCProcess, TCFile]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
target = TCFile(uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
if event["type"] in ["EVENT_READ", "EVENT_MODIFY_FILE_ATTRIBUTES", "EVENT_OPEN"]:
proc.accessed[target].append(timestamp=event["timestampNanos"])
elif event["type"] in ["EVENT_WRITE", "EVENT_WRITE_APPEND", "EVENT_CREATE_OBJECT"]:
proc.wrote[target].append(timestamp=event["timestampNanos"])
elif event["type"] in ["EVENT_LOAD_LIBRARY"]:
proc.loaded[target].append(timestamp=event["timestampNanos"])
return (proc, target)
def execute_events(self, event: dict) -> Tuple[TCProcess, TCProcess]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
target = TCProcess(
uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"],
process_image=event.get("predicateObjectPath", {}).get("string"),
)
proc.launched[target].append(timestamp=event["timestampNanos"])
return (proc, target)
def conn_events(self, event: dict) -> Tuple[TCProcess, TCIPAddress]:
proc = TCProcess(uuid=event["subject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
addr = TCIPAddress(uuid=event["predicateObject"]["com.bbn.tc.schema.avro.cdm18.UUID"])
proc.connected_to[addr].append(timestamp=event["timestampNanos"])
return (proc, addr)
| true | true |
f73500cf450e08e25d97f5cb04a26cfa720754d1 | 2,897 | py | Python | rest_framework_tracking/admin.py | Movemeback/drf-api-tracking | 3730907eee448917f766aa1d85402e85ed775d0c | [
"0BSD"
] | null | null | null | rest_framework_tracking/admin.py | Movemeback/drf-api-tracking | 3730907eee448917f766aa1d85402e85ed775d0c | [
"0BSD"
] | null | null | null | rest_framework_tracking/admin.py | Movemeback/drf-api-tracking | 3730907eee448917f766aa1d85402e85ed775d0c | [
"0BSD"
] | null | null | null | import datetime
from django.contrib import admin
from django.db.models import Count
from django.db.models.functions import TruncDay
from django.urls import path
from django.http import JsonResponse
from .app_settings import app_settings
from .models import APIRequestLog
class APIRequestLogAdmin(admin.ModelAdmin):
    """Admin for APIRequestLog, exposing per-day request counts for a chart."""

    date_hierarchy = "requested_at"
    list_display = (
        "id",
        "requested_at",
        "response_ms",
        "status_code",
        "user",
        "view_method",
        "path",
        "remote_addr",
        "host",
        "query_params",
    )
    ordering = ("-requested_at",)
    list_filter = ("view_method", "status_code")
    search_fields = (
        "path",
        "user",
    )

    # Optionally make every field read-only, controlled by app settings.
    if app_settings.ADMIN_LOG_READONLY:
        readonly_fields = (
            "user",
            "username_persistent",
            "requested_at",
            "response_ms",
            "path",
            "view",
            "view_method",
            "remote_addr",
            "host",
            "method",
            "query_params",
            "data",
            "response",
            "errors",
            "status_code",
        )

    def changelist_view(self, request, extra_context=None):
        """Render the change list with per-day counts for the chart."""
        # Aggregate api logs per day
        chart_data = (
            APIRequestLog.objects.annotate(date=TruncDay("requested_at"))
            .values("date")
            .annotate(y=Count("id"))
            .order_by("-date")
        )
        # NOTE(review): chart data is only injected when the caller did not
        # supply its own extra_context (preserves the original behaviour).
        extra_context = extra_context or {"chart_data": list(chart_data)}

        # Call the superclass changelist_view to render the page
        return super().changelist_view(request, extra_context=extra_context)

    def get_urls(self):
        """Prepend the JSON chart-data endpoint to the default admin urls."""
        urls = super().get_urls()
        extra_urls = [
            path("chart_data/", self.admin_site.admin_view(self.chart_data_endpoint))
        ]
        return extra_urls + urls

    # JSON endpoint for generating chart data that is used for dynamic loading
    # via JS.
    def chart_data_endpoint(self, request):
        """Return per-day counts between ?start_date and ?end_date (YYYY-MM-DD)."""
        start_date = request.GET.get("start_date")
        end_date = request.GET.get("end_date")
        # BUG FIX: missing or malformed query parameters previously reached
        # strptime unchecked and raised TypeError/ValueError (an HTTP 500);
        # answer with a 400 and an explanatory payload instead.
        try:
            start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
            end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d").date()
        except (TypeError, ValueError):
            return JsonResponse(
                {"error": "start_date and end_date must be provided as YYYY-MM-DD"},
                status=400,
            )

        chart_data = self.chart_data(start_date, end_date)
        return JsonResponse(list(chart_data), safe=False)

    def chart_data(self, start_date, end_date):
        """Queryset of ``{date, y}`` rows counting requests per day in range."""
        return (
            APIRequestLog.objects.filter(
                requested_at__date__gte=start_date, requested_at__date__lte=end_date
            )
            .annotate(date=TruncDay("requested_at"))
            .values("date")
            .annotate(y=Count("id"))
            .order_by("-date")
        )
# Make request logs browsable in the Django admin.
admin.site.register(APIRequestLog, APIRequestLogAdmin)
| 28.97 | 85 | 0.594408 | import datetime
from django.contrib import admin
from django.db.models import Count
from django.db.models.functions import TruncDay
from django.urls import path
from django.http import JsonResponse
from .app_settings import app_settings
from .models import APIRequestLog
class APIRequestLogAdmin(admin.ModelAdmin):
date_hierarchy = "requested_at"
list_display = (
"id",
"requested_at",
"response_ms",
"status_code",
"user",
"view_method",
"path",
"remote_addr",
"host",
"query_params",
)
ordering = ("-requested_at",)
list_filter = ("view_method", "status_code")
search_fields = (
"path",
"user",
)
if app_settings.ADMIN_LOG_READONLY:
readonly_fields = (
"user",
"username_persistent",
"requested_at",
"response_ms",
"path",
"view",
"view_method",
"remote_addr",
"host",
"method",
"query_params",
"data",
"response",
"errors",
"status_code",
)
def changelist_view(self, request, extra_context=None):
chart_data = (
APIRequestLog.objects.annotate(date=TruncDay("requested_at"))
.values("date")
.annotate(y=Count("id"))
.order_by("-date")
)
extra_context = extra_context or {"chart_data": list(chart_data)}
return super().changelist_view(request, extra_context=extra_context)
def get_urls(self):
urls = super().get_urls()
extra_urls = [
path("chart_data/", self.admin_site.admin_view(self.chart_data_endpoint))
]
return extra_urls + urls
def chart_data_endpoint(self, request):
start_date = request.GET.get("start_date")
end_date = request.GET.get("end_date")
start_date = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
end_date = datetime.datetime.strptime(end_date, "%Y-%m-%d").date()
chart_data = self.chart_data(start_date, end_date)
return JsonResponse(list(chart_data), safe=False)
def chart_data(self, start_date, end_date):
return (
APIRequestLog.objects.filter(
requested_at__date__gte=start_date, requested_at__date__lte=end_date
)
.annotate(date=TruncDay("requested_at"))
.values("date")
.annotate(y=Count("id"))
.order_by("-date")
)
admin.site.register(APIRequestLog, APIRequestLogAdmin)
| true | true |
f73500ecce8a3e73abd3291d623dc2f348a5e473 | 47,456 | py | Python | python/GafferArnoldTest/ArnoldRenderTest.py | Tuftux/gaffer | 5acaf7cbfadbae841dc06854121ca85dcc5c338c | [
"BSD-3-Clause"
] | 31 | 2017-07-10T10:02:07.000Z | 2022-02-08T13:54:14.000Z | python/GafferArnoldTest/ArnoldRenderTest.py | Tuftux/gaffer | 5acaf7cbfadbae841dc06854121ca85dcc5c338c | [
"BSD-3-Clause"
] | null | null | null | python/GafferArnoldTest/ArnoldRenderTest.py | Tuftux/gaffer | 5acaf7cbfadbae841dc06854121ca85dcc5c338c | [
"BSD-3-Clause"
] | 3 | 2017-11-04T15:30:11.000Z | 2018-09-25T18:36:11.000Z | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import inspect
import unittest
import subprocess32 as subprocess
import threading
import arnold
import imath
import six
import IECore
import IECoreImage
import IECoreScene
import IECoreArnold
import Gaffer
import GafferTest
import GafferDispatch
import GafferImage
import GafferScene
import GafferSceneTest
import GafferOSL
import GafferArnold
import GafferArnoldTest
class ArnoldRenderTest( GafferSceneTest.SceneTestCase ) :
	def setUp( self ) :
		"""Record a per-test script path inside the temporary directory."""

		GafferSceneTest.SceneTestCase.setUp( self )

		self.__scriptFileName = self.temporaryDirectory() + "/test.gfr"
	def tearDown( self ) :
		"""Deregister the adaptor some tests install, then run base cleanup."""

		GafferSceneTest.SceneTestCase.tearDown( self )

		# Harmless if the current test never registered an adaptor named "Test".
		GafferScene.deregisterAdaptor( "Test" )
def testExecute( self ) :
s = Gaffer.ScriptNode()
s["plane"] = GafferScene.Plane()
s["render"] = GafferArnold.ArnoldRender()
s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
s["render"]["in"].setInput( s["plane"]["out"] )
s["expression"] = Gaffer.Expression()
s["expression"].setExpression( "parent['render']['fileName'] = '" + self.temporaryDirectory() + "/test.%d.ass' % int( context['frame'] )" )
s["fileName"].setValue( self.__scriptFileName )
s.save()
p = subprocess.Popen(
"gaffer execute " + self.__scriptFileName + " -frames 1-3",
shell=True,
stderr = subprocess.PIPE,
)
p.wait()
self.assertFalse( p.returncode )
for i in range( 1, 4 ) :
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/test.%d.ass" % i ) )
def testWaitForImage( self ) :
s = Gaffer.ScriptNode()
s["plane"] = GafferScene.Plane()
s["outputs"] = GafferScene.Outputs()
s["outputs"].addOutput(
"beauty",
IECoreScene.Output(
self.temporaryDirectory() + "/test.tif",
"tiff",
"rgba",
{}
)
)
s["outputs"]["in"].setInput( s["plane"]["out"] )
s["render"] = GafferArnold.ArnoldRender()
s["render"]["in"].setInput( s["outputs"]["out"] )
s["render"]["task"].execute()
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/test.tif" ) )
def testExecuteWithStringSubstitutions( self ) :
s = Gaffer.ScriptNode()
s["plane"] = GafferScene.Plane()
s["render"] = GafferArnold.ArnoldRender()
s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
s["render"]["in"].setInput( s["plane"]["out"] )
s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.####.ass" )
s["fileName"].setValue( self.__scriptFileName )
s.save()
p = subprocess.Popen(
"gaffer execute " + self.__scriptFileName + " -frames 1-3",
shell=True,
stderr = subprocess.PIPE,
)
p.wait()
self.assertFalse( p.returncode )
for i in range( 1, 4 ) :
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/test.%04d.ass" % i ) )
def testImageOutput( self ) :
s = Gaffer.ScriptNode()
s["plane"] = GafferScene.Plane()
s["outputs"] = GafferScene.Outputs()
s["outputs"].addOutput(
"beauty",
IECoreScene.Output(
self.temporaryDirectory() + "/test.####.tif",
"tiff",
"rgba",
{}
)
)
s["outputs"]["in"].setInput( s["plane"]["out"] )
s["render"] = GafferArnold.ArnoldRender()
s["render"]["in"].setInput( s["outputs"]["out"] )
c = Gaffer.Context()
for i in range( 1, 4 ) :
c.setFrame( i )
with c :
s["render"]["task"].execute()
for i in range( 1, 4 ) :
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/test.%04d.tif" % i ) )
def testTypeNamePrefixes( self ) :
self.assertTypeNamesArePrefixed( GafferArnold )
self.assertTypeNamesArePrefixed( GafferArnoldTest )
def testDefaultNames( self ) :
self.assertDefaultNamesAreCorrect( GafferArnold )
self.assertDefaultNamesAreCorrect( GafferArnoldTest )
def testNodesConstructWithDefaultValues( self ) :
self.assertNodesConstructWithDefaultValues( GafferArnold )
self.assertNodesConstructWithDefaultValues( GafferArnoldTest )
	def testDirectoryCreation( self ) :
		"""Rendering must create missing output directories for both the
		image and the ass file, and cope when they already exist."""

		s = Gaffer.ScriptNode()
		s["variables"].addChild( Gaffer.NameValuePlug( "renderDirectory", self.temporaryDirectory() + "/renderTests" ) )
		s["variables"].addChild( Gaffer.NameValuePlug( "assDirectory", self.temporaryDirectory() + "/assTests" ) )

		s["plane"] = GafferScene.Plane()

		s["outputs"] = GafferScene.Outputs()
		s["outputs"]["in"].setInput( s["plane"]["out"] )
		s["outputs"].addOutput(
			"beauty",
			IECoreScene.Output(
				"$renderDirectory/test.####.exr",
				"exr",
				"rgba",
				{}
			)
		)

		s["render"] = GafferArnold.ArnoldRender()
		s["render"]["in"].setInput( s["outputs"]["out"] )
		s["render"]["fileName"].setValue( "$assDirectory/test.####.ass" )
		s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )

		# Neither directory exists before execution.
		self.assertFalse( os.path.exists( self.temporaryDirectory() + "/renderTests" ) )
		self.assertFalse( os.path.exists( self.temporaryDirectory() + "/assTests" ) )
		self.assertFalse( os.path.exists( self.temporaryDirectory() + "/assTests/test.0001.ass" ) )

		s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )

		with s.context() :
			s["render"]["task"].execute()

		# Execution created both directories and the scene description.
		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/renderTests" ) )
		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests" ) )
		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests/test.0001.ass" ) )

		# check it can cope with everything already existing

		with s.context() :
			s["render"]["task"].execute()

		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/renderTests" ) )
		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests" ) )
		self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests/test.0001.ass" ) )
	def testWedge( self ) :
		"""A wedged dispatch renders one image per wedge value; the
		"hidden" variant must be (nearly) black and the "visible" one lit."""

		s = Gaffer.ScriptNode()

		s["sphere"] = GafferScene.Sphere()
		# The set the sphere belongs to is driven by the wedge value...
		s["sphere"]["sets"].setValue( "${wedge:value}" )

		# ...so this filter only matches in the "hidden" wedge, where the
		# attributes node then turns visibility off.
		s["filter"] = GafferScene.SetFilter()
		s["filter"]["setExpression"].setValue( "hidden" )

		s["attributes"] = GafferScene.StandardAttributes()
		s["attributes"]["attributes"]["visibility"]["enabled"].setValue( True )
		s["attributes"]["attributes"]["visibility"]["value"].setValue( False )
		s["attributes"]["filter"].setInput( s["filter"]["out"] )
		s["attributes"]["in"].setInput( s["sphere"]["out"] )

		# One image per wedge value, named after the value.
		s["outputs"] = GafferScene.Outputs()
		s["outputs"].addOutput(
			"beauty",
			IECoreScene.Output(
				self.temporaryDirectory() + "/${wedge:value}.tif",
				"tiff",
				"rgba",
				{
				}
			)
		)
		s["outputs"]["in"].setInput( s["attributes"]["out"] )

		s["render"] = GafferArnold.ArnoldRender()
		s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.####.ass" )
		s["render"]["in"].setInput( s["outputs"]["out"] )

		# Wedge over the two values, rendering once per value.
		s["wedge"] = GafferDispatch.Wedge()
		s["wedge"]["mode"].setValue( int( s["wedge"].Mode.StringList ) )
		s["wedge"]["strings"].setValue( IECore.StringVectorData( [ "visible", "hidden" ] ) )
		s["wedge"]["preTasks"][0].setInput( s["render"]["task"] )

		s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
		s.save()

		dispatcher = GafferDispatch.LocalDispatcher()
		dispatcher["jobsDirectory"].setValue( self.temporaryDirectory() + "/testJobDirectory" )
		dispatcher["framesMode"].setValue( GafferDispatch.Dispatcher.FramesMode.CurrentFrame )
		dispatcher["executeInBackground"].setValue( False )

		dispatcher.dispatch( [ s["wedge"] ] )

		# Read both rendered images back and compare average brightness.
		hidden = GafferImage.ImageReader()
		hidden["fileName"].setValue( self.temporaryDirectory() + "/hidden.tif" )

		visible = GafferImage.ImageReader()
		visible["fileName"].setValue( self.temporaryDirectory() + "/visible.tif" )

		hiddenStats = GafferImage.ImageStats()
		hiddenStats["in"].setInput( hidden["out"] )
		hiddenStats["area"].setValue( hiddenStats["in"]["dataWindow"].getValue() )

		visibleStats = GafferImage.ImageStats()
		visibleStats["in"].setInput( visible["out"] )
		visibleStats["area"].setValue( visibleStats["in"]["dataWindow"].getValue() )

		self.assertLess( hiddenStats["average"].getValue()[0], 0.05 )
		self.assertGreater( visibleStats["average"].getValue()[0], .27 )
@staticmethod
def __m44f( m ) :
return imath.M44f( *[ i for row in m.data for i in row ] )
def testTransformMotion( self ) :

	# Exports an animated scene to an .ass file and checks the
	# motion_start/motion_end parameters, the number of matrix keys and
	# the camera shutter under three configurations : blur off, blur on,
	# and blur on with sampleMotion disabled.

	s = Gaffer.ScriptNode()

	s["plane"] = GafferScene.Plane()
	s["sphere"] = GafferScene.Sphere()
	s["group"] = GafferScene.Group()
	s["group"]["in"][0].setInput( s["plane"]["out"] )
	s["group"]["in"][1].setInput( s["sphere"]["out"] )

	# Animate all three transforms so each location has distinct motion.
	s["expression"] = Gaffer.Expression()
	s["expression"].setExpression(
		inspect.cleandoc(
			"""
			parent["plane"]["transform"]["translate"]["x"] = context.getFrame()
			parent["sphere"]["transform"]["translate"]["y"] = context.getFrame() * 2
			parent["group"]["transform"]["translate"]["z"] = context.getFrame() - 1
			"""
		)
	)

	s["planeFilter"] = GafferScene.PathFilter()
	s["planeFilter"]["paths"].setValue( IECore.StringVectorData( [ "/group/plane" ] ) )

	# Opt the plane out of transform blur ( enabled but False ), so its
	# local animation is not blurred even when blur is turned on
	# globally below. It still inherits the group's blurred transform.
	s["attributes"] = GafferScene.StandardAttributes()
	s["attributes"]["in"].setInput( s["group"]["out"] )
	s["attributes"]["filter"].setInput( s["planeFilter"]["out"] )
	s["attributes"]["attributes"]["transformBlur"]["enabled"].setValue( True )
	s["attributes"]["attributes"]["transformBlur"]["value"].setValue( False )

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["attributes"]["out"] )
	s["options"]["options"]["shutter"]["enabled"].setValue( True )
	s["options"]["options"]["transformBlur"]["enabled"].setValue( True )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# No motion blur

	s["options"]["options"]["transformBlur"]["value"].setValue( False )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrix = arnold.AiNodeGetMatrix( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrix = arnold.AiNodeGetMatrix( plane, "matrix" )

		# Motion parameters should be left at default
		self.assertEqual( sphereMotionStart, 0 )
		self.assertEqual( sphereMotionEnd, 1 )
		self.assertEqual( planeMotionStart, 0 )
		self.assertEqual( planeMotionEnd, 1 )

		# Single on-frame ( frame 1 ) matrices : sphere y = 2, group z = 0,
		# plane x = 1.
		expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, 2, 0 ) )
		expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, 0 ) )

		self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
		self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		# With blur off the shutter collapses to the frame.
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 1 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 1 )

	# Motion blur

	s["options"]["options"]["transformBlur"]["value"].setValue( True )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrices = arnold.AiNodeGetArray( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrices = arnold.AiNodeGetArray( plane, "matrix" )

		# Default shutter is 0.75-1.25 around frame 1, sampled with two
		# matrix keys per object.
		self.assertEqual( sphereMotionStart, 0.75 )
		self.assertEqual( sphereMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( sphereMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( sphereMatrices.contents ), 2 )

		self.assertEqual( planeMotionStart, 0.75 )
		self.assertEqual( planeMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( planeMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( planeMatrices.contents ), 2 )

		for i in range( 0, 2 ) :

			frame = 0.75 + 0.5 * i

			sphereMatrix = arnold.AiArrayGetMtx( sphereMatrices, i )
			expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, frame * 2, frame - 1 ) )

			planeMatrix = arnold.AiArrayGetMtx( planeMatrices, i )
			# The plane's own x animation is frozen at the on-frame value
			# ( 1 ) because its transform blur was disabled above; the z
			# component still varies via the blurred group transform.
			expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, frame - 1 ) )

			self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
			self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 0.75 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 1.25 )

	# Motion blur on, but sampleMotion off

	s["options"]["options"]["sampleMotion"]["enabled"].setValue( True )
	s["options"]["options"]["sampleMotion"]["value"].setValue( False )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrices = arnold.AiNodeGetArray( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrices = arnold.AiNodeGetArray( plane, "matrix" )

		# Motion data is still exported exactly as before...
		self.assertEqual( sphereMotionStart, 0.75 )
		self.assertEqual( sphereMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( sphereMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( sphereMatrices.contents ), 2 )

		self.assertEqual( planeMotionStart, 0.75 )
		self.assertEqual( planeMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( planeMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( planeMatrices.contents ), 2 )

		for i in range( 0, 2 ) :

			frame = 0.75 + 0.5 * i

			sphereMatrix = arnold.AiArrayGetMtx( sphereMatrices, i )
			expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, frame * 2, frame - 1 ) )

			planeMatrix = arnold.AiArrayGetMtx( planeMatrices, i )
			expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, frame - 1 ) )

			self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
			self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		# ...but the camera shutter is collapsed to its start time, which
		# disables motion sampling at render time.
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 0.75 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 0.75 )
def testResolution( self ) :

	# Checks that the renderResolution and resolutionMultiplier options
	# are translated to Arnold's xres/yres, both for the default camera
	# and for a camera picked from the scene.

	s = Gaffer.ScriptNode()

	s["camera"] = GafferScene.Camera()

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["camera"]["out"] )
	s["options"]["options"]["renderResolution"]["enabled"].setValue( True )
	s["options"]["options"]["renderResolution"]["value"].setValue( imath.V2i( 200, 100 ) )
	s["options"]["options"]["resolutionMultiplier"]["enabled"].setValue( True )
	s["options"]["options"]["resolutionMultiplier"]["value"].setValue( 2 )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Default camera should have the right resolution.

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		# 200x100 scaled by the multiplier of 2.
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 400 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 200 )

	# As should a camera picked from the scene.

	s["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	s["options"]["options"]["renderCamera"]["value"].setValue( "/camera" )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 400 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 200 )
def testRenderRegion( self ) :

	# Checks translation of crop windows and overscan to Arnold's
	# xres/yres and region_min/max_* options.

	s = Gaffer.ScriptNode()

	s["camera"] = GafferScene.Camera()

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["camera"]["out"] )
	s["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	s["options"]["options"]["renderCamera"]["value"].setValue( "/camera" )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Default region

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		# The full 640x480 default frame.
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 639 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# Apply Crop Window

	s["options"]["options"]["renderCropWindow"]["enabled"].setValue( True )
	s["options"]["options"]["renderCropWindow"]["value"].setValue( imath.Box2f( imath.V2f( 0.25, 0.5 ), imath.V2f( 0.75, 1.0 ) ) )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		# Resolution is unchanged; the region is the normalised crop
		# scaled by the resolution ( 0.25*640 = 160, etc ).
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 160 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 479 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 240 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# Test Empty Crop Window

	s["options"]["options"]["renderCropWindow"]["value"].setValue( imath.Box2f() )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )

		# Since Arnold doesn't support empty regions, we default to one pixel in the corner
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 479 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# Apply Overscan

	s["options"]["options"]["renderCropWindow"]["enabled"].setValue( False )
	s["options"]["options"]["overscan"]["enabled"].setValue( True )
	s["options"]["options"]["overscan"]["value"].setValue( True )
	s["options"]["options"]["overscanTop"]["enabled"].setValue( True )
	s["options"]["options"]["overscanTop"]["value"].setValue( 0.1 )
	s["options"]["options"]["overscanBottom"]["enabled"].setValue( True )
	s["options"]["options"]["overscanBottom"]["value"].setValue( 0.2 )
	s["options"]["options"]["overscanLeft"]["enabled"].setValue( True )
	s["options"]["options"]["overscanLeft"]["value"].setValue( 0.3 )
	s["options"]["options"]["overscanRight"]["enabled"].setValue( True )
	s["options"]["options"]["overscanRight"]["value"].setValue( 0.4 )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		# Overscan fractions scale by the resolution and extend the
		# region outwards : left 0.3*640 = 192, right 0.4*640 = 256,
		# top 0.1*480 = 48, bottom 0.2*480 = 96 ( max values are
		# inclusive, hence the -1 ).
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), -192 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 640 + 255 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), -48 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 480 + 95 )
def testMissingCameraRaises( self ) :

	# Point the renderer at a camera path with no node behind it.
	script = Gaffer.ScriptNode()

	script["options"] = GafferScene.StandardOptions()
	script["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	script["options"]["options"]["renderCamera"]["value"].setValue( "/i/dont/exist" )

	script["render"] = GafferArnold.ArnoldRender()
	script["render"]["in"].setInput( script["options"]["out"] )
	script["render"]["mode"].setValue( script["render"].Mode.SceneDescriptionMode )
	script["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Execution must fail, and the error must name the missing path.
	six.assertRaisesRegex( self, RuntimeError, "/i/dont/exist", script["render"]["task"].execute )

	# Adding some other camera to the scene must not mask the error.
	script["camera"] = GafferScene.Camera()
	script["options"]["in"].setInput( script["camera"]["out"] )

	six.assertRaisesRegex( self, RuntimeError, "/i/dont/exist", script["render"]["task"].execute )
def testManyCameras( self ) :

	# A scene containing many duplicated cameras should still export to
	# an .ass file without error.

	sourceCamera = GafferScene.Camera()

	duplicator = GafferScene.Duplicate()
	duplicator["in"].setInput( sourceCamera["out"] )
	duplicator["target"].setValue( "/camera" )
	duplicator["copies"].setValue( 1000 )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( duplicator["out"] )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )

	render["task"].execute()
def testTwoRenders( self ) :

	# Launching two renders concurrently in the same process should
	# fail for exactly one of them, with an "already in use" error.

	sphere = GafferScene.Sphere()

	duplicate = GafferScene.Duplicate()
	duplicate["in"].setInput( sphere["out"] )
	duplicate["target"].setValue( "/sphere" )
	# NOTE(review): presumably sized so the first render is still in
	# progress when the second thread starts - confirm if flaky.
	duplicate["copies"].setValue( 10000 )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( duplicate["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.####.ass" )

	errors = []
	def executeFrame( frame ) :

		# Collect any error as a string rather than raising across the
		# thread boundary, so the main thread can assert on it.
		with Gaffer.Context() as c :
			c.setFrame( frame )
			try :
				render["task"].execute()
			except Exception as e :
				errors.append( str( e ) )

	threads = []
	for i in range( 0, 2 ) :
		t = threading.Thread( target = executeFrame, args = ( i, ) )
		t.start()
		threads.append( t )

	for t in threads :
		t.join()

	# Exactly one of the two renders must have failed.
	self.assertEqual( len( errors ), 1 )
	self.assertTrue( "Arnold is already in use" in errors[0] )
def testTraceSets( self ) :

	# Sets whose names carry the "render:" prefix should be exported to
	# Arnold as trace sets on their member shapes, minus the prefix.

	sphere = GafferScene.Sphere()

	group = GafferScene.Group()
	group["in"][0].setInput( sphere["out"] )
	group["in"][1].setInput( sphere["out"] )

	firstSphereSet = GafferScene.Set()
	firstSphereSet["in"].setInput( group["out"] )
	firstSphereSet["name"].setValue( "render:firstSphere" )
	firstSphereSet["paths"].setValue( IECore.StringVectorData( [ "/group/sphere" ] ) )

	secondSphereSet = GafferScene.Set()
	secondSphereSet["in"].setInput( firstSphereSet["out"] )
	secondSphereSet["name"].setValue( "render:secondSphere" )
	secondSphereSet["paths"].setValue( IECore.StringVectorData( [ "/group/sphere1" ] ) )

	groupSet = GafferScene.Set()
	groupSet["in"].setInput( secondSphereSet["out"] )
	groupSet["name"].setValue( "render:group" )
	groupSet["paths"].setValue( IECore.StringVectorData( [ "/group" ] ) )

	bothSpheresSet = GafferScene.Set()
	bothSpheresSet["in"].setInput( groupSet["out"] )
	bothSpheresSet["name"].setValue( "render:bothSpheres" )
	bothSpheresSet["paths"].setValue( IECore.StringVectorData( [ "/group/sphere", "/group/sphere1" ] ) )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( bothSpheresSet["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		firstSphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		secondSphere = arnold.AiNodeLookUpByName( "/group/sphere1" )

		# Membership of an ancestor set ( "group" ) is inherited.
		self.assertEqual( self.__arrayToSet( arnold.AiNodeGetArray( firstSphere, "trace_sets" ) ), { "firstSphere", "group", "bothSpheres" } )
		self.assertEqual( self.__arrayToSet( arnold.AiNodeGetArray( secondSphere, "trace_sets" ) ), { "secondSphere", "group", "bothSpheres" } )
def testSetsNeedContextEntry( self ) :

	# Exporting a scene whose computation depends on a custom context
	# variable ( the light's name ) must succeed when that variable is
	# provided, repeatedly and with varying values.

	s = Gaffer.ScriptNode()

	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "point_light" )

	s["expression"] = Gaffer.Expression()
	s["expression"].setExpression(
		"""parent["light"]["name"] = context["lightName"]"""
	)

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["light"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	for i in range( 100 ) :
		with Gaffer.Context() as context :
			context["lightName"] = "light%d" % i
			s["render"]["task"].execute()
def testFrameAndAASeed( self ) :

	# AA_seed should default to the rounded current frame, and be
	# overridden by the aaSeed option whenever that is enabled.

	options = GafferArnold.ArnoldOptions()

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( options["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Non-integer frames check the rounding behaviour.
	for frame in ( 1, 2, 2.8, 3.2 ) :
		for seed in ( None, 3, 4 ) :
			with Gaffer.Context() as c :

				c.setFrame( frame )
				# None means "option disabled" - the value set alongside
				# it is then ignored by the renderer.
				options["options"]["aaSeed"]["enabled"].setValue( seed is not None )
				options["options"]["aaSeed"]["value"].setValue( seed or 1 )

				render["task"].execute()

				with IECoreArnold.UniverseBlock( writable = True ) :

					arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
					self.assertEqual(
						arnold.AiNodeGetInt( arnold.AiUniverseGetOptions(), "AA_seed" ),
						seed or round( frame )
					)
def testRendererContextVariable( self ) :

	# The ${scene:renderer} substitution should expand during scene
	# translation, giving the sphere the name "/sphereArnold".

	sphere = GafferScene.Sphere()
	sphere["name"].setValue( "sphere${scene:renderer}" )

	renderNode = GafferArnold.ArnoldRender()
	renderNode["in"].setInput( sphere["out"] )
	renderNode["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
	renderNode["mode"].setValue( renderNode.Mode.SceneDescriptionMode )

	renderNode["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		self.assertTrue( arnold.AiNodeLookUpByName( "/sphereArnold" ) is not None )
def testAdaptors( self ) :

	# Registers a render adaptor that forces the matte attribute on,
	# then verifies that the adaptor ran by checking the "matte" flag on
	# the exported shape.

	def a() :

		result = GafferArnold.ArnoldAttributes()
		result["attributes"]["matte"]["enabled"].setValue( True )
		result["attributes"]["matte"]["value"].setValue( True )

		return result

	# NOTE(review): the adaptor stays registered for the rest of the
	# test process; there is no deregistration here.
	GafferScene.registerAdaptor( "Test", a )

	# The original assigned `sphere` twice; the first assignment was
	# dead and has been removed.
	sphere = GafferScene.Sphere()

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( sphere["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		node = arnold.AiNodeLookUpByName( "/sphere" )
		self.assertEqual( arnold.AiNodeGetBool( node, "matte" ), True )
def testLightAndShadowLinking( self ) :

	# Links one sphere to a single light for illumination and a single
	# ( different ) light for shadows, and verifies the light_group /
	# shadow_group arrays in the exported .ass file. A second, unlinked
	# sphere must remain unaffected.

	sphere1 = GafferScene.Sphere()
	sphere2 = GafferScene.Sphere()

	attributes = GafferScene.StandardAttributes()
	arnoldAttributes = GafferArnold.ArnoldAttributes()

	light1 = GafferArnold.ArnoldLight()
	light1.loadShader( "point_light" )

	light2 = GafferArnold.ArnoldLight()
	light2.loadShader( "point_light" )

	group = GafferScene.Group()
	render = GafferArnold.ArnoldRender()

	# Note : no filter is attached to the attribute nodes, so the
	# linking attributes apply only to descendants of the scene root
	# fed through them - i.e. sphere1 but not sphere2.
	attributes["in"].setInput( sphere1["out"] )
	arnoldAttributes["in"].setInput( attributes["out"] )
	group["in"][0].setInput( arnoldAttributes["out"] )
	group["in"][1].setInput( light1["out"] )
	group["in"][2].setInput( light2["out"] )
	group["in"][3].setInput( sphere2["out"] )

	render["in"].setInput( group["out"] )

	# Illumination
	attributes["attributes"]["linkedLights"]["enabled"].setValue( True )
	attributes["attributes"]["linkedLights"]["value"].setValue( "/group/light" )

	# Shadows
	arnoldAttributes["attributes"]["shadowGroup"]["enabled"].setValue( True )
	arnoldAttributes["attributes"]["shadowGroup"]["value"].setValue( "/group/light1" )

	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		# the first sphere had linked lights
		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )

		# check illumination
		self.assertTrue( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
		lights = arnold.AiNodeGetArray( sphere, "light_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( lights ), 1 )
		self.assertEqual(
			arnold.AiNodeGetName( arnold.AiArrayGetPtr( lights, 0 ) ),
			"light:/group/light"
		)

		# check shadows
		self.assertTrue( arnold.AiNodeGetBool( sphere, "use_shadow_group" ) )
		shadows = arnold.AiNodeGetArray( sphere, "shadow_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( shadows ), 1 )
		self.assertEqual(
			arnold.AiNodeGetName( arnold.AiArrayGetPtr( shadows, 0 ) ),
			"light:/group/light1"
		)

		# the second sphere does not have any light linking enabled
		sphere1 = arnold.AiNodeLookUpByName( "/group/sphere1" )

		# check illumination
		self.assertFalse( arnold.AiNodeGetBool( sphere1, "use_light_group" ) )
		lights = arnold.AiNodeGetArray( sphere1, "light_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( lights ), 0 )

		# check shadows
		self.assertFalse( arnold.AiNodeGetBool( sphere1, "use_shadow_group" ) )
		shadows = arnold.AiNodeGetArray( sphere1, "shadow_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( shadows ), 0 )
def testNoLinkedLightsOnLights( self ) :

	# Even when light linking is active in the scene, a mesh light's
	# own geometry should not receive a light_group.

	sphere = GafferScene.Sphere()

	meshLightShader = GafferArnold.ArnoldShader()
	meshLightShader.loadShader( "flat" )

	meshLightFilter = GafferScene.PathFilter()
	meshLightFilter["paths"].setValue( IECore.StringVectorData( [ "/sphere" ] ) )

	meshLight = GafferArnold.ArnoldMeshLight()
	meshLight["in"].setInput( sphere["out"] )
	meshLight["filter"].setInput( meshLightFilter["out"] )
	meshLight["parameters"]["color"].setInput( meshLightShader["out"] )

	light1 = GafferArnold.ArnoldLight()
	light1.loadShader( "point_light" )

	light2 = GafferArnold.ArnoldLight()
	light2.loadShader( "point_light" )

	# Trigger light linking by unlinking a light
	light2["defaultLight"].setValue( False )

	group = GafferScene.Group()

	group["in"][0].setInput( meshLight["out"] )
	group["in"][1].setInput( light1["out"] )
	group["in"][2].setInput( light2["out"] )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( group["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		self.assertIsNotNone( sphere )

		# The mesh-light shape itself must carry no light links.
		self.assertEqual( arnold.AiArrayGetNumElements( arnold.AiNodeGetArray( sphere, "light_group" ) ), 0 )
		self.assertFalse( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
def testLightFilters( self ) :

	# A light filter linked via the "filteredLights" attribute and a
	# gobo assigned directly to the light should both end up in the
	# light's "filters" array in the exported scene.

	s = Gaffer.ScriptNode()

	s["lightFilter"] = GafferArnold.ArnoldLightFilter()
	s["lightFilter"].loadShader( "light_blocker" )

	# Link the filter to all default lights.
	s["attributes"] = GafferScene.StandardAttributes()
	s["attributes"]["in"].setInput( s["lightFilter"]["out"] )
	s["attributes"]["attributes"]["filteredLights"]["enabled"].setValue( True )
	s["attributes"]["attributes"]["filteredLights"]["value"].setValue( "defaultLights" )

	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "point_light" )

	# Assign a gobo shader to the light.
	s["gobo"] = GafferArnold.ArnoldShader()
	s["gobo"].loadShader( "gobo" )

	s["assignment"] = GafferScene.ShaderAssignment()
	s["assignment"]["in"].setInput( s["light"]["out"] )
	s["assignment"]["shader"].setInput( s["gobo"]["out"] )

	s["group"] = GafferScene.Group()

	s["group"]["in"][0].setInput( s["attributes"]["out"] )
	s["group"]["in"][1].setInput( s["assignment"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["group"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		light = arnold.AiNodeLookUpByName( "light:/group/light" )
		linkedFilters = arnold.AiNodeGetArray( light, "filters" )
		numFilters = arnold.AiArrayGetNumElements( linkedFilters.contents )

		self.assertEqual( numFilters, 2 )

		# The array holds raw pointers which must be cast back to AtNode.
		linkedFilter = arnold.cast(arnold.AiArrayGetPtr(linkedFilters, 0), arnold.POINTER(arnold.AtNode))
		linkedGobo = arnold.cast(arnold.AiArrayGetPtr(linkedFilters, 1), arnold.POINTER(arnold.AtNode))

		self.assertEqual( arnold.AiNodeGetName( linkedFilter ), "lightFilter:/group/lightFilter" )
		self.assertEqual( arnold.AiNodeEntryGetName( arnold.AiNodeGetNodeEntry( linkedFilter ) ), "light_blocker" )
		self.assertEqual( arnold.AiNodeEntryGetName( arnold.AiNodeGetNodeEntry( linkedGobo ) ), "gobo" )
@GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
def testLightFiltersMany( self ) :

	# Performance test : instances 10000 light filters and 10000 lights
	# and performs a translation-only render.

	numLights = 10000
	numLightFilters = 10000

	s = Gaffer.ScriptNode()

	s["lightFilter"] = GafferArnold.ArnoldLightFilter()
	s["lightFilter"].loadShader( "light_blocker" )
	s["lightFilter"]["filteredLights"].setValue( "defaultLights" )

	s["planeFilters"] = GafferScene.Plane( "Plane" )
	# Use floor division : under Python 3, `/` yields a float, which
	# V2i cannot accept.
	s["planeFilters"]["divisions"].setValue( imath.V2i( 1, numLightFilters // 2 - 1 ) )

	s["instancerFilters"] = GafferScene.Instancer( "Instancer" )
	s["instancerFilters"]["in"].setInput( s["planeFilters"]["out"] )
	s["instancerFilters"]["instances"].setInput( s["lightFilter"]["out"] )
	s["instancerFilters"]["parent"].setValue( "/plane" )

	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "point_light" )

	s["planeLights"] = GafferScene.Plane( "Plane" )
	s["planeLights"]["divisions"].setValue( imath.V2i( 1, numLights // 2 - 1 ) )

	s["instancerLights"] = GafferScene.Instancer( "Instancer" )
	s["instancerLights"]["in"].setInput( s["planeLights"]["out"] )
	s["instancerLights"]["instances"].setInput( s["light"]["out"] )
	s["instancerLights"]["parent"].setValue( "/plane" )

	s["group"] = GafferScene.Group( "Group" )
	s["group"]["in"][0].setInput( s["instancerFilters"]["out"] )
	s["group"]["in"][1].setInput( s["instancerLights"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["group"]["out"] )

	# Translate the scene only - don't actually kick off a render.
	with Gaffer.Context() as c :
		c["scene:render:sceneTranslationOnly"] = IECore.BoolData( True )
		s["render"]["task"].execute()
def testAbortRaises( self ) :

	# When Arnold aborts the render ( here provoked by a missing
	# texture ), task execution should raise rather than fail silently.

	s = Gaffer.ScriptNode()

	s["plane"] = GafferScene.Plane()
	# Push the plane away from the default camera so it is visible.
	s["plane"]["transform"]["translate"]["z"].setValue( -10 )

	s["shader"] = GafferArnold.ArnoldShader()
	s["shader"].loadShader( "image" )
	# Missing texture should cause render to abort
	s["shader"]["parameters"]["filename"].setValue( "iDontExist" )

	s["filter"] = GafferScene.PathFilter()
	s["filter"]["paths"].setValue( IECore.StringVectorData( [ "/plane" ] ) )

	s["shaderAssignment"] = GafferScene.ShaderAssignment()
	s["shaderAssignment"]["in"].setInput( s["plane"]["out"] )
	s["shaderAssignment"]["filter"].setInput( s["filter"]["out"] )
	s["shaderAssignment"]["shader"].setInput( s["shader"]["out"] )

	s["outputs"] = GafferScene.Outputs()
	s["outputs"].addOutput(
		"beauty",
		IECoreScene.Output(
			self.temporaryDirectory() + "/test.tif",
			"tiff",
			"rgba",
			{}
		)
	)
	s["outputs"]["in"].setInput( s["shaderAssignment"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["outputs"]["out"] )

	six.assertRaisesRegex( self, RuntimeError, "Render aborted", s["render"]["task"].execute )
def testOSLShaders( self ) :

	# Renders a shader ball with a small OSL ( MaterialX ) network -
	# swizzle the blue channel into red via mx_pack_color - and checks
	# the resulting pixel colour via an interactive display render.

	swizzle = GafferOSL.OSLShader()
	swizzle.loadShader( "MaterialX/mx_swizzle_color_float" )
	swizzle["parameters"]["in"].setValue( imath.Color3f( 0, 0, 1 ) )
	swizzle["parameters"]["channels"].setValue( "b" )

	pack = GafferOSL.OSLShader()
	pack.loadShader( "MaterialX/mx_pack_color" )
	pack["parameters"]["in1"].setInput( swizzle["out"]["out"] )

	ball = GafferArnold.ArnoldShaderBall()
	ball["shader"].setInput( pack["out"] )

	# Render to a Catalogue over the display driver so the result can
	# be sampled in-process.
	catalogue = GafferImage.Catalogue()

	outputs = GafferScene.Outputs()
	outputs.addOutput(
		"beauty",
		IECoreScene.Output(
			"test",
			"ieDisplay",
			"rgba",
			{
				"driverType" : "ClientDisplayDriver",
				"displayHost" : "localhost",
				"displayPort" : str( catalogue.displayDriverServer().portNumber() ),
				"remoteDisplayType" : "GafferImage::GafferDisplayDriver",
			}
		)
	)
	outputs["in"].setInput( ball["out"] )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( outputs["out"] )

	with GafferTest.ParallelAlgoTest.UIThreadCallHandler() as handler :
		render["task"].execute()
		handler.waitFor( 0.1 ) #Just need to let the catalogue update

	# The swizzled blue ( 1 ) should land in the red channel.
	self.assertEqual( self.__color4fAtUV( catalogue, imath.V2f( 0.5 ) ), imath.Color4f( 1, 0, 0, 1 ) )
def testDefaultLightsMistakesDontForceLinking( self ) :

	# A non-light mistakenly added to the "defaultLights" set must not
	# cause light links to be emitted on geometry.

	light = GafferArnold.ArnoldLight()
	light.loadShader( "point_light" )

	sphere = GafferScene.Sphere()

	# It doesn't make sense to add a non-light to the "defaultLights"
	# set like this, but in the event of user error, we don't want to
	# emit light links unnecessarily.
	sphereSet = GafferScene.Set()
	sphereSet["in"].setInput( sphere["out"] )
	sphereSet["name"].setValue( "defaultLights" )
	sphereSet["paths"].setValue( IECore.StringVectorData( [ "/sphere" ] ) )

	group = GafferScene.Group()

	group["in"][0].setInput( light["out"] )
	group["in"][1].setInput( sphereSet["out"] )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( group["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		self.assertIsNotNone( sphere )

		# No light links should have been emitted.
		self.assertEqual( arnold.AiArrayGetNumElements( arnold.AiNodeGetArray( sphere, "light_group" ) ), 0 )
		self.assertFalse( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
def __color4fAtUV( self, image, uv ) :

	# Samples `image` at a UV position expressed as a fraction of the
	# display window, returning the colour found there.
	displaySize = image["out"]["format"].getValue().getDisplayWindow().size()
	pixel = uv * imath.V2f( displaySize.x, displaySize.y )

	sampler = GafferImage.ImageSampler()
	sampler["image"].setInput( image["out"] )
	sampler["pixel"].setValue( pixel )

	return sampler["color"].getValue()
def __arrayToSet( self, a ) :

	# Converts an Arnold string array into a Python set of strings.
	# Raises TypeError for arrays of any other element type.

	numElements = arnold.AiArrayGetNumElements( a.contents )
	# The element type is a property of the array, not of individual
	# elements, so check it once up front instead of on every iteration
	# ( as before ). An empty array passes regardless of type, matching
	# the original behaviour.
	if numElements and arnold.AiArrayGetType( a.contents ) != arnold.AI_TYPE_STRING :
		raise TypeError( "Expected an array of AI_TYPE_STRING" )

	result = set()
	for i in range( 0, numElements ) :
		result.add( arnold.AiArrayGetStr( a, i ) )

	return result
def testPerformanceMonitorDoesntCrash( self ) :

	# Enabling the performance monitor option must not crash scene
	# translation.

	standardOptions = GafferScene.StandardOptions()
	standardOptions["options"]["performanceMonitor"]["enabled"].setValue( True )
	standardOptions["options"]["performanceMonitor"]["value"].setValue( True )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( standardOptions["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()
def testShaderSubstitutions( self ) :
	"""`<attr:...>` tokens in shader and light string parameters are
	substituted from the attributes at each location, with locally
	authored attributes overriding inherited ones and global attributes
	acting as the fallback."""

	s = Gaffer.ScriptNode()

	# Plane carrying attributes A = "bar", B = "foo".
	s["plane"] = GafferScene.Plane()
	s["planeAttrs"] = GafferScene.CustomAttributes()
	s["planeAttrs"]["in"].setInput( s["plane"]["out"] )
	s["planeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "A", Gaffer.StringPlug( "value", defaultValue = 'bar' ) ) )
	s["planeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'foo' ) ) )

	# Cube parented under the plane, locally overriding B = "override".
	s["cube"] = GafferScene.Cube()
	s["cubeAttrs"] = GafferScene.CustomAttributes()
	s["cubeAttrs"]["in"].setInput( s["cube"]["out"] )
	s["cubeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'override' ) ) )

	s["parent"] = GafferScene.Parent()
	s["parent"]["in"].setInput( s["planeAttrs"]["out"] )
	s["parent"]["children"][0].setInput( s["cubeAttrs"]["out"] )
	s["parent"]["parent"].setValue( "/plane" )

	# Surface shader whose filename references both A and B.
	s["shader"] = GafferArnold.ArnoldShader()
	s["shader"].loadShader( "image" )
	s["shader"]["parameters"]["filename"].setValue( "<attr:A>/path/<attr:B>.tx" )

	s["filter"] = GafferScene.PathFilter()
	s["filter"]["paths"].setValue( IECore.StringVectorData( [ "/plane" ] ) )

	s["shaderAssignment"] = GafferScene.ShaderAssignment()
	s["shaderAssignment"]["in"].setInput( s["parent"]["out"] )
	s["shaderAssignment"]["filter"].setInput( s["filter"]["out"] )
	s["shaderAssignment"]["shader"].setInput( s["shader"]["out"] )

	# Light with a substitution in its own parameter, plus a gobo filter
	# shader with another substitution.
	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "photometric_light" )
	s["light"]["parameters"]["filename"].setValue( "/path/<attr:A>.ies" )

	s["goboTexture"] = GafferArnold.ArnoldShader()
	s["goboTexture"].loadShader( "image" )
	s["goboTexture"]["parameters"]["filename"].setValue( "<attr:B>/gobo.tx" )

	s["gobo"] = GafferArnold.ArnoldShader()
	s["gobo"].loadShader( "gobo" )
	s["gobo"]["parameters"]["slidemap"].setInput( s["goboTexture"]["out"] )

	s["goboAssign"] = GafferScene.ShaderAssignment()
	s["goboAssign"]["in"].setInput( s["light"]["out"] )
	s["goboAssign"]["shader"].setInput( s["gobo"]["out"] )

	# Light filter with a substitution in a string parameter.
	s["lightBlocker"] = GafferArnold.ArnoldLightFilter()
	s["lightBlocker"].loadShader( "light_blocker" )
	s["lightBlocker"]["parameters"]["geometry_type"].setValue( "<attr:geometryType>" )

	s["lightGroup"] = GafferScene.Group()
	s["lightGroup"]["name"].setValue( "lightGroup" )
	s["lightGroup"]["in"][0].setInput( s["goboAssign"]["out"] )
	s["lightGroup"]["in"][1].setInput( s["lightBlocker"]["out"] )

	s["parent2"] = GafferScene.Parent()
	s["parent2"]["in"].setInput( s["shaderAssignment"]["out"] )
	s["parent2"]["children"][0].setInput( s["lightGroup"]["out"] )
	s["parent2"]["parent"].setValue( "/" )

	# Global attribute values used where no location-level attribute exists.
	s["globalAttrs"] = GafferScene.CustomAttributes()
	s["globalAttrs"]["in"].setInput( s["parent2"]["out"] )
	s["globalAttrs"]["global"].setValue( True )
	s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "A", Gaffer.StringPlug( "value", defaultValue = 'default1' ) ) )
	s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'default2' ) ) )
	s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "geometryType", Gaffer.StringPlug( "value", defaultValue = 'cylinder' ) ) )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["globalAttrs"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		# Plane : A and B from its own attributes.
		plane = arnold.AiNodeLookUpByName( "/plane" )
		shader = arnold.AiNodeGetPtr( plane, "shader" )
		self.assertEqual( arnold.AiNodeGetStr( shader, "filename" ), "bar/path/foo.tx" )

		# Cube : A inherited, B locally overridden.
		cube = arnold.AiNodeLookUpByName( "/plane/cube" )
		shader2 = arnold.AiNodeGetPtr( cube, "shader" )
		self.assertEqual( arnold.AiNodeGetStr( shader2, "filename" ), "bar/path/override.tx" )

		# Light and gobo : values fall back to the globals.
		light = arnold.AiNodeLookUpByName( "light:/lightGroup/light" )
		self.assertEqual( arnold.AiNodeGetStr( light, "filename" ), "/path/default1.ies" )
		gobo = arnold.AiNodeGetPtr( light, "filters" )
		goboTex = arnold.AiNodeGetLink( gobo, "slidemap" )
		self.assertEqual( arnold.AiNodeGetStr( goboTex, "filename" ), "default2/gobo.tx" )

		# Light filter : value falls back to the global "geometryType".
		lightFilter = arnold.AiNodeLookUpByName( "lightFilter:/lightGroup/lightFilter" )
		self.assertEqual( arnold.AiNodeGetStr( lightFilter, "geometry_type" ), "cylinder" )
if __name__ == "__main__":
	# Allow this test module to be run directly as a script.
	unittest.main()
| 36.364751 | 141 | 0.682506 | "/assTests/test.0001.ass" ) )
with s.context() :
s["render"]["task"].execute()
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/renderTests" ) )
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests" ) )
self.assertTrue( os.path.exists( self.temporaryDirectory() + "/assTests/test.0001.ass" ) )
def testWedge( self ) :
	"""Dispatching a Wedge over `${wedge:value}` renders one image per
	wedge value; the "hidden" wedge hides the sphere (dark image) while
	the "visible" wedge shows it (bright image)."""

	s = Gaffer.ScriptNode()

	# The sphere is put in a set named after the wedge value, so only the
	# "hidden" wedge matches the SetFilter below.
	s["sphere"] = GafferScene.Sphere()
	s["sphere"]["sets"].setValue( "${wedge:value}" )

	s["filter"] = GafferScene.SetFilter()
	s["filter"]["setExpression"].setValue( "hidden" )

	s["attributes"] = GafferScene.StandardAttributes()
	s["attributes"]["attributes"]["visibility"]["enabled"].setValue( True )
	s["attributes"]["attributes"]["visibility"]["value"].setValue( False )
	s["attributes"]["filter"].setInput( s["filter"]["out"] )
	s["attributes"]["in"].setInput( s["sphere"]["out"] )

	# Each wedge writes to a differently named image file.
	s["outputs"] = GafferScene.Outputs()
	s["outputs"].addOutput(
		"beauty",
		IECoreScene.Output(
			self.temporaryDirectory() + "/${wedge:value}.tif",
			"tiff",
			"rgba",
			{
			}
		)
	)
	s["outputs"]["in"].setInput( s["attributes"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.####.ass" )
	s["render"]["in"].setInput( s["outputs"]["out"] )

	s["wedge"] = GafferDispatch.Wedge()
	s["wedge"]["mode"].setValue( int( s["wedge"].Mode.StringList ) )
	s["wedge"]["strings"].setValue( IECore.StringVectorData( [ "visible", "hidden" ] ) )
	s["wedge"]["preTasks"][0].setInput( s["render"]["task"] )

	s["fileName"].setValue( self.temporaryDirectory() + "/test.gfr" )
	s.save()

	# Dispatch synchronously so both renders finish before we check the images.
	dispatcher = GafferDispatch.LocalDispatcher()
	dispatcher["jobsDirectory"].setValue( self.temporaryDirectory() + "/testJobDirectory" )
	dispatcher["framesMode"].setValue( GafferDispatch.Dispatcher.FramesMode.CurrentFrame )
	dispatcher["executeInBackground"].setValue( False )

	dispatcher.dispatch( [ s["wedge"] ] )

	hidden = GafferImage.ImageReader()
	hidden["fileName"].setValue( self.temporaryDirectory() + "/hidden.tif" )

	visible = GafferImage.ImageReader()
	visible["fileName"].setValue( self.temporaryDirectory() + "/visible.tif" )

	hiddenStats = GafferImage.ImageStats()
	hiddenStats["in"].setInput( hidden["out"] )
	hiddenStats["area"].setValue( hiddenStats["in"]["dataWindow"].getValue() )

	visibleStats = GafferImage.ImageStats()
	visibleStats["in"].setInput( visible["out"] )
	visibleStats["area"].setValue( visibleStats["in"]["dataWindow"].getValue() )

	# The hidden wedge renders (almost) black; the visible one does not.
	self.assertLess( hiddenStats["average"].getValue()[0], 0.05 )
	self.assertGreater( visibleStats["average"].getValue()[0], .27 )
@staticmethod
def __m44f( m ) :
	"""Convert an Arnold matrix into an `imath.M44f` for comparison."""

	values = [ element for row in m.data for element in row ]
	return imath.M44f( *values )
def testTransformMotion( self ) :
	"""Transform motion blur export : with blur off a single matrix is
	written; with blur on, two matrix keys spanning the shutter are
	written (except for the plane, whose per-object blur is disabled);
	and with `sampleMotion` off the camera shutter is collapsed while
	the matrix keys remain."""

	s = Gaffer.ScriptNode()

	s["plane"] = GafferScene.Plane()
	s["sphere"] = GafferScene.Sphere()
	s["group"] = GafferScene.Group()
	s["group"]["in"][0].setInput( s["plane"]["out"] )
	s["group"]["in"][1].setInput( s["sphere"]["out"] )

	# Animate all three transforms as functions of the frame, so motion
	# samples at different shutter times produce different matrices.
	s["expression"] = Gaffer.Expression()
	s["expression"].setExpression(
		inspect.cleandoc(
			"""
			parent["plane"]["transform"]["translate"]["x"] = context.getFrame()
			parent["sphere"]["transform"]["translate"]["y"] = context.getFrame() * 2
			parent["group"]["transform"]["translate"]["z"] = context.getFrame() - 1
			"""
		)
	)

	# Disable transform blur on the plane only.
	s["planeFilter"] = GafferScene.PathFilter()
	s["planeFilter"]["paths"].setValue( IECore.StringVectorData( [ "/group/plane" ] ) )

	s["attributes"] = GafferScene.StandardAttributes()
	s["attributes"]["in"].setInput( s["group"]["out"] )
	s["attributes"]["filter"].setInput( s["planeFilter"]["out"] )
	s["attributes"]["attributes"]["transformBlur"]["enabled"].setValue( True )
	s["attributes"]["attributes"]["transformBlur"]["value"].setValue( False )

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["attributes"]["out"] )
	s["options"]["options"]["shutter"]["enabled"].setValue( True )
	s["options"]["options"]["transformBlur"]["enabled"].setValue( True )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Pass 1 : global transform blur off. One matrix per object, shutter
	# collapsed onto the frame.
	s["options"]["options"]["transformBlur"]["value"].setValue( False )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )
		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrix = arnold.AiNodeGetMatrix( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrix = arnold.AiNodeGetMatrix( plane, "matrix" )

		# Motion parameters default to their no-blur values.
		self.assertEqual( sphereMotionStart, 0 )
		self.assertEqual( sphereMotionEnd, 1 )
		self.assertEqual( planeMotionStart, 0 )
		self.assertEqual( planeMotionEnd, 1 )

		# Matrices are sampled at frame 1 : sphere at y = 2, plane at x = 1.
		expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, 2, 0 ) )

		expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, 0 ) )

		self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
		self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 1 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 1 )

	# Pass 2 : global transform blur on. Two matrix keys over the
	# 0.75-1.25 shutter; the plane still gets two keys but its matrix
	# only inherits the (blurred) group motion.
	s["options"]["options"]["transformBlur"]["value"].setValue( True )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )
		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrices = arnold.AiNodeGetArray( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrices = arnold.AiNodeGetArray( plane, "matrix" )

		self.assertEqual( sphereMotionStart, 0.75 )
		self.assertEqual( sphereMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( sphereMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( sphereMatrices.contents ), 2 )

		self.assertEqual( planeMotionStart, 0.75 )
		self.assertEqual( planeMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( planeMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( planeMatrices.contents ), 2 )

		for i in range( 0, 2 ) :

			frame = 0.75 + 0.5 * i

			sphereMatrix = arnold.AiArrayGetMtx( sphereMatrices, i )

			expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, frame * 2, frame - 1 ) )

			planeMatrix = arnold.AiArrayGetMtx( planeMatrices, i )

			# Plane's own blur is disabled, so only the group's animated
			# z translation varies between keys.
			expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, frame - 1 ) )

			self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
			self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 0.75 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 1.25 )

	# Pass 3 : sampleMotion off. Matrix keys are still exported, but the
	# camera shutter is collapsed to its start.
	s["options"]["options"]["sampleMotion"]["enabled"].setValue( True )
	s["options"]["options"]["sampleMotion"]["value"].setValue( False )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		camera = arnold.AiNodeLookUpByName( "gaffer:defaultCamera" )
		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		sphereMotionStart = arnold.AiNodeGetFlt( sphere, "motion_start" )
		sphereMotionEnd = arnold.AiNodeGetFlt( sphere, "motion_end" )
		sphereMatrices = arnold.AiNodeGetArray( sphere, "matrix" )

		plane = arnold.AiNodeLookUpByName( "/group/plane" )
		planeMotionStart = arnold.AiNodeGetFlt( plane, "motion_start" )
		planeMotionEnd = arnold.AiNodeGetFlt( plane, "motion_end" )
		planeMatrices = arnold.AiNodeGetArray( plane, "matrix" )

		self.assertEqual( sphereMotionStart, 0.75 )
		self.assertEqual( sphereMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( sphereMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( sphereMatrices.contents ), 2 )

		self.assertEqual( planeMotionStart, 0.75 )
		self.assertEqual( planeMotionEnd, 1.25 )
		self.assertEqual( arnold.AiArrayGetNumElements( planeMatrices.contents ), 1 )
		self.assertEqual( arnold.AiArrayGetNumKeys( planeMatrices.contents ), 2 )

		for i in range( 0, 2 ) :

			frame = 0.75 + 0.5 * i

			sphereMatrix = arnold.AiArrayGetMtx( sphereMatrices, i )

			expectedSphereMatrix = arnold.AiM4Translation( arnold.AtVector( 0, frame * 2, frame - 1 ) )

			planeMatrix = arnold.AiArrayGetMtx( planeMatrices, i )

			expectedPlaneMatrix = arnold.AiM4Translation( arnold.AtVector( 1, 0, frame - 1 ) )

			self.assertEqual( self.__m44f( sphereMatrix ), self.__m44f( expectedSphereMatrix ) )
			self.assertEqual( self.__m44f( planeMatrix ), self.__m44f( expectedPlaneMatrix ) )

		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_start" ), 0.75 )
		self.assertEqual( arnold.AiNodeGetFlt( camera, "shutter_end" ), 0.75 )
def testResolution( self ) :
	"""`renderResolution` multiplied by `resolutionMultiplier` determines
	the exported xres/yres, for both the default camera and an explicit
	render camera."""

	s = Gaffer.ScriptNode()

	s["camera"] = GafferScene.Camera()

	# 200x100 at a multiplier of 2 -> 400x200.
	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["camera"]["out"] )
	s["options"]["options"]["renderResolution"]["enabled"].setValue( True )
	s["options"]["options"]["renderResolution"]["value"].setValue( imath.V2i( 200, 100 ) )
	s["options"]["options"]["resolutionMultiplier"]["enabled"].setValue( True )
	s["options"]["options"]["resolutionMultiplier"]["value"].setValue( 2 )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Default camera should have the right resolution.
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 400 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 200 )

	# As should a camera picked from the scene.
	s["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	s["options"]["options"]["renderCamera"]["value"].setValue( "/camera" )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 400 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 200 )
def testRenderRegion( self ) :
	"""Crop windows and overscan translate into Arnold's region_min/max
	options, while xres/yres keep the full data-window resolution."""

	s = Gaffer.ScriptNode()

	s["camera"] = GafferScene.Camera()

	s["options"] = GafferScene.StandardOptions()
	s["options"]["in"].setInput( s["camera"]["out"] )
	s["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	s["options"]["options"]["renderCamera"]["value"].setValue( "/camera" )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# No crop window : region covers the whole 640x480 image.
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 639 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# Normalised crop window (0.25,0.5)-(0.75,1.0) maps to a pixel
	# region; note Arnold's y axis runs top-down relative to the crop.
	s["options"]["options"]["renderCropWindow"]["enabled"].setValue( True )
	s["options"]["options"]["renderCropWindow"]["value"].setValue( imath.Box2f( imath.V2f( 0.25, 0.5 ), imath.V2f( 0.75, 1.0 ) ) )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 160 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 479 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 240 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# An empty crop window collapses the region to a single pixel.
	s["options"]["options"]["renderCropWindow"]["value"].setValue( imath.Box2f() )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 0 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), 479 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 479 )

	# Apply Overscan : fractional overscan per edge extends the region
	# beyond the 640x480 image.
	s["options"]["options"]["renderCropWindow"]["enabled"].setValue( False )
	s["options"]["options"]["overscan"]["enabled"].setValue( True )
	s["options"]["options"]["overscan"]["value"].setValue( True )
	s["options"]["options"]["overscanTop"]["enabled"].setValue( True )
	s["options"]["options"]["overscanTop"]["value"].setValue( 0.1 )
	s["options"]["options"]["overscanBottom"]["enabled"].setValue( True )
	s["options"]["options"]["overscanBottom"]["value"].setValue( 0.2 )
	s["options"]["options"]["overscanLeft"]["enabled"].setValue( True )
	s["options"]["options"]["overscanLeft"]["value"].setValue( 0.3 )
	s["options"]["options"]["overscanRight"]["enabled"].setValue( True )
	s["options"]["options"]["overscanRight"]["value"].setValue( 0.4 )
	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		options = arnold.AiUniverseGetOptions()
		self.assertEqual( arnold.AiNodeGetInt( options, "xres" ), 640 )
		self.assertEqual( arnold.AiNodeGetInt( options, "yres" ), 480 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_x" ), -192 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_x" ), 640 + 255 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_min_y" ), -48 )
		self.assertEqual( arnold.AiNodeGetInt( options, "region_max_y" ), 480 + 95 )
def testMissingCameraRaises( self ) :
	"""Rendering with a `renderCamera` option pointing at a nonexistent
	location raises, and the error message names the missing path."""

	s = Gaffer.ScriptNode()

	s["options"] = GafferScene.StandardOptions()
	s["options"]["options"]["renderCamera"]["enabled"].setValue( True )
	s["options"]["options"]["renderCamera"]["value"].setValue( "/i/dont/exist" )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["options"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# The requested camera doesn't exist - this should raise an exception.
	six.assertRaisesRegex( self, RuntimeError, "/i/dont/exist", s["render"]["task"].execute )

	# And even the existence of a different camera shouldn't change that.
	s["camera"] = GafferScene.Camera()
	s["options"]["in"].setInput( s["camera"]["out"] )

	six.assertRaisesRegex( self, RuntimeError, "/i/dont/exist", s["render"]["task"].execute )
def testManyCameras( self ) :
	"""A scene containing 1000 duplicated cameras should export a scene
	description without error."""

	cameraNode = GafferScene.Camera()

	duplicateNode = GafferScene.Duplicate()
	duplicateNode["in"].setInput( cameraNode["out"] )
	duplicateNode["target"].setValue( "/camera" )
	duplicateNode["copies"].setValue( 1000 )

	renderNode = GafferArnold.ArnoldRender()
	renderNode["in"].setInput( duplicateNode["out"] )
	renderNode["mode"].setValue( renderNode.Mode.SceneDescriptionMode )
	renderNode["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	renderNode["task"].execute()
def testTwoRenders( self ) :
	"""Two scene-description exports executed concurrently on separate
	threads : exactly one must fail with "Arnold is already in use"."""

	sphere = GafferScene.Sphere()

	duplicate = GafferScene.Duplicate()
	duplicate["in"].setInput( sphere["out"] )
	duplicate["target"].setValue( "/sphere" )
	# Many copies, so the export takes long enough for the two threads
	# to genuinely overlap.
	duplicate["copies"].setValue( 10000 )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( duplicate["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.####.ass" )

	errors = []
	def executeFrame( frame ) :

		with Gaffer.Context() as c :
			c.setFrame( frame )
			try :
				render["task"].execute()
			except Exception as e :
				# Collected rather than raised, so we can assert on the
				# failures after joining the threads.
				errors.append( str( e ) )

	threads = []
	for i in range( 0, 2 ) :
		t = threading.Thread( target = executeFrame, args = ( i, ) )
		t.start()
		threads.append( t )

	for t in threads :
		t.join()

	self.assertEqual( len( errors ), 1 )
	self.assertTrue( "Arnold is already in use" in errors[0] )
def testTraceSets( self ) :
	"""Sets whose names begin with "render:" are exported as Arnold
	`trace_sets`, with the prefix stripped and membership inherited
	from ancestor locations."""

	sphere = GafferScene.Sphere()

	group = GafferScene.Group()
	group["in"][0].setInput( sphere["out"] )
	group["in"][1].setInput( sphere["out"] )

	set1 = GafferScene.Set()
	set1["name"].setValue( "render:firstSphere" )
	set1["paths"].setValue( IECore.StringVectorData( [ "/group/sphere" ] ) )
	set1["in"].setInput( group["out"] )

	set2 = GafferScene.Set()
	set2["name"].setValue( "render:secondSphere" )
	set2["paths"].setValue( IECore.StringVectorData( [ "/group/sphere1" ] ) )
	set2["in"].setInput( set1["out"] )

	# Membership of "/group" is inherited by both spheres.
	set3 = GafferScene.Set()
	set3["name"].setValue( "render:group" )
	set3["paths"].setValue( IECore.StringVectorData( [ "/group" ] ) )
	set3["in"].setInput( set2["out"] )

	set4 = GafferScene.Set()
	set4["name"].setValue( "render:bothSpheres" )
	set4["paths"].setValue( IECore.StringVectorData( [ "/group/sphere", "/group/sphere1" ] ) )
	set4["in"].setInput( set3["out"] )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( set4["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		firstSphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		secondSphere = arnold.AiNodeLookUpByName( "/group/sphere1" )

		self.assertEqual( self.__arrayToSet( arnold.AiNodeGetArray( firstSphere, "trace_sets" ) ), { "firstSphere", "group", "bothSpheres" } )
		self.assertEqual( self.__arrayToSet( arnold.AiNodeGetArray( secondSphere, "trace_sets" ) ), { "secondSphere", "group", "bothSpheres" } )
def testSetsNeedContextEntry( self ) :
	"""Repeated exports whose light name comes from a context variable
	(via an expression) should succeed for every context value."""

	script = Gaffer.ScriptNode()

	script["light"] = GafferArnold.ArnoldLight()
	script["light"].loadShader( "point_light" )

	# The light's name depends on the "lightName" context entry.
	script["expression"] = Gaffer.Expression()
	script["expression"].setExpression(
		"""parent["light"]["name"] = context["lightName"]"""
	)

	script["render"] = GafferArnold.ArnoldRender()
	script["render"]["in"].setInput( script["light"]["out"] )
	script["render"]["mode"].setValue( script["render"].Mode.SceneDescriptionMode )
	script["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	for i in range( 0, 100 ) :

		with Gaffer.Context() as context :
			context["lightName"] = "light%d" % i
			script["render"]["task"].execute()
def testFrameAndAASeed( self ) :
	"""Arnold's `AA_seed` defaults to the (rounded) current frame, and
	is overridden by the `aaSeed` option when that is enabled."""

	options = GafferArnold.ArnoldOptions()

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( options["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	# Non-integer frames exercise the rounding fallback.
	for frame in ( 1, 2, 2.8, 3.2 ) :
		for seed in ( None, 3, 4 ) :

			with Gaffer.Context() as c :

				c.setFrame( frame )
				# seed is None -> option disabled -> frame-based seed.
				options["options"]["aaSeed"]["enabled"].setValue( seed is not None )
				options["options"]["aaSeed"]["value"].setValue( seed or 1 )

				render["task"].execute()

				with IECoreArnold.UniverseBlock( writable = True ) :

					arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

					self.assertEqual(
						arnold.AiNodeGetInt( arnold.AiUniverseGetOptions(), "AA_seed" ),
						seed or round( frame )
					)
def testRendererContextVariable( self ) :
	"""The `${scene:renderer}` context variable is expanded during scene
	generation : a sphere named "sphere${scene:renderer}" exports as
	"/sphereArnold"."""

	sphereNode = GafferScene.Sphere()
	sphereNode["name"].setValue( "sphere${scene:renderer}" )

	renderNode = GafferArnold.ArnoldRender()
	renderNode["in"].setInput( sphereNode["out"] )
	renderNode["mode"].setValue( renderNode.Mode.SceneDescriptionMode )
	renderNode["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	renderNode["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )
		self.assertTrue( arnold.AiNodeLookUpByName( "/sphereArnold" ) is not None )
def testAdaptors( self ) :
	"""A render adaptor registered via `GafferScene.registerAdaptor` is
	applied at render time : here it turns on the `matte` attribute for
	the whole scene."""

	sphere = GafferScene.Sphere()

	def a() :

		result = GafferArnold.ArnoldAttributes()
		result["attributes"]["matte"]["enabled"].setValue( True )
		result["attributes"]["matte"]["value"].setValue( True )

		return result

	# NOTE(review) : the registration is not undone, so the "Test"
	# adaptor stays registered for subsequent tests in this process.
	GafferScene.registerAdaptor( "Test", a )

	sphere = GafferScene.Sphere()

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( sphere["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		node = arnold.AiNodeLookUpByName( "/sphere" )

		self.assertEqual( arnold.AiNodeGetBool( node, "matte" ), True )
def testLightAndShadowLinking( self ) :
	"""`linkedLights` and `shadowGroup` attributes translate into
	Arnold's `light_group`/`use_light_group` and
	`shadow_group`/`use_shadow_group`, and only affect the objects
	they are assigned to."""

	sphere1 = GafferScene.Sphere()
	sphere2 = GafferScene.Sphere()

	attributes = GafferScene.StandardAttributes()
	arnoldAttributes = GafferArnold.ArnoldAttributes()

	light1 = GafferArnold.ArnoldLight()
	light1.loadShader( "point_light" )

	light2 = GafferArnold.ArnoldLight()
	light2.loadShader( "point_light" )

	group = GafferScene.Group()
	render = GafferArnold.ArnoldRender()

	# Only the first sphere receives the linking attributes.
	attributes["in"].setInput( sphere1["out"] )
	arnoldAttributes["in"].setInput( attributes["out"] )

	group["in"][0].setInput( arnoldAttributes["out"] )
	group["in"][1].setInput( light1["out"] )
	group["in"][2].setInput( light2["out"] )
	group["in"][3].setInput( sphere2["out"] )

	render["in"].setInput( group["out"] )

	# Illumination
	attributes["attributes"]["linkedLights"]["enabled"].setValue( True )
	attributes["attributes"]["linkedLights"]["value"].setValue( "/group/light" )

	# Shadows
	arnoldAttributes["attributes"]["shadowGroup"]["enabled"].setValue( True )
	arnoldAttributes["attributes"]["shadowGroup"]["value"].setValue( "/group/light1" )

	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		# the first sphere had linked lights
		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )

		# check illumination
		self.assertTrue( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
		lights = arnold.AiNodeGetArray( sphere, "light_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( lights ), 1 )
		self.assertEqual(
			arnold.AiNodeGetName( arnold.AiArrayGetPtr( lights, 0 ) ),
			"light:/group/light"
		)

		# check shadows
		self.assertTrue( arnold.AiNodeGetBool( sphere, "use_shadow_group" ) )
		shadows = arnold.AiNodeGetArray( sphere, "shadow_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( shadows ), 1 )
		self.assertEqual(
			arnold.AiNodeGetName( arnold.AiArrayGetPtr( shadows, 0 ) ),
			"light:/group/light1"
		)

		# the second sphere does not have any light linking enabled
		sphere1 = arnold.AiNodeLookUpByName( "/group/sphere1" )

		# check illumination
		self.assertFalse( arnold.AiNodeGetBool( sphere1, "use_light_group" ) )
		lights = arnold.AiNodeGetArray( sphere1, "light_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( lights ), 0 )

		# check shadows
		self.assertFalse( arnold.AiNodeGetBool( sphere1, "use_shadow_group" ) )
		shadows = arnold.AiNodeGetArray( sphere1, "shadow_group" )
		self.assertEqual( arnold.AiArrayGetNumElements( shadows ), 0 )
def testNoLinkedLightsOnLights( self ) :
	"""Light-linking must not be emitted on objects that are themselves
	lights : a mesh light's geometry gets no `light_group` even when
	linking is active elsewhere in the scene."""

	sphere = GafferScene.Sphere()

	meshLightShader = GafferArnold.ArnoldShader()
	meshLightShader.loadShader( "flat" )

	meshLightFilter = GafferScene.PathFilter()
	meshLightFilter["paths"].setValue( IECore.StringVectorData( [ "/sphere" ] ) )

	meshLight = GafferArnold.ArnoldMeshLight()
	meshLight["in"].setInput( sphere["out"] )
	meshLight["filter"].setInput( meshLightFilter["out"] )
	meshLight["parameters"]["color"].setInput( meshLightShader["out"] )

	light1 = GafferArnold.ArnoldLight()
	light1.loadShader( "point_light" )

	light2 = GafferArnold.ArnoldLight()
	light2.loadShader( "point_light" )

	# Trigger light linking by unlinking a light
	light2["defaultLight"].setValue( False )

	group = GafferScene.Group()

	group["in"][0].setInput( meshLight["out"] )
	group["in"][1].setInput( light1["out"] )
	group["in"][2].setInput( light2["out"] )

	render = GafferArnold.ArnoldRender()
	render["in"].setInput( group["out"] )
	render["mode"].setValue( render.Mode.SceneDescriptionMode )
	render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	render["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
		self.assertIsNotNone( sphere )

		# The mesh light's geometry carries no light links.
		self.assertEqual( arnold.AiArrayGetNumElements( arnold.AiNodeGetArray( sphere, "light_group" ) ), 0 )
		self.assertFalse( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
def testLightFilters( self ) :
	"""A light filter targeting "defaultLights", plus a gobo assigned
	directly to the light, both end up in the light's Arnold `filters`
	array."""

	s = Gaffer.ScriptNode()

	# Filter linked to all default lights via the filteredLights attribute.
	s["lightFilter"] = GafferArnold.ArnoldLightFilter()
	s["lightFilter"].loadShader( "light_blocker" )

	s["attributes"] = GafferScene.StandardAttributes()
	s["attributes"]["in"].setInput( s["lightFilter"]["out"] )
	s["attributes"]["attributes"]["filteredLights"]["enabled"].setValue( True )
	s["attributes"]["attributes"]["filteredLights"]["value"].setValue( "defaultLights" )

	# Light with a gobo assigned as its shader.
	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "point_light" )

	s["gobo"] = GafferArnold.ArnoldShader()
	s["gobo"].loadShader( "gobo" )

	s["assignment"] = GafferScene.ShaderAssignment()
	s["assignment"]["in"].setInput( s["light"]["out"] )
	s["assignment"]["shader"].setInput( s["gobo"]["out"] )

	s["group"] = GafferScene.Group()

	s["group"]["in"][0].setInput( s["attributes"]["out"] )
	s["group"]["in"][1].setInput( s["assignment"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["group"]["out"] )
	s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
	s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

	s["render"]["task"].execute()

	with IECoreArnold.UniverseBlock( writable = True ) :

		arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

		light = arnold.AiNodeLookUpByName( "light:/group/light" )
		linkedFilters = arnold.AiNodeGetArray( light, "filters" )
		numFilters = arnold.AiArrayGetNumElements( linkedFilters.contents )

		# Both the linked blocker and the assigned gobo are present.
		self.assertEqual( numFilters, 2 )

		linkedFilter = arnold.cast(arnold.AiArrayGetPtr(linkedFilters, 0), arnold.POINTER(arnold.AtNode))
		linkedGobo = arnold.cast(arnold.AiArrayGetPtr(linkedFilters, 1), arnold.POINTER(arnold.AtNode))

		self.assertEqual( arnold.AiNodeGetName( linkedFilter ), "lightFilter:/group/lightFilter" )
		self.assertEqual( arnold.AiNodeEntryGetName( arnold.AiNodeGetNodeEntry( linkedFilter ) ), "light_blocker" )
		self.assertEqual( arnold.AiNodeEntryGetName( arnold.AiNodeGetNodeEntry( linkedGobo ) ), "gobo" )
@GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
def testLightFiltersMany( self ) :
	"""Performance test : translating a scene with 10000 instanced
	lights and 10000 instanced light filters, all linked via
	"defaultLights". Translation only — no actual render.

	Fix : `numLightFilters / 2 - 1` and `numLights / 2 - 1` used true
	division, which yields a float under Python 3 and is not a valid
	argument for the integer `imath.V2i` constructor (this file already
	targets Python 2/3 via `six`). Floor division produces the same
	values as the Python 2 behaviour.
	"""

	numLights = 10000
	numLightFilters = 10000

	s = Gaffer.ScriptNode()

	s["lightFilter"] = GafferArnold.ArnoldLightFilter()
	s["lightFilter"].loadShader( "light_blocker" )
	s["lightFilter"]["filteredLights"].setValue( "defaultLights" )

	# Instance the filter across the vertices of a plane.
	s["planeFilters"] = GafferScene.Plane( "Plane" )
	s["planeFilters"]["divisions"].setValue( imath.V2i( 1, numLightFilters // 2 - 1 ) )

	s["instancerFilters"] = GafferScene.Instancer( "Instancer" )
	s["instancerFilters"]["in"].setInput( s["planeFilters"]["out"] )
	s["instancerFilters"]["instances"].setInput( s["lightFilter"]["out"] )
	s["instancerFilters"]["parent"].setValue( "/plane" )

	s["light"] = GafferArnold.ArnoldLight()
	s["light"].loadShader( "point_light" )

	# Instance the light across the vertices of a second plane.
	s["planeLights"] = GafferScene.Plane( "Plane" )
	s["planeLights"]["divisions"].setValue( imath.V2i( 1, numLights // 2 - 1 ) )

	s["instancerLights"] = GafferScene.Instancer( "Instancer" )
	s["instancerLights"]["in"].setInput( s["planeLights"]["out"] )
	s["instancerLights"]["instances"].setInput( s["light"]["out"] )
	s["instancerLights"]["parent"].setValue( "/plane" )

	s["group"] = GafferScene.Group( "Group" )
	s["group"]["in"][0].setInput( s["instancerFilters"]["out"] )
	s["group"]["in"][1].setInput( s["instancerLights"]["out"] )

	s["render"] = GafferArnold.ArnoldRender()
	s["render"]["in"].setInput( s["group"]["out"] )

	# Measure scene translation only, without invoking Arnold proper.
	with Gaffer.Context() as c :
		c["scene:render:sceneTranslationOnly"] = IECore.BoolData( True )
		s["render"]["task"].execute()
def testAbortRaises( self ) :
    """A render aborted by Arnold (here because of a missing texture file)
    should surface as a RuntimeError from the render task."""

    s = Gaffer.ScriptNode()

    s["plane"] = GafferScene.Plane()
    s["plane"]["transform"]["translate"]["z"].setValue( -10 )

    s["shader"] = GafferArnold.ArnoldShader()
    s["shader"].loadShader( "image" )
    # Missing texture should cause render to abort
    s["shader"]["parameters"]["filename"].setValue( "iDontExist" )

    s["filter"] = GafferScene.PathFilter()
    s["filter"]["paths"].setValue( IECore.StringVectorData( [ "/plane" ] ) )

    s["shaderAssignment"] = GafferScene.ShaderAssignment()
    s["shaderAssignment"]["in"].setInput( s["plane"]["out"] )
    s["shaderAssignment"]["filter"].setInput( s["filter"]["out"] )
    s["shaderAssignment"]["shader"].setInput( s["shader"]["out"] )

    s["outputs"] = GafferScene.Outputs()
    s["outputs"].addOutput(
        "beauty",
        IECoreScene.Output(
            self.temporaryDirectory() + "/test.tif",
            "tiff",
            "rgba",
            {}
        )
    )
    s["outputs"]["in"].setInput( s["shaderAssignment"]["out"] )

    s["render"] = GafferArnold.ArnoldRender()
    s["render"]["in"].setInput( s["outputs"]["out"] )

    # The abort must propagate to the caller as an exception.
    six.assertRaisesRegex( self, RuntimeError, "Render aborted", s["render"]["task"].execute )
def testOSLShaders( self ) :
    """MaterialX OSL shaders (swizzle + pack) should be translated for
    Arnold and render the expected constant colour."""

    # Swizzle the blue channel of (0, 0, 1) into a float...
    swizzle = GafferOSL.OSLShader()
    swizzle.loadShader( "MaterialX/mx_swizzle_color_float" )
    swizzle["parameters"]["in"].setValue( imath.Color3f( 0, 0, 1 ) )
    swizzle["parameters"]["channels"].setValue( "b" )

    # ...and pack it back into a colour's first channel.
    pack = GafferOSL.OSLShader()
    pack.loadShader( "MaterialX/mx_pack_color" )
    pack["parameters"]["in1"].setInput( swizzle["out"]["out"] )

    ball = GafferArnold.ArnoldShaderBall()
    ball["shader"].setInput( pack["out"] )

    catalogue = GafferImage.Catalogue()

    # Stream the beauty pass into the catalogue via a display driver.
    outputs = GafferScene.Outputs()
    outputs.addOutput(
        "beauty",
        IECoreScene.Output(
            "test",
            "ieDisplay",
            "rgba",
            {
                "driverType" : "ClientDisplayDriver",
                "displayHost" : "localhost",
                "displayPort" : str( catalogue.displayDriverServer().portNumber() ),
                "remoteDisplayType" : "GafferImage::GafferDisplayDriver",
            }
        )
    )
    outputs["in"].setInput( ball["out"] )

    render = GafferArnold.ArnoldRender()
    render["in"].setInput( outputs["out"] )

    with GafferTest.ParallelAlgoTest.UIThreadCallHandler() as handler :
        render["task"].execute()
        handler.waitFor( 0.1 ) # Just need to let the catalogue update

    # Swizzling blue into the first channel : (0, 0, 1) -> (1, 0, 0).
    self.assertEqual( self.__color4fAtUV( catalogue, imath.V2f( 0.5 ) ), imath.Color4f( 1, 0, 0, 1 ) )
def testDefaultLightsMistakesDontForceLinking( self ) :
    """A non-light mistakenly placed in the "defaultLights" set must not
    cause unnecessary light links to be emitted for it."""

    light = GafferArnold.ArnoldLight()
    light.loadShader( "point_light" )

    sphere = GafferScene.Sphere()

    # It doesn't make sense to add a non-light to the "defaultLights" set,
    # but in the event of user error we don't want to
    # emit light links unnecessarily.
    sphereSet = GafferScene.Set()
    sphereSet["in"].setInput( sphere["out"] )
    sphereSet["name"].setValue( "defaultLights" )
    sphereSet["paths"].setValue( IECore.StringVectorData( [ "/sphere" ] ) )

    group = GafferScene.Group()
    group["in"][0].setInput( light["out"] )
    group["in"][1].setInput( sphereSet["out"] )

    render = GafferArnold.ArnoldRender()
    render["in"].setInput( group["out"] )
    render["mode"].setValue( render.Mode.SceneDescriptionMode )
    render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
    render["task"].execute()

    with IECoreArnold.UniverseBlock( writable = True ) :

        arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

        # The sphere must exist but carry no light-linking attributes.
        sphere = arnold.AiNodeLookUpByName( "/group/sphere" )
        self.assertIsNotNone( sphere )
        self.assertEqual( arnold.AiArrayGetNumElements( arnold.AiNodeGetArray( sphere, "light_group" ) ), 0 )
        self.assertFalse( arnold.AiNodeGetBool( sphere, "use_light_group" ) )
def __color4fAtUV( self, image, uv ) :
    """Sample `image` at normalised UV coordinates and return the colour."""

    sampler = GafferImage.ImageSampler()
    sampler["image"].setInput( image["out"] )

    # Convert normalised UVs to pixel coordinates via the display window.
    size = image["out"]["format"].getValue().getDisplayWindow().size()
    sampler["pixel"].setValue( uv * imath.V2f( size.x, size.y ) )

    return sampler["color"].getValue()
def __arrayToSet( self, a ) :
    """Convert an Arnold AtArray of strings to a Python set.

    Raises TypeError if the array's element type is not AI_TYPE_STRING.
    """

    # The element type is a property of the array, not of individual
    # elements, so check it once up front instead of on every iteration.
    # (This also rejects empty arrays of the wrong type, which the old
    # per-element check silently accepted.)
    if arnold.AiArrayGetType( a.contents ) != arnold.AI_TYPE_STRING :
        raise TypeError( "Unsupported array element type" )

    result = set()
    for i in range( 0, arnold.AiArrayGetNumElements( a.contents ) ) :
        result.add( arnold.AiArrayGetStr( a, i ) )
    return result
def testPerformanceMonitorDoesntCrash( self ) :
    """Enabling the performance monitor option must not crash scene export."""

    options = GafferScene.StandardOptions()
    options["options"]["performanceMonitor"]["value"].setValue( True )
    options["options"]["performanceMonitor"]["enabled"].setValue( True )

    render = GafferArnold.ArnoldRender()
    render["in"].setInput( options["out"] )
    render["mode"].setValue( render.Mode.SceneDescriptionMode )
    render["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )

    # Only executing the export ; the test passes if no exception is raised.
    render["task"].execute()
def testShaderSubstitutions( self ) :
    """`<attr:...>` tokens in shader string parameters should be substituted
    from the attributes visible at each location, with local attributes
    overriding inherited ones and global attributes as the fallback."""

    s = Gaffer.ScriptNode()

    # A plane with attributes A and B, and a child cube that overrides B.
    s["plane"] = GafferScene.Plane()
    s["planeAttrs"] = GafferScene.CustomAttributes()
    s["planeAttrs"]["in"].setInput( s["plane"]["out"] )
    s["planeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "A", Gaffer.StringPlug( "value", defaultValue = 'bar' ) ) )
    s["planeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'foo' ) ) )

    s["cube"] = GafferScene.Cube()
    s["cubeAttrs"] = GafferScene.CustomAttributes()
    s["cubeAttrs"]["in"].setInput( s["cube"]["out"] )
    s["cubeAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'override' ) ) )

    s["parent"] = GafferScene.Parent()
    s["parent"]["in"].setInput( s["planeAttrs"]["out"] )
    s["parent"]["children"][0].setInput( s["cubeAttrs"]["out"] )
    s["parent"]["parent"].setValue( "/plane" )

    # A surface shader with substitution tokens in its filename.
    s["shader"] = GafferArnold.ArnoldShader()
    s["shader"].loadShader( "image" )
    s["shader"]["parameters"]["filename"].setValue( "<attr:A>/path/<attr:B>.tx" )

    s["filter"] = GafferScene.PathFilter()
    s["filter"]["paths"].setValue( IECore.StringVectorData( [ "/plane" ] ) )

    s["shaderAssignment"] = GafferScene.ShaderAssignment()
    s["shaderAssignment"]["in"].setInput( s["parent"]["out"] )
    s["shaderAssignment"]["filter"].setInput( s["filter"]["out"] )
    s["shaderAssignment"]["shader"].setInput( s["shader"]["out"] )

    # A light with substitutions in its own parameters and in its gobo.
    s["light"] = GafferArnold.ArnoldLight()
    s["light"].loadShader( "photometric_light" )
    s["light"]["parameters"]["filename"].setValue( "/path/<attr:A>.ies" )

    s["goboTexture"] = GafferArnold.ArnoldShader()
    s["goboTexture"].loadShader( "image" )
    s["goboTexture"]["parameters"]["filename"].setValue( "<attr:B>/gobo.tx" )

    s["gobo"] = GafferArnold.ArnoldShader()
    s["gobo"].loadShader( "gobo" )
    s["gobo"]["parameters"]["slidemap"].setInput( s["goboTexture"]["out"] )

    s["goboAssign"] = GafferScene.ShaderAssignment()
    s["goboAssign"]["in"].setInput( s["light"]["out"] )
    s["goboAssign"]["shader"].setInput( s["gobo"]["out"] )

    # A light filter with a substitution in one of its parameters.
    s["lightBlocker"] = GafferArnold.ArnoldLightFilter()
    s["lightBlocker"].loadShader( "light_blocker" )
    s["lightBlocker"]["parameters"]["geometry_type"].setValue( "<attr:geometryType>" )

    s["lightGroup"] = GafferScene.Group()
    s["lightGroup"]["name"].setValue( "lightGroup" )
    s["lightGroup"]["in"][0].setInput( s["goboAssign"]["out"] )
    s["lightGroup"]["in"][1].setInput( s["lightBlocker"]["out"] )

    s["parent2"] = GafferScene.Parent()
    s["parent2"]["in"].setInput( s["shaderAssignment"]["out"] )
    s["parent2"]["children"][0].setInput( s["lightGroup"]["out"] )
    s["parent2"]["parent"].setValue( "/" )

    # Global attributes provide the fallback substitution values.
    s["globalAttrs"] = GafferScene.CustomAttributes()
    s["globalAttrs"]["in"].setInput( s["parent2"]["out"] )
    s["globalAttrs"]["global"].setValue( True )
    s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "A", Gaffer.StringPlug( "value", defaultValue = 'default1' ) ) )
    s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "B", Gaffer.StringPlug( "value", defaultValue = 'default2' ) ) )
    s["globalAttrs"]["attributes"].addChild( Gaffer.NameValuePlug( "geometryType", Gaffer.StringPlug( "value", defaultValue = 'cylinder' ) ) )

    s["render"] = GafferArnold.ArnoldRender()
    s["render"]["in"].setInput( s["globalAttrs"]["out"] )
    s["render"]["mode"].setValue( s["render"].Mode.SceneDescriptionMode )
    s["render"]["fileName"].setValue( self.temporaryDirectory() + "/test.ass" )
    s["render"]["task"].execute()

    with IECoreArnold.UniverseBlock( writable = True ) :

        arnold.AiASSLoad( self.temporaryDirectory() + "/test.ass" )

        # Locations use their own attribute values...
        plane = arnold.AiNodeLookUpByName( "/plane" )
        shader = arnold.AiNodeGetPtr( plane, "shader" )
        self.assertEqual( arnold.AiNodeGetStr( shader, "filename" ), "bar/path/foo.tx" )

        # ...children override inherited values...
        cube = arnold.AiNodeLookUpByName( "/plane/cube" )
        shader2 = arnold.AiNodeGetPtr( cube, "shader" )
        self.assertEqual( arnold.AiNodeGetStr( shader2, "filename" ), "bar/path/override.tx" )

        # ...and globals provide the fallback.
        light = arnold.AiNodeLookUpByName( "light:/lightGroup/light" )
        self.assertEqual( arnold.AiNodeGetStr( light, "filename" ), "/path/default1.ies" )

        gobo = arnold.AiNodeGetPtr( light, "filters" )
        goboTex = arnold.AiNodeGetLink( gobo, "slidemap" )
        self.assertEqual( arnold.AiNodeGetStr( goboTex, "filename" ), "default2/gobo.tx" )

        lightFilter = arnold.AiNodeLookUpByName( "lightFilter:/lightGroup/lightFilter" )
        self.assertEqual( arnold.AiNodeGetStr( lightFilter, "geometry_type" ), "cylinder" )
if __name__ == "__main__":
    # Allow the test module to be run directly.
    unittest.main()
| true | true |
f73501752e6c23eaebdfdfc652c07fdc952f82a3 | 1,049 | py | Python | tools/generateData_sensor_malfunction.py | Hemankita/refarch-kc-container-ms | c2e85eacabe8a194782835b04f3410c2d7956a9b | [
"Apache-2.0"
] | null | null | null | tools/generateData_sensor_malfunction.py | Hemankita/refarch-kc-container-ms | c2e85eacabe8a194782835b04f3410c2d7956a9b | [
"Apache-2.0"
] | null | null | null | tools/generateData_sensor_malfunction.py | Hemankita/refarch-kc-container-ms | c2e85eacabe8a194782835b04f3410c2d7956a9b | [
"Apache-2.0"
] | null | null | null | import csv
import json
from random import gauss
import random
import datetime
import numpy as np
import sys
import pandas as pd
# Schema shared by the generator below. Kept as a module-level DataFrame for
# backwards compatibility with code that inspects `df` after a call.
df = pd.DataFrame(columns=['Timestamp', 'ID', 'Temperature(celsius)',
                           'Target_Temperature(celsius)', 'Amp',
                           'CumulativePowerConsumption', 'ContentType',
                           'Humidity', 'CO2', 'Time_Door_Open',
                           'Maintainence_Required', 'Defrost_Cycle'])


def buildJSON():
    """Generate 1000 simulated faulty-sensor readings at 15-minute intervals.

    Returns a JSON string encoding a list of 1000 record dicts. The
    module-level `df` is replaced with a DataFrame of the same records.
    """
    global df

    # One random sensor id per generated batch (don't shadow builtin `id`).
    sensor_id = random.randint(1001, 2000)
    start = datetime.datetime.today()

    # Build the records as plain dicts, then construct the DataFrame once.
    # (Appending row-by-row with `df.loc[...]` is quadratic.)
    records = []
    for step in range(1000):
        timestamp = (start + datetime.timedelta(minutes=15 * step)).strftime('%Y-%m-%d T%H:%M Z')
        records.append({
            'Timestamp': timestamp,
            'ID': sensor_id,
            'Temperature(celsius)': gauss(5.0, 2.0),
            'Target_Temperature(celsius)': 4.4,
            'Amp': gauss(2.5, 1.0),
            'CumulativePowerConsumption': gauss(10.0, 2.0),
            'ContentType': random.randint(1, 5),
            'Humidity': gauss(10.5, 5.5),
            'CO2': gauss(10.5, 5.0),
            'Time_Door_Open': gauss(8.0, 2.0),
            'Maintainence_Required': 1,
            'Defrost_Cycle': 6,
        })

    df = pd.DataFrame(records, columns=df.columns)
    return json.dumps(records)
| 31.787879 | 190 | 0.650143 | import csv
import json
from random import gauss
import random
import datetime
import numpy as np
import sys
import pandas as pd
df = pd.DataFrame(columns=['Timestamp', 'ID', 'Temperature(celsius)', 'Target_Temperature(celsius)', 'Amp', 'CumulativePowerConsumption', 'ContentType', 'Humidity', 'CO2', 'Time_Door_Open',
'Maintainence_Required', 'Defrost_Cycle'])
def buildJSON():
id = random.randint(1001,2000)
Today= datetime.datetime.today()
date_list = [Today + datetime.timedelta(minutes=15*x) for x in range(0, 1000)]
range_list=np.linspace(1,2,1000)
index=0
for i in range_list:
timestamp = date_list[index].strftime('%Y-%m-%d T%H:%M Z')
df.loc[i] = [timestamp, id, gauss(5.0, 2.0), 4.4, gauss(2.5,1.0), gauss(10.0,2.0), random.randint(1,5),gauss(10.5, 5.5), gauss(10.5, 5.0), gauss(8.0, 2.0), 1, 6]
index=index+1
d = [dict([
(colname, row[i])
for i,colname in enumerate(df.columns)]) for row in df.values]
return json.dumps(d)
| true | true |
f73502a0b1963176fdafca20bee31f09321e6c49 | 542 | py | Python | blogengine/manage.py | forgoty/django-blog | 2d4c2353d3614be04f06bdb3b713c8339f7e00b5 | [
"MIT"
] | null | null | null | blogengine/manage.py | forgoty/django-blog | 2d4c2353d3614be04f06bdb3b713c8339f7e00b5 | [
"MIT"
] | null | null | null | blogengine/manage.py | forgoty/django-blog | 2d4c2353d3614be04f06bdb3b713c8339f7e00b5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Point Django at this project's settings before management imports run.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blogengine.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as import_error:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from import_error
    execute_from_command_line(sys.argv)
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blogengine.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| true | true |
f73503258b60efbb2ff116d604275c8ed287bfee | 269 | py | Python | helpers/cols.py | ijlyttle/reactivity-demo-dash | 93260437a78c257c43cf8ddedefea3acbff4eb66 | [
"MIT"
] | null | null | null | helpers/cols.py | ijlyttle/reactivity-demo-dash | 93260437a78c257c43cf8ddedefea3acbff4eb66 | [
"MIT"
] | 1 | 2022-02-16T10:58:32.000Z | 2022-02-19T17:58:17.000Z | helpers/cols.py | ijlyttle/reactivity-demo-dash | 93260437a78c257c43cf8ddedefea3acbff4eb66 | [
"MIT"
] | null | null | null | import pandas as pd
def cols_choice (df, include):
return df.select_dtypes(include=include).columns.to_list()
def cols_header (data_records):
if (len(data_records) == 0):
return []
return [{'name': v, 'id': v} for v in data_records[0].keys()]
| 22.416667 | 65 | 0.650558 | import pandas as pd
def cols_choice (df, include):
return df.select_dtypes(include=include).columns.to_list()
def cols_header (data_records):
if (len(data_records) == 0):
return []
return [{'name': v, 'id': v} for v in data_records[0].keys()]
| true | true |
f7350478178bbe07092ac6792210521df315bc7f | 303 | py | Python | coding_intereview/1015. Smallest Integer Divisible by K.py | purusharthmalik/Python-Bootcamp | 2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1 | [
"MIT"
] | 2 | 2020-10-03T16:38:10.000Z | 2021-06-03T11:01:59.000Z | coding_intereview/1015. Smallest Integer Divisible by K.py | purusharthmalik/Python-Bootcamp | 2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1 | [
"MIT"
] | null | null | null | coding_intereview/1015. Smallest Integer Divisible by K.py | purusharthmalik/Python-Bootcamp | 2ed1cf886d1081de200b0fdd4cb4e28008c7e3d1 | [
"MIT"
] | 1 | 2020-10-03T16:38:02.000Z | 2020-10-03T16:38:02.000Z | class Solution:
def smallestRepunitDivByK(self, k: int) -> int:
if k % 2 == 0 or k % 5 == 0:
return -1
if k == 1:
return 1
count = 1
n = 1
while (n % k > 0):
n = (n % k) * 10 + 1
count += 1
return count
| 23.307692 | 51 | 0.379538 | class Solution:
def smallestRepunitDivByK(self, k: int) -> int:
if k % 2 == 0 or k % 5 == 0:
return -1
if k == 1:
return 1
count = 1
n = 1
while (n % k > 0):
n = (n % k) * 10 + 1
count += 1
return count
| true | true |
f73505c7a1c27e7bd446ee7783c3f5f7ec32f268 | 449 | py | Python | leetcode/0278_first_bad_version.py | jacquerie/leetcode | a05e6b832eb0e0740aaff7b2eb3109038ad404bf | [
"MIT"
] | 3 | 2018-05-10T09:56:49.000Z | 2020-11-07T18:09:42.000Z | leetcode/0278_first_bad_version.py | jacquerie/leetcode | a05e6b832eb0e0740aaff7b2eb3109038ad404bf | [
"MIT"
] | null | null | null | leetcode/0278_first_bad_version.py | jacquerie/leetcode | a05e6b832eb0e0740aaff7b2eb3109038ad404bf | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
def isBadVersion(n):
    """Stub for the LeetCode-provided API: versions 3 and later are bad."""
    return n >= 3


class Solution:
    def firstBadVersion(self, n):
        """Binary-search for the smallest bad version in 1..n.

        Returns n + 1 when no version in range is bad.
        """
        lo, hi = 1, n
        while lo <= hi:
            mid = (lo + hi) // 2
            if isBadVersion(mid):
                hi = mid - 1
            else:
                lo = mid + 1
        return lo
if __name__ == '__main__':
    # Quick self-check when run as a script.
    solution = Solution()
    assert 3 == solution.firstBadVersion(5)
| 17.269231 | 43 | 0.494432 |
def isBadVersion(n):
return n >= 3
class Solution:
def firstBadVersion(self, n):
first, last = 1, n
while first <= last:
mid = (first + last) // 2
if isBadVersion(mid):
last = mid - 1
else:
first = mid + 1
return first
if __name__ == '__main__':
solution = Solution()
assert 3 == solution.firstBadVersion(5)
| true | true |
f735073dd8e4cb97b818e6d3e74a9803c651d4c0 | 473 | py | Python | animal-classifier/__init__.py | xvinay28x/cat_dog_classifier_library | 4d56f90f9d3e91051dba71dcdea78930c4ac0e52 | [
"MIT"
] | 1 | 2021-05-20T16:44:47.000Z | 2021-05-20T16:44:47.000Z | animal-classifier/__init__.py | xvinay28x/cat_dog_classifier_library | 4d56f90f9d3e91051dba71dcdea78930c4ac0e52 | [
"MIT"
] | null | null | null | animal-classifier/__init__.py | xvinay28x/cat_dog_classifier_library | 4d56f90f9d3e91051dba71dcdea78930c4ac0e52 | [
"MIT"
] | null | null | null | from tensorflow import keras
def classify(path):
    """Classify the image at `path` with the saved cat/dog model.

    Returns 1 when the model's score is >= 0.5, else 0.
    (Which class each label denotes depends on how the model was
    trained -- TODO confirm against the training code.)
    """
    # NOTE(review): the model is re-loaded from disk on every call; callers
    # classifying many images may want to cache it.
    model = keras.models.load_model("Cat_Dog_Classification.h5")
    # BUG FIX: the Keras helper is `load_img`, not `load_image`; the old
    # call raised AttributeError at runtime.
    image = keras.preprocessing.image.load_img(path, target_size=(200, 200))
    image_array = keras.preprocessing.image.img_to_array(image)
    # Add a batch dimension and normalise pixel values to [0, 1].
    batch = image_array.reshape(1, 200, 200, 3)
    normalized = batch / 255
    result = model.predict(normalized)
    if result >= 0.5:
        return 1
    else:
        return 0
| 33.785714 | 81 | 0.72093 | from tensorflow import keras
def classify(path):
model = keras.models.load_model("Cat_Dog_Classification.h5")
load_image = keras.preprocessing.image.load_image(path,target_size=(200,200))
image_array = keras.preprocessing.image.img_to_array(load_image)
reshape_array = image_array.reshape(1,200,200,3)
array_normalize = reshape_array/255
result = model.predict(array_normalize)
if result >= 0.5:
return 1
else:
return 0
| true | true |
f7350a516a0b79650e657142a36947cc0f4ff3df | 558 | py | Python | drfstripe/templatetags/payments_tags.py | brandon-fox/django-rest-framework-stripe | 883c1c82e64c67d5379460b5f6d2ce79b89b7e85 | [
"MIT"
] | null | null | null | drfstripe/templatetags/payments_tags.py | brandon-fox/django-rest-framework-stripe | 883c1c82e64c67d5379460b5f6d2ce79b89b7e85 | [
"MIT"
] | null | null | null | drfstripe/templatetags/payments_tags.py | brandon-fox/django-rest-framework-stripe | 883c1c82e64c67d5379460b5f6d2ce79b89b7e85 | [
"MIT"
] | null | null | null | from django import template
from ..forms import PlanForm
register = template.Library()
@register.inclusion_tag("drfstripe/_change_plan_form.html", takes_context=True)
def change_plan_form(context):
context.update({
"form": PlanForm(initial={
"plan": context["request"].user.customer.current_subscription.plan
})
})
return context
@register.inclusion_tag("drfstripe/_subscribe_form.html", takes_context=True)
def subscribe_form(context):
    """Render an empty subscription form."""
    context.update({"form": PlanForm()})
    return context
| 22.32 | 79 | 0.700717 | from django import template
from ..forms import PlanForm
register = template.Library()
@register.inclusion_tag("drfstripe/_change_plan_form.html", takes_context=True)
def change_plan_form(context):
context.update({
"form": PlanForm(initial={
"plan": context["request"].user.customer.current_subscription.plan
})
})
return context
@register.inclusion_tag("drfstripe/_subscribe_form.html", takes_context=True)
def subscribe_form(context):
context.update({
"form": PlanForm()
})
return context
| true | true |
f7350b3ed0632219aec9672fa33d4b3ba534f8ec | 299 | py | Python | tf_euler/python/euler_ops/type_ops.py | lixusign/euler | c8ce1968367aec2807cc542fcdb5958e3b1b9295 | [
"Apache-2.0"
] | 1 | 2019-09-18T02:18:06.000Z | 2019-09-18T02:18:06.000Z | tf_euler/python/euler_ops/type_ops.py | DingXiye/euler | c45225119c5b991ca953174f06c2f223562f34c9 | [
"Apache-2.0"
] | null | null | null | tf_euler/python/euler_ops/type_ops.py | DingXiye/euler | c45225119c5b991ca953174f06c2f223562f34c9 | [
"Apache-2.0"
] | 1 | 2020-09-18T13:37:08.000Z | 2020-09-18T13:37:08.000Z | # Copyright 2018 Alibaba Inc. All Rights Conserved
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes
import os
import tensorflow as tf
from tf_euler.python.euler_ops import base
get_node_type = base._LIB_OP.get_node_type
| 19.933333 | 50 | 0.842809 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ctypes
import os
import tensorflow as tf
from tf_euler.python.euler_ops import base
get_node_type = base._LIB_OP.get_node_type
| true | true |
f7350b819bf2183789b58144d7ef8095f9e3572e | 940 | py | Python | test/io/testxyzwriter.py | alvarovm/cclib | 18a87de7fcb15c4133e1fd21939401672438ebb7 | [
"BSD-3-Clause"
] | null | null | null | test/io/testxyzwriter.py | alvarovm/cclib | 18a87de7fcb15c4133e1fd21939401672438ebb7 | [
"BSD-3-Clause"
] | null | null | null | test/io/testxyzwriter.py | alvarovm/cclib | 18a87de7fcb15c4133e1fd21939401672438ebb7 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016, the cclib development team
#
# This file is part of cclib (http://cclib.github.io) and is distributed under
# the terms of the BSD 3-Clause License.
"""Unit tests for writer xyzwriter module."""
import os
import unittest
import cclib
# Directory containing this test module, its resolved absolute path, and the
# repository root (two levels up) where the reference data files live.
__filedir__ = os.path.dirname(__file__)
__filepath__ = os.path.realpath(__filedir__)
__datadir__ = os.path.join(__filepath__, "..", "..")
class XYZTest(unittest.TestCase):
    """Unit tests for the cclib XYZ writer."""

    def setUp(self):
        self.XYZ = cclib.io.XYZ

    def test_init(self):
        """The writer should keep the ccData instance it was constructed with."""
        logfile = os.path.join(__datadir__, "data/ADF/basicADF2007.01/dvb_gopt.adfout")
        parsed = cclib.io.ccopen(logfile).parse()
        writer = cclib.io.xyzwriter.XYZ(parsed)
        self.assertEqual(writer.ccdata, parsed)
if __name__ == "__main__":
    # Allow the test module to be run directly.
    unittest.main()
| 24.736842 | 85 | 0.678723 |
import os
import unittest
import cclib
__filedir__ = os.path.dirname(__file__)
__filepath__ = os.path.realpath(__filedir__)
__datadir__ = os.path.join(__filepath__, "..", "..")
class XYZTest(unittest.TestCase):
def setUp(self):
self.XYZ = cclib.io.XYZ
def test_init(self):
fpath = os.path.join(__datadir__, "data/ADF/basicADF2007.01/dvb_gopt.adfout")
data = cclib.io.ccopen(fpath).parse()
xyz = cclib.io.xyzwriter.XYZ(data)
self.assertEqual(xyz.ccdata, data)
if __name__ == "__main__":
unittest.main()
| true | true |
f7350bcea3057e9df338ed9f0dbbcc6dfb9d9b74 | 388 | py | Python | featurewiz/__version__.py | hercules261188/featurewiz | b52ab472a76b87440fd2482f315e14c71b4061df | [
"Apache-2.0"
] | 1 | 2021-12-15T17:11:24.000Z | 2021-12-15T17:11:24.000Z | featurewiz/__version__.py | hercules261188/featurewiz | b52ab472a76b87440fd2482f315e14c71b4061df | [
"Apache-2.0"
] | null | null | null | featurewiz/__version__.py | hercules261188/featurewiz | b52ab472a76b87440fd2482f315e14c71b4061df | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Specifies the version of the featurewiz package."""
__title__ = "featurewiz"
__author__ = "Ram Seshadri"
__description__ = "Advanced Feature Engineering and Feature Selection for any data set, any size"
__url__ = "https://github.com/Auto_ViML/featurewiz.git"
__version__ = "0.0.51"
__license__ = "Apache License 2.0"
__copyright__ = "2020-21 Google"
| 35.272727 | 98 | 0.721649 |
__title__ = "featurewiz"
__author__ = "Ram Seshadri"
__description__ = "Advanced Feature Engineering and Feature Selection for any data set, any size"
__url__ = "https://github.com/Auto_ViML/featurewiz.git"
__version__ = "0.0.51"
__license__ = "Apache License 2.0"
__copyright__ = "2020-21 Google"
| true | true |
f7350c06e31b0bd40b501e7b2cb33f083a294241 | 3,426 | py | Python | st2reactor/st2reactor/sensor/base.py | saucetray/st2 | 8f507d6c8d9483c8371e386fe2b7998596856fd7 | [
"Apache-2.0"
] | 2 | 2021-08-04T01:04:06.000Z | 2021-08-04T01:04:08.000Z | st2reactor/st2reactor/sensor/base.py | saucetray/st2 | 8f507d6c8d9483c8371e386fe2b7998596856fd7 | [
"Apache-2.0"
] | 1 | 2022-03-31T03:53:22.000Z | 2022-03-31T03:53:22.000Z | st2reactor/st2reactor/sensor/base.py | saucetray/st2 | 8f507d6c8d9483c8371e386fe2b7998596856fd7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import abc
import six
from st2common.util import concurrency
# Public API of this module; BaseSensor is intentionally not exported.
__all__ = [
    'Sensor',
    'PollingSensor'
]
@six.add_metaclass(abc.ABCMeta)
class BaseSensor(object):
    """
    Base Sensor class - not to be instantiated directly.

    Subclasses implement the abstract lifecycle (setup/run/cleanup) and
    trigger-change hooks below.
    """

    def __init__(self, sensor_service, config=None):
        """
        :param sensor_service: Sensor Service instance.
        :type sensor_service: :class:``st2reactor.container.sensor_wrapper.SensorService``

        :keyword config: Sensor config.
        :type config: ``dict`` or None
        """
        self._sensor_service = sensor_service  # Deprecate in the future
        self.sensor_service = sensor_service
        self._config = config or {}  # Deprecate in the future
        self.config = self._config

    @abc.abstractmethod
    def setup(self):
        """
        Run the sensor initialization / setup code (if any).
        """
        pass

    @abc.abstractmethod
    def run(self):
        """
        Run the sensor.
        """
        pass

    @abc.abstractmethod
    def cleanup(self):
        """
        Run the sensor cleanup code (if any).
        """
        pass

    @abc.abstractmethod
    def add_trigger(self, trigger):
        """
        Runs when trigger is created
        """
        pass

    @abc.abstractmethod
    def update_trigger(self, trigger):
        """
        Runs when trigger is updated
        """
        pass

    @abc.abstractmethod
    def remove_trigger(self, trigger):
        """
        Runs when trigger is deleted
        """
        pass
class Sensor(BaseSensor):
    """
    Base class to be inherited from by the passive sensors.

    Passive sensors receive events (e.g. via webhooks or message queues)
    rather than polling for them.
    """

    @abc.abstractmethod
    def run(self):
        pass
class PollingSensor(BaseSensor):
    """
    Base class to be inherited from by the active sensors.

    Active sensors periodically poll a 3rd party system for new information.
    """

    def __init__(self, sensor_service, config=None, poll_interval=5):
        # poll_interval is in seconds.
        super(PollingSensor, self).__init__(sensor_service=sensor_service, config=config)
        self._poll_interval = poll_interval

    @abc.abstractmethod
    def poll(self):
        """
        Poll 3rd party system for new information.
        """
        pass

    def run(self):
        # Blocks forever, calling poll() every poll_interval seconds.
        while True:
            self.poll()
            concurrency.sleep(self._poll_interval)

    def get_poll_interval(self):
        """
        Retrieve current poll interval.

        :return: Current poll interval.
        :rtype: ``float``
        """
        return self._poll_interval

    def set_poll_interval(self, poll_interval):
        """
        Set the poll interval.

        :param poll_interval: Poll interval to use.
        :type poll_interval: ``float``
        """
        self._poll_interval = poll_interval
| 24.297872 | 90 | 0.626678 |
from __future__ import absolute_import
import abc
import six
from st2common.util import concurrency
__all__ = [
'Sensor',
'PollingSensor'
]
@six.add_metaclass(abc.ABCMeta)
class BaseSensor(object):
def __init__(self, sensor_service, config=None):
self._sensor_service = sensor_service
self.sensor_service = sensor_service
self._config = config or {}
self.config = self._config
@abc.abstractmethod
def setup(self):
pass
@abc.abstractmethod
def run(self):
pass
@abc.abstractmethod
def cleanup(self):
pass
@abc.abstractmethod
def add_trigger(self, trigger):
pass
@abc.abstractmethod
def update_trigger(self, trigger):
pass
@abc.abstractmethod
def remove_trigger(self, trigger):
pass
class Sensor(BaseSensor):
@abc.abstractmethod
def run(self):
pass
class PollingSensor(BaseSensor):
def __init__(self, sensor_service, config=None, poll_interval=5):
super(PollingSensor, self).__init__(sensor_service=sensor_service, config=config)
self._poll_interval = poll_interval
@abc.abstractmethod
def poll(self):
pass
def run(self):
while True:
self.poll()
concurrency.sleep(self._poll_interval)
def get_poll_interval(self):
return self._poll_interval
def set_poll_interval(self, poll_interval):
self._poll_interval = poll_interval
| true | true |
f7350c2587fbe9b2c791461b60f362d4107f7172 | 1,470 | py | Python | config.py | crazynayan/tpf2 | 3552163a1dab7cd5e371d752a2651e73e8cd8e1e | [
"MIT"
] | null | null | null | config.py | crazynayan/tpf2 | 3552163a1dab7cd5e371d752a2651e73e8cd8e1e | [
"MIT"
] | 2 | 2021-03-23T02:46:31.000Z | 2021-08-04T07:39:45.000Z | config.py | crazynayan/tpf2 | 3552163a1dab7cd5e371d752a2651e73e8cd8e1e | [
"MIT"
] | null | null | null | import os
from base64 import b64encode
from socket import gethostname, gethostbyname
class Config:
    """Application-wide configuration constants."""

    # Secret key from the environment, with a random per-process fallback.
    SECRET_KEY = os.environ.get("SECRET_KEY") or b64encode(os.urandom(24)).decode()
    SERVER_URL = os.environ.get("SERVER_URL") or f"http://{gethostbyname(gethostname())}:8000"
    # Harden cookies only in production.
    CI_SECURITY = True if os.environ.get("ENVIRONMENT") == "prod" else False
    DOWNLOAD_PATH = os.path.join(os.path.abspath(os.sep), "tmp")
    BUCKET = "tpf-listings"
    SESSION_COOKIE_SECURE = CI_SECURITY
    TOKEN_EXPIRY = 3600  # 1 hour = 3600 seconds
    # Register width and derived mask for the emulated machine.
    REG_BITS: int = 32
    REG_MAX: int = (1 << REG_BITS) - 1  # 0xFFFFFFFF
    REGISTERS: tuple = ("R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10", "R11", "R12", "R13", "R14",
                        "R15")
    ECB_LEVELS: tuple = ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F")
    DEFAULT_MACROS: tuple = ("WA0AA", "EB0EB", "GLOBAL", "MI0MI")
    AAAPNR: str = "AAAAAA"
    # (form field name, stored key) pairs for PNR sections.
    PNR_KEYS = [
        ("name", "NAME"),
        ("hfax", "HFAX"),
        ("gfax", "GFAX"),
        ("fqtv", "FQTV"),
        ("itin", "ITIN"),
        ("subs_card_seg", "SUBS_CARD_SEG"),
        ("group_plan", "GROUP_PLAN"),
        ("rcvd_from", "RCVD_FROM"),
        ("phone", "PHONE"),
        ("record_loc", "RECORD_LOC"),
        ("remarks", "REMARKS"),
        ("header", "HEADER"),
        ("prs_seats", "PRS_SEATS"),
        ("vcr_coupon", "VCR_COUPON"),
        ("ice_data", "ICE_DATA"),
    ]
| 38.684211 | 118 | 0.538776 | import os
from base64 import b64encode
from socket import gethostname, gethostbyname
class Config:
SECRET_KEY = os.environ.get("SECRET_KEY") or b64encode(os.urandom(24)).decode()
SERVER_URL = os.environ.get("SERVER_URL") or f"http://{gethostbyname(gethostname())}:8000"
CI_SECURITY = True if os.environ.get("ENVIRONMENT") == "prod" else False
DOWNLOAD_PATH = os.path.join(os.path.abspath(os.sep), "tmp")
BUCKET = "tpf-listings"
SESSION_COOKIE_SECURE = CI_SECURITY
TOKEN_EXPIRY = 3600
REG_BITS: int = 32
REG_MAX: int = (1 << REG_BITS) - 1
REGISTERS: tuple = ("R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10", "R11", "R12", "R13", "R14",
"R15")
ECB_LEVELS: tuple = ("0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F")
DEFAULT_MACROS: tuple = ("WA0AA", "EB0EB", "GLOBAL", "MI0MI")
AAAPNR: str = "AAAAAA"
PNR_KEYS = [
("name", "NAME"),
("hfax", "HFAX"),
("gfax", "GFAX"),
("fqtv", "FQTV"),
("itin", "ITIN"),
("subs_card_seg", "SUBS_CARD_SEG"),
("group_plan", "GROUP_PLAN"),
("rcvd_from", "RCVD_FROM"),
("phone", "PHONE"),
("record_loc", "RECORD_LOC"),
("remarks", "REMARKS"),
("header", "HEADER"),
("prs_seats", "PRS_SEATS"),
("vcr_coupon", "VCR_COUPON"),
("ice_data", "ICE_DATA"),
]
| true | true |
f7350d119267fcf4c70bb61c659559abce247fcd | 4,441 | py | Python | FastStyleTransfer/utils.py | ericlearning/style-transfer | f387515b4ffe441c4677400a65b9e7fdb50c979f | [
"MIT"
] | 1 | 2019-05-29T03:34:37.000Z | 2019-05-29T03:34:37.000Z | FastStyleTransfer/utils.py | ericlearning/style-transfer | f387515b4ffe441c4677400a65b9e7fdb50c979f | [
"MIT"
] | null | null | null | FastStyleTransfer/utils.py | ericlearning/style-transfer | f387515b4ffe441c4677400a65b9e7fdb50c979f | [
"MIT"
] | null | null | null | import os
import glob
import torch
import pandas as pd
import seaborn as sn
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from torch.optim.lr_scheduler import _LRScheduler
from sklearn.metrics import confusion_matrix
from PIL import Image
def set_lr(optimizer, lrs):
if(len(lrs) == 1):
for param in optimizer.param_groups:
param['lr'] = lrs[0]
else:
for i, param in enumerate(optimizer.param_groups):
param['lr'] = lrs[i]
def set_base_lr(optimizer, lrs):
if(len(lrs) == 1):
for param in optimizer.param_groups:
param['initial_lr'] = lrs[0]
else:
for i, param in enumerate(optimizer.param_groups):
param['initial_lr'] = lrs[i]
def get_lr(optimizer):
optim_param_groups = optimizer.param_groups
if(len(optim_param_groups) == 1):
return optim_param_groups[0]['lr']
else:
lrs = []
for param in optim_param_groups:
lrs.append(param['lr'])
return lrs
def get_children_groups(model_children, param_places):
cur_place = 0
children_groups = []
for param_place in param_places:
children_groups.append(model_children[cur_place:param_place])
cur_place = param_place
return children_groups
def get_params(children):
params_use_grad = []
for child in children:
for param in child.parameters():
if(param.requires_grad == True):
params_use_grad.append(param)
return params_use_grad
def get_optimizer(model, lrs, param_places):
model_children = list(model.children())
# only 1 learning rate
if(len(lrs) == 1):
# from the model's childrens, only get the parameters that use grad
param_use_grad = get_params(model_children)
# set an Adam optimizer with the params that use grad, and the lr
optimizer = optim.Adam(param_use_grad, lrs[0])
# multiple learning rates
else:
# from the param_places, get chunks of children from model_children
# children_groups is a list, and each item will be a list of children
children_groups = get_children_groups(model_children, param_places)
# from children_groups, get each of its children group's grad using params
# param_groups_use_grad is a list, and each item will be a list of params that use grad
param_groups_use_grad = []
for children_group in children_groups:
param_group_use_grad = get_params(children_group)
param_groups_use_grad.append(param_group_use_grad)
# zip param_groups_use_grad together with lrs
# in order to feed in the corresponding lr to a given param_group
param_groups_use_grad_with_lrs = zip(param_groups_use_grad, lrs)
optimizer = optim.Adam([{'params' : p, 'lr' : l}
for p, l in param_groups_use_grad_with_lrs])
return optimizer
def freeze_until(model, idx):
for i, child in enumerate(model.children()):
if(i <= idx):
for param in child.parameters():
param.requires_grad = False
else:
for param in child.parameters():
param.requires_grad = True
def histogram_sizes(img_dir, h_lim = None, w_lim = None):
hs, ws = [], []
for file in glob.iglob(os.path.join(img_dir, '**/*.*')):
try:
with Image.open(file) as im:
h, w = im.size
hs.append(h)
ws.append(w)
except:
print('Not an Image file')
if(h_lim is not None and w_lim is not None):
hs = [h for h in hs if h<h_lim]
ws = [w for w in ws if w<w_lim]
plt.figure('Height')
plt.hist(hs)
plt.figure('Width')
plt.hist(ws)
plt.show()
return hs, ws
def plot_confusion_matrix(model, dl, names, classes_count, device, figsize):
true_label = []
predicted_label = []
for batch in dl:
(images, labels) = batch
y_real = list(labels.data.cpu().numpy())
y_pred = list(torch.argmax(model(images.to(device)), dim=1).data.cpu().numpy())
true_label.extend(y_real)
predicted_label.extend(y_pred)
cm = confusion_matrix(true_label, predicted_label)
names_with_cnt = [str(name) + ' : ' + str(cnt) for name, cnt in zip(names, classes_count)]
df = pd.DataFrame(cm, index = names_with_cnt, columns = names_with_cnt)
plt.figure(figsize = figsize)
ax = plt.subplot(111)
sn.heatmap(df, annot = True, ax = ax, fmt='g')
plt.show()
def freeze_cur_bn(module):
classname = module.__class__.__name__
if(classname.find('BatchNorm') != -1):
module.eval()
def freeze_bn(model):
model.apply(freeze_cur_bn)
class Normalize(nn.Module):
def __init__(self, mean, variance):
super(Normalize, self).__init__()
self.mean = mean.view(-1, 1, 1)
self.variance = variance.view(-1, 1, 1)
def forward(self, x):
return (x - mean) / variance | 27.41358 | 91 | 0.72236 | import os
import glob
import torch
import pandas as pd
import seaborn as sn
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from torch.optim.lr_scheduler import _LRScheduler
from sklearn.metrics import confusion_matrix
from PIL import Image
def set_lr(optimizer, lrs):
if(len(lrs) == 1):
for param in optimizer.param_groups:
param['lr'] = lrs[0]
else:
for i, param in enumerate(optimizer.param_groups):
param['lr'] = lrs[i]
def set_base_lr(optimizer, lrs):
if(len(lrs) == 1):
for param in optimizer.param_groups:
param['initial_lr'] = lrs[0]
else:
for i, param in enumerate(optimizer.param_groups):
param['initial_lr'] = lrs[i]
def get_lr(optimizer):
optim_param_groups = optimizer.param_groups
if(len(optim_param_groups) == 1):
return optim_param_groups[0]['lr']
else:
lrs = []
for param in optim_param_groups:
lrs.append(param['lr'])
return lrs
def get_children_groups(model_children, param_places):
cur_place = 0
children_groups = []
for param_place in param_places:
children_groups.append(model_children[cur_place:param_place])
cur_place = param_place
return children_groups
def get_params(children):
params_use_grad = []
for child in children:
for param in child.parameters():
if(param.requires_grad == True):
params_use_grad.append(param)
return params_use_grad
def get_optimizer(model, lrs, param_places):
model_children = list(model.children())
if(len(lrs) == 1):
param_use_grad = get_params(model_children)
# set an Adam optimizer with the params that use grad, and the lr
optimizer = optim.Adam(param_use_grad, lrs[0])
# multiple learning rates
else:
# from the param_places, get chunks of children from model_children
# children_groups is a list, and each item will be a list of children
children_groups = get_children_groups(model_children, param_places)
# from children_groups, get each of its children group's grad using params
param_groups_use_grad = []
for children_group in children_groups:
param_group_use_grad = get_params(children_group)
param_groups_use_grad.append(param_group_use_grad)
param_groups_use_grad_with_lrs = zip(param_groups_use_grad, lrs)
optimizer = optim.Adam([{'params' : p, 'lr' : l}
for p, l in param_groups_use_grad_with_lrs])
return optimizer
def freeze_until(model, idx):
for i, child in enumerate(model.children()):
if(i <= idx):
for param in child.parameters():
param.requires_grad = False
else:
for param in child.parameters():
param.requires_grad = True
def histogram_sizes(img_dir, h_lim = None, w_lim = None):
hs, ws = [], []
for file in glob.iglob(os.path.join(img_dir, '**/*.*')):
try:
with Image.open(file) as im:
h, w = im.size
hs.append(h)
ws.append(w)
except:
print('Not an Image file')
if(h_lim is not None and w_lim is not None):
hs = [h for h in hs if h<h_lim]
ws = [w for w in ws if w<w_lim]
plt.figure('Height')
plt.hist(hs)
plt.figure('Width')
plt.hist(ws)
plt.show()
return hs, ws
def plot_confusion_matrix(model, dl, names, classes_count, device, figsize):
true_label = []
predicted_label = []
for batch in dl:
(images, labels) = batch
y_real = list(labels.data.cpu().numpy())
y_pred = list(torch.argmax(model(images.to(device)), dim=1).data.cpu().numpy())
true_label.extend(y_real)
predicted_label.extend(y_pred)
cm = confusion_matrix(true_label, predicted_label)
names_with_cnt = [str(name) + ' : ' + str(cnt) for name, cnt in zip(names, classes_count)]
df = pd.DataFrame(cm, index = names_with_cnt, columns = names_with_cnt)
plt.figure(figsize = figsize)
ax = plt.subplot(111)
sn.heatmap(df, annot = True, ax = ax, fmt='g')
plt.show()
def freeze_cur_bn(module):
classname = module.__class__.__name__
if(classname.find('BatchNorm') != -1):
module.eval()
def freeze_bn(model):
model.apply(freeze_cur_bn)
class Normalize(nn.Module):
def __init__(self, mean, variance):
super(Normalize, self).__init__()
self.mean = mean.view(-1, 1, 1)
self.variance = variance.view(-1, 1, 1)
def forward(self, x):
return (x - mean) / variance | true | true |
f7350d55eb323216182562c19c57df83c0186aa7 | 12,830 | py | Python | proteome_count/search_proteome.py | ProteinsWebTeam/interpro-pfam-curation-tools | 41df7e4ad390ace8c68f137e582b6bd2bfe4b23a | [
"MIT"
] | null | null | null | proteome_count/search_proteome.py | ProteinsWebTeam/interpro-pfam-curation-tools | 41df7e4ad390ace8c68f137e582b6bd2bfe4b23a | [
"MIT"
] | null | null | null | proteome_count/search_proteome.py | ProteinsWebTeam/interpro-pfam-curation-tools | 41df7e4ad390ace8c68f137e582b6bd2bfe4b23a | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""
@author T. Paysan-Lafosse
@brief This script searches unintegrated proteins for a given organism or taxid
in InterPro signatures, if they are not found in signatures, they are clustered based on UniRef clusters
@arguments [-u USER]: database user
[-p PASSWORD]: database password for the user
[-s SCHEMA]: database schema to use
[-o ORGANISM or -t TAXID]: organism (scientific name) or taxid to look for
[-f FOLDER]: output folder
"""
import argparse
import os
import sys
from pathlib import Path
import requests
from utils import proteome
class protein_pipeline(proteome):
def __init__(self):
super().__init__()
self.uniref50 = dict()
self.clusters = dict()
def get_integrated(self, protein_list):
"""
Search integrated proteins
Args:
protein_list: list containing proteins to search for
Yields:
list_integrated: list of proteins integrated in InterPro entries
"""
print("Searching for integrated proteins")
uniprot_chunks = list(self.chunks(protein_list, 1000))
list_integrated = set()
for chunk in uniprot_chunks:
protein_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT P.PROTEIN_AC \
FROM INTERPRO.MV_ENTRY2PROTEIN E2P \
JOIN INTERPRO.PROTEIN P ON E2P.PROTEIN_AC=P.PROTEIN_AC \
WHERE E2P.PROTEIN_AC IN ({','.join(protein_list_quote)})"
self.cursor.execute(request)
list_integrated.update(set([row[0] for row in self.cursor]))
return list_integrated
def get_count_signature_taxid(self, list_signatures):
"""
Search for protein counts for a list of InterPro signatures
Args:
list_signatures: list of InterPro signatures
Yields:
count_prot_signatures: dictionnary with signature as key and protein_count as value
"""
count_prot_signatures = dict()
signature_chunks = list(self.chunks(list(list_signatures), 1000))
for chunk in signature_chunks:
signature_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT M2P.METHOD_AC,COUNT(P.PROTEIN_AC) \
FROM INTERPRO.PROTEIN P \
JOIN INTERPRO.MV_METHOD2PROTEIN M2P ON P.PROTEIN_AC = M2P.PROTEIN_AC \
JOIN INTERPRO.ETAXI ET ON P.TAX_ID = ET.TAX_ID \
WHERE ET.TAX_ID=:1 AND M2P.METHOD_AC IN ({','.join(signature_list_quote)}) \
GROUP BY M2P.METHOD_AC"
self.cursor.execute(request, (self.tax_id,))
count_prot_signatures.update({row[0]: row[1] for row in self.cursor})
return count_prot_signatures
def get_accession_in_signature(self, folder, protein_list):
"""
Search for proteins found in InterPro signatures but not integrated
Write the results in a csv file with each row corresponding to a protein/signature pair (protein,dbcode,organism,signature,total_prot_count,count_proteome,comment)
Args:
folder: output directory
protein_list: list containing proteins to search for
Yields:
list of proteins found in unintegrated signatures
"""
print("Searching for unintegrated proteins in signature")
uniprot_chunks = list(self.chunks(list(protein_list), 1000))
list_signatures = set()
list_proteins_with_signature = dict()
nbprot_in_signature = 0
for chunk in uniprot_chunks:
# if comments needed in future: C.VALUE, LISTAGG(MC.VALUE, '; ') WITHIN GROUP (ORDER BY MC.VALUE) COMMENTS
protein_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT P.PROTEIN_AC, P.DBCODE, ET.SCIENTIFIC_NAME, M2P.METHOD_AC, MM.PROTEIN_COUNT, \
( SELECT COUNT(*) FROM INTERPRO.MATCH M \
INNER JOIN INTERPRO.PROTEIN P ON M.PROTEIN_AC = P.PROTEIN_AC \
WHERE P.DBCODE = 'S' and M.METHOD_AC = M2P.METHOD_AC ) as SWISS_COUNT \
FROM INTERPRO.PROTEIN P \
JOIN INTERPRO.ETAXI ET ON P.TAX_ID = ET.TAX_ID \
JOIN INTERPRO.MV_METHOD2PROTEIN M2P ON P.PROTEIN_AC = M2P.PROTEIN_AC \
JOIN INTERPRO.MV_METHOD_MATCH MM ON MM.METHOD_AC = M2P.METHOD_AC \
LEFT JOIN INTERPRO.METHOD_COMMENT MC ON MC.METHOD_AC = M2P.METHOD_AC \
WHERE P.PROTEIN_AC IN ({','.join(protein_list_quote)})\
AND M2P.METHOD_AC not like '%:SF%' \
AND MC.VALUE IS NULL \
GROUP BY P.PROTEIN_AC, P.DBCODE, ET.SCIENTIFIC_NAME, M2P.METHOD_AC, MM.PROTEIN_COUNT"
# print(request)
self.cursor.execute(request)
results = self.cursor.fetchall()
nbprot_in_signature += len(results)
for row in results:
protein = row[0]
signature = row[3]
list_signatures.add(signature)
if signature not in list_proteins_with_signature:
list_proteins_with_signature[signature] = [
protein,
row[1],
row[2],
row[4],
row[5],
]
else:
pass
# `try:
# list_proteins_with_signature[protein][signature] = [
# row[1],
# row[2],
# row[4],
# row[5],
# ]
# except KeyError:
# list_proteins_with_signature[protein] = dict()
# list_proteins_with_signature[protein][signature] = [
# row[1],
# row[2],
# row[4],
# row[5],
# ]`
# count_prot_signatures = self.get_count_signature_taxid(list_signatures)
unintegrated_file = os.path.join(
folder, f"unintegrated_prot_in_signatures_{self.tax_id}.csv"
)
with open(unintegrated_file, "w") as outf:
outf.write("protein,dbcode,organism,signature,total_prot_count,count_swiss_prot\n")
# outf.write(
# "protein,dbcode,organism,signature,total_prot_count,count_swiss_prot,count_proteome\n"
# )
# for protein, signatures in list_proteins_with_signature.items():
# for signature, values in signatures.items():
# if values[3] != 0:
# outf.write(
# f"{protein},{values[0]},{values[1]},{signature},{values[2]},{values[3]}\n"
# )
for signature, proteins in list_proteins_with_signature.items():
if proteins[4] != 0:
outf.write(
f"{proteins[0]},{proteins[1]},{proteins[2]},{signature},{proteins[3]},{proteins[4]}\n"
)
# outf.write(
# f"{protein},{values[0]},{values[1]},{signature},{values[2]},{values[3]},{count_prot_signatures[signature]}\n"
# )
# return list_proteins_with_signature.keys()
return nbprot_in_signature
def search_uniprotid_in_uniref(self, uniprotid):
"""
Search if the uniprotid is already referenced in the uniref50 dictionnary to avoid querying UniProt multiple times
Args:
uniprotid: UniProt accession to search for
Yields:
uniref: UniRef cluster found
False: uniprotid not found
"""
for uniref, accessions in self.uniref50.items():
if uniprotid in accessions:
return uniref
return False
def get_cluster(self, protein_list):
"""
Search clustering information in UniRef from UniProt for a given UniProt accession
Args:
None
"""
print("Clustering UniProt accessions unintegrated with no signature using Uniref50")
for uniprotid in protein_list:
uniref_cluster = self.search_uniprotid_in_uniref(uniprotid)
if uniref_cluster:
self.clusters.setdefault(uniref_cluster, []).append(uniprotid)
else:
url = f"https://www.uniprot.org/uniref/?query={uniprotid}&fil=identity:0.5&columns=id,members&format=tab"
response = requests.get(url)
data = response.text
if response.status_code != 200:
print(f"FAILURE::{url}")
uniref_all = data.split("\n")[1:]
for uniref_info in uniref_all:
if uniref_info:
name, accessions = uniref_info.split("\t")
accessions = accessions.split("; ")
if name not in self.uniref50:
self.uniref50[name] = accessions
self.clusters.setdefault(name, []).append(uniprotid)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--user", help="username for database connection", required=True)
parser.add_argument("-p", "--password", help="password for database connection", required=True)
parser.add_argument("-s", "--schema", help="database schema to connect to", required=True)
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-o", "--organism", help="Scientific name of the organism to get the conservation score for"
)
group.add_argument(
"-t", "--taxid", help="Taxid of the organism to get the conservation score for"
)
parser.add_argument(
"-f", "--folder", help="folder directory to write output files", required=True
)
args = parser.parse_args()
# initialising
protein_pip = protein_pipeline()
protein_pip.getConnection(args.user, args.password, args.schema)
# create output directory if it doesn't exist
Path(args.folder).mkdir(parents=True, exist_ok=True)
# initialise tax_id value
if args.organism:
print(f"Searching taxid for {args.organism}")
protein_pip.search_taxid(args.organism)
elif args.taxid:
protein_pip.tax_id = args.taxid
else:
print("Error no organism or taxid provided")
sys.exit(1)
# search the proteome
print(f"Searching list of proteins for {protein_pip.tax_id}")
protein_list = protein_pip.get_proteins()
# search for integrated proteins
list_integrated = protein_pip.get_integrated(protein_list)
print(f"UniProt accessions integrated: {len(list_integrated)}")
# list of unintegrated proteins
unintegrated_subset = set(protein_list).difference(list_integrated)
print(f"UniProt accessions unintegrated: {len(unintegrated_subset)}")
# search for proteins in unintegrated InterPro signatures
list_in_signature = protein_pip.get_accession_in_signature(args.folder, unintegrated_subset)
# list_in_signature = set(
# protein_pip.get_accession_in_signature(args.folder, unintegrated_subset)
# )
print(f"UniProt accession unintegrated matching signature: {list_in_signature}")
# list of unintegrated proteins not found in InterPro signatures
# list_not_in_signature = unintegrated_subset.difference(list_in_signature)
list_not_in_signature = len(unintegrated_subset) - list_in_signature
print(f"UniProt accession unintegrated with no signature: {list_not_in_signature}")
# close database connection
protein_pip.connection.close()
# # clustering unintegrated proteins
# protein_pip.get_cluster(list_not_in_signature)
# print(f"{len(protein_pip.clusters)} clusters found")
# # write clustering results in file
# cluster_file = os.path.join(args.folder, f"clusters_proteome_taxid_{protein_pip.tax_id}.csv")
# with open(cluster_file, "w") as f:
# f.write("cluster_id,accessions\n")
# for cluster, accessions in protein_pip.clusters.items():
# f.write(f"{cluster},{'; '.join(accessions)}\n")
# uniref50_cluster_file = os.path.join(
# args.folder, f"all_clusters_taxid_{protein_pip.tax_id}.csv"
# )
# with open(uniref50_cluster_file, "w") as f:
# f.write("cluster_id,count proteome matches,accessions\n")
# for cluster, accessions in protein_pip.uniref50.items():
# f.write(f"{cluster},{len(protein_pip.clusters[cluster])},{'; '.join(accessions)}\n")
| 40.345912 | 171 | 0.60304 |
import argparse
import os
import sys
from pathlib import Path
import requests
from utils import proteome
class protein_pipeline(proteome):
def __init__(self):
super().__init__()
self.uniref50 = dict()
self.clusters = dict()
def get_integrated(self, protein_list):
print("Searching for integrated proteins")
uniprot_chunks = list(self.chunks(protein_list, 1000))
list_integrated = set()
for chunk in uniprot_chunks:
protein_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT P.PROTEIN_AC \
FROM INTERPRO.MV_ENTRY2PROTEIN E2P \
JOIN INTERPRO.PROTEIN P ON E2P.PROTEIN_AC=P.PROTEIN_AC \
WHERE E2P.PROTEIN_AC IN ({','.join(protein_list_quote)})"
self.cursor.execute(request)
list_integrated.update(set([row[0] for row in self.cursor]))
return list_integrated
def get_count_signature_taxid(self, list_signatures):
count_prot_signatures = dict()
signature_chunks = list(self.chunks(list(list_signatures), 1000))
for chunk in signature_chunks:
signature_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT M2P.METHOD_AC,COUNT(P.PROTEIN_AC) \
FROM INTERPRO.PROTEIN P \
JOIN INTERPRO.MV_METHOD2PROTEIN M2P ON P.PROTEIN_AC = M2P.PROTEIN_AC \
JOIN INTERPRO.ETAXI ET ON P.TAX_ID = ET.TAX_ID \
WHERE ET.TAX_ID=:1 AND M2P.METHOD_AC IN ({','.join(signature_list_quote)}) \
GROUP BY M2P.METHOD_AC"
self.cursor.execute(request, (self.tax_id,))
count_prot_signatures.update({row[0]: row[1] for row in self.cursor})
return count_prot_signatures
def get_accession_in_signature(self, folder, protein_list):
print("Searching for unintegrated proteins in signature")
uniprot_chunks = list(self.chunks(list(protein_list), 1000))
list_signatures = set()
list_proteins_with_signature = dict()
nbprot_in_signature = 0
for chunk in uniprot_chunks:
protein_list_quote = [f"'{row}'" for row in chunk]
request = f"SELECT P.PROTEIN_AC, P.DBCODE, ET.SCIENTIFIC_NAME, M2P.METHOD_AC, MM.PROTEIN_COUNT, \
( SELECT COUNT(*) FROM INTERPRO.MATCH M \
INNER JOIN INTERPRO.PROTEIN P ON M.PROTEIN_AC = P.PROTEIN_AC \
WHERE P.DBCODE = 'S' and M.METHOD_AC = M2P.METHOD_AC ) as SWISS_COUNT \
FROM INTERPRO.PROTEIN P \
JOIN INTERPRO.ETAXI ET ON P.TAX_ID = ET.TAX_ID \
JOIN INTERPRO.MV_METHOD2PROTEIN M2P ON P.PROTEIN_AC = M2P.PROTEIN_AC \
JOIN INTERPRO.MV_METHOD_MATCH MM ON MM.METHOD_AC = M2P.METHOD_AC \
LEFT JOIN INTERPRO.METHOD_COMMENT MC ON MC.METHOD_AC = M2P.METHOD_AC \
WHERE P.PROTEIN_AC IN ({','.join(protein_list_quote)})\
AND M2P.METHOD_AC not like '%:SF%' \
AND MC.VALUE IS NULL \
GROUP BY P.PROTEIN_AC, P.DBCODE, ET.SCIENTIFIC_NAME, M2P.METHOD_AC, MM.PROTEIN_COUNT"
self.cursor.execute(request)
results = self.cursor.fetchall()
nbprot_in_signature += len(results)
for row in results:
protein = row[0]
signature = row[3]
list_signatures.add(signature)
if signature not in list_proteins_with_signature:
list_proteins_with_signature[signature] = [
protein,
row[1],
row[2],
row[4],
row[5],
]
else:
pass
unintegrated_file = os.path.join(
folder, f"unintegrated_prot_in_signatures_{self.tax_id}.csv"
)
with open(unintegrated_file, "w") as outf:
outf.write("protein,dbcode,organism,signature,total_prot_count,count_swiss_prot\n")
for signature, proteins in list_proteins_with_signature.items():
if proteins[4] != 0:
outf.write(
f"{proteins[0]},{proteins[1]},{proteins[2]},{signature},{proteins[3]},{proteins[4]}\n"
)
return nbprot_in_signature
def search_uniprotid_in_uniref(self, uniprotid):
for uniref, accessions in self.uniref50.items():
if uniprotid in accessions:
return uniref
return False
def get_cluster(self, protein_list):
print("Clustering UniProt accessions unintegrated with no signature using Uniref50")
for uniprotid in protein_list:
uniref_cluster = self.search_uniprotid_in_uniref(uniprotid)
if uniref_cluster:
self.clusters.setdefault(uniref_cluster, []).append(uniprotid)
else:
url = f"https://www.uniprot.org/uniref/?query={uniprotid}&fil=identity:0.5&columns=id,members&format=tab"
response = requests.get(url)
data = response.text
if response.status_code != 200:
print(f"FAILURE::{url}")
uniref_all = data.split("\n")[1:]
for uniref_info in uniref_all:
if uniref_info:
name, accessions = uniref_info.split("\t")
accessions = accessions.split("; ")
if name not in self.uniref50:
self.uniref50[name] = accessions
self.clusters.setdefault(name, []).append(uniprotid)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--user", help="username for database connection", required=True)
parser.add_argument("-p", "--password", help="password for database connection", required=True)
parser.add_argument("-s", "--schema", help="database schema to connect to", required=True)
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-o", "--organism", help="Scientific name of the organism to get the conservation score for"
)
group.add_argument(
"-t", "--taxid", help="Taxid of the organism to get the conservation score for"
)
parser.add_argument(
"-f", "--folder", help="folder directory to write output files", required=True
)
args = parser.parse_args()
protein_pip = protein_pipeline()
protein_pip.getConnection(args.user, args.password, args.schema)
Path(args.folder).mkdir(parents=True, exist_ok=True)
# initialise tax_id value
if args.organism:
print(f"Searching taxid for {args.organism}")
protein_pip.search_taxid(args.organism)
elif args.taxid:
protein_pip.tax_id = args.taxid
else:
print("Error no organism or taxid provided")
sys.exit(1)
# search the proteome
print(f"Searching list of proteins for {protein_pip.tax_id}")
protein_list = protein_pip.get_proteins()
# search for integrated proteins
list_integrated = protein_pip.get_integrated(protein_list)
print(f"UniProt accessions integrated: {len(list_integrated)}")
# list of unintegrated proteins
unintegrated_subset = set(protein_list).difference(list_integrated)
print(f"UniProt accessions unintegrated: {len(unintegrated_subset)}")
# search for proteins in unintegrated InterPro signatures
list_in_signature = protein_pip.get_accession_in_signature(args.folder, unintegrated_subset)
# list_in_signature = set(
# protein_pip.get_accession_in_signature(args.folder, unintegrated_subset)
# )
print(f"UniProt accession unintegrated matching signature: {list_in_signature}")
# list of unintegrated proteins not found in InterPro signatures
# list_not_in_signature = unintegrated_subset.difference(list_in_signature)
list_not_in_signature = len(unintegrated_subset) - list_in_signature
print(f"UniProt accession unintegrated with no signature: {list_not_in_signature}")
# close database connection
protein_pip.connection.close()
# # clustering unintegrated proteins
# protein_pip.get_cluster(list_not_in_signature)
# print(f"{len(protein_pip.clusters)} clusters found")
# # write clustering results in file
# cluster_file = os.path.join(args.folder, f"clusters_proteome_taxid_{protein_pip.tax_id}.csv")
# with open(cluster_file, "w") as f:
# f.write("cluster_id,accessions\n")
# for cluster, accessions in protein_pip.clusters.items():
# f.write(f"{cluster},{'; '.join(accessions)}\n")
# uniref50_cluster_file = os.path.join(
# args.folder, f"all_clusters_taxid_{protein_pip.tax_id}.csv"
# )
# with open(uniref50_cluster_file, "w") as f:
# f.write("cluster_id,count proteome matches,accessions\n")
# for cluster, accessions in protein_pip.uniref50.items():
# f.write(f"{cluster},{len(protein_pip.clusters[cluster])},{'; '.join(accessions)}\n")
| true | true |
f7350ddc02325562c929b5462b8300547041db9d | 1,789 | py | Python | scraper_test.py | svennickel/itunes-app-scraper | 14b857bd40a237825cb6bd93be388e6bcd083c01 | [
"MIT"
] | 10 | 2020-08-12T06:47:04.000Z | 2021-12-04T03:06:19.000Z | scraper_test.py | svennickel/itunes-app-scraper | 14b857bd40a237825cb6bd93be388e6bcd083c01 | [
"MIT"
] | 5 | 2020-11-19T07:53:19.000Z | 2022-03-16T15:06:37.000Z | scraper_test.py | iaine/itunes-app-scraper | de60c8c0b369e78d4c87a0cb11284b2ef576c090 | [
"MIT"
] | 11 | 2020-08-12T06:47:31.000Z | 2022-03-19T23:36:18.000Z | from itunes_app_scraper.scraper import AppStoreScraper
from itunes_app_scraper.util import AppStoreException, AppStoreCollections, AppStoreCategories, AppStoreUtils
import json
import pytest
import os
def test_term_no_exception():
scraper = AppStoreScraper()
results = scraper.get_app_ids_for_query("mindful", country="gb", lang="en")
assert len(results) > 0
def test_no_term_gives_exception():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match = "No term was given"):
scraper.get_app_ids_for_query("", country="gb", lang="en")
def test_no_invalid_id_gives_exception():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match = "No app found with ID 872"):
scraper.get_app_details('872')
def test_no_invalid_id_in_multiple_is_empty():
scraper = AppStoreScraper()
assert len(list(scraper.get_multiple_app_details(['872']))) == 0
def test_no_invalid_id_in_multiple_writes_log():
scraper = AppStoreScraper()
scraper.get_multiple_app_details(['872'])
assert os.path.exists("nl_log.txt")
fh = open('nl_log.txt')
assert "No app found with ID 872" in fh.read()
fh.close()
os.remove('nl_log.txt')
def test_log_file_write_message():
scraper = AppStoreScraper()
scraper._log_error("gb","test")
assert os.path.exists("gb_log.txt")
fh = open('gb_log.txt')
assert "test" in fh.read()
fh.close()
os.remove('gb_log.txt')
def test_country_code_does_exist():
scraper = AppStoreScraper()
assert scraper.get_store_id_for_country('gb') == 143444
def test_country_code_does_not_exist():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match="Country code not found for XZ"):
scraper.get_store_id_for_country('xz') | 34.403846 | 109 | 0.731694 | from itunes_app_scraper.scraper import AppStoreScraper
from itunes_app_scraper.util import AppStoreException, AppStoreCollections, AppStoreCategories, AppStoreUtils
import json
import pytest
import os
def test_term_no_exception():
scraper = AppStoreScraper()
results = scraper.get_app_ids_for_query("mindful", country="gb", lang="en")
assert len(results) > 0
def test_no_term_gives_exception():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match = "No term was given"):
scraper.get_app_ids_for_query("", country="gb", lang="en")
def test_no_invalid_id_gives_exception():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match = "No app found with ID 872"):
scraper.get_app_details('872')
def test_no_invalid_id_in_multiple_is_empty():
scraper = AppStoreScraper()
assert len(list(scraper.get_multiple_app_details(['872']))) == 0
def test_no_invalid_id_in_multiple_writes_log():
scraper = AppStoreScraper()
scraper.get_multiple_app_details(['872'])
assert os.path.exists("nl_log.txt")
fh = open('nl_log.txt')
assert "No app found with ID 872" in fh.read()
fh.close()
os.remove('nl_log.txt')
def test_log_file_write_message():
scraper = AppStoreScraper()
scraper._log_error("gb","test")
assert os.path.exists("gb_log.txt")
fh = open('gb_log.txt')
assert "test" in fh.read()
fh.close()
os.remove('gb_log.txt')
def test_country_code_does_exist():
scraper = AppStoreScraper()
assert scraper.get_store_id_for_country('gb') == 143444
def test_country_code_does_not_exist():
scraper = AppStoreScraper()
with pytest.raises(AppStoreException, match="Country code not found for XZ"):
scraper.get_store_id_for_country('xz') | true | true |
f7350e589e89dd1abef4cd35a2f99c754386137d | 4,190 | py | Python | tests/DatastoreTests.py | erasmospunk/pychohistory | 71b2bd35578fea9bb6603c017a41c036644b3d85 | [
"MIT"
] | 1 | 2015-02-12T02:05:20.000Z | 2015-02-12T02:05:20.000Z | tests/DatastoreTests.py | erasmospunk/pychohistory | 71b2bd35578fea9bb6603c017a41c036644b3d85 | [
"MIT"
] | null | null | null | tests/DatastoreTests.py | erasmospunk/pychohistory | 71b2bd35578fea9bb6603c017a41c036644b3d85 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
import logging
import unittest
import os
import shutil
from modules import Datastore, GoogSuggestMe
__author__ = 'Giannis Dzegoutanis'
TEMP_FOLDER = u'tmp'
TEST_DATABASE = u'testdatastore.tmp'
TEST_QUERY = u'test'
module_name = u'test_module'
module_signature = (module_name,
(u'timestamp', u'key', u'value'),
(datetime.utcnow(), u'keyword', 1244000))
TEST_SRC = u'test_src_name'
TEST_SRC_PARAMS = dict(param1=u'test text', param2=13.37)
datastore_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), TEMP_FOLDER)
test_db_path = os.path.join(datastore_path, TEST_DATABASE)
class TestDatabase(unittest.TestCase):
    """Integration tests for the Datastore.Bucket key/value store.

    Each test opens a bucket backed by ``test_db_path``; ``tearDown``
    removes the whole scratch directory so tests stay independent.
    """
    def setUp(self):
        self.log = logging.getLogger()
    def tearDown(self):
        # rmtree with ignore_errors=True already swallows removal errors
        # (including a missing directory); the original bare
        # ``except: pass`` around it was redundant and hid real bugs.
        shutil.rmtree(datastore_path, ignore_errors=True)
    def test_bucket_open(self):
        """A bucket can be opened as a context manager and is not None."""
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            self.assertIsNotNone(bucket, u'Bucket is None')
    def test_update_version(self):
        """update_version persists the new version; raw IoBucket open fails."""
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            new_ver = bucket.version() + 1
            bucket.update_version(new_ver)
            self.assertEqual(bucket.version(), new_ver, u'Failed updating to v%d' % new_ver)
            # Opening the low-level IoBucket while the Bucket holds the file
            # is expected to fail.
            with self.assertRaises(Exception):
                Datastore.IoBucket(test_db_path)
    def test_create_invalid_src(self):
        """Signatures with unsupported types or mismatched arity are rejected."""
        with self.assertRaises(Exception):
            bad_sig = (TEST_SRC, (u'param1', u'param2'), (u'text', [u'array is unsupported']))
            with Datastore.Bucket(test_db_path, bad_sig):
                pass
        with self.assertRaises(Exception):
            bad_sig = (TEST_SRC, (u'param1', u'param2'), (u'text', ))
            with Datastore.Bucket(test_db_path, bad_sig):
                pass
    def test_bucket_read_write(self):
        """insertmany stores every row; readall returns the same count."""
        now = datetime.utcnow()
        test_key_vals = [
            (now, "ideas to write about", 645000000),
            (now, "ideas to go", 2260000000),
            (now, "ideas to raise money", 106000000),
            (now, "ideas to ask someone to prom", 966000),
            (now, "ideas to ask a guy to prom", 378000),
            (now, "ideas to build in minecraft", 7710000),
            (now, "ideas unlimited", 217000000),
            (now, "ideas united", 1530000000),
            (now, "ideas ucla", 10700000),
            (now, "ideas unlimited pepsi", 7190000),
            (now, "ideas unlimited seminars", 1800000),
            (now, "ideas unbound", 4310000),
            (now, "ideas unlimited llc", 68000000),
            (now, "ideas unlimited memphis", 1650000),
            (now, "ideas uthscsa", 133000),
            (now, "ideas ucsb", 609000),
            (now, "ideas vs ideals", 7920000),
            (now, "ideas valentines day", 123000000),
            (now, "ideas valentines coupons", 2480000)
        ]
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            bucket.insertmany(test_key_vals)
        # Re-open to verify the rows were actually persisted to disk.
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            self.assertEqual(len(bucket.readall()), len(test_key_vals))
    def test_bucket_read_write_single(self):
        """Two single-row inserts persist as exactly two rows."""
        now = datetime.utcnow()
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            bucket.insert((now, "single", "2200"))
            bucket.insert((now, "test", "1200"))
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            self.assertEqual(len(bucket.readall()), 2)
    def test_bucket_duplicates_read_write(self):
        """Duplicate rows are stored verbatim, not deduplicated."""
        now = datetime.utcnow()
        test_key_vals = [
            (now, "ideas to build in minecraft", 7710000),
            (now, "ideas unlimited", 217000000),
            (now, "ideas to build in minecraft", 7710000),
            (now, "ideas unlimited", 217000000),
            (now, "ideas to build in minecraft", 7710000),
            (now, "ideas unlimited", 217000000)
        ]
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            bucket.insertmany(test_key_vals)
        with Datastore.Bucket(test_db_path, module_signature) as bucket:
            self.assertEqual(len(bucket.readall()), len(test_key_vals))
| 32.230769 | 88 | 0.670883 |
from datetime import datetime
import logging
import unittest
import os
import shutil
from modules import Datastore, GoogSuggestMe
__author__ = 'Giannis Dzegoutanis'
TEMP_FOLDER = u'tmp'
TEST_DATABASE = u'testdatastore.tmp'
TEST_QUERY = u'test'
module_name = u'test_module'
module_signature = (module_name,
(u'timestamp', u'key', u'value'),
(datetime.utcnow(), u'keyword', 1244000))
TEST_SRC = u'test_src_name'
TEST_SRC_PARAMS = dict(param1=u'test text', param2=13.37)
datastore_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), TEMP_FOLDER)
test_db_path = os.path.join(datastore_path, TEST_DATABASE)
class TestDatabase(unittest.TestCase):
def setUp(self):
self.log = logging.getLogger()
def tearDown(self):
try:
shutil.rmtree(datastore_path, True)
except:
pass
def test_bucket_open(self):
with Datastore.Bucket(test_db_path, module_signature) as bucket:
self.assertNotEqual(bucket, None, u'Bucket is None')
def test_update_version(self):
with Datastore.Bucket(test_db_path, module_signature) as bucket:
new_ver = bucket.version() + 1
bucket.update_version(new_ver)
self.assertEqual(bucket.version(), new_ver, u'Failed updating to v%d' % new_ver)
with self.assertRaises(Exception):
Datastore.IoBucket(test_db_path)
def test_create_invalid_src(self):
with self.assertRaises(Exception):
bad_sig = (TEST_SRC, (u'param1', u'param2'), (u'text', [u'array is unsupported']))
with Datastore.Bucket(test_db_path, bad_sig):
pass
with self.assertRaises(Exception):
bad_sig = (TEST_SRC, (u'param1', u'param2'), (u'text', ))
with Datastore.Bucket(test_db_path, bad_sig):
pass
def test_bucket_read_write(self):
now = datetime.utcnow()
test_key_vals = [
(now, "ideas to write about", 645000000),
(now, "ideas to go", 2260000000),
(now, "ideas to raise money", 106000000),
(now, "ideas to ask someone to prom", 966000),
(now, "ideas to ask a guy to prom", 378000),
(now, "ideas to build in minecraft", 7710000),
(now, "ideas unlimited", 217000000),
(now, "ideas united", 1530000000),
(now, "ideas ucla", 10700000),
(now, "ideas unlimited pepsi", 7190000),
(now, "ideas unlimited seminars", 1800000),
(now, "ideas unbound", 4310000),
(now, "ideas unlimited llc", 68000000),
(now, "ideas unlimited memphis", 1650000),
(now, "ideas uthscsa", 133000),
(now, "ideas ucsb", 609000),
(now, "ideas vs ideals", 7920000),
(now, "ideas valentines day", 123000000),
(now, "ideas valentines coupons", 2480000)
]
with Datastore.Bucket(test_db_path, module_signature) as bucket:
bucket.insertmany(test_key_vals)
with Datastore.Bucket(test_db_path, module_signature) as bucket:
self.assertEqual(len(bucket.readall()), len(test_key_vals))
def test_bucket_read_write_single(self):
now = datetime.utcnow()
with Datastore.Bucket(test_db_path, module_signature) as bucket:
bucket.insert((now, "single", "2200"))
bucket.insert((now, "test", "1200"))
with Datastore.Bucket(test_db_path, module_signature) as bucket:
self.assertEqual(len(bucket.readall()), 2)
def test_bucket_duplicates_read_write(self):
now = datetime.utcnow()
test_key_vals = [
(now, "ideas to build in minecraft", 7710000),
(now, "ideas unlimited", 217000000),
(now, "ideas to build in minecraft", 7710000),
(now, "ideas unlimited", 217000000),
(now, "ideas to build in minecraft", 7710000),
(now, "ideas unlimited", 217000000)
]
with Datastore.Bucket(test_db_path, module_signature) as bucket:
bucket.insertmany(test_key_vals)
with Datastore.Bucket(test_db_path, module_signature) as bucket:
self.assertEqual(len(bucket.readall()), len(test_key_vals))
| true | true |
f7350e66a3ad1c6722e41a833fd5c41b04536a0d | 31 | py | Python | fcsgg/__init__.py | liuhengyue/fcsgg | 826c6e194270461a66ca5d048cb67f1ccf7ef387 | [
"MIT"
] | 9 | 2022-01-17T03:27:46.000Z | 2022-03-26T09:35:59.000Z | fcsgg/__init__.py | liuhengyue/fcsgg | 826c6e194270461a66ca5d048cb67f1ccf7ef387 | [
"MIT"
] | 3 | 2022-01-26T03:28:18.000Z | 2022-02-03T04:19:29.000Z | fcsgg/__init__.py | liuhengyue/fcsgg | 826c6e194270461a66ca5d048cb67f1ccf7ef387 | [
"MIT"
] | null | null | null | from .modeling import meta_arch | 31 | 31 | 0.870968 | from .modeling import meta_arch | true | true |
f7350e908e65de4d2b166767ea22ffebc316a256 | 670 | py | Python | p007.py | pbgnz/project-euler | 8ab4549101f7a3ac2a478eb6193b2b67920c8102 | [
"MIT"
] | null | null | null | p007.py | pbgnz/project-euler | 8ab4549101f7a3ac2a478eb6193b2b67920c8102 | [
"MIT"
] | 1 | 2021-04-13T12:47:07.000Z | 2021-04-14T20:27:04.000Z | p007.py | escobot/project-euler | 8ab4549101f7a3ac2a478eb6193b2b67920c8102 | [
"MIT"
] | null | null | null | # 10001st prime
# Problem 7
# By listing the first six prime numbers: 2, 3, 5, 7, 11, and 13,
# we can see that the 6th prime is 13.
# What is the 10 001st prime number?
# as of solving this challenge the largest prime number known is 24,862,048
def is_prime(n):
    """Return True if ``n`` is a prime number.

    Trial division up to sqrt(n), skipping even candidates.
    Fix: numbers below 2 are not prime — the original implementation
    wrongly reported 0 and 1 (and all negatives) as prime.
    """
    if n < 2:
        return False
    if n % 2 == 0:
        # 2 is the only even prime.
        return n == 2
    i = 3
    while i * i <= n:
        if n % i == 0:
            return False
        i += 2
    return True
def solution(nth):
    """Return the nth prime number, 1-indexed (solution(6) == 13).

    Fix: the original looped forever for negative ``nth`` and returned
    the non-prime 1 for ``nth == 0``; both now raise ValueError.
    """
    if nth < 1:
        raise ValueError('nth must be >= 1, got %r' % (nth,))
    count = 1  # candidates start at 2 (incremented before testing)
    found = 0
    while found < nth:
        count += 1
        if is_prime(count):
            found += 1
    return count
def main():
    """Print the answer to Project Euler problem 7 (the 10001st prime)."""
    print(solution(10001))


if __name__ == '__main__':
    main()
| 18.611111 | 75 | 0.570149 |
def is_prime(n):
i = 2
while i*i <= n:
if n % i == 0:
return False
i = i + 1
return True
def solution(nth):
count = 1
prime = 0
while prime != nth:
count = count + 1
if is_prime(count):
prime = prime + 1
return count
def main():
ans = solution(10001)
print(ans)
if __name__ == '__main__':
main()
| true | true |
f7350f87740d3150f4219cff8545678e6ec86cf2 | 537 | py | Python | randoms_products/main.py | pechuga22/services-kiero | 73ab9ac847fdbf3970e40d3d15098be38af924ca | [
"MIT"
] | null | null | null | randoms_products/main.py | pechuga22/services-kiero | 73ab9ac847fdbf3970e40d3d15098be38af924ca | [
"MIT"
] | null | null | null | randoms_products/main.py | pechuga22/services-kiero | 73ab9ac847fdbf3970e40d3d15098be38af924ca | [
"MIT"
] | null | null | null | from flask import Flask, json
import pyodbc
# NOTE(review): database host and credentials are hard-coded and the
# connection is opened at import time; move them to configuration/env
# variables and connect lazily — confirm with the deployment owner first.
conn = pyodbc.connect('DRIVER={PostgreSQL Unicode};SERVER=10.4.28.183;DATABASE=postgres;UID=postgres;PWD=developer2020')
# Flask application object; routes below are registered against it.
app = Flask(__name__)
def random_products(conn):
    """Return all root categories as (categoryid, name) rows.

    Fix: the original leaked its cursor (never closed); the cursor is
    now always closed in a ``finally`` block.

    NOTE(review): despite the name, the query is not random — it selects
    every category whose parentid is NULL; confirm intent with the API
    consumers before renaming.
    """
    cursor = conn.cursor()
    try:
        cursor.execute('select categoryid, name from categories c where parentid is null')
        rows = cursor.fetchall()
        # Ends the implicit transaction opened by the SELECT (pyodbc
        # autocommit is off by default); kept from the original code.
        cursor.commit()
        return rows
    finally:
        cursor.close()
@app.route('/')
def hello():
    """Root endpoint: render the root-category rows as plain text."""
    return str(random_products(conn))


if __name__ == '__main__':
    # Flask development server; use a WSGI server in production.
    app.run()
| 21.48 | 120 | 0.692737 | from flask import Flask, json
import pyodbc
conn = pyodbc.connect('DRIVER={PostgreSQL Unicode};SERVER=10.4.28.183;DATABASE=postgres;UID=postgres;PWD=developer2020')
app = Flask(__name__)
def random_products(conn):
cnxn = conn.cursor()
cnxn.execute('select categoryid, name from categories c where parentid is null')
rows = cnxn.fetchall()
cnxn.commit()
return rows
@app.route('/')
def hello():
show_data = random_products(conn)
return str(show_data)
if __name__ == '__main__':
app.run()
| true | true |
f7350f891078f1e15ec28d2c4dc4c9392e366653 | 125 | py | Python | libalgopy/common/enums/algorithm_type.py | PotapenkoOleg/libalgopy | ac625c0f874918c1967218c302c6fcb200db0271 | [
"MIT"
] | null | null | null | libalgopy/common/enums/algorithm_type.py | PotapenkoOleg/libalgopy | ac625c0f874918c1967218c302c6fcb200db0271 | [
"MIT"
] | null | null | null | libalgopy/common/enums/algorithm_type.py | PotapenkoOleg/libalgopy | ac625c0f874918c1967218c302c6fcb200db0271 | [
"MIT"
] | null | null | null | from enum import Enum
class AlgorithmType(Enum):
    """Enumerates the implementation strategy of an algorithm."""

    ITERATIVE = 1  # loop-based implementation
    RECURSIVE = 2  # self-calling implementation
if __name__ == '__main__':
    # The module only defines the enum; nothing to run from the CLI.
    pass
| 11.363636 | 26 | 0.656 | from enum import Enum
class AlgorithmType(Enum):
ITERATIVE = 1
RECURSIVE = 2
if __name__ == '__main__':
pass
| true | true |
f7350f8b72cb5a0b0bbab3f89585cc7c5b06e0fb | 8,012 | py | Python | configs/dcn/cascade_mask_rcnn_dconv_c3-c5_r50_fpn_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | configs/dcn/cascade_mask_rcnn_dconv_c3-c5_r50_fpn_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | configs/dcn/cascade_mask_rcnn_dconv_c3-c5_r50_fpn_1x.py | mrsempress/mmdetection | cb650560c97a2fe56a9b369a1abc8ec17e06583a | [
"Apache-2.0"
] | null | null | null | # model settings
model = dict(
type='CascadeRCNN',
num_stages=3,
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch',
dcn=dict(
type='DCN', deformable_groups=1, fallback_on_stride=False),
stage_with_dcn=(False, True, True, True)),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='FCNMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=81,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100,
mask_thr_binary=0.5))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
# optimizer: SGD settings used by the mmdetection runner
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
# gradient clipping to stabilise early training
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy: linear warmup, then step decay at epochs 8 and 11
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,
    step=[8, 11])
checkpoint_config = dict(save_every_n_steps=2500, max_to_keep=1)
# yapf:disable
log_config = dict(interval=100)
# yapf:enable
# runtime settings ("1x" schedule = 12 epochs)
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'cascade_mask_rcnn_dconv_c3-c5_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| 31.920319 | 78 | 0.552047 |
model = dict(
type='CascadeRCNN',
num_stages=3,
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch',
dcn=dict(
type='DCN', deformable_groups=1, fallback_on_stride=False),
stage_with_dcn=(False, True, True, True)),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='FCNMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=81,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100,
mask_thr_binary=0.5))
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(save_every_n_steps=2500, max_to_keep=1)
log_config = dict(interval=100)
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = 'cascade_mask_rcnn_dconv_c3-c5_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| true | true |
f735108b6b0b94d4460d9293d1b15b476e83e94e | 4,419 | py | Python | Complement.py | Eltoney/Boolean-Algebra | 0607f7efb69c82fdf9508c101e05202cfc7a1a21 | [
"MIT"
] | 1 | 2020-02-29T22:03:09.000Z | 2020-02-29T22:03:09.000Z | Complement.py | Eltoney/Boolean-Algebra | 0607f7efb69c82fdf9508c101e05202cfc7a1a21 | [
"MIT"
] | null | null | null | Complement.py | Eltoney/Boolean-Algebra | 0607f7efb69c82fdf9508c101e05202cfc7a1a21 | [
"MIT"
] | null | null | null | def comp_9th(n):
"""takes a decimal number (n) in string fomat
returns the 9's complement of the number"""
n=str(n)
result=[]
for digit in n:
a=str(9-int(digit))
result.append(a)
return "".join(result)
def comp_1st(n):
    """Return the one's complement (bit-wise flip) of a binary number.

    ``n`` may be an int or a string of 0/1 digits; the result is a
    string of the same width.
    """
    return "".join('1' if bit == '0' else '0' for bit in str(n))
def comp_2nd(n):
    """Return the two's complement of a binary number (same width).

    Uses the standard shortcut: everything from the lowest '1' down is
    kept, every bit above it is flipped.  Fix: an all-zero input is its
    own two's complement (2^k mod 2^k == 0) — the original mangled that
    case (e.g. "0000" -> "1110").
    """
    n = str(n)
    pivot = n.rfind('1')
    if pivot == -1:
        # No '1' anywhere: the complement of zero is zero.
        return n
    flipped = "".join('1' if bit == '0' else '0' for bit in n[:pivot])
    return flipped + n[pivot:]
def comp_10th(n):
    """Return the ten's complement of a decimal number (same width).

    Shortcut: trailing zeros are kept, the lowest non-zero digit d
    becomes 10 - d, and every digit above it is nine's-complemented.
    Fix: an all-zero input is its own ten's complement (10^k mod
    10^k == 0) — the original raised ValueError (``int('')``) there.
    """
    n = str(n)
    pivot = len(n) - 1
    while pivot >= 0 and n[pivot] == '0':
        pivot -= 1
    if pivot < 0:
        # All zeros: complement of zero is zero.
        return n
    nines = "".join(str(9 - int(d)) for d in n[:pivot])
    return nines + str(10 - int(n[pivot])) + n[pivot + 1:]
def decimalSub(m, n):
    """Return m - n (non-negative decimal inputs) via ten's complement.

    Operands are zero-padded to a common width ``req``; non-negative
    results keep that width, negative results get a leading '-'.

    Fixes over the original:
    * The intermediate sum is re-padded to ``req`` digits before being
      re-complemented — passing it through ``int()`` dropped leading
      zeros, e.g. decimalSub("01", "99") returned "-8" instead of "-98".
    * ``result[1:]`` wrongly removed a data digit whenever no end carry
      was produced; the carry is now discarded with ``[-req:]`` and the
      carry-free m - 0 case is short-circuited.
    """
    m = str(m)
    n = str(n)
    req = max(len(m), len(n))
    m = m.zfill(req)
    n = n.zfill(req)
    if int(n) == 0:
        # m - 0 == m; also avoids complementing an all-zero string.
        return m
    n_10th = int(comp_10th(n))
    # Width must be preserved for the complement shortcut to be valid.
    summation = str(int(m) + n_10th).zfill(req)
    if int(n) > int(m):
        # No end carry: the true difference is negative; re-complement.
        return "-" + comp_10th(summation)
    # m >= n > 0 guarantees an end carry (length req + 1); discard it.
    return summation[-req:]
def BinarySum(n, m):
    """Add two binary numbers given as 0/1 digit strings (or ints).

    Generalization: the original required both operands to have exactly
    the same length (anything else raised IndexError); operands are now
    zero-padded to a common width first.  Equal-length behavior is
    unchanged: the result keeps the common width plus a leading carry
    digit when one is produced.
    """
    x = str(n)
    y = str(m)
    width = max(len(x), len(y))
    x = x.zfill(width)
    y = y.zfill(width)
    carry = 0
    digits = []
    # Add least-significant digits first, ripple the carry upward.
    for a, b in zip(reversed(x), reversed(y)):
        total = int(a) + int(b) + carry
        digits.append(str(total % 2))
        carry = total // 2
    if carry:
        digits.append('1')
    return "".join(reversed(digits))
def binarySub(m, n):
    """Return m - n for binary digit strings, via two's complement.

    Operands are zero-padded to a common width ``req``; non-negative
    results keep that width, negative results get a leading '-'.

    Fix: the original always did ``result[1:]``, which removes a *data*
    bit whenever no end carry is produced — the carry-free m - 0 case is
    now short-circuited, and for non-zero n with m >= n a carry is
    mathematically guaranteed, so dropping the first character is safe.
    """
    m = str(m)
    n = str(n)
    req = max(len(m), len(n))
    m = m.zfill(req)
    n = n.zfill(req)
    if '1' not in n:
        # m - 0 == m; also avoids two's-complementing an all-zero
        # string, which the original corrupted.
        return m
    n_2nd = comp_2nd(n)
    summation = BinarySum(m, n_2nd)
    # Equal-width 0/1 strings compare identically as decimal ints,
    # so this correctly tests n > m in binary.
    if int(n) > int(m):
        # No end carry: the difference is negative; re-complement.
        return "-" + comp_2nd(summation)
    # m >= n > 0 guarantees an end carry; discard it.
    return summation[1:]
# Dispatch table for the menu below; index-aligned with operation_names.
operations = [comp_1st, comp_2nd, comp_9th, comp_10th, decimalSub, binarySub]
# Fix: a comma was missing between the last two literals, so Python
# concatenated them into one string — the list had only 5 entries,
# mislabelling option 4 and raising IndexError for option 5.
operation_names = ["The first complement of the binary number:",
                   "The second complement of the binary number:",
                   "The ninth complement of the decimal number:",
                   "The tenth complement of the decimal number:",
                   "The difference between the two decimal numbers",
                   "The difference between the two binary numbers"]
# Interactive driver: asks how many operations to perform, then shows the
# menu and dispatches each choice through the tables above.
# Fix: corrected user-facing typos in the prompts ("complment",
# "reqired", "differnece"); logic is unchanged.
print("This program deals with the complement and operations involving them")
n = int(input("Enter number of operations: "))
for num in range(n):
    print("Select the number of operation: ")
    print("0 to find the 1st complement of a binary number")
    print("1 to find the 2nd complement of a binary number")
    print("2 to find the 9th complement of a decimal number")
    print("3 to find the 10th complement of a decimal number")
    print("4 to find the difference between two decimal numbers")
    print("5 to find the difference between two binary numbers")
    m = int(input("Enter the number of the required operation: "))
    if m == 0 or m == 1 or m == 2 or m == 3:
        # Single-operand operations (complements).
        x = input("Enter the required number to convert: ")
        print(operation_names[m], x, "is", operations[m](x))
    elif m == 4 or m == 5:
        # Two-operand operations (subtractions).
        x = input("Enter the first number: ")
        y = input("Enter the second number: ")
        print(operation_names[m], x, "and", y, "is", operations[m](x, y))
    else:
        print("Wrong number of operation selected")
| 30.475862 | 76 | 0.577959 | def comp_9th(n):
n=str(n)
result=[]
for digit in n:
a=str(9-int(digit))
result.append(a)
return "".join(result)
def comp_1st(n):
n=str(n)
result=[]
for digit in n:
a=str(1-int(digit))
result.append(a)
return "".join(result)
def comp_2nd(n):
n=str(n)
count = 0
for digit in n[::-1]:
if digit == '1':
break
count += 1
change=n[:len(n)-(count+1)]
unchange=n[len(n)-(count+1):]
final=comp_1st(change)
return final+unchange
def comp_10th(n):
n=str(n)
count = 0
for digit in n[::-1]:
if digit != '0':
break
count += 1
change=n[:len(n)-(count+1)]
special=n[len(n)-(count+1):len(n)-count]
var=str(10-int(special))
unchange=n[len(n)-count:]
final=comp_9th(change)
return final+var+unchange
def decimalSub(m,n):
m=str(m)
n=str(n)
req=max(len(m),len(n))
while len(m) < req:
m="0"+m
while len(n) < req:
n="0"+n
if int(n)> int(m):
n_10th=int(comp_10th(str(n)))
summation=int(m)+n_10th
result=comp_10th(str(summation))
return "-"+result
else:
n_10th=int(comp_10th(str(n)))
summation=int(m)+n_10th
result=str(summation)
return result[1:]
def BinarySum(n,m):
result=[]
carry=0
x=str(n)[::-1]
y=str(m)[::-1]
for i in range(len(x)):
a=int(x[i])+int(y[i])+carry
if a==1 or a==0:
result.append(str(a))
carry=0
elif a==2:
result.append("0")
carry=1
elif a==3:
result.append("1")
carry=1
if carry==1:
result.append("1")
result.reverse()
return "".join(result)
def binarySub(m,n):
m=str(m)
n=str(n)
req=max(len(m),len(n))
while len(m) < req:
m="0"+m
while len(n) < req:
n="0"+n
if int(n)> int(m):
n_2nd=comp_2nd(str(n))
summation=BinarySum(m,n_2nd)
result=comp_2nd(str(summation))
return "-"+result
else:
n_2nd=comp_2nd(str(n))
summation=BinarySum(m,n_2nd)
result=str(summation)
return result[1:]
operations=[comp_1st,comp_2nd,comp_9th,comp_10th,decimalSub,binarySub]
operation_names=["The first complement of the binary number:",
"The second complement of the binary number:",
"The ninth complement of the decimal number:",
"The tenth complement of the decimal number:",
"The difference between the two decimal numbers"
"The difference between the two binary numbers"]
print("This program deals with the complment and operations involving them")
n=int(input("Enter number of operations: "))
for num in range(n):
print("Select the number of operation: ")
print("0 to find the 1st complement of a binary number")
print("1 to find the 2nd complement of a binary number")
print("2 to find the 9th complement of a decimal number")
print("3 to find the 10th complement of a decimal number")
print("4 to find the differnece between two decimal numbers")
print("5 to find the differnece between two binary numbers")
m=int(input("Enter the number of the required operation: "))
if m==0 or m==1 or m==2 or m==3:
x=input("Enter the reqired number to convert: ")
print(operation_names[m],x,"is",operations[m](x))
elif m==4 or m==5:
x=input("Enter the first number: ")
y=input("Enter the second number: ")
print(operation_names[m],x,"and",y,"is",operations[m](x,y))
else:
print("Wrong number of operation selected")
| true | true |
f73510985a441443661c53bc3bebf97b0cc9972d | 5,249 | py | Python | scripts/maintenance/make_i18n_dict.py | anisayari/pywikibot | af470904ce62cedae63d285ca15146e9168a0ee6 | [
"MIT"
] | 3 | 2019-02-14T13:59:34.000Z | 2021-11-08T09:23:03.000Z | scripts/maintenance/make_i18n_dict.py | anisayari/pywikibot | af470904ce62cedae63d285ca15146e9168a0ee6 | [
"MIT"
] | null | null | null | scripts/maintenance/make_i18n_dict.py | anisayari/pywikibot | af470904ce62cedae63d285ca15146e9168a0ee6 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Generate a i18n file from a given script.
run IDLE at topmost level:
>>> import pwb
>>> from scripts.maintenance.make_i18n_dict import i18nBot
>>> bot = i18nBot('<scriptname>', '<msg dict>')
>>> bot.run()
If you have more than one message dictionary, give all these names to the bot:
>>> bot = i18nBot('<scriptname>', '<msg dict1>', '<msg dict2>', '<msg dict3>')
If you want to rename the message index, use keyword arguments. These may
be mixed with preceding positional arguments:
>>> bot = i18nBot('<scriptname>', '<msg dict1>', the_other_msg='<msg dict2>')
If you have the messages as instance constants you may call the bot as follows:
>>> bot = i18nBot('<scriptname>.<class instance>', '<msg dict1>', '<msg dict2>')
It's also possible to make json files too by using to_json method after
instantiating the bot. It also calls C{bot.run()} to create the dictionaries:
>>> bot.to_json()
"""
#
# (C) xqt, 2013-2018
# (C) Pywikibot team, 2013-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, print_function, unicode_literals
import codecs
import json
import os
from pywikibot import config
class i18nBot(object):  # noqa: N801

    """Extract message dictionaries from a script and emit i18n files.

    The bot imports ``scripts.<scriptname>`` (optionally drilling into a
    class instance given as ``<scriptname>.<attr>``), collects the named
    message dicts into ``self.dict`` keyed by language code, and can
    print them as a Python literal or write per-language JSON files.
    """

    def __init__(self, script, *args, **kwargs):
        """Initializer.

        @param script: dotted path below ``scripts`` (e.g. 'foo' or
            'foo.instance') whose attributes hold the message dicts.
        @param args: names of message dicts; the i18n key keeps the name.
        @param kwargs: new_name='old_name' pairs; underscores in the new
            name are rewritten to hyphens for the i18n key.
        """
        modules = script.split('.')
        self.scriptname = modules[0]
        # __import__ returns the top-level 'scripts' package; walk the
        # dotted path manually to reach the target object.
        self.script = __import__('scripts.' + self.scriptname)
        for m in modules:
            self.script = getattr(self.script, m)
        self.messages = {}
        # setup the message dict: maps old attribute name -> i18n key part
        for msg in args:
            if hasattr(self.script, msg):
                self.messages[msg] = msg
            else:
                print('message {0} not found'.format(msg))
        for new, old in kwargs.items():
            self.messages[old] = new.replace('_', '-')
        # language code -> {label: translated text}, filled by read()
        self.dict = {}

    def print_all(self):
        """Pretty print the collected messages as i18n file content.

        'en' is printed first, then 'qqq' (message documentation), then
        the remaining language codes alphabetically.
        """
        if not self.dict:
            print('No messages found, read them first.\n'
                  'Use "run" or "to_json" methods')
            return
        keys = list(self.dict.keys())
        keys.remove('qqq')
        keys.sort()
        keys.insert(0, 'qqq')
        if 'en' in keys:
            keys.remove('en')
            keys.insert(0, 'en')
        print("# -*- coding: utf-8 -*-")
        print("msg = {")
        for code in keys:
            print("    '%s': {" % code)
            for msg in sorted(self.messages.values()):
                label = "%s-%s" % (self.scriptname, msg)
                if label in self.dict[code]:
                    print("        '%s': u'%s'," % (label,
                                                    self.dict[code][label]))
            print("    },")
        print("};")

    def read(self, oldmsg, newmsg=None):
        """Read a single message dict from the source script.

        Stores each language's text under '<scriptname>-<newmsg>' in
        ``self.dict`` and synthesizes a 'qqq' documentation entry.
        The legacy 'commons' pseudo-language is skipped.
        """
        msg = getattr(self.script, oldmsg)
        keys = list(msg.keys())
        keys.append('qqq')
        if newmsg is None:
            newmsg = oldmsg
        for code in keys:
            label = "%s-%s" % (self.scriptname, newmsg)
            if code == 'qqq':
                if code not in self.dict:
                    self.dict[code] = {}
                self.dict[code][label] = (
                    u'Edit summary for message %s of %s report'
                    % (newmsg, self.scriptname))
            elif code != 'commons':
                if code not in self.dict:
                    self.dict[code] = {}
                self.dict[code][label] = msg[code]
        if 'en' not in keys:
            print('WARNING: "en" key missing for message %s' % newmsg)

    def run(self, quiet=False):
        """
        Run the bot, read the messages from source and print the dict.

        @param quiet: print the result if False
        @type quiet: bool
        """
        for item in self.messages.items():
            self.read(*item)
        if not quiet:
            self.print_all()

    def to_json(self, quiet=True):
        """
        Run the bot and create/update per-language JSON files.

        Existing files under scripts/i18n/<scriptname>/ are merged with
        (not replaced by) the collected messages, preserving their
        '@metadata' block; indentation is converted to tabs.

        @param quiet: Print the result if False
        @type quiet: bool
        """
        indent = 4
        if not self.dict:
            self.run(quiet)
        json_dir = os.path.join(
            config.base_dir, 'scripts/i18n', self.scriptname)
        if not os.path.exists(json_dir):
            os.makedirs(json_dir)
        for lang in self.dict:
            file_name = os.path.join(json_dir, '%s.json' % lang)
            if os.path.isfile(file_name):
                # Merge into the existing translation file.
                with codecs.open(file_name, 'r', 'utf-8') as json_file:
                    new_dict = json.loads(json_file.read())
            else:
                new_dict = {}
            new_dict['@metadata'] = new_dict.get('@metadata', {'authors': []})
            with codecs.open(file_name, 'w', 'utf-8') as json_file:
                new_dict.update(self.dict[lang])
                s = json.dumps(new_dict, ensure_ascii=False, sort_keys=True,
                               indent=indent, separators=(',', ': '))
                # translatewiki-style files use tab indentation.
                s = s.replace(' ' * indent, '\t')
                json_file.write(s)
if __name__ == '__main__':
print(__doc__)
| 32.602484 | 80 | 0.536293 |
from __future__ import absolute_import, print_function, unicode_literals
import codecs
import json
import os
from pywikibot import config
class i18nBot(object):
def __init__(self, script, *args, **kwargs):
modules = script.split('.')
self.scriptname = modules[0]
self.script = __import__('scripts.' + self.scriptname)
for m in modules:
self.script = getattr(self.script, m)
self.messages = {}
for msg in args:
if hasattr(self.script, msg):
self.messages[msg] = msg
else:
print('message {0} not found'.format(msg))
for new, old in kwargs.items():
self.messages[old] = new.replace('_', '-')
self.dict = {}
def print_all(self):
if not self.dict:
print('No messages found, read them first.\n'
'Use "run" or "to_json" methods')
return
keys = list(self.dict.keys())
keys.remove('qqq')
keys.sort()
keys.insert(0, 'qqq')
if 'en' in keys:
keys.remove('en')
keys.insert(0, 'en')
print("# -*- coding: utf-8 -*-")
print("msg = {")
for code in keys:
print(" '%s': {" % code)
for msg in sorted(self.messages.values()):
label = "%s-%s" % (self.scriptname, msg)
if label in self.dict[code]:
print(" '%s': u'%s'," % (label,
self.dict[code][label]))
print(" },")
print("};")
def read(self, oldmsg, newmsg=None):
msg = getattr(self.script, oldmsg)
keys = list(msg.keys())
keys.append('qqq')
if newmsg is None:
newmsg = oldmsg
for code in keys:
label = "%s-%s" % (self.scriptname, newmsg)
if code == 'qqq':
if code not in self.dict:
self.dict[code] = {}
self.dict[code][label] = (
u'Edit summary for message %s of %s report'
% (newmsg, self.scriptname))
elif code != 'commons':
if code not in self.dict:
self.dict[code] = {}
self.dict[code][label] = msg[code]
if 'en' not in keys:
print('WARNING: "en" key missing for message %s' % newmsg)
def run(self, quiet=False):
for item in self.messages.items():
self.read(*item)
if not quiet:
self.print_all()
def to_json(self, quiet=True):
indent = 4
if not self.dict:
self.run(quiet)
json_dir = os.path.join(
config.base_dir, 'scripts/i18n', self.scriptname)
if not os.path.exists(json_dir):
os.makedirs(json_dir)
for lang in self.dict:
file_name = os.path.join(json_dir, '%s.json' % lang)
if os.path.isfile(file_name):
with codecs.open(file_name, 'r', 'utf-8') as json_file:
new_dict = json.loads(json_file.read())
else:
new_dict = {}
new_dict['@metadata'] = new_dict.get('@metadata', {'authors': []})
with codecs.open(file_name, 'w', 'utf-8') as json_file:
new_dict.update(self.dict[lang])
s = json.dumps(new_dict, ensure_ascii=False, sort_keys=True,
indent=indent, separators=(',', ': '))
s = s.replace(' ' * indent, '\t')
json_file.write(s)
if __name__ == '__main__':
print(__doc__)
| true | true |
f7351112411b3e19d17758b77c9516a3e645b2f5 | 1,454 | py | Python | app/views.py | rdelfin/zork-cortana | b1c8671502edb17417e79b0b9c6ee132b7769707 | [
"Apache-2.0"
] | null | null | null | app/views.py | rdelfin/zork-cortana | b1c8671502edb17417e79b0b9c6ee132b7769707 | [
"Apache-2.0"
] | null | null | null | app/views.py | rdelfin/zork-cortana | b1c8671502edb17417e79b0b9c6ee132b7769707 | [
"Apache-2.0"
] | null | null | null | from flask import render_template, send_from_directory, request, jsonify
from app import app
import hashlib, uuid
import game
from app import __config__ as config
def compare_password(password, correct_hash):
"""
Compares password with hash
"""
hashed_password = hashlib.sha512(password).hexdigest()
return hashed_password == correct_hash
@app.route('/index', methods=['POST'])
@app.route('/', methods=['POST'])
def index():
"""
Main webhook for responses to JSON objects
"""
json_obj = request.get_json()
if not "conversation_id" in json_obj:
return jsonify({"error": "400 Bad Request: No conversation ID field."}), 400
if not "X-Password" in request.headers:
return jsonify({"error": "400 Bad Request: No X-Password header field"}), 400
conv = json_obj["conversation_id"]
command = json_obj["command"] if "command" in json_obj else ""
password = request.headers.get('X-Password')
if not compare_password(password, config.hashed_password):
return jsonify({"error": "401 Unauthorized: Password is invalid"}), 401
if game.contains_conv(conv):
if command.strip() == "restart":
game.finish_conv(conv)
return jsonify({"response": game.create_conv(conv)})
else:
return jsonify({"response": game.execute_command_conv(conv, command)})
else:
return jsonify({"response": game.create_conv(conv)})
| 30.93617 | 85 | 0.670564 | from flask import render_template, send_from_directory, request, jsonify
from app import app
import hashlib, uuid
import game
from app import __config__ as config
def compare_password(password, correct_hash):
hashed_password = hashlib.sha512(password).hexdigest()
return hashed_password == correct_hash
@app.route('/index', methods=['POST'])
@app.route('/', methods=['POST'])
def index():
json_obj = request.get_json()
if not "conversation_id" in json_obj:
return jsonify({"error": "400 Bad Request: No conversation ID field."}), 400
if not "X-Password" in request.headers:
return jsonify({"error": "400 Bad Request: No X-Password header field"}), 400
conv = json_obj["conversation_id"]
command = json_obj["command"] if "command" in json_obj else ""
password = request.headers.get('X-Password')
if not compare_password(password, config.hashed_password):
return jsonify({"error": "401 Unauthorized: Password is invalid"}), 401
if game.contains_conv(conv):
if command.strip() == "restart":
game.finish_conv(conv)
return jsonify({"response": game.create_conv(conv)})
else:
return jsonify({"response": game.execute_command_conv(conv, command)})
else:
return jsonify({"response": game.create_conv(conv)})
| true | true |
f73511cdee490faea5847952fef23d5660145c4b | 3,756 | py | Python | project/app.py | civicmapper/flush-the-toilet | 94eea064156cff2729bb76da40484870f341e087 | [
"MIT"
] | null | null | null | project/app.py | civicmapper/flush-the-toilet | 94eea064156cff2729bb76da40484870f341e087 | [
"MIT"
] | 14 | 2018-09-06T20:03:17.000Z | 2022-02-12T02:55:44.000Z | project/app.py | civicmapper/flush-the-toilet | 94eea064156cff2729bb76da40484870f341e087 | [
"MIT"
] | 4 | 2018-04-10T18:44:36.000Z | 2019-08-14T19:16:49.000Z | #----------------------------------------------------------------------------#
# APP CONFIGURATION
#----------------------------------------------------------------------------#
# standard library imports
import os
import logging
from logging import Formatter, FileHandler
import json
# dependencies
import requests
from flask import Flask, render_template, request, make_response, session, jsonify
#import pdb
# config
app = Flask(__name__)
app.config.from_pyfile('config.py')
#----------------------------------------------------------------------------#
# Helper Functions & Wrappers
#----------------------------------------------------------------------------#
def get_ags_token(url,username,password,client,referer,session,token_name):
"""Requests and ArcGIS Server Token
session: pass flask session object in
token_name: string, used to store token in session
other params are ArcGIS Server params
"""
#if token_name not in session:
params = {
'username': username,
'password': password,
'client': client,
'referer': referer,
'expiration': 720,
'f': 'json',
}
response = requests.post(
url ,
# app.config['ROK_AUTH_URL'],
data=params
)
token = response.json()
session[token_name] = token
print("{0} token acquired: {1}".format(token_name, token))
return token
# else:
# print("Using existing {0} token: {1}".format(token_name, session[token_name]))
# return session[token_name]
def get_agol_token():
"""requests and returns an ArcGIS Token for the pre-registered application.
Client id and secrets are managed through the ArcGIS Developer's console.
"""
params = {
'client_id': app.config['ESRI_APP_CLIENT_ID'],
'client_secret': app.config['ESRI_APP_CLIENT_SECRET'],
'grant_type': "client_credentials"
}
request = requests.get(
'https://www.arcgis.com/sharing/oauth2/token',
params=params
)
token = request.json()
print("AGOL token acquired: {0}".format(token))
return token
#----------------------------------------------------------------------------#
# Controllers / Route Handlers
#----------------------------------------------------------------------------#
# ---------------------------------------------------
# pages (rendered from templates)
## map view
@app.route('/')
@app.route('/trp')
def main():
return render_template('pages/index.html')
@app.route('/generateToken/')
def token():
# get the token
t1 = get_ags_token(
url=app.config['ROK_AUTH_URL'],
username=app.config['ROK_USER'],
password=app.config['ROK_PW'],
client=app.config['ROK_CLIENT_TYPE'],
referer=app.config['ROK_REFERER_URL'],
session=session,
token_name='rsi_token'
)
# build the response
t = {"rsi_token": t1, "cmags_token": None}
r = make_response(jsonify(t), 200)
# add header to enable CORS
r.headers['Access-Control-Allow-Origin'] = '*'
return make_response(r)
# ------------------------------------------------
# Error Handling
## Error handler 500
@app.errorhandler(500)
def internal_error(error):
return render_template('errors/500.html'), 500
## Error handler 404
@app.errorhandler(404)
def not_found_error(error):
return render_template('errors/404.html'), 404
## Error Logging
if not app.debug:
file_handler = FileHandler('error.log')
file_handler.setFormatter(
Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
)
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('errors') | 30.536585 | 88 | 0.563099 |
import os
import logging
from logging import Formatter, FileHandler
import json
import requests
from flask import Flask, render_template, request, make_response, session, jsonify
app = Flask(__name__)
app.config.from_pyfile('config.py')
def get_ags_token(url,username,password,client,referer,session,token_name):
params = {
'username': username,
'password': password,
'client': client,
'referer': referer,
'expiration': 720,
'f': 'json',
}
response = requests.post(
url ,
data=params
)
token = response.json()
session[token_name] = token
print("{0} token acquired: {1}".format(token_name, token))
return token
def get_agol_token():
params = {
'client_id': app.config['ESRI_APP_CLIENT_ID'],
'client_secret': app.config['ESRI_APP_CLIENT_SECRET'],
'grant_type': "client_credentials"
}
request = requests.get(
'https://www.arcgis.com/sharing/oauth2/token',
params=params
)
token = request.json()
print("AGOL token acquired: {0}".format(token))
return token
e('/')
@app.route('/trp')
def main():
return render_template('pages/index.html')
@app.route('/generateToken/')
def token():
t1 = get_ags_token(
url=app.config['ROK_AUTH_URL'],
username=app.config['ROK_USER'],
password=app.config['ROK_PW'],
client=app.config['ROK_CLIENT_TYPE'],
referer=app.config['ROK_REFERER_URL'],
session=session,
token_name='rsi_token'
)
t = {"rsi_token": t1, "cmags_token": None}
r = make_response(jsonify(t), 200)
r.headers['Access-Control-Allow-Origin'] = '*'
return make_response(r)
500)
def internal_error(error):
return render_template('errors/500.html'), 500
404)
def not_found_error(error):
return render_template('errors/404.html'), 404
ug:
file_handler = FileHandler('error.log')
file_handler.setFormatter(
Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
)
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('errors') | true | true |
f735125056ef23768727c335713c992eb7fdb73d | 666 | py | Python | algorithms-implementation/07-next-permutation.py | palash24/algorithms-and-data-structures | 164be7d1a501a21af808673888964bbab36243a1 | [
"MIT"
] | 23 | 2018-11-06T03:54:00.000Z | 2022-03-14T13:30:40.000Z | algorithms-implementation/07-next-permutation.py | palash24/algorithms-and-data-structures | 164be7d1a501a21af808673888964bbab36243a1 | [
"MIT"
] | null | null | null | algorithms-implementation/07-next-permutation.py | palash24/algorithms-and-data-structures | 164be7d1a501a21af808673888964bbab36243a1 | [
"MIT"
] | 5 | 2019-05-24T16:56:45.000Z | 2022-03-10T17:29:10.000Z | # Next permutation
def nextPermutation(self, n: int) -> int:
digits = list(str(n))
i, j = len(digits)-2, len(digits)-1
# Find the first digit that is smaller than the digit next to it
while i >= 0 and digits[i] >= digits[i+1]:
i -= 1
# If not found, then all digits are in descending order
if i == -1: return -1
# Find the smallest digit on right side greatee than the found number
while digits[j] <= digits[i]:
j -= 1
# Swap
digits[i], digits[j] = digits[j], digits[i]
# Reverse
res = int("".join(digits[:i+1] + digits[i+1:][::-1]))
if res >= 2**31 or res == n: return -1
return res | 27.75 | 73 | 0.576577 |
def nextPermutation(self, n: int) -> int:
digits = list(str(n))
i, j = len(digits)-2, len(digits)-1
while i >= 0 and digits[i] >= digits[i+1]:
i -= 1
if i == -1: return -1
while digits[j] <= digits[i]:
j -= 1
digits[i], digits[j] = digits[j], digits[i]
res = int("".join(digits[:i+1] + digits[i+1:][::-1]))
if res >= 2**31 or res == n: return -1
return res | true | true |
f735125479877e8418887aaff7cdd661b3e358a6 | 1,385 | py | Python | awwards/migrations/0002_projects.py | RonaldKiprotich/Awwards-clone | ba5182a174c741c62c621f739653ef964d6d9a95 | [
"MIT"
] | null | null | null | awwards/migrations/0002_projects.py | RonaldKiprotich/Awwards-clone | ba5182a174c741c62c621f739653ef964d6d9a95 | [
"MIT"
] | null | null | null | awwards/migrations/0002_projects.py | RonaldKiprotich/Awwards-clone | ba5182a174c741c62c621f739653ef964d6d9a95 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.3 on 2020-11-28 15:05
import cloudinary.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('awwards', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Projects',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', cloudinary.models.CloudinaryField(max_length=255, verbose_name='image')),
('description', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('title', models.CharField(max_length=255)),
('link', models.URLField()),
('author', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('author_profile', models.ForeignKey(blank=True, default='1', on_delete=django.db.models.deletion.CASCADE, to='awwards.profile')),
],
options={
'db_table': 'project',
'ordering': ['-created_date'],
},
),
]
| 38.472222 | 146 | 0.615884 |
import cloudinary.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('awwards', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Projects',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', cloudinary.models.CloudinaryField(max_length=255, verbose_name='image')),
('description', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('title', models.CharField(max_length=255)),
('link', models.URLField()),
('author', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('author_profile', models.ForeignKey(blank=True, default='1', on_delete=django.db.models.deletion.CASCADE, to='awwards.profile')),
],
options={
'db_table': 'project',
'ordering': ['-created_date'],
},
),
]
| true | true |
f735140f1db1f3e9d4edfbb49f834603e20cc8dc | 2,097 | py | Python | server/api/socket/room.py | hktrpg/PlanarAlly | ec2db6d53a619a9629e40f1ed755b2ef97f128bd | [
"MIT"
] | null | null | null | server/api/socket/room.py | hktrpg/PlanarAlly | ec2db6d53a619a9629e40f1ed755b2ef97f128bd | [
"MIT"
] | 1 | 2020-11-28T05:00:28.000Z | 2020-11-28T05:00:28.000Z | server/api/socket/room.py | tom-vanbraband-sonarsource/PlanarAlly | 2a0c457148f344d2669f50958bcb7c53a49ee600 | [
"MIT"
] | null | null | null | import uuid
import auth
from api.socket.constants import GAME_NS
from app import app, sio
from models import PlayerRoom
from models.role import Role
from state.game import game_state
from utils import logger
@sio.on("Room.Info.InviteCode.Refresh", namespace=GAME_NS)
@auth.login_required(app, sio)
async def refresh_invite_code(sid: str):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to refresh the invitation code.")
return
pr.room.invitation_code = uuid.uuid4()
pr.room.save()
await sio.emit(
"Room.Info.InvitationCode.Set",
str(pr.room.invitation_code),
room=sid,
namespace=GAME_NS,
)
@sio.on("Room.Info.Players.Kick", namespace=GAME_NS)
@auth.login_required(app, sio)
async def kick_player(sid: str, player_id: int):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to refresh the invitation code.")
return
pr = PlayerRoom.get_or_none(player=player_id, room=pr.room)
if pr:
for psid in game_state.get_sids(player=pr.player, room=pr.room):
await sio.disconnect(psid, namespace=GAME_NS)
pr.delete_instance(True)
@sio.on("Room.Delete", namespace=GAME_NS)
@auth.login_required(app, sio)
async def delete_session(sid: str):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to REMOVE A SESSION.")
return
pr.room.delete_instance(True)
@sio.on("Room.Info.Set.Locked", namespace=GAME_NS)
@auth.login_required(app, sio)
async def set_locked_game_state(sid: str, is_locked: bool):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to set the locked game_state.")
return
pr.room.is_locked = is_locked
pr.room.save()
for psid, player in game_state.get_users(room=pr.room):
if player != pr.room.creator:
await sio.disconnect(psid, namespace=GAME_NS)
| 28.337838 | 85 | 0.688126 | import uuid
import auth
from api.socket.constants import GAME_NS
from app import app, sio
from models import PlayerRoom
from models.role import Role
from state.game import game_state
from utils import logger
@sio.on("Room.Info.InviteCode.Refresh", namespace=GAME_NS)
@auth.login_required(app, sio)
async def refresh_invite_code(sid: str):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to refresh the invitation code.")
return
pr.room.invitation_code = uuid.uuid4()
pr.room.save()
await sio.emit(
"Room.Info.InvitationCode.Set",
str(pr.room.invitation_code),
room=sid,
namespace=GAME_NS,
)
@sio.on("Room.Info.Players.Kick", namespace=GAME_NS)
@auth.login_required(app, sio)
async def kick_player(sid: str, player_id: int):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to refresh the invitation code.")
return
pr = PlayerRoom.get_or_none(player=player_id, room=pr.room)
if pr:
for psid in game_state.get_sids(player=pr.player, room=pr.room):
await sio.disconnect(psid, namespace=GAME_NS)
pr.delete_instance(True)
@sio.on("Room.Delete", namespace=GAME_NS)
@auth.login_required(app, sio)
async def delete_session(sid: str):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to REMOVE A SESSION.")
return
pr.room.delete_instance(True)
@sio.on("Room.Info.Set.Locked", namespace=GAME_NS)
@auth.login_required(app, sio)
async def set_locked_game_state(sid: str, is_locked: bool):
pr: PlayerRoom = game_state.get(sid)
if pr.role != Role.DM:
logger.warning(f"{pr.player.name} attempted to set the locked game_state.")
return
pr.room.is_locked = is_locked
pr.room.save()
for psid, player in game_state.get_users(room=pr.room):
if player != pr.room.creator:
await sio.disconnect(psid, namespace=GAME_NS)
| true | true |
f7351474b7727c1e9c5ecc3ec37ae3ed1a4c6fc3 | 1,003 | py | Python | coral/metrics/rolling.py | coralproject/atoll | 2b62b37d3a320480264c4a0242532aad99c338ec | [
"Apache-2.0"
] | 12 | 2016-01-09T17:47:05.000Z | 2022-02-09T18:09:41.000Z | coral/metrics/rolling.py | coralproject/atoll | 2b62b37d3a320480264c4a0242532aad99c338ec | [
"Apache-2.0"
] | 16 | 2016-01-05T15:49:31.000Z | 2016-08-04T20:59:15.000Z | coral/metrics/rolling.py | coralproject/atoll | 2b62b37d3a320480264c4a0242532aad99c338ec | [
"Apache-2.0"
] | 1 | 2016-04-06T16:00:32.000Z | 2016-04-06T16:00:32.000Z | def extract_history(input):
"""extract the past metrics"""
prev = input['prev']
prev['prev'] = True
return input['_id'], prev
def extract_update(input):
"""extract the update data, from which we compute new metrics"""
update = input['update']
update['_id'] = input['_id']
return update
def rolling_score(d1, d2, alpha=0.5):
"""computes rolling scores, decaying the past by alpha.
the past metrics are identified by the `prev` key.
any keys present in the update dict that are not in the past
dict are carried over."""
# figure out which dict is the previous metrics
if 'prev' in d1 and d1['prev']:
prev, update = d1, d2
else:
prev, update = d2, d1
del prev['prev']
new = {}
for k, v in prev.items():
if k in update:
new[k] = v + (alpha * (update[k] - v))
else:
new[k] = v
for k in set(update.keys()) - set(new.keys()):
new[k] = update[k]
return new
| 24.463415 | 68 | 0.582253 | def extract_history(input):
prev = input['prev']
prev['prev'] = True
return input['_id'], prev
def extract_update(input):
update = input['update']
update['_id'] = input['_id']
return update
def rolling_score(d1, d2, alpha=0.5):
if 'prev' in d1 and d1['prev']:
prev, update = d1, d2
else:
prev, update = d2, d1
del prev['prev']
new = {}
for k, v in prev.items():
if k in update:
new[k] = v + (alpha * (update[k] - v))
else:
new[k] = v
for k in set(update.keys()) - set(new.keys()):
new[k] = update[k]
return new
| true | true |
f73516171c98e7efa8128a56815f237e9da700a4 | 358 | py | Python | tag.py | shimayu22/ImageCodeGeneration | 9d9d06349818c9a2f65c31c9c28c454e48fae827 | [
"MIT"
] | null | null | null | tag.py | shimayu22/ImageCodeGeneration | 9d9d06349818c9a2f65c31c9c28c454e48fae827 | [
"MIT"
] | null | null | null | tag.py | shimayu22/ImageCodeGeneration | 9d9d06349818c9a2f65c31c9c28c454e48fae827 | [
"MIT"
] | null | null | null | import pyperclip
while True:
print("input URL(end:n)")
photo_url = input(">> ")
if photo_url == "n":
break
paste_code = "<span itemtype=\"http://schema.org/Photograph\" itemscope=\"itemscope\"><img class=\"magnifiable\" src=\"{}\" itemprop=\"image\"></span>".format(photo_url)
pyperclip.copy(paste_code)
print(paste_code)
| 25.571429 | 173 | 0.634078 | import pyperclip
while True:
print("input URL(end:n)")
photo_url = input(">> ")
if photo_url == "n":
break
paste_code = "<span itemtype=\"http://schema.org/Photograph\" itemscope=\"itemscope\"><img class=\"magnifiable\" src=\"{}\" itemprop=\"image\"></span>".format(photo_url)
pyperclip.copy(paste_code)
print(paste_code)
| true | true |
f7351619d814e5f964a3f9f454c3fbb1599c76d1 | 9,744 | py | Python | tests/__init__.py | murali-chevuri/cachetools | ab9e8af0d506759332a2d1a5ae9d36feae844fda | [
"MIT"
] | null | null | null | tests/__init__.py | murali-chevuri/cachetools | ab9e8af0d506759332a2d1a5ae9d36feae844fda | [
"MIT"
] | null | null | null | tests/__init__.py | murali-chevuri/cachetools | ab9e8af0d506759332a2d1a5ae9d36feae844fda | [
"MIT"
] | null | null | null | import unittest
class CacheTestMixin:
Cache = None
def test_defaults(self):
cache = self.Cache(maxsize=1)
self.assertEqual(0, len(cache))
self.assertEqual(1, cache.maxsize)
self.assertEqual(0, cache.currsize)
self.assertEqual(1, cache.getsizeof(None))
self.assertEqual(1, cache.getsizeof(""))
self.assertEqual(1, cache.getsizeof(0))
self.assertTrue(repr(cache).startswith(cache.__class__.__name__))
def test_insert(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(2, len(cache))
self.assertEqual(3, cache[3])
self.assertTrue(1 in cache or 2 in cache)
cache[4] = 4
self.assertEqual(2, len(cache))
self.assertEqual(4, cache[4])
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
def test_update(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache.update({1: "a", 2: "b"})
self.assertEqual(2, len(cache))
self.assertEqual("a", cache[1])
self.assertEqual("b", cache[2])
def test_delete(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
del cache[2]
self.assertEqual(1, len(cache))
self.assertEqual(1, cache[1])
self.assertNotIn(2, cache)
del cache[1]
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
with self.assertRaises(KeyError):
del cache[1]
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
def test_pop(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, cache.pop(2))
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.pop(1))
self.assertEqual(0, len(cache))
with self.assertRaises(KeyError):
cache.pop(2)
with self.assertRaises(KeyError):
cache.pop(1)
with self.assertRaises(KeyError):
cache.pop(0)
self.assertEqual(None, cache.pop(2, None))
self.assertEqual(None, cache.pop(1, None))
self.assertEqual(None, cache.pop(0, None))
def test_popitem(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertIn(cache.pop(1), {1: 1, 2: 2})
self.assertEqual(1, len(cache))
self.assertIn(cache.pop(2), {1: 1, 2: 2})
self.assertEqual(0, len(cache))
with self.assertRaises(KeyError):
cache.popitem()
def test_popitem_exception_context(self):
# since Python 3.7, MutableMapping.popitem() suppresses
# exception context as implementation detail
exception = None
try:
self.Cache(maxsize=2).popitem()
except Exception as e:
exception = e
self.assertIsNone(exception.__cause__)
self.assertTrue(exception.__suppress_context__)
def test_missing(self):
class DefaultCache(self.Cache):
def __missing__(self, key):
self[key] = key
return key
cache = DefaultCache(maxsize=2)
self.assertEqual(0, cache.currsize)
self.assertEqual(2, cache.maxsize)
self.assertEqual(0, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
self.assertEqual(2, len(cache))
self.assertTrue(1 in cache and 2 in cache)
self.assertEqual(3, cache[3])
self.assertEqual(2, len(cache))
self.assertTrue(3 in cache)
self.assertTrue(1 in cache or 2 in cache)
self.assertTrue(1 not in cache or 2 not in cache)
self.assertEqual(4, cache[4])
self.assertEqual(2, len(cache))
self.assertTrue(4 in cache)
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
# verify __missing__() is *not* called for any operations
# besides __getitem__()
self.assertEqual(4, cache.get(4))
self.assertEqual(None, cache.get(5))
self.assertEqual(5 * 5, cache.get(5, 5 * 5))
self.assertEqual(2, len(cache))
self.assertEqual(4, cache.pop(4))
with self.assertRaises(KeyError):
cache.pop(5)
self.assertEqual(None, cache.pop(5, None))
self.assertEqual(5 * 5, cache.pop(5, 5 * 5))
self.assertEqual(1, len(cache))
cache.clear()
cache[1] = 1 + 1
self.assertEqual(1 + 1, cache.setdefault(1))
self.assertEqual(1 + 1, cache.setdefault(1, 1))
self.assertEqual(1 + 1, cache[1])
self.assertEqual(2 + 2, cache.setdefault(2, 2 + 2))
self.assertEqual(2 + 2, cache.setdefault(2, None))
self.assertEqual(2 + 2, cache.setdefault(2))
self.assertEqual(2 + 2, cache[2])
self.assertEqual(2, len(cache))
self.assertTrue(1 in cache and 2 in cache)
self.assertEqual(None, cache.setdefault(3))
self.assertEqual(2, len(cache))
self.assertTrue(3 in cache)
self.assertTrue(1 in cache or 2 in cache)
self.assertTrue(1 not in cache or 2 not in cache)
def test_missing_getsizeof(self):
class DefaultCache(self.Cache):
def __missing__(self, key):
try:
self[key] = key
except ValueError:
pass # not stored
return key
cache = DefaultCache(maxsize=2, getsizeof=lambda x: x)
self.assertEqual(0, cache.currsize)
self.assertEqual(2, cache.maxsize)
self.assertEqual(1, cache[1])
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.currsize)
self.assertIn(1, cache)
self.assertEqual(2, cache[2])
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertNotIn(1, cache)
self.assertIn(2, cache)
self.assertEqual(3, cache[3]) # not stored
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.currsize)
self.assertEqual((1, 1), cache.popitem())
def _test_getsizeof(self, cache):
self.assertEqual(0, cache.currsize)
self.assertEqual(3, cache.maxsize)
self.assertEqual(1, cache.getsizeof(1))
self.assertEqual(2, cache.getsizeof(2))
self.assertEqual(3, cache.getsizeof(3))
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[1] = 2
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertEqual(2, cache[1])
self.assertNotIn(2, cache)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
with self.assertRaises(ValueError):
cache[3] = 4
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
with self.assertRaises(ValueError):
cache[4] = 4
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
def test_getsizeof_param(self):
self._test_getsizeof(self.Cache(maxsize=3, getsizeof=lambda x: x))
def test_getsizeof_subclass(self):
class Cache(self.Cache):
def getsizeof(self, value):
return value
self._test_getsizeof(Cache(maxsize=3))
def test_pickle(self):
import pickle
source = self.Cache(maxsize=2)
source.update({1: 1, 2: 2})
cache = pickle.loads(pickle.dumps(source))
self.assertEqual(source, cache)
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(2, len(cache))
self.assertEqual(3, cache[3])
self.assertTrue(1 in cache or 2 in cache)
cache[4] = 4
self.assertEqual(2, len(cache))
self.assertEqual(4, cache[4])
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
self.assertEqual(cache, pickle.loads(pickle.dumps(cache)))
def test_pickle_maxsize(self):
import pickle
import sys
# test empty cache, single element, large cache (recursion limit)
for n in [0, 1, sys.getrecursionlimit() * 2]:
source = self.Cache(maxsize=n)
source.update((i, i) for i in range(n))
cache = pickle.loads(pickle.dumps(source))
self.assertEqual(n, len(cache))
self.assertEqual(source, cache)
| 32.158416 | 74 | 0.589491 | import unittest
class CacheTestMixin:
Cache = None
def test_defaults(self):
cache = self.Cache(maxsize=1)
self.assertEqual(0, len(cache))
self.assertEqual(1, cache.maxsize)
self.assertEqual(0, cache.currsize)
self.assertEqual(1, cache.getsizeof(None))
self.assertEqual(1, cache.getsizeof(""))
self.assertEqual(1, cache.getsizeof(0))
self.assertTrue(repr(cache).startswith(cache.__class__.__name__))
def test_insert(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(2, len(cache))
self.assertEqual(3, cache[3])
self.assertTrue(1 in cache or 2 in cache)
cache[4] = 4
self.assertEqual(2, len(cache))
self.assertEqual(4, cache[4])
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
def test_update(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache.update({1: "a", 2: "b"})
self.assertEqual(2, len(cache))
self.assertEqual("a", cache[1])
self.assertEqual("b", cache[2])
def test_delete(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
del cache[2]
self.assertEqual(1, len(cache))
self.assertEqual(1, cache[1])
self.assertNotIn(2, cache)
del cache[1]
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
with self.assertRaises(KeyError):
del cache[1]
self.assertEqual(0, len(cache))
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
def test_pop(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertEqual(2, cache.pop(2))
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.pop(1))
self.assertEqual(0, len(cache))
with self.assertRaises(KeyError):
cache.pop(2)
with self.assertRaises(KeyError):
cache.pop(1)
with self.assertRaises(KeyError):
cache.pop(0)
self.assertEqual(None, cache.pop(2, None))
self.assertEqual(None, cache.pop(1, None))
self.assertEqual(None, cache.pop(0, None))
def test_popitem(self):
cache = self.Cache(maxsize=2)
cache.update({1: 1, 2: 2})
self.assertIn(cache.pop(1), {1: 1, 2: 2})
self.assertEqual(1, len(cache))
self.assertIn(cache.pop(2), {1: 1, 2: 2})
self.assertEqual(0, len(cache))
with self.assertRaises(KeyError):
cache.popitem()
def test_popitem_exception_context(self):
exception = None
try:
self.Cache(maxsize=2).popitem()
except Exception as e:
exception = e
self.assertIsNone(exception.__cause__)
self.assertTrue(exception.__suppress_context__)
def test_missing(self):
class DefaultCache(self.Cache):
def __missing__(self, key):
self[key] = key
return key
cache = DefaultCache(maxsize=2)
self.assertEqual(0, cache.currsize)
self.assertEqual(2, cache.maxsize)
self.assertEqual(0, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
self.assertEqual(2, len(cache))
self.assertTrue(1 in cache and 2 in cache)
self.assertEqual(3, cache[3])
self.assertEqual(2, len(cache))
self.assertTrue(3 in cache)
self.assertTrue(1 in cache or 2 in cache)
self.assertTrue(1 not in cache or 2 not in cache)
self.assertEqual(4, cache[4])
self.assertEqual(2, len(cache))
self.assertTrue(4 in cache)
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
self.assertEqual(4, cache.get(4))
self.assertEqual(None, cache.get(5))
self.assertEqual(5 * 5, cache.get(5, 5 * 5))
self.assertEqual(2, len(cache))
self.assertEqual(4, cache.pop(4))
with self.assertRaises(KeyError):
cache.pop(5)
self.assertEqual(None, cache.pop(5, None))
self.assertEqual(5 * 5, cache.pop(5, 5 * 5))
self.assertEqual(1, len(cache))
cache.clear()
cache[1] = 1 + 1
self.assertEqual(1 + 1, cache.setdefault(1))
self.assertEqual(1 + 1, cache.setdefault(1, 1))
self.assertEqual(1 + 1, cache[1])
self.assertEqual(2 + 2, cache.setdefault(2, 2 + 2))
self.assertEqual(2 + 2, cache.setdefault(2, None))
self.assertEqual(2 + 2, cache.setdefault(2))
self.assertEqual(2 + 2, cache[2])
self.assertEqual(2, len(cache))
self.assertTrue(1 in cache and 2 in cache)
self.assertEqual(None, cache.setdefault(3))
self.assertEqual(2, len(cache))
self.assertTrue(3 in cache)
self.assertTrue(1 in cache or 2 in cache)
self.assertTrue(1 not in cache or 2 not in cache)
def test_missing_getsizeof(self):
class DefaultCache(self.Cache):
def __missing__(self, key):
try:
self[key] = key
except ValueError:
pass
return key
cache = DefaultCache(maxsize=2, getsizeof=lambda x: x)
self.assertEqual(0, cache.currsize)
self.assertEqual(2, cache.maxsize)
self.assertEqual(1, cache[1])
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.currsize)
self.assertIn(1, cache)
self.assertEqual(2, cache[2])
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertNotIn(1, cache)
self.assertIn(2, cache)
self.assertEqual(3, cache[3])
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(1, len(cache))
self.assertEqual(1, cache.currsize)
self.assertEqual((1, 1), cache.popitem())
def _test_getsizeof(self, cache):
self.assertEqual(0, cache.currsize)
self.assertEqual(3, cache.maxsize)
self.assertEqual(1, cache.getsizeof(1))
self.assertEqual(2, cache.getsizeof(2))
self.assertEqual(3, cache.getsizeof(3))
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[1] = 2
self.assertEqual(1, len(cache))
self.assertEqual(2, cache.currsize)
self.assertEqual(2, cache[1])
self.assertNotIn(2, cache)
cache.update({1: 1, 2: 2})
self.assertEqual(2, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
self.assertNotIn(1, cache)
self.assertNotIn(2, cache)
with self.assertRaises(ValueError):
cache[3] = 4
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
with self.assertRaises(ValueError):
cache[4] = 4
self.assertEqual(1, len(cache))
self.assertEqual(3, cache.currsize)
self.assertEqual(3, cache[3])
def test_getsizeof_param(self):
self._test_getsizeof(self.Cache(maxsize=3, getsizeof=lambda x: x))
def test_getsizeof_subclass(self):
class Cache(self.Cache):
def getsizeof(self, value):
return value
self._test_getsizeof(Cache(maxsize=3))
def test_pickle(self):
import pickle
source = self.Cache(maxsize=2)
source.update({1: 1, 2: 2})
cache = pickle.loads(pickle.dumps(source))
self.assertEqual(source, cache)
self.assertEqual(2, len(cache))
self.assertEqual(1, cache[1])
self.assertEqual(2, cache[2])
cache[3] = 3
self.assertEqual(2, len(cache))
self.assertEqual(3, cache[3])
self.assertTrue(1 in cache or 2 in cache)
cache[4] = 4
self.assertEqual(2, len(cache))
self.assertEqual(4, cache[4])
self.assertTrue(1 in cache or 2 in cache or 3 in cache)
self.assertEqual(cache, pickle.loads(pickle.dumps(cache)))
def test_pickle_maxsize(self):
import pickle
import sys
for n in [0, 1, sys.getrecursionlimit() * 2]:
source = self.Cache(maxsize=n)
source.update((i, i) for i in range(n))
cache = pickle.loads(pickle.dumps(source))
self.assertEqual(n, len(cache))
self.assertEqual(source, cache)
| true | true |
f735177289b7dc8e9e0f4ca915f735688faed056 | 2,039 | py | Python | cpp/src/experiments/generate_csv.py | chathurawidanage/cylon | ac61b7a50880138fe67de21adee208016a94979a | [
"Apache-2.0"
] | null | null | null | cpp/src/experiments/generate_csv.py | chathurawidanage/cylon | ac61b7a50880138fe67de21adee208016a94979a | [
"Apache-2.0"
] | null | null | null | cpp/src/experiments/generate_csv.py | chathurawidanage/cylon | ac61b7a50880138fe67de21adee208016a94979a | [
"Apache-2.0"
] | null | null | null | ##
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
import numpy as np
import pandas as pd
import argparse
parser = argparse.ArgumentParser(description='generate random data')
parser.add_argument('-o', dest='output', type=str, help='output file', default='/tmp/csv.csv')
parser.add_argument('-r', dest='rows', type=int, help='number of rows', default=10)
parser.add_argument('-c', dest='cols', type=int, help='number of cols', default=4)
parser.add_argument('-k', dest='idx_cols', type=int, nargs='+', help='index columns', default=[0])
parser.add_argument('--krange', nargs=2, type=int, help='key range', default=(0, 10))
parser.add_argument('--vrange', nargs=2, type=float, help='val range', default=(0., 1.))
parser.add_argument('--no_header', action='store_true', help='exclude header')
def generate_file(output='/tmp/csv.csv', rows=10, cols=4, idx_cols=None, vrange=(0., 1.),
krange=(0, 10), no_header=False):
if idx_cols is None:
idx_cols = [0]
df = pd.DataFrame(np.random.rand(rows, cols) * (vrange[1] - vrange[0]) + vrange[0],
columns=list(range(cols)))
for i in idx_cols:
assert cols > i >= 0
df[i] = df[i].map(lambda x: int(
krange[0] + (x - vrange[0]) * (krange[1] - krange[0]) / (vrange[1] - vrange[0])))
df.to_csv(output, header=not no_header, index=False, float_format='%.3f')
if __name__ == "__main__":
args = parser.parse_args()
args = vars(args)
print("generate csv :", args, flush=True)
generate_file(**args)
| 39.980392 | 98 | 0.672388 |
import numpy as np
import pandas as pd
import argparse
parser = argparse.ArgumentParser(description='generate random data')
parser.add_argument('-o', dest='output', type=str, help='output file', default='/tmp/csv.csv')
parser.add_argument('-r', dest='rows', type=int, help='number of rows', default=10)
parser.add_argument('-c', dest='cols', type=int, help='number of cols', default=4)
parser.add_argument('-k', dest='idx_cols', type=int, nargs='+', help='index columns', default=[0])
parser.add_argument('--krange', nargs=2, type=int, help='key range', default=(0, 10))
parser.add_argument('--vrange', nargs=2, type=float, help='val range', default=(0., 1.))
parser.add_argument('--no_header', action='store_true', help='exclude header')
def generate_file(output='/tmp/csv.csv', rows=10, cols=4, idx_cols=None, vrange=(0., 1.),
krange=(0, 10), no_header=False):
if idx_cols is None:
idx_cols = [0]
df = pd.DataFrame(np.random.rand(rows, cols) * (vrange[1] - vrange[0]) + vrange[0],
columns=list(range(cols)))
for i in idx_cols:
assert cols > i >= 0
df[i] = df[i].map(lambda x: int(
krange[0] + (x - vrange[0]) * (krange[1] - krange[0]) / (vrange[1] - vrange[0])))
df.to_csv(output, header=not no_header, index=False, float_format='%.3f')
if __name__ == "__main__":
args = parser.parse_args()
args = vars(args)
print("generate csv :", args, flush=True)
generate_file(**args)
| true | true |
f73517992385749a8a4ce2a60897e50b2bd2fdc3 | 4,189 | py | Python | bibclean/utils/doi_tools.py | Svdvoort/BibClean | 0d891d0dc0d0b335afdf3a09f4df6103d1e96215 | [
"MIT"
] | null | null | null | bibclean/utils/doi_tools.py | Svdvoort/BibClean | 0d891d0dc0d0b335afdf3a09f4df6103d1e96215 | [
"MIT"
] | 218 | 2020-11-20T08:20:01.000Z | 2022-03-28T19:21:18.000Z | bibclean/utils/doi_tools.py | Svdvoort/BibClean | 0d891d0dc0d0b335afdf3a09f4df6103d1e96215 | [
"MIT"
] | null | null | null | import bibclean.utils.cleaning as cleaner
import bibclean.utils.formatting as formatter
import bibclean.config.constants as constants
from bibtexparser.customization import splitname
from Levenshtein import distance as levenshtein_distance
import requests
from crossref.restful import Works, Etiquette
import bibclean.crossref_tools.parser as cr_parser
import bibclean.bib_tools.parser as bib_parser
from bibclean.updating.general import update_field
def crossref_is_similar(cr_info, bib_info, max_levenshtein_distance):
is_similar = False
if cr_parser.has_title(cr_info):
entry_title = bib_parser.get_title(bib_info)
entry_title = cleaner.clean_braces(entry_title)
crossref_title = cr_parser.get_title(cr_info)
lev_distance = levenshtein_distance(crossref_title, entry_title)
if lev_distance <= max_levenshtein_distance:
is_similar = True
return is_similar
def set_doi(entry, doi, update_URL):
doi = cleaner.clean_doi(doi)
entry = update_field(entry, "doi", doi)
if update_URL:
new_url = formatter.format_doi_url(doi)
entry = update_field(entry, "url", new_url)
return entry
def get_doi(entry, config):
has_doi = bib_parser.has_doi(entry)
my_etiquette = Etiquette(
constants.PROJECT_NAME, constants.VERSION, constants.URL, constants.EMAIL
)
max_levenshtein_distance = config.get_max_levenshtein_distance()
update_URL = config.get_update_URL()
works = Works(etiquette=my_etiquette)
if not has_doi and bib_parser.has_url(entry):
entry_url = bib_parser.get_url(entry)
if "doi" in entry_url:
doi = cleaner.clean_doi(entry_url)
if is_crossref_work(doi):
crossref_info = works.doi(doi)
if crossref_is_similar(crossref_info, entry, max_levenshtein_distance):
entry = set_doi(entry, doi, update_URL)
has_doi = True
if not has_doi:
# we try to find the doi for the title
entry_title = bib_parser.get_title(entry)
entry_title = cleaner.clean_braces(entry_title)
author = bib_parser.get_author(entry)
first_author = splitname(author[0], strict_mode=False)
first_author_last_name = first_author["last"][0]
query_parameters = {"author": first_author_last_name, "bibliographic": entry_title}
works_query = works.query(**query_parameters)
works_query = works_query.sort("score").order("desc").select(["title", "DOI"])
i_i_item = 0
max_items = min(works_query.count(), 10)
works_results = iter(works_query)
while i_i_item < max_items and not has_doi:
i_item = next(works_results)
if crossref_is_similar(i_item, entry, max_levenshtein_distance):
doi = cr_parser.get_doi(i_item)
entry = set_doi(entry, doi, update_URL)
has_doi = True
i_i_item += 1
else:
# We check to see if the doi is correct
doi = bib_parser.get_doi(entry)
doi = cleaner.clean_doi(doi)
if is_crossref_work(doi):
crossref_info = works.doi(doi)
if crossref_is_similar(crossref_info, entry, max_levenshtein_distance):
entry = set_doi(entry, doi, update_URL)
else:
entry.pop("doi", None)
if "doi" in bib_parser.get_url(entry):
entry.pop("url", None)
has_doi = False
else:
entry = set_doi(entry, doi, update_URL)
return entry, has_doi
def is_crossref_work(doi):
my_etiquette = Etiquette(
constants.PROJECT_NAME, constants.VERSION, constants.URL, constants.EMAIL
)
return Works(etiquette=my_etiquette).doi_exists(doi)
def get_doi_bib(doi):
"""
Return a bibTeX string of metadata for a given DOI.
From: https://gist.github.com/jrsmith3/5513926
"""
url = constants.DOI_URL + doi
headers = {"accept": "application/x-bibtex"}
r = requests.get(url, headers=headers)
if r.status_code == 200:
bib = r.text
else:
bib = None
return bib
| 34.056911 | 91 | 0.664598 | import bibclean.utils.cleaning as cleaner
import bibclean.utils.formatting as formatter
import bibclean.config.constants as constants
from bibtexparser.customization import splitname
from Levenshtein import distance as levenshtein_distance
import requests
from crossref.restful import Works, Etiquette
import bibclean.crossref_tools.parser as cr_parser
import bibclean.bib_tools.parser as bib_parser
from bibclean.updating.general import update_field
def crossref_is_similar(cr_info, bib_info, max_levenshtein_distance):
is_similar = False
if cr_parser.has_title(cr_info):
entry_title = bib_parser.get_title(bib_info)
entry_title = cleaner.clean_braces(entry_title)
crossref_title = cr_parser.get_title(cr_info)
lev_distance = levenshtein_distance(crossref_title, entry_title)
if lev_distance <= max_levenshtein_distance:
is_similar = True
return is_similar
def set_doi(entry, doi, update_URL):
doi = cleaner.clean_doi(doi)
entry = update_field(entry, "doi", doi)
if update_URL:
new_url = formatter.format_doi_url(doi)
entry = update_field(entry, "url", new_url)
return entry
def get_doi(entry, config):
has_doi = bib_parser.has_doi(entry)
my_etiquette = Etiquette(
constants.PROJECT_NAME, constants.VERSION, constants.URL, constants.EMAIL
)
max_levenshtein_distance = config.get_max_levenshtein_distance()
update_URL = config.get_update_URL()
works = Works(etiquette=my_etiquette)
if not has_doi and bib_parser.has_url(entry):
entry_url = bib_parser.get_url(entry)
if "doi" in entry_url:
doi = cleaner.clean_doi(entry_url)
if is_crossref_work(doi):
crossref_info = works.doi(doi)
if crossref_is_similar(crossref_info, entry, max_levenshtein_distance):
entry = set_doi(entry, doi, update_URL)
has_doi = True
if not has_doi:
entry_title = bib_parser.get_title(entry)
entry_title = cleaner.clean_braces(entry_title)
author = bib_parser.get_author(entry)
first_author = splitname(author[0], strict_mode=False)
first_author_last_name = first_author["last"][0]
query_parameters = {"author": first_author_last_name, "bibliographic": entry_title}
works_query = works.query(**query_parameters)
works_query = works_query.sort("score").order("desc").select(["title", "DOI"])
i_i_item = 0
max_items = min(works_query.count(), 10)
works_results = iter(works_query)
while i_i_item < max_items and not has_doi:
i_item = next(works_results)
if crossref_is_similar(i_item, entry, max_levenshtein_distance):
doi = cr_parser.get_doi(i_item)
entry = set_doi(entry, doi, update_URL)
has_doi = True
i_i_item += 1
else:
doi = bib_parser.get_doi(entry)
doi = cleaner.clean_doi(doi)
if is_crossref_work(doi):
crossref_info = works.doi(doi)
if crossref_is_similar(crossref_info, entry, max_levenshtein_distance):
entry = set_doi(entry, doi, update_URL)
else:
entry.pop("doi", None)
if "doi" in bib_parser.get_url(entry):
entry.pop("url", None)
has_doi = False
else:
entry = set_doi(entry, doi, update_URL)
return entry, has_doi
def is_crossref_work(doi):
my_etiquette = Etiquette(
constants.PROJECT_NAME, constants.VERSION, constants.URL, constants.EMAIL
)
return Works(etiquette=my_etiquette).doi_exists(doi)
def get_doi_bib(doi):
url = constants.DOI_URL + doi
headers = {"accept": "application/x-bibtex"}
r = requests.get(url, headers=headers)
if r.status_code == 200:
bib = r.text
else:
bib = None
return bib
| true | true |
f735181a126708262e7d44866f853497400b4e62 | 5,248 | py | Python | reid/utils/evaluation_metrics/retrieval.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 37 | 2021-02-05T11:49:17.000Z | 2022-03-13T15:42:40.000Z | reid/utils/evaluation_metrics/retrieval.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 7 | 2021-03-30T01:33:40.000Z | 2022-03-24T07:54:33.000Z | reid/utils/evaluation_metrics/retrieval.py | ZoRoronoa/Camera-Aware-Proxy | 352f900bbae330f18c2bfe2b3f2516fb4e31adea | [
"Apache-2.0"
] | 9 | 2021-03-06T02:43:55.000Z | 2022-03-26T07:32:19.000Z | import numpy as np
from sklearn import metrics as sk_metrics
import torch
class PersonReIDMAP:
'''
Compute Rank@k and mean Average Precision (mAP) scores
Used for Person ReID
Test on MarKet and Duke
'''
def __init__(self, query_feature, query_cam, query_label, gallery_feature, gallery_cam, gallery_label, dist):
'''
:param query_feature: np.array, bs * feature_dim
:param query_cam: np.array, 1d
:param query_label: np.array, 1d
:param gallery_feature: np.array, gallery_size * feature_dim
:param gallery_cam: np.array, 1d
:param gallery_label: np.array, 1d
'''
self.query_feature = query_feature
self.query_cam = query_cam
self.query_label = query_label
self.gallery_feature = gallery_feature
self.gallery_cam = gallery_cam
self.gallery_label = gallery_label
assert dist in ['cosine', 'euclidean']
self.dist = dist
# normalize feature for fast cosine computation
if self.dist == 'cosine':
self.query_feature = self.normalize(self.query_feature)
self.gallery_feature = self.normalize(self.gallery_feature)
APs = []
CMC = []
for i in range(len(query_label)):
AP, cmc = self.evaluate(self.query_feature[i], self.query_cam[i], self.query_label[i],
self.gallery_feature, self.gallery_cam, self.gallery_label)
APs.append(AP)
CMC.append(cmc)
# print('{}/{}'.format(i, len(query_label)))
self.APs = np.array(APs)
self.mAP = np.mean(self.APs)
min_len = 99999999
for cmc in CMC:
if len(cmc) < min_len:
min_len = len(cmc)
for i, cmc in enumerate(CMC):
CMC[i] = cmc[0: min_len]
self.CMC = np.mean(np.array(CMC), axis=0)
def compute_AP(self, index, good_index):
'''
:param index: np.array, 1d
:param good_index: np.array, 1d
:return:
'''
num_good = len(good_index)
hit = np.in1d(index, good_index)
index_hit = np.argwhere(hit == True).flatten()
if len(index_hit) == 0:
AP = 0
cmc = np.zeros([len(index)])
else:
precision = []
for i in range(num_good):
precision.append(float(i+1) / float((index_hit[i]+1)))
AP = np.mean(np.array(precision))
cmc = np.zeros([len(index)])
cmc[index_hit[0]: ] = 1
return AP, cmc
def evaluate(self, query_feature, query_cam, query_label, gallery_feature, gallery_cam, gallery_label, rerank=False):
'''
:param query_feature: np.array, 1d
:param query_cam: int
:param query_label: int
:param gallery_feature: np.array, 2d, gallerys_size * feature_dim
:param gallery_cam: np.array, 1d
:param gallery_label: np.array, 1d
:return:
'''
# cosine score
if self.dist is 'cosine':
# feature has been normalize during intialization
score = np.matmul(query_feature, gallery_feature.transpose())
index = np.argsort(score)[::-1]
elif self.dist is 'euclidean':
#score = self.l2(query_feature.reshape([1, -1]), gallery_feature)
#print('query_feature shape= {}, gallery_feature shape= {}'.format(query_feature.shape, gallery_feature.shape))
score = self.l2(query_feature.reshape([1,-1]), gallery_feature)
index = np.argsort(score.reshape([-1]))
junk_index_1 = self.in1d(np.argwhere(query_label == gallery_label), np.argwhere(query_cam == gallery_cam))
junk_index_2 = np.argwhere(gallery_label == -1)
junk_index = np.append(junk_index_1, junk_index_2)
good_index = self.in1d(np.argwhere(query_label == gallery_label), np.argwhere(query_cam != gallery_cam))
index_wo_junk = self.notin1d(index, junk_index)
return self.compute_AP(index_wo_junk, good_index)
def in1d(self, array1, array2, invert=False):
'''
:param set1: np.array, 1d
:param set2: np.array, 1d
:return:
'''
mask = np.in1d(array1, array2, invert=invert)
return array1[mask]
def notin1d(self, array1, array2):
return self.in1d(array1, array2, invert=True)
def normalize(self, x):
norm = np.tile(np.sqrt(np.sum(np.square(x), axis=1, keepdims=True)), [1, x.shape[1]])
return x / norm
def cosine_dist(self, x, y):
return sk_metrics.pairwise.cosine_distances(x, y)
def euclidean_dist(self, x, y):
return sk_metrics.pairwise.euclidean_distances(x, y)
def l2(self, x, y):
x = torch.from_numpy(x)
y = torch.from_numpy(y)
m, n = x.size(0), y.size(0)
x = x.view(m, -1)
y = y.view(n, -1)
dist = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + \
torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n, m).t()
dist.addmm_(1, -2, x, y.t())
# We use clamp to keep numerical stability
dist = torch.clamp(dist, 1e-8, np.inf)
return dist.numpy()
| 34.754967 | 123 | 0.59032 | import numpy as np
from sklearn import metrics as sk_metrics
import torch
class PersonReIDMAP:
def __init__(self, query_feature, query_cam, query_label, gallery_feature, gallery_cam, gallery_label, dist):
self.query_feature = query_feature
self.query_cam = query_cam
self.query_label = query_label
self.gallery_feature = gallery_feature
self.gallery_cam = gallery_cam
self.gallery_label = gallery_label
assert dist in ['cosine', 'euclidean']
self.dist = dist
if self.dist == 'cosine':
self.query_feature = self.normalize(self.query_feature)
self.gallery_feature = self.normalize(self.gallery_feature)
APs = []
CMC = []
for i in range(len(query_label)):
AP, cmc = self.evaluate(self.query_feature[i], self.query_cam[i], self.query_label[i],
self.gallery_feature, self.gallery_cam, self.gallery_label)
APs.append(AP)
CMC.append(cmc)
self.APs = np.array(APs)
self.mAP = np.mean(self.APs)
min_len = 99999999
for cmc in CMC:
if len(cmc) < min_len:
min_len = len(cmc)
for i, cmc in enumerate(CMC):
CMC[i] = cmc[0: min_len]
self.CMC = np.mean(np.array(CMC), axis=0)
def compute_AP(self, index, good_index):
num_good = len(good_index)
hit = np.in1d(index, good_index)
index_hit = np.argwhere(hit == True).flatten()
if len(index_hit) == 0:
AP = 0
cmc = np.zeros([len(index)])
else:
precision = []
for i in range(num_good):
precision.append(float(i+1) / float((index_hit[i]+1)))
AP = np.mean(np.array(precision))
cmc = np.zeros([len(index)])
cmc[index_hit[0]: ] = 1
return AP, cmc
def evaluate(self, query_feature, query_cam, query_label, gallery_feature, gallery_cam, gallery_label, rerank=False):
if self.dist is 'cosine':
score = np.matmul(query_feature, gallery_feature.transpose())
index = np.argsort(score)[::-1]
elif self.dist is 'euclidean':
score = self.l2(query_feature.reshape([1,-1]), gallery_feature)
index = np.argsort(score.reshape([-1]))
junk_index_1 = self.in1d(np.argwhere(query_label == gallery_label), np.argwhere(query_cam == gallery_cam))
junk_index_2 = np.argwhere(gallery_label == -1)
junk_index = np.append(junk_index_1, junk_index_2)
good_index = self.in1d(np.argwhere(query_label == gallery_label), np.argwhere(query_cam != gallery_cam))
index_wo_junk = self.notin1d(index, junk_index)
return self.compute_AP(index_wo_junk, good_index)
def in1d(self, array1, array2, invert=False):
mask = np.in1d(array1, array2, invert=invert)
return array1[mask]
def notin1d(self, array1, array2):
return self.in1d(array1, array2, invert=True)
def normalize(self, x):
norm = np.tile(np.sqrt(np.sum(np.square(x), axis=1, keepdims=True)), [1, x.shape[1]])
return x / norm
def cosine_dist(self, x, y):
return sk_metrics.pairwise.cosine_distances(x, y)
def euclidean_dist(self, x, y):
return sk_metrics.pairwise.euclidean_distances(x, y)
def l2(self, x, y):
x = torch.from_numpy(x)
y = torch.from_numpy(y)
m, n = x.size(0), y.size(0)
x = x.view(m, -1)
y = y.view(n, -1)
dist = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(m, n) + \
torch.pow(y, 2).sum(dim=1, keepdim=True).expand(n, m).t()
dist.addmm_(1, -2, x, y.t())
dist = torch.clamp(dist, 1e-8, np.inf)
return dist.numpy()
| true | true |
f73518b051d1cc9646ebc5039c4ebb6aa6cbfa1f | 2,550 | py | Python | demo/voice/main.py | fatash89/atom | 12846c8a3f936ae6c83e7e7b1d2dbb896e63fe66 | [
"Apache-2.0"
] | 64 | 2019-04-01T20:32:07.000Z | 2021-11-24T17:12:03.000Z | demo/voice/main.py | elementary-robotics/atom | 36aea078c0e029f03e7b9b4768729a683fb32a88 | [
"Apache-2.0"
] | 291 | 2019-04-01T22:54:31.000Z | 2022-03-31T21:48:47.000Z | demo/voice/main.py | fatash89/atom | 12846c8a3f936ae6c83e7e7b1d2dbb896e63fe66 | [
"Apache-2.0"
] | 5 | 2019-06-27T22:42:54.000Z | 2022-02-01T23:00:37.000Z | # atombot.py
import time
from atom import Element
PUBLISH_FREQUENCY = 100
TIME_FOR_WAVEFORM = 5
if __name__ == "__main__":
element = Element("voice_demo")
# Wait for the record element to start up and launch the VNC.
# this can and should be fixed with a heartbeat!
time.sleep(10)
# Start the recording and wait for 5s
data = {
"name": "example",
"t": TIME_FOR_WAVEFORM,
"perm": False,
"e": "waveform",
"s": "serialized",
}
res = element.command_send("record", "start", data, serialize=True)
time.sleep(TIME_FOR_WAVEFORM + 2)
# Strings we'll recognize for the plotting commands. This is pretty
# rudimentary and can be improved with some better parsing/processing/NLP
sinx_strings = ["show sin", "show sign", "show sine"]
cosx_strings = [
"show cos",
"show cosine",
"show coast",
"show coats",
"show cosign",
]
tanx_strings = ["show tan", "showtime"]
print("listening..")
last_id = element._get_redis_timestamp()
while True:
entries = element.entry_read_since(
"voice", "string", last_id=last_id, block=1000
)
if entries:
last_id = entries[0]["id"]
voice_string = entries[0]["data"].decode().lower()
print("Got voice string {}".format(voice_string))
if any(x in voice_string for x in sinx_strings):
print("Plotting sinx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["sin"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
if any(x in voice_string for x in cosx_strings):
print("Plotting cosx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["cos"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
if any(x in voice_string for x in tanx_strings):
print("Plotting tanx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["tan"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
time.sleep(1 / PUBLISH_FREQUENCY)
| 30.722892 | 82 | 0.512549 |
import time
from atom import Element
PUBLISH_FREQUENCY = 100
TIME_FOR_WAVEFORM = 5
if __name__ == "__main__":
element = Element("voice_demo")
time.sleep(10)
data = {
"name": "example",
"t": TIME_FOR_WAVEFORM,
"perm": False,
"e": "waveform",
"s": "serialized",
}
res = element.command_send("record", "start", data, serialize=True)
time.sleep(TIME_FOR_WAVEFORM + 2)
# rudimentary and can be improved with some better parsing/processing/NLP
sinx_strings = ["show sin", "show sign", "show sine"]
cosx_strings = [
"show cos",
"show cosine",
"show coast",
"show coats",
"show cosign",
]
tanx_strings = ["show tan", "showtime"]
print("listening..")
last_id = element._get_redis_timestamp()
while True:
entries = element.entry_read_since(
"voice", "string", last_id=last_id, block=1000
)
if entries:
last_id = entries[0]["id"]
voice_string = entries[0]["data"].decode().lower()
print("Got voice string {}".format(voice_string))
if any(x in voice_string for x in sinx_strings):
print("Plotting sinx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["sin"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
if any(x in voice_string for x in cosx_strings):
print("Plotting cosx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["cos"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
if any(x in voice_string for x in tanx_strings):
print("Plotting tanx")
data = {
"name": "example",
"msgpack": True,
"plots": [{"data": [["x", ["tan"], "value"]]}],
}
res = element.command_send("record", "plot", data, serialize=True)
time.sleep(1 / PUBLISH_FREQUENCY)
| true | true |
f73518eb606b9fa6212405aa5f35e08518d35b0a | 3,468 | py | Python | tests/test_geointerface.py | tfardet/Shapely | 462de3aa7a8bbd80408762a2d5aaf84b04476e4d | [
"BSD-3-Clause"
] | 189 | 2021-12-10T17:43:54.000Z | 2022-03-30T22:03:02.000Z | tests/test_geointerface.py | tfardet/Shapely | 462de3aa7a8bbd80408762a2d5aaf84b04476e4d | [
"BSD-3-Clause"
] | 133 | 2021-12-10T16:28:25.000Z | 2022-03-31T21:22:58.000Z | tests/test_geointerface.py | tfardet/Shapely | 462de3aa7a8bbd80408762a2d5aaf84b04476e4d | [
"BSD-3-Clause"
] | 19 | 2021-12-17T14:42:17.000Z | 2022-03-15T08:25:02.000Z | from . import unittest, shapely20_deprecated
import pytest
from shapely.geometry import shape
from shapely.geometry.multipoint import MultiPoint
from shapely.geometry.linestring import LineString
from shapely.geometry.multilinestring import MultiLineString
from shapely.geometry.polygon import LinearRing, Polygon
from shapely.geometry.multipolygon import MultiPolygon
from shapely import wkt
class GeoThing:
    """Minimal object exposing ``__geo_interface__`` (the geo interface
    protocol) so ``shape()`` can consume arbitrary objects, not just dicts."""

    def __init__(self, d):
        # d: a GeoJSON-like mapping, e.g. {"type": "Point", "coordinates": ...}
        self.__geo_interface__ = d
class GeoInterfaceTestCase(unittest.TestCase):
    """Exercises ``shape()`` against GeoJSON-like mappings and objects that
    implement ``__geo_interface__``, covering every geometry type."""

    def test_geointerface(self):
        # Convert a dictionary
        d = {"type": "Point", "coordinates": (0.0, 0.0)}
        geom = shape(d)
        self.assertEqual(geom.geom_type, 'Point')
        self.assertEqual(tuple(geom.coords), ((0.0, 0.0),))
        # Convert an object that implements the geo protocol
        geom = None
        thing = GeoThing({"type": "Point", "coordinates": (0.0, 0.0)})
        geom = shape(thing)
        self.assertEqual(geom.geom_type, 'Point')
        self.assertEqual(tuple(geom.coords), ((0.0, 0.0),))
        # Check line string
        geom = shape(
            {'type': 'LineString', 'coordinates': ((-1.0, -1.0), (1.0, 1.0))})
        self.assertIsInstance(geom, LineString)
        self.assertEqual(tuple(geom.coords), ((-1.0, -1.0), (1.0, 1.0)))
        # Check linearring
        geom = shape(
            {'type': 'LinearRing',
             'coordinates':
             ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0))}
        )
        self.assertIsInstance(geom, LinearRing)
        self.assertEqual(
            tuple(geom.coords),
            ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)))
        # polygon: exterior shell plus one interior ring (a hole)
        geom = shape(
            {'type': 'Polygon',
             'coordinates':
             (((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)),
              ((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1), (0.1, 0.1)))}
        )
        self.assertIsInstance(geom, Polygon)
        self.assertEqual(
            tuple(geom.exterior.coords),
            ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)))
        self.assertEqual(len(geom.interiors), 1)
        # multi point
        geom = shape({'type': 'MultiPoint',
                      'coordinates': ((1.0, 2.0), (3.0, 4.0))})
        self.assertIsInstance(geom, MultiPoint)
        self.assertEqual(len(geom.geoms), 2)
        # multi line string
        geom = shape({'type': 'MultiLineString',
                      'coordinates': (((0.0, 0.0), (1.0, 2.0)),)})
        self.assertIsInstance(geom, MultiLineString)
        self.assertEqual(len(geom.geoms), 1)
        # multi polygon (single member polygon, shell + one hole)
        geom = shape(
            {'type': 'MultiPolygon',
             'coordinates':
             [(((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)),
               ((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1), (0.1, 0.1))
               )]})
        self.assertIsInstance(geom, MultiPolygon)
        self.assertEqual(len(geom.geoms), 1)
def test_empty_wkt_polygon():
    """Confirm fix for issue #450: an empty WKT polygon still exposes a
    well-formed geo interface with empty coordinates."""
    g = wkt.loads('POLYGON EMPTY')
    assert g.__geo_interface__['type'] == 'Polygon'
    assert g.__geo_interface__['coordinates'] == ()
def test_empty_polygon():
    """Confirm fix for issue #450: a default-constructed (empty) Polygon
    exposes a well-formed geo interface with empty coordinates."""
    g = Polygon()
    assert g.__geo_interface__['type'] == 'Polygon'
    assert g.__geo_interface__['coordinates'] == ()
| 34.68 | 79 | 0.542388 | from . import unittest, shapely20_deprecated
import pytest
from shapely.geometry import shape
from shapely.geometry.multipoint import MultiPoint
from shapely.geometry.linestring import LineString
from shapely.geometry.multilinestring import MultiLineString
from shapely.geometry.polygon import LinearRing, Polygon
from shapely.geometry.multipolygon import MultiPolygon
from shapely import wkt
class GeoThing:
    """Minimal object exposing ``__geo_interface__`` for shape() tests."""

    def __init__(self, d):
        # d: a GeoJSON-like mapping
        self.__geo_interface__ = d
class GeoInterfaceTestCase(unittest.TestCase):
    """Exercises shape() against the __geo_interface__ protocol for every
    geometry type (comment-stripped duplicate of the annotated version)."""

    def test_geointerface(self):
        # dict input
        d = {"type": "Point", "coordinates": (0.0, 0.0)}
        geom = shape(d)
        self.assertEqual(geom.geom_type, 'Point')
        self.assertEqual(tuple(geom.coords), ((0.0, 0.0),))
        # object implementing the geo protocol
        geom = None
        thing = GeoThing({"type": "Point", "coordinates": (0.0, 0.0)})
        geom = shape(thing)
        self.assertEqual(geom.geom_type, 'Point')
        self.assertEqual(tuple(geom.coords), ((0.0, 0.0),))
        # line string
        geom = shape(
            {'type': 'LineString', 'coordinates': ((-1.0, -1.0), (1.0, 1.0))})
        self.assertIsInstance(geom, LineString)
        self.assertEqual(tuple(geom.coords), ((-1.0, -1.0), (1.0, 1.0)))
        # linear ring
        geom = shape(
            {'type': 'LinearRing',
             'coordinates':
             ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0))}
        )
        self.assertIsInstance(geom, LinearRing)
        self.assertEqual(
            tuple(geom.coords),
            ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)))
        # polygon with one hole
        geom = shape(
            {'type': 'Polygon',
             'coordinates':
             (((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)),
              ((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1), (0.1, 0.1)))}
        )
        self.assertIsInstance(geom, Polygon)
        self.assertEqual(
            tuple(geom.exterior.coords),
            ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (2.0, -1.0), (0.0, 0.0)))
        self.assertEqual(len(geom.interiors), 1)
        # multi point
        geom = shape({'type': 'MultiPoint',
                      'coordinates': ((1.0, 2.0), (3.0, 4.0))})
        self.assertIsInstance(geom, MultiPoint)
        self.assertEqual(len(geom.geoms), 2)
        # multi line string
        geom = shape({'type': 'MultiLineString',
                      'coordinates': (((0.0, 0.0), (1.0, 2.0)),)})
        self.assertIsInstance(geom, MultiLineString)
        self.assertEqual(len(geom.geoms), 1)
        # multi polygon
        geom = shape(
            {'type': 'MultiPolygon',
             'coordinates':
             [(((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0), (0.0, 0.0)),
               ((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1), (0.1, 0.1))
               )]})
        self.assertIsInstance(geom, MultiPolygon)
        self.assertEqual(len(geom.geoms), 1)
def test_empty_wkt_polygon():
    """Empty WKT polygon exposes a valid geo interface (issue #450)."""
    g = wkt.loads('POLYGON EMPTY')
    assert g.__geo_interface__['type'] == 'Polygon'
    assert g.__geo_interface__['coordinates'] == ()
def test_empty_polygon():
    """Default-constructed Polygon exposes a valid geo interface (issue #450)."""
    g = Polygon()
    assert g.__geo_interface__['type'] == 'Polygon'
    assert g.__geo_interface__['coordinates'] == ()
| true | true |
f7351a8ea86926ad65c06a83f4fd64019da7bcb8 | 1,300 | py | Python | stackhpc_monasca_agent_plugins/detection/nvidia.py | stackhpc/monasca-agent-plugins | 55687c0337e060d67feb76497d943842f720efb2 | [
"Apache-2.0"
] | 2 | 2018-08-16T12:37:37.000Z | 2021-03-02T13:59:57.000Z | stackhpc_monasca_agent_plugins/detection/nvidia.py | stackhpc/monasca-agent-plugins | 55687c0337e060d67feb76497d943842f720efb2 | [
"Apache-2.0"
] | 9 | 2018-01-05T13:57:22.000Z | 2021-09-11T04:32:24.000Z | stackhpc_monasca_agent_plugins/detection/nvidia.py | stackhpc/monasca-agent-plugins | 55687c0337e060d67feb76497d943842f720efb2 | [
"Apache-2.0"
] | 3 | 2020-06-17T16:05:10.000Z | 2021-09-15T14:28:36.000Z | # Copyright (c) 2018 StackHPC Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import subprocess
import monasca_setup.agent_config
import monasca_setup.detection
LOG = logging.getLogger(__name__)
class NvidiaDetect(monasca_setup.detection.Plugin):
    """Detects nVidia display hardware and configures the nvidia plugin."""

    def _detect(self):
        """Set ``self.available`` to True iff lshw reports nVidia hardware.

        Shells out to ``lshw -C display`` and looks for the string 'nvidia'
        in its output.
        """
        self.available = False
        # BUG FIX: subprocess.check_output() returns bytes on Python 3, and
        # `'nvidia' not in <bytes>` raises TypeError. Decode to text first;
        # on Python 2 this just converts str -> unicode, which is harmless.
        output = subprocess.check_output(
            ["lshw", "-C", "display"]).decode('utf-8', 'replace')
        if 'nvidia' not in output.lower():
            LOG.info('No nVidia hardware detected.')
            return
        self.available = True

    def build_config(self):
        """Build the agent plugin config for nVidia GPU stats collection.

        Returns:
            monasca_setup.agent_config.Plugins: config mapping with a single
            'nvidia_stats' instance and no init_config.
        """
        config = monasca_setup.agent_config.Plugins()
        config['nvidia'] = {
            'init_config': None,
            'instances': [{'name': 'nvidia_stats'}]}
        return config
| 31.707317 | 75 | 0.686154 |
import logging
import subprocess
import monasca_setup.agent_config
import monasca_setup.detection
LOG = logging.getLogger(__name__)
class NvidiaDetect(monasca_setup.detection.Plugin):
    """Detects and configures the nVidia plugin."""

    def _detect(self):
        # Sets self.available based on whether `lshw -C display` output
        # mentions 'nvidia'.
        self.available = False
        # NOTE(review): check_output() returns bytes on Python 3, so the
        # `'nvidia' not in ...` test would raise TypeError there — this code
        # appears to assume Python 2; confirm before running under Python 3.
        if 'nvidia' not in subprocess.check_output(
                ["lshw", "-C", "display"]).lower():
            LOG.info('No nVidia hardware detected.')
            return
        self.available = True

    def build_config(self):
        # Returns a Plugins mapping with one 'nvidia_stats' instance.
        config = monasca_setup.agent_config.Plugins()
        config['nvidia'] = {
            'init_config': None,
            'instances': [{'name': 'nvidia_stats'}]}
        return config
| true | true |
f7351c8dea221062879ca07266ee91556e630dd2 | 26 | py | Python | __init__.py | nipunnmalhotra/nipunn_IQR | 24a08cf00a41cd938a7c9a31c781efc8ce9359ed | [
"MIT"
] | null | null | null | __init__.py | nipunnmalhotra/nipunn_IQR | 24a08cf00a41cd938a7c9a31c781efc8ce9359ed | [
"MIT"
] | null | null | null | __init__.py | nipunnmalhotra/nipunn_IQR | 24a08cf00a41cd938a7c9a31c781efc8ce9359ed | [
"MIT"
] | null | null | null | import nipunn_IQR.outliers | 26 | 26 | 0.923077 | import nipunn_IQR.outliers | true | true |
f7351cf30e51503e4f8f41dd8d9d62a6f16bb53b | 190 | py | Python | test.py | amysudarat/KaggleProject | 319b6644cb7c45674c0b2fc69ab23c317b64d644 | [
"MIT"
] | null | null | null | test.py | amysudarat/KaggleProject | 319b6644cb7c45674c0b2fc69ab23c317b64d644 | [
"MIT"
] | null | null | null | test.py | amysudarat/KaggleProject | 319b6644cb7c45674c0b2fc69ab23c317b64d644 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
print("Hello Mikel")
print("test")
print("working on amy branch")
print(" Hello this is mikel. SO i created a branche and i want to be sure i did the right thing") | 23.75 | 97 | 0.678947 |
print("Hello Mikel")
print("test")
print("working on amy branch")
print(" Hello this is mikel. SO i created a branche and i want to be sure i did the right thing") | true | true |
f7351ed173149466d0046eb2e9cd4948e1478441 | 3,936 | py | Python | src/rewriter/gen_reading_correction_data.py | dancerj/mozc | a5a4927c1f709d2ff0c681585c746f73a434e4c9 | [
"BSD-3-Clause"
] | null | null | null | src/rewriter/gen_reading_correction_data.py | dancerj/mozc | a5a4927c1f709d2ff0c681585c746f73a434e4c9 | [
"BSD-3-Clause"
] | 1 | 2021-06-30T14:59:51.000Z | 2021-06-30T15:31:56.000Z | src/rewriter/gen_reading_correction_data.py | dancerj/mozc | a5a4927c1f709d2ff0c681585c746f73a434e4c9 | [
"BSD-3-Clause"
] | 1 | 2022-03-25T09:01:39.000Z | 2022-03-25T09:01:39.000Z | # -*- coding: utf-8 -*-
# Copyright 2010-2020, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Converter of reading correction data from TSV to binary format.
Usage:
python gen_reading_correction_data.py
--input=input.tsv
--output_value_array=value_array.data
--output_error_array=error_array.data
--output_correction_array=correction_array.data
"""
__author__ = "komatsu"
import codecs
import optparse
from build_tools import code_generator_util
from build_tools import serialized_string_array_builder
def ParseOptions():
    """Parse command line options.

    Returns:
      optparse.Values with `input`, `output_value_array`,
      `output_error_array` and `output_correction_array` attributes.
    """
    parser = optparse.OptionParser()
    parser.add_option('--input', dest='input', help='Input TSV file path.')
    parser.add_option('--output_value_array', dest='output_value_array',
                      help='Output serialized string array for values.')
    parser.add_option('--output_error_array', dest='output_error_array',
                      help='Output serialized string array for errors.')
    parser.add_option('--output_correction_array', dest='output_correction_array',
                      help='Output serialized string array for corrections.')
    # [0] keeps only the options object; positional args are ignored.
    return parser.parse_args()[0]
def WriteData(input_path, output_value_array_path, output_error_array_path,
              output_correction_array_path):
    """Read the reading-correction TSV and serialize three string arrays.

    Args:
      input_path: input TSV with (value, error, correction) columns.
      output_value_array_path: output file for the value string array.
      output_error_array_path: output file for the error string array.
      output_correction_array_path: output file for the correction array.
    """
    outputs = []
    with codecs.open(input_path, 'r', encoding='utf-8') as input_stream:
        input_stream = code_generator_util.SkipLineComment(input_stream)
        input_stream = code_generator_util.ParseColumnStream(input_stream,
                                                            num_column=3)
        # ex. (value, error, correction) = ("雰囲気", "ふいんき", "ふんいき")
        for value, error, correction in input_stream:
            outputs.append([value, error, correction])
    # In order to lookup the entries via |error| with binary search,
    # sort outputs here (primary key: error, secondary: value).
    outputs.sort(key=lambda x: (x[1], x[0]))
    serialized_string_array_builder.SerializeToFile(
        [value for (value, _, _) in outputs], output_value_array_path)
    serialized_string_array_builder.SerializeToFile(
        [error for (_, error, _) in outputs], output_error_array_path)
    serialized_string_array_builder.SerializeToFile(
        [correction for (_, _, correction) in outputs],
        output_correction_array_path)
def main():
    """Parse flags and write the three serialized string arrays."""
    options = ParseOptions()
    WriteData(options.input, options.output_value_array,
              options.output_error_array, options.output_correction_array)


if __name__ == '__main__':
    main()
| 41.431579 | 80 | 0.741362 |
__author__ = "komatsu"
import codecs
import optparse
from build_tools import code_generator_util
from build_tools import serialized_string_array_builder
def ParseOptions():
    """Parse command line options; returns the optparse Values object."""
    parser = optparse.OptionParser()
    parser.add_option('--input', dest='input', help='Input TSV file path.')
    parser.add_option('--output_value_array', dest='output_value_array',
                      help='Output serialized string array for values.')
    parser.add_option('--output_error_array', dest='output_error_array',
                      help='Output serialized string array for errors.')
    parser.add_option('--output_correction_array', dest='output_correction_array',
                      help='Output serialized string array for corrections.')
    return parser.parse_args()[0]
def WriteData(input_path, output_value_array_path, output_error_array_path,
              output_correction_array_path):
    """Read the (value, error, correction) TSV and write three string arrays,
    sorted by error then value so lookups by |error| can binary-search."""
    outputs = []
    with codecs.open(input_path, 'r', encoding='utf-8') as input_stream:
        input_stream = code_generator_util.SkipLineComment(input_stream)
        input_stream = code_generator_util.ParseColumnStream(input_stream,
                                                            num_column=3)
        for value, error, correction in input_stream:
            outputs.append([value, error, correction])
    outputs.sort(key=lambda x: (x[1], x[0]))
    serialized_string_array_builder.SerializeToFile(
        [value for (value, _, _) in outputs], output_value_array_path)
    serialized_string_array_builder.SerializeToFile(
        [error for (_, error, _) in outputs], output_error_array_path)
    serialized_string_array_builder.SerializeToFile(
        [correction for (_, _, correction) in outputs],
        output_correction_array_path)
def main():
    """Parse flags and write the three serialized string arrays."""
    options = ParseOptions()
    WriteData(options.input, options.output_value_array,
              options.output_error_array, options.output_correction_array)


if __name__ == '__main__':
    main()
| true | true |
f7351ed7ac47d418a33066370b5990a5269a73b7 | 7,904 | py | Python | install/gcp/upgrade_tools/db_migrator.py | mitsuo0114/forseti-security | a21dc6b7a7420a60f02c1a4bdfbab9e101291dd2 | [
"Apache-2.0"
] | null | null | null | install/gcp/upgrade_tools/db_migrator.py | mitsuo0114/forseti-security | a21dc6b7a7420a60f02c1a4bdfbab9e101291dd2 | [
"Apache-2.0"
] | null | null | null | install/gcp/upgrade_tools/db_migrator.py | mitsuo0114/forseti-security | a21dc6b7a7420a60f02c1a4bdfbab9e101291dd2 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Forseti db migrator."""
from __future__ import print_function
import sys
# Importing migrate.changeset adds some new methods to existing SQLAlchemy
# objects but we will not be calling the library directly.
import migrate.changeset # noqa: F401, pylint: disable=unused-import
from sqlalchemy.exc import OperationalError
import google.cloud.forseti.services.scanner.dao as scanner_dao
import google.cloud.forseti.services.inventory.storage as inventory_dao
import google.cloud.forseti.services.dao as general_dao
from google.cloud.forseti.common.util import logger
DEFAULT_DB_CONN_STR = 'mysql://root@127.0.0.1:3306/forseti_security'
LOGGER = logger.get_logger(__name__)
class ColumnAction(object):
    """Enumeration of supported schema-migration column actions."""
    DROP = 'DROP'
    CREATE = 'CREATE'
    ALTER = 'ALTER'
def create_column(table, column):
    """Create Column.

    Args:
        table (sqlalchemy.schema.Table): The sql alchemy table object.
        column (sqlalchemy.schema.Column): The sql alchemy column object.
    """
    LOGGER.info('Attempting to create column: %s', column.name)
    # populate_default=True: existing rows get the column default —
    # TODO confirm against sqlalchemy-migrate changeset docs.
    column.create(table, populate_default=True)
def alter_column(table, old_column, new_column):
    """Alter Column.

    Args:
        table (sqlalchemy.schema.Table): The sql alchemy table object.
        old_column (sqlalchemy.schema.Column): The sql alchemy column object,
            this is the column to be modified.
        new_column (sqlalchemy.schema.Column): The sql alchemy column object,
            this is the column to update to.
    """
    LOGGER.info('Attempting to alter column: %s', old_column.name)
    # bind the old column with the corresponding table;
    # .alter() requires the column to be table-bound.
    old_column.table = table
    old_column.alter(name=new_column.name,
                     type=new_column.type,
                     nullable=new_column.nullable)
def drop_column(table, column):
    """Drop Column.

    Args:
        table (sqlalchemy.schema.Table): The sql alchemy table object.
        column (sqlalchemy.schema.Column): The sql alchemy column object.
    """
    LOGGER.info('Attempting to drop column: %s', column.name)
    column.drop(table)
# Dispatch table: ColumnAction constant -> handler. Note that the ALTER
# handler takes (table, old_column, new_column) while the other two take
# (table, column).
COLUMN_ACTION_MAPPING = {ColumnAction.DROP: drop_column,
                         ColumnAction.CREATE: create_column,
                         ColumnAction.ALTER: alter_column}
def migrate_schema(base, dao_classes):
    """Migrate database schema.

    Applies each dao class's declared schema update actions (obtained from
    its optional ``get_schema_update_actions()`` classmethod) to the
    matching table of the given declarative base.

    Args:
        base (Base): Declarative base.
        dao_classes (list): A list of dao classes.
    """
    # Find all the Table objects for each of the classes.
    # The format of tables is: {table_name: Table object}.
    tables = base.metadata.tables
    schema_update_actions_method = 'get_schema_update_actions'
    for dao_class in dao_classes:
        get_schema_update_actions = getattr(dao_class,
                                            schema_update_actions_method,
                                            None)
        # Skip classes that declare no update actions or whose table is
        # not part of this base's metadata.
        if (not callable(get_schema_update_actions) or
                dao_class.__tablename__ not in tables):
            LOGGER.info('Table %s doesn\'t require update.',
                        dao_class.__tablename__)
            continue
        LOGGER.info('Updating table %s', dao_class.__tablename__)
        # schema_update will require the Table object.
        table = tables.get(dao_class.__tablename__)
        schema_update_actions = get_schema_update_actions()
        for column_action, columns in schema_update_actions.iteritems():
            if column_action in [ColumnAction.CREATE, ColumnAction.DROP]:
                _create_or_drop_columns(column_action, columns, table)
            elif column_action in [ColumnAction.ALTER]:
                _alter_columns(column_action, columns, table)
            else:
                LOGGER.warn('Unknown column action: %s', column_action)
def _alter_columns(column_action, columns, table):
    """Alter columns.

    Errors are logged and swallowed so one failed column doesn't abort
    the rest of the migration.

    Args:
        column_action (str): Column Action.
        columns (dict): A dictionary of old_column: new_column.
        table (sqlalchemy.schema.Table): The sql alchemy table object.
    """
    column_action = column_action.upper()
    for old_column, new_column in columns.iteritems():
        try:
            COLUMN_ACTION_MAPPING.get(column_action)(table,
                                                     old_column,
                                                     new_column)
        except OperationalError:
            # e.g. the column is already in the target state.
            LOGGER.info('Failed to update db schema, table=%s',
                        table.name)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception(
                'Unexpected error happened when attempting '
                'to update database schema, table: %s',
                table.name)
def _create_or_drop_columns(column_action, columns, table):
    """Create or drop columns.

    Errors are logged and swallowed so one failed column doesn't abort
    the rest of the migration.

    Args:
        column_action (str): Column Action.
        columns (list): A list of columns.
        table (sqlalchemy.schema.Table): The sql alchemy table object.
    """
    column_action = column_action.upper()
    for column in columns:
        try:
            COLUMN_ACTION_MAPPING.get(column_action)(table,
                                                     column)
        except OperationalError:
            # e.g. the column already exists / is already dropped.
            LOGGER.info('Failed to update db schema, table=%s',
                        table.name)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception(
                'Unexpected error happened when attempting '
                'to update database schema, table: %s',
                table.name)
def _find_subclasses(cls):
"""Find all the subclasses of a class.
Args:
cls (class): The parent class.
Returns:
list: Subclasses of the given parent class.
"""
results = []
for subclass in cls.__subclasses__():
results.append(subclass)
return results
if __name__ == '__main__':
    # If the DB connection string is passed in, use that, otherwise
    # fall back to the default DB connection string.
    print (sys.argv)
    DB_CONN_STR = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_DB_CONN_STR
    SQL_ENGINE = general_dao.create_engine(DB_CONN_STR,
                                           pool_recycle=3600)
    # Drop the CaiTemporaryStore table to ensure it is using the
    # latest schema.
    # NOTE(review): CAI_TABLE.drop() appears to assume the table exists
    # after initialize(); confirm behavior on a fresh database.
    inventory_dao.initialize(SQL_ENGINE)
    INVENTORY_TABLES = inventory_dao.BASE.metadata.tables
    CAI_TABLE = INVENTORY_TABLES.get(
        inventory_dao.CaiTemporaryStore.__tablename__)
    CAI_TABLE.drop(SQL_ENGINE)
    # Create tables if not exists.
    inventory_dao.initialize(SQL_ENGINE)
    scanner_dao.initialize(SQL_ENGINE)
    # Find all the child classes inherited from declarative base class.
    SCANNER_DAO_CLASSES = _find_subclasses(scanner_dao.BASE)
    INVENTORY_DAO_CLASSES = _find_subclasses(inventory_dao.BASE)
    INVENTORY_DAO_CLASSES.extend([inventory_dao.CaiTemporaryStore])
    DECLARITIVE_BASE_MAPPING = {
        scanner_dao.BASE: SCANNER_DAO_CLASSES,
        inventory_dao.BASE: INVENTORY_DAO_CLASSES}
    for declaritive_base, classes in DECLARITIVE_BASE_MAPPING.iteritems():
        declaritive_base.metadata.bind = SQL_ENGINE
        migrate_schema(declaritive_base, classes)
| 35.443946 | 77 | 0.660552 |
from __future__ import print_function
import sys
import migrate.changeset
from sqlalchemy.exc import OperationalError
import google.cloud.forseti.services.scanner.dao as scanner_dao
import google.cloud.forseti.services.inventory.storage as inventory_dao
import google.cloud.forseti.services.dao as general_dao
from google.cloud.forseti.common.util import logger
DEFAULT_DB_CONN_STR = 'mysql://root@127.0.0.1:3306/forseti_security'
LOGGER = logger.get_logger(__name__)
class ColumnAction(object):
    """Enumeration of supported schema-migration column actions."""
    DROP = 'DROP'
    CREATE = 'CREATE'
    ALTER = 'ALTER'
def create_column(table, column):
    """Add *column* to *table*, populating existing rows with its default."""
    LOGGER.info('Attempting to create column: %s', column.name)
    column.create(table, populate_default=True)
def alter_column(table, old_column, new_column):
    """Rename/retype *old_column* of *table* to match *new_column*."""
    LOGGER.info('Attempting to alter column: %s', old_column.name)
    # .alter() requires the column to be bound to its table.
    old_column.table = table
    old_column.alter(name=new_column.name,
                     type=new_column.type,
                     nullable=new_column.nullable)
def drop_column(table, column):
    """Drop *column* from *table*."""
    LOGGER.info('Attempting to drop column: %s', column.name)
    column.drop(table)
# Dispatch table: ColumnAction constant -> handler (ALTER handler takes an
# extra new_column argument).
COLUMN_ACTION_MAPPING = {ColumnAction.DROP: drop_column,
                         ColumnAction.CREATE: create_column,
                         ColumnAction.ALTER: alter_column}
def migrate_schema(base, dao_classes):
    """Apply each dao class's declared schema update actions to its table."""
    # {table_name: Table object} for all tables known to this base.
    tables = base.metadata.tables
    schema_update_actions_method = 'get_schema_update_actions'
    for dao_class in dao_classes:
        get_schema_update_actions = getattr(dao_class,
                                            schema_update_actions_method,
                                            None)
        if (not callable(get_schema_update_actions) or
                dao_class.__tablename__ not in tables):
            LOGGER.info('Table %s doesn\'t require update.',
                        dao_class.__tablename__)
            continue
        LOGGER.info('Updating table %s', dao_class.__tablename__)
        # schema_update will require the Table object.
        table = tables.get(dao_class.__tablename__)
        schema_update_actions = get_schema_update_actions()
        for column_action, columns in schema_update_actions.iteritems():
            if column_action in [ColumnAction.CREATE, ColumnAction.DROP]:
                _create_or_drop_columns(column_action, columns, table)
            elif column_action in [ColumnAction.ALTER]:
                _alter_columns(column_action, columns, table)
            else:
                LOGGER.warn('Unknown column action: %s', column_action)
def _alter_columns(column_action, columns, table):
    """Alter each {old_column: new_column} pair; log and swallow failures."""
    column_action = column_action.upper()
    for old_column, new_column in columns.iteritems():
        try:
            COLUMN_ACTION_MAPPING.get(column_action)(table,
                                                     old_column,
                                                     new_column)
        except OperationalError:
            LOGGER.info('Failed to update db schema, table=%s',
                        table.name)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception(
                'Unexpected error happened when attempting '
                'to update database schema, table: %s',
                table.name)
def _create_or_drop_columns(column_action, columns, table):
    """Create or drop each listed column; log and swallow failures."""
    column_action = column_action.upper()
    for column in columns:
        try:
            COLUMN_ACTION_MAPPING.get(column_action)(table,
                                                     column)
        except OperationalError:
            LOGGER.info('Failed to update db schema, table=%s',
                        table.name)
        except Exception:  # pylint: disable=broad-except
            LOGGER.exception(
                'Unexpected error happened when attempting '
                'to update database schema, table: %s',
                table.name)
def _find_subclasses(cls):
    """Return a list of the direct subclasses of *cls*."""
    results = []
    for subclass in cls.__subclasses__():
        results.append(subclass)
    return results
if __name__ == '__main__':
    # If the DB connection string is passed in, use that, otherwise
    # fall back to the default DB connection string.
    print (sys.argv)
    DB_CONN_STR = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_DB_CONN_STR
    SQL_ENGINE = general_dao.create_engine(DB_CONN_STR,
                                           pool_recycle=3600)
    # Drop the CaiTemporaryStore table to ensure it is using the
    # latest schema.
    inventory_dao.initialize(SQL_ENGINE)
    INVENTORY_TABLES = inventory_dao.BASE.metadata.tables
    CAI_TABLE = INVENTORY_TABLES.get(
        inventory_dao.CaiTemporaryStore.__tablename__)
    CAI_TABLE.drop(SQL_ENGINE)
    # Create tables if not exists.
    inventory_dao.initialize(SQL_ENGINE)
    scanner_dao.initialize(SQL_ENGINE)
    # Find all the child classes inherited from declarative base class.
    SCANNER_DAO_CLASSES = _find_subclasses(scanner_dao.BASE)
    INVENTORY_DAO_CLASSES = _find_subclasses(inventory_dao.BASE)
    INVENTORY_DAO_CLASSES.extend([inventory_dao.CaiTemporaryStore])
    DECLARITIVE_BASE_MAPPING = {
        scanner_dao.BASE: SCANNER_DAO_CLASSES,
        inventory_dao.BASE: INVENTORY_DAO_CLASSES}
    for declaritive_base, classes in DECLARITIVE_BASE_MAPPING.iteritems():
        declaritive_base.metadata.bind = SQL_ENGINE
        migrate_schema(declaritive_base, classes)
| true | true |
f735201175d7dcb58bf35d7c25432ef8c050f9e7 | 3,825 | py | Python | core_pe/photo.py | astrofrog/dupeguru | d0a3f081dab21ea3d2fc69830c9e71a18078c150 | [
"BSD-3-Clause"
] | 1 | 2017-01-03T05:50:39.000Z | 2017-01-03T05:50:39.000Z | core_pe/photo.py | astrofrog/dupeguru | d0a3f081dab21ea3d2fc69830c9e71a18078c150 | [
"BSD-3-Clause"
] | null | null | null | core_pe/photo.py | astrofrog/dupeguru | d0a3f081dab21ea3d2fc69830c9e71a18078c150 | [
"BSD-3-Clause"
] | null | null | null | # Created By: Virgil Dupras
# Created On: 2011-05-29
# Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
import logging
from hscommon.util import get_file_ext, format_size
from core.app import format_timestamp, format_perc, format_dupe_count
from core import fs
from . import exif
def format_dimensions(dimensions):
    """Render a (width, height) pair as e.g. ``'640 x 480'``."""
    width = dimensions[0]
    height = dimensions[1]
    return '%d x %d' % (width, height)
def get_delta_dimensions(value, ref_value):
    """Return the component-wise difference of two (width, height) pairs."""
    delta_width = value[0] - ref_value[0]
    delta_height = value[1] - ref_value[1]
    return (delta_width, delta_height)
class Photo(fs.File):
    """Photo file with lazily-read dimensions and EXIF timestamp.

    Platform subclasses must implement the two ``_plat_*`` hooks that do
    the actual image decoding.
    """

    # Extend fs.File's lazily-read info fields with photo-specific ones.
    INITIAL_INFO = fs.File.INITIAL_INFO.copy()
    INITIAL_INFO.update({
        'dimensions': (0,0),
        'exif_timestamp': '',
    })
    __slots__ = fs.File.__slots__ + tuple(INITIAL_INFO.keys())
    # These extensions are supported on all platforms
    HANDLED_EXTS = {'png', 'jpg', 'jpeg', 'gif', 'bmp', 'tiff', 'tif'}

    def _plat_get_dimensions(self):
        # Platform hook: return the image's (width, height).
        raise NotImplementedError()

    def _plat_get_blocks(self, block_count_per_side, orientation):
        # Platform hook: return the image's color blocks for fuzzy matching,
        # taking the EXIF orientation into account.
        raise NotImplementedError()

    def _get_orientation(self):
        """Return the EXIF Orientation tag value (0 when unreadable); cached."""
        if not hasattr(self, '_cached_orientation'):
            try:
                with self.path.open('rb') as fp:
                    exifdata = exif.get_fields(fp)
                # the value is a list (probably one-sized) of ints
                orientations = exifdata['Orientation']
                self._cached_orientation = orientations[0]
            except Exception: # Couldn't read EXIF data, no transforms
                self._cached_orientation = 0
        return self._cached_orientation

    @classmethod
    def can_handle(cls, path):
        """True when *path* passes fs.File checks and has a photo extension."""
        return fs.File.can_handle(path) and get_file_ext(path[-1]) in cls.HANDLED_EXTS

    def get_display_info(self, group, delta):
        """Return the dict of formatted columns shown in the results table.

        With *delta* true and a match present, size/mtime/dimensions are
        shown relative to the group's reference file.
        """
        size = self.size
        mtime = self.mtime
        dimensions = self.dimensions
        m = group.get_match_of(self)
        if m:
            percentage = m.percentage
            dupe_count = 0
            if delta:
                r = group.ref
                size -= r.size
                mtime -= r.mtime
                dimensions = get_delta_dimensions(dimensions, r.dimensions)
        else:
            # This file is the group's reference: show the group stats.
            percentage = group.percentage
            dupe_count = len(group.dupes)
        dupe_folder_path = getattr(self, 'display_folder_path', self.folder_path)
        return {
            'name': self.name,
            'folder_path': str(dupe_folder_path),
            'size': format_size(size, 0, 1, False),
            'extension': self.extension,
            'dimensions': format_dimensions(dimensions),
            'exif_timestamp': self.exif_timestamp,
            'mtime': format_timestamp(mtime, delta and m),
            'percentage': format_perc(percentage),
            'dupe_count': format_dupe_count(dupe_count),
        }

    def _read_info(self, field):
        fs.File._read_info(self, field)
        if field == 'dimensions':
            self.dimensions = self._plat_get_dimensions()
            # EXIF orientations 5-8 denote transposed (90°-rotated) images:
            # swap width/height so displayed dimensions match the rendering.
            if self._get_orientation() in {5, 6, 7, 8}:
                self.dimensions = (self.dimensions[1], self.dimensions[0])
        elif field == 'exif_timestamp':
            try:
                with self.path.open('rb') as fp:
                    exifdata = exif.get_fields(fp)
                self.exif_timestamp = exifdata['DateTimeOriginal']
            except Exception:
                logging.info("Couldn't read EXIF of picture: %s", self.path)

    def get_blocks(self, block_count_per_side):
        """Return color blocks for fuzzy matching, orientation-corrected."""
        return self._plat_get_blocks(block_count_per_side, self._get_orientation())
| 37.5 | 88 | 0.613072 |
import logging
from hscommon.util import get_file_ext, format_size
from core.app import format_timestamp, format_perc, format_dupe_count
from core import fs
from . import exif
def format_dimensions(dimensions):
    """Render a (width, height) pair as e.g. ``'640 x 480'``."""
    return '%d x %d' % (dimensions[0], dimensions[1])
def get_delta_dimensions(value, ref_value):
    """Return the component-wise difference of two (width, height) pairs."""
    return (value[0]-ref_value[0], value[1]-ref_value[1])
class Photo(fs.File):
    """Photo file with lazily-read dimensions and EXIF timestamp; platform
    subclasses implement the ``_plat_*`` decoding hooks."""

    INITIAL_INFO = fs.File.INITIAL_INFO.copy()
    INITIAL_INFO.update({
        'dimensions': (0,0),
        'exif_timestamp': '',
    })
    __slots__ = fs.File.__slots__ + tuple(INITIAL_INFO.keys())
    # Extensions supported on all platforms.
    HANDLED_EXTS = {'png', 'jpg', 'jpeg', 'gif', 'bmp', 'tiff', 'tif'}

    def _plat_get_dimensions(self):
        # Platform hook: return the image's (width, height).
        raise NotImplementedError()

    def _plat_get_blocks(self, block_count_per_side, orientation):
        # Platform hook: return color blocks for fuzzy matching.
        raise NotImplementedError()

    def _get_orientation(self):
        # Cached EXIF Orientation tag value; 0 when EXIF can't be read.
        if not hasattr(self, '_cached_orientation'):
            try:
                with self.path.open('rb') as fp:
                    exifdata = exif.get_fields(fp)
                orientations = exifdata['Orientation']
                self._cached_orientation = orientations[0]
            except Exception:
                self._cached_orientation = 0
        return self._cached_orientation

    @classmethod
    def can_handle(cls, path):
        # fs.File checks plus a photo file extension.
        return fs.File.can_handle(path) and get_file_ext(path[-1]) in cls.HANDLED_EXTS

    def get_display_info(self, group, delta):
        """Return the dict of formatted columns for the results table."""
        size = self.size
        mtime = self.mtime
        dimensions = self.dimensions
        m = group.get_match_of(self)
        if m:
            percentage = m.percentage
            dupe_count = 0
            if delta:
                # Show stats relative to the group's reference file.
                r = group.ref
                size -= r.size
                mtime -= r.mtime
                dimensions = get_delta_dimensions(dimensions, r.dimensions)
        else:
            percentage = group.percentage
            dupe_count = len(group.dupes)
        dupe_folder_path = getattr(self, 'display_folder_path', self.folder_path)
        return {
            'name': self.name,
            'folder_path': str(dupe_folder_path),
            'size': format_size(size, 0, 1, False),
            'extension': self.extension,
            'dimensions': format_dimensions(dimensions),
            'exif_timestamp': self.exif_timestamp,
            'mtime': format_timestamp(mtime, delta and m),
            'percentage': format_perc(percentage),
            'dupe_count': format_dupe_count(dupe_count),
        }

    def _read_info(self, field):
        fs.File._read_info(self, field)
        if field == 'dimensions':
            self.dimensions = self._plat_get_dimensions()
            # Orientations 5-8 are transposed images: swap width/height.
            if self._get_orientation() in {5, 6, 7, 8}:
                self.dimensions = (self.dimensions[1], self.dimensions[0])
        elif field == 'exif_timestamp':
            try:
                with self.path.open('rb') as fp:
                    exifdata = exif.get_fields(fp)
                self.exif_timestamp = exifdata['DateTimeOriginal']
            except Exception:
                logging.info("Couldn't read EXIF of picture: %s", self.path)

    def get_blocks(self, block_count_per_side):
        # Orientation-corrected color blocks for fuzzy matching.
        return self._plat_get_blocks(block_count_per_side, self._get_orientation())
return self._plat_get_blocks(block_count_per_side, self._get_orientation())
| true | true |
f735204ab267854d8c7a3281bc5ce292b2f1e8d0 | 836 | py | Python | compiled/python/imports_circular_b.py | smarek/ci_targets | c5edee7b0901fd8e7f75f85245ea4209b38e0cb3 | [
"MIT"
] | 4 | 2017-04-08T12:55:11.000Z | 2020-12-05T21:09:31.000Z | compiled/python/imports_circular_b.py | smarek/ci_targets | c5edee7b0901fd8e7f75f85245ea4209b38e0cb3 | [
"MIT"
] | 7 | 2018-04-23T01:30:33.000Z | 2020-10-30T23:56:14.000Z | compiled/python/imports_circular_b.py | smarek/ci_targets | c5edee7b0901fd8e7f75f85245ea4209b38e0cb3 | [
"MIT"
] | 6 | 2017-04-08T11:41:14.000Z | 2020-10-30T22:47:31.000Z | # This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
import imports_circular_a
class ImportsCircularB(KaitaiStruct):
    """Kaitai Struct type that may circularly reference ImportsCircularA."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root or self
        self._read()

    def _read(self):
        self.initial = self._io.read_u1()
        # a leading 0x41 ('A') byte introduces a nested type from the
        # circularly-imported module
        if self.initial == 65:
            self.back_ref = imports_circular_a.ImportsCircularA(self._io)
| 32.153846 | 132 | 0.722488 |
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
import imports_circular_a
class ImportsCircularB(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.initial = self._io.read_u1()
if self.initial == 65:
self.back_ref = imports_circular_a.ImportsCircularA(self._io)
| true | true |
f735215a5b29bb7037ea5253477ebe9aadee4cd9 | 879 | py | Python | server/migrations/versions/3bed7b8d8720_shares_relation.py | momikey/liblio | c7ad4fd8d72369358863b90e34f3ed89ddef753c | [
"MIT"
] | null | null | null | server/migrations/versions/3bed7b8d8720_shares_relation.py | momikey/liblio | c7ad4fd8d72369358863b90e34f3ed89ddef753c | [
"MIT"
] | null | null | null | server/migrations/versions/3bed7b8d8720_shares_relation.py | momikey/liblio | c7ad4fd8d72369358863b90e34f3ed89ddef753c | [
"MIT"
] | null | null | null | """Shares relation
Revision ID: 3bed7b8d8720
Revises: b86d7b60fbef
Create Date: 2019-09-30 10:21:56.725664
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3bed7b8d8720'        # this migration's identifier
down_revision = 'b86d7b60fbef'   # previous migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    # Create the many-to-many "shares" relation linking users to posts;
    # the composite primary key prevents duplicate shares.
    op.create_table(
        'shares',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('post_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'post_id'),
    )
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): remove the "shares" relation entirely.
    op.drop_table('shares')
    # ### end Alembic commands ###
| 25.114286 | 65 | 0.675768 | from alembic import op
import sqlalchemy as sa
revision = '3bed7b8d8720'
down_revision = 'b86d7b60fbef'
branch_labels = None
depends_on = None
def upgrade():
'], ['posts.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('user_id', 'post_id')
)
| true | true |
f735219bdf8e165d01f8912a1bab3856ce66114f | 23,430 | py | Python | fast_scroller/h5data.py | miketrumpis/lfp_scroller | ce4dbf85bb4d31f2eacfb5d68a5049499637722c | [
"BSD-3-Clause"
] | null | null | null | fast_scroller/h5data.py | miketrumpis/lfp_scroller | ce4dbf85bb4d31f2eacfb5d68a5049499637722c | [
"BSD-3-Clause"
] | 6 | 2021-10-08T17:27:46.000Z | 2021-12-14T16:29:44.000Z | fast_scroller/h5data.py | miketrumpis/lfp_scroller | ce4dbf85bb4d31f2eacfb5d68a5049499637722c | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
from scipy.linalg import LinAlgError
from scipy.signal import lfilter, lfilter_zi, hilbert
from scipy.interpolate import interp1d
import h5py
from tqdm import tqdm
from ecogdata.util import input_as_2d
from ecogdata.util import nextpow2
def h5mean(array, axis, rowmask=(), start=0, stop=None):
    """Compute the mean of a 2D HDF5 array in blocks.

    Parameters
    ----------
    array: h5py.Dataset
        2D array (rows x time)
    axis: int
        Axis to average over: 0 yields a per-timepoint average across rows,
        1 yields a per-row average across time
    rowmask: sequence
        Boolean mask selecting which rows to include
    start: int
        First time point to include (only honored for the time dimension)
    stop: int
        Stop time point (exclusive); defaults to the full length

    Returns
    -------
    mn: ndarray
        The mean vector (length depends on ``axis``)
    """
    shape = array.shape
    if axis < 0:
        axis += len(shape)
    if stop is None:
        stop = shape[1]
    if axis == 1:
        mn_size = rowmask.sum() if len(rowmask) else shape[0]
    else:
        mn_size = shape[1 - axis]
    mn = np.zeros(mn_size, 'd')
    # For averaging in either dimension, iterate chunks in time.
    # If averaging over rows: fill in the chunk averages along the way.
    # If averaging over time: accumulate the samples (scaled by 1/N).
    itr = H5Chunks(array, axis=1, slices=True)
    for n, sl in tqdm(enumerate(itr), desc='Computing mean', leave=True, total=itr.n_blocks):
        t_sl = sl[1]
        # skip blocks that end before the requested start
        # (removed stray debug print that fired on every skipped block)
        if start >= t_sl.stop:
            continue
        # trim the first usable block at the requested start
        elif start > t_sl.start:
            t_sl = slice(start, t_sl.stop)
            sl = (sl[0], t_sl)
        # stop once blocks begin after the requested stop
        if stop < t_sl.start:
            break
        # trim the last usable block at the requested stop
        elif stop < t_sl.stop:
            t_sl = slice(t_sl.start, stop)
            sl = (sl[0], t_sl)
        x_sl = array[sl]
        if len(rowmask):
            x_sl = x_sl[rowmask]
        if axis == 0:
            mn[sl[1]] = x_sl.mean(0)
        else:
            # NOTE(review): normalization uses the full array length even when
            # start/stop restrict the range -- confirm this is intended
            mn[:] += x_sl.sum(1) / float(array.shape[1])
    return mn
def h5stat(array, fn, rowmask=()):
    """Compute a timeseries of the channel-wise statistic ``fn`` for a 2D HDF5 array, block by block."""
    n_pts = array.shape[1]
    series = np.zeros(n_pts, 'd')
    itr = H5Chunks(array, axis=1, slices=True)
    for _, sl in tqdm(enumerate(itr), desc='Computing series',
                      leave=True, total=itr.n_blocks):
        block = array[sl]
        if len(rowmask):
            block = block[rowmask]
        # fn maps a (channels x time) block to one value per time point
        series[sl[1]] = fn(block)
    return series
class ReadCache(object):
    # TODO -- enable catch for full slicing
    """
    Buffers row reads from a memmap or HDF5 file.

    When array[0, m:n], array[1, m:n], array[2, m:n] are accessed in
    sequence, the full C x (n-m) submatrix is read once and cached, and
    single rows are then served from the cache.  Access like
    array[p:q, m:n] is delegated to the underlying array's __getitem__.
    """

    def __init__(self, array):
        self._array = array
        self._current_slice = None
        self._current_seg = ()
        self.dtype = array.dtype
        self.shape = array.shape

    def __len__(self):
        return len(self._array)

    @property
    def file_array(self):
        # the underlying (uncached) array
        return self._array

    def __getitem__(self, sl):
        row_idx, time_range = sl
        # Slice-type row indexes bypass the cache entirely; other index
        # types (int, list, array) go through the cached segment.
        if isinstance(row_idx, slice):
            return self._array[sl].copy()
        if time_range != self._current_slice:
            self._current_seg = self._array[(slice(None), time_range)]
            self._current_slice = time_range
        # the cached segment already spans the requested time range, so
        # take all of its columns
        return self._current_seg[(row_idx, slice(None))].copy()
class CommonReferenceReadCache(ReadCache):
    """Read cache that serves common-average re-referenced blocks.

    Each cached segment has its across-channel mean subtracted before
    individual rows are returned.
    """

    def __getitem__(self, sl):
        indx, srange = sl
        if isinstance(indx, slice):
            # NOTE(review): slice-row access returns raw (un-referenced)
            # data, matching the original behavior -- confirm intended
            return self._array[sl].copy()
        if self._current_slice != srange:
            all_sl = (slice(None), srange)
            # Promote integer data to double before mean subtraction.
            # (fix: ``self.dtype in np.sctypes['int']`` -- np.sctypes was
            # removed in NumPy 2.0, raising AttributeError)
            if np.issubdtype(self.dtype, np.integer):
                self._current_seg = self._array[all_sl].astype('d')
            else:
                self._current_seg = self._array[all_sl].copy()
            self._current_seg -= self._current_seg.mean(0)
            self._current_slice = srange
        # always return the full range after slicing with possibly
        # complex original range
        new_range = slice(None)
        new_sl = (indx, new_range)
        return self._current_seg[new_sl].copy()
class FilteredReadCache(ReadCache):
    """
    Apply row-by-row filter functions to a ReadCache.

    Parameters
    ----------
    array: array-like
        Underlying (channels x time) array
    filters: callable or sequence of callables
        One filter per row; a single callable is replicated for every row
    """

    def __init__(self, array, filters):
        if not isinstance(filters, (tuple, list)):
            f = filters
            filters = [f] * len(array)
        self.filters = filters
        super(FilteredReadCache, self).__init__(array)

    def __getitem__(self, sl):
        idx = sl[0]
        x = super(FilteredReadCache, self).__getitem__(sl)
        if isinstance(idx, int):
            return self.filters[idx](x)
        # ``x`` already has the row index applied by the parent class, so
        # pair each returned row with its corresponding filter.  (Fix: the
        # previous code re-indexed x/y with ``idx`` and wrote into
        # fancy-indexing copies, so multi-row results were never filtered.)
        if isinstance(idx, slice):
            row_filters = self.filters[idx]
        else:
            row_filters = [self.filters[i] for i in idx]
        y = np.empty_like(x)
        for y_row, x_row, f in zip(y, x, row_filters):
            y_row[:] = f(x_row)
        return y
def _make_subtract(z):
def _f(x):
return x - z
return _f
class DCOffsetReadCache(FilteredReadCache):
    """
    A filtered read cache whose per-row filter subtracts a fixed DC offset.
    """

    def __init__(self, array, offsets):
        # one subtraction closure per row (a plain lambda would late-bind)
        super(DCOffsetReadCache, self).__init__(
            array, [_make_subtract(off) for off in offsets]
        )
        self.offsets = offsets
class H5Chunks(object):
    """Iterates an HDF5 array over whole chunks with ndarray-like access"""

    def __init__(self, h5array, out=None, axis=1, min_chunk=None, slices=False, reverse=False):
        """
        Efficient block iterator for HDF5 arrays (reads whole chunks at a time).

        Parameters
        ----------
        h5array: h5py.Dataset
            Vector timeseries (chan x time) or (time x chan)
        out: h5py.Dataset
            Output array for write-back (may be h5array itself for
            read/write arrays); write-back is disabled when None
        axis: int
            Axis to iterate over
        min_chunk: int
            Grow the block length (in whole-chunk steps) to at least this
        slices: bool
            Yield slice tuples rather than data
        reverse: bool
            Yield blocks (and samples within blocks) back-to-front
        """
        chunk_shape = h5array.chunks
        if len(chunk_shape) > 2:
            raise ValueError('Only iterates for 2D arrays')
        self.h5array = h5array
        while axis < 0:
            axis += len(chunk_shape)
        if chunk_shape[axis] < chunk_shape[1 - axis]:
            print('chunk size larger in other dimension!')
        self.axis = axis
        self.size = h5array.shape[axis]
        self.chunk = chunk_shape[axis]
        if min_chunk is not None:
            # grow the block length in whole-chunk increments
            while self.chunk < min_chunk:
                self.chunk += chunk_shape[axis]
        self.n_blocks, leftover = divmod(self.size, self.chunk)
        if leftover:
            self.n_blocks += 1
        self.reverse = reverse
        self.slices = slices
        self._output_source = out
        # block cursor: starts at the last block when iterating in reverse
        self._cursor = self.n_blocks - 1 if reverse else 0

    def write_out(self, data):
        """Write ``data`` back to the output source at the current block position."""
        if self._output_source is None:
            print('No output defined!')
            return
        if self.reverse:
            # the visible data was reversed on read; un-reverse before writing
            data = data[:, ::-1] if self.axis == 1 else data[::-1, :]
        self._output_source[self._current_sl] = data

    def __iter__(self):
        return self

    def __next__(self):
        if not (0 <= self._cursor < self.n_blocks):
            raise StopIteration()
        b = self._cursor
        rng = slice(b * self.chunk, min(self.size, (b + 1) * self.chunk))
        self._current_sl = (slice(None), rng) if self.axis else (rng, slice(None))
        self._cursor += -1 if self.reverse else 1
        if self.slices:
            return self._current_sl
        block = self.h5array[self._current_sl]
        if self.reverse:
            block = block[:, ::-1] if self.axis == 1 else block[::-1, :]
        return block
class HandOffIter:
    """
    Iterates over several 2D HDF5 arrays with hand-off between files. The hand-off procedure attempts to match
    the DC offsets between signals around the end and beginning of recording edges.

    Presently iterates over axis=1 (time) in the forward direction only.

    Also supports write-back to the currently visible buffer within an iteration.
    """
    # TODO: support reverse iteration

    def __init__(self, arrays, out=None, min_chunk=None, chans=None, blank_points=10):
        """
        Construct hand-off iterator from HDF5 files.

        Parameters
        ----------
        arrays: sequence
            sequence of h5py.Datasets
        out: h5py.Dataset, str
            out may be a pre-created Dataset of the correct size or the path of output file. If out=='same',
            then write-back to the same input files. If None, then there is no output source.
        min_chunk: int
            Ensure the output blocks are greater than this size
        chans: list
            channels to expose on iteration (all by default)
        blank_points: int
            Blank these many points when handing off between files. Fill in +/- blank region with linear
            interpolation between valid points.
        """
        hdf_files = [array.file.filename for array in arrays]
        self.files = hdf_files
        self.arrays = arrays
        rec_lengths = [array.shape[1] for array in arrays]
        chunk_sizes = []
        num_blocks = 0
        # offset matching reads 2000 points past the blanking region, so the
        # block size must cover at least that much
        # todo: fix dumb 2000 pts hard coding
        if min_chunk is None:
            min_chunk = blank_points + 2000
        else:
            min_chunk = max(blank_points + 2000, min_chunk)
        for array in arrays:
            size = array.chunks[1]
            if min_chunk is not None:
                while size < min_chunk:
                    size += array.chunks[1]
            if size > array.shape[1]:
                raise ValueError('Minimum chunk size {} is greater than the length of >=1 arrays'.format(min_chunk))
            chunk_sizes.append(size)
            # todo: is this +1 count correct?
            num_blocks += array.shape[1] // size + 1
        n_chan = arrays[0].shape[0]
        self.n_blocks = num_blocks
        if chans is None:
            chans = slice(None)
        else:
            if not np.iterable(chans):
                chans = (chans,)
            n_chan = len(chans)
        self.total_length = np.sum(rec_lengths)
        self.rec_lengths = rec_lengths
        self.chunk_sizes = chunk_sizes
        # Output status will be checked through the value of self._output_file:
        # if None, do nothing
        # if 'same', write back to input sources
        # else write to self._output_source defined here
        if isinstance(out, str):
            self._output_file = out
            if self._output_file.lower() != 'same':
                hdf = h5py.File(self._output_file, 'w')
                array_name = arrays[0].name.strip('/')
                out = hdf.create_dataset(array_name, shape=(n_chan, self.total_length), dtype='f', chunks=True)
                hdf.create_dataset('break_points', data=np.cumsum(rec_lengths[:-1], dtype='i'))
            self._output_source = out
            self._closing_output = True
        elif out is not None:
            self._output_source = out
            self._output_file = out.file.filename
            self._closing_output = False
        else:
            self._output_file = None
            self._closing_output = False
        self.chans = chans
        self._current_source = 0
        self._current_offset = None
        self._blanking_slice = False
        self._blank_points = blank_points

    def __iter__(self):
        # set up initial offset as the mean(s) in the first file
        self._current_source = self.arrays[0]
        means = self._slice_source(np.s_[self._blank_points:self._blank_points + 2000], offset=False).mean(axis=1)
        if self._output_file == 'same':
            self._output_source = self.arrays[0]
        self._current_source_num = 0
        self._current_offset = means[:, None]
        self._current_step = self.chunk_sizes[0]
        self._input_point = 0
        self._output_point = 0
        # starting on a blanking slice
        self._blanking_slice = True
        self._end_of_iter = False
        return self

    def _slice_source(self, time_slice, offset=True):
        """Slice the current source over selected channels, optionally DC-corrected."""
        if isinstance(self.chans, slice):
            arr = self._current_source[self.chans, time_slice]
        else:
            arr = np.array([self._current_source[c, time_slice] for c in self.chans])
        return arr - self._current_offset if offset else arr

    def _hand_off(self, start):
        """Grab the tail of the current source and switch to the next one.

        Resets the DC offset so the head of the next source lines up with
        the tail of the current one.
        """
        # Right now the current step size will run off the end of the current source.
        # So grab the remainder of this source and hand-off to the next source.
        end_point = self._current_source.shape[1]
        remainder = self._slice_source(np.s_[start:])
        old_mean = remainder.mean(1)[:, None]
        # Use more points if the remainder is short.
        # (fix: was ``np.s[-100:]`` -- AttributeError whenever the remainder
        # had fewer than 100 points)
        if self._current_source.shape[1] - start < 100:
            longer_tail = self._slice_source(np.s_[-100:])
            old_mean = longer_tail.mean(1)[:, None]
        self._current_source_num += 1
        if self._current_source_num >= len(self.files):
            # do not change source or step size, just signal that the end is nigh
            self._end_of_iter = True
        else:
            self._current_source = self.arrays[self._current_source_num]
            self._current_step = self.chunk_sizes[self._current_source_num]
            self._blanking_slice = True
            self._break_point = self._output_point + (end_point - start)
            # get the mean of the first few points in the new source
            new_mean = self._slice_source(np.s_[self._blank_points:self._blank_points + 2000], offset=False).mean(1)
            # this is the offset to move the new mean to the old mean
            self._current_offset = new_mean[:, None] - old_mean
        return remainder

    def write_out(self, data):
        """Write ``data`` back to the configured output at the current position."""
        if self._output_file is None:
            print('No output file defined!')
            return
        elif self._output_file == 'same':
            # this condition means that data came from two sources in a hand-off
            if data.shape[1] > self._input_point:
                # last part is from current source
                self._current_source[:, :self._input_point] = data[:, -self._input_point:]
                # first part is from previous source
                n_prev = data.shape[1] - self._input_point
                prev_source = self.arrays[self._current_source_num - 1]
                prev_source[:, -n_prev:] = data[:, :n_prev]
            else:
                max_n = self._current_source.shape[1]
                start_pt = self._input_point - self._current_step
                stop_pt = min(max_n, self._input_point)
                this_slice = np.s_[:, start_pt:stop_pt]
                self._current_source[this_slice] = data
            return
        # Write this data into the output array at the running output position.
        a = self._output_point
        b = a + data.shape[1]
        self._output_source[:, a:b] = data
        self._output_source.flush()
        self._output_point = b

    def __next__(self):
        if self._end_of_iter:
            if self._closing_output:
                self._output_source.file.close()
            raise StopIteration
        start = self._input_point
        stop = start + self._current_step
        if stop > self._current_source.shape[1]:
            remainder = self._hand_off(start)
            # if the hand-off logic has found end-of-files then simply return the last bit and raise StopIteration
            # next time around
            if self._end_of_iter:
                # advance the input array point counter so that it can be rewound as needed in write_out
                self._input_point += self._current_step
                return remainder
            next_strip = self._slice_source(np.s_[:self._current_step])
            # Blank the seam: ramp linearly from the last valid point of the
            # old source to the first valid point past the blanking region.
            r_weight = np.linspace(0, 1, self._blank_points)
            left_point = remainder[:, -1][:, None]
            right_point = next_strip[:, self._blank_points][:, None]
            next_strip[:, :self._blank_points] = r_weight * right_point + (1 - r_weight) * left_point
            arr_slice = np.c_[remainder, next_strip]
            # next input position is 1X the current step into the new source
            self._input_point = self._current_step
            return arr_slice
        else:
            # easy case: the whole block comes from the current source
            arr_slice = self._slice_source(np.s_[start:stop])
            self._input_point += self._current_step
            if start == 0 and self._current_source_num == 0:
                # just blank the initial points to zero
                arr_slice[:, :self._blank_points] = 0
            return arr_slice
def block_itr_factory(x, **kwargs):
    """Return a block iterator for ``x``.

    A sequence of arrays yields a HandOffIter (keyword arguments it does
    not understand are dropped with a warning); a single array yields an
    H5Chunks iterator.
    """
    if isinstance(x, (tuple, list)):
        if 'axis' in kwargs and kwargs['axis'] == 1:
            # HandOffIter only iterates time (axis=1), so this is implied
            kwargs.pop('axis')
        args = set(kwargs.keys())
        # fix: this set previously listed 'min_chunks' (plural), so the real
        # 'min_chunk' keyword was always dropped before reaching HandOffIter
        extra_args = args - {'out', 'min_chunk', 'chans', 'blank_points'}
        if len(extra_args):
            print('Dropping arguments not (yet) supported for HandOffIter: {}'.format(extra_args))
        supported_args = args - extra_args
        kwargs = dict((k, kwargs[k]) for k in supported_args)
        return HandOffIter(x, **kwargs)
    else:
        return H5Chunks(x, **kwargs)
def bfilter(b, a, x, axis=-1, out=None, filtfilt=False):
    """
    Apply linear filter inplace over array x streaming from disk.

    Parameters
    ----------
    b: ndarray
        polynomial coefs for transfer function numerator
    a: ndarray
        polynomial coefs for transfer function denominator
    x: h5py.Dataset, list
        Either a single or multiple datasets. If multiple, then a HandOffIter will be used to iterate. In this mode,
        if out is given as a string then the full output will be concatenated to a single HDF5 file. Otherwise output
        will be written back to each individual file.
    axis: int
        Array axis to apply filter
    out: h5py.Dataset, str
        Output array (or file name, see details above). If multiple inputs are given, a value of None will be
        converted to 'same'
    filtfilt: bool
        If True, perform zero-phase filtering with the forward-reverse technique

    Returns
    -------
    out: h5py.Dataset
        Output array. Not well defined if using HandOffIter in 'same' output mode
    """
    try:
        zii = lfilter_zi(b, a)
    except LinAlgError:
        # the integrating filter doesn't have valid zi
        zii = np.array([0.0])
    # slicing templates oriented for the filtered (time) axis
    zi_sl = np.s_[None, :] if axis in (-1, 1) else np.s_[:, None]
    xc_sl = np.s_[:, :1] if axis in (-1, 1) else np.s_[:1, :]
    fir_size = len(b)
    if out is None:
        if isinstance(x, (list, tuple)):
            out = 'same'
        else:
            out = x
    itr = block_itr_factory(x, axis=axis, out=out, min_chunk=fir_size)
    for n, xc in tqdm(enumerate(itr), desc='Blockwise filtering',
                      leave=True, total=itr.n_blocks):
        if n == 0:
            # scale the steady-state initial conditions by the first sample
            zi = zii[zi_sl] * xc[xc_sl]
        # carry the filter state zi across block boundaries
        xcf, zi = lfilter(b, a, xc, axis=axis, zi=zi)
        itr.write_out(xcf)
    # hand-off iteration only goes forward, so the reverse (filtfilt) pass
    # is unavailable in that mode
    if isinstance(itr, HandOffIter) or not filtfilt:
        out = itr._output_source
        del xc
        del xcf
        return out
    # Reverse pass for zero-phase filtering: read and write the same out
    # array back-to-front (however it was earlier defined)
    itr = H5Chunks(out, axis=axis, min_chunk=fir_size, out=out, reverse=True)
    for n, xc in tqdm(enumerate(itr), desc='Blockwise filtering (reverse)',
                      leave=True, total=itr.n_blocks):
        if n == 0:
            zi = zii[zi_sl] * xc[xc_sl]
        xcf, zi = lfilter(b, a, xc, axis=axis, zi=zi)
        itr.write_out(xcf)
    del xc
    del xcf
    return out
def passthrough(x, y):
    """Copy x into y block by block with no processing."""
    itr = block_itr_factory(x, axis=1, out=y)
    for block in tqdm(itr, desc='Copying to output', leave=True, total=itr.n_blocks):
        itr.write_out(block)
@input_as_2d(in_arr=(0, 1))
def interpolate_blanked(x, mask, inplace=False, kind='linear'):
    """Replace masked samples in each row by interpolating the unmasked ones."""
    y = x if inplace else x.copy()
    t = np.arange(x.shape[1])
    for row_x, row_y, row_m in zip(x, y, mask):
        good = ~row_m
        # out-of-range points fall back to the mean of the valid samples
        fill = row_x[good].mean()
        f = interp1d(t[good], row_x[good], kind=kind,
                     bounds_error=False, fill_value=fill)
        row_y[row_m] = f(t[row_m])
    return y
def block_nan_filter(x, y, kind='linear'):
    """Copy x into y with NaN samples replaced by row-wise interpolation."""
    itr = block_itr_factory(x, axis=1, out=y)
    for block in tqdm(itr, desc='NaN Filtering', leave=True, total=itr.n_blocks):
        nan_mask = np.isnan(block)
        if nan_mask.any():
            block = interpolate_blanked(block, nan_mask, inplace=True, kind=kind)
        itr.write_out(block)
def square_filter(x, y):
    """Write the elementwise square of x into y, block by block."""
    itr = block_itr_factory(x, axis=1, out=y)
    for block in tqdm(itr, desc='Squaring', leave=True, total=itr.n_blocks):
        itr.write_out(block ** 2)
def abs_filter(x, y):
    """Write the elementwise absolute value of x into y, block by block."""
    itr = block_itr_factory(x, axis=1, out=y)
    for block in tqdm(itr, desc='Rectifying', leave=True, total=itr.n_blocks):
        itr.write_out(np.abs(block))
def hilbert_envelope_filter(x, y):
    """Write the Hilbert envelope (magnitude of the analytic signal) of x into y."""
    itr = block_itr_factory(x, axis=1, out=y)
    for block in tqdm(itr, desc='Hilbert Transform', leave=True, total=itr.n_blocks):
        n = block.shape[1]
        nfft = nextpow2(n)
        # if n is closer to the previous power of two, split this block into
        # two half-length transforms at the smaller FFT size
        if (nfft - n) > (n - nfft / 2):
            half = int(n / 2)
            nfft = int(nfft / 2)
            env_a = np.abs(hilbert(block[..., :half], N=nfft)[..., :half])
            env_b = np.abs(hilbert(block[..., half:], N=nfft)[..., :n - half])
            itr.write_out(np.hstack((env_a, env_b)))
        else:
            itr.write_out(np.abs(hilbert(block, N=nfft)[..., :n]))
| 37.368421 | 117 | 0.590568 | import numpy as np
from scipy.linalg import LinAlgError
from scipy.signal import lfilter, lfilter_zi, hilbert
from scipy.interpolate import interp1d
import h5py
from tqdm import tqdm
from ecogdata.util import input_as_2d
from ecogdata.util import nextpow2
def h5mean(array, axis, rowmask=(), start=0, stop=None):
shape = array.shape
if axis < 0:
axis += len(shape)
if stop is None:
stop = shape[1]
if axis==1:
if len(rowmask):
mn_size = rowmask.sum()
else:
mn_size = shape[0]
else:
mn_size = shape[1 - axis]
mn = np.zeros(mn_size, 'd')
itr = H5Chunks(array, axis=1, slices=True)
for n, sl in tqdm(enumerate(itr), desc='Computing mean', leave=True, total=itr.n_blocks):
t_sl = sl[1]
if start >= t_sl.stop:
print('Continuing')
continue
elif start > t_sl.start:
t_sl = slice(start, t_sl.stop)
sl = (sl[0], t_sl)
if stop < t_sl.start:
break
elif stop < t_sl.stop:
t_sl = slice(t_sl.start, stop)
sl = (sl[0], t_sl)
x_sl = array[sl]
if len(rowmask):
x_sl = x_sl[rowmask]
if axis == 0:
mn[sl[1]] = x_sl.mean(0)
else:
mn[:] += x_sl.sum(1) / float(array.shape[1])
return mn
def h5stat(array, fn, rowmask=()):
shape = array.shape
T = shape[1]
series = np.zeros(T, 'd')
itr = H5Chunks(array, axis=1, slices=True)
for n, sl in tqdm(enumerate(itr), desc='Computing series',
leave=True, total=itr.n_blocks):
x_sl = array[sl]
if len(rowmask):
x_sl = x_sl[rowmask]
series[sl[1]] = fn(x_sl)
return series
class ReadCache(object):
def __init__(self, array):
self._array = array
self._current_slice = None
self._current_seg = ()
self.dtype = array.dtype
self.shape = array.shape
def __len__(self):
return len(self._array)
@property
def file_array(self):
return self._array
def __getitem__(self, sl):
indx, srange = sl
if isinstance(indx, slice):
return self._array[sl].copy()
if self._current_slice != srange:
all_sl = (slice(None), srange)
self._current_seg = self._array[all_sl]
self._current_slice = srange
new_range = slice(None)
new_sl = (indx, new_range)
return self._current_seg[new_sl].copy()
class CommonReferenceReadCache(ReadCache):
def __getitem__(self, sl):
indx, srange = sl
if isinstance(indx, slice):
return self._array[sl].copy()
if self._current_slice != srange:
all_sl = (slice(None), srange)
if self.dtype in np.sctypes['int']:
self._current_seg = self._array[all_sl].astype('d')
else:
self._current_seg = self._array[all_sl].copy()
self._current_seg -= self._current_seg.mean(0)
self._current_slice = srange
new_range = slice(None)
new_sl = (indx, new_range)
return self._current_seg[new_sl].copy()
class FilteredReadCache(ReadCache):
def __init__(self, array, filters):
if not isinstance(filters, (tuple, list)):
f = filters
filters = [ f ] * len(array)
self.filters = filters
super(FilteredReadCache, self).__init__(array)
def __getitem__(self, sl):
idx = sl[0]
x = super(FilteredReadCache, self).__getitem__(sl)
if isinstance(idx, int):
return self.filters[idx]( x )
y = np.empty_like(x)
for x_, y_, f in zip(x[idx], y[idx], self.filters[idx]):
y_[:] = f(x_)
return y
def _make_subtract(z):
def _f(x):
return x - z
return _f
class DCOffsetReadCache(FilteredReadCache):
def __init__(self, array, offsets):
filters = [_make_subtract(off) for off in offsets]
super(DCOffsetReadCache, self).__init__(array, filters)
self.offsets = offsets
class H5Chunks(object):
def __init__(self, h5array, out=None, axis=1, min_chunk=None, slices=False, reverse=False):
chunk = h5array.chunks
if len(chunk) > 2:
raise ValueError('Only iterates for 2D arrays')
self.h5array = h5array
while axis < 0:
axis += len(chunk)
if chunk[axis] < chunk[1-axis]:
print('chunk size larger in other dimension!')
self.axis = axis
self.size = h5array.shape[axis]
self.chunk = chunk[axis]
if min_chunk is not None:
while self.chunk < min_chunk:
self.chunk += chunk[axis]
self.n_blocks = self.size // self.chunk
if self.n_blocks * self.chunk < self.size:
self.n_blocks += 1
self.__it = self.n_blocks - 1 if reverse else 0
self.reverse = reverse
self.slices = slices
self._output_source = out
def write_out(self, data):
if self._output_source is None:
print('No output defined!')
return
if self.reverse:
data = data[:, ::-1] if self.axis == 1 else data[::-1, :]
self._output_source[self._current_sl] = data
def __iter__(self):
return self
def __next__(self):
if self.__it >= self.n_blocks or self.__it < 0:
raise StopIteration()
n = self.__it
rng = slice(n * self.chunk, min(self.size, (n + 1) * self.chunk))
self._current_sl = (slice(None), rng) if self.axis else (rng, slice(None))
if self.reverse:
self.__it -= 1
else:
self.__it += 1
if self.slices:
return self._current_sl
arr = self.h5array[self._current_sl]
if self.reverse:
return arr[:, ::-1] if self.axis == 1 else arr[::-1, :]
return arr
class HandOffIter:
def __init__(self, arrays, out=None, min_chunk=None, chans=None, blank_points=10):
hdf_files = [array.file.filename for array in arrays]
self.files = hdf_files
self.arrays = arrays
rec_lengths = [array.shape[1] for array in arrays]
chunk_sizes = []
num_blocks = 0
if min_chunk is None:
min_chunk = blank_points + 2000
else:
min_chunk = max(blank_points + 2000, min_chunk)
for array in arrays:
size = array.chunks[1]
if min_chunk is not None:
while size < min_chunk:
size += array.chunks[1]
if size > array.shape[1]:
raise ValueError('Minimum chunk size {} is greater than the length of >=1 arrays'.format(min_chunk))
chunk_sizes.append(size)
num_blocks += array.shape[1] // size + 1
n_chan = arrays[0].shape[0]
self.n_blocks = num_blocks
if chans is None:
chans = slice(None)
else:
if not np.iterable(chans):
chans = (chans,)
n_chan = len(chans)
self.total_length = np.sum(rec_lengths)
self.rec_lengths = rec_lengths
self.chunk_sizes = chunk_sizes
if isinstance(out, str):
self._output_file = out
if self._output_file.lower() != 'same':
hdf = h5py.File(self._output_file, 'w')
array_name = arrays[0].name.strip('/')
out = hdf.create_dataset(array_name, shape=(n_chan, self.total_length), dtype='f', chunks=True)
hdf.create_dataset('break_points', data=np.cumsum(rec_lengths[:-1], dtype='i'))
self._output_source = out
self._closing_output = True
elif out is not None:
self._output_source = out
self._output_file = out.file.filename
self._closing_output = False
else:
self._output_file = None
self._closing_output = False
self.chans = chans
self._current_source = 0
self._current_offset = None
self._blanking_slice = False
self._blank_points = blank_points
def __iter__(self):
self._current_source = self.arrays[0]
means = self._slice_source(np.s_[self._blank_points:self._blank_points + 2000], offset=False).mean(axis=1)
if self._output_file == 'same':
self._output_source = self.arrays[0]
self._current_source_num = 0
self._current_offset = means[:, None]
self._current_step = self.chunk_sizes[0]
self._input_point = 0
self._output_point = 0
self._blanking_slice = True
self._end_of_iter = False
return self
def _slice_source(self, time_slice, offset=True):
if isinstance(self.chans, slice):
arr = self._current_source[self.chans, time_slice]
else:
arr = np.array([self._current_source[c, time_slice] for c in self.chans])
return arr - self._current_offset if offset else arr
def _hand_off(self, start):
end_point = self._current_source.shape[1]
remainder = self._slice_source(np.s_[start:])
old_mean = remainder.mean(1)[:, None]
if self._current_source.shape[1] - start < 100:
longer_tail = self._slice_source(np.s[-100:])
old_mean = longer_tail.mean(1)[:, None]
self._current_source_num += 1
if self._current_source_num >= len(self.files):
self._end_of_iter = True
else:
self._current_source = self.arrays[self._current_source_num]
self._current_step = self.chunk_sizes[self._current_source_num]
self._blanking_slice = True
self._break_point = self._output_point + (end_point - start)
new_mean = self._slice_source(np.s_[self._blank_points:self._blank_points + 2000], offset=False).mean(1)
self._current_offset = new_mean[:, None] - old_mean
return remainder
def write_out(self, data):
if self._output_file is None:
print('No output file defined!')
return
elif self._output_file == 'same':
if data.shape[1] > self._input_point:
self._current_source[:, :self._input_point] = data[:, -self._input_point:]
n_prev = data.shape[1] - self._input_point
prev_source = self.arrays[self._current_source_num - 1]
prev_source[:, -n_prev:] = data[:, :n_prev]
else:
max_n = self._current_source.shape[1]
start_pt = self._input_point - self._current_step
stop_pt = min(max_n, self._input_point)
this_slice = np.s_[:, start_pt:stop_pt]
self._current_source[this_slice] = data
return
a = self._output_point
b = a + data.shape[1]
self._output_source[:, a:b] = data
self._output_source.flush()
self._output_point = b
    def __next__(self):
        """Yield the next data block, blending across source hand-offs."""
        if self._end_of_iter:
            if self._closing_output:
                self._output_source.file.close()
            raise StopIteration
        start = self._input_point
        stop = start + self._current_step
        if stop > self._current_source.shape[1]:
            # Current source is exhausted mid-block: grab its remainder and
            # switch to the next source (see _hand_off).
            remainder = self._hand_off(start)
            if self._end_of_iter:
                # No next source: the remainder is the final block.
                self._input_point += self._current_step
                return remainder
            next_strip = self._slice_source(np.s_[:self._current_step])
            # Linearly ramp the first _blank_points samples of the new source
            # from the last sample of the remainder to hide the seam.
            r_weight = np.linspace(0, 1, self._blank_points)
            left_point = remainder[:, -1][:, None]
            right_point = next_strip[:, self._blank_points][:, None]
            next_strip[:, :self._blank_points] = r_weight * right_point + (1 - r_weight) * left_point
            arr_slice = np.c_[remainder, next_strip]
            self._input_point = self._current_step
            return arr_slice
        else:
            arr_slice = self._slice_source(np.s_[start:stop])
            self._input_point += self._current_step
            if start == 0 and self._current_source_num == 0:
                # Zero out the very first _blank_points samples of the run.
                arr_slice[:, :self._blank_points] = 0
            return arr_slice
def block_itr_factory(x, **kwargs):
    """Build a block iterator for ``x``.

    A tuple/list of sources gets a HandOffIter (stitching sources together);
    a single source gets an H5Chunks iterator.
    """
    if not isinstance(x, (tuple, list)):
        return H5Chunks(x, **kwargs)
    # HandOffIter always walks axis 1, so an explicit axis=1 is redundant.
    if kwargs.get('axis') == 1:
        kwargs.pop('axis')
    # NOTE(review): bfilter passes `min_chunk` (singular) which this
    # whitelist drops -- confirm which spelling HandOffIter accepts.
    given = set(kwargs.keys())
    unsupported = given - {'out', 'min_chunks', 'chans', 'blank_points'}
    if unsupported:
        print('Dropping arguments not (yet) supported for HandOffIter: {}'.format(unsupported))
    kwargs = {key: kwargs[key] for key in given - unsupported}
    return HandOffIter(x, **kwargs)
def bfilter(b, a, x, axis=-1, out=None, filtfilt=False):
    """Apply the linear filter (b, a) blockwise along ``axis`` of ``x``.

    Filter state ``zi`` is carried from block to block. With
    ``filtfilt=True`` a second, reversed pass is run for zero-phase
    filtering; this is skipped for HandOffIter input, which only iterates
    forward. ``out`` defaults to in-place filtering ('same' when ``x`` is a
    sequence of sources).
    """
    try:
        zii = lfilter_zi(b, a)
    except LinAlgError:
        # lfilter_zi may fail to solve for steady state; start from rest.
        zii = np.array([0.0])
    # Slices that orient zi / the first sample per-channel for the axis.
    zi_sl = np.s_[None, :] if axis in (-1, 1) else np.s_[:, None]
    xc_sl = np.s_[:, :1] if axis in (-1, 1) else np.s_[:1, :]
    fir_size = len(b)
    if out is None:
        if isinstance(x, (list, tuple)):
            out = 'same'
        else:
            out = x
    itr = block_itr_factory(x, axis=axis, out=out, min_chunk=fir_size)
    for n, xc in tqdm(enumerate(itr), desc='Blockwise filtering',
                      leave=True, total=itr.n_blocks):
        if n == 0:
            # Scale the steady-state initial conditions by the first sample.
            zi = zii[zi_sl] * xc[xc_sl]
        xcf, zi = lfilter(b, a, xc, axis=axis, zi=zi)
        itr.write_out(xcf)
    # presently hand off iteration only goes forward so can't filt-filt
    if isinstance(itr, HandOffIter) or not filtfilt:
        out = itr._output_source
        del xc
        del xcf
        return out
    # Reverse pass over the just-written output for zero-phase filtering.
    itr = H5Chunks(out, axis=axis, min_chunk=fir_size, out=out, reverse=True)
    for n, xc in tqdm(enumerate(itr), desc='Blockwise filtering (reverse)',
                      leave=True, total=itr.n_blocks):
        if n == 0:
            zi = zii[zi_sl] * xc[xc_sl]
        xcf, zi = lfilter(b, a, xc, axis=axis, zi=zi)
        itr.write_out(xcf)
    del xc
    del xcf
    return out
def passthrough(x, y):
    """Copy ``x`` into ``y`` block by block without modifying the data."""
    blocks = block_itr_factory(x, axis=1, out=y)
    for chunk in tqdm(blocks, desc='Copying to output', leave=True, total=blocks.n_blocks):
        blocks.write_out(chunk)
@input_as_2d(in_arr=(0, 1))
def interpolate_blanked(x, mask, inplace=False, kind='linear'):
    """Fill masked samples of each row of ``x`` by interpolation.

    ``mask`` is a boolean array the shape of ``x``. Masked samples are
    replaced via interp1d over the unmasked samples of the same row; points
    outside the supported range get the row's unmasked mean. Returns ``x``
    itself when ``inplace``, otherwise a modified copy.
    """
    y = x if inplace else x.copy()
    t = np.arange(x.shape[1])
    for src_row, dst_row, row_mask in zip(x, y, mask):
        keep = ~row_mask
        fill = src_row[keep].mean()
        f = interp1d(t[keep], src_row[keep], kind=kind,
                     bounds_error=False, fill_value=fill)
        dst_row[row_mask] = f(t[row_mask])
    return y
def block_nan_filter(x, y, kind='linear'):
    """Replace NaN samples of ``x`` by interpolation, writing into ``y``."""
    blocks = block_itr_factory(x, axis=1, out=y)
    for chunk in tqdm(blocks, desc='NaN Filtering', leave=True, total=blocks.n_blocks):
        nan_mask = np.isnan(chunk)
        if nan_mask.any():
            chunk = interpolate_blanked(chunk, nan_mask, inplace=True, kind=kind)
        blocks.write_out(chunk)
def square_filter(x, y):
    """Write the elementwise square of ``x`` into ``y``, block by block."""
    blocks = block_itr_factory(x, axis=1, out=y)
    for chunk in tqdm(blocks, desc='Squaring', leave=True, total=blocks.n_blocks):
        blocks.write_out(np.square(chunk))
def abs_filter(x, y):
    """Rectify ``x`` (elementwise absolute value) into ``y``, block by block."""
    blocks = block_itr_factory(x, axis=1, out=y)
    for chunk in tqdm(blocks, desc='Rectifying', leave=True, total=blocks.n_blocks):
        blocks.write_out(np.absolute(chunk))
def hilbert_envelope_filter(x, y):
    """Write the Hilbert envelope |H(x)| of each block of ``x`` into ``y``."""
    blocks = block_itr_factory(x, axis=1, out=y)
    for chunk in tqdm(blocks, desc='Hilbert Transform', leave=True, total=blocks.n_blocks):
        n = chunk.shape[1]
        nfft = nextpow2(n)
        # If zero-padding up to nfft overshoots by more than halving the FFT
        # would undershoot, transform the two halves of the block at nfft/2.
        if (nfft - n) > (n - nfft / 2):
            half = int(n / 2)
            nfft = int(nfft / 2)
            env_a = hilbert(chunk[..., :half], N=nfft)[..., :half]
            env_b = hilbert(chunk[..., half:], N=nfft)[..., :n - half]
            blocks.write_out(np.hstack((np.abs(env_a), np.abs(env_b))))
        else:
            env = hilbert(chunk, N=nfft)[..., :n]
            blocks.write_out(np.abs(env))
| true | true |
f73522a2af911d1515d12f72fa0d2667a186b3b9 | 1,251 | py | Python | cyberhead/modules/brokers/coinbase/Coinbase.py | fakecoinbase/TheCyberHeadslashCyberHead | aac5bdaeab75d7ed42cb9aa3e316b3af55d68817 | [
"Unlicense"
] | 4 | 2019-10-25T05:37:32.000Z | 2019-10-25T07:20:44.000Z | cyberhead/modules/brokers/coinbase/Coinbase.py | Seburath/CyberHead | b1c5d8c157ff5bb976778ff5f7901d82e41d7d3e | [
"Unlicense"
] | 9 | 2021-03-11T02:56:42.000Z | 2022-03-12T00:43:13.000Z | cyberhead/modules/brokers/coinbase/Coinbase.py | Seburath/CyberHead | b1c5d8c157ff5bb976778ff5f7901d82e41d7d3e | [
"Unlicense"
] | 1 | 2020-11-23T09:37:25.000Z | 2020-11-23T09:37:25.000Z | import cbpro
import pandas as pd
from base64 import b64encode
class Coinbase:
    """Thin wrapper around the authenticated Coinbase Pro (cbpro) REST client.

    Defaults to the public sandbox environment; pass the production API URL
    as ``ENV_URL`` to trade live.
    """
    def __init__(self, API_KEY, API_SECRET, API_PASS, ENV_URL="https://api-public.sandbox.pro.coinbase.com"):
        self.API_KEY = API_KEY
        self.API_SECRET = API_SECRET
        self.API_PASS = API_PASS
        self.ENV_URL = ENV_URL
        self.client = cbpro.AuthenticatedClient(self.API_KEY, self.API_SECRET, self.API_PASS, api_url=self.ENV_URL)
    def auth(self):
        print('Authenticating Coinbase')
    def place_market(self, action, ticker, amount):
        """Place a market order ('buy'/'sell') funded with ``amount`` of quote currency."""
        order = self.client.place_market_order(
            product_id=ticker,
            side=action,
            funds=amount
        )
        # BUG FIX: previously `return place_market`, an undefined name at
        # runtime (NameError); return the API response instead.
        return order
    def place_limit_order(self, action, ticker, entry_price, size):
        """Place a limit order and return the API response."""
        entry_order = self.client.place_limit_order(product_id=ticker,
                                    side=action,
                                    price=entry_price,
                                    size=size)
        print(entry_order)
        return entry_order
    def get_accounts(self):
        return self.client.get_accounts()
    def orders(self):
        return self.client.get_orders()
    def fills(self):
        return self.client.get_fills()
    def historical_rates(self, ticker: str):
        """Return daily OHLCV candles for ``ticker`` as a DataFrame."""
        rates = self.client.get_product_historic_rates(ticker, granularity=86400)
        df = pd.DataFrame(rates, columns=["time","low","high","open","close","volume"])
        return df
| 27.8 | 109 | 0.741807 | import cbpro
import pandas as pd
from base64 import b64encode
class Coinbase:
    """Thin wrapper around the authenticated Coinbase Pro (cbpro) REST client.

    Defaults to the public sandbox environment; pass the production API URL
    as ``ENV_URL`` to trade live.
    """
    def __init__(self, API_KEY, API_SECRET, API_PASS, ENV_URL="https://api-public.sandbox.pro.coinbase.com"):
        self.API_KEY = API_KEY
        self.API_SECRET = API_SECRET
        self.API_PASS = API_PASS
        self.ENV_URL = ENV_URL
        self.client = cbpro.AuthenticatedClient(self.API_KEY, self.API_SECRET, self.API_PASS, api_url=self.ENV_URL)
    def auth(self):
        print('Authenticating Coinbase')
    def place_market(self, action, ticker, amount):
        """Place a market order ('buy'/'sell') funded with ``amount`` of quote currency."""
        order = self.client.place_market_order(
            product_id=ticker,
            side=action,
            funds=amount
        )
        # BUG FIX: previously `return place_market`, an undefined name at
        # runtime (NameError); return the API response instead.
        return order
    def place_limit_order(self, action, ticker, entry_price, size):
        """Place a limit order and return the API response."""
        entry_order = self.client.place_limit_order(product_id=ticker,
                                    side=action,
                                    price=entry_price,
                                    size=size)
        print(entry_order)
        return entry_order
    def get_accounts(self):
        return self.client.get_accounts()
    def orders(self):
        return self.client.get_orders()
    def fills(self):
        return self.client.get_fills()
    def historical_rates(self, ticker: str):
        """Return daily OHLCV candles for ``ticker`` as a DataFrame."""
        rates = self.client.get_product_historic_rates(ticker, granularity=86400)
        df = pd.DataFrame(rates, columns=["time","low","high","open","close","volume"])
        return df
| true | true |
f73523b2b99ace61784f26d0fe49a2259344d2d3 | 636 | py | Python | setup.py | ilfrich/frappy-py-mongo-content-store | aec7d72f2b1759ade7881abb69358b49cdc8aa02 | [
"Apache-2.0"
] | null | null | null | setup.py | ilfrich/frappy-py-mongo-content-store | aec7d72f2b1759ade7881abb69358b49cdc8aa02 | [
"Apache-2.0"
] | null | null | null | setup.py | ilfrich/frappy-py-mongo-content-store | aec7d72f2b1759ade7881abb69358b49cdc8aa02 | [
"Apache-2.0"
] | null | null | null | from setuptools import setup
# Read the long description from the README so PyPI can render it.
# FIX: specify the encoding explicitly -- README.md is UTF-8 and the
# platform default encoding may differ (e.g. cp1252 on Windows).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
setup(name="frappymongocontent",
      version="1.0.0",
      description="Store Implementation for Content in MongoDB",
      long_description=long_description,
      long_description_content_type="text/markdown",
      url="https://github.com/ilfrich/frappy-py-mongo-content-store",
      author="Peter Ilfrich",
      author_email="das-peter@gmx.de",
      packages=[
          "frappymongocontent"
      ],
      install_requires=[
          "pbu",
      ],
      tests_require=[
          "pytest",
      ],
      zip_safe=False)
| 26.5 | 69 | 0.622642 | from setuptools import setup
# Read the long description from the README so PyPI can render it.
# FIX: specify the encoding explicitly -- README.md is UTF-8 and the
# platform default encoding may differ (e.g. cp1252 on Windows).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
setup(name="frappymongocontent",
      version="1.0.0",
      description="Store Implementation for Content in MongoDB",
      long_description=long_description,
      long_description_content_type="text/markdown",
      url="https://github.com/ilfrich/frappy-py-mongo-content-store",
      author="Peter Ilfrich",
      author_email="das-peter@gmx.de",
      packages=[
          "frappymongocontent"
      ],
      install_requires=[
          "pbu",
      ],
      tests_require=[
          "pytest",
      ],
      zip_safe=False)
| true | true |
f73523dae23b1abc06f1abc0e7276a52c1a0a20e | 4,681 | py | Python | Track1.2/configs/recognition/csn/csn_loveu2_train.py | VisualAnalysisOfHumans/LOVEU_TRACK1_TOP3_SUBMISSION | 6f4d1c7e6883d6b0664fcd04265f437247afab54 | [
"MIT"
] | 1 | 2021-06-25T06:43:29.000Z | 2021-06-25T06:43:29.000Z | Track1.2/configs/recognition/csn/csn_loveu2_train.py | VisualAnalysisOfHumans/LOVEU_TRACK1_TOP3_SUBMISSION | 6f4d1c7e6883d6b0664fcd04265f437247afab54 | [
"MIT"
] | 1 | 2022-01-11T02:35:57.000Z | 2022-01-11T02:35:57.000Z | Track1.2/configs/recognition/csn/csn_loveu2_train.py | VisualAnalysisOfHumans/LOVEU_TRACK1_TOP3_SUBMISSION | 6f4d1c7e6883d6b0664fcd04265f437247afab54 | [
"MIT"
] | 1 | 2022-01-12T01:55:52.000Z | 2022-01-12T01:55:52.000Z | # model settings
# Recognizer: IR-CSN (channel-separated network, bottleneck_mode='ir') with a
# ResNet-152 backbone initialised from IG-65M pretraining and a 2-class head.
model = dict(
    type='Recognizer3D',
    backbone=dict(
        type='ResNet3dCSN',
        pretrained2d=False,
        pretrained= # noqa: E251
        '/home/notebook/data/personal/mmaction2_task1/data/ircsn_from_scratch_r152_ig65m_20200807-771c4135.pth', # noqa: E501
        depth=152,
        with_pool2=False,
        bottleneck_mode='ir',
        norm_eval=True,   # keep BN layers in eval mode during training
        bn_frozen=True,   # and freeze their parameters
        zero_init_residual=False),
    cls_head=dict(
        type='I3DHead',
        num_classes=2,    # two classes (see the *_2cls_* annotation lists)
        in_channels=2048,
        spatial_type='avg',
        dropout_ratio=0.5,
        init_std=0.01))
# model training and testing settings
train_cfg = None
test_cfg = dict(average_clips='prob')
# dataset settings -- data roots are empty; presumably the annotation files
# carry full video paths (TODO confirm).
dataset_type = 'VideoDataset'
data_root = '' #'data/kinetics400/rawframes_train'
data_root_val = '' #'data/kinetics400/rawframes_val'
ann_file_train = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_train_annotation_valid.txt'
ann_file_val = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_val_annotation_valid.txt'
ann_file_test = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_val_annotation_valid.txt'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
# Training: 16 frames at stride 2, random resized crop, resize to 224x224.
train_pipeline = [
    dict(type='DecordInit'),
    dict(type='SampleFrames', clip_len=16, frame_interval=2, num_clips=1),
    #dict(type='FrameSelector'),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='RandomResizedCrop', area_range=(0.7, 1.0), aspect_ratio_range=(1.0, 4/3)),
    dict(type='Resize', scale=(224, 224), keep_ratio=False),
    dict(type='Flip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs', 'label'])
]
# Validation: deterministic sampling with a 224 center crop, no flipping.
val_pipeline = [
    dict(type='DecordInit'),
    dict(
        type='SampleFrames',
        clip_len=16,
        frame_interval=2,
        num_clips=1,
        test_mode=True),
    #dict(type='FrameSelector'),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='CenterCrop', crop_size=224),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
# Test: ThreeCrop at 256 for spatial test-time augmentation.
test_pipeline = [
    dict(type='DecordInit'),
    dict(
        type='SampleFrames',
        clip_len=16,
        frame_interval=2,
        num_clips=1,
        test_mode=True),
    #dict(type='FrameSelector'),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='ThreeCrop', crop_size=256),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
data = dict(
    videos_per_gpu=6, #3
    workers_per_gpu=4, #4
    train=dict(
        type=dataset_type,
        ann_file=ann_file_train,
        data_prefix=data_root,
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=ann_file_val,
        data_prefix=data_root_val,
        pipeline=val_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=ann_file_val,
        data_prefix=data_root_val,
        pipeline=test_pipeline))
# optimizer
optimizer = dict(
    type='SGD', lr=0.00025, momentum=0.9,
    weight_decay=0.0001) # this lr is used for 8 gpus
optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))
# learning policy: step decay at epochs 32/48 with 16 epochs of linear warmup
lr_config = dict(
    policy='step',
    step=[32, 48],
    warmup='linear',
    warmup_ratio=0.1,
    warmup_by_epoch=True,
    warmup_iters=16)
total_epochs = 61
checkpoint_config = dict(interval=2)
# evaluation interval (62) exceeds total_epochs, so in-training evaluation
# is effectively disabled.
evaluation = dict(
    interval=62, metrics=['top_k_accuracy', 'mean_class_accuracy']) #'top_k_accuracy', 'mean_class_accuracy'])
log_config = dict(
    interval=20,
    hooks=[dict(type='TextLoggerHook'),
           dict(type='TensorboardLoggerHook')])
# runtime settings
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb' # noqa: E501
load_from = None #'/home/notebook/data/personal/mmaction2_sn_test/work_dirs/csn_sn15/epoch_20.pth'
resume_from = '/home/notebook/data/personal/loveu/mmaction2/work_dirs/loveu_2cls_1500ms_csn/epoch_20.pth'
workflow = [('train', 1)]
find_unused_parameters = True
| 35.732824 | 126 | 0.674001 |
# Recognizer: IR-CSN (channel-separated network, bottleneck_mode='ir') with a
# ResNet-152 backbone initialised from IG-65M pretraining and a 2-class head.
model = dict(
    type='Recognizer3D',
    backbone=dict(
        type='ResNet3dCSN',
        pretrained2d=False,
        pretrained=
        '/home/notebook/data/personal/mmaction2_task1/data/ircsn_from_scratch_r152_ig65m_20200807-771c4135.pth',
        depth=152,
        with_pool2=False,
        bottleneck_mode='ir',
        norm_eval=True,   # keep BN layers in eval mode during training
        bn_frozen=True,   # and freeze their parameters
        zero_init_residual=False),
    cls_head=dict(
        type='I3DHead',
        num_classes=2,    # two classes (see the *_2cls_* annotation lists)
        in_channels=2048,
        spatial_type='avg',
        dropout_ratio=0.5,
        init_std=0.01))
# model training and testing settings
train_cfg = None
test_cfg = dict(average_clips='prob')
# dataset settings -- data roots are empty; presumably the annotation files
# carry full video paths (TODO confirm).
dataset_type = 'VideoDataset'
data_root = ''
data_root_val = ''
ann_file_train = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_train_annotation_valid.txt'
ann_file_val = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_val_annotation_valid.txt'
ann_file_test = '/home/notebook/data/personal/loveu/data/loveu_wide_2cls_val_annotation_valid.txt'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False)
# Training: 16 frames at stride 2, random resized crop, resize to 224x224.
train_pipeline = [
    dict(type='DecordInit'),
    dict(type='SampleFrames', clip_len=16, frame_interval=2, num_clips=1),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='RandomResizedCrop', area_range=(0.7, 1.0), aspect_ratio_range=(1.0, 4/3)),
    dict(type='Resize', scale=(224, 224), keep_ratio=False),
    dict(type='Flip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs', 'label'])
]
# Validation: deterministic sampling with a 224 center crop, no flipping.
val_pipeline = [
    dict(type='DecordInit'),
    dict(
        type='SampleFrames',
        clip_len=16,
        frame_interval=2,
        num_clips=1,
        test_mode=True),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='CenterCrop', crop_size=224),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
# Test: ThreeCrop at 256 for spatial test-time augmentation.
test_pipeline = [
    dict(type='DecordInit'),
    dict(
        type='SampleFrames',
        clip_len=16,
        frame_interval=2,
        num_clips=1,
        test_mode=True),
    dict(type='DecordDecode'),
    dict(type='Resize', scale=(342, 256), keep_ratio=False),
    dict(type='ThreeCrop', crop_size=256),
    dict(type='Flip', flip_ratio=0),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='FormatShape', input_format='NCTHW'),
    dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]),
    dict(type='ToTensor', keys=['imgs'])
]
data = dict(
    videos_per_gpu=6,
    workers_per_gpu=4,
    train=dict(
        type=dataset_type,
        ann_file=ann_file_train,
        data_prefix=data_root,
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=ann_file_val,
        data_prefix=data_root_val,
        pipeline=val_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=ann_file_val,
        data_prefix=data_root_val,
        pipeline=test_pipeline))
# optimizer (this lr is for the original multi-GPU setup)
optimizer = dict(
    type='SGD', lr=0.00025, momentum=0.9,
    weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2))
# learning policy: step decay at epochs 32/48 with 16 epochs of linear warmup
lr_config = dict(
    policy='step',
    step=[32, 48],
    warmup='linear',
    warmup_ratio=0.1,
    warmup_by_epoch=True,
    warmup_iters=16)
total_epochs = 61
checkpoint_config = dict(interval=2)
# evaluation interval (62) exceeds total_epochs, so in-training evaluation
# is effectively disabled.
evaluation = dict(
    interval=62, metrics=['top_k_accuracy', 'mean_class_accuracy'])
log_config = dict(
    interval=20,
    hooks=[dict(type='TextLoggerHook'),
           dict(type='TensorboardLoggerHook')])
# runtime settings
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb'
load_from = None
resume_from = '/home/notebook/data/personal/loveu/mmaction2/work_dirs/loveu_2cls_1500ms_csn/epoch_20.pth'
workflow = [('train', 1)]
find_unused_parameters = True
| true | true |
f7352415b74aec839494235d20723e8458558e4f | 1,367 | py | Python | sdk/python/pulumi_kubernetes/rbac/v1/ClusterRoleList.py | rosskevin/pulumi-kubernetes | e4fa04b13a20929c879aca1bbe58fb5a95d16f7c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_kubernetes/rbac/v1/ClusterRoleList.py | rosskevin/pulumi-kubernetes | e4fa04b13a20929c879aca1bbe58fb5a95d16f7c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_kubernetes/rbac/v1/ClusterRoleList.py | rosskevin/pulumi-kubernetes | e4fa04b13a20929c879aca1bbe58fb5a95d16f7c | [
"Apache-2.0"
] | null | null | null | import pulumi
import pulumi.runtime
from ... import tables
class ClusterRoleList(pulumi.CustomResource):
    """
    ClusterRoleList is a collection of ClusterRoles
    """
    def __init__(self, __name__, __opts__=None, items=None, metadata=None):
        """Register a ClusterRoleList resource.

        :param __name__: resource name used for URN creation (required).
        :param __opts__: optional pulumi.ResourceOptions.
        :param items: list of ClusterRoles (required).
        :param metadata: standard list metadata (optional).
        """
        if not __name__:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(__name__, str):
            raise TypeError('Expected resource name to be a string')
        if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        # Assemble the Kubernetes object fields for resource registration.
        __props__ = dict()
        __props__['apiVersion'] = 'rbac.authorization.k8s.io/v1'
        __props__['kind'] = 'ClusterRoleList'
        if items is None:
            raise TypeError('Missing required property items')
        __props__['items'] = items
        __props__['metadata'] = metadata
        super(ClusterRoleList, self).__init__(
            "kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleList",
            __name__,
            __props__,
            __opts__)
    def translate_output_property(self, prop: str) -> str:
        # Look up prop in the forward casing table; fall back to the
        # name unchanged when there is no mapping.
        return tables._CASING_FORWARD_TABLE.get(prop) or prop
    def translate_input_property(self, prop: str) -> str:
        # Inverse of translate_output_property, via the backward table.
        return tables._CASING_BACKWARD_TABLE.get(prop) or prop
| 35.973684 | 89 | 0.66496 | import pulumi
import pulumi.runtime
from ... import tables
class ClusterRoleList(pulumi.CustomResource):
    """ClusterRoleList is a collection of ClusterRoles."""
    def __init__(self, __name__, __opts__=None, items=None, metadata=None):
        """Register a ClusterRoleList resource.

        :param __name__: resource name used for URN creation (required).
        :param __opts__: optional pulumi.ResourceOptions.
        :param items: list of ClusterRoles (required).
        :param metadata: standard list metadata (optional).
        """
        if not __name__:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(__name__, str):
            raise TypeError('Expected resource name to be a string')
        if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        # Assemble the Kubernetes object fields for resource registration.
        __props__ = dict()
        __props__['apiVersion'] = 'rbac.authorization.k8s.io/v1'
        __props__['kind'] = 'ClusterRoleList'
        if items is None:
            raise TypeError('Missing required property items')
        __props__['items'] = items
        __props__['metadata'] = metadata
        super(ClusterRoleList, self).__init__(
            "kubernetes:rbac.authorization.k8s.io/v1:ClusterRoleList",
            __name__,
            __props__,
            __opts__)
    def translate_output_property(self, prop: str) -> str:
        # Look up prop in the forward casing table; fall back to the
        # name unchanged when there is no mapping.
        return tables._CASING_FORWARD_TABLE.get(prop) or prop
    def translate_input_property(self, prop: str) -> str:
        # Inverse of translate_output_property, via the backward table.
        return tables._CASING_BACKWARD_TABLE.get(prop) or prop
| true | true |
f7352607be115758a5676f711cf159ffb55b1c11 | 18,174 | py | Python | 3rd_check/surgery/penalty.py | jdlaubrie/shell-elem | f87cb9ca9179533d3a645a494e7ef4d39666ddc6 | [
"MIT"
] | null | null | null | 3rd_check/surgery/penalty.py | jdlaubrie/shell-elem | f87cb9ca9179533d3a645a494e7ef4d39666ddc6 | [
"MIT"
] | null | null | null | 3rd_check/surgery/penalty.py | jdlaubrie/shell-elem | f87cb9ca9179533d3a645a494e7ef4d39666ddc6 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
NbrOfNodes = 35  # rows of nodal data following each 'TIME' header in the .rsn files
# Header lines identifying the frames extracted below.  They are used as dict
# keys for the parsed frames, so they must match the solver output exactly
# (including the leading space).
keygnra = ' TIME: GANDRA STEP: 80.000 FRAME: 1.000'
keystent = ' TIME: STENT STEP: 1.000 FRAME: 1.000'
keygnrb = ' TIME: GANDRB STEP: 100.000 FRAME: 1.000'
# File for gain parameter 01
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
# Parse the solver result file: each 'TIME' header line is followed by
# NbrOfNodes rows of whitespace-separated nodal values (7 columns).
# FIX: the file handle was never closed; use a context manager.
with open('surgery_p7.rsn', 'r') as file_g01:
    gain01 = file_g01.readlines()
g01 = pd.Series(gain01)
g01 = g01.replace(r'\n','', regex=True)
g01 = g01.replace(r'\r\n','', regex=True)
g01 = g01.replace(r'\r','', regex=True)
index_Time_g01 = g01[g01.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g01 = index_Time_g01.index.values
#--------------------------------------------------------------------------
# One (NbrOfNodes x 7) array per frame, keyed by its 'TIME' header line.
G01 = {}
for idx in index_Time_g01.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g01[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ',expand=True)
    # FIX: removed a duplicated np.array(...) expression whose result
    # was computed and immediately discarded.
    G01[g01[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time: first node of every frame (sorted by column 0 below)
Data_g01 = np.array([], dtype=np.int64)
Data_g01.shape = (-1, 7)
for key in sorted(G01.keys()):
    Data_g01 = np.append(Data_g01,[G01[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g01_gnra = np.array([], dtype=np.int64)
Data_g01_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g01_gnra = np.append(Data_g01_gnra,[G01[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g01_stent = np.array([], dtype=np.int64)
Data_g01_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g01_stent = np.append(Data_g01_stent,[G01[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g01_gnrb = np.array([], dtype=np.int64)
Data_g01_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g01_gnrb = np.append(Data_g01_gnrb,[G01[keygnrb][node,:]], axis=0)
Data_g01=Data_g01[np.argsort(Data_g01[:,0])]
#--------------------------------------------------------------------------
# File for gain parameter 02
#--------------------------------------------------------------------------
# Same parsing as for gain parameter 01, on the reference case file.
# FIX: the file handle was never closed; use a context manager.
with open('surgery_ref.rsn', 'r') as file_g02:
    gain02 = file_g02.readlines()
g02 = pd.Series(gain02)
g02 = g02.replace(r'\n','', regex=True)
g02 = g02.replace(r'\r\n','', regex=True)
g02 = g02.replace(r'\r','', regex=True)
index_Time_g02 = g02[g02.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g02 = index_Time_g02.index.values
#--------------------------------------------------------------------------
# One (NbrOfNodes x 7) array per frame, keyed by its 'TIME' header line.
G02 = {}
for idx in index_Time_g02.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g02[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ',expand=True)
    # FIX: removed a duplicated np.array(...) expression whose result
    # was computed and immediately discarded.
    G02[g02[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time: first node of every frame (sorted by column 0 below)
Data_g02 = np.array([], dtype=np.int64)
Data_g02.shape = (-1, 7)
for key in sorted(G02.keys()):
    Data_g02 = np.append(Data_g02,[G02[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g02_gnra = np.array([], dtype=np.int64)
Data_g02_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g02_gnra = np.append(Data_g02_gnra,[G02[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g02_stent = np.array([], dtype=np.int64)
Data_g02_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g02_stent = np.append(Data_g02_stent,[G02[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g02_gnrb = np.array([], dtype=np.int64)
Data_g02_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g02_gnrb = np.append(Data_g02_gnrb,[G02[keygnrb][node,:]], axis=0)
Data_g02=Data_g02[np.argsort(Data_g02[:,0])]
#--------------------------------------------------------------------------
# File for gain parameter 03
#--------------------------------------------------------------------------
# Same parsing as for gain parameter 01.
# FIX: the file handle was never closed; use a context manager.
with open('surgery_p9.rsn', 'r') as file_g03:
    gain03 = file_g03.readlines()
g03 = pd.Series(gain03)
g03 = g03.replace(r'\n','', regex=True)
g03 = g03.replace(r'\r\n','', regex=True)
g03 = g03.replace(r'\r','', regex=True)
index_Time_g03 = g03[g03.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g03 = index_Time_g03.index.values
#--------------------------------------------------------------------------
# One (NbrOfNodes x 7) array per frame, keyed by its 'TIME' header line.
G03 = {}
for idx in index_Time_g03.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g03[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ',expand=True)
    # FIX: removed a duplicated np.array(...) expression whose result
    # was computed and immediately discarded.
    G03[g03[idx]]=np.array(tmp_df.values, dtype=float)
#every mesh along time: first node of every frame (sorted by column 0 below)
Data_g03 = np.array([], dtype=np.int64)
Data_g03.shape = (-1, 7)
for key in sorted(G03.keys()):
    Data_g03 = np.append(Data_g03,[G03[key][0,:]], axis=0)
#mesh for this particular key GNRA
Data_g03_gnra = np.array([], dtype=np.int64)
Data_g03_gnra.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g03_gnra = np.append(Data_g03_gnra,[G03[keygnra][node,:]], axis=0)
#mesh for this particular key STENT
Data_g03_stent = np.array([], dtype=np.int64)
Data_g03_stent.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g03_stent = np.append(Data_g03_stent,[G03[keystent][node,:]], axis=0)
#mesh for this particular key GNRB
Data_g03_gnrb = np.array([], dtype=np.int64)
Data_g03_gnrb.shape = (-1, 7)
for node in range(NbrOfNodes):
    Data_g03_gnrb = np.append(Data_g03_gnrb,[G03[keygnrb][node,:]], axis=0)
Data_g03=Data_g03[np.argsort(Data_g03[:,0])]
#--------------------------------------------------------------------------
#--------------------------------------------------------------------------
# 4x3 comparison figure. Rows: time history, GNRA mesh, STENT mesh, GNRB
# mesh. Columns: axial stress, circumferential stress, radius. One curve
# per penalty value. The twelve formerly hand-copied subplot stanzas are
# generated from the row/column tables below; pixel-level output is
# unchanged.
fig = plt.figure()
plt.rcParams.update({'font.size': 5})
plt.rc('text', usetex=False)

# (colour, legend label) per run, in plotting order g01 / g02 / g03.
_SERIES = (('b', 'Penalty=1*10^7'), ('r', 'Penalty=1*10^5'), ('g', 'Penalty=1*10^9'))

def _panel(position, datasets, x_of, y_of, xlabel, ylabel, letter, limits, legend=False):
    """Draw one subplot: three curves plus in-axes labels and a letter tag."""
    plt.subplot(4, 3, position)
    for data, (colour, label) in zip(datasets, _SERIES):
        plt.plot(x_of(data), y_of(data), colour, label=label, linewidth=1.0, markersize=10)
    plt.text(0.5, 0.05, xlabel, {'color': 'k', 'fontsize': 6},
             ha='center', va='center', clip_on=False, transform=plt.gca().transAxes)
    plt.text(0.05, 0.5, ylabel, {'color': 'k', 'fontsize': 6},
             ha='left', va='center', rotation=90, clip_on=False, transform=plt.gca().transAxes)
    plt.text(0.95, 0.95, letter, {'color': 'k', 'fontsize': 6,
             'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
             ha='right', va='top', transform=plt.gca().transAxes)
    if legend:
        plt.legend(loc='center right')
    plt.axis(limits)

# Column definitions: y extractor (Pa -> kPa or m -> mm), y label, y limits.
_COLUMNS = (
    (lambda d: d[:, 4] / 1000.0, 'Axial Stress [kPa]', (0, 150)),
    (lambda d: d[:, 5] / 1000.0, 'Circumferential Stress [kPa]', (0, 350)),
    (lambda d: d[:, 3] * 1000.0, 'Radius [mm]', (10, 13)),
)
# Row definitions: datasets (g01/g02/g03), x extractor, x label, x maximum.
_ROWS = (
    ((Data_g01, Data_g02, Data_g03),
     lambda d: d[:, 0], 'Time [months]', 180),
    ((Data_g01_gnra, Data_g02_gnra, Data_g03_gnra),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
    ((Data_g01_stent, Data_g02_stent, Data_g03_stent),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
    ((Data_g01_gnrb, Data_g02_gnrb, Data_g03_gnrb),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
)
position = 0
for datasets, x_of, xlabel, xmax in _ROWS:
    for y_of, ylabel, (ymin, ymax) in _COLUMNS:
        position += 1
        _panel(position, datasets, x_of, y_of, xlabel, ylabel,
               chr(ord('a') + position - 1),     # panel tags 'a'..'l'
               [0, xmax, ymin, ymax],
               legend=(position == 2))           # legend only on panel 'b', as before
fig.tight_layout()
FIGURENAME = 'penalty.eps'
# Fixes vs. the original tail: 'plt.show' lacked parentheses (a no-op
# attribute access), the figure was written twice, and the second savefig
# call passed frameon=None, a keyword removed in matplotlib >= 3.3.
plt.savefig(FIGURENAME)
plt.close('all')
"""
#--------------------------------------------------------------------------
radii = (Data_g02[-1,3]*1000.0, Data_g01[-1,3]*1000.0, Data_g03[-1,3]*1000.0)
fig, ax = plt.subplots()
index = np.arange(3)
bar_width = 0.45
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects1 = ax.bar(index, radii, bar_width,
alpha=opacity, color='b',
error_kw=error_config, label='Penalty')
ax.set_xlabel('Penalty')
ax.set_ylabel('Radius [mm]')
ax.set_xticks(index + bar_width / 2)
ax.set_xticklabels(('1e5', '1e7', '1e9'))
plt.axis([-0.25,2.7,0,20])
fig.tight_layout()
plt.show
FIGURENAME = 'sensitivity_penalty.eps'
plt.savefig(FIGURENAME)
plt.savefig(fname=FIGURENAME,
dpi=None,
facecolor='w',
edgecolor='w',
orientation='portrait',
format=None,
transparent=False,
bbox_inches=None,
pad_inches=0.1,
frameon=None,
metadata=None)
plt.close('all')
"""
#--------------------------------------------------------------------------
| 50.483333 | 119 | 0.614009 |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Number of nodes recorded for every time step in the .rsn result files.
NbrOfNodes = 35
# Section headers (used verbatim as dictionary keys) of the three snapshots
# compared below: the GANDRA step, the stent-deployment step and the GANDRB
# step. NOTE(review): the spacing inside these strings must match the .rsn
# files exactly — confirm against a sample file.
keygnra = ' TIME: GANDRA STEP: 80.000 FRAME: 1.000'
keystent = ' TIME: STENT STEP: 1.000 FRAME: 1.000'
keygnrb = ' TIME: GANDRB STEP: 100.000 FRAME: 1.000'
# Gain parameter 01 ('surgery_p7.rsn', penalty 1e7). Every line containing
# 'TIME' is a section header followed by NbrOfNodes rows of 7 values.
with open('surgery_p7.rsn', 'r') as file_g01:  # 'with' closes the handle (it used to leak)
    gain01 = file_g01.readlines()
g01 = pd.Series(gain01)
g01 = g01.replace(r'\n', '', regex=True)
g01 = g01.replace(r'\r\n', '', regex=True)
g01 = g01.replace(r'\r', '', regex=True)
index_Time_g01 = g01[g01.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g01 = index_Time_g01.index.values
G01 = {}  # TIME header line -> (NbrOfNodes, 7) float array of nodal values
for idx in index_Time_g01.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g01[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ', expand=True)
    # (a discarded duplicate np.array(...) conversion was removed here)
    G01[g01[idx]] = np.array(tmp_df.values, dtype=float)
# First node of every time step, one row per step; O(n) list build replaces
# the former quadratic np.append loop.
Data_g01 = np.array([G01[key][0, :] for key in sorted(G01.keys())],
                    dtype=float).reshape(-1, 7)
# Full nodal meshes at the three snapshots of interest.
Data_g01_gnra = np.array(G01[keygnra][:NbrOfNodes, :], dtype=float)
Data_g01_stent = np.array(G01[keystent][:NbrOfNodes, :], dtype=float)
Data_g01_gnrb = np.array(G01[keygnrb][:NbrOfNodes, :], dtype=float)
# Order the per-time-step summary chronologically (column 0 holds the time).
Data_g01 = Data_g01[np.argsort(Data_g01[:, 0])]
# Gain parameter 02 ('surgery_ref.rsn', reference penalty 1e5); same format
# as the g01 file.
with open('surgery_ref.rsn', 'r') as file_g02:  # 'with' closes the handle (it used to leak)
    gain02 = file_g02.readlines()
g02 = pd.Series(gain02)
g02 = g02.replace(r'\n', '', regex=True)
g02 = g02.replace(r'\r\n', '', regex=True)
g02 = g02.replace(r'\r', '', regex=True)
index_Time_g02 = g02[g02.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g02 = index_Time_g02.index.values
G02 = {}  # TIME header line -> (NbrOfNodes, 7) float array of nodal values
for idx in index_Time_g02.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g02[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ', expand=True)
    # (a discarded duplicate np.array(...) conversion was removed here)
    G02[g02[idx]] = np.array(tmp_df.values, dtype=float)
# First node of every time step, one row per step; O(n) list build replaces
# the former quadratic np.append loop.
Data_g02 = np.array([G02[key][0, :] for key in sorted(G02.keys())],
                    dtype=float).reshape(-1, 7)
# Full nodal meshes at the three snapshots of interest.
Data_g02_gnra = np.array(G02[keygnra][:NbrOfNodes, :], dtype=float)
Data_g02_stent = np.array(G02[keystent][:NbrOfNodes, :], dtype=float)
Data_g02_gnrb = np.array(G02[keygnrb][:NbrOfNodes, :], dtype=float)
# Order the per-time-step summary chronologically (column 0 holds the time).
Data_g02 = Data_g02[np.argsort(Data_g02[:, 0])]
# Gain parameter 03 ('surgery_p9.rsn', penalty 1e9); same format as the
# g01 file.
with open('surgery_p9.rsn', 'r') as file_g03:  # 'with' closes the handle (it used to leak)
    gain03 = file_g03.readlines()
g03 = pd.Series(gain03)
g03 = g03.replace(r'\n', '', regex=True)
g03 = g03.replace(r'\r\n', '', regex=True)
g03 = g03.replace(r'\r', '', regex=True)
index_Time_g03 = g03[g03.str.contains('TIME', case=False, regex=False)]
index_TimeValues_g03 = index_Time_g03.index.values
G03 = {}  # TIME header line -> (NbrOfNodes, 7) float array of nodal values
for idx in index_Time_g03.index.values:
    index_start = idx + 1
    index_end = index_start + NbrOfNodes
    tmp_df = g03[index_start:index_end].str.strip()
    tmp_df = tmp_df.str.split(' ', expand=True)
    # (a discarded duplicate np.array(...) conversion was removed here)
    G03[g03[idx]] = np.array(tmp_df.values, dtype=float)
# First node of every time step, one row per step; O(n) list build replaces
# the former quadratic np.append loop.
Data_g03 = np.array([G03[key][0, :] for key in sorted(G03.keys())],
                    dtype=float).reshape(-1, 7)
# Full nodal meshes at the three snapshots of interest.
Data_g03_gnra = np.array(G03[keygnra][:NbrOfNodes, :], dtype=float)
Data_g03_stent = np.array(G03[keystent][:NbrOfNodes, :], dtype=float)
Data_g03_gnrb = np.array(G03[keygnrb][:NbrOfNodes, :], dtype=float)
# Order the per-time-step summary chronologically (column 0 holds the time).
Data_g03 = Data_g03[np.argsort(Data_g03[:, 0])]
# 4x3 comparison figure. Rows: time history, GNRA mesh, STENT mesh, GNRB
# mesh. Columns: axial stress, circumferential stress, radius. One curve
# per penalty value. The twelve formerly hand-copied subplot stanzas are
# generated from the row/column tables below; pixel-level output is
# unchanged.
fig = plt.figure()
plt.rcParams.update({'font.size': 5})
plt.rc('text', usetex=False)

# (colour, legend label) per run, in plotting order g01 / g02 / g03.
_SERIES = (('b', 'Penalty=1*10^7'), ('r', 'Penalty=1*10^5'), ('g', 'Penalty=1*10^9'))

def _panel(position, datasets, x_of, y_of, xlabel, ylabel, letter, limits, legend=False):
    """Draw one subplot: three curves plus in-axes labels and a letter tag."""
    plt.subplot(4, 3, position)
    for data, (colour, label) in zip(datasets, _SERIES):
        plt.plot(x_of(data), y_of(data), colour, label=label, linewidth=1.0, markersize=10)
    plt.text(0.5, 0.05, xlabel, {'color': 'k', 'fontsize': 6},
             ha='center', va='center', clip_on=False, transform=plt.gca().transAxes)
    plt.text(0.05, 0.5, ylabel, {'color': 'k', 'fontsize': 6},
             ha='left', va='center', rotation=90, clip_on=False, transform=plt.gca().transAxes)
    plt.text(0.95, 0.95, letter, {'color': 'k', 'fontsize': 6,
             'bbox': dict(boxstyle="round", fc="w", ec="k", pad=0.2)},
             ha='right', va='top', transform=plt.gca().transAxes)
    if legend:
        plt.legend(loc='center right')
    plt.axis(limits)

# Column definitions: y extractor (Pa -> kPa or m -> mm), y label, y limits.
_COLUMNS = (
    (lambda d: d[:, 4] / 1000.0, 'Axial Stress [kPa]', (0, 150)),
    (lambda d: d[:, 5] / 1000.0, 'Circumferential Stress [kPa]', (0, 350)),
    (lambda d: d[:, 3] * 1000.0, 'Radius [mm]', (10, 13)),
)
# Row definitions: datasets (g01/g02/g03), x extractor, x label, x maximum.
_ROWS = (
    ((Data_g01, Data_g02, Data_g03),
     lambda d: d[:, 0], 'Time [months]', 180),
    ((Data_g01_gnra, Data_g02_gnra, Data_g03_gnra),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
    ((Data_g01_stent, Data_g02_stent, Data_g03_stent),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
    ((Data_g01_gnrb, Data_g02_gnrb, Data_g03_gnrb),
     lambda d: d[:, 2] * 1000.0, 'Axial position [mm]', 100),
)
position = 0
for datasets, x_of, xlabel, xmax in _ROWS:
    for y_of, ylabel, (ymin, ymax) in _COLUMNS:
        position += 1
        _panel(position, datasets, x_of, y_of, xlabel, ylabel,
               chr(ord('a') + position - 1),     # panel tags 'a'..'l'
               [0, xmax, ymin, ymax],
               legend=(position == 2))           # legend only on panel 'b', as before
fig.tight_layout()
FIGURENAME = 'penalty.eps'
# Fixes vs. the original tail: 'plt.show' lacked parentheses (a no-op
# attribute access), the figure was written twice, and the second savefig
# call passed frameon=None, a keyword removed in matplotlib >= 3.3.
plt.savefig(FIGURENAME)
plt.close('all')
| true | true |
f735262f54850ed34027a73490ed6d900391ebd5 | 4,659 | py | Python | conans/client/generators/qmake.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | 3 | 2016-11-11T01:09:44.000Z | 2017-07-19T13:30:17.000Z | conans/client/generators/qmake.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | 6 | 2017-06-14T11:40:15.000Z | 2020-05-23T01:43:28.000Z | conans/client/generators/qmake.py | ytimenkov/conan | 89eb275b9696b308aaaa1fbfaa0f8cdab284a764 | [
"MIT"
] | 2 | 2017-11-29T14:05:22.000Z | 2018-09-19T12:43:33.000Z | from conans.model import Generator
from conans.paths import BUILD_INFO_QMAKE
class DepsCppQmake(object):
    """Pre-formatted qmake string fragments for a single cpp_info object.

    Every attribute is a ready-to-emit qmake value: paths are quoted and
    use forward slashes (portable across platforms), libraries carry the
    -l prefix and flag lists are space-joined.
    """

    def __init__(self, cpp_info):
        def _quoted_lines(paths):
            # One quoted, slash-normalised path per continuation line.
            return " \\\n ".join('"%s"' % path.replace("\\", "/") for path in paths)

        self.include_paths = _quoted_lines(cpp_info.include_paths)
        self.lib_paths = " \\\n ".join('-L"%s"' % path.replace("\\", "/")
                                       for path in cpp_info.lib_paths)
        self.bin_paths = _quoted_lines(cpp_info.bin_paths)
        self.res_paths = _quoted_lines(cpp_info.res_paths)
        self.build_paths = _quoted_lines(cpp_info.build_paths)
        self.libs = " ".join('-l%s' % lib for lib in cpp_info.libs)
        self.defines = " \\\n ".join('"%s"' % define for define in cpp_info.defines)
        self.cppflags = " ".join(cpp_info.cppflags)
        self.cflags = " ".join(cpp_info.cflags)
        self.sharedlinkflags = " ".join(cpp_info.sharedlinkflags)
        self.exelinkflags = " ".join(cpp_info.exelinkflags)
        self.rootpath = '%s' % cpp_info.rootpath.replace("\\", "/")
class QmakeGenerator(Generator):
    """Conan generator that renders dependency info as a qmake include file.

    Emits one family of CONAN_* qmake variables for the aggregated
    dependency set, one per build configuration, and one per individual
    dependency (plus per-dependency configurations), followed by an
    opt-in 'conan_basic_setup' CONFIG block that wires the variables into
    the standard qmake ones.
    """

    @property
    def filename(self):
        # Fixed output file name (constant defined in conans.paths).
        return BUILD_INFO_QMAKE

    @property
    def content(self):
        # Aggregated flags over all dependencies (no name/config suffix).
        deps = DepsCppQmake(self.deps_build_info)
        # One assignment per cpp_info field; {dep_name} and {build_type}
        # become suffixes such as '_ZLIB' and '_RELEASE' (empty for the
        # aggregated/global section).
        template = ('CONAN_INCLUDEPATH{dep_name}{build_type} += {deps.include_paths}\n'
                    'CONAN_LIBS{dep_name}{build_type} += {deps.libs}\n'
                    'CONAN_LIBDIRS{dep_name}{build_type} += {deps.lib_paths}\n'
                    'CONAN_BINDIRS{dep_name}{build_type} += {deps.bin_paths}\n'
                    'CONAN_RESDIRS{dep_name}{build_type} += {deps.res_paths}\n'
                    'CONAN_BUILDDIRS{dep_name}{build_type} += {deps.build_paths}\n'
                    'CONAN_DEFINES{dep_name}{build_type} += {deps.defines}\n'
                    'CONAN_QMAKE_CXXFLAGS{dep_name}{build_type} += {deps.cppflags}\n'
                    'CONAN_QMAKE_CFLAGS{dep_name}{build_type} += {deps.cflags}\n'
                    'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.sharedlinkflags}\n'
                    'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.exelinkflags}\n')
        sections = []
        template_all = template
        # Global section: aggregated values, no suffixes.
        all_flags = template_all.format(dep_name="", deps=deps, build_type="")
        sections.append(all_flags)
        # Global per-configuration sections (e.g. _RELEASE, _DEBUG).
        for config, cpp_info in self.deps_build_info.configs.items():
            deps = DepsCppQmake(cpp_info)
            dep_flags = template_all.format(dep_name="", deps=deps,
                                            build_type="_" + str(config).upper())
            sections.append(dep_flags)
        # Per-dependency sections additionally expose the package root dir.
        template_deps = template + 'CONAN{dep_name}_ROOT{build_type} = "{deps.rootpath}"\n'
        for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
            deps = DepsCppQmake(dep_cpp_info)
            dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
                                             build_type="")
            sections.append(dep_flags)
            # Per-dependency, per-configuration sections.
            for config, cpp_info in dep_cpp_info.configs.items():
                deps = DepsCppQmake(cpp_info)
                dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
                                                 build_type="_" + str(config).upper())
                sections.append(dep_flags)
        output = "\n".join(sections)
        # Opt-in block: a project adds 'CONFIG += conan_basic_setup' to
        # apply the CONAN_* variables to the standard qmake variables.
        output += ("""\nCONFIG(conan_basic_setup) {
    INCLUDEPATH += $$CONAN_INCLUDEPATH
    LIBS += $$CONAN_LIBS
    LIBS += $$CONAN_LIBDIRS
    BINDIRS += $$CONAN_BINDIRS
    DEFINES += $$CONAN_DEFINES
    CONFIG(release, debug|release) {
        message("Release config")
        INCLUDEPATH += $$CONAN_INCLUDEPATH_RELEASE
        LIBS += $$CONAN_LIBS_RELEASE
        LIBS += $$CONAN_LIBDIRS_RELEASE
        BINDIRS += $$CONAN_BINDIRS_RELEASE
        DEFINES += $$CONAN_DEFINES_RELEASE
    } else {
        message("Debug config")
        INCLUDEPATH += $$CONAN_INCLUDEPATH_DEBUG
        LIBS += $$CONAN_LIBS_DEBUG
        LIBS += $$CONAN_LIBDIRS_DEBUG
        BINDIRS += $$CONAN_BINDIRS_DEBUG
        DEFINES += $$CONAN_DEFINES_DEBUG
    }
    QMAKE_CXXFLAGS += $$CONAN_QMAKE_CXXFLAGS
    QMAKE_CFLAGS += $$CONAN_QMAKE_CFLAGS
    QMAKE_LFLAGS += $$CONAN_QMAKE_LFLAGS
    QMAKE_CXXFLAGS_DEBUG += $$CONAN_QMAKE_CXXFLAGS_DEBUG
    QMAKE_CFLAGS_DEBUG += $$CONAN_QMAKE_CFLAGS_DEBUG
    QMAKE_LFLAGS_DEBUG += $$CONAN_QMAKE_LFLAGS_DEBUG
    QMAKE_CXXFLAGS_RELEASE += $$CONAN_QMAKE_CXXFLAGS_RELEASE
    QMAKE_CFLAGS_RELEASE += $$CONAN_QMAKE_CFLAGS_RELEASE
    QMAKE_LFLAGS_RELEASE += $$CONAN_QMAKE_LFLAGS_RELEASE
}""")
        return output
| 43.542056 | 92 | 0.608714 | from conans.model import Generator
from conans.paths import BUILD_INFO_QMAKE
class DepsCppQmake(object):
    """Pre-formatted qmake string fragments for a single cpp_info object.

    Every attribute is a ready-to-emit qmake value: paths are quoted and
    use forward slashes (portable across platforms), libraries carry the
    -l prefix and flag lists are space-joined.
    """

    def __init__(self, cpp_info):
        def _quoted_lines(paths):
            # One quoted, slash-normalised path per continuation line.
            return " \\\n ".join('"%s"' % path.replace("\\", "/") for path in paths)

        self.include_paths = _quoted_lines(cpp_info.include_paths)
        self.lib_paths = " \\\n ".join('-L"%s"' % path.replace("\\", "/")
                                       for path in cpp_info.lib_paths)
        self.bin_paths = _quoted_lines(cpp_info.bin_paths)
        self.res_paths = _quoted_lines(cpp_info.res_paths)
        self.build_paths = _quoted_lines(cpp_info.build_paths)
        self.libs = " ".join('-l%s' % lib for lib in cpp_info.libs)
        self.defines = " \\\n ".join('"%s"' % define for define in cpp_info.defines)
        self.cppflags = " ".join(cpp_info.cppflags)
        self.cflags = " ".join(cpp_info.cflags)
        self.sharedlinkflags = " ".join(cpp_info.sharedlinkflags)
        self.exelinkflags = " ".join(cpp_info.exelinkflags)
        self.rootpath = '%s' % cpp_info.rootpath.replace("\\", "/")
class QmakeGenerator(Generator):
@property
def filename(self):
return BUILD_INFO_QMAKE
@property
def content(self):
deps = DepsCppQmake(self.deps_build_info)
template = ('CONAN_INCLUDEPATH{dep_name}{build_type} += {deps.include_paths}\n'
'CONAN_LIBS{dep_name}{build_type} += {deps.libs}\n'
'CONAN_LIBDIRS{dep_name}{build_type} += {deps.lib_paths}\n'
'CONAN_BINDIRS{dep_name}{build_type} += {deps.bin_paths}\n'
'CONAN_RESDIRS{dep_name}{build_type} += {deps.res_paths}\n'
'CONAN_BUILDDIRS{dep_name}{build_type} += {deps.build_paths}\n'
'CONAN_DEFINES{dep_name}{build_type} += {deps.defines}\n'
'CONAN_QMAKE_CXXFLAGS{dep_name}{build_type} += {deps.cppflags}\n'
'CONAN_QMAKE_CFLAGS{dep_name}{build_type} += {deps.cflags}\n'
'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.sharedlinkflags}\n'
'CONAN_QMAKE_LFLAGS{dep_name}{build_type} += {deps.exelinkflags}\n')
sections = []
template_all = template
all_flags = template_all.format(dep_name="", deps=deps, build_type="")
sections.append(all_flags)
for config, cpp_info in self.deps_build_info.configs.items():
deps = DepsCppQmake(cpp_info)
dep_flags = template_all.format(dep_name="", deps=deps,
build_type="_" + str(config).upper())
sections.append(dep_flags)
template_deps = template + 'CONAN{dep_name}_ROOT{build_type} = "{deps.rootpath}"\n'
for dep_name, dep_cpp_info in self.deps_build_info.dependencies:
deps = DepsCppQmake(dep_cpp_info)
dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
build_type="")
sections.append(dep_flags)
for config, cpp_info in dep_cpp_info.configs.items():
deps = DepsCppQmake(cpp_info)
dep_flags = template_deps.format(dep_name="_" + dep_name.upper(), deps=deps,
build_type="_" + str(config).upper())
sections.append(dep_flags)
output = "\n".join(sections)
output += ("""\nCONFIG(conan_basic_setup) {
INCLUDEPATH += $$CONAN_INCLUDEPATH
LIBS += $$CONAN_LIBS
LIBS += $$CONAN_LIBDIRS
BINDIRS += $$CONAN_BINDIRS
DEFINES += $$CONAN_DEFINES
CONFIG(release, debug|release) {
message("Release config")
INCLUDEPATH += $$CONAN_INCLUDEPATH_RELEASE
LIBS += $$CONAN_LIBS_RELEASE
LIBS += $$CONAN_LIBDIRS_RELEASE
BINDIRS += $$CONAN_BINDIRS_RELEASE
DEFINES += $$CONAN_DEFINES_RELEASE
} else {
message("Debug config")
INCLUDEPATH += $$CONAN_INCLUDEPATH_DEBUG
LIBS += $$CONAN_LIBS_DEBUG
LIBS += $$CONAN_LIBDIRS_DEBUG
BINDIRS += $$CONAN_BINDIRS_DEBUG
DEFINES += $$CONAN_DEFINES_DEBUG
}
QMAKE_CXXFLAGS += $$CONAN_QMAKE_CXXFLAGS
QMAKE_CFLAGS += $$CONAN_QMAKE_CFLAGS
QMAKE_LFLAGS += $$CONAN_QMAKE_LFLAGS
QMAKE_CXXFLAGS_DEBUG += $$CONAN_QMAKE_CXXFLAGS_DEBUG
QMAKE_CFLAGS_DEBUG += $$CONAN_QMAKE_CFLAGS_DEBUG
QMAKE_LFLAGS_DEBUG += $$CONAN_QMAKE_LFLAGS_DEBUG
QMAKE_CXXFLAGS_RELEASE += $$CONAN_QMAKE_CXXFLAGS_RELEASE
QMAKE_CFLAGS_RELEASE += $$CONAN_QMAKE_CFLAGS_RELEASE
QMAKE_LFLAGS_RELEASE += $$CONAN_QMAKE_LFLAGS_RELEASE
}""")
return output
| true | true |
f7352703ed42ee6015d5ca30e57234884d399073 | 1,652 | py | Python | test/testAnonymizationExecutor.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | [
"Apache-2.0"
] | 3 | 2020-02-12T01:24:46.000Z | 2020-02-13T00:50:46.000Z | test/testAnonymizationExecutor.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | [
"Apache-2.0"
] | 32 | 2020-02-20T10:20:56.000Z | 2022-02-10T01:42:46.000Z | test/testAnonymizationExecutor.py | AutoDash/AutoDash | 3924795a04159f80ea3b65b2172747babd15f35f | [
"Apache-2.0"
] | 1 | 2020-02-22T02:47:19.000Z | 2020-02-22T02:47:19.000Z | #!/usr/bin/env python3
import unittest
import os
import shutil
from src.data.VideoItem import VideoItem
from src.data.MetaDataItem import MetaDataItem
from src.executor.FaceBlurrer import FaceBlurrer
from numpy.testing import assert_array_equal, assert_raises
class TestAnonymizationExecutor(unittest.TestCase):
TEST_DIR = os.path.join(os.getcwd(), "anontest")
TEST_FILE = "test.mp4"
DATASET_PATH = "src/lib/anonymization/dataset/input"
ACCEPTED_FILE_EXTENSION = ".mp4"
TEST_FILE_PATH = os.path.join(TEST_DIR, TEST_FILE)
def setUp(self):
# Create test directory and copy one of the test videos from the anonymization repo into it
if not os.path.exists(self.TEST_DIR):
os.mkdir(self.TEST_DIR)
def tearDown(self):
# Delete test directory
if os.path.exists(self.TEST_DIR):
shutil.rmtree(self.TEST_DIR)
def test_compiles(self):
self.assertEqual(True, True)
"""
# Test that the executor works with a single video
def test_face_blurrer_single(self):
# Copy video to test directory
shutil.copy2(os.path.join(os.getcwd(), self.DATASET_PATH, "man_face.mp4"), self.TEST_FILE_PATH)
video = VideoItem(filepath = self.TEST_FILE_PATH, metadata=None)
original_data = video.npy
# Running the face blurrer should overwrite the input file
face_blurrer = FaceBlurrer()
new_data = face_blurrer.run(video)
# Now we check that the video data has changed
assert_raises(AssertionError, assert_array_equal, original_data, new_data)
"""
if __name__ == '__main__':
unittest.main()
| 33.04 | 103 | 0.70339 |
import unittest
import os
import shutil
from src.data.VideoItem import VideoItem
from src.data.MetaDataItem import MetaDataItem
from src.executor.FaceBlurrer import FaceBlurrer
from numpy.testing import assert_array_equal, assert_raises
class TestAnonymizationExecutor(unittest.TestCase):
TEST_DIR = os.path.join(os.getcwd(), "anontest")
TEST_FILE = "test.mp4"
DATASET_PATH = "src/lib/anonymization/dataset/input"
ACCEPTED_FILE_EXTENSION = ".mp4"
TEST_FILE_PATH = os.path.join(TEST_DIR, TEST_FILE)
def setUp(self):
if not os.path.exists(self.TEST_DIR):
os.mkdir(self.TEST_DIR)
def tearDown(self):
if os.path.exists(self.TEST_DIR):
shutil.rmtree(self.TEST_DIR)
def test_compiles(self):
self.assertEqual(True, True)
if __name__ == '__main__':
unittest.main()
| true | true |
f7352712da6d0702be5a451230eec61bfb5112aa | 238 | py | Python | backend/backend/orders/admin.py | PrzemyslawSalek/9eats | b748ba2166a065c8d04e043069a1ddf5641322ca | [
"MIT"
] | null | null | null | backend/backend/orders/admin.py | PrzemyslawSalek/9eats | b748ba2166a065c8d04e043069a1ddf5641322ca | [
"MIT"
] | null | null | null | backend/backend/orders/admin.py | PrzemyslawSalek/9eats | b748ba2166a065c8d04e043069a1ddf5641322ca | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Order
class TodoAdmin(admin.ModelAdmin):
list_display = ('dishes', 'paid', 'completed', 'timestamp', 'user')
# Register your models here.
admin.site.register(Order, TodoAdmin)
| 19.833333 | 71 | 0.735294 | from django.contrib import admin
from .models import Order
class TodoAdmin(admin.ModelAdmin):
list_display = ('dishes', 'paid', 'completed', 'timestamp', 'user')
admin.site.register(Order, TodoAdmin)
| true | true |
f7352926b65d269a72935135f220dd69066912a0 | 9,509 | py | Python | src/appengine/server.py | bonomali/clusterfuzz | 39e0583148b1810cbbe18f48d7a4ee63489f4c84 | [
"Apache-2.0"
] | 2 | 2019-03-10T14:40:17.000Z | 2021-11-17T10:51:31.000Z | src/appengine/server.py | M31MOTH/clusterfuzz | a614d2e09238f11ae578337e10dfeaba38dcae76 | [
"Apache-2.0"
] | 12 | 2020-11-13T18:58:31.000Z | 2022-03-21T22:19:55.000Z | src/appengine/server.py | gaybro8777/clusterfuzz | fb053896ee5b5f1468479e75223c07b4281a72d3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""server.py initialises the appengine server for ClusterFuzz."""
import urllib
import webapp2
from webapp2_extras import routes
from base import utils
from config import local_config
from handlers import base_handler
from handlers import bots
from handlers import commit_range
from handlers import configuration
from handlers import corpora
from handlers import coverage_report
from handlers import crash_stats
from handlers import domain_verifier
from handlers import download
from handlers import fuzzer_stats
from handlers import fuzzers
from handlers import gcs_redirector
from handlers import help_redirector
from handlers import home
from handlers import issue_redirector
from handlers import jobs
from handlers import parse_stacktrace
from handlers import report_csp_failure
from handlers import revisions_info
from handlers import testcase_list
from handlers import upload_testcase
from handlers import viewer
from handlers.cron import backup
from handlers.cron import build_crash_stats
from handlers.cron import cleanup
from handlers.cron import corpus_backup
from handlers.cron import fuzzer_weights
from handlers.cron import load_bigquery_stats
from handlers.cron import manage_vms
from handlers.cron import ml_train
from handlers.cron import oss_fuzz_apply_ccs
from handlers.cron import oss_fuzz_build_status
from handlers.cron import oss_fuzz_setup
from handlers.cron import predator_pull
from handlers.cron import recurring_tasks
from handlers.cron import schedule_corpus_pruning
from handlers.cron import triage
from handlers.performance_report import (show as show_performance_report)
from handlers.testcase_detail import (crash_stats as crash_stats_on_testcase)
from handlers.testcase_detail import (show as show_testcase)
from handlers.testcase_detail import create_issue
from handlers.testcase_detail import delete
from handlers.testcase_detail import download_testcase
from handlers.testcase_detail import find_similar_issues
from handlers.testcase_detail import mark_fixed
from handlers.testcase_detail import mark_security
from handlers.testcase_detail import mark_unconfirmed
from handlers.testcase_detail import redo
from handlers.testcase_detail import remove_duplicate
from handlers.testcase_detail import remove_group
from handlers.testcase_detail import remove_issue
from handlers.testcase_detail import update_from_trunk
from handlers.testcase_detail import update_issue
class _TrailingSlashRemover(webapp2.RequestHandler):
def get(self, url):
self.redirect(url)
# TODO(aarya): Remove after all /v2 links are deprecated.
class _V2Remover(webapp2.RequestHandler):
def get(self, url):
self.redirect('/%s?%s' % (url, urllib.urlencode(self.request.params)))
def redirect_to(to_domain):
"""Create a redirect handler to a domain."""
class RedirectHandler(webapp2.RequestHandler):
"""Handler to redirect to domain."""
def get(self, _):
self.redirect(
'https://' + to_domain + self.request.path_qs, permanent=True)
return RedirectHandler
# Add item to the navigation menu. Order is important.
base_handler.add_menu('Testcases', '/testcases')
base_handler.add_menu('Fuzzer Statistics', '/fuzzer-stats')
base_handler.add_menu('Crash Statistics', '/crash-stats')
base_handler.add_menu('Upload Testcase', '/upload-testcase')
if utils.is_chromium():
base_handler.add_menu('Crashes by range', '/commit-range')
if not utils.is_oss_fuzz():
base_handler.add_menu('Fuzzers', '/fuzzers')
base_handler.add_menu('Corpora', '/corpora')
base_handler.add_menu('Bots', '/bots')
base_handler.add_menu('Jobs', '/jobs')
base_handler.add_menu('Configuration', '/configuration')
base_handler.add_menu('Report Bug', '/report-bug')
base_handler.add_menu('Documentation', '/docs')
# We need to separate routes for cron to avoid redirection.
_CRON_ROUTES = [
('/backup', backup.Handler),
('/build-crash-stats', build_crash_stats.Handler),
('/cleanup', cleanup.Handler),
('/corpus-backup/make-public', corpus_backup.MakePublicHandler),
('/fuzzer-stats/cache', fuzzer_stats.RefreshCacheHandler),
('/fuzzer-stats/preload', fuzzer_stats.PreloadHandler),
('/fuzzer-weights', fuzzer_weights.Handler),
('/home-cache', home.RefreshCacheHandler),
('/load-bigquery-stats', load_bigquery_stats.Handler),
('/manage-vms', manage_vms.Handler),
('/oss-fuzz-apply-ccs', oss_fuzz_apply_ccs.Handler),
('/oss-fuzz-build-status', oss_fuzz_build_status.Handler),
('/oss-fuzz-setup', oss_fuzz_setup.Handler),
('/predator-pull', predator_pull.Handler),
('/schedule-corpus-pruning', schedule_corpus_pruning.Handler),
('/schedule-impact-tasks', recurring_tasks.ImpactTasksScheduler),
('/schedule-ml-train-tasks', ml_train.Handler),
('/schedule-progression-tasks', recurring_tasks.ProgressionTasksScheduler),
('/schedule-upload-reports-tasks',
recurring_tasks.UploadReportsTaskScheduler),
('/testcases/cache', testcase_list.CacheHandler),
('/triage', triage.Handler),
]
_ROUTES = [
('/', home.Handler),
('(.*)/$', _TrailingSlashRemover),
('/v2/(.*)', _V2Remover),
(r'/(google.+\.html)$', domain_verifier.Handler),
('/bots', bots.Handler),
('/bots/dead', bots.DeadBotsHandler),
('/commit-range', commit_range.Handler),
('/commit-range/load', commit_range.JsonHandler),
('/configuration', configuration.Handler),
('/add-external-user-permission', configuration.AddExternalUserPermission),
('/delete-external-user-permission',
configuration.DeleteExternalUserPermission),
('/coverage-report/([^/]+)/([^/]+)/([^/]+)(/.*)?', coverage_report.Handler),
('/crash-stats/load', crash_stats.JsonHandler),
('/crash-stats', crash_stats.Handler),
('/corpora', corpora.Handler),
('/corpora/create', corpora.CreateHandler),
('/corpora/delete', corpora.DeleteHandler),
('/docs', help_redirector.DocumentationHandler),
('/download/?([^/]+)?', download.Handler),
('/fuzzers', fuzzers.Handler),
('/fuzzers/create', fuzzers.CreateHandler),
('/fuzzers/delete', fuzzers.DeleteHandler),
('/fuzzers/edit', fuzzers.EditHandler),
('/fuzzers/log/([^/]+)', fuzzers.LogHandler),
('/fuzzer-stats/load', fuzzer_stats.LoadHandler),
('/fuzzer-stats', fuzzer_stats.Handler),
('/fuzzer-stats/.*', fuzzer_stats.Handler),
('/gcs-redirect', gcs_redirector.Handler),
('/issue/([0-9]+)/(.+)', issue_redirector.Handler),
('/jobs', jobs.Handler),
('/jobs/.*', jobs.Handler),
('/update-job', jobs.UpdateJob),
('/update-job-template', jobs.UpdateJobTemplate),
('/parse_stacktrace', parse_stacktrace.Handler),
('/performance-report/(.+)/(.+)/(.+)', show_performance_report.Handler),
('/report-csp-failure', report_csp_failure.ReportCspFailureHandler),
('/testcase', show_testcase.DeprecatedHandler),
('/testcase-detail/([0-9]+)', show_testcase.Handler),
('/testcase-detail/crash-stats', crash_stats_on_testcase.Handler),
('/testcase-detail/create-issue', create_issue.Handler),
('/testcase-detail/delete', delete.Handler),
('/testcase-detail/download-testcase', download_testcase.Handler),
('/testcase-detail/find-similar-issues', find_similar_issues.Handler),
('/testcase-detail/mark-fixed', mark_fixed.Handler),
('/testcase-detail/mark-security', mark_security.Handler),
('/testcase-detail/mark-unconfirmed', mark_unconfirmed.Handler),
('/testcase-detail/redo', redo.Handler),
('/testcase-detail/refresh', show_testcase.RefreshHandler),
('/testcase-detail/remove-duplicate', remove_duplicate.Handler),
('/testcase-detail/remove-issue', remove_issue.Handler),
('/testcase-detail/remove-group', remove_group.Handler),
('/testcase-detail/update-from-trunk', update_from_trunk.Handler),
('/testcase-detail/update-issue', update_issue.Handler),
('/testcases', testcase_list.Handler),
('/testcases/load', testcase_list.JsonHandler),
('/upload-testcase', upload_testcase.Handler),
('/upload-testcase/get-url-oauth', upload_testcase.UploadUrlHandlerOAuth),
('/upload-testcase/prepare', upload_testcase.PrepareUploadHandler),
('/upload-testcase/load', upload_testcase.JsonHandler),
('/upload-testcase/upload', upload_testcase.UploadHandler),
('/upload-testcase/upload-oauth', upload_testcase.UploadHandlerOAuth),
('/revisions', revisions_info.Handler),
('/report-bug', help_redirector.ReportBugHandler),
('/viewer', viewer.Handler),
]
config = local_config.GAEConfig()
main_domain = config.get('domains.main')
redirect_domains = config.get('domains.redirects')
_DOMAIN_ROUTES = []
if main_domain and redirect_domains:
for redirect_domain in redirect_domains:
_DOMAIN_ROUTES.append(
routes.DomainRoute(redirect_domain, [
webapp2.Route('<:.*>', redirect_to(main_domain)),
]))
app = webapp2.WSGIApplication(
_CRON_ROUTES + _DOMAIN_ROUTES + _ROUTES, debug=True)
| 41.70614 | 80 | 0.7495 |
import urllib
import webapp2
from webapp2_extras import routes
from base import utils
from config import local_config
from handlers import base_handler
from handlers import bots
from handlers import commit_range
from handlers import configuration
from handlers import corpora
from handlers import coverage_report
from handlers import crash_stats
from handlers import domain_verifier
from handlers import download
from handlers import fuzzer_stats
from handlers import fuzzers
from handlers import gcs_redirector
from handlers import help_redirector
from handlers import home
from handlers import issue_redirector
from handlers import jobs
from handlers import parse_stacktrace
from handlers import report_csp_failure
from handlers import revisions_info
from handlers import testcase_list
from handlers import upload_testcase
from handlers import viewer
from handlers.cron import backup
from handlers.cron import build_crash_stats
from handlers.cron import cleanup
from handlers.cron import corpus_backup
from handlers.cron import fuzzer_weights
from handlers.cron import load_bigquery_stats
from handlers.cron import manage_vms
from handlers.cron import ml_train
from handlers.cron import oss_fuzz_apply_ccs
from handlers.cron import oss_fuzz_build_status
from handlers.cron import oss_fuzz_setup
from handlers.cron import predator_pull
from handlers.cron import recurring_tasks
from handlers.cron import schedule_corpus_pruning
from handlers.cron import triage
from handlers.performance_report import (show as show_performance_report)
from handlers.testcase_detail import (crash_stats as crash_stats_on_testcase)
from handlers.testcase_detail import (show as show_testcase)
from handlers.testcase_detail import create_issue
from handlers.testcase_detail import delete
from handlers.testcase_detail import download_testcase
from handlers.testcase_detail import find_similar_issues
from handlers.testcase_detail import mark_fixed
from handlers.testcase_detail import mark_security
from handlers.testcase_detail import mark_unconfirmed
from handlers.testcase_detail import redo
from handlers.testcase_detail import remove_duplicate
from handlers.testcase_detail import remove_group
from handlers.testcase_detail import remove_issue
from handlers.testcase_detail import update_from_trunk
from handlers.testcase_detail import update_issue
class _TrailingSlashRemover(webapp2.RequestHandler):
def get(self, url):
self.redirect(url)
class _V2Remover(webapp2.RequestHandler):
def get(self, url):
self.redirect('/%s?%s' % (url, urllib.urlencode(self.request.params)))
def redirect_to(to_domain):
class RedirectHandler(webapp2.RequestHandler):
def get(self, _):
self.redirect(
'https://' + to_domain + self.request.path_qs, permanent=True)
return RedirectHandler
base_handler.add_menu('Testcases', '/testcases')
base_handler.add_menu('Fuzzer Statistics', '/fuzzer-stats')
base_handler.add_menu('Crash Statistics', '/crash-stats')
base_handler.add_menu('Upload Testcase', '/upload-testcase')
if utils.is_chromium():
base_handler.add_menu('Crashes by range', '/commit-range')
if not utils.is_oss_fuzz():
base_handler.add_menu('Fuzzers', '/fuzzers')
base_handler.add_menu('Corpora', '/corpora')
base_handler.add_menu('Bots', '/bots')
base_handler.add_menu('Jobs', '/jobs')
base_handler.add_menu('Configuration', '/configuration')
base_handler.add_menu('Report Bug', '/report-bug')
base_handler.add_menu('Documentation', '/docs')
_CRON_ROUTES = [
('/backup', backup.Handler),
('/build-crash-stats', build_crash_stats.Handler),
('/cleanup', cleanup.Handler),
('/corpus-backup/make-public', corpus_backup.MakePublicHandler),
('/fuzzer-stats/cache', fuzzer_stats.RefreshCacheHandler),
('/fuzzer-stats/preload', fuzzer_stats.PreloadHandler),
('/fuzzer-weights', fuzzer_weights.Handler),
('/home-cache', home.RefreshCacheHandler),
('/load-bigquery-stats', load_bigquery_stats.Handler),
('/manage-vms', manage_vms.Handler),
('/oss-fuzz-apply-ccs', oss_fuzz_apply_ccs.Handler),
('/oss-fuzz-build-status', oss_fuzz_build_status.Handler),
('/oss-fuzz-setup', oss_fuzz_setup.Handler),
('/predator-pull', predator_pull.Handler),
('/schedule-corpus-pruning', schedule_corpus_pruning.Handler),
('/schedule-impact-tasks', recurring_tasks.ImpactTasksScheduler),
('/schedule-ml-train-tasks', ml_train.Handler),
('/schedule-progression-tasks', recurring_tasks.ProgressionTasksScheduler),
('/schedule-upload-reports-tasks',
recurring_tasks.UploadReportsTaskScheduler),
('/testcases/cache', testcase_list.CacheHandler),
('/triage', triage.Handler),
]
_ROUTES = [
('/', home.Handler),
('(.*)/$', _TrailingSlashRemover),
('/v2/(.*)', _V2Remover),
(r'/(google.+\.html)$', domain_verifier.Handler),
('/bots', bots.Handler),
('/bots/dead', bots.DeadBotsHandler),
('/commit-range', commit_range.Handler),
('/commit-range/load', commit_range.JsonHandler),
('/configuration', configuration.Handler),
('/add-external-user-permission', configuration.AddExternalUserPermission),
('/delete-external-user-permission',
configuration.DeleteExternalUserPermission),
('/coverage-report/([^/]+)/([^/]+)/([^/]+)(/.*)?', coverage_report.Handler),
('/crash-stats/load', crash_stats.JsonHandler),
('/crash-stats', crash_stats.Handler),
('/corpora', corpora.Handler),
('/corpora/create', corpora.CreateHandler),
('/corpora/delete', corpora.DeleteHandler),
('/docs', help_redirector.DocumentationHandler),
('/download/?([^/]+)?', download.Handler),
('/fuzzers', fuzzers.Handler),
('/fuzzers/create', fuzzers.CreateHandler),
('/fuzzers/delete', fuzzers.DeleteHandler),
('/fuzzers/edit', fuzzers.EditHandler),
('/fuzzers/log/([^/]+)', fuzzers.LogHandler),
('/fuzzer-stats/load', fuzzer_stats.LoadHandler),
('/fuzzer-stats', fuzzer_stats.Handler),
('/fuzzer-stats/.*', fuzzer_stats.Handler),
('/gcs-redirect', gcs_redirector.Handler),
('/issue/([0-9]+)/(.+)', issue_redirector.Handler),
('/jobs', jobs.Handler),
('/jobs/.*', jobs.Handler),
('/update-job', jobs.UpdateJob),
('/update-job-template', jobs.UpdateJobTemplate),
('/parse_stacktrace', parse_stacktrace.Handler),
('/performance-report/(.+)/(.+)/(.+)', show_performance_report.Handler),
('/report-csp-failure', report_csp_failure.ReportCspFailureHandler),
('/testcase', show_testcase.DeprecatedHandler),
('/testcase-detail/([0-9]+)', show_testcase.Handler),
('/testcase-detail/crash-stats', crash_stats_on_testcase.Handler),
('/testcase-detail/create-issue', create_issue.Handler),
('/testcase-detail/delete', delete.Handler),
('/testcase-detail/download-testcase', download_testcase.Handler),
('/testcase-detail/find-similar-issues', find_similar_issues.Handler),
('/testcase-detail/mark-fixed', mark_fixed.Handler),
('/testcase-detail/mark-security', mark_security.Handler),
('/testcase-detail/mark-unconfirmed', mark_unconfirmed.Handler),
('/testcase-detail/redo', redo.Handler),
('/testcase-detail/refresh', show_testcase.RefreshHandler),
('/testcase-detail/remove-duplicate', remove_duplicate.Handler),
('/testcase-detail/remove-issue', remove_issue.Handler),
('/testcase-detail/remove-group', remove_group.Handler),
('/testcase-detail/update-from-trunk', update_from_trunk.Handler),
('/testcase-detail/update-issue', update_issue.Handler),
('/testcases', testcase_list.Handler),
('/testcases/load', testcase_list.JsonHandler),
('/upload-testcase', upload_testcase.Handler),
('/upload-testcase/get-url-oauth', upload_testcase.UploadUrlHandlerOAuth),
('/upload-testcase/prepare', upload_testcase.PrepareUploadHandler),
('/upload-testcase/load', upload_testcase.JsonHandler),
('/upload-testcase/upload', upload_testcase.UploadHandler),
('/upload-testcase/upload-oauth', upload_testcase.UploadHandlerOAuth),
('/revisions', revisions_info.Handler),
('/report-bug', help_redirector.ReportBugHandler),
('/viewer', viewer.Handler),
]
config = local_config.GAEConfig()
main_domain = config.get('domains.main')
redirect_domains = config.get('domains.redirects')
_DOMAIN_ROUTES = []
if main_domain and redirect_domains:
for redirect_domain in redirect_domains:
_DOMAIN_ROUTES.append(
routes.DomainRoute(redirect_domain, [
webapp2.Route('<:.*>', redirect_to(main_domain)),
]))
app = webapp2.WSGIApplication(
_CRON_ROUTES + _DOMAIN_ROUTES + _ROUTES, debug=True)
| true | true |
f7352bfdb50c0cc0b1af31b8dcc6b5ece3b6e577 | 2,581 | py | Python | supervisord/tests/test_supervisord_integration.py | vbarbaresi/integrations-core | ab26ab1cd6c28a97c1ad1177093a93659658c7aa | [
"BSD-3-Clause"
] | 663 | 2016-08-23T05:23:45.000Z | 2022-03-29T00:37:23.000Z | supervisord/tests/test_supervisord_integration.py | vbarbaresi/integrations-core | ab26ab1cd6c28a97c1ad1177093a93659658c7aa | [
"BSD-3-Clause"
] | 6,642 | 2016-06-09T16:29:20.000Z | 2022-03-31T22:24:09.000Z | supervisord/tests/test_supervisord_integration.py | vbarbaresi/integrations-core | ab26ab1cd6c28a97c1ad1177093a93659658c7aa | [
"BSD-3-Clause"
] | 1,222 | 2017-01-27T15:51:38.000Z | 2022-03-31T18:17:51.000Z | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from time import sleep
import pytest
from .common import PROCESSES, PROCESSES_BY_STATE_BY_ITERATION, STATUSES, SUPERVISOR_VERSION
# Mark all tests in this file as integration tests
pytestmark = [pytest.mark.integration, pytest.mark.usefixtures("dd_environment")]
def test_check(aggregator, check, instance):
"""
Run Supervisord check and assess coverage
"""
instance_tags = ["supervisord_server:travis"]
for i in range(4):
# Run the check
check.check(instance)
# Check metrics and service checks scoped by process
for proc in PROCESSES:
process_tags = instance_tags + ["supervisord_process:{}".format(proc)]
process_status = check.OK if proc in PROCESSES_BY_STATE_BY_ITERATION[i]['up'] else check.CRITICAL
aggregator.assert_metric("supervisord.process.uptime", tags=process_tags, count=1)
aggregator.assert_service_check(
"supervisord.process.status", status=process_status, tags=process_tags, count=1
)
# Check instance metrics
for status in STATUSES:
status_tags = instance_tags + ["status:{}".format(status)]
count_processes = len(PROCESSES_BY_STATE_BY_ITERATION[i][status])
aggregator.assert_metric("supervisord.process.count", value=count_processes, tags=status_tags, count=1)
aggregator.assert_service_check("supervisord.can_connect", status=check.OK, tags=instance_tags, count=1)
aggregator.reset()
# Sleep 10s to give enough time to processes to terminate
sleep(10)
def test_connection_failure(aggregator, check, bad_instance):
"""
Service check reports connection failure
"""
instance_tags = ["supervisord_server:travis"]
with pytest.raises(Exception):
check.check(bad_instance)
aggregator.assert_service_check("supervisord.can_connect", status=check.CRITICAL, tags=instance_tags, count=1)
def test_version_metadata(aggregator, check, instance, datadog_agent):
check.check_id = 'test:123'
check.check(instance)
raw_version = SUPERVISOR_VERSION.replace('_', '.')
major, minor, patch = raw_version.split('.')
version_metadata = {
'version.scheme': 'supervisord',
'version.major': major,
'version.minor': minor,
'version.patch': patch,
'version.raw': raw_version,
}
datadog_agent.assert_metadata('test:123', version_metadata)
| 35.356164 | 118 | 0.69353 |
from time import sleep
import pytest
from .common import PROCESSES, PROCESSES_BY_STATE_BY_ITERATION, STATUSES, SUPERVISOR_VERSION
pytestmark = [pytest.mark.integration, pytest.mark.usefixtures("dd_environment")]
def test_check(aggregator, check, instance):
instance_tags = ["supervisord_server:travis"]
for i in range(4):
check.check(instance)
for proc in PROCESSES:
process_tags = instance_tags + ["supervisord_process:{}".format(proc)]
process_status = check.OK if proc in PROCESSES_BY_STATE_BY_ITERATION[i]['up'] else check.CRITICAL
aggregator.assert_metric("supervisord.process.uptime", tags=process_tags, count=1)
aggregator.assert_service_check(
"supervisord.process.status", status=process_status, tags=process_tags, count=1
)
for status in STATUSES:
status_tags = instance_tags + ["status:{}".format(status)]
count_processes = len(PROCESSES_BY_STATE_BY_ITERATION[i][status])
aggregator.assert_metric("supervisord.process.count", value=count_processes, tags=status_tags, count=1)
aggregator.assert_service_check("supervisord.can_connect", status=check.OK, tags=instance_tags, count=1)
aggregator.reset()
sleep(10)
def test_connection_failure(aggregator, check, bad_instance):
instance_tags = ["supervisord_server:travis"]
with pytest.raises(Exception):
check.check(bad_instance)
aggregator.assert_service_check("supervisord.can_connect", status=check.CRITICAL, tags=instance_tags, count=1)
def test_version_metadata(aggregator, check, instance, datadog_agent):
check.check_id = 'test:123'
check.check(instance)
raw_version = SUPERVISOR_VERSION.replace('_', '.')
major, minor, patch = raw_version.split('.')
version_metadata = {
'version.scheme': 'supervisord',
'version.major': major,
'version.minor': minor,
'version.patch': patch,
'version.raw': raw_version,
}
datadog_agent.assert_metadata('test:123', version_metadata)
| true | true |
f7352c714da27beb77f9d60af9f18596852f5dc3 | 4,781 | py | Python | tensor2tensor/models/video/sv2p_params.py | kpe/tensor2tensor | 453c473030c354a3d9a4c27b12bcec8942334bf4 | [
"Apache-2.0"
] | 34 | 2018-12-19T01:00:57.000Z | 2021-03-26T09:36:37.000Z | tensor2tensor/models/video/sv2p_params.py | kpe/tensor2tensor | 453c473030c354a3d9a4c27b12bcec8942334bf4 | [
"Apache-2.0"
] | 11 | 2018-12-25T03:37:59.000Z | 2021-08-25T14:43:58.000Z | tensor2tensor/models/video/sv2p_params.py | kpe/tensor2tensor | 453c473030c354a3d9a4c27b12bcec8942334bf4 | [
"Apache-2.0"
] | 9 | 2018-12-27T08:00:44.000Z | 2020-06-08T03:05:14.000Z | # coding=utf-8
# Copyright 2019 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Param sets for SV2P model."""
from __future__ import division
from __future__ import print_function
from tensor2tensor.layers import modalities
from tensor2tensor.models.video import basic_stochastic
from tensor2tensor.utils import registry
@registry.register_hparams
def next_frame_sv2p():
  """Base hyperparameter set for the SV2P video-prediction model."""
  hparams = basic_stochastic.next_frame_basic_stochastic()
  # Optimization settings.
  hparams.optimizer = "true_adam"
  hparams.learning_rate_schedule = "constant"
  hparams.learning_rate_constant = 1e-3
  # Problem geometry: predict 3 target frames from 1 conditioning frame.
  hparams.batch_size = 16
  hparams.video_num_input_frames = 1
  hparams.video_num_target_frames = 3
  # Raw-pixel video modalities with an L2 reconstruction loss.
  hparams.bottom = {
      "inputs": modalities.video_raw_bottom,
      "targets": modalities.video_raw_targets_bottom,
  }
  hparams.loss = {"targets": modalities.video_l2_raw_loss}
  hparams.top = {"targets": modalities.video_raw_top}
  hparams.video_modality_loss_cutoff = 0.0
  # Scheduled sampling of ground-truth vs. predicted frames.
  hparams.scheduled_sampling_mode = "count"
  hparams.scheduled_sampling_k = 900.0
  # SV2P-specific knobs, registered in declaration order.
  for name, default in [
      ("reward_prediction", True),
      ("reward_prediction_stop_gradient", False),
      ("reward_prediction_buffer_size", 0),
      ("model_options", "CDNA"),
      ("num_masks", 10),
      ("multi_latent", False),
      ("relu_shift", 1e-12),
      ("dna_kernel_size", 5),
      ("upsample_method", "conv2d_transpose"),
      ("reward_model", "basic"),
      ("visualize_logits_histogram", True),
  ]:
    hparams.add_hparam(name, default)
  return hparams
@registry.register_hparams
def next_frame_sv2p_discrete():
  """SV2P variant with a discretized latent bottleneck."""
  hparams = next_frame_sv2p()
  hparams.action_injection = "multiplicative"
  hparams.small_mode = True
  # Discrete-latent knobs, registered in declaration order.
  for name, default in [
      ("bottleneck_bits", 128),
      ("bottleneck_noise", 0.02),
      ("discrete_warmup_steps", 40000),
      ("full_latent_tower", False),
      ("latent_predictor_state_size", 128),
      ("latent_predictor_temperature", 0.5),
      ("discretize_warmup_steps", 40000),
  ]:
    hparams.add_hparam(name, default)
  return hparams
@registry.register_hparams
def next_frame_sv2p_atari():
  """SV2P hyperparameters tuned for Atari frame prediction."""
  hp = next_frame_sv2p()
  # Symmetric 4-in / 4-out frame window for Atari.
  hp.video_num_input_frames = 4
  hp.video_num_target_frames = 4
  hp.action_injection = "multiplicative"
  # Staged training schedule for the stochastic latent.
  hp.num_iterations_1st_stage = 12000
  hp.num_iterations_2nd_stage = 12000
  hp.anneal_end = 40000
  hp.latent_loss_multiplier_schedule = "noisy_linear_cosine_decay"
  hp.latent_loss_multiplier = 1e-3
  hp.information_capacity = 0.0
  hp.small_mode = True
  return hp
@registry.register_hparams
def next_frame_sv2p_atari_softmax():
  """Atari SV2P using the model-internal (softmax) loss instead of raw L2."""
  hp = next_frame_sv2p_atari()
  # Clearing the modality overrides falls back to the default pixel modality.
  hp.bottom = {}
  hp.loss = {}
  hp.top = {}
  hp.internal_loss = True
  return hp
@registry.register_hparams
def next_frame_sv2p_atari_deterministic():
  """Atari SV2P with the stochastic latent disabled."""
  hp = next_frame_sv2p_atari()
  hp.stochastic_model = False
  return hp
@registry.register_hparams
def next_frame_sv2p_atari_softmax_deterministic():
  """Atari SV2P with softmax loss and the stochastic latent disabled."""
  hp = next_frame_sv2p_atari_softmax()
  hp.stochastic_model = False
  return hp
@registry.register_hparams
def next_frame_sv2p_tiny():
  """Downsized SV2P configuration for quick experiments and tests."""
  hp = next_frame_sv2p_atari_softmax()
  hp.batch_size = 2
  hp.tiny_mode = True
  hp.num_masks = 1
  hp.video_modality_loss_cutoff = 0.4
  hp.video_num_input_frames = 4
  hp.video_num_target_frames = 4
  return hp
@registry.register_hparams
def next_frame_sv2p_tiny_external():
  """Tiny SV2P that computes the loss externally rather than in the model."""
  hp = next_frame_sv2p_tiny()
  hp.internal_loss = False
  return hp
@registry.register_hparams
def next_frame_sv2p_cutoff():
  """SV2P with an L2 loss cutoff, for sparse environments such as Pong."""
  hp = next_frame_sv2p()
  hp.video_modality_loss_cutoff = 0.4
  hp.video_num_input_frames = 4
  hp.video_num_target_frames = 1
  return hp
| 31.453947 | 80 | 0.776825 |
from __future__ import division
from __future__ import print_function
from tensor2tensor.layers import modalities
from tensor2tensor.models.video import basic_stochastic
from tensor2tensor.utils import registry
@registry.register_hparams
def next_frame_sv2p():
hparams = basic_stochastic.next_frame_basic_stochastic()
hparams.optimizer = "true_adam"
hparams.learning_rate_schedule = "constant"
hparams.learning_rate_constant = 1e-3
hparams.video_num_input_frames = 1
hparams.video_num_target_frames = 3
hparams.batch_size = 16
hparams.bottom = {
"inputs": modalities.video_raw_bottom,
"targets": modalities.video_raw_targets_bottom,
}
hparams.loss = {
"targets": modalities.video_l2_raw_loss,
}
hparams.top = {
"targets": modalities.video_raw_top,
}
hparams.video_modality_loss_cutoff = 0.0
hparams.scheduled_sampling_mode = "count"
hparams.scheduled_sampling_k = 900.0
hparams.add_hparam("reward_prediction", True)
hparams.add_hparam("reward_prediction_stop_gradient", False)
hparams.add_hparam("reward_prediction_buffer_size", 0)
hparams.add_hparam("model_options", "CDNA")
hparams.add_hparam("num_masks", 10)
hparams.add_hparam("multi_latent", False)
hparams.add_hparam("relu_shift", 1e-12)
hparams.add_hparam("dna_kernel_size", 5)
hparams.add_hparam("upsample_method", "conv2d_transpose")
hparams.add_hparam("reward_model", "basic")
hparams.add_hparam("visualize_logits_histogram", True)
return hparams
@registry.register_hparams
def next_frame_sv2p_discrete():
hparams = next_frame_sv2p()
hparams.action_injection = "multiplicative"
hparams.small_mode = True
hparams.add_hparam("bottleneck_bits", 128)
hparams.add_hparam("bottleneck_noise", 0.02)
hparams.add_hparam("discrete_warmup_steps", 40000)
hparams.add_hparam("full_latent_tower", False)
hparams.add_hparam("latent_predictor_state_size", 128)
hparams.add_hparam("latent_predictor_temperature", 0.5)
hparams.add_hparam("discretize_warmup_steps", 40000)
return hparams
@registry.register_hparams
def next_frame_sv2p_atari():
hparams = next_frame_sv2p()
hparams.video_num_input_frames = 4
hparams.video_num_target_frames = 4
hparams.action_injection = "multiplicative"
hparams.num_iterations_1st_stage = 12000
hparams.num_iterations_2nd_stage = 12000
hparams.anneal_end = 40000
hparams.latent_loss_multiplier_schedule = "noisy_linear_cosine_decay"
hparams.latent_loss_multiplier = 1e-3
hparams.information_capacity = 0.0
hparams.small_mode = True
return hparams
@registry.register_hparams
def next_frame_sv2p_atari_softmax():
hparams = next_frame_sv2p_atari()
hparams.bottom = {}
hparams.loss = {}
hparams.top = {}
hparams.internal_loss = True
return hparams
@registry.register_hparams
def next_frame_sv2p_atari_deterministic():
hparams = next_frame_sv2p_atari()
hparams.stochastic_model = False
return hparams
@registry.register_hparams
def next_frame_sv2p_atari_softmax_deterministic():
hparams = next_frame_sv2p_atari_softmax()
hparams.stochastic_model = False
return hparams
@registry.register_hparams
def next_frame_sv2p_tiny():
hparams = next_frame_sv2p_atari_softmax()
hparams.batch_size = 2
hparams.tiny_mode = True
hparams.num_masks = 1
hparams.video_modality_loss_cutoff = 0.4
hparams.video_num_input_frames = 4
hparams.video_num_target_frames = 4
return hparams
@registry.register_hparams
def next_frame_sv2p_tiny_external():
hparams = next_frame_sv2p_tiny()
hparams.internal_loss = False
return hparams
@registry.register_hparams
def next_frame_sv2p_cutoff():
hparams = next_frame_sv2p()
hparams.video_modality_loss_cutoff = 0.4
hparams.video_num_input_frames = 4
hparams.video_num_target_frames = 1
return hparams
| true | true |
f7352ca98740e2ca42cce07e2ff806ab32c5e176 | 7,486 | py | Python | scripts/wine/wine_explain.py | NRuf77/proset | 101d491e05c2423faddca31029232982f46d8831 | [
"MIT"
] | null | null | null | scripts/wine/wine_explain.py | NRuf77/proset | 101d491e05c2423faddca31029232982f46d8831 | [
"MIT"
] | null | null | null | scripts/wine/wine_explain.py | NRuf77/proset | 101d491e05c2423faddca31029232982f46d8831 | [
"MIT"
] | null | null | null | """Explain proset classifier trained on wine classification data.
Copyright by Nikolaus Ruf
Released under the MIT license - see LICENSE file for details
"""
# Interactive script: load a fitted proset wine classifier, choose a reference
# point, then produce global and single-sample explanations (proset report,
# SHAP values, and decision-surface cross-sections).
from copy import deepcopy
import gzip
import os
import pickle
import matplotlib.pyplot as plt
import numpy as np
import shap
import proset.utility as utility
print("* Apply user settings")
input_path = "scripts/results"
output_path = "scripts/reports"
# Candidate model files produced by the wine training scripts.
input_files = [
    "wine_2d_05_model.gz",
    "wine_2d_50_model.gz",
    "wine_2d_95_model.gz",
    "wine_1d_model.gz",
    "wine_fix_model.gz",
    "wine_fix_opt_model.gz"
]
print(" Select input file:")
for i, file_name in enumerate(input_files):
    print(" {} - {}".format(i, file_name))
# Prompt on stdin for the index of the model file to explain.
choice = int(input())
input_file = input_files[choice]
export_file = input_file.replace(".gz", "_explain.xlsx")
model_name = input_file.replace(".gz", "")
print("* Load model fit results")
# Result is a gzipped pickle with "model" (transform + classifier) and "data"
# (train/test splits plus feature names) entries.
with gzip.open(os.path.join(input_path, input_file), mode="rb") as file:
    result = pickle.load(file)
print("* Determine reference point")
# Scale/offset undo the standard scaling so reports show original units.
scale = np.sqrt(result["model"]["transform"].var_)
offset = result["model"]["transform"].mean_
train_features = result["model"]["transform"].transform(result["data"]["X_train"])
train_labels = result["data"]["y_train"]
reference = utility.choose_reference_point(
    features=train_features,
    model=result["model"]["model"],
    scale=scale,
    offset=offset
)
utility.print_point_report(
    reference=reference,
    feature_names=result["data"]["feature_names"],
    target_names=result["model"].classes_
)
print("* Show global results")
test_features = result["model"]["transform"].transform(result["data"]["X_test"])
test_labels = result["data"]["y_test"]
prediction, familiarity = result["model"]["model"].predict(X=test_features, compute_familiarity=True)
misclassified = prediction != test_labels
plotter = utility.ClassifierPlots(
    model=result["model"]["model"],
    model_name=model_name,
    feature_names=result["data"]["feature_names"],
    scale=scale,
    offset=offset
)
# Keep the axis ranges so later batch maps can be drawn on the same scale.
x_range, y_range = plotter.plot_batch_map(
    batch=1,
    features=test_features,
    target=test_labels,
    comment="test samples",
    highlight=misclassified,
    highlight_name="misclassified",
    reference=reference["features_raw"]
)
plotter.plot_features(
    batch=1,
    features=test_features,
    target=test_labels,
    comment="test samples",
    highlight=misclassified,
    highlight_name="misclassified",
    reference=reference["features_raw"],
    show_index=False
)
print("* Compute global SHAP values")
# Shrink a copy of the model to its active features before wrapping it for
# SHAP; the reference point's features serve as the masker background.
shrunk_model = deepcopy(result["model"]["model"])
shrunk_model.shrink()
active_features = reference["active_features"]
active_feature_names = result["data"]["feature_names"][active_features]
explainer = shap.Explainer(
    model=shrunk_model.predict_proba,
    masker=reference["features_raw"][0:1, active_features],
    feature_names=active_feature_names
)
shap_values = explainer(test_features[:, active_features])
# One mean-absolute-SHAP bar chart per class.
for i, label in enumerate(result["model"].classes_):
    plt.figure()
    shap.plots.bar(shap_values[:, :, i])
    plt.title("Average SHAP values for class {} prediction".format(label))
print("* Find single point with worst classification result")
# "Worst" = lowest predicted probability assigned to the true class.
proba = result["model"]["model"].predict_proba(test_features)
truth_int = result["model"]["model"].label_encoder_.transform(test_labels)
worst_ix = np.argmin(proba[np.arange(test_labels.shape[0]), truth_int])
worst_features = test_features[worst_ix:(worst_ix + 1), :]
worst_label = test_labels[worst_ix]
worst_label_int = truth_int[worst_ix]
worst_point = {
    "index": worst_ix,
    "features_raw": worst_features,
    "features_processed": worst_features[:, active_features] * scale[active_features] + offset[active_features],
    "prediction": proba[worst_ix, :],
    "num_features": test_features.shape[1],
    "active_features": active_features
}  # use active_features here to ensure same order of content as reference
print(" True class = '{}'".format(test_labels[worst_ix]))
utility.print_point_report(
    reference=worst_point,
    feature_names=result["data"]["feature_names"],
    target_names=result["model"].classes_
)
print("* Generate explanation report")
explain = result["model"]["model"].explain(
    X=worst_point["features_raw"],
    y=worst_label,
    familiarity=familiarity,
    sample_name="test sample {}".format(worst_ix),
    feature_names=result["data"]["feature_names"],
    scale=scale,
    offset=offset
)
utility.write_report(file_path=os.path.join(output_path, export_file), report=explain)
print("* Show results for single point")
plotter.plot_batch_map(
    batch=1,
    features=train_features,
    target=train_labels,
    comment="training samples",
    reference=reference["features_raw"],
    explain_features=worst_point["features_raw"],
    explain_target=worst_label,
    x_range=x_range,
    y_range=y_range
)
# Same map without the training samples, for an uncluttered view.
plotter.plot_batch_map(
    batch=1,
    reference=reference["features_raw"],
    explain_features=worst_point["features_raw"],
    explain_target=worst_label,
    x_range=x_range,
    y_range=y_range
)
plotter.plot_features(
    batch=1,
    features=train_features,
    target=train_labels,
    comment="training samples",
    reference=reference["features_raw"],
    explain_features=worst_point["features_raw"],
    explain_target=worst_label,
    show_index=False
)
print("* Compute SHAP values for single point")
# One force plot per class for the worst-classified sample.
for i in range(proba.shape[1]):
    explain = shap_values[worst_ix, :, i]
    shap.plots.force(
        base_value=explain.base_values,
        shap_values=explain.values,
        features=test_features[worst_ix:(worst_ix + 1), active_features],
        feature_names=active_feature_names,
        matplotlib=True
    )
    plt.gca().set_position([0.1, -0.25, 0.8, 0.8])  # force plot messes up the axes position within the figure
    plt.suptitle("SHAP force plot: probability for class '{}' is {:0.2f}, true class is '{}'".format(
        result["model"].classes_[i], proba[worst_ix, i], worst_label
    ))
print("* Show cross-sections of decision surface")
# Top two features by mean absolute SHAP value for the true class (global).
importance = np.mean(np.abs(shap_values[:, :, worst_label_int].values), axis=0)
top_two = active_features[np.argsort(importance)[-1:-3:-1]]
plotter.plot_surface(
    features=test_features,
    target=None,  # suppress sample plot, features only used to determine plot ranges
    baseline=worst_point["features_raw"],
    plot_index=top_two,
    comment="globally most important features",
    reference=reference["features_raw"],
    explain_features=worst_point["features_raw"],
    explain_target=worst_label,
    familiarity=familiarity,
    quantiles=(0.01, 0.05),
    use_proba=True
)
# Repeat with the two features that matter most for this specific sample.
importance = np.abs(shap_values[worst_ix, :, worst_label_int].values)
top_two = active_features[np.argsort(importance)[-1:-3:-1]]
plotter.plot_surface(
    features=test_features,
    target=None,  # suppress sample plot, features only used to determine plot ranges
    baseline=worst_point["features_raw"],
    plot_index=top_two,
    comment="most important features for single point",
    reference=reference["features_raw"],
    explain_features=worst_point["features_raw"],
    explain_target=worst_label,
    familiarity=familiarity,
    quantiles=(0.01, 0.05),
    use_proba=True
)
print("* Done")
| 34.027273 | 113 | 0.706118 |
from copy import deepcopy
import gzip
import os
import pickle
import matplotlib.pyplot as plt
import numpy as np
import shap
import proset.utility as utility
print("* Apply user settings")
input_path = "scripts/results"
output_path = "scripts/reports"
input_files = [
"wine_2d_05_model.gz",
"wine_2d_50_model.gz",
"wine_2d_95_model.gz",
"wine_1d_model.gz",
"wine_fix_model.gz",
"wine_fix_opt_model.gz"
]
print(" Select input file:")
for i, file_name in enumerate(input_files):
print(" {} - {}".format(i, file_name))
choice = int(input())
input_file = input_files[choice]
export_file = input_file.replace(".gz", "_explain.xlsx")
model_name = input_file.replace(".gz", "")
print("* Load model fit results")
with gzip.open(os.path.join(input_path, input_file), mode="rb") as file:
result = pickle.load(file)
print("* Determine reference point")
scale = np.sqrt(result["model"]["transform"].var_)
offset = result["model"]["transform"].mean_
train_features = result["model"]["transform"].transform(result["data"]["X_train"])
train_labels = result["data"]["y_train"]
reference = utility.choose_reference_point(
features=train_features,
model=result["model"]["model"],
scale=scale,
offset=offset
)
utility.print_point_report(
reference=reference,
feature_names=result["data"]["feature_names"],
target_names=result["model"].classes_
)
print("* Show global results")
test_features = result["model"]["transform"].transform(result["data"]["X_test"])
test_labels = result["data"]["y_test"]
prediction, familiarity = result["model"]["model"].predict(X=test_features, compute_familiarity=True)
misclassified = prediction != test_labels
plotter = utility.ClassifierPlots(
model=result["model"]["model"],
model_name=model_name,
feature_names=result["data"]["feature_names"],
scale=scale,
offset=offset
)
x_range, y_range = plotter.plot_batch_map(
batch=1,
features=test_features,
target=test_labels,
comment="test samples",
highlight=misclassified,
highlight_name="misclassified",
reference=reference["features_raw"]
)
plotter.plot_features(
batch=1,
features=test_features,
target=test_labels,
comment="test samples",
highlight=misclassified,
highlight_name="misclassified",
reference=reference["features_raw"],
show_index=False
)
print("* Compute global SHAP values")
shrunk_model = deepcopy(result["model"]["model"])
shrunk_model.shrink()
active_features = reference["active_features"]
active_feature_names = result["data"]["feature_names"][active_features]
explainer = shap.Explainer(
model=shrunk_model.predict_proba,
masker=reference["features_raw"][0:1, active_features],
feature_names=active_feature_names
)
shap_values = explainer(test_features[:, active_features])
for i, label in enumerate(result["model"].classes_):
plt.figure()
shap.plots.bar(shap_values[:, :, i])
plt.title("Average SHAP values for class {} prediction".format(label))
print("* Find single point with worst classification result")
proba = result["model"]["model"].predict_proba(test_features)
truth_int = result["model"]["model"].label_encoder_.transform(test_labels)
worst_ix = np.argmin(proba[np.arange(test_labels.shape[0]), truth_int])
worst_features = test_features[worst_ix:(worst_ix + 1), :]
worst_label = test_labels[worst_ix]
worst_label_int = truth_int[worst_ix]
worst_point = {
"index": worst_ix,
"features_raw": worst_features,
"features_processed": worst_features[:, active_features] * scale[active_features] + offset[active_features],
"prediction": proba[worst_ix, :],
"num_features": test_features.shape[1],
"active_features": active_features
}
print(" True class = '{}'".format(test_labels[worst_ix]))
utility.print_point_report(
reference=worst_point,
feature_names=result["data"]["feature_names"],
target_names=result["model"].classes_
)
print("* Generate explanation report")
explain = result["model"]["model"].explain(
X=worst_point["features_raw"],
y=worst_label,
familiarity=familiarity,
sample_name="test sample {}".format(worst_ix),
feature_names=result["data"]["feature_names"],
scale=scale,
offset=offset
)
utility.write_report(file_path=os.path.join(output_path, export_file), report=explain)
print("* Show results for single point")
plotter.plot_batch_map(
batch=1,
features=train_features,
target=train_labels,
comment="training samples",
reference=reference["features_raw"],
explain_features=worst_point["features_raw"],
explain_target=worst_label,
x_range=x_range,
y_range=y_range
)
plotter.plot_batch_map(
batch=1,
reference=reference["features_raw"],
explain_features=worst_point["features_raw"],
explain_target=worst_label,
x_range=x_range,
y_range=y_range
)
plotter.plot_features(
batch=1,
features=train_features,
target=train_labels,
comment="training samples",
reference=reference["features_raw"],
explain_features=worst_point["features_raw"],
explain_target=worst_label,
show_index=False
)
print("* Compute SHAP values for single point")
for i in range(proba.shape[1]):
explain = shap_values[worst_ix, :, i]
shap.plots.force(
base_value=explain.base_values,
shap_values=explain.values,
features=test_features[worst_ix:(worst_ix + 1), active_features],
feature_names=active_feature_names,
matplotlib=True
)
plt.gca().set_position([0.1, -0.25, 0.8, 0.8])
plt.suptitle("SHAP force plot: probability for class '{}' is {:0.2f}, true class is '{}'".format(
result["model"].classes_[i], proba[worst_ix, i], worst_label
))
print("* Show cross-sections of decision surface")
importance = np.mean(np.abs(shap_values[:, :, worst_label_int].values), axis=0)
top_two = active_features[np.argsort(importance)[-1:-3:-1]]
plotter.plot_surface(
features=test_features,
target=None,
baseline=worst_point["features_raw"],
plot_index=top_two,
comment="globally most important features",
reference=reference["features_raw"],
explain_features=worst_point["features_raw"],
explain_target=worst_label,
familiarity=familiarity,
quantiles=(0.01, 0.05),
use_proba=True
)
importance = np.abs(shap_values[worst_ix, :, worst_label_int].values)
top_two = active_features[np.argsort(importance)[-1:-3:-1]]
plotter.plot_surface(
features=test_features,
target=None,
baseline=worst_point["features_raw"],
plot_index=top_two,
comment="most important features for single point",
reference=reference["features_raw"],
explain_features=worst_point["features_raw"],
explain_target=worst_label,
familiarity=familiarity,
quantiles=(0.01, 0.05),
use_proba=True
)
print("* Done")
| true | true |
f7352cb393e14e541884e6c60236d4f8a7073061 | 123 | py | Python | fsleyes/__main__.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 12 | 2018-05-05T01:36:25.000Z | 2021-09-23T20:44:08.000Z | fsleyes/__main__.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 97 | 2018-05-05T02:17:23.000Z | 2022-03-29T14:58:42.000Z | fsleyes/__main__.py | pauldmccarthy/fsleyes | 453a6b91ec7763c39195814d635257e3766acf83 | [
"Apache-2.0"
] | 6 | 2017-12-09T09:02:00.000Z | 2021-03-05T18:55:13.000Z | #!/usr/bin/env python
if __name__ == '__main__':
    # Entry point for ``python -m fsleyes``: delegate to the application's
    # main function and propagate its exit status to the shell.
    import sys
    import fsleyes.main as main
    sys.exit(main.main())
| 17.571429 | 31 | 0.650407 |
if __name__ == '__main__':
import sys
import fsleyes.main as main
sys.exit(main.main())
| true | true |
f7352d278824bf95db50a16c3dea7eab65426af5 | 1,051 | py | Python | main/views.py | tamirmatok/Beyond-07-team-1 | 3bd7de8916574b28b9f96fc99526de7c4e27eaa2 | [
"MIT"
] | 1 | 2022-03-03T12:03:17.000Z | 2022-03-03T12:03:17.000Z | main/views.py | tamirmatok/Beyond-07-team-1 | 3bd7de8916574b28b9f96fc99526de7c4e27eaa2 | [
"MIT"
] | 38 | 2022-03-07T14:14:48.000Z | 2022-03-31T18:37:52.000Z | main/views.py | tamirmatok/Beyond-07-team-1 | 3bd7de8916574b28b9f96fc99526de7c4e27eaa2 | [
"MIT"
] | 5 | 2022-02-28T18:55:09.000Z | 2022-03-06T08:04:40.000Z | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect
from django.contrib.auth import logout
from dogowner.models import DogOwner
from daycare.models import DayCare
from dogowner.views import dog_owner_home
from daycare.views import daycare_home
import message.views
def index(request):
    """Route the root URL: authenticated users go to the homepage, anonymous
    visitors to the login page."""
    destination = 'homepage' if request.user.is_authenticated else 'login'
    return redirect(to=destination)
@login_required()
def homepage(request):
    """Dispatch the logged-in user to the home view matching their profile.

    Fix: previously, a user with neither a DogOwner nor a DayCare profile fell
    off the end of the function and the view returned None, which makes Django
    raise ``ValueError: The view didn't return an HttpResponse object``. Such
    users are now redirected to the login page instead.
    """
    if DogOwner.objects.filter(user=request.user).exists():
        return dog_owner_home(request)
    if DayCare.objects.filter(user=request.user).exists():
        return daycare_home(request)
    # Account has no associated profile: send it back to login rather than
    # returning None (a server error).
    return redirect(to='login')
def about(request):
    """Render the static "about" page."""
    return render(request, 'main/about.html')
def logout_view(request):
    """Log the current user out, then serve the anonymous landing response."""
    logout(request)
    return index(request)
@login_required()
def messages_view(request):
    """Delegate to the message app's inbox view (login required)."""
    return message.views.messages(request)
@login_required()
def chat_view(request, contact):
    """Delegate to the message app's chat view for the given contact."""
    return message.views.chat(request, contact)
| 23.886364 | 60 | 0.766889 | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.shortcuts import redirect
from django.contrib.auth import logout
from dogowner.models import DogOwner
from daycare.models import DayCare
from dogowner.views import dog_owner_home
from daycare.views import daycare_home
import message.views
def index(request):
if request.user.is_authenticated:
return redirect(to='homepage')
return redirect(to='login')
@login_required()
def homepage(request):
if DogOwner.objects.filter(user=request.user).exists():
return dog_owner_home(request)
elif DayCare.objects.filter(user=request.user).exists():
return daycare_home(request)
def about(request):
return render(request, 'main/about.html')
def logout_view(request):
logout(request)
return index(request)
@login_required()
def messages_view(request):
return message.views.messages(request)
@login_required()
def chat_view(request, contact):
return message.views.chat(request, contact)
| true | true |
f7352d51b446a770f9a814b7005c5afb8d25c8c5 | 662 | py | Python | src/sentry/incidents/receivers.py | learninto/sentry | 4f9f564841498b3af49c1677d6b61f3e47b01923 | [
"BSD-3-Clause"
] | null | null | null | src/sentry/incidents/receivers.py | learninto/sentry | 4f9f564841498b3af49c1677d6b61f3e47b01923 | [
"BSD-3-Clause"
] | null | null | null | src/sentry/incidents/receivers.py | learninto/sentry | 4f9f564841498b3af49c1677d6b61f3e47b01923 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
from sentry.incidents.models import IncidentSuspectCommit
from sentry.signals import release_commits_updated
@release_commits_updated.connect(weak=False)
def handle_release_commits_updated(removed_commit_ids, added_commit_ids, **kwargs):
    """Recompute suspect commits for incidents touched by a release commit change."""
    # Imported lazily, matching the original, to avoid an import cycle with
    # the tasks module.
    from sentry.incidents.tasks import calculate_incident_suspects

    # Any incident referencing a commit that was added to or removed from the
    # release needs its suspects recalculated.
    changed_commit_ids = removed_commit_ids | added_commit_ids
    affected_incident_ids = (
        IncidentSuspectCommit.objects.filter(commit_id__in=changed_commit_ids)
        .values_list("incident_id", flat=True)
        .distinct()
    )
    for incident_id in affected_incident_ids:
        calculate_incident_suspects.apply_async(kwargs={"incident_id": incident_id})
| 36.777778 | 97 | 0.796073 | from __future__ import absolute_import
from sentry.incidents.models import IncidentSuspectCommit
from sentry.signals import release_commits_updated
@release_commits_updated.connect(weak=False)
def handle_release_commits_updated(removed_commit_ids, added_commit_ids, **kwargs):
from sentry.incidents.tasks import calculate_incident_suspects
incident_ids = (
IncidentSuspectCommit.objects.filter(commit_id__in=removed_commit_ids | added_commit_ids)
.values_list("incident_id", flat=True)
.distinct()
)
for incident_id in incident_ids:
calculate_incident_suspects.apply_async(kwargs={"incident_id": incident_id})
| true | true |
f7352e39399f7e40e21e71af52bbc0cedb18c9f5 | 513 | py | Python | pt/pt.py | Mic92/tracedumpd | a84eac58106f1f1d7a82f5dee2a327861e763e4e | [
"MIT"
] | 1 | 2021-03-22T18:04:53.000Z | 2021-03-22T18:04:53.000Z | pt/pt.py | Mic92/tracedump | a84eac58106f1f1d7a82f5dee2a327861e763e4e | [
"MIT"
] | null | null | null | pt/pt.py | Mic92/tracedump | a84eac58106f1f1d7a82f5dee2a327861e763e4e | [
"MIT"
] | null | null | null | import glob
from pathlib import Path
from cffi import FFI
with open(Path(__file__).resolve().parent.joinpath("ffi.h")) as f:
header = f.read()
ffibuilder = FFI()
ffibuilder.cdef(header)
ffibuilder.set_source(
"tracedump._pt",
None,
sources=glob.glob("*.cpp"),
extra_compile_args=["-std=c++17", "-Wno-register", "-fvisibility=hidden"],
extra_link_args=["-lipt"],
source_extension=".cpp",
)
if __name__ == "__main__":
ffibuilder.compile()
| 23.318182 | 82 | 0.621832 | import glob
from pathlib import Path
from cffi import FFI
with open(Path(__file__).resolve().parent.joinpath("ffi.h")) as f:
header = f.read()
ffibuilder = FFI()
ffibuilder.cdef(header)
ffibuilder.set_source(
"tracedump._pt",
None,
sources=glob.glob("*.cpp"),
extra_compile_args=["-std=c++17", "-Wno-register", "-fvisibility=hidden"],
extra_link_args=["-lipt"],
source_extension=".cpp",
)
if __name__ == "__main__":
ffibuilder.compile()
| true | true |
f7352e5c3be18f665058f5b27e82e15014db3389 | 2,589 | py | Python | tests/integration/models/test_plot.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2020-01-19T03:17:18.000Z | 2020-01-19T03:17:18.000Z | tests/integration/models/test_plot.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2021-05-12T10:14:45.000Z | 2021-05-12T10:14:45.000Z | tests/integration/models/test_plot.py | daledali/bokeh | c4f0debe7bd230d7e1aa8500716e8e997c04f528 | [
"BSD-3-Clause"
] | 1 | 2020-01-21T12:03:58.000Z | 2020-01-21T12:03:58.000Z | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import time
# Bokeh imports
from bokeh.layouts import column
from bokeh.models import Button, Plot, Range1d
#-----------------------------------------------------------------------------
# Tests
#-----------------------------------------------------------------------------
pytest_plugins = (
    "bokeh._testing.plugins.project",
)
@pytest.mark.integration
@pytest.mark.selenium
class Test_Plot(object):
    def test_inner_dims_trigger_on_dynamic_add(self, bokeh_server_page):
        """A plot added to a layout after startup must have its inner
        dimensions (inner_width / inner_height) computed and synced back to
        the server-side model."""
        data = {}  # captures (old, new) pairs from the property callbacks
        def modify_doc(doc):
            p1 = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=10)
            p2 = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=10)
            button = Button(css_classes=['foo'])
            layout = column(p1, button)
            # Clicking the button dynamically adds p2 to the layout.
            def cb(event):
                if p2 not in layout.children:
                    layout.children = [p1, button, p2]
            button.on_event('button_click', cb)
            # Record inner-dimension changes reported for the dynamically
            # added plot.
            def iw(attr, old, new): data['iw'] = (old, new)
            def ih(attr, old, new): data['ih'] = (old, new)
            p2.on_change('inner_width', iw)
            p2.on_change('inner_height', ih)
            doc.add_root(layout)
        page = bokeh_server_page(modify_doc)
        button = page.driver.find_element_by_css_selector('.foo .bk-btn')
        button.click()
        # updates can take some time
        time.sleep(0.5)
        # Inner dimensions start unset (None) and become ints strictly
        # smaller than the 400px outer size (borders/axes use the rest).
        assert data['iw'][0] is None
        assert isinstance(data['iw'][1], int)
        assert data['iw'][1]< 400
        assert data['ih'][0] is None
        assert isinstance(data['ih'][1], int)
        assert data['ih'][1] < 400
        # XXX (bev) disabled until https://github.com/bokeh/bokeh/issues/7970 is resolved
        #assert page.has_no_console_errors()
| 35.465753 | 115 | 0.471611 |
import pytest ; pytest
import time
from bokeh.layouts import column
from bokeh.models import Button, Plot, Range1d
pytest_plugins = (
"bokeh._testing.plugins.project",
)
@pytest.mark.integration
@pytest.mark.selenium
class Test_Plot(object):
def test_inner_dims_trigger_on_dynamic_add(self, bokeh_server_page):
data = {}
def modify_doc(doc):
p1 = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=10)
p2 = Plot(plot_height=400, plot_width=400, x_range=Range1d(0, 1), y_range=Range1d(0, 1), min_border=10)
button = Button(css_classes=['foo'])
layout = column(p1, button)
def cb(event):
if p2 not in layout.children:
layout.children = [p1, button, p2]
button.on_event('button_click', cb)
def iw(attr, old, new): data['iw'] = (old, new)
def ih(attr, old, new): data['ih'] = (old, new)
p2.on_change('inner_width', iw)
p2.on_change('inner_height', ih)
doc.add_root(layout)
page = bokeh_server_page(modify_doc)
button = page.driver.find_element_by_css_selector('.foo .bk-btn')
button.click()
time.sleep(0.5)
assert data['iw'][0] is None
assert isinstance(data['iw'][1], int)
assert data['iw'][1]< 400
assert data['ih'][0] is None
assert isinstance(data['ih'][1], int)
assert data['ih'][1] < 400
| true | true |
f7352e5ed2807ab869f2492e923a8d360d62077b | 1,923 | py | Python | src/pygaps/api.py | pauliacomi/pyGAPS | c4d45b710e171c937471686437e382e05aec4ed5 | [
"MIT"
] | 35 | 2018-01-24T14:59:08.000Z | 2022-03-10T02:47:58.000Z | src/pygaps/api.py | pauliacomi/pyGAPS | c4d45b710e171c937471686437e382e05aec4ed5 | [
"MIT"
] | 29 | 2018-01-06T12:08:08.000Z | 2022-03-11T20:26:53.000Z | src/pygaps/api.py | pauliacomi/pyGAPS | c4d45b710e171c937471686437e382e05aec4ed5 | [
"MIT"
] | 20 | 2019-06-12T19:20:29.000Z | 2022-03-02T09:57:02.000Z | # pylint: disable=W0614,W0611,W0622
# flake8: noqa
# isort:skip_file
# Parsing
from .parsing.csv import isotherm_from_csv
from .parsing.csv import isotherm_to_csv
from .parsing.bel_dat import isotherm_from_bel
from .parsing.excel import isotherm_from_xl
from .parsing.excel import isotherm_to_xl
from .parsing.isodb import isotherm_from_isodb
from .parsing.json import isotherm_from_json
from .parsing.json import isotherm_to_json
from .parsing.sqlite import isotherms_from_db
from .parsing.sqlite import isotherm_delete_db
from .parsing.sqlite import isotherm_to_db
# Characterisation
from .characterisation.alphas import alpha_s
from .characterisation.alphas import alpha_s_raw
from .characterisation.area_bet import area_BET
from .characterisation.area_bet import area_BET_raw
from .characterisation.area_langmuir import area_langmuir
from .characterisation.area_langmuir import area_langmuir_raw
from .characterisation.dr_da_plots import da_plot
from .characterisation.dr_da_plots import dr_plot
from .iast.iast import iast
from .iast.iast import iast_binary_svp
from .iast.iast import iast_binary_vle
from .iast.iast import reverse_iast
from .characterisation.initial_enthalpy import initial_enthalpy_comp
from .characterisation.initial_enthalpy import initial_enthalpy_point
from .characterisation.initial_henry import initial_henry_slope
from .characterisation.initial_henry import initial_henry_virial
from .characterisation.isosteric_enthalpy import isosteric_enthalpy
from .characterisation.isosteric_enthalpy import isosteric_enthalpy_raw
from .characterisation.psd_dft import psd_dft
from .characterisation.psd_mesoporous import psd_mesoporous
from .characterisation.psd_microporous import psd_microporous
from .characterisation.tplot import t_plot
from .characterisation.tplot import t_plot_raw
# Modelling/fitting
from .modelling import model_iso
# Plotting
from .graphing.isotherm_graphs import plot_iso
| 40.0625 | 71 | 0.870515 |
from .parsing.csv import isotherm_from_csv
from .parsing.csv import isotherm_to_csv
from .parsing.bel_dat import isotherm_from_bel
from .parsing.excel import isotherm_from_xl
from .parsing.excel import isotherm_to_xl
from .parsing.isodb import isotherm_from_isodb
from .parsing.json import isotherm_from_json
from .parsing.json import isotherm_to_json
from .parsing.sqlite import isotherms_from_db
from .parsing.sqlite import isotherm_delete_db
from .parsing.sqlite import isotherm_to_db
from .characterisation.alphas import alpha_s
from .characterisation.alphas import alpha_s_raw
from .characterisation.area_bet import area_BET
from .characterisation.area_bet import area_BET_raw
from .characterisation.area_langmuir import area_langmuir
from .characterisation.area_langmuir import area_langmuir_raw
from .characterisation.dr_da_plots import da_plot
from .characterisation.dr_da_plots import dr_plot
from .iast.iast import iast
from .iast.iast import iast_binary_svp
from .iast.iast import iast_binary_vle
from .iast.iast import reverse_iast
from .characterisation.initial_enthalpy import initial_enthalpy_comp
from .characterisation.initial_enthalpy import initial_enthalpy_point
from .characterisation.initial_henry import initial_henry_slope
from .characterisation.initial_henry import initial_henry_virial
from .characterisation.isosteric_enthalpy import isosteric_enthalpy
from .characterisation.isosteric_enthalpy import isosteric_enthalpy_raw
from .characterisation.psd_dft import psd_dft
from .characterisation.psd_mesoporous import psd_mesoporous
from .characterisation.psd_microporous import psd_microporous
from .characterisation.tplot import t_plot
from .characterisation.tplot import t_plot_raw
from .modelling import model_iso
from .graphing.isotherm_graphs import plot_iso
| true | true |
f7352ebe72ee0f35ca69d07b451a58729d256f7a | 341 | py | Python | tests/test_api.py | mactov/dbclient | 563d077967fbc383e6c7c4cc6c92ec07b750db56 | [
"MIT"
] | null | null | null | tests/test_api.py | mactov/dbclient | 563d077967fbc383e6c7c4cc6c92ec07b750db56 | [
"MIT"
] | null | null | null | tests/test_api.py | mactov/dbclient | 563d077967fbc383e6c7c4cc6c92ec07b750db56 | [
"MIT"
] | null | null | null | import sys
import json
from urllib.request import urlopen
# sys.path.append('../')
# from api import *
SERVER_URL = 'http://localhost:5000/'
def test_get_servers():
    """Check that GET /servers returns the expected JSON server list.

    Requires the API under test to be running locally at SERVER_URL.
    """
    url = SERVER_URL + 'servers'
    # Close the HTTP response deterministically; a bare urlopen(...).read()
    # leaves the connection open until garbage collection.
    with urlopen(url) as resp:
        response = resp.read()
    expected = json.dumps([{"server": "localhost"}, {"server": "db.ultech.fr"}])
    assert response.decode("utf-8") == expected
import json
from urllib.request import urlopen
SERVER_URL = 'http://localhost:5000/'
def test_get_servers():
url = SERVER_URL + 'servers'
response = urlopen(url).read()
assert response.decode("utf-8") == json.dumps([{"server": "localhost"}, {"server": "db.ultech.fr"}]) | true | true |
f7353011ba3d02275f936e272ca49b67bdebd94d | 1,567 | py | Python | src/deeply/datasets/colonoscopy/cvc_clinic_db.py | achillesrasquinha/deeply | fd1ce32da130591fc92df8df89e07f1497b2b902 | [
"MIT"
] | 2 | 2021-10-05T16:37:30.000Z | 2021-10-11T21:31:43.000Z | src/deeply/datasets/colonoscopy/cvc_clinic_db.py | achillesrasquinha/deeply | fd1ce32da130591fc92df8df89e07f1497b2b902 | [
"MIT"
] | null | null | null | src/deeply/datasets/colonoscopy/cvc_clinic_db.py | achillesrasquinha/deeply | fd1ce32da130591fc92df8df89e07f1497b2b902 | [
"MIT"
] | 1 | 2021-07-16T02:23:37.000Z | 2021-07-16T02:23:37.000Z | from deeply.datasets.util import image_mask
from tensorflow_datasets.core import (
Version,
GeneratorBasedBuilder
)
_DATASET_HOMEPAGE = "https://polyp.grand-challenge.org/CVCClinicDB/"
_DATASET_KAGGLE = "achillesrasquinha/cvcclinicdb"
_DATASET_DESCRIPTION = """
CVC-ClinicDB is a database of frames extracted from colonoscopy videos. These frames contain several examples of polyps. In addition to the frames, we provide the ground truth for the polyps. This ground truth consists of a mask corresponding to the region covered by the polyp in the image
"""
_DATASET_CITATION = """\
Bernal, J., Sánchez, F. J., Fernández-Esparrach, G., Gil, D., Rodríguez, C., & Vilariño, F. (2015). WM-DOVA maps for accurate polyp highlighting in colonoscopy: Validation vs. saliency maps from physicians. Computerized Medical Imaging and Graphics, 43, 99-111
"""
class CVCClinicDB(GeneratorBasedBuilder):
    """TFDS builder for the CVC-ClinicDB colonoscopy polyp dataset.

    The heavy lifting is delegated to the shared ``image_mask`` helpers;
    this class only contributes the dataset-specific metadata.
    """

    VERSION = Version("1.0.0")
    RELEASE_NOTES = {"1.0.0": "Initial Release"}

    def _info(self, *args, **kwargs):
        # Attach the dataset metadata and defer to the shared helper.
        return image_mask._info(
            self,
            *args,
            description=_DATASET_DESCRIPTION,
            homepage=_DATASET_HOMEPAGE,
            citation=_DATASET_CITATION,
            **kwargs,
        )

    def _split_generators(self, *args, **kwargs):
        # The data is hosted on Kaggle; the helper handles the download.
        return image_mask._split_generators(self, *args, kaggle=_DATASET_KAGGLE, **kwargs)

    def _generate_examples(self, *args, **kwargs):
        return image_mask._generate_examples(self, *args, **kwargs)
return image_mask._generate_examples(self, *args, **kwargs) | 41.236842 | 290 | 0.698787 | from deeply.datasets.util import image_mask
from tensorflow_datasets.core import (
Version,
GeneratorBasedBuilder
)
_DATASET_HOMEPAGE = "https://polyp.grand-challenge.org/CVCClinicDB/"
_DATASET_KAGGLE = "achillesrasquinha/cvcclinicdb"
_DATASET_DESCRIPTION = """
CVC-ClinicDB is a database of frames extracted from colonoscopy videos. These frames contain several examples of polyps. In addition to the frames, we provide the ground truth for the polyps. This ground truth consists of a mask corresponding to the region covered by the polyp in the image
"""
_DATASET_CITATION = """\
Bernal, J., Sánchez, F. J., Fernández-Esparrach, G., Gil, D., Rodríguez, C., & Vilariño, F. (2015). WM-DOVA maps for accurate polyp highlighting in colonoscopy: Validation vs. saliency maps from physicians. Computerized Medical Imaging and Graphics, 43, 99-111
"""
class CVCClinicDB(GeneratorBasedBuilder):
VERSION = Version("1.0.0")
RELEASE_NOTES = {
"1.0.0": "Initial Release"
}
def _info(self, *args, **kwargs):
return image_mask._info(self,
description = _DATASET_DESCRIPTION,
homepage = _DATASET_HOMEPAGE,
citation = _DATASET_CITATION,
*args, **kwargs
)
def _split_generators(self, *args, **kwargs):
return image_mask._split_generators(self, kaggle = _DATASET_KAGGLE, *args, **kwargs)
def _generate_examples(self, *args, **kwargs):
return image_mask._generate_examples(self, *args, **kwargs) | true | true |
f7353179c26a30dba2f3619c510d8718c113c632 | 1,818 | py | Python | openstates/openstates-master/openstates/vt/events.py | Jgorsick/Advocacy_Angular | 8906af3ba729b2303880f319d52bce0d6595764c | [
"CC-BY-4.0"
] | null | null | null | openstates/openstates-master/openstates/vt/events.py | Jgorsick/Advocacy_Angular | 8906af3ba729b2303880f319d52bce0d6595764c | [
"CC-BY-4.0"
] | null | null | null | openstates/openstates-master/openstates/vt/events.py | Jgorsick/Advocacy_Angular | 8906af3ba729b2303880f319d52bce0d6595764c | [
"CC-BY-4.0"
] | null | null | null | import datetime
import json
from billy.scrape.events import Event, EventScraper
class VTEventScraper(EventScraper):
    """Scrape committee meeting events from the Vermont legislature site."""

    jurisdiction = 'vt'

    # Layout shared by TimeSlot and StartTime when they hold a clock time.
    _WHEN_FORMAT = '%A, %B %d, %Y, %I:%M %p'

    def scrape(self, session, chambers):
        """Fetch all committee meetings for ``session`` and save them as events.

        ``session`` looks like ``"2017-2018"``; the site keys its meeting
        feed on the trailing year slug (everything after the first five
        characters).
        """
        year_slug = session[5:]
        url = 'http://legislature.vermont.gov/committee/loadAllMeetings/{}'.\
            format(year_slug)
        json_data = self.get(url).text
        events = json.loads(json_data)['data']

        for info in events:
            when, all_day = self._parse_when(info)
            event = Event(
                session=session,
                when=when,
                all_day=all_day,
                type='committee:meeting',
                description="Meeting of the {}".format(info['LongName']),
                location="{0}, Room {1}".format(info['BuildingName'], info['RoomNbr'])
            )
            event.add_source(url)
            event.add_participant(
                type='host',
                participant=info['LongName'],
                participant_type='committee'
            )
            self.save_event(event)

    def _parse_when(self, info):
        """Return ``(when, all_day)`` for one meeting record.

        A ``TimeSlot`` of ``'1'`` marks an all-day meeting, in which case
        only the date is parsed.  Otherwise ``TimeSlot`` is expected to hold
        the start time; when it is not parseable, fall back to ``StartTime``.
        """
        if info['TimeSlot'] == '1':
            return datetime.datetime.strptime(info['MeetingDate'], '%A, %B %d, %Y'), True
        try:
            when = datetime.datetime.strptime(
                info['MeetingDate'] + ', ' + info['TimeSlot'], self._WHEN_FORMAT
            )
        except ValueError:
            # Some records put the clock time in StartTime instead.
            when = datetime.datetime.strptime(
                info['MeetingDate'] + ', ' + info['StartTime'], self._WHEN_FORMAT
            )
        return when, False
| 34.961538 | 90 | 0.451595 | import datetime
import json
from billy.scrape.events import Event, EventScraper
class VTEventScraper(EventScraper):
jurisdiction = 'vt'
def scrape(self, session, chambers):
year_slug = session[5: ]
url = 'http://legislature.vermont.gov/committee/loadAllMeetings/{}'.\
format(year_slug)
json_data = self.get(url).text
events = json.loads(json_data)['data']
for info in events:
if info['TimeSlot'] == '1':
when = datetime.datetime.strptime(info['MeetingDate'], '%A, %B %d, %Y')
all_day = True
else:
try:
when = datetime.datetime.strptime(
info['MeetingDate'] + ', ' + info['TimeSlot'],
'%A, %B %d, %Y, %I:%M %p'
)
except ValueError:
when = datetime.datetime.strptime(
info['MeetingDate'] + ', ' + info['StartTime'],
'%A, %B %d, %Y, %I:%M %p'
)
all_day = False
event = Event(
session=session,
when=when,
all_day=all_day,
type='committee:meeting',
description="Meeting of the {}".format(info['LongName']),
location="{0}, Room {1}".format(info['BuildingName'], info['RoomNbr'])
)
event.add_source(url)
event.add_participant(
type='host',
participant=info['LongName'],
participant_type='committee'
)
self.save_event(event)
| true | true |
f735342134d7488b304766946a5de2b5f09621a4 | 3,373 | py | Python | inscrawler/browser.py | ckyeungac/instagram-crawler | 0e6a96665074b18a67a311592b9a6acc88419a02 | [
"MIT"
] | null | null | null | inscrawler/browser.py | ckyeungac/instagram-crawler | 0e6a96665074b18a67a311592b9a6acc88419a02 | [
"MIT"
] | null | null | null | inscrawler/browser.py | ckyeungac/instagram-crawler | 0e6a96665074b18a67a311592b9a6acc88419a02 | [
"MIT"
] | null | null | null | import os
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from .utils import randmized_sleep
class Browser:
    """Thin wrapper around a Selenium Chrome driver.

    Configures a (optionally headless) Chrome instance with a fixed desktop
    user agent and exposes small helpers for navigation, element lookup,
    scrolling and tab management used by the crawler.
    """

    def __init__(self, has_screen):
        # The chromedriver binary is expected at <package dir>/bin/chromedriver.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        # Fixed desktop UA so the site serves the desktop layout.
        useragent = "Mozilla/5.0 (X11; Linux i686; rv:77.0) Gecko/20100101 Firefox/77.0"
        service_args = ["--ignore-ssl-errors=true"]
        chrome_options = Options()
        chrome_options.add_argument(f'--user-agent={useragent}')
        if not has_screen:
            # No display available: run headless (sandbox disabled,
            # presumably for containerized environments — TODO confirm).
            chrome_options.add_argument("--headless")
            chrome_options.add_argument("--start-maximized")
            chrome_options.add_argument("--no-sandbox")
        self.driver = webdriver.Chrome(
            executable_path="%s/bin/chromedriver" % dir_path,
            service_args=service_args,
            chrome_options=chrome_options,
        )
        # Default implicit wait applied to every element lookup.
        self.driver.implicitly_wait(5)

    @property
    def page_height(self):
        # Full document height in pixels; used to scroll to the bottom.
        return self.driver.execute_script("return document.body.scrollHeight")

    def get(self, url):
        """Navigate the browser to ``url``."""
        self.driver.get(url)

    @property
    def current_url(self):
        return self.driver.current_url

    def implicitly_wait(self, t):
        """Set the driver-wide implicit wait to ``t`` seconds."""
        self.driver.implicitly_wait(t)

    def find_one(self, css_selector, elem=None, waittime=0):
        """Return the first element matching ``css_selector``, or None.

        Searches under ``elem`` when given, otherwise the whole page.  With
        a non-zero ``waittime`` the call blocks until the element appears;
        note the wait itself raises on timeout (unlike :meth:`find`).
        """
        obj = elem or self.driver
        if waittime:
            WebDriverWait(obj, waittime).until(
                EC.presence_of_element_located((By.CSS_SELECTOR, css_selector))
            )
        try:
            return obj.find_element(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None

    def find(self, css_selector, elem=None, waittime=0):
        """Return all elements matching ``css_selector``, or None.

        Unlike :meth:`find_one`, a timeout while waiting is swallowed and
        reported as None.
        """
        obj = elem or self.driver
        try:
            if waittime:
                WebDriverWait(obj, waittime).until(
                    EC.presence_of_element_located((By.CSS_SELECTOR, css_selector))
                )
        except TimeoutException:
            return None
        try:
            return obj.find_elements(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None

    def scroll_down(self, wait=0.3):
        """Scroll to the bottom of the page, then sleep roughly ``wait`` s."""
        self.driver.execute_script("window.scrollTo(0, document.body.scrollHeight)")
        randmized_sleep(wait)

    def scroll_up(self, offset=-1, wait=2):
        """Scroll up by ``offset`` pixels, or to the top when offset == -1."""
        if offset == -1:
            self.driver.execute_script("window.scrollTo(0, 0)")
        else:
            self.driver.execute_script("window.scrollBy(0, -%s)" % offset)
        randmized_sleep(wait)

    def js_click(self, elem):
        # Click via JavaScript; works even when a native click would fail
        # because the element is not scrolled into view.
        self.driver.execute_script("arguments[0].click();", elem)

    def open_new_tab(self, url):
        """Open ``url`` in a new tab and switch focus to it."""
        self.driver.execute_script("window.open('%s');" %url)
        self.driver.switch_to.window(self.driver.window_handles[1])

    def close_current_tab(self):
        """Close the active tab and switch back to the first tab."""
        self.driver.close()
        self.driver.switch_to.window(self.driver.window_handles[0])

    def __del__(self):
        # Best effort: the driver may already be gone at interpreter
        # shutdown, so ignore any failure while quitting.
        try:
            self.driver.quit()
        except Exception:
            pass
| 32.432692 | 88 | 0.653721 | import os
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from .utils import randmized_sleep
class Browser:
def __init__(self, has_screen):
dir_path = os.path.dirname(os.path.realpath(__file__))
useragent = "Mozilla/5.0 (X11; Linux i686; rv:77.0) Gecko/20100101 Firefox/77.0"
service_args = ["--ignore-ssl-errors=true"]
chrome_options = Options()
chrome_options.add_argument(f'--user-agent={useragent}')
if not has_screen:
chrome_options.add_argument("--headless")
chrome_options.add_argument("--start-maximized")
chrome_options.add_argument("--no-sandbox")
self.driver = webdriver.Chrome(
executable_path="%s/bin/chromedriver" % dir_path,
service_args=service_args,
chrome_options=chrome_options,
)
self.driver.implicitly_wait(5)
@property
def page_height(self):
return self.driver.execute_script("return document.body.scrollHeight")
def get(self, url):
self.driver.get(url)
@property
def current_url(self):
return self.driver.current_url
def implicitly_wait(self, t):
self.driver.implicitly_wait(t)
def find_one(self, css_selector, elem=None, waittime=0):
obj = elem or self.driver
if waittime:
WebDriverWait(obj, waittime).until(
EC.presence_of_element_located((By.CSS_SELECTOR, css_selector))
)
try:
return obj.find_element(By.CSS_SELECTOR, css_selector)
except NoSuchElementException:
return None
def find(self, css_selector, elem=None, waittime=0):
obj = elem or self.driver
try:
if waittime:
WebDriverWait(obj, waittime).until(
EC.presence_of_element_located((By.CSS_SELECTOR, css_selector))
)
except TimeoutException:
return None
try:
return obj.find_elements(By.CSS_SELECTOR, css_selector)
except NoSuchElementException:
return None
def scroll_down(self, wait=0.3):
self.driver.execute_script("window.scrollTo(0, document.body.scrollHeight)")
randmized_sleep(wait)
def scroll_up(self, offset=-1, wait=2):
if offset == -1:
self.driver.execute_script("window.scrollTo(0, 0)")
else:
self.driver.execute_script("window.scrollBy(0, -%s)" % offset)
randmized_sleep(wait)
def js_click(self, elem):
self.driver.execute_script("arguments[0].click();", elem)
def open_new_tab(self, url):
self.driver.execute_script("window.open('%s');" %url)
self.driver.switch_to.window(self.driver.window_handles[1])
def close_current_tab(self):
self.driver.close()
self.driver.switch_to.window(self.driver.window_handles[0])
def __del__(self):
try:
self.driver.quit()
except Exception:
pass
| true | true |
f73534a1c859d9f63f8135ec14770d5c8d99854e | 26,217 | py | Python | python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_composition.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 4,606 | 2018-06-21T17:45:20.000Z | 2022-03-31T23:39:42.000Z | python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_composition.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 6,221 | 2018-06-12T04:36:01.000Z | 2022-03-31T21:43:05.000Z | python_modules/dagster/dagster_tests/core_tests/definitions_tests/test_composition.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 619 | 2018-08-22T22:43:09.000Z | 2022-03-31T22:48:06.000Z | import pytest
from dagster import (
DependencyDefinition,
InputDefinition,
Int,
Nothing,
Output,
OutputDefinition,
PipelineDefinition,
SolidDefinition,
composite_solid,
execute_pipeline,
lambda_solid,
pipeline,
solid,
)
from dagster.core.definitions.decorators.hook import event_list_hook, success_hook
from dagster.core.definitions.events import DynamicOutput, HookExecutionResult
from dagster.core.errors import DagsterInvalidDefinitionError, DagsterInvariantViolationError
from dagster.core.execution.api import create_execution_plan
def builder(graph):
    """Wire ``return_one`` into ``add_one`` on the given graph object."""
    one = graph.return_one()
    return graph.add_one(one)
# Identity solid with a typed Int output; used to surface values in tests.
@lambda_solid(output_def=OutputDefinition(Int))
def echo(blah):
    return blah

@lambda_solid
def return_one():
    return 1

@lambda_solid
def return_two():
    return 2

# Returns a plain tuple (not multiple outputs); used to exercise
# destructuring errors.
@lambda_solid
def return_tuple():
    return (1, 2)

@lambda_solid(input_defs=[InputDefinition("num")])
def add_one(num):
    return num + 1

# Pass-through solid, handy for observing mapped composite outputs.
@lambda_solid(input_defs=[InputDefinition("num")])
def pipe(num):
    return num

@solid(
    input_defs=[InputDefinition("int_1", Int), InputDefinition("int_2", Int)],
    output_defs=[OutputDefinition(Int)],
)
def adder(_context, int_1, int_2):
    return int_1 + int_2

# Solid with two named outputs, for multi-output wiring tests.
@solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def return_mult(_context):
    yield Output(1, "one")
    yield Output(2, "two")

@solid(config_schema=int)
def return_config_int(context):
    # Echo the solid's config value back as its output.
    return context.solid_config

def get_duplicate_solids():
    # Two distinct definitions sharing one name, to trigger conflict errors.
    return (
        SolidDefinition("a_solid", [], lambda: None, []),
        SolidDefinition("a_solid", [], lambda: None, []),
    )
def test_basic():
    """A composite wiring return_one into add_one yields 2."""

    @composite_solid
    def test():
        add_one(num=return_one())

    pipeline_def = PipelineDefinition(solid_defs=[test], name="test")
    result = execute_pipeline(pipeline_def)
    assert result.result_for_handle("test.add_one").output_value() == 2
def test_args():
@composite_solid
def _test_1():
one = return_one()
add_one(one)
@composite_solid
def _test_2():
adder(return_one(), return_two())
@composite_solid
def _test_3():
adder(int_1=return_one(), int_2=return_two())
@composite_solid
def _test_4():
adder(return_one(), return_two())
@composite_solid
def _test_5():
adder(return_one(), int_2=return_two())
@composite_solid
def _test_6():
adder(return_one())
@composite_solid
def _test_7():
adder(int_2=return_two())
def test_arg_fails():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _fail_2():
adder(return_one(), 1)
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _fail_3():
# pylint: disable=too-many-function-args
adder(return_one(), return_two(), return_one.alias("three")())
def test_mult_out_fail():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _test():
ret = return_mult()
add_one(ret)
def test_aliased_with_name_name_fails():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _test():
one, two = return_mult()
add_one(num=one)
add_one.alias("add_one")(num=two) # explicit alias disables autoalias
def test_composite_with_duplicate_solids():
solid_1, solid_2 = get_duplicate_solids()
with pytest.raises(
DagsterInvalidDefinitionError,
match="Detected conflicting node definitions with the same name",
):
@composite_solid
def _name_conflict_composite():
solid_1()
solid_2()
def test_pipeline_with_duplicate_solids():
solid_1, solid_2 = get_duplicate_solids()
with pytest.raises(
DagsterInvalidDefinitionError,
match="Detected conflicting node definitions with the same name",
):
@pipeline
def _name_conflict_pipeline():
solid_1()
solid_2()
def test_multiple():
@composite_solid
def test():
one, two = return_mult()
add_one(num=one)
add_one.alias("add_one_2")(num=two)
results = execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
assert results.result_for_handle("test.add_one").output_value() == 2
assert results.result_for_handle("test.add_one_2").output_value() == 3
def test_two_inputs_with_dsl():
@lambda_solid(input_defs=[InputDefinition("num_one"), InputDefinition("num_two")])
def subtract(num_one, num_two):
return num_one - num_two
@lambda_solid
def return_three():
return 3
@composite_solid
def test():
subtract(num_one=return_two(), num_two=return_three())
assert (
execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
.result_for_handle("test.subtract")
.output_value()
== -1
)
def test_basic_aliasing_with_dsl():
@composite_solid
def test():
add_one.alias("renamed")(num=return_one())
assert (
execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
.result_for_handle("test.renamed")
.output_value()
== 2
)
def test_diamond_graph():
@solid(output_defs=[OutputDefinition(name="value_one"), OutputDefinition(name="value_two")])
def emit_values(_context):
yield Output(1, "value_one")
yield Output(2, "value_two")
@lambda_solid(input_defs=[InputDefinition("num_one"), InputDefinition("num_two")])
def subtract(num_one, num_two):
return num_one - num_two
@composite_solid
def diamond():
value_one, value_two = emit_values()
subtract(num_one=add_one(num=value_one), num_two=add_one.alias("renamed")(num=value_two))
result = execute_pipeline(PipelineDefinition(solid_defs=[diamond], name="test"))
assert result.result_for_handle("diamond.subtract").output_value() == -1
def test_mapping():
@lambda_solid(
input_defs=[InputDefinition("num_in", Int)], output_def=OutputDefinition(Int, "num_out")
)
def double(num_in):
return num_in * 2
@composite_solid(
input_defs=[InputDefinition("num_in", Int)], output_defs=[OutputDefinition(Int, "num_out")]
)
def composed_inout(num_in):
return double(num_in=num_in)
# have to use "pipe" solid since "result_for_solid" doesnt work with composite mappings
assert (
execute_pipeline(
PipelineDefinition(
solid_defs=[return_one, composed_inout, pipe],
name="test",
dependencies={
"composed_inout": {"num_in": DependencyDefinition("return_one")},
"pipe": {"num": DependencyDefinition("composed_inout", "num_out")},
},
)
)
.result_for_solid("pipe")
.output_value()
== 2
)
def test_mapping_args_kwargs():
    @lambda_solid
    def take(a, b, c):
        return (a, b, c)

    @composite_solid
    def maps(m_c, m_b, m_a):
        take(m_a, b=m_b, c=m_c)

    # Input mappings appear to follow the composite's parameter order
    # (m_c, m_b, m_a) rather than the order arguments were passed to
    # ``take`` — so index 0 maps m_c -> c and index 2 maps m_a -> a.
    assert maps.input_mappings[2].definition.name == "m_a"
    assert maps.input_mappings[2].maps_to.input_name == "a"

    assert maps.input_mappings[1].definition.name == "m_b"
    assert maps.input_mappings[1].maps_to.input_name == "b"

    assert maps.input_mappings[0].definition.name == "m_c"
    assert maps.input_mappings[0].maps_to.input_name == "c"
def test_output_map_mult():
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def wrap_mult():
return return_mult()
@pipeline
def mult_pipe():
one, two = wrap_mult()
echo.alias("echo_one")(one)
echo.alias("echo_two")(two)
result = execute_pipeline(mult_pipe)
assert result.result_for_solid("echo_one").output_value() == 1
assert result.result_for_solid("echo_two").output_value() == 2
def test_output_map_mult_swizzle():
@composite_solid(output_defs=[OutputDefinition(Int, "x"), OutputDefinition(Int, "y")])
def wrap_mult():
one, two = return_mult()
return {"x": one, "y": two}
@pipeline
def mult_pipe():
x, y = wrap_mult()
echo.alias("echo_x")(x)
echo.alias("echo_y")(y)
result = execute_pipeline(mult_pipe)
assert result.result_for_solid("echo_x").output_value() == 1
assert result.result_for_solid("echo_y").output_value() == 2
def test_output_map_fail():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def _bad(_context):
return return_one()
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def _bad(_context):
return {"one": 1}
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(
output_defs=[OutputDefinition(Int, "three"), OutputDefinition(Int, "four")]
)
def _bad():
return return_mult()
def test_deep_graph():
@solid(config_schema=Int)
def download_num(context):
return context.solid_config
@lambda_solid(input_defs=[InputDefinition("num")])
def unzip_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def ingest_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def subsample_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def canonicalize_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")], output_def=OutputDefinition(Int))
def load_num(num):
return num + 3
@composite_solid(output_defs=[OutputDefinition(Int)])
def test():
return load_num(
num=canonicalize_num(
num=subsample_num(num=ingest_num(num=unzip_num(num=download_num())))
)
)
result = execute_pipeline(
PipelineDefinition(solid_defs=[test], name="test"),
{"solids": {"test": {"solids": {"download_num": {"config": 123}}}}},
)
assert result.result_for_handle("test.canonicalize_num").output_value() == 123
assert result.result_for_handle("test.load_num").output_value() == 126
def test_recursion():
@composite_solid
def outer():
@composite_solid(output_defs=[OutputDefinition()])
def inner():
return add_one(return_one())
add_one(inner())
assert execute_pipeline(PipelineDefinition(solid_defs=[outer], name="test")).success
class Garbage(Exception):
pass
def test_recursion_with_exceptions():
called = {}
@pipeline
def recurse():
@composite_solid
def outer():
try:
@composite_solid
def throws():
called["throws"] = True
raise Garbage()
throws()
except Garbage:
add_one(return_one())
outer()
assert execute_pipeline(recurse).success
assert called["throws"] is True
def test_pipeline_has_solid_def():
@composite_solid(output_defs=[OutputDefinition()])
def inner():
return add_one(return_one())
@composite_solid
def outer():
add_one(inner())
@pipeline
def a_pipeline():
outer()
assert a_pipeline.has_solid_def("add_one")
assert a_pipeline.has_solid_def("outer")
assert a_pipeline.has_solid_def("inner")
def test_mapping_args_ordering():
@lambda_solid
def take(a, b, c):
assert a == "a"
assert b == "b"
assert c == "c"
@composite_solid
def swizzle(b, a, c):
take(a, b, c)
@composite_solid
def swizzle_2(c, b, a):
swizzle(b, a=a, c=c)
@pipeline
def ordered():
swizzle_2()
for mapping in swizzle.input_mappings:
assert mapping.definition.name == mapping.maps_to.input_name
for mapping in swizzle_2.input_mappings:
assert mapping.definition.name == mapping.maps_to.input_name
execute_pipeline(
ordered,
{
"solids": {
"swizzle_2": {
"inputs": {"a": {"value": "a"}, "b": {"value": "b"}, "c": {"value": "c"}}
}
}
},
)
def test_unused_mapping():
with pytest.raises(DagsterInvalidDefinitionError, match="unmapped input"):
@composite_solid
def unused_mapping(_):
return_one()
@lambda_solid
def single_input_solid():
return
def test_collision_invocations():
with pytest.warns(None) as record:
@pipeline
def _():
single_input_solid()
single_input_solid()
single_input_solid()
assert len(record) == 0
def test_alias_invoked(recwarn):
@pipeline
def _():
single_input_solid.alias("foo")()
single_input_solid.alias("bar")()
assert len(recwarn) == 0
def test_alias_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\n'single_input_solid' was aliased as '(foo|bar)'."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.alias("foo")
single_input_solid.alias("bar")
assert len(record) == 2 # This pipeline should raise a warning for each aliasing of the solid.
def test_tag_invoked():
with pytest.warns(None) as record:
@pipeline
def _my_pipeline():
single_input_solid.tag({})()
execute_pipeline(_my_pipeline)
assert len(record) == 0
def test_tag_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.tag({})
single_input_solid.tag({})
execute_pipeline(_my_pipeline)
assert len(record) == 1 # We should only raise one warning because solids have same name.
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\nProvided tags: {'a': 'b'}\."
),
):
@pipeline
def _my_pipeline():
single_input_solid.tag({"a": "b"})
execute_pipeline(_my_pipeline)
def test_with_hooks_invoked():
with pytest.warns(None) as record:
@pipeline
def _my_pipeline():
single_input_solid.with_hooks(set())()
execute_pipeline(_my_pipeline)
assert len(record) == 0
@event_list_hook(required_resource_keys=set())
def a_hook(_context, _):
return HookExecutionResult("a_hook")
def test_with_hooks_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.with_hooks(set())
single_input_solid.with_hooks(set())
execute_pipeline(_my_pipeline)
# Note not returning out of the pipe causes warning count to go up to 2
assert len(record) == 1 # We should only raise one warning because solids have same name.
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\nProvided hook definitions: \['a_hook'\]\."
),
):
@pipeline
def _my_pipeline():
single_input_solid.with_hooks({a_hook})
execute_pipeline(_my_pipeline)
def test_with_hooks_not_empty():
@pipeline
def _():
single_input_solid.with_hooks({a_hook})
assert 1 == 1
def test_multiple_pending_invocations():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\n'single_input_solid' was aliased as 'bar'\.\n"
r"Provided hook definitions: \['a_hook'\]\."
),
) as record:
@pipeline
def _my_pipeline():
foo = single_input_solid.alias("foo")
bar = single_input_solid.alias("bar")
foo_tag = foo.tag({})
_bar_hook = bar.with_hooks({a_hook})
foo_tag()
assert (
len(record) == 1
) # ensure that one warning is thrown per solid_name / alias instead of per every PendingNodeInvocation.
def test_compose_nothing():
    # A Nothing input carries no data — it only expresses ordering — so
    # ``go`` declares an input yet takes no function parameters.
    @lambda_solid(input_defs=[InputDefinition("start", Nothing)])
    def go():
        pass

    @composite_solid(input_defs=[InputDefinition("start", Nothing)])
    def _compose(start):
        # Passing ``start`` records the dependency; pylint sees a spurious
        # extra argument because ``go`` has no parameters.
        go(start)  # pylint: disable=too-many-function-args
def test_multimap():
@composite_solid(output_defs=[OutputDefinition(int, "x"), OutputDefinition(int, "y")])
def multimap(foo):
x = echo.alias("echo_1")(foo)
y = echo.alias("echo_2")(foo)
return {"x": x, "y": y}
@pipeline
def multimap_pipe():
one = return_one()
multimap(one)
result = execute_pipeline(multimap_pipe)
assert result.result_for_handle("multimap.echo_1").output_value() == 1
assert result.result_for_handle("multimap.echo_2").output_value() == 1
def test_reuse_inputs():
@composite_solid(input_defs=[InputDefinition("one", Int), InputDefinition("two", Int)])
def calculate(one, two):
adder(one, two)
adder.alias("adder_2")(one, two)
@pipeline
def calculate_pipeline():
one = return_one()
two = return_two()
calculate(one, two)
result = execute_pipeline(calculate_pipeline)
assert result.result_for_handle("calculate.adder").output_value() == 3
assert result.result_for_handle("calculate.adder_2").output_value() == 3
def test_output_node_error():
with pytest.raises(DagsterInvariantViolationError):
@pipeline
def _bad_destructure():
_a, _b = return_tuple()
with pytest.raises(DagsterInvariantViolationError):
@pipeline
def _bad_index():
out = return_tuple()
add_one(out[0])
def test_pipeline_composition_metadata():
@solid
def metadata_solid(context):
return context.solid.tags["key"]
@pipeline
def metadata_test_pipeline():
metadata_solid.tag({"key": "foo"}).alias("aliased_one")()
metadata_solid.alias("aliased_two").tag({"key": "foo"}).tag({"key": "bar"})()
metadata_solid.alias("aliased_three").tag({"key": "baz"})()
metadata_solid.tag({"key": "quux"})()
res = execute_pipeline(metadata_test_pipeline)
assert res.result_for_solid("aliased_one").output_value() == "foo"
assert res.result_for_solid("aliased_two").output_value() == "bar"
assert res.result_for_solid("aliased_three").output_value() == "baz"
assert res.result_for_solid("metadata_solid").output_value() == "quux"
def test_composite_solid_composition_metadata():
    # Same contract as test_pipeline_composition_metadata, but with the tagged
    # invocations nested inside a composite solid.
    @solid
    def metadata_solid(context):
        return context.solid.tags["key"]

    @composite_solid
    def metadata_composite():
        metadata_solid.tag({"key": "foo"}).alias("aliased_one")()
        metadata_solid.alias("aliased_two").tag({"key": "foo"}).tag({"key": "bar"})()
        metadata_solid.alias("aliased_three").tag({"key": "baz"})()
        metadata_solid.tag({"key": "quux"})()

    @pipeline
    def metadata_test_pipeline():
        metadata_composite()

    composite_result = execute_pipeline(metadata_test_pipeline).result_for_solid(
        "metadata_composite"
    )
    for inner_name, tag_value in [
        ("aliased_one", "foo"),
        ("aliased_two", "bar"),  # second .tag() for "key" wins
        ("aliased_three", "baz"),
        ("metadata_solid", "quux"),
    ]:
        assert composite_result.result_for_solid(inner_name).output_value() == tag_value
def test_uninvoked_solid_fails():
    # Passing a solid *definition* (rather than the result of invoking it) as
    # an input must be rejected with a hint about the missing call.
    # FIX: the trailing '?' is escaped — unescaped it is a regex quantifier
    # making the preceding 's' optional rather than matching a literal '?'.
    with pytest.raises(DagsterInvalidDefinitionError, match=r".*Did you forget parentheses\?"):

        @pipeline
        def uninvoked_solid_pipeline():
            add_one(return_one)

        # Unreachable if composition raised, kept as a guard: executing the
        # pipeline must not succeed either.
        execute_pipeline(uninvoked_solid_pipeline)
def test_uninvoked_aliased_solid_fails():
    # Same as test_uninvoked_solid_fails, but the forgotten call is on an
    # aliased solid definition.
    # FIX: the trailing '?' is escaped — unescaped it is a regex quantifier
    # making the preceding 's' optional rather than matching a literal '?'.
    with pytest.raises(DagsterInvalidDefinitionError, match=r".*Did you forget parentheses\?"):

        @pipeline
        def uninvoked_aliased_solid_pipeline():
            add_one(return_one.alias("something"))

        # Unreachable if composition raised, kept as a guard.
        execute_pipeline(uninvoked_aliased_solid_pipeline)
def test_alias_on_invoked_solid_fails():
    # .alias() must be applied to the solid definition, not to the output
    # handle produced by invoking it; the error points at the parentheses.
    with pytest.raises(
        DagsterInvariantViolationError, match=r".*Consider checking the location of parentheses."
    ):

        @pipeline
        def alias_on_invoked_solid_pipeline():
            # Wrong order on purpose: invoke first, then alias.
            return_one().alias("something")  # pylint: disable=no-member

        execute_pipeline(alias_on_invoked_solid_pipeline)
def test_warn_on_pipeline_return():
    # Returning a value out of a @pipeline body is meaningless (pipelines have
    # no outputs) and should trigger a UserWarning rather than be silently
    # ignored.
    @solid
    def noop(_):
        pass

    with pytest.warns(
        UserWarning, match="You have returned a value out of a @pipeline-decorated function. "
    ):

        @pipeline
        def _returns_something():
            return noop()
def test_tags():
    # Definition-time tags and invocation-time tags are merged onto the
    # resulting execution step.
    @solid(tags={"def": "1"})
    def emit(_):
        return 1

    @pipeline
    def tag():
        emit.tag({"invoke": "2"})()

    steps = list(create_execution_plan(tag).step_dict.values())
    assert steps[0].tags == {"def": "1", "invoke": "2"}
def test_bad_alias():
    # Alias names must be valid dagster identifiers: no spaces, no brackets.
    for invalid_name in ("uh oh", "uh[oh]"):
        with pytest.raises(DagsterInvalidDefinitionError, match="not a valid name"):
            echo.alias(invalid_name)
def test_tag_subset():
    # Merged tags survive when planning a subset of the pipeline.
    @solid
    def empty(_):
        pass

    @solid(tags={"def": "1"})
    def emit(_):
        return 1

    @pipeline
    def tag():
        empty()
        emit.tag({"invoke": "2"})()

    subset_plan = create_execution_plan(tag.get_pipeline_subset_def({"emit"}))
    # The subset contains only the "emit" step.
    step = next(iter(subset_plan.step_dict.values()))
    assert step.tags == {"def": "1", "invoke": "2"}
def test_composition_order():
    # Records, via a success hook, the tags each solid instance actually ran
    # with, to verify that .tag() takes effect no matter where it appears in
    # the with_hooks/alias/tag chain.
    solid_to_tags = {}

    @success_hook
    def test_hook(context):
        solid_to_tags[context.solid.name] = context.solid.tags

    @solid
    def a_solid(_):
        pass

    @pipeline
    def a_pipeline():
        # All six orderings of .with_hooks() / .alias() / .tag(); each alias
        # spells out the ordering used, and the "pos" value encodes where in
        # the chain .tag() was applied (1st, 2nd, or 3rd).
        a_solid.with_hooks(hook_defs={test_hook}).alias("hook_alias_tag").tag({"pos": 3})()
        a_solid.with_hooks(hook_defs={test_hook}).tag({"pos": 2}).alias("hook_tag_alias")()
        a_solid.alias("alias_tag_hook").tag({"pos": 2}).with_hooks(hook_defs={test_hook})()
        a_solid.alias("alias_hook_tag").with_hooks(hook_defs={test_hook}).tag({"pos": 3})()
        a_solid.tag({"pos": 1}).with_hooks(hook_defs={test_hook}).alias("tag_hook_alias")()
        a_solid.tag({"pos": 1}).alias("tag_alias_hook").with_hooks(hook_defs={test_hook})()

    result = execute_pipeline(a_pipeline, raise_on_error=False)
    assert result.success
    # Tags are preserved for every chain ordering; note the integer values
    # passed to .tag() come back as strings.
    assert solid_to_tags == {
        "tag_hook_alias": {"pos": "1"},
        "tag_alias_hook": {"pos": "1"},
        "hook_tag_alias": {"pos": "2"},
        "alias_tag_hook": {"pos": "2"},
        "hook_alias_tag": {"pos": "3"},
        "alias_hook_tag": {"pos": "3"},
    }
def test_fan_in_scalars_fails():
    # Fan-in lists may only contain upstream solid outputs or input mappings;
    # raw scalar literals in the list must be rejected at composition time.
    @solid
    def fan_in_solid(_, xs):
        return sum(xs)

    with pytest.raises(
        DagsterInvalidDefinitionError,
        match="Lists can only contain the output from previous solid invocations or input mappings",
    ):

        @pipeline
        def _scalar_fan_in_pipeline():
            fan_in_solid([1, 2, 3])
def test_with_hooks_on_invoked_solid_fails():
    # .with_hooks() must be applied to the solid definition, not to the output
    # handle returned by invoking it.
    @solid
    def yield_1_solid(_):
        return 1

    with pytest.raises(
        DagsterInvariantViolationError,
        match="attempted to call hook method for InvokedSolidOutputHandle.",
    ):

        @pipeline
        def _bad_hooks_pipeline():
            # Wrong order on purpose: invoke first, then attach hooks.
            yield_1_solid().with_hooks({a_hook})
def test_iterating_over_dynamic_outputs_fails():
    # Dynamic outputs cannot be consumed with a plain Python for-loop during
    # composition; iteration over the output handle must raise.
    @solid
    def dynamic_output_solid(_):
        yield DynamicOutput(1, "1")
        yield DynamicOutput(2, "2")

    @solid
    def yield_input(_, x):
        return x

    with pytest.raises(
        DagsterInvariantViolationError,
        match="Attempted to iterate over an InvokedSolidOutputHandle.",
    ):

        @pipeline
        def _iterating_over_dynamic_output_pipeline():
            for x in dynamic_output_solid():
                yield_input(x)
def test_indexing_into_dynamic_outputs_fails():
    # Dynamic outputs cannot be subscripted during composition; indexing into
    # the output handle must raise.
    @solid
    def dynamic_output_solid(_):
        yield DynamicOutput(1, "1")
        yield DynamicOutput(2, "2")

    @solid
    def yield_input(_, x):
        return x

    with pytest.raises(
        DagsterInvariantViolationError,
        match="Attempted to index in to an InvokedSolidOutputHandle.",
    ):

        @pipeline
        def _indexing_into_dynamic_output_pipeline():
            yield_input(dynamic_output_solid()[0])
def test_aliasing_invoked_dynamic_output_fails():
    # .alias() on the handle returned by invoking a dynamic-output solid must
    # raise; aliasing belongs on the definition, before invocation.
    @solid
    def dynamic_output_solid(_):
        yield DynamicOutput(1, "1")
        yield DynamicOutput(2, "2")

    with pytest.raises(
        DagsterInvariantViolationError,
        match="attempted to call alias method for InvokedSolidOutputHandle.",
    ):

        @pipeline
        def _alias_invoked_dynamic_output_pipeline():
            dynamic_output_solid().alias("dynamic_output")
| 26.269539 | 109 | 0.634283 | import pytest
from dagster import (
DependencyDefinition,
InputDefinition,
Int,
Nothing,
Output,
OutputDefinition,
PipelineDefinition,
SolidDefinition,
composite_solid,
execute_pipeline,
lambda_solid,
pipeline,
solid,
)
from dagster.core.definitions.decorators.hook import event_list_hook, success_hook
from dagster.core.definitions.events import DynamicOutput, HookExecutionResult
from dagster.core.errors import DagsterInvalidDefinitionError, DagsterInvariantViolationError
from dagster.core.execution.api import create_execution_plan
def builder(graph):
return graph.add_one(graph.return_one())
@lambda_solid(output_def=OutputDefinition(Int))
def echo(blah):
return blah
@lambda_solid
def return_one():
return 1
@lambda_solid
def return_two():
return 2
@lambda_solid
def return_tuple():
return (1, 2)
@lambda_solid(input_defs=[InputDefinition("num")])
def add_one(num):
return num + 1
@lambda_solid(input_defs=[InputDefinition("num")])
def pipe(num):
return num
@solid(
input_defs=[InputDefinition("int_1", Int), InputDefinition("int_2", Int)],
output_defs=[OutputDefinition(Int)],
)
def adder(_context, int_1, int_2):
return int_1 + int_2
@solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def return_mult(_context):
yield Output(1, "one")
yield Output(2, "two")
@solid(config_schema=int)
def return_config_int(context):
return context.solid_config
def get_duplicate_solids():
return (
SolidDefinition("a_solid", [], lambda: None, []),
SolidDefinition("a_solid", [], lambda: None, []),
)
def test_basic():
@composite_solid
def test():
one = return_one()
add_one(num=one)
assert (
execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
.result_for_handle("test.add_one")
.output_value()
== 2
)
def test_args():
@composite_solid
def _test_1():
one = return_one()
add_one(one)
@composite_solid
def _test_2():
adder(return_one(), return_two())
@composite_solid
def _test_3():
adder(int_1=return_one(), int_2=return_two())
@composite_solid
def _test_4():
adder(return_one(), return_two())
@composite_solid
def _test_5():
adder(return_one(), int_2=return_two())
@composite_solid
def _test_6():
adder(return_one())
@composite_solid
def _test_7():
adder(int_2=return_two())
def test_arg_fails():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _fail_2():
adder(return_one(), 1)
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _fail_3():
adder(return_one(), return_two(), return_one.alias("three")())
def test_mult_out_fail():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _test():
ret = return_mult()
add_one(ret)
def test_aliased_with_name_name_fails():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid
def _test():
one, two = return_mult()
add_one(num=one)
add_one.alias("add_one")(num=two)
def test_composite_with_duplicate_solids():
solid_1, solid_2 = get_duplicate_solids()
with pytest.raises(
DagsterInvalidDefinitionError,
match="Detected conflicting node definitions with the same name",
):
@composite_solid
def _name_conflict_composite():
solid_1()
solid_2()
def test_pipeline_with_duplicate_solids():
solid_1, solid_2 = get_duplicate_solids()
with pytest.raises(
DagsterInvalidDefinitionError,
match="Detected conflicting node definitions with the same name",
):
@pipeline
def _name_conflict_pipeline():
solid_1()
solid_2()
def test_multiple():
@composite_solid
def test():
one, two = return_mult()
add_one(num=one)
add_one.alias("add_one_2")(num=two)
results = execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
assert results.result_for_handle("test.add_one").output_value() == 2
assert results.result_for_handle("test.add_one_2").output_value() == 3
def test_two_inputs_with_dsl():
@lambda_solid(input_defs=[InputDefinition("num_one"), InputDefinition("num_two")])
def subtract(num_one, num_two):
return num_one - num_two
@lambda_solid
def return_three():
return 3
@composite_solid
def test():
subtract(num_one=return_two(), num_two=return_three())
assert (
execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
.result_for_handle("test.subtract")
.output_value()
== -1
)
def test_basic_aliasing_with_dsl():
@composite_solid
def test():
add_one.alias("renamed")(num=return_one())
assert (
execute_pipeline(PipelineDefinition(solid_defs=[test], name="test"))
.result_for_handle("test.renamed")
.output_value()
== 2
)
def test_diamond_graph():
@solid(output_defs=[OutputDefinition(name="value_one"), OutputDefinition(name="value_two")])
def emit_values(_context):
yield Output(1, "value_one")
yield Output(2, "value_two")
@lambda_solid(input_defs=[InputDefinition("num_one"), InputDefinition("num_two")])
def subtract(num_one, num_two):
return num_one - num_two
@composite_solid
def diamond():
value_one, value_two = emit_values()
subtract(num_one=add_one(num=value_one), num_two=add_one.alias("renamed")(num=value_two))
result = execute_pipeline(PipelineDefinition(solid_defs=[diamond], name="test"))
assert result.result_for_handle("diamond.subtract").output_value() == -1
def test_mapping():
@lambda_solid(
input_defs=[InputDefinition("num_in", Int)], output_def=OutputDefinition(Int, "num_out")
)
def double(num_in):
return num_in * 2
@composite_solid(
input_defs=[InputDefinition("num_in", Int)], output_defs=[OutputDefinition(Int, "num_out")]
)
def composed_inout(num_in):
return double(num_in=num_in)
assert (
execute_pipeline(
PipelineDefinition(
solid_defs=[return_one, composed_inout, pipe],
name="test",
dependencies={
"composed_inout": {"num_in": DependencyDefinition("return_one")},
"pipe": {"num": DependencyDefinition("composed_inout", "num_out")},
},
)
)
.result_for_solid("pipe")
.output_value()
== 2
)
def test_mapping_args_kwargs():
@lambda_solid
def take(a, b, c):
return (a, b, c)
@composite_solid
def maps(m_c, m_b, m_a):
take(m_a, b=m_b, c=m_c)
assert maps.input_mappings[2].definition.name == "m_a"
assert maps.input_mappings[2].maps_to.input_name == "a"
assert maps.input_mappings[1].definition.name == "m_b"
assert maps.input_mappings[1].maps_to.input_name == "b"
assert maps.input_mappings[0].definition.name == "m_c"
assert maps.input_mappings[0].maps_to.input_name == "c"
def test_output_map_mult():
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def wrap_mult():
return return_mult()
@pipeline
def mult_pipe():
one, two = wrap_mult()
echo.alias("echo_one")(one)
echo.alias("echo_two")(two)
result = execute_pipeline(mult_pipe)
assert result.result_for_solid("echo_one").output_value() == 1
assert result.result_for_solid("echo_two").output_value() == 2
def test_output_map_mult_swizzle():
@composite_solid(output_defs=[OutputDefinition(Int, "x"), OutputDefinition(Int, "y")])
def wrap_mult():
one, two = return_mult()
return {"x": one, "y": two}
@pipeline
def mult_pipe():
x, y = wrap_mult()
echo.alias("echo_x")(x)
echo.alias("echo_y")(y)
result = execute_pipeline(mult_pipe)
assert result.result_for_solid("echo_x").output_value() == 1
assert result.result_for_solid("echo_y").output_value() == 2
def test_output_map_fail():
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def _bad(_context):
return return_one()
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(output_defs=[OutputDefinition(Int, "one"), OutputDefinition(Int, "two")])
def _bad(_context):
return {"one": 1}
with pytest.raises(DagsterInvalidDefinitionError):
@composite_solid(
output_defs=[OutputDefinition(Int, "three"), OutputDefinition(Int, "four")]
)
def _bad():
return return_mult()
def test_deep_graph():
@solid(config_schema=Int)
def download_num(context):
return context.solid_config
@lambda_solid(input_defs=[InputDefinition("num")])
def unzip_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def ingest_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def subsample_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")])
def canonicalize_num(num):
return num
@lambda_solid(input_defs=[InputDefinition("num")], output_def=OutputDefinition(Int))
def load_num(num):
return num + 3
@composite_solid(output_defs=[OutputDefinition(Int)])
def test():
return load_num(
num=canonicalize_num(
num=subsample_num(num=ingest_num(num=unzip_num(num=download_num())))
)
)
result = execute_pipeline(
PipelineDefinition(solid_defs=[test], name="test"),
{"solids": {"test": {"solids": {"download_num": {"config": 123}}}}},
)
assert result.result_for_handle("test.canonicalize_num").output_value() == 123
assert result.result_for_handle("test.load_num").output_value() == 126
def test_recursion():
@composite_solid
def outer():
@composite_solid(output_defs=[OutputDefinition()])
def inner():
return add_one(return_one())
add_one(inner())
assert execute_pipeline(PipelineDefinition(solid_defs=[outer], name="test")).success
class Garbage(Exception):
pass
def test_recursion_with_exceptions():
called = {}
@pipeline
def recurse():
@composite_solid
def outer():
try:
@composite_solid
def throws():
called["throws"] = True
raise Garbage()
throws()
except Garbage:
add_one(return_one())
outer()
assert execute_pipeline(recurse).success
assert called["throws"] is True
def test_pipeline_has_solid_def():
@composite_solid(output_defs=[OutputDefinition()])
def inner():
return add_one(return_one())
@composite_solid
def outer():
add_one(inner())
@pipeline
def a_pipeline():
outer()
assert a_pipeline.has_solid_def("add_one")
assert a_pipeline.has_solid_def("outer")
assert a_pipeline.has_solid_def("inner")
def test_mapping_args_ordering():
@lambda_solid
def take(a, b, c):
assert a == "a"
assert b == "b"
assert c == "c"
@composite_solid
def swizzle(b, a, c):
take(a, b, c)
@composite_solid
def swizzle_2(c, b, a):
swizzle(b, a=a, c=c)
@pipeline
def ordered():
swizzle_2()
for mapping in swizzle.input_mappings:
assert mapping.definition.name == mapping.maps_to.input_name
for mapping in swizzle_2.input_mappings:
assert mapping.definition.name == mapping.maps_to.input_name
execute_pipeline(
ordered,
{
"solids": {
"swizzle_2": {
"inputs": {"a": {"value": "a"}, "b": {"value": "b"}, "c": {"value": "c"}}
}
}
},
)
def test_unused_mapping():
with pytest.raises(DagsterInvalidDefinitionError, match="unmapped input"):
@composite_solid
def unused_mapping(_):
return_one()
@lambda_solid
def single_input_solid():
return
def test_collision_invocations():
with pytest.warns(None) as record:
@pipeline
def _():
single_input_solid()
single_input_solid()
single_input_solid()
assert len(record) == 0
def test_alias_invoked(recwarn):
@pipeline
def _():
single_input_solid.alias("foo")()
single_input_solid.alias("bar")()
assert len(recwarn) == 0
def test_alias_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\n'single_input_solid' was aliased as '(foo|bar)'."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.alias("foo")
single_input_solid.alias("bar")
assert len(record) == 2
def test_tag_invoked():
with pytest.warns(None) as record:
@pipeline
def _my_pipeline():
single_input_solid.tag({})()
execute_pipeline(_my_pipeline)
assert len(record) == 0
def test_tag_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.tag({})
single_input_solid.tag({})
execute_pipeline(_my_pipeline)
assert len(record) == 1
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\nProvided tags: {'a': 'b'}\."
),
):
@pipeline
def _my_pipeline():
single_input_solid.tag({"a": "b"})
execute_pipeline(_my_pipeline)
def test_with_hooks_invoked():
with pytest.warns(None) as record:
@pipeline
def _my_pipeline():
single_input_solid.with_hooks(set())()
execute_pipeline(_my_pipeline)
assert len(record) == 0
@event_list_hook(required_resource_keys=set())
def a_hook(_context, _):
return HookExecutionResult("a_hook")
def test_with_hooks_not_invoked():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\."
),
) as record:
@pipeline
def _my_pipeline():
single_input_solid.with_hooks(set())
single_input_solid.with_hooks(set())
execute_pipeline(_my_pipeline)
assert len(record) == 1
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\nProvided hook definitions: \['a_hook'\]\."
),
):
@pipeline
def _my_pipeline():
single_input_solid.with_hooks({a_hook})
execute_pipeline(_my_pipeline)
def test_with_hooks_not_empty():
@pipeline
def _():
single_input_solid.with_hooks({a_hook})
assert 1 == 1
def test_multiple_pending_invocations():
with pytest.warns(
UserWarning,
match=(
r"While in @pipeline context '_my_pipeline', received an uninvoked solid "
r"'single_input_solid'\.\n'single_input_solid' was aliased as 'bar'\.\n"
r"Provided hook definitions: \['a_hook'\]\."
),
) as record:
@pipeline
def _my_pipeline():
foo = single_input_solid.alias("foo")
bar = single_input_solid.alias("bar")
foo_tag = foo.tag({})
_bar_hook = bar.with_hooks({a_hook})
foo_tag()
assert (
len(record) == 1
)
def test_compose_nothing():
@lambda_solid(input_defs=[InputDefinition("start", Nothing)])
def go():
pass
@composite_solid(input_defs=[InputDefinition("start", Nothing)])
def _compose(start):
go(start)
def test_multimap():
@composite_solid(output_defs=[OutputDefinition(int, "x"), OutputDefinition(int, "y")])
def multimap(foo):
x = echo.alias("echo_1")(foo)
y = echo.alias("echo_2")(foo)
return {"x": x, "y": y}
@pipeline
def multimap_pipe():
one = return_one()
multimap(one)
result = execute_pipeline(multimap_pipe)
assert result.result_for_handle("multimap.echo_1").output_value() == 1
assert result.result_for_handle("multimap.echo_2").output_value() == 1
def test_reuse_inputs():
@composite_solid(input_defs=[InputDefinition("one", Int), InputDefinition("two", Int)])
def calculate(one, two):
adder(one, two)
adder.alias("adder_2")(one, two)
@pipeline
def calculate_pipeline():
one = return_one()
two = return_two()
calculate(one, two)
result = execute_pipeline(calculate_pipeline)
assert result.result_for_handle("calculate.adder").output_value() == 3
assert result.result_for_handle("calculate.adder_2").output_value() == 3
def test_output_node_error():
with pytest.raises(DagsterInvariantViolationError):
@pipeline
def _bad_destructure():
_a, _b = return_tuple()
with pytest.raises(DagsterInvariantViolationError):
@pipeline
def _bad_index():
out = return_tuple()
add_one(out[0])
def test_pipeline_composition_metadata():
@solid
def metadata_solid(context):
return context.solid.tags["key"]
@pipeline
def metadata_test_pipeline():
metadata_solid.tag({"key": "foo"}).alias("aliased_one")()
metadata_solid.alias("aliased_two").tag({"key": "foo"}).tag({"key": "bar"})()
metadata_solid.alias("aliased_three").tag({"key": "baz"})()
metadata_solid.tag({"key": "quux"})()
res = execute_pipeline(metadata_test_pipeline)
assert res.result_for_solid("aliased_one").output_value() == "foo"
assert res.result_for_solid("aliased_two").output_value() == "bar"
assert res.result_for_solid("aliased_three").output_value() == "baz"
assert res.result_for_solid("metadata_solid").output_value() == "quux"
def test_composite_solid_composition_metadata():
@solid
def metadata_solid(context):
return context.solid.tags["key"]
@composite_solid
def metadata_composite():
metadata_solid.tag({"key": "foo"}).alias("aliased_one")()
metadata_solid.alias("aliased_two").tag({"key": "foo"}).tag({"key": "bar"})()
metadata_solid.alias("aliased_three").tag({"key": "baz"})()
metadata_solid.tag({"key": "quux"})()
@pipeline
def metadata_test_pipeline():
metadata_composite()
res = execute_pipeline(metadata_test_pipeline)
assert (
res.result_for_solid("metadata_composite").result_for_solid("aliased_one").output_value()
== "foo"
)
assert (
res.result_for_solid("metadata_composite").result_for_solid("aliased_two").output_value()
== "bar"
)
assert (
res.result_for_solid("metadata_composite").result_for_solid("aliased_three").output_value()
== "baz"
)
assert (
res.result_for_solid("metadata_composite").result_for_solid("metadata_solid").output_value()
== "quux"
)
def test_uninvoked_solid_fails():
with pytest.raises(DagsterInvalidDefinitionError, match=r".*Did you forget parentheses?"):
@pipeline
def uninvoked_solid_pipeline():
add_one(return_one)
execute_pipeline(uninvoked_solid_pipeline)
def test_uninvoked_aliased_solid_fails():
with pytest.raises(DagsterInvalidDefinitionError, match=r".*Did you forget parentheses?"):
@pipeline
def uninvoked_aliased_solid_pipeline():
add_one(return_one.alias("something"))
execute_pipeline(uninvoked_aliased_solid_pipeline)
def test_alias_on_invoked_solid_fails():
with pytest.raises(
DagsterInvariantViolationError, match=r".*Consider checking the location of parentheses."
):
@pipeline
def alias_on_invoked_solid_pipeline():
return_one().alias("something")
execute_pipeline(alias_on_invoked_solid_pipeline)
def test_warn_on_pipeline_return():
@solid
def noop(_):
pass
with pytest.warns(
UserWarning, match="You have returned a value out of a @pipeline-decorated function. "
):
@pipeline
def _returns_something():
return noop()
def test_tags():
@solid(tags={"def": "1"})
def emit(_):
return 1
@pipeline
def tag():
emit.tag({"invoke": "2"})()
plan = create_execution_plan(tag)
step = list(plan.step_dict.values())[0]
assert step.tags == {"def": "1", "invoke": "2"}
def test_bad_alias():
with pytest.raises(DagsterInvalidDefinitionError, match="not a valid name"):
echo.alias("uh oh")
with pytest.raises(DagsterInvalidDefinitionError, match="not a valid name"):
echo.alias("uh[oh]")
def test_tag_subset():
@solid
def empty(_):
pass
@solid(tags={"def": "1"})
def emit(_):
return 1
@pipeline
def tag():
empty()
emit.tag({"invoke": "2"})()
plan = create_execution_plan(tag.get_pipeline_subset_def({"emit"}))
step = list(plan.step_dict.values())[0]
assert step.tags == {"def": "1", "invoke": "2"}
def test_composition_order():
solid_to_tags = {}
@success_hook
def test_hook(context):
solid_to_tags[context.solid.name] = context.solid.tags
@solid
def a_solid(_):
pass
@pipeline
def a_pipeline():
a_solid.with_hooks(hook_defs={test_hook}).alias("hook_alias_tag").tag({"pos": 3})()
a_solid.with_hooks(hook_defs={test_hook}).tag({"pos": 2}).alias("hook_tag_alias")()
a_solid.alias("alias_tag_hook").tag({"pos": 2}).with_hooks(hook_defs={test_hook})()
a_solid.alias("alias_hook_tag").with_hooks(hook_defs={test_hook}).tag({"pos": 3})()
a_solid.tag({"pos": 1}).with_hooks(hook_defs={test_hook}).alias("tag_hook_alias")()
a_solid.tag({"pos": 1}).alias("tag_alias_hook").with_hooks(hook_defs={test_hook})()
result = execute_pipeline(a_pipeline, raise_on_error=False)
assert result.success
assert solid_to_tags == {
"tag_hook_alias": {"pos": "1"},
"tag_alias_hook": {"pos": "1"},
"hook_tag_alias": {"pos": "2"},
"alias_tag_hook": {"pos": "2"},
"hook_alias_tag": {"pos": "3"},
"alias_hook_tag": {"pos": "3"},
}
def test_fan_in_scalars_fails():
@solid
def fan_in_solid(_, xs):
return sum(xs)
with pytest.raises(
DagsterInvalidDefinitionError,
match="Lists can only contain the output from previous solid invocations or input mappings",
):
@pipeline
def _scalar_fan_in_pipeline():
fan_in_solid([1, 2, 3])
def test_with_hooks_on_invoked_solid_fails():
@solid
def yield_1_solid(_):
return 1
with pytest.raises(
DagsterInvariantViolationError,
match="attempted to call hook method for InvokedSolidOutputHandle.",
):
@pipeline
def _bad_hooks_pipeline():
yield_1_solid().with_hooks({a_hook})
def test_iterating_over_dynamic_outputs_fails():
@solid
def dynamic_output_solid(_):
yield DynamicOutput(1, "1")
yield DynamicOutput(2, "2")
@solid
def yield_input(_, x):
return x
with pytest.raises(
DagsterInvariantViolationError,
match="Attempted to iterate over an InvokedSolidOutputHandle.",
):
@pipeline
def _iterating_over_dynamic_output_pipeline():
for x in dynamic_output_solid():
yield_input(x)
def test_indexing_into_dynamic_outputs_fails():
@solid
def dynamic_output_solid(_):
yield DynamicOutput(1, "1")
yield DynamicOutput(2, "2")
@solid
def yield_input(_, x):
return x
with pytest.raises(
DagsterInvariantViolationError,
match="Attempted to index in to an InvokedSolidOutputHandle.",
):
@pipeline
def _indexing_into_dynamic_output_pipeline():
yield_input(dynamic_output_solid()[0])
def test_aliasing_invoked_dynamic_output_fails():
@solid
def dynamic_output_solid(_):
yield DynamicOutput(1, "1")
yield DynamicOutput(2, "2")
with pytest.raises(
DagsterInvariantViolationError,
match="attempted to call alias method for InvokedSolidOutputHandle.",
):
@pipeline
def _alias_invoked_dynamic_output_pipeline():
dynamic_output_solid().alias("dynamic_output")
| true | true |
f73534df9f2ba66521a9f589baa13f113875fad2 | 5,388 | py | Python | python/GafferSceneUI/StandardAttributesUI.py | ivanimanishi/gaffer | 7cfd79d2f20c25ed1d680730de9d6a2ee356dd4c | [
"BSD-3-Clause"
] | 1 | 2019-08-02T16:49:59.000Z | 2019-08-02T16:49:59.000Z | python/GafferSceneUI/StandardAttributesUI.py | rkoschmitzky/gaffer | ec6262ae1292767bdeb9520d1447d65a4a511884 | [
"BSD-3-Clause"
] | 2 | 2017-08-23T21:35:45.000Z | 2018-01-29T08:59:33.000Z | python/GafferSceneUI/StandardAttributesUI.py | rkoschmitzky/gaffer | ec6262ae1292767bdeb9520d1447d65a4a511884 | [
"BSD-3-Clause"
] | null | null | null | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
def __attributesSummary( plug ) :
info = []
if plug["visibility"]["enabled"].getValue() :
info.append( "Visible" if plug["visibility"]["value"].getValue() else "Invisible" )
if plug["doubleSided"]["enabled"].getValue() :
info.append( "Double Sided" if plug["doubleSided"]["value"].getValue() else "Single Sided" )
return ", ".join( info )
def __motionBlurSummary( plug ) :
info = []
for motionType in "transform", "deformation" :
onOffEnabled = plug[motionType+"Blur"]["enabled"].getValue()
segmentsEnabled = plug[motionType+"BlurSegments"]["enabled"].getValue()
if onOffEnabled or segmentsEnabled :
items = []
if onOffEnabled :
items.append( "On" if plug[motionType+"Blur"]["value"].getValue() else "Off" )
if segmentsEnabled :
items.append( "%d Segments" % plug[motionType+"BlurSegments"]["value"].getValue() )
info.append( motionType.capitalize() + " : " + "/".join( items ) )
return ", ".join( info )
Gaffer.Metadata.registerNode(
GafferScene.StandardAttributes,
"description",
"""
Modifies the standard attributes on objects - these should
be respected by all renderers.
""",
plugs = {
# sections
"attributes" : [
"layout:section:Attributes:summary", __attributesSummary,
"layout:section:Motion Blur:summary", __motionBlurSummary,
],
# visibility plugs
"attributes.visibility" : [
"description",
"""
Whether or not the object can be seen - invisible objects are
not sent to the renderer at all. Typically more fine
grained (camera, reflection etc) visibility can be
specified using a renderer specific attributes node.
Note that making a parent location invisible will
always make all the children invisible too, regardless
of their visibility settings.
""",
"layout:section", "Attributes",
],
"attributes.doubleSided" : [
"description",
"""
Whether or not the object can be seen from both sides.
Single sided objects appear invisible when seen from
the back.
""",
"layout:section", "Attributes",
],
# motion blur plugs
"attributes.transformBlur" : [
"description",
"""
Whether or not transformation animation on the
object is taken into account in the rendered image.
Use the transformBlurSegments plug to specify the number
of segments used to represent the motion.
""",
"layout:section", "Motion Blur",
"label", "Transform",
],
"attributes.transformBlurSegments" : [
"description",
"""
The number of segments of transform animation to
pass to the renderer when transformBlur is on.
""",
"layout:section", "Motion Blur",
"label", "Transform Segments",
],
"attributes.deformationBlur" : [
"description",
"""
Whether or not deformation animation on the
object is taken into account in the rendered image.
Use the deformationBlurSegments plug to specify the
number of segments used to represent the motion.
""",
"layout:section", "Motion Blur",
"label", "Deformation",
],
"attributes.deformationBlurSegments" : [
"description",
"""
The number of segments of transform animation to
pass to the renderer when transformBlur is on.
""",
"layout:section", "Motion Blur",
"label", "Deformation Segments",
],
"attributes.linkedLights" : [
"description",
"""
The lights to be linked to this object. Accepts a
set expression or a space separated list of lights.
""",
"layout:section", "Light Linking",
"label", "Linked Lights",
],
}
)
| 27.773196 | 94 | 0.677988 | true | true | |
f73536fcb4da1724c15b621e8988c68c0fcb521b | 584 | py | Python | tests/mongodb/iaas_classic_queries/__init__.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | tests/mongodb/iaas_classic_queries/__init__.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | tests/mongodb/iaas_classic_queries/__init__.py | ericmharris/gc3-query | 0bf5226130aafbb1974aeb96d93ee1996833e87d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
gc3-query.__init__.py [9/13/2018 4:01 PM]
~~~~~~~~~~~~~~~~
<DESCR SHORT>
<DESCR>
"""
################################################################################
## Standard Library Imports
import sys, os
################################################################################
## Third-Party Imports
from dataclasses import dataclass
################################################################################
## Project Imports
from gc3_query.lib import *
_debug, _info, _warning, _error, _critical = get_logging(name=__name__) | 24.333333 | 80 | 0.390411 | true | true | |
f73538c9403eee04afb92dcc25ce6047daa55b6c | 401 | py | Python | juniorPython/wsgi.py | CatOnDrugs/junior-test | 7809d4726b7b39d5c0a69addc56aaf1e81d26bd7 | [
"MIT"
] | null | null | null | juniorPython/wsgi.py | CatOnDrugs/junior-test | 7809d4726b7b39d5c0a69addc56aaf1e81d26bd7 | [
"MIT"
] | null | null | null | juniorPython/wsgi.py | CatOnDrugs/junior-test | 7809d4726b7b39d5c0a69addc56aaf1e81d26bd7 | [
"MIT"
] | null | null | null | """
WSGI config for juniorPython project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project's settings module before the app is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'juniorPython.settings')
# Module-level WSGI callable that servers (gunicorn, uWSGI, ...) import.
application = get_wsgi_application()
| 23.588235 | 78 | 0.790524 |
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project's settings module before the app is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'juniorPython.settings')
# Module-level WSGI callable that servers (gunicorn, uWSGI, ...) import.
application = get_wsgi_application()
| true | true |
f73538f392b87985cef5c7a55ef0b6d5f0b2f365 | 3,837 | py | Python | slackbot.py | nathants/py-slackbot | bc119e64ad1ca2fd74e128379f40e968f74f1f68 | [
"MIT"
] | 1 | 2018-06-05T20:22:37.000Z | 2018-06-05T20:22:37.000Z | slackbot.py | nathants/slackbot | bc119e64ad1ca2fd74e128379f40e968f74f1f68 | [
"MIT"
] | null | null | null | slackbot.py | nathants/slackbot | bc119e64ad1ca2fd74e128379f40e968f74f1f68 | [
"MIT"
] | null | null | null | import json
import inspect
import requests
import os
import boto3
from urllib import parse
ASYNC = 'async'  # handler 'kind' marker: reply is produced by a second, asynchronous Lambda invocation
token = None  # Slack verification token; the importing app must assign this before main() runs
slash_handlers = []  # registered slash commands: [conditional, command, handler, kind]
event_handlers = []  # registered Events API handlers: [conditional, handler]
def slash(command, conditional=lambda text: True):
    """Decorator: register *f* as a synchronous handler for slash *command*.

    The handler only fires when ``conditional(text)`` is truthy for the
    command's text argument.
    """
    def register(f):
        slash_handlers.append([conditional, command, f, None])
        return f
    return register
def slash_async(command, conditional=lambda text: True):
    """Decorator: register *f* as an asynchronous (two-phase) handler for slash *command*.

    The immediate HTTP response is an acknowledgement; the real reply is
    posted later by a re-invoked Lambda (see ``asynchronous``).
    """
    def register(f):
        slash_handlers.append([conditional, command, f, ASYNC])
        return f
    return register
def event(conditional):
    """Decorator: register *f* to run for Events API payloads matching *conditional*."""
    def register(f):
        event_handlers.append([conditional, f])
        return f
    return register
def _lambda_response(body):
return {'statusCode': '200',
'isBase64Encoded': False,
'headers': {'Content-Type': 'application/json'},
'body': json.dumps(body)}
def response(body, in_channel=True):
    """Build a Slack message payload from *body*.

    A non-dict *body* is wrapped as ``{'text': body}``; a dict is mutated
    in place.  ``in_channel=False`` makes the reply ephemeral (visible only
    to the user who invoked the command).
    """
    payload = body if isinstance(body, dict) else {'text': body}
    payload["response_type"] = 'in_channel' if in_channel else 'ephemeral'
    return payload
def asynchronous(command, response_url, data, _file_):
    """Fire-and-forget re-invocation of this Lambda so a slow slash handler can reply later via *response_url*."""
    # The Lambda function name is derived from the handler's source file name.
    name = os.path.basename(_file_).replace(' ', '-').replace('_', '-').split('.py')[0]  # copied from: cli_aws.lambda_name()
    # The payload mimics an API Gateway event (top-level 'body' key) so main() can route it.
    val = {'body': json.dumps({'type': ASYNC,
                               'data': data,
                               'response_url': response_url,
                               'command': command,
                               'token': token})}
    # InvocationType='Event' = asynchronous invoke; we do not wait for the result.
    boto3.client('lambda').invoke(FunctionName=name,
                                  InvocationType='Event',
                                  Payload=bytes(json.dumps(val), 'utf-8'))
def main(event, context, log_unmatched_events=False):
    """Lambda entry point: validate the Slack token and dispatch the request.

    Handles three payload shapes: form-encoded slash commands, JSON Events
    API callbacks (including the one-time URL-verification 'challenge'),
    and the internal ASYNC payload produced by asynchronous().
    Returns an API Gateway proxy response dict, or None (after printing an
    error) when validation fails.
    """
    if not token:
        return print('error: must assign slackbot.token = "your verification token from the app page"')
    if token == 'SKIP':
        print('warning: you should set slackbot.token to the verification token from your slack app page')
    if 'body' not in event:
        return print(f'error: no body in event {event}')
    # Events API / ASYNC payloads are JSON; slash commands are form-encoded,
    # so json.loads raises and we fall back to parse_qs (which wraps every
    # value in a list — hence the [0] indexing in the slash branch).
    # NOTE(review): with token == 'SKIP' every request is rejected below,
    # despite the softer warning above — confirm this is intentional.
    try:
        body = json.loads(event['body'])
        if body['token'] != token or token == 'SKIP':
            return print(f'error: token mismatch {body["token"]} {token}')
    except:
        body = parse.parse_qs(event['body'])
        if body['token'][0] != token or token == 'SKIP':
            return print(f'error: token mismatch {body["token"][0]} {token}')
    if 'command' in body:
        # Slash command (form-encoded): dispatch the first matching registration.
        for conditional, command, handler, kind in slash_handlers:
            text = body.get("text", [''])[0]
            if body['command'][0] == command and conditional(text):
                if kind == ASYNC:
                    # Ack immediately; the real reply is posted by the
                    # re-invoked Lambda via response_url.
                    asynchronous(command, body['response_url'][0], text, inspect.getfile(handler))
                    return _lambda_response(response('one moment please...'))
                else:
                    return _lambda_response(handler(text))
    else:
        if "challenge" in body:
            # Slack URL-verification handshake: echo the challenge back.
            return _lambda_response({'challenge': body['challenge']})
        elif body['type'] == 'event_callback':
            # Events API: run every matching handler, then ack with 200.
            for conditional, handler in event_handlers:
                if conditional(body['event']):
                    handler(body['event'])
            return _lambda_response('')
        elif body['type'] == ASYNC:
            # Second phase of an async slash command: run the handler and
            # POST its result back to Slack's response_url.
            for conditional, command, handler, kind in slash_handlers:
                text = body['data']
                if body['command'] == command and kind == ASYNC and conditional(text):
                    resp = requests.post(body['response_url'], data=json.dumps(handler(text)))
                    assert str(resp.status_code)[0] == '2', [resp, resp.text]
            return _lambda_response('')
    if log_unmatched_events:
        print(f'nothing matched: {body}')
| 39.153061 | 124 | 0.565807 | import json
import inspect
import requests
import os
import boto3
from urllib import parse
ASYNC = 'async'  # handler 'kind' marker: reply comes from a second, asynchronous Lambda invocation
token = None  # Slack verification token; assigned by the importing application
slash_handlers = []  # registered slash commands: [conditional, command, handler, kind]
event_handlers = []  # registered Events API handlers: [conditional, handler]
def slash(command, conditional=lambda text: True):
    """Decorator: register a synchronous handler for slash *command* (gated by *conditional*)."""
    def fn(f):
        slash_handlers.append([conditional, command, f, None])
        return f
    return fn
def slash_async(command, conditional=lambda text: True):
    """Decorator: register an asynchronous (two-phase) handler for slash *command*."""
    def fn(f):
        slash_handlers.append([conditional, command, f, ASYNC])
        return f
    return fn
def event(conditional):
    """Decorator: register a handler for Events API payloads matching *conditional*."""
    def fn(f):
        event_handlers.append([conditional, f])
        return f
    return fn
def _lambda_response(body):
    """Wrap *body* as an API Gateway Lambda-proxy HTTP 200 response dict."""
    return {'statusCode': '200',
            'isBase64Encoded': False,
            'headers': {'Content-Type': 'application/json'},
            'body': json.dumps(body)}
def response(body, in_channel=True):
    """Build a Slack message payload; in_channel=False makes the reply ephemeral (caller-only)."""
    if not isinstance(body, dict):
        body = {'text': body}
    if in_channel:
        body["response_type"] = 'in_channel'
    else:
        body["response_type"] = 'ephemeral'
    return body
def asynchronous(command, response_url, data, _file_):
    """Fire-and-forget re-invocation of this Lambda so a slow slash handler can reply later via *response_url*."""
    # The Lambda function name is derived from the handler's source file name.
    name = os.path.basename(_file_).replace(' ', '-').replace('_', '-').split('.py')[0]
    # The payload mimics an API Gateway event (top-level 'body' key) so main() can route it.
    val = {'body': json.dumps({'type': ASYNC,
                               'data': data,
                               'response_url': response_url,
                               'command': command,
                               'token': token})}
    # InvocationType='Event' = asynchronous invoke; we do not wait for the result.
    boto3.client('lambda').invoke(FunctionName=name,
                                  InvocationType='Event',
                                  Payload=bytes(json.dumps(val), 'utf-8'))
def main(event, context, log_unmatched_events=False):
    """Lambda entry point: validate the Slack token and dispatch the request.

    Handles form-encoded slash commands, JSON Events API callbacks
    (including the URL-verification 'challenge'), and the internal ASYNC
    payload produced by asynchronous().  Returns an API Gateway proxy
    response dict, or None (after printing an error) on validation failure.
    """
    if not token:
        return print('error: must assign slackbot.token = "your verification token from the app page"')
    if token == 'SKIP':
        print('warning: you should set slackbot.token to the verification token from your slack app page')
    if 'body' not in event:
        return print(f'error: no body in event {event}')
    # JSON payloads parse directly; form-encoded slash commands make
    # json.loads raise and we fall back to parse_qs (values are lists).
    # NOTE(review): token == 'SKIP' rejects every request here — confirm intent.
    try:
        body = json.loads(event['body'])
        if body['token'] != token or token == 'SKIP':
            return print(f'error: token mismatch {body["token"]} {token}')
    except:
        body = parse.parse_qs(event['body'])
        if body['token'][0] != token or token == 'SKIP':
            return print(f'error: token mismatch {body["token"][0]} {token}')
    if 'command' in body:
        # Slash command: dispatch the first matching registration.
        for conditional, command, handler, kind in slash_handlers:
            text = body.get("text", [''])[0]
            if body['command'][0] == command and conditional(text):
                if kind == ASYNC:
                    # Ack now; the real reply is posted later via response_url.
                    asynchronous(command, body['response_url'][0], text, inspect.getfile(handler))
                    return _lambda_response(response('one moment please...'))
                else:
                    return _lambda_response(handler(text))
    else:
        if "challenge" in body:
            # Slack URL-verification handshake: echo the challenge back.
            return _lambda_response({'challenge': body['challenge']})
        elif body['type'] == 'event_callback':
            # Events API: run every matching handler, then ack with 200.
            for conditional, handler in event_handlers:
                if conditional(body['event']):
                    handler(body['event'])
            return _lambda_response('')
        elif body['type'] == ASYNC:
            # Second phase of an async slash command: POST the handler's
            # result back to Slack's response_url.
            for conditional, command, handler, kind in slash_handlers:
                text = body['data']
                if body['command'] == command and kind == ASYNC and conditional(text):
                    resp = requests.post(body['response_url'], data=json.dumps(handler(text)))
                    assert str(resp.status_code)[0] == '2', [resp, resp.text]
            return _lambda_response('')
    if log_unmatched_events:
        print(f'nothing matched: {body}')
| true | true |
f7353949672667f55d93782fdb8c769b8e8a0a9f | 1,825 | py | Python | 1058 Minimize Rounding Error to Meet Target.py | krishna13052001/LeetCode | cd6ec626bea61f0bd9e8493622074f9e69a7a1c3 | [
"MIT"
] | 872 | 2015-06-15T12:02:41.000Z | 2022-03-30T08:44:35.000Z | 1058 Minimize Rounding Error to Meet Target.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
] | 8 | 2015-06-21T15:11:59.000Z | 2022-02-01T11:22:34.000Z | 1058 Minimize Rounding Error to Meet Target.py | nadeemshaikh-github/LeetCode | 3fb14aeea62a960442e47dfde9f964c7ffce32be | [
"MIT"
] | 328 | 2015-06-28T03:10:35.000Z | 2022-03-29T11:05:28.000Z | #!/usr/bin/python3
"""
Given an array of prices [p1,p2...,pn] and a target, round each price pi to
Roundi(pi) so that the rounded array [Round1(p1),Round2(p2)...,Roundn(pn)] sums
to the given target. Each operation Roundi(pi) could be either Floor(pi) or
Ceil(pi).
Return the string "-1" if the rounded array is impossible to sum to target.
Otherwise, return the smallest rounding error, which is defined as
Σ |Roundi(pi) - (pi)| for i from 1 to n, as a string with three places after the
decimal.
Example 1:
Input: prices = ["0.700","2.800","4.900"], target = 8
Output: "1.000"
Explanation:
Use Floor, Ceil and Ceil operations to get (0.7 - 0) + (3 - 2.8) + (5 - 4.9) =
0.7 + 0.2 + 0.1 = 1.0 .
Example 2:
Input: prices = ["1.500","2.500","3.500"], target = 10
Output: "-1"
Explanation:
It is impossible to meet the target.
Note:
1 <= prices.length <= 500.
Each string of prices prices[i] represents a real number which is between 0 and
1000 and has exactly 3 decimal places.
target is between 0 and 1000000.
"""
from typing import List
import math
class Solution:
    def minimizeError(self, prices: List[str], target: int) -> str:
        """
        Round each price to floor or ceil so the rounded values sum to
        ``target``, minimising the total rounding error.

        Greedy: floor everything first, then ceil the entries with the
        largest fractional parts until the target is reached.

        Every price has exactly 3 decimal places, so all arithmetic is done
        in integer thousandths — this makes the result exact and avoids the
        float-accumulation error of summing binary fractions like 0.7.

        :param prices: decimal strings, each with exactly 3 decimal places
        :param target: required integer sum of the rounded prices
        :return: minimal total error formatted to 3 decimals, or "-1"
        """
        # "12.345" -> 12345 thousandths (exact, since inputs have 3 decimals).
        units = [int(p.replace('.', '')) for p in prices]
        fracs = [u % 1000 for u in units]
        floor_sum = sum(u // 1000 for u in units)
        # Ceiling only changes entries with a non-zero fractional part.
        ceil_room = sum(1 for f in fracs if f)
        if not floor_sum <= target <= floor_sum + ceil_room:
            return "-1"
        need_ceil = target - floor_sum
        # Ceil the largest fractional parts first: they cost the least (1000 - frac).
        fracs.sort(reverse=True)
        total = 0  # accumulated error, in thousandths
        for f in fracs:
            if need_ceil and f:
                total += 1000 - f
                need_ceil -= 1
            else:
                total += f
        # Exact fixed-point formatting: integer part, dot, 3-digit remainder.
        return f'{total // 1000}.{total % 1000:03d}'
| 26.449275 | 80 | 0.595068 |
from typing import List
import math
class Solution:
    def minimizeError(self, prices: List[str], target: int) -> str:
        """Floor every price, then ceil the entries with the largest
        fractional parts until the rounded sum reaches *target*; return the
        minimal total error to 3 decimals, or "-1" if unreachable."""
        values = [float(p) for p in prices]
        floor_total = sum(map(math.floor, values))
        ceil_total = sum(map(math.ceil, values))
        if target < floor_total or target > ceil_total:
            return "-1"
        # Fractional parts, largest first: ceiling those costs the least.
        fractions = sorted((v - math.floor(v) for v in values), reverse=True)
        to_ceil = target - floor_total
        total_error = 0
        for idx, frac in enumerate(fractions):
            total_error += (1 - frac) if idx < to_ceil else frac
        return f'{total_error:.3f}'
| true | true |
f735397fbd14352f66bbaf093f10830ab1a84343 | 568 | py | Python | it/structures/python2/default_naming-default/upper_camel.py | reproto/reproto | 92f0a4b258095bc2f8a394d0bd44209e3a599c4f | [
"Apache-2.0",
"MIT"
] | 108 | 2017-07-19T02:07:52.000Z | 2022-02-27T04:46:43.000Z | it/structures/python2/default_naming-default/upper_camel.py | reproto/reproto | 92f0a4b258095bc2f8a394d0bd44209e3a599c4f | [
"Apache-2.0",
"MIT"
] | 42 | 2017-11-21T14:21:40.000Z | 2022-02-26T02:40:38.000Z | it/structures/python2/default_naming-default/upper_camel.py | reproto/reproto | 92f0a4b258095bc2f8a394d0bd44209e3a599c4f | [
"Apache-2.0",
"MIT"
] | 9 | 2017-05-26T00:36:23.000Z | 2020-07-26T10:58:20.000Z | class Value:
    def __init__(self, _foo_bar):
        # Store the required "FooBar" payload value.
        self._foo_bar = _foo_bar
    @property
    def foo_bar(self):
        # Read-only accessor for the stored "FooBar" value.
        return self._foo_bar
@staticmethod
def decode(data):
f_foo_bar = data["FooBar"]
if not isinstance(f_foo_bar, unicode):
raise Exception("not a string")
return Value(f_foo_bar)
def encode(self):
data = dict()
if self._foo_bar is None:
raise Exception("FooBar: is a required field")
data["FooBar"] = self._foo_bar
return data
    def __repr__(self):
        # Debug representation including the wrapped value's repr.
        return "<Value foo_bar:{!r}>".format(self._foo_bar)
| 18.322581 | 55 | 0.65669 | class Value:
    def __init__(self, _foo_bar):
        # Store the required "FooBar" payload value.
        self._foo_bar = _foo_bar
    @property
    def foo_bar(self):
        # Read-only accessor for the stored "FooBar" value.
        return self._foo_bar
    @staticmethod
    def decode(data):
        # Build a Value from a decoded JSON object; "FooBar" must be a string.
        f_foo_bar = data["FooBar"]
        if not isinstance(f_foo_bar, unicode):  # Python 2 'unicode' — py2-targeted generated code
            raise Exception("not a string")
        return Value(f_foo_bar)
    def encode(self):
        # Serialize to a JSON-ready dict; "FooBar" is required (None rejected).
        data = dict()
        if self._foo_bar is None:
            raise Exception("FooBar: is a required field")
        data["FooBar"] = self._foo_bar
        return data
    def __repr__(self):
        # Debug representation including the wrapped value's repr.
        return "<Value foo_bar:{!r}>".format(self._foo_bar)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.