#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 1 10:21:48 2017
@author: carles
"""
import random
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense
from keras.layers.convolutional import Convolution2D as Conv2D
from keras.layers import MaxPooling2D
from keras.layers import BatchNormalization
from keras.layers import Flatten
from keras.layers import Dropout
#from keras.constraints import maxnorm
from keras.optimizers import sgd
#import keras.initializers
class Player():
def __init__(self, game,
max_epsilon,
epochs_to_max_epsilon,
max_discount,
epochs_to_max_discount,
kdt,
batch_size,
mem_size,
win_priority,
lose_priority,
sur_priority,
kernel_initializer,
bias_initializer,
frames_used,
convolutional_sizes,
dense_sizes,
pool_shape,
dropout,
learning_rate
):
# Exploration rate (aka epsilon): the probability that the player will
# take a random action. This trades off exploration against exploitation
# (should I keep choosing actions known to be safe, or try new ones that
# might yield higher scores?)
self.max_epsilon = max_epsilon # 0.1
self.epsilon = 0.0
self.epsilon_growth = (self.max_epsilon - self.epsilon
)/epochs_to_max_epsilon #/epoch
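# Sanity check of the schedule (illustrative numbers, not the original
# config): with max_epsilon = 0.1 and epochs_to_max_epsilon = 1000, epsilon
# grows by 0.0001 on every call to train() until it is capped at 0.1.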
# Discount rate: determines how much future rewards are taken into
# account when training. Zero makes the player myopic (it only values
# short-term rewards) and one takes future rewards at face value
# (so unless the game is deterministic, keep the discount below 1).
self.max_discount = max_discount #0.9
self.discount = 0.0
self.discount_growth = (self.max_discount - self.discount
)/epochs_to_max_discount #/epoch
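# Illustrative example (values assumed): with discount = 0.9, a reward that
# is two steps away effectively contributes 0.9**2 = 0.81 of its value once
# the bootstrapped targets in train() have propagated it back.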
# Advantage learning parameter. It is actually k/dt, with dt being
# the time step of a frame. Not sure how to set it.
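# Worked example of the advantage transform used in get_action() (numbers
# are illustrative): with Q = [1.0, 0.5] and kdt = 0.3,
#   A = max(Q) + (Q - max(Q)) * kdt = [1.0, 0.85]
# i.e. the gap between the best action and the rest shrinks by a factor kdt.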
self.kdt = kdt
self.batch_size = batch_size
self.max_mem = mem_size
self.win_priority = win_priority
self.lose_priority = lose_priority
self.sur_priority = sur_priority
self.kernel_initializer = kernel_initializer
self.bias_initializer = bias_initializer
self.frames_used = frames_used
self.convolutional_sizes = convolutional_sizes
self.dense_sizes = dense_sizes
self.pool_shape = pool_shape
self.dropout = dropout
self.learning_rate = learning_rate
self.memory = []
self.game = game
self.model = self.build_model()
self.model2 = self.build_model() # for double Q-learning
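# model is the online network (used for action selection and training);
# model2 is a copy whose weights are synced at the end of each train() call
# and is used to evaluate the bootstrapped targets (double Q-learning).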
def build_model(self):
# Build deep neural network
self.input_shape = (self.game.grid_size,)  # input_shape must be a tuple
model = Sequential()
model.add(Dense(self.dense_sizes[0], activation="relu",
input_shape=self.input_shape,
kernel_initializer='random_uniform',
bias_initializer='random_uniform'))
model.add(Dropout(self.dropout))
for h in self.dense_sizes[1:]:  # the first dense size was already added above
model.add(Dense(h, activation='relu'))
model.add(Dense(len(self.game.get_actions())))
model.compile(sgd(lr=self.learning_rate), "mse")
return model
# Another model, with convolutional layers. Comment or uncomment as needed.
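# NOTE: because this method has the same name as the one above, this second
# definition silently replaces the first when the class body is executed.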
def build_model(self):
self.input_shape = (*self.game.grid_shape, self.frames_used)
model = Sequential()
for s in self.convolutional_sizes:
model.add(BatchNormalization(input_shape=self.input_shape))
model.add(Dropout(self.dropout))
model.add(Conv2D(s[0], s[1], activation="relu",
padding='valid',
# subsample=(2, 2),
# dim_ordering='th',
# input_shape=self.input_shape,
kernel_initializer=self.kernel_initializer,
bias_initializer=self.bias_initializer))
if self.pool_shape != (0, 0):
model.add(MaxPooling2D(pool_size=self.pool_shape))
model.add(Flatten())
for s in self.dense_sizes:
model.add(BatchNormalization())
model.add(Dense(s, activation='relu'))
model.add(Dense(len(self.game.get_actions())))
model.compile(sgd(lr=self.learning_rate), "mse")
return model
def shape_grid(self, state):
"""
Shapes a grid into an appropriate form for the model
"""
return state.reshape(self.input_shape)
def forwardpass(self, model_input, secondary_model = False):
"""
Input: a SINGLE input for the model
Wraps a SINGLE input in an array and gets the model's predictions,
so that we don't have to wrap it every time we want to make a forward pass.
Output: the predictions for this input
"""
model = self.model if not secondary_model else self.model2
return model.predict(np.array([model_input]))
def get_action(self, state, exploration=True):
if random.random() < self.epsilon and exploration:
action = random.choice(self.game.get_actions())
else:
Q = self.forwardpass(self.shape_grid(state))[0]  # use the state passed in
# action = max(self.game.get_actions(), key=lambda a: Q[a])
A = max(Q) + (Q - max(Q))*self.kdt
action = max(self.game.get_actions(), key=lambda a: A[a])
return action
def memorize(self, state, action, reward, state_final, gameover):
experience = (self.shape_grid(state), action, reward,
self.shape_grid(state_final), gameover)
# Prioritized Experience Replay
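# Implemented here by storing the experience `priority` times, so that
# random.sample() in train() is proportionally more likely to draw
# high-priority (win/lose) transitions than ordinary surviving ones.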
if reward == self.game.win_r:
priority = self.win_priority
elif reward == self.game.lose_r:
priority = self.lose_priority
else:
priority = self.sur_priority
for i in range(priority):
self.memory.append(experience)
if len(self.memory) > self.max_mem:
self.memory.pop(0)
def train(self):
# grow the discount rate
self.discount += self.discount_growth
self.discount = min(self.max_discount, self.discount) # bound
# grow the exploration rate
self.epsilon += self.epsilon_growth
self.epsilon = min(self.max_epsilon, self.epsilon) # bound
n = min(len(self.memory), self.batch_size)
# Take the sample at random
sample = random.sample(self.memory, n)
# Take the last experiences, last in first out
# sample = reversed(self.memory[-n:])
# Take the sample at random but prioritizing the last experiences
# w = np.linspace(0.1, 1, num = len(self.memory))
# w = w/np.sum(w)
# s = np.random.choice(len(self.memory), p=w, size=n, replace=False)
# sample = np.array(self.memory)[s]
inputs = np.zeros((n, *(self.input_shape)))
targets = np.zeros((n, len(self.game.get_actions())))
for i, experience in enumerate(sample):
state_t = experience[0]
action_t = experience[1]
reward_t = experience[2]
state_tp1 = experience[3]
gameover = experience[4]
# make the input vector
inputs[i] = state_t
# make the target vector
targets[i] = self.forwardpass(state_t, True)[0]
if gameover:
# if this action resulted in the end of the game
# its future reward is just its reward
targets[i][action_t] = reward_t
else:
# otherwise its future reward is its immediate reward plus a
# discounted approximation of future rewards
Q = self.forwardpass(state_tp1, True)[0]
# targets[i][action_t] = reward_t + self.discount*max(Q)
nextQ = Q[self.get_action(state_tp1)]
targets[i][action_t] = reward_t + self.discount*nextQ #SARSA
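# i.e. target = r + discount * Q2(s', a'), where a' is picked (with
# exploration) by the primary network via get_action() and Q2 is the
# secondary network's estimate of its value.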
# Update secondary network weights to those of the primary
self.model2.set_weights(self.model.get_weights())
return self.model.train_on_batch(inputs, targets)
def save(self, fname):
self.model.save_weights(fname + '.h5')
def load(self, fname):
self.model.load_weights(fname + '.h5')
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
from os.path import join, dirname, splitext, basename, exists
from tools.toolchains import mbedToolchain, TOOLCHAIN_PATHS
from tools.hooks import hook_tool
from tools.utils import mkdir
import copy
class ARM(mbedToolchain):
LINKER_EXT = '.sct'
LIBRARY_EXT = '.ar'
STD_LIB_NAME = "%s.ar"
DIAGNOSTIC_PATTERN = re.compile(r'"(?P<file>[^"]+)", line (?P<line>\d+)( \(column (?P<column>\d+)\)|): (?P<severity>Warning|Error): (?P<message>.+)')
INDEX_PATTERN = re.compile(r'(?P<col>\s*)\^')
DEP_PATTERN = re.compile(r'\S+:\s(?P<file>.+)\n')
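# Example armcc output these patterns are meant to match (illustrative text,
# not captured from a real build):
#   "main.cpp", line 12 (column 5): Warning: incompatible pointer type
#       ^
# DIAGNOSTIC_PATTERN extracts file/line/column/severity/message, and
# INDEX_PATTERN measures the caret line to recover the column when the
# diagnostic itself did not include one.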
DEFAULT_FLAGS = {
'common': ["-c", "--gnu",
"-Otime", "--split_sections", "--apcs=interwork",
"--brief_diagnostics", "--restrict", "--multibyte_chars"],
'asm': [],
'c': ["--md", "--no_depend_system_headers", "--c99", "-D__ASSERT_MSG"],
'cxx': ["--cpp", "--no_rtti", "--no_vla"],
'ld': [],
}
def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
mbedToolchain.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
if target.core == "Cortex-M0+":
cpu = "Cortex-M0"
elif target.core == "Cortex-M4F":
cpu = "Cortex-M4.fp"
elif target.core == "Cortex-M7FD":
cpu = "Cortex-M7.fp.dp"
elif target.core == "Cortex-M7F":
cpu = "Cortex-M7.fp.sp"
else:
cpu = target.core
ARM_BIN = join(TOOLCHAIN_PATHS['ARM'], "bin")
ARM_INC = join(TOOLCHAIN_PATHS['ARM'], "include")
main_cc = join(ARM_BIN, "armcc")
self.flags['common'] += ["--cpu=%s" % cpu]
if "save-asm" in self.options:
self.flags['common'].extend(["--asm", "--interleave"])
if "debug-info" in self.options:
self.flags['common'].append("-g")
self.flags['c'].append("-O0")
else:
self.flags['c'].append("-O3")
self.asm = [main_cc] + self.flags['common'] + self.flags['asm'] + ["-I \""+ARM_INC+"\""]
self.cc = [main_cc] + self.flags['common'] + self.flags['c'] + ["-I \""+ARM_INC+"\""]
self.cppc = [main_cc] + self.flags['common'] + self.flags['c'] + self.flags['cxx'] + ["-I \""+ARM_INC+"\""]
self.ld = [join(ARM_BIN, "armlink")]
self.sys_libs = []
self.ar = join(ARM_BIN, "armar")
self.elf2bin = join(ARM_BIN, "fromelf")
def parse_dependencies(self, dep_path):
dependencies = []
for line in open(dep_path).readlines():
match = ARM.DEP_PATTERN.match(line)
if match is not None:
# We need to prepend the chroot path because the compiler is chrooted when the .d files are generated
dependencies.append((self.CHROOT if self.CHROOT else '') + match.group('file'))
return dependencies
def parse_output(self, output):
msg = None
for line in output.splitlines():
match = ARM.DIAGNOSTIC_PATTERN.match(line)
if match is not None:
if msg is not None:
self.cc_info(msg)
msg = {
'severity': match.group('severity').lower(),
'file': match.group('file'),
'line': match.group('line'),
'col': match.group('column') if match.group('column') else 0,
'message': match.group('message'),
'text': '',
'target_name': self.target.name,
'toolchain_name': self.name
}
elif msg is not None:
# Determine the warning/error column by calculating the ^ position
match = ARM.INDEX_PATTERN.match(line)
if match is not None:
msg['col'] = len(match.group('col'))
self.cc_info(msg)
msg = None
else:
msg['text'] += line+"\n"
if msg is not None:
self.cc_info(msg)
def get_dep_option(self, object):
base, _ = splitext(object)
dep_path = base + '.d'
return ["--depend", dep_path]
def get_config_option(self, config_header):
return ['--preinclude=' + config_header]
def get_compile_options(self, defines, includes, for_asm=False):
opts = ['-D%s' % d for d in defines]
if self.RESPONSE_FILES:
opts += ['--via', self.get_inc_file(includes)]
else:
opts += ["-I%s" % i for i in includes]
if not for_asm:
config_header = self.get_config_header()
if config_header is not None:
opts = opts + self.get_config_option(config_header)
return opts
@hook_tool
def assemble(self, source, object, includes):
# Preprocess first, then assemble
dir = join(dirname(object), '.temp')
mkdir(dir)
tempfile = join(dir, basename(object) + '.E.s')
# Build preprocess assemble command
cmd_pre = self.asm + self.get_compile_options(self.get_symbols(True), includes) + ["-E", "-o", tempfile, source]
# Build main assemble command
cmd = self.asm + ["-o", object, tempfile]
# Call cmdline hook
cmd_pre = self.hook.get_cmdline_assembler(cmd_pre)
cmd = self.hook.get_cmdline_assembler(cmd)
# Return command array, don't execute
return [cmd_pre, cmd]
@hook_tool
def compile(self, cc, source, object, includes):
# Build compile command
cmd = cc + self.get_compile_options(self.get_symbols(), includes)
cmd.extend(self.get_dep_option(object))
cmd.extend(["-o", object, source])
# Call cmdline hook
cmd = self.hook.get_cmdline_compiler(cmd)
return [cmd]
def compile_c(self, source, object, includes):
return self.compile(self.cc, source, object, includes)
def compile_cpp(self, source, object, includes):
return self.compile(self.cppc, source, object, includes)
@hook_tool
def link(self, output, objects, libraries, lib_dirs, mem_map):
map_file = splitext(output)[0] + ".map"
if len(lib_dirs):
args = ["-o", output, "--userlibpath", ",".join(lib_dirs), "--info=totals", "--map", "--list=%s" % map_file]
else:
args = ["-o", output, "--info=totals", "--map", "--list=%s" % map_file]
if mem_map:
args.extend(["--scatter", mem_map])
# Build linker command
cmd = self.ld + args + objects + libraries + self.sys_libs
# Call cmdline hook
cmd = self.hook.get_cmdline_linker(cmd)
if self.RESPONSE_FILES:
# Split link command to linker executable + response file
cmd_linker = cmd[0]
link_files = self.get_link_file(cmd[1:])
cmd = [cmd_linker, '--via', link_files]
# Exec command
self.cc_verbose("Link: %s" % ' '.join(cmd))
self.default_cmd(cmd)
@hook_tool
def archive(self, objects, lib_path):
if self.RESPONSE_FILES:
param = ['--via', self.get_arch_file(objects)]
else:
param = objects
# Exec command
self.default_cmd([self.ar, '-r', lib_path] + param)
@hook_tool
def binary(self, resources, elf, bin):
# Build binary command
cmd = [self.elf2bin, '--bin', '-o', bin, elf]
# Call cmdline hook
cmd = self.hook.get_cmdline_binary(cmd)
# Exec command
self.cc_verbose("FromELF: %s" % ' '.join(cmd))
self.default_cmd(cmd)
class ARM_STD(ARM):
def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
# Run-time values
self.ld.extend(["--libpath", join(TOOLCHAIN_PATHS['ARM'], "lib")])
class ARM_MICRO(ARM):
PATCHED_LIBRARY = False
def __init__(self, target, options=None, notify=None, macros=None, silent=False, extra_verbose=False):
ARM.__init__(self, target, options, notify, macros, silent, extra_verbose=extra_verbose)
# Extend flags
self.flags['common'].extend(["-D__MICROLIB"])
self.flags['c'].extend(["--library_type=microlib"])
self.flags['ld'].extend(["--library_type=microlib"])
# Run-time values
self.asm += ["-D__MICROLIB"]
self.cc += ["-D__MICROLIB", "--library_type=microlib"]
self.cppc += ["-D__MICROLIB", "--library_type=microlib"]
self.ld += ["--library_type=microlib"]
# Only allow a single thread
self.cc += ["-DMBED_RTOS_SINGLE_THREAD"]
self.cppc += ["-DMBED_RTOS_SINGLE_THREAD"]
# We had to patch microlib to add C++ support
# In later releases this patch should have entered mainline
if ARM_MICRO.PATCHED_LIBRARY:
# Run-time values
self.flags['ld'].extend(["--noscanlib"])
# Run-time values
self.ld += ["--noscanlib"]
# System Libraries
self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "microlib", lib+".l") for lib in ["mc_p", "mf_p", "m_ps"]])
if target.core == "Cortex-M3":
self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "cpplib", lib+".l") for lib in ["cpp_ws", "cpprt_w"]])
elif target.core in ["Cortex-M0", "Cortex-M0+"]:
self.sys_libs.extend([join(TOOLCHAIN_PATHS['ARM'], "lib", "cpplib", lib+".l") for lib in ["cpp_ps", "cpprt_p"]])
else:
# Run-time values
self.ld.extend(["--libpath", join(TOOLCHAIN_PATHS['ARM'], "lib")])
import os
from decimal import Decimal as D
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from oscar.apps.catalogue.categories import create_from_breadcrumbs
from oscar.core.loading import get_class, get_classes
from oscar.core.compat import UnicodeCSVReader, atomic_compat
ImportingError = get_class('partner.exceptions', 'ImportingError')
Partner, StockRecord = get_classes('partner.models', ['Partner',
'StockRecord'])
ProductClass, Product, Category, ProductCategory = get_classes(
'catalogue.models', ('ProductClass', 'Product', 'Category',
'ProductCategory'))
class CatalogueImporter(object):
"""
CSV product importer used to build the sandbox. Might not work very well
for anything else.
"""
_flush = False
def __init__(self, logger, delimiter=",", flush=False):
self.logger = logger
self._delimiter = delimiter
self._flush = flush
def handle(self, file_path=None):
u"""Handles the actual import process"""
if not file_path:
raise ImportingError(_("No file path supplied"))
Validator().validate(file_path)
if self._flush is True:
self.logger.info(" - Flushing product data before import")
self._flush_product_data()
self._import(file_path)
def _flush_product_data(self):
u"""Flush out product and stock models"""
Product.objects.all().delete()
ProductClass.objects.all().delete()
Partner.objects.all().delete()
StockRecord.objects.all().delete()
@atomic_compat
def _import(self, file_path):
u"""Imports given file"""
stats = {'new_items': 0,
'updated_items': 0}
row_number = 0
with UnicodeCSVReader(
file_path, delimiter=self._delimiter,
quotechar='"', escapechar='\\') as reader:
for row in reader:
row_number += 1
self._import_row(row_number, row, stats)
msg = "New items: %d, updated items: %d" % (stats['new_items'],
stats['updated_items'])
self.logger.info(msg)
def _import_row(self, row_number, row, stats):
if len(row) != 5 and len(row) != 9:
self.logger.error("Row number %d has an invalid number of fields"
" (%d), skipping..." % (row_number, len(row)))
return
item = self._create_item(*row[:5], stats=stats)
if len(row) == 9:
# With stock data
self._create_stockrecord(item, *row[5:9], stats=stats)
def _create_item(self, product_class, category_str, upc, title,
description, stats):
# Ignore any entries that are NULL
if description == 'NULL':
description = ''
# Create item class and item
product_class, __ \
= ProductClass.objects.get_or_create(name=product_class)
try:
item = Product.objects.get(upc=upc)
stats['updated_items'] += 1
except Product.DoesNotExist:
item = Product()
stats['new_items'] += 1
item.upc = upc
item.title = title
item.description = description
item.product_class = product_class
item.save()
# Category
cat = create_from_breadcrumbs(category_str)
ProductCategory.objects.create(product=item, category=cat)
return item
def _create_stockrecord(self, item, partner_name, partner_sku,
price_excl_tax, num_in_stock, stats):
# Create partner and stock record
partner, _ = Partner.objects.get_or_create(
name=partner_name)
try:
stock = StockRecord.objects.get(partner_sku=partner_sku)
except StockRecord.DoesNotExist:
stock = StockRecord()
stock.product = item
stock.partner = partner
stock.partner_sku = partner_sku
stock.price_excl_tax = D(price_excl_tax)
stock.num_in_stock = num_in_stock
stock.save()
class Validator(object):
def validate(self, file_path):
self._exists(file_path)
self._is_file(file_path)
self._is_readable(file_path)
def _exists(self, file_path):
u"""Check whether a file exists"""
if not os.path.exists(file_path):
raise ImportingError(_("%s does not exist") % (file_path))
def _is_file(self, file_path):
u"""Check whether file is actually a file type"""
if not os.path.isfile(file_path):
raise ImportingError(_("%s is not a file") % (file_path))
def _is_readable(self, file_path):
u"""Check file is readable"""
try:
f = open(file_path, 'r')
f.close()
except:
raise ImportingError(_("%s is not readable") % (file_path))
class DemoSiteImporter(object):
"""
Another quick and dirty catalogue product importer. Used to build the
demo site, and most likely not useful outside of it.
"""
def __init__(self, logger):
self.logger = logger
@atomic_compat
def handle(self, product_class_name, filepath):
product_class = ProductClass.objects.get(
name=product_class_name)
attribute_codes = []
with UnicodeCSVReader(filepath) as reader:
for row in reader:
if row[1] == 'UPC':
attribute_codes = row[9:]
continue
self.create_product(product_class, attribute_codes, row)
def create_product(self, product_class, attribute_codes, row): # noqa
(ptype, upc, title, description,
category, partner, sku, price, stock) = row[0:9]
# Create product
is_variant = ptype.lower() == 'variant'
is_group = ptype.lower() == 'group'
if upc:
try:
product = Product.objects.get(upc=upc)
except Product.DoesNotExist:
product = Product(upc=upc)
else:
product = Product()
if not is_variant:
product.title = title
product.description = description
product.product_class = product_class
# Attributes
if not is_group:
for code, value in zip(attribute_codes, row[9:]):
# Need to check if the attribute requires an Option instance
attr = product_class.attributes.get(
code=code)
if attr.is_option:
value = attr.option_group.options.get(option=value)
if attr.type == 'date':
value = datetime.strptime(value, "%d/%m/%Y").date()
setattr(product.attr, code, value)
# Assign parent for variants
if is_variant:
product.parent = self.parent
product.save()
# Save a reference to last group product
if is_group:
self.parent = product
# Category information
if category:
leaf = create_from_breadcrumbs(category)
ProductCategory.objects.get_or_create(
product=product, category=leaf)
# Stock record
if partner:
partner, __ = Partner.objects.get_or_create(name=partner)
try:
record = StockRecord.objects.get(product=product)
except StockRecord.DoesNotExist:
record = StockRecord(product=product)
record.partner = partner
record.partner_sku = sku
record.price_excl_tax = D(price)
if stock != 'NULL':
record.num_in_stock = stock
record.save()
from common_fixtures import * # NOQA
TEST_SERVICE_OPT_IMAGE = 'ibuildthecloud/helloworld'
TEST_SERVICE_OPT_IMAGE_LATEST = TEST_SERVICE_OPT_IMAGE + ':latest'
TEST_SERVICE_OPT_IMAGE_UUID = 'docker:' + TEST_SERVICE_OPT_IMAGE_LATEST
LB_IMAGE_UUID = "docker:sangeetha/testlbsd:latest"
logger = logging.getLogger(__name__)
if_compose_data_files = pytest.mark.skipif(
not os.environ.get('CATTLE_TEST_DATA_DIR'),
reason='Docker compose files directory location not set')
def test_rancher_compose_service(client,
rancher_cli_container,
socat_containers):
vol_container = client.create_container(imageUuid=TEST_IMAGE_UUID,
name=random_str(),
labels={"c1": "vol"}
)
vol_container = client.wait_success(vol_container)
volume_in_host = "/test/container"
volume_in_container = "/test/vol1"
docker_vol_value = volume_in_host + ":" + volume_in_container + ":ro"
cap_add = ["CHOWN"]
cap_drop = ["KILL"]
restart_policy = {"maximumRetryCount": 10, "name": "on-failure"}
dns_search = ['1.2.3.4']
dns_name = ['1.2.3.4']
domain_name = "rancher.io"
host_name = "test"
user = "root"
command = ["sleep", "9000"]
env_var = {"TEST_FILE": "/etc/testpath.conf"}
memory = 8000000
cpu_set = "0"
cpu_shares = 400
# Not including "dataVolumesFrom": [vol_container.id] since it is not
# implemented yet
launch_config = {"imageUuid": TEST_SERVICE_OPT_IMAGE_UUID,
"command": command,
"dataVolumes": [docker_vol_value],
"environment": env_var,
"capAdd": cap_add,
"capDrop": cap_drop,
"dnsSearch": dns_search,
"dns": dns_name,
"privileged": True,
"domainName": domain_name,
"stdinOpen": True,
"tty": True,
"memory": memory,
"cpuSet": cpu_set,
"cpuShares": cpu_shares,
"restartPolicy": restart_policy,
"directory": "/",
"hostname": host_name,
"user": user,
"labels":
{"io.rancher.scheduler.affinity:container_label":
"c1=vol"}
}
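# The scheduler affinity label above is expected to co-locate these service
# containers with vol_container, which was created with the label c1=vol.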
scale = 1
service, env = create_env_and_svc(client, launch_config,
scale)
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
check_container_in_service(client, rancher_service)
container_list = get_service_container_list(client, rancher_service)
dns_name.append(RANCHER_DNS_SERVER)
dns_search.append(rancher_env.name+"."+RANCHER_DNS_SEARCH)
dns_search.append(
rancher_service.name+"."+rancher_env.name+"."+RANCHER_DNS_SEARCH)
dns_search.append(RANCHER_DNS_SEARCH)
for c in container_list:
print(c)
docker_client = get_docker_client(c.hosts[0])
inspect = docker_client.inspect_container(c.externalId)
assert docker_vol_value in inspect["HostConfig"]["Binds"]
# assert inspect["HostConfig"]["VolumesFrom"] == \
# [vol_container.externalId]
assert inspect["HostConfig"]["PublishAllPorts"] is False
assert inspect["HostConfig"]["Privileged"] is True
assert inspect["Config"]["OpenStdin"] is True
assert inspect["Config"]["Tty"] is True
assert inspect["HostConfig"]["Dns"] == dns_name
assert inspect["HostConfig"]["DnsSearch"] == dns_search
assert inspect["Config"]["Hostname"] == host_name
assert inspect["Config"]["Domainname"] == domain_name
assert inspect["Config"]["User"] == user
assert inspect["HostConfig"]["CapAdd"] == cap_add
assert inspect["HostConfig"]["CapDrop"] == cap_drop
# assert inspect["Config"]["Cpuset"] == cpu_set
# No support for restart
assert inspect["HostConfig"]["RestartPolicy"]["Name"] == ""
assert \
inspect["HostConfig"]["RestartPolicy"]["MaximumRetryCount"] == 0
assert inspect["Config"]["Cmd"] == command
assert inspect["HostConfig"]["Memory"] == memory
assert "TEST_FILE=/etc/testpath.conf" in inspect["Config"]["Env"]
assert inspect["HostConfig"]["CpuShares"] == cpu_shares
delete_all(client, [env, rancher_env])
def test_rancher_compose_service_option_2(client,
rancher_cli_container,
socat_containers):
hosts = client.list_host(kind='docker', removed_null=True, state="active").data
cpu_shares = 400
ulimit = {"hard": 1024, "name": "cpu", "soft": 1024}
ulimit_inspect = {"Hard": 1024, "Name": "cpu", "Soft": 1024}
ipcMode = "host"
sysctls = {"net.ipv4.ip_forward": "1"}
dev_opts = {
'/dev/null': {
'readIops': 2000,
'writeIops': 3000,
'readBps': 4000,
'writeBps': 200,
}
}
cpu_shares = 400
blkio_weight = 1000
cpu_period = 10000
cpu_quota = 20000
cpu_set = "0"
cpu_setmems = "0"
dns_opt = ["abc"]
group_add = ["root"]
kernel_memory = 6000000
memory_reservation = 5000000
memory_swap = -1
memory_swappiness = 100
oom_killdisable = True
oom_scoreadj = 100
read_only = True
shm_size = 1024
stop_signal = "SIGTERM"
uts = "host"
memory = 8000000
dev_opts_inspect = {u"Path": "/dev/null",
u"Rate": 400}
cgroup_parent = "xyz"
extraHosts = ["host1:10.1.1.1", "host2:10.2.2.2"]
tmp_fs = {"/tmp": "rw"}
security_opt = ["label=user:USER", "label=role:ROLE"]
launch_config = {"imageUuid": TEST_SERVICE_OPT_IMAGE_UUID,
"extraHosts": extraHosts,
"privileged": True,
"cpuShares": cpu_shares,
"blkioWeight": blkio_weight,
"blkioDeviceOptions": dev_opts,
"cgroupParent": cgroup_parent,
"cpuShares": cpu_shares,
"cpuPeriod": cpu_period,
"cpuQuota": cpu_quota,
"cpuSet": cpu_set,
"cpuSetMems": cpu_setmems,
"dnsOpt": dns_opt,
"groupAdd": group_add,
"kernelMemory": kernel_memory,
"memory": memory,
"memoryReservation": memory_reservation,
"memorySwap": memory_swap,
"memorySwappiness": memory_swappiness,
"oomKillDisable": oom_killdisable,
"oomScoreAdj": oom_scoreadj,
"readOnly": read_only,
"securityOpt": security_opt,
"shmSize": shm_size,
"stopSignal": stop_signal,
"sysctls": sysctls,
"tmpfs": tmp_fs,
"ulimits": [ulimit],
"ipcMode": ipcMode,
"uts": uts,
"requestedHostId": hosts[0].id
}
scale = 2
service, env = create_env_and_svc(client, launch_config,
scale)
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
check_container_in_service(client, rancher_service)
container_list = get_service_container_list(client, rancher_service)
for c in container_list:
docker_client = get_docker_client(c.hosts[0])
inspect = docker_client.inspect_container(c.externalId)
assert inspect["HostConfig"]["ExtraHosts"] == extraHosts
assert inspect["HostConfig"]["BlkioWeight"] == blkio_weight
dev_opts_inspect["Path"] = "/dev/null"
dev_opts_inspect["Rate"] = 4000
assert \
inspect["HostConfig"]["BlkioDeviceReadBps"] == [dev_opts_inspect]
dev_opts_inspect["Path"] = "/dev/null"
dev_opts_inspect["Rate"] = 200
assert \
inspect["HostConfig"]["BlkioDeviceWriteBps"] == [dev_opts_inspect]
dev_opts_inspect["Path"] = "/dev/null"
dev_opts_inspect["Rate"] = 2000
assert \
inspect["HostConfig"]["BlkioDeviceReadIOps"] == [dev_opts_inspect]
dev_opts_inspect["Path"] = "/dev/null"
dev_opts_inspect["Rate"] = 3000
assert \
inspect["HostConfig"]["BlkioDeviceWriteIOps"] == [dev_opts_inspect]
assert inspect["HostConfig"]["CpuShares"] == cpu_shares
assert inspect["HostConfig"]["CgroupParent"] == cgroup_parent
assert inspect["HostConfig"]["CpuPeriod"] == cpu_period
assert inspect["HostConfig"]["CpuQuota"] == cpu_quota
assert inspect["HostConfig"]["CpusetCpus"] == cpu_set
# Bug - 6700
"""
assert inspect["HostConfig"]["CpusetMems"] == cpu_setmems
assert inspect["HostConfig"]["KernelMemory"] == kernel_memory
"""
assert inspect["HostConfig"]["MemoryReservation"] == memory_reservation
assert inspect["HostConfig"]["MemorySwap"] == memory_swap
assert inspect["HostConfig"]["MemorySwappiness"] == memory_swappiness
assert inspect["HostConfig"]["OomKillDisable"]
assert inspect["HostConfig"]["OomScoreAdj"] == oom_scoreadj
assert inspect["HostConfig"]["ReadonlyRootfs"]
assert inspect["HostConfig"]["SecurityOpt"] == security_opt
assert inspect["HostConfig"]["Tmpfs"] == tmp_fs
assert inspect["HostConfig"]["ShmSize"] == shm_size
assert inspect["Config"]["StopSignal"] == stop_signal
assert inspect["HostConfig"]["Ulimits"] == [ulimit_inspect]
assert inspect["HostConfig"]["IpcMode"] == ipcMode
assert inspect["HostConfig"]["UTSMode"] == uts
assert inspect["HostConfig"]["DnsOptions"] == dns_opt
assert inspect["HostConfig"]["GroupAdd"] == group_add
delete_all(client, [env])
@pytest.mark.skipif(True, reason='not implemented yet')
def test_rancher_compose_services_port_and_link_options(
client, rancher_cli_container, socat_containers):
hosts = client.list_host(kind='docker', removed_null=True, state="active").data
host = hosts[0]
link_host = hosts[1]
link_name = "WEB1"
link_port = 80
exposed_port = 9999
link_container = client.create_container(
imageUuid=LB_IMAGE_UUID,
environment={'CONTAINER_NAME': link_name},
name=random_str(),
requestedHostId=host.id
)
link_container = client.wait_success(link_container)
launch_config = {"imageUuid": SSH_IMAGE_UUID,
"ports": [str(exposed_port)+":22/tcp"],
"instanceLinks": {
link_name:
link_container.id},
"requestedHostId": link_host.id,
}
service, env = create_env_and_svc(client, launch_config, 1)
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
container_name = get_container_name(rancher_env, rancher_service, 1)
containers = client.list_container(name=container_name, state="running").data
assert len(containers) == 1
con = containers[0]
validate_exposed_port_and_container_link(client, con, link_name,
link_port, exposed_port)
delete_all(client, [env, rancher_env, link_container])
def test_rancher_compose_lbservice(client,
rancher_cli_container):
port = "7900"
# Add LB service and do not activate services
service_scale = 2
lb_scale = 1
env, service, lb_service = create_env_with_svc_and_lb(
client, service_scale, lb_scale, port)
# Add another target to LB service
launch_config = {"imageUuid": WEB_IMAGE_UUID}
service_name = random_str()
service1 = client.create_service(name=service_name,
stackId=env.id,
launchConfig=launch_config,
scale=2)
service1 = client.wait_success(service1)
assert service1.state == "inactive"
service = activate_svc(client, service)
service1 = activate_svc(client, service1)
# Set LB targets
port_rules = lb_service.lbConfig.portRules
protocol = "http"
target_port = "80"
service_id = service1.id
port_rule = {"sourcePort": port, "protocol": protocol,
"serviceId": service_id, "targetPort": target_port}
port_rules.append(port_rule)
lb_service = client.update(lb_service,
lbConfig=create_lb_config(port_rules))
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_lb_service = get_rancher_compose_service(
client, rancher_env.id, lb_service)
client.wait_success(rancher_lb_service)
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
client.wait_success(rancher_service)
rancher_service1 = get_rancher_compose_service(
client, rancher_env.id, service1)
client.wait_success(rancher_service1)
validate_lb_service(client, rancher_lb_service, port,
[rancher_service, rancher_service1])
delete_all(client, [env, rancher_env])
def test_rancher_compose_lbservice_internal(client,
rancher_cli_container):
port = "7911"
con_port = "7912"
# Deploy container in same network to test accessibility of internal LB
hosts = client.list_host(kind='docker', removed_null=True, state="active").data
assert len(hosts) > 0
host = hosts[0]
client_con = client.create_container(
name=random_str(), imageUuid=SSH_IMAGE_UUID,
ports=[con_port+":22/tcp"], requestedHostId=host.id)
client_con = client.wait_success(client_con, 120)
assert client_con.state == "running"
# Add an internal LB service and do not activate services
service_scale = 2
lb_scale = 1
env, service, lb_service = create_env_with_svc_and_lb(
client, service_scale, lb_scale, port, internal=True)
# Add another target to LB service
launch_config = {"imageUuid": WEB_IMAGE_UUID}
service_name = random_str()
service1 = client.create_service(name=service_name,
stackId=env.id,
launchConfig=launch_config,
scale=2)
service1 = client.wait_success(service1)
assert service1.state == "inactive"
service = activate_svc(client, service)
service1 = activate_svc(client, service1)
# Set LB targets
port_rules = lb_service.lbConfig.portRules
protocol = "http"
target_port = "80"
service_id = service1.id
port_rule = {"sourcePort": port, "protocol": protocol,
"serviceId": service_id, "targetPort": target_port}
port_rules.append(port_rule)
lb_service = client.update(lb_service,
lbConfig=create_lb_config(port_rules))
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_lb_service = get_rancher_compose_service(
client, rancher_env.id, lb_service)
client.wait_success(rancher_lb_service)
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
client.wait_success(rancher_service)
rancher_service1 = get_rancher_compose_service(
client, rancher_env.id, service1)
client.wait_success(rancher_service1)
time.sleep(20)
validate_internal_lb(client, rancher_lb_service,
[rancher_service, rancher_service1],
host, con_port, port)
# Check that the port on the host where the LB agent is running is not accessible
lb_containers = get_service_container_list(
client, rancher_lb_service)
assert len(lb_containers) == lb_service.scale
for lb_con in lb_containers:
host = client.by_id('host', lb_con.hosts[0].id)
assert check_for_no_access(host, port)
delete_all(client, [env, rancher_env])
def test_rancher_compose_service_links(client,
rancher_cli_container):
port = "7901"
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service = create_env_with_2_svc(
client, service_scale, consumed_service_scale, port)
service_link = {"serviceId": consumed_service.id, "ports": ["80"]}
service.addservicelink(serviceLink=service_link)
service = client.wait_success(service, 120)
# Launch env using docker compose
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
rancher_consumed_service = get_rancher_compose_service(
client, rancher_env.id, consumed_service)
client.wait_success(rancher_service)
client.wait_success(rancher_consumed_service)
validate_add_service_link(client, rancher_service,
rancher_consumed_service)
validate_linked_service(client, rancher_service,
[rancher_consumed_service], port)
delete_all(client, [env, rancher_env])
def test_rancher_compose_dns_services(client,
rancher_cli_container):
port = "7902"
rancher_compose_dns_services(client, port,
rancher_cli_container)
def test_rancher_compose_dns_services_cross_stack(client,
rancher_cli_container):
port = "7903"
rancher_compose_dns_services(client, port,
rancher_cli_container, True)
def test_rancher_compose_external_services(client,
rancher_cli_container):
port = "7904"
service_scale = 1
env, service, ext_service, con_list = create_env_with_ext_svc(
client, service_scale, port)
service_link = {"serviceId": ext_service.id}
service.addservicelink(serviceLink=service_link)
# Launch env using docker compose
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
rancher_ext_service = get_rancher_compose_service(
client, rancher_env.id, ext_service)
client.wait_success(con_list[0])
client.wait_success(con_list[1])
client.wait_success(rancher_service)
client.wait_success(rancher_ext_service)
validate_add_service_link(client, rancher_service,
rancher_ext_service)
validate_external_service(client, rancher_service,
[rancher_ext_service],
port, con_list)
delete_all(client, [env, rancher_env])
def test_rancher_compose_lbservice_host_routing(client,
rancher_cli_container):
port1 = "7906"
service_scale = 2
lb_scale = 1
service_count = 3
port_rules = []
port_rule = {"hostname": "www.abc1.com",
"path": "/service1.html",
"serviceId": 0,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc2.com",
"path": "/service2.html",
"serviceId": 0,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc1.com",
"path": "/service1.html",
"serviceId": 1,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc2.com",
"path": "/service2.html",
"serviceId": 1,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc1.com",
"path": "/name.html",
"serviceId": 2,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc2.com",
"path": "/name.html",
"serviceId": 2,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
env, services, lb_service = create_env_with_multiple_svc_and_lb(
client, service_scale, lb_scale, [port1], service_count, port_rules)
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_lb_service = get_rancher_compose_service(client, rancher_env.id,
lb_service)
client.wait_success(rancher_lb_service)
rancher_service = get_rancher_compose_service(
client, rancher_env.id, services[0])
client.wait_success(rancher_service)
rancher_service1 = get_rancher_compose_service(
client, rancher_env.id, services[1])
client.wait_success(rancher_service1)
rancher_service2 = get_rancher_compose_service(
client, rancher_env.id, services[2])
client.wait_success(rancher_service2)
validate_lb_service(client,
rancher_lb_service, port1,
[rancher_service, rancher_service1],
"www.abc1.com", "/service1.html")
validate_lb_service(client,
rancher_lb_service, port1,
[rancher_service, rancher_service1],
"www.abc2.com", "/service2.html")
validate_lb_service(client,
rancher_lb_service, port1, [rancher_service2],
"www.abc1.com", "/name.html")
validate_lb_service(client,
rancher_lb_service, port1, [rancher_service2],
"www.abc2.com", "/name.html")
validate_lb_service_for_no_access(client, rancher_lb_service, port1,
"www.abc1.com",
"/service2.html")
validate_lb_service_for_no_access(client, rancher_lb_service, port1,
"www.abc2.com",
"/service1.html")
delete_all(client, [env, rancher_env])
def test_rancher_compose_lbservice_multiple_port(client,
rancher_cli_container):
port1 = "7907"
port2 = "7908"
service_scale = 2
lb_scale = 1
service_count = 2
port_rules = []
port_rule = {"hostname": "www.abc1.com",
"path": "/service1.html",
"serviceId": 0,
"sourcePort": port1,
"targetPort": "80",
"protocol": "http"
}
port_rules.append(port_rule)
port_rule = {"hostname": "www.abc2.com",
"path": "/service3.html",
"serviceId": 1,
"sourcePort": port2,
"targetPort": "81",
"protocol": "http"
}
port_rules.append(port_rule)
env, services, lb_service = create_env_with_multiple_svc_and_lb(
client, service_scale, lb_scale, [port1, port2],
service_count, port_rules)
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_lb_service = get_rancher_compose_service(client, rancher_env.id,
lb_service)
client.wait_success(rancher_lb_service)
rancher_service = get_rancher_compose_service(
client, rancher_env.id, services[0])
client.wait_success(rancher_service)
rancher_service1 = get_rancher_compose_service(
client, rancher_env.id, services[1])
client.wait_success(rancher_service1)
validate_lb_service(client,
rancher_lb_service, port1, [rancher_service],
"www.abc1.com", "/service1.html")
validate_lb_service(client,
rancher_lb_service, port2, [rancher_service1],
"www.abc2.com", "/service3.html")
delete_all(client, [env, rancher_env])
def test_rancher_compose_external_services_hostname(client,
rancher_cli_container):
port = "7904"
service_scale = 1
env, service, ext_service, con_list = create_env_with_ext_svc(
client, service_scale, port, True)
service_link = {"serviceId": ext_service.id}
service.addservicelink(serviceLink=service_link)
# Launch env using docker compose
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
rancher_ext_service = get_rancher_compose_service(
client, rancher_env.id, ext_service)
client.wait_success(rancher_service)
client.wait_success(rancher_ext_service)
validate_add_service_link(client, rancher_service,
rancher_ext_service)
validate_external_service_for_hostname(client, rancher_service,
[rancher_ext_service], port)
delete_all(client, [env, rancher_env])
def rancher_compose_dns_services(client, port,
rancher_cli_container,
cross_linking=False):
service_scale = 1
consumed_service_scale = 2
env, service, consumed_service, consumed_service1, dns = \
create_env_with_2_svc_dns(
client, service_scale, consumed_service_scale, port, cross_linking)
service_link = {"serviceId": dns.id}
service.addservicelink(serviceLink=service_link)
service_link = {"serviceId": consumed_service.id}
dns.addservicelink(serviceLink=service_link)
service_link = {"serviceId": consumed_service1.id}
dns.addservicelink(serviceLink=service_link)
# Launch dns env using docker compose
launch_rancher_compose(client, env)
rancher_envs = client.list_stack(name=env.name+"rancher").data
assert len(rancher_envs) == 1
rancher_env = rancher_envs[0]
# Launch envs using docker compose
if cross_linking:
# Launch Consumed Service2
env_con = get_env(client, consumed_service)
env_con = env_con.activateservices()
env_con = client.wait_success(env_con, 120)
assert env_con.state == "active"
con_service1_id = env_con.id
# Launch Consumed Service1
env_con1 = get_env(client, consumed_service1)
env_con1 = env_con1.activateservices()
env_con1 = client.wait_success(env_con1, 120)
assert env_con1.state == "active"
con_service2_id = env_con1.id
else:
con_service1_id = rancher_env.id
con_service2_id = rancher_env.id
rancher_consumed_service = get_rancher_compose_service(
client, con_service1_id, consumed_service)
rancher_consumed_service1 = get_rancher_compose_service(
client, con_service2_id, consumed_service1)
rancher_dns = get_rancher_compose_service(
client, rancher_env.id, dns)
rancher_service = get_rancher_compose_service(
client, rancher_env.id, service)
client.wait_success(rancher_dns)
client.wait_success(rancher_consumed_service)
client.wait_success(rancher_consumed_service1)
client.wait_success(rancher_service)
validate_add_service_link(client, rancher_service,
rancher_dns)
validate_add_service_link(client, rancher_dns,
rancher_consumed_service)
validate_add_service_link(client, rancher_dns,
rancher_consumed_service1)
validate_dns_service(client, rancher_service,
[rancher_consumed_service, rancher_consumed_service1],
port, rancher_dns.name)
to_delete = [env, rancher_env]
if cross_linking:
to_delete.append(env_con)
to_delete.append(env_con1)
delete_all(client, to_delete)
def get_rancher_compose_service(client, rancher_env_id, service):
rancher_services = client.list_service(name=service.name,
stackId=rancher_env_id,
removed_null=True).data
assert len(rancher_services) == 1
rancher_service = rancher_services[0]
print(service.kind)
if service.kind != 'externalService' and service.kind != 'dnsService':
assert rancher_service.scale == service.scale
rancher_service = client.wait_success(rancher_service, 120)
return rancher_service
from fanstatic import Library, Resource, NeededResources
from fanstatic import compat, Inclusion, MINIFIED
from fanstatic import set_resource_file_existence_checking
from fanstatic.compiler import Compiler, Minifier
from .test_checksum import _copy_testdata
from zipfile import ZipFile
import fanstatic
import fanstatic.compiler
import os
import pytest
import subprocess
import sys
import time
class MockCompiler(fanstatic.compiler.Compiler):
name = 'mock'
source_extension = '.source'
available = True
def __init__(self):
self.calls = []
def __call__(self, resource, force=False):
self.calls.append(resource)
class MockMinifier(fanstatic.compiler.Minifier):
name = 'mock'
target_extension = '.min.js'
available = True
def __init__(self):
self.calls = []
def __call__(self, resource, force=False):
self.calls.append(resource)
class TestingRegistry(object):
def __init__(self, request):
self.request = request
def add_compiler(self, compiler):
return self._register_compiler(
fanstatic.CompilerRegistry, compiler)
def add_minifier(self, compiler):
return self._register_compiler(
fanstatic.MinifierRegistry, compiler)
def _register_compiler(self, registry, compiler):
self.request.addfinalizer(
lambda: registry.instance().pop(compiler.name))
registry.instance().add(compiler)
return compiler
def compiler(self, name):
return fanstatic.CompilerRegistry.instance()[name]
def minifier(self, name):
return fanstatic.MinifierRegistry.instance()[name]
@pytest.fixture
def compilers(request):
return TestingRegistry(request)
def test_logging_when_compiling(tmpdir, compilers, caplog):
class WhiteSpaceRemover(fanstatic.compiler.Compiler):
"""A silly minifier, to showcase logging."""
name = 'whitespace'
source_extension = '.frop'
def process(self, source, target):
with open(target, 'wb') as output:
output.write(compat.as_bytestring(
open(source, 'r').read().replace(' ', '')))
compilers.add_compiler(WhiteSpaceRemover())
lib = Library('lib', str(tmpdir))
tmpdir.join('a.frop').write(' foo bar baz ')
a = Resource(lib, 'a.js', compiler='whitespace')
assert len(caplog.records()) == 0
a.compile()
assert len(caplog.records()) == 1
assert "Compiling <Resource 'a.js' in library 'lib'> in" in caplog.text()
# The 'compiler' really worked!
assert tmpdir.join('a.js').read() == 'foobarbaz'
def test_compile_only_for_libraries_under_development(
compilers):
compilers.add_compiler(MockCompiler())
lib = Library('lib', '')
a = Resource(lib, 'a.js', compiler='mock')
needed = NeededResources(resources=[a])
incl = Inclusion(needed, compile=True)
assert len(compilers.compiler('mock').calls) == 1
# Gathering all resources again will add a call.
incl = Inclusion(needed, compile=True)
assert len(compilers.compiler('mock').calls) == 2
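# Setting a version marks the library as released (no longer under
# development), so further Inclusions should not trigger the compiler again.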
lib.version = 1
incl = Inclusion(needed, compile=True)
assert len(compilers.compiler('mock').calls) == 2
def test_setting_compile_False_should_not_call_compiler_and_minifier(
compilers):
compilers.add_compiler(MockCompiler())
compilers.add_minifier(MockMinifier())
lib = Library('lib', '')
a = Resource(lib, 'a.js', compiler='mock', minifier='mock')
needed = NeededResources(resources=[a])
incl = Inclusion(needed)
incl.render()
assert not compilers.compiler('mock').calls
assert not compilers.minifier('mock').calls
def test_setting_compile_True_should_call_compiler_and_minifier(
compilers):
compilers.add_compiler(MockCompiler())
compilers.add_minifier(MockMinifier())
lib = Library('lib', '')
a = Resource(lib, 'a.js', compiler='mock', minifier='mock')
needed = NeededResources(resources=[a])
incl = Inclusion(needed, compile=True)
incl.render()
mock_compiler = compilers.compiler('mock')
mock_minifier = compilers.minifier('mock')
assert len(mock_compiler.calls) == 1
assert mock_compiler.calls[0] == a
assert len(mock_minifier.calls) == 1
assert mock_minifier.calls[0] == a
def test_minified_mode_should_call_compiler_and_minifier_of_parent_resource(
compilers):
compilers.add_compiler(MockCompiler())
compilers.add_minifier(MockMinifier())
lib = Library('lib', '')
a = Resource(lib, 'a.js', compiler='mock', minifier='mock')
needed = NeededResources(resources=[a])
incl = Inclusion(needed, compile=True, mode=MINIFIED)
assert len(incl.resources) == 1
assert incl.resources[0].relpath == 'a.min.js'
assert incl.resources[0] != a
mock_compiler = compilers.compiler('mock')
mock_minifier = compilers.minifier('mock')
assert len(mock_compiler.calls) == 1
assert mock_compiler.calls[0] == a
assert len(mock_minifier.calls) == 1
assert mock_minifier.calls[0] == a
def test_minified_mode_relpath_respect_subdir(compilers):
compilers.add_compiler(MockCompiler())
compilers.add_minifier(MockMinifier())
lib = Library('lib', '')
a = Resource(lib, 'foo/bar/a.js', compiler='mock', minifier='mock')
needed = NeededResources(resources=[a])
incl = Inclusion(needed, compile=True, mode=MINIFIED)
assert len(incl.resources) == 1
assert incl.resources[0].relpath == 'foo/bar/a.min.js'
assert incl.resources[0] != a
def test_nothing_given_on_resource_uses_settings_from_library(compilers):
mock_compiler = MockCompiler()
compilers.add_compiler(mock_compiler)
mock_minifier = MockMinifier()
compilers.add_minifier(mock_minifier)
lib = Library(
'lib', '', compilers={'.js': 'mock'}, minifiers={'.js': 'mock'})
a = Resource(lib, 'a.js')
assert a.compiler is mock_compiler
assert a.minifier is mock_minifier
def test_settings_on_resource_override_settings_from_library(compilers):
compilers.add_compiler(MockCompiler())
other_compiler = MockCompiler()
other_compiler.name = 'other'
compilers.add_compiler(other_compiler)
compilers.add_minifier(MockMinifier())
lib = Library(
'lib', '', compilers={'.js': 'mock'}, minifiers={'.js': 'mock'})
a = Resource(lib, 'a.js', compiler='other', minifier=None)
assert a.compiler is other_compiler
assert isinstance(a.minifier, fanstatic.compiler.NullCompiler)
def test_compiler_target_is_full_resource_path():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js')
compiler = Compiler()
assert compiler.target_path(a) == '/foo/a.js'
def test_compiler_uses_source_if_given_on_resource():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js', source='a.source')
compiler = Compiler()
assert compiler.source_path(a) == '/foo/a.source'
def test_compiler_source_transforms_extension_if_no_source_given():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js')
compiler = Compiler()
compiler.source_extension = '.source'
assert compiler.source_path(a) == '/foo/a.source'
def test_minifier_source_is_full_resource_path():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js')
minifier = Minifier()
assert minifier.source_path(a) == '/foo/a.js'
def test_minifier_uses_minified_if_given_on_resource():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js', minified='a.min.js')
minifier = Minifier()
assert minifier.target_path(a) == '/foo/a.min.js'
def test_minifier_target_transforms_extension_if_no_name_given():
lib = Library('lib', '/foo')
a = Resource(lib, 'a.js')
minifier = Minifier()
minifier.target_extension = '.min.js'
assert minifier.target_path(a) == '/foo/a.min.js'
def test_should_process_if_target_does_not_exist(tmpdir):
assert Compiler().should_process(None, str(tmpdir / 'target'))
def test_should_process_if_target_is_older_than_source(tmpdir):
source = str(tmpdir / 'source')
open(source, 'w').close()
target = str(tmpdir / 'target')
open(target, 'w').close()
old = time.time() - 1
os.utime(target, (old, old))
assert Compiler().should_process(source, target)
def test_should_not_process_if_target_is_newer_than_source(tmpdir):
source = str(tmpdir / 'source')
open(source, 'w').close()
target = str(tmpdir / 'target')
open(target, 'w').close()
old = time.time() - 1
os.utime(source, (old, old))
assert not Compiler().should_process(source, target)
def test_compiler_available_and_source_not_present_should_raise(
tmpdir, compilers):
compilers.add_compiler(MockCompiler())
set_resource_file_existence_checking(True)
lib = Library('lib', str(tmpdir))
with pytest.raises(fanstatic.UnknownResourceError) as exc:
a = Resource(lib, 'a.js', compiler='mock')
assert 'a.source' in str(exc.value)
def test_compiler_not_available_and_source_not_present_should_raise(
tmpdir, compilers):
open(str(tmpdir / 'a.js'), 'w').close()
compiler = MockCompiler()
compiler.available = False
compilers.add_compiler(compiler)
set_resource_file_existence_checking(True)
lib = Library('lib', str(tmpdir))
# assert_nothing_raised
a = Resource(lib, 'a.js', compiler='mock')
def test_compiler_available_and_resource_file_not_present_should_not_raise(
tmpdir, compilers):
open(str(tmpdir / 'a.source'), 'w').close()
# since the compiler can be used to generate the resource file
compilers.add_compiler(MockCompiler())
set_resource_file_existence_checking(True)
lib = Library('lib', str(tmpdir))
# assert_nothing_raised
a = Resource(lib, 'a.js', compiler='mock')
def test_compiler_not_available_and_resource_file_not_present_should_raise(
tmpdir, compilers):
compiler = MockCompiler()
compiler.available = False
compilers.add_compiler(compiler)
set_resource_file_existence_checking(True)
lib = Library('lib', str(tmpdir))
with pytest.raises(fanstatic.UnknownResourceError) as exc:
a = Resource(lib, 'a.js', compiler='mock')
assert 'a.js' in str(exc.value)
def test_minifier_available_and_minified_file_not_present_should_not_raise(
tmpdir, compilers):
open(str(tmpdir / 'a.js'), 'w').close()
compilers.add_minifier(MockMinifier())
set_resource_file_existence_checking(True)
lib = Library('lib', str(tmpdir))
# assert_nothing_raised
a = Resource(lib, 'a.js', minifier='mock')
def test_minifier_available_and_minified_not_a_string_should_raise(compilers):
compilers.add_minifier(MockMinifier())
lib = Library('lib', '')
minified = Resource(lib, 'a.min.js')
with pytest.raises(fanstatic.ConfigurationError) as exc:
a = Resource(lib, 'a.js', minifier='mock', minified=minified)
def test_resource_name_conflict_raises_error(compilers):
compilers.add_minifier(MockMinifier())
lib = Library('lib', '', minifiers={'.js': 'mock'})
a = Resource(lib, 'a.js')
with pytest.raises(fanstatic.ConfigurationError) as exc:
Resource(lib, 'a.min.js')
assert str(exc.value) == 'Resource path a.min.js is already defined.'
def test_cli_compiler_is_not_available_if_command_not_found_on_path():
class Nonexistent(fanstatic.compiler.CommandlineBase):
command = 'does-not-exist'
assert not Nonexistent().available
def test_cli_compiler_is_available_if_command_found_on_path():
class Cat(fanstatic.compiler.CommandlineBase):
command = 'cat'
assert Cat().available
def test_cli_compiler_is_available_if_command_is_absolute_path():
class Cat(fanstatic.compiler.CommandlineBase):
command = '/bin/cat'
assert Cat().available
def test_converts_placeholders_to_arguments(tmpdir):
from fanstatic.compiler import SOURCE, TARGET
source = str(tmpdir / 'source')
with open(source, 'w') as f:
f.write('source')
target = str(tmpdir / 'target')
with open(target, 'w') as f:
f.write('target')
class Echo(fanstatic.compiler.CommandlineBase):
command = 'echo'
arguments = ['-n', SOURCE, TARGET]
def process(self, source, target):
p = super(Echo, self).process(source, target)
return p.stdout.read()
assert Echo().process(source, target) == compat.as_bytestring(
'%s %s' % (source, target))
def test_coffeescript_compiler(tmpdir):
compiler = fanstatic.CompilerRegistry.instance()['coffee']
if not compiler.available:
pytest.skip('`%s` not found on PATH' % compiler.command)
source = str(tmpdir / 'a.coffee')
target = str(tmpdir / 'a.js')
with open(source, 'w') as f:
f.write('square = (x) -> x * x')
compiler.process(source, target)
assert 'square = function(x) {' in open(target).read()
def test_less_compiler(tmpdir):
compiler = fanstatic.CompilerRegistry.instance()['less']
if not compiler.available:
pytest.skip('`%s` not found on PATH' % compiler.command)
source = str(tmpdir / 'a.less')
target = str(tmpdir / 'a.css')
with open(source, 'w') as f:
f.write('body { padding: (1 + 1)px; }')
compiler.process(source, target)
assert 'padding: 2 px;' in open(target).read()
def test_sass_compiler(tmpdir):
compiler = fanstatic.CompilerRegistry.instance()['sass']
if not compiler.available:
pytest.skip('`%s` not found on PATH' % compiler.command)
compiler.arguments = ['--no-cache'] + compiler.arguments
# from http://sass-lang.com/tutorial.html :
source = str(tmpdir / 'a.scss')
target = str(tmpdir / 'a.css')
with open(source, 'w') as f:
f.write('''\
#navbar {
li {
a { font-weight: bold; }
}
}''')
compiler.process(source, target)
assert '#navbar li a' in open(target).read()
def test_sass_resource(tmpdir):
compiler = fanstatic.CompilerRegistry.instance()['sass']
if not compiler.available:
pytest.skip('`%s` not found on PATH' % compiler.command)
lib = Library('lib', str(tmpdir), compilers={'.css': 'sass'})
a = Resource(lib, 'a.css')
tmpdir.join('a.scss').write('''\
#navbar {
li {
a { font-weight: bold; }
}
}''')
# Before compilation, the resource is not present.
assert not tmpdir.join('a.css').check()
needed = NeededResources(resources=[a])
incl = Inclusion(needed, compile=True)
incl.render()
# After compilation, the resource is present, and compiled using the sass
# compiler.
assert '#navbar li a' in tmpdir.join('a.css').read()
def test_package_compiler_is_not_available_if_package_not_importable():
class Nonexistent(fanstatic.compiler.PythonPackageBase):
package = 'does-not-exist'
assert not Nonexistent().available
def test_package_compiler_is_available_if_package_is_importable():
class Example(fanstatic.compiler.PythonPackageBase):
package = 'fanstatic'
assert Example().available
def test_cssmin_minifier(tmpdir):
compiler = fanstatic.MinifierRegistry.instance()['cssmin']
if not compiler.available:
pytest.skip('`%s` not found' % compiler.package)
source = str(tmpdir / 'a.scss')
target = str(tmpdir / 'a.css')
with open(source, 'w') as f:
f.write('body { padding: 2px; }')
compiler.process(source, target)
assert 'body{padding:2px}' == open(target).read()
def test_jsmin_minifier(tmpdir):
compiler = fanstatic.MinifierRegistry.instance()['jsmin']
if not compiler.available:
pytest.skip('`%s` not found' % compiler.package)
source = str(tmpdir / 'a.js')
target = str(tmpdir / 'a.min.js')
with open(source, 'w') as f:
f.write('function foo() { var bar = "baz"; };')
compiler.process(source, target)
assert 'function foo(){var bar="baz";};' == open(target).read()
def test_closure_minifier(tmpdir):
compiler = fanstatic.MinifierRegistry.instance()['closure']
if not compiler.available:
pytest.skip('`%s` not found' % compiler.package)
source = str(tmpdir / 'a.js')
target = str(tmpdir / 'a.min.js')
with open(source, 'w') as f:
f.write('function foo() { var bar = "baz"; };')
compiler.process(source, target)
assert 'function foo(){var bar="baz"};\n' == open(target).read()
def test_closure_minifier_communicate_exit_status(tmpdir):
compiler = fanstatic.MinifierRegistry.instance()['closure']
if not compiler.available:
pytest.skip('`%s` not found' % compiler.package)
from fanstatic.compiler import CompilerError
source = str(tmpdir / 'a.js')
target = str(tmpdir / 'a.min.js')
with pytest.raises(CompilerError) as exc:
compiler.process(source, target)
assert 'Cannot read:' in str(exc)
@pytest.fixture
def libraries(request):
def cleanup():
fanstatic.LibraryRegistry._instance = None
request.addfinalizer(cleanup)
def test_console_script_collects_resources_from_package(
monkeypatch, libraries):
mypackage = pytest.importorskip('mypackage')
lib = Library('other', '')
a = Resource(lib, 'a.js')
fanstatic.LibraryRegistry.instance().add(lib)
def log_compile(self, force=False):
calls.append((self, force))
calls = []
monkeypatch.setattr(Resource, 'compile', log_compile)
fanstatic.compiler._compile_resources('mypackage')
assert len(calls) == 1
assert calls[0] == (mypackage.style, True)
def test_custom_sdist_command_runs_compiler_beforehand(tmpdir, monkeypatch):
pkgdir = _copy_testdata(tmpdir)
monkeypatch.chdir(pkgdir)
p = subprocess.Popen(
[sys.executable, 'setup.py', 'sdist', '--formats', 'zip'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = p.communicate()
p.wait()
assert compat.as_bytestring(
'hard linking src/somepackage/resources/style.min.css') in stdout
dist = ZipFile(str(pkgdir / 'dist' / 'somepackage-1.0dev.zip'))
assert (
'somepackage-1.0dev/src/somepackage/resources/style.min.css'
in dist.namelist())
|
|
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Vincent Michel <vincent.michel@inria.fr>
# Gilles Louppe <g.louppe@gmail.com>
#
# License: BSD 3 clause
"""Recursive feature elimination for feature ranking"""
import numpy as np
from ..utils import check_X_y, safe_sqr
from ..utils.metaestimators import if_delegate_has_method
from ..base import BaseEstimator
from ..base import MetaEstimatorMixin
from ..base import clone
from ..base import is_classifier
from ..externals.joblib import Parallel, delayed
from ..model_selection import check_cv
from ..model_selection._validation import _safe_split, _score
from ..metrics.scorer import check_scoring
from .base import SelectorMixin
def _rfe_single_fit(rfe, estimator, X, y, train, test, scorer):
"""
Return the score for a fit across one fold.
"""
X_train, y_train = _safe_split(estimator, X, y, train)
X_test, y_test = _safe_split(estimator, X, y, test, train)
return rfe._fit(
X_train, y_train, lambda estimator, features:
_score(estimator, X_test[:, features], y_test, scorer)).scores_
class RFE(BaseEstimator, MetaEstimatorMixin, SelectorMixin):
"""Feature ranking with recursive feature elimination.
Given an external estimator that assigns weights to features (e.g., the
coefficients of a linear model), the goal of recursive feature elimination
(RFE) is to select features by recursively considering smaller and smaller
sets of features. First, the estimator is trained on the initial set of
features and weights are assigned to each one of them. Then, features whose
    absolute weights are the smallest are pruned from the current set of
    features. That procedure is recursively repeated on the pruned set until
    the desired number of features to select is eventually reached.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a `fit` method that updates a
`coef_` attribute that holds the fitted parameters. Important features
must correspond to high absolute values in the `coef_` array.
For instance, this is the case for most supervised learning
algorithms such as Support Vector Classifiers and Generalized
Linear Models from the `svm` and `linear_model` modules.
n_features_to_select : int or None (default=None)
The number of features to select. If `None`, half of the features
are selected.
step : int or float, optional (default=1)
If greater than or equal to 1, then `step` corresponds to the (integer)
number of features to remove at each iteration.
If within (0.0, 1.0), then `step` corresponds to the percentage
(rounded down) of features to remove at each iteration.
verbose : int, default=0
Controls verbosity of output.
Attributes
----------
n_features_ : int
The number of selected features.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that ``ranking_[i]`` corresponds to the
ranking position of the i-th feature. Selected (i.e., estimated
best) features are assigned rank 1.
estimator_ : object
The external estimator fit on the reduced dataset.
Examples
--------
    The following example shows how to retrieve the 5 truly informative
    features in the Friedman #1 dataset.
>>> from sklearn.datasets import make_friedman1
>>> from sklearn.feature_selection import RFE
>>> from sklearn.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFE(estimator, 5, step=1)
>>> selector = selector.fit(X, y)
>>> selector.support_ # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, True, True,
False, False, False, False, False], dtype=bool)
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, n_features_to_select=None, step=1,
verbose=0):
self.estimator = estimator
self.n_features_to_select = n_features_to_select
self.step = step
self.verbose = verbose
@property
def _estimator_type(self):
return self.estimator._estimator_type
def fit(self, X, y):
"""Fit the RFE model and then the underlying estimator on the selected
features.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
The training input samples.
y : array-like, shape = [n_samples]
The target values.
"""
return self._fit(X, y)
def _fit(self, X, y, step_score=None):
X, y = check_X_y(X, y, "csc")
# Initialization
n_features = X.shape[1]
if self.n_features_to_select is None:
n_features_to_select = n_features // 2
else:
n_features_to_select = self.n_features_to_select
if 0.0 < self.step < 1.0:
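            # Fractional step: remove that fraction of the original number of
            # features at each iteration (at least one); e.g. step=0.2 with
            # 10 features removes int(0.2 * 10) == 2 features per iteration.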
step = int(max(1, self.step * n_features))
else:
step = int(self.step)
if step <= 0:
raise ValueError("Step must be >0")
        support_ = np.ones(n_features, dtype=bool)
        ranking_ = np.ones(n_features, dtype=int)
if step_score:
self.scores_ = []
# Elimination
while np.sum(support_) > n_features_to_select:
# Remaining features
features = np.arange(n_features)[support_]
# Rank the remaining features
estimator = clone(self.estimator)
if self.verbose > 0:
print("Fitting estimator with %d features." % np.sum(support_))
estimator.fit(X[:, features], y)
# Get coefs
if hasattr(estimator, 'coef_'):
coefs = estimator.coef_
else:
coefs = getattr(estimator, 'feature_importances_', None)
if coefs is None:
raise RuntimeError('The classifier does not expose '
'"coef_" or "feature_importances_" '
'attributes')
# Get ranks
if coefs.ndim > 1:
ranks = np.argsort(safe_sqr(coefs).sum(axis=0))
else:
ranks = np.argsort(safe_sqr(coefs))
            # For the sparse case, ranks is a matrix; flatten it to a 1-D array
ranks = np.ravel(ranks)
            # Eliminate the worst features
threshold = min(step, np.sum(support_) - n_features_to_select)
# Compute step score on the previous selection iteration
# because 'estimator' must use features
# that have not been eliminated yet
if step_score:
self.scores_.append(step_score(estimator, features))
support_[features[ranks][:threshold]] = False
ranking_[np.logical_not(support_)] += 1
# Set final attributes
features = np.arange(n_features)[support_]
self.estimator_ = clone(self.estimator)
self.estimator_.fit(X[:, features], y)
# Compute step score when only n_features_to_select features left
if step_score:
self.scores_.append(step_score(self.estimator_, features))
self.n_features_ = support_.sum()
self.support_ = support_
self.ranking_ = ranking_
return self
@if_delegate_has_method(delegate='estimator')
def predict(self, X):
"""Reduce X to the selected features and then predict using the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
Returns
-------
y : array of shape [n_samples]
The predicted target values.
"""
return self.estimator_.predict(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def score(self, X, y):
"""Reduce X to the selected features and then return the score of the
underlying estimator.
Parameters
----------
X : array of shape [n_samples, n_features]
The input samples.
y : array of shape [n_samples]
The target values.
"""
return self.estimator_.score(self.transform(X), y)
def _get_support_mask(self):
return self.support_
@if_delegate_has_method(delegate='estimator')
def decision_function(self, X):
return self.estimator_.decision_function(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_proba(self, X):
return self.estimator_.predict_proba(self.transform(X))
@if_delegate_has_method(delegate='estimator')
def predict_log_proba(self, X):
return self.estimator_.predict_log_proba(self.transform(X))
class RFECV(RFE, MetaEstimatorMixin):
"""Feature ranking with recursive feature elimination and cross-validated
selection of the best number of features.
Read more in the :ref:`User Guide <rfe>`.
Parameters
----------
estimator : object
A supervised learning estimator with a `fit` method that updates a
`coef_` attribute that holds the fitted parameters. Important features
must correspond to high absolute values in the `coef_` array.
For instance, this is the case for most supervised learning
algorithms such as Support Vector Classifiers and Generalized
Linear Models from the `svm` and `linear_model` modules.
step : int or float, optional (default=1)
If greater than or equal to 1, then `step` corresponds to the (integer)
number of features to remove at each iteration.
If within (0.0, 1.0), then `step` corresponds to the percentage
(rounded down) of features to remove at each iteration.
cv : int, cross-validation generator or an iterable, optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
        For integer/None inputs, if the estimator is a classifier and ``y``
        is either binary or multiclass,
        :class:`sklearn.model_selection.StratifiedKFold` is used. In all
        other cases, :class:`sklearn.model_selection.KFold` is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
scoring : string, callable or None, optional, default: None
A string (see model evaluation documentation) or
a scorer callable object / function with signature
``scorer(estimator, X, y)``.
verbose : int, default=0
Controls verbosity of output.
n_jobs : int, default 1
Number of cores to run in parallel while fitting across folds.
Defaults to 1 core. If `n_jobs=-1`, then number of jobs is set
to number of cores.
Attributes
----------
n_features_ : int
The number of selected features with cross-validation.
support_ : array of shape [n_features]
The mask of selected features.
ranking_ : array of shape [n_features]
The feature ranking, such that `ranking_[i]`
corresponds to the ranking
position of the i-th feature.
Selected (i.e., estimated best)
features are assigned rank 1.
grid_scores_ : array of shape [n_subsets_of_features]
The cross-validation scores such that
``grid_scores_[i]`` corresponds to
the CV score of the i-th subset of features.
estimator_ : object
The external estimator fit on the reduced dataset.
Notes
-----
The size of ``grid_scores_`` is equal to ceil((n_features - 1) / step) + 1,
where step is the number of features removed at each iteration.
Examples
--------
    The following example shows how to retrieve the 5 informative features
    (not known a priori) in the Friedman #1 dataset.
>>> from sklearn.datasets import make_friedman1
>>> from sklearn.feature_selection import RFECV
>>> from sklearn.svm import SVR
>>> X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
>>> estimator = SVR(kernel="linear")
>>> selector = RFECV(estimator, step=1, cv=5)
>>> selector = selector.fit(X, y)
>>> selector.support_ # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, True, True,
False, False, False, False, False], dtype=bool)
>>> selector.ranking_
array([1, 1, 1, 1, 1, 6, 4, 3, 2, 5])
References
----------
.. [1] Guyon, I., Weston, J., Barnhill, S., & Vapnik, V., "Gene selection
for cancer classification using support vector machines",
Mach. Learn., 46(1-3), 389--422, 2002.
"""
def __init__(self, estimator, step=1, cv=None, scoring=None, verbose=0,
n_jobs=1):
self.estimator = estimator
self.step = step
self.cv = cv
self.scoring = scoring
self.verbose = verbose
self.n_jobs = n_jobs
def fit(self, X, y):
"""Fit the RFE model and automatically tune the number of selected
features.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vector, where `n_samples` is the number of samples and
`n_features` is the total number of features.
y : array-like, shape = [n_samples]
Target values (integers for classification, real numbers for
regression).
"""
X, y = check_X_y(X, y, "csr")
# Initialization
cv = check_cv(self.cv, y, is_classifier(self.estimator))
scorer = check_scoring(self.estimator, scoring=self.scoring)
n_features = X.shape[1]
n_features_to_select = 1
rfe = RFE(estimator=self.estimator,
n_features_to_select=n_features_to_select,
step=self.step, verbose=self.verbose - 1)
# Determine the number of subsets of features by fitting across
# the train folds and choosing the "features_to_select" parameter
# that gives the least averaged error across all folds.
# Note that joblib raises a non-picklable error for bound methods
# even if n_jobs is set to 1 with the default multiprocessing
# backend.
        # This branching is done to make sure that user code that sets
        # n_jobs to 1 and provides bound methods as scorers is not broken
        # with the addition of the n_jobs parameter in version 0.18.
if self.n_jobs == 1:
parallel, func = list, _rfe_single_fit
else:
            parallel = Parallel(n_jobs=self.n_jobs)
            func = delayed(_rfe_single_fit)
scores = parallel(
func(rfe, self.estimator, X, y, train, test, scorer)
for train, test in cv.split(X, y))
scores = np.sum(scores, axis=0)
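        # scores[k] is the score, summed over the CV folds, obtained with
        # max(n_features - k * step, 1) features remaining; pick the best k
        # and map it back to a number of features (never below one).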
n_features_to_select = max(
n_features - (np.argmax(scores) * self.step),
n_features_to_select)
# Re-execute an elimination with best_k over the whole set
rfe = RFE(estimator=self.estimator,
n_features_to_select=n_features_to_select, step=self.step)
rfe.fit(X, y)
# Set final attributes
self.support_ = rfe.support_
self.n_features_ = rfe.n_features_
self.ranking_ = rfe.ranking_
self.estimator_ = clone(self.estimator)
self.estimator_.fit(self.transform(X), y)
        # Reverse the scores (they were collected from the full feature set
        # down to the smallest subset) and average them over the CV splits.
self.grid_scores_ = scores[::-1] / cv.get_n_splits(X, y)
return self
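# A minimal usage sketch (not part of the library code above), illustrating
# the Notes section of RFECV: with ``step=1`` and 10 features, ``grid_scores_``
# holds ceil((10 - 1) / 1) + 1 == 10 cross-validated scores, one per candidate
# feature-subset size, and ``n_features_`` is the subset size that scored
# best. Only the public scikit-learn API shown in the docstring examples is
# assumed; the helper name below is purely illustrative.
def _rfecv_grid_scores_example():
    from sklearn.datasets import make_friedman1
    from sklearn.svm import SVR
    X, y = make_friedman1(n_samples=50, n_features=10, random_state=0)
    selector = RFECV(SVR(kernel="linear"), step=1, cv=5).fit(X, y)
    # One score per candidate number of features: ceil((10 - 1) / 1) + 1 == 10
    assert len(selector.grid_scores_) == 10
    return selector.n_features_, selector.grid_scores_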
|
|
# -*- coding: utf-8 -*-
"""
Person Registry, Controllers
@see: U{http://eden.sahanafoundation.org/wiki/BluePrintVITA}
"""
module = request.controller
resourcename = request.function
# -----------------------------------------------------------------------------
# Options Menu (available in all Functions' Views)
def s3_menu_postp():
# @todo: rewrite this for new framework
menu_selected = []
group_id = s3mgr.get_session("pr", "group")
if group_id:
group = s3db.pr_group
query = (group.id == group_id)
record = db(query).select(group.id, group.name, limitby=(0, 1)).first()
if record:
name = record.name
menu_selected.append(["%s: %s" % (T("Group"), name), False,
URL(f="group",
args=[record.id])])
person_id = s3mgr.get_session("pr", "person")
if person_id:
person = s3db.pr_person
query = (person.id == person_id)
record = db(query).select(person.id, limitby=(0, 1)).first()
if record:
person_represent = s3db.pr_person_represent
name = person_represent(record.id)
menu_selected.append(["%s: %s" % (T("Person"), name), False,
URL(f="person",
args=[record.id])])
if menu_selected:
menu_selected = [T("Open recent"), True, None, menu_selected]
response.menu_options.append(menu_selected)
# -----------------------------------------------------------------------------
def index():
""" Module's Home Page """
try:
module_name = deployment_settings.modules[module].name_nice
except:
module_name = T("Person Registry")
# Load Model
s3db.table("pr_address")
def prep(r):
if r.representation == "html":
if not r.id and not r.method:
r.method = "search"
else:
redirect(URL(f="person", args=request.args))
return True
s3.prep = prep
def postp(r, output):
if isinstance(output, dict):
# Add information for Dashboard
pr_gender_opts = s3db.pr_gender_opts
pr_age_group_opts = s3db.pr_age_group_opts
table = db.pr_person
gender = []
for g_opt in pr_gender_opts:
query = (table.deleted == False) & \
(table.gender == g_opt)
count = db(query).count()
gender.append([str(pr_gender_opts[g_opt]), int(count)])
age = []
for a_opt in pr_age_group_opts:
query = (table.deleted == False) & \
(table.age_group == a_opt)
count = db(query).count()
age.append([str(pr_age_group_opts[a_opt]), int(count)])
total = int(db(table.deleted == False).count())
output.update(module_name=module_name,
gender=json.dumps(gender),
age=json.dumps(age),
total=total)
if r.interactive:
if not r.component:
label = READ
else:
label = UPDATE
linkto = r.resource.crud._linkto(r)("[id]")
s3.actions = [
dict(label=str(label), _class="action-btn", url=str(linkto))
]
r.next = None
return output
s3.postp = postp
output = s3_rest_controller("pr", "person")
response.view = "pr/index.html"
response.title = module_name
return output
# -----------------------------------------------------------------------------
def person():
""" RESTful CRUD controller """
# Enable this to allow migration of users between instances
#s3.filter = (s3db.pr_person.pe_id == s3db.pr_person_user.pe_id) & \
#(s3db.auth_user.id == s3db.pr_person_user.user_id) & \
#(s3db.auth_user.registration_key != "disabled")
# Organisation Dependent Fields
set_org_dependent_field = deployment_settings.set_org_dependent_field
person_details_table = s3db.pr_person_details
set_org_dependent_field(person_details_table.father_name)
set_org_dependent_field(person_details_table.mother_name)
set_org_dependent_field(person_details_table.affiliations)
set_org_dependent_field(person_details_table.company)
# Custom Method for Contacts
s3db.set_method(module, resourcename,
method="contacts",
action=s3db.pr_contacts)
def prep(r):
if r.representation == "json" and \
not r.component and session.s3.filter_staff:
person_ids = session.s3.filter_staff
session.s3.filter_staff = None
r.resource.add_filter = (~(db.pr_person.id.belongs(person_ids)))
elif r.interactive:
if r.representation == "popup":
# Hide "pe_label" and "missing" fields in person popups
r.table.pe_label.readable = False
r.table.pe_label.writable = False
r.table.missing.readable = False
r.table.missing.writable = False
if r.component_name == "config":
_config = s3db.gis_config
s3db.gis_config_form_setup()
# Name will be generated from person's name.
_config.name.readable = _config.name.writable = False
# Hide Location
_config.region_location_id.readable = _config.region_location_id.writable = False
elif r.component_name == "competency":
ctable = s3db.hrm_competency
ctable.organisation_id.writable = False
ctable.skill_id.comment = None
elif r.component_name == "saved_search":
if r.method == "load":
if r.component_id:
table = db.pr_saved_search
record = db(table.id == r.component_id).select(table.url,
limitby=(0, 1)
).first()
if record:
redirect(record.url)
else:
raise HTTP(404)
elif r.id:
r.table.volunteer.readable = True
r.table.volunteer.writable = True
return True
s3.prep = prep
def postp(r, output):
if r.component_name == "saved_search":
s3_action_buttons(r)
s3.actions.append(
dict(url=URL(args=r.args + ["[id]", "load"]),
label=str(T("Load")),
_class="action-btn")
)
return output
s3.postp = postp
s3db.configure("pr_group_membership",
list_fields=["id",
"group_id",
"group_head",
"description"
])
# Basic tabs
tabs = [(T("Basic Details"), None),
(T("Address"), "address"),
#(T("Contacts"), "contact"),
(T("Contact Details"), "contacts"),
(T("Images"), "image"),
(T("Identity"), "identity"),
(T("Education"), "education"),
(T("Groups"), "group_membership"),
(T("Journal"), "note"),
(T("Skills"), "competency"),
(T("Training"), "training"),
(T("Saved Searches"), "saved_search"),
]
# Configuration tabs
tabs.append((T("Map Settings"), "config"))
s3db.configure("pr_person", listadd=False, insertable=True)
output = s3_rest_controller(main="first_name",
extra="last_name",
rheader=lambda r: \
s3db.pr_rheader(r, tabs=tabs))
return output
# -----------------------------------------------------------------------------
def address():
"""
RESTful controller to allow creating/editing of address records within
contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id and controller:
s3db.configure("pr_address",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_address.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def contact():
"""
RESTful controller to allow creating/editing of contact records within
contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id:
s3db.configure("pr_contact",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_contact.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def contact_emergency():
"""
RESTful controller to allow creating/editing of emergency contact
records within contacts()
"""
# CRUD pre-process
def prep(r):
controller = request.get_vars.get("controller", "pr")
person_id = request.get_vars.get("person", None)
if person_id:
s3db.configure("pr_contact_emergency",
create_next=URL(c=controller,
f="person",
args=[person_id, "contacts"]),
update_next=URL(c=controller,
f="person",
args=[person_id, "contacts"])
)
if r.method == "create":
table = s3db.pr_person
query = (table.id == person_id)
pe_id = db(query).select(table.pe_id,
limitby=(0, 1)).first().pe_id
s3db.pr_contact_emergency.pe_id.default = pe_id
return True
s3.prep = prep
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def person_search():
"""
Person REST controller
- limited to just search.json for use in Autocompletes
- allows differential access permissions
"""
s3.prep = lambda r: r.representation == "json" and \
r.method == "search"
return s3_rest_controller(module, "person")
# -----------------------------------------------------------------------------
def group():
""" RESTful CRUD controller """
tablename = "pr_group"
table = s3db[tablename]
s3.filter = (table.system == False) # do not show system groups
s3db.configure("pr_group_membership",
list_fields=["id",
"person_id",
"group_head",
"description"
])
rheader = lambda r: s3db.pr_rheader(r, tabs = [(T("Group Details"), None),
(T("Address"), "address"),
(T("Contact Data"), "contact"),
(T("Members"), "group_membership")
])
output = s3_rest_controller(rheader=rheader)
return output
# -----------------------------------------------------------------------------
def image():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def education():
""" RESTful CRUD controller """
tablename = "pr_education"
table = s3db[tablename]
return s3_rest_controller("pr", "education")
# -----------------------------------------------------------------------------
#def contact():
# """ RESTful CRUD controller """
#
# table = s3db.pr_contact
#
# table.pe_id.label = T("Person/Group")
# table.pe_id.readable = True
# table.pe_id.writable = True
#
# return s3_rest_controller()
# -----------------------------------------------------------------------------
def presence():
"""
RESTful CRUD controller
- needed for Map Popups (no Menu entry for direct access)
@deprecated - People now use Base Location pr_person.location_id
"""
table = s3db.pr_presence
# Settings suitable for use in Map Popups
table.pe_id.readable = True
table.pe_id.label = "Name"
table.pe_id.represent = s3db.pr_person_represent
table.observer.readable = False
table.presence_condition.readable = False
# @ToDo: Add Skills
return s3_rest_controller()
# -----------------------------------------------------------------------------
def pentity():
"""
RESTful CRUD controller
- limited to just search.json for use in Autocompletes
"""
s3.prep = lambda r: r.representation in ("s3json", "json", "xml")
return s3_rest_controller()
# -----------------------------------------------------------------------------
def affiliation():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def role():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def tooltip():
""" Ajax tooltips """
if "formfield" in request.vars:
response.view = "pr/ajaxtips/%s.html" % request.vars.formfield
return dict()
# -----------------------------------------------------------------------------
def saved_search():
"""
REST controller for saving and loading saved searches
"""
return s3_rest_controller()
# END =========================================================================
|
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pytest
from mock import patch
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.k8s.k8s_schemas import V1Container
from polyaxon.polyaxonfile import check_polyaxonfile
from polyaxon.polyaxonfile.check import collect_dag_components
from polyaxon.polyaxonfile.specs import (
CompiledOperationSpecification,
OperationSpecification,
)
from polyaxon.polyflow import V1IO, V1CompiledOperation, V1Component, V1Job, V1RunKind
from polyaxon.polyflow.early_stopping import (
V1FailureEarlyStopping,
V1MetricEarlyStopping,
)
from polyaxon.polyflow.matrix import V1GridSearch, V1Hyperband, V1RandomSearch
from polyaxon.polyflow.matrix.params import V1HpChoice, V1HpLinSpace
from polyaxon.polyflow.run import V1Dag
from polyaxon.utils.test_utils import BaseTestCase
@pytest.mark.polyaxonfile_mark
class TestPolyaxonfileWithPipelines(BaseTestCase):
def test_pipeline_with_no_ops_raises(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/pipeline_with_no_ops.yml"),
{"kind": "compiled_operation"},
]
)
with self.assertRaises(PolyaxonSchemaError):
CompiledOperationSpecification.apply_operation_contexts(run_config)
def test_pipeline_with_no_components_raises(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/pipeline_with_no_components.yml"
),
{"kind": "compiled_operation"},
]
)
with self.assertRaises(PolyaxonSchemaError):
CompiledOperationSpecification.apply_operation_contexts(run_config)
def test_pipeline_ops_not_corresponding_to_components(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/pipeline_ops_not_corresponding_to_components.yml"
),
{"kind": "compiled_operation"},
]
)
with self.assertRaises(PolyaxonSchemaError):
CompiledOperationSpecification.apply_operation_contexts(run_config)
def test_cyclic_pipeline_raises(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/cyclic_pipeline.yml"),
{"kind": "compiled_operation"},
]
)
assert run_config.is_dag_run is True
assert run_config.has_pipeline is True
with self.assertRaises(PolyaxonSchemaError):
CompiledOperationSpecification.apply_operation_contexts(run_config)
def test_cron_pipeline(self):
plx_file = check_polyaxonfile(
polyaxonfile=os.path.abspath(
"tests/fixtures/pipelines/simple_cron_pipeline.yml"
),
is_cli=False,
to_op=False,
)
# Get compiled_operation data
run_config = OperationSpecification.compile_operation(plx_file)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.run is not None
assert len(run_config.run.operations) == 1
assert run_config.run.operations[0].name == "cron-task"
assert run_config.schedule is not None
assert run_config.schedule.kind == "cron"
assert run_config.schedule.cron == "0 0 * * *"
def test_refs_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/ref_pipeline.yml"),
{"kind": "compiled_operation"},
]
)
with patch("polyaxon.config_reader.spec.ConfigSpec.read") as config_read:
config_read.return_value = V1Component(
kind="component",
version=" 1.1",
inputs=[V1IO(name="str-input", type="str")],
run=V1Job(container=V1Container(name="test")),
).to_dict()
collect_dag_components(run_config.run)
compiled_op = CompiledOperationSpecification.apply_operation_contexts(
run_config
)
assert compiled_op.run is not None
assert len(compiled_op.run.operations) == 3
assert compiled_op.run.operations[0].name == "ref-path-op"
assert compiled_op.run.operations[1].name == "ref-url-op"
assert compiled_op.run.operations[2].name == "ref-hub-op"
def test_interval_pipeline(self):
plx_file = check_polyaxonfile(
polyaxonfile=os.path.abspath(
"tests/fixtures/pipelines/simple_recurrent_pipeline.yml"
),
is_cli=False,
to_op=False,
)
# Get compiled_operation data
run_config = OperationSpecification.compile_operation(plx_file)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.run is not None
assert len(run_config.run.operations) == 1
assert run_config.run.operations[0].name == "recurrent-task"
assert run_config.schedule is not None
assert run_config.schedule.kind == "interval"
assert run_config.schedule.start_at.year == 2019
assert run_config.schedule.frequency.seconds == 120
assert run_config.schedule.depends_on_past is True
assert run_config.schedule is not None
def test_sequential_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/simple_sequential_pipeline.yml"
),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.run is not None
assert len(run_config.run.operations) == 4
assert run_config.run.operations[0].name == "job1"
assert run_config.run.operations[1].name == "job2"
assert run_config.run.operations[1].dependencies == ["job1"]
assert run_config.run.operations[2].name == "experiment1"
assert run_config.run.operations[2].dependencies == ["job2"]
assert run_config.run.operations[3].name == "experiment2"
assert run_config.run.operations[3].dependencies == ["experiment1"]
dag_strategy = run_config.run
assert dag_strategy.sort_topologically(dag_strategy.dag) == [
["job1"],
["job2"],
["experiment1"],
["experiment2"],
]
assert run_config.schedule is None
def test_parallel_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/simple_parallel_pipeline.yml"
),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert len(run_config.run.operations) == 4
assert run_config.run.operations[0].name == "job1"
assert run_config.run.operations[0].dependencies is None
assert run_config.run.operations[1].name == "job2"
assert run_config.run.operations[1].dependencies is None
assert run_config.run.operations[2].name == "experiment1"
assert run_config.run.operations[2].dependencies is None
assert run_config.run.operations[3].name == "experiment2"
assert run_config.run.operations[3].dependencies is None
dag_strategy = run_config.run
assert set(dag_strategy.sort_topologically(dag_strategy.dag)[0]) == {
"job1",
"job2",
"experiment1",
"experiment2",
}
assert run_config.run.concurrency == 2
assert run_config.schedule is None
def test_joins_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/simple_joins_pipeline.yml"),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert len(run_config.run.operations) == 5
assert run_config.run.operations[0].name == "job1"
assert run_config.run.operations[0].dependencies is None
assert run_config.run.operations[1].name == "job2"
assert run_config.run.operations[1].dependencies is None
assert run_config.run.operations[2].name == "experiment1"
assert run_config.run.operations[2].dependencies is None
assert run_config.run.operations[3].name == "experiment2"
assert run_config.run.operations[3].dependencies is None
assert run_config.run.operations[4].name == "reduce"
assert run_config.run.operations[4].dependencies is None
dag_strategy = run_config.run
assert set(dag_strategy.sort_topologically(dag_strategy.dag)[0]) == {
"job1",
"job2",
"experiment1",
"experiment2",
"reduce",
}
assert run_config.run.concurrency == 2
assert run_config.schedule is None
def test_dag_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/simple_dag_pipeline.yml"),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert len(run_config.run.operations) == 5
assert run_config.run.operations[0].name == "job1"
assert run_config.run.operations[1].name == "experiment1"
assert run_config.run.operations[1].dependencies == ["job1"]
assert run_config.run.operations[2].name == "experiment2"
assert run_config.run.operations[2].dependencies == ["job1"]
assert run_config.run.operations[3].name == "experiment3"
assert run_config.run.operations[3].dependencies == ["job1"]
assert run_config.run.operations[4].name == "job2"
assert run_config.run.operations[4].dependencies == [
"experiment1",
"experiment2",
"experiment3",
]
dag_strategy = run_config.run
sorted_dag = dag_strategy.sort_topologically(dag_strategy.dag)
assert sorted_dag[0] == ["job1"]
assert set(sorted_dag[1]) == {"experiment1", "experiment2", "experiment3"}
assert sorted_dag[2] == ["job2"]
assert run_config.run.concurrency == 3
assert run_config.schedule is None
def test_dag_pipeline_with_builds(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/simple_dag_pipeline_with_builds.yml"
),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.build.hub_ref == "kaniko"
assert run_config.build.connection == "docker-connection"
assert len(run_config.build.params) == 1
assert len(run_config.build.run_patch["init"]) == 2
assert len(run_config.run.components) == 2
assert run_config.run.components[0].build.hub_ref == "kaniko"
assert run_config.run.components[0].build.connection == "docker-connection2"
assert len(run_config.run.components[0].build.params) == 1
assert len(run_config.run.components[0].build.run_patch["init"]) == 1
assert len(run_config.run.operations) == 5
assert run_config.run.operations[0].name == "job1"
assert run_config.run.operations[1].name == "experiment1"
assert run_config.run.operations[1].dependencies == ["job1"]
assert run_config.run.operations[2].name == "experiment2"
assert run_config.run.operations[2].dependencies == ["job1"]
assert run_config.run.operations[3].name == "experiment3"
assert run_config.run.operations[3].dependencies == ["job1"]
assert run_config.run.operations[4].name == "job2"
assert run_config.run.operations[4].dependencies == [
"experiment1",
"experiment2",
"experiment3",
]
dag_strategy = run_config.run
sorted_dag = dag_strategy.sort_topologically(dag_strategy.dag)
assert sorted_dag[0] == ["job1"]
assert set(sorted_dag[1]) == {"experiment1", "experiment2", "experiment3"}
assert sorted_dag[2] == ["job2"]
assert run_config.run.concurrency == 3
assert run_config.schedule is None
def test_build_run_pipeline(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/build_run_pipeline.yml"),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert len(run_config.run.operations) == 2
assert run_config.run.operations[0].name == "build"
assert run_config.run.operations[1].name == "run"
assert run_config.is_dag_run is True
assert run_config.has_pipeline is True
assert run_config.schedule is None
assert len(run_config.run.components) == 2
assert run_config.run.components[0].name == "experiment-template"
assert run_config.run.components[0].termination.to_dict() == {"maxRetries": 2}
assert run_config.run.components[0].run.to_dict() == {
"kind": V1RunKind.JOB,
"environment": {
"nodeSelector": {"polyaxon": "experiments"},
"serviceAccountName": "service",
"imagePullSecrets": ["secret1", "secret2"],
},
"container": {
"image": "{{ image }}",
"command": ["python3", "main.py"],
"args": "--lr={{ lr }}",
"name": "polyaxon-main",
"resources": {"requests": {"cpu": 1}},
},
}
assert run_config.run.components[1].name == "build-template"
assert run_config.run.components[1].run.container.image == "base"
assert run_config.run.operations[0].name == "build"
        # Create an op spec
run_config.run.set_op_component("run")
assert run_config.run.operations[1].has_component_reference is True
job_config = run_config.run.get_op_spec_by_index(1)
assert {p: job_config.params[p].to_dict() for p in job_config.params} == {
"image": {"value": "outputs.docker-image", "ref": "ops.build"},
"lr": {"value": 0.001},
}
run_config = OperationSpecification.compile_operation(job_config)
run_config.apply_params({"image": {"value": "foo"}, "lr": {"value": 0.001}})
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
run_config = CompiledOperationSpecification.apply_runtime_contexts(run_config)
assert run_config.termination.to_dict() == {"maxRetries": 2}
assert run_config.run.to_dict() == {
"kind": V1RunKind.JOB,
"environment": {
"nodeSelector": {"polyaxon": "experiments"},
"serviceAccountName": "service",
"imagePullSecrets": ["secret1", "secret2"],
},
"container": {
"image": "foo",
"command": ["python3", "main.py"],
"args": "--lr=0.001",
"name": "polyaxon-main",
"resources": {"requests": {"cpu": 1}},
},
}
def test_matrix_early_stopping_file_passes(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/matrix_file_early_stopping.yml"
),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.run is not None
assert run_config.is_dag_run is True
assert run_config.has_pipeline is True
assert run_config.schedule is None
assert run_config.run.concurrency == 4
assert isinstance(run_config.run, V1Dag)
assert run_config.run.early_stopping[0].kind == "failure_early_stopping"
assert isinstance(run_config.run.early_stopping[0], V1FailureEarlyStopping)
assert len(run_config.run.early_stopping) == 1
assert run_config.run.kind == V1Dag.IDENTIFIER
assert len(run_config.run.operations) == 2
assert len(run_config.run.components) == 1
template_random = run_config.run.operations[1].matrix
assert isinstance(template_random, V1RandomSearch)
assert isinstance(template_random.params["lr"], V1HpLinSpace)
assert isinstance(template_random.params["loss"], V1HpChoice)
assert template_random.params["lr"].to_dict() == {
"kind": "linspace",
"value": {"start": 0.01, "stop": 0.1, "num": 5},
}
assert template_random.params["loss"].to_dict() == {
"kind": "choice",
"value": ["MeanSquaredError", "AbsoluteDifference"],
}
assert template_random.concurrency == 2
assert template_random.num_runs == 300
assert template_random.early_stopping[0].kind == "metric_early_stopping"
assert len(template_random.early_stopping) == 1
assert isinstance(template_random.early_stopping[0], V1MetricEarlyStopping)
def test_matrix_file_passes(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath("tests/fixtures/pipelines/matrix_file.yml"),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.version == 1.1
assert run_config.is_dag_run is True
assert run_config.has_pipeline is True
assert run_config.schedule is None
assert run_config.run.concurrency == 4
assert isinstance(run_config.run, V1Dag)
assert run_config.run.early_stopping is None
assert run_config.run.kind == V1Dag.IDENTIFIER
assert len(run_config.run.operations) == 2
assert len(run_config.run.components) == 1
template_hyperband = run_config.run.operations[1].matrix
assert isinstance(template_hyperband.params["lr"], V1HpLinSpace)
assert isinstance(template_hyperband.params["loss"], V1HpChoice)
assert template_hyperband.params["lr"].to_dict() == {
"kind": "linspace",
"value": {"start": 0.01, "stop": 0.1, "num": 5},
}
assert template_hyperband.params["loss"].to_dict() == {
"kind": "choice",
"value": ["MeanSquaredError", "AbsoluteDifference"],
}
assert template_hyperband.params["normal_rate"].to_dict() == {
"kind": "normal",
"value": {"loc": 0, "scale": 0.9},
}
assert template_hyperband.params["dropout"].to_dict() == {
"kind": "qloguniform",
"value": {"high": 0.8, "low": 0, "q": 0.1},
}
assert template_hyperband.params["activation"].to_dict() == {
"kind": "pchoice",
"value": [["relu", 0.1], ["sigmoid", 0.8]],
}
assert template_hyperband.params["model"].to_dict() == {
"kind": "choice",
"value": ["CDNA", "DNA", "STP"],
}
assert template_hyperband.concurrency == 2
assert isinstance(template_hyperband, V1Hyperband)
def test_matrix_file_passes_int_float_types(self):
run_config = V1CompiledOperation.read(
[
os.path.abspath(
"tests/fixtures/pipelines/matrix_file_with_int_float_types.yml"
),
{"kind": "compiled_operation"},
]
)
run_config = CompiledOperationSpecification.apply_operation_contexts(run_config)
assert run_config.version == 1.1
assert run_config.is_dag_run is True
assert run_config.has_pipeline is True
assert run_config.schedule is None
assert run_config.run.concurrency == 4
assert isinstance(run_config.run, V1Dag)
assert run_config.run.early_stopping is None
assert run_config.run.kind == V1Dag.IDENTIFIER
assert len(run_config.run.operations) == 2
assert len(run_config.run.components) == 1
template_grid = run_config.run.operations[1].matrix
assert isinstance(template_grid, V1GridSearch)
assert isinstance(template_grid.params["param1"], V1HpChoice)
assert isinstance(template_grid.params["param2"], V1HpChoice)
assert template_grid.params["param1"].to_dict() == {
"kind": "choice",
"value": [1, 2],
}
assert template_grid.params["param2"].to_dict() == {
"kind": "choice",
"value": [3.3, 4.4],
}
assert template_grid.concurrency == 2
assert template_grid.early_stopping is None
|
|
import glob
import json
import os
import re
from datetime import datetime
from ConfigParser import ConfigParser
from pyramid.exceptions import NotFound
from git import Repo
from git.exc import (
InvalidGitRepositoryError, NoSuchPathError, GitCommandError, BadName)
import avro.schema
from elasticutils import get_es as get_es_object
from elasticgit.commands.avro import deserialize
from elasticgit.storage import StorageManager
class UCConfigParser(ConfigParser):
"""
A config parser that understands lists and dictionaries.
"""
def get_list(self, section, option):
"""
This allows for loading of Pyramid list style configuration
options:
[foo]
bar =
baz
qux
zap
``get_list('foo', 'bar')`` returns ``['baz', 'qux', 'zap']``
:param str section:
The section to read.
:param str option:
The option to read from the section.
:returns: list
"""
value = self.get(section, option)
return list(filter(None, (x.strip() for x in value.splitlines())))
def get_dict(self, section, option):
"""
This allows for loading of Pyramid dictionary style configuration
options:
[foo]
bar =
baz=qux
zap=paz
``get_dict('foo', 'bar')`` returns ``{'baz': 'qux', 'zap': 'paz'}``
:param str section:
The section to read.
:param str option:
The option to read from the section.
:returns: dict
"""
        return dict(re.split(r'\s*=\s*', value)
for value in self.get_list(section, option))
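# A minimal usage sketch (not part of the module API), showing how the
# Pyramid-style list and dict options described in the docstrings above are
# read back. It assumes the Python 2 ``StringIO`` module, to match the
# ``ConfigParser`` import used by this file; the helper name is illustrative.
def _example_uc_config_parser():
    from StringIO import StringIO
    parser = UCConfigParser()
    parser.readfp(StringIO(
        "[foo]\n"
        "bar =\n"
        "    baz\n"
        "    qux\n"
        "mapping =\n"
        "    baz=qux\n"
        "    zap=paz\n"))
    assert parser.get_list('foo', 'bar') == ['baz', 'qux']
    assert parser.get_dict('foo', 'mapping') == {'baz': 'qux', 'zap': 'paz'}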
def get_repositories(path):
"""
    Return a list of repositories (``git.Repo`` objects) found in the
    subdirectories of a directory.
    :param str path:
        The path to find repositories in
    :returns: list
"""
return [get_repository(os.path.join(path, subdir))
for subdir in os.listdir(path)
if os.path.isdir(
os.path.join(path, subdir, '.git'))]
def get_repository_names(path):
"""
    Return a list of the directory names of repositories
    found in a directory.
    :param str path:
        The path to find repositories in
    :returns: list
"""
return [subdir
for subdir in os.listdir(path)
if os.path.isdir(os.path.join(path, subdir, '.git'))]
def get_repository(path):
"""
Return a repository for whatever's at a path
:param str path:
The path to the repository
:returns: Repo
"""
try:
return Repo(path)
except (NoSuchPathError, InvalidGitRepositoryError):
raise NotFound('Repository not found.')
def get_index_prefix(path):
"""
Return the Elasticsearch index prefix for the repo at path.
    :param str path:
        The path to the repository
:returns: string
"""
return os.path.basename(path).lower()
def list_schemas(repo):
"""
    Return a dict of parsed avro schemas, keyed by their full name.
:param Repo repo:
The git repository.
:returns: dict
"""
schema_files = glob.glob(
os.path.join(repo.working_dir, '_schemas', '*.avsc'))
schemas = {}
for schema_file in schema_files:
with open(schema_file, 'r') as fp:
schema = json.load(fp)
schemas['%(namespace)s.%(name)s' % schema] = schema
return schemas
def list_content_types(repo):
"""
Return a list of content types in a repository.
:param Repo repo:
The git repository.
:returns: list
"""
schema_files = glob.glob(
os.path.join(repo.working_dir, '_schemas', '*.avsc'))
return [os.path.splitext(os.path.basename(schema_file))[0]
for schema_file in schema_files]
def get_schema(repo, content_type):
"""
Return a schema for a content type in a repository.
:param Repo repo:
The git repository.
:returns: dict
"""
try:
with open(
os.path.join(repo.working_dir,
'_schemas',
'%s.avsc' % (content_type,)), 'r') as fp:
data = fp.read()
return avro.schema.parse(data)
except IOError: # pragma: no cover
raise NotFound('Schema does not exist.')
def get_mapping(repo, content_type):
"""
Return an ES mapping for a content type in a repository.
:param Repo repo:
        The git repository.
:returns: dict
"""
try:
with open(
os.path.join(repo.working_dir,
'_mappings',
'%s.json' % (content_type,)), 'r') as fp:
return json.load(fp)
except IOError:
raise NotFound('Mapping does not exist.')
def format_repo(repo):
"""
    Return a dictionary representing the repository.
    It returns ``None`` for fields we do not support or that are not
    relevant.
    :param git.Repo repo:
        The repository object.
    :returns: dict
"""
commit = repo.commit()
return {
'name': os.path.basename(repo.working_dir),
'branch': repo.active_branch.name,
'commit': commit.hexsha,
'timestamp': datetime.fromtimestamp(
commit.committed_date).isoformat(),
'author': '%s <%s>' % (commit.author.name, commit.author.email),
'schemas': list_schemas(repo)
}
def format_diff_A(diff):
return {
'type': 'A',
'path': diff.b_blob.path,
}
def format_diff_D(diff):
return {
'type': 'D',
'path': diff.a_blob.path,
}
def format_diff_R(diff):
return {
'type': 'R',
'rename_from': diff.rename_from,
'rename_to': diff.rename_to,
}
def format_diff_M(diff):
return {
'type': 'M',
'path': diff.a_blob.path,
}
def format_diffindex(diff_index):
"""
    Return a JSON-serializable representation of a DiffIndex.
    Yields dictionaries describing each change.
.. code::
[
{
'type': 'A',
'path': 'path/to/added/file.txt',
},
{
'type': 'D',
'path': 'path/to/deleted/file.txt',
},
{
'type': 'M',
'path': 'path/to/modified/file.txt',
},
{
'type': 'R',
'rename_from': 'original/path/to/file.txt',
'rename_to': 'new/path/to/file.txt',
},
]
:returns: generator
"""
for diff in diff_index:
if diff.new_file:
yield format_diff_A(diff)
elif diff.deleted_file:
yield format_diff_D(diff)
elif diff.renamed:
yield format_diff_R(diff)
elif diff.a_blob and diff.b_blob and diff.a_blob != diff.b_blob:
yield format_diff_M(diff)
def format_content_type(repo, content_type):
"""
Return a list of all content objects for a given content type
in a repository.
:param Repo repo:
The git repository.
:param str content_type:
The content type to list
:returns: list
"""
storage_manager = StorageManager(repo)
model_class = load_model_class(repo, content_type)
return [dict(model_obj)
for model_obj in storage_manager.iterate(model_class)]
def format_content_type_object(repo, content_type, uuid):
"""
Return a content object from a repository for a given content_type
and uuid
:param Repo repo:
The git repository.
:param str content_type:
The content type to list
:returns: dict
"""
try:
storage_manager = StorageManager(repo)
model_class = load_model_class(repo, content_type)
return dict(storage_manager.get(model_class, uuid))
except GitCommandError:
raise NotFound('Object does not exist.')
def format_repo_status(repo):
"""
Return a dictionary representing the repository status
It returns ``None`` for things we do not support or are not
relevant.
    :param git.Repo repo:
        The repository object.
:returns: dict
"""
commit = repo.commit()
return {
'name': os.path.basename(repo.working_dir),
'commit': commit.hexsha,
'timestamp': datetime.fromtimestamp(
commit.committed_date).isoformat(),
}
def save_content_type_object(repo, schema, uuid, data):
"""
Save an object as a certain content type
"""
storage_manager = StorageManager(repo)
model_class = deserialize(schema,
module_name=schema['namespace'])
model = model_class(data)
commit = storage_manager.store(model, 'Updated via PUT request.')
return commit, model
def delete_content_type_object(repo, content_type, uuid):
"""
Delete an object of a certain content type
"""
storage_manager = StorageManager(repo)
model_class = load_model_class(repo, content_type)
model = storage_manager.get(model_class, uuid)
commit = storage_manager.delete(model, 'Deleted via DELETE request.')
return commit, model
def get_config(request): # pragma: no cover
"""
Get the configuration for a request.
:param Request request:
The HTTP request
"""
return request.registry.settings
def get_es_settings(config):
"""
Return the Elasticsearch settings based on the config or ENV.
:param dict config:
The app configuration
:returns: dict
"""
es_host = os.environ.get('ES_HOST')
return {
'urls': [es_host or config.get('es.host', 'http://localhost:9200')]
}
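# A minimal illustration (not part of the module API) of the precedence
# described above: ES_HOST in the environment wins over the ``es.host``
# config key, which itself falls back to http://localhost:9200. The helper
# name is illustrative and assumes ES_HOST is not already set.
def _example_es_settings():
    assert get_es_settings({}) == {'urls': ['http://localhost:9200']}
    assert get_es_settings({'es.host': 'http://other:9200'}) == {
        'urls': ['http://other:9200']}
    os.environ['ES_HOST'] = 'http://es.internal:9200'
    try:
        assert get_es_settings({'es.host': 'http://other:9200'}) == {
            'urls': ['http://es.internal:9200']}
    finally:
        del os.environ['ES_HOST']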
def get_es(config):
"""
Return the :py:class:`elasticsearch.Elasticsearch` object based
on the config.
:param dict config:
The app configuration
:returns: Elasticsearch
"""
return get_es_object(**get_es_settings(config))
def load_model_class(repo, content_type):
"""
Return a model class for a content type in a repository.
:param Repo repo:
The git repository.
:param str content_type:
The content type to list
:returns: class
"""
schema = get_schema(repo, content_type).to_json()
return deserialize(schema, module_name=schema['namespace'])
def add_model_item_to_pull_dict(storage_manager, path, pull_dict):
    """Load the model at ``path`` (if JSON) and append it to ``pull_dict``."""
if path.endswith(".json"):
model = storage_manager.load(path)
pull_dict[model.__module__ + "." +
model.__class__.__name__].append(dict(model))
return True
return False
def get_repository_diff(repo, commit_id):
    """Return a dict describing the diff between ``commit_id`` and HEAD."""
try:
old_commit = repo.commit(commit_id)
diff = old_commit.diff(repo.head)
return {
"name": os.path.basename(repo.working_dir),
"previous-index": commit_id,
"current-index": repo.commit().hexsha,
"diff": list(format_diffindex(diff))
}
except (GitCommandError, BadName):
raise NotFound("The git index does not exist")
def pull_repository_files(repo, commit_id):
    """Return content changed since ``commit_id``, grouped by content type."""
changed_files = {}
for name in list_content_types(repo):
changed_files[name] = []
try:
old_commit = repo.commit(commit_id)
diff = old_commit.diff(repo.head)
sm = StorageManager(repo)
for diff_added in diff.iter_change_type('A'):
add_model_item_to_pull_dict(
sm, diff_added.b_blob.path, changed_files)
for diff_modified in diff.iter_change_type('M'):
add_model_item_to_pull_dict(
sm, diff_modified.b_blob.path, changed_files)
json_diff = []
for diff_added in diff.iter_change_type('R'):
json_diff.append(format_diff_R(diff_added))
for diff_removed in diff.iter_change_type('D'):
json_diff.append(format_diff_D(diff_removed))
changed_files["other"] = json_diff
changed_files["commit"] = repo.head.commit.hexsha
return changed_files
except (GitCommandError, BadName):
raise NotFound("The git index does not exist")
def clone_repository(repo):
    """
    Return every object in the repository grouped by content type, plus the
    current HEAD commit under ``commit``.
    """
    files = {}
for name in list_content_types(repo):
files[name] = format_content_type(repo, name)
files['commit'] = repo.head.commit.hexsha
return files
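# A rough sketch of how the helpers above are meant to compose (the local
# variable names are assumed, not part of this module):
#   snapshot = clone_repository(repo)                # full dump + current commit
#   last_seen = snapshot['commit']
#   ...
#   delta = pull_repository_files(repo, last_seen)   # only what changed since
#   last_seen = delta['commit']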
|
|
"""Provide a way to connect entities belonging to one device."""
import logging
import uuid
from typing import List, Optional
from collections import OrderedDict
import attr
from homeassistant.core import callback
from homeassistant.loader import bind_hass
_LOGGER = logging.getLogger(__name__)
_UNDEF = object()
DATA_REGISTRY = 'device_registry'
STORAGE_KEY = 'core.device_registry'
STORAGE_VERSION = 1
SAVE_DELAY = 10
CONNECTION_NETWORK_MAC = 'mac'
CONNECTION_UPNP = 'upnp'
CONNECTION_ZIGBEE = 'zigbee'
@attr.s(slots=True, frozen=True)
class DeviceEntry:
"""Device Registry Entry."""
config_entries = attr.ib(type=set, converter=set,
default=attr.Factory(set))
connections = attr.ib(type=set, converter=set, default=attr.Factory(set))
identifiers = attr.ib(type=set, converter=set, default=attr.Factory(set))
manufacturer = attr.ib(type=str, default=None)
model = attr.ib(type=str, default=None)
name = attr.ib(type=str, default=None)
sw_version = attr.ib(type=str, default=None)
hub_device_id = attr.ib(type=str, default=None)
area_id = attr.ib(type=str, default=None)
name_by_user = attr.ib(type=str, default=None)
id = attr.ib(type=str, default=attr.Factory(lambda: uuid.uuid4().hex))
def format_mac(mac):
"""Format the mac address string for entry into dev reg."""
to_test = mac
if len(to_test) == 17 and to_test.count(':') == 5:
return to_test.lower()
if len(to_test) == 17 and to_test.count('-') == 5:
to_test = to_test.replace('-', '')
elif len(to_test) == 14 and to_test.count('.') == 2:
to_test = to_test.replace('.', '')
if len(to_test) == 12:
# no : included
return ':'.join(to_test.lower()[i:i + 2] for i in range(0, 12, 2))
    # Unrecognised format; return the original string
return mac
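# Illustrative normalisations performed by format_mac (each of these yields
# 'aa:bb:cc:dd:ee:ff'):
#   format_mac('AA:BB:CC:DD:EE:FF')
#   format_mac('AA-BB-CC-DD-EE-FF')
#   format_mac('aabb.ccdd.eeff')
#   format_mac('AABBCCDDEEFF')
# Anything it cannot recognise is returned unchanged.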
class DeviceRegistry:
"""Class to hold a registry of devices."""
def __init__(self, hass):
"""Initialize the device registry."""
self.hass = hass
self.devices = None
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
@callback
def async_get(self, device_id: str) -> Optional[DeviceEntry]:
"""Get device."""
return self.devices.get(device_id)
@callback
def async_get_device(self, identifiers: set, connections: set):
"""Check if device is registered."""
for device in self.devices.values():
if any(iden in device.identifiers for iden in identifiers) or \
any(conn in device.connections for conn in connections):
return device
return None
@callback
def async_get_or_create(self, *, config_entry_id, connections=None,
identifiers=None, manufacturer=_UNDEF,
model=_UNDEF, name=_UNDEF, sw_version=_UNDEF,
via_hub=None):
"""Get device. Create if it doesn't exist."""
if not identifiers and not connections:
return None
if identifiers is None:
identifiers = set()
if connections is None:
connections = set()
connections = {
(key, format_mac(value)) if key == CONNECTION_NETWORK_MAC
else (key, value)
for key, value in connections
}
device = self.async_get_device(identifiers, connections)
if device is None:
device = DeviceEntry()
self.devices[device.id] = device
if via_hub is not None:
hub_device = self.async_get_device({via_hub}, set())
hub_device_id = hub_device.id if hub_device else _UNDEF
else:
hub_device_id = _UNDEF
return self._async_update_device(
device.id,
add_config_entry_id=config_entry_id,
hub_device_id=hub_device_id,
merge_connections=connections or _UNDEF,
merge_identifiers=identifiers or _UNDEF,
manufacturer=manufacturer,
model=model,
name=name,
sw_version=sw_version
)
@callback
def async_update_device(
self, device_id, *, area_id=_UNDEF, name_by_user=_UNDEF):
"""Update properties of a device."""
return self._async_update_device(
device_id, area_id=area_id, name_by_user=name_by_user)
@callback
def _async_update_device(self, device_id, *, add_config_entry_id=_UNDEF,
remove_config_entry_id=_UNDEF,
merge_connections=_UNDEF,
merge_identifiers=_UNDEF,
manufacturer=_UNDEF,
model=_UNDEF,
name=_UNDEF,
sw_version=_UNDEF,
hub_device_id=_UNDEF,
area_id=_UNDEF,
name_by_user=_UNDEF):
"""Update device attributes."""
old = self.devices[device_id]
changes = {}
config_entries = old.config_entries
if (add_config_entry_id is not _UNDEF and
add_config_entry_id not in old.config_entries):
config_entries = old.config_entries | {add_config_entry_id}
if (remove_config_entry_id is not _UNDEF and
remove_config_entry_id in config_entries):
config_entries = config_entries - {remove_config_entry_id}
if config_entries is not old.config_entries:
changes['config_entries'] = config_entries
for attr_name, value in (
('connections', merge_connections),
('identifiers', merge_identifiers),
):
old_value = getattr(old, attr_name)
# If not undefined, check if `value` contains new items.
if value is not _UNDEF and not value.issubset(old_value):
changes[attr_name] = old_value | value
for attr_name, value in (
('manufacturer', manufacturer),
('model', model),
('name', name),
('sw_version', sw_version),
('hub_device_id', hub_device_id),
):
if value is not _UNDEF and value != getattr(old, attr_name):
changes[attr_name] = value
if (area_id is not _UNDEF and area_id != old.area_id):
changes['area_id'] = area_id
if (name_by_user is not _UNDEF and
name_by_user != old.name_by_user):
changes['name_by_user'] = name_by_user
if not changes:
return old
new = self.devices[device_id] = attr.evolve(old, **changes)
self.async_schedule_save()
return new
async def async_load(self):
"""Load the device registry."""
data = await self._store.async_load()
devices = OrderedDict()
if data is not None:
for device in data['devices']:
devices[device['id']] = DeviceEntry(
config_entries=set(device['config_entries']),
connections={tuple(conn) for conn
in device['connections']},
identifiers={tuple(iden) for iden
in device['identifiers']},
manufacturer=device['manufacturer'],
model=device['model'],
name=device['name'],
sw_version=device['sw_version'],
id=device['id'],
# Introduced in 0.79
hub_device_id=device.get('hub_device_id'),
# Introduced in 0.87
area_id=device.get('area_id'),
name_by_user=device.get('name_by_user')
)
self.devices = devices
@callback
def async_schedule_save(self):
"""Schedule saving the device registry."""
self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
@callback
def _data_to_save(self):
"""Return data of device registry to store in a file."""
data = {}
data['devices'] = [
{
'config_entries': list(entry.config_entries),
'connections': list(entry.connections),
'identifiers': list(entry.identifiers),
'manufacturer': entry.manufacturer,
'model': entry.model,
'name': entry.name,
'sw_version': entry.sw_version,
'id': entry.id,
'hub_device_id': entry.hub_device_id,
'area_id': entry.area_id,
'name_by_user': entry.name_by_user
} for entry in self.devices.values()
]
return data
@callback
def async_clear_config_entry(self, config_entry_id):
"""Clear config entry from registry entries."""
for dev_id, device in self.devices.items():
if config_entry_id in device.config_entries:
self._async_update_device(
dev_id, remove_config_entry_id=config_entry_id)
@callback
def async_clear_area_id(self, area_id: str) -> None:
"""Clear area id from registry entries."""
for dev_id, device in self.devices.items():
if area_id == device.area_id:
self._async_update_device(dev_id, area_id=None)
@bind_hass
async def async_get_registry(hass) -> DeviceRegistry:
"""Return device registry instance."""
task = hass.data.get(DATA_REGISTRY)
if task is None:
async def _load_reg():
registry = DeviceRegistry(hass)
await registry.async_load()
return registry
task = hass.data[DATA_REGISTRY] = hass.async_create_task(_load_reg())
return await task
@callback
def async_entries_for_area(registry: DeviceRegistry, area_id: str) \
-> List[DeviceEntry]:
"""Return entries that match an area."""
return [device for device in registry.devices.values()
if device.area_id == area_id]
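# A minimal sketch of how an integration would typically use this registry
# (the domain, identifiers and device details below are hypothetical):
#   registry = await async_get_registry(hass)
#   registry.async_get_or_create(
#       config_entry_id=entry.entry_id,
#       connections={(CONNECTION_NETWORK_MAC, 'aa:bb:cc:dd:ee:ff')},
#       identifiers={('example_domain', 'serial-1234')},
#       manufacturer='ACME', model='Widget', name='Living Room Widget',
#       sw_version='1.0')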
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import zlib
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.ops import readers
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.lib.io import python_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
try:
import psutil # pylint: disable=g-import-not-at-top
psutil_import_succeeded = True
except ImportError:
psutil_import_succeeded = False
class TextLineDatasetTest(test_base.DatasetTestBase):
def _lineText(self, f, l):
return compat.as_bytes("%d: %d" % (f, l))
def _createFiles(self,
num_files,
num_lines,
crlf=False,
compression_type=None):
filenames = []
for i in range(num_files):
fn = os.path.join(self.get_temp_dir(), "text_line.%d.txt" % i)
filenames.append(fn)
contents = []
for j in range(num_lines):
contents.append(self._lineText(i, j))
        # Always include a newline after the record unless it is
        # at the end of the file, in which case we include it only
        # if it is the first file.
if j + 1 != num_lines or i == 0:
contents.append(b"\r\n" if crlf else b"\n")
contents = b"".join(contents)
if not compression_type:
with open(fn, "wb") as f:
f.write(contents)
elif compression_type == "GZIP":
with gzip.GzipFile(fn, "wb") as f:
f.write(contents)
elif compression_type == "ZLIB":
contents = zlib.compress(contents)
with open(fn, "wb") as f:
f.write(contents)
else:
raise ValueError("Unsupported compression_type", compression_type)
return filenames
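  # For example, _createFiles(2, 5) writes two files whose records read
  # "0: 0" ... "0: 4" and "1: 0" ... "1: 4" (see _lineText above), optionally
  # GZIP- or ZLIB-compressed.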
def _testTextLineDataset(self, compression_type=None):
test_filenames = self._createFiles(
2, 5, crlf=True, compression_type=compression_type)
filenames = array_ops.placeholder(dtypes.string, shape=[None])
num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = readers.TextLineDataset(
filenames, compression_type=compression_type).repeat(num_epochs)
batch_dataset = repeat_dataset.batch(batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
init_op = iterator.make_initializer(repeat_dataset)
init_batch_op = iterator.make_initializer(batch_dataset)
get_next = iterator.get_next()
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
init_op, feed_dict={filenames: [test_filenames[0]],
num_epochs: 1})
for i in range(5):
self.assertEqual(self._lineText(0, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from file 1.
sess.run(
init_op, feed_dict={filenames: [test_filenames[1]],
num_epochs: 1})
for i in range(5):
self.assertEqual(self._lineText(1, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 1})
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test repeated iteration through both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 10})
for _ in range(10):
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test batched and repeated iteration through both files.
sess.run(
init_batch_op,
feed_dict={filenames: test_filenames,
num_epochs: 10,
batch_size: 5})
for _ in range(10):
self.assertAllEqual([self._lineText(0, i) for i in range(5)],
sess.run(get_next))
self.assertAllEqual([self._lineText(1, i) for i in range(5)],
sess.run(get_next))
def testTextLineDatasetNoCompression(self):
self._testTextLineDataset()
def testTextLineDatasetGzipCompression(self):
self._testTextLineDataset(compression_type="GZIP")
def testTextLineDatasetZlibCompression(self):
self._testTextLineDataset(compression_type="ZLIB")
def testTextLineDatasetBuffering(self):
test_filenames = self._createFiles(2, 5, crlf=True)
repeat_dataset = readers.TextLineDataset(test_filenames, buffer_size=10)
iterator = repeat_dataset.make_one_shot_iterator()
with self.cached_session() as sess:
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(iterator.get_next()))
with self.assertRaises(errors.OutOfRangeError):
sess.run(iterator.get_next())
def testIteratorResourceCleanup(self):
filename = os.path.join(self.get_temp_dir(), "text.txt")
with open(filename, "wt") as f:
for i in range(3):
f.write("%d\n" % (i,))
with context.eager_mode():
first_iterator = iter(readers.TextLineDataset(filename))
self.assertEqual(b"0", next(first_iterator).numpy())
second_iterator = iter(readers.TextLineDataset(filename))
self.assertEqual(b"0", next(second_iterator).numpy())
# Eager kernel caching is based on op attributes, which includes the
# Dataset's output shape. Create a different kernel to test that they
# don't create resources with the same names.
different_kernel_iterator = iter(
readers.TextLineDataset(filename).repeat().batch(16))
self.assertEqual([16], next(different_kernel_iterator).shape)
# Remove our references to the Python Iterator objects, which (assuming no
# reference cycles) is enough to trigger DestroyResourceOp and close the
# partially-read files.
del first_iterator
del second_iterator
del different_kernel_iterator
if not psutil_import_succeeded:
self.skipTest(
"psutil is required to check that we've closed our files.")
open_files = psutil.Process().open_files()
self.assertNotIn(filename, [open_file.path for open_file in open_files])
class FixedLengthRecordReaderTest(test_base.DatasetTestBase):
def setUp(self):
super(FixedLengthRecordReaderTest, self).setUp()
self._num_files = 2
self._num_records = 7
self._header_bytes = 5
self._record_bytes = 3
self._footer_bytes = 2
def _record(self, f, r):
return compat.as_bytes(str(f * 2 + r) * self._record_bytes)
def _createFiles(self, compression_type=None):
filenames = []
for i in range(self._num_files):
fn = os.path.join(self.get_temp_dir(), "fixed_length_record.%d.txt" % i)
filenames.append(fn)
contents = []
contents.append(b"H" * self._header_bytes)
for j in range(self._num_records):
contents.append(self._record(i, j))
contents.append(b"F" * self._footer_bytes)
contents = b"".join(contents)
if not compression_type:
with open(fn, "wb") as f:
f.write(contents)
elif compression_type == "GZIP":
with gzip.GzipFile(fn, "wb") as f:
f.write(contents)
elif compression_type == "ZLIB":
contents = zlib.compress(contents)
with open(fn, "wb") as f:
f.write(contents)
else:
raise ValueError("Unsupported compression_type", compression_type)
return filenames
def _testFixedLengthRecordDataset(self, compression_type=None):
test_filenames = self._createFiles(compression_type=compression_type)
filenames = array_ops.placeholder(dtypes.string, shape=[None])
num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = (
readers.FixedLengthRecordDataset(
filenames,
self._record_bytes,
self._header_bytes,
self._footer_bytes,
compression_type=compression_type).repeat(num_epochs))
batch_dataset = repeat_dataset.batch(batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
init_op = iterator.make_initializer(repeat_dataset)
init_batch_op = iterator.make_initializer(batch_dataset)
get_next = iterator.get_next()
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
init_op, feed_dict={filenames: [test_filenames[0]],
num_epochs: 1})
for i in range(self._num_records):
self.assertEqual(self._record(0, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from file 1.
sess.run(
init_op, feed_dict={filenames: [test_filenames[1]],
num_epochs: 1})
for i in range(self._num_records):
self.assertEqual(self._record(1, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 1})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test repeated iteration through both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 10})
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test batched and repeated iteration through both files.
sess.run(
init_batch_op,
feed_dict={
filenames: test_filenames,
num_epochs: 10,
batch_size: self._num_records
})
for _ in range(10):
for j in range(self._num_files):
self.assertAllEqual(
[self._record(j, i) for i in range(self._num_records)],
sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testFixedLengthRecordDatasetNoCompression(self):
self._testFixedLengthRecordDataset()
def testFixedLengthRecordDatasetGzipCompression(self):
self._testFixedLengthRecordDataset(compression_type="GZIP")
def testFixedLengthRecordDatasetZlibCompression(self):
self._testFixedLengthRecordDataset(compression_type="ZLIB")
def testFixedLengthRecordDatasetBuffering(self):
test_filenames = self._createFiles()
dataset = readers.FixedLengthRecordDataset(
test_filenames,
self._record_bytes,
self._header_bytes,
self._footer_bytes,
buffer_size=10)
iterator = dataset.make_one_shot_iterator()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(iterator.get_next()))
with self.assertRaises(errors.OutOfRangeError):
sess.run(iterator.get_next())
def testFixedLengthRecordDatasetWrongSize(self):
test_filenames = self._createFiles()
dataset = readers.FixedLengthRecordDataset(
test_filenames,
self._record_bytes + 1, # Incorrect record length.
self._header_bytes,
self._footer_bytes,
buffer_size=10)
iterator = dataset.make_one_shot_iterator()
with self.cached_session() as sess:
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
r"Excluding the header \(5 bytes\) and footer \(2 bytes\), input "
r"file \".*fixed_length_record.0.txt\" has body length 21 bytes, "
r"which is not an exact multiple of the record length \(4 bytes\)."):
sess.run(iterator.get_next())
def _iterator_checkpoint_path(self):
return os.path.join(self.get_temp_dir(), "iterator")
def _save_op(self, iterator_resource):
iterator_state_variant = gen_dataset_ops.serialize_iterator(
iterator_resource)
save_op = io_ops.write_file(
self._iterator_checkpoint_path(),
parsing_ops.serialize_tensor(iterator_state_variant))
return save_op
def _restore_op(self, iterator_resource):
iterator_state_variant = parsing_ops.parse_tensor(
io_ops.read_file(self._iterator_checkpoint_path()), dtypes.variant)
restore_op = gen_dataset_ops.deserialize_iterator(iterator_resource,
iterator_state_variant)
return restore_op
def _build_iterator_graph(self, num_epochs):
filenames = self._createFiles()
dataset = (readers.FixedLengthRecordDataset(
filenames, self._record_bytes, self._header_bytes, self._footer_bytes)
.repeat(num_epochs))
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next_op = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next_op, save_op, restore_op
def _restore_iterator(self):
output_types = dtypes.string
output_shapes = tensor_shape.scalar()
iterator = iterator_ops.Iterator.from_structure(output_types, output_shapes)
get_next = iterator.get_next()
restore_op = self._restore_op(iterator._iterator_resource)
return restore_op, get_next
def testSaveRestore(self):
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testInitThenRestore(self):
# Note: Calling init_op before restore_op is redundant. This test just makes
# sure we do not fail if restore is called on an already initialized
# iterator resource.
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreInModifiedGraph(self):
num_epochs = 10
num_epochs_1 = 20
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs_1)
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreWithoutBuildingDatasetGraph(self):
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
restore_op, get_next_op = self._restore_iterator()
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreUnusedIterator(self):
num_epochs = 10
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
# Save unused iterator.
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
for _ in range(num_epochs * self._num_files * self._num_records):
sess.run(get_next_op)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreExhaustedIterator(self):
num_epochs = 10
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for _ in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
class TFRecordDatasetTest(test_base.DatasetTestBase):
def setUp(self):
super(TFRecordDatasetTest, self).setUp()
self._num_files = 2
self._num_records = 7
self.test_filenames = self._createFiles()
self.filenames = array_ops.placeholder(dtypes.string, shape=[None])
self.num_epochs = array_ops.placeholder_with_default(
constant_op.constant(1, dtypes.int64), shape=[])
self.compression_type = array_ops.placeholder_with_default("", shape=[])
self.batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = readers.TFRecordDataset(self.filenames,
self.compression_type).repeat(
self.num_epochs)
batch_dataset = repeat_dataset.batch(self.batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
self.init_op = iterator.make_initializer(repeat_dataset)
self.init_batch_op = iterator.make_initializer(batch_dataset)
self.get_next = iterator.get_next()
def _record(self, f, r):
return compat.as_bytes("Record %d of file %d" % (r, f))
def _createFiles(self):
filenames = []
for i in range(self._num_files):
fn = os.path.join(self.get_temp_dir(), "tf_record.%d.txt" % i)
filenames.append(fn)
writer = python_io.TFRecordWriter(fn)
for j in range(self._num_records):
writer.write(self._record(i, j))
writer.close()
return filenames
def testReadOneEpoch(self):
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
self.init_op,
feed_dict={
self.filenames: [self.test_filenames[0]],
self.num_epochs: 1
})
for i in range(self._num_records):
self.assertAllEqual(self._record(0, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
# Basic test: read from file 1.
sess.run(
self.init_op,
feed_dict={
self.filenames: [self.test_filenames[1]],
self.num_epochs: 1
})
for i in range(self._num_records):
self.assertAllEqual(self._record(1, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
# Basic test: read from both files.
sess.run(
self.init_op,
feed_dict={self.filenames: self.test_filenames,
self.num_epochs: 1})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadTenEpochs(self):
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: self.test_filenames,
self.num_epochs: 10})
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadTenEpochsOfBatches(self):
with self.cached_session() as sess:
sess.run(
self.init_batch_op,
feed_dict={
self.filenames: self.test_filenames,
self.num_epochs: 10,
self.batch_size: self._num_records
})
for _ in range(10):
for j in range(self._num_files):
values = sess.run(self.get_next)
self.assertAllEqual(
[self._record(j, i) for i in range(self._num_records)], values)
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadZlibFiles(self):
zlib_files = []
for i, fn in enumerate(self.test_filenames):
with open(fn, "rb") as f:
cdata = zlib.compress(f.read())
zfn = os.path.join(self.get_temp_dir(), "tfrecord_%s.z" % i)
with open(zfn, "wb") as f:
f.write(cdata)
zlib_files.append(zfn)
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: zlib_files,
self.compression_type: "ZLIB"})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadGzipFiles(self):
gzip_files = []
for i, fn in enumerate(self.test_filenames):
with open(fn, "rb") as f:
gzfn = os.path.join(self.get_temp_dir(), "tfrecord_%s.gz" % i)
with gzip.GzipFile(gzfn, "wb") as gzf:
gzf.write(f.read())
gzip_files.append(gzfn)
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: gzip_files,
self.compression_type: "GZIP"})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadWithBuffer(self):
one_mebibyte = 2**20
d = readers.TFRecordDataset(self.test_filenames, buffer_size=one_mebibyte)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testReadFromDatasetOfFiles(self):
files = dataset_ops.Dataset.from_tensor_slices(self.test_filenames)
d = readers.TFRecordDataset(files)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testReadTenEpochsFromDatasetOfFilesInParallel(self):
files = dataset_ops.Dataset.from_tensor_slices(
self.test_filenames).repeat(10)
d = readers.TFRecordDataset(files, num_parallel_reads=4)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
expected = []
actual = []
with self.cached_session() as sess:
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
expected.append(self._record(j, i))
actual.append(sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
self.assertEqual(sorted(expected), sorted(actual))
if __name__ == "__main__":
test.main()
|
|
# -*- coding: utf-8 -*-
"""
.. module:: pytfa
:platform: Unix, Windows
:synopsis: Thermodynamics-based Flux Analysis
.. moduleauthor:: pyTFA team
Thermodynamic computations for reactions
"""
from functools import reduce
from math import log, sqrt
from . import std
from .utils import find_transported_mets
from .metabolite import CPD_PROTON
###################
# REACTIONS TOOLS #
###################
def calcDGtpt_rhs(reaction, compartmentsData, thermo_units):
""" Calculates the RHS of the deltaG constraint, i.e. the sum of the
non-concentration terms
:param cobra.thermo.reaction.Reaction reaction: The reaction to compute the
data for
:param dict(float) compartmentsData: Data of the compartments of the cobra_model
:param str thermo_units: The thermodynamic database of the cobra_model
:returns: deltaG_tpt and the breakdown of deltaG_tpt
:rtype: tuple(float, dict(float))
Example:
ATP Synthase reaction::
reaction = cpd00008 + 4 cpd00067 + cpd00009 <=> cpd00002 + 3 cpd00067 + cpd00001
compartments = 'c' 'e' 'c' 'c' 'c' 'c'
If there are any metabolites with unknown energies then returns
``(0, None)``.
"""
# Compute our constants in accordance with the thermoDB
    if thermo_units == "kJ/mol":
        GAS_CONSTANT = 8.314472 / 1000  # kJ/(K mol)
        faraday_const = 96.485  # kJ/(V mol)
    else:
        GAS_CONSTANT = 1.9858775 / 1000  # kcal/(K mol)
        faraday_const = 23.061  # kcal/(V mol)
TEMPERATURE = 298.15 # K
RT = GAS_CONSTANT * TEMPERATURE
    # Bail out if any metabolite has an unknown formation energy (these are
    # flagged with a placeholder value larger than 10**6).
    if any(met.thermo.deltaGf_tr > 10 ** 6 for met in reaction.metabolites):
        return (0, None)
sum_deltaGFis_trans = 0
sum_stoich_NH = 0
RT_sum_H_LC_tpt = 0 # to include the differential proton concentration
# effects if protons are transported
transportedMets = find_transported_mets(reaction)
compartments = {'reactant': [], 'product': []}
for seed_id in transportedMets:
for metType in ['reactant', 'product']:
if seed_id != 'cpd00001':
met = transportedMets[seed_id][metType]
pH_comp = met.thermo.pH
ionicStr_comp = met.thermo.ionicStr
deltaGfsp = met.thermo.deltaGf_tr
compartments[metType].append(met.compartment)
sum_stoich_NH += ((1 if metType == 'product' else -1)
* transportedMets[seed_id]['coeff']
* met.thermo.nH_std
* RT
* log(10 ** -pH_comp))
sum_deltaGFis_trans += ((1 if metType == 'product' else -1)
* transportedMets[seed_id]['coeff']
* deltaGfsp)
else:
compartments[metType].append('')
if seed_id == CPD_PROTON:
met = transportedMets[seed_id][metType]
pH_comp = met.thermo.pH
RT_sum_H_LC_tpt += ((1 if metType == 'product' else -1)
* RT
* transportedMets[seed_id]['coeff']
* log(10 ** -pH_comp))
# calculate the transport of any ions
# membrane potential is always defined as inside - outside
# we should take the larger stoich of the transported compound
sum_F_memP_charge = 0
for seed_id in transportedMets:
if seed_id != 'cpd00001':
out_comp = transportedMets[seed_id]['reactant'].compartment
in_comp = transportedMets[seed_id]['product'].compartment
mem_pot = compartmentsData[out_comp]['membranePot'][in_comp]
charge = transportedMets[seed_id]['reactant'].thermo.charge_std
# Equal to the product's one
sum_F_memP_charge += (faraday_const
* (mem_pot / 1000.)
* transportedMets[seed_id]['coeff']
* charge)
deltaG = 0
for met in reaction.metabolites:
if CPD_PROTON != met.annotation['seed_id']:
deltaG += reaction.metabolites[met] * met.thermo.deltaGf_tr
sum_deltaGFis = 0
# lastly we calculate the deltaG of the chemical reaction if any
# but we do not add this part to the rhs as it would be included in the
# potential energy of the enzyme
final_coeffs = reaction.metabolites.copy()
for seed_id in transportedMets:
for metType in ['reactant', 'product']:
final_coeffs[transportedMets[seed_id][metType]] -= (
(1 if metType == 'product' else -1)
* transportedMets[seed_id]['coeff'])
for met in final_coeffs:
if final_coeffs[met] != 0 and met.annotation['seed_id'] != CPD_PROTON:
met_deltaGis = met.thermo.deltaGf_tr
sum_deltaGFis += final_coeffs[met] * met_deltaGis
# Sum all the parts
DG_trans_RHS = (sum_stoich_NH
+ sum_F_memP_charge
+ sum_deltaGFis_trans
+ RT_sum_H_LC_tpt
+ sum_deltaGFis)
breakdown = {
'sum_deltaGFis': sum_deltaGFis,
'sum_stoich_NH': sum_stoich_NH,
'sum_F_memP_charge': sum_F_memP_charge,
'sum_deltaGFis_trans': sum_deltaGFis_trans,
'RT_sum_H_LC_tpt': RT_sum_H_LC_tpt
}
return (DG_trans_RHS, breakdown)
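# Written out, the value returned above is
#   DG_trans_RHS = sum_stoich_NH + sum_F_memP_charge + sum_deltaGFis_trans
#                  + RT_sum_H_LC_tpt + sum_deltaGFis
# with each term also reported individually in the breakdown dict.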
def calcDGR_cues(reaction, reaction_cues_data):
""" Calculates the deltaG reaction and error of the reaction using the
constituent structural cues changes and returns also the error if any.
:param cobra.thermo.reaction.Reaction reaction: The reaction to compute
deltaG for
:param dict reaction_cues_data:
    :returns: deltaGR, the error on deltaGR, the cues in the reaction (keys
        of the dictionary) and their coefficients (values of the dictionary),
        and the error code if any.
If everything went right, the error code is an empty string
:rtype: tuple(float, float, dict(float), str)
"""
deltaGR = 0
deltaGR_err = 0
cues = {}
error = ''
    # First check that every reactant has structural cues defined
for reactant in reaction.metabolites:
if len(reactant.thermo.struct_cues) == 0:
return (10 ** 7, 10 ** 7, '', 'UNKNOWN_GROUPS')
(deltaGF, deltaGFerr, cpd_cues) = calcDGF_cues(
reactant.thermo.struct_cues,
reaction_cues_data)
for cue in cpd_cues:
if cue in cues:
cues[cue] += reaction.metabolites[reactant] * cpd_cues[cue]
else:
cues[cue] = reaction.metabolites[reactant] * cpd_cues[cue]
for cue in cues:
deltaGR += cues[cue] * reaction_cues_data[cue]['energy']
deltaGR_err += (cues[cue] * reaction_cues_data[cue]['error']) ** 2
deltaGR_err = sqrt(deltaGR_err)
return (deltaGR, deltaGR_err, cues, error)
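# Error propagation used above: with n_c the net coefficient of cue c in the
# reaction and err_c its tabulated error,
#   deltaGR_err = sqrt( sum_c (n_c * err_c)**2 )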
def calcDGF_cues(cues, reaction_cues_data):
""" Calculates the deltaG formation and error of the compound using its
constituent structural cues.
    :param dict cues: A dictionary mapping the cues' names to their
        coefficients in the compound
    :param dict reaction_cues_data:
    :returns: deltaG formation, the error on deltaG formation, and a dictionary
        with the cues' names as key and their coefficient as value
:rtype: tuple(float, float, dict(float)).
"""
deltaGF = 0
deltaGF_err = 0
finalcues = {}
for cue in cues:
if cue in finalcues:
finalcues[cue] += cues[cue]
else:
finalcues[cue] = cues[cue]
deltaGF += reaction_cues_data[cue]['energy'] * cues[cue]
deltaGF_err += (reaction_cues_data[cue]['error'] * cues[cue]) ** 2
deltaGF_err = sqrt(deltaGF_err)
return (deltaGF, deltaGF_err, finalcues)
def get_debye_huckel_b(T):
"""
    The Debye-Huckel A and B parameters do depend on the temperature.
    As of now, though, B is returned as a constant (its value at 298.15 K).
:param T: Temperature in Kelvin
:return: Debye_Huckel_B
"""
return std.DEBYE_HUCKEL_B_0
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=invalid-name
"""Base library for TVM FFI."""
from __future__ import absolute_import
import sys
import os
import ctypes
import numpy as np
from . import libinfo
#----------------------------
# library loading
#----------------------------
if sys.version_info[0] == 3:
string_types = (str,)
integer_types = (int, np.int32)
numeric_types = integer_types + (float, np.float32)
# this function is needed for python3
# to convert ctypes.char_p .value back to python str
if sys.platform == "win32":
encoding = 'cp' + str(ctypes.cdll.kernel32.GetACP())
py_str = lambda x: x.decode(encoding)
else:
py_str = lambda x: x.decode('utf-8')
else:
string_types = (basestring,)
integer_types = (int, long, np.int32)
numeric_types = integer_types + (float, np.float32)
py_str = lambda x: x
def _load_lib():
"""Load libary by searching possible path."""
lib_path = libinfo.find_lib_path()
lib = ctypes.CDLL(lib_path[0], ctypes.RTLD_GLOBAL)
    # TVMGetLastError returns a C string; declare its return type for ctypes
lib.TVMGetLastError.restype = ctypes.c_char_p
return lib, os.path.basename(lib_path[0])
# version number
__version__ = libinfo.__version__
# library instance of TVM
_LIB, _LIB_NAME = _load_lib()
# Whether we are runtime only
_RUNTIME_ONLY = "runtime" in _LIB_NAME
# The FFI mode of TVM
_FFI_MODE = os.environ.get("TVM_FFI", "auto")
#----------------------------
# helper function in ctypes.
#----------------------------
def c_str(string):
"""Create ctypes char * from a python string
Parameters
----------
string : string type
python string
Returns
-------
str : c_char_p
A char pointer that can be passed to C API
"""
return ctypes.c_char_p(string.encode('utf-8'))
def c_array(ctype, values):
"""Create ctypes array from a python array
Parameters
----------
ctype : ctypes data type
data type of the array we want to convert to
values : tuple or list
data content
Returns
-------
out : ctypes array
Created ctypes array
"""
return (ctype * len(values))(*values)
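# Quick illustrations of the two helpers above:
#   c_str("hello")                      # -> ctypes.c_char_p(b"hello")
#   c_array(ctypes.c_int, [1, 2, 3])    # -> ctypes array of three c_ints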
def decorate(func, fwrapped):
"""A wrapper call of decorator package, differs to call time
Parameters
----------
func : function
The original function
fwrapped : function
The wrapped function
"""
import decorator
return decorator.decorate(func, fwrapped)
#-----------------------------------------
# Base code for structured error handling.
#-----------------------------------------
# Maps error type to its constructor
ERROR_TYPE = {}
class TVMError(RuntimeError):
"""Default error thrown by TVM functions.
    TVMError will be raised if you do not give any error type specification.
"""
def register_error(func_name=None, cls=None):
"""Register an error class so it can be recognized by the ffi error handler.
Parameters
----------
func_name : str or function or class
The name of the error function.
cls : function
The function to create the class
Returns
-------
fregister : function
        Register function if ``cls`` is not specified.
Examples
--------
.. code-block:: python
@tvm.error.register_error
class MyError(RuntimeError):
pass
err_inst = tvm.error.create_ffi_error("MyError: xyz")
assert isinstance(err_inst, MyError)
"""
if callable(func_name):
cls = func_name
func_name = cls.__name__
def register(mycls):
"""internal register function"""
err_name = func_name if isinstance(func_name, str) else mycls.__name__
ERROR_TYPE[err_name] = mycls
return mycls
if cls is None:
return register
return register(cls)
def _valid_error_name(name):
"""Check whether name is a valid error name."""
return all(x.isalnum() or x in "_." for x in name)
def _find_error_type(line):
"""Find the error name given the first line of the error message.
Parameters
----------
line : str
The first line of error message.
Returns
-------
    name : str
        The error name, or None if no error type can be detected.
"""
end_pos = line.find(":")
if end_pos == -1:
return None
err_name = line[:end_pos]
if _valid_error_name(err_name):
return err_name
return None
def c2pyerror(err_msg):
"""Translate C API error message to python style.
Parameters
----------
err_msg : str
The error message.
Returns
-------
new_msg : str
Translated message.
err_type : str
Detected error type.
"""
arr = err_msg.split("\n")
if arr[-1] == "":
arr.pop()
err_type = _find_error_type(arr[0])
trace_mode = False
stack_trace = []
message = []
for line in arr:
if trace_mode:
if line.startswith(" "):
stack_trace.append(line)
else:
trace_mode = False
if not trace_mode:
if line.startswith("Stack trace"):
trace_mode = True
else:
message.append(line)
out_msg = ""
if stack_trace:
out_msg += "Traceback (most recent call last):\n"
out_msg += "\n".join(reversed(stack_trace)) + "\n"
out_msg += "\n".join(message)
return out_msg, err_type
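# Illustrative translation (message contents are made up): a C-side string
#   "ValueError: bad shape\nStack trace:\n  [bt] frame 0\n"
# comes back as err_type "ValueError" and a message of the form
#   "Traceback (most recent call last):\n  [bt] frame 0\nValueError: bad shape"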
def py2cerror(err_msg):
"""Translate python style error message to C style.
Parameters
----------
err_msg : str
The error message.
Returns
-------
new_msg : str
Translated message.
"""
arr = err_msg.split("\n")
if arr[-1] == "":
arr.pop()
trace_mode = False
stack_trace = []
message = []
for line in arr:
if trace_mode:
if line.startswith(" "):
stack_trace.append(line)
else:
trace_mode = False
if not trace_mode:
if line.find("Traceback") != -1:
trace_mode = True
else:
message.append(line)
# Remove the first error name if there are two of them.
# RuntimeError: MyErrorName: message => MyErrorName: message
head_arr = message[0].split(":", 3)
if len(head_arr) >= 3 and _valid_error_name(head_arr[1].strip()):
head_arr[1] = head_arr[1].strip()
message[0] = ":".join(head_arr[1:])
# reverse the stack trace.
out_msg = "\n".join(message)
if stack_trace:
out_msg += "\nStack trace:\n"
out_msg += "\n".join(reversed(stack_trace)) + "\n"
return out_msg
def get_last_ffi_error():
"""Create error object given result of TVMGetLastError.
Returns
-------
err : object
The error object based on the err_msg
"""
c_err_msg = py_str(_LIB.TVMGetLastError())
py_err_msg, err_type = c2pyerror(c_err_msg)
if err_type is not None and err_type.startswith("tvm.error."):
err_type = err_type[10:]
return ERROR_TYPE.get(err_type, TVMError)(py_err_msg)
def check_call(ret):
"""Check the return value of C API call
This function will raise exception when error occurs.
Wrap every API call with this function
Parameters
----------
ret : int
return value from API calls
"""
if ret != 0:
raise get_last_ffi_error()
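# Typical call-site pattern (illustrative; the actual C symbol depends on the
# API being wrapped):
#   check_call(_LIB.TVMSomeAPICall(args))
# A non-zero return code triggers get_last_ffi_error(), which maps the error
# type reported by the C side back to a registered Python exception class.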
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for our unit tests.
Allows overriding of CONF for use of fakes, and some black magic for
inline callbacks.
"""
import copy
import logging
import os
import uuid
import fixtures
import mock
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_log.fixture import logging_error as log_fixture
from oslo_messaging import conffixture as messaging_conffixture
from oslo_utils import strutils
from oslo_utils import timeutils
from oslotest import moxstubout
import six
import testtools
from cinder.common import config # noqa Need to register global_opts
from cinder import coordination
from cinder.db import migration
from cinder.db.sqlalchemy import api as sqla_api
from cinder import i18n
from cinder.objects import base as objects_base
from cinder import rpc
from cinder import service
from cinder.tests import fixtures as cinder_fixtures
from cinder.tests.unit import conf_fixture
from cinder.tests.unit import fake_notifier
CONF = cfg.CONF
_DB_CACHE = None
class TestingException(Exception):
pass
class Database(fixtures.Fixture):
def __init__(self, db_api, db_migrate, sql_connection):
self.sql_connection = sql_connection
# Suppress logging for test runs
migrate_logger = logging.getLogger('migrate')
migrate_logger.setLevel(logging.WARNING)
self.engine = db_api.get_engine()
self.engine.dispose()
conn = self.engine.connect()
db_migrate.db_sync()
self._DB = "".join(line for line in conn.connection.iterdump())
self.engine.dispose()
def setUp(self):
super(Database, self).setUp()
conn = self.engine.connect()
conn.connection.executescript(self._DB)
self.addCleanup(self.engine.dispose)
class TestCase(testtools.TestCase):
"""Test case base class for all unit tests."""
def _get_joined_notifier(self, *args, **kwargs):
# We create a new fake notifier but we join the notifications with
# the default notifier
notifier = fake_notifier.get_fake_notifier(*args, **kwargs)
notifier.notifications = self.notifier.notifications
return notifier
def setUp(self):
"""Run before each test method to initialize test environment."""
super(TestCase, self).setUp()
# Create default notifier
self.notifier = fake_notifier.get_fake_notifier()
# Mock rpc get notifier with fake notifier method that joins all
# notifications with the default notifier
p = mock.patch('cinder.rpc.get_notifier',
side_effect=self._get_joined_notifier)
p.start()
# Unit tests do not need to use lazy gettext
i18n.enable_lazy(False)
test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
try:
test_timeout = int(test_timeout)
except ValueError:
# If timeout value is invalid do not set a timeout.
test_timeout = 0
if test_timeout > 0:
self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
self.useFixture(fixtures.NestedTempfile())
self.useFixture(fixtures.TempHomeDir())
environ_enabled = (lambda var_name:
strutils.bool_from_string(os.environ.get(var_name)))
if environ_enabled('OS_STDOUT_CAPTURE'):
stdout = self.useFixture(fixtures.StringStream('stdout')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
if environ_enabled('OS_STDERR_CAPTURE'):
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
self.useFixture(log_fixture.get_logging_handle_error_fixture())
self.useFixture(cinder_fixtures.StandardLogging())
rpc.add_extra_exmods("cinder.tests.unit")
self.addCleanup(rpc.clear_extra_exmods)
self.addCleanup(rpc.cleanup)
self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
self.messaging_conf.transport_driver = 'fake'
self.messaging_conf.response_timeout = 15
self.useFixture(self.messaging_conf)
rpc.init(CONF)
# NOTE(geguileo): This is required because _determine_obj_version_cap
# and _determine_rpc_version_cap functions in cinder.rpc.RPCAPI cache
# versions in LAST_RPC_VERSIONS and LAST_OBJ_VERSIONS so we may have
# weird interactions between tests if we don't clear them before each
# test.
rpc.LAST_OBJ_VERSIONS = {}
rpc.LAST_RPC_VERSIONS = {}
conf_fixture.set_defaults(CONF)
CONF([], default_config_files=[])
# NOTE(vish): We need a better method for creating fixtures for tests
# now that we have some required db setup for the system
# to work properly.
self.start = timeutils.utcnow()
CONF.set_default('connection', 'sqlite://', 'database')
CONF.set_default('sqlite_synchronous', False, 'database')
global _DB_CACHE
if not _DB_CACHE:
_DB_CACHE = Database(sqla_api, migration,
sql_connection=CONF.database.connection)
self.useFixture(_DB_CACHE)
# NOTE(danms): Make sure to reset us back to non-remote objects
# for each test to avoid interactions. Also, backup the object
# registry.
objects_base.CinderObject.indirection_api = None
self._base_test_obj_backup = copy.copy(
objects_base.CinderObjectRegistry._registry._obj_classes)
self.addCleanup(self._restore_obj_registry)
# emulate some of the mox stuff, we can't use the metaclass
# because it screws with our generators
mox_fixture = self.useFixture(moxstubout.MoxStubout())
self.mox = mox_fixture.mox
self.stubs = mox_fixture.stubs
self.addCleanup(CONF.reset)
self.addCleanup(self._common_cleanup)
self.injected = []
self._services = []
fake_notifier.mock_notifier(self)
self.override_config('fatal_exception_format_errors', True)
# This will be cleaned up by the NestedTempfile fixture
lock_path = self.useFixture(fixtures.TempDir()).path
self.fixture = self.useFixture(
config_fixture.Config(lockutils.CONF))
self.fixture.config(lock_path=lock_path,
group='oslo_concurrency')
lockutils.set_defaults(lock_path)
self.override_config('policy_file',
os.path.join(
os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
),
'cinder/tests/unit/policy.json'),
group='oslo_policy')
self._disable_osprofiler()
self._disallow_invalid_uuids()
# NOTE(geguileo): This is required because common get_by_id method in
# cinder.db.sqlalchemy.api caches get methods and if we use a mocked
# get method in one test it would carry on to the next test. So we
# clear out the cache.
sqla_api._GET_METHODS = {}
self.override_config('backend_url', 'file://' + lock_path,
group='coordination')
coordination.COORDINATOR.start()
self.addCleanup(coordination.COORDINATOR.stop)
def _restore_obj_registry(self):
objects_base.CinderObjectRegistry._registry._obj_classes = \
self._base_test_obj_backup
def _disable_osprofiler(self):
"""Disable osprofiler.
osprofiler should not run for unit tests.
"""
side_effect = lambda value: value
mock_decorator = mock.MagicMock(side_effect=side_effect)
p = mock.patch("osprofiler.profiler.trace_cls",
return_value=mock_decorator)
p.start()
def _disallow_invalid_uuids(self):
def catch_uuid_warning(message, *args, **kwargs):
ovo_message = "invalid UUID. Using UUIDFields with invalid UUIDs " \
"is no longer supported"
if ovo_message in message:
raise AssertionError(message)
p = mock.patch("warnings.warn",
side_effect=catch_uuid_warning)
p.start()
def _common_cleanup(self):
"""Runs after each test method to tear down test environment."""
# Stop any timers
for x in self.injected:
try:
x.stop()
except AssertionError:
pass
# Kill any services
for x in self._services:
try:
x.kill()
except Exception:
pass
# Delete attributes that don't start with _ so they don't pin
# memory around unnecessarily for the duration of the test
# suite
for key in [k for k in self.__dict__.keys() if k[0] != '_']:
del self.__dict__[key]
def override_config(self, name, override, group=None):
"""Cleanly override CONF variables."""
CONF.set_override(name, override, group)
self.addCleanup(CONF.clear_override, name, group)
def flags(self, **kw):
"""Override CONF variables for a test."""
for k, v in kw.items():
self.override_config(k, v)
def start_service(self, name, host=None, **kwargs):
        host = host or uuid.uuid4().hex
kwargs.setdefault('host', host)
kwargs.setdefault('binary', 'cinder-%s' % name)
svc = service.Service.create(**kwargs)
svc.start()
self._services.append(svc)
return svc
def mock_object(self, obj, attr_name, *args, **kwargs):
"""Use python mock to mock an object attribute
Mocks the specified objects attribute with the given value.
Automatically performs 'addCleanup' for the mock.
"""
patcher = mock.patch.object(obj, attr_name, *args, **kwargs)
result = patcher.start()
self.addCleanup(patcher.stop)
return result
def patch(self, path, *args, **kwargs):
"""Use python mock to mock a path with automatic cleanup."""
patcher = mock.patch(path, *args, **kwargs)
result = patcher.start()
self.addCleanup(patcher.stop)
return result
# Useful assertions
def assertDictMatch(self, d1, d2, approx_equal=False, tolerance=0.001):
"""Assert two dicts are equivalent.
This is a 'deep' match in the sense that it handles nested
dictionaries appropriately.
NOTE:
            If you don't care about (or don't know) a given value, you can specify
the string DONTCARE as the value. This will cause that dict-item
to be skipped.
"""
def raise_assertion(msg):
d1str = d1
d2str = d2
base_msg = ('Dictionaries do not match. %(msg)s d1: %(d1str)s '
'd2: %(d2str)s' %
{'msg': msg, 'd1str': d1str, 'd2str': d2str})
raise AssertionError(base_msg)
d1keys = set(d1.keys())
d2keys = set(d2.keys())
if d1keys != d2keys:
d1only = d1keys - d2keys
d2only = d2keys - d1keys
raise_assertion('Keys in d1 and not d2: %(d1only)s. '
'Keys in d2 and not d1: %(d2only)s' %
{'d1only': d1only, 'd2only': d2only})
for key in d1keys:
d1value = d1[key]
d2value = d2[key]
try:
error = abs(float(d1value) - float(d2value))
within_tolerance = error <= tolerance
except (ValueError, TypeError):
# If both values aren't convertible to float, just ignore
# ValueError if arg is a str, TypeError if it's something else
# (like None)
within_tolerance = False
if hasattr(d1value, 'keys') and hasattr(d2value, 'keys'):
self.assertDictMatch(d1value, d2value)
elif 'DONTCARE' in (d1value, d2value):
continue
elif approx_equal and within_tolerance:
continue
elif d1value != d2value:
raise_assertion("d1['%(key)s']=%(d1value)s != "
"d2['%(key)s']=%(d2value)s" %
{
'key': key,
'd1value': d1value,
'd2value': d2value,
})
def assert_notify_called(self, mock_notify, calls):
for i in range(0, len(calls)):
mock_call = mock_notify.call_args_list[i]
call = calls[i]
posargs = mock_call[0]
self.assertEqual(call[0], posargs[0])
self.assertEqual(call[1], posargs[2])
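# Illustrative sketch (not part of the test suite): typical use of the helper
# methods above inside a test method.  FakeDriver and do_create are
# hypothetical placeholders, not real cinder objects.
#
#     def test_example(self):
#         self.override_config('fatal_exception_format_errors', False)
#         fake_create = self.mock_object(FakeDriver, 'create_volume',
#                                        return_value={'id': 'vol-1', 'size': 1})
#         result = do_create(FakeDriver())
#         fake_create.assert_called_once_with(mock.ANY)
#         # DONTCARE skips fields we do not want to pin down.
#         self.assertDictMatch({'id': 'DONTCARE', 'size': 1}, result)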
class ModelsObjectComparatorMixin(object):
def _dict_from_object(self, obj, ignored_keys):
if ignored_keys is None:
ignored_keys = []
if isinstance(obj, dict):
items = obj.items()
else:
items = obj.iteritems()
return {k: v for k, v in items
if k not in ignored_keys}
def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
obj1 = self._dict_from_object(obj1, ignored_keys)
obj2 = self._dict_from_object(obj2, ignored_keys)
self.assertEqual(
len(obj1), len(obj2),
"Keys mismatch: %s" % six.text_type(
set(obj1.keys()) ^ set(obj2.keys())))
for key, value in obj1.items():
self.assertEqual(value, obj2[key])
def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None,
msg=None):
obj_to_dict = lambda o: self._dict_from_object(o, ignored_keys)
sort_key = lambda d: [d[k] for k in sorted(d)]
conv_and_sort = lambda obj: sorted(map(obj_to_dict, obj), key=sort_key)
self.assertListEqual(conv_and_sort(objs1), conv_and_sort(objs2),
msg=msg)
def _assertEqualListsOfPrimitivesAsSets(self, primitives1, primitives2):
self.assertEqual(len(primitives1), len(primitives2))
for primitive in primitives1:
self.assertIn(primitive, primitives2)
for primitive in primitives2:
self.assertIn(primitive, primitives1)
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import mobile_app_category_constant
from google.ads.googleads.v8.services.types import (
mobile_app_category_constant_service,
)
from .transports.base import (
MobileAppCategoryConstantServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import MobileAppCategoryConstantServiceGrpcTransport
class MobileAppCategoryConstantServiceClientMeta(type):
"""Metaclass for the MobileAppCategoryConstantService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[MobileAppCategoryConstantServiceTransport]]
_transport_registry["grpc"] = MobileAppCategoryConstantServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[MobileAppCategoryConstantServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class MobileAppCategoryConstantServiceClient(
metaclass=MobileAppCategoryConstantServiceClientMeta
):
"""Service to fetch mobile app category constants."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
MobileAppCategoryConstantServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
MobileAppCategoryConstantServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> MobileAppCategoryConstantServiceTransport:
"""Return the transport used by the client instance.
Returns:
MobileAppCategoryConstantServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def mobile_app_category_constant_path(mobile_app_category_id: str,) -> str:
"""Return a fully-qualified mobile_app_category_constant string."""
return "mobileAppCategoryConstants/{mobile_app_category_id}".format(
mobile_app_category_id=mobile_app_category_id,
)
@staticmethod
def parse_mobile_app_category_constant_path(path: str) -> Dict[str, str]:
"""Parse a mobile_app_category_constant path into its component segments."""
m = re.match(
r"^mobileAppCategoryConstants/(?P<mobile_app_category_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[
str, MobileAppCategoryConstantServiceTransport, None
] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the mobile app category constant service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.MobileAppCategoryConstantServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
)
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, MobileAppCategoryConstantServiceTransport):
# transport is a MobileAppCategoryConstantServiceTransport instance.
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = MobileAppCategoryConstantServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_mobile_app_category_constant(
self,
request: mobile_app_category_constant_service.GetMobileAppCategoryConstantRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> mobile_app_category_constant.MobileAppCategoryConstant:
r"""Returns the requested mobile app category constant.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.GetMobileAppCategoryConstantRequest`):
The request object. Request message for
[MobileAppCategoryConstantService.GetMobileAppCategoryConstant][google.ads.googleads.v8.services.MobileAppCategoryConstantService.GetMobileAppCategoryConstant].
resource_name (:class:`str`):
Required. Resource name of the mobile
app category constant to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.resources.types.MobileAppCategoryConstant:
A mobile application category
constant.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a mobile_app_category_constant_service.GetMobileAppCategoryConstantRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
mobile_app_category_constant_service.GetMobileAppCategoryConstantRequest,
):
request = mobile_app_category_constant_service.GetMobileAppCategoryConstantRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_mobile_app_category_constant
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("MobileAppCategoryConstantServiceClient",)
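# Minimal usage sketch (illustrative only, not part of the generated client):
# the credentials file name and the category ID "60008" below are hypothetical
# placeholders.
def _example_get_mobile_app_category_constant():  # pragma: no cover
    client = MobileAppCategoryConstantServiceClient.from_service_account_file(
        "service-account.json"
    )
    resource_name = client.mobile_app_category_constant_path("60008")
    return client.get_mobile_app_category_constant(resource_name=resource_name)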
|
|
import time
from dataclasses import dataclass
from typing import Iterator, List
import wsproto
import wsproto.extensions
import wsproto.frame_protocol
import wsproto.utilities
from mitmproxy import connection, http, websocket
from mitmproxy.proxy import commands, events, layer
from mitmproxy.proxy.commands import StartHook
from mitmproxy.proxy.context import Context
from mitmproxy.proxy.events import MessageInjected
from mitmproxy.proxy.utils import expect
from wsproto import ConnectionState
from wsproto.frame_protocol import Opcode
@dataclass
class WebsocketStartHook(StartHook):
"""
A WebSocket connection has commenced.
"""
flow: http.HTTPFlow
@dataclass
class WebsocketMessageHook(StartHook):
"""
Called when a WebSocket message is received from the client or
server. The most recent message will be flow.messages[-1]. The
message is user-modifiable. Currently there are two types of
messages, corresponding to the BINARY and TEXT frame types.
"""
flow: http.HTTPFlow
@dataclass
class WebsocketEndHook(StartHook):
"""
A WebSocket connection has ended.
You can check `flow.websocket.close_code` to determine why it ended.
"""
flow: http.HTTPFlow
class WebSocketMessageInjected(MessageInjected[websocket.WebSocketMessage]):
"""
The user has injected a custom WebSocket message.
"""
class WebsocketConnection(wsproto.Connection):
"""
A very thin wrapper around wsproto.Connection:
- we keep the underlying connection as an attribute for easy access.
- we add a framebuffer for incomplete messages
- we wrap .send() so that we can directly yield it.
"""
conn: connection.Connection
frame_buf: List[bytes]
def __init__(self, *args, conn: connection.Connection, **kwargs):
        super().__init__(*args, **kwargs)
self.conn = conn
self.frame_buf = [b""]
def send2(self, event: wsproto.events.Event) -> commands.SendData:
data = self.send(event)
return commands.SendData(self.conn, data)
def __repr__(self):
return f"WebsocketConnection<{self.state.name}, {self.conn}>"
class WebsocketLayer(layer.Layer):
"""
WebSocket layer that intercepts and relays messages.
"""
flow: http.HTTPFlow
client_ws: WebsocketConnection
server_ws: WebsocketConnection
def __init__(self, context: Context, flow: http.HTTPFlow):
super().__init__(context)
self.flow = flow
@expect(events.Start)
def start(self, _) -> layer.CommandGenerator[None]:
client_extensions = []
server_extensions = []
# Parse extension headers. We only support deflate at the moment and ignore everything else.
assert self.flow.response # satisfy type checker
ext_header = self.flow.response.headers.get("Sec-WebSocket-Extensions", "")
if ext_header:
for ext in wsproto.utilities.split_comma_header(ext_header.encode("ascii", "replace")):
ext_name = ext.split(";", 1)[0].strip()
if ext_name == wsproto.extensions.PerMessageDeflate.name:
client_deflate = wsproto.extensions.PerMessageDeflate()
client_deflate.finalize(ext)
client_extensions.append(client_deflate)
server_deflate = wsproto.extensions.PerMessageDeflate()
server_deflate.finalize(ext)
server_extensions.append(server_deflate)
else:
yield commands.Log(f"Ignoring unknown WebSocket extension {ext_name!r}.")
self.client_ws = WebsocketConnection(wsproto.ConnectionType.SERVER, client_extensions, conn=self.context.client)
self.server_ws = WebsocketConnection(wsproto.ConnectionType.CLIENT, server_extensions, conn=self.context.server)
yield WebsocketStartHook(self.flow)
self._handle_event = self.relay_messages
_handle_event = start
@expect(events.DataReceived, events.ConnectionClosed, WebSocketMessageInjected)
def relay_messages(self, event: events.Event) -> layer.CommandGenerator[None]:
assert self.flow.websocket # satisfy type checker
if isinstance(event, events.ConnectionEvent):
from_client = event.connection == self.context.client
elif isinstance(event, WebSocketMessageInjected):
from_client = event.message.from_client
else:
raise AssertionError(f"Unexpected event: {event}")
from_str = 'client' if from_client else 'server'
if from_client:
src_ws = self.client_ws
dst_ws = self.server_ws
else:
src_ws = self.server_ws
dst_ws = self.client_ws
if isinstance(event, events.DataReceived):
src_ws.receive_data(event.data)
elif isinstance(event, events.ConnectionClosed):
src_ws.receive_data(None)
elif isinstance(event, WebSocketMessageInjected):
fragmentizer = Fragmentizer([], event.message.type == Opcode.TEXT)
src_ws._events.extend(
fragmentizer(event.message.content)
)
else: # pragma: no cover
raise AssertionError(f"Unexpected event: {event}")
for ws_event in src_ws.events():
if isinstance(ws_event, wsproto.events.Message):
is_text = isinstance(ws_event.data, str)
if is_text:
typ = Opcode.TEXT
src_ws.frame_buf[-1] += ws_event.data.encode()
else:
typ = Opcode.BINARY
src_ws.frame_buf[-1] += ws_event.data
if ws_event.message_finished:
content = b"".join(src_ws.frame_buf)
fragmentizer = Fragmentizer(src_ws.frame_buf, is_text)
src_ws.frame_buf = [b""]
message = websocket.WebSocketMessage(typ, from_client, content)
self.flow.websocket.messages.append(message)
yield WebsocketMessageHook(self.flow)
if not message.dropped:
for msg in fragmentizer(message.content):
yield dst_ws.send2(msg)
elif ws_event.frame_finished:
src_ws.frame_buf.append(b"")
elif isinstance(ws_event, (wsproto.events.Ping, wsproto.events.Pong)):
yield commands.Log(
f"Received WebSocket {ws_event.__class__.__name__.lower()} from {from_str} "
f"(payload: {bytes(ws_event.payload)!r})"
)
yield dst_ws.send2(ws_event)
elif isinstance(ws_event, wsproto.events.CloseConnection):
self.flow.websocket.timestamp_end = time.time()
self.flow.websocket.closed_by_client = from_client
self.flow.websocket.close_code = ws_event.code
self.flow.websocket.close_reason = ws_event.reason
for ws in [self.server_ws, self.client_ws]:
if ws.state in {ConnectionState.OPEN, ConnectionState.REMOTE_CLOSING}:
# response == original event, so no need to differentiate here.
yield ws.send2(ws_event)
yield commands.CloseConnection(ws.conn)
yield WebsocketEndHook(self.flow)
self._handle_event = self.done
else: # pragma: no cover
raise AssertionError(f"Unexpected WebSocket event: {ws_event}")
@expect(events.DataReceived, events.ConnectionClosed, WebSocketMessageInjected)
def done(self, _) -> layer.CommandGenerator[None]:
yield from ()
class Fragmentizer:
"""
Theory (RFC 6455):
Unless specified otherwise by an extension, frames have no semantic
meaning. An intermediary might coalesce and/or split frames, [...]
Practice:
Some WebSocket servers reject large payload sizes.
Other WebSocket servers reject CONTINUATION frames.
As a workaround, we either retain the original chunking or, if the payload has been modified, use ~4kB chunks.
If one deals with web servers that do not support CONTINUATION frames, addons need to monkeypatch FRAGMENT_SIZE
if they need to modify the message.
"""
    # A bit less than 4 kB to accommodate headers.
FRAGMENT_SIZE = 4000
def __init__(self, fragments: List[bytes], is_text: bool):
self.fragment_lengths = [len(x) for x in fragments]
self.is_text = is_text
def msg(self, data: bytes, message_finished: bool):
if self.is_text:
data_str = data.decode(errors="replace")
return wsproto.events.TextMessage(data_str, message_finished=message_finished)
else:
return wsproto.events.BytesMessage(data, message_finished=message_finished)
def __call__(self, content: bytes) -> Iterator[wsproto.events.Message]:
if len(content) == sum(self.fragment_lengths):
# message has the same length, we can reuse the same sizes
offset = 0
for fl in self.fragment_lengths[:-1]:
yield self.msg(content[offset:offset + fl], False)
offset += fl
yield self.msg(content[offset:], True)
else:
offset = 0
total = len(content) - self.FRAGMENT_SIZE
while offset < total:
yield self.msg(content[offset:offset + self.FRAGMENT_SIZE], False)
offset += self.FRAGMENT_SIZE
yield self.msg(content[offset:], True)
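# Illustrative sketch (not part of mitmproxy): how Fragmentizer re-chunks a
# payload whose length no longer matches the recorded fragment lengths.  The
# content is then emitted in FRAGMENT_SIZE chunks, and only the final chunk
# carries message_finished=True.
def _demo_fragmentizer() -> None:
    frag = Fragmentizer([], is_text=False)
    chunks = list(frag(b"x" * 10000))
    assert [len(c.data) for c in chunks] == [4000, 4000, 2000]
    assert [c.message_finished for c in chunks] == [False, False, True]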
|
|
#!/usr/bin/env python
#
# Copyright (c) 2015 Juniper Networks, Inc. All rights reserved.
#
import logging
import logging.handlers
import subprocess
import sys
import platform
import argparse
import os
import time
# Parses nodetool status output and returns a dict
# containing the nodes and their status
def parse_nodetool_status_output(output):
""" Following is sample nodetool status output:
Datacenter: datacenter1
=======================
Status=Up/Down
|/ State=Normal/Leaving/Joining/Moving
-- Address Load Tokens Owns Host ID Rack
UN 10.84.27.27 28.06 GB 256 30.3% 2905fcf3-b702-4a62-9eb9-9f2396f17665 rack1
UN 10.84.27.8 28.41 GB 256 32.2% 46999810-e412-41a5-8d50-fe43c5933945 rack1
UN 10.84.27.9 29.61 GB 256 37.5% 205d5521-1ccc-40b1-98fb-fb2256d776de rack1
"""
# Extract the nodes (Find the header and start from there)
olines = output.splitlines()
olcounter = 0
for line in olines:
line_info = line.split()
if (len(line_info) >= 3 and line_info[1] == "Address" and
line_info[2] == "Load"):
olcounter += 1
break
olcounter += 1
if olcounter == 0:
logging.error("FAILED to parse: {output}".format(output=output))
return {}
nodes = olines[olcounter:]
# Create a node status dict indexed by Host ID (column 6 or column 5
# depending on the output)
"""
UN 10.84.27.8 28.41 GB 256 32.2% 46999810-e412-41a5-8d50-fe43c5933945 rack1
DN 10.84.23.59 ? 256 30.3% 315f045a-ea54-42f7-9c05-72c8ac4b34b6 rack1
"""
nodes_status = {}
for node in nodes:
node_info = node.split()
node_info_len = len(node_info)
if node_info_len == 8:
node_id = node_info[6]
elif node_info_len == 7:
node_id = node_info[5]
else:
logging.error("FAILED to parse: {line}".format(line=node))
return {}
# Node status is column 0
nodes_status[node_id] = node_info[0]
return nodes_status
#end parse_nodetool_status_output
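# Illustrative sketch: running output shaped like the docstring sample above
# through the parser yields a dict keyed by Host ID, e.g.
# {'2905fcf3-b702-4a62-9eb9-9f2396f17665': 'UN'}.  The sample text below is
# abbreviated and only meant to show the shape of the result.
def example_parse_nodetool_status():
    sample = (
        "Datacenter: datacenter1\n"
        "=======================\n"
        "Status=Up/Down\n"
        "|/ State=Normal/Leaving/Joining/Moving\n"
        "--  Address      Load      Tokens  Owns   Host ID  Rack\n"
        "UN  10.84.27.27  28.06 GB  256     30.3%  "
        "2905fcf3-b702-4a62-9eb9-9f2396f17665  rack1\n"
    )
    return parse_nodetool_status_output(sample)
#end example_parse_nodetool_status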
# Determine the number of UP nodes and verify that they are
# greater than or equal to RF/2 + 1 for QUORUM reads/writes
# to succeed. If RF is not passed, assumption is that RF is
# equal to number of nodes.
def is_cluster_partitioned(options):
cmd = [options.nodetool, "-h", options.host, "status"]
success, cmd, stdout, stderr = run_command(*cmd)
if not success or not stdout:
logging.error("FAILED: {cmd}".format(cmd=cmd))
logging.error(stderr)
return True
nodes_status = parse_nodetool_status_output(stdout)
if options.replication_factor:
num_nodes = options.replication_factor
else:
num_nodes = len(nodes_status)
nodes_up_status = dict((node_id, node_status) for \
node_id, node_status in nodes_status.items() if 'U' in node_status)
num_up_nodes = len(nodes_up_status)
    if num_up_nodes < (num_nodes // 2) + 1:
return True
else:
return False
#end is_cluster_partitioned
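# Quorum arithmetic sketch (illustrative helper, not called anywhere): QUORUM
# needs floor(RF/2) + 1 live replicas, so RF=3 tolerates one down node and
# RF=5 tolerates two.  This mirrors the check in is_cluster_partitioned above.
def quorum_nodes_needed(replication_factor):
    return (replication_factor // 2) + 1
#end quorum_nodes_needed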
def get_cassandra_secs_since_up(options):
secs_since_up = 0
if options.status_up_file and os.path.exists(options.status_up_file):
statinfo = os.stat(options.status_up_file)
last_up_secs = int(statinfo.st_atime)
current_time_secs = int(time.time())
secs_since_up = current_time_secs - last_up_secs
return secs_since_up
#end get_cassandra_secs_since_up
def update_status(options):
# Find the node ID from nodetool info
cmd = [options.nodetool, "-h", options.host, "info",
"|", "grep", "ID", "|", "awk \'{print $3}\'"]
success, cmd, stdout, stderr = run_command(*cmd)
if not success or not stdout:
logging.error("FAILED: {cmd}".format(cmd=cmd))
logging.error(stderr)
return 1
node_id = stdout.strip()
# Run nodetool status and check the status of node ID
cmd = [options.nodetool, "-h", options.host, "status",
"|", "grep", node_id, "|", "awk \'{print $1}\'"]
success, cmd, stdout, stderr = run_command(*cmd)
if not success or not stdout:
logging.error("FAILED: {cmd}".format(cmd=cmd))
logging.error(stderr)
return 2
self_status = stdout.strip()
# Update status_up_file if the status is UP and the cluster is not
# partitioned
partitioned = is_cluster_partitioned(options)
if 'U' in self_status and not partitioned and options.status_up_file:
cmd = ["touch", options.status_up_file]
success, cmd, _, stderr = run_command(*cmd)
if not success:
logging.error("FAILED: {cmd}".format(cmd=cmd))
logging.error(stderr)
return 3
if options.debug:
logging.debug("STATUS: {status}, PARTITIONED: {partitioned}".format(
status=self_status, partitioned=partitioned))
return 0
#end update_status
def verify_up_status(options):
    # If the status has NOT been UP for max_allowed_down_seconds, stop
    # cassandra
secs_since_up = get_cassandra_secs_since_up(options)
if secs_since_up >= options.max_allowed_down_seconds:
cmd = ["service", "contrail-database", "stop"]
success, cmd, _, stderr = run_command(*cmd)
if not success:
logging.error("FAILED: {cmd}".format(cmd=cmd))
logging.error(stderr)
return 4
if options.debug:
logging.debug("SECS SINCE UP: {secs}".format(secs=secs_since_up))
return 0
#end verify_up_status
def status(options):
update_status(options)
ret = verify_up_status(options)
return ret
#end status
def run_command(*command):
"""Execute a shell command and return the output
:param command: the command to be run and all of the arguments
:returns: success_boolean, command_string, stdout, stderr
"""
cmd = " ".join(command)
logging.debug("run_command: " + cmd)
proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
return proc.returncode == 0, cmd, stdout, stderr
#end run_command
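# Illustrative usage (hypothetical command, not part of the monitoring flow):
# the arguments are joined into one shell string, so pipes can be embedded.
def example_run_command():
    success, cmd, stdout, stderr = run_command("echo", "hello", "|", "wc", "-c")
    if not success:
        logging.error("FAILED: {cmd}".format(cmd=cmd))
    return stdout
#end example_run_command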
def setup_logging(option_group):
"""Sets up logging in a syslog format by log level
:param option_group: options as returned by the OptionParser
"""
stderr_log_format = "%(levelname) -10s %(asctime)s %(funcName) -20s line:%(lineno) -5d: %(message)s"
file_log_format = "%(asctime)s - %(levelname)s - %(message)s"
logger = logging.getLogger()
if option_group.debug:
logger.setLevel(level=logging.DEBUG)
elif option_group.verbose:
logger.setLevel(level=logging.INFO)
else:
logger.setLevel(level=logging.WARNING)
    handlers = []
    if option_group.syslog:
        handlers.append(
            logging.handlers.SysLogHandler(facility=option_group.syslog))
        # Use standard format here because timestamp and level will be added by syslogd.
    if option_group.logfile:
        handlers.append(logging.FileHandler(option_group.logfile))
        handlers[-1].setFormatter(logging.Formatter(file_log_format))
    if not handlers:
        handlers.append(logging.StreamHandler())
        handlers[-1].setFormatter(logging.Formatter(stderr_log_format))
for handler in handlers:
logger.addHandler(handler)
#end setup_logging
def main():
"""Validate arguments and check status
"""
parser = argparse.ArgumentParser(
# print script description with -h/--help
description=__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-H", "--host", dest="host", default=platform.node(),
metavar="HOST",
help="Hostname to check status")
parser.add_argument("-n", "--nodetool", dest="nodetool", default="nodetool",
metavar="NODETOOL",
help="Path to nodetool")
parser.add_argument("--replication-factor", dest="replication_factor",
metavar="NUM", type=int,
help="Maximum replication factor of any keyspace")
parser.add_argument("--max-allowed-down-seconds", dest="max_allowed_down_seconds",
metavar="SECONDS", type=int, default=int(864000*0.9),
help="Maximum seconds allowed for cassandra status to"
" not be UP before stopping cassandra")
parser.add_argument("--status-up-file", dest="status_up_file",
metavar="FILENAME", default="/var/log/cassandra/status-up",
help="Record up status to file")
parser.add_argument("-v", "--verbose", dest="verbose", action='store_true',
default=False, help="Verbose output")
parser.add_argument("-d", "--debug", dest="debug", action='store_true',
default=False, help="Debugging output")
parser.add_argument("--syslog", dest="syslog", metavar="FACILITY",
help="Send log messages to the syslog")
parser.add_argument("--log-file", dest="logfile", metavar="FILENAME",
help="Send log messages to a file")
options = parser.parse_args()
setup_logging(options)
ret = status(options)
    sys.exit(ret)
#end main
if __name__ == "__main__":
main()
|
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import subprocess
import mock
import netaddr
from oslo_config import cfg
from rally.plugins.openstack.scenarios.vm import utils
from tests.unit import test
VMTASKS_UTILS = "rally.plugins.openstack.scenarios.vm.utils"
CONF = cfg.CONF
class VMScenarioTestCase(test.ScenarioTestCase):
@mock.patch("%s.open" % VMTASKS_UTILS,
side_effect=mock.mock_open(), create=True)
def test__run_command_over_ssh_script_file(self, mock_open):
mock_ssh = mock.MagicMock()
vm_scenario = utils.VMScenario(self.context)
vm_scenario._run_command_over_ssh(
mock_ssh,
{
"script_file": "foobar",
"interpreter": ["interpreter", "interpreter_arg"],
"command_args": ["arg1", "arg2"]
}
)
mock_ssh.execute.assert_called_once_with(
["interpreter", "interpreter_arg", "arg1", "arg2"],
stdin=mock_open.side_effect())
mock_open.assert_called_once_with("foobar", "rb")
@mock.patch("%s.six.moves.StringIO" % VMTASKS_UTILS)
def test__run_command_over_ssh_script_inline(self, mock_string_io):
mock_ssh = mock.MagicMock()
vm_scenario = utils.VMScenario(self.context)
vm_scenario._run_command_over_ssh(
mock_ssh,
{
"script_inline": "foobar",
"interpreter": ["interpreter", "interpreter_arg"],
"command_args": ["arg1", "arg2"]
}
)
mock_ssh.execute.assert_called_once_with(
["interpreter", "interpreter_arg", "arg1", "arg2"],
stdin=mock_string_io.return_value)
mock_string_io.assert_called_once_with("foobar")
def test__run_command_over_ssh_remote_path(self):
mock_ssh = mock.MagicMock()
vm_scenario = utils.VMScenario(self.context)
vm_scenario._run_command_over_ssh(
mock_ssh,
{
"remote_path": ["foo", "bar"],
"command_args": ["arg1", "arg2"]
}
)
mock_ssh.execute.assert_called_once_with(
["foo", "bar", "arg1", "arg2"],
stdin=None)
def test__run_command_over_ssh_remote_path_copy(self):
mock_ssh = mock.MagicMock()
vm_scenario = utils.VMScenario(self.context)
vm_scenario._run_command_over_ssh(
mock_ssh,
{
"remote_path": ["foo", "bar"],
"local_path": "/bin/false",
"command_args": ["arg1", "arg2"]
}
)
mock_ssh.put_file.assert_called_once_with(
"/bin/false", "bar", mode=0o755
)
mock_ssh.execute.assert_called_once_with(
["foo", "bar", "arg1", "arg2"],
stdin=None)
def test__wait_for_ssh(self):
ssh = mock.MagicMock()
vm_scenario = utils.VMScenario(self.context)
vm_scenario._wait_for_ssh(ssh)
ssh.wait.assert_called_once_with(120, 1)
def test__wait_for_ping(self):
vm_scenario = utils.VMScenario(self.context)
vm_scenario._ping_ip_address = mock.Mock(return_value=True)
vm_scenario._wait_for_ping(netaddr.IPAddress("1.2.3.4"))
self.mock_wait_for_status.mock.assert_called_once_with(
utils.Host("1.2.3.4"),
ready_statuses=[utils.Host.ICMP_UP_STATUS],
update_resource=utils.Host.update_status,
timeout=CONF.benchmark.vm_ping_timeout,
check_interval=CONF.benchmark.vm_ping_poll_interval)
@mock.patch(VMTASKS_UTILS + ".VMScenario._run_command_over_ssh")
@mock.patch("rally.common.sshutils.SSH")
def test__run_command(self, mock_sshutils_ssh,
mock_vm_scenario__run_command_over_ssh):
vm_scenario = utils.VMScenario(self.context)
vm_scenario.context = {"user": {"keypair": {"private": "ssh"}}}
vm_scenario._run_command("1.2.3.4", 22, "username", "password",
command={"script_file": "foo",
"interpreter": "bar"})
mock_sshutils_ssh.assert_called_once_with(
"username", "1.2.3.4",
port=22, pkey="ssh", password="password")
mock_sshutils_ssh.return_value.wait.assert_called_once_with(120, 1)
mock_vm_scenario__run_command_over_ssh.assert_called_once_with(
mock_sshutils_ssh.return_value,
{"script_file": "foo", "interpreter": "bar"})
def get_scenario(self):
server = mock.Mock(
networks={"foo_net": "foo_data"},
addresses={"foo_net": [{"addr": "foo_ip"}]},
tenant_id="foo_tenant"
)
scenario = utils.VMScenario(self.context)
scenario._boot_server = mock.Mock(return_value=server)
scenario._delete_server = mock.Mock()
scenario._associate_floating_ip = mock.Mock()
scenario._wait_for_ping = mock.Mock()
return scenario, server
def test__boot_server_with_fip_without_networks(self):
scenario, server = self.get_scenario()
server.networks = {}
self.assertRaises(RuntimeError,
scenario._boot_server_with_fip,
"foo_image", "foo_flavor", foo_arg="foo_value")
scenario._boot_server.assert_called_once_with(
"foo_image", "foo_flavor",
foo_arg="foo_value", auto_assign_nic=True)
def test__boot_server_with_fixed_ip(self):
scenario, server = self.get_scenario()
scenario._attach_floating_ip = mock.Mock()
server, ip = scenario._boot_server_with_fip(
"foo_image", "foo_flavor", floating_network="ext_network",
use_floating_ip=False, foo_arg="foo_value")
self.assertEqual(ip, {"ip": "foo_ip", "id": None,
"is_floating": False})
scenario._boot_server.assert_called_once_with(
"foo_image", "foo_flavor",
auto_assign_nic=True, foo_arg="foo_value")
self.assertEqual(scenario._attach_floating_ip.mock_calls, [])
def test__boot_server_with_fip(self):
scenario, server = self.get_scenario()
scenario._attach_floating_ip = mock.Mock(
return_value={"id": "foo_id", "ip": "foo_ip"})
server, ip = scenario._boot_server_with_fip(
"foo_image", "foo_flavor", floating_network="ext_network",
use_floating_ip=True, foo_arg="foo_value")
self.assertEqual(ip, {"ip": "foo_ip", "id": "foo_id",
"is_floating": True})
scenario._boot_server.assert_called_once_with(
"foo_image", "foo_flavor",
auto_assign_nic=True, foo_arg="foo_value")
scenario._attach_floating_ip.assert_called_once_with(
server, "ext_network")
def test__delete_server_with_fixed_ip(self):
ip = {"ip": "foo_ip", "id": None, "is_floating": False}
scenario, server = self.get_scenario()
scenario._delete_floating_ip = mock.Mock()
scenario._delete_server_with_fip(server, ip, force_delete=True)
self.assertEqual(scenario._delete_floating_ip.mock_calls, [])
scenario._delete_server.assert_called_once_with(server, force=True)
def test__delete_server_with_fip(self):
fip = {"ip": "foo_ip", "id": "foo_id", "is_floating": True}
scenario, server = self.get_scenario()
scenario._delete_floating_ip = mock.Mock()
scenario._delete_server_with_fip(server, fip, force_delete=True)
scenario._delete_floating_ip.assert_called_once_with(server, fip)
scenario._delete_server.assert_called_once_with(server, force=True)
@mock.patch(VMTASKS_UTILS + ".network_wrapper.wrap")
def test__attach_floating_ip(self, mock_wrap):
scenario, server = self.get_scenario()
netwrap = mock_wrap.return_value
netwrap.create_floating_ip.return_value = {
"id": "foo_id", "ip": "foo_ip"}
scenario._attach_floating_ip(
server, floating_network="bar_network")
mock_wrap.assert_called_once_with(scenario.clients, scenario)
netwrap.create_floating_ip.assert_called_once_with(
ext_network="bar_network",
tenant_id="foo_tenant", fixed_ip="foo_ip")
scenario._associate_floating_ip.assert_called_once_with(
server, "foo_ip", fixed_address="foo_ip")
@mock.patch(VMTASKS_UTILS + ".network_wrapper.wrap")
def test__delete_floating_ip(self, mock_wrap):
scenario, server = self.get_scenario()
_check_addr = mock.Mock(return_value=True)
scenario.check_ip_address = mock.Mock(return_value=_check_addr)
scenario._dissociate_floating_ip = mock.Mock()
scenario._delete_floating_ip(
server, fip={"id": "foo_id", "ip": "foo_ip"})
scenario.check_ip_address.assert_called_once_with(
"foo_ip")
_check_addr.assert_called_once_with(server)
scenario._dissociate_floating_ip.assert_called_once_with(
server, "foo_ip")
mock_wrap.assert_called_once_with(scenario.clients, scenario)
mock_wrap.return_value.delete_floating_ip.assert_called_once_with(
"foo_id", wait=True)
class HostTestCase(test.TestCase):
@mock.patch(VMTASKS_UTILS + ".sys")
@mock.patch("subprocess.Popen")
def test__ping_ip_address_linux(self, mock_popen, mock_sys):
mock_popen.return_value.returncode = 0
mock_sys.platform = "linux2"
host = utils.Host("1.2.3.4")
self.assertEqual(utils.Host.ICMP_UP_STATUS,
utils.Host.update_status(host).status)
mock_popen.assert_called_once_with(
["ping", "-c1", "-w1", str(host.ip)],
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
mock_popen.return_value.wait.assert_called_once_with()
@mock.patch(VMTASKS_UTILS + ".sys")
@mock.patch("subprocess.Popen")
def test__ping_ip_address_linux_ipv6(self, mock_popen, mock_sys):
mock_popen.return_value.returncode = 0
mock_sys.platform = "linux2"
host = utils.Host("1ce:c01d:bee2:15:a5:900d:a5:11fe")
self.assertEqual(utils.Host.ICMP_UP_STATUS,
utils.Host.update_status(host).status)
mock_popen.assert_called_once_with(
["ping6", "-c1", "-w1", str(host.ip)],
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
mock_popen.return_value.wait.assert_called_once_with()
@mock.patch(VMTASKS_UTILS + ".sys")
@mock.patch("subprocess.Popen")
def test__ping_ip_address_other_os(self, mock_popen, mock_sys):
mock_popen.return_value.returncode = 0
mock_sys.platform = "freebsd10"
host = utils.Host("1.2.3.4")
self.assertEqual(utils.Host.ICMP_UP_STATUS,
utils.Host.update_status(host).status)
mock_popen.assert_called_once_with(
["ping", "-c1", str(host.ip)],
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
mock_popen.return_value.wait.assert_called_once_with()
@mock.patch(VMTASKS_UTILS + ".sys")
@mock.patch("subprocess.Popen")
def test__ping_ip_address_other_os_ipv6(self, mock_popen, mock_sys):
mock_popen.return_value.returncode = 0
mock_sys.platform = "freebsd10"
host = utils.Host("1ce:c01d:bee2:15:a5:900d:a5:11fe")
self.assertEqual(utils.Host.ICMP_UP_STATUS,
utils.Host.update_status(host).status)
mock_popen.assert_called_once_with(
["ping6", "-c1", str(host.ip)],
stderr=subprocess.PIPE, stdout=subprocess.PIPE)
mock_popen.return_value.wait.assert_called_once_with()
|
|
#!/usr/bin/env python
import subprocess
import multiprocessing
from multiprocessing import Process, Queue
import os
import time
import fileinput
import atexit
import sys
import socket
import re
# Todo:
# turn the enum into an actual enum
# HTTP Scan should be added to the doc
# syn scan doesn't print its command line
# nikto and dirb can both run on HTTP and HTTPS; should probably modify them to do so in template
# Why isn't smtp being printed into the file?
# Add mysql nmap-script
start = time.time()
ip_output_dir = ""
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
# Runs the given function in a separate process so that several scans can run at once.
def multProc(targetin, scanip, port):
jobs = []
p = multiprocessing.Process(target=targetin, args=(scanip,port))
jobs.append(p)
p.start()
return
def connect_to_port(ip_address, port, service):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip_address, int(port)))
banner = s.recv(1024)
if service == "ftp":
s.send("USER anonymous\r\n")
user = s.recv(1024)
s.send("PASS anonymous\r\n")
password = s.recv(1024)
total_communication = banner + "\r\n" + user + "\r\n" + password
elif service == "smtp":
total_communication = banner + "\r\n"
elif service == "ssh":
total_communication = banner
elif service == "pop3":
s.send("USER root\r\n")
user = s.recv(1024)
s.send("PASS root\r\n")
password = s.recv(1024)
total_communication = banner + user + password
s.close()
def dirb(ip_address, port, url_start, wordlist="/usr/share/wordlist/dirb/big.txt, /usr/share/wordlist/dirb/vulns/cgis.txt"):
print bcolors.HEADER + "INFO: Starting dirb scan for " + ip_address + bcolors.ENDC
DIRBSCAN = "dirb %s://%s:%s -o %s/scans/dirb-%s.txt -r %s " % (url_start, ip_address, port, ip_output_dir, ip_address, wordlist)
print bcolors.HEADER + DIRBSCAN + bcolors.ENDC
results_dirb = subprocess.check_output(DIRBSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with dirb scan for " + ip_address + bcolors.ENDC
print results_dirb
# dirb output has a lot of -'s, which I think mean something on the command line. Trying to sub them out for
# *'s to see if that solves the problem
#while "-" in results_dirb:
# results_dirb = results_dirb.replace("-", "*")
#write_to_file(ip_address, "INSERT_DIRB_SCAN", results_dirb)
return
def nikto(ip_address, port, url_start):
print bcolors.HEADER + "INFO: Starting nikto scan for " + ip_address + bcolors.ENDC
NIKTOSCAN = "nikto -h %s://%s -o %s/scans/nikto-%s-%s.txt" % (url_start, ip_address, ip_output_dir, url_start, ip_address)
print bcolors.HEADER + NIKTOSCAN + bcolors.ENDC
results_nikto = subprocess.check_output(NIKTOSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with NIKTO-scan for " + ip_address + bcolors.ENDC
print results_nikto
#write_to_file(ip_address, "INSERT_NIKTO_SCAN", results_nikto)
return
def httpEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected http on " + ip_address + ":" + port + bcolors.ENDC
print bcolors.HEADER + "INFO: Performing nmap web script scan for " + ip_address + ":" + port + bcolors.ENDC
dirb_process = multiprocessing.Process(target=dirb, args=(ip_address,port,"http"))
dirb_process.start()
nikto_process = multiprocessing.Process(target=nikto, args=(ip_address,port,"http"))
nikto_process.start()
#CURLSCAN = "curl -I http://%s" % (ip_address)
#print bcolors.HEADER + CURLSCAN + bcolors.ENDC
#curl_results = subprocess.check_output(CURLSCAN, shell=True)
#write_to_file(ip_address, "INSERT_CURL_HEADER", curl_results)
HTTPSCAN = "nmap -sV -Pn -vv -p %s --script=http-vhosts,http-userdir-enum,http-apache-negotiation," \
"http-backup-finder,http-config-backup,http-default-accounts,http-methods,http-method-tamper,http-passwd," \
"http-robots.txt,http-devframework,http-enum,http-frontpage-login,http-git,http-iis-webdav-vuln,http-php-version," \
"http-robots.txt,http-shellshock,http-vuln-cve2015-1635 " \
"-oN %s/scans/%s_http.nmap " \
"-oX %s/scans/xml/%s_http.xml %s" % (port, ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + HTTPSCAN + bcolors.ENDC
# TODO add this to the template file
http_results = subprocess.check_output(HTTPSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with HTTP-SCAN for " + ip_address + bcolors.ENDC
print http_results
return
def httpsEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected https on " + ip_address + ":" + port + bcolors.ENDC
print bcolors.HEADER + "INFO: Performing nmap web script scan for " + ip_address + ":" + port + bcolors.ENDC
dirb_process = multiprocessing.Process(target=dirb, args=(ip_address,port,"https"))
dirb_process.start()
nikto_process = multiprocessing.Process(target=nikto, args=(ip_address,port,"https"))
nikto_process.start()
SSLSCAN = "sslscan %s:%s >> %s/scans/ssl_scan_%s" % (ip_address, port, ip_output_dir, ip_address)
print bcolors.HEADER + SSLSCAN + bcolors.ENDC
ssl_results = subprocess.check_output(SSLSCAN, shell=True)
print bcolors.OKGREEN + "INFO: CHECK FILE - Finished with SSLSCAN for " + ip_address + bcolors.ENDC
HTTPSCANS = "nmap -sV -Pn -vv -p %s --script=http-vhosts,http-userdir-enum,http-apache-negotiation," \
"http-backup-finder,http-config-backup,http-default-accounts,http-methods,http-method-tamper,http-passwd," \
"http-robots.txt,http-devframework,http-enum,http-frontpage-login,http-git,http-iis-webdav-vuln,http-php-version," \
"http-robots.txt,http-shellshock,http-vuln-cve2015-1635 " \
"-oN %s/scans/%s_http.nmap " \
"-oX %s/scans/xml/%s_http.xml %s" % (port, ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + HTTPSCANS + bcolors.ENDC
https_results = subprocess.check_output(HTTPSCANS, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with HTTPS-scan for " + ip_address + bcolors.ENDC
print https_results
return
def mssqlEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected MS-SQL on " + ip_address + ":" + port + bcolors.ENDC
print bcolors.HEADER + "INFO: Performing nmap mssql script scan for " + ip_address + ":" + port + bcolors.ENDC
MSSQLSCAN = "nmap -sV -Pn -p %s --script=ms-sql-info,ms-sql-config,ms-sql-dump-hashes " \
"--script-args=mssql.instance-port=1433,smsql.username-sa,mssql.password-sa " \
"-oN %s/scans/mssql_%s.nmap " \
"-oX %s/scans/xml/mssql_%s.xml %s" % (port, ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + MSSQLSCAN + bcolors.ENDC
mssql_results = subprocess.check_output(MSSQLSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with MSSQL-scan for " + ip_address + bcolors.ENDC
print mssql_results
return
def smtpEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected smtp on " + ip_address + ":" + port + bcolors.ENDC
connect_to_port(ip_address, port, "smtp")
SMTPSCAN = "nmap -sV -Pn -p %s --script=smtp-commands,smtp-enum-users,smtp-vuln-cve2010-4344," \
"smtp-vuln-cve2011-1720,smtp-vuln-cve2011-1764 " \
"-oN %s/scans/smtp_%s.nmap " \
"-oX %s/scans/xml/smtp_%s.xml %s" % (port, ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + SMTPSCAN + bcolors.ENDC
smtp_results = subprocess.check_output(SMTPSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with SMTP-scan for " + ip_address + bcolors.ENDC
print smtp_results
# TODO why isn't this being printed into the file?
# better_#write_to_file(ip_address, "smtp", smtp_results)
return
def smbNmap(ip_address):
print "INFO: Detected SMB on " + ip_address
smbNmap = "nmap --script=smb-enum-*,smb-ls.nse,smb-mbenum.nse,smb-os-discovery.nse,smb-security-mode.nse,smb-vuln-* " \
"-oN %s/scans/smb_%s.nmap " \
"-oX %s/scans/xml/smb_%s.xml %s" % (ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
smbNmap_results = subprocess.check_output(smbNmap, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with SMB-Nmap-scan for " + ip_address + bcolors.ENDC
print smbNmap_results
return
# TODO improve this function
# TODO make sure it's being called appropriately
def smbEnum(ip_address):
print_things = True
print "INFO: Detected SMB on " + ip_address
enum4linux = "enum4linux -a %s > %s/scans/enum4linux_%s" % (ip_address, ip_output_dir, ip_address)
# add smbmap and smbclient
try:
enum4linux_results = subprocess.check_output(enum4linux, shell=True)
except:
print bcolors.FAIL + "ERROR: enum4linux execution failed" + bcolors.ENDC
print_things = False
if print_things:
print bcolors.OKGREEN + "INFO: CHECK FILE - Finished with ENUM4LINUX-scan for " + ip_address + bcolors.ENDC
print enum4linux_results
#write_to_file(ip_address, "INSERT_ENUM4LINUX_SCAN", enum4linux_results)
return
# TODO add this to the template?
def snmpEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected snmp on " + ip_address + ":" + port + bcolors.ENDC
snmpdetect = 0
#ip_address = sys.argv[1]
ONESIXONESCAN = "onesixtyone %s" % (ip_address)
results = subprocess.check_output(ONESIXONESCAN, shell=True).strip()
if results != "":
if "Windows" in results:
results = results.split("Software: ")[1]
snmpdetect = 1
elif "Linux" in results:
results = results.split("[public] ")[1]
snmpdetect = 1
if snmpdetect == 1:
print "[*] SNMP running on " + ip_address + "; OS Detect: " + results
SNMPWALK = "snmpwalk -c public -v1 %s 1 > results/scans/%s_snmpwalk.txt" % (ip_address, ip_address)
results = subprocess.check_output(SNMPWALK, shell=True)
NMAPSCAN = "nmap -vv -sV -sU -Pn -p 161,162 --script=snmp-netstat,snmp-processes " \
"-oN %s/scans/snmp_%s.nmap " \
"-oX %s/scans/xml/snmp_%s.xml %s" % (ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
results = subprocess.check_output(NMAPSCAN, shell=True)
print results
return
# TODO add this to the template?
def ftpEnum(ip_address, port):
print bcolors.HEADER + "INFO: Detected ftp on " + ip_address + ":" + port + bcolors.ENDC
#connect_to_port(ip_address, port, "ftp")
FTPSCAN = "nmap -sV -Pn -vv -p %s --script=ftp-anon,ftp-bounce,ftp-libopie,ftp-proftpd-backdoor," \
"ftp-vsftpd-backdoor,ftp-vuln-cve2010-4221 " \
"-oN %s/scans/ftp_%s.nmap " \
"-oX %s/scans/xml/ftp_%s.xml %s" % (port, ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + FTPSCAN + bcolors.ENDC
results_ftp = subprocess.check_output(FTPSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with FTP-Nmap-scan for " + ip_address + bcolors.ENDC
print results_ftp
return
def udpScan(ip_address):
print bcolors.HEADER + "INFO: Detected UDP on " + ip_address + bcolors.ENDC
#first, run a super simple scan you can use to parse results
SIMPLE_UDP_SCAN = "nmap -sU --top-ports 200 " \
"-oN %s/scans/udp_simple_%s.nmap " \
"-oX %s/scans/xml/udp_simple_%s.xml %s" % (ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + SIMPLE_UDP_SCAN + bcolors.ENDC
simple_udpscan_results = subprocess.check_output(SIMPLE_UDP_SCAN, shell=True)
UDPSCAN = "nmap -vv -Pn -A -sC -sU -T 4 --top-ports 200 " \
"-oN %s/scans/udp_%s.nmap " \
"-oX %s/scans/xml/udp_%s.xml %s" % (ip_output_dir, ip_address, ip_output_dir, ip_address, ip_address)
print bcolors.HEADER + UDPSCAN + bcolors.ENDC
udpscan_results = subprocess.check_output(UDPSCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with UDP-Nmap scan for " + ip_address + bcolors.ENDC
print udpscan_results
#write_to_file(ip_address, "INSERT_UDP_SCAN", udpscan_results)
#UNICORNSCAN = "unicornscan -mU -v -I %s > %s/scans/unicorn_udp_%s.txt" % (ip_address, ip_output_dir, ip_address)
# Note - redirected output into a file. There is nothing to print here.
#unicornscan_results = subprocess.check_output(UNICORNSCAN, shell=True)
#print bcolors.OKGREEN + "INFO: CHECK FILE - Finished with UNICORNSCAN for " + ip_address + bcolors.ENDC
return simple_udpscan_results
def sshScan(ip_address, port):
print bcolors.HEADER + "INFO: Detected SSH on " + ip_address + ":" + port + bcolors.ENDC
connect_to_port(ip_address, port, "ssh")
def pop3Scan(ip_address, port):
print bcolors.HEADER + "INFO: Detected POP3 on " + ip_address + ":" + port + bcolors.ENDC
connect_to_port(ip_address, port, "pop3")
def basicNmapTcpScans (ip_address):
ip_address = ip_address.strip()
print bcolors.OKGREEN + "INFO: Running general TCP/UDP nmap scans for " + ip_address + bcolors.ENDC
# run the first nmap (syn) scan
SYN_SCAN = "nmap -sS %s -oN %s/scans/syn_%s.nmap -oX %s/scans/xml/syn_%s.xml" % (ip_address, ip_output_dir, ip_address, ip_output_dir, ip_address)
print bcolors.HEADER + SYN_SCAN + bcolors.ENDC
results = subprocess.check_output(SYN_SCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with BASIC Nmap-scan for " + ip_address + bcolors.ENDC
print results
#write_to_file(ip_address, "INSERT_SYN_SCAN", results)
# run the basic TCP nmap scan
TCP_SCAN = "nmap -sV -O %s -oN %s/scans/%s.nmap -oX %s/scans/xml/%s.xml" % (ip_address, ip_output_dir, ip_address, ip_output_dir, ip_address)
print bcolors.HEADER + TCP_SCAN + bcolors.ENDC
results_to_parse = subprocess.check_output(TCP_SCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with BASIC TCP Nmap-scan for " + ip_address + bcolors.ENDC
print results_to_parse
#write_to_file(ip_address, "INSERT_TCP_SCAN", results_to_parse)
return results_to_parse
def advancedNmapScans(ip_address):
# run the script nmap scan
SCRIPT_SCAN = "nmap -sC %s -oN %s/scans/default_script_%s.nmap -oX %s/scans/xml/default_script_%s.xml" % (ip_address, ip_output_dir, ip_address, ip_output_dir, ip_address)
print bcolors.HEADER + SCRIPT_SCAN + bcolors.ENDC
results = subprocess.check_output(SCRIPT_SCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with Default Script Nmap-scan for " + ip_address + bcolors.ENDC
print results
#write_to_file(ip_address, "INSERT_SCRIPT_SCAN", results)
# run a full port nmap scan
FULL_PORT_SCAN = "nmap -p- %s -oN %s/scans/full_port_%s.nmap -oX %s/scans/xml/full_port_%s.xml" % (ip_address, ip_output_dir, ip_address, ip_output_dir, ip_address)
print bcolors.HEADER + FULL_PORT_SCAN + bcolors.ENDC
results = subprocess.check_output(FULL_PORT_SCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with Full Port Nmap-scan for " + ip_address + bcolors.ENDC
print results
#write_to_file(ip_address, "INSERT_FULL_PORT_SCAN", results)
# run the "monster scan"
MONSTER_SCAN = "nmap -p- -A -T4 -sC %s -oN %s/scans/monster_%s.nmap -oX %s/scans/xml/monster_%s.xml" % (ip_address, ip_output_dir, ip_address, ip_output_dir, ip_address)
print bcolors.HEADER + MONSTER_SCAN + bcolors.ENDC
results = subprocess.check_output(MONSTER_SCAN, shell=True)
print bcolors.OKGREEN + "INFO: RESULT BELOW - Finished with Monster Nmap-scan for " + ip_address + bcolors.ENDC
print results
#write_to_file(ip_address, "INSERT_MONSTER_SCAN", results)
def parseResults(results_to_parse, protocol):
lines = results_to_parse.split("\n")
serv_dict = {}
for line in lines:
ports = []
line = line.strip()
if (protocol in line) and ("open" in line) and not ("Discovered" in line):
# print line
while " " in line:
line = line.replace(" ", " ")
linesplit = line.split(" ")
service = linesplit[2] # grab the service name
port = line.split(" ")[0] # grab the port/proto
# print port
if service in serv_dict:
ports = serv_dict[service] # if the service is already in the dict, grab the port list
ports.append(port)
# print ports
serv_dict[service] = ports # add service to the dictionary along with the associated port(s)
return serv_dict
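# Illustrative example (not part of the original script): given typical nmap
# service lines such as
#   22/tcp   open  ssh     OpenSSH 7.2p2
#   80/tcp   open  http    Apache httpd 2.4.18
# parseResults(results, "tcp") collapses the repeated spaces, takes field [2]
# as the service name and field [0] as the port/protocol, and would return
#   {"ssh": ["22/tcp"], "http": ["80/tcp"]}
# which furtherEnum() then walks to launch the per-service scans.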
def furtherEnum(ip_address, serv_dict):
# go through the service dictionary to call additional targeted enumeration functions
for serv in serv_dict:
ports = serv_dict[serv]
if (serv == "http") or (serv == "http-proxy") or (serv == "http-alt") or (serv == "http?"):
for port in ports:
port = port.split("/")[0]
httpEnum(ip_address, port)
#multProc(httpEnum, ip_address, port)
elif (serv == "ssl/http") or ("https" == serv) or ("https?" == serv):
for port in ports:
port = port.split("/")[0]
httpsEnum(ip_address, port)
# multProc(httpsEnum, ip_address, port)
elif "smtp" in serv:
for port in ports:
port = port.split("/")[0]
smtpEnum(ip_address, port)
#multProc(smtpEnum, ip_address, port)
elif "ftp" in serv:
for port in ports:
port = port.split("/")[0]
ftpEnum(ip_address, port)
#multProc(ftpEnum, ip_address, port)
elif ("microsoft-ds" in serv) or ("netbios-ssn" == serv):
for port in ports:
port = port.split("/")[0]
smbEnum(ip_address)
#multProc(smbEnum, ip_address, port)
smbNmap(ip_address)
#multProc(smbNmap, ip_address, port)
elif "ms-sql" in serv:
for port in ports:
port = port.split("/")[0]
mssqlEnum(ip_address, port)
#multProc(mssqlEnum, ip_address, port)
elif "ssh" in serv:
for port in ports:
port = port.split("/")[0]
sshScan(ip_address, port)
#multProc(sshScan, ip_address, port)
elif "snmp" in serv:
for port in ports:
port = port.split("/")[0]
snmpEnum(ip_address, port)
#multProc(snmpEnum, ip_address, port)
# elif ("domain" in serv):
# for port in ports:
# port = port.split("/")[0]
# multProc(dnsEnum, ip_address, port)
return
def scan(ip_address):
# first, run the basic TCP Nmap scans and get our list of processes
tcp_nmap_results = basicNmapTcpScans(ip_address)
# next, run the intense TCP Nmap Scans
advancedNmapScans(ip_address)
# then, check for other TCP enumeration we can do
serv_dict = parseResults(tcp_nmap_results, "tcp")
furtherEnum(ip_address, serv_dict)
# then, run UDP nmap scans and get our list of processes
udp_nmap_results = udpScan(ip_address)
# finally, check for other UDP enumeration we can do
serv_dict = parseResults(udp_nmap_results, "udp")
furtherEnum(ip_address, serv_dict)
print bcolors.HEADER
print "------------------------------------------------------------"
print "!!!! RECON SCAN !!!!!"
print "!!!! A multi-process service scanner !!!!!"
print "!!!! dirb, nikto, ftp, ssh, mssql, pop3, tcp !!!!!"
print "!!!! udp, smtp, smb !!!!!"
print "------------------------------------------------------------"
if len(sys.argv) < 2:
print ""
print "Usage: python reconscan.py <ip> <ip> <ip>"
print "Example: python reconscan.py 192.168.1.101 192.168.1.102"
print ""
print "############################################################"
sys.exit()
print bcolors.ENDC
if __name__=='__main__':
print ("Enter your base directory: ")
recon_dir_path = raw_input()
print (recon_dir_path)
if recon_dir_path.endswith('/'):
print ("Proceeding...")
else:
recon_dir_path = recon_dir_path + "/"
print ("Path fixed, proceeding...")
print (recon_dir_path)
targets = sys.argv
targets.pop(0)
if not os.path.exists(recon_dir_path):
os.makedirs(recon_dir_path)
dirs = os.listdir(recon_dir_path)
for scanip in targets:
scanip = scanip.rstrip()
ip_output_dir = recon_dir_path + scanip
if scanip not in dirs:
print bcolors.HEADER + "INFO: No folder was found for " + scanip + ". Setting up folder." + bcolors.ENDC
subprocess.check_output("mkdir " + ip_output_dir, shell=True)
subprocess.check_output("mkdir " + ip_output_dir + "/exploit", shell=True)
subprocess.check_output("mkdir " + ip_output_dir + "/loot", shell=True)
subprocess.check_output("mkdir " + ip_output_dir + "/scans", shell=True)
subprocess.check_output("mkdir " + ip_output_dir + "/scans/xml", shell=True)
subprocess.check_output("touch " + ip_output_dir + "/notes.txt", shell=True)
subprocess.check_output("touch " + ip_output_dir + "/creds.txt", shell=True)
subprocess.check_output("touch " + ip_output_dir + "/proof.txt", shell=True)
print bcolors.OKGREEN + "INFO: Folders and f created here: " + ip_output_dir + bcolors.ENDC
#scan(scanip)
p = multiprocessing.Process(target=scan, args=(scanip,))
p.start()
|
|
# Copyright 2014 The Swarming Authors. All rights reserved.
# Use of this source code is governed by the Apache v2.0 license that can be
# found in the LICENSE file.
"""NDB model classes used to model AuthDB relations.
Models defined here are used by central authentication service (that stores all
groups and secrets) and by services that implement some concrete functionality
protected with ACLs (like isolate and swarming services).
Applications that use auth component may work in 3 modes:
1. Standalone. Application is self contained and manages its own groups.
Useful when developing a new service or for simple installations.
2. Replica. Application uses a central authentication service. An application
can be dynamically switched from Standalone to Replica mode.
3. Primary. Application IS a central authentication service. Only 'auth'
service is running in this mode. 'configure_as_primary' call during startup
switches application to that mode.
Central authentication service (Primary) holds authoritative copy of all auth
related information (groups, secrets, etc.) and acts as a single source of truth
for it. All other services (Replicas) hold copies of a relevant subset of
this information (that they use to perform authorization checks).
Primary service is responsible for updating replicas' configuration via
service-to-service push based replication protocol.
AuthDB holds a list of groups. Each group has a unique name and is defined
as union of 3 sets:
1) Explicit enumeration of particular Identities e.g. 'user:alice@example.com'
2) Set of glob-like identity patterns e.g. 'user:*@example.com'
3) Set of nested Groups.
Identity defines an actor making an action (it can be a real person, a bot,
an AppEngine application or special 'anonymous' identity).
In addition to that, AuthDB stores small amount of authentication related
configuration data, such as OAuth2 client_id and client_secret and various
secret keys.
"""
import collections
import fnmatch
import logging
import os
import re
from google.appengine.api import app_identity
from google.appengine.ext import ndb
from components import datastore_utils
from components import utils
from . import ipaddr
# Part of public API of 'auth' component, exposed by this module.
__all__ = [
'ADMIN_GROUP',
'Anonymous',
'bootstrap_group',
'bootstrap_ip_whitelist',
'bootstrap_loopback_ips',
'BOTS_IP_WHITELIST',
'configure_as_primary',
'find_group_dependency_cycle',
'find_referencing_groups',
'get_auth_db_revision',
'get_missing_groups',
'get_service_self_identity',
'group_key',
'Identity',
'IDENTITY_ANONYMOUS',
'IDENTITY_BOT',
'IDENTITY_SERVICE',
'IDENTITY_USER',
'IdentityProperty',
'ip_whitelist_key',
'is_empty_group',
'is_external_group_name',
'is_primary',
'is_replica',
'is_standalone',
'is_valid_group_name',
'is_valid_ip_whitelist_name',
'replicate_auth_db',
]
# Name of a group whose members have access to Group management UI. It's the
# only group needed to bootstrap everything else.
ADMIN_GROUP = 'administrators'
# Name of AuthIPWhitelist with bots IP ranges. See AuthIPWhitelist.
BOTS_IP_WHITELIST = 'bots'
# No identity information is provided. Identity name is always 'anonymous'.
IDENTITY_ANONYMOUS = 'anonymous'
# Using bot credentials. Identity name is bot's id.
IDENTITY_BOT = 'bot'
# Using App Engine service credentials. Identity name is app name.
IDENTITY_SERVICE = 'service'
# Using user credentials. Identity name is user's email.
IDENTITY_USER = 'user'
# All allowed identity kinds + regexps to validate identity name.
ALLOWED_IDENTITY_KINDS = {
IDENTITY_ANONYMOUS: re.compile(r'^anonymous$'),
IDENTITY_BOT: re.compile(r'^[0-9a-zA-Z_\-\.@]+$'),
IDENTITY_SERVICE: re.compile(r'^[0-9a-zA-Z_\-\:\.]+$'),
IDENTITY_USER: re.compile(r'^[0-9a-zA-Z_\-\.\+]+@[0-9a-z_\-\.]+$'),
}
# Regular expression that matches group names. ASCII only, no leading or
# trailing spaces allowed (spaces inside are fine).
GROUP_NAME_RE = re.compile(
r'^([a-z\-]+/)?[0-9a-zA-Z_][0-9a-zA-Z_\-\.\ ]{1,80}[0-9a-zA-Z_\-\.]$')
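# Illustrative only: names such as 'administrators' or 'ext/chrome-committers'
# match GROUP_NAME_RE (an optional lowercase 'prefix/' followed by 3-82 allowed
# characters), while names with leading or trailing spaces do not.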
# Special group name that means 'All possible users' (including anonymous!).
GROUP_ALL = '*'
# Regular expression for IP whitelist name.
IP_WHITELIST_NAME_RE = re.compile(r'^[0-9a-zA-Z_\-\+\.\ ]{2,200}$')
# Configuration of Primary service, set by 'configure_as_primary'.
_replication_callback = None
# Root ndb keys of various models. They can't be defined as a module level
# constants because ndb.Key implicitly includes current APPLICATION_ID. And in
# testing environment it is '_' during module loading time. Trying to use such
# key from within a testbed test case results in the following error:
# BadRequestError: app "testbed-test" cannot access app "_"'s data
def root_key():
"""Global root key of auth models entity group."""
return ndb.Key('AuthGlobalConfig', 'root')
def replication_state_key():
"""Key of AuthReplicationState entity."""
return ndb.Key('AuthReplicationState', 'self', parent=root_key())
def ip_whitelist_assignments_key():
"""Key of AuthIPWhitelistAssignments entity."""
return ndb.Key('AuthIPWhitelistAssignments', 'default', parent=root_key())
################################################################################
## Identity & IdentityGlob.
class Identity(
datastore_utils.BytesSerializable,
collections.namedtuple('Identity', 'kind, name')):
"""Represents a caller that makes requests. Immutable.
A tuple of (kind, name) where 'kind' is one of IDENTITY_* constants and
the meaning of 'name' depends on the kind (see comments for IDENTITY_*).
It generalizes accounts of real people, bot accounts and service-to-service
accounts.
It's pure identity information. Any additional information that may be
related to an identity (e.g. registration date, last access time, etc.) should
be stored elsewhere using Identity.to_bytes() as a key.
"""
# Inheriting from tuple requires use of __new__ instead of __init__. __init__
# is called with object already 'frozen', so it's not possible to modify its
# attributes in __init__.
# See http://docs.python.org/2/reference/datamodel.html#object.__new__
def __new__(cls, kind, name):
if isinstance(name, unicode):
try:
name = name.encode('ascii')
except UnicodeEncodeError:
raise ValueError('Identity has invalid format: only ASCII is allowed')
if (kind not in ALLOWED_IDENTITY_KINDS or
not ALLOWED_IDENTITY_KINDS[kind].match(name)):
raise ValueError('Identity has invalid format: %s' % name)
return super(Identity, cls).__new__(cls, str(kind), name)
def to_bytes(self):
"""Serializes this identity to byte buffer."""
return '%s:%s' % (self.kind, self.name)
@classmethod
def from_bytes(cls, byte_buf):
"""Given a byte buffer returns corresponding Identity object."""
kind, sep, name = byte_buf.partition(':')
if not sep:
raise ValueError('Missing \':\' separator in Identity string')
return cls(kind, name)
@property
def is_anonymous(self):
"""True if this object represents anonymous identity."""
return self.kind == IDENTITY_ANONYMOUS
@property
def is_bot(self):
"""True if this object represents bot account."""
return self.kind == IDENTITY_BOT
@property
def is_service(self):
"""True if this object represents service account."""
return self.kind == IDENTITY_SERVICE
@property
def is_user(self):
"""True if this object represents user account."""
return self.kind == IDENTITY_USER
# Predefined Anonymous identity.
Anonymous = Identity(IDENTITY_ANONYMOUS, 'anonymous')
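# Illustrative only (not in the original module): Identity round-trips through
# its serialized form, e.g.
#   Identity(IDENTITY_USER, 'alice@example.com').to_bytes() == 'user:alice@example.com'
#   Identity.from_bytes('bot:build-slave-1').is_bot    # -> True
#   Identity.from_bytes('user:not an email')           # -> raises ValueError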
class IdentityProperty(datastore_utils.BytesSerializableProperty):
"""NDB model property for Identity values.
Identities are stored as indexed short blobs internally.
"""
_value_type = Identity
_indexed = True
class IdentityGlob(
datastore_utils.BytesSerializable,
collections.namedtuple('IdentityGlob', 'kind, pattern')):
"""Glob-like pattern that matches subset of identities. Immutable.
Tuple (kind, glob) where 'kind' is one of IDENTITY_* constants and 'glob'
defines a pattern that identity names should match. For example, the IdentityGlob
that matches all bots is (IDENTITY_BOT, '*'), which can also be written
as 'bot:*'.
"""
# See comment for Identity.__new__ regarding use of __new__ here.
def __new__(cls, kind, pattern):
if isinstance(pattern, unicode):
try:
pattern = pattern.encode('ascii')
except UnicodeEncodeError:
raise ValueError('Invalid IdentityGlob pattern: only ASCII is allowed')
if not pattern:
raise ValueError('No pattern is given')
if kind not in ALLOWED_IDENTITY_KINDS:
raise ValueError('Invalid Identity kind: %s' % kind)
return super(IdentityGlob, cls).__new__(cls, str(kind), pattern)
def to_bytes(self):
"""Serializes this identity glob to byte buffer."""
return '%s:%s' % (self.kind, self.pattern)
@classmethod
def from_bytes(cls, byte_buf):
"""Given a byte buffer returns corresponding IdentityGlob object."""
kind, sep, pattern = byte_buf.partition(':')
if not sep:
raise ValueError('Missing \':\' separator in IdentityGlob string')
return cls(kind, pattern)
def match(self, identity):
"""Return True if |identity| matches this pattern."""
if identity.kind != self.kind:
return False
return fnmatch.fnmatchcase(identity.name, self.pattern)
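# Illustrative only: IdentityGlob(IDENTITY_USER, '*@example.com') matches
# Identity(IDENTITY_USER, 'alice@example.com') but not a bot identity with the
# same name, since match() compares kinds first and only then applies
# fnmatch.fnmatchcase() to the name.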
class IdentityGlobProperty(datastore_utils.BytesSerializableProperty):
"""NDB model property for IdentityGlob values.
IdentityGlobs are stored as short indexed blobs internally.
"""
_value_type = IdentityGlob
_indexed = True
################################################################################
## Singleton entities and replication related models.
def configure_as_primary(replication_callback):
"""Registers a callback to be called when AuthDB changes.
Should be called during Primary application startup. The callback will be
called as 'replication_callback(AuthReplicationState)' from inside a transaction
on the root_key() entity group whenever replicate_auth_db() is called (i.e. on
every change to the auth db that should be replicated to replicas).
"""
global _replication_callback
_replication_callback = replication_callback
def is_primary():
"""Returns True if current application was configured as Primary."""
return bool(_replication_callback)
def is_replica():
"""Returns True if application is in Replica mode."""
return not is_primary() and not is_standalone()
def is_standalone():
"""Returns True if application is in Standalone mode."""
ent = get_replication_state()
return not ent or not ent.primary_id
def get_replication_state():
"""Returns AuthReplicationState singleton entity if it exists."""
return replication_state_key().get()
def get_auth_db_revision():
"""Returns current revision of AuthDB, it increases with each change."""
state = get_replication_state()
return state.auth_db_rev if state else 0
def get_service_self_identity():
"""Returns Identity that correspond to the current GAE app itself."""
return Identity(IDENTITY_SERVICE, app_identity.get_application_id())
class AuthGlobalConfig(ndb.Model):
"""Acts as a root entity for auth models.
There should be only one instance of this model in Datastore, with a key set
to root_key(). A change to an entity group rooted at this key is a signal that
AuthDB has to be refetched (see 'fetch_auth_db' in api.py).
Entities that change often or are associated with a particular bot or user
MUST NOT be in this entity group.
Content of this particular entity is replicated from Primary service to all
Replicas.
Entities that belong to this entity group are:
* AuthGroup
* AuthIPWhitelist
* AuthReplicationState
* AuthSecret
"""
# OAuth2 client_id to use to mint new OAuth2 tokens.
oauth_client_id = ndb.StringProperty(indexed=False, default='')
# OAuth2 client secret. Not so secret really, since it's passed to clients.
oauth_client_secret = ndb.StringProperty(indexed=False, default='')
# Additional OAuth2 client_ids allowed to access the services.
oauth_additional_client_ids = ndb.StringProperty(repeated=True, indexed=False)
class AuthReplicationState(ndb.Model, datastore_utils.SerializableModelMixin):
"""Contains state used to control Primary -> Replica replication.
It's a singleton entity with key replication_state_key() (in same entity
groups as root_key()). This entity should be small since it is updated
(auth_db_rev is incremented) whenever AuthDB changes.
Exists in any AuthDB (on Primary and Replicas). Primary updates it whenever
changes to AuthDB are made, Replica updates it whenever it receives a push
from Primary.
"""
# How to convert this entity to or from serializable dict.
serializable_properties = {
'primary_id': datastore_utils.READABLE,
'primary_url': datastore_utils.READABLE,
'auth_db_rev': datastore_utils.READABLE,
'modified_ts': datastore_utils.READABLE,
}
# For services in Standalone mode it is None.
# For services in Primary mode: own GAE application ID.
# For services in Replica mode it is a GAE application ID of Primary.
primary_id = ndb.StringProperty(indexed=False)
# For services in Replica mode, root URL of Primary, i.e https://<host>.
primary_url = ndb.StringProperty(indexed=False)
# Revision of auth DB. Increased by 1 with every change that should be
# propagated to replicas. Only services in Standalone or Primary mode
# update this property by themselves. Replicas receive it from Primary.
auth_db_rev = ndb.IntegerProperty(default=0, indexed=False)
# Time when auth_db_rev was created (by Primary clock). For informational
# purposes only. See comment at AuthGroup.modified_ts for explanation why
# auto_now is not used.
modified_ts = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
def replicate_auth_db():
"""Increments auth_db_rev by one.
It is a signal that Auth DB should be replicated to Replicas. If called from
inside a transaction, it inherits it and updates auth_db_rev only once (even
if called multiple times during that transaction).
Should only be called for services in Standalone or Primary modes. Will raise
ValueError if called on Replica. When called for service in Standalone mode,
will update auth_db_rev but won't kick any replication. For services in
Primary mode will also initiate replication by calling callback set in
'configure_as_primary'.
WARNING: This function relies on a valid transaction context. NDB hooks and
asynchronous operations are known to be buggy in this regard: NDB hook for
an async operation in a transaction may be called with a wrong context
(main event loop context instead of transaction context). One way to work
around that is to monkey patch NDB (as done here: https://goo.gl/1yASjL).
Another is to not use hooks at all. There's no way to differentiate between
sync and async modes of an NDB operation from inside a hook. And without a
strict assert it's very easy to forget about "Do not use put_async" warning.
For that reason _post_put_hook is NOT used and replicate_auth_db() should be
called explicitly whenever relevant part of root_key() entity group is
updated.
"""
def increment_revision_and_update_replicas():
"""Does the actual job, called inside a transaction."""
# Update auth_db_rev. replication_state_key() is in same group as root_key.
state = replication_state_key().get()
if not state:
primary_id = app_identity.get_application_id() if is_primary() else None
state = AuthReplicationState(
key=replication_state_key(),
primary_id=primary_id,
auth_db_rev=0)
# Assert Primary or Standalone. Replicas can't increment auth db revision.
if not is_primary() and state.primary_id:
raise ValueError('Can\'t modify Auth DB on Replica')
state.auth_db_rev += 1
state.modified_ts = utils.utcnow()
state.put()
# Only Primary does active replication.
if is_primary():
_replication_callback(state)
# If not in a transaction, start a new one.
if not ndb.in_transaction():
ndb.transaction(increment_revision_and_update_replicas)
return
# If in a transaction, use transaction context to store "already did this"
# flag. Note that each transaction retry gets its own new transaction context,
# see ndb/context.py, 'transaction' tasklet, around line 982 (for SDK 1.9.6).
ctx = ndb.get_context()
if not getattr(ctx, '_auth_db_inc_called', False):
increment_revision_and_update_replicas()
ctx._auth_db_inc_called = True
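# Illustrative usage sketch (hypothetical caller, not part of this module):
# code that mutates the root_key() entity group is expected to bump the
# revision explicitly, e.g.
#
#   @ndb.transactional
#   def update_group_description(name, description):
#     entity = group_key(name).get()
#     entity.description = description
#     entity.put()
#     replicate_auth_db()  # increments auth_db_rev once per transaction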
################################################################################
## Groups.
class AuthGroup(ndb.Model, datastore_utils.SerializableModelMixin):
"""A group of identities, entity id is a group name.
Parent is AuthGlobalConfig entity keyed at root_key().
Primary service holds authoritative list of Groups, that gets replicated to
all Replicas.
"""
# How to convert this entity to or from serializable dict.
serializable_properties = {
'members': datastore_utils.READABLE | datastore_utils.WRITABLE,
'globs': datastore_utils.READABLE | datastore_utils.WRITABLE,
'nested': datastore_utils.READABLE | datastore_utils.WRITABLE,
'description': datastore_utils.READABLE | datastore_utils.WRITABLE,
'created_ts': datastore_utils.READABLE,
'created_by': datastore_utils.READABLE,
'modified_ts': datastore_utils.READABLE,
'modified_by': datastore_utils.READABLE,
}
# List of members that are explicitly in this group. Indexed.
members = IdentityProperty(repeated=True)
# List of identity-glob expressions (like 'user:*@example.com'). Indexed.
globs = IdentityGlobProperty(repeated=True)
# List of nested group names. Indexed.
nested = ndb.StringProperty(repeated=True)
# Human readable description.
description = ndb.TextProperty(default='')
# When the group was created.
created_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who created the group.
created_by = IdentityProperty()
# When the group was modified last time. Do not use 'auto_now' property since
# such property overrides any explicitly set value with now() during put. It's
# undesired when storing a copy of entity received from Primary (Replica
# should have modified_ts be same as on Primary). Still use auto_now_add to
# ensure this property is always non None (and to simplify tests that create
# a lot of one off entities).
modified_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who modified the group last time.
modified_by = IdentityProperty()
def group_key(group):
"""Returns ndb.Key for AuthGroup entity."""
return ndb.Key(AuthGroup, group, parent=root_key())
def is_empty_group(group):
"""Returns True if group is missing or completely empty."""
group = group_key(group).get()
return not group or not(group.members or group.globs or group.nested)
def is_valid_group_name(name):
"""True if string looks like a valid group name."""
return bool(GROUP_NAME_RE.match(name))
def is_external_group_name(name):
"""True if group is imported from outside and is not writable."""
return is_valid_group_name(name) and '/' in name
@ndb.transactional
def bootstrap_group(group, identities, description=''):
"""Makes a group (if not yet exists) and adds |identities| to it as members.
Returns True if modified the group, False if identities are already there.
"""
key = group_key(group)
entity = key.get()
if entity and all(i in entity.members for i in identities):
return False
if not entity:
entity = AuthGroup(
key=key,
description=description,
created_by=get_service_self_identity(),
modified_by=get_service_self_identity())
for i in identities:
if i not in entity.members:
entity.members.append(i)
entity.modified_by = get_service_self_identity()
entity.put()
replicate_auth_db()
return True
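# Illustrative only (hypothetical values): a warmup handler on a Standalone or
# Primary service could seed the admin group like this:
#   bootstrap_group(
#       ADMIN_GROUP,
#       [Identity(IDENTITY_USER, 'admin@example.com')],
#       'Users that can manage groups')
# The call is a no-op (returns False) if the identities are already members.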
def find_referencing_groups(group):
"""Finds groups that reference the specified group as nested group.
Used to verify that |group| is safe to delete, i.e. no other group is
depending on it.
Returns:
Set of names of referencing groups.
"""
referencing_groups = AuthGroup.query(
AuthGroup.nested == group, ancestor=root_key()).fetch(keys_only=True)
return set(key.id() for key in referencing_groups)
def get_missing_groups(groups):
"""Given a list of group names, returns a list of groups that do not exist."""
# We need to iterate over |groups| twice. It won't work if |groups|
# is a generator. So convert to list first.
groups = list(groups)
entities = ndb.get_multi(group_key(name) for name in groups)
return [name for name, ent in zip(groups, entities) if not ent]
def find_group_dependency_cycle(group):
"""Searches for dependency cycle between nested groups.
Traverses the dependency graph starting from |group|, fetching all necessary
groups from datastore along the way.
Args:
group: instance of AuthGroup to start traversing from. It doesn't have to be
committed to Datastore itself (but all its nested groups should be
there already).
Returns:
List of names of groups that form a cycle or empty list if no cycles.
"""
# It is a depth-first search on a directed graph with back edge detection.
# See http://www.cs.nyu.edu/courses/summer04/G22.1170-001/6a-Graphs-More.pdf
# Cache of already fetched groups.
groups = {group.key.id(): group}
# List of groups that are completely explored (all subtree is traversed).
visited = []
# Stack of groups that are being explored now. In case cycle is detected
# it would contain that cycle.
visiting = []
def visit(group):
"""Recursively explores |group| subtree, returns True if finds a cycle."""
assert group not in visiting
assert group not in visited
# Load bodies of nested groups not seen so far into |groups|.
entities = ndb.get_multi(
group_key(name) for name in group.nested if name not in groups)
groups.update({entity.key.id(): entity for entity in entities if entity})
visiting.append(group)
for nested in group.nested:
obj = groups.get(nested)
# Do not crash if non-existent group is referenced somehow.
if not obj:
continue
# Cross edge. Can happen in diamond-like graph, not a cycle.
if obj in visited:
continue
# Back edge: |group| references its own ancestor -> cycle.
if obj in visiting:
return True
# Explore subtree.
if visit(obj):
return True
visiting.pop()
visited.append(group)
return False
visit(group)
return [group.key.id() for group in visiting]
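# Illustrative only: if group 'a' nests 'b' and 'b' nests 'a', calling
# find_group_dependency_cycle() on the (possibly uncommitted) AuthGroup for 'a'
# returns ['a', 'b']; for an acyclic graph it returns [].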
################################################################################
## Secrets store.
class AuthSecretScope(ndb.Model):
"""Entity to act as parent entity for AuthSecret.
Parent is AuthGlobalConfig entity keyed at root_key().
Id of this entity defines scope of secret keys that have this entity as
a parent. Possible scopes are 'local' and 'global'.
Secrets in 'local' scope never leave the Datastore they are stored in, and they
are different for each service (even for Replicas). Only service that
generated a local secret knows it.
Secrets in 'global' scope are known to all services (via Primary -> Replica
DB replication mechanism). Source of truth for global secrets is in Primary's
Datastore.
"""
def secret_scope_key(scope):
"""Key of AuthSecretScope entity for a given scope ('global' or 'local')."""
return ndb.Key(AuthSecretScope, scope, parent=root_key())
class AuthSecret(ndb.Model):
"""Some service-wide named secret blob.
Entity can be a child of:
* Key(AuthSecretScope, 'global', parent=root_key()):
Global secrets replicated across all services.
* Key(AuthSecretScope, 'local', parent=root_key()):
Secrets local to the current service.
There should be only a very limited number of AuthSecret entities around. AuthDB
fetches them all at once. Do not use this entity for per-user secrets.
Holds most recent value of a secret as well as several previous values. Most
recent value is used to generate new tokens, previous values may be used to
validate existing tokens. That way secret can be rotated without invalidating
any existing outstanding tokens.
"""
# Last several values of a secret, with current value in front.
values = ndb.BlobProperty(repeated=True, indexed=False)
# When secret was modified last time.
modified_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who modified the secret last time.
modified_by = IdentityProperty()
@classmethod
def bootstrap(cls, name, scope, length=32):
"""Creates a secret if it doesn't exist yet.
Args:
name: name of the secret.
scope: 'local' or 'global', see doc string for AuthSecretScope. 'global'
scope should only be used on Primary service.
length: length of the secret to generate if secret doesn't exist yet.
Returns:
Instance of AuthSecret (creating it if necessary) with random secret set.
"""
# Note that 'get_or_insert' is a bad fit here. With 'get_or_insert' we'd
# have to call os.urandom every time we want to get a key. It's a waste of
# time and entropy.
if scope not in ('local', 'global'):
raise ValueError('Invalid secret scope: %s' % scope)
key = ndb.Key(cls, name, parent=secret_scope_key(scope))
entity = key.get()
if entity is not None:
return entity
@ndb.transactional
def create():
entity = key.get()
if entity is not None:
return entity
logging.info('Creating new secret key %s in %s scope', name, scope)
# Global keys can only be created on Primary or Standalone service.
if scope == 'global' and is_replica():
raise ValueError('Can\'t bootstrap global key on Replica')
entity = cls(
key=key,
values=[os.urandom(length)],
modified_by=get_service_self_identity())
entity.put()
# Only global keys are part of replicated state.
if scope == 'global':
replicate_auth_db()
return entity
return create()
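# Illustrative usage sketch (hypothetical secret name): a service can lazily
# create a per-service signing key at startup, e.g.
#   secret = AuthSecret.bootstrap('xsrf_key', 'local')
#   current_value = secret.values[0]  # most recent value is kept in front
# 'global' scope would additionally require running on Primary (or Standalone)
# and triggers replicate_auth_db().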
################################################################################
## IP whitelist.
class AuthIPWhitelistAssignments(ndb.Model):
"""A singleton entity with "identity -> AuthIPWhitelist to use" mapping.
Entity key is ip_whitelist_assignments_key(). Parent entity is root_key().
See AuthIPWhitelist for more info about IP whitelists.
"""
class Assignment(ndb.Model):
# Identity name to limit by IP whitelist. Unique key in 'assignments' list.
identity = IdentityProperty()
# Name of IP whitelist to use (see AuthIPWhitelist).
ip_whitelist = ndb.StringProperty()
# Why the assignment was created.
comment = ndb.StringProperty()
# When the assignment was created.
created_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who created the assignment.
created_by = IdentityProperty()
# Holds all the assignments.
assignments = ndb.LocalStructuredProperty(Assignment, repeated=True)
class AuthIPWhitelist(ndb.Model, datastore_utils.SerializableModelMixin):
"""A named set of whitelisted IPv4 and IPv6 subnets.
Can be assigned to individual user accounts to forcibly limit them only to
particular IP addresses, e.g. it can be used to enforce that specific service
account is used only from some known IP range. The mapping between accounts
and IP whitelists is stored in AuthIPWhitelistAssignments.
Entity id is a name of the whitelist. Parent entity is root_key().
There's a special IP whitelist named 'bots' that can be used to list
IP addresses of machines the service trusts unconditionally. Requests from
such machines don't have to have any additional credentials attached.
Requests will be authenticated as coming from identity 'bot:<IP address>'.
"""
# How to convert this entity to or from serializable dict.
serializable_properties = {
'subnets': datastore_utils.READABLE | datastore_utils.WRITABLE,
'description': datastore_utils.READABLE | datastore_utils.WRITABLE,
'created_ts': datastore_utils.READABLE,
'created_by': datastore_utils.READABLE,
'modified_ts': datastore_utils.READABLE,
'modified_by': datastore_utils.READABLE,
}
# The list of subnets. The validator is used only as a last measure. JSON API
# handler should do validation too.
subnets = ndb.StringProperty(
repeated=True, validator=lambda _, val: ipaddr.normalize_subnet(val))
# Human readable description.
description = ndb.TextProperty(default='')
# When the list was created.
created_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who created the list.
created_by = IdentityProperty()
# When the list was modified.
modified_ts = ndb.DateTimeProperty(auto_now_add=True)
# Who modified the list the last time.
modified_by = IdentityProperty()
def is_ip_whitelisted(self, ip):
"""Returns True if ipaddr.IP is in the whitelist."""
# TODO(vadimsh): If number of subnets to check grows it makes sense to add
# an internal cache to 'subnet_from_string' (sort of like in re.compile).
return any(
ipaddr.is_in_subnet(ip, ipaddr.subnet_from_string(net))
for net in self.subnets)
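# Illustrative only (helper names from the companion ipaddr module are shown
# for illustration): an AuthIPWhitelist with subnets ['127.0.0.1/32', '10.0.0.0/8']
# would report is_ip_whitelisted(ipaddr.ip_from_string('10.1.2.3')) as True and
# is_ip_whitelisted(ipaddr.ip_from_string('192.168.0.1')) as False.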
def ip_whitelist_key(name):
"""Returns ndb.Key for AuthIPWhitelist entity given its name."""
return ndb.Key(AuthIPWhitelist, name, parent=root_key())
def is_valid_ip_whitelist_name(name):
"""True if string looks like a valid IP whitelist name."""
return bool(IP_WHITELIST_NAME_RE.match(name))
@ndb.transactional
def bootstrap_ip_whitelist(name, subnets, description=''):
"""Adds subnets to an IP whitelist if not there yet.
Can be used on local dev appserver to add 127.0.0.1 to IP whitelist during
startup. Should not be used from request handlers.
Args:
name: IP whitelist name to add a subnet to.
subnets: IP subnet to add (as a list of strings).
description: description of IP whitelist (if new entity is created).
Returns:
True if entry was added, False if it is already there or subnet is invalid.
"""
assert isinstance(subnets, (list, tuple))
try:
subnets = [ipaddr.normalize_subnet(s) for s in subnets]
except ValueError:
return False
key = ip_whitelist_key(name)
entity = key.get()
if entity and all(s in entity.subnets for s in subnets):
return False
if not entity:
entity = AuthIPWhitelist(
key=key,
description=description,
created_by=get_service_self_identity(),
modified_by=get_service_self_identity())
for s in subnets:
if s not in entity.subnets:
entity.subnets.append(s)
entity.modified_by = get_service_self_identity()
entity.put()
replicate_auth_db()
return True
def bootstrap_loopback_ips():
"""Adds 127.0.0.1 and ::1 to 'bots' IP whitelist.
Useful on local dev server and in tests. Must not be used in production.
Returns list of corresponding bot Identities.
"""
# See api.py, AuthDB.verify_ip_whitelisted for IP -> Identity conversion.
assert utils.is_local_dev_server()
bootstrap_ip_whitelist(BOTS_IP_WHITELIST, ['127.0.0.1', '::1'], 'Local bots')
return [
Identity(IDENTITY_BOT, '127.0.0.1'),
Identity(IDENTITY_BOT, '0-0-0-0-0-0-0-1'),
]
@ndb.transactional
def bootstrap_ip_whitelist_assignment(identity, ip_whitelist, comment=''):
"""Sets a mapping "identity -> IP whitelist to use" for some account.
Replaces existing assignment. Can be used on local dev appserver to configure
IP whitelist assignments during startup or in tests. Should not be used from
request handlers.
Args:
identity: Identity to modify.
ip_whitelist: name of AuthIPWhitelist to assign.
comment: comment to set.
Returns:
True if IP whitelist assignment was modified, False if it was already set.
"""
entity = (
ip_whitelist_assignments_key().get() or
AuthIPWhitelistAssignments(key=ip_whitelist_assignments_key()))
found = False
for assignment in entity.assignments:
if assignment.identity == identity:
if assignment.ip_whitelist == ip_whitelist:
return False
assignment.ip_whitelist = ip_whitelist
assignment.comment = comment
found = True
break
if not found:
entity.assignments.append(
AuthIPWhitelistAssignments.Assignment(
identity=identity,
ip_whitelist=ip_whitelist,
comment=comment,
created_by=get_service_self_identity()))
entity.put()
replicate_auth_db()
return True
def fetch_ip_whitelists():
"""Fetches AuthIPWhitelistAssignments and relevant AuthIPWhitelist entities.
Returns:
(AuthIPWhitelistAssignments, list of AuthIPWhitelist).
"""
assignments = (
ip_whitelist_assignments_key().get() or
AuthIPWhitelistAssignments(key=ip_whitelist_assignments_key()))
names = set(a.ip_whitelist for a in assignments.assignments)
names.add(BOTS_IP_WHITELIST)
whitelists = ndb.get_multi(ip_whitelist_key(n) for n in names)
whitelists = sorted(filter(None, whitelists), key=lambda x: x.key.id())
return assignments, whitelists
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_put_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
encryption_scope_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2020-08-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"encryptionScopeName": _SERIALIZER.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_patch_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
encryption_scope_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2020-08-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"encryptionScopeName": _SERIALIZER.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PATCH",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_get_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
encryption_scope_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-08-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
"encryptionScopeName": _SERIALIZER.url("encryption_scope_name", encryption_scope_name, 'str', max_length=63, min_length=3),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
resource_group_name: str,
account_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-08-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
"accountName": _SERIALIZER.url("account_name", account_name, 'str', max_length=24, min_length=3),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class EncryptionScopesOperations(object):
"""EncryptionScopesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2020_08_01_preview.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def put(
self,
resource_group_name: str,
account_name: str,
encryption_scope_name: str,
encryption_scope: "_models.EncryptionScope",
**kwargs: Any
) -> "_models.EncryptionScope":
"""Synchronously creates or updates an encryption scope under the specified storage account. If an
encryption scope is already created and a subsequent request is issued with different
properties, the encryption scope properties will be updated per the specified request.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param encryption_scope_name: The name of the encryption scope within the specified storage
account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type encryption_scope_name: str
:param encryption_scope: Encryption scope properties to be used for the create or update.
:type encryption_scope: ~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScope
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EncryptionScope, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScope
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(encryption_scope, 'EncryptionScope')
request = build_put_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
encryption_scope_name=encryption_scope_name,
content_type=content_type,
json=_json,
template_url=self.put.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('EncryptionScope', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('EncryptionScope', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore
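    # Illustrative usage sketch (hypothetical names; assumes azure-identity and the
    # top-level storage management client that wires this operation group up):
    #   from azure.identity import DefaultAzureCredential
    #   from azure.mgmt.storage import StorageManagementClient
    #   from azure.mgmt.storage.v2020_08_01_preview.models import EncryptionScope
    #   client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #   scope = client.encryption_scopes.put(
    #       "my-resource-group", "mystorageacct", "myscope",
    #       EncryptionScope(source="Microsoft.Storage", state="Enabled"))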
@distributed_trace
def patch(
self,
resource_group_name: str,
account_name: str,
encryption_scope_name: str,
encryption_scope: "_models.EncryptionScope",
**kwargs: Any
) -> "_models.EncryptionScope":
"""Update encryption scope properties as specified in the request body. Update fails if the
specified encryption scope does not already exist.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param encryption_scope_name: The name of the encryption scope within the specified storage
account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type encryption_scope_name: str
:param encryption_scope: Encryption scope properties to be used for the update.
:type encryption_scope: ~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScope
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EncryptionScope, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScope
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(encryption_scope, 'EncryptionScope')
request = build_patch_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
encryption_scope_name=encryption_scope_name,
content_type=content_type,
json=_json,
template_url=self.patch.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('EncryptionScope', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
patch.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
account_name: str,
encryption_scope_name: str,
**kwargs: Any
) -> "_models.EncryptionScope":
"""Returns the properties for the specified encryption scope.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param encryption_scope_name: The name of the encryption scope within the specified storage
account. Encryption scope names must be between 3 and 63 characters in length and use numbers,
lower-case letters and dash (-) only. Every dash (-) character must be immediately preceded and
followed by a letter or number.
:type encryption_scope_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: EncryptionScope, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScope
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScope"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
encryption_scope_name=encryption_scope_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('EncryptionScope', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes/{encryptionScopeName}'} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
account_name: str,
**kwargs: Any
) -> Iterable["_models.EncryptionScopeListResult"]:
"""Lists all the encryption scopes available under the specified storage account.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either EncryptionScopeListResult or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2020_08_01_preview.models.EncryptionScopeListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.EncryptionScopeListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("EncryptionScopeListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/encryptionScopes'} # type: ignore
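    # A minimal usage sketch (not part of the generated code; the resource
    # names below are hypothetical). These operations are normally reached
    # through the storage management client rather than instantiated directly:
    #
    #     from azure.identity import DefaultAzureCredential
    #     from azure.mgmt.storage import StorageManagementClient
    #
    #     client = StorageManagementClient(DefaultAzureCredential(), "<subscription-id>")
    #     scope = client.encryption_scopes.get("my-rg", "mystorageacct", "myscope")
    #     for s in client.encryption_scopes.list("my-rg", "mystorageacct"):
    #         print(s.name)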
|
|
# Python test set -- part 5, built-in exceptions
import os
import sys
import unittest
import pickle, cPickle
from test.test_support import (TESTFN, unlink, run_unittest, captured_output,
check_warnings)
from test.test_pep352 import ignore_deprecation_warnings
# XXX This is not really enough, each *operation* should be tested!
class ExceptionTests(unittest.TestCase):
def testReload(self):
# Reloading the built-in exceptions module failed prior to Py2.2, while it
# should act the same as reloading built-in sys.
try:
from imp import reload
import exceptions
reload(exceptions)
except ImportError, e:
self.fail("reloading exceptions: %s" % e)
def raise_catch(self, exc, excname):
try:
raise exc, "spam"
except exc, err:
buf1 = str(err)
try:
raise exc("spam")
except exc, err:
buf2 = str(err)
self.assertEquals(buf1, buf2)
self.assertEquals(exc.__name__, excname)
def testRaising(self):
self.raise_catch(AttributeError, "AttributeError")
self.assertRaises(AttributeError, getattr, sys, "undefined_attribute")
self.raise_catch(EOFError, "EOFError")
fp = open(TESTFN, 'w')
fp.close()
fp = open(TESTFN, 'r')
savestdin = sys.stdin
try:
try:
sys.stdin = fp
x = raw_input()
except EOFError:
pass
finally:
sys.stdin = savestdin
fp.close()
unlink(TESTFN)
self.raise_catch(IOError, "IOError")
self.assertRaises(IOError, open, 'this file does not exist', 'r')
self.raise_catch(ImportError, "ImportError")
self.assertRaises(ImportError, __import__, "undefined_module")
self.raise_catch(IndexError, "IndexError")
x = []
self.assertRaises(IndexError, x.__getitem__, 10)
self.raise_catch(KeyError, "KeyError")
x = {}
self.assertRaises(KeyError, x.__getitem__, 'key')
self.raise_catch(KeyboardInterrupt, "KeyboardInterrupt")
self.raise_catch(MemoryError, "MemoryError")
self.raise_catch(NameError, "NameError")
try: x = undefined_variable
except NameError: pass
self.raise_catch(OverflowError, "OverflowError")
x = 1
for dummy in range(128):
x += x # this simply shouldn't blow up
self.raise_catch(RuntimeError, "RuntimeError")
self.raise_catch(SyntaxError, "SyntaxError")
try: exec '/\n'
except SyntaxError: pass
self.raise_catch(IndentationError, "IndentationError")
self.raise_catch(TabError, "TabError")
# can only be tested under -tt, and is the only test for -tt
#try: compile("try:\n\t1/0\n \t1/0\nfinally:\n pass\n", '<string>', 'exec')
#except TabError: pass
#else: self.fail("TabError not raised")
self.raise_catch(SystemError, "SystemError")
self.raise_catch(SystemExit, "SystemExit")
self.assertRaises(SystemExit, sys.exit, 0)
self.raise_catch(TypeError, "TypeError")
try: [] + ()
except TypeError: pass
self.raise_catch(ValueError, "ValueError")
self.assertRaises(ValueError, chr, 10000)
self.raise_catch(ZeroDivisionError, "ZeroDivisionError")
try: x = 1 // 0
except ZeroDivisionError: pass
self.raise_catch(Exception, "Exception")
try: x = 1 // 0
except Exception, e: pass
def testSyntaxErrorMessage(self):
# make sure the right exception message is raised for each of
# these code fragments
def ckmsg(src, msg):
try:
compile(src, '<fragment>', 'exec')
except SyntaxError, e:
if e.msg != msg:
self.fail("expected %s, got %s" % (msg, e.msg))
else:
self.fail("failed to get expected SyntaxError")
s = '''while 1:
try:
pass
finally:
continue'''
if not sys.platform.startswith('java'):
ckmsg(s, "'continue' not supported inside 'finally' clause")
s = '''if 1:
try:
continue
except:
pass'''
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
def testSettingException(self):
# test that setting an exception at the C level works even if the
# exception object can't be constructed.
class BadException:
def __init__(self_):
raise RuntimeError, "can't instantiate BadException"
def test_capi1():
import _testcapi
try:
_testcapi.raise_exception(BadException, 1)
except TypeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEquals(co.co_name, "test_capi1")
self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
else:
self.fail("Expected exception")
def test_capi2():
import _testcapi
try:
_testcapi.raise_exception(BadException, 0)
except RuntimeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEquals(co.co_name, "__init__")
self.assert_(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
co2 = tb.tb_frame.f_back.f_code
self.assertEquals(co2.co_name, "test_capi2")
else:
self.fail("Expected exception")
if not sys.platform.startswith('java'):
test_capi1()
test_capi2()
def test_WindowsError(self):
try:
WindowsError
except NameError:
pass
else:
self.failUnlessEqual(str(WindowsError(1001)),
"1001")
self.failUnlessEqual(str(WindowsError(1001, "message")),
"[Error 1001] message")
self.failUnlessEqual(WindowsError(1001, "message").errno, 22)
self.failUnlessEqual(WindowsError(1001, "message").winerror, 1001)
@ignore_deprecation_warnings
def testAttributes(self):
# test that exception attributes are happy
exceptionList = [
(BaseException, (), {'message' : '', 'args' : ()}),
(BaseException, (1, ), {'message' : 1, 'args' : (1,)}),
(BaseException, ('foo',),
{'message' : 'foo', 'args' : ('foo',)}),
(BaseException, ('foo', 1),
{'message' : '', 'args' : ('foo', 1)}),
(SystemExit, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}),
(IOError, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'filename' : None,
'errno' : None, 'strerror' : None}),
(IOError, ('foo', 'bar'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : None,
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz',
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}),
(EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
(EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'message' : '', 'msg' : None, 'text' : None,
'filename' : None, 'lineno' : None, 'offset' : None,
'print_file_and_line' : None}),
(SyntaxError, ('msgStr',),
{'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None,
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr',
'textStr')),
{'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr',
'args' : ('msgStr', ('filenameStr', 'linenoStr',
'offsetStr', 'textStr')),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : 'filenameStr', 'lineno' : 'linenoStr'}),
(SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
{'message' : '', 'text' : None,
'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(UnicodeError, (), {'message' : '', 'args' : (),}),
(UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', u'a', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : u'a',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', '\xff', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : '\xff',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"),
{'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'),
'object' : u'\u3042', 'reason' : 'ouch',
'start' : 0, 'end' : 1}),
]
try:
exceptionList.append(
(WindowsError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'),
'strerror' : 'strErrorStr', 'winerror' : 1,
'errno' : 22, 'filename' : 'filenameStr'})
)
except NameError:
pass
for exc, args, expected in exceptionList:
try:
raise exc(*args)
except BaseException, e:
if type(e) is not exc:
raise
# Verify module name
self.assertEquals(type(e).__module__, 'exceptions')
# Verify no ref leaks in Exc_str()
s = str(e)
for checkArgName in expected:
self.assertEquals(repr(getattr(e, checkArgName)),
repr(expected[checkArgName]),
'exception "%s", attribute "%s"' %
(repr(e), checkArgName))
# test for pickling support
for p in pickle, cPickle:
for protocol in range(p.HIGHEST_PROTOCOL + 1):
new = p.loads(p.dumps(e, protocol))
for checkArgName in expected:
got = repr(getattr(new, checkArgName))
want = repr(expected[checkArgName])
self.assertEquals(got, want,
'pickled "%r", attribute "%s"' %
(e, checkArgName))
def testDeprecatedMessageAttribute(self):
# Accessing BaseException.message and relying on its value set by
# BaseException.__init__ triggers a deprecation warning.
exc = BaseException("foo")
with check_warnings(("BaseException.message has been deprecated "
"as of Python 2.6", DeprecationWarning)) as w:
self.assertEqual(exc.message, "foo")
self.assertEqual(len(w.warnings), 1)
def testRegularMessageAttribute(self):
# Accessing BaseException.message after explicitly setting a value
# for it does not trigger a deprecation warning.
exc = BaseException("foo")
exc.message = "bar"
with check_warnings(quiet=True) as w:
self.assertEqual(exc.message, "bar")
self.assertEqual(len(w.warnings), 0)
# Deleting the message is supported, too.
del exc.message
self.assertRaises(AttributeError, getattr, exc, "message")
@ignore_deprecation_warnings
def testPickleMessageAttribute(self):
# Pickling with message attribute must work, as well.
e = Exception("foo")
f = Exception("foo")
f.message = "bar"
for p in pickle, cPickle:
ep = p.loads(p.dumps(e))
self.assertEqual(ep.message, "foo")
fp = p.loads(p.dumps(f))
self.assertEqual(fp.message, "bar")
@ignore_deprecation_warnings
def testSlicing(self):
        # Test that you can slice an exception directly instead of having to
        # go through the 'args' attribute.
args = (1, 2, 3)
exc = BaseException(*args)
self.failUnlessEqual(exc[:], args)
self.assertEqual(exc.args[:], args)
def testKeywordArgs(self):
        # test that built-in exceptions don't take keyword args,
        # but user-defined subclasses can if they want
self.assertRaises(TypeError, BaseException, a=1)
class DerivedException(BaseException):
def __init__(self, fancy_arg):
BaseException.__init__(self)
self.fancy_arg = fancy_arg
x = DerivedException(fancy_arg=42)
self.assertEquals(x.fancy_arg, 42)
def testInfiniteRecursion(self):
def f():
return f()
self.assertRaises(RuntimeError, f)
def g():
try:
return g()
except ValueError:
return -1
        # The test prints an unraisable recursion error when
        # doing "except ValueError"; this is because subclass
        # checking has recursion checking too.
with captured_output("stderr"):
try:
g()
except RuntimeError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
def testUnicodeStrUsage(self):
# Make sure both instances and classes have a str and unicode
# representation.
self.failUnless(str(Exception))
self.failUnless(unicode(Exception))
self.failUnless(str(Exception('a')))
self.failUnless(unicode(Exception(u'a')))
self.failUnless(unicode(Exception(u'\xe1')))
def testUnicodeChangeAttributes(self):
# See issue 7309. This was a crasher.
u = UnicodeEncodeError('baz', u'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't encode character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1000-4: 965230951443685724997")
u = UnicodeDecodeError('baz', 'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't decode byte 0x78 in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1000-4: 965230951443685724997")
u = UnicodeTranslateError(u'xxxx', 1, 5, 'foo')
self.assertEqual(str(u), "can't translate characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "can't translate character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "can't translate characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997")
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
class Meta(type):
def __subclasscheck__(cls, subclass):
raise ValueError()
class MyException(Exception):
__metaclass__ = Meta
pass
with captured_output("stderr") as stderr:
try:
raise KeyError()
except MyException, e:
self.fail("exception should not be a MyException")
except KeyError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
with captured_output("stderr") as stderr:
def g():
try:
return g()
except RuntimeError:
return sys.exc_info()
e, v, tb = g()
self.assert_(e is RuntimeError, e)
self.assert_("maximum recursion depth exceeded" in str(v), v)
# Helper class used by TestSameStrAndUnicodeMsg
class ExcWithOverriddenStr(Exception):
"""Subclass of Exception that accepts a keyword 'msg' arg that is
returned by __str__. 'msg' won't be included in self.args"""
def __init__(self, *args, **kwargs):
self.msg = kwargs.pop('msg') # msg should always be present
super(ExcWithOverriddenStr, self).__init__(*args, **kwargs)
def __str__(self):
return self.msg
class TestSameStrAndUnicodeMsg(unittest.TestCase):
"""unicode(err) should return the same message of str(err). See #6108"""
def check_same_msg(self, exc, msg):
"""Helper function that checks if str(exc) == unicode(exc) == msg"""
self.assertEqual(str(exc), msg)
self.assertEqual(str(exc), unicode(exc))
def test_builtin_exceptions(self):
"""Check same msg for built-in exceptions"""
# These exceptions implement a __str__ method that uses the args
# to create a better error message. unicode(e) should return the same
# message.
exceptions = [
SyntaxError('invalid syntax', ('<string>', 1, 3, '2+*3')),
IOError(2, 'No such file or directory'),
KeyError('both should have the same quotes'),
UnicodeDecodeError('ascii', '\xc3\xa0', 0, 1,
'ordinal not in range(128)'),
UnicodeEncodeError('ascii', u'\u1234', 0, 1,
'ordinal not in range(128)')
]
for exception in exceptions:
self.assertEqual(str(exception), unicode(exception))
def test_0_args(self):
"""Check same msg for Exception with 0 args"""
# str() and unicode() on an Exception with no args should return an
# empty string
self.check_same_msg(Exception(), '')
def test_0_args_with_overridden___str__(self):
"""Check same msg for exceptions with 0 args and overridden __str__"""
# str() and unicode() on an exception with overridden __str__ that
# returns an ascii-only string should return the same string
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr(msg=msg), msg)
# if __str__ returns a non-ascii unicode string str() should fail
# but unicode() should return the unicode string
e = ExcWithOverriddenStr(msg=u'f\xf6\xf6') # no args
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg(self):
"""Check same msg for Exceptions with 1 arg"""
for arg in ('foo', u'foo'):
self.check_same_msg(Exception(arg), arg)
# if __str__ is not overridden and self.args[0] is a non-ascii unicode
# string, str() should try to return str(self.args[0]) and fail.
# unicode() should return unicode(self.args[0]) and succeed.
e = Exception(u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and 1 arg"""
# when __str__ is overridden and __unicode__ is not implemented
# unicode(e) returns the same as unicode(e.__str__()).
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr('arg', msg=msg), msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed.
e = ExcWithOverriddenStr('arg', msg=u'f\xf6\xf6') # 1 arg
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_many_args(self):
"""Check same msg for Exceptions with many args"""
argslist = [
(3, 'foo'),
(1, u'foo', 'bar'),
(4, u'f\xf6\xf6', u'bar', 'baz')
]
# both str() and unicode() should return a repr() of the args
for args in argslist:
self.check_same_msg(Exception(*args), repr(args))
def test_many_args_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and many args"""
# if __str__ returns an ascii string / ascii unicode string
# both str() and unicode() should succeed
for msg in ('foo', u'foo'):
e = ExcWithOverriddenStr('arg1', u'arg2', u'f\xf6\xf6', msg=msg)
self.check_same_msg(e, msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed
e = ExcWithOverriddenStr('arg1', u'f\xf6\xf6', u'arg3', # 3 args
msg=u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_main():
run_unittest(ExceptionTests, TestSameStrAndUnicodeMsg)
if __name__ == '__main__':
test_main()
|
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""Miscellaneous file manipulation functions
"""
from future import standard_library
standard_library.install_aliases()
import pickle
import warnings
import gzip
import hashlib
from hashlib import md5
import simplejson
import os
import re
import shutil
import numpy as np
from .misc import is_container
from .config import mkdir_p
from ..external.six import string_types
from ..interfaces.traits_extension import isdefined
from .. import logging, config
fmlogger = logging.getLogger("filemanip")
class FileNotFoundError(Exception):
pass
def nipype_hardlink_wrapper(raw_src, raw_dst):
"""Attempt to use hard link instead of file copy.
    The intent is to avoid unnecessary duplication
of large files when using a DataSink.
Hard links are not supported on all file systems
or os environments, and will not succeed if the
src and dst are not on the same physical hardware
partition.
If the hardlink fails, then fall back to using
a standard copy.
"""
src = os.path.normpath(raw_src)
dst = os.path.normpath(raw_dst)
del raw_src
del raw_dst
if src != dst and os.path.exists(dst):
os.unlink(dst) # First remove destination
try:
os.link(src, dst) # Reference same inode to avoid duplication
except:
shutil.copyfile(src, dst) # Fall back to traditional copy
def split_filename(fname):
"""Split a filename into parts: path, base filename and extension.
Parameters
----------
fname : str
file or path name
Returns
-------
pth : str
base path from fname
fname : str
filename from fname, without extension
ext : str
file extension from fname
Examples
--------
>>> from nipype.utils.filemanip import split_filename
>>> pth, fname, ext = split_filename('/home/data/subject.nii.gz')
>>> pth
'/home/data'
>>> fname
'subject'
>>> ext
'.nii.gz'
"""
special_extensions = [".nii.gz", ".tar.gz"]
pth = os.path.dirname(fname)
fname = os.path.basename(fname)
ext = None
for special_ext in special_extensions:
ext_len = len(special_ext)
if (len(fname) > ext_len) and \
(fname[-ext_len:].lower() == special_ext.lower()):
ext = fname[-ext_len:]
fname = fname[:-ext_len]
break
if not ext:
fname, ext = os.path.splitext(fname)
return pth, fname, ext
def fname_presuffix(fname, prefix='', suffix='', newpath=None, use_ext=True):
"""Manipulates path and name of input filename
Parameters
----------
fname : string
A filename (may or may not include path)
prefix : string
Characters to prepend to the filename
suffix : string
Characters to append to the filename
newpath : string
Path to replace the path of the input fname
use_ext : boolean
If True (default), appends the extension of the original file
to the output name.
Returns
-------
Absolute path of the modified filename
>>> from nipype.utils.filemanip import fname_presuffix
>>> fname = 'foo.nii.gz'
>>> fname_presuffix(fname,'pre','post','/tmp')
'/tmp/prefoopost.nii.gz'
"""
pth, fname, ext = split_filename(fname)
if not use_ext:
ext = ''
if newpath and isdefined(newpath):
pth = os.path.abspath(newpath)
return os.path.join(pth, prefix + fname + suffix + ext)
def fnames_presuffix(fnames, prefix='', suffix='', newpath=None, use_ext=True):
"""Calls fname_presuffix for a list of files.
"""
f2 = []
for fname in fnames:
f2.append(fname_presuffix(fname, prefix, suffix, newpath, use_ext))
return f2
def hash_rename(filename, hashvalue):
"""renames a file given original filename and hash
and sets path to output_directory
"""
path, name, ext = split_filename(filename)
newfilename = ''.join((name, '_0x', hashvalue, ext))
return os.path.join(path, newfilename)
def check_forhash(filename):
"""checks if file has a hash in its filename"""
if isinstance(filename, list):
filename = filename[0]
path, name = os.path.split(filename)
if re.search('(_0x[a-z0-9]{32})', name):
hashvalue = re.findall('(_0x[a-z0-9]{32})', name)
return True, hashvalue
else:
return False, None
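# A sketch of the naming convention used by hash_rename/check_forhash
# (the path and hash value below are made up):
#
#     hash_rename('/tmp/brain.nii', 'd41d8cd98f00b204e9800998ecf8427e')
#     -> '/tmp/brain_0xd41d8cd98f00b204e9800998ecf8427e.nii'
#
# check_forhash() recognizes the '_0x' + 32-character hex suffix produced here.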
def hash_infile(afile, chunk_len=8192, crypto=hashlib.md5):
""" Computes hash of a file using 'crypto' module"""
hex = None
if os.path.isfile(afile):
crypto_obj = crypto()
with open(afile, 'rb') as fp:
while True:
data = fp.read(chunk_len)
if not data:
break
crypto_obj.update(data)
hex = crypto_obj.hexdigest()
return hex
def hash_timestamp(afile):
""" Computes md5 hash of the timestamp of a file """
md5hex = None
if os.path.isfile(afile):
md5obj = md5()
stat = os.stat(afile)
md5obj.update(str(stat.st_size).encode())
md5obj.update(str(stat.st_mtime).encode())
md5hex = md5obj.hexdigest()
return md5hex
def copyfile(originalfile, newfile, copy=False, create_new=False,
hashmethod=None, use_hardlink=False):
"""Copy or symlink ``originalfile`` to ``newfile``.
Parameters
----------
originalfile : str
full path to original file
newfile : str
full path to new file
copy : Bool
specifies whether to copy or symlink files
(default=False) but only for POSIX systems
Returns
-------
    newfile : str
        The path of the destination file (may include a '_cNNNN' suffix when
        ``create_new`` is True and the file already exists).
"""
newhash = None
orighash = None
fmlogger.debug(newfile)
if create_new:
while os.path.exists(newfile):
base, fname, ext = split_filename(newfile)
s = re.search('_c[0-9]{4,4}$', fname)
i = 0
if s:
i = int(s.group()[2:]) + 1
fname = fname[:-6] + "_c%04d" % i
else:
fname += "_c%04d" % i
newfile = base + os.sep + fname + ext
if hashmethod is None:
hashmethod = config.get('execution', 'hash_method').lower()
elif os.path.exists(newfile):
if hashmethod == 'timestamp':
newhash = hash_timestamp(newfile)
elif hashmethod == 'content':
newhash = hash_infile(newfile)
fmlogger.debug("File: %s already exists,%s, copy:%d"
% (newfile, newhash, copy))
# the following seems unnecessary
# if os.name is 'posix' and copy:
# if os.path.lexists(newfile) and os.path.islink(newfile):
# os.unlink(newfile)
# newhash = None
    if os.name == 'posix' and not copy:
if os.path.lexists(newfile):
if hashmethod == 'timestamp':
orighash = hash_timestamp(originalfile)
elif hashmethod == 'content':
orighash = hash_infile(originalfile)
fmlogger.debug('Original hash: %s, %s' % (originalfile, orighash))
if newhash != orighash:
os.unlink(newfile)
if (newhash is None) or (newhash != orighash):
os.symlink(originalfile, newfile)
else:
if newhash:
if hashmethod == 'timestamp':
orighash = hash_timestamp(originalfile)
elif hashmethod == 'content':
orighash = hash_infile(originalfile)
if (newhash is None) or (newhash != orighash):
try:
fmlogger.debug("Copying File: %s->%s" %
(newfile, originalfile))
if use_hardlink:
nipype_hardlink_wrapper(originalfile, newfile)
else:
shutil.copyfile(originalfile, newfile)
except shutil.Error as e:
fmlogger.warn(e.message)
else:
fmlogger.debug("File: %s already exists, not overwriting, copy:%d"
% (newfile, copy))
if originalfile.endswith(".img"):
hdrofile = originalfile[:-4] + ".hdr"
hdrnfile = newfile[:-4] + ".hdr"
matofile = originalfile[:-4] + ".mat"
if os.path.exists(matofile):
matnfile = newfile[:-4] + ".mat"
copyfile(matofile, matnfile, copy)
copyfile(hdrofile, hdrnfile, copy)
elif originalfile.endswith(".BRIK"):
hdrofile = originalfile[:-5] + ".HEAD"
hdrnfile = newfile[:-5] + ".HEAD"
copyfile(hdrofile, hdrnfile, copy)
return newfile
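# A minimal usage sketch for copyfile (the paths are hypothetical):
#
#     copyfile('/data/raw/sub01.nii.gz', '/data/derived/sub01.nii.gz',
#              copy=True, use_hardlink=True)
#
# With copy=False on a POSIX system the destination becomes a symlink to the
# original; with use_hardlink=True a hard link is attempted first and a plain
# copy is used as the fallback (see nipype_hardlink_wrapper above).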
def get_related_files(filename):
"""Returns a list of related files for Nifti-Pair, Analyze (SPM) and AFNI
files
"""
related_files = []
if filename.endswith(".img") or filename.endswith(".hdr"):
path, name, ext = split_filename(filename)
for ext in ['.hdr', '.img', '.mat']:
related_files.append(os.path.join(path, name + ext))
elif filename.endswith(".BRIK") or filename.endswith(".HEAD"):
path, name, ext = split_filename(filename)
for ext in ['.BRIK', '.HEAD']:
related_files.append(os.path.join(path, name + ext))
if not len(related_files):
related_files = [filename]
return related_files
def copyfiles(filelist, dest, copy=False, create_new=False):
"""Copy or symlink files in ``filelist`` to ``dest`` directory.
Parameters
----------
filelist : list
List of files to copy.
dest : path/files
full path to destination. If it is a list of length greater
than 1, then it assumes that these are the names of the new
files.
copy : Bool
specifies whether to copy or symlink files
(default=False) but only for posix systems
Returns
-------
    newfiles : list of str
        The list of destination file paths.
"""
outfiles = filename_to_list(dest)
newfiles = []
for i, f in enumerate(filename_to_list(filelist)):
if isinstance(f, list):
newfiles.insert(i, copyfiles(f, dest, copy=copy,
create_new=create_new))
else:
if len(outfiles) > 1:
destfile = outfiles[i]
else:
destfile = fname_presuffix(f, newpath=outfiles[0])
destfile = copyfile(f, destfile, copy, create_new=create_new)
newfiles.insert(i, destfile)
return newfiles
def filename_to_list(filename):
"""Returns a list given either a string or a list
"""
if isinstance(filename, (str, string_types)):
return [filename]
elif isinstance(filename, list):
return filename
elif is_container(filename):
return [x for x in filename]
else:
return None
def list_to_filename(filelist):
"""Returns a list if filelist is a list of length greater than 1,
otherwise returns the first element
"""
if len(filelist) > 1:
return filelist
else:
return filelist[0]
def save_json(filename, data):
"""Save data to a json file
Parameters
----------
filename : str
Filename to save data in.
data : dict
Dictionary to save in json file.
"""
with open(filename, 'w') as fp:
simplejson.dump(data, fp, sort_keys=True, indent=4)
def load_json(filename):
"""Load data from a json file
Parameters
----------
filename : str
Filename to load data from.
Returns
-------
data : dict
"""
with open(filename, 'r') as fp:
data = simplejson.load(fp)
return data
def loadcrash(infile, *args):
if '.pkl' in infile:
return loadpkl(infile)
elif '.npz' in infile:
        warnings.warn('npz files will be deprecated in the next '
                      'release. you can use numpy to open them.',
                      DeprecationWarning)
data = np.load(infile)
out = {}
for k in data.files:
out[k] = [f for f in data[k].flat]
if len(out[k]) == 1:
out[k] = out[k].pop()
return out
else:
raise ValueError('Only pickled crashfiles are supported')
def loadpkl(infile):
"""Load a zipped or plain cPickled file
"""
if infile.endswith('pklz'):
pkl_file = gzip.open(infile, 'rb')
else:
        pkl_file = open(infile, 'rb')
return pickle.load(pkl_file)
def savepkl(filename, record):
if filename.endswith('pklz'):
pkl_file = gzip.open(filename, 'wb')
else:
pkl_file = open(filename, 'wb')
pickle.dump(record, pkl_file)
pkl_file.close()
rst_levels = ['=', '-', '~', '+']
def write_rst_header(header, level=0):
return '\n'.join((header, ''.join([rst_levels[level]
for _ in header]))) + '\n\n'
def write_rst_list(items, prefix=''):
out = []
for item in items:
out.append(prefix + ' ' + str(item))
return '\n'.join(out) + '\n\n'
def write_rst_dict(info, prefix=''):
out = []
for key, value in sorted(info.items()):
out.append(prefix + '* ' + key + ' : ' + str(value))
return '\n'.join(out) + '\n\n'
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.ads.googleads.v10.services.types import (
keyword_plan_ad_group_keyword_service,
)
from google.rpc import status_pb2 # type: ignore
from .transports.base import (
KeywordPlanAdGroupKeywordServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import KeywordPlanAdGroupKeywordServiceGrpcTransport
class KeywordPlanAdGroupKeywordServiceClientMeta(type):
"""Metaclass for the KeywordPlanAdGroupKeywordService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[KeywordPlanAdGroupKeywordServiceTransport]]
_transport_registry["grpc"] = KeywordPlanAdGroupKeywordServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[KeywordPlanAdGroupKeywordServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class KeywordPlanAdGroupKeywordServiceClient(
metaclass=KeywordPlanAdGroupKeywordServiceClientMeta
):
"""Service to manage Keyword Plan ad group keywords.
KeywordPlanAdGroup is required to add ad group keywords.
Positive and negative keywords are supported. A maximum of
10,000 positive keywords are allowed per keyword plan. A maximum
    of 1,000 negative keywords are allowed per keyword plan. This
includes campaign negative keywords and ad group negative
keywords.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
KeywordPlanAdGroupKeywordServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
KeywordPlanAdGroupKeywordServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> KeywordPlanAdGroupKeywordServiceTransport:
"""Returns the transport used by the client instance.
Returns:
KeywordPlanAdGroupKeywordServiceTransport: The transport used by the client
instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def keyword_plan_ad_group_path(
customer_id: str, keyword_plan_ad_group_id: str,
) -> str:
"""Returns a fully-qualified keyword_plan_ad_group string."""
return "customers/{customer_id}/keywordPlanAdGroups/{keyword_plan_ad_group_id}".format(
customer_id=customer_id,
keyword_plan_ad_group_id=keyword_plan_ad_group_id,
)
@staticmethod
def parse_keyword_plan_ad_group_path(path: str) -> Dict[str, str]:
"""Parses a keyword_plan_ad_group path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/keywordPlanAdGroups/(?P<keyword_plan_ad_group_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def keyword_plan_ad_group_keyword_path(
customer_id: str, keyword_plan_ad_group_keyword_id: str,
) -> str:
"""Returns a fully-qualified keyword_plan_ad_group_keyword string."""
return "customers/{customer_id}/keywordPlanAdGroupKeywords/{keyword_plan_ad_group_keyword_id}".format(
customer_id=customer_id,
keyword_plan_ad_group_keyword_id=keyword_plan_ad_group_keyword_id,
)
@staticmethod
def parse_keyword_plan_ad_group_keyword_path(path: str) -> Dict[str, str]:
"""Parses a keyword_plan_ad_group_keyword path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/keywordPlanAdGroupKeywords/(?P<keyword_plan_ad_group_keyword_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[
str, KeywordPlanAdGroupKeywordServiceTransport, None
] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the keyword plan ad group keyword service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, KeywordPlanAdGroupKeywordServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, KeywordPlanAdGroupKeywordServiceTransport):
# transport is a KeywordPlanAdGroupKeywordServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def mutate_keyword_plan_ad_group_keywords(
self,
request: Union[
keyword_plan_ad_group_keyword_service.MutateKeywordPlanAdGroupKeywordsRequest,
dict,
] = None,
*,
customer_id: str = None,
operations: Sequence[
keyword_plan_ad_group_keyword_service.KeywordPlanAdGroupKeywordOperation
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> keyword_plan_ad_group_keyword_service.MutateKeywordPlanAdGroupKeywordsResponse:
r"""Creates, updates, or removes Keyword Plan ad group keywords.
Operation statuses are returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `DatabaseError <>`__ `FieldError <>`__
`HeaderError <>`__ `InternalError <>`__
`KeywordPlanAdGroupKeywordError <>`__ `KeywordPlanError <>`__
`MutateError <>`__ `QuotaError <>`__ `RequestError <>`__
`ResourceCountLimitExceededError <>`__
Args:
request (Union[google.ads.googleads.v10.services.types.MutateKeywordPlanAdGroupKeywordsRequest, dict]):
The request object. Request message for
[KeywordPlanAdGroupKeywordService.MutateKeywordPlanAdGroupKeywords][google.ads.googleads.v10.services.KeywordPlanAdGroupKeywordService.MutateKeywordPlanAdGroupKeywords].
customer_id (str):
Required. The ID of the customer
whose Keyword Plan ad group keywords are
being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (Sequence[google.ads.googleads.v10.services.types.KeywordPlanAdGroupKeywordOperation]):
Required. The list of operations to
perform on individual Keyword Plan ad
group keywords.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v10.services.types.MutateKeywordPlanAdGroupKeywordsResponse:
Response message for a Keyword Plan
ad group keyword mutate.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, operations])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a keyword_plan_ad_group_keyword_service.MutateKeywordPlanAdGroupKeywordsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
keyword_plan_ad_group_keyword_service.MutateKeywordPlanAdGroupKeywordsRequest,
):
request = keyword_plan_ad_group_keyword_service.MutateKeywordPlanAdGroupKeywordsRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_keyword_plan_ad_group_keywords
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("KeywordPlanAdGroupKeywordServiceClient",)
|
|
#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Performance runner for d8.
Call e.g. with tools/run-perf.py --arch ia32 some_suite.json
The suite json format is expected to be:
{
"path": <relative path chunks to perf resources and main file>,
"name": <optional suite name, file name is default>,
"archs": [<architecture name for which this suite is run>, ...],
"binary": <name of binary to run, default "d8">,
"flags": [<flag to d8>, ...],
"test_flags": [<flag to the test file>, ...],
"run_count": <how often will this suite run (optional)>,
"run_count_XXX": <how often will this suite run for arch XXX (optional)>,
"resources": [<js file to be moved to android device>, ...]
"main": <main js perf runner file>,
"results_regexp": <optional regexp>,
"results_processor": <optional python results processor script>,
"units": <the unit specification for the performance dashboard>,
"tests": [
{
"name": <name of the trace>,
"results_regexp": <optional more specific regexp>,
"results_processor": <optional python results processor script>,
"units": <the unit specification for the performance dashboard>,
}, ...
]
}
The tests field can also nest other suites in arbitrary depth. A suite
with a "main" file is a leaf suite that can contain one more level of
tests.
A suite's results_regexp is expected to have one string place holder
"%s" for the trace name. A trace's results_regexp overwrites suite
defaults.
A suite's results_processor may point to an optional python script. If
specified, it is called after running the tests like this (with a path
relative to the suite level's path):
<results_processor file> <same flags as for d8> <suite level name> <output>
The <output> is a temporary file containing d8 output. The results_regexp will
be applied to the output of this script.
A suite without "tests" is considered a performance test itself.
Full example (suite with one runner):
{
"path": ["."],
"flags": ["--expose-gc"],
"test_flags": ["5"],
"archs": ["ia32", "x64"],
"run_count": 5,
"run_count_ia32": 3,
"main": "run.js",
"results_regexp": "^%s: (.+)$",
"units": "score",
"tests": [
{"name": "Richards"},
{"name": "DeltaBlue"},
{"name": "NavierStokes",
"results_regexp": "^NavierStokes: (.+)$"}
]
}
Full example (suite with several runners):
{
"path": ["."],
"flags": ["--expose-gc"],
"archs": ["ia32", "x64"],
"run_count": 5,
"units": "score",
"tests": [
{"name": "Richards",
"path": ["richards"],
"main": "run.js",
"run_count": 3,
"results_regexp": "^Richards: (.+)$"},
{"name": "NavierStokes",
"path": ["navier_stokes"],
"main": "run.js",
"results_regexp": "^NavierStokes: (.+)$"}
]
}
Path pieces are concatenated. D8 is always run with the suite's path as cwd.
The test flags are passed to the js test file after '--'.
"""
from collections import OrderedDict
import json
import logging
import math
import optparse
import os
import re
import sys
from testrunner.local import commands
from testrunner.local import utils
ARCH_GUESS = utils.DefaultArch()
SUPPORTED_ARCHS = ["android_arm",
"android_arm64",
"android_ia32",
"arm",
"ia32",
"mips",
"mipsel",
"nacl_ia32",
"nacl_x64",
"x64",
"arm64"]
GENERIC_RESULTS_RE = re.compile(r"^RESULT ([^:]+): ([^=]+)= ([^ ]+) ([^ ]*)$")
RESULT_STDDEV_RE = re.compile(r"^\{([^\}]+)\}$")
RESULT_LIST_RE = re.compile(r"^\[([^\]]+)\]$")
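# A sketch of the generic "RESULT" output format parsed by the regexps above
# (the graph/trace names and numbers are made up):
#   RESULT Octane: Richards= 1234.5 score
#   RESULT Octane: Richards= {1234.5,6.7} score           (mean with stddev)
#   RESULT Octane: Richards= [1230.1,1235.2,1238.3] score (one value per run)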
def LoadAndroidBuildTools(path): # pragma: no cover
assert os.path.exists(path)
sys.path.insert(0, path)
from pylib.device import device_utils # pylint: disable=F0401
from pylib.device import device_errors # pylint: disable=F0401
from pylib.perf import cache_control # pylint: disable=F0401
from pylib.perf import perf_control # pylint: disable=F0401
import pylib.android_commands # pylint: disable=F0401
global cache_control
global device_errors
global device_utils
global perf_control
global pylib
def GeometricMean(values):
"""Returns the geometric mean of a list of values.
The mean is calculated using log to avoid overflow.
"""
values = map(float, values)
return str(math.exp(sum(map(math.log, values)) / len(values)))
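# For example, GeometricMean([2, 8]) computes exp((ln 2 + ln 8) / 2)
# = exp(ln 16 / 2) = sqrt(16) = 4.0, the same as (2 * 8) ** 0.5 but without
# multiplying the raw values together.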
class Results(object):
"""Place holder for result traces."""
def __init__(self, traces=None, errors=None):
self.traces = traces or []
self.errors = errors or []
def ToDict(self):
return {"traces": self.traces, "errors": self.errors}
def WriteToFile(self, file_name):
with open(file_name, "w") as f:
f.write(json.dumps(self.ToDict()))
def __add__(self, other):
self.traces += other.traces
self.errors += other.errors
return self
def __str__(self): # pragma: no cover
return str(self.ToDict())
class Node(object):
"""Represents a node in the suite tree structure."""
def __init__(self, *args):
self._children = []
def AppendChild(self, child):
self._children.append(child)
class DefaultSentinel(Node):
"""Fake parent node with all default values."""
def __init__(self):
super(DefaultSentinel, self).__init__()
self.binary = "d8"
self.run_count = 10
self.timeout = 60
self.path = []
self.graphs = []
self.flags = []
self.test_flags = []
self.resources = []
self.results_regexp = None
self.stddev_regexp = None
self.units = "score"
self.total = False
class Graph(Node):
"""Represents a suite definition.
Can either be a leaf or an inner node that provides default values.
"""
def __init__(self, suite, parent, arch):
super(Graph, self).__init__()
self._suite = suite
assert isinstance(suite.get("path", []), list)
assert isinstance(suite["name"], basestring)
assert isinstance(suite.get("flags", []), list)
assert isinstance(suite.get("test_flags", []), list)
assert isinstance(suite.get("resources", []), list)
# Accumulated values.
self.path = parent.path[:] + suite.get("path", [])
self.graphs = parent.graphs[:] + [suite["name"]]
self.flags = parent.flags[:] + suite.get("flags", [])
self.test_flags = parent.test_flags[:] + suite.get("test_flags", [])
# Values independent of parent node.
self.resources = suite.get("resources", [])
    # Discrete values (with parent defaults).
self.binary = suite.get("binary", parent.binary)
self.run_count = suite.get("run_count", parent.run_count)
self.run_count = suite.get("run_count_%s" % arch, self.run_count)
self.timeout = suite.get("timeout", parent.timeout)
self.timeout = suite.get("timeout_%s" % arch, self.timeout)
self.units = suite.get("units", parent.units)
self.total = suite.get("total", parent.total)
# A regular expression for results. If the parent graph provides a
# regexp and the current suite has none, a string place holder for the
# suite name is expected.
# TODO(machenbach): Currently that makes only sense for the leaf level.
# Multiple place holders for multiple levels are not supported.
if parent.results_regexp:
regexp_default = parent.results_regexp % re.escape(suite["name"])
else:
regexp_default = None
self.results_regexp = suite.get("results_regexp", regexp_default)
# A similar regular expression for the standard deviation (optional).
if parent.stddev_regexp:
stddev_default = parent.stddev_regexp % re.escape(suite["name"])
else:
stddev_default = None
self.stddev_regexp = suite.get("stddev_regexp", stddev_default)
class Trace(Graph):
"""Represents a leaf in the suite tree structure.
Handles collection of measurements.
"""
def __init__(self, suite, parent, arch):
super(Trace, self).__init__(suite, parent, arch)
assert self.results_regexp
self.results = []
self.errors = []
self.stddev = ""
def ConsumeOutput(self, stdout):
try:
result = re.search(self.results_regexp, stdout, re.M).group(1)
self.results.append(str(float(result)))
except ValueError:
self.errors.append("Regexp \"%s\" returned a non-numeric for test %s."
% (self.results_regexp, self.graphs[-1]))
except:
self.errors.append("Regexp \"%s\" didn't match for test %s."
% (self.results_regexp, self.graphs[-1]))
try:
if self.stddev_regexp and self.stddev:
self.errors.append("Test %s should only run once since a stddev "
"is provided by the test." % self.graphs[-1])
if self.stddev_regexp:
self.stddev = re.search(self.stddev_regexp, stdout, re.M).group(1)
except:
self.errors.append("Regexp \"%s\" didn't match for test %s."
% (self.stddev_regexp, self.graphs[-1]))
def GetResults(self):
return Results([{
"graphs": self.graphs,
"units": self.units,
"results": self.results,
"stddev": self.stddev,
}], self.errors)
class Runnable(Graph):
"""Represents a runnable suite definition (i.e. has a main file).
"""
@property
def main(self):
return self._suite.get("main", "")
def ChangeCWD(self, suite_path):
"""Changes the cwd to to path defined in the current graph.
The tests are supposed to be relative to the suite configuration.
"""
suite_dir = os.path.abspath(os.path.dirname(suite_path))
bench_dir = os.path.normpath(os.path.join(*self.path))
os.chdir(os.path.join(suite_dir, bench_dir))
def GetCommandFlags(self):
suffix = ["--"] + self.test_flags if self.test_flags else []
return self.flags + [self.main] + suffix
def GetCommand(self, shell_dir):
# TODO(machenbach): This requires +.exe if run on windows.
return [os.path.join(shell_dir, self.binary)] + self.GetCommandFlags()
def Run(self, runner):
"""Iterates over several runs and handles the output for all traces."""
for stdout in runner():
for trace in self._children:
trace.ConsumeOutput(stdout)
res = reduce(lambda r, t: r + t.GetResults(), self._children, Results())
if not res.traces or not self.total:
return res
# Assume all traces have the same structure.
if len(set(map(lambda t: len(t["results"]), res.traces))) != 1:
res.errors.append("Not all traces have the same number of results.")
return res
# Calculate the geometric means for all traces. Above we made sure that
# there is at least one trace and that the number of results is the same
# for each trace.
n_results = len(res.traces[0]["results"])
total_results = [GeometricMean(t["results"][i] for t in res.traces)
for i in range(0, n_results)]
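# Worked example: for two traces with results [10, 20] and [40, 80], the
# "Total" trace appended below holds the column-wise geometric means
# [20, 40] (sqrt(10 * 40) = 20, sqrt(20 * 80) = 40).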
res.traces.append({
"graphs": self.graphs + ["Total"],
"units": res.traces[0]["units"],
"results": total_results,
"stddev": "",
})
return res
class RunnableTrace(Trace, Runnable):
"""Represents a runnable suite definition that is a leaf."""
def __init__(self, suite, parent, arch):
super(RunnableTrace, self).__init__(suite, parent, arch)
def Run(self, runner):
"""Iterates over several runs and handles the output."""
for stdout in runner():
self.ConsumeOutput(stdout)
return self.GetResults()
class RunnableGeneric(Runnable):
"""Represents a runnable suite definition with generic traces."""
def __init__(self, suite, parent, arch):
super(RunnableGeneric, self).__init__(suite, parent, arch)
def Run(self, runner):
"""Iterates over several runs and handles the output."""
traces = OrderedDict()
for stdout in runner():
for line in stdout.strip().splitlines():
match = GENERIC_RESULTS_RE.match(line)
if match:
stddev = ""
graph = match.group(1)
trace = match.group(2)
body = match.group(3)
units = match.group(4)
match_stddev = RESULT_STDDEV_RE.match(body)
match_list = RESULT_LIST_RE.match(body)
errors = []
if match_stddev:
result, stddev = map(str.strip, match_stddev.group(1).split(","))
results = [result]
elif match_list:
results = map(str.strip, match_list.group(1).split(","))
else:
results = [body.strip()]
try:
results = map(lambda r: str(float(r)), results)
except ValueError:
results = []
errors = ["Found non-numeric in %s" %
"/".join(self.graphs + [graph, trace])]
trace_result = traces.setdefault(trace, Results([{
"graphs": self.graphs + [graph, trace],
"units": (units or self.units).strip(),
"results": [],
"stddev": "",
}], errors))
trace_result.traces[0]["results"].extend(results)
trace_result.traces[0]["stddev"] = stddev
return reduce(lambda r, t: r + t, traces.itervalues(), Results())
def MakeGraph(suite, arch, parent):
"""Factory method for making graph objects."""
if isinstance(parent, Runnable):
# Below a runnable can only be traces.
return Trace(suite, parent, arch)
elif suite.get("main"):
# A main file makes this graph runnable.
if suite.get("tests"):
# This graph has subgraphs (traces).
return Runnable(suite, parent, arch)
else:
# This graph has no subgraphs, it's a leaf.
return RunnableTrace(suite, parent, arch)
elif suite.get("generic"):
# This is a generic suite definition. It is either a runnable executable
# or has a main js file.
return RunnableGeneric(suite, parent, arch)
elif suite.get("tests"):
# This is neither a leaf nor a runnable.
return Graph(suite, parent, arch)
else: # pragma: no cover
raise Exception("Invalid suite configuration.")
def BuildGraphs(suite, arch, parent=None):
"""Builds a tree structure of graph objects that corresponds to the suite
configuration.
"""
parent = parent or DefaultSentinel()
# TODO(machenbach): Implement notion of cpu type?
if arch not in suite.get("archs", SUPPORTED_ARCHS):
return None
graph = MakeGraph(suite, arch, parent)
for subsuite in suite.get("tests", []):
BuildGraphs(subsuite, arch, graph)
parent.AppendChild(graph)
return graph
def FlattenRunnables(node, node_cb):
"""Generator that traverses the tree structure and iterates over all
runnables.
"""
node_cb(node)
if isinstance(node, Runnable):
yield node
elif isinstance(node, Node):
for child in node._children:
for result in FlattenRunnables(child, node_cb):
yield result
else: # pragma: no cover
raise Exception("Invalid suite configuration.")
class Platform(object):
@staticmethod
def GetPlatform(options):
if options.arch.startswith("android"):
return AndroidPlatform(options)
else:
return DesktopPlatform(options)
class DesktopPlatform(Platform):
def __init__(self, options):
self.shell_dir = options.shell_dir
def PreExecution(self):
pass
def PostExecution(self):
pass
def PreTests(self, node, path):
if isinstance(node, Runnable):
node.ChangeCWD(path)
def Run(self, runnable, count):
output = commands.Execute(runnable.GetCommand(self.shell_dir),
timeout=runnable.timeout)
print ">>> Stdout (#%d):" % (count + 1)
print output.stdout
if output.stderr: # pragma: no cover
# Print stderr for debugging.
print ">>> Stderr (#%d):" % (count + 1)
print output.stderr
if output.timed_out:
print ">>> Test timed out after %ss." % runnable.timeout
return output.stdout
class AndroidPlatform(Platform): # pragma: no cover
DEVICE_DIR = "/data/local/tmp/v8/"
def __init__(self, options):
self.shell_dir = options.shell_dir
LoadAndroidBuildTools(options.android_build_tools)
if not options.device:
# Detect attached device if not specified.
devices = pylib.android_commands.GetAttachedDevices(
hardware=True, emulator=False, offline=False)
assert devices and len(devices) == 1, (
"None or multiple devices detected. Please specify the device on "
"the command-line with --device")
options.device = devices[0]
adb_wrapper = pylib.android_commands.AndroidCommands(options.device)
self.device = device_utils.DeviceUtils(adb_wrapper)
self.adb = adb_wrapper.Adb()
def PreExecution(self):
perf = perf_control.PerfControl(self.device)
perf.SetHighPerfMode()
# Remember what we have already pushed to the device.
self.pushed = set()
def PostExecution(self):
perf = perf_control.PerfControl(self.device)
perf.SetDefaultPerfMode()
self.device.RunShellCommand(["rm", "-rf", AndroidPlatform.DEVICE_DIR])
def _PushFile(self, host_dir, file_name, target_rel="."):
file_on_host = os.path.join(host_dir, file_name)
file_on_device = os.path.join(
AndroidPlatform.DEVICE_DIR, target_rel, file_name)
# Only push files not yet pushed in one execution.
if file_on_host in self.pushed:
return
else:
self.pushed.add(file_on_host)
logging.info("adb push %s %s" % (file_on_host, file_on_device))
self.adb.Push(file_on_host, file_on_device)
def PreTests(self, node, path):
suite_dir = os.path.abspath(os.path.dirname(path))
if node.path:
bench_rel = os.path.normpath(os.path.join(*node.path))
bench_abs = os.path.join(suite_dir, bench_rel)
else:
bench_rel = "."
bench_abs = suite_dir
self._PushFile(self.shell_dir, node.binary)
if isinstance(node, Runnable):
self._PushFile(bench_abs, node.main, bench_rel)
for resource in node.resources:
self._PushFile(bench_abs, resource, bench_rel)
def Run(self, runnable, count):
cache = cache_control.CacheControl(self.device)
cache.DropRamCaches()
binary_on_device = AndroidPlatform.DEVICE_DIR + runnable.binary
cmd = [binary_on_device] + runnable.GetCommandFlags()
# Relative path to benchmark directory.
if runnable.path:
bench_rel = os.path.normpath(os.path.join(*runnable.path))
else:
bench_rel = "."
try:
output = self.device.RunShellCommand(
cmd,
cwd=os.path.join(AndroidPlatform.DEVICE_DIR, bench_rel),
timeout=runnable.timeout,
retries=0,
)
stdout = "\n".join(output)
print ">>> Stdout (#%d):" % (count + 1)
print stdout
except device_errors.CommandTimeoutError:
print ">>> Test timed out after %ss." % runnable.timeout
stdout = ""
return stdout
# TODO: Implement results_processor.
def Main(args):
logging.getLogger().setLevel(logging.INFO)
parser = optparse.OptionParser()
parser.add_option("--android-build-tools",
help="Path to chromium's build/android.")
parser.add_option("--arch",
help=("The architecture to run tests for, "
"'auto' or 'native' for auto-detect"),
default="x64")
parser.add_option("--buildbot",
help="Adapt to path structure used on buildbots",
default=False, action="store_true")
parser.add_option("--device",
help="The device ID to run Android tests on. If not given "
"it will be autodetected.")
parser.add_option("--json-test-results",
help="Path to a file for storing json results.")
parser.add_option("--outdir", help="Base directory with compile output",
default="out")
(options, args) = parser.parse_args(args)
if len(args) == 0: # pragma: no cover
parser.print_help()
return 1
if options.arch in ["auto", "native"]: # pragma: no cover
options.arch = ARCH_GUESS
if not options.arch in SUPPORTED_ARCHS: # pragma: no cover
print "Unknown architecture %s" % options.arch
return 1
if (bool(options.arch.startswith("android")) !=
bool(options.android_build_tools)): # pragma: no cover
print ("Android architectures imply setting --android-build-tools and the "
"other way around.")
return 1
if (options.device and not
options.arch.startswith("android")): # pragma: no cover
print "Specifying a device requires an Android architecture to be used."
return 1
workspace = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
if options.buildbot:
options.shell_dir = os.path.join(workspace, options.outdir, "Release")
else:
options.shell_dir = os.path.join(workspace, options.outdir,
"%s.release" % options.arch)
platform = Platform.GetPlatform(options)
results = Results()
for path in args:
path = os.path.abspath(path)
if not os.path.exists(path): # pragma: no cover
results.errors.append("Configuration file %s does not exist." % path)
continue
with open(path) as f:
suite = json.loads(f.read())
# If no name is given, default to the file name without .json.
suite.setdefault("name", os.path.splitext(os.path.basename(path))[0])
# Setup things common to one test suite.
platform.PreExecution()
# Build the graph/trace tree structure.
root = BuildGraphs(suite, options.arch)
# Callback to be called on each node on traversal.
def NodeCB(node):
platform.PreTests(node, path)
# Traverse graph/trace tree and iterate over all runnables.
for runnable in FlattenRunnables(root, NodeCB):
print ">>> Running suite: %s" % "/".join(runnable.graphs)
def Runner():
"""Output generator that reruns several times."""
for i in xrange(0, max(1, runnable.run_count)):
# TODO(machenbach): Allow timeout per arch like with run_count per
# arch.
yield platform.Run(runnable, i)
# Let runnable iterate over all runs and handle output.
results += runnable.Run(Runner)
platform.PostExecution()
if options.json_test_results:
results.WriteToFile(options.json_test_results)
else: # pragma: no cover
print results
return min(1, len(results.errors))
if __name__ == "__main__": # pragma: no cover
sys.exit(Main(sys.argv[1:]))
|
|
# Copyright (c) 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""XVP (Xenserver VNC Proxy) driver."""
import os
import signal
import jinja2
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from nova import context
from nova import db
from nova.i18n import _, _LE
from nova import paths
from nova import utils
xvp_opts = [
cfg.StrOpt('console_xvp_conf_template',
default=paths.basedir_def('nova/console/xvp.conf.template'),
help='XVP conf template'),
cfg.StrOpt('console_xvp_conf',
default='/etc/xvp.conf',
help='Generated XVP conf file'),
cfg.StrOpt('console_xvp_pid',
default='/var/run/xvp.pid',
help='XVP master process pid file'),
cfg.StrOpt('console_xvp_log',
default='/var/log/xvp.log',
help='XVP log file'),
cfg.IntOpt('console_xvp_multiplex_port',
default=5900,
min=1,
max=65535,
help='Port for XVP to multiplex VNC connections on'),
]
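# Illustrative nova.conf snippet exercising these options (the values shown
# are simply the defaults declared above):
#   [DEFAULT]
#   console_xvp_conf = /etc/xvp.conf
#   console_xvp_pid = /var/run/xvp.pid
#   console_xvp_log = /var/log/xvp.log
#   console_xvp_multiplex_port = 5900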
CONF = cfg.CONF
CONF.register_opts(xvp_opts)
CONF.import_opt('host', 'nova.netconf')
LOG = logging.getLogger(__name__)
class XVPConsoleProxy(object):
"""Sets up XVP config, and manages XVP daemon."""
def __init__(self):
self.xvpconf_template = open(CONF.console_xvp_conf_template).read()
self.host = CONF.host # default, set by manager.
super(XVPConsoleProxy, self).__init__()
@property
def console_type(self):
return 'vnc+xvp'
def get_port(self, context):
"""Get available port for consoles that need one."""
# TODO(mdragon): implement port selection for non multiplex ports,
# we are not using that, but someone else may want
# it.
return CONF.console_xvp_multiplex_port
def setup_console(self, context, console):
"""Sets up actual proxies."""
self._rebuild_xvp_conf(context.elevated())
def teardown_console(self, context, console):
"""Tears down actual proxies."""
self._rebuild_xvp_conf(context.elevated())
def init_host(self):
"""Start up any config'ed consoles on start."""
ctxt = context.get_admin_context()
self._rebuild_xvp_conf(ctxt)
def fix_pool_password(self, password):
"""Trim password to length, and encode."""
return self._xvp_encrypt(password, is_pool_password=True)
def fix_console_password(self, password):
"""Trim password to length, and encode."""
return self._xvp_encrypt(password)
def _rebuild_xvp_conf(self, context):
LOG.debug('Rebuilding xvp conf')
pools = [pool for pool in
db.console_pool_get_all_by_host_type(context, self.host,
self.console_type)
if pool['consoles']]
if not pools:
LOG.debug('No console pools!')
self._xvp_stop()
return
conf_data = {'multiplex_port': CONF.console_xvp_multiplex_port,
'pools': pools}
tmpl_path, tmpl_file = os.path.split(CONF.console_xvp_conf_template)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(tmpl_path))
env.filters['pass_encode'] = self.fix_console_password
template = env.get_template(tmpl_file)
self._write_conf(template.render(conf_data))
self._xvp_restart()
def _write_conf(self, config):
try:
LOG.debug('Re-wrote %s', CONF.console_xvp_conf)
with open(CONF.console_xvp_conf, 'w') as cfile:
cfile.write(config)
except IOError:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Failed to write configuration file"))
def _xvp_stop(self):
LOG.debug('Stopping xvp')
pid = self._xvp_pid()
if not pid:
return
try:
os.kill(pid, signal.SIGTERM)
except OSError:
# if it's already not running, no problem.
pass
def _xvp_start(self):
if self._xvp_check_running():
return
LOG.debug('Starting xvp')
try:
utils.execute('xvp',
'-p', CONF.console_xvp_pid,
'-c', CONF.console_xvp_conf,
'-l', CONF.console_xvp_log)
except processutils.ProcessExecutionError as err:
LOG.error(_LE('Error starting xvp: %s'), err)
def _xvp_restart(self):
LOG.debug('Restarting xvp')
if not self._xvp_check_running():
LOG.debug('xvp not running...')
self._xvp_start()
else:
pid = self._xvp_pid()
os.kill(pid, signal.SIGUSR1)
def _xvp_pid(self):
try:
with open(CONF.console_xvp_pid, 'r') as pidfile:
pid = int(pidfile.read())
except IOError:
return None
except ValueError:
return None
return pid
def _xvp_check_running(self):
pid = self._xvp_pid()
if not pid:
return False
try:
os.kill(pid, 0)
except OSError:
return False
return True
def _xvp_encrypt(self, password, is_pool_password=False):
"""Call xvp to obfuscate passwords for config file.
Args:
- password: the password to encode, max 8 chars for VM passwords
and 16 chars for pool passwords. Passwords will
be trimmed to max length before encoding.
- is_pool_password: True if this is the XenServer API password,
False if it's a VM console password
(xvp uses different keys and max lengths for pool passwords)
Note that xvp's obfuscation should not be considered 'real' encryption.
It simply DES encrypts the passwords with static keys plainly viewable
in the xvp source code.
"""
maxlen = 8
flag = '-e'
if is_pool_password:
maxlen = 16
flag = '-x'
# xvp will blow up on passwords that are too long (mdragon)
password = password[:maxlen]
out, err = utils.execute('xvp', flag, process_input=password)
if err:
raise processutils.ProcessExecutionError(_("Failed to run xvp."))
return out.strip()
|
|
import httplib
import itertools
import array
import StringIO
import socket
import errno
import os
import tempfile
import unittest
TestCase = unittest.TestCase
from test import test_support
here = os.path.dirname(__file__)
# Self-signed cert file for 'localhost'
CERT_localhost = os.path.join(here, 'keycert.pem')
# Self-signed cert file for 'fakehostname'
CERT_fakehostname = os.path.join(here, 'keycert2.pem')
# Self-signed cert file for self-signed.pythontest.net
CERT_selfsigned_pythontestdotnet = os.path.join(here, 'selfsigned_pythontestdotnet.pem')
HOST = test_support.HOST
class FakeSocket:
def __init__(self, text, fileclass=StringIO.StringIO, host=None, port=None):
self.text = text
self.fileclass = fileclass
self.data = ''
self.file_closed = False
self.host = host
self.port = port
def sendall(self, data):
self.data += ''.join(data)
def makefile(self, mode, bufsize=None):
if mode != 'r' and mode != 'rb':
raise httplib.UnimplementedFileMode()
# keep the file around so we can check how much was read from it
self.file = self.fileclass(self.text)
self.file.close = self.file_close  # nerf close()
return self.file
def file_close(self):
self.file_closed = True
def close(self):
pass
class EPipeSocket(FakeSocket):
def __init__(self, text, pipe_trigger):
# When sendall() is called with pipe_trigger, raise EPIPE.
FakeSocket.__init__(self, text)
self.pipe_trigger = pipe_trigger
def sendall(self, data):
if self.pipe_trigger in data:
raise socket.error(errno.EPIPE, "gotcha")
self.data += data
def close(self):
pass
class NoEOFStringIO(StringIO.StringIO):
"""Like StringIO, but raises AssertionError on EOF.
This is used below to test that httplib doesn't try to read
more from the underlying file than it should.
"""
def read(self, n=-1):
data = StringIO.StringIO.read(self, n)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
def readline(self, length=None):
data = StringIO.StringIO.readline(self, length)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
class HeaderTests(TestCase):
def test_auto_headers(self):
# Some headers are added automatically, but should not be added by
# .request() if they are explicitly set.
class HeaderCountingBuffer(list):
def __init__(self):
self.count = {}
def append(self, item):
kv = item.split(':')
if len(kv) > 1:
# item is a 'Key: Value' header string
lcKey = kv[0].lower()
self.count.setdefault(lcKey, 0)
self.count[lcKey] += 1
list.append(self, item)
for explicit_header in True, False:
for header in 'Content-length', 'Host', 'Accept-encoding':
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket('blahblahblah')
conn._buffer = HeaderCountingBuffer()
body = 'spamspamspam'
headers = {}
if explicit_header:
headers[header] = str(len(body))
conn.request('POST', '/', body, headers)
self.assertEqual(conn._buffer.count[header.lower()], 1)
def test_content_length_0(self):
class ContentLengthChecker(list):
def __init__(self):
list.__init__(self)
self.content_length = None
def append(self, item):
kv = item.split(':', 1)
if len(kv) > 1 and kv[0].lower() == 'content-length':
self.content_length = kv[1].strip()
list.append(self, item)
# Here, we're testing that methods expecting a body get a
# content-length set to zero if the body is empty (either None or '')
bodies = (None, '')
methods_with_body = ('PUT', 'POST', 'PATCH')
for method, body in itertools.product(methods_with_body, bodies):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', body)
self.assertEqual(
conn._buffer.content_length, '0',
'Header Content-Length incorrect on {}'.format(method)
)
# For these methods, we make sure that content-length is not set when
# the body is None because it might cause unexpected behaviour on the
# server.
methods_without_body = (
'GET', 'CONNECT', 'DELETE', 'HEAD', 'OPTIONS', 'TRACE',
)
for method in methods_without_body:
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', None)
self.assertEqual(
conn._buffer.content_length, None,
'Header Content-Length set for empty body on {}'.format(method)
)
# If the body is set to '', that's considered to be "present but
# empty" rather than "missing", so content length would be set, even
# for methods that don't expect a body.
for method in methods_without_body:
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', '')
self.assertEqual(
conn._buffer.content_length, '0',
'Header Content-Length incorrect on {}'.format(method)
)
# If the body is set, make sure Content-Length is set.
for method in itertools.chain(methods_without_body, methods_with_body):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn._buffer = ContentLengthChecker()
conn.request(method, '/', ' ')
self.assertEqual(
conn._buffer.content_length, '1',
'Header Content-Length incorrect on {}'.format(method)
)
def test_putheader(self):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket(None)
conn.putrequest('GET','/')
conn.putheader('Content-length',42)
self.assertIn('Content-length: 42', conn._buffer)
conn.putheader('Foo', ' bar ')
self.assertIn(b'Foo: bar ', conn._buffer)
conn.putheader('Bar', '\tbaz\t')
self.assertIn(b'Bar: \tbaz\t', conn._buffer)
conn.putheader('Authorization', 'Bearer mytoken')
self.assertIn(b'Authorization: Bearer mytoken', conn._buffer)
conn.putheader('IterHeader', 'IterA', 'IterB')
self.assertIn(b'IterHeader: IterA\r\n\tIterB', conn._buffer)
conn.putheader('LatinHeader', b'\xFF')
self.assertIn(b'LatinHeader: \xFF', conn._buffer)
conn.putheader('Utf8Header', b'\xc3\x80')
self.assertIn(b'Utf8Header: \xc3\x80', conn._buffer)
conn.putheader('C1-Control', b'next\x85line')
self.assertIn(b'C1-Control: next\x85line', conn._buffer)
conn.putheader('Embedded-Fold-Space', 'is\r\n allowed')
self.assertIn(b'Embedded-Fold-Space: is\r\n allowed', conn._buffer)
conn.putheader('Embedded-Fold-Tab', 'is\r\n\tallowed')
self.assertIn(b'Embedded-Fold-Tab: is\r\n\tallowed', conn._buffer)
conn.putheader('Key Space', 'value')
self.assertIn(b'Key Space: value', conn._buffer)
conn.putheader('KeySpace ', 'value')
self.assertIn(b'KeySpace : value', conn._buffer)
conn.putheader(b'Nonbreak\xa0Space', 'value')
self.assertIn(b'Nonbreak\xa0Space: value', conn._buffer)
conn.putheader(b'\xa0NonbreakSpace', 'value')
self.assertIn(b'\xa0NonbreakSpace: value', conn._buffer)
def test_ipv6host_header(self):
# The default Host header on an IPv6 connection should be wrapped in []
# if it is an actual IPv6 address
expected = 'GET /foo HTTP/1.1\r\nHost: [2001::]:81\r\n' \
'Accept-Encoding: identity\r\n\r\n'
conn = httplib.HTTPConnection('[2001::]:81')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
expected = 'GET /foo HTTP/1.1\r\nHost: [2001:102A::]\r\n' \
'Accept-Encoding: identity\r\n\r\n'
conn = httplib.HTTPConnection('[2001:102A::]')
sock = FakeSocket('')
conn.sock = sock
conn.request('GET', '/foo')
self.assertTrue(sock.data.startswith(expected))
def test_malformed_headers_coped_with(self):
# Issue 19996
body = "HTTP/1.1 200 OK\r\nFirst: val\r\n: nval\r\nSecond: val\r\n\r\n"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.getheader('First'), 'val')
self.assertEqual(resp.getheader('Second'), 'val')
def test_invalid_headers(self):
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket('')
conn.putrequest('GET', '/')
# http://tools.ietf.org/html/rfc7230#section-3.2.4, whitespace is no
# longer allowed in header names
cases = (
(b'Invalid\r\nName', b'ValidValue'),
(b'Invalid\rName', b'ValidValue'),
(b'Invalid\nName', b'ValidValue'),
(b'\r\nInvalidName', b'ValidValue'),
(b'\rInvalidName', b'ValidValue'),
(b'\nInvalidName', b'ValidValue'),
(b' InvalidName', b'ValidValue'),
(b'\tInvalidName', b'ValidValue'),
(b'Invalid:Name', b'ValidValue'),
(b':InvalidName', b'ValidValue'),
(b'ValidName', b'Invalid\r\nValue'),
(b'ValidName', b'Invalid\rValue'),
(b'ValidName', b'Invalid\nValue'),
(b'ValidName', b'InvalidValue\r\n'),
(b'ValidName', b'InvalidValue\r'),
(b'ValidName', b'InvalidValue\n'),
)
for name, value in cases:
with self.assertRaisesRegexp(ValueError, 'Invalid header'):
conn.putheader(name, value)
class BasicTest(TestCase):
def test_status_lines(self):
# Test HTTP status lines
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(0), '') # Issue #20007
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(), 'Text')
self.assertTrue(resp.isclosed())
body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
self.assertRaises(httplib.BadStatusLine, resp.begin)
def test_bad_status_repr(self):
exc = httplib.BadStatusLine('')
self.assertEqual(repr(exc), '''BadStatusLine("\'\'",)''')
def test_partial_reads(self):
# if we have a length, the system knows when to close itself
# same behaviour as when we read the whole thing with read()
body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), 'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), 'xt')
self.assertTrue(resp.isclosed())
def test_partial_reads_no_content_length(self):
# when no length is present, the socket should be gracefully closed when
# all data was read
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), 'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), 'xt')
self.assertEqual(resp.read(1), '')
self.assertTrue(resp.isclosed())
def test_partial_reads_incomplete_body(self):
# if the server shuts down the connection before the whole
# content-length is delivered, the socket is gracefully closed
body = "HTTP/1.1 200 Ok\r\nContent-Length: 10\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(2), 'Te')
self.assertFalse(resp.isclosed())
self.assertEqual(resp.read(2), 'xt')
self.assertEqual(resp.read(1), '')
self.assertTrue(resp.isclosed())
def test_host_port(self):
# Check invalid host_port
# Note that httplib does not accept user:password@ in the host-port.
for hp in ("www.python.org:abc", "user:password@www.python.org"):
self.assertRaises(httplib.InvalidURL, httplib.HTTP, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b",
8000),
("www.python.org:80", "www.python.org", 80),
("www.python.org", "www.python.org", 80),
("www.python.org:", "www.python.org", 80),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
http = httplib.HTTP(hp)
c = http._conn
if h != c.host:
self.fail("Host incorrectly parsed: %s != %s" % (h, c.host))
if p != c.port:
self.fail("Port incorrectly parsed: %s != %s" % (p, c.host))
def test_response_headers(self):
# test response with multiple message headers with the same field name.
text = ('HTTP/1.1 200 OK\r\n'
'Set-Cookie: Customer="WILE_E_COYOTE";'
' Version="1"; Path="/acme"\r\n'
'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
' Path="/acme"\r\n'
'\r\n'
'No body\r\n')
hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
', '
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
s = FakeSocket(text)
r = httplib.HTTPResponse(s)
r.begin()
cookies = r.getheader("Set-Cookie")
if cookies != hdr:
self.fail("multiple headers not combined properly")
def test_read_head(self):
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFStringIO)
resp = httplib.HTTPResponse(sock, method="HEAD")
resp.begin()
if resp.read() != "":
self.fail("Did not expect response from HEAD request")
def test_too_many_headers(self):
headers = '\r\n'.join('Header%d: foo' % i for i in xrange(200)) + '\r\n'
text = ('HTTP/1.1 200 OK\r\n' + headers)
s = FakeSocket(text)
r = httplib.HTTPResponse(s)
self.assertRaises(httplib.HTTPException, r.begin)
def test_send_file(self):
expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \
'Accept-Encoding: identity\r\nContent-Length:'
body = open(__file__, 'rb')
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertTrue(sock.data.startswith(expected))
self.assertIn('def test_send_file', sock.data)
def test_send_tempfile(self):
expected = ('GET /foo HTTP/1.1\r\nHost: example.com\r\n'
'Accept-Encoding: identity\r\nContent-Length: 9\r\n\r\n'
'fake\ndata')
with tempfile.TemporaryFile() as body:
body.write('fake\ndata')
body.seek(0)
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(body)
conn.sock = sock
conn.request('GET', '/foo', body)
self.assertEqual(sock.data, expected)
def test_send(self):
expected = 'this is a test this is only a test'
conn = httplib.HTTPConnection('example.com')
sock = FakeSocket(None)
conn.sock = sock
conn.send(expected)
self.assertEqual(expected, sock.data)
sock.data = ''
conn.send(array.array('c', expected))
self.assertEqual(expected, sock.data)
sock.data = ''
conn.send(StringIO.StringIO(expected))
self.assertEqual(expected, sock.data)
def test_chunked(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello worl\r\n'
'1\r\n'
'd\r\n'
)
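# Chunk framing above: the hex size "a" announces the 10 bytes "hello worl",
# "1" announces the single byte "d", and the "0" chunk appended below
# terminates the body, so resp.read() yields "hello world".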
sock = FakeSocket(chunked_start + '0\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), 'hello world')
resp.close()
for x in ('', 'foo\r\n'):
sock = FakeSocket(chunked_start + x)
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except httplib.IncompleteRead, i:
self.assertEqual(i.partial, 'hello world')
self.assertEqual(repr(i),'IncompleteRead(11 bytes read)')
self.assertEqual(str(i),'IncompleteRead(11 bytes read)')
else:
self.fail('IncompleteRead expected')
finally:
resp.close()
def test_chunked_head(self):
chunked_start = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'a\r\n'
'hello world\r\n'
'1\r\n'
'd\r\n'
)
sock = FakeSocket(chunked_start + '0\r\n')
resp = httplib.HTTPResponse(sock, method="HEAD")
resp.begin()
self.assertEqual(resp.read(), '')
self.assertEqual(resp.status, 200)
self.assertEqual(resp.reason, 'OK')
self.assertTrue(resp.isclosed())
def test_negative_content_length(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\n'
'Content-Length: -1\r\n\r\nHello\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
self.assertEqual(resp.read(), 'Hello\r\n')
self.assertTrue(resp.isclosed())
def test_incomplete_read(self):
sock = FakeSocket('HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHello\r\n')
resp = httplib.HTTPResponse(sock, method="GET")
resp.begin()
try:
resp.read()
except httplib.IncompleteRead as i:
self.assertEqual(i.partial, 'Hello\r\n')
self.assertEqual(repr(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertEqual(str(i),
"IncompleteRead(7 bytes read, 3 more expected)")
self.assertTrue(resp.isclosed())
else:
self.fail('IncompleteRead expected')
def test_epipe(self):
sock = EPipeSocket(
"HTTP/1.0 401 Authorization Required\r\n"
"Content-type: text/html\r\n"
"WWW-Authenticate: Basic realm=\"example\"\r\n",
b"Content-Length")
conn = httplib.HTTPConnection("example.com")
conn.sock = sock
self.assertRaises(socket.error,
lambda: conn.request("PUT", "/url", "body"))
resp = conn.getresponse()
self.assertEqual(401, resp.status)
self.assertEqual("Basic realm=\"example\"",
resp.getheader("www-authenticate"))
def test_filenoattr(self):
# Just test the fileno attribute in the HTTPResponse Object.
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
self.assertTrue(hasattr(resp,'fileno'),
'HTTPResponse should expose a fileno attribute')
# Test lines overflowing the max line size (_MAXLINE in http.client)
def test_overflowing_status_line(self):
self.skipTest("disabled for HTTP 0.9 support")
body = "HTTP/1.1 200 Ok" + "k" * 65536 + "\r\n"
resp = httplib.HTTPResponse(FakeSocket(body))
self.assertRaises((httplib.LineTooLong, httplib.BadStatusLine), resp.begin)
def test_overflowing_header_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'X-Foo: bar' + 'r' * 65536 + '\r\n\r\n'
)
resp = httplib.HTTPResponse(FakeSocket(body))
self.assertRaises(httplib.LineTooLong, resp.begin)
def test_overflowing_chunked_line(self):
body = (
'HTTP/1.1 200 OK\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
+ '0' * 65536 + 'a\r\n'
'hello world\r\n'
'0\r\n'
)
resp = httplib.HTTPResponse(FakeSocket(body))
resp.begin()
self.assertRaises(httplib.LineTooLong, resp.read)
def test_early_eof(self):
# Test HTTPResponse with no \r\n termination.
body = "HTTP/1.1 200 Ok"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock)
resp.begin()
self.assertEqual(resp.read(), '')
self.assertTrue(resp.isclosed())
def test_error_leak(self):
# Test that the socket is not leaked if getresponse() fails
conn = httplib.HTTPConnection('example.com')
response = []
class Response(httplib.HTTPResponse):
def __init__(self, *pos, **kw):
response.append(self) # Avoid garbage collector closing the socket
httplib.HTTPResponse.__init__(self, *pos, **kw)
conn.response_class = Response
conn.sock = FakeSocket('') # Emulate server dropping connection
conn.request('GET', '/')
self.assertRaises(httplib.BadStatusLine, conn.getresponse)
self.assertTrue(response)
#self.assertTrue(response[0].closed)
self.assertTrue(conn.sock.file_closed)
class OfflineTest(TestCase):
def test_responses(self):
self.assertEqual(httplib.responses[httplib.NOT_FOUND], "Not Found")
class TestServerMixin:
"""A limited socket server mixin.
This is used by test cases for testing http connection end points.
"""
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = test_support.bind_port(self.serv)
self.source_port = test_support.find_unused_port()
self.serv.listen(5)
self.conn = None
def tearDown(self):
if self.conn:
self.conn.close()
self.conn = None
self.serv.close()
self.serv = None
class SourceAddressTest(TestServerMixin, TestCase):
def testHTTPConnectionSourceAddress(self):
self.conn = httplib.HTTPConnection(HOST, self.port,
source_address=('', self.source_port))
self.conn.connect()
self.assertEqual(self.conn.sock.getsockname()[1], self.source_port)
@unittest.skipIf(not hasattr(httplib, 'HTTPSConnection'),
'httplib.HTTPSConnection not defined')
def testHTTPSConnectionSourceAddress(self):
self.conn = httplib.HTTPSConnection(HOST, self.port,
source_address=('', self.source_port))
# We don't test anything here other than the constructor not barfing, as
# this code doesn't deal with setting up an active running SSL server
# for an ssl_wrapped connect() to actually return from.
class HTTPTest(TestServerMixin, TestCase):
def testHTTPConnection(self):
self.conn = httplib.HTTP(host=HOST, port=self.port, strict=None)
self.conn.connect()
self.assertEqual(self.conn._conn.host, HOST)
self.assertEqual(self.conn._conn.port, self.port)
def testHTTPWithConnectHostPort(self):
testhost = 'unreachable.test.domain'
testport = '80'
self.conn = httplib.HTTP(host=testhost, port=testport)
self.conn.connect(host=HOST, port=self.port)
self.assertNotEqual(self.conn._conn.host, testhost)
self.assertNotEqual(self.conn._conn.port, testport)
self.assertEqual(self.conn._conn.host, HOST)
self.assertEqual(self.conn._conn.port, self.port)
class TimeoutTest(TestCase):
PORT = None
def setUp(self):
self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
TimeoutTest.PORT = test_support.bind_port(self.serv)
self.serv.listen(5)
def tearDown(self):
self.serv.close()
self.serv = None
def testTimeoutAttribute(self):
'''This will prove that the timeout gets through
HTTPConnection and into the socket.
'''
# default -- use global socket timeout
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
# no timeout -- do not use global socket default
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT,
timeout=None)
httpConn.connect()
finally:
socket.setdefaulttimeout(None)
self.assertEqual(httpConn.sock.gettimeout(), None)
httpConn.close()
# a value
httpConn = httplib.HTTPConnection(HOST, TimeoutTest.PORT, timeout=30)
httpConn.connect()
self.assertEqual(httpConn.sock.gettimeout(), 30)
httpConn.close()
class HTTPSTest(TestCase):
def setUp(self):
if not hasattr(httplib, 'HTTPSConnection'):
self.skipTest('ssl support required')
def make_server(self, certfile):
from test.ssl_servers import make_https_server
return make_https_server(self, certfile=certfile)
def test_attributes(self):
# simple test to check it's storing the timeout
h = httplib.HTTPSConnection(HOST, TimeoutTest.PORT, timeout=30)
self.assertEqual(h.timeout, 30)
def test_networked(self):
# Default settings: requires a valid cert from a trusted CA
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_networked_noverification(self):
# Switch off cert verification
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl._create_stdlib_context()
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
context=context)
h.request('GET', '/')
resp = h.getresponse()
self.assertIn('nginx', resp.getheader('server'))
@test_support.system_must_validate_cert
def test_networked_trusted_by_default_cert(self):
# Default settings: requires a valid cert from a trusted CA
test_support.requires('network')
with test_support.transient_internet('www.python.org'):
h = httplib.HTTPSConnection('www.python.org', 443)
h.request('GET', '/')
resp = h.getresponse()
content_type = resp.getheader('content-type')
self.assertIn('text/html', content_type)
def test_networked_good_cert(self):
# We feed the server's cert as a validating cert
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_selfsigned_pythontestdotnet)
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
h.request('GET', '/')
resp = h.getresponse()
server_string = resp.getheader('server')
self.assertIn('nginx', server_string)
def test_networked_bad_cert(self):
# We feed a "CA" cert that is unrelated to the server's cert
import ssl
test_support.requires('network')
with test_support.transient_internet('self-signed.pythontest.net'):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = httplib.HTTPSConnection('self-signed.pythontest.net', 443, context=context)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_local_unknown_cert(self):
# The custom cert isn't known to the default trust bundle
import ssl
server = self.make_server(CERT_localhost)
h = httplib.HTTPSConnection('localhost', server.port)
with self.assertRaises(ssl.SSLError) as exc_info:
h.request('GET', '/')
self.assertEqual(exc_info.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
def test_local_good_hostname(self):
# The (valid) cert validates the HTTP hostname
import ssl
server = self.make_server(CERT_localhost)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERT_localhost)
h = httplib.HTTPSConnection('localhost', server.port, context=context)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
def test_local_bad_hostname(self):
# The (valid) cert doesn't validate the HTTP hostname
import ssl
server = self.make_server(CERT_fakehostname)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_verify_locations(CERT_fakehostname)
h = httplib.HTTPSConnection('localhost', server.port, context=context)
with self.assertRaises(ssl.CertificateError):
h.request('GET', '/')
h.close()
# With context.check_hostname=False, the mismatching is ignored
context.check_hostname = False
h = httplib.HTTPSConnection('localhost', server.port, context=context)
h.request('GET', '/nonexistent')
resp = h.getresponse()
self.assertEqual(resp.status, 404)
def test_host_port(self):
# Check invalid host_port
for hp in ("www.python.org:abc", "user:password@www.python.org"):
self.assertRaises(httplib.InvalidURL, httplib.HTTPSConnection, hp)
for hp, h, p in (("[fe80::207:e9ff:fe9b]:8000",
"fe80::207:e9ff:fe9b", 8000),
("www.python.org:443", "www.python.org", 443),
("www.python.org:", "www.python.org", 443),
("www.python.org", "www.python.org", 443),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 443),
("[fe80::207:e9ff:fe9b]:", "fe80::207:e9ff:fe9b",
443)):
c = httplib.HTTPSConnection(hp)
self.assertEqual(h, c.host)
self.assertEqual(p, c.port)
class TunnelTests(TestCase):
def test_connect(self):
response_text = (
'HTTP/1.0 200 OK\r\n\r\n' # Reply to CONNECT
'HTTP/1.1 200 OK\r\n' # Reply to HEAD
'Content-Length: 42\r\n\r\n'
)
def create_connection(address, timeout=None, source_address=None):
return FakeSocket(response_text, host=address[0], port=address[1])
conn = httplib.HTTPConnection('proxy.com')
conn._create_connection = create_connection
# Once connected, we should not be able to tunnel anymore
conn.connect()
self.assertRaises(RuntimeError, conn.set_tunnel, 'destination.com')
# But if we close the connection, we are good.
conn.close()
conn.set_tunnel('destination.com')
conn.request('HEAD', '/', '')
self.assertEqual(conn.sock.host, 'proxy.com')
self.assertEqual(conn.sock.port, 80)
self.assertTrue('CONNECT destination.com' in conn.sock.data)
self.assertTrue('Host: destination.com' in conn.sock.data)
self.assertTrue('Host: proxy.com' not in conn.sock.data)
conn.close()
conn.request('PUT', '/', '')
self.assertEqual(conn.sock.host, 'proxy.com')
self.assertEqual(conn.sock.port, 80)
self.assertTrue('CONNECT destination.com' in conn.sock.data)
self.assertTrue('Host: destination.com' in conn.sock.data)
@test_support.reap_threads
def test_main(verbose=None):
test_support.run_unittest(HeaderTests, OfflineTest, BasicTest, TimeoutTest,
HTTPTest, HTTPSTest, SourceAddressTest,
TunnelTests)
if __name__ == '__main__':
test_main()
|
|
#!/usr/bin/env python3
"""
This tool creates bundle specifications and tar packages from a list of file paths given on stdin
NOTES:
* Create test files e.g. with "for n in {1..3}; do dd if=/dev/urandom of=file${n} bs=1M count=1; done"
TODO:
* Add some form of file duplicate check; warning or symlinking
"""
import argparse
import logging
import pprint
import sys
import tarfile
import os
import yaml
import bundle
import subprocess
__author__ = "Mikael Karlsson <i8myshoes@gmail.com>"
def refine_path(src, tools_path):
"""
Tries to do some refining and validation on the path given
:rtype: str
"""
if not os.path.isabs(src):
if os.path.exists(tools_path + src):
new_src = tools_path + src
else:
raise Exception("File path is incomplete! " + tools_path + src)
else:
new_src = src
return new_src
def create_tarball(archive_name, file_list, compression):
"""
Create a tarball, which is possibly gz or bz2 compressed
:type archive_name: str
:type file_list: list
:type compression: str
:param archive_name: Archive name
:param file_list: List of files
:param compression: Compression schema
* gz
* bz2
* none
"""
def compress_block(tf):
logging.debug("compress_block({})".format(tf))
for file in file_list:
name_in, name_out = file[0], file[1]
logging.debug("name_in: %s, name_out: %s" % (name_in, name_out))
tf.add(name=name_in, arcname=name_out, recursive=False)
def external_compressor(cmd):
logging.debug("external_compressor({})".format(cmd))
try:
with open(archive_name, "wb") as f, \
subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=f) as p, \
tarfile.open(fileobj=p.stdin, mode="w|") as tf:
compress_block(tf)
except OSError as e:
if e.errno == 2:
logging.warning("Compressor '{}' not found! {}".format(cmd[0], e))
try:
logging.debug("Trying next compressor!")
external_compressor(compressors[compression].pop(0))
except IndexError as e:
logging.warning("Out of external compressors! {}".format(e))
logging.debug("Switching to internal compressor!")
internal_compressor()
else:
raise
def internal_compressor():
logging.debug("internal_compressor()")
with tarfile.open(name=archive_name, mode="w:" + compression) as tf:
compress_block(tf)
logging.debug("create_tarball({})".format(archive_name, file_list, compression))
compressors = {
"gz": [
["pigz", "-6"],
["gzip", "-6"]
],
"bz2": [
["pbzip2", "-6"],
["lbzip2", "-6"],
["bzip2", "-6"]
]
}
if compression in compressors:
external_compressor(compressors[compression].pop(0))
else:
internal_compressor()
def detect_duplicates_and_rename(file_name, file_list):
"""
Detect if filename is duplicate and adjust it to allow flat directory structure
"""
if file_name in [fn[1] for fn in file_list]:
logging.debug("detect_duplicates_and_rename(): filename: %s" % file_name)
if file_name[-2] == "." and file_name[-1].isdigit():
file_name = "%s%i" % (file_name[:-1], int(file_name[-1]) + 1)
else:
file_name += ".1"
return file_name
def process_file(file_name, file_list):
"""
Takes a file name and returns a triple (file_name, base_name and checksum)
"""
logging.debug("file_name: %s" % file_name)
logging.debug("file_list: %s" % file_list)
# Get the base name (file name part)
base_name = os.path.basename(file_name)
logging.debug("base name (before): %s" % base_name)
# Detect duplicates and rename these to "name.[0-9]"
# This should allow tar archive structure to be flat
base_name = detect_duplicates_and_rename(base_name, file_list)
logging.debug("base name (after): %s" % base_name)
# Calculate checksum
checksum = bundle.calculate_checksum(file_name)
logging.debug("checksum: %s" % checksum)
return file_name, base_name, checksum
def create_spec(version, chipster, deprecated, file_list, symlink_list, archive_name):
"""
This function creates a structure of dicts and lists to be dumped as YAML
"""
# Files block
files = []
for file in file_list:
files.append(
{
"destination": file[0],
"source": file[1],
"checksum": file[2]
}
)
logging.debug("files: %s" % files)
# Symlinks block
symlinks = []
for link in symlink_list:
symlinks.append(
{
"source": link[0],
"destination": link[1]
}
)
logging.debug("symlinks: %s" % symlinks)
# Package block
new_packages = {
archive_name: {
"files": files,
}
}
if symlinks:
new_packages[archive_name]["symlinks"] = symlinks
# Version block
new_version = {
"version": str(version),
"chipster": str(chipster),
"packages": new_packages
}
if deprecated:
new_version["deprecated"] = str(deprecated)
return new_version
def main():
"""
Main function
"""
yaml_dict = {}
file_list = []
symlink_list = []
logging.basicConfig(level=logging.INFO)
params = vars(parse_commandline())
if params["tools"]:
tools_path = params["tools"]
else:
tools_path = "/opt/chipster/tools/"
params["archive"] = "{}-{}.tar{}".format(
params["name"], params["version"],
"." + params["compression"] if params["compression"] in ("gz", "bz2") else "")
if not params["file"]:
params["file"] = "{}-{}.yaml".format(params["name"], params["version"])
logging.debug("Params: {}".format(params))
# Process input files
for file_name in sys.stdin:
rel_file = file_name.strip()
abs_file = refine_path(rel_file, tools_path)
if os.path.islink(abs_file):
symlink_list.append((os.readlink(abs_file), rel_file))
logging.debug("symlink_list: %s" % symlink_list)
elif os.path.isfile(abs_file):
file_list.append(process_file(file_name=rel_file, file_list=file_list))
logging.debug("file_list: %s" % file_list)
elif os.path.isdir(abs_file):
logging.warning("What are you feeding me!! Directories are rubbish!!")
# Create structure
#abc:
# - version: x.y
# chipster: x.y.z
# deprecated: x.y.z
# packages:
# 'abc':
# files:
# - source: 'abc'
# destination: 'abc'
# checksum: '123'
# symlinks:
# - source: 'abc/d'
# destination: 'd'
yaml_dict[params["name"]] = [
create_spec(version=params["version"],
chipster=params["platform"],
deprecated=params["deprecated"],
file_list=file_list,
symlink_list=symlink_list,
archive_name=params["prefix"] + params["archive"])
]
pprint.pprint(yaml_dict)
yaml.dump(yaml_dict, open(params["file"], "w"), default_flow_style=False)
create_tarball(archive_name=params["archive"],
file_list=file_list,
compression=params["compression"])
# TODO: Complete this!
def parse_commandline():
"""
"""
parser = argparse.ArgumentParser(description="Creation tool for Chipster bundles", epilog="Blah blah blah")
# group = parser.add_mutually_exclusive_group()
# group.add_argument("-v", "--verbose", action="store_true")
# group.add_argument("-q", "--quiet", action="store_true")
# parser.add_argument("action",
# type=str,
# help="Action to perform",
# choices=["create"])
# # , "list"]) # ,metavar="action"
parser.add_argument("-n", "--name",
type=str,
required=True,
help="Bundle <name>")
# ,metavar="bundle name"
parser.add_argument("-v", "--version",
type=float,
required=True,
help="Bundle <version>")
parser.add_argument("-p", "--platform",
type=float,
required=True,
help="Chipster <version>")
parser.add_argument("-d", "--deprecated",
type=float,
help="Bundle deprecated since <version>")
# ,metavar="bundle name"
parser.add_argument("-c", "--compression",
type=str,
help="Bundle <compression>",
choices=["gz", "bz2", "no"],
default="gz")
parser.add_argument("-f", "--file",
type=str,
help="Output <file>")
parser.add_argument("--prefix",
type=str,
help="package filename prefix, e.g. URL of the bundle repository")
parser.add_argument("-t", "--tools",
type=str,
help="Path for converting file names from relative to absolute")
# ,metavar="bundle name"
# parser.add_argument("updates", type=str, help="Check for updates", choices=["check-update"])
args = parser.parse_args()
return args
###########
# Main code
###########
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt as e:
logging.warning("Processing interrupted! {}".format(e))
|
|
from unittest.mock import patch
from django.urls import reverse
from rest_framework import status
from bgp.models import Relationship
from devices.models import Platform
from net.models import Connection
from peering.constants import *
from peering.enums import CommunityType, DeviceState, RoutingPolicyType
from peering.models import (
AutonomousSystem,
BGPGroup,
Community,
Configuration,
DirectPeeringSession,
Email,
InternetExchange,
InternetExchangePeeringSession,
Router,
RoutingPolicy,
)
from peering.tests.mocked_data import load_peeringdb_data, mocked_subprocess_popen
from utils.testing import APITestCase, StandardAPITestCases
class AppTest(APITestCase):
def test_root(self):
response = self.client.get(reverse("peering-api:api-root"), **self.header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
class AutonomousSystemTest(StandardAPITestCases.View):
model = AutonomousSystem
brief_fields = [
"id",
"url",
"display",
"asn",
"name",
"ipv6_max_prefixes",
"ipv4_max_prefixes",
]
create_data = [
{"asn": 64541, "name": "Test 1"},
{"asn": 64542, "name": "Test 2"},
{"asn": 64543, "name": "Test 3"},
]
bulk_update_data = {"comments": "Awesome peer"}
@classmethod
def setUpTestData(cls):
AutonomousSystem.objects.bulk_create(
[
AutonomousSystem(asn=65536, name="Example 1", irr_as_set="AS-MOCKED"),
AutonomousSystem(
asn=64496, name="Example 2", irr_as_set="AS-EXAMPLE-2"
),
AutonomousSystem(
asn=64497, name="Example 3", irr_as_set="AS-EXAMPLE-3"
),
]
)
cls.autonomous_system = AutonomousSystem.objects.get(asn=65536)
load_peeringdb_data()
def test_synchronize_with_peeringdb(self):
autonomous_system = AutonomousSystem.objects.create(
asn=201281, name="Test", irr_as_set="AS-TEST"
)
url = reverse(
"peering-api:autonomoussystem-sync-with-peeringdb",
kwargs={"pk": autonomous_system.pk},
)
response = self.client.post(url, format="json", **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
def test_get_irr_as_set_prefixes(self):
with patch("peering.subprocess.Popen", side_effect=mocked_subprocess_popen):
url = reverse(
"peering-api:autonomoussystem-as-set-prefixes",
kwargs={"pk": self.autonomous_system.pk},
)
response = self.client.get(url, format="json", **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(len(response.data["ipv6"]), 1)
self.assertEqual(len(response.data["ipv4"]), 1)
def test_shared_internet_exchanges(self):
local_as = AutonomousSystem.objects.create(
asn=65535, name="Local", irr_as_set="AS-LOCAL", affiliated=True
)
self.user.preferences.set("context.as", local_as.pk, commit=True)
url = reverse(
"peering-api:autonomoussystem-shared-ixps",
kwargs={"pk": self.autonomous_system.pk},
)
response = self.client.get(url, format="json", **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(response.data, [])
class BGPGroupTest(StandardAPITestCases.View):
model = BGPGroup
brief_fields = ["id", "url", "display", "name", "slug"]
create_data = [
{"name": "Test 1", "slug": "test-1"},
{"name": "Test 2", "slug": "test-2"},
{"name": "Test 3", "slug": "test-3"},
]
bulk_update_data = {"comments": "Awesome group"}
@classmethod
def setUpTestData(cls):
BGPGroup.objects.bulk_create(
[
BGPGroup(name="Example 1", slug="example-1"),
BGPGroup(name="Example 2", slug="example-2"),
BGPGroup(name="Example 3", slug="example-3"),
]
)
def test_poll_sessions(self):
url = reverse(
"peering-api:bgpgroup-poll-sessions",
kwargs={"pk": BGPGroup.objects.get(slug="example-1").pk},
)
response = self.client.post(url, **self.header)
self.assertHttpStatus(response, status.HTTP_202_ACCEPTED)
class CommunityTest(StandardAPITestCases.View):
model = Community
brief_fields = ["id", "url", "display", "name", "slug", "value", "type"]
create_data = [
{
"name": "Test1",
"slug": "test1",
"value": "64500:11",
"type": CommunityType.EGRESS,
},
{
"name": "Test2",
"slug": "test2",
"value": "64500:12",
"type": CommunityType.EGRESS,
},
{
"name": "Test3",
"slug": "test3",
"value": "64500:13",
"type": CommunityType.EGRESS,
},
]
bulk_update_data = {"comments": "Awesome community"}
@classmethod
def setUpTestData(cls):
Community.objects.bulk_create(
[
Community(
name="Example 1",
slug="example-1",
value="64500:1",
type=CommunityType.EGRESS,
),
Community(
name="Example 2",
slug="example-2",
value="64500:2",
type=CommunityType.EGRESS,
),
Community(
name="Example 3",
slug="example-3",
value="64500:3",
type=CommunityType.EGRESS,
),
]
)
class ConfigurationTest(StandardAPITestCases.View):
model = Configuration
brief_fields = ["id", "url", "display", "name"]
create_data = [
{"name": "Test1", "template": "test1_template"},
{"name": "Test2", "template": "test2_template"},
{"name": "Test3", "template": "test3_template"},
]
bulk_update_data = {"template": "{{ router.hostname }}"}
@classmethod
def setUpTestData(cls):
Configuration.objects.bulk_create(
[
Configuration(name="Example 1", template="example_1"),
Configuration(name="Example 2", template="example_2"),
Configuration(name="Example 3", template="example_3"),
]
)
class DirectPeeringSessionTest(StandardAPITestCases.View):
model = DirectPeeringSession
brief_fields = ["id", "url", "display", "ip_address", "enabled"]
bulk_update_data = {"enabled": False}
@classmethod
def setUpTestData(cls):
local_autonomous_system = AutonomousSystem.objects.create(
asn=201281, name="Guillaume Mazoyer", affiliated=True
)
autonomous_system = AutonomousSystem.objects.create(asn=64500, name="Dummy")
relationship_private_peering = Relationship.objects.create(
name="Private Peering", slug="private-peering"
)
DirectPeeringSession.objects.bulk_create(
[
DirectPeeringSession(
local_autonomous_system=local_autonomous_system,
autonomous_system=autonomous_system,
relationship=relationship_private_peering,
ip_address="2001:db8::1",
password="mypassword",
),
DirectPeeringSession(
local_autonomous_system=local_autonomous_system,
autonomous_system=autonomous_system,
relationship=relationship_private_peering,
ip_address="2001:db8::2",
),
DirectPeeringSession(
local_autonomous_system=local_autonomous_system,
autonomous_system=autonomous_system,
relationship=relationship_private_peering,
ip_address="2001:db8::3",
),
]
)
cls.create_data = [
{
"service_reference": "PNI-0001",
"local_autonomous_system": local_autonomous_system.pk,
"autonomous_system": autonomous_system.pk,
"relationship": relationship_private_peering.pk,
"ip_address": "198.51.100.1",
},
{
"local_autonomous_system": local_autonomous_system.pk,
"autonomous_system": autonomous_system.pk,
"relationship": relationship_private_peering.pk,
"ip_address": "198.51.100.2",
},
{
"local_autonomous_system": local_autonomous_system.pk,
"autonomous_system": autonomous_system.pk,
"relationship": relationship_private_peering.pk,
"ip_address": "198.51.100.3",
},
]
class EmailTest(StandardAPITestCases.View):
model = Email
brief_fields = ["id", "url", "display", "name"]
create_data = [
{"name": "Test1", "subject": "test1_subject", "template": "test1_template"},
{"name": "Test2", "subject": "test2_subject", "template": "test2_template"},
{"name": "Test3", "subject": "test3_subject", "template": "test3_template"},
]
bulk_update_data = {"template": "{{ autonomous_system.asn }}"}
@classmethod
def setUpTestData(cls):
Email.objects.bulk_create(
[
Email(name="Example 1", subject="Example 1", template="example_1"),
Email(name="Example 2", subject="Example 2", template="example_2"),
Email(name="Example 3", subject="Example 3", template="example_3"),
]
)
class InternetExchangeTest(StandardAPITestCases.View):
model = InternetExchange
brief_fields = ["id", "url", "display", "name", "slug"]
bulk_update_data = {"comments": "Awesome IXP"}
@classmethod
def setUpTestData(cls):
cls.local_autonomous_system = AutonomousSystem.objects.create(
asn=201281, name="Guillaume Mazoyer", affiliated=True
)
InternetExchange.objects.bulk_create(
[
InternetExchange(
name="Example 1",
slug="example-1",
local_autonomous_system=cls.local_autonomous_system,
),
InternetExchange(
name="Example 2",
slug="example-2",
local_autonomous_system=cls.local_autonomous_system,
),
InternetExchange(
name="Example 3",
slug="example-3",
local_autonomous_system=cls.local_autonomous_system,
),
]
)
cls.internet_exchange = InternetExchange.objects.get(slug="example-1")
cls.create_data = [
{
"name": "Test1",
"slug": "test1",
"local_autonomous_system": cls.local_autonomous_system.pk,
},
{
"name": "Test2",
"slug": "test2",
"local_autonomous_system": cls.local_autonomous_system.pk,
},
{
"name": "Test3",
"slug": "test3",
"local_autonomous_system": cls.local_autonomous_system.pk,
},
]
def test_available_peers(self):
url = reverse(
"peering-api:internetexchange-available-peers",
kwargs={"pk": self.internet_exchange.pk},
)
response = self.client.get(url, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
def test_import_sessions(self):
url = reverse(
"peering-api:internetexchange-import-sessions",
kwargs={"pk": self.internet_exchange.pk},
)
response = self.client.post(url, **self.header)
self.assertHttpStatus(response, status.HTTP_202_ACCEPTED)
def test_prefixes(self):
url = reverse(
"peering-api:internetexchange-prefixes",
kwargs={"pk": self.internet_exchange.pk},
)
response = self.client.get(url, **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertDictEqual(response.data, {})
def test_poll_sessions(self):
url = reverse(
"peering-api:internetexchange-poll-sessions",
kwargs={"pk": self.internet_exchange.pk},
)
response = self.client.post(url, **self.header)
self.assertHttpStatus(response, status.HTTP_202_ACCEPTED)
class InternetExchangePeeringSessionTest(StandardAPITestCases.View):
model = InternetExchangePeeringSession
brief_fields = [
"id",
"url",
"display",
"ip_address",
"enabled",
"is_route_server",
]
bulk_update_data = {"enabled": False}
@classmethod
def setUpTestData(cls):
local_autonomous_system = AutonomousSystem.objects.create(
asn=201281, name="Guillaume Mazoyer", affiliated=True
)
autonomous_system = AutonomousSystem.objects.create(asn=64500, name="Dummy")
ixp = InternetExchange.objects.create(
name="Test", slug="test", local_autonomous_system=local_autonomous_system
)
ixp_connection = Connection.objects.create(
vlan=2000, internet_exchange_point=ixp
)
InternetExchangePeeringSession.objects.bulk_create(
[
InternetExchangePeeringSession(
autonomous_system=autonomous_system,
ixp_connection=ixp_connection,
ip_address="2001:db8::1",
password="mypassword",
),
InternetExchangePeeringSession(
autonomous_system=autonomous_system,
ixp_connection=ixp_connection,
ip_address="2001:db8::2",
),
InternetExchangePeeringSession(
autonomous_system=autonomous_system,
ixp_connection=ixp_connection,
ip_address="2001:db8::3",
),
]
)
cls.create_data = [
{
"service_reference": "IXP-0001",
"autonomous_system": autonomous_system.pk,
"ixp_connection": ixp_connection.pk,
"ip_address": "198.51.100.1",
},
{
"autonomous_system": autonomous_system.pk,
"ixp_connection": ixp_connection.pk,
"ip_address": "198.51.100.2",
},
{
"autonomous_system": autonomous_system.pk,
"ixp_connection": ixp_connection.pk,
"ip_address": "198.51.100.3",
},
]
class RouterTest(StandardAPITestCases.View):
model = Router
brief_fields = ["id", "url", "display", "name", "hostname"]
bulk_update_data = {"device_state": DeviceState.MAINTENANCE}
@classmethod
def setUpTestData(cls):
cls.local_autonomous_system = AutonomousSystem.objects.create(
asn=201281, name="Guillaume Mazoyer", affiliated=True
)
cls.platform = Platform.objects.create(name="No Bugs OS", slug="nobugsos")
cls.template = Configuration.objects.create(
name="Test", template="Nothing useful"
)
Router.objects.bulk_create(
[
Router(
name="Example 1",
hostname="1.example.com",
device_state=DeviceState.ENABLED,
configuration_template=cls.template,
local_autonomous_system=cls.local_autonomous_system,
),
Router(
name="Example 2",
hostname="2.example.com",
device_state=DeviceState.ENABLED,
configuration_template=cls.template,
local_autonomous_system=cls.local_autonomous_system,
),
Router(
name="Example 3",
hostname="3.example.com",
device_state=DeviceState.ENABLED,
configuration_template=cls.template,
local_autonomous_system=cls.local_autonomous_system,
),
]
)
cls.router = Router.objects.get(hostname="1.example.com")
cls.create_data = [
{
"name": "Test 1",
"hostname": "test1.example.com",
"device_state": DeviceState.ENABLED,
"configuration_template": cls.template.pk,
"local_autonomous_system": cls.local_autonomous_system.pk,
"platform": cls.platform.pk,
},
{
"name": "Test 2",
"hostname": "test2.example.com",
"device_state": DeviceState.MAINTENANCE,
"configuration_template": cls.template.pk,
"local_autonomous_system": cls.local_autonomous_system.pk,
"platform": cls.platform.pk,
},
{
"name": "Test 3",
"hostname": "test3.example.com",
"device_state": DeviceState.DISABLED,
"configuration_template": cls.template.pk,
"local_autonomous_system": cls.local_autonomous_system.pk,
"platform": cls.platform.pk,
},
]
def test_configuration(self):
url = reverse("peering-api:router-configuration", kwargs={"pk": self.router.pk})
response = self.client.get(url, **self.header)
self.assertHttpStatus(response, status.HTTP_202_ACCEPTED)
def test_test_napalm_connection(self):
url = reverse(
"peering-api:router-test-napalm-connection", kwargs={"pk": self.router.pk}
)
response = self.client.get(url, **self.header)
self.assertHttpStatus(response, status.HTTP_202_ACCEPTED)
class RoutingPolicyTest(StandardAPITestCases.View):
model = RoutingPolicy
brief_fields = ["id", "url", "display", "name", "slug", "type"]
create_data = [
{
"name": "Test1",
"slug": "test1",
"type": RoutingPolicyType.EXPORT,
"weight": 1,
},
{
"name": "Test2",
"slug": "test2",
"type": RoutingPolicyType.EXPORT,
"weight": 2,
},
{
"name": "Test3",
"slug": "test3",
"type": RoutingPolicyType.IMPORT_EXPORT,
"weight": 3,
},
]
bulk_update_data = {"comments": "Awesome routing policy"}
@classmethod
def setUpTestData(cls):
RoutingPolicy.objects.bulk_create(
[
RoutingPolicy(
name="Example 1",
slug="example-1",
type=RoutingPolicyType.EXPORT,
weight=0,
),
RoutingPolicy(
name="Example 2",
slug="example-2",
type=RoutingPolicyType.IMPORT,
weight=0,
),
RoutingPolicy(
name="Example 3",
slug="example-3",
type=RoutingPolicyType.EXPORT,
weight=0,
),
]
)
|
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_user
version_added: "2.4"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage the aggregate of local users on Cisco IOS device
description:
- This module provides declarative management of the local usernames
configured on network devices. It allows playbooks to manage
either individual usernames or the aggregate of usernames in the
current running config. It also supports purging usernames from the
configuration that are not explicitly defined.
notes:
- Tested against IOS 15.6
options:
aggregate:
description:
- The set of username objects to be configured on the remote
Cisco IOS device. The list entries can either be the username
or a hash of username and properties. This argument is mutually
        exclusive with the C(name) argument and is aliased as C(users).
name:
description:
- The username to be configured on the Cisco IOS device.
This argument accepts a string value and is mutually exclusive
with the C(aggregate) argument.
        Please note that this option is not the same as C(provider username).
configured_password:
description:
- The password to be configured on the Cisco IOS device. The
        password needs to be provided in clear text and it will be encrypted
        on the device.
        Please note that this option is not the same as C(provider password).
update_password:
description:
- Since passwords are encrypted in the device running config, this
argument will instruct the module when to change the password. When
set to C(always), the password will always be updated in the device
and when set to C(on_create) the password will be updated only if
the username is created.
default: always
choices: ['on_create', 'always']
privilege:
description:
- The C(privilege) argument configures the privilege level of the
user when logged into the system. This argument accepts integer
values in the range of 1 to 15.
view:
description:
- Configures the view for the username in the
device running configuration. The argument accepts a string value
defining the view name. This argument does not check if the view
has been configured on the device.
nopassword:
description:
- Defines the username without assigning
        a password. This will allow the user to log in to the system
without being authenticated by a password.
type: bool
purge:
description:
- Instructs the module to consider the
resource definition absolute. It will remove any previously
configured usernames on the device with the exception of the
        `admin` user and the currently defined set of users.
type: bool
default: false
state:
description:
- Configures the state of the username definition
as it relates to the device operational configuration. When set
to I(present), the username(s) should be configured in the device active
configuration and when set to I(absent) the username(s) should not be
in the device active configuration
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: create a new user
ios_user:
name: ansible
nopassword: True
state: present
- name: remove all users except admin
ios_user:
purge: yes
- name: set multiple users to privilege level 15
ios_user:
aggregate:
- name: netop
- name: netend
privilege: 15
state: present
- name: set user view/role
ios_user:
name: netop
view: network-operator
state: present
- name: Change Password for User netop
ios_user:
name: netop
configured_password: "{{ new_password }}"
update_password: always
state: present
- name: Aggregate of users
ios_user:
aggregate:
- name: ansibletest2
- name: ansibletest3
view: network-admin
- name: Delete users with aggregate
ios_user:
aggregate:
- name: ansibletest1
- name: ansibletest2
- name: ansibletest3
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- username ansible secret password
- username admin secret admin
"""
from copy import deepcopy
import re
import json
from functools import partial
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import remove_default_spec
from ansible.module_utils.ios import get_config, load_config
from ansible.module_utils.six import iteritems
from ansible.module_utils.ios import ios_argument_spec, check_args
def validate_privilege(value, module):
if value and not 1 <= value <= 15:
module.fail_json(msg='privilege must be between 1 and 15, got %s' % value)
def user_del_cmd(username):
return json.dumps({
'command': 'no username %s' % username,
'prompt': 'This operation will remove all username related configurations with same name',
'answer': 'y'
})
def map_obj_to_commands(updates, module):
commands = list()
state = module.params['state']
update_password = module.params['update_password']
def needs_update(want, have, x):
return want.get(x) and (want.get(x) != have.get(x))
def add(command, want, x):
command.append('username %s %s' % (want['name'], x))
for update in updates:
want, have = update
if want['state'] == 'absent':
commands.append(user_del_cmd(want['name']))
continue
if needs_update(want, have, 'view'):
add(commands, want, 'view %s' % want['view'])
if needs_update(want, have, 'privilege'):
add(commands, want, 'privilege %s' % want['privilege'])
if needs_update(want, have, 'configured_password'):
if update_password == 'always' or not have:
add(commands, want, 'secret %s' % want['configured_password'])
if needs_update(want, have, 'nopassword'):
if want['nopassword']:
add(commands, want, 'nopassword')
else:
add(commands, want, user_del_cmd(want['name']))
return commands
def parse_view(data):
match = re.search(r'view (\S+)', data, re.M)
if match:
return match.group(1)
def parse_privilege(data):
match = re.search(r'privilege (\S+)', data, re.M)
if match:
return int(match.group(1))
def map_config_to_obj(module):
data = get_config(module, flags=['| section username'])
match = re.findall(r'^username (\S+)', data, re.M)
if not match:
return list()
instances = list()
for user in set(match):
regex = r'username %s .+$' % user
cfg = re.findall(regex, data, re.M)
cfg = '\n'.join(cfg)
obj = {
'name': user,
'state': 'present',
'nopassword': 'nopassword' in cfg,
'configured_password': None,
'privilege': parse_privilege(cfg),
'view': parse_view(cfg)
}
instances.append(obj)
return instances
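# Example (illustrative only): a running-config line such as
#   username netop privilege 15 view netview
# is parsed by map_config_to_obj() into
#   {'name': 'netop', 'state': 'present', 'nopassword': False,
#    'configured_password': None, 'privilege': 15, 'view': 'netview'}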
def get_param_value(key, item, module):
# if key doesn't exist in the item, get it from module.params
if not item.get(key):
value = module.params[key]
# if key does exist, do a type check on it to validate it
else:
value_type = module.argument_spec[key].get('type', 'str')
type_checker = module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
type_checker(item[key])
value = item[key]
# validate the param value (if validator func exists)
validator = globals().get('validate_%s' % key)
if all((value, validator)):
validator(value, module)
return value
def map_params_to_obj(module):
users = module.params['aggregate']
if not users:
if not module.params['name'] and module.params['purge']:
return list()
elif not module.params['name']:
module.fail_json(msg='username is required')
else:
aggregate = [{'name': module.params['name']}]
else:
aggregate = list()
for item in users:
if not isinstance(item, dict):
aggregate.append({'name': item})
elif 'name' not in item:
module.fail_json(msg='name is required')
else:
aggregate.append(item)
objects = list()
for item in aggregate:
get_value = partial(get_param_value, item=item, module=module)
item['configured_password'] = get_value('configured_password')
item['nopassword'] = get_value('nopassword')
item['privilege'] = get_value('privilege')
item['view'] = get_value('view')
item['state'] = get_value('state')
objects.append(item)
return objects
def update_objects(want, have):
updates = list()
for entry in want:
item = next((i for i in have if i['name'] == entry['name']), None)
if all((item is None, entry['state'] == 'present')):
updates.append((entry, {}))
elif item:
for key, value in iteritems(entry):
if value and value != item[key]:
updates.append((entry, item))
return updates
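# Example (illustrative only): for a brand-new user update_objects() yields a
# (want, {}) pair, which map_obj_to_commands() then turns into commands such as
#   username netop privilege 15
#   username netop secret mypassword
# For an existing user, only the attributes that differ generate commands.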
def main():
""" main entry point for module execution
"""
element_spec = dict(
name=dict(),
configured_password=dict(no_log=True),
nopassword=dict(type='bool'),
update_password=dict(default='always', choices=['on_create', 'always']),
privilege=dict(type='int'),
view=dict(aliases=['role']),
state=dict(default='present', choices=['present', 'absent'])
)
aggregate_spec = deepcopy(element_spec)
aggregate_spec['name'] = dict(required=True)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec, aliases=['users', 'collection']),
purge=dict(type='bool', default=False)
)
argument_spec.update(element_spec)
argument_spec.update(ios_argument_spec)
mutually_exclusive = [('name', 'aggregate')]
module = AnsibleModule(argument_spec=argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=True)
warnings = list()
if module.params['password'] and not module.params['configured_password']:
warnings.append(
'The "password" argument is used to authenticate the current connection. ' +
'To set a user password use "configured_password" instead.'
)
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands(update_objects(want, have), module)
if module.params['purge']:
want_users = [x['name'] for x in want]
have_users = [x['name'] for x in have]
for item in set(have_users).difference(want_users):
if item != 'admin':
commands.append(user_del_cmd(item))
result['commands'] = commands
# the ios cli prevents this by rule so capture it and display
# a nice failure message
for cmd in commands:
if 'no username admin' in cmd:
module.fail_json(msg='cannot delete the `admin` account')
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
|
|
# GloVe model from the NLP lab at Stanford:
# http://nlp.stanford.edu/projects/glove/.
import array
import collections
import io
try:
# Python 2 compat
import cPickle as pickle
except ImportError:
import pickle
import numpy as np
import scipy.sparse as sp
from .glove_cython import fit_vectors, transform_paragraph
class Glove(object):
"""
Class for estimating GloVe word embeddings using the
    corpus co-occurrence matrix.
"""
def __init__(self, no_components=30, learning_rate=0.05,
alpha=0.75, max_count=100, max_loss=10.0):
"""
Parameters:
- int no_components: number of latent dimensions
- float learning_rate: learning rate for SGD estimation.
- float alpha, float max_count: parameters for the
weighting function (see the paper).
- float max_loss: the maximum absolute value of calculated
gradient for any single co-occurrence pair.
Only try setting to a lower value if you
are experiencing problems with numerical
stability.
"""
self.no_components = no_components
self.learning_rate = float(learning_rate)
self.alpha = float(alpha)
self.max_count = float(max_count)
self.max_loss = max_loss
self.word_vectors = None
self.word_biases = None
self.vectors_sum_gradients = None
self.biases_sum_gradients = None
self.dictionary = None
self.inverse_dictionary = None
def __getitem__(self, key):
try:
return self.word_vectors[self.dictionary[key]]
except KeyError:
raise KeyError("Word not found")
except TypeError:
raise TypeError("Word Vectors/Dictionary not found")
def fit(self, matrix, epochs=5, no_threads=2, verbose=False):
"""
Estimate the word embeddings.
Parameters:
        - scipy.sparse.coo_matrix matrix: co-occurrence matrix
- int epochs: number of training epochs
- int no_threads: number of training threads
- bool verbose: print progress messages if True
"""
shape = matrix.shape
if (len(shape) != 2 or
shape[0] != shape[1]):
            raise Exception('Co-occurrence matrix must be square')
        if not sp.isspmatrix_coo(matrix):
            raise Exception('Co-occurrence matrix must be in the COO format')
self.word_vectors = ((np.random.rand(shape[0],
self.no_components) - 0.5)
/ self.no_components)
self.word_biases = np.zeros(shape[0],
dtype=np.float64)
self.vectors_sum_gradients = np.ones_like(self.word_vectors)
self.biases_sum_gradients = np.ones_like(self.word_biases)
shuffle_indices = np.arange(matrix.nnz, dtype=np.int32)
if verbose:
print('Performing %s training epochs '
'with %s threads' % (epochs, no_threads))
for epoch in range(epochs):
if verbose:
print('Epoch %s' % epoch)
            # Shuffle the co-occurrence matrix
np.random.shuffle(shuffle_indices)
fit_vectors(self.word_vectors,
self.vectors_sum_gradients,
self.word_biases,
self.biases_sum_gradients,
matrix.row,
matrix.col,
matrix.data,
shuffle_indices,
self.learning_rate,
self.max_count,
self.alpha,
self.max_loss,
int(no_threads))
if not np.isfinite(self.word_vectors).all():
raise Exception('Non-finite values in word vectors. '
'Try reducing the learning rate or the '
'max_loss parameter.')
def transform_paragraph(self, paragraph, epochs=50, ignore_missing=False):
"""
Transform an iterable of tokens into its vector representation
(a paragraph vector).
Experimental. This will return something close to a tf-idf
weighted average of constituent token vectors by fitting
rare words (with low word bias values) more closely.
"""
if self.word_vectors is None:
raise Exception('Model must be fit to transform paragraphs')
if self.dictionary is None:
raise Exception('Dictionary must be provided to '
'transform paragraphs')
cooccurrence = collections.defaultdict(lambda: 0.0)
for token in paragraph:
try:
cooccurrence[self.dictionary[token]] += self.max_count / 10.0
except KeyError:
if not ignore_missing:
raise
        # list() so the conversion also works with Python 3 dict views
        word_ids = np.array(list(cooccurrence.keys()), dtype=np.int32)
        values = np.array(list(cooccurrence.values()), dtype=np.float64)
shuffle_indices = np.arange(len(word_ids), dtype=np.int32)
# Initialize the vector to mean of constituent word vectors
paragraph_vector = np.mean(self.word_vectors[word_ids], axis=0)
sum_gradients = np.ones_like(paragraph_vector)
        # Shuffle the co-occurrence entries
np.random.shuffle(shuffle_indices)
transform_paragraph(self.word_vectors,
self.word_biases,
paragraph_vector,
sum_gradients,
word_ids,
values,
shuffle_indices,
self.learning_rate,
self.max_count,
self.alpha,
epochs)
return paragraph_vector
def add_dictionary(self, dictionary):
"""
Supply a word-id dictionary to allow similarity queries.
"""
if self.word_vectors is None:
raise Exception('Model must be fit before adding a dictionary')
if len(dictionary) > self.word_vectors.shape[0]:
raise Exception('Dictionary length must be smaller '
'or equal to the number of word vectors')
self.dictionary = dictionary
if hasattr(self.dictionary, 'iteritems'):
# Python 2 compat
items_iterator = self.dictionary.iteritems()
else:
items_iterator = self.dictionary.items()
self.inverse_dictionary = {v: k for k, v in items_iterator}
def save(self, filename):
"""
Serialize model to filename.
"""
with open(filename, 'wb') as savefile:
pickle.dump(self.__dict__,
savefile,
protocol=pickle.HIGHEST_PROTOCOL)
@classmethod
def load(cls, filename):
"""
Load model from filename.
"""
instance = Glove()
with open(filename, 'rb') as savefile:
instance.__dict__ = pickle.load(savefile)
return instance
@classmethod
def load_stanford(cls, filename):
"""
Load model from the output files generated by
the C code from http://nlp.stanford.edu/projects/glove/.
The entries of the word dictionary will be of type
unicode in Python 2 and str in Python 3.
"""
dct = {}
vectors = array.array('d')
# Read in the data.
with io.open(filename, 'r', encoding='utf-8') as savefile:
for i, line in enumerate(savefile):
tokens = line.split(' ')
word = tokens[0]
entries = tokens[1:]
dct[word] = i
vectors.extend(float(x) for x in entries)
# Infer word vectors dimensions.
no_components = len(entries)
no_vectors = len(dct)
# Set up the model instance.
instance = Glove()
instance.no_components = no_components
instance.word_vectors = (np.array(vectors)
.reshape(no_vectors,
no_components))
instance.word_biases = np.zeros(no_vectors)
instance.add_dictionary(dct)
return instance
def _similarity_query(self, word_vec, number):
dst = (np.dot(self.word_vectors, word_vec)
/ np.linalg.norm(self.word_vectors, axis=1)
/ np.linalg.norm(word_vec))
word_ids = np.argsort(-dst)
return [(self.inverse_dictionary[x], dst[x]) for x in word_ids[:number]
if x in self.inverse_dictionary]
def most_similar(self, word, number=5):
"""
Run a similarity query, retrieving number
most similar words.
"""
if self.word_vectors is None:
raise Exception('Model must be fit before querying')
if self.dictionary is None:
raise Exception('No word dictionary supplied')
try:
word_idx = self.dictionary[word]
except KeyError:
raise Exception('Word not in dictionary')
return self._similarity_query(self.word_vectors[word_idx], number)[1:]
def most_similar_paragraph(self, paragraph, number=5, **kwargs):
"""
Return words most similar to a given paragraph (iterable of tokens).
"""
paragraph_vector = self.transform_paragraph(paragraph, **kwargs)
return self._similarity_query(paragraph_vector, number)
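# --- Example usage (a minimal sketch, not executed as part of the module) ---
# Assumes a square word-by-word co-occurrence matrix in scipy COO format and a
# matching word -> row-index dictionary built elsewhere; the literals below are
# placeholders.
#
#   import scipy.sparse as sp
#   from glove import Glove
#
#   cooccurrence = sp.coo_matrix(counts)     # square word x word counts
#   dictionary = {"the": 0, "cat": 1}        # word -> row index
#
#   model = Glove(no_components=100, learning_rate=0.05)
#   model.fit(cooccurrence, epochs=10, no_threads=4, verbose=True)
#   model.add_dictionary(dictionary)
#   model.most_similar("cat", number=5)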
|
|
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib.parse
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, TypeVar
import bleach
import jinja2
from synapse.api.constants import EventTypes, Membership, RoomTypes
from synapse.api.errors import StoreError
from synapse.config.emailconfig import EmailSubjectConfig
from synapse.events import EventBase
from synapse.push.presentable_names import (
calculate_room_name,
descriptor_from_member_events,
name_from_member_event,
)
from synapse.push.push_types import (
EmailReason,
MessageVars,
NotifVars,
RoomVars,
TemplateVars,
)
from synapse.storage.databases.main.event_push_actions import EmailPushAction
from synapse.storage.state import StateFilter
from synapse.types import StateMap, UserID
from synapse.util.async_helpers import concurrently_execute
from synapse.visibility import filter_events_for_client
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
T = TypeVar("T")
CONTEXT_BEFORE = 1
CONTEXT_AFTER = 1
# From https://github.com/matrix-org/matrix-react-sdk/blob/master/src/HtmlUtils.js
ALLOWED_TAGS = [
"font", # custom to matrix for IRC-style font coloring
"del", # for markdown
# deliberately no h1/h2 to stop people shouting.
"h3",
"h4",
"h5",
"h6",
"blockquote",
"p",
"a",
"ul",
"ol",
"nl",
"li",
"b",
"i",
"u",
"strong",
"em",
"strike",
"code",
"hr",
"br",
"div",
"table",
"thead",
"caption",
"tbody",
"tr",
"th",
"td",
"pre",
]
ALLOWED_ATTRS = {
# custom ones first:
"font": ["color"], # custom to matrix
"a": ["href", "name", "target"], # remote target: custom to matrix
# We don't currently allow img itself by default, but this
# would make sense if we did
"img": ["src"],
}
# When bleach release a version with this option, we can specify schemes
# ALLOWED_SCHEMES = ["http", "https", "ftp", "mailto"]
class Mailer:
def __init__(
self,
hs: "HomeServer",
app_name: str,
template_html: jinja2.Template,
template_text: jinja2.Template,
):
self.hs = hs
self.template_html = template_html
self.template_text = template_text
self.send_email_handler = hs.get_send_email_handler()
self.store = self.hs.get_datastores().main
self.state_store = self.hs.get_storage().state
self.macaroon_gen = self.hs.get_macaroon_generator()
self.state_handler = self.hs.get_state_handler()
self.storage = hs.get_storage()
self.app_name = app_name
self.email_subjects: EmailSubjectConfig = hs.config.email.email_subjects
logger.info("Created Mailer for app_name %s" % app_name)
async def send_password_reset_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a password reset link to a user
Args:
email_address: Email address we're sending the password
reset to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.server.public_baseurl
+ "_synapse/client/password_reset/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars: TemplateVars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.password_reset
% {"server_name": self.hs.config.server.server_name},
template_vars,
)
async def send_registration_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a registration confirmation link to a user
Args:
email_address: Email address we're sending the registration
link to
token: Unique token generated by the server to verify
the email was received
client_secret: Unique token generated by the client to
group together multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.server.public_baseurl
+ "_matrix/client/unstable/registration/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars: TemplateVars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server.server_name, "app": self.app_name},
template_vars,
)
async def send_add_threepid_mail(
self, email_address: str, token: str, client_secret: str, sid: str
) -> None:
"""Send an email with a validation link to a user for adding a 3pid to their account
Args:
email_address: Email address we're sending the validation link to
token: Unique token generated by the server to verify the email was received
client_secret: Unique token generated by the client to group together
multiple email sending attempts
sid: The generated session ID
"""
params = {"token": token, "client_secret": client_secret, "sid": sid}
link = (
self.hs.config.server.public_baseurl
+ "_matrix/client/unstable/add_threepid/email/submit_token?%s"
% urllib.parse.urlencode(params)
)
template_vars: TemplateVars = {"link": link}
await self.send_email(
email_address,
self.email_subjects.email_validation
% {"server_name": self.hs.config.server.server_name, "app": self.app_name},
template_vars,
)
async def send_notification_mail(
self,
app_id: str,
user_id: str,
email_address: str,
push_actions: Iterable[EmailPushAction],
reason: EmailReason,
) -> None:
"""
Send email regarding a user's room notifications
Params:
app_id: The application receiving the notification.
user_id: The user receiving the notification.
email_address: The email address receiving the notification.
push_actions: All outstanding notifications.
reason: The notification that was ready and is the cause of an email
being sent.
"""
rooms_in_order = deduped_ordered_list([pa.room_id for pa in push_actions])
notif_events = await self.store.get_events([pa.event_id for pa in push_actions])
notifs_by_room: Dict[str, List[EmailPushAction]] = {}
for pa in push_actions:
notifs_by_room.setdefault(pa.room_id, []).append(pa)
# collect the current state for all the rooms in which we have
# notifications
state_by_room = {}
try:
user_display_name = await self.store.get_profile_displayname(
UserID.from_string(user_id).localpart
)
if user_display_name is None:
user_display_name = user_id
except StoreError:
user_display_name = user_id
async def _fetch_room_state(room_id: str) -> None:
room_state = await self.store.get_current_state_ids(room_id)
state_by_room[room_id] = room_state
# Run at most 3 of these at once: sync does 10 at a time but email
# notifs are much less realtime than sync so we can afford to wait a bit.
await concurrently_execute(_fetch_room_state, rooms_in_order, 3)
# actually sort our so-called rooms_in_order list, most recent room first
rooms_in_order.sort(key=lambda r: -(notifs_by_room[r][-1].received_ts or 0))
rooms: List[RoomVars] = []
for r in rooms_in_order:
roomvars = await self._get_room_vars(
r, user_id, notifs_by_room[r], notif_events, state_by_room[r]
)
rooms.append(roomvars)
reason["room_name"] = await calculate_room_name(
self.store,
state_by_room[reason["room_id"]],
user_id,
fallback_to_members=True,
)
if len(notifs_by_room) == 1:
# Only one room has new stuff
room_id = list(notifs_by_room.keys())[0]
summary_text = await self._make_summary_text_single_room(
room_id,
notifs_by_room[room_id],
state_by_room[room_id],
notif_events,
user_id,
)
else:
summary_text = await self._make_summary_text(
notifs_by_room, state_by_room, notif_events, reason
)
template_vars: TemplateVars = {
"user_display_name": user_display_name,
"unsubscribe_link": self._make_unsubscribe_link(
user_id, app_id, email_address
),
"summary_text": summary_text,
"rooms": rooms,
"reason": reason,
}
await self.send_email(email_address, summary_text, template_vars)
async def send_email(
self, email_address: str, subject: str, extra_template_vars: TemplateVars
) -> None:
"""Send an email with the given information and template text"""
template_vars: TemplateVars = {
"app_name": self.app_name,
"server_name": self.hs.config.server.server_name,
}
template_vars.update(extra_template_vars)
html_text = self.template_html.render(**template_vars)
plain_text = self.template_text.render(**template_vars)
await self.send_email_handler.send_email(
email_address=email_address,
subject=subject,
app_name=self.app_name,
html=html_text,
text=plain_text,
)
async def _get_room_vars(
self,
room_id: str,
user_id: str,
notifs: Iterable[EmailPushAction],
notif_events: Dict[str, EventBase],
room_state_ids: StateMap[str],
) -> RoomVars:
"""
Generate the variables for notifications on a per-room basis.
Args:
room_id: The room ID
user_id: The user receiving the notification.
notifs: The outstanding push actions for this room.
notif_events: The events related to the above notifications.
room_state_ids: The event IDs of the current room state.
Returns:
A dictionary to be added to the template context.
"""
# Check if one of the notifs is an invite event for the user.
is_invite = False
for n in notifs:
ev = notif_events[n.event_id]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
is_invite = True
break
room_name = await calculate_room_name(self.store, room_state_ids, user_id)
room_vars: RoomVars = {
"title": room_name,
"hash": string_ordinal_total(room_id), # See sender avatar hash
"notifs": [],
"invite": is_invite,
"link": self._make_room_link(room_id),
"avatar_url": await self._get_room_avatar(room_state_ids),
}
if not is_invite:
for n in notifs:
notifvars = await self._get_notif_vars(
n, user_id, notif_events[n.event_id], room_state_ids
)
# merge overlapping notifs together.
# relies on the notifs being in chronological order.
merge = False
if room_vars["notifs"] and "messages" in room_vars["notifs"][-1]:
prev_messages = room_vars["notifs"][-1]["messages"]
for message in notifvars["messages"]:
pm = list(
filter(lambda pm: pm["id"] == message["id"], prev_messages)
)
if pm:
if not message["is_historical"]:
pm[0]["is_historical"] = False
merge = True
elif merge:
# we're merging, so append any remaining messages
# in this notif to the previous one
prev_messages.append(message)
if not merge:
room_vars["notifs"].append(notifvars)
return room_vars
async def _get_room_avatar(
self,
room_state_ids: StateMap[str],
) -> Optional[str]:
"""
Retrieve the avatar url for this room---if it exists.
Args:
room_state_ids: The event IDs of the current room state.
Returns:
room's avatar url if it's present and a string; otherwise None.
"""
event_id = room_state_ids.get((EventTypes.RoomAvatar, ""))
if event_id:
ev = await self.store.get_event(event_id)
url = ev.content.get("url")
if isinstance(url, str):
return url
return None
async def _get_notif_vars(
self,
notif: EmailPushAction,
user_id: str,
notif_event: EventBase,
room_state_ids: StateMap[str],
) -> NotifVars:
"""
Generate the variables for a single notification.
Args:
notif: The outstanding notification for this room.
user_id: The user receiving the notification.
notif_event: The event related to the above notification.
room_state_ids: The event IDs of the current room state.
Returns:
A dictionary to be added to the template context.
"""
results = await self.store.get_events_around(
notif.room_id,
notif.event_id,
before_limit=CONTEXT_BEFORE,
after_limit=CONTEXT_AFTER,
)
ret: NotifVars = {
"link": self._make_notif_link(notif),
"ts": notif.received_ts,
"messages": [],
}
the_events = await filter_events_for_client(
self.storage, user_id, results.events_before
)
the_events.append(notif_event)
for event in the_events:
messagevars = await self._get_message_vars(notif, event, room_state_ids)
if messagevars is not None:
ret["messages"].append(messagevars)
return ret
async def _get_message_vars(
self, notif: EmailPushAction, event: EventBase, room_state_ids: StateMap[str]
) -> Optional[MessageVars]:
"""
Generate the variables for a single event, if possible.
Args:
notif: The outstanding notification for this room.
event: The event under consideration.
room_state_ids: The event IDs of the current room state.
Returns:
A dictionary to be added to the template context, or None if the
event cannot be processed.
"""
if event.type != EventTypes.Message and event.type != EventTypes.Encrypted:
return None
# Get the sender's name and avatar from the room state.
type_state_key = ("m.room.member", event.sender)
sender_state_event_id = room_state_ids.get(type_state_key)
if sender_state_event_id:
sender_state_event: Optional[EventBase] = await self.store.get_event(
sender_state_event_id
)
else:
# Attempt to check the historical state for the room.
historical_state = await self.state_store.get_state_for_event(
event.event_id, StateFilter.from_types((type_state_key,))
)
sender_state_event = historical_state.get(type_state_key)
if sender_state_event:
sender_name = name_from_member_event(sender_state_event)
sender_avatar_url: Optional[str] = sender_state_event.content.get(
"avatar_url"
)
else:
# No state could be found, fallback to the MXID.
sender_name = event.sender
sender_avatar_url = None
# 'hash' for deterministically picking default images: use
# sender_hash % the number of default images to choose from
sender_hash = string_ordinal_total(event.sender)
ret: MessageVars = {
"event_type": event.type,
"is_historical": event.event_id != notif.event_id,
"id": event.event_id,
"ts": event.origin_server_ts,
"sender_name": sender_name,
"sender_avatar_url": sender_avatar_url,
"sender_hash": sender_hash,
}
# Encrypted messages don't have any additional useful information.
if event.type == EventTypes.Encrypted:
return ret
msgtype = event.content.get("msgtype")
if not isinstance(msgtype, str):
msgtype = None
ret["msgtype"] = msgtype
if msgtype == "m.text":
self._add_text_message_vars(ret, event)
elif msgtype == "m.image":
self._add_image_message_vars(ret, event)
if "body" in event.content:
ret["body_text_plain"] = event.content["body"]
return ret
def _add_text_message_vars(
self, messagevars: MessageVars, event: EventBase
) -> None:
"""
Potentially add a sanitised message body to the message variables.
Args:
messagevars: The template context to be modified.
event: The event under consideration.
"""
msgformat = event.content.get("format")
if not isinstance(msgformat, str):
msgformat = None
formatted_body = event.content.get("formatted_body")
body = event.content.get("body")
if msgformat == "org.matrix.custom.html" and formatted_body:
messagevars["body_text_html"] = safe_markup(formatted_body)
elif body:
messagevars["body_text_html"] = safe_text(body)
def _add_image_message_vars(
self, messagevars: MessageVars, event: EventBase
) -> None:
"""
Potentially add an image URL to the message variables.
Args:
messagevars: The template context to be modified.
event: The event under consideration.
"""
if "url" in event.content:
messagevars["image_url"] = event.content["url"]
async def _make_summary_text_single_room(
self,
room_id: str,
notifs: List[EmailPushAction],
room_state_ids: StateMap[str],
notif_events: Dict[str, EventBase],
user_id: str,
) -> str:
"""
Make a summary text for the email when only a single room has notifications.
Args:
room_id: The ID of the room.
notifs: The push actions for this room.
room_state_ids: The state map for the room.
notif_events: A map of event ID -> notification event.
user_id: The user receiving the notification.
Returns:
The summary text.
"""
# If the room has some kind of name, use it, but we don't
# want the generated-from-names one here otherwise we'll
# end up with, "new message from Bob in the Bob room"
room_name = await calculate_room_name(
self.store, room_state_ids, user_id, fallback_to_members=False
)
# See if one of the notifs is an invite event for the user
invite_event = None
for n in notifs:
ev = notif_events[n.event_id]
if ev.type == EventTypes.Member and ev.state_key == user_id:
if ev.content.get("membership") == Membership.INVITE:
invite_event = ev
break
if invite_event:
inviter_member_event_id = room_state_ids.get(
("m.room.member", invite_event.sender)
)
inviter_name = invite_event.sender
if inviter_member_event_id:
inviter_member_event = await self.store.get_event(
inviter_member_event_id, allow_none=True
)
if inviter_member_event:
inviter_name = name_from_member_event(inviter_member_event)
if room_name is None:
return self.email_subjects.invite_from_person % {
"person": inviter_name,
"app": self.app_name,
}
# If the room is a space, it gets a slightly different topic.
create_event_id = room_state_ids.get(("m.room.create", ""))
if create_event_id:
create_event = await self.store.get_event(
create_event_id, allow_none=True
)
if (
create_event
and create_event.content.get("room_type") == RoomTypes.SPACE
):
return self.email_subjects.invite_from_person_to_space % {
"person": inviter_name,
"space": room_name,
"app": self.app_name,
}
return self.email_subjects.invite_from_person_to_room % {
"person": inviter_name,
"room": room_name,
"app": self.app_name,
}
if len(notifs) == 1:
# There is just the one notification, so give some detail
sender_name = None
event = notif_events[notifs[0].event_id]
if ("m.room.member", event.sender) in room_state_ids:
state_event_id = room_state_ids[("m.room.member", event.sender)]
state_event = await self.store.get_event(state_event_id)
sender_name = name_from_member_event(state_event)
if sender_name is not None and room_name is not None:
return self.email_subjects.message_from_person_in_room % {
"person": sender_name,
"room": room_name,
"app": self.app_name,
}
elif sender_name is not None:
return self.email_subjects.message_from_person % {
"person": sender_name,
"app": self.app_name,
}
# The sender is unknown, just use the room name (or ID).
return self.email_subjects.messages_in_room % {
"room": room_name or room_id,
"app": self.app_name,
}
else:
# There's more than one notification for this room, so just
# say there are several
if room_name is not None:
return self.email_subjects.messages_in_room % {
"room": room_name,
"app": self.app_name,
}
return await self._make_summary_text_from_member_events(
room_id, notifs, room_state_ids, notif_events
)
async def _make_summary_text(
self,
notifs_by_room: Dict[str, List[EmailPushAction]],
room_state_ids: Dict[str, StateMap[str]],
notif_events: Dict[str, EventBase],
reason: EmailReason,
) -> str:
"""
Make a summary text for the email when multiple rooms have notifications.
Args:
notifs_by_room: A map of room ID to the push actions for that room.
room_state_ids: A map of room ID to the state map for that room.
notif_events: A map of event ID -> notification event.
reason: The reason this notification is being sent.
Returns:
The summary text.
"""
# Stuff's happened in multiple different rooms
# ...but we still refer to the 'reason' room which triggered the mail
if reason["room_name"] is not None:
return self.email_subjects.messages_in_room_and_others % {
"room": reason["room_name"],
"app": self.app_name,
}
room_id = reason["room_id"]
return await self._make_summary_text_from_member_events(
room_id, notifs_by_room[room_id], room_state_ids[room_id], notif_events
)
async def _make_summary_text_from_member_events(
self,
room_id: str,
notifs: List[EmailPushAction],
room_state_ids: StateMap[str],
notif_events: Dict[str, EventBase],
) -> str:
"""
Make a summary text for the email when only a single room has notifications.
Args:
room_id: The ID of the room.
notifs: The push actions for this room.
room_state_ids: The state map for the room.
notif_events: A map of event ID -> notification event.
Returns:
The summary text.
"""
# If the room doesn't have a name, say who the messages
# are from explicitly to avoid, "messages in the Bob room"
# Find the latest event ID for each sender, note that the notifications
# are already in descending received_ts.
sender_ids = {}
for n in notifs:
sender = notif_events[n.event_id].sender
if sender not in sender_ids:
sender_ids[sender] = n.event_id
# Get the actual member events (in order to calculate a pretty name for
# the room).
member_event_ids = []
member_events = {}
for sender_id, event_id in sender_ids.items():
type_state_key = ("m.room.member", sender_id)
sender_state_event_id = room_state_ids.get(type_state_key)
if sender_state_event_id:
member_event_ids.append(sender_state_event_id)
else:
# Attempt to check the historical state for the room.
historical_state = await self.state_store.get_state_for_event(
event_id, StateFilter.from_types((type_state_key,))
)
sender_state_event = historical_state.get(type_state_key)
if sender_state_event:
member_events[event_id] = sender_state_event
member_events.update(await self.store.get_events(member_event_ids))
if not member_events:
# No member events were found! Maybe the room is empty?
# Fallback to the room ID (note that if there was a room name this
# would already have been used previously).
return self.email_subjects.messages_in_room % {
"room": room_id,
"app": self.app_name,
}
# There was a single sender.
if len(member_events) == 1:
return self.email_subjects.messages_from_person % {
"person": descriptor_from_member_events(member_events.values()),
"app": self.app_name,
}
# There was more than one sender, use the first one and a tweaked template.
return self.email_subjects.messages_from_person_and_others % {
"person": descriptor_from_member_events(list(member_events.values())[:1]),
"app": self.app_name,
}
def _make_room_link(self, room_id: str) -> str:
"""
Generate a link to open a room in the web client.
Args:
room_id: The room ID to generate a link to.
Returns:
A link to open a room in the web client.
"""
if self.hs.config.email.email_riot_base_url:
base_url = "%s/#/room" % (self.hs.config.email.email_riot_base_url)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
base_url = "https://vector.im/beta/#/room"
else:
base_url = "https://matrix.to/#"
return "%s/%s" % (base_url, room_id)
def _make_notif_link(self, notif: EmailPushAction) -> str:
"""
Generate a link to open an event in the web client.
Args:
notif: The notification to generate a link for.
Returns:
A link to open the notification in the web client.
"""
if self.hs.config.email.email_riot_base_url:
return "%s/#/room/%s/%s" % (
self.hs.config.email.email_riot_base_url,
notif.room_id,
notif.event_id,
)
elif self.app_name == "Vector":
# need /beta for Universal Links to work on iOS
return "https://vector.im/beta/#/room/%s/%s" % (
notif.room_id,
notif.event_id,
)
else:
return "https://matrix.to/#/%s/%s" % (notif.room_id, notif.event_id)
def _make_unsubscribe_link(
self, user_id: str, app_id: str, email_address: str
) -> str:
"""
Generate a link to unsubscribe from email notifications.
Args:
user_id: The user receiving the notification.
app_id: The application receiving the notification.
email_address: The email address receiving the notification.
Returns:
A link to unsubscribe from email notifications.
"""
params = {
"access_token": self.macaroon_gen.generate_delete_pusher_token(user_id),
"app_id": app_id,
"pushkey": email_address,
}
# XXX: make r0 once API is stable
return "%s_matrix/client/unstable/pushers/remove?%s" % (
self.hs.config.server.public_baseurl,
urllib.parse.urlencode(params),
)
def safe_markup(raw_html: str) -> jinja2.Markup:
"""
Sanitise a raw HTML string to a set of allowed tags and attributes, and linkify any bare URLs.
    Args:
raw_html: Unsafe HTML.
Returns:
A Markup object ready to safely use in a Jinja template.
"""
return jinja2.Markup(
bleach.linkify(
bleach.clean(
raw_html,
tags=ALLOWED_TAGS,
attributes=ALLOWED_ATTRS,
# bleach master has this, but it isn't released yet
# protocols=ALLOWED_SCHEMES,
strip=True,
)
)
)
def safe_text(raw_text: str) -> jinja2.Markup:
"""
Sanitise text (escape any HTML tags), and then linkify any bare URLs.
    Args:
raw_text: Unsafe text which might include HTML markup.
Returns:
A Markup object ready to safely use in a Jinja template.
"""
return jinja2.Markup(
bleach.linkify(bleach.clean(raw_text, tags=[], attributes=[], strip=False))
)
def deduped_ordered_list(it: Iterable[T]) -> List[T]:
seen = set()
ret = []
for item in it:
if item not in seen:
seen.add(item)
ret.append(item)
return ret
def string_ordinal_total(s: str) -> int:
tot = 0
for c in s:
tot += ord(c)
return tot
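# Example: string_ordinal_total("ab") == 97 + 98 == 195. The email templates use
# this value modulo the number of default images to deterministically pick an
# avatar for rooms and senders that have none (see the "hash"/"sender_hash"
# fields above).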
|
|
from glob import glob
import logging
from OpenSSL.crypto import FILETYPE_PEM
from mcloud.application import ApplicationController
import os
import sys
import netifaces
from traceback import print_tb
import traceback
import inject
from mcloud.deployment import DeploymentController
from mcloud.plugin import IMcloudPlugin
import pkg_resources
from twisted.internet import reactor
from twisted.internet._sslverify import KeyPair
from twisted.internet.defer import inlineCallbacks
from twisted.internet.protocol import Factory
import txredisapi
from twisted.python import log
from mcloud.util import txtimeout
from zope.interface.verify import verifyClass
log.startLogging(sys.stdout)
Factory.noisy = False
def get_argparser():
import argparse
parser = argparse.ArgumentParser(description='Mcloud rpc server')
parser.add_argument('--config', default='/etc/mcloud/mcloud-server.yml', help='Config file path')
parser.add_argument('--no-ssl', default=False, action='store_true', help='Disable ssl')
return parser
from confire import Configuration
class SslConfiguration(Configuration):
enabled = False
key = '/etc/mcloud/server.key'
cert = '/etc/mcloud/server.crt'
ca = '/etc/mcloud/ca.crt'
class RedisConfiguration(Configuration):
host = 'localhost'
port = 6379
password = None
dbid = 1
timeout = 3
class McloudConfiguration(Configuration):
haproxy = False
web = True
dns_ip = None
dns_port = 7053
websocket_ip = '0.0.0.0'
websocket_port = 7080
dns_search_suffix = 'mcloud.lh'
ssl = SslConfiguration()
redis = RedisConfiguration()
home_dir = '/root/.mcloud'
btrfs = False
demo_mode = False
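# Example /etc/mcloud/mcloud-server.yml overriding a few of the defaults above
# (a minimal sketch; confire maps top-level YAML keys onto McloudConfiguration
# and nested sections onto SslConfiguration/RedisConfiguration):
#
#   websocket_port: 7080
#   dns_search_suffix: mcloud.lh
#   ssl:
#       enabled: true
#       key: /etc/mcloud/server.key
#   redis:
#       host: localhost
#       dbid: 1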
def entry_point():
console_handler = logging.StreamHandler(stream=sys.stderr)
console_handler.setFormatter(logging.Formatter(fmt='[%(asctime)s][%(levelname)s][%(name)s] %(message)s'))
console_handler.setLevel(logging.ERROR)
root_logger = logging.getLogger()
root_logger.addHandler(console_handler)
root_logger.setLevel(logging.ERROR)
log.msg('Logger initialized')
parser = get_argparser()
args = parser.parse_args()
class _McloudConfiguration(McloudConfiguration):
CONF_PATHS = [args.config]
settings = _McloudConfiguration.load()
if args.no_ssl:
settings.ssl.enabled = False
def resolve_host_ip():
if 'docker0' in netifaces.interfaces():
return netifaces.ifaddresses('docker0')[netifaces.AF_INET][0]['addr']
else:
import netinfo
host_ip = None
for route in netinfo.get_routes():
if route['dest'] == '0.0.0.0': # default route
host_ip = route['gateway']
if not host_ip:
reactor.stop()
print('ERROR: Can not get default route - can not connect to Docker')
return host_ip
@inlineCallbacks
def run_server(redis):
from mcloud.events import EventBus
from mcloud.remote import ApiRpcServer, Server
from mcloud.tasks import TaskService
log.msg('Running server')
eb = EventBus(redis)
log.msg('Connecting event bus')
yield eb.connect()
log.msg('Configuring injector.')
plugins_loaded = []
def my_config(binder):
binder.bind(txredisapi.Connection, redis)
binder.bind(EventBus, eb)
binder.bind('settings', settings)
binder.bind('host-ip', resolve_host_ip())
binder.bind('dns-search-suffix', settings.dns_search_suffix)
binder.bind('plugins', plugins_loaded)
# Configure a shared injector.
inject.configure(my_config)
api = inject.instance(ApiRpcServer)
tasks = inject.instance(TaskService)
api.tasks = tasks.collect_tasks()
log.msg('Starting rpc listener on port %d' % settings.websocket_port)
server = Server(port=settings.websocket_port)
server.bind()
# load plugins
for ep in pkg_resources.iter_entry_points(group='mcloud_plugins'):
try:
plugin_class = ep.load()
log.msg('=' * 80)
log.msg('Loading plugin %s' % plugin_class)
log.msg('-' * 80)
yield verifyClass(IMcloudPlugin, plugin_class)
plugin = plugin_class()
yield plugin.setup()
plugins_loaded.append(plugin)
print "Loaded %s - OK" % plugin_class
except Exception as e:
print '!-' * 40
print e.__class__.__name__
print e
print(traceback.format_exc())
print '!-' * 40
# reactor.stop()
log.msg('=' * 80)
log.msg('-' * 80)
log.msg('All plugins loaded.')
log.msg('=' * 80)
deployment_controller = inject.instance(DeploymentController)
yield deployment_controller.configure_docker_machine()
log.msg('Started.')
def timeout():
print('Can not connect to redis!')
reactor.stop()
    print('*******')
    print('Connecting redis:')
    print(settings.redis)
    print('*******')
txtimeout(txredisapi.Connection(
dbid=settings.redis.dbid,
host=settings.redis.host,
port=settings.redis.port,
password=settings.redis.password
), settings.redis.timeout, timeout).addCallback(run_server)
reactor.run()
if __name__ == '__main__':
entry_point()
|
|
# Copyright 2015-2016 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class to parse trace.dat dumps"""
import re
import pandas as pd
def trace_parser_explode_array(string, array_lengths):
"""Explode an array in the trace into individual elements for easy parsing
Basically, turn :code:`load={1 1 2 2}` into :code:`load0=1 load1=1 load2=2
load3=2`.
:param string: Input string from the trace
:type string: str
:param array_lengths: A dictionary of array names and their
        expected length. If we get an array that's shorter than the expected
length, additional keys have to be introduced with value 0 to
compensate.
:type array_lengths: dict
For example:
::
trace_parser_explode_array(string="load={1 2}",
array_lengths={"load": 4})
"load0=1 load1=2 load2=0 load3=0"
"""
while True:
match = re.search(r"[^ ]+={[^}]+}", string)
if match is None:
break
to_explode = match.group()
col_basename = re.match(r"([^=]+)=", to_explode).groups()[0]
vals_str = re.search(r"{(.+)}", to_explode).groups()[0]
vals_array = vals_str.split(' ')
exploded_str = ""
for (idx, val) in enumerate(vals_array):
exploded_str += "{}{}={} ".format(col_basename, idx, val)
vals_added = len(vals_array)
if vals_added < array_lengths[col_basename]:
for idx in range(vals_added, array_lengths[col_basename]):
exploded_str += "{}{}=0 ".format(col_basename, idx)
exploded_str = exploded_str[:-1]
begin_idx = match.start()
end_idx = match.end()
string = string[:begin_idx] + exploded_str + string[end_idx:]
return string
class Base(object):
"""Base class to parse trace.dat dumps.
Don't use directly, create a subclass that has a unique_word class
variable. unique_word is a string that can uniquely identify
lines in the trace that correspond to this event. This is usually
    the trace_name (optionally followed by a colon,
e.g. "sched_switch:") but it can be anything else for trace points
generated using trace_printk().
:param parse_raw: If :code:`True`, raw trace data (-R option) to
trace-cmd will be used
This class acts as a base class for all TRAPpy events
"""
def __init__(self, parse_raw=False):
self.data_frame = pd.DataFrame()
self.data_array = []
self.time_array = []
self.comm_array = []
self.pid_array = []
self.cpu_array = []
self.parse_raw = parse_raw
def finalize_object(self):
pass
def __get_trace_array_lengths(self):
"""Calculate the lengths of all arrays in the trace
Returns a dict with the name of each array found in the trace
as keys and their corresponding length as value
"""
from collections import defaultdict
pat_array = re.compile(r"([A-Za-z0-9_]+)={([^}]+)}")
ret = defaultdict(int)
for line in self.data_array:
while True:
match = re.search(pat_array, line)
if not match:
break
(array_name, array_elements) = match.groups()
array_len = len(array_elements.split(' '))
if array_len > ret[array_name]:
ret[array_name] = array_len
line = line[match.end():]
# Stop scanning if the trace doesn't have arrays
if len(ret) == 0:
break
return ret
def append_data(self, time, comm, pid, cpu, data):
"""Append data parsed from a line to the corresponding arrays
The :mod:`DataFrame` will be created from this when the whole trace
has been parsed.
:param time: The time for the line that was printed in the trace
:type time: float
:param comm: The command name or the execname from which the trace
line originated
:type comm: str
        :param pid: The PID of the process from which the trace
            line originated
        :type pid: int
        :param cpu: The CPU on which the trace line was emitted
        :type cpu: int
        :param data: The data for the matching line in the trace
        :type data: str
"""
self.time_array.append(time)
self.comm_array.append(comm)
self.pid_array.append(pid)
self.cpu_array.append(cpu)
self.data_array.append(data)
def generate_parsed_data(self):
for (comm, pid, cpu, data_str) in zip(self.comm_array, self.pid_array,
self.cpu_array, self.data_array):
data_dict = {"__comm": comm, "__pid": pid, "__cpu": cpu}
prev_key = None
for field in data_str.split():
if "=" not in field:
# Concatenation is supported only for "string" values
if type(data_dict[prev_key]) is not str:
continue
data_dict[prev_key] += ' ' + field
continue
(key, value) = field.split('=')
try:
value = int(value)
except ValueError:
pass
data_dict[key] = value
prev_key = key
yield data_dict
def create_dataframe(self):
"""Create the final :mod:`pandas.DataFrame`"""
if not self.time_array:
return
trace_arr_lengths = self.__get_trace_array_lengths()
if trace_arr_lengths.items():
for (idx, val) in enumerate(self.data_array):
expl_val = trace_parser_explode_array(val, trace_arr_lengths)
self.data_array[idx] = expl_val
time_idx = pd.Index(self.time_array, name="Time")
self.data_frame = pd.DataFrame(self.generate_parsed_data(), index=time_idx)
self.time_array = []
self.comm_array = []
self.pid_array = []
self.cpu_array = []
self.data_array = []
def write_csv(self, fname):
"""Write the csv info into a CSV file
:param fname: The name of the CSV file
:type fname: str
"""
self.data_frame.to_csv(fname)
def normalize_time(self, basetime):
"""Substract basetime from the Time of the data frame
:param basetime: The offset which needs to be subtracted from
the time index
:type basetime: float
"""
if basetime and not self.data_frame.empty:
self.data_frame.reset_index(inplace=True)
self.data_frame["Time"] = self.data_frame["Time"] - basetime
self.data_frame.set_index("Time", inplace=True)
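# Illustrative sketch (an assumption, not part of the original module): the class
# docstring above asks subclasses to define a unique_word that identifies their
# lines in the trace. A minimal subclass for the standard sched_switch event could
# look like this; real parsers may add registration and field post-processing.
class ExampleSchedSwitch(Base):
    """Example subclass for illustration only: parses sched_switch events"""
    unique_word = "sched_switch:"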
|
|
import struct
class Packet(object):
"""Creates ICMPv4 and v6 packets.
header
two-item sequence containing the type and code of the packet,
respectively.
version
    Automatically set to the version of the protocol being used, or None if ambiguous.
data
Contains data of the packet. Can only assign a subclass of string
or None.
packet
binary representation of packet.
EXAMPLE: (using Python as root)
    >>> import icmplib
>>> icmplib.ping('127.0.0.1')
"""
header_table = {
0 : (0, 4),
#3 : (15, 4), Overlap with ICMPv6
3 : (15, None),
#4 : (0, 4), Deprecated by RFC 1812
5 : (3, 4),
8 : (0, 4),
9 : (0, 4),
10: (0, 4),
11: (1, 4),
12: (1, 4),
13: (0, 4),
14: (0, 4),
15: (0, 4),
16: (0, 4),
17: (0, 4),
18: (0, 4),
1 : (4, 6),
2 : (0, 6),
#3 : (2, 6), Overlap with ICMPv4
#4 : (2, 6), Type of 4 in ICMPv4 is deprecated
4 : (2, None),
128: (0, 6),
129: (0, 6),
130: (0, 6),
131: (0, 6),
132: (0, 6),
133: (0, 6),
134: (0, 6),
135: (0, 6),
136: (0, 6),
137: (0, 6),
}
def _setheader(self, header):
"""Set type, code, and version for the packet."""
if len(header) != 2:
raise ValueError("header data must be in a two-item sequence")
type_, code = header
try:
max_range, version = self.header_table[type_]
except KeyError:
raise ValueError("%s is not a valid type argument" % type_)
else:
if code > max_range:
raise ValueError("%s is not a valid code value for type %s" %\
                                 (code, type_))
self._type, self._code, self._version = type_, code, version
header = property(lambda self: (self._type, self._code), _setheader,
doc="type and code of packet")
version = property(lambda self: self._version,
doc="Protocol version packet is using or None if "
"ambiguous")
def _setdata(self, data):
"""Setter for self.data; will only accept a string or None type."""
if not isinstance(data, (str, bytes)) and not isinstance(data, type(None)):
raise TypeError("value must be a subclass of string or None, "
"not %s" % type(data))
self._data = data
data = property(lambda self: self._data, _setdata,
doc="data contained within the packet")
def __init__(self, header=(None, None), data=None):
"""Set instance attributes if given."""
#XXX: Consider using __slots__
# self._version initialized by setting self.header
self.header = header
self.data = data
def __repr__(self):
return "<ICMPv%s packet: type = %s, code = %s, data length = %s>" % \
            (self.version, self._type, self._code,
             len(self.data) if self.data is not None else 0)
def create(self):
"""Return a packet."""
# Kept as a separate method instead of rolling into 'packet' property so
# as to allow passing method around without having to define a lambda
# method.
args = [self.header[0], self.header[1], 0]
pack_format = "!BBH"
        if self.data is not None:
pack_format += "%ss" % len(self.data)
args.append(self.data)
# ICMPv6 has the IP stack calculate the checksum
# For ambiguous cases, just go ahead and calculate it just in case
if self.version == 4 or not self.version:
args[2] = self._checksum(struct.pack(pack_format, *args))
return struct.pack(pack_format, *args)
packet = property(create,
doc="Complete ICMP packet")
def _checksum(self, checksum_packet):
"""Calculate checksum"""
byte_count = len(checksum_packet)
#XXX: Think there is an error here about odd number of bytes
if byte_count % 2:
odd_byte = ord(checksum_packet[-1])
checksum_packet = checksum_packet[:-1]
else:
odd_byte = 0
two_byte_chunks = struct.unpack("!%dH" % (len(checksum_packet)/2),
checksum_packet)
total = 0
for two_bytes in two_byte_chunks:
total += two_bytes
else:
total += odd_byte
total = (total >> 16) + (total & 0xFFFF)
total += total >> 16
        return ~total & 0xFFFF
def parse(cls, packet):
"""Parse ICMP packet and return an instance of Packet"""
        string_len = len(packet) - 4  # Skip the 4-byte ICMP header when sizing the data
pack_format = "!BBH"
if string_len:
pack_format += "%ss" % string_len
unpacked_packet = struct.unpack(pack_format, packet)
type, code, checksum = unpacked_packet[:3]
try:
data = unpacked_packet[3]
except IndexError:
data = None
return cls((type, code), data)
parse = classmethod(parse)
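# Illustrative usage sketch (never called; the payload is made up): build an ICMPv4
# echo request (type 8, code 0) with the class above and round-trip it through
# parse(). parse() expects the 20-byte IP header to have been stripped already, as
# ping() below does with buf[20:].
def _example_packet_roundtrip():
    echo_request = Packet((8, 0), b"ping-payload")
    raw = echo_request.packet          # type/code/checksum header plus the data
    parsed = Packet.parse(raw)
    return parsed.header, parsed.data  # ((8, 0), "ping-payload")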
#------------
# ping.py
#------------
import struct,socket,sys,time, os
#from icmplib import Packet
def main(addr):
s = socket.socket(socket.AF_INET,socket.SOCK_RAW)
s.connect((addr,))
datalen = 56
BUFSIZE = 1500
def ping(addr):
print( "PING (%s): %d data bytes" % (addr,datalen) )
## create socket
s = socket.socket(socket.AF_INET,socket.SOCK_RAW,
socket.getprotobyname('icmp'))
s.connect((addr,22))
## setuid back to normal user
os.setuid(os.getuid())
seq_num = 0
packet_count = 0
process_id = os.getpid()
base_packet = Packet((8, 0))
while 1:
## create ping packet
seq_num += 1
pdata = struct.pack("!HHd",process_id,seq_num,time.time())
## send initial packet
base_packet.data = pdata
s.send(base_packet.packet)
## recv packet
buf = s.recv(BUFSIZE)
current_time = time.time()
## parse packet; remove IP header first
r = Packet.parse(buf[20:])
## parse ping data
(ident,seq,timestamp) = struct.unpack("!HHd",r.data)
        ## calculate roundtrip time
rtt = current_time - timestamp
rtt *= 1000
print( "%d bytes from %s: id=%s, seq=%u, rtt=%.3f ms" % (len(buf), addr, ident, seq, rtt) )
time.sleep(1)
if __name__=='__main__':
import sys
ping(sys.argv[1])
|
|
# --------------------------------------------------------
# Fast/er R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Bharath Hariharan
# --------------------------------------------------------
import xml.etree.ElementTree as ET
import os
import cPickle
import numpy as np
import ipdb
def parse_rec(filename):
""" Parse a PASCAL VOC xml file """
tree = ET.parse(filename)
objects = []
for obj in tree.findall('object'):
obj_struct = {}
obj_struct['name'] = obj.find('name').text
obj_struct['pose'] = obj.find('pose').text
obj_struct['truncated'] = int(obj.find('truncated').text)
obj_struct['difficult'] = int(obj.find('difficult').text)
bbox = obj.find('bndbox')
obj_struct['bbox'] = [int(bbox.find('xmin').text),
int(bbox.find('ymin').text),
int(bbox.find('xmax').text),
int(bbox.find('ymax').text)]
objects.append(obj_struct)
return objects
def voc_ap(rec, prec, use_07_metric=False):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
VOC 07 11 point method (default:False).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap
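# Illustrative sanity check (never called; numbers are made up): a single detection
# matching the single ground-truth box gives recall = precision = 1, and both the
# 11-point and the area-based AP evaluate to ~1.0.
def _example_voc_ap():
    rec = np.array([1.0])
    prec = np.array([1.0])
    return voc_ap(rec, prec), voc_ap(rec, prec, use_07_metric=True)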
def voc_eval(detpath,
annopath,
imagesetfile,
classname,
cachedir,
ovthresh=0.5,
use_07_metric=False):
"""rec, prec, ap = voc_eval(detpath,
annopath,
imagesetfile,
classname,
[ovthresh],
[use_07_metric])
Top level function that does the PASCAL VOC evaluation.
detpath: Path to detections
detpath.format(classname) should produce the detection results file.
annopath: Path to annotations
annopath.format(imagename) should be the xml annotations file.
imagesetfile: Text file containing the list of images, one image per line.
classname: Category name (duh)
cachedir: Directory for caching the annotations
[ovthresh]: Overlap threshold (default = 0.5)
[use_07_metric]: Whether to use VOC07's 11 point AP computation
(default False)
"""
# assumes detections are in detpath.format(classname)
# assumes annotations are in annopath.format(imagename)
# assumes imagesetfile is a text file with each line an image name
# cachedir caches the annotations in a pickle file
# first load gt
if not os.path.isdir(cachedir):
os.mkdir(cachedir)
cachefile = os.path.join(cachedir, 'annots.pkl')
# read list of images
with open(imagesetfile, 'r') as f:
lines = f.readlines()
imagenames = [x.strip() for x in lines]
if 'pg' in imagenames:
ipdb.set_trace()
if not os.path.isfile(cachefile):
# load annots
recs = {}
for i, imagename in enumerate(imagenames):
recs[imagename] = parse_rec(annopath.format(imagename))
if i % 100 == 0:
print 'Reading annotation for {:d}/{:d}'.format(
i + 1, len(imagenames))
# save
print 'Saving cached annotations to {:s}'.format(cachefile)
with open(cachefile, 'w') as f:
cPickle.dump(recs, f)
else:
# load
with open(cachefile, 'r') as f:
recs = cPickle.load(f)
# extract gt objects for this class
class_recs = {}
npos = 0
for imagename in imagenames:
R = [obj for obj in recs[imagename] if obj['name'] == classname]
bbox = np.array([x['bbox'] for x in R])
difficult = np.array([x['difficult'] for x in R]).astype(np.bool)
det = [False] * len(R)
npos = npos + sum(~difficult)
class_recs[imagename] = {'bbox': bbox,
'difficult': difficult,
'det': det}
# read dets
detfile = detpath.format(classname)
with open(detfile, 'r') as f:
lines = f.readlines()
    if any(lines):
splitlines = [x.strip().split(' ') for x in lines]
image_ids = [x[0] for x in splitlines]
confidence = np.array([float(x[1]) for x in splitlines])
BB = np.array([[float(z) for z in x[2:]] for x in splitlines])
# sort by confidence
sorted_ind = np.argsort(-confidence)
sorted_scores = np.sort(-confidence)
BB = BB[sorted_ind, :]
image_ids = [image_ids[x] for x in sorted_ind]
# go down dets and mark TPs and FPs
nd = len(image_ids)
tp = np.zeros(nd)
fp = np.zeros(nd)
for d in range(nd):
R = class_recs[image_ids[d]]
bb = BB[d, :].astype(float)
ovmax = -np.inf
BBGT = R['bbox'].astype(float)
if BBGT.size > 0:
# compute overlaps
# intersection
ixmin = np.maximum(BBGT[:, 0], bb[0])
iymin = np.maximum(BBGT[:, 1], bb[1])
ixmax = np.minimum(BBGT[:, 2], bb[2])
iymax = np.minimum(BBGT[:, 3], bb[3])
iw = np.maximum(ixmax - ixmin + 1., 0.)
ih = np.maximum(iymax - iymin + 1., 0.)
inters = iw * ih
# union
uni = ((bb[2] - bb[0] + 1.) * (bb[3] - bb[1] + 1.) +
(BBGT[:, 2] - BBGT[:, 0] + 1.) *
(BBGT[:, 3] - BBGT[:, 1] + 1.) - inters)
overlaps = inters / uni
ovmax = np.max(overlaps)
jmax = np.argmax(overlaps)
if ovmax > ovthresh:
if not R['difficult'][jmax]:
if not R['det'][jmax]:
tp[d] = 1.
R['det'][jmax] = 1
else:
fp[d] = 1.
else:
fp[d] = 1.
# compute precision recall
fp = np.cumsum(fp)
tp = np.cumsum(tp)
rec = tp / float(npos)
# avoid divide by zero in case the first detection matches a difficult
# ground truth
prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
ap = voc_ap(rec, prec, use_07_metric)
else:
rec = -1
prec = -1
ap = -1
return rec, prec, ap
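# Illustrative call sketch (all paths, the class name and the cache directory are
# placeholders, not files in this repository): evaluating a single class of a
# VOC-style dataset at IoU 0.5 with the area-based AP.
#
#   rec, prec, ap = voc_eval(detpath='results/det_test_{:s}.txt',
#                            annopath='VOCdevkit/VOC2007/Annotations/{:s}.xml',
#                            imagesetfile='VOCdevkit/VOC2007/ImageSets/Main/test.txt',
#                            classname='car',
#                            cachedir='annotations_cache',
#                            ovthresh=0.5,
#                            use_07_metric=False)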
|
|
"""Test the validation module"""
from __future__ import division
import sys
import warnings
import tempfile
import os
from time import sleep
import numpy as np
from scipy.sparse import coo_matrix, csr_matrix
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_warns
from sklearn.utils.mocking import CheckingClassifier, MockDataFrame
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import cross_val_predict
from sklearn.model_selection import permutation_test_score
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import LeaveOneOut
from sklearn.model_selection import LeaveOneLabelOut
from sklearn.model_selection import LeavePLabelOut
from sklearn.model_selection import LabelKFold
from sklearn.model_selection import LabelShuffleSplit
from sklearn.model_selection import learning_curve
from sklearn.model_selection import validation_curve
from sklearn.model_selection._validation import _check_is_permutation
from sklearn.datasets import make_regression
from sklearn.datasets import load_boston
from sklearn.datasets import load_iris
from sklearn.metrics import explained_variance_score
from sklearn.metrics import make_scorer
from sklearn.metrics import precision_score
from sklearn.linear_model import Ridge, LogisticRegression
from sklearn.linear_model import PassiveAggressiveClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.cluster import KMeans
from sklearn.preprocessing import Imputer
from sklearn.pipeline import Pipeline
from sklearn.externals.six.moves import cStringIO as StringIO
from sklearn.base import BaseEstimator
from sklearn.multiclass import OneVsRestClassifier
from sklearn.utils import shuffle
from sklearn.datasets import make_classification
from sklearn.datasets import make_multilabel_classification
from sklearn.model_selection.tests.test_split import MockClassifier
try:
WindowsError
except NameError:
WindowsError = None
class MockImprovingEstimator(BaseEstimator):
"""Dummy classifier to test the learning curve"""
def __init__(self, n_max_train_sizes):
self.n_max_train_sizes = n_max_train_sizes
self.train_sizes = 0
self.X_subset = None
def fit(self, X_subset, y_subset=None):
self.X_subset = X_subset
self.train_sizes = X_subset.shape[0]
return self
def predict(self, X):
raise NotImplementedError
def score(self, X=None, Y=None):
# training score becomes worse (2 -> 1), test error better (0 -> 1)
if self._is_training_data(X):
return 2. - float(self.train_sizes) / self.n_max_train_sizes
else:
return float(self.train_sizes) / self.n_max_train_sizes
def _is_training_data(self, X):
return X is self.X_subset
class MockIncrementalImprovingEstimator(MockImprovingEstimator):
"""Dummy classifier that provides partial_fit"""
def __init__(self, n_max_train_sizes):
super(MockIncrementalImprovingEstimator,
self).__init__(n_max_train_sizes)
self.x = None
def _is_training_data(self, X):
return self.x in X
def partial_fit(self, X, y=None, **params):
self.train_sizes += X.shape[0]
self.x = X[0]
class MockEstimatorWithParameter(BaseEstimator):
"""Dummy classifier to test the validation curve"""
def __init__(self, param=0.5):
self.X_subset = None
self.param = param
def fit(self, X_subset, y_subset):
self.X_subset = X_subset
self.train_sizes = X_subset.shape[0]
return self
def predict(self, X):
raise NotImplementedError
def score(self, X=None, y=None):
return self.param if self._is_training_data(X) else 1 - self.param
def _is_training_data(self, X):
return X is self.X_subset
# XXX: use 2D array, since 1D X is being detected as a single sample in
# check_consistent_length
X = np.ones((10, 2))
X_sparse = coo_matrix(X)
y = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4])
# The number of samples per class needs to be > n_folds, for StratifiedKFold(3)
y2 = np.array([1, 1, 1, 2, 2, 2, 3, 3, 3, 3])
def test_cross_val_score():
clf = MockClassifier()
for a in range(-10, 10):
clf.a = a
# Smoke test
scores = cross_val_score(clf, X, y2)
assert_array_equal(scores, clf.score(X, y2))
# test with multioutput y
multioutput_y = np.column_stack([y2, y2[::-1]])
scores = cross_val_score(clf, X_sparse, multioutput_y)
assert_array_equal(scores, clf.score(X_sparse, multioutput_y))
scores = cross_val_score(clf, X_sparse, y2)
assert_array_equal(scores, clf.score(X_sparse, y2))
# test with multioutput y
scores = cross_val_score(clf, X_sparse, multioutput_y)
assert_array_equal(scores, clf.score(X_sparse, multioutput_y))
# test with X and y as list
list_check = lambda x: isinstance(x, list)
clf = CheckingClassifier(check_X=list_check)
scores = cross_val_score(clf, X.tolist(), y2.tolist())
clf = CheckingClassifier(check_y=list_check)
scores = cross_val_score(clf, X, y2.tolist())
assert_raises(ValueError, cross_val_score, clf, X, y2, scoring="sklearn")
    # test with 3d X
X_3d = X[:, :, np.newaxis]
clf = MockClassifier(allow_nd=True)
scores = cross_val_score(clf, X_3d, y2)
clf = MockClassifier(allow_nd=False)
assert_raises(ValueError, cross_val_score, clf, X_3d, y2)
def test_cross_val_score_predict_labels():
# Check if ValueError (when labels is None) propagates to cross_val_score
# and cross_val_predict
# And also check if labels is correctly passed to the cv object
X, y = make_classification(n_samples=20, n_classes=2, random_state=0)
clf = SVC(kernel="linear")
label_cvs = [LeaveOneLabelOut(), LeavePLabelOut(2), LabelKFold(),
LabelShuffleSplit()]
for cv in label_cvs:
assert_raise_message(ValueError,
"The labels parameter should not be None",
cross_val_score, estimator=clf, X=X, y=y, cv=cv)
assert_raise_message(ValueError,
"The labels parameter should not be None",
cross_val_predict, estimator=clf, X=X, y=y, cv=cv)
def test_cross_val_score_pandas():
# check cross_val_score doesn't destroy pandas dataframe
types = [(MockDataFrame, MockDataFrame)]
try:
from pandas import Series, DataFrame
types.append((Series, DataFrame))
except ImportError:
pass
for TargetType, InputFeatureType in types:
# X dataframe, y series
        # 3 fold cross val is used so we need at least 3 samples per class
X_df, y_ser = InputFeatureType(X), TargetType(y2)
check_df = lambda x: isinstance(x, InputFeatureType)
check_series = lambda x: isinstance(x, TargetType)
clf = CheckingClassifier(check_X=check_df, check_y=check_series)
cross_val_score(clf, X_df, y_ser)
def test_cross_val_score_mask():
# test that cross_val_score works with boolean masks
svm = SVC(kernel="linear")
iris = load_iris()
X, y = iris.data, iris.target
kfold = KFold(5)
scores_indices = cross_val_score(svm, X, y, cv=kfold)
kfold = KFold(5)
cv_masks = []
for train, test in kfold.split(X, y):
mask_train = np.zeros(len(y), dtype=np.bool)
mask_test = np.zeros(len(y), dtype=np.bool)
mask_train[train] = 1
mask_test[test] = 1
cv_masks.append((train, test))
scores_masks = cross_val_score(svm, X, y, cv=cv_masks)
assert_array_equal(scores_indices, scores_masks)
def test_cross_val_score_precomputed():
# test for svm with precomputed kernel
svm = SVC(kernel="precomputed")
iris = load_iris()
X, y = iris.data, iris.target
linear_kernel = np.dot(X, X.T)
score_precomputed = cross_val_score(svm, linear_kernel, y)
svm = SVC(kernel="linear")
score_linear = cross_val_score(svm, X, y)
assert_array_equal(score_precomputed, score_linear)
# Error raised for non-square X
svm = SVC(kernel="precomputed")
assert_raises(ValueError, cross_val_score, svm, X, y)
# test error is raised when the precomputed kernel is not array-like
# or sparse
assert_raises(ValueError, cross_val_score, svm,
linear_kernel.tolist(), y)
def test_cross_val_score_fit_params():
clf = MockClassifier()
n_samples = X.shape[0]
n_classes = len(np.unique(y))
W_sparse = coo_matrix((np.array([1]), (np.array([1]), np.array([0]))),
shape=(10, 1))
P_sparse = coo_matrix(np.eye(5))
DUMMY_INT = 42
DUMMY_STR = '42'
DUMMY_OBJ = object()
def assert_fit_params(clf):
# Function to test that the values are passed correctly to the
# classifier arguments for non-array type
assert_equal(clf.dummy_int, DUMMY_INT)
assert_equal(clf.dummy_str, DUMMY_STR)
assert_equal(clf.dummy_obj, DUMMY_OBJ)
fit_params = {'sample_weight': np.ones(n_samples),
'class_prior': np.ones(n_classes) / n_classes,
'sparse_sample_weight': W_sparse,
'sparse_param': P_sparse,
'dummy_int': DUMMY_INT,
'dummy_str': DUMMY_STR,
'dummy_obj': DUMMY_OBJ,
'callback': assert_fit_params}
cross_val_score(clf, X, y, fit_params=fit_params)
def test_cross_val_score_score_func():
clf = MockClassifier()
_score_func_args = []
def score_func(y_test, y_predict):
_score_func_args.append((y_test, y_predict))
return 1.0
with warnings.catch_warnings(record=True):
scoring = make_scorer(score_func)
score = cross_val_score(clf, X, y, scoring=scoring)
assert_array_equal(score, [1.0, 1.0, 1.0])
assert len(_score_func_args) == 3
def test_cross_val_score_errors():
class BrokenEstimator:
pass
assert_raises(TypeError, cross_val_score, BrokenEstimator(), X)
def test_cross_val_score_with_score_func_classification():
iris = load_iris()
clf = SVC(kernel='linear')
# Default score (should be the accuracy score)
scores = cross_val_score(clf, iris.data, iris.target, cv=5)
assert_array_almost_equal(scores, [0.97, 1., 0.97, 0.97, 1.], 2)
# Correct classification score (aka. zero / one score) - should be the
# same as the default estimator score
zo_scores = cross_val_score(clf, iris.data, iris.target,
scoring="accuracy", cv=5)
assert_array_almost_equal(zo_scores, [0.97, 1., 0.97, 0.97, 1.], 2)
    # F1 score (classes are balanced so f1_score should be equal to zero/one
    # score)
f1_scores = cross_val_score(clf, iris.data, iris.target,
scoring="f1_weighted", cv=5)
assert_array_almost_equal(f1_scores, [0.97, 1., 0.97, 0.97, 1.], 2)
def test_cross_val_score_with_score_func_regression():
X, y = make_regression(n_samples=30, n_features=20, n_informative=5,
random_state=0)
reg = Ridge()
# Default score of the Ridge regression estimator
scores = cross_val_score(reg, X, y, cv=5)
assert_array_almost_equal(scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
    # R2 score (aka. coefficient of determination) - should be the
# same as the default estimator score
r2_scores = cross_val_score(reg, X, y, scoring="r2", cv=5)
assert_array_almost_equal(r2_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
# Mean squared error; this is a loss function, so "scores" are negative
mse_scores = cross_val_score(reg, X, y, cv=5, scoring="mean_squared_error")
expected_mse = np.array([-763.07, -553.16, -274.38, -273.26, -1681.99])
assert_array_almost_equal(mse_scores, expected_mse, 2)
# Explained variance
scoring = make_scorer(explained_variance_score)
ev_scores = cross_val_score(reg, X, y, cv=5, scoring=scoring)
assert_array_almost_equal(ev_scores, [0.94, 0.97, 0.97, 0.99, 0.92], 2)
def test_permutation_score():
iris = load_iris()
X = iris.data
X_sparse = coo_matrix(X)
y = iris.target
svm = SVC(kernel='linear')
cv = StratifiedKFold(2)
score, scores, pvalue = permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
assert_greater(score, 0.9)
assert_almost_equal(pvalue, 0.0, 1)
score_label, _, pvalue_label = permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy",
labels=np.ones(y.size), random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# check that we obtain the same results with a sparse representation
svm_sparse = SVC(kernel='linear')
cv_sparse = StratifiedKFold(2)
score_label, _, pvalue_label = permutation_test_score(
svm_sparse, X_sparse, y, n_permutations=30, cv=cv_sparse,
scoring="accuracy", labels=np.ones(y.size), random_state=0)
assert_true(score_label == score)
assert_true(pvalue_label == pvalue)
# test with custom scoring object
def custom_score(y_true, y_pred):
return (((y_true == y_pred).sum() - (y_true != y_pred).sum()) /
y_true.shape[0])
scorer = make_scorer(custom_score)
score, _, pvalue = permutation_test_score(
svm, X, y, n_permutations=100, scoring=scorer, cv=cv, random_state=0)
assert_almost_equal(score, .93, 2)
assert_almost_equal(pvalue, 0.01, 3)
# set random y
y = np.mod(np.arange(len(y)), 3)
score, scores, pvalue = permutation_test_score(
svm, X, y, n_permutations=30, cv=cv, scoring="accuracy")
assert_less(score, 0.5)
assert_greater(pvalue, 0.2)
def test_permutation_test_score_allow_nans():
# Check that permutation_test_score allows input data with NaNs
X = np.arange(200, dtype=np.float64).reshape(10, -1)
X[2, :] = np.nan
y = np.repeat([0, 1], X.shape[0] / 2)
p = Pipeline([
('imputer', Imputer(strategy='mean', missing_values='NaN')),
('classifier', MockClassifier()),
])
permutation_test_score(p, X, y, cv=5)
def test_cross_val_score_allow_nans():
# Check that cross_val_score allows input data with NaNs
X = np.arange(200, dtype=np.float64).reshape(10, -1)
X[2, :] = np.nan
y = np.repeat([0, 1], X.shape[0] / 2)
p = Pipeline([
('imputer', Imputer(strategy='mean', missing_values='NaN')),
('classifier', MockClassifier()),
])
cross_val_score(p, X, y, cv=5)
def test_cross_val_score_multilabel():
X = np.array([[-3, 4], [2, 4], [3, 3], [0, 2], [-3, 1],
[-2, 1], [0, 0], [-2, -1], [-1, -2], [1, -2]])
y = np.array([[1, 1], [0, 1], [0, 1], [0, 1], [1, 1],
[0, 1], [1, 0], [1, 1], [1, 0], [0, 0]])
clf = KNeighborsClassifier(n_neighbors=1)
scoring_micro = make_scorer(precision_score, average='micro')
scoring_macro = make_scorer(precision_score, average='macro')
scoring_samples = make_scorer(precision_score, average='samples')
score_micro = cross_val_score(clf, X, y, scoring=scoring_micro, cv=5)
score_macro = cross_val_score(clf, X, y, scoring=scoring_macro, cv=5)
score_samples = cross_val_score(clf, X, y, scoring=scoring_samples, cv=5)
assert_almost_equal(score_micro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 3])
assert_almost_equal(score_macro, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])
assert_almost_equal(score_samples, [1, 1 / 2, 3 / 4, 1 / 2, 1 / 4])
def test_cross_val_predict():
boston = load_boston()
X, y = boston.data, boston.target
cv = KFold()
est = Ridge()
# Naive loop (should be same as cross_val_predict):
preds2 = np.zeros_like(y)
for train, test in cv.split(X, y):
est.fit(X[train], y[train])
preds2[test] = est.predict(X[test])
preds = cross_val_predict(est, X, y, cv=cv)
assert_array_almost_equal(preds, preds2)
preds = cross_val_predict(est, X, y)
assert_equal(len(preds), len(y))
cv = LeaveOneOut()
preds = cross_val_predict(est, X, y, cv=cv)
assert_equal(len(preds), len(y))
Xsp = X.copy()
Xsp *= (Xsp > np.median(Xsp))
Xsp = coo_matrix(Xsp)
preds = cross_val_predict(est, Xsp, y)
assert_array_almost_equal(len(preds), len(y))
preds = cross_val_predict(KMeans(), X)
assert_equal(len(preds), len(y))
class BadCV():
def split(self, X, y=None, labels=None):
for i in range(4):
yield np.array([0, 1, 2, 3]), np.array([4, 5, 6, 7, 8])
assert_raises(ValueError, cross_val_predict, est, X, y, cv=BadCV())
def test_cross_val_predict_input_types():
iris = load_iris()
X, y = iris.data, iris.target
X_sparse = coo_matrix(X)
multioutput_y = np.column_stack([y, y[::-1]])
clf = Ridge(fit_intercept=False, random_state=0)
    # 3 fold cv is used --> at least 3 samples per class
# Smoke test
predictions = cross_val_predict(clf, X, y)
assert_equal(predictions.shape, (150,))
# test with multioutput y
predictions = cross_val_predict(clf, X_sparse, multioutput_y)
assert_equal(predictions.shape, (150, 2))
predictions = cross_val_predict(clf, X_sparse, y)
assert_array_equal(predictions.shape, (150,))
# test with multioutput y
predictions = cross_val_predict(clf, X_sparse, multioutput_y)
assert_array_equal(predictions.shape, (150, 2))
# test with X and y as list
list_check = lambda x: isinstance(x, list)
clf = CheckingClassifier(check_X=list_check)
predictions = cross_val_predict(clf, X.tolist(), y.tolist())
clf = CheckingClassifier(check_y=list_check)
predictions = cross_val_predict(clf, X, y.tolist())
    # test with 3d X
X_3d = X[:, :, np.newaxis]
check_3d = lambda x: x.ndim == 3
clf = CheckingClassifier(check_X=check_3d)
predictions = cross_val_predict(clf, X_3d, y)
assert_array_equal(predictions.shape, (150,))
def test_cross_val_predict_pandas():
    # check cross_val_predict doesn't destroy pandas dataframe
types = [(MockDataFrame, MockDataFrame)]
try:
from pandas import Series, DataFrame
types.append((Series, DataFrame))
except ImportError:
pass
for TargetType, InputFeatureType in types:
# X dataframe, y series
X_df, y_ser = InputFeatureType(X), TargetType(y2)
check_df = lambda x: isinstance(x, InputFeatureType)
check_series = lambda x: isinstance(x, TargetType)
clf = CheckingClassifier(check_X=check_df, check_y=check_series)
cross_val_predict(clf, X_df, y_ser)
def test_cross_val_score_sparse_fit_params():
iris = load_iris()
X, y = iris.data, iris.target
clf = MockClassifier()
fit_params = {'sparse_sample_weight': coo_matrix(np.eye(X.shape[0]))}
a = cross_val_score(clf, X, y, fit_params=fit_params)
assert_array_equal(a, np.ones(3))
def test_learning_curve():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
with warnings.catch_warnings(record=True) as w:
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
if len(w) > 0:
raise RuntimeError("Unexpected warning: %r" % w[0].message)
assert_equal(train_scores.shape, (10, 3))
assert_equal(test_scores.shape, (10, 3))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_unsupervised():
X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y=None, cv=3, train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_verbose():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
old_stdout = sys.stdout
sys.stdout = StringIO()
try:
train_sizes, train_scores, test_scores = \
learning_curve(estimator, X, y, cv=3, verbose=1)
finally:
out = sys.stdout.getvalue()
sys.stdout.close()
sys.stdout = old_stdout
assert("[learning_curve]" in out)
def test_learning_curve_incremental_learning_not_possible():
X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
# The mockup does not have partial_fit()
estimator = MockImprovingEstimator(1)
assert_raises(ValueError, learning_curve, estimator, X, y,
exploit_incremental_learning=True)
def test_learning_curve_incremental_learning():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockIncrementalImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=3, exploit_incremental_learning=True,
train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_incremental_learning_unsupervised():
X, _ = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockIncrementalImprovingEstimator(20)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y=None, cv=3, exploit_incremental_learning=True,
train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_learning_curve_batch_and_incremental_learning_are_equal():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
train_sizes = np.linspace(0.2, 1.0, 5)
estimator = PassiveAggressiveClassifier(n_iter=1, shuffle=False)
train_sizes_inc, train_scores_inc, test_scores_inc = \
learning_curve(
estimator, X, y, train_sizes=train_sizes,
cv=3, exploit_incremental_learning=True)
train_sizes_batch, train_scores_batch, test_scores_batch = \
learning_curve(
estimator, X, y, cv=3, train_sizes=train_sizes,
exploit_incremental_learning=False)
assert_array_equal(train_sizes_inc, train_sizes_batch)
assert_array_almost_equal(train_scores_inc.mean(axis=1),
train_scores_batch.mean(axis=1))
assert_array_almost_equal(test_scores_inc.mean(axis=1),
test_scores_batch.mean(axis=1))
def test_learning_curve_n_sample_range_out_of_bounds():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0, 1])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0.0, 1.0])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0.1, 1.1])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[0, 20])
assert_raises(ValueError, learning_curve, estimator, X, y, cv=3,
train_sizes=[1, 21])
def test_learning_curve_remove_duplicate_sample_sizes():
X, y = make_classification(n_samples=3, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(2)
train_sizes, _, _ = assert_warns(
RuntimeWarning, learning_curve, estimator, X, y, cv=3,
train_sizes=np.linspace(0.33, 1.0, 3))
assert_array_equal(train_sizes, [1, 2])
def test_learning_curve_with_boolean_indices():
X, y = make_classification(n_samples=30, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
estimator = MockImprovingEstimator(20)
cv = KFold(n_folds=3)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=cv, train_sizes=np.linspace(0.1, 1.0, 10))
assert_array_equal(train_sizes, np.linspace(2, 20, 10))
assert_array_almost_equal(train_scores.mean(axis=1),
np.linspace(1.9, 1.0, 10))
assert_array_almost_equal(test_scores.mean(axis=1),
np.linspace(0.1, 1.0, 10))
def test_validation_curve():
X, y = make_classification(n_samples=2, n_features=1, n_informative=1,
n_redundant=0, n_classes=2,
n_clusters_per_class=1, random_state=0)
param_range = np.linspace(0, 1, 10)
with warnings.catch_warnings(record=True) as w:
train_scores, test_scores = validation_curve(
MockEstimatorWithParameter(), X, y, param_name="param",
param_range=param_range, cv=2
)
if len(w) > 0:
raise RuntimeError("Unexpected warning: %r" % w[0].message)
assert_array_almost_equal(train_scores.mean(axis=1), param_range)
assert_array_almost_equal(test_scores.mean(axis=1), 1 - param_range)
def test_check_is_permutation():
p = np.arange(100)
assert_true(_check_is_permutation(p, 100))
assert_false(_check_is_permutation(np.delete(p, 23), 100))
p[0] = 23
assert_false(_check_is_permutation(p, 100))
def test_cross_val_predict_sparse_prediction():
# check that cross_val_predict gives same result for sparse and dense input
X, y = make_multilabel_classification(n_classes=2, n_labels=1,
allow_unlabeled=False,
return_indicator=True,
random_state=1)
X_sparse = csr_matrix(X)
y_sparse = csr_matrix(y)
classif = OneVsRestClassifier(SVC(kernel='linear'))
preds = cross_val_predict(classif, X, y, cv=10)
preds_sparse = cross_val_predict(classif, X_sparse, y_sparse, cv=10)
preds_sparse = preds_sparse.toarray()
assert_array_almost_equal(preds_sparse, preds)
def test_cross_val_predict_with_method():
iris = load_iris()
X, y = iris.data, iris.target
X, y = shuffle(X, y, random_state=0)
classes = len(set(y))
kfold = KFold(len(iris.target))
methods = ['decision_function', 'predict_proba', 'predict_log_proba']
for method in methods:
est = LogisticRegression()
predictions = cross_val_predict(est, X, y, method=method)
assert_equal(len(predictions), len(y))
expected_predictions = np.zeros([len(y), classes])
func = getattr(est, method)
# Naive loop (should be same as cross_val_predict):
for train, test in kfold.split(X, y):
est.fit(X[train], y[train])
expected_predictions[test] = func(X[test])
predictions = cross_val_predict(est, X, y, method=method,
cv=kfold)
assert_array_almost_equal(expected_predictions, predictions)
def test_score_memmap():
# Ensure a scalar score of memmap type is accepted
iris = load_iris()
X, y = iris.data, iris.target
clf = MockClassifier()
tf = tempfile.NamedTemporaryFile(mode='wb', delete=False)
tf.write(b'Hello world!!!!!')
tf.close()
scores = np.memmap(tf.name, dtype=np.float64)
score = np.memmap(tf.name, shape=(), mode='r', dtype=np.float64)
try:
cross_val_score(clf, X, y, scoring=lambda est, X, y: score)
# non-scalar should still fail
assert_raises(ValueError, cross_val_score, clf, X, y,
scoring=lambda est, X, y: scores)
finally:
# Best effort to release the mmap file handles before deleting the
# backing file under Windows
scores, score = None, None
for _ in range(3):
try:
os.unlink(tf.name)
break
except WindowsError:
sleep(1.)
|
|
import math
import glob
import os
import sys
#os.system("apt-get -y update")
#os.system("apt-get -y dist-upgrade")
#os.system("apt-get -y upgrade")
#os.system("apt-get install -y python-tk")
#os.system("pip install youtube-dl")
#os.system("youtube-dl -U")
#os.system("apt-get install -y ffmpeg")
os.system("rm -rf videos")
os.system("rm -rf Saves")
os.mkdir("Saves")
os.mkdir("videos")
#if not os.path.isdir("aud"):
# os.mkdir("aud")
import json
import tensorflow as tf
import numpy as np
#from sklearn.metrics import precision_recall_fscore_support
#import matplotlib
import librosa
import os
import json
import subprocess
import voice
import time
import shelve
SAMPLE_RATE= 5000
d = shelve.open("data")
def variable_summaries(var):
with tf.name_scope('summaries'):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean', mean)
with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))
tf.summary.scalar('stddev', stddev)
tf.summary.scalar('max', tf.reduce_max(var))
tf.summary.scalar('min', tf.reduce_min(var))
tf.summary.histogram('histogram', var)
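# Illustrative usage sketch (hypothetical variable; never called): attach the
# scalar/histogram summaries defined above to a weight tensor so they appear in
# TensorBoard next to the FileWriter created in run_training().
def _example_variable_summaries():
    example_w = tf.Variable(tf.random_normal([4, 2]), name="example_w")
    variable_summaries(example_w)
    return example_w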
def run(cmd, shell=True):
return subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=shell).communicate()[0]
#keys_train = []
#keys_test=[]
#vids = run("youtube-dl --get-id https://www.youtube.com/channel/UCYO_jab_esuFRV4b17AJtAw -i")
#vids = vids.split()
#vids_=vids[0:2]
#print vids_
#for i in vids_:
# keys_train = keys_train + voice.main(i)
#keys_test.append(voice.main(vids[6]))
#print keys_train
#print keys_test
#d["train"] = keys_train
#d["test"] = keys_test
keys_train = d["train"]
keys_test = d["test"]
for i in range(len(keys_train)):
keys_train[i][0]=keys_train[i][0]+".flac"
for i in range(len(keys_test)):
keys_test[i][1]=keys_test[i][1]+".flac"
d.close()
def run(cmd, shell=True):
return subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=shell).communicate()[0]
def extract_feature(file_name):
X, sample_rate = librosa.load(file_name)
stft = np.abs(librosa.stft(X))
mfccs = np.mean(librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=40).T,axis=0)
#chroma = np.mean(librosa.feature.chroma_stft(S=stft, sr=sample_rate).T,axis=0)
mel = np.mean(librosa.feature.melspectrogram(X, sr=sample_rate).T,axis=0)
contrast = np.mean(librosa.feature.spectral_contrast(S=stft, sr=sample_rate).T,axis=0)
#tonnetz = np.mean(librosa.feature.tonnetz(y=librosa.effects.harmonic(X), sr=sample_rate).T,axis=0)
#return mfccs,chroma,mel,contrast,tonnetz
return mfccs,mel,contrast
def join_sound(file_in, file_out,size=300, mode=0):
# print mode
if mode == 0:
inp,_ = librosa.load(file_in, sr=SAMPLE_RATE)
dur=librosa.get_duration(inp,SAMPLE_RATE)
x,_ = librosa.load(file_out,sr=SAMPLE_RATE)
#print x
new_dur= librosa.get_duration(x,sr=SAMPLE_RATE)
out, _ = librosa.load(file_out, sr=int(SAMPLE_RATE*dur/new_dur))
print out.shape
print inp.shape
if(len(inp)>len(out)):
inp=inp[0:len(out)]
else:
out=out[0:len(inp)]
#print len(out)
#print len(inp)
else:
inp = file_in
out = file_out
for i in range(len(inp)):
inp[i]= (inp[i]+1)/2
for i in range(len(out)):
out[i]= (out[i]+1)/2
out = out[0:int(size*math.floor(len(out)/size))]
inp = inp[0:int(size*math.floor(len(inp)/size))]
inp=np.split(inp,len(inp)/size)
out=np.split(out,len(out)/size)
newInp = []
newOut = []
index = 0
for i in out:
print i
for j in range(len(i)):
print "j "+ str(j)
newOut.append(i.tolist())
newOut[-1].append(1/(j+1))
newInp.append(inp[index][j])
index+=1
# for i in range(len(inp)):
# inp[i]=np.append(inp[i],1/(i+1))
# inp[i]=np.append(inp[i],1/(len(inp)+1))
for i in range(len(out)):
out[i]=np.append(out[i],1/(i+1))
out[i]=np.append(out[i],1/(len(out)+1))
return np.array(newOut[:-1]),np.vstack(newInp[:-1])
#inp,out = join_sound("aud/" + keys_train[10][0],"aud/" + keys_train[10][1])
#print inp
#print out
#print inp.shape
#print out.shape
#raise
def parse_audio_files(files,dir,ind=None):
if ind is None:
ind = len(files)
inputs, outputs = [], []
#print files
#print len(files[0:ind])
for fn in files[0:ind]:
print "in"
if len(fn) == 2:
try:
inp, out = join_sound(dir + "/" + fn[0],dir + "/" + fn[1])
#print ("inp",inp)
#print ("out",out)
#time.sleep(2)
#nexts = np.append(nexts, nxt)
for i in out:
outputs.append(i.tolist())
for i in inp:
inputs.append(i.tolist())
except Exception as e:
print e
return np.array(inputs),np.array(outputs)
#print join_sound(np.array((1000,1000,1000,1000)), np.array((1,2,3,4,5,6,7,8,9,10)),mode=1)
train_x, train_y = parse_audio_files(keys_train,"aud",1)
print (train_x, "trainx")
print (train_x.shape, "trainx.shape")
print (train_y, "trainy")
print (train_y.shape, "trainy.shape")
"""
x_list = test_x.tolist()
print "len \n"
print len(x_list)
y_list = test_y.tolist()
#len(x_list)
json_ = open("test_data.json","w")
for i in xrange(len(x_list)):
json_.write(json.dumps({"sound":x_list[i], "key":y_list[i][1]}) + "\n")
#json_.write(json.dumps({"sound":x_list[i], "key":0}) + "\n")
#x_json = x_json[1:-1]
#json_.write(x_json)
json_.close()
"""
training_epochs = 5000
print train_x.shape
#n_dim = train_x.shape[1]
#n_classes = train_y.shape[1]
n_dim=1
n_classes=2
n_hidden_units_one = 2
n_hidden_units_two = 2
sd = 1 / np.sqrt(n_dim)
learning_rate = 100
def run_training():
X = tf.placeholder(tf.float32,[None,n_dim])
Y = tf.placeholder(tf.float32,[None,n_classes])
W_1 = tf.Variable(tf.random_normal([n_dim,n_hidden_units_one], mean = 0, stddev=sd))
b_1 = tf.Variable(tf.random_normal([n_hidden_units_one], mean = 0, stddev=sd))
h_1 = tf.nn.tanh(tf.matmul(X,W_1) + b_1)
W_2 = tf.Variable(tf.random_normal([n_hidden_units_one,n_hidden_units_two], mean = 0, stddev=sd))
b_2 = tf.Variable(tf.random_normal([n_hidden_units_two], mean = 0, stddev=sd))
h_2 = tf.nn.sigmoid(tf.matmul(h_1,W_2) + b_2)
W = tf.Variable(tf.random_normal([n_hidden_units_two,n_classes], mean = 0, stddev=sd))
b = tf.Variable(tf.random_normal([n_classes], mean = 0, stddev=sd))
y_ = tf.nn.softmax(tf.matmul(h_2,W) + b)
init = tf.global_variables_initializer()
#keys_placeholder = tf.placeholder(tf.int64, shape=(None,))
#keys = tf.identity(keys_placeholder)
#inputs = {'key': keys_placeholder.name, 'sound': X.name}
#tf.add_to_collection('inputs', json.dumps(inputs))
#prediction = tf.argmax(y_, 1)
#scores = tf.nn.softmax(y_)
#outputs = {'key': keys.name,
# 'prediction': prediction.name,
# 'scores': scores.name}
#tf.add_to_collection('outputs', json.dumps(outputs))
cost_function = -tf.reduce_sum(Y * tf.log(y_))
#cost_function = tf.reduce_sum(tf.pow(Y-y_,2))
# tf.summary.scalar('cost', cost_function)
optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost_function)
cost_history = np.empty(shape=[1],dtype=float)
y_true, y_pred = None, None
#with tf.name_scope('accuracy'):
#with tf.name_scope('correct_prediction'):
correct_prediction = tf.equal(tf.argmax(Y, 1), tf.argmax(y_, 1))
#with tf.name_scope('accuracy'):
correct_prediction = tf.equal(tf.argmax(y_,1), tf.argmax(Y,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# tf.summary.scalar('accuracy', accuracy)
merged = tf.summary.merge_all()
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(init)
test_writer = tf.summary.FileWriter('Saves/test')
for epoch in range(training_epochs):
feed_dict={X:[[1],[1/2],[1/3],[1/4],[1/5],[1/6],[1/7],[1/8],[1/9],[1/10],[1/11],[1/12],[1/13],[1/14]],Y:[[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[1,0],[0,1]]}
_,cost = sess.run([optimizer,cost_function],feed_dict=feed_dict)
cost_history = np.append(cost_history,cost)
print(epoch, cost)
# if ((epoch%10) == 0) or epoch+1==training_epochs:
# summary, acc = sess.run([merged, accuracy], feed_dict=feed_dict)
# test_writer.add_summary(summary, epoch)
if ((epoch%1000) == 0):
print "saving"
saver.save(sess, os.path.join("Saves", 'model.ckpt'), global_step=epoch)
if epoch+1==training_epochs:
saver.save(sess, os.path.join("Saves", 'export'))
run_training()
|
|
#!/usr/bin/env python2.7
from flask import Flask, render_template, send_from_directory, request
import socket,os,sys
from datetime import datetime,timedelta
from datetime import date as ddate
from ConfigParser import SafeConfigParser
import glob
par = SafeConfigParser()
#will be /etc/ssh-rank.ini or where ever you want it
par.read(os.getcwd()+"/config.ini")
from flask.ext.sqlalchemy import SQLAlchemy
import code
debug=par.get("sshrank","debugging")
webUI_port=par.get("web","webUI_port")
nmap_xml_path=par.get("web","nmap_xml")
sqlclassPath=par.get("sshrank","sqlclassPath")
screenshotpath=par.get("sshrank","screenshotpath")
sys.path.append(sqlclassPath)
from sqlclass import *
mysqluser=par.get("sql","sqluser")
mysqlserv=par.get("sql","sqlserv")
mysqlpass=par.get("sql","sqlpass")
user_cnt=int(par.get("web","user_cnt"))
total_ip=par.get("web","total_ip")
stats_ip=par.get("web","stats_ip")
socket.setdefaulttimeout(3)
def getlastattempt(ip):
    date=sqlsess.query(ips.dtime).filter(ips.ip==ip).order_by(-ips.pk).limit(1).scalar()
date=datetime.strptime(str(date),'%Y-%m-%d %H:%M:%S')
return date.strftime('%Y-%m-%d %H:%M:%S')
def killtuple(lista):
listb=[]
for a in lista:
listb.append(a[0])
return listb
#im not even sure wtf im doing...neat i think?
def tree_finder(thing):
#lets just do it with start from ip
list_ips=[]
#make sure its a real ip...
uniq_ips=killtuple(sqlsess.query(ips.ip).distinct())
if thing in uniq_ips:
#get the users
users = killtuple(sqlsess.query(ips.user).order_by(ips.ip).filter(ips.ip==str(thing)).distinct())
for user in users:
list_ips.append([user,killtuple(sqlsess.query(ips.ip).filter(ips.user==str(user)).distinct())])
return list_ips
else:
return 'nope'
def getlen(user):
return len(killtuple(sqlsess.query(ips.ip).filter(ips.user==str(user)).distinct()))
def tree_user(user):
list_user=[]
uniq_user=killtuple(sqlsess.query(ips.user).distinct())
if user in uniq_user:
iplist = killtuple(sqlsess.query(ips.ip).order_by(ips.ip).filter(ips.user==str(user)).distinct())
for ip in iplist:
list_user.append([ip,killtuple(sqlsess.query(ips.user).order_by(ips.user).filter(ips.ip==str(ip)).distinct())])
return list_user
else:
return 'nope'
app=Flask(__name__)
#app.debug=True
if debug == "1":
app.debug=True
#date=sqlsess.query(ips.dtime).filter(ips.ip==a[0]).order_by(-ips.pk).limit(1).scalar()
#date=datetime.strptime(str(date),'%Y-%m-%d %H:%M:%S')
#date=date.strftime('%Y-%m-%d %H:%M:%S')
#
@app.route('/')
def main():
subhead="main"
return render_template('main.html',subhead=subhead)
@app.route('/ssh_rank/lists/<time>')
def list_test(time):
sqlsess.commit()
userlist=[]
datelist=[]
deltime=[]
if time == 'week':
lastweek=datetime.today()-timedelta(7)
uniq_ips=sqlsess.query(ips.ip,func.count(ips.ip).label('total')).group_by(ips.ip).order_by('total DESC').filter(ips.dtime >= lastweek).limit(int(total_ip)).all()
elif time == 'all':
uniq_ips=sqlsess.query(ips.ip,func.count(ips.ip).label('total')).group_by(ips.ip).order_by('total DESC').limit(int(total_ip)).all()
elif time == '30day':
lastweek=datetime.today()-timedelta(30)
uniq_ips=sqlsess.query(ips.ip,func.count(ips.ip).label('total')).group_by(ips.ip).order_by('total DESC').filter(ips.dtime >= lastweek).limit(int(total_ip)).all()
elif time == '24hr':
lastweek=datetime.today()-timedelta(1)
uniq_ips=sqlsess.query(ips.ip,func.count(ips.ip).label('total')).group_by(ips.ip).order_by('total DESC').filter(ips.dtime >= lastweek).limit(int(total_ip)).all()
else:
return render_template('404.html'),404
for ip in uniq_ips:
users = sqlsess.query(ips.user,func.count(ips.user).label('total')).filter(ips.ip==str(ip[0])).group_by(ips.user).order_by('total DESC').limit(user_cnt).all()
date=sqlsess.query(ips.dtime).filter(ips.ip==ip[0]).order_by(-ips.pk).limit(1).scalar()
date=datetime.strptime(str(date),'%Y-%m-%d %H:%M:%S')
date=date.strftime('%Y-%m-%d %H:%M:%S')
#ip.append(date)
datelist.append((ip[0],date))
deltime.append(date)
for user in users:
userlist.append((ip,user[0],user[1]))
alldns=sqlsess.query(rdns).all()
newest=max(deltime)
nmapips=killtuple(sqlsess.query(nmapSQL.ip).distinct().all())
return render_template('page_for_listings_main.html',uniq_ips=uniq_ips,userlist=userlist,alldns=alldns,datelist=datelist,newest=newest,subhead=time,nmapips=nmapips)
@app.route('/ssh_rank/users/<sort>')
def all_user(sort):
if sort == 'letter':
users=killtuple(sqlsess.query(ips.user).order_by(ips.user).distinct())
return render_template('all_users.html',users=users, subhead='userlist')
elif sort == 'attempts':
users=[(user,total) for user, total in sqlsess.query(ips.user,func.count(ips.user).label('total')).group_by(ips.user).order_by('total DESC').all() if total > 1]
return render_template('users_sort_by_total.html',subhead='userlist',users=users)
else:
return render_template('404.html'),404
@app.route('/ssh_rank/ip_info/<ip>')
def ip_info(ip):
sqlsess.commit()#needs to be here; or the nmapSQL won't update...
iplist=sqlsess.query(ips.ip).distinct().all()
nmapstuff = 'wut'
if any(b[0] == ip for b in iplist):
users = sqlsess.query(ips.user,func.count(ips.user).label('total')).filter(ips.ip==str(ip)).group_by(ips.user).order_by('total DESC').all()
dates=killtuple(sqlsess.query(ips.dtime).filter(ips.ip==str(ip)).order_by(ips.dtime).all())
rdns_res=sqlsess.query(rdns).filter(rdns.ip==str(ip)).all()
#nmapstuff=sqlsess.query(rdns).filter(rdns.ip==str(ip)).all()
nmapstuff= sqlsess.query(nmapSQL.dtime,nmapSQL.portnum,nmapSQL.state,nmapSQL.proto,nmapSQL.service,nmapSQL.verinfo).filter(nmapSQL.ip==str(ip)).all()
if nmapstuff == []:
hasnmap=False
screenshots=False
else:
hasnmap=True
screenshots=[os.path.basename(x) for x in glob.glob(screenshotpath+ip+'*png')]
if len(screenshots) == 0:
screenshots=False
return render_template('ip_info.html',subhead='ipinfo', ip=ip,users=users, dates=dates,hasnmap=hasnmap,nmapstuff=nmapstuff, rdns_res=rdns_res,screenshots=screenshots)
else:
return render_template('404.html'),404
@app.route('/ssh_rank/port/<num>')
def port_search(num):
try:
int(num)
except ValueError:
return render_template('404.html'),404
if 1 <= int(num) <= 65535:
sqlsess.commit()
otherports=sqlsess.query(nmapSQL.portnum).distinct().all()
ports=sqlsess.query(nmapSQL).filter(nmapSQL.portnum==int(num))
return render_template('port_search.html', subhead='port search', ports=ports, num=num, otherports=otherports)
else:
return render_template('404.html'),404
@app.route('/ssh_rank/port_list')
def port_list():
#ports=killtuple(sqlsess.query(nmapSQL.portnum).distinct().all())
ports=sqlsess.query(nmapSQL.portnum,func.count(nmapSQL.portnum).label('total')).group_by(nmapSQL.portnum).order_by('total DESC').all()
return render_template('portlist.html', subhead='port listing', ports=ports)
@app.route('/ssh_rank/service_list')
def serv_list():
servlist=sqlsess.query(nmapSQL.service,func.count(nmapSQL.service).label('total')).group_by(nmapSQL.service).order_by('total DESC').all()
return render_template('servicelist.html',subhead='service listing', servlist=servlist)
@app.route('/ssh_rank/nmapIPs')
def nmapIPs():
nmapips=killtuple(sqlsess.query(nmapSQL.ip).distinct().all())
nmapips.sort()
return render_template('nmapips.html',subhead='IPs nmap', nmapips=nmapips)
@app.route('/ssh_rank/service/<service>')
def servpage(service):
services=killtuple(sqlsess.query(nmapSQL.service).all())
if str(service) in services:
servlist=sqlsess.query(nmapSQL).filter(nmapSQL.service==str(service)).all()
return render_template('port_search.html', subhead='service search', ports=servlist, num=str(service), otherports=services)
else:
return render_template('404.html'),404
@app.route('/ssh_rank/user/<user>')
def userpage(user):
users = killtuple(sqlsess.query(ips.user).distinct())
if str(user) in users:
other_ips=killtuple(sqlsess.query(ips.ip).filter(ips.user==str(user)).distinct())
return render_template('user_info.html',subhead='users',user=user, other_ips=other_ips)
else:
return render_template('404.html'),404
@app.route('/testing/<user>')
def testuser(user):
if ',' in user:
user_list= user.split(",")
return render_template("test.user.html",user_list=user_list)
@app.route('/ssh_rank/tree/<ttype>/<thing>')
def tree(ttype,thing):
if ttype == 'ip':
tree = tree_finder(str(thing))
        if tree != 'nope':
return render_template('tree.html', subhead='tree',tree=tree, ip=str(thing))
else:
return render_template('404.html'),404
elif ttype == 'user':
tree = tree_user(str(thing))
        if tree != 'nope':
return render_template('tree_user.html', subhead='tree',tree=tree, user=str(thing))
else:
return render_template('404.html'),404
else:
return render_template('404.html'),404
@app.route('/ssh_rank/user2p')
def userp2p():
users=sqlsess.query(ips.user,func.count(ips.user).label('total')).group_by(ips.user).order_by('total DESC').all()
users2=[user for user, total in users if total > 2]
users3=[(userblarg,getlen(userblarg)) for userblarg in users2 if getlen(userblarg) >2]
users3.sort(key=lambda tup: tup[1],reverse=True)
return render_template('users_with_2p_ip.html',subhead='2pip',users=users3)
@app.route('/about')
def about():
return render_template('about.html',subhead='about')
@app.route('/ssh_rank/screenshots')
def screenshot():
screenshots=[os.path.basename(x) for x in glob.glob(screenshotpath+'*png')]
print screenshots
return render_template('screens.html',subhead='screens', screenshots=screenshots)
@app.errorhandler(404)
def page404(e):
return render_template('404.html'),404
@app.route('/temp.php')
@app.route('/robots.txt')
def static_from_root():
return send_from_directory(app.static_folder, request.path[1:])
@app.route('/ping')
def pong():
return 'pong'
if __name__=='__main__':
app.run(host='0.0.0.0',port=(int(webUI_port)), debug=False)
|
|
#!/usr/bin/env python3
from tempfile import mkstemp
from shutil import move
from os import remove, close
from datetime import datetime
import re
import os
#from os import system
def replace(filePath, pattern, subst, count):
#Create temp file
fh, absPath = mkstemp()
newFile = open(absPath,'w')
oldFile = open(filePath)
for line in oldFile:
newFile.write(line.replace(pattern, subst, count))
#close temp file
newFile.close()
close(fh)
oldFile.close()
#Remove original file
remove(filePath)
#Move new file
move(absPath, filePath)
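# A minimal usage sketch (hypothetical absolute path), mirroring how genCT() below
# rewrites the leading "." once per line of cscope.files:
#   replace("cscope.files", ".", "/abs/src/root", 1)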
#import fileinput
#import sys
#def replaceAll(file,searchExp,replaceExp,count):
# for line in fileinput.input(file, inplace=1):
# if searchExp in line:
# line = line.replace(searchExp,replaceExp,count)
# sys.stdout.write(line)
def printUsage():
print ("Usage: myCppDir [path]")
def filterCscopeFiles(cscopeFiles, excludeList=None):
'''
filter cscopeFiles with excludeList
'''
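    # excludeList entries are treated as regex alternatives, e.g.
    # ['verif', '[Tt]est'] builds the pattern "(verif)|([Tt]est)".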
raw = ""
if excludeList is not None:
for i in range(len(excludeList)):
raw = raw+'('+excludeList[i]+')|'
raw = raw[:-1] #discard last |
#print("excludeList pattern "+raw)
#ignoreTest = re.compile(r'([Tt]est)|(verif)')
ignoreTest = re.compile(raw)
if os.path.exists(cscopeFiles):
#Create temp file
fh, absPath = mkstemp()
newFile = open(absPath,'w')
pathFile = open(cscopeFiles)
        # each filename consists of an absolute path plus the file name
for filename in pathFile:
dirname = os.path.dirname( filename )
if ignoreTest.search(dirname):
continue
newFile.write(filename)
pathFile.close()
#close temp file
newFile.close()
close(fh)
#Remove original file
remove(cscopeFiles)
#Rename new file
move(absPath, cscopeFiles)
def genCT(excludeList, path, args):
parentDir = os.getcwd()
os.chdir(path)
print(datetime.now().time())
print("generate cscope database and tags under " + os.getcwd())
srcPath = os.getcwd()
modulePattern = re.compile(args.module)
targetPath = modulePattern.sub(args.module+'/build/tagsroot/'+args.user+'/'+args.module, \
srcPath)
targetTags = os.path.join(targetPath, "tags")
targetCscopeFiles = os.path.join(targetPath, "cscope.files")
targetCscopeInOut = os.path.join(targetPath, "cscope.in.out")
targetCscopePoOut = os.path.join(targetPath, "cscope.po.out")
targetCscopeOut = os.path.join(targetPath, "cscope.out")
    if not args.update:
print("--->clean exist cscope database and tags under " + os.getcwd())
        if args.outproject:
#clean legacy temp files
if os.path.exists(targetCscopeFiles):
os.remove(targetCscopeFiles)
if os.path.exists(targetCscopeInOut):
os.remove(targetCscopeInOut)
if os.path.exists(targetCscopePoOut):
os.remove(targetCscopePoOut)
if os.path.exists(targetCscopeOut):
os.remove(targetCscopeOut)
if os.path.exists(targetTags):
os.remove(targetTags)
else:
#clean legacy temp files
if os.path.exists("cscope.files"):
os.remove("cscope.files")
if os.path.exists("cscope.in.out"):
os.remove("cscope.in.out")
if os.path.exists("cscope.po.out"):
os.remove("cscope.po.out")
if os.path.exists("cscope.out"):
os.remove("cscope.out")
if os.path.exists("tags"):
os.remove("tags")
#find target files for later cscope and ctags usage
print(datetime.now().time())
print("--->find C++ files under "+
os.getcwd()+
"recursively for cscope")
#note follow symbol link
findcmd = "find . -type f -name '*.c' \
-o -type f -name '*.h' \
-o -type f -name '*.cpp' \
-o -type f -name '*.sig' \
-o -type f -name '*.cc' \
-o -type f -name '*.cxx' \
-o -type f -name '*.hpp' \
-o -type f -name '*.h++' \
-o -type f -name '*.hh'>cscope.files "
os.system(findcmd)
print(datetime.now().time())
print("--->exlucde files in cscope.files contain ", excludeList)
filterCscopeFiles("cscope.files", excludeList)
print(datetime.now().time())
print("--->update cscope.files relative path to absolute path")
absPath = os.path.abspath('.')
replace("cscope.files", ".", absPath, 1)
else:
print("--->update cscope and generate tags based on \
exist cscope.files under " + os.getcwd())
print(datetime.now().time())
    if args.update:
print("--->update cscope database:: \
cscope.out cscope.in.out cscope.po.out")
os.system("cscope -Ubkq")
else:
print("--->generate cscope database:: \
cscope.out cscope.in.out cscope.po.out")
os.system("cscope -bkq")
print(datetime.now().time())
print("--->generate global tags \
--file-scope=yes database under "+os.getcwd())
os.system("ctags \
-L cscope.files \
-F \
--languages=C++ \
--extra=+fq \
--fields=+imnlSfkst \
--file-scope=yes \
--totals=yes ")
    if args.outproject:
print("tags and cscope files to ", targetPath)
if os.path.exists("tags"):
move("tags", targetPath)
if os.path.exists("cscope.files"):
move("cscope.files", targetPath)
if os.path.exists("cscope.in.out"):
move("cscope.in.out", targetPath)
if os.path.exists("cscope.po.out"):
move("cscope.po.out", targetPath)
if os.path.exists("cscope.out"):
move("cscope.out", targetPath)
print(datetime.now().time())
os.chdir(parentDir)
def main():
"""
    If the current folder is the only path you want to generate tags and cscope.out for, then no args are expected.
    If subfolders are provided as args, the current folder should not also be given; otherwise duplicate tags and cscope.out entries will result.
"""
from argparse import ArgumentParser
from argparse import ArgumentDefaultsHelpFormatter
parser = ArgumentParser(\
description = 'generate tags and cscope.out under current folder \
recursively',\
formatter_class = ArgumentDefaultsHelpFormatter\
)
parser.add_argument('-version', '-v', \
action = 'version',\
version = '%(prog)s 1.0')
parser.add_argument('-t', '--type', \
nargs = 1, \
type = str, \
default = ['review'], \
dest = 'type', \
choices = ['review', 'test'], \
help = 'for review type, all verif, black_box and \
other test related files will be ignored')
parser.add_argument('-p', \
default = ['.'], \
nargs = '+', \
dest = 'paths', \
help= 'paths to run CTA')
parser.add_argument('-up', '--update', \
action = 'store_true',\
                        help= 'only update existing cscope.out and tags')
parser.add_argument('--outproject', \
action = 'store_true',\
                        help= 'generated tags and cscope.out will not be stored in the src paths, but in other folders beside the top src, like a cmake build')
parser.add_argument('-rp', '--repo', \
type = str, \
required = False, \
default = '/workspace/git', \
dest = 'repoRoot', \
help= 'git repo root')
parser.add_argument('-u', '--user', \
type = str, \
default = 'xjiashe', \
dest = 'user', \
help = 'user or dedicate repo ')
parser.add_argument('-m', '--module', \
type = str, \
default = 'wbb', \
dest = 'module', \
                        help = 'software module in a different repo')
args = parser.parse_args()
excludeList = [
        'lost+found',
'dbcc_cdci',
'dbch_cdci',
'dbp_test_if',
'e_dbch_cdci',
'e_sc_cdci',
'tx_sbci',
'hs_dbch_cdci',
'internal_if',
'tte'
]
if args.type[0] == 'review':
excludeList.append('blackbox_test')
excludeList.append('[Tt]est')
excludeList.append('verif')
for path in args.paths:
genCT(excludeList, path, args)
if __name__ == "__main__":
main()
|
|
#!/usr/bin/env python
from Tkinter import *
from tkFileDialog import askopenfilename
import os,time
_THEME_COLOR = "moccasin" # theme color for selection, highlights, etc.
# see full list in /usr/lib/X11/rgb.txt
_CHROME_COLOR= "grey95" # color for backgrounds of objects
_HIGHLIGHT_COLOR = "blanchedalmond" # color for highlight, before widget is pressed
_BD = 1 # border distance (width) for GUI elements. 1 looks better
class genericDialog(Toplevel):
"""A generic dialog that can be inherited from and customized as needed."""
def __init__(self, parent, title=None, b1="OK", b2="Cancel"):
"""The constructor function"""
Toplevel.__init__(self, parent,background=_CHROME_COLOR)
self.transient(parent)
if title:
self.title(title)
self.b1 = b1 # button 1 text, default is OK
self.b2 = b2 # button 2 text, default is Cancel
self.parent = parent
self.result = None
body = Frame(self,background=_CHROME_COLOR)
self.initial_focus = self.body(body)
body.pack(padx=5, pady=5,fill=BOTH,expand=YES)
self.buttonbox()
self.grab_set()
if not self.initial_focus:
self.initial_focus = self
self.protocol("WM_DELETE_WINDOW", self.cancel)
#self.geometry("+%d+%d" % (parent.winfo_rootx(),
# parent.winfo_rooty()))
self.initial_focus.focus_set()
self.wait_window(self)
# construction hooks
def body(self, master):
"""Put the contents of the dialog box in here by overriding"""
pass
def buttonbox(self):
"""Creates standard button box"""
box = Frame(self,background=_CHROME_COLOR)
w = Button(box, text=self.b1, width=10, command=self.ok, bd=_BD,
default=ACTIVE, highlightthickness=_BD,
activebackground=_HIGHLIGHT_COLOR,
highlightbackground=_THEME_COLOR,
background=_CHROME_COLOR)
w.pack(side=LEFT, padx=5, pady=5)
w = Button(box, text=self.b2, width=10, command=self.cancel,bd=_BD,
highlightthickness=_BD,activebackground=_HIGHLIGHT_COLOR,
background=_CHROME_COLOR)
w.pack(side=LEFT, padx=5, pady=5)
self.bind("<Return>", self.ok)
self.bind("<Escape>", self.cancel)
box.pack()
# standard button semantics
def ok(self, event=None):
"""Function called when button b1 (the default) is clicked. This is
typically 'OK'."""
if not self.validate():
self.initial_focus.focus_set() # put focus back
return
self.withdraw()
self.update_idletasks()
self.apply()
self.cancel()
def cancel(self, event=None):
"""Function called when button b2 is clicked ('cancel' by default)."""
# put focus back to the parent window
self.parent.focus_set()
self.destroy()
# command hooks
def validate(self):
"""Secondary function called when button b1 is clicked. This checks
for some sort of validation condition to be met. If it is met,
then self.apply will be called by the ok function."""
return 1 # override
def apply(self):
"""Secondary function called when button b1 is clicked.
The user should override this function when creating their
own subclass. This function will then assign variables or
whatever is needed when the 'ok' (b1) is clicked."""
pass # override
class inputFile(Frame):
def __init__(self, master, lab='in:', name='???', width=15):
Frame.__init__(self, master, background=_CHROME_COLOR)
self.a = [("Data files","?*.dat"),("Data files","?*.tbl"),("All files","*")]
self.dir = StringVar()
self.name = StringVar()
self.master = master
if name == '???':
name = '%s/???' %os.getcwd()
self.update(name)
self.makeframe(lab,width)
def makeframe(self,lab,width):
"""Create the GUI elements that go in the frame"""
Label(self,text=lab,background=_CHROME_COLOR).pack(side=LEFT)
Label(self,textvariable=self.name,width=width,bg="White").pack(side=LEFT,padx=2)
Button(self,text="Browse",command=self.getname,bd=_BD,
highlightthickness=0,activebackground=_HIGHLIGHT_COLOR).pack(side=LEFT)
def getname(self,initdir=None):
"""Calls tkFileDialog to allow the user to browse for a file,
and then assigns this name to self.name"""
if not initdir:
path = askopenfilename(title="Choose file to process:",filetypes=self.a,
initialdir=self.dir.get())
else:
path = askopenfilename(title="Choose file to process:",filetypes=self.a,
initialdir=initdir)
if path:
if self.validate():
self.update(path)
self.dostuff()
def update(self,name):
"""Update the entry and internal variables"""
junk1,junk2 = os.path.split(name)
self.path = name
self.dir.set(junk1)
self.name.set(junk2)
def validate(self):
"""Function called to check whether to call self.update and self.dostuff.
This checks to make sure some sort of validation condition is met
first before calling self.update and self.dostuff"""
return 1
def dostuff(self):
"""Hook function to do something after a file is input"""
pass
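# A sketch of customizing inputFile (hypothetical subclass), assuming the
# validate()/dostuff() hooks above: dostuff() runs after a file is chosen.
#   class plotFile(inputFile):
#       def dostuff(self):
#           print "loading", self.path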
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, parent,var, value, index,callback=None):
self.__value = value
self.__var = var
self.__index = index
self.__callback = callback
self.parent = parent
def __call__(self, *args):
self.__var.set(self.__value)
self.parent.index = self.__index
if self.__callback:
self.__callback(self.__value, *args)
class myOptionMenu(Menubutton):
"""Modified the builtin OptionMenu so it doesn't look like ass. Also keeps
track of the index of items on the menu and allows you to change the
menu on the fly."""
def __init__(self, master, variable, value, *values, **kwargs):
"""Construct an optionmenu widget with the parent MASTER, with
the resource textvariable set to VARIABLE, the initially selected
value VALUE, the other menu values VALUES and an additional
keyword argument command."""
kw = {"borderwidth": 1, "textvariable": variable,
"indicatoron": 1, "relief": RAISED, "anchor": "c",
"highlightthickness": 1, 'direction' : 'below'}
if kwargs.has_key('direction'):
kw['direction'] = kwargs['direction']
Widget.__init__(self, master, "menubutton", kw)
self.widgetName = 'tk_optionMenu'
menu = self.__menu = Menu(self, name="menu", tearoff=0, borderwidth=_BD,
activeborderwidth=1)
self.menuname = menu._w
# 'command' is the only supported keyword
self.callback = kwargs.get('command')
#if kwargs.has_key('command'):
# del kwargs['command']
#if kwargs:
# raise TclError, 'unknown option -'+kwargs.keys()[0]
self.index = -1
self["menu"] = menu
self.variable = variable
self.setoptions([value]+list(values))
def __getitem__(self, name):
if name == 'menu':
return self.__menu
return Widget.__getitem__(self, name)
def setoptions(self,options):
"""Set the options in the menu"""
menu = self["menu"]
menu.delete(0,END)
for i,v in enumerate(options):
menu.add_command(label=v,command=_setit(self,self.variable, v, i+1, self.callback))
def destroy(self):
"""Destroy this widget and the associated menu."""
Menubutton.destroy(self)
self.__menu = None
class myaskopenfilename(genericDialog):
"""An altered askopenfilename that lets user put in file formats and not
strings of text for extensions."""
def __init__(self,master,title=None,initialdir=None,filetypes=None):
"""Constructor function"""
if not initialdir:
self.cwd = os.getcwd() # full path of current folder
else:
self.cwd = initialdir
self.path = self.cwd.split('/')
self.path[0] = '/'
self.currentFolder = StringVar() # name of current folder
self.currentFolder.set(self.path[-1])
self.filetypes = filetypes
self.currentType = StringVar() # name of current filetype
self.currentType.set(self.filetypes[0])
genericDialog.__init__(self,master,b1='Open',title=title)
def body(self,master):
"""The body of the file browser"""
f1 = Frame(master,background=_CHROME_COLOR)
self.optionmenu = myOptionMenu(f1,self.currentFolder,command=self._changedir,
direction='above',*self.path)
self.optionmenu.pack(side=LEFT)
Button(f1,text="Up",bd=_BD,highlightthickness=0,
activebackground=_HIGHLIGHT_COLOR,command=self._goup).pack(side=LEFT)
f1.pack()
f2=Frame(master,background=_CHROME_COLOR)
self.scrollbar = Scrollbar(f2,orient=VERTICAL,troughcolor="gray80",bd=_BD,
activebackground=_THEME_COLOR,background=_CHROME_COLOR)
self.listbox = Listbox(f2,width=30,height=15,bg="White",
bd=_BD,yscrollcommand=self.scrollbar.set,selectbackground='White',
selectborderwidth=0,background=_CHROME_COLOR)
self.scrollbar.config(command=self.listbox.yview)
self.scrollbar.pack(side=RIGHT,fill=Y)
self.listbox.pack(fill=BOTH,expand=1)
f2.pack(fill=BOTH,expand=1,anchor=N)
f3 = Frame(master,background=_CHROME_COLOR)
Label(f3,text='format:').pack(side=LEFT)
myOptionMenu(f3,self.currentType,*self.filetypes).pack(side=LEFT)
f3.pack()
self._showdir()
def apply(self):
"""Figure out full path of selected file and return it along with
file format"""
idx = self.listbox.curselection()
filename = self.listbox.get(int(idx[0]))
path = '/'.join(self.path)
print path,filename
return path
def _showdir(self):
"""Fill the listbox with the files in the current directory"""
filelist = os.listdir(self.cwd)
self.listbox.delete(0,END)
for line in filelist:
if line[0] != '.':
self.listbox.insert(END,line)
def _changedir(self,event=None):
"""Change dir when user selects from optionmenu"""
val = self.optionmenu.index
self.cwd = '/' + '/'.join(self.path[1:val])
print self.cwd
self.path = self.path[:val]
self.currentFolder.set(self.path[-1])
self.optionmenu.setoptions(self.path)
self._showdir()
def _goup(self,event=None):
"""Go up 1 level"""
if len(self.path) > 1:
self._changedir()
|
|
##
# Copyright (c) 2014-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Control API resource.
This provides an HTTP API to allow an admin to trigger various "internal" actions on the server.
The intent of this is to primarily support automated testing tools that may need to alter
server behavior during tests via an HTTP-only API.
"""
__all__ = [
"ControlAPIResource",
]
from calendarserver.tools.util import recordForPrincipalID
from twext.enterprise.jobs.jobitem import JobItem
from twext.enterprise.jobs.workitem import WORK_PRIORITY_HIGH, WORK_WEIGHT_1
from twext.python.log import Logger
from twisted.internet import reactor
from twisted.internet.defer import succeed, inlineCallbacks, returnValue
from twistedcaldav.config import config
from twistedcaldav.extensions import DAVResource, \
DAVResourceWithoutChildrenMixin
from twistedcaldav.resource import ReadOnlyNoCopyResourceMixIn
from txdav.caldav.datastore.scheduling.work import ScheduleOrganizerWork, \
ScheduleOrganizerSendWork, ScheduleReplyWork, ScheduleRefreshWork, \
ScheduleAutoReplyWork
from txdav.common.datastore.work.load_work import TestWork
from txdav.who.groups import GroupCacherPollingWork, GroupRefreshWork, \
GroupAttendeeReconciliationWork, GroupDelegateChangesWork, \
GroupShareeReconciliationWork
from txdav.xml import element as davxml
from txweb2 import responsecode
from txweb2.dav.method.propfind import http_PROPFIND
from txweb2.dav.noneprops import NonePropertyStore
from txweb2.dav.util import allDataFromStream
from txweb2.http import HTTPError, JSONResponse, StatusResponse
from txweb2.http import Response
from txweb2.http_headers import MimeType
import json
import os
import signal
import time
log = Logger()
class ControlAPIResource (ReadOnlyNoCopyResourceMixIn, DAVResourceWithoutChildrenMixin, DAVResource):
"""
Resource used to execute admin commands.
Extends L{DAVResource} to provide service functionality.
"""
def __init__(self, root, directory, store, principalCollections=()):
"""
        @param root: the parent (root) resource of this one.
"""
DAVResource.__init__(self, principalCollections=principalCollections)
self.parent = root
self._store = store
self._directory = directory
def deadProperties(self):
if not hasattr(self, "_dead_properties"):
self._dead_properties = NonePropertyStore(self)
return self._dead_properties
def etag(self):
return succeed(None)
def checkPreconditions(self, request):
return None
def defaultAccessControlList(self):
return succeed(davxml.ACL(*config.AdminACEs))
def contentType(self):
return MimeType.fromString("text/html; charset=utf-8")
def resourceType(self):
return None
def isCollection(self):
return False
def isCalendarCollection(self):
return False
def isPseudoCalendarCollection(self):
return False
def render(self, request):
output = """<html>
<head>
<title>Control API Resource</title>
</head>
<body>
<h1>Control API Resource.</h1>
</body>
</html>"""
response = Response(200, {}, output)
response.headers.setHeader("content-type", MimeType("text", "html"))
return response
http_PROPFIND = http_PROPFIND
def http_GET(self, request):
"""
GET just returns HTML description.
"""
return self.render(request)
def _ok(self, status, description, result=None):
if result is None:
result = {}
result["status"] = status
result["description"] = description
return JSONResponse(
responsecode.OK,
result,
)
def _error(self, status, description):
raise HTTPError(JSONResponse(
responsecode.BAD_REQUEST,
{
"status": status,
"description": description,
},
))
def _recordsToJSON(self, records):
results = []
for record in sorted(records, key=lambda r: r.uid):
try:
shortNames = record.shortNames
except AttributeError:
shortNames = []
results.append(
{
"type": record.recordType.name,
"cn": record.displayName,
"uid": record.uid,
"sn": shortNames
}
)
return results
@inlineCallbacks
def http_POST(self, request):
"""
POST method with JSON body is used for control.
"""
#
# Check authentication and access controls
#
yield self.authorize(request, (davxml.Read(),))
contentType = request.headers.getHeader("content-type")
# Check content first
if "{}/{}".format(contentType.mediaType, contentType.mediaSubtype) != "application/json":
self.log.error("MIME type {mime} not allowed in request", mime=contentType)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "MIME type {} not allowed in request".format(contentType)))
body = (yield allDataFromStream(request.stream))
try:
j = json.loads(body)
except (ValueError, TypeError) as e:
self.log.error("Invalid JSON data in request: {ex}\n{body}", ex=e, body=body)
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Invalid JSON data in request: {}\n{}".format(e, body)))
try:
action = j["action"]
except KeyError:
self._error("error", "No 'action' member in root JSON object.")
method = "action_{}".format(action)
if not hasattr(self, method):
self._error("error", "The action '{}' is not supported.".format(action))
result = yield getattr(self, method)(j)
returnValue(result)
@inlineCallbacks
def action_listgroupmembers(self, j):
try:
grpID = j["group"]
except KeyError:
self._error("error", "No 'group' member in root JSON object.")
try:
record = yield recordForPrincipalID(self._directory, grpID)
except ValueError:
record = None
if record is None:
self._error("error", "No group with id '{}' in the directory.".format(grpID))
members = yield record.members()
returnValue(self._ok("ok", "Group membership", {
"group": grpID,
"members": self._recordsToJSON(members),
}))
@inlineCallbacks
def action_addgroupmembers(self, j):
try:
grpID = j["group"]
except KeyError:
self._error("error", "No 'group' member in root JSON object.")
try:
memberIDs = j["members"]
except KeyError:
self._error("error", "No 'members' member in root JSON object.")
try:
record = yield recordForPrincipalID(self._directory, grpID)
except ValueError:
record = None
if record is None:
self._error("error", "No group with id '{}' in the directory.".format(grpID))
existingMembers = yield record.members()
existingMemberUIDs = set([member.uid for member in existingMembers])
add = set()
invalid = set()
exists = set()
for memberID in memberIDs:
memberRecord = yield recordForPrincipalID(self._directory, memberID)
if memberRecord is None:
invalid.add(memberID)
elif memberRecord.uid in existingMemberUIDs:
exists.add(memberRecord)
else:
add.add(memberRecord)
if add:
yield record.addMembers(add)
yield record.service.updateRecords([record], create=False)
returnValue(self._ok("ok", "Added group members", {
"group": grpID,
"added": self._recordsToJSON(add),
"exists": self._recordsToJSON(exists),
"invalid": sorted(invalid),
}))
@inlineCallbacks
def action_removegroupmembers(self, j):
try:
grpID = j["group"]
except KeyError:
self._error("error", "No 'group' member in root JSON object.")
try:
memberIDs = j["members"]
except KeyError:
self._error("error", "No 'members' member in root JSON object.")
try:
record = yield recordForPrincipalID(self._directory, grpID)
except ValueError:
record = None
if record is None:
self._error("error", "No group with id '{}' in the directory.".format(grpID))
existingMembers = yield record.members()
existingMemberUIDs = set([member.uid for member in existingMembers])
remove = set()
invalid = set()
missing = set()
for memberID in memberIDs:
memberRecord = yield recordForPrincipalID(self._directory, memberID)
if memberRecord is None:
invalid.add(memberID)
elif memberRecord.uid not in existingMemberUIDs:
missing.add(memberRecord)
else:
remove.add(memberRecord)
if remove:
            yield record.removeMembers(remove)
yield record.service.updateRecords([record], create=False)
returnValue(self._ok("ok", "Removed group members", {
"group": grpID,
"removed": self._recordsToJSON(remove),
"missing": self._recordsToJSON(missing),
"invalid": sorted(invalid),
}))
@inlineCallbacks
def action_refreshgroups(self, j):
txn = self._store.newTransaction(label="ControlAPIResource.action_refreshgroups")
os.kill(os.getppid(), signal.SIGUSR1)
work = yield GroupCacherPollingWork.reschedule(txn, 2, force=True)
jobID = work.jobID
yield txn.commit()
if "wait" in j and j["wait"]:
yield JobItem.waitJobDone(self._store.newTransaction, reactor, 60.0, jobID)
yield JobItem.waitWorkDone(self._store.newTransaction, reactor, 60.0, (
GroupRefreshWork, GroupAttendeeReconciliationWork, GroupDelegateChangesWork, GroupShareeReconciliationWork,
))
returnValue(self._ok("ok", "Group refresh scheduled"))
@inlineCallbacks
def action_schedulingdone(self, j):
"""
Wait for all schedule queue items to complete.
"""
yield JobItem.waitWorkDone(self._store.newTransaction, reactor, 120.0, (
ScheduleOrganizerWork, ScheduleOrganizerSendWork, ScheduleReplyWork, ScheduleRefreshWork, ScheduleAutoReplyWork,
))
returnValue(self._ok("ok", "Scheduling done"))
@inlineCallbacks
def action_testwork(self, j):
"""
Wait for all schedule queue items to complete.
"""
try:
when = j["when"]
except KeyError:
when = 0
try:
priority = j["priority"]
except KeyError:
priority = WORK_PRIORITY_HIGH
try:
weight = j["weight"]
except KeyError:
weight = WORK_WEIGHT_1
try:
delay = j["delay"]
except KeyError:
delay = 0
try:
jobs = j["jobs"]
except KeyError:
jobs = 1
start_time = time.time()
for _ in range(jobs):
effective_when = max(when - (time.time() - start_time), 0)
yield TestWork.schedule(
self._store,
effective_when,
priority,
weight,
delay,
)
returnValue(self._ok("ok", "Test work scheduled"))
@inlineCallbacks
def action_revisioncleanup(self, j):
"""
Wait for all schedule queue items to complete.
"""
from txdav.common.datastore.work.revision_cleanup import _triggerRevisionCleanup
from txdav.common.datastore.work.revision_cleanup import RevisionCleanupWork
txn = self._store.newTransaction(label="ControlAPIResource.action_revisioncleanup")
yield _triggerRevisionCleanup(txn, 60)
yield txn.commit()
yield JobItem.waitWorkDone(self._store.newTransaction, reactor, 120.0, (
RevisionCleanupWork,
))
returnValue(self._ok("ok", "RevisionCleanupWork done"))
|
|
from robot.http_connection import HttpConnection
import pytest
from asynctest.mock import MagicMock, Mock
from aiohttp.client import ClientSession
from unittest.mock import sentinel, call, DEFAULT
from aiosocks.connector import ProxyClientRequest
from aiohttp import ClientError
class MockResponse:
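    # Async-context-manager stand-in for an aiohttp response: text()/read() return
    # empty payloads and any other attribute access returns the string 'mock'.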
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
pass
async def text(self):
return ''
async def read(self):
return ''
def __getattr__(self, item):
return 'mock'
def failed_at_step(step, exception_cls):
current = 1
def failed(*args, **kwargs):
nonlocal current
current += 1
if current - 1 == step:
raise exception_cls()
return DEFAULT
return failed
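# A usage sketch for failed_at_step (mirrors its use in the proxy tests below):
# make a mock raise only on its Nth call, e.g.
#   session.get = Mock(side_effect=failed_at_step(1, ClientError), return_value=MockResponse())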
class TestHttpConnection:
@pytest.fixture
def no_config(self):
return {}
@pytest.fixture
def timeout_config(self):
return {
'timeouts': {
'conn_timeout': 123,
'read_timeout': 456
}
}
@pytest.fixture(params=['http://localhost:8080', 'socks4://localhost:8080', 'socks5://localhost:8080'])
def proxy_only_config(self, request):
return {
'proxy': {
'url': request.param,
'proxy_only': True
}
}
@pytest.fixture(params=['http://localhost:8080', 'socks4://localhost:8080', 'socks5://localhost:8080'])
def proxy_config(self, request):
return {
'proxy': {
'url': request.param,
'proxy_only': False
}
}
@pytest.fixture(params=['http://localhost:8080', 'socks4://localhost:8080', 'socks5://localhost:8080'])
def proxy_with_auth(self, request):
return {
'proxy': {
'url': request.param,
'auth': {
'login': 'myname',
'password': 'pw'
} if not request.param.startswith("socks4://") else {
'login': 'myname'
},
'proxy_only': True
}
}
@pytest.fixture
def invalid_config(self):
return [
{
'proxy': {
'url': 'https://localhost:8000'
}
},
{
'proxy': {
'url': 'thunder://localhost:8000'
}
},
{
'proxy': {
'url': None
}
},
{
'proxy': {
'url': 'socks4://localhost:8000',
'auth': {
'password': 'pw'
}
}
},
{
'proxy': {
'proxy_only': True
}
}
]
@staticmethod
def _setup(mocker):
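        # Patch ClientSession and ProxyConnector inside robot.http_connection so the
        # tests below can assert on session construction and session.get calls.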
session = MagicMock(ClientSession)
session.get.return_value = MockResponse()
session_cls = mocker.patch('robot.http_connection.ClientSession', new=Mock(return_value=session))
mocker.patch('robot.http_connection.ProxyConnector', new=Mock(return_value=sentinel.connector))
return session_cls, session
@pytest.mark.asyncio
async def test_request_with_no_config_when_got_response_will_return_response(self, mocker, no_config):
session_cls, session = self._setup(mocker)
with HttpConnection(no_config) as conn:
resp = await conn.request("http://www.baidu.com")
assert resp.proxy_used is False
session_cls.assert_called_once_with(connector=sentinel.connector, request_class=ProxyClientRequest,
raise_for_status=True)
session.get.assert_called_once_with("http://www.baidu.com")
@pytest.mark.asyncio
async def test_request_with_no_config_when_got_response_fail_will_throw(self, mocker, no_config):
session_cls, session = self._setup(mocker)
session.get = Mock(side_effect=ClientError)
with HttpConnection(no_config) as conn:
with pytest.raises(ClientError):
await conn.request("http://www.baidu.com")
@pytest.mark.asyncio
async def test_request_with_timeout_config_will_configure_aiohttp_timeouts(self, mocker, timeout_config):
session_cls, session = self._setup(mocker)
with HttpConnection(timeout_config) as conn:
await conn.request("http://www.baidu.com")
session_cls.assert_called_once_with(connector=sentinel.connector, request_class=ProxyClientRequest,
raise_for_status=True, conn_timeout=123, read_timeout=456)
@pytest.mark.asyncio
async def test_request_with_proxy_only_config_will_only_try_with_proxy(self, mocker, proxy_only_config):
session_cls, session = self._setup(mocker)
with HttpConnection(proxy_only_config) as conn:
resp = await conn.request("http://www.baidu.com")
assert resp.proxy_used is True
session.get.assert_called_once_with("http://www.baidu.com", proxy=proxy_only_config['proxy']['url'],
proxy_auth=None)
session.get = Mock(side_effect=ClientError)
with HttpConnection(proxy_only_config) as conn:
with pytest.raises(ClientError):
await conn.request("http://www.baidu.com")
@pytest.mark.asyncio
async def test_request_with_proxy_config_will_try_both(self, mocker, proxy_config):
session_cls, session = self._setup(mocker)
# no errors happened
with HttpConnection(proxy_config) as conn:
resp = await conn.request("http://www.baidu.com")
assert resp.proxy_used is False
session.get.assert_called_once_with("http://www.baidu.com")
session.get.reset_mock()
with HttpConnection(proxy_config) as conn:
resp = await conn.request("http://www.baidu.com", True)
assert resp.proxy_used is True
session.get.assert_called_once_with("http://www.baidu.com", proxy=proxy_config['proxy']['url'],
proxy_auth=None)
session.get.reset_mock()
# errors always happen
session.get = Mock(side_effect=ClientError)
with HttpConnection(proxy_config) as conn:
with pytest.raises(ClientError):
await conn.request("http://www.baidu.com")
assert session.get.call_count == 2
session.get.assert_has_calls([call("http://www.baidu.com"),
call("http://www.baidu.com", proxy=proxy_config['proxy']['url'], proxy_auth=None)],
any_order=False)
session.get.reset_mock()
with HttpConnection(proxy_config) as conn:
with pytest.raises(ClientError):
await conn.request("http://www.baidu.com", True)
assert session.get.call_count == 2
session.get.assert_has_calls([call("http://www.baidu.com", proxy=proxy_config['proxy']['url'], proxy_auth=None),
call("http://www.baidu.com")], any_order=False)
session.get.reset_mock()
# errors happened first time
session.get = Mock(side_effect=failed_at_step(1, ClientError), return_value=MockResponse())
with HttpConnection(proxy_config) as conn:
await conn.request("http://www.baidu.com")
assert session.get.call_count == 2
session.get.assert_has_calls([call("http://www.baidu.com"),
call("http://www.baidu.com", proxy=proxy_config['proxy']['url'], proxy_auth=None)],
any_order=False)
session.get.reset_mock()
        # the error would occur on the 2nd (proxy) call, which is never made because the direct attempt succeeds
session.get = Mock(side_effect=failed_at_step(2, ClientError), return_value=MockResponse())
with HttpConnection(proxy_config) as conn:
await conn.request("http://www.baidu.com")
assert session.get.call_count == 1
session.get.assert_called_once_with("http://www.baidu.com")
session.get.reset_mock()
@pytest.mark.asyncio
async def test_request_with_proxy_auth_will_set_auth(self, mocker, proxy_with_auth):
session_cls, session = self._setup(mocker)
with HttpConnection(proxy_with_auth) as conn:
await conn.request("http://www.baidu.com")
session.get.assert_called_once()
auth = session.get.call_args[1]['proxy_auth']
assert auth.login == 'myname' or auth.login.decode('utf-8') == 'myname'
if not proxy_with_auth['proxy']['url'].startswith('socks4://'):
assert auth.password == 'pw' or auth.password.decode('utf-8') == 'pw'
@pytest.mark.asyncio
async def test_request_with_invalid_config_will_throw(self, mocker, invalid_config):
self._setup(mocker)
for config in invalid_config:
with pytest.raises(Exception):
with HttpConnection(config):
pass
|
|
import numpy as np
import pytest
from pandas._libs.tslibs.period import IncompatibleFrequency
from pandas.core.dtypes.dtypes import PeriodDtype
from pandas import (
Index,
NaT,
Period,
PeriodIndex,
Series,
date_range,
offsets,
period_range,
)
import pandas._testing as tm
from pandas.core.arrays import PeriodArray
class TestPeriodIndex:
def test_construction_base_constructor(self):
# GH 13664
arr = [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")]
tm.assert_index_equal(Index(arr), PeriodIndex(arr))
tm.assert_index_equal(Index(np.array(arr)), PeriodIndex(np.array(arr)))
arr = [np.nan, NaT, Period("2011-03", freq="M")]
tm.assert_index_equal(Index(arr), PeriodIndex(arr))
tm.assert_index_equal(Index(np.array(arr)), PeriodIndex(np.array(arr)))
arr = [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="D")]
tm.assert_index_equal(Index(arr), Index(arr, dtype=object))
tm.assert_index_equal(Index(np.array(arr)), Index(np.array(arr), dtype=object))
def test_base_constructor_with_period_dtype(self):
dtype = PeriodDtype("D")
values = ["2011-01-01", "2012-03-04", "2014-05-01"]
result = Index(values, dtype=dtype)
expected = PeriodIndex(values, dtype=dtype)
tm.assert_index_equal(result, expected)
@pytest.mark.parametrize(
"values_constructor", [list, np.array, PeriodIndex, PeriodArray._from_sequence]
)
def test_index_object_dtype(self, values_constructor):
        # Index(periods, dtype=object) is an Index (not a PeriodIndex)
periods = [
Period("2011-01", freq="M"),
NaT,
Period("2011-03", freq="M"),
]
values = values_constructor(periods)
result = Index(values, dtype=object)
assert type(result) is Index
tm.assert_numpy_array_equal(result.values, np.array(values))
def test_constructor_use_start_freq(self):
# GH #1118
p = Period("4/2/2012", freq="B")
expected = period_range(start="4/2/2012", periods=10, freq="B")
index = period_range(start=p, periods=10)
tm.assert_index_equal(index, expected)
def test_constructor_field_arrays(self):
# GH #1264
years = np.arange(1990, 2010).repeat(4)[2:-2]
quarters = np.tile(np.arange(1, 5), 20)[2:-2]
index = PeriodIndex(year=years, quarter=quarters, freq="Q-DEC")
expected = period_range("1990Q3", "2009Q2", freq="Q-DEC")
tm.assert_index_equal(index, expected)
index2 = PeriodIndex(year=years, quarter=quarters, freq="2Q-DEC")
tm.assert_numpy_array_equal(index.asi8, index2.asi8)
index = PeriodIndex(year=years, quarter=quarters)
tm.assert_index_equal(index, expected)
years = [2007, 2007, 2007]
months = [1, 2]
msg = "Mismatched Period array lengths"
with pytest.raises(ValueError, match=msg):
PeriodIndex(year=years, month=months, freq="M")
with pytest.raises(ValueError, match=msg):
PeriodIndex(year=years, month=months, freq="2M")
years = [2007, 2007, 2007]
months = [1, 2, 3]
idx = PeriodIndex(year=years, month=months, freq="M")
exp = period_range("2007-01", periods=3, freq="M")
tm.assert_index_equal(idx, exp)
def test_constructor_U(self):
# U was used as undefined period
with pytest.raises(ValueError, match="Invalid frequency: X"):
period_range("2007-1-1", periods=500, freq="X")
def test_constructor_nano(self):
idx = period_range(
start=Period(ordinal=1, freq="N"), end=Period(ordinal=4, freq="N"), freq="N"
)
exp = PeriodIndex(
[
Period(ordinal=1, freq="N"),
Period(ordinal=2, freq="N"),
Period(ordinal=3, freq="N"),
Period(ordinal=4, freq="N"),
],
freq="N",
)
tm.assert_index_equal(idx, exp)
def test_constructor_arrays_negative_year(self):
years = np.arange(1960, 2000, dtype=np.int64).repeat(4)
quarters = np.tile(np.array([1, 2, 3, 4], dtype=np.int64), 40)
pindex = PeriodIndex(year=years, quarter=quarters)
tm.assert_index_equal(pindex.year, Index(years))
tm.assert_index_equal(pindex.quarter, Index(quarters))
def test_constructor_invalid_quarters(self):
msg = "Quarter must be 1 <= q <= 4"
with pytest.raises(ValueError, match=msg):
PeriodIndex(year=range(2000, 2004), quarter=list(range(4)), freq="Q-DEC")
def test_constructor_corner(self):
result = period_range("2007-01", periods=10.5, freq="M")
exp = period_range("2007-01", periods=10, freq="M")
tm.assert_index_equal(result, exp)
def test_constructor_fromarraylike(self):
idx = period_range("2007-01", periods=20, freq="M")
# values is an array of Period, thus can retrieve freq
tm.assert_index_equal(PeriodIndex(idx.values), idx)
tm.assert_index_equal(PeriodIndex(list(idx.values)), idx)
msg = "freq not specified and cannot be inferred"
with pytest.raises(ValueError, match=msg):
PeriodIndex(idx.asi8)
with pytest.raises(ValueError, match=msg):
PeriodIndex(list(idx.asi8))
msg = "'Period' object is not iterable"
with pytest.raises(TypeError, match=msg):
PeriodIndex(data=Period("2007", freq="A"))
result = PeriodIndex(iter(idx))
tm.assert_index_equal(result, idx)
result = PeriodIndex(idx)
tm.assert_index_equal(result, idx)
result = PeriodIndex(idx, freq="M")
tm.assert_index_equal(result, idx)
result = PeriodIndex(idx, freq=offsets.MonthEnd())
tm.assert_index_equal(result, idx)
assert result.freq == "M"
result = PeriodIndex(idx, freq="2M")
tm.assert_index_equal(result, idx.asfreq("2M"))
assert result.freq == "2M"
result = PeriodIndex(idx, freq=offsets.MonthEnd(2))
tm.assert_index_equal(result, idx.asfreq("2M"))
assert result.freq == "2M"
result = PeriodIndex(idx, freq="D")
exp = idx.asfreq("D", "e")
tm.assert_index_equal(result, exp)
def test_constructor_datetime64arr(self):
vals = np.arange(100000, 100000 + 10000, 100, dtype=np.int64)
vals = vals.view(np.dtype("M8[us]"))
msg = r"Wrong dtype: datetime64\[us\]"
with pytest.raises(ValueError, match=msg):
PeriodIndex(vals, freq="D")
@pytest.mark.parametrize("box", [None, "series", "index"])
def test_constructor_datetime64arr_ok(self, box):
# https://github.com/pandas-dev/pandas/issues/23438
data = date_range("2017", periods=4, freq="M")
if box is None:
data = data._values
elif box == "series":
data = Series(data)
result = PeriodIndex(data, freq="D")
expected = PeriodIndex(
["2017-01-31", "2017-02-28", "2017-03-31", "2017-04-30"], freq="D"
)
tm.assert_index_equal(result, expected)
def test_constructor_dtype(self):
        # passing a period dtype should construct a PeriodIndex with the matching freq
idx = PeriodIndex(["2013-01", "2013-03"], dtype="period[M]")
exp = PeriodIndex(["2013-01", "2013-03"], freq="M")
tm.assert_index_equal(idx, exp)
assert idx.dtype == "period[M]"
idx = PeriodIndex(["2013-01-05", "2013-03-05"], dtype="period[3D]")
exp = PeriodIndex(["2013-01-05", "2013-03-05"], freq="3D")
tm.assert_index_equal(idx, exp)
assert idx.dtype == "period[3D]"
        # if we already have a freq and it's not the same, then asfreq
# (not changed)
idx = PeriodIndex(["2013-01-01", "2013-01-02"], freq="D")
res = PeriodIndex(idx, dtype="period[M]")
exp = PeriodIndex(["2013-01", "2013-01"], freq="M")
tm.assert_index_equal(res, exp)
assert res.dtype == "period[M]"
res = PeriodIndex(idx, freq="M")
tm.assert_index_equal(res, exp)
assert res.dtype == "period[M]"
msg = "specified freq and dtype are different"
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex(["2011-01"], freq="M", dtype="period[D]")
def test_constructor_empty(self):
idx = PeriodIndex([], freq="M")
assert isinstance(idx, PeriodIndex)
assert len(idx) == 0
assert idx.freq == "M"
with pytest.raises(ValueError, match="freq not specified"):
PeriodIndex([])
def test_constructor_pi_nat(self):
idx = PeriodIndex(
[Period("2011-01", freq="M"), NaT, Period("2011-01", freq="M")]
)
exp = PeriodIndex(["2011-01", "NaT", "2011-01"], freq="M")
tm.assert_index_equal(idx, exp)
idx = PeriodIndex(
np.array([Period("2011-01", freq="M"), NaT, Period("2011-01", freq="M")])
)
tm.assert_index_equal(idx, exp)
idx = PeriodIndex(
[NaT, NaT, Period("2011-01", freq="M"), Period("2011-01", freq="M")]
)
exp = PeriodIndex(["NaT", "NaT", "2011-01", "2011-01"], freq="M")
tm.assert_index_equal(idx, exp)
idx = PeriodIndex(
np.array(
[NaT, NaT, Period("2011-01", freq="M"), Period("2011-01", freq="M")]
)
)
tm.assert_index_equal(idx, exp)
idx = PeriodIndex([NaT, NaT, "2011-01", "2011-01"], freq="M")
tm.assert_index_equal(idx, exp)
with pytest.raises(ValueError, match="freq not specified"):
PeriodIndex([NaT, NaT])
with pytest.raises(ValueError, match="freq not specified"):
PeriodIndex(np.array([NaT, NaT]))
with pytest.raises(ValueError, match="freq not specified"):
PeriodIndex(["NaT", "NaT"])
with pytest.raises(ValueError, match="freq not specified"):
PeriodIndex(np.array(["NaT", "NaT"]))
def test_constructor_incompat_freq(self):
msg = "Input has different freq=D from PeriodIndex\\(freq=M\\)"
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex([Period("2011-01", freq="M"), NaT, Period("2011-01", freq="D")])
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex(
np.array(
[Period("2011-01", freq="M"), NaT, Period("2011-01", freq="D")]
)
)
# first element is NaT
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex([NaT, Period("2011-01", freq="M"), Period("2011-01", freq="D")])
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex(
np.array(
[NaT, Period("2011-01", freq="M"), Period("2011-01", freq="D")]
)
)
def test_constructor_mixed(self):
idx = PeriodIndex(["2011-01", NaT, Period("2011-01", freq="M")])
exp = PeriodIndex(["2011-01", "NaT", "2011-01"], freq="M")
tm.assert_index_equal(idx, exp)
idx = PeriodIndex(["NaT", NaT, Period("2011-01", freq="M")])
exp = PeriodIndex(["NaT", "NaT", "2011-01"], freq="M")
tm.assert_index_equal(idx, exp)
idx = PeriodIndex([Period("2011-01-01", freq="D"), NaT, "2012-01-01"])
exp = PeriodIndex(["2011-01-01", "NaT", "2012-01-01"], freq="D")
tm.assert_index_equal(idx, exp)
def test_constructor_simple_new(self):
idx = period_range("2007-01", name="p", periods=2, freq="M")
with pytest.raises(AssertionError, match="<class .*PeriodIndex'>"):
idx._simple_new(idx, name="p")
result = idx._simple_new(idx._data, name="p")
tm.assert_index_equal(result, idx)
msg = "Should be numpy array of type i8"
with pytest.raises(AssertionError, match=msg):
# Need ndarray, not Int64Index
type(idx._data)._simple_new(Index(idx.asi8), freq=idx.freq)
arr = type(idx._data)._simple_new(idx.asi8, freq=idx.freq)
result = idx._simple_new(arr, name="p")
tm.assert_index_equal(result, idx)
def test_constructor_simple_new_empty(self):
# GH13079
idx = PeriodIndex([], freq="M", name="p")
with pytest.raises(AssertionError, match="<class .*PeriodIndex'>"):
idx._simple_new(idx, name="p")
result = idx._simple_new(idx._data, name="p")
tm.assert_index_equal(result, idx)
@pytest.mark.parametrize("floats", [[1.1, 2.1], np.array([1.1, 2.1])])
def test_constructor_floats(self, floats):
with pytest.raises(AssertionError, match="<class "):
PeriodIndex._simple_new(floats)
msg = "PeriodIndex does not allow floating point in construction"
with pytest.raises(TypeError, match=msg):
PeriodIndex(floats)
def test_constructor_nat(self):
msg = "start and end must not be NaT"
with pytest.raises(ValueError, match=msg):
period_range(start="NaT", end="2011-01-01", freq="M")
with pytest.raises(ValueError, match=msg):
period_range(start="2011-01-01", end="NaT", freq="M")
def test_constructor_year_and_quarter(self):
year = Series([2001, 2002, 2003])
quarter = year - 2000
idx = PeriodIndex(year=year, quarter=quarter)
strs = [f"{t[0]:d}Q{t[1]:d}" for t in zip(quarter, year)]
lops = list(map(Period, strs))
p = PeriodIndex(lops)
tm.assert_index_equal(p, idx)
def test_constructor_freq_mult(self):
# GH #7811
pidx = period_range(start="2014-01", freq="2M", periods=4)
expected = PeriodIndex(["2014-01", "2014-03", "2014-05", "2014-07"], freq="2M")
tm.assert_index_equal(pidx, expected)
pidx = period_range(start="2014-01-02", end="2014-01-15", freq="3D")
expected = PeriodIndex(
["2014-01-02", "2014-01-05", "2014-01-08", "2014-01-11", "2014-01-14"],
freq="3D",
)
tm.assert_index_equal(pidx, expected)
pidx = period_range(end="2014-01-01 17:00", freq="4H", periods=3)
expected = PeriodIndex(
["2014-01-01 09:00", "2014-01-01 13:00", "2014-01-01 17:00"], freq="4H"
)
tm.assert_index_equal(pidx, expected)
msg = "Frequency must be positive, because it represents span: -1M"
with pytest.raises(ValueError, match=msg):
PeriodIndex(["2011-01"], freq="-1M")
msg = "Frequency must be positive, because it represents span: 0M"
with pytest.raises(ValueError, match=msg):
PeriodIndex(["2011-01"], freq="0M")
msg = "Frequency must be positive, because it represents span: 0M"
with pytest.raises(ValueError, match=msg):
period_range("2011-01", periods=3, freq="0M")
@pytest.mark.parametrize("freq", ["A", "M", "D", "T", "S"])
@pytest.mark.parametrize("mult", [1, 2, 3, 4, 5])
def test_constructor_freq_mult_dti_compat(self, mult, freq):
freqstr = str(mult) + freq
pidx = period_range(start="2014-04-01", freq=freqstr, periods=10)
expected = date_range(start="2014-04-01", freq=freqstr, periods=10).to_period(
freqstr
)
tm.assert_index_equal(pidx, expected)
def test_constructor_freq_combined(self):
for freq in ["1D1H", "1H1D"]:
pidx = PeriodIndex(["2016-01-01", "2016-01-02"], freq=freq)
expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 00:00"], freq="25H")
for freq in ["1D1H", "1H1D"]:
pidx = period_range(start="2016-01-01", periods=2, freq=freq)
expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 01:00"], freq="25H")
tm.assert_index_equal(pidx, expected)
def test_constructor(self):
pi = period_range(freq="A", start="1/1/2001", end="12/1/2009")
assert len(pi) == 9
pi = period_range(freq="Q", start="1/1/2001", end="12/1/2009")
assert len(pi) == 4 * 9
pi = period_range(freq="M", start="1/1/2001", end="12/1/2009")
assert len(pi) == 12 * 9
pi = period_range(freq="D", start="1/1/2001", end="12/31/2009")
assert len(pi) == 365 * 9 + 2
pi = period_range(freq="B", start="1/1/2001", end="12/31/2009")
assert len(pi) == 261 * 9
pi = period_range(freq="H", start="1/1/2001", end="12/31/2001 23:00")
assert len(pi) == 365 * 24
pi = period_range(freq="Min", start="1/1/2001", end="1/1/2001 23:59")
assert len(pi) == 24 * 60
pi = period_range(freq="S", start="1/1/2001", end="1/1/2001 23:59:59")
assert len(pi) == 24 * 60 * 60
start = Period("02-Apr-2005", "B")
i1 = period_range(start=start, periods=20)
assert len(i1) == 20
assert i1.freq == start.freq
assert i1[0] == start
end_intv = Period("2006-12-31", "W")
i1 = period_range(end=end_intv, periods=10)
assert len(i1) == 10
assert i1.freq == end_intv.freq
assert i1[-1] == end_intv
end_intv = Period("2006-12-31", "1w")
i2 = period_range(end=end_intv, periods=10)
assert len(i1) == len(i2)
assert (i1 == i2).all()
assert i1.freq == i2.freq
end_intv = Period("2005-05-01", "B")
i1 = period_range(start=start, end=end_intv)
# infer freq from first element
i2 = PeriodIndex([end_intv, Period("2005-05-05", "B")])
assert len(i2) == 2
assert i2[0] == end_intv
i2 = PeriodIndex(np.array([end_intv, Period("2005-05-05", "B")]))
assert len(i2) == 2
assert i2[0] == end_intv
# Mixed freq should fail
vals = [end_intv, Period("2006-12-31", "w")]
msg = r"Input has different freq=W-SUN from PeriodIndex\(freq=B\)"
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex(vals)
vals = np.array(vals)
with pytest.raises(IncompatibleFrequency, match=msg):
PeriodIndex(vals)
# tuple freq disallowed GH#34703
with pytest.raises(TypeError, match="pass as a string instead"):
Period("2006-12-31", ("w", 1))
@pytest.mark.parametrize(
"freq", ["M", "Q", "A", "D", "B", "T", "S", "L", "U", "N", "H"]
)
def test_recreate_from_data(self, freq):
org = period_range(start="2001/04/01", freq=freq, periods=1)
idx = PeriodIndex(org.values, freq=freq)
tm.assert_index_equal(idx, org)
def test_map_with_string_constructor(self):
raw = [2005, 2007, 2009]
index = PeriodIndex(raw, freq="A")
expected = Index([str(num) for num in raw])
res = index.map(str)
# should return an Index
assert isinstance(res, Index)
# preserve element types
assert all(isinstance(resi, str) for resi in res)
# lastly, values should compare equal
tm.assert_index_equal(res, expected)
class TestShallowCopy:
def test_shallow_copy_empty(self):
# GH#13067
idx = PeriodIndex([], freq="M")
result = idx._view()
expected = idx
tm.assert_index_equal(result, expected)
def test_shallow_copy_disallow_i8(self):
# GH#24391
pi = period_range("2018-01-01", periods=3, freq="2D")
with pytest.raises(AssertionError, match="ndarray"):
pi._shallow_copy(pi.asi8)
def test_shallow_copy_requires_disallow_period_index(self):
pi = period_range("2018-01-01", periods=3, freq="2D")
with pytest.raises(AssertionError, match="PeriodIndex"):
pi._shallow_copy(pi)
class TestSeriesPeriod:
def setup_method(self, method):
self.series = Series(period_range("2000-01-01", periods=10, freq="D"))
def test_constructor_cant_cast_period(self):
msg = "Cannot cast PeriodArray to dtype float64"
with pytest.raises(TypeError, match=msg):
Series(period_range("2000-01-01", periods=10, freq="D"), dtype=float)
def test_constructor_cast_object(self):
s = Series(period_range("1/1/2000", periods=10), dtype=PeriodDtype("D"))
exp = Series(period_range("1/1/2000", periods=10))
tm.assert_series_equal(s, exp)
|
|
# -*- coding: utf-8 -*-
"""Page Admin module."""
from pages import settings
from pages.models import Page, Content, PageAlias, Media
from pages.phttp import get_language_from_request, get_template_from_request
from pages.utils import get_placeholders
from pages.templatetags.pages_tags import PlaceholderNode
from pages.admin.forms import make_form
from pages.admin import views
from pages.urlconf_registry import registry
from os.path import join
import urllib
from collections import defaultdict
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.conf import settings as global_settings
from django.http import HttpResponseRedirect, Http404
from django.contrib.admin.sites import AlreadyRegistered
extra_actions_registery = []
def add_page_action(action):
if action not in extra_actions_registery:
extra_actions_registery.append(action)
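# A hedged registration sketch (hypothetical action function), assuming Django's
# standard admin-action signature; registered actions appear in PageAdmin.actions below:
#   def touch_pages(modeladmin, request, queryset):
#       for page in queryset:
#           page.invalidate()   # cache invalidation, as done in PageAdmin.save_model()
#   add_page_action(touch_pages)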
class PageAdmin(admin.ModelAdmin):
"""Page Admin class."""
actions = extra_actions_registery
# these mandatory fields are not versioned
mandatory_placeholders = ('title', 'slug')
general_fields = [
'title', 'slug', 'status', 'target',
'position', 'freeze_date', 'template', 'language',
'redirect_to', 'redirect_to_url']
if settings.PAGE_USE_SITE_ID and not settings.PAGE_HIDE_SITES:
general_fields.append('sites')
insert_point = general_fields.index('status') + 1
# Strange django behavior. If not provided, django will try to find
# 'page' foreign key in all registered models
inlines = []
if settings.PAGE_TAGGING:
general_fields.insert(insert_point, 'tags')
# Add support for future dating and expiration based on settings.
if settings.PAGE_SHOW_END_DATE:
general_fields.insert(insert_point, 'publication_end_date')
if settings.PAGE_SHOW_START_DATE:
general_fields.insert(insert_point, 'publication_date')
general_fields.append('delegate_to')
insert_point = general_fields.index('status') + 1
page_templates = settings.get_page_templates()
fieldsets = (
[_('General'), {
'fields': general_fields,
'classes': ('module-general',),
}],
)
class Media:
css = {
'all': [join(settings.PAGES_STATIC_URL, path) for path in (
'css/rte.css',
'css/pages.css'
)]
}
js = [join(settings.PAGES_STATIC_URL, path) for path in (
'javascript/jquery.js',
'javascript/jquery.rte.js',
'javascript/pages.js',
'javascript/pages_list.js',
'javascript/pages_form.js',
'javascript/jquery.query-2.1.7.js',
'javascript/iframeResizer.min.js',
)]
def get_urls(self):
urls = super(PageAdmin, self).get_urls()
from django.conf.urls import url
pages_urls = [
url(r'^$', self.list_pages, name='page-changelist'),
url(r'^(?P<page_id>[0-9]+)/traduction/(?P<language_id>[-\w]+)/$',
views.traduction, name='page-traduction'),
url(r'^(?P<page_id>[0-9]+)/get-content/(?P<content_id>[0-9]+)/$',
views.get_content, name='page-get-content'),
url(r'^(?P<page_id>[0-9]+)/modify-content/(?P<content_type>[-\w]+)/(?P<language_id>[-\w]+)/$',
views.modify_content, name='page-modify-content'),
url(r'^(?P<page_id>[0-9]+)/modify-placeholder/$',
views.modify_placeholder, name='page-modify-placeholder'),
url(r'^(?P<page_id>[0-9]+)/get-last-content/$',
views.get_last_content, name='page-get-last-content'),
url(r'^(?P<page_id>[0-9]+)/delete-content/(?P<language_id>[-\w]+)/$',
views.delete_content, name='page-delete-content'),
url(r'^(?P<page_id>[0-9]+)/sub-menu/$',
views.sub_menu, name='page-sub-menu'),
url(r'^(?P<page_id>[0-9]+)/move-page/$',
views.move_page, name='page-move-page'),
url(r'^(?P<page_id>[0-9]+)/change-status/$',
views.change_status, name='page-change-status'),
url(r'^(?P<media_id>[0-9]+)/media-url/$',
views.get_media_url, name='get-media-url'),
]
return pages_urls + urls
def save_model(self, request, page, form, change):
"""Move the page in the tree if necessary and save every
placeholder :class:`Content <pages.models.Content>`.
"""
language = form.cleaned_data['language']
target = form.data.get('target', None)
position = form.data.get('position', None)
page.save()
        # if both target and position are set, we need to move the page
if target and position:
try:
target = self.model.objects.get(pk=target)
except self.model.DoesNotExist:
pass
else:
target.invalidate()
page.move_to(target, position)
for name in self.mandatory_placeholders:
data = form.cleaned_data[name]
placeholder = PlaceholderNode(name)
extra_data = placeholder.get_extra_data(form.data)
placeholder.save(page, language, data, change,
extra_data=extra_data)
for placeholder in get_placeholders(page.get_template()):
if(placeholder.ctype in form.cleaned_data and placeholder.ctype
not in self.mandatory_placeholders):
data = form.cleaned_data[placeholder.ctype]
extra_data = placeholder.get_extra_data(form.data)
placeholder.save(page, language, data, change,
extra_data=extra_data)
page.invalidate()
def get_fieldsets(self, request, obj=None):
"""
Add fieldsets of placeholders to the list of already
existing fieldsets.
"""
# some ugly business to remove freeze_date
# from the field list
general_module = {
'fields': list(self.general_fields),
'classes': ('module-general',),
}
default_fieldsets = list(self.fieldsets)
if not request.user.has_perm('pages.can_freeze'):
general_module['fields'].remove('freeze_date')
if not request.user.has_perm('pages.can_publish'):
general_module['fields'].remove('status')
if not len(registry):
general_module['fields'].remove('delegate_to')
default_fieldsets[0][1] = general_module
placeholder_fieldsets = []
section_placeholder_fieldsets = defaultdict(list)
template = get_template_from_request(request, obj)
for placeholder in get_placeholders(template):
if placeholder.ctype not in self.mandatory_placeholders and not placeholder.section:
placeholder_fieldsets.append(placeholder.ctype)
elif placeholder.section:
section_placeholder_fieldsets[placeholder.section].append(placeholder.ctype)
additional_fieldsets = []
for title, fieldset in section_placeholder_fieldsets.items():
additional_fieldsets.append((_(title), {
'fields': fieldset,
'classes': ('module-content collapse grp-collapse grp-closed',),
}))
additional_fieldsets.append((_('Content'), {
'fields': placeholder_fieldsets,
'classes': ('module-content',),
}))
return default_fieldsets + additional_fieldsets
def save_form(self, request, form, change):
"""Given a ModelForm return an unsaved instance. ``change`` is True if
the object is being changed, and False if it's being added."""
instance = super(PageAdmin, self).save_form(request, form, change)
instance.template = form.cleaned_data['template']
if not change:
instance.author = request.user
return instance
def get_form(self, request, obj=None, **kwargs):
"""Get a :class:`Page <pages.admin.forms.PageForm>` for the
:class:`Page <pages.models.Page>` and modify its fields depending on
the request."""
template = get_template_from_request(request, obj)
#model = create_page_model(get_placeholders(template))
form = make_form(self.model, get_placeholders(template))
# bind the form
language = get_language_from_request(request)
form.base_fields['language'].initial = language
if obj:
initial_slug = obj.slug(language=language, fallback=False)
initial_title = obj.title(language=language, fallback=False)
form.base_fields['slug'].initial = initial_slug
form.base_fields['title'].initial = initial_title
template = get_template_from_request(request, obj)
page_templates = settings.get_page_templates()
template_choices = list(page_templates)
# if the default template is not in the list, add it
if not [tpl for tpl in template_choices if tpl[0] == settings.PAGE_DEFAULT_TEMPLATE]:
template_choices.insert(0, (settings.PAGE_DEFAULT_TEMPLATE,
_('Default template')))
form.base_fields['template'].choices = template_choices
form.base_fields['template'].initial = force_text(template)
for placeholder in get_placeholders(template):
ctype = placeholder.ctype
if obj:
initial = placeholder.get_content(obj, language, lang_fallback=False)
else:
initial = None
form.base_fields[ctype] = placeholder.get_field(obj,
language, initial=initial)
return form
def change_view(self, request, object_id, form_url='', extra_context=None):
"""The ``change`` admin view for the
:class:`Page <pages.models.Page>`."""
language = get_language_from_request(request)
extra_context = {
'language': language,
'page_languages': settings.PAGE_LANGUAGES,
}
try:
int(object_id)
except ValueError:
raise Http404('The "%s" part of the location is invalid.'
% str(object_id))
try:
obj = self.model.objects.get(pk=object_id)
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
else:
template = get_template_from_request(request, obj)
extra_context['placeholders'] = get_placeholders(template)
extra_context['traduction_languages'] = [l for l in
settings.PAGE_LANGUAGES if Content.objects.get_content(obj,
l[0], "title") and l[0] != language]
extra_context['page'] = obj
response = super(PageAdmin, self).change_view(request, object_id,
form_url=form_url, extra_context=extra_context)
if request.method == 'POST' and isinstance(response, HttpResponseRedirect):
if '_continue' in request.POST or '_saveasnew' in request.POST or '_addanother' in request.POST:
addlanguage = True
else:
addlanguage = False
if addlanguage:
splitted = list(urllib.parse.urlparse(response.url))
query = urllib.parse.parse_qs(splitted[4])
query['language'] = language
splitted[4] = urllib.parse.urlencode(query)
response = HttpResponseRedirect(urllib.parse.urlunparse(splitted))
return response
def add_view(self, request, form_url='', extra_context=None):
"""The ``add`` admin view for the :class:`Page <pages.models.Page>`."""
extra_context = {
'language': get_language_from_request(request),
'page_languages': settings.PAGE_LANGUAGES,
}
return super(PageAdmin, self).add_view(request, form_url,
extra_context)
def has_add_permission(self, request):
"""Return ``True`` if the current user has permission to add a new
page."""
return request.user.has_perm('pages.add_page')
def has_change_permission(self, request, obj=None):
"""Return ``True`` if the current user has permission
to change the page."""
return request.user.has_perm('pages.change_page')
def has_delete_permission(self, request, obj=None):
"""Return ``True`` if the current user has permission on the page."""
return request.user.has_perm('pages.delete_page')
def list_pages(self, request, template_name=None, extra_context=None):
"""List root pages"""
if not self.admin_site.has_permission(request):
return self.admin_site.login(request)
language = get_language_from_request(request)
query = request.POST.get('q', '').strip()
if query:
page_ids = list(set([c.page.pk for c in
Content.objects.filter(body__icontains=query)]))
pages = Page.objects.filter(pk__in=page_ids)
else:
pages = Page.objects.root()
if settings.PAGE_HIDE_SITES:
pages = pages.filter(sites=global_settings.SITE_ID)
context = {
'can_publish': request.user.has_perm('pages.can_publish'),
'can_import': settings.PAGE_IMPORT_ENABLED,
'lang': language, # lang is the key used by show_content
'pages': pages,
'opts': self.model._meta,
'q': query
}
context.update(extra_context or {})
change_list = self.changelist_view(request, context)
return change_list
try:
admin.site.register(Page, PageAdmin)
except AlreadyRegistered:
pass
class ContentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'type', 'language', 'page')
list_filter = ('page',)
search_fields = ('body',)
class AliasAdmin(admin.ModelAdmin):
list_display = ('page', 'url',)
list_editable = ('url',)
raw_id_fields = ['page']
try:
admin.site.register(PageAlias, AliasAdmin)
except AlreadyRegistered:
pass
class MediaAdmin(admin.ModelAdmin):
list_display = ('image', 'title', 'creation_date', 'description', 'extension')
list_display_links = ('image', 'title', )
try:
admin.site.register(Media, MediaAdmin)
except AlreadyRegistered:
pass
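# Illustrative sketch (not part of the original module): the URL patterns added
# in PageAdmin.get_urls above are served under the admin site, so other code
# would typically reverse them by name through the 'admin' namespace. The page
# id and language values below are hypothetical example data.
def example_reverse_page_admin_urls():
    from django.core.urlresolvers import reverse
    changelist_url = reverse('admin:page-changelist')
    move_url = reverse('admin:page-move-page', kwargs={'page_id': 1})
    traduction_url = reverse('admin:page-traduction',
                             kwargs={'page_id': 1, 'language_id': 'en-us'})
    return changelist_url, move_url, traduction_url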
|
|
"""
Settings for Oscar's demo site.
Notes:
* The demo site uses the stores extension which requires a spatial database.
Only the postgis and spatialite backends are tested, but all backends
supported by GeoDjango should work.
"""
import os
# Django settings for oscar project.
PROJECT_DIR = os.path.dirname(__file__)
location = lambda x: os.path.join(
os.path.dirname(os.path.realpath(__file__)), x)
DEBUG = True
TEMPLATE_DEBUG = True
SQL_DEBUG = True
SEND_BROKEN_LINK_EMAILS = False
ADMINS = (
)
EMAIL_SUBJECT_PREFIX = '[Oscar demo] '
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
ALLOWED_HOSTS = ['demo.oscarcommerce.com',
'demo.oscar.tangentlabs.co.uk']
MANAGERS = ADMINS
# Use settings_local to override this default
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'oscar_demo',
'USER': 'm',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
LANGUAGES = (
('en-gb', 'English'),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = location("public/media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/admin/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
location('static'),
)
STATIC_ROOT = location('public/static')
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '$)a7n&o80u!6y5t-+jrd3)3!%vh&shg$wqpjpxc!ar&p#!)n1a'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# needed by django-treebeard for admin (and potentially other libs)
'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
# Oscar specific
'oscar.apps.search.context_processors.search_form',
'oscar.apps.promotions.context_processors.promotions',
'oscar.apps.checkout.context_processors.checkout',
'oscar.core.context_processors.metadata',
'oscar.apps.customer.notifications.context_processors.notifications',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
'oscar.apps.basket.middleware.BasketMiddleware',
)
DEBUG_TOOLBAR_PATCH_SETTINGS = False
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False
}
ROOT_URLCONF = 'urls'
from oscar import OSCAR_MAIN_TEMPLATE_DIR
TEMPLATE_DIRS = (
location('templates'),
OSCAR_MAIN_TEMPLATE_DIR,
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'checkout_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'checkout.log',
'formatter': 'verbose'
},
'error_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'errors.log',
'formatter': 'verbose'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'filters': ['require_debug_false'],
},
},
'loggers': {
'django': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins', 'error_file'],
'level': 'ERROR',
'propagate': False,
},
'oscar.checkout': {
'handlers': ['console', 'checkout_file'],
'propagate': True,
'level': 'INFO',
},
'datacash': {
'handlers': ['console'],
'propagate': True,
'level': 'INFO',
},
'django.db.backends': {
'handlers': ['null'],
'propagate': False,
'level': 'DEBUG',
},
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.flatpages',
'django.contrib.staticfiles',
'django.contrib.gis',
# Oscar dependencies
'compressor',
'south',
# Oscar extensions
'stores',
'paypal',
'datacash',
# External apps
'django_extensions',
'debug_toolbar',
# For profile testing
'apps.user',
'apps.bigbang',
# Sentry (for live demo site)
'raven.contrib.django.raven_compat'
]
# Include core apps with a few overrides:
# - a shipping override app to provide some shipping methods
# - an order app to provide order processing logic
from oscar import get_core_apps
INSTALLED_APPS = INSTALLED_APPS + get_core_apps(
['apps.shipping', 'apps.order', 'apps.catalogue'])
AUTHENTICATION_BACKENDS = (
'oscar.apps.customer.auth_backends.Emailbackend',
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_REDIRECT_URL = '/'
APPEND_SLASH = True
# Haystack settings - we use a local Solr instance running on the default port
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://127.0.0.1:8983/solr',
},
}
AUTH_PROFILE_MODULE = 'user.Profile'
# Oscar settings
from oscar.defaults import *
OSCAR_RECENTLY_VIEWED_PRODUCTS = 20
OSCAR_ALLOW_ANON_CHECKOUT = True
OSCAR_SHOP_NAME = 'Oscar'
OSCAR_SHOP_TAGLINE = 'Demo'
COMPRESS_ENABLED = False
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
)
THUMBNAIL_KEY_PREFIX = 'oscar-demo'
LOG_ROOT = location('logs')
# Ensure log root exists
if not os.path.exists(LOG_ROOT):
os.mkdir(LOG_ROOT)
DISPLAY_VERSION = False
USE_TZ = True
# Must be within MEDIA_ROOT for sorl to work
OSCAR_MISSING_IMAGE_URL = 'image_not_found.jpg'
GOOGLE_ANALYTICS_ID = 'UA-45363517-4'
# Add stores node to navigation
new_nav = OSCAR_DASHBOARD_NAVIGATION
new_nav.append(
{
'label': 'Stores',
'icon': 'icon-shopping-cart',
'children': [
{
'label': 'Stores',
'url_name': 'stores-dashboard:store-list',
},
{
'label': 'Store groups',
'url_name': 'stores-dashboard:store-group-list',
},
]
})
new_nav.append(
{
'label': 'Datacash',
'icon': 'icon-globe',
'children': [
{
'label': 'Transactions',
'url_name': 'datacash-transaction-list',
},
]
})
OSCAR_DASHBOARD_NAVIGATION = new_nav
GEOIP_PATH = os.path.join(os.path.dirname(__file__), 'geoip')
#default currency for django-oscar-datacash
DATACASH_CURRENCY = "GBP"
try:
from settings_local import *
except ImportError:
pass
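# Illustrative sketch (hypothetical, not shipped with the demo): because of the
# wildcard import above, any names defined in a sibling settings_local.py
# module override the defaults in this file. A minimal local override could
# look like the commented example below; all of its values are assumptions.
#
#   # settings_local.py
#   DEBUG = False
#   TEMPLATE_DEBUG = False
#   ALLOWED_HOSTS = ['localhost', '127.0.0.1']
#   DATABASES = {
#       'default': {
#           'ENGINE': 'django.contrib.gis.db.backends.postgis',
#           'NAME': 'oscar_demo',
#           'USER': 'oscar',
#           'PASSWORD': 'change-me',
#           'HOST': 'localhost',
#       },
#   }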
|
|
from __future__ import absolute_import
import pytest
from datetime import datetime, timedelta
from django.core.urlresolvers import reverse
from sentry.testutils import APITestCase, SnubaTestCase
from sentry.testutils.helpers.datetime import before_now, iso_format
class DiscoverQueryTest(APITestCase, SnubaTestCase):
def setUp(self):
super(DiscoverQueryTest, self).setUp()
self.now = datetime.now()
self.one_second_ago = iso_format(before_now(seconds=1))
self.login_as(user=self.user, superuser=False)
self.org = self.create_organization(owner=self.user, name="foo")
self.project = self.create_project(name="bar", organization=self.org)
self.other_project = self.create_project(name="other")
self.event = self.store_event(
data={
"platform": "python",
"timestamp": self.one_second_ago,
"environment": "production",
"tags": {"sentry:release": "foo", "error.custom": "custom"},
"exception": {
"values": [
{
"type": "ValidationError",
"value": "Bad request",
"mechanism": {"type": "1", "value": "1"},
"stacktrace": {
"frames": [
{
"function": "?",
"filename": "http://localhost:1337/error.js",
"lineno": 29,
"colno": 3,
"in_app": True,
}
]
},
}
]
},
},
project_id=self.project.id,
)
def test(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["environment", "platform.name"],
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert response.data["data"][0]["environment"] == "production"
assert response.data["data"][0]["platform.name"] == "python"
def test_with_discover_basic(self):
# Dashboards requires access to the discover1 endpoints for now.
# But newer saas plans don't include discover1, only discover2 (discover-basic).
with self.feature("organizations:discover-basic"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["environment", "platform.name"],
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
def test_relative_dates(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["environment", "platform.name"],
"range": "1d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert response.data["data"][0]["environment"] == "production"
assert response.data["data"][0]["platform.name"] == "python"
def test_invalid_date_request(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "platform"],
"range": "1d",
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"orderby": "-timestamp",
},
)
assert response.status_code == 400, response.content
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "platform"],
"statsPeriodStart": "7d",
"statsPeriodEnd": "1d",
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"orderby": "-timestamp",
},
)
assert response.status_code == 400, response.content
def test_conditional_fields(self):
with self.feature("organizations:discover"):
self.store_event(
data={
"platform": "javascript",
"environment": "production",
"tags": {"sentry:release": "bar"},
"timestamp": self.one_second_ago,
},
project_id=self.project.id,
)
self.store_event(
data={
"platform": "javascript",
"environment": "production",
"tags": {"sentry:release": "baz"},
"timestamp": self.one_second_ago,
},
project_id=self.project.id,
)
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"aggregations": [["count()", None, "count"]],
"conditionFields": [
[
"if",
[["in", ["release", "tuple", ["'foo'"]]], "release", "'other'"],
"release",
]
],
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"groupby": ["time", "release"],
"rollup": 86400,
"limit": 1000,
"orderby": "-time",
"range": None,
},
)
assert response.status_code == 200, response.content
# rollup is by one day and diff of start/end is 10 seconds, so we only have one day
assert len(response.data["data"]) == 2
for data in response.data["data"]:
# note this "release" key represents the alias for the column condition
# and is also used in `groupby`, it is NOT the release tag
if data["release"] == "foo":
assert data["count"] == 1
elif data["release"] == "other":
assert data["count"] == 2
def test_invalid_range_value(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "platform"],
"range": "1x",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 400, response.content
def test_invalid_aggregation_function(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "platform"],
"aggregations": [["test", "test", "test"]],
"range": "14d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 400, response.content
@pytest.mark.xfail(reason="Failing due to constrain_columns_to_dataset")
def test_boolean_condition(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["environment", "platform.name"],
"conditions": [["stack.in_app", "=", True]],
"start": (datetime.now() - timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": (datetime.now()).strftime("%Y-%m-%dT%H:%M:%S"),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert response.data["data"][0]["environment"] == "production"
assert response.data["data"][0]["platform.name"] == "python"
def test_strip_double_quotes_in_condition_strings(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["environment"],
"conditions": [["environment", "=", '"production"']],
"range": "14d",
"orderby": "-timestamp",
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert response.data["data"][0]["environment"] == "production"
def test_array_join(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "error.type"],
"start": (datetime.now() - timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": (datetime.now() + timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert response.data["data"][0]["error.type"] == "ValidationError"
def test_array_condition_equals(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"conditions": [["error.type", "=", "ValidationError"]],
"fields": ["message"],
"start": (datetime.now() - timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": (datetime.now()).strftime("%Y-%m-%dT%H:%M:%S"),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
def test_array_condition_not_equals(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"conditions": [["error.type", "!=", "ValidationError"]],
"fields": ["message"],
"start": (datetime.now() - timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": (datetime.now()).strftime("%Y-%m-%dT%H:%M:%S"),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 0
def test_array_condition_custom_tag(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"conditions": [["error.custom", "!=", "custom"]],
"fields": ["message"],
"start": (datetime.now() - timedelta(seconds=10)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": (datetime.now()).strftime("%Y-%m-%dT%H:%M:%S"),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 0
def test_select_project_name(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["project.name"],
"range": "14d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert (response.data["data"][0]["project.name"]) == "bar"
def test_groupby_project_name(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"aggregations": [["count()", "", "count"]],
"fields": ["project.name"],
"range": "14d",
"orderby": "-count",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert (response.data["data"][0]["project.name"]) == "bar"
assert (response.data["data"][0]["count"]) == 1
def test_zerofilled_dates_when_rollup_relative(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"aggregations": [["count()", "", "count"]],
"fields": ["project.name"],
"groupby": ["time"],
"orderby": "time",
"range": "5d",
"rollup": 86400,
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 6
assert (response.data["data"][5]["time"]) > response.data["data"][4]["time"]
assert (response.data["data"][5]["project.name"]) == "bar"
assert (response.data["data"][5]["count"]) == 1
def test_zerofilled_dates_when_rollup_absolute(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"aggregations": [["count()", "", "count"]],
"fields": ["project.name"],
"groupby": ["time"],
"orderby": "-time",
"start": (self.now - timedelta(seconds=300)).strftime("%Y-%m-%dT%H:%M:%S"),
"end": self.now.strftime("%Y-%m-%dT%H:%M:%S"),
"rollup": 60,
"range": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 6
event_record = response.data["data"][0]
# This test can span across an hour, where the start is in hour 1, end is in hour 2, and event is in hour 2.
# That pushes the result to the second row.
if "project.name" not in event_record:
event_record = response.data["data"][1]
assert (event_record["time"]) > response.data["data"][2]["time"]
assert (event_record["project.name"]) == "bar"
assert (event_record["count"]) == 1
def test_uniq_project_name(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"aggregations": [["uniq", "project.name", "uniq_project_name"]],
"range": "14d",
"orderby": "-uniq_project_name",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 1
assert (response.data["data"][0]["uniq_project_name"]) == 1
def test_meta_types(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["project.id", "project.name"],
"aggregations": [["count()", "", "count"]],
"range": "14d",
"orderby": "-count",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert response.data["meta"] == [
{"name": "project.id", "type": "integer"},
{"name": "project.name", "type": "string"},
{"name": "count", "type": "integer"},
]
def test_no_feature_access(self):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
with self.feature({"organizations:discover": False, "organizations:discover-basic": False}):
response = self.client.post(
url,
{
"projects": [self.project.id],
"fields": ["message", "platform"],
"range": "14d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 404, response.content
def test_invalid_project(self):
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [self.other_project.id],
"fields": ["message", "platform"],
"range": "14d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 403, response.content
def test_superuser(self):
self.new_org = self.create_organization(name="foo_new")
self.new_project = self.create_project(name="bar_new", organization=self.new_org)
self.login_as(user=self.user, superuser=True)
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.new_org.slug])
response = self.client.post(
url,
{
"projects": [self.new_project.id],
"fields": ["message", "platform"],
"start": iso_format(datetime.now() - timedelta(seconds=10)),
"end": iso_format(datetime.now()),
"orderby": "-timestamp",
"range": None,
},
)
assert response.status_code == 200, response.content
def test_all_projects(self):
project = self.create_project(organization=self.org)
self.event = self.store_event(
data={
"message": "other message",
"platform": "python",
"timestamp": iso_format(self.now - timedelta(minutes=1)),
},
project_id=project.id,
)
with self.feature("organizations:discover"):
url = reverse("sentry-api-0-discover-query", args=[self.org.slug])
response = self.client.post(
url,
{
"projects": [-1],
"fields": ["message", "platform.name"],
"range": "1d",
"orderby": "-timestamp",
"start": None,
"end": None,
},
)
assert response.status_code == 200, response.content
assert len(response.data["data"]) == 2
|
|
# -*- coding: utf-8 -*-
"""Module that contains functionality to monitor stopping.
Rationale
---------
In machine learning, optimization is usually not run until the objective is
fully minimized; the loss function being minimized is often just a proxy for
the true objective, which is what actually matters.
To achieve good results, several heuristics have been proposed to monitor for
convergence. This module collects some of these.
Usage
-----
A stopping criterion is a function which takes a climin ``info`` dictionary as
its only argument. It then returns ``True`` if the stopping criterion is
fulfilled, that is, if we should stop. The functions in this module are mostly
factories which create such criterion functions. The idea behind this is to
keep a common API even for criteria that need state, which can be realized by
generator functions or by objects with a ``__call__`` magic method.
"""
from __future__ import absolute_import
import itertools
import signal
import sys
import time
from climin.mathadapt import isnan
from ..compat import basestring
class AfterNIterations(object):
"""AfterNIterations class.
Useful for stopping after a given number of iterations has been performed.
Internally, the ``n_iter`` field of the climin info dictionary is
inspected; as soon as that value reaches ``max_iter - 1`` (iteration counts
start at zero, so ``max_iter`` iterations have then been performed), the
criterion returns ``True``.
Attributes
----------
max_iter : int
Maximum amount of iterations after which we stop.
Examples
--------
>>> S.AfterNIterations(10)({'n_iter': 10})
True
>>> S.AfterNIterations(10)({'n_iter': 5})
False
>>> S.AfterNIterations(10)({'n_iter': 9})
True
"""
def __init__(self, max_iter):
"""Create AfterNIterations object.
Parameters
----------
max_iter : int
Maximum amount of iterations after which we stop.
"""
self.max_iter = max_iter
def __call__(self, info):
return info['n_iter'] >= self.max_iter - 1
class ModuloNIterations(object):
"""Class representing a stop criterion that stops at each `n`-th iteration.
This is useful if one wants a regular pause in optimization, e.g. to save
data to disk or give feedback to the user.
Attributes
----------
n : int
Number of iterations to perform between pauses.
Examples
--------
>>> S.ModuloNIterations(10)({'n_iter': 9})
False
>>> S.ModuloNIterations(10)({'n_iter': 10})
True
>>> S.ModuloNIterations(10)({'n_iter': 11})
False
"""
def __init__(self, n):
"""Create a ModuloNIterations object.
Parameters
----------
n : int
Number of iterations to perform between pauses.
"""
self.n = n
def __call__(self, info):
return info['n_iter'] % self.n == 0
class TimeElapsed(object):
"""Stop criterion that stops after `sec` seconds after
initializing.
Attributes
----------
sec : float
Number of seconds until the criterion returns True.
Examples
--------
>>> stop = S.TimeElapsed(.5); stop({})
False
>>> time.sleep(0.5)
>>> stop({})
True
>>> stop2 = S.TimeElapsed(10); stop2({'runtime': 9})
False
>>> stop3 = S.TimeElapsed(10); stop3({'runtime': 11})
True
"""
def __init__(self, sec):
"""Create a TimeElapsed object.
Parameters
----------
sec : float
Number of seconds until the criterion returns True.
"""
self.sec = sec
self.start = time.time()
def __call__(self, info):
if 'runtime' in info:
return info['runtime'] > self.sec
else:
return time.time() - self.start > self.sec
class All(object):
"""Class representing a stop criterion that, given a list `criterions` of
stop criterions, only returns True if all of the criterions return True.
This basically implements a logical AND for stop criterions.
"""
# TODO document
def __init__(self, criterions):
self.criterions = criterions
def __call__(self, info):
return all(c(info) for c in self.criterions)
class Any(object):
"""Class representing a stop criterion that given a list `criterions` of
stop criterions only returns True, if any of the criterions returns True.
This basically implements a logical OR for stop criterions.
"""
# TODO document
def __init__(self, criterions):
self.criterions = criterions
def __call__(self, info):
return any(c(info) for c in self.criterions)
class NotBetterThanAfter(object):
"""Stop criterion that returns True if the error is not less than
`minimal` after `n_iter` iterations."""
def __init__(self, minimal, after, key='loss'):
self.minimal = minimal
self.after = after
self.key = key
def __call__(self, info):
return info['n_iter'] > self.after and info[self.key] >= self.minimal
class IsNaN(object):
"""Stop criterion that returns True if any value corresponding to
user-specified keys is NaN.
Attributes
----------
keys : list
List of keys to check whether nan or not
Examples
--------
>>> stop = S.IsNaN(['test']); stop({'test': 0})
False
>>> stop({'test': numpy.nan})
True
>>> stop({'test': gnumpy.as_garray(numpy.nan)})
True
"""
def __init__(self, keys=[]):
self.keys = keys
def __call__(self, info):
return any([isnan(info.get(key, 0)) for key in self.keys])
class Patience(object):
"""Stop criterion inspired by Bengio's patience method.
The idea is to increase the number of iterations until stopping by
a multiplicative and/or additive constant once a new best candidate is
found.
Attributes
----------
func_or_key : function, hashable
Either a function or a hashable object. In the first case, the function
will be called to get the latest loss. In the second case, the loss
will be obtained from the corresponding field of the ``info``
dictionary.
initial : int
Initial patience. Lower bound on the number of iterations.
grow_factor : float
Every time we find a sufficiently better candidate (determined by
``threshold``) we increase the patience multiplicatively by
``grow_factor``.
grow_offset : float
Every time we find a sufficiently better candidate (determined by
``threshold``) we increase the patience additively by ``grow_offset``.
threshold : float, optional, default: 1e-4
A loss a is assumed to be a better candidate than a loss b if a is
smaller than b by a margin of ``threshold``.
"""
def __init__(self, func_or_key, initial, grow_factor=1., grow_offset=0.,
threshold=1e-4):
if grow_factor == 1 and grow_offset == 0:
raise ValueError('need to specify either grow_factor != 1 '
'or grow_offset != 0')
self.func_or_key = func_or_key
self.patience = initial
self.grow_factor = grow_factor
self.grow_offset = grow_offset
self.threshold = threshold
self.best_iter = 0
self.best_loss = float('inf')
self.count = itertools.count()
def __call__(self, info):
i = info['n_iter']
if isinstance(self.func_or_key, basestring):
loss = info[self.func_or_key]
else:
loss = self.func_or_key()
if loss < self.best_loss:
if (self.best_loss - loss) > self.threshold and i > 0:
self.patience = max(i * self.grow_factor + self.grow_offset,
self.patience)
self.best_iter = i
self.best_loss = loss
return i >= self.patience
class OnUnixSignal(object):
"""Stopping criterion that is sensitive to some signal."""
def __init__(self, sig=signal.SIGINT):
"""Return a stopping criterion that stops upon a signal.
Previous handler will be overwritten.
Parameters
----------
sig : signal, optional [default: signal.SIGINT]
Signal upon which to stop.
"""
self.sig = sig
self.stopped = False
self._register()
def _register(self):
self.prev_handler = signal.signal(self.sig, self.handler)
def handler(self, signal, frame):
self.stopped = True
def __call__(self, info):
res, self.stopped = self.stopped, False
return res
def __del__(self):
signal.signal(self.sig, self.prev_handler)
def __setstate__(self, dct):
self.__dict__.update(dct)
self._register()
class OnWindowsSignal(object):
"""Stopping criterion that is sensitive to signals Ctrl-C or Ctrl-Break
on Windows."""
def __init__(self, sig=None):
"""Return a stopping criterion that stops upon a signal.
Previous handlers will be overwritten.
Parameters
----------
sig : signal, optional [default: [0,1]]
Signal upon which to stop.
Default encodes signal.SIGINT and signal.SIGBREAK.
"""
self.sig = [0, 1] if sig is None else sig
self.stopped = False
self._register()
def _register(self):
import win32api
win32api.SetConsoleCtrlHandler(self.handler, 1)
def handler(self, ctrl_type):
if ctrl_type in self.sig: # Ctrl-C and Ctrl-Break
self.stopped = True
return 1 # don't chain to the next handler
return 0 # chain to the next handler
def __call__(self, info):
res, self.stopped = self.stopped, False
return res
def __setstate__(self, dct):
self.__dict__.update(dct)
self._register()
OnSignal = OnWindowsSignal if sys.platform == 'win32' else OnUnixSignal
def never(info):
return False
def always(info):
return True
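# Illustrative usage sketch (not part of the original module). It assumes a
# climin-style optimizer loop that yields info dictionaries with at least
# 'n_iter', 'runtime' and 'loss' keys; the fake loop and loss values below are
# stand-ins for demonstration only.
if __name__ == '__main__':
    stop = Any([
        AfterNIterations(1000),            # hard cap on the number of iterations
        TimeElapsed(60.0),                 # or one minute of wall-clock time
        Patience('loss', initial=100,      # or no sufficient improvement of the
                 grow_factor=2.0),         # 'loss' entry within the patience window
    ])
    report = ModuloNIterations(10)         # pause for feedback every 10 iterations
    for i in range(10000):                 # stand-in for the optimizer loop
        info = {'n_iter': i, 'runtime': 0.0, 'loss': 1.0 / (i + 1)}
        if report(info):
            print('iteration %d, loss %.4f' % (info['n_iter'], info['loss']))
        if stop(info):
            break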
|
|
# -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import collections
from .compat import cookielib, urlparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
The original request object is read-only. The client is responsible for collecting
the new headers via `get_new_headers()` and interpreting them appropriately. You
probably want `get_cookie_header`, defined below.
"""
def __init__(self, request):
self._r = request
self._new_headers = {}
self.type = urlparse(self._r.url).scheme
def get_type(self):
return self.type
def get_host(self):
return urlparse(self._r.url).netloc
def get_origin_req_host(self):
return self.get_host()
def get_full_url(self):
return self._r.url
def is_unverifiable(self):
return True
def has_header(self, name):
return name in self._r.headers or name in self._new_headers
def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
def add_unredirected_header(self, name, value):
self._new_headers[name] = value
def get_new_headers(self):
return self._new_headers
@property
def unverifiable(self):
return self.is_unverifiable()
class MockResponse(object):
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
"""
def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
self._headers = headers
def info(self):
return self._headers
def getheaders(self, name):
return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
# the _original_response field is the wrapped httplib.HTTPResponse object,
req = MockRequest(request)
# pull out the HTTPMessage with the headers and put it in the mock:
res = MockResponse(response._original_response.msg)
jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
"""Produce an appropriate Cookie header string to be sent with `request`, or None."""
r = MockRequest(request)
jar.add_cookie_header(r)
return r.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""Unsets a cookie by name, by default over all domains and paths.
Wraps CookieJar.clear(), is O(n).
"""
clearables = []
for cookie in cookiejar:
if cookie.name == name:
if domain is None or domain == cookie.domain:
if path is None or path == cookie.path:
clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
"""There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Don't use the dict interface internally; it's just for compatibility with
external client code. All `requests` code should work out of the box
with externally provided instances of CookieJar, e.g., LWPCookieJar and
FileCookieJar.
Caution: dictionary operations that are normally O(1) may be O(n).
Unlike a regular CookieJar, this class is pickleable.
"""
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains. Caution: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
return default
def set(self, name, value, **kwargs):
"""Dict-like set() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains."""
# support client code that unsets cookies by assignment of a None value:
if value is None:
remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
return
if isinstance(value, Morsel):
c = morsel_to_cookie(value)
else:
c = create_cookie(name, value, **kwargs)
self.set_cookie(c)
return c
def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the jar.
See values() and items()."""
keys = []
for cookie in iter(self):
keys.append(cookie.name)
return keys
def values(self):
"""Dict-like values() that returns a list of values of cookies from the jar.
See keys() and items()."""
values = []
for cookie in iter(self):
values.append(cookie.value)
return values
def items(self):
"""Dict-like items() that returns a list of name-value tuples from the jar.
See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)"
and get a vanilla python dict of key value pairs."""
items = []
for cookie in iter(self):
items.append((cookie.name, cookie.value))
return items
def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains
def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if cookie.path not in paths:
paths.append(cookie.path)
return paths
def multiple_domains(self):
"""Returns True if there are multiple domains in the jar.
Returns False otherwise."""
domains = []
for cookie in iter(self):
if cookie.domain is not None and cookie.domain in domains:
return True
domains.append(cookie.domain)
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain old
Python dict of name-value pairs of cookies that meet the requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
or cookie.path == path):
dictionary[cookie.name] = cookie.value
return dictionary
def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws exception
if there are more than one cookie with name. In that case, use the more
explicit get() method instead. Caution: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws exception
if there is already a cookie of that name in the jar. In that case, use the more
explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
remove_cookie_by_name(self, name)
def update(self, other):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(cookie)
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as args name
and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
_find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
if there are conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
return cookie.value
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
Takes as args name and optional domain and path. Returns a cookie.value.
Throws KeyError if cookie is not found and CookieConflictError if there are
multiple cookies that match name and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
if toReturn is not None: # if there are multiple cookies that meet passed in criteria
raise CookieConflictError('There are multiple cookies with name, %r' % (name))
toReturn = cookie.value # we will eventually return this as long as no cookie conflict
if toReturn:
return toReturn
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
"""Unlike a normal CookieJar, this class is pickleable."""
state = self.__dict__.copy()
# remove the unpickleable RLock object
state.pop('_cookies_lock')
return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.update(self)
return new_cj
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
result = dict(
version=0,
name=name,
value=value,
port=None,
domain='',
path='/',
secure=False,
expires=None,
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False,)
badargs = set(kwargs) - set(result)
if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s'
raise TypeError(err % list(badargs))
result.update(kwargs)
result['port_specified'] = bool(result['port'])
result['domain_specified'] = bool(result['domain'])
result['domain_initial_dot'] = result['domain'].startswith('.')
result['path_specified'] = bool(result['path'])
return cookielib.Cookie(**result)
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
c = create_cookie(
name=morsel.key,
value=morsel.value,
version=morsel['version'] or 0,
port=None,
port_specified=False,
domain=morsel['domain'],
domain_specified=bool(morsel['domain']),
domain_initial_dot=morsel['domain'].startswith('.'),
path=morsel['path'],
path_specified=bool(morsel['path']),
secure=bool(morsel['secure']),
expires=morsel['max-age'] or morsel['expires'],
discard=False,
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,)
return c
def cookiejar_from_dict(cookie_dict, cookiejar=None):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
if cookie_dict is not None:
for name in cookie_dict:
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
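# Illustrative usage sketch (not part of the module): exercising the dict-like
# interface described in the RequestsCookieJar docstring above. The cookie
# names, values and domains are made-up example data.
if __name__ == '__main__':
    jar = cookiejar_from_dict({'session': 'abc123'})  # plain dict -> RequestsCookieJar
    jar.set('theme', 'dark', domain='example.com', path='/')
    jar.set('theme', 'light', domain='other.example', path='/')
    print(jar['session'])                          # 'abc123' -- unique name, safe to index
    print(jar.get('theme', domain='example.com'))  # 'dark' -- disambiguated by domain
    print(sorted(jar.keys()))                      # ['session', 'theme', 'theme']
    try:
        jar['theme']                               # ambiguous: two cookies share this name
    except CookieConflictError:
        print('need domain/path to disambiguate "theme"')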
|
|
# Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import hashlib
import json
from lxml import etree
import re
import time
import pdb
from urlparse import urlparse
from tempest.common import http
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.services.compute.xml.common import xml_to_json
# redrive rate limited calls at most twice
MAX_RECURSION_DEPTH = 2
TOKEN_CHARS_RE = re.compile('^[-A-Za-z0-9+/=]*$')
# All the successful HTTP status codes from RFC 2616
HTTP_SUCCESS = (200, 201, 202, 203, 204, 205, 206)
class RestClient(object):
TYPE = "json"
LOG = logging.getLogger(__name__)
def __init__(self, config, user, password, auth_url, token_url,
tenant_name=None, auth_version='v2'):
self.config = config
self.user = user
self.password = password
self.auth_url = auth_url
self.tenant_name = tenant_name
self.auth_version = auth_version
self.token_url = token_url
self.service = None
self.token = None
self.base_url = None
self.all_urls = None
self.region = {}
for cfgname in dir(self.config):
# Find all config.FOO.catalog_type and assume FOO is a service.
cfg = getattr(self.config, cfgname)
catalog_type = getattr(cfg, 'catalog_type', None)
if not catalog_type:
continue
service_region = getattr(cfg, 'region', None)
if not service_region:
service_region = self.config.identity.region
self.region[catalog_type] = service_region
self.endpoint_url = 'publicURL'
self.headers = {'Content-Type': 'application/%s' % self.TYPE,
'Accept': 'application/%s' % self.TYPE}
self.build_interval = config.compute.build_interval
self.build_timeout = config.compute.build_timeout
self.general_header_lc = set(('cache-control', 'connection',
'date', 'pragma', 'trailer',
'transfer-encoding', 'via',
'warning'))
self.response_header_lc = set(('accept-ranges', 'age', 'etag',
'location', 'proxy-authenticate',
'retry-after', 'server',
'vary', 'www-authenticate'))
dscv = self.config.identity.disable_ssl_certificate_validation
self.http_obj = http.ClosingHttp(
disable_ssl_certificate_validation=dscv)
def __str__(self):
STRING_LIMIT = 80
str_format = ("config:%s, user:%s, password:%s, "
"auth_url:%s, tenant_name:%s, auth_version:%s, "
"service:%s, base_url:%s, region:%s, "
"endpoint_url:%s, build_interval:%s, build_timeout:%s"
"\ntoken:%s..., \nheaders:%s...")
return str_format % (self.config, self.user, self.password,
self.auth_url, self.tenant_name,
self.auth_version, self.service,
self.base_url, self.region, self.endpoint_url,
self.build_interval, self.build_timeout,
str(self.token)[0:STRING_LIMIT],
str(self.headers)[0:STRING_LIMIT])
def _set_auth(self, region):
"""
Sets the token and base_url used in requests based on the strategy type
"""
# if self.auth_version == 'v3':
# auth_func = self.identity_auth_v3
# else:
# auth_func = self.keystone_auth
#
# self.token, self.all_urls = (
# auth_func(self.user, self.password, self.auth_url,
# self.service, self.tenant_name, region))
if self.auth_version == 'v3':
token_func = self.identity_auth_v3_token
else:
token_func = self.keystone_auth_token
self.token = (
token_func(self.user, self.password, self.token_url, self.service,
self.tenant_name, region))
if self.auth_version == 'v3':
urls_func = self.identity_auth_v3_urls
else:
urls_func = self.keystone_auth_urls
self.all_urls = (
urls_func(self.user, self.password, self.auth_url, self.service,
self.tenant_name, region))
#print "token is: %s" % self.token
#print "base urls are: %s" % self.all_urls
def identity_auth_v3_token(self, user, password, auth_url, service, tenant_name,
region):
print "blank"
def keystone_auth_token(self, user, password, token_url, service, tenant_name,
region):
if 'tokens' not in token_url:
token_url = token_url.rstrip('/') + '/tokens'
creds = {
'auth': {
'passwordCredentials': {
'username': user,
'password': password,
},
'tenantName': tenant_name,
}
}
headers = {'Content-Type': 'application/json'}
body = json.dumps(creds)
#self._log_request('POST', token_url, headers, body)
resp, resp_body = self.http_obj.request(token_url, 'POST',
headers=headers, body=body)
#self._log_response(resp, resp_body)
if resp.status == 200:
try:
auth_data = json.loads(resp_body)['access']
token = auth_data['token']['id']
except Exception as e:
print("Failed to obtain token for user: %s" % e)
raise
return token
elif resp.status == 401:
raise exceptions.AuthenticationFailure(user=user,
password=password,
tenant=tenant_name)
def identity_auth_v3_urls(self, user, password, auth_url, service, tenant_name,
region):
print "blank"
def keystone_auth_urls(self, user, password, auth_url, service, tenant_name,
region):
#urls that need to go to the catalog to get endpoints
#pdb.set_trace()
if re.search('http://auth.staging.rs-heat.com*', auth_url) or re.search('https://identity.api.rackspacecloud.com*', auth_url):
if 'tokens' not in auth_url:
auth_url = auth_url.rstrip('/') + '/tokens'
creds = {
'auth': {
'passwordCredentials': {
'username': user,
'password': password,
},
'tenantName': tenant_name,
}
}
headers = {'Content-Type': 'application/json'}
body = json.dumps(creds)
#self._log_request('POST', auth_url, headers, body)
resp, resp_body = self.http_obj.request(auth_url, 'POST',
headers=headers, body=body)
#self._log_response(resp, resp_body)
if resp.status == 200:
try:
auth_data = json.loads(resp_body)['access']
except Exception as e:
print("Failed to obtain token for user: %s" % e)
raise
mgmt_url = None
#print "Service catalog: %s" % auth_data['serviceCatalog']
for ep in auth_data['serviceCatalog']:
if ep["type"] == service:
mgmt_url = ep['endpoints']
if mgmt_url is None:
raise exceptions.EndpointNotFound(service)
return mgmt_url
elif resp.status == 401:
raise exceptions.AuthenticationFailure(user=user,
password=password,tenant=tenant_name)
#inactive node endpoints
elif re.search('https://inactive*', auth_url):
mgmt_url = []
url = auth_url.split(",")
num_elements = len(url)
for num in range(0, num_elements):
#get region from the url
reg = url[num].split(".")
mgmt_url.append({'region': reg[1], 'publicURL': url[num] + "/" + tenant_name})
return mgmt_url
#prod endpoints to run without hitting the catalog
elif re.search('https://[a-z]{3}.orchestration.api.rackspacecloud.com*', auth_url):
mgmt_url = []
url = auth_url.split(",")
num_elements = len(url)
for num in range(0, num_elements):
#get region from the url
reg = url[num].split(".")
mgmt_url.append({'region': reg[0][-3:], 'publicURL': url[num] + "/" + tenant_name})
return mgmt_url
#local endpoints
else:
url = auth_url.split(",")
num_elements = len(url)
for num in range(0, num_elements):
mgmt_url = [{'region': region, 'publicURL': url[num] + "/" + tenant_name}]
return mgmt_url
def clear_auth(self):
"""
Can be called to clear the token and base_url so that the next request
will fetch a new token and base_url.
"""
self.token = None
self.base_url = None
    def get_auth(self, region=None):
        """Returns the token of the current request, fetching one first if
        none is set.
        """
        if not self.token:
            self._set_auth(region)
        return self.token
def basic_auth(self, user, password, auth_url):
"""
Provides authentication for the target API.
"""
params = {}
params['headers'] = {'User-Agent': 'Test-Client', 'X-Auth-User': user,
'X-Auth-Key': password}
resp, body = self.http_obj.request(auth_url, 'GET', **params)
try:
return resp['x-auth-token'], resp['x-server-management-url']
except Exception:
raise
def keystone_auth(self, user, password, auth_url, service, tenant_name,
region):
"""
Provides authentication via Keystone using v2 identity API.
"""
print "region in keystone_auth is: %s" % region
# Normalize URI to ensure /tokens is in it.
if 'tokens' not in auth_url:
auth_url = auth_url.rstrip('/') + '/tokens'
creds = {
'auth': {
'passwordCredentials': {
'username': user,
'password': password,
},
'tenantName': tenant_name,
}
}
headers = {'Content-Type': 'application/json'}
body = json.dumps(creds)
#self._log_request('POST', auth_url, headers, body)
resp, resp_body = self.http_obj.request(auth_url, 'POST',
headers=headers, body=body)
#self._log_response(resp, resp_body)
#print resp.status
if resp.status == 200:
try:
auth_data = json.loads(resp_body)['access']
token = auth_data['token']['id']
except Exception as e:
print("Failed to obtain token for user: %s" % e)
raise
mgmt_url = None
for ep in auth_data['serviceCatalog']:
if ep["type"] == service:
mgmt_url = ep['endpoints']
if mgmt_url is None:
raise exceptions.EndpointNotFound(service)
return token, mgmt_url
elif resp.status == 401:
raise exceptions.AuthenticationFailure(user=user,
password=password,
tenant=tenant_name)
raise exceptions.IdentityError('Unexpected status code {0}'.format(
resp.status))
def identity_auth_v3(self, user, password, auth_url, service,
project_name, domain_id='default', region='default'):
"""Provides authentication using Identity API v3."""
req_url = auth_url.rstrip('/') + '/auth/tokens'
creds = {
"auth": {
"identity": {
"methods": ["password"],
"password": {
"user": {
"name": user, "password": password,
"domain": {"id": domain_id}
}
}
},
"scope": {
"project": {
"domain": {"id": domain_id},
"name": project_name
}
}
}
}
headers = {'Content-Type': 'application/json'}
body = json.dumps(creds)
resp, body = self.http_obj.request(req_url, 'POST',
headers=headers, body=body)
if resp.status == 201:
try:
token = resp['x-subject-token']
except Exception:
self.LOG.exception("Failed to obtain token using V3"
" authentication (auth URL is '%s')" %
req_url)
raise
catalog = json.loads(body)['token']['catalog']
mgmt_url = None
for service_info in catalog:
if service_info['type'] != service:
continue # this isn't the entry for us.
endpoints = service_info['endpoints']
# Look for an endpoint in the region if configured.
if service in self.region:
region = self.region[service]
for ep in endpoints:
if ep['region'] != region:
continue
mgmt_url = ep['url']
# FIXME(blk-u): this isn't handling endpoint type
# (public, internal, admin).
break
if not mgmt_url:
# Didn't find endpoint for region, use the first.
ep = endpoints[0]
mgmt_url = ep['url']
# FIXME(blk-u): this isn't handling endpoint type
# (public, internal, admin).
break
return token, mgmt_url
elif resp.status == 401:
raise exceptions.AuthenticationFailure(user=user,
password=password)
else:
self.LOG.error("Failed to obtain token using V3 authentication"
" (auth URL is '%s'), the response status is %s" %
(req_url, resp.status))
raise exceptions.AuthenticationFailure(user=user,
password=password)
    def expected_success(self, expected_code, read_code):
        assert_msg = ("This function may only be used for HTTP success status "
                      "codes explicitly defined in RFC 2616. {0}"
                      " is not a defined Success Code!").format(expected_code)
assert expected_code in HTTP_SUCCESS, assert_msg
# NOTE(afazekas): the http status code above 400 is processed by
# the _error_checker method
if read_code < 400 and read_code != expected_code:
pattern = """Unexpected http success status code {0},
The expected status code is {1}"""
details = pattern.format(read_code, expected_code)
raise exceptions.InvalidHttpSuccessCode(details)
def post(self, url, region, body, headers):
return self.request('POST', url, region, headers, body)
def get(self, url, region, headers=None):
return self.request('GET', url, region, headers)
def delete(self, url, region, headers=None):
return self.request('DELETE', url, region, headers)
    def patch(self, url, region, body, headers):
        return self.request('PATCH', url, region, headers, body)
def put(self, url, region, body, headers):
return self.request('PUT', url, region, headers, body)
    def head(self, url, region, headers=None):
        return self.request('HEAD', url, region, headers)
    def copy(self, url, region, headers=None):
        return self.request('COPY', url, region, headers)
    def get_versions(self, region=None):
        resp, body = self.get('', region)
        body = self._parse_resp(body)
        body = body['versions']
        versions = map(lambda x: x['id'], body)
        return resp, versions
def _log_request(self, method, req_url, headers, body):
self.LOG.info('Request: ' + method + ' ' + req_url)
if headers:
print_headers = headers
if 'X-Auth-Token' in headers and headers['X-Auth-Token']:
token = headers['X-Auth-Token']
if len(token) > 64 and TOKEN_CHARS_RE.match(token):
print_headers = headers.copy()
print_headers['X-Auth-Token'] = "<Token omitted>"
self.LOG.debug('Request Headers: ' + str(print_headers))
if body:
str_body = str(body)
length = len(str_body)
self.LOG.debug('Request Body: ' + str_body[:2048])
if length >= 2048:
self.LOG.debug("Large body (%d) md5 summary: %s", length,
hashlib.md5(str_body).hexdigest())
def _log_response(self, resp, resp_body):
status = resp['status']
self.LOG.info("Response Status: " + status)
headers = resp.copy()
del headers['status']
if headers.get('x-compute-request-id'):
self.LOG.info("Nova request id: %s" %
headers.pop('x-compute-request-id'))
elif headers.get('x-openstack-request-id'):
self.LOG.info("Glance request id %s" %
headers.pop('x-openstack-request-id'))
if len(headers):
self.LOG.debug('Response Headers: ' + str(headers))
if resp_body:
str_body = str(resp_body)
length = len(str_body)
self.LOG.debug('Response Body: ' + str_body[:2048])
if length >= 2048:
self.LOG.debug("Large body (%d) md5 summary: %s", length,
hashlib.md5(str_body).hexdigest())
def _parse_resp(self, body):
return json.loads(body)
def response_checker(self, method, url, headers, body, resp, resp_body):
if (resp.status in set((204, 205, 304)) or resp.status < 200 or
method.upper() == 'HEAD') and resp_body:
raise exceptions.ResponseWithNonEmptyBody(status=resp.status)
# NOTE(afazekas):
# If the HTTP Status Code is 205
# 'The response MUST NOT include an entity.'
# A HTTP entity has an entity-body and an 'entity-header'.
# In the HTTP response specification (Section 6) the 'entity-header'
# 'generic-header' and 'response-header' are in OR relation.
# All headers not in the above two group are considered as entity
# header in every interpretation.
if (resp.status == 205 and
0 != len(set(resp.keys()) - set(('status',)) -
self.response_header_lc - self.general_header_lc)):
raise exceptions.ResponseWithEntity()
# NOTE(afazekas)
# Now the swift sometimes (delete not empty container)
# returns with non json error response, we can create new rest class
# for swift.
# Usually RFC2616 says error responses SHOULD contain an explanation.
# The warning is normal for SHOULD/SHOULD NOT case
# Likely it will cause an error
if not resp_body and resp.status >= 400:
self.LOG.warning("status >= 400 response with empty body")
def _request(self, method, url, region,
headers=None, body=None):
"""A simple HTTP request interface."""
if region is not None:
for ep in self.all_urls:
if 'region' in ep:
if region.lower() == ep['region'].lower():
self.base_url = ep['publicURL']
break
else:
self.base_url = ep['publicURL']
if self.base_url is None:
raise exceptions.EndpointNotFound()
req_url = urlparse(url)
if req_url.scheme in ['http', 'https']:
req_url = url
else:
req_url = "%s/%s" % (self.base_url, url)
#print "Request url: %s" % req_url
resp, resp_body = self.http_obj.request(req_url, method,
headers=headers, body=body)
#uncomment below to see responses
#self._log_response(resp, resp_body)
self.response_checker(method, url, headers, body, resp, resp_body)
return resp, resp_body
def request(self, method, url, region,
headers=None, body=None):
retry = 0
if (self.token is None) or (self.base_url is None):
self._set_auth(region)
if headers is None:
headers = {}
headers['X-Auth-Token'] = self.token
resp, resp_body = self._request(method, url, region,
headers=headers, body=body)
while (resp.status == 413 and
'retry-after' in resp and
not self.is_absolute_limit(
resp, self._parse_resp(resp_body)) and
retry < MAX_RECURSION_DEPTH):
retry += 1
# TODO: Use a timezone-aware time parser (python-dateutil?)
delay_until = time.mktime(time.strptime(resp['retry-after'],
'%a, %d %b %Y %H:%M:%S %Z'))
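            # time.strptime/mktime ignore the 'GMT' zone and interpret the
            # Retry-After timestamp as local time; the hard-coded 18000 s (5 h)
            # below looks like a GMT-to-CDT correction, pending the
            # timezone-aware parser mentioned in the TODO above.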
gmt_cdt_diff = 18000
seconds_delay = (delay_until - gmt_cdt_diff) - time.time()
time.sleep(seconds_delay)
            resp, resp_body = self._request(method, url, region,
                                            headers=headers, body=body)
# self._error_checker(method, url, headers, body,
# resp, resp_body)
return resp, resp_body
def _error_checker(self, method, url,
headers, body, resp, resp_body):
# NOTE(mtreinish): Check for httplib response from glance_http. The
# object can't be used here because importing httplib breaks httplib2.
# If another object from a class not imported were passed here as
# resp this could possibly fail
if str(type(resp)) == "<type 'instance'>":
ctype = resp.getheader('content-type')
else:
try:
ctype = resp['content-type']
# NOTE(mtreinish): Keystone delete user responses doesn't have a
# content-type header. (They don't have a body) So just pretend it
# is set.
except KeyError:
ctype = 'application/json'
# It is not an error response
if resp.status < 400:
return
JSON_ENC = ['application/json; charset=UTF-8', 'application/json',
'application/json; charset=utf-8']
# NOTE(mtreinish): This is for compatibility with Glance and swift
# APIs. These are the return content types that Glance api v1
# (and occasionally swift) are using.
TXT_ENC = ['text/plain', 'text/plain; charset=UTF-8',
'text/html; charset=UTF-8', 'text/plain; charset=utf-8']
XML_ENC = ['application/xml', 'application/xml; charset=UTF-8']
if ctype in JSON_ENC or ctype in XML_ENC:
parse_resp = True
elif ctype in TXT_ENC:
parse_resp = False
else:
raise exceptions.RestClientException(str(resp.status))
if resp.status == 401 or resp.status == 403:
raise exceptions.Unauthorized()
if resp.status == 404:
raise exceptions.NotFound(resp_body)
if resp.status == 400:
if parse_resp:
resp_body = self._parse_resp(resp_body)
raise exceptions.BadRequest(resp_body)
if resp.status == 409:
if parse_resp:
resp_body = self._parse_resp(resp_body)
raise exceptions.Conflict(resp_body)
if resp.status == 413:
if parse_resp:
resp_body = self._parse_resp(resp_body)
if self.is_absolute_limit(resp, resp_body):
raise exceptions.OverLimit(resp_body)
else:
raise exceptions.RateLimitExceeded(resp_body)
if resp.status == 422:
if parse_resp:
resp_body = self._parse_resp(resp_body)
raise exceptions.UnprocessableEntity(resp_body)
if resp.status in (500, 501):
message = resp_body
if parse_resp:
try:
resp_body = self._parse_resp(resp_body)
except ValueError:
# If response body is a non-json string message.
# Use resp_body as is and raise InvalidResponseBody
# exception.
raise exceptions.InvalidHTTPResponseBody(message)
else:
# I'm seeing both computeFault
# and cloudServersFault come back.
# Will file a bug to fix, but leave as is for now.
if 'cloudServersFault' in resp_body:
message = resp_body['cloudServersFault']['message']
elif 'computeFault' in resp_body:
message = resp_body['computeFault']['message']
elif 'error' in resp_body: # Keystone errors
message = resp_body['error']['message']
raise exceptions.IdentityError(message)
elif 'message' in resp_body:
message = resp_body['message']
raise exceptions.ServerFault(message)
if resp.status >= 400:
if parse_resp:
resp_body = self._parse_resp(resp_body)
raise exceptions.RestClientException(str(resp.status))
def is_absolute_limit(self, resp, resp_body):
if (not isinstance(resp_body, collections.Mapping) or
'retry-after' not in resp):
return True
over_limit = resp_body.get('overLimit', None)
if not over_limit:
return True
return 'exceed' in over_limit.get('message', 'blabla')
def wait_for_resource_deletion(self, id):
"""Waits for a resource to be deleted."""
start_time = int(time.time())
while True:
if self.is_resource_deleted(id):
return
if int(time.time()) - start_time >= self.build_timeout:
raise exceptions.TimeoutException
time.sleep(self.build_interval)
def is_resource_deleted(self, id):
"""
Subclasses override with specific deletion detection.
"""
message = ('"%s" does not implement is_resource_deleted'
% self.__class__.__name__)
raise NotImplementedError(message)
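# Illustrative usage sketch (not part of the original module). The concrete
# subclass, credentials, endpoint names and region below are hypothetical;
# construction details depend on the class's __init__ defined above.
#
#   client = SomeServiceClient(...)               # a subclass of RestClient
#   resp, body = client.get('stacks', 'DFW')      # region-aware GET
#   resp, body = client.post('stacks', 'DFW',
#                            body=json.dumps({'stack_name': 'demo'}),
#                            headers={'Content-Type': 'application/json'})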
class RestClientXML(RestClient):
TYPE = "xml"
def _parse_resp(self, body):
return xml_to_json(etree.fromstring(body))
def is_absolute_limit(self, resp, resp_body):
if (not isinstance(resp_body, collections.Mapping) or
'retry-after' not in resp):
return True
return 'exceed' in resp_body.get('message', 'blabla')
|
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_log import log as logging
from tempest_lib.services.identity.v2.token_client import TokenClient
from tempest_lib.services.identity.v3.token_client import V3TokenClient
from tempest.common import cred_provider
from tempest.common import negative_rest_client
from tempest import config
from tempest import exceptions
from tempest import manager
from tempest.services.baremetal.v1.json.baremetal_client import \
BaremetalClient
from tempest.services import botoclients
from tempest.services.compute.json.agents_client import \
AgentsClient
from tempest.services.compute.json.aggregates_client import \
AggregatesClient
from tempest.services.compute.json.availability_zone_client import \
AvailabilityZoneClient
from tempest.services.compute.json.baremetal_nodes_client import \
BaremetalNodesClient
from tempest.services.compute.json.certificates_client import \
CertificatesClient
from tempest.services.compute.json.extensions_client import \
ExtensionsClient
from tempest.services.compute.json.fixed_ips_client import FixedIPsClient
from tempest.services.compute.json.flavors_client import FlavorsClient
from tempest.services.compute.json.floating_ip_pools_client import \
FloatingIPPoolsClient
from tempest.services.compute.json.floating_ips_bulk_client import \
FloatingIPsBulkClient
from tempest.services.compute.json.floating_ips_client import \
FloatingIPsClient
from tempest.services.compute.json.hosts_client import HostsClient
from tempest.services.compute.json.hypervisor_client import \
HypervisorClient
from tempest.services.compute.json.images_client import ImagesClient
from tempest.services.compute.json.instance_usage_audit_log_client import \
InstanceUsagesAuditLogClient
from tempest.services.compute.json.interfaces_client import \
InterfacesClient
from tempest.services.compute.json.keypairs_client import KeyPairsClient
from tempest.services.compute.json.limits_client import LimitsClient
from tempest.services.compute.json.migrations_client import \
MigrationsClient
from tempest.services.compute.json.networks_client import NetworksClient
from tempest.services.compute.json.quota_classes_client import \
QuotaClassesClient
from tempest.services.compute.json.quotas_client import QuotasClient
from tempest.services.compute.json.security_group_default_rules_client import \
SecurityGroupDefaultRulesClient
from tempest.services.compute.json.security_group_rules_client import \
SecurityGroupRulesClient
from tempest.services.compute.json.security_groups_client import \
SecurityGroupsClient
from tempest.services.compute.json.server_groups_client import \
ServerGroupsClient
from tempest.services.compute.json.servers_client import ServersClient
from tempest.services.compute.json.services_client import ServicesClient
from tempest.services.compute.json.snapshots_extensions_client import \
SnapshotsExtensionsClient
from tempest.services.compute.json.tenant_networks_client import \
TenantNetworksClient
from tempest.services.compute.json.tenant_usages_client import \
TenantUsagesClient
from tempest.services.compute.json.versions_client import VersionsClient
from tempest.services.compute.json.volumes_extensions_client import \
VolumesExtensionsClient
from tempest.services.data_processing.v1_1.data_processing_client import \
DataProcessingClient
from tempest.services.database.json.flavors_client import \
DatabaseFlavorsClient
from tempest.services.database.json.limits_client import \
DatabaseLimitsClient
from tempest.services.database.json.versions_client import \
DatabaseVersionsClient
from tempest.services.identity.v2.json.identity_client import \
IdentityClient
from tempest.services.identity.v3.json.credentials_client import \
CredentialsClient
from tempest.services.identity.v3.json.endpoints_client import \
EndPointClient
from tempest.services.identity.v3.json.identity_client import \
IdentityV3Client
from tempest.services.identity.v3.json.policy_client import PolicyClient
from tempest.services.identity.v3.json.region_client import RegionClient
from tempest.services.identity.v3.json.service_client import \
ServiceClient
from tempest.services.image.v1.json.image_client import ImageClient
from tempest.services.image.v2.json.image_client import ImageClientV2
from tempest.services.messaging.json.messaging_client import \
MessagingClient
from tempest.services.network.json.network_client import NetworkClient
from tempest.services.object_storage.account_client import AccountClient
from tempest.services.object_storage.container_client import ContainerClient
from tempest.services.object_storage.object_client import ObjectClient
from tempest.services.orchestration.json.orchestration_client import \
OrchestrationClient
from tempest.services.telemetry.json.telemetry_client import \
TelemetryClient
from tempest.services.volume.json.admin.volume_hosts_client import \
VolumeHostsClient
from tempest.services.volume.json.admin.volume_quotas_client import \
VolumeQuotasClient
from tempest.services.volume.json.admin.volume_services_client import \
VolumesServicesClient
from tempest.services.volume.json.admin.volume_types_client import \
VolumeTypesClient
from tempest.services.volume.json.availability_zone_client import \
VolumeAvailabilityZoneClient
from tempest.services.volume.json.backups_client import BackupsClient
from tempest.services.volume.json.extensions_client import \
ExtensionsClient as VolumeExtensionClient
from tempest.services.volume.json.qos_client import QosSpecsClient
from tempest.services.volume.json.snapshots_client import SnapshotsClient
from tempest.services.volume.json.volumes_client import VolumesClient
from tempest.services.volume.v2.json.admin.volume_hosts_client import \
VolumeHostsV2Client
from tempest.services.volume.v2.json.admin.volume_quotas_client import \
VolumeQuotasV2Client
from tempest.services.volume.v2.json.admin.volume_services_client import \
VolumesServicesV2Client
from tempest.services.volume.v2.json.admin.volume_types_client import \
VolumeTypesV2Client
from tempest.services.volume.v2.json.availability_zone_client import \
VolumeV2AvailabilityZoneClient
from tempest.services.volume.v2.json.backups_client import BackupsClientV2
from tempest.services.volume.v2.json.extensions_client import \
ExtensionsV2Client as VolumeV2ExtensionClient
from tempest.services.volume.v2.json.qos_client import QosSpecsV2Client
from tempest.services.volume.v2.json.snapshots_client import \
SnapshotsV2Client
from tempest.services.volume.v2.json.volumes_client import VolumesV2Client
CONF = config.CONF
LOG = logging.getLogger(__name__)
class Manager(manager.Manager):
"""
Top level manager for OpenStack tempest clients
"""
default_params = {
'disable_ssl_certificate_validation':
CONF.identity.disable_ssl_certificate_validation,
'ca_certs': CONF.identity.ca_certificates_file,
'trace_requests': CONF.debug.trace_requests
}
# NOTE: Tempest uses timeout values of compute API if project specific
# timeout values don't exist.
default_params_with_timeout_values = {
'build_interval': CONF.compute.build_interval,
'build_timeout': CONF.compute.build_timeout
}
default_params_with_timeout_values.update(default_params)
def __init__(self, credentials=None, service=None):
super(Manager, self).__init__(credentials=credentials)
self._set_compute_clients()
self._set_database_clients()
self._set_identity_clients()
self._set_volume_clients()
self._set_object_storage_clients()
self.baremetal_client = BaremetalClient(
self.auth_provider,
CONF.baremetal.catalog_type,
CONF.identity.region,
endpoint_type=CONF.baremetal.endpoint_type,
**self.default_params_with_timeout_values)
self.network_client = NetworkClient(
self.auth_provider,
CONF.network.catalog_type,
CONF.network.region or CONF.identity.region,
endpoint_type=CONF.network.endpoint_type,
build_interval=CONF.network.build_interval,
build_timeout=CONF.network.build_timeout,
**self.default_params)
self.messaging_client = MessagingClient(
self.auth_provider,
CONF.messaging.catalog_type,
CONF.identity.region,
**self.default_params_with_timeout_values)
if CONF.service_available.ceilometer:
self.telemetry_client = TelemetryClient(
self.auth_provider,
CONF.telemetry.catalog_type,
CONF.identity.region,
endpoint_type=CONF.telemetry.endpoint_type,
**self.default_params_with_timeout_values)
if CONF.service_available.glance:
self.image_client = ImageClient(
self.auth_provider,
CONF.image.catalog_type,
CONF.image.region or CONF.identity.region,
endpoint_type=CONF.image.endpoint_type,
build_interval=CONF.image.build_interval,
build_timeout=CONF.image.build_timeout,
**self.default_params)
self.image_client_v2 = ImageClientV2(
self.auth_provider,
CONF.image.catalog_type,
CONF.image.region or CONF.identity.region,
endpoint_type=CONF.image.endpoint_type,
build_interval=CONF.image.build_interval,
build_timeout=CONF.image.build_timeout,
**self.default_params)
self.orchestration_client = OrchestrationClient(
self.auth_provider,
CONF.orchestration.catalog_type,
CONF.orchestration.region or CONF.identity.region,
endpoint_type=CONF.orchestration.endpoint_type,
build_interval=CONF.orchestration.build_interval,
build_timeout=CONF.orchestration.build_timeout,
**self.default_params)
self.data_processing_client = DataProcessingClient(
self.auth_provider,
CONF.data_processing.catalog_type,
CONF.identity.region,
endpoint_type=CONF.data_processing.endpoint_type,
**self.default_params_with_timeout_values)
self.negative_client = negative_rest_client.NegativeRestClient(
self.auth_provider, service, **self.default_params)
# Generating EC2 credentials in tempest is only supported
# with identity v2
if CONF.identity_feature_enabled.api_v2 and \
CONF.identity.auth_version == 'v2':
# EC2 and S3 clients, if used, will check configured AWS
# credentials and generate new ones if needed
self.ec2api_client = botoclients.APIClientEC2(self.identity_client)
self.s3_client = botoclients.ObjectClientS3(self.identity_client)
def _set_compute_clients(self):
params = {
'service': CONF.compute.catalog_type,
'region': CONF.compute.region or CONF.identity.region,
'endpoint_type': CONF.compute.endpoint_type,
'build_interval': CONF.compute.build_interval,
'build_timeout': CONF.compute.build_timeout
}
params.update(self.default_params)
self.agents_client = AgentsClient(self.auth_provider, **params)
self.networks_client = NetworksClient(self.auth_provider, **params)
self.migrations_client = MigrationsClient(self.auth_provider,
**params)
self.security_group_default_rules_client = (
SecurityGroupDefaultRulesClient(self.auth_provider, **params))
self.certificates_client = CertificatesClient(self.auth_provider,
**params)
self.servers_client = ServersClient(
self.auth_provider,
enable_instance_password=CONF.compute_feature_enabled
.enable_instance_password,
**params)
self.server_groups_client = ServerGroupsClient(
self.auth_provider, **params)
self.limits_client = LimitsClient(self.auth_provider, **params)
self.images_client = ImagesClient(self.auth_provider, **params)
self.keypairs_client = KeyPairsClient(self.auth_provider, **params)
self.quotas_client = QuotasClient(self.auth_provider, **params)
self.quota_classes_client = QuotaClassesClient(self.auth_provider,
**params)
self.flavors_client = FlavorsClient(self.auth_provider, **params)
self.extensions_client = ExtensionsClient(self.auth_provider,
**params)
self.floating_ip_pools_client = FloatingIPPoolsClient(
self.auth_provider, **params)
self.floating_ips_bulk_client = FloatingIPsBulkClient(
self.auth_provider, **params)
self.floating_ips_client = FloatingIPsClient(self.auth_provider,
**params)
self.security_group_rules_client = SecurityGroupRulesClient(
self.auth_provider, **params)
self.security_groups_client = SecurityGroupsClient(
self.auth_provider, **params)
self.interfaces_client = InterfacesClient(self.auth_provider,
**params)
self.fixed_ips_client = FixedIPsClient(self.auth_provider,
**params)
self.availability_zone_client = AvailabilityZoneClient(
self.auth_provider, **params)
self.aggregates_client = AggregatesClient(self.auth_provider,
**params)
self.services_client = ServicesClient(self.auth_provider, **params)
self.tenant_usages_client = TenantUsagesClient(self.auth_provider,
**params)
self.hosts_client = HostsClient(self.auth_provider, **params)
self.hypervisor_client = HypervisorClient(self.auth_provider,
**params)
self.instance_usages_audit_log_client = \
InstanceUsagesAuditLogClient(self.auth_provider, **params)
self.tenant_networks_client = \
TenantNetworksClient(self.auth_provider, **params)
self.baremetal_nodes_client = BaremetalNodesClient(
self.auth_provider, **params)
# NOTE: The following client needs special timeout values because
# the API is a proxy for the other component.
params_volume = copy.deepcopy(params)
params_volume.update({
'build_interval': CONF.volume.build_interval,
'build_timeout': CONF.volume.build_timeout
})
self.volumes_extensions_client = VolumesExtensionsClient(
self.auth_provider, **params_volume)
self.compute_versions_client = VersionsClient(self.auth_provider,
**params_volume)
self.snapshots_extensions_client = SnapshotsExtensionsClient(
self.auth_provider, **params_volume)
def _set_database_clients(self):
self.database_flavors_client = DatabaseFlavorsClient(
self.auth_provider,
CONF.database.catalog_type,
CONF.identity.region,
**self.default_params_with_timeout_values)
self.database_limits_client = DatabaseLimitsClient(
self.auth_provider,
CONF.database.catalog_type,
CONF.identity.region,
**self.default_params_with_timeout_values)
self.database_versions_client = DatabaseVersionsClient(
self.auth_provider,
CONF.database.catalog_type,
CONF.identity.region,
**self.default_params_with_timeout_values)
def _set_identity_clients(self):
params = {
'service': CONF.identity.catalog_type,
'region': CONF.identity.region
}
params.update(self.default_params_with_timeout_values)
params_v2_admin = params.copy()
params_v2_admin['endpoint_type'] = CONF.identity.v2_admin_endpoint_type
# Client uses admin endpoint type of Keystone API v2
self.identity_client = IdentityClient(self.auth_provider,
**params_v2_admin)
params_v2_public = params.copy()
params_v2_public['endpoint_type'] = (
CONF.identity.v2_public_endpoint_type)
# Client uses public endpoint type of Keystone API v2
self.identity_public_client = IdentityClient(self.auth_provider,
**params_v2_public)
params_v3 = params.copy()
params_v3['endpoint_type'] = CONF.identity.v3_endpoint_type
# Client uses the endpoint type of Keystone API v3
self.identity_v3_client = IdentityV3Client(self.auth_provider,
**params_v3)
self.endpoints_client = EndPointClient(self.auth_provider,
**params)
self.service_client = ServiceClient(self.auth_provider, **params)
self.policy_client = PolicyClient(self.auth_provider, **params)
self.region_client = RegionClient(self.auth_provider, **params)
self.credentials_client = CredentialsClient(self.auth_provider,
**params)
# Token clients do not use the catalog. They only need default_params.
# They read auth_url, so they should only be set if the corresponding
# API version is marked as enabled
if CONF.identity_feature_enabled.api_v2:
if CONF.identity.uri:
self.token_client = TokenClient(
CONF.identity.uri, **self.default_params)
else:
msg = 'Identity v2 API enabled, but no identity.uri set'
raise exceptions.InvalidConfiguration(msg)
if CONF.identity_feature_enabled.api_v3:
if CONF.identity.uri_v3:
self.token_v3_client = V3TokenClient(
CONF.identity.uri_v3, **self.default_params)
else:
msg = 'Identity v3 API enabled, but no identity.uri_v3 set'
raise exceptions.InvalidConfiguration(msg)
def _set_volume_clients(self):
params = {
'service': CONF.volume.catalog_type,
'region': CONF.volume.region or CONF.identity.region,
'endpoint_type': CONF.volume.endpoint_type,
'build_interval': CONF.volume.build_interval,
'build_timeout': CONF.volume.build_timeout
}
params.update(self.default_params)
self.volume_qos_client = QosSpecsClient(self.auth_provider,
**params)
self.volume_qos_v2_client = QosSpecsV2Client(
self.auth_provider, **params)
self.volume_services_v2_client = VolumesServicesV2Client(
self.auth_provider, **params)
self.backups_client = BackupsClient(self.auth_provider, **params)
self.backups_v2_client = BackupsClientV2(self.auth_provider,
**params)
self.snapshots_client = SnapshotsClient(self.auth_provider,
**params)
self.snapshots_v2_client = SnapshotsV2Client(self.auth_provider,
**params)
self.volumes_client = VolumesClient(
self.auth_provider, default_volume_size=CONF.volume.volume_size,
**params)
self.volumes_v2_client = VolumesV2Client(
self.auth_provider, default_volume_size=CONF.volume.volume_size,
**params)
self.volume_types_client = VolumeTypesClient(self.auth_provider,
**params)
self.volume_services_client = VolumesServicesClient(
self.auth_provider, **params)
self.volume_hosts_client = VolumeHostsClient(self.auth_provider,
**params)
self.volume_hosts_v2_client = VolumeHostsV2Client(
self.auth_provider, **params)
self.volume_quotas_client = VolumeQuotasClient(self.auth_provider,
**params)
self.volume_quotas_v2_client = VolumeQuotasV2Client(self.auth_provider,
**params)
self.volumes_extension_client = VolumeExtensionClient(
self.auth_provider, **params)
self.volumes_v2_extension_client = VolumeV2ExtensionClient(
self.auth_provider, **params)
self.volume_availability_zone_client = \
VolumeAvailabilityZoneClient(self.auth_provider, **params)
self.volume_v2_availability_zone_client = \
VolumeV2AvailabilityZoneClient(self.auth_provider, **params)
self.volume_types_v2_client = VolumeTypesV2Client(
self.auth_provider, **params)
def _set_object_storage_clients(self):
params = {
'service': CONF.object_storage.catalog_type,
'region': CONF.object_storage.region or CONF.identity.region,
'endpoint_type': CONF.object_storage.endpoint_type
}
params.update(self.default_params_with_timeout_values)
self.account_client = AccountClient(self.auth_provider, **params)
self.container_client = ContainerClient(self.auth_provider, **params)
self.object_client = ObjectClient(self.auth_provider, **params)
class AdminManager(Manager):
"""
Manager object that uses the admin credentials for its
managed client objects
"""
def __init__(self, service=None):
super(AdminManager, self).__init__(
credentials=cred_provider.get_configured_credentials(
'identity_admin'),
service=service)
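# Illustrative usage sketch (not part of the original module); it assumes a
# valid tempest configuration with admin credentials:
#
#   clients = AdminManager()
#   servers = clients.servers_client.list_servers()
#   volumes = clients.volumes_client.list_volumes()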
|
|
import sys
import re
from operator import itemgetter
from random import choice
from random import randint
from HTMLParser import HTMLParser # html.parser in Python 3
from UserString import MutableString
# constants
VERSION = 1
# punch line defaults
punch_default_speed = 85
punch_default_shape = 100
punch_default_volume = 130
class Comedy:
'''A Comedy Class'''
def __init__(self, comedy):
# check the version
_version = comedy['version']
self._check_version(_version)
# parse main voice
_main_voice = comedy['main_voice']
self.tts_main_voice = _main_voice
# parse comedy_jokes
_jokes = comedy['jokes']
self.comedy_jokes = self._parse_jokes(_jokes)
# build comedy_joke_sequence
_comedy_joke_sequence = comedy['comedy_joke_sequence']
sequence = self._build_joke_sequence(_comedy_joke_sequence)
self.comedy_joke_sequence = sequence
# build the responses
_responses = comedy['responses']
self.responses = self.build_responses(_responses)
def __iter__(self):
'''Iteration support'''
return self
def _parse_jokes(self, jokes):
_jokes = {}
for item in jokes:
# find key and item
_joke = Joke(item['lines'])
_id = item['id']
# add them to the dictionary
_jokes[_id] = _joke
return _jokes
def _check_version(self, version):
if (VERSION != version):
sys.exit('Input file version mismatch')
def _get_unused_jokes(self, sequence):
_unused_jokes = []
for key in self.comedy_jokes.keys():
if (key not in sequence):
_unused_jokes.append(self.comedy_jokes[key])
return _unused_jokes
def _build_joke_sequence(self, comedy_joke_sequence):
_joke_sequence = []
# check if there are available parts for this sequence
_comedy_parts_length = len(self.comedy_jokes)
_comedy_joke_sequence_length = len(comedy_joke_sequence)
if (_comedy_parts_length < _comedy_joke_sequence_length):
            sys.exit('Not enough jokes in the input file. (' +
str(_comedy_joke_sequence_length) +
' required)')
        # get jokes that are not directly referenced in the sequence
_unused_jokes = self._get_unused_jokes(comedy_joke_sequence)
# build the joke sequence
for item in comedy_joke_sequence:
if (item != '*'):
# check if it exists
if item in self.comedy_jokes:
comedy = self.comedy_jokes[item]
                else:
                    print "WARNING: Joke '" + str(item) + "' doesn't exist"
                    # skip unknown joke ids instead of using an undefined value
                    continue
else:
# that's a random joke
comedy = choice(_unused_jokes)
_unused_jokes.remove(comedy)
_joke_sequence.append(comedy)
return _joke_sequence
def build_responses(self, responses):
_responces = Responces()
# positive
positive = responses['positive']
for response in positive:
# get items
trigger = response['trigger']
item = Line(response['response'])
_responces.addResponse('positive', trigger, item)
# negative
negative = responses['negative']
for response in negative:
# get items
trigger = response['trigger']
item = Line(response['response'])
_responces.addResponse('negative', trigger, item)
# return the object
return _responces
    def say(self):
        # NOTE: comedy_intro and comedy_outro are never set in __init__; they
        # must be attached to the instance before say(), getText() or length()
        # are called.
        # play the intro
        self.comedy_intro.say()
# play the sequence
for item in self.comedy_joke_sequence:
item.say()
# play the outro
self.comedy_outro.say()
def getText(self):
# use MutableString for efficiency
comedy_text = MutableString()
# intro
comedy_text += self.comedy_intro.getText()
# sequence
for item in self.comedy_joke_sequence:
comedy_text += item.getText()
# outro
comedy_text += self.comedy_outro.getText()
return str(comedy_text)
def length(self):
length = self.comedy_intro.length()
for item in self.comedy_joke_sequence:
length = length + item.length()
length = length + self.comedy_outro.length()
return length
def nextJoke(self):
'''Get the next joke with the use of generators'''
for joke in self.comedy_joke_sequence:
yield joke
class Joke:
'''A Joke Class'''
def __init__(self, lines):
# parse the lines of that joke
self.lines = self._parse_lines(lines)
def _parse_lines(self, lines):
_lines = []
# iterate
for line in lines:
_line = Line(line)
# add it to the list
_lines.append(_line)
return _lines
def say(self):
for line in self.lines:
line.say()
def getText(self):
# use MutableString for efficiency
comedy_text = MutableString()
for line in self.lines:
comedy_text += line.getText() + ' '
return str(comedy_text)
def nextLine(self):
'''Get the next line with the use of generators'''
for line in self.lines:
yield line
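    def length(self):
        # Added sketch: Comedy.length() calls item.length() on each joke, but
        # the original Joke class does not define it; delegate to the lines,
        # mirroring the other classes (assumes Line defines length(), below).
        length = 0
        for line in self.lines:
            length = length + line.length()
        return length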
class Line:
'''A Line Class'''
# use the NLTK library and load the english language tokenizer
import nltk.data
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
def __init__(self, line):
# break it into sentences
self.sentences = self._parseLine(line)
def nextSentence(self):
'''Get the next sentence with the use of generators'''
for sentence in self.sentences:
yield sentence
def _parseLine(self, line):
''' Get a list of sentences from the line'''
# init a list
sentences_list = []
# get a list of the sentences as strings
sentences = self.tokenizer.tokenize(line)
# iterate
for sentence in sentences:
# create the Sentence object
sentence_object = Sentence(sentence)
# add it to the list
sentences_list.append(sentence_object)
return sentences_list
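    def say(self):
        # Added sketch: Joke.say()/Joke.getText()/Joke.length() call these on
        # each Line, but the original class does not define them; delegate to
        # the sentences, mirroring the other classes.
        for sentence in self.sentences:
            sentence.say()
    def getText(self):
        # use MutableString for efficiency, as in the other classes
        comedy_text = MutableString()
        for sentence in self.sentences:
            comedy_text += sentence.getText()
        return str(comedy_text)
    def length(self):
        length = 0
        for sentence in self.sentences:
            length = length + sentence.length()
        return length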
class Sentence:
''' A Sentence Class '''
def __init__(self, sentence):
self.tokens = self._parseSentence(sentence)
def _parseSentence(self, sentence):
'''break html style tags
e.g. <voice>foo</voice>
<break />
<voice attribute='true'> </voice> '''
# Debug:
#print "Sentence: " + sentence
# init the TagTokeniser (an HTMLParser)
tagTokeniser = TagTokeniser()
tagTokeniser.feed(sentence)
# get the tokens
_tokens = tagTokeniser.tokens()
return _tokens
def say(self):
for token in self.tokens:
token.say()
def getText(self):
# use MutableString for efficiency
comedy_text = MutableString()
for token in self.tokens:
comedy_text += token.getText()
return str(comedy_text)
def length(self):
length = 0
for token in self.tokens:
length = length + token.length()
return length
def nextToken(self):
'''Get the next token with the use of generators'''
# iterate
for token in self.getCombinedTokens():
yield token
def getCombinedTokens(self):
# init list
combinedTokens = []
text = CombinedText()
# iterate
for token in self.tokens:
if isinstance(token, Text) or isinstance(token, Pause):
# add the text of it
text.add(token.getText())
else:
# if the text is not empty
if len(text.getText()) > 0:
# add it to the list
combinedTokens.append(text)
# reset it
text = CombinedText()
# add the other token to the list
combinedTokens.append(token)
# don't forget to add the remaining text to the list
if len(text.getText()) > 0:
# add it to the list
combinedTokens.append(text)
return combinedTokens
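# Illustrative sketch of getCombinedTokens() on a hypothetical token stream:
# for [Text("Hi"), Pause(500), Text("there"), Command('look', 'left')], the
# consecutive Text/Pause tokens are merged into one CombinedText whose text is
# "Hi\pau=500\there", followed by the Command token unchanged.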
class CombinedText:
'''A Combined Text Class'''
def __init__(self):
# save the linetype
self.tokentype = "CombinedText"
# use MutableString for efficiency
self._text = MutableString()
def add(self, text):
self._text += text
def getText(self):
return str(self._text)
class Text:
'''A Text Class'''
def __init__(self,
text,
punch=False,
speed=None,
shape=None,
volume=None,
voice=None):
self.text = text
self.punch = punch
self.speed = speed
self.shape = shape
self.volume = volume
self.voice = voice
self.tokentype = "Text"
def say(self):
print "> " + self.text
def getText(self):
# use MutableString for efficiency
comedy_text = MutableString()
# check if the reset tag is needed
tagAdded = False
# get the values
speed = self.speed
shape = self.shape
volume = self.volume
voice = self.voice
        # if it is a punch line,
        # apply the punch line defaults without overriding explicit values
if self.punch:
if speed is None:
speed = punch_default_speed
if shape is None:
shape = punch_default_shape
if volume is None:
volume = punch_default_volume
# check for voice tags (if not None or default)
if speed and speed != 100:
comedy_text += '\\rspd=' + str(speed) + '\\'
tagAdded = True
if shape and shape != 100:
comedy_text += '\\vct=' + str(shape) + '\\'
tagAdded = True
if volume and volume != 100:
comedy_text += '\\vol=' + str(volume) + '\\'
tagAdded = True
if voice:
comedy_text += '\\vce=speaker=' + voice + '\\'
tagAdded = True
# add the text
comedy_text += self.text
# add the reset at the end
if tagAdded:
comedy_text += '\\rst\\'
return str(comedy_text)
def length(self):
return len(re.findall(r'\w+', self.text))
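# Illustrative sketch: with the defaults above, a punch line such as
# Text("Why did the robot cross the road?", punch=True).getText() yields
# "\rspd=85\\vol=130\Why did the robot cross the road?\rst\"
# (shape stays at its default of 100, so no \vct= tag is emitted).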
class Audio:
    '''An Audio Object'''
def __init__(self, filename, length):
# save the filename and length
self.filename = filename
self.length = length
self.tokentype = "Audio"
def say(self):
return self.getText()
def getText(self):
# Just for debug reasons.
# This should never reach the communication module
return "<audio file='%s' length='%s'/>" % (self.filename, self.length)
class Command:
'''A Command Object'''
def __init__(self, commandType, value, refsave=None):
# save the command type and value
self.commandType = commandType
self.value = value
self.refsave = refsave
self.tokentype = "Command"
def say(self):
return self.getText()
def getText(self):
# Just for debug reasons.
# This should never reach the communication module
return "<command %s='%s' />" % (self.commandType, self.value)
class Pause:
'''A Pause Object'''
def __init__(self, time=1000):
# save the time
self.time = time
self.tokentype = "Pause"
def say(self):
print '<pause:' + str(self.time) + '>'
def getText(self):
return '\\pau=' + str(self.time) + '\\'
class Response:
'''A Response Object'''
def __init__(self, trigger):
#save the type
self.trigger = trigger
self.tokentype = "Response"
class TagTokeniser(HTMLParser):
'''A SentenceTokeniser that parses HTML tags'''
def __init__(self):
# init HTMLParser
HTMLParser.__init__(self)
# init tokens list
self._tokens = []
# init params stack (LIFO)
self._params = []
''' HTMLParser handlers '''
def handle_starttag(self, tag, attrs):
# handle tags
if (tag == 'say'):
# will be handled in handle_data method
pass
elif (tag == 'audio'):
self.handle_audio(attrs)
elif (tag == 'command'):
self.handle_command(attrs)
elif (tag == 'pause'):
self.handle_pause(attrs)
elif (tag == 'response'):
self.handle_response(attrs)
else:
print "WARNING: Unknown tag '" + tag + "'"
# push to the params stack
self._params.append(attrs)
def handle_data(self, data):
if (self._params): # if not empty
# handle say with last object as attrs
attrs = self._params[-1]
else:
# when we have text without say tag
attrs = []
self.handle_say(data, attrs)
def handle_endtag(self, tag):
# pop from the params stack
if self._params:
self._params.pop()
else:
print "WARNING: Wrong handling of tag: " + tag
''' Custom handlers '''
def handle_say(self, text, params):
# init the say params to None
speed = None
shape = None
volume = None
voice = None
punch = False
# check if params are available
for item in params:
if (item[0] == 'speed'):
speed = int(item[1])
elif (item[0] == 'shape'):
shape = int(item[1])
elif (item[0] == 'volume'):
volume = int(item[1])
elif (item[0] == 'voice'):
voice = str(item[1])
elif (item[0] == 'punch'):
punch = bool(item[1])
else:
print "WARNING: Unsupported parameter in <say />:" + item[0]
textObject = Text(text, punch, speed, shape, volume, voice)
self._tokens.append(textObject)
def handle_audio(self, params):
# init the params to None
filename = None
length = None
# check if params are available
for item in params:
# play
if (item[0] == 'filename'):
filename = item[1]
elif (item[0] == 'length'):
length = float(item[1])
else:
                param = item[0]
                print "WARNING: Unsupported param in <audio />: %s" % (param)
        if filename is None:
            print "WARNING: filename parameter should exist in <audio />"
        if length is None:
            print "WARNING: length parameter should exist in <audio />"
audioObject = Audio(filename, length)
self._tokens.append(audioObject)
def handle_command(self, params):
# init the params to None
commandType = None
value = None
refsave = None
# check if params are available
for item in params:
# type: sequence
if (item[0] in ['sequenceplay', 'sequencestop']):
commandType = item[0]
value = item[1]
# type: look
elif (item[0] == 'look'):
commandType = 'look'
value = item[1]
# type: point
elif (item[0] == 'point'):
commandType = 'point'
value = item[1]
# in case there is a refsave
elif (item[0] == 'refsave'):
refsave = item[1]
else:
print "WARNING: Unsupported param in <command />:" + item[0]
commandObject = Command(commandType, value, refsave)
self._tokens.append(commandObject)
def handle_pause(self, params):
# default pause time is 1000
time = 1000
# check if params are available
for item in params:
if (item[0] == 'time'):
time = int(item[1])
else:
print "WARNING: Unsupported parameter in <pause />:" + item[0]
pauseObject = Pause(time)
self._tokens.append(pauseObject)
def handle_response(self, params):
# default is laughter
trigger = "laughter"
# check if params are available
for item in params:
if (item[0] == "trigger"):
if item[1] not in ["laughter", "applauding"]:
print "WARNING: Unsupported trigger:" + item[1]
else:
trigger = item[1]
else:
print "WARNING: Unsupported param in <response />:" + item[0]
responseObject = Response(trigger)
self._tokens.append(responseObject)
''' other methods '''
def tokens(self):
if (self._params): # if not empty
count = len(self._params)
print "WARNING: %d starttag(s) exists without an endtag" % (count)
return self._tokens
def reset(self):
# call superclass reset
HTMLParser.reset(self)
# reset the parser
self._tokens = []
self._params = []
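# Illustrative sketch (hypothetical input): feeding the tokeniser a tagged
# sentence produces one token object per tag or text run, e.g.
#
#   t = TagTokeniser()
#   t.feed("<say speed='90'>Hello everyone.</say> <pause time='500' />")
#   t.tokens()  # -> [Text('Hello everyone.', speed=90), Text(' '), Pause(500)]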
class Responces:
def __init__(self):
        # init the response lists
self._positive = []
self._negative = []
def addResponse(self, responsetype, trigger, response):
# create the response
response = {"trigger": trigger,
"response": response}
if responsetype == "positive":
self._positive.append(response)
elif responsetype == "negative":
self._negative.append(response)
else:
print "WARNING: Unknown response type: " + responsetype
def getResponse(self, responsetype, trigger):
        # should I give a response?
p = 100 # Yes, with a probability of p%
if randint(1, 100) <= p:
respond = True
else:
respond = False
        # init response with None
response = None
# if we will respond
if respond:
if responsetype == "positive":
responcelist = self._positive
elif responsetype == "negative":
responcelist = self._negative
else:
responcelist = None
print "WARNING: Unknown response type: " + responsetype
# if list is not empty
if responcelist:
# get a list with all responses with same trigger
triggerList = filter(lambda x: x["trigger"] == trigger,
responcelist)
if triggerList:
response = choice(triggerList)
# remove it so we don't have duplicate responses
responcelist.remove(response)
else:
print "WARNING: List '" + trigger + "' is empty."
else:
print "WARNING: List '" + responsetype + "' is empty."
# return the response (even if it is None)
return response
''' main '''
if __name__ == '__main__':
    # check if the filename was given as a parameter
if (len(sys.argv) < 2):
sys.exit('Missing input file')
# read the filename from the 1st argument
filename = sys.argv[1]
# open the file
source = open(filename, 'r')
# print loading message
print "Loading Comedy Parser..."
# import json library
import json
# parse the json file
comedy = json.load(source)
# close the file
source.close()
# print some empty lines (separate from previous commands)
for i in range(0, 50):
print " "
# clear screen
import os
os.system('clear')
# parse comedy
comedy = Comedy(comedy)
# iterate (just for testing)
for joke in comedy.nextJoke():
for line in joke.nextLine():
for sentence in line.nextSentence():
for token in sentence.nextToken():
print token.getText()
|
|
"""
Functions for reading and writing shapefiles.
These are simple wrappers around OGR and Shapely
libraries.
"""
from __future__ import print_function
import six
try:
from osgeo import ogr,osr
except ImportError:
import ogr,osr
import glob,os,re
from shapely import wkb,wkt
from shapely.geometry import Polygon,LineString,Point,MultiPolygon,MultiLineString,MultiPoint
import shapely.geos
from shapely import ops # for transform()
from .geom_types import ogr2text,text2ogr
from . import proj_utils
import uuid
import numpy as np
def wkb2shp(shp_name,
input_wkbs,
srs_text='EPSG:26910',
field_gen = lambda f: {},
fields = None,
overwrite=False,
geom_type=None,
driver=None,
layer_name=None):
"""
Save data to a shapefile.
shp_name: filename.shp for writing the result
or 'MEMORY' to return an in-memory ogr layer.
input_wkbs: list of shapely geometry objects for each feature. They must all
be the same geometry type (no mixing lines and polygons, etc.)
There are three ways of specifying fields:
field_gen: a function which will be called once for each feature, with
the geometry as its argument, and returns a dict of fields.
    fields: a numpy structured array with named fields, or
fields: a dict of field names, with each dictionary entry holding a sequence
of field values.
srs_text: sets the projection information when writing the shapefile. Expects
a string, for example 'EPSG:3095' or 'WGS84'.
    driver: directly specify an alternative driver, such as 'GPKG'. If None, an
      ESRI Shapefile is assumed, unless shp_name is 'memory', in which case an
      in-memory layer is created.
    layer_name: for GPKG output, optionally names the layer; it defaults to
      shp_name when not given.
    A commented usage sketch follows this function.
"""
if layer_name is None:
layer_name=shp_name
    if driver=='GPKG':
        drv = ogr.GetDriverByName('GPKG')
        new_ds = drv.CreateDataSource(shp_name)
    elif shp_name.lower()=='memory':
drv = ogr.GetDriverByName('Memory')
new_ds = drv.CreateDataSource("mem_" + uuid.uuid1().hex)
else:
if os.path.exists(shp_name):
if shp_name[-4:] == '.shp':
if overwrite:
# remove any matching files:
print("Removing the old to make way for the new")
os.unlink(shp_name)
else:
raise Exception("Shapefile exists, but overwrite is False")
# open the output shapefile:
drv = ogr.GetDriverByName('ESRI Shapefile')
new_ds = drv.CreateDataSource(shp_name)
if isinstance(srs_text,osr.SpatialReference):
srs = srs_text
else:
srs = osr.SpatialReference()
srs.SetFromUserInput(srs_text)
## Depending on the inputs, populate
# geoms - a list or array of shapely geometries
# field_names - ordered list of field names
# field_values - list of lists of field values
# Case 1: all the data is packed into a numpy struct array
geoms = input_wkbs
if fields is not None and type(fields) == list: # sub case - fields is a list of dicts
field_iter = iter(fields)
field_gen = lambda x: six.next(field_iter)
fields = None
if fields is not None and isinstance(fields,dict):
field_names=list(fields.keys())
N=len(fields[field_names[0]])
field_values=[None]*N
for n in range(N):
row=[fields[fname][n] for fname in field_names]
field_values[n]=row
elif fields is not None and isinstance(fields,np.ndarray):
dt = fields.dtype
# Special case to round-trip shp2geom->wkb2shp
# Not working yet
if input_wkbs is None:
input_wkbs=fields['geom']
# Note that each field may itself have some shape - so we need to enumerate those
# dimensions, too.
field_names = []
for name in dt.names:
if name=='geom': continue
# ndindex iterates over tuples which index successive elements of the field
for index in np.ndindex( dt[name].shape ):
name_idx = name + "_".join([str(i) for i in index])
field_names.append(name_idx)
field_values = []
for i in range(len(fields)):
fields_onerow = []
for name in dt.names:
if name=='geom': continue
for index in np.ndindex( dt[name].shape ):
if index != ():
fields_onerow.append( fields[i][name][index] )
else:
fields_onerow.append( fields[i][name] )
field_values.append( fields_onerow )
else:
# Case 2: geometries and a field generator are specified
field_dicts = []
for g in geoms:
field_dicts.append( field_gen(g) )
# py3k: .keys() is a dict_keys object, not 100% compatible with a list.
field_names = list(field_dicts[0].keys())
field_values = []
for i in range(len(input_wkbs)):
field_values.append( [field_dicts[i][k] for k in field_names] )
for n in field_names:
if len(n)>10:
raise Exception("Cannot have field names longer than 10 characters")
if geom_type is None:
# find it by querying the features - minor bug - this only
# works when shapely geometries were passed in.
types = np.array( [text2ogr[g.type] for g in geoms] )
geom_type = int(types.max())
# print "Chose geometry type to be %s"%ogr2text[geom_type]
new_layer = new_ds.CreateLayer(layer_name,
srs=srs,
geom_type=geom_type)
# setup the feature definition:
# create fields based on the field key/value pairs
# return for the first wkb file
casters = []
for field_i,key in enumerate(field_names):
val = field_values[0][field_i]
if type(val) == int or isinstance(val,np.integer):
field_def = ogr.FieldDefn(key,ogr.OFTInteger)
casters.append( int )
        elif isinstance(val,(float,np.floating)):
# above: use np.float, as it seems to be more compatible with
# 32-bit and 64-bit floats.
# This is an old bug - seems to work without this in the modern
# era, and in turn, asscalar does *not* work with list of lists
# # a numpy array of float32 ends up with
# # a type here of <type 'numpy.float32'>,
# # which doesn't match isinstance(...,float)
# # asscalar helps out with that
# 2018-07: np.floating is maybe the proper solution.
print( "float valued key is %s"%key)
field_def = ogr.FieldDefn(key,ogr.OFTReal)
field_def.SetWidth(64)
field_def.SetPrecision(10)
casters.append( float )
else:
field_def = ogr.FieldDefn(key,ogr.OFTString)
casters.append( str )
# print "Field name is %s"%key
new_layer.CreateField( field_def )
for i,geom in enumerate(geoms):
feature_fields = field_values[i]
# print "Processing: ",feature_fields
if type(geom) == str:
            fp = open(geom,'rb')  # geom holds a path to a .wkb file in this branch
geom_wkbs = [fp.read()]
fp.close()
elif type(geom) in (Polygon,LineString,Point):
geom_wkbs = [geom.wkb]
elif type(geom) in (MultiPolygon,MultiLineString,MultiPoint):
geom_wkbs = [g.wkb for g in geom.geoms]
for geom_wkb in geom_wkbs:
feat_geom = ogr.CreateGeometryFromWkb(geom_wkb)
feat = ogr.Feature( new_layer.GetLayerDefn() )
feat.SetGeometryDirectly(feat_geom)
for i,val in enumerate(feature_fields):
feat.SetField(str(field_names[i]),casters[i](feature_fields[i]))
new_layer.CreateFeature(feat)
feat.Destroy()
if shp_name.lower()!="memory":
new_layer.SyncToDisk()
else:
return new_ds
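# Illustrative usage sketch (file name and field values are hypothetical):
#
#   from shapely.geometry import Point
#   wkb2shp('sites.shp',
#           [Point(500000, 4180000), Point(501000, 4181000)],
#           fields={'name': ['A', 'B'], 'depth': [3.5, 7.25]},
#           srs_text='EPSG:26910', overwrite=True)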
# kind of the reverse of the above
def shp2geom(shp_fn,use_wkt=False,target_srs=None,
source_srs=None,return_srs=False,
query=None,layer_patt=None,
fold_to_lower=False):
"""
Read a shapefile into memory as a numpy array.
Data is returned as a record array, with geometry as a shapely
geometry object in the 'geom' field.
target_srs: input suitable for osgeo.osr.SetFromUserInput(), or an
existing osr.SpatialReference, to specify
a projection to which the data should be projected. If this is specified
but the shapefile does not specify a projection, and source_srs is not given,
then an exception is raised. source_srs will override the projection in
the shapefile if specified.
fold_to_lower: fold field names to lower case.
    return_srs: return a tuple, the second item being the text representation of the
      projection, or None if no projection information was found.
    A commented usage sketch follows this function.
"""
ods = ogr.Open(shp_fn)
if ods is None:
raise ValueError("File '%s' corrupt or not found"%shp_fn)
if layer_patt is not None:
import re
names=[]
for layer_idx in range(ods.GetLayerCount()):
layer=ods.GetLayerByIndex(layer_idx)
names.append(layer.GetName())
if re.match(layer_patt,names[-1]):
break
else:
print("Layers were: ")
for l in names:
print(" "+l)
raise Exception("Pattern %s matched no layers"%layer_patt)
else:
layer = ods.GetLayer(0)
if query is not None:
layer.SetAttributeFilter(query)
if target_srs is not None: # potentially transform on the fly
if source_srs is None:
source_srs=layer.GetSpatialRef()
if source_srs is None:
raise Exception("Reprojection requested, but no source reference available")
mapper=proj_utils.mapper(source_srs,target_srs)
# have to massage it a bit to suit shapely's calling convention
def xform(x,y,z=None): # x,y,z may be scalar or array
# ugly code... annoying code...
if z is None:
xy=np.moveaxis( np.array([x,y]), 0, -1 )
xyp=mapper(xy)
return xyp[...,0], xyp[...,1]
else:
xyz=np.moveaxis( np.array([x,y,z]), 0, -1 )
xyzp=mapper(xyz)
return xyzp[...,0],xyzp[...,1],xyzp[...,2]
def geom_xform(g):
return ops.transform(xform,g)
else:
target_srs=layer.GetSpatialRef()
def geom_xform(g):
return g
feat = layer.GetNextFeature()
defn = feat.GetDefnRef()
fld_count = defn.GetFieldCount()
fields = []
for i in range(fld_count):
fdef =defn.GetFieldDefn(i)
name = fdef.name
if fold_to_lower:
name=name.lower()
ogr_type = fdef.GetTypeName()
if ogr_type == 'String':
np_type = object
getter = lambda f,i=i: f.GetFieldAsString(i)
elif ogr_type =='Integer':
np_type = np.int32
getter = lambda f,i=i: f.GetFieldAsInteger(i)
elif ogr_type == 'Date':
np_type = '<M8[s]' # np.datetime64
# this handles null and real dates, whereas GetFieldAsDateTime
# would take more finagling to deal with nulls.
getter = lambda f,i=i: np.datetime64(f.GetFieldAsString(i).replace('/','-'))
else:
np_type = np.float64
getter = lambda f,i=i: f.GetFieldAsDouble(i)
fields.append( (i,name,np_type,getter) )
# And one for the geometry
def rdr(f):
        # The try..except block is from olden days when OGR was not robust
        # against weird geometries.
geo_ref=f.GetGeometryRef()
if geo_ref is None:
# this is possible, for example, in QGIS delete all nodes of a
# line, but don't delete the actual feature.
return None
#try:
if use_wkt:
geo=wkt.loads( f.GetGeometryRef().ExportToWkt() )
else:
data=f.GetGeometryRef().ExportToWkb()
# API changed somewhere to return mutable bytearray, but
# wkb only accepts bytes.
if isinstance(data,bytearray):
data=bytes(data)
geo=wkb.loads( data )
geo=geom_xform(geo)
#except:
# geo=None
return geo
fields.append( (None,'geom',object,rdr) )
layer_dtype = [ (name,np_type) for i,name,np_type,getter in fields]
recs = []
layer.ResetReading()
while 1:
feat = layer.GetNextFeature()
if feat is None:
break
try:
field_vals = [getter(feat) for i,name,np_type,getter in fields]
except shapely.geos.WKBReadingError as exc:
# Used to just be shapely.geos.ReadingError
print("Failed to load geometry for feature")
print(exc)
continue
field_array = tuple(field_vals)
recs.append(field_array)
recs = np.array( recs, dtype=layer_dtype)
    if return_srs:
        # target_srs may be None when the shapefile carries no projection info
        return recs, (target_srs.ExportToWkt() if target_srs is not None else None)
else:
return recs
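# A minimal usage sketch (not part of the original module); the file name and
# EPSG code below are placeholders for illustration only:
#
#   recs = shp2geom("channels.shp", target_srs="EPSG:26910")
#   print(recs.dtype.names)   # shapefile fields plus 'geom'
#   print(recs['geom'][0])    # shapely geometry of the first record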
|
|
# -*- coding: utf-8 -*-
"""
Created on Feb 20, 2014
@author: Aaron Ponti
"""
import re
import random
from MicroscopyCompositeDatasetConfig import MicroscopyCompositeDatasetConfig
from ch.systemsx.cisd.openbis.dss.etl.dto.api import ChannelColor
from ch.systemsx.cisd.openbis.dss.etl.dto.api import ImageIdentifier
from ch.systemsx.cisd.openbis.dss.etl.dto.api import ImageMetadata
from ch.systemsx.cisd.openbis.dss.etl.dto.api import OriginalDataStorageFormat
from ch.systemsx.cisd.openbis.dss.etl.dto.api import ChannelColorRGB
from ch.systemsx.cisd.openbis.dss.etl.dto.api import Channel
import xml.etree.ElementTree as ET
class LeicaTIFFSeriesCompositeDatasetConfig(MicroscopyCompositeDatasetConfig):
"""Image data configuration class for Leica TIFF series."""
_DEBUG = False
# List of metadata attributes obtained either from the settings XML
# file generated by the Annotation Tool or returned by
# BioFormatsProcessor.getMetadata(asXML=False)
# (for all series in the file, sorted by series).
_allSeriesMetadata = None
# Number of the series to register (for a multi-series dataset).
_seriesNum = 0
    # Series indices (since they might not always start from zero and
    # grow monotonically).
_seriesIndices = []
# Logger
_logger = None
# Dataset base name
_basename = ""
# Metadata folder
_metadataFolder = ""
# Maintain a metadata array
_metadata = []
# Regular expression pattern
    _pattern = re.compile(r"^(.*?)" +
                          r"((_Series|_s)(\d.*?))?" +
                          r"(_t(\d.*?))?" +
                          r"_z(\d.*?)" +
                          r"_ch(\d.*?)" +
                          r"\.tif{1,2}$", re.IGNORECASE)
def __init__(self, allSeriesMetadata, seriesIndices, logger, seriesNum=0):
"""Constructor.
@param allSeriesMetadata: list of metadata attributes generated either
by the Annotation Tool and parsed from the
settings XML file, or from BioFormatsProcessor
and returned via:
BioFormatsProcessor.getMetadataXML(asXML=False)
@param seriesIndices: list of known series indices (do not
necessarily need to start at 0 and increase
                              monotonically by one; could be [22, 30, 32])
@param seriesNum: Int Number of the series to register. All
other series in the file will be ignored.
seriesNum MUST BE CONTAINED in seriesIndices.
@param logger: logger object
"""
# Store the logger
self._logger = logger
# Store the series metadata
self._allSeriesMetadata = allSeriesMetadata
# Store the seriesIndices
if type(seriesIndices) == str:
seriesIndices = seriesIndices.split(",")
self._seriesIndices = map(int, seriesIndices)
# Store the series number: make sure that it belongs to seriesIndices
self._seriesNum = int(seriesNum)
        try:
            self._seriesIndices.index(self._seriesNum)
        except ValueError:
            raise Exception("seriesNum (" + str(self._seriesNum) + ") MUST be contained " +
                            "in seriesIndices " + str(self._seriesIndices) + "!")
# This is microscopy data
self.setMicroscopyData(True)
# Store raw data in original form
self.setOriginalDataStorageFormat(OriginalDataStorageFormat.UNCHANGED)
# Set the image library
self.setImageLibrary("BioFormats")
# Set the recognized extensions -- currently just tif(f)
self.setRecognizedImageExtensions(["tif", "tiff"])
# Set the dataset type
self.setDataSetType("MICROSCOPY_IMG")
def createChannel(self, channelCode):
"""Create a channel from the channelCode with the name as read from
the file via the MetadataReader and the color (RGB) as read.
@param channelCode Code of the channel as generated by extractImagesMetadata().
"""
# Get the indices of series and channel from the channel code
(seriesIndx, channelIndx) = self._getSeriesAndChannelNumbers(channelCode)
# Get the channel name
name = self._getChannelName(seriesIndx, channelIndx)
# Get the channel color (RGB)
colorRGB = self._getChannelColor(seriesIndx, channelIndx)
if self._DEBUG:
self._logger.info("LEICATIFFSERIESCOMPOSITEDATASETCONFIG::createChannel(): " +
"channel (s = " + str(seriesIndx) + ", c = " +
str(channelIndx) + ") has code " + channelCode +
", color (" + str(colorRGB) + " and name " + name)
# Return the channel with given name and color (the code is set to
# be the same as the channel name).
return Channel(channelCode, name, colorRGB)
def extractImagesMetadata(self, imagePath, imageIdentifiers):
"""Overrides extractImageMetadata method making sure to store
both series and channel indices in the channel code to be reused
later to extract color information and other metadata.
The channel code is in the form SERIES-(\d+)_CHANNEL-(\d+).
Only metadata for the relevant series number is returned!
@param imagePath Full path to the file to process
@param imageIdentifiers Array of ImageIdentifier's
@see constructor.
"""
# Extract the relevant information from the file name - the image
# identifiers in this case do not carry any useful information.
m = self._pattern.match(imagePath)
if m is None:
err = "MICROSCOPYCOMPOSITEDATASETCONFIG::extractImageMetadata(): " + \
"unexpected file name " + str(imagePath)
self._logger.error(err)
raise Exception(err)
# Get and store the base name
basename = m.group(1)
if self._basename == "" or self._basename != basename:
self._basename = basename
# The series number is not always defined in the file name.
# In the regex, the group(2) optionally matches _s{digits};
# in case group(2) is not None, the actual series number is
# stored in group(4).
if m.group(2) is None:
series = 0
else:
series = int(m.group(4))
# Make sure to process only the relevant series
if series != self._seriesNum:
return []
# The time index is also not always specified.
if m.group(5) is None:
timepoint = 0
else:
timepoint = int(m.group(6))
# Plane number is always specified
plane = int(m.group(7))
# Channel number is always specified
ch = int(m.group(8))
# Build the channel code
channelCode = "SERIES-" + str(series) + "_CHANNEL-" + str(ch)
if self._DEBUG:
msg = "Current file = " + imagePath + " has series = " + \
str(series) + " timepoint = " + str(timepoint) + " plane = " + \
str(plane) + " channel = " + str(ch) + "; channelCode = " + \
str(channelCode)
self._logger.info(msg)
# Initialize Metadata array
Metadata = []
# Initialize a new ImageMetadata object
imageMetadata = ImageMetadata();
# Fill in all information
imageMetadata.imageIdentifier = imageIdentifiers.get(0)
imageMetadata.seriesNumber = series
imageMetadata.timepoint = timepoint
imageMetadata.depth = plane
imageMetadata.channelCode = channelCode
imageMetadata.tileNumber = 1 # + self._seriesNum
imageMetadata.well = "IGNORED"
# Now return the image metadata object in an array
Metadata.append(imageMetadata)
return Metadata
def _getChannelName(self, seriesIndx, channelIndx):
"""Returns the channel name (from the parsed metadata) for
        a given channel in a given series.
"""
# TODO: Get the real channel name from the metadata!
# Build name of the channel from series and channel indices
name = "SERIES_" + str(seriesIndx) + "_CHANNEL_" + str(channelIndx)
return name
def _getChannelColor(self, seriesIndx, channelIndx):
"""Returns the channel color (from the parsed metadata) for
        a given channel in a given series.
"""
if self._DEBUG:
self._logger.info("Trying to find seriesIndx = " + \
str(seriesIndx) + " in seriesIndices = " + \
str(self._seriesIndices))
# Get the position in the seriesIndices list
indx = self._seriesIndices.index(int(seriesIndx))
# Get the metadata for the requested series
metadata = self._allSeriesMetadata[indx]
# Get the metadata
key = "channelColor" + str(channelIndx)
color = metadata[key]
if color is not None:
color = color.split(",")
R = int(255 * float(color[0]))
G = int(255 * float(color[1]))
B = int(255 * float(color[2]))
else:
if channelIndx == 0:
R = 255
G = 0
B = 0
elif channelIndx == 1:
R = 0
G = 255
B = 0
elif channelIndx == 2:
R = 0
G = 0
B = 255
else:
R = random.randint(0, 255)
G = random.randint(0, 255)
B = random.randint(0, 255)
# Work around an issue if all color components are 0
if R == G == B == 0:
R = 255
G = 255
B = 255
self._logger.info("Color changed from (0, 0, 0) to (255, 255, 255)")
# Create the ChannelColorRGB object
colorRGB = ChannelColorRGB(R, G, B)
# Return it
return colorRGB
def _getSeriesAndChannelNumbers(self, channelCode):
"""Extract series and channel number from channel code in
the form SERIES-(\d+)_CHANNEL-(\d+) to a tuple
(seriesIndx, channelIndx).
@param channelCode Code of the channel as generated by extractImagesMetadata().
"""
# Get the indices of series and channel from the channel code
p = re.compile("SERIES-(\d+)_CHANNEL-(\d+)")
m = p.match(channelCode)
if m is None or len(m.groups()) != 2:
err = "LEICATIFFSERIESCOMPOSITEDATASETCONFIG::_getSeriesAndChannelNumbers(): " + \
"Could not extract series and channel number!"
self._logger.error(err)
raise Exception(err)
# Now assign the indices
seriesIndx = int(m.group(1))
channelIndx = int(m.group(2))
if self._DEBUG:
self._logger.info("Current channel code " + channelCode + \
"corresponds to series = " + str(seriesIndx) + \
" and channel = " + str(channelIndx))
# Return them
return seriesIndx, channelIndx
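# A brief illustration (not part of the original class; the file name is made
# up) of how the _pattern regular expression above decomposes a Leica TIFF
# series file name into the groups used by extractImagesMetadata():
#
#   "experiment_Series003_t2_z10_ch01.tif"
#       group(1) -> "experiment"  (base name)
#       group(4) -> "003"         (series number)
#       group(6) -> "2"           (timepoint)
#       group(7) -> "10"          (z plane)
#       group(8) -> "01"          (channel)
#
#   which yields series 3, timepoint 2, plane 10, channel 1 and the channel
#   code "SERIES-3_CHANNEL-1".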
|
|
#!/usr/bin/env python2
# Copyright (c) 2014 The Moorecoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test the REST interface
#
from test_framework.test_framework import MoorecoinTestFramework
from test_framework.util import *
from struct import *
import binascii
import json
import StringIO
try:
    import http.client as httplib
except ImportError:
    import httplib
try:
    import urllib.parse as urlparse
except ImportError:
    import urlparse
def deser_uint256(f):
    r = 0
    for i in range(8):
        t = unpack(b"<I", f.read(4))[0]
        r += t << (i * 32)
    return r
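# A small illustrative check (not part of the original test): deser_uint256
# rebuilds a 256-bit little-endian integer from eight 32-bit words, so a
# buffer of b'\x01' followed by 31 zero bytes decodes to 1:
#
#   deser_uint256(StringIO.StringIO(b'\x01' + b'\x00' * 31)) == 1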
# allows simple HTTP GET calls with a request body
def http_get_call(host, port, path, requestdata='', response_object=0):
    conn = httplib.HTTPConnection(host, port)
    conn.request('GET', path, requestdata)
    if response_object:
        return conn.getresponse()
    return conn.getresponse().read()
class RESTTest(MoorecoinTestFramework):
    format_separator = "."
    def setup_chain(self):
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 3)
    def setup_network(self, split=False):
        self.nodes = start_nodes(3, self.options.tmpdir)
        connect_nodes_bi(self.nodes, 0, 1)
        connect_nodes_bi(self.nodes, 1, 2)
        connect_nodes_bi(self.nodes, 0, 2)
        self.is_network_split = False
        self.sync_all()
    def run_test(self):
        url = urlparse.urlparse(self.nodes[0].url)
        print("Mining blocks...")
        self.nodes[0].generate(1)
        self.sync_all()
        self.nodes[2].generate(100)
        self.sync_all()
        assert_equal(self.nodes[0].getbalance(), 50)
        txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
        self.sync_all()
        self.nodes[2].generate(1)
        self.sync_all()
        bb_hash = self.nodes[0].getbestblockhash()
        assert_equal(self.nodes[1].getbalance(), Decimal("0.1"))  # balance on node 1 should now be 0.1
# load the latest 0.1 tx over the rest api
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.format_separator+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
        #######################################
        # getutxos: query an unspent outpoint #
        #######################################
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json')
json_obj = json.loads(json_string)
#check chaintip response
assert_equal(json_obj['chaintiphash'], bb_hash)
#make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['utxos'][0]['value'], 0.1)
        #################################################
        # getutxos: now query an already spent outpoint #
        #################################################
json_request = '/checkmempool/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json')
json_obj = json.loads(json_string)
#check chaintip response
assert_equal(json_obj['chaintiphash'], bb_hash)
        # make sure there is no utxo in the response because this outpoint has been spent
assert_equal(len(json_obj['utxos']), 0)
#check bitmap
assert_equal(json_obj['bitmap'], "0")
##################################################
# getutxos: now check both with the same request #
##################################################
json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
        # test binary response
        bb_hash = self.nodes[0].getbestblockhash()
        binaryRequest = b'\x01\x02'
        binaryRequest += binascii.unhexlify(txid)
        binaryRequest += pack("i", n)
        binaryRequest += binascii.unhexlify(vintx)
        binaryRequest += pack("i", 0)
        bin_response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.format_separator+'bin', binaryRequest)
        output = StringIO.StringIO()
        output.write(bin_response)
        output.seek(0)
        chainHeight = unpack("i", output.read(4))[0]
        hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(65).rstrip("L")
        assert_equal(bb_hash, hashFromBinResponse)  # check that getutxos used the expected chaintip during calculation
        assert_equal(chainHeight, 102)  # chain height must be 102
############################
# getutxos: mempool checks #
############################
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.format_separator+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
        json_request = '/'+txid+'-'+str(n)
        json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json')
        json_obj = json.loads(json_string)
        assert_equal(len(json_obj['utxos']), 0)  # no outpoint is reported because the mempool is not checked by default
        json_request = '/checkmempool/'+txid+'-'+str(n)
        json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json')
        json_obj = json.loads(json_string)
        assert_equal(len(json_obj['utxos']), 1)  # there should be an outpoint because it has just been added to the mempool
        # do some invalid requests
        json_request = '{"checkmempool'
        response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.format_separator+'json', json_request, True)
        assert_equal(response.status, 500)  # must be a 500 because we send an invalid json request
        json_request = '{"checkmempool'
        response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.format_separator+'bin', json_request, True)
        assert_equal(response.status, 500)  # must be a 500 because we send an invalid bin request
        response = http_get_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.format_separator+'bin', '', True)
        assert_equal(response.status, 500)  # must be a 500 because we send an invalid bin request
        # test limits
        json_request = '/checkmempool/'
        for x in range(0, 20):
            json_request += txid+'-'+str(n)+'/'
        json_request = json_request.rstrip("/")
        response = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json', '', True)
        assert_equal(response.status, 500)  # must be a 500 because we exceed the limits
        json_request = '/checkmempool/'
        for x in range(0, 15):
            json_request += txid+'-'+str(n)+'/'
        json_request = json_request.rstrip("/")
        response = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.format_separator+'json', '', True)
        assert_equal(response.status, 200)  # must be a 200 because we are within the limits
        self.nodes[0].generate(1)  # generate a block so upcoming tests are not affected
        self.sync_all()
################
# /rest/block/ #
################
# check binary format
        response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.format_separator+"bin", "", True)
assert_equal(response.status, 200)
assert_greater_than(int(response.getheader('content-length')), 80)
response_str = response.read()
# compare with block header
        response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.format_separator+"bin", "", True)
assert_equal(response_header.status, 200)
assert_equal(int(response_header.getheader('content-length')), 80)
response_header_str = response_header.read()
assert_equal(response_str[0:80], response_header_str)
# check block hex format
        response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.format_separator+"hex", "", True)
assert_equal(response_hex.status, 200)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
response_hex_str = response_hex.read()
assert_equal(response_str.encode("hex")[0:160], response_hex_str[0:160])
# compare with hex block header
        response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.format_separator+"hex", "", True)
assert_equal(response_header_hex.status, 200)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
response_header_hex_str = response_header_hex.read()
assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
assert_equal(response_header_str.encode("hex")[0:160], response_header_hex_str[0:160])
# check json format
json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.format_separator+'json')
json_obj = json.loads(json_string)
assert_equal(json_obj['hash'], bb_hash)
# do tx test
tx_hash = json_obj['tx'][0]['txid'];
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.format_separator+"json")
json_obj = json.loads(json_string)
assert_equal(json_obj['txid'], tx_hash)
# check hex format response
        hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.format_separator+"hex", "", True)
assert_equal(hex_string.status, 200)
        assert_greater_than(int(hex_string.getheader('content-length')), 10)
# check block tx details
# let's make 3 tx and mine them on node 1
txs = []
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
self.sync_all()
# now mine the transactions
newblockhash = self.nodes[1].generate(1)
self.sync_all()
#check if the 3 tx show up in the new block
json_string = http_get_call(url.hostname, url.port, '/rest/block/'+newblockhash[0]+self.format_separator+'json')
json_obj = json.loads(json_string)
        for tx in json_obj['tx']:
            if 'coinbase' not in tx['vin'][0]:  # exclude coinbase
                assert_equal(tx['txid'] in txs, True)
#check the same but without tx details
json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/'+newblockhash[0]+self.format_separator+'json')
json_obj = json.loads(json_string)
        for tx in txs:
            assert_equal(tx in json_obj['tx'], True)
#test rest bestblock
bb_hash = self.nodes[0].getbestblockhash()
json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
json_obj = json.loads(json_string)
assert_equal(json_obj['bestblockhash'], bb_hash)
if __name__ == '__main__':
    RESTTest().main()
|
|
# -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from types import ModuleType
from pybuilder.core import (ENVIRONMENTS_ATTRIBUTE,
INITIALIZER_ATTRIBUTE,
NAME_ATTRIBUTE,
TASK_ATTRIBUTE,
Project,
PluginDef,
task,
depends,
dependents,
optional,
after,
before)
from pybuilder.errors import MissingPluginException, PyBuilderException, ProjectValidationFailedException
from pybuilder.execution import Task, TaskDependency, Action, ExecutionManager, Initializer
from pybuilder.pluginloader import PluginLoader
from pybuilder.reactor import Reactor
from test_utils import Mock, ANY, call, patch
class ReactorTest(unittest.TestCase):
def setUp(self):
self.old_reactor = Reactor.current_instance()
self.plugin_loader_mock = Mock(PluginLoader)
self.logger = Mock()
self.execution_manager = Mock(ExecutionManager)
self.reactor = Reactor(
self.logger, self.execution_manager, self.plugin_loader_mock)
def tearDown(self):
Reactor._set_current_instance(self.old_reactor)
def test_should_return_tasks_from_execution_manager_when_calling_get_tasks(self):
self.execution_manager.tasks = ["spam"]
self.assertEqual(["spam"], self.reactor.get_tasks())
def test_should_raise_exception_when_importing_plugin_and_plugin_not_found(self):
self.plugin_loader_mock.load_plugin.side_effect = MissingPluginException("not_found")
self.assertRaises(
MissingPluginException, self.reactor.import_plugin, PluginDef("not_found"))
self.plugin_loader_mock.load_plugin.assert_called_with(ANY, PluginDef("not_found"))
def test_should_collect_single_task(self):
def task():
pass
setattr(task, TASK_ATTRIBUTE, True)
module = ModuleType("mock_module")
module.task = task
self.reactor.collect_project_annotations(module)
self.assertEqual(len(self.execution_manager.register_task.call_args_list), 1)
self.assertTrue(isinstance(self.execution_manager.register_task.call_args[0][0], Task) and len(
self.execution_manager.register_task.call_args[0]) == 1)
self.assertEqual(self.execution_manager.register_task.call_args[0][0].name, "task")
def test_should_collect_single_task_with_overridden_name(self):
def task():
pass
setattr(task, TASK_ATTRIBUTE, True)
setattr(task, NAME_ATTRIBUTE, "overridden_name")
module = ModuleType("mock_module")
module.task = task
self.reactor.collect_project_annotations(module)
self.assertEqual(len(self.execution_manager.register_task.call_args_list), 1)
self.assertTrue(isinstance(self.execution_manager.register_task.call_args[0][0], Task) and len(
self.execution_manager.register_task.call_args[0]) == 1)
self.assertEqual(self.execution_manager.register_task.call_args[0][0].name, "overridden_name")
def test_should_collect_multiple_tasks(self):
def task():
pass
setattr(task, TASK_ATTRIBUTE, True)
def task2():
pass
setattr(task2, TASK_ATTRIBUTE, True)
module = ModuleType("mock_module")
module.task = task
module.task2 = task2
self.reactor.collect_project_annotations(module)
self.assertEqual(len(self.execution_manager.register_task.call_args_list), 2)
for call_args in self.execution_manager.register_task.call_args_list:
self.assertTrue(isinstance(call_args[0][0], Task) and len(call_args[0]) == 1)
def test_task_dependencies(self):
import pybuilder.reactor
with patch("pybuilder.reactor.Task"):
@task
def task1():
pass
@task
@depends(task1)
def task2():
pass
@task
def task3():
pass
@task
@depends(optional(task3))
@dependents("task6")
def task4():
pass
@task
@dependents("task6", optional(task3))
def task5():
pass
@task
@depends(task1, optional(task2))
def task6():
pass
module = ModuleType("mock_module")
module.task1 = task1
module.task2 = task2
module.task3 = task3
module.task4 = task4
module.task5 = task5
module.task6 = task6
self.reactor.collect_project_annotations(module)
pybuilder.reactor.Task.assert_has_calls([call("task1", task1, [], ''),
call("task2", task2, [TaskDependency(task1)], ''),
call("task3", task3, [TaskDependency(task5, True)], ''),
call("task4", task4, [TaskDependency(task3, True)], ''),
call("task5", task5, [], ''),
call("task6", task6,
[TaskDependency(task1), TaskDependency(task2, True),
TaskDependency(task4), TaskDependency(task5)], '')])
def test_task_dependencies_with_post_definition_injections(self):
import pybuilder.reactor
with patch("pybuilder.reactor.Task"):
@task
def task1():
pass
@task
@depends(task1)
def task2():
pass
@task
@depends(task1)
@dependents(task2)
def task3():
pass
module1 = ModuleType("mock_module_one")
module1.task1 = task1
module1.task2 = task2
module2 = ModuleType("mock_module_two")
module2.task3 = task3
self.reactor.collect_project_annotations(module1)
pybuilder.reactor.Task.assert_has_calls([call("task1", task1, [], ''),
call("task2", task2, [TaskDependency(task1)], '')])
self.reactor.collect_project_annotations(module2)
pybuilder.reactor.Task.assert_has_calls([call("task3", task3, [TaskDependency(task1)], '')])
self.execution_manager.register_late_task_dependencies.assert_has_calls(
[call({}), call({"task2": [TaskDependency(task3)]})])
def test_task_dependencies_with_post_definition_injections_custom_names(self):
import pybuilder.reactor
with patch("pybuilder.reactor.Task"):
@task
def task1():
pass
@task
@depends(task1)
def task2():
pass
@task("task_3")
@depends(task1)
@dependents(task2)
def task3():
pass
module1 = ModuleType("mock_module_one")
module1.task1 = task1
module1.task2 = task2
module2 = ModuleType("mock_module_two")
module2.task3 = task3
self.reactor.collect_project_annotations(module1)
pybuilder.reactor.Task.assert_has_calls([call("task1", task1, [], ''),
call("task2", task2, [TaskDependency(task1)], '')])
self.reactor.collect_project_annotations(module2)
pybuilder.reactor.Task.assert_has_calls([call("task_3", task3, [TaskDependency(task1)], '')])
self.execution_manager.register_late_task_dependencies.assert_has_calls(
[call({}), call({"task2": [TaskDependency("task_3")]})])
def test_should_collect_single_before_action(self):
@before("spam")
def action():
pass
module = ModuleType("mock_module")
module.task = action
self.reactor.collect_project_annotations(module)
self.assertEqual(self.execution_manager.register_action.call_count, 1)
self.assertTrue(isinstance(self.execution_manager.register_action.call_args[0][0], Action) and
len(self.execution_manager.register_action.call_args[0]) == 1)
def test_should_collect_single_after_action(self):
@after("spam")
def action():
pass
module = ModuleType("mock_module")
module.task = action
self.reactor.collect_project_annotations(module)
self.assertEqual(self.execution_manager.register_action.call_count, 1)
self.assertTrue(isinstance(self.execution_manager.register_action.call_args[0][0], Action) and
len(self.execution_manager.register_action.call_args[0]) == 1)
def test_should_collect_single_after_action_with_only_once_flag(self):
@after("spam", only_once=True)
def action():
pass
module = ModuleType("mock_module")
module.task = action
def register_action(action):
if not action.only_once:
raise AssertionError("Action is not marked as only_once")
self.execution_manager.register_action = register_action
self.reactor.collect_project_annotations(module)
def test_should_collect_single_after_action_with_teardown_flag(self):
@after("spam", teardown=True)
def action():
pass
module = ModuleType("mock_module")
module.task = action
self.reactor.collect_project_annotations(module)
def test_should_collect_single_initializer(self):
def init():
pass
setattr(init, INITIALIZER_ATTRIBUTE, True)
module = ModuleType("mock_module")
module.task = init
self.reactor.collect_project_annotations(module)
self.assertEqual(self.execution_manager.register_initializer.call_count, 1)
self.assertTrue(isinstance(self.execution_manager.register_initializer.call_args[0][0], Initializer) and
len(self.execution_manager.register_initializer.call_args[0]) == 1)
def test_should_collect_single_initializer_with_environments(self):
def init():
pass
setattr(init, INITIALIZER_ATTRIBUTE, True)
setattr(init, ENVIRONMENTS_ATTRIBUTE, ["any_environment"])
module = ModuleType("mock_module")
module.task = init
class ExecutionManagerMock(object):
def register_initializer(self, initializer):
self.initializer = initializer
def register_late_task_dependencies(self, dependencies):
pass
execution_manager_mock = ExecutionManagerMock()
self.reactor.execution_manager = execution_manager_mock
self.reactor.collect_project_annotations(module)
self.assertEqual(
execution_manager_mock.initializer.environments, ["any_environment"])
@patch("pybuilder.reactor.os.path.exists", return_value=False)
@patch("pybuilder.reactor.np", return_value="spam")
def test_should_raise_when_verifying_project_directory_and_directory_does_not_exist(self,
np,
os_path_exists):
self.assertRaises(
PyBuilderException, self.reactor.verify_project_directory, "spam", "eggs")
np.assert_called_with("spam")
os_path_exists.assert_called_with("spam")
@patch("pybuilder.reactor.os.path.isdir", return_value=False)
@patch("pybuilder.reactor.os.path.exists", return_value=True)
@patch("pybuilder.reactor.np", return_value="spam")
def test_should_raise_when_verifying_project_directory_and_directory_is_not_a_directory(self,
np,
os_path_exists,
os_path_isdir):
self.assertRaises(
PyBuilderException, self.reactor.verify_project_directory, "spam", "eggs")
np.assert_called_with("spam")
os_path_exists.assert_called_with("spam")
os_path_isdir.assert_called_with("spam")
@patch("pybuilder.reactor.jp", side_effect=lambda *x: "/".join(x))
@patch("pybuilder.reactor.os.path.isdir", return_value=True)
@patch("pybuilder.reactor.os.path.exists", side_effect=lambda x: True if x == "spam" else False)
@patch("pybuilder.reactor.np", return_value="spam")
def test_should_raise_when_verifying_project_directory_and_build_descriptor_does_not_exist(self,
np,
os_path_exists,
os_path_isdir,
jp):
self.assertRaises(
PyBuilderException, self.reactor.verify_project_directory, "spam", "eggs")
np.assert_called_with("spam")
os_path_exists.assert_has_calls([call("spam"), call("spam/eggs")])
os_path_isdir.assert_called_with("spam")
jp.assert_called_with("spam", "eggs")
@patch("pybuilder.reactor.os.path.isfile", return_value=False)
@patch("pybuilder.reactor.jp", side_effect=lambda *x: "/".join(x))
@patch("pybuilder.reactor.os.path.isdir", return_value=True)
@patch("pybuilder.reactor.os.path.exists", return_value=True)
@patch("pybuilder.reactor.np", return_value="spam")
def test_should_raise_when_verifying_project_directory_and_build_descriptor_is_not_a_file(self,
np,
os_path_exists,
os_path_isdir,
jp,
os_path_isfile):
self.assertRaises(
PyBuilderException, self.reactor.verify_project_directory, "spam", "eggs")
np.assert_called_with("spam")
os_path_exists.assert_has_calls([call("spam"), call("spam/eggs")])
os_path_isdir.assert_called_with("spam")
jp.assert_called_with("spam", "eggs")
os_path_isfile.assert_called_with("spam/eggs")
@patch("pybuilder.reactor.os.path.isfile", return_value=True)
@patch("pybuilder.reactor.jp", side_effect=lambda *x: "/".join(x))
@patch("pybuilder.reactor.os.path.isdir", return_value=True)
@patch("pybuilder.reactor.os.path.exists", return_value=True)
@patch("pybuilder.reactor.np", return_value="/spam")
def test_should_return_directory_and_full_path_of_descriptor_when_verifying_project_directory(self,
np,
os_path_exists,
os_path_isdir,
jp,
os_path_isfile):
self.assertEqual(
("/spam", "/spam/eggs"), self.reactor.verify_project_directory("spam", "eggs"))
np.assert_called_with("spam")
os_path_exists.assert_has_calls([call("/spam"), call("/spam/eggs")])
os_path_isdir.assert_called_with("/spam")
jp.assert_called_with("/spam", "eggs")
os_path_isfile.assert_called_with("/spam/eggs")
@patch("pybuilder.reactor.imp.load_source", side_effect=ImportError("spam"))
def test_should_raise_when_loading_project_module_and_import_raises_exception(self, imp_load_source):
self.assertRaises(
PyBuilderException, self.reactor.load_project_module, "spam")
imp_load_source.assert_called_with("build", "spam")
@patch("pybuilder.reactor.imp.load_source", return_value=Mock())
    def test_should_return_module_when_loading_project_module_and_import_succeeds(self, imp_load_source):
self.assertTrue(imp_load_source.return_value is self.reactor.load_project_module("spam"))
imp_load_source.assert_called_with("build", "spam")
def test_ensure_project_attributes_are_set_when_instantiating_project(self):
module = ModuleType("mock_module")
module.version = "version"
module.default_task = "default_task"
module.summary = "summary"
module.description = "description"
module.author = "author"
module.authors = "authors"
module.maintainer = "maintainer"
module.maintainers = "maintainers"
module.license = "license"
module.url = "url"
module.urls = "urls"
self.reactor.project = Mock()
self.reactor.project_module = module
self.reactor.apply_project_attributes()
self.assertEqual("version", self.reactor.project.version)
self.assertEqual("default_task", self.reactor.project.default_task)
self.assertEqual("summary", self.reactor.project.summary)
self.assertEqual("description", self.reactor.project.description)
self.assertEqual("author", self.reactor.project.author)
self.assertEqual("authors", self.reactor.project.authors)
self.assertEqual("maintainer", self.reactor.project.maintainer)
self.assertEqual("maintainers", self.reactor.project.maintainers)
self.assertEqual("license", self.reactor.project.license)
self.assertEqual("url", self.reactor.project.url)
self.assertEqual("urls", self.reactor.project.urls)
def test_ensure_project_name_is_set_from_attribute_when_instantiating_project(self):
module = ModuleType("mock_module")
module.name = "mock_module"
self.reactor.project = Mock()
self.reactor.project_module = module
self.reactor.apply_project_attributes()
self.assertEqual("mock_module", self.reactor.project.name)
def test_should_import_plugin_only_once(self):
plugin_module = ModuleType("mock_module")
self.plugin_loader_mock.load_plugin.return_value = plugin_module
self.reactor.require_plugin("spam")
self.reactor.require_plugin("spam")
self.reactor._load_deferred_plugins()
self.assertEqual(["spam"], self.reactor.get_plugins())
self.plugin_loader_mock.load_plugin.assert_called_with(ANY, PluginDef("spam"))
def test_ensure_project_properties_are_logged_when_calling_log_project_properties(self):
project = Project("spam")
project.set_property("spam", "spam")
project.set_property("eggs", "eggs")
self.reactor.project = project
self.reactor.log_project_properties()
call_args = self.logger.debug.call_args
self.assertEqual(call_args[0][0], "Project properties: %s")
self.assertTrue("basedir : %s" % project.basedir in call_args[0][1])
self.assertTrue("eggs : eggs" in call_args[0][1])
self.assertTrue("spam : spam" in call_args[0][1])
def test_should_raise_exception_when_project_is_not_valid(self):
self.reactor.project = Mock(properties={})
self.reactor.project.validate.return_value = ["spam"]
self.assertRaises(ProjectValidationFailedException, self.reactor.build)
def test_prepare_tasks(self):
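        # Behavior inferred from the assertions below (not documented in the
        # original test): a "+task" argument appends the task to the project's
        # default tasks, "^task" excludes a task from the resulting list, and
        # an empty selection falls back to the default tasks (or raises when
        # no default tasks are defined).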
self.reactor.project = Project("spam")
self.reactor.project.default_task = ["a", "b"]
self.assertEqual(self.reactor._prepare_tasks(["c"]), ["c"])
self.assertEqual(self.reactor._prepare_tasks(["+c"]), ["a", "b", "c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", '^c']), ["a", "b"])
self.assertEqual(self.reactor._prepare_tasks(["^b"]), ["a"])
self.assertEqual(self.reactor._prepare_tasks(["^a"]), ["b"])
self.assertEqual(self.reactor._prepare_tasks(["^d"]), ["a", "b"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "d"]), ["d", "c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "d", "^b"]), ["d", "c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "+", "^"]), ["+", "^", "c"])
self.assertEqual(self.reactor._prepare_tasks([]), ["a", "b"])
self.reactor.project.default_task = []
self.assertEqual(self.reactor._prepare_tasks(["c"]), ["c"])
self.assertEqual(self.reactor._prepare_tasks(["+c"]), ["c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "^d"]), ["c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "d", "^d"]), ["c"])
self.assertEqual(self.reactor._prepare_tasks(["^c", "c"]), [])
self.assertEqual(self.reactor._prepare_tasks(["+c", "^c"]), [])
self.assertEqual(self.reactor._prepare_tasks(["+c", "d"]), ["d", "c"])
self.assertEqual(self.reactor._prepare_tasks(["+c", "+"]), ["+", "c"])
self.assertRaises(PyBuilderException, self.reactor._prepare_tasks, [])
|
|
#!/usr/bin/env python3
import unittest
import random
from scapy.packet import Raw
from scapy.layers.l2 import Ether, Dot1Q
from scapy.layers.inet import IP, UDP
from framework import VppTestCase, VppTestRunner
from util import Host, ppp
from vpp_sub_interface import VppDot1QSubint, VppDot1ADSubint
class TestL2bd(VppTestCase):
""" L2BD Test Case """
@classmethod
def setUpClass(cls):
"""
Perform standard class setup (defined by class method setUpClass in
class VppTestCase) before running the test case, set test case related
variables and configure VPP.
:var int bd_id: Bridge domain ID.
:var int mac_entries_count: Number of MAC entries for bridge-domain to
learn.
:var int dot1q_tag: VLAN tag for dot1q sub-interface.
:var int dot1ad_sub_id: SubID of dot1ad sub-interface.
:var int dot1ad_outer_tag: VLAN S-tag for dot1ad sub-interface.
:var int dot1ad_inner_tag: VLAN C-tag for dot1ad sub-interface.
:var int sl_pkts_per_burst: Number of packets in burst for single-loop
test.
:var int dl_pkts_per_burst: Number of packets in burst for dual-loop
test.
"""
super(TestL2bd, cls).setUpClass()
# Test variables
cls.bd_id = 1
cls.mac_entries_count = 100
# cls.dot1q_sub_id = 100
cls.dot1q_tag = 100
cls.dot1ad_sub_id = 20
cls.dot1ad_outer_tag = 200
cls.dot1ad_inner_tag = 300
cls.sl_pkts_per_burst = 2
cls.dl_pkts_per_burst = 257
try:
# create 3 pg interfaces
cls.create_pg_interfaces(range(3))
# create 2 sub-interfaces for pg1 and pg2
cls.sub_interfaces = [
VppDot1QSubint(cls, cls.pg1, cls.dot1q_tag),
VppDot1ADSubint(cls, cls.pg2, cls.dot1ad_sub_id,
cls.dot1ad_outer_tag, cls.dot1ad_inner_tag)]
# packet flows mapping pg0 -> pg1, pg2, etc.
cls.flows = dict()
cls.flows[cls.pg0] = [cls.pg1, cls.pg2]
cls.flows[cls.pg1] = [cls.pg0, cls.pg2]
cls.flows[cls.pg2] = [cls.pg0, cls.pg1]
# packet sizes
cls.pg_if_packet_sizes = [64, 512, 1518, 9018]
cls.sub_if_packet_sizes = [64, 512, 1518 + 4, 9018 + 4]
cls.interfaces = list(cls.pg_interfaces)
cls.interfaces.extend(cls.sub_interfaces)
# Create BD with MAC learning enabled and put interfaces and
# sub-interfaces to this BD
for pg_if in cls.pg_interfaces:
sw_if_index = pg_if.sub_if.sw_if_index \
if hasattr(pg_if, 'sub_if') else pg_if.sw_if_index
cls.vapi.sw_interface_set_l2_bridge(rx_sw_if_index=sw_if_index,
bd_id=cls.bd_id)
# setup all interfaces
for i in cls.interfaces:
i.admin_up()
# mapping between packet-generator index and lists of test hosts
cls.hosts_by_pg_idx = dict()
# create test host entries and inject packets to learn MAC entries
# in the bridge-domain
cls.create_hosts_and_learn(cls.mac_entries_count)
cls.logger.info(cls.vapi.ppcli("show l2fib"))
except Exception:
super(TestL2bd, cls).tearDownClass()
raise
@classmethod
def tearDownClass(cls):
super(TestL2bd, cls).tearDownClass()
def setUp(self):
"""
Clear trace and packet infos before running each test.
"""
super(TestL2bd, self).setUp()
self.reset_packet_infos()
def tearDown(self):
"""
Show various debug prints after each test.
"""
super(TestL2bd, self).tearDown()
if not self.vpp_dead:
self.logger.info(self.vapi.ppcli("show l2fib verbose"))
self.logger.info(self.vapi.ppcli("show bridge-domain %s detail" %
self.bd_id))
@classmethod
def create_hosts_and_learn(cls, count):
"""
Create required number of host MAC addresses and distribute them among
interfaces. Create host IPv4 address for every host MAC address. Create
L2 MAC packet stream with host MAC addresses per interface to let
the bridge domain learn these MAC addresses.
:param count: Integer number of hosts to create MAC/IPv4 addresses for.
"""
n_int = len(cls.pg_interfaces)
macs_per_if = count // n_int
i = -1
for pg_if in cls.pg_interfaces:
i += 1
start_nr = macs_per_if * i
end_nr = count if i == (n_int - 1) else macs_per_if * (i + 1)
cls.hosts_by_pg_idx[pg_if.sw_if_index] = []
hosts = cls.hosts_by_pg_idx[pg_if.sw_if_index]
packets = []
for j in range(start_nr, end_nr):
host = Host(
"00:00:00:ff:%02x:%02x" % (pg_if.sw_if_index, j),
"172.17.1%02x.%u" % (pg_if.sw_if_index, j))
packet = (Ether(dst="ff:ff:ff:ff:ff:ff", src=host.mac))
hosts.append(host)
if hasattr(pg_if, 'sub_if'):
packet = pg_if.sub_if.add_dot1_layer(packet)
packets.append(packet)
pg_if.add_stream(packets)
cls.logger.info("Sending broadcast eth frames for MAC learning")
cls.pg_start()
def create_stream(self, src_if, packet_sizes, packets_per_burst):
"""
Create input packet stream for defined interface.
:param object src_if: Interface to create packet stream for.
:param list packet_sizes: List of required packet sizes.
:param int packets_per_burst: Number of packets in burst.
:return: Stream of packets.
"""
pkts = []
for i in range(0, packets_per_burst):
dst_if = self.flows[src_if][i % 2]
dst_host = random.choice(self.hosts_by_pg_idx[dst_if.sw_if_index])
src_host = random.choice(self.hosts_by_pg_idx[src_if.sw_if_index])
pkt_info = self.create_packet_info(src_if, dst_if)
payload = self.info_to_payload(pkt_info)
p = (Ether(dst=dst_host.mac, src=src_host.mac) /
IP(src=src_host.ip4, dst=dst_host.ip4) /
UDP(sport=1234, dport=1234) /
Raw(payload))
pkt_info.data = p.copy()
if hasattr(src_if, 'sub_if'):
p = src_if.sub_if.add_dot1_layer(p)
size = random.choice(packet_sizes)
self.extend_packet(p, size)
pkts.append(p)
return pkts
def verify_capture(self, pg_if, capture):
"""
Verify captured input packet stream for defined interface.
:param object pg_if: Interface to verify captured packet stream for.
:param list capture: Captured packet stream.
"""
last_info = dict()
for i in self.pg_interfaces:
last_info[i.sw_if_index] = None
dst_sw_if_index = pg_if.sw_if_index
for packet in capture:
payload_info = self.payload_to_info(packet[Raw])
src_sw_if_index = payload_info.src
src_if = None
for ifc in self.pg_interfaces:
if ifc != pg_if:
if ifc.sw_if_index == src_sw_if_index:
src_if = ifc
break
if hasattr(src_if, 'sub_if'):
# Check VLAN tags and Ethernet header
packet = src_if.sub_if.remove_dot1_layer(packet)
self.assertTrue(Dot1Q not in packet)
try:
ip = packet[IP]
udp = packet[UDP]
packet_index = payload_info.index
self.assertEqual(payload_info.dst, dst_sw_if_index)
self.logger.debug("Got packet on port %s: src=%u (id=%u)" %
(pg_if.name, payload_info.src, packet_index))
next_info = self.get_next_packet_info_for_interface2(
payload_info.src, dst_sw_if_index,
last_info[payload_info.src])
last_info[payload_info.src] = next_info
self.assertTrue(next_info is not None)
self.assertEqual(packet_index, next_info.index)
saved_packet = next_info.data
# Check standard fields
self.assertEqual(ip.src, saved_packet[IP].src)
self.assertEqual(ip.dst, saved_packet[IP].dst)
self.assertEqual(udp.sport, saved_packet[UDP].sport)
self.assertEqual(udp.dport, saved_packet[UDP].dport)
except:
self.logger.error(ppp("Unexpected or invalid packet:", packet))
raise
for i in self.pg_interfaces:
remaining_packet = self.get_next_packet_info_for_interface2(
i, dst_sw_if_index, last_info[i.sw_if_index])
self.assertTrue(
remaining_packet is None,
"Port %u: Packet expected from source %u didn't arrive" %
(dst_sw_if_index, i.sw_if_index))
def run_l2bd_test(self, pkts_per_burst):
""" L2BD MAC learning test """
# Create incoming packet streams for packet-generator interfaces
for i in self.pg_interfaces:
packet_sizes = self.sub_if_packet_sizes if hasattr(i, 'sub_if') \
else self.pg_if_packet_sizes
pkts = self.create_stream(i, packet_sizes, pkts_per_burst)
i.add_stream(pkts)
# Enable packet capture and start packet sending
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
# Verify outgoing packet streams per packet-generator interface
for i in self.pg_interfaces:
capture = i.get_capture()
self.logger.info("Verifying capture on interface %s" % i.name)
self.verify_capture(i, capture)
def test_l2bd_sl(self):
""" L2BD MAC learning single-loop test
Test scenario:
1.config
MAC learning enabled
learn 100 MAC entries
3 interfaces: untagged, dot1q, dot1ad (dot1q used instead of
dot1ad in the first version)
2.sending l2 eth pkts between 3 interface
64B, 512B, 1518B, 9200B (ether_size)
burst of 2 pkts per interface
"""
self.run_l2bd_test(self.sl_pkts_per_burst)
def test_l2bd_dl(self):
""" L2BD MAC learning dual-loop test
Test scenario:
1.config
MAC learning enabled
learn 100 MAC entries
3 interfaces: untagged, dot1q, dot1ad (dot1q used instead of
dot1ad in the first version)
2.sending l2 eth pkts between 3 interface
64B, 512B, 1518B, 9200B (ether_size)
burst of 257 pkts per interface
"""
self.run_l2bd_test(self.dl_pkts_per_burst)
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
|
|
import pytest
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import TimeoutException, NoSuchElementException
def _wait(browser):
return WebDriverWait(browser, 30)
def _load_notebook(browser, port, retries=5):
# go to the correct page
browser.get("http://localhost:{}/notebooks/blank.ipynb".format(port))
def page_loaded(browser):
return browser.execute_script(
'return typeof Jupyter !== "undefined" && Jupyter.page !== undefined;')
# wait for the page to load
try:
_wait(browser).until(page_loaded)
except TimeoutException:
if retries > 0:
print("Retrying page load...")
# page timeout, but sometimes this happens, so try refreshing?
_load_notebook(browser, port, retries=retries - 1)
else:
print("Failed to load the page too many times")
raise
def _activate_toolbar(browser, name="Create%20Assignment"):
def celltoolbar_exists(browser):
return browser.execute_script(
'return $("#view_menu #menu-cell-toolbar").find("[data-name=\'{}\']").length == 1;'.format(name))
# wait for the view menu to appear
_wait(browser).until(celltoolbar_exists)
# activate the Create Assignment toolbar
browser.execute_script(
"$('#view_menu #menu-cell-toolbar').find('[data-name=\"{}\"]').find('a').click();".format(name)
)
# make sure the toolbar appeared
if name == "Create%20Assignment":
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".celltoolbar select")))
elif name == "Edit%20Metadata":
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".celltoolbar button")))
def _select_none(browser, index=0):
select = Select(browser.find_elements_by_css_selector('.celltoolbar select')[index])
select.select_by_value('')
def _select_manual(browser, index=0):
select = Select(browser.find_elements_by_css_selector('.celltoolbar select')[index])
select.select_by_value('manual')
def _select_solution(browser, index=0):
select = Select(browser.find_elements_by_css_selector('.celltoolbar select')[index])
select.select_by_value('solution')
def _select_tests(browser, index=0):
select = Select(browser.find_elements_by_css_selector('.celltoolbar select')[index])
select.select_by_value('tests')
def _select_locked(browser, index=0):
select = Select(browser.find_elements_by_css_selector('.celltoolbar select')[index])
select.select_by_value('readonly')
def _set_points(browser, points=2, index=0):
elem = browser.find_elements_by_css_selector(".nbgrader-points-input")[index]
elem.clear()
elem.send_keys(points)
browser.find_elements_by_css_selector(".nbgrader-cell")[index].click()
def _set_id(browser, cell_id="foo", index=0):
elem = browser.find_elements_by_css_selector(".nbgrader-id-input")[index]
elem.clear()
elem.send_keys(cell_id)
browser.find_elements_by_css_selector(".nbgrader-cell")[index].click()
def _get_metadata(browser):
return browser.execute_script(
"""
var cell = Jupyter.notebook.get_cell(0);
return cell.metadata.nbgrader;
"""
)
def _get_total_points(browser):
element = browser.find_element_by_id("nbgrader-total-points")
return float(element.get_attribute("value"))
def _save(browser):
browser.execute_script("Jupyter.notebook.save_notebook();")
def _wait_for_modal(browser):
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".modal-dialog")))
def _dismiss_modal(browser):
button = browser.find_element_by_css_selector(".modal-footer .btn-primary")
button.click()
def modal_gone(browser):
try:
browser.find_element_by_css_selector(".modal-dialog")
except NoSuchElementException:
return True
return False
_wait(browser).until(modal_gone)
@pytest.mark.nbextensions
def test_manual_cell(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# does the nbgrader metadata exist?
assert _get_metadata(browser) is None
# make it manually graded
_select_manual(browser)
assert _get_metadata(browser)['solution']
assert _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
# wait for the points and id fields to appear
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-points")))
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-id")))
# set the points
_set_points(browser)
assert 2 == _get_metadata(browser)['points']
# set the id
_set_id(browser)
assert "foo" == _get_metadata(browser)['grade_id']
# make it nothing
_select_none(browser)
assert not _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
@pytest.mark.nbextensions
def test_solution_cell(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# does the nbgrader metadata exist?
assert _get_metadata(browser) is None
# make it a solution cell
_select_solution(browser)
assert _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
# wait for the id field to appear
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-id")))
# set the id
_set_id(browser)
assert "foo" == _get_metadata(browser)['grade_id']
# make it nothing
_select_none(browser)
assert not _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
@pytest.mark.nbextensions
def test_tests_cell(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# does the nbgrader metadata exist?
assert _get_metadata(browser) is None
# make it autograder tests
_select_tests(browser)
assert not _get_metadata(browser)['solution']
assert _get_metadata(browser)['grade']
assert _get_metadata(browser)['locked']
# wait for the points and id fields to appear
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-points")))
_wait(browser).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-id")))
WebDriverWait(browser, 30).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".lock-button")))
# set the points
_set_points(browser)
assert 2 == _get_metadata(browser)['points']
# set the id
_set_id(browser)
assert "foo" == _get_metadata(browser)['grade_id']
# make it nothing
_select_none(browser)
assert not _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
@pytest.mark.nbextensions
def test_locked_cell(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# does the nbgrader metadata exist?
assert _get_metadata(browser) is None
# make it locked
_select_locked(browser)
assert not _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert _get_metadata(browser)['locked']
# wait for the id and lock button to appear
WebDriverWait(browser, 30).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".nbgrader-id")))
WebDriverWait(browser, 30).until(
EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".lock-button")))
# set the id
_set_id(browser)
assert "foo" == _get_metadata(browser)['grade_id']
# make it nothing
_select_none(browser)
assert not _get_metadata(browser)['solution']
assert not _get_metadata(browser)['grade']
assert not _get_metadata(browser)['locked']
@pytest.mark.nbextensions
def test_grade_cell_css(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# make it manually graded
_select_manual(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 1
# make it nothing
_select_none(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 0
# make it a solution
_select_solution(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 1
# make it nothing
_select_none(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 0
# make it autograder tests
_select_tests(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 1
# make it nothing
_select_none(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 0
# make it autograder tests
_select_tests(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 1
# deactivate the toolbar
_activate_toolbar(browser, "None")
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 0
# activate the toolbar
_activate_toolbar(browser)
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 1
# deactivate the toolbar
_activate_toolbar(browser, "Edit%20Metadata")
elements = browser.find_elements_by_css_selector(".nbgrader-cell")
assert len(elements) == 0
@pytest.mark.nbextensions
def test_tabbing(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# make it manually graded
_select_manual(browser)
# click the id field
element = browser.find_element_by_css_selector(".nbgrader-points-input")
element.click()
# get the active element
element = browser.execute_script("return document.activeElement")
assert "nbgrader-points-input" == element.get_attribute("class")
# press tab and check that the active element is correct
element.send_keys(Keys.TAB)
element = browser.execute_script("return document.activeElement")
assert "nbgrader-id-input" == element.get_attribute("class")
# make it autograder tests
_select_tests(browser)
# click the id field
element = browser.find_element_by_css_selector(".nbgrader-points-input")
element.click()
# get the active element
element = browser.execute_script("return document.activeElement")
assert "nbgrader-points-input" == element.get_attribute("class")
# press tab and check that the active element is correct
element.send_keys(Keys.TAB)
element = browser.execute_script("return document.activeElement")
assert "nbgrader-id-input" == element.get_attribute("class")
@pytest.mark.nbextensions
def test_total_points(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# make sure the total points is zero
assert _get_total_points(browser) == 0
# make it autograder tests and set the points to two
_select_tests(browser)
_set_points(browser)
_set_id(browser)
assert _get_total_points(browser) == 2
# make it a solution make sure the total points is zero
_select_solution(browser)
assert _get_total_points(browser) == 0
# make it autograder tests
_select_tests(browser)
assert _get_total_points(browser) == 2
# make it manually graded
_select_manual(browser)
assert _get_total_points(browser) == 2
# create a new cell
element = browser.find_element_by_tag_name("body")
element.send_keys(Keys.ESCAPE)
element.send_keys("b")
# make sure the toolbar appeared
def find_toolbar(browser):
try:
browser.find_elements_by_css_selector(".celltoolbar select")[1]
except IndexError:
return False
return True
_wait(browser).until(find_toolbar)
# make it a test cell
_select_tests(browser, index=1)
_set_points(browser, points=1, index=1)
_set_id(browser, cell_id="bar", index=1)
assert _get_total_points(browser) == 3
# delete the new cell
element = browser.find_elements_by_css_selector(".cell")[0]
element.click()
element.send_keys(Keys.ESCAPE)
element.send_keys("d")
element.send_keys("d")
assert _get_total_points(browser) == 1
# delete the first cell
element = browser.find_elements_by_css_selector(".cell")[0]
element.send_keys("d")
element.send_keys("d")
assert _get_total_points(browser) == 0
@pytest.mark.nbextensions
def test_cell_ids(browser, port):
_load_notebook(browser, port)
_activate_toolbar(browser)
# turn it into a cell with an id
_select_solution(browser)
# save and check for an error (blank id)
_save(browser)
_wait_for_modal(browser)
_dismiss_modal(browser)
# set the label
_set_id(browser)
# create a new cell
element = browser.find_element_by_tag_name("body")
element.send_keys(Keys.ESCAPE)
element.send_keys("b")
# make sure the toolbar appeared
def find_toolbar(browser):
try:
browser.find_elements_by_css_selector(".celltoolbar select")[1]
except IndexError:
return False
return True
_wait(browser).until(find_toolbar)
# make it a test cell and set the label
_select_tests(browser, index=1)
_set_id(browser, index=1)
# save and check for an error (duplicate id)
_save(browser)
_wait_for_modal(browser)
_dismiss_modal(browser)
|
|
"""
This file holds the helper classes and functions shared by the test files
"""
import unittest
import json
from app.models.shopping import User, ShoppingList, ShoppingItem, BlacklistToken
from app import create_app, db
class BaseTestClass(unittest.TestCase):
"""
This is the parent class for all test classes of endpoints
"""
def setUp(self):
"""
Set up the db, the app and some variables
"""
self.app = create_app(config_name="testing")
self.client = self.app.test_client
self.shoppinglist_data = {'title': 'Groceries and Home stuff'}
self.shoppingitem_data = {'name':'fruit', 'quantity':5, 'unit': 'units'}
self.user_data = {
'username': 'johndoe',
'password': 'password',
'name': 'john doe',
'email': 'johndoe@example.com'
}
with self.app.app_context():
# create tables in test db
db.session.close()
db.drop_all()
db.create_all()
def logout_user(self, access_token=None):
"""
Helper method to logout a user
"""
if access_token:
with self.app.app_context():
response = self.client().post('/auth/logout',
headers=dict(Authorization="Bearer "+access_token))
return response
else:
raise ValueError('Invalid arguments for logout_user')
def register_user(self, user_data=None):
"""
Helper method to register a user
"""
required_keys = ('username', 'name', 'password', 'email')
if not user_data or not isinstance(user_data, dict):
user_data = self.user_data
with self.app.app_context():
# check if all the required keys are represented
if all(key in user_data for key in required_keys):
response = self.client().post('/auth/register',
data=json.dumps(user_data))
return response
else:
raise ValueError('Invalid arguments for register_user')
def login_user(self, user_data=None):
"""
Helper method to login a user
"""
required_keys = ('username', 'password')
if not user_data or not isinstance(user_data, dict):
user_data = self.user_data
with self.app.app_context():
# check if all the required keys are present
if all(key in user_data for key in required_keys):
login_details = dict(username=user_data['username'],
password=user_data['password'])
response = self.client().post('/auth/login',
data=json.dumps(login_details))
return response
else:
raise ValueError('Invalid arguments for login_user')
def make_request(self, method, *args, **kwargs):
"""
Helper method to make requests based on the method arg
"""
with self.app.app_context():
if method == 'POST':
return self.client().post(*args, **kwargs)
elif method == 'GET':
return self.client().get(*args, **kwargs)
elif method == 'PUT':
return self.client().put(*args, **kwargs)
elif method == 'DELETE':
return self.client().delete(*args, **kwargs)
else:
raise ValueError('Arguments are invalid for make request')
def invalid_data_request(self, url=None,
method='POST', invalid_data=None, access_token=None):
"""
Helper method to test invalid data for POST and PUT
"""
allowed_methods = ('PUT', 'POST')
invalid_data_message = 'The data you sent was in the wrong structure'
with self.app.app_context():
if method in allowed_methods and (isinstance(invalid_data, dict)
or not invalid_data) and isinstance(url, str) and isinstance(access_token, str):
# sending invalid data, return message error and 400
on_attempt = self.make_request(method, url,
headers=dict(Authorization='Bearer ' + access_token),
data=json.dumps(invalid_data)
)
self.assertEqual(on_attempt.status_code, 400)
attempt_response = json.loads(on_attempt.data.decode())
self.assertEqual(attempt_response['message'],
invalid_data_message)
else:
                # if the arguments do not meet the requirements, raise an error
raise ValueError('The arguments are invalid for invalid data request')
def unauthorized_request(self, url=None, method='POST', data=None):
"""
Helper method to test for unauthorized/unauthenticated requests
for POST, PUT, GET, DELETE
"""
post_or_put = ('POST', 'PUT')
get_or_delete = ('GET', 'DELETE')
unauthorized_access_message = 'You do not have the appropriate permissions'
if method in post_or_put and isinstance(data, dict) and isinstance(url, str):
with self.app.app_context():
# sending data without appropriate auth, return message error and 403
on_unauthorized_request = self.make_request(method, url,
headers=dict(Authorization='Bearer ' + 'random string'),
data=json.dumps(data)
)
self.assertEqual(on_unauthorized_request.status_code, 403)
unauthorized_request_response = json.loads(
on_unauthorized_request.data.decode())
self.assertEqual(unauthorized_request_response['message'],
unauthorized_access_message)
elif method in get_or_delete and isinstance(url, str):
with self.app.app_context():
# this does not need data to be anything but None
# an error message is returned and a status code of 403
on_unauthorized_request = self.make_request(method, url,
headers=dict(Authorization='Bearer ' + 'random string'))
self.assertEqual(on_unauthorized_request.status_code, 403)
unauthorized_request_response = json.loads(
on_unauthorized_request.data.decode())
self.assertEqual(unauthorized_request_response['message'],
unauthorized_access_message)
else:
# after all the elif's if nothing matches, raise an error
raise ValueError('The arguments are invalid for unauthorized request')
def make_logged_out_request(self, url, access_token, method, data=None):
"""
Helper method to make logged out request
"""
if isinstance(url, str) and isinstance(access_token, str) and isinstance(method, str):
post_or_put = ('POST', 'PUT')
get_or_delete = ('DELETE', 'GET')
self.logout_user(access_token=access_token)
if method in post_or_put:
# requires data
if not data or not isinstance(data, dict):
                    raise ValueError('For POST or PUT the data in dict form '
                                     'should be provided for make_logged_out_request')
with self.app.app_context():
# make the request
logged_out_response = self.make_request(method, url,
headers=dict(Authorization='Bearer ' + access_token),
data=json.dumps(data)
)
elif method in get_or_delete:
with self.app.app_context():
# make the request
logged_out_response = self.make_request(method, url,
headers=dict(Authorization='Bearer ' + access_token))
else:
                raise ValueError('method can only be POST, PUT, '
                                 'GET or DELETE in make_logged_out_request')
# the status code should be 401
self.assertEqual(logged_out_response.status_code, 401)
self.assertEqual(json.loads(logged_out_response.data.decode())['message'],
'You are already logged out')
else:
raise ValueError('Invalid arguments for make_logged_out_request')
def get_default_token(self):
"""
A helper method to register and login a user
to get access token
"""
with self.app.app_context():
self.register_user()
response = self.login_user()
try:
data_got = json.loads(response.data.decode())
return data_got['access_token']
except KeyError:
raise KeyError('"access_token" is not a key. This is the data %s' % data_got)
def tearDown(self):
"""
Do cleanup of test database
"""
with self.app.app_context():
            db.session.remove()
db.drop_all()
class ShoppingParentTestClass(BaseTestClass):
"""
The test class to be inherited from by tests for endpoints of
ShoppingList and ShoppingItem
"""
def create_shopping_list(self, access_token, shoppinglist_data=None):
"""
A helper method to create a shopping list
"""
if not shoppinglist_data:
shoppinglist_data = self.shoppinglist_data
with self.app.app_context():
# create ShoppingList via post
on_create = self.client().post('/shoppinglists/',
headers=dict(Authorization='Bearer ' + access_token),
data=json.dumps(shoppinglist_data)
)
response = json.loads(on_create.data.decode())
if 'id' in response.keys():
return json.loads(on_create.data.decode())['id'], on_create
else:
return on_create.status_code, response
def make_get_request(self, url, access_token):
"""
Helper method to make get requests
"""
if url and access_token:
with self.app.app_context():
response = self.client().get(url,
headers=dict(Authorization="Bearer " + access_token))
return response
else:
raise ValueError('Invalid arguments for make_get_request')
class BaseModelTestClass(unittest.TestCase):
"""
Parent class for all model test classes
"""
def setUp(self):
"""
Initialize the app db
"""
self.app = create_app(config_name='testing')
with self.app.app_context():
db.create_all()
self.user = User('John Doe', 'john@example.com',
'password', 'johndoe')
self.shopping_list = ShoppingList('Groceries',
'family daily grocery shopping list', owner=self.user)
self.shopping_item = ShoppingItem('fruit', 5, 'units',
parent_list=self.shopping_list)
# create tables in test db
def tearDown(self):
"""
Do cleanup of test database
"""
with self.app.app_context():
            db.session.remove()
db.drop_all()
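# A minimal usage sketch of the helpers above, kept commented out so it does
# not run as part of the suite. It assumes the '/shoppinglists/' endpoint used
# in create_shopping_list and that creation answers with 200 or 201; the class
# and test names are purely illustrative.
#
# class ShoppingListEndpointSketch(ShoppingParentTestClass):
#     def test_create_shoppinglist_sketch(self):
#         # register + login to obtain a token
#         access_token = self.get_default_token()
#         # create a list; on success the helper returns (id, response)
#         list_id, on_create = self.create_shopping_list(access_token)
#         self.assertIn(on_create.status_code, (200, 201))
#         # a bogus token must not be able to read the collection
#         self.unauthorized_request(url='/shoppinglists/', method='GET')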
|
|
from themer import ColorParser, check_file_regex
import re
class VimColorParser(ColorParser):
    check = check_file_regex(r'\.vim$')
    vimGroups = ['Normal', 'Visual', 'VertSplit', 'Identifier', 'Statement',
                 'PreProc', 'Type', 'Function', 'String', 'Conditional',
                 'Repeat', 'Label', 'Operator', 'Keyword', 'Exception',
                 'Character', 'Number', 'Boolean', 'Float', 'Comment',
                 'Special', 'Error', 'Todo']
vimGroups.reverse()
mapping = {
'bg': ['background', 'black', 'alt_black'],
'fg': ['foreground', 'white'],
'rest': ['red', 'alt_red', 'green', 'alt_green', 'yellow', 'alt_yellow', 'blue', 'alt_blue', 'magenta', 'alt_magenta', 'cyan', 'alt_cyan', 'alt_white']
}
    def mapVimColorNames(self, name):
        # look up a vim color name in the module-level mapping; fall back to
        # returning the name unchanged if it is not a known vim color
        for o in vimColorsMapping:
            if o['name'] == name:
                return o['hex']
        return name
def read(self):
normalBg, normalFg = '', ''
self.colors = []
output = {}
groupRegex = re.compile(r'hi\s(\w*)\s')
guibgRegex = re.compile(r'guibg\=(\S*)\s')
guifgRegex = re.compile(r'guifg\=(\S*)\s')
with open(self.data) as fh:
for line in fh:
guifg = guifgRegex.findall(line)
guibg = guibgRegex.findall(line)
group = groupRegex.findall(line)
if(group and (guifg or guibg)):
currentLine = {
"group": group[0] if group else ''
, "fg": guifg[0] if guifg else ''
, "bg": guibg[0] if guibg else ''
}
                    # normalize each line
                    currentLine.update((x, y.strip().lower()) for x, y in currentLine.items())
                    # remember the basic colors from the 'Normal' group
                    if currentLine["group"] == "normal":
                        normalFg = currentLine["fg"]
                        normalBg = currentLine["bg"]
                    self.colors.append(currentLine)
        # convert vim color names (anything not already a hex value) to hex
        for entry in self.colors:
            if not entry['fg'].startswith('#'):
                entry['fg'] = self.mapVimColorNames(entry['fg'])
            if not entry['bg'].startswith('#'):
                entry['bg'] = self.mapVimColorNames(entry['bg'])
        # the Normal group's colors fill every fg/bg slot; the remaining vim
        # groups of interest then fill the 'rest' slots one by one
        for fgtype in self.mapping['fg']:
            output.update({fgtype: normalFg})
        for bgtype in self.mapping['bg']:
            output.update({bgtype: normalBg})
        # pop from a copy so the class-level mapping is not emptied between reads
        rest = list(self.mapping['rest'])
        for el in self.colors:
            for group in self.vimGroups:
                if rest and el['group'] == group.lower():
                    output.update({rest.pop(): el['fg']})
return output
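# Rough usage sketch (assumptions: the themer framework hands the colorscheme
# file path to the parser as `self.data`; the constructor call and the file
# name below are illustrative only):
#
#   parser = VimColorParser('colors/gruvbox.vim')
#   colors = parser.read()
#   # `colors` now maps slots such as 'background', 'foreground', 'red', ...
#   # to hex values taken from the scheme's `hi` lines.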
vimColorsMapping = [
{
"name":"black",
"hex":"#000000"
},
{
"name":"gray0",
"hex":"#000000"
},
{
"name":"grey0",
"hex":"#000000"
},
{
"name":"gray1",
"hex":"#030303"
},
{
"name":"grey1",
"hex":"#030303"
},
{
"name":"gray2",
"hex":"#050505"
},
{
"name":"grey2",
"hex":"#050505"
},
{
"name":"gray3",
"hex":"#080808"
},
{
"name":"grey3",
"hex":"#080808"
},
{
"name":"navyblue",
"hex":"#000080"
},
{
"name":"navy blue",
"hex":"#000080"
},
{
"name":"navy",
"hex":"#000080"
},
{
"name":"gray4",
"hex":"#0a0a0a"
},
{
"name":"grey4",
"hex":"#0a0a0a"
},
{
"name":"darkblue",
"hex":"#00008b"
},
{
"name":"blue4",
"hex":"#00008b"
},
{
"name":"dark blue",
"hex":"#00008b"
},
{
"name":"gray5",
"hex":"#0d0d0d"
},
{
"name":"grey5",
"hex":"#0d0d0d"
},
{
"name":"mediumblue",
"hex":"#0000cd"
},
{
"name":"blue3",
"hex":"#0000cd"
},
{
"name":"medium blue",
"hex":"#0000cd"
},
{
"name":"gray6",
"hex":"#0f0f0f"
},
{
"name":"grey6",
"hex":"#0f0f0f"
},
{
"name":"blue2",
"hex":"#0000ee"
},
{
"name":"gray7",
"hex":"#121212"
},
{
"name":"grey7",
"hex":"#121212"
},
{
"name":"blue1",
"hex":"#0000ff"
},
{
"name":"blue",
"hex":"#0000ff"
},
{
"name":"gray8",
"hex":"#141414"
},
{
"name":"grey8",
"hex":"#141414"
},
{
"name":"gray9",
"hex":"#171717"
},
{
"name":"grey9",
"hex":"#171717"
},
{
"name":"gray10",
"hex":"#1a1a1a"
},
{
"name":"grey10",
"hex":"#1a1a1a"
},
{
"name":"gray11",
"hex":"#1c1c1c"
},
{
"name":"grey11",
"hex":"#1c1c1c"
},
{
"name":"darkred",
"hex":"#8b0000"
},
{
"name":"dark red",
"hex":"#8b0000"
},
{
"name":"red4",
"hex":"#8b0000"
},
{
"name":"gray12",
"hex":"#1f1f1f"
},
{
"name":"grey12",
"hex":"#1f1f1f"
},
{
"name":"midnightblue",
"hex":"#191970"
},
{
"name":"midnight blue",
"hex":"#191970"
},
{
"name":"gray13",
"hex":"#212121"
},
{
"name":"grey13",
"hex":"#212121"
},
{
"name":"gray14",
"hex":"#242424"
},
{
"name":"grey14",
"hex":"#242424"
},
{
"name":"gray15",
"hex":"#262626"
},
{
"name":"grey15",
"hex":"#262626"
},
{
"name":"darkmagenta",
"hex":"#8b008b"
},
{
"name":"dark magenta",
"hex":"#8b008b"
},
{
"name":"magenta4",
"hex":"#8b008b"
},
{
"name":"gray16",
"hex":"#292929"
},
{
"name":"grey16",
"hex":"#292929"
},
{
"name":"deeppink4",
"hex":"#8b0a50"
},
{
"name":"gray17",
"hex":"#2b2b2b"
},
{
"name":"grey17",
"hex":"#2b2b2b"
},
{
"name":"red3",
"hex":"#cd0000"
},
{
"name":"gray18",
"hex":"#2e2e2e"
},
{
"name":"grey18",
"hex":"#2e2e2e"
},
{
"name":"darkviolet",
"hex":"#9400d3"
},
{
"name":"dark violet",
"hex":"#9400d3"
},
{
"name":"purple4",
"hex":"#551a8b"
},
{
"name":"gray19",
"hex":"#303030"
},
{
"name":"grey19",
"hex":"#303030"
},
{
"name":"firebrick4",
"hex":"#8b1a1a"
},
{
"name":"red2",
"hex":"#ee0000"
},
{
"name":"gray20",
"hex":"#333333"
},
{
"name":"grey20",
"hex":"#333333"
},
{
"name":"gray21",
"hex":"#363636"
},
{
"name":"grey21",
"hex":"#363636"
},
{
"name":"red1",
"hex":"#ff0000"
},
{
"name":"red",
"hex":"#ff0000"
},
{
"name":"gray22",
"hex":"#383838"
},
{
"name":"grey22",
"hex":"#383838"
},
{
"name":"orangered4",
"hex":"#8b2500"
},
{
"name":"darkorchid4",
"hex":"#68228b"
},
{
"name":"maroon4",
"hex":"#8b1c62"
},
{
"name":"brown4",
"hex":"#8b2323"
},
{
"name":"magenta3",
"hex":"#cd00cd"
},
{
"name":"gray23",
"hex":"#3b3b3b"
},
{
"name":"grey23",
"hex":"#3b3b3b"
},
{
"name":"violetred4",
"hex":"#8b2252"
},
{
"name":"gray24",
"hex":"#3d3d3d"
},
{
"name":"grey24",
"hex":"#3d3d3d"
},
{
"name":"deeppink3",
"hex":"#cd1076"
},
{
"name":"gray25",
"hex":"#404040"
},
{
"name":"grey25",
"hex":"#404040"
},
{
"name":"royalblue4",
"hex":"#27408b"
},
{
"name":"firebrick",
"hex":"#b22222"
},
{
"name":"gray26",
"hex":"#424242"
},
{
"name":"grey26",
"hex":"#424242"
},
{
"name":"mediumvioletred",
"hex":"#c71585"
},
{
"name":"medium violet red",
"hex":"#c71585"
},
{
"name":"magenta2",
"hex":"#ee00ee"
},
{
"name":"slateblue4",
"hex":"#473c8b"
},
{
"name":"brown",
"hex":"#a52a2a"
},
{
"name":"purple3",
"hex":"#7d26cd"
},
{
"name":"darkslateblue",
"hex":"#483d8b"
},
{
"name":"dark slate blue",
"hex":"#483d8b"
},
{
"name":"gray27",
"hex":"#454545"
},
{
"name":"grey27",
"hex":"#454545"
},
{
"name":"dodgerblue4",
"hex":"#104e8b"
},
{
"name":"tomato4",
"hex":"#8b3626"
},
{
"name":"gray28",
"hex":"#474747"
},
{
"name":"grey28",
"hex":"#474747"
},
{
"name":"darkgreen",
"hex":"#006400"
},
{
"name":"dark green",
"hex":"#006400"
},
{
"name":"darkslategray",
"hex":"#2f4f4f"
},
{
"name":"darkslategrey",
"hex":"#2f4f4f"
},
{
"name":"dark slate gray",
"hex":"#2f4f4f"
},
{
"name":"dark slate grey",
"hex":"#2f4f4f"
},
{
"name":"magenta1",
"hex":"#ff00ff"
},
{
"name":"magenta",
"hex":"#ff00ff"
},
{
"name":"deeppink2",
"hex":"#ee1289"
},
{
"name":"firebrick3",
"hex":"#cd2626"
},
{
"name":"gray29",
"hex":"#4a4a4a"
},
{
"name":"grey29",
"hex":"#4a4a4a"
},
{
"name":"purple",
"hex":"#a020f0"
},
{
"name":"indianred4",
"hex":"#8b3a3a"
},
{
"name":"mediumorchid4",
"hex":"#7a378b"
},
{
"name":"blueviolet",
"hex":"#8a2be2"
},
{
"name":"blue violet",
"hex":"#8a2be2"
},
{
"name":"gray30",
"hex":"#4d4d4d"
},
{
"name":"grey30",
"hex":"#4d4d4d"
},
{
"name":"coral4",
"hex":"#8b3e2f"
},
{
"name":"violetred",
"hex":"#d02090"
},
{
"name":"violet red",
"hex":"#d02090"
},
{
"name":"hotpink4",
"hex":"#8b3a62"
},
{
"name":"maroon",
"hex":"#b03060"
},
{
"name":"darkorange4",
"hex":"#8b4500"
},
{
"name":"gray31",
"hex":"#4f4f4f"
},
{
"name":"grey31",
"hex":"#4f4f4f"
},
{
"name":"deeppink1",
"hex":"#ff1493"
},
{
"name":"deeppink",
"hex":"#ff1493"
},
{
"name":"deep pink",
"hex":"#ff1493"
},
{
"name":"purple2",
"hex":"#912cee"
},
{
"name":"saddlebrown",
"hex":"#8b4513"
},
{
"name":"chocolate4",
"hex":"#8b4513"
},
{
"name":"saddle brown",
"hex":"#8b4513"
},
{
"name":"mediumpurple4",
"hex":"#5d478b"
},
{
"name":"gray32",
"hex":"#525252"
},
{
"name":"grey32",
"hex":"#525252"
},
{
"name":"orangered3",
"hex":"#cd3700"
},
{
"name":"darkorchid",
"hex":"#9932cc"
},
{
"name":"dark orchid",
"hex":"#9932cc"
},
{
"name":"sienna4",
"hex":"#8b4726"
},
{
"name":"darkorchid3",
"hex":"#9a32cd"
},
{
"name":"maroon3",
"hex":"#cd2990"
},
{
"name":"brown3",
"hex":"#cd3333"
},
{
"name":"gray33",
"hex":"#545454"
},
{
"name":"grey33",
"hex":"#545454"
},
{
"name":"deepskyblue4",
"hex":"#00688b"
},
{
"name":"firebrick2",
"hex":"#ee2c2c"
},
{
"name":"purple1",
"hex":"#9b30ff"
},
{
"name":"gray34",
"hex":"#575757"
},
{
"name":"grey34",
"hex":"#575757"
},
{
"name":"palevioletred4",
"hex":"#8b475d"
},
{
"name":"violetred3",
"hex":"#cd3278"
},
{
"name":"salmon4",
"hex":"#8b4c39"
},
{
"name":"gray35",
"hex":"#595959"
},
{
"name":"grey35",
"hex":"#595959"
},
{
"name":"orchid4",
"hex":"#8b4789"
},
{
"name":"gray36",
"hex":"#5c5c5c"
},
{
"name":"grey36",
"hex":"#5c5c5c"
},
{
"name":"firebrick1",
"hex":"#ff3030"
},
{
"name":"steelblue4",
"hex":"#36648b"
},
{
"name":"orange4",
"hex":"#8b5a00"
},
{
"name":"gray37",
"hex":"#5e5e5e"
},
{
"name":"grey37",
"hex":"#5e5e5e"
},
{
"name":"royalblue3",
"hex":"#3a5fcd"
},
{
"name":"sienna",
"hex":"#a0522d"
},
{
"name":"orangered2",
"hex":"#ee4000"
},
{
"name":"darkorchid2",
"hex":"#b23aee"
},
{
"name":"lightsalmon4",
"hex":"#8b5742"
},
{
"name":"maroon2",
"hex":"#ee30a7"
},
{
"name":"gray38",
"hex":"#616161"
},
{
"name":"grey38",
"hex":"#616161"
},
{
"name":"tan4",
"hex":"#8b5a2b"
},
{
"name":"brown2",
"hex":"#ee3b3b"
},
{
"name":"darkolivegreen",
"hex":"#556b2f"
},
{
"name":"dark olive green",
"hex":"#556b2f"
},
{
"name":"gray39",
"hex":"#636363"
},
{
"name":"grey39",
"hex":"#636363"
},
{
"name":"green4",
"hex":"#008b00"
},
{
"name":"slateblue3",
"hex":"#6959cd"
},
{
"name":"slateblue",
"hex":"#6a5acd"
},
{
"name":"slate blue",
"hex":"#6a5acd"
},
{
"name":"gray40",
"hex":"#666666"
},
{
"name":"grey40",
"hex":"#666666"
},
{
"name":"violetred2",
"hex":"#ee3a8c"
},
{
"name":"darkgoldenrod4",
"hex":"#8b6508"
},
{
"name":"dodgerblue3",
"hex":"#1874cd"
},
{
"name":"darkorchid1",
"hex":"#bf3eff"
},
{
"name":"orangered1",
"hex":"#ff4500"
},
{
"name":"orangered",
"hex":"#ff4500"
},
{
"name":"orange red",
"hex":"#ff4500"
},
{
"name":"tomato3",
"hex":"#cd4f39"
},
{
"name":"maroon1",
"hex":"#ff34b3"
},
{
"name":"springgreen4",
"hex":"#008b45"
},
{
"name":"brown1",
"hex":"#ff4040"
},
{
"name":"lightpink4",
"hex":"#8b5f65"
},
{
"name":"dimgray",
"hex":"#696969"
},
{
"name":"dimgrey",
"hex":"#696969"
},
{
"name":"dim gray",
"hex":"#696969"
},
{
"name":"dim grey",
"hex":"#696969"
},
{
"name":"gray41",
"hex":"#696969"
},
{
"name":"grey41",
"hex":"#696969"
},
{
"name":"royalblue",
"hex":"#4169e1"
},
{
"name":"royal blue",
"hex":"#4169e1"
},
{
"name":"skyblue4",
"hex":"#4a708b"
},
{
"name":"turquoise4",
"hex":"#00868b"
},
{
"name":"goldenrod4",
"hex":"#8b6914"
},
{
"name":"gray42",
"hex":"#6b6b6b"
},
{
"name":"grey42",
"hex":"#6b6b6b"
},
{
"name":"pink4",
"hex":"#8b636c"
},
{
"name":"forestgreen",
"hex":"#228b22"
},
{
"name":"forest green",
"hex":"#228b22"
},
{
"name":"violetred1",
"hex":"#ff3e96"
},
{
"name":"darkcyan",
"hex":"#008b8b"
},
{
"name":"cyan4",
"hex":"#008b8b"
},
{
"name":"dark cyan",
"hex":"#008b8b"
},
{
"name":"gray43",
"hex":"#6e6e6e"
},
{
"name":"grey43",
"hex":"#6e6e6e"
},
{
"name":"royalblue2",
"hex":"#436eee"
},
{
"name":"indianred3",
"hex":"#cd5555"
},
{
"name":"mediumorchid3",
"hex":"#b452cd"
},
{
"name":"gray44",
"hex":"#707070"
},
{
"name":"grey44",
"hex":"#707070"
},
{
"name":"rosybrown4",
"hex":"#8b6969"
},
{
"name":"plum4",
"hex":"#8b668b"
},
{
"name":"gold4",
"hex":"#8b7500"
},
{
"name":"coral3",
"hex":"#cd5b45"
},
{
"name":"chartreuse4",
"hex":"#458b00"
},
{
"name":"gray45",
"hex":"#737373"
},
{
"name":"grey45",
"hex":"#737373"
},
{
"name":"seagreen4",
"hex":"#2e8b57"
},
{
"name":"seagreen",
"hex":"#2e8b57"
},
{
"name":"sea green",
"hex":"#2e8b57"
},
{
"name":"mediumorchid",
"hex":"#ba55d3"
},
{
"name":"medium orchid",
"hex":"#ba55d3"
},
{
"name":"indianred",
"hex":"#cd5c5c"
},
{
"name":"indian red",
"hex":"#cd5c5c"
},
{
"name":"darkorange3",
"hex":"#cd6600"
},
{
"name":"slateblue2",
"hex":"#7a67ee"
},
{
"name":"gray46",
"hex":"#757575"
},
{
"name":"grey46",
"hex":"#757575"
},
{
"name":"mediumslateblue",
"hex":"#7b68ee"
},
{
"name":"medium slate blue",
"hex":"#7b68ee"
},
{
"name":"burlywood4",
"hex":"#8b7355"
},
{
"name":"royalblue1",
"hex":"#4876ff"
},
{
"name":"mediumpurple3",
"hex":"#8968cd"
},
{
"name":"lightskyblue4",
"hex":"#607b8b"
},
{
"name":"chocolate3",
"hex":"#cd661d"
},
{
"name":"dodgerblue2",
"hex":"#1c86ee"
},
{
"name":"gray47",
"hex":"#787878"
},
{
"name":"grey47",
"hex":"#787878"
},
{
"name":"steelblue",
"hex":"#4682b4"
},
{
"name":"steel blue",
"hex":"#4682b4"
},
{
"name":"slategray4",
"hex":"#6c7b8b"
},
{
"name":"tomato2",
"hex":"#ee5c42"
},
{
"name":"lightsteelblue4",
"hex":"#6e7b8b"
},
{
"name":"chocolate",
"hex":"#d2691e"
},
{
"name":"peachpuff4",
"hex":"#8b7765"
},
{
"name":"gray48",
"hex":"#7a7a7a"
},
{
"name":"grey48",
"hex":"#7a7a7a"
},
{
"name":"sienna3",
"hex":"#cd6839"
},
{
"name":"aquamarine4",
"hex":"#458b74"
},
{
"name":"hotpink3",
"hex":"#cd6090"
},
{
"name":"navajowhite4",
"hex":"#8b795e"
},
{
"name":"palegreen4",
"hex":"#548b54"
},
{
"name":"cadetblue4",
"hex":"#53868b"
},
{
"name":"olivedrab4",
"hex":"#698b22"
},
{
"name":"deepskyblue3",
"hex":"#009acd"
},
{
"name":"gray49",
"hex":"#7d7d7d"
},
{
"name":"grey49",
"hex":"#7d7d7d"
},
{
"name":"slateblue1",
"hex":"#836fff"
},
{
"name":"slategray",
"hex":"#708090"
},
{
"name":"slategrey",
"hex":"#708090"
},
{
"name":"slate gray",
"hex":"#708090"
},
{
"name":"slate grey",
"hex":"#708090"
},
{
"name":"lightblue4",
"hex":"#68838b"
},
{
"name":"lightslateblue",
"hex":"#8470ff"
},
{
"name":"light slate blue",
"hex":"#8470ff"
},
{
"name":"bisque4",
"hex":"#8b7d6b"
},
{
"name":"olivedrab",
"hex":"#6b8e23"
},
{
"name":"olive drab",
"hex":"#6b8e23"
},
{
"name":"darkslategray4",
"hex":"#528b8b"
},
{
"name":"gray50",
"hex":"#7f7f7f"
},
{
"name":"grey50",
"hex":"#7f7f7f"
},
{
"name":"wheat4",
"hex":"#8b7e66"
},
{
"name":"mediumpurple",
"hex":"#9370db"
},
{
"name":"medium purple",
"hex":"#9370db"
},
{
"name":"darkolivegreen4",
"hex":"#6e8b3d"
},
{
"name":"lightgoldenrod4",
"hex":"#8b814c"
},
{
"name":"thistle4",
"hex":"#8b7b8b"
},
{
"name":"dodgerblue1",
"hex":"#1e90ff"
},
{
"name":"dodgerblue",
"hex":"#1e90ff"
},
{
"name":"dodger blue",
"hex":"#1e90ff"
},
{
"name":"mistyrose4",
"hex":"#8b7d7b"
},
{
"name":"palevioletred3",
"hex":"#cd6889"
},
{
"name":"indianred2",
"hex":"#ee6363"
},
{
"name":"yellow4",
"hex":"#8b8b00"
},
{
"name":"darkseagreen4",
"hex":"#698b69"
},
{
"name":"mediumorchid2",
"hex":"#d15fee"
},
{
"name":"salmon3",
"hex":"#cd7054"
},
{
"name":"gray51",
"hex":"#828282"
},
{
"name":"grey51",
"hex":"#828282"
},
{
"name":"tomato1",
"hex":"#ff6347"
},
{
"name":"tomato",
"hex":"#ff6347"
},
{
"name":"khaki4",
"hex":"#8b864e"
},
{
"name":"paleturquoise4",
"hex":"#668b8b"
},
{
"name":"antiquewhite4",
"hex":"#8b8378"
},
{
"name":"coral2",
"hex":"#ee6a50"
},
{
"name":"lavenderblush4",
"hex":"#8b8386"
},
{
"name":"gray52",
"hex":"#858585"
},
{
"name":"grey52",
"hex":"#858585"
},
{
"name":"orchid3",
"hex":"#cd69c9"
},
{
"name":"lightslategray",
"hex":"#778899"
},
{
"name":"lightslategrey",
"hex":"#778899"
},
{
"name":"light slate gray",
"hex":"#778899"
},
{
"name":"light slate grey",
"hex":"#778899"
},
{
"name":"seashell4",
"hex":"#8b8682"
},
{
"name":"darkorange2",
"hex":"#ee7600"
},
{
"name":"gray53",
"hex":"#878787"
},
{
"name":"grey53",
"hex":"#878787"
},
{
"name":"lightcyan4",
"hex":"#7a8b8b"
},
{
"name":"cornsilk4",
"hex":"#8b8878"
},
{
"name":"lemonchiffon4",
"hex":"#8b8970"
},
{
"name":"darkgoldenrod",
"hex":"#b8860b"
},
{
"name":"dark goldenrod",
"hex":"#b8860b"
},
{
"name":"honeydew4",
"hex":"#838b83"
},
{
"name":"palevioletred",
"hex":"#db7093"
},
{
"name":"pale violet red",
"hex":"#db7093"
},
{
"name":"azure4",
"hex":"#838b8b"
},
{
"name":"chocolate2",
"hex":"#ee7621"
},
{
"name":"snow4",
"hex":"#8b8989"
},
{
"name":"steelblue3",
"hex":"#4f94cd"
},
{
"name":"mediumpurple2",
"hex":"#9f79ee"
},
{
"name":"indianred1",
"hex":"#ff6a6a"
},
{
"name":"lightyellow4",
"hex":"#8b8b7a"
},
{
"name":"gray54",
"hex":"#8a8a8a"
},
{
"name":"grey54",
"hex":"#8a8a8a"
},
{
"name":"ivory4",
"hex":"#8b8b83"
},
{
"name":"hotpink2",
"hex":"#ee6aa7"
},
{
"name":"orange3",
"hex":"#cd8500"
},
{
"name":"mediumorchid1",
"hex":"#e066ff"
},
{
"name":"gray55",
"hex":"#8c8c8c"
},
{
"name":"grey55",
"hex":"#8c8c8c"
},
{
"name":"orchid",
"hex":"#da70d6"
},
{
"name":"sienna2",
"hex":"#ee7942"
},
{
"name":"coral1",
"hex":"#ff7256"
},
{
"name":"hotpink",
"hex":"#ff69b4"
},
{
"name":"hot pink",
"hex":"#ff69b4"
},
{
"name":"lightsalmon3",
"hex":"#cd8162"
},
{
"name":"gray56",
"hex":"#8f8f8f"
},
{
"name":"grey56",
"hex":"#8f8f8f"
},
{
"name":"peru",
"hex":"#cd853f"
},
{
"name":"tan3",
"hex":"#cd853f"
},
{
"name":"deepskyblue2",
"hex":"#00b2ee"
},
{
"name":"cadetblue",
"hex":"#5f9ea0"
},
{
"name":"cadet blue",
"hex":"#5f9ea0"
},
{
"name":"cornflowerblue",
"hex":"#6495ed"
},
{
"name":"cornflower blue",
"hex":"#6495ed"
},
{
"name":"gray57",
"hex":"#919191"
},
{
"name":"grey57",
"hex":"#919191"
},
{
"name":"darkorange1",
"hex":"#ff7f00"
},
{
"name":"hotpink1",
"hex":"#ff6eb4"
},
{
"name":"lightseagreen",
"hex":"#20b2aa"
},
{
"name":"light sea green",
"hex":"#20b2aa"
},
{
"name":"green3",
"hex":"#00cd00"
},
{
"name":"chocolate1",
"hex":"#ff7f24"
},
{
"name":"mediumpurple1",
"hex":"#ab82ff"
},
{
"name":"gray58",
"hex":"#949494"
},
{
"name":"grey58",
"hex":"#949494"
},
{
"name":"palevioletred2",
"hex":"#ee799f"
},
{
"name":"mediumseagreen",
"hex":"#3cb371"
},
{
"name":"medium sea green",
"hex":"#3cb371"
},
{
"name":"gray59",
"hex":"#969696"
},
{
"name":"grey59",
"hex":"#969696"
},
{
"name":"salmon2",
"hex":"#ee8262"
},
{
"name":"coral",
"hex":"#ff7f50"
},
{
"name":"darkgoldenrod3",
"hex":"#cd950c"
},
{
"name":"lightcoral",
"hex":"#f08080"
},
{
"name":"light coral",
"hex":"#f08080"
},
{
"name":"sienna1",
"hex":"#ff8247"
},
{
"name":"rosybrown",
"hex":"#bc8f8f"
},
{
"name":"rosy brown",
"hex":"#bc8f8f"
},
{
"name":"salmon",
"hex":"#fa8072"
},
{
"name":"gray60",
"hex":"#999999"
},
{
"name":"grey60",
"hex":"#999999"
},
{
"name":"springgreen3",
"hex":"#00cd66"
},
{
"name":"darkorange",
"hex":"#ff8c00"
},
{
"name":"dark orange",
"hex":"#ff8c00"
},
{
"name":"lightpink3",
"hex":"#cd8c95"
},
{
"name":"orchid2",
"hex":"#ee7ae9"
},
{
"name":"deepskyblue1",
"hex":"#00bfff"
},
{
"name":"deepskyblue",
"hex":"#00bfff"
},
{
"name":"deep sky blue",
"hex":"#00bfff"
},
{
"name":"turquoise3",
"hex":"#00c5cd"
},
{
"name":"gray61",
"hex":"#9c9c9c"
},
{
"name":"grey61",
"hex":"#9c9c9c"
},
{
"name":"skyblue3",
"hex":"#6ca6cd"
},
{
"name":"goldenrod3",
"hex":"#cd9b1d"
},
{
"name":"gray62",
"hex":"#9e9e9e"
},
{
"name":"grey62",
"hex":"#9e9e9e"
},
{
"name":"pink3",
"hex":"#cd919e"
},
{
"name":"palevioletred1",
"hex":"#ff82ab"
},
{
"name":"steelblue2",
"hex":"#5cacee"
},
{
"name":"orange2",
"hex":"#ee9a00"
},
{
"name":"violet",
"hex":"#ee82ee"
},
{
"name":"limegreen",
"hex":"#32cd32"
},
{
"name":"lime green",
"hex":"#32cd32"
},
{
"name":"gray63",
"hex":"#a1a1a1"
},
{
"name":"grey63",
"hex":"#a1a1a1"
},
{
"name":"cyan3",
"hex":"#00cdcd"
},
{
"name":"salmon1",
"hex":"#ff8c69"
},
{
"name":"darkturquoise",
"hex":"#00ced1"
},
{
"name":"dark turquoise",
"hex":"#00ced1"
},
{
"name":"gray64",
"hex":"#a3a3a3"
},
{
"name":"grey64",
"hex":"#a3a3a3"
},
{
"name":"lightsalmon2",
"hex":"#ee9572"
},
{
"name":"darksalmon",
"hex":"#e9967a"
},
{
"name":"dark salmon",
"hex":"#e9967a"
},
{
"name":"rosybrown3",
"hex":"#cd9b9b"
},
{
"name":"plum3",
"hex":"#cd96cd"
},
{
"name":"orchid1",
"hex":"#ff83fa"
},
{
"name":"gray65",
"hex":"#a6a6a6"
},
{
"name":"grey65",
"hex":"#a6a6a6"
},
{
"name":"tan2",
"hex":"#ee9a49"
},
{
"name":"goldenrod",
"hex":"#daa520"
},
{
"name":"gold3",
"hex":"#cdad00"
},
{
"name":"gray66",
"hex":"#a8a8a8"
},
{
"name":"grey66",
"hex":"#a8a8a8"
},
{
"name":"chartreuse3",
"hex":"#66cd00"
},
{
"name":"darkgray",
"hex":"#a9a9a9"
},
{
"name":"darkgrey",
"hex":"#a9a9a9"
},
{
"name":"dark gray",
"hex":"#a9a9a9"
},
{
"name":"dark grey",
"hex":"#a9a9a9"
},
{
"name":"seagreen3",
"hex":"#43cd80"
},
{
"name":"green2",
"hex":"#00ee00"
},
{
"name":"gray67",
"hex":"#ababab"
},
{
"name":"grey67",
"hex":"#ababab"
},
{
"name":"steelblue1",
"hex":"#63b8ff"
},
{
"name":"orange1",
"hex":"#ffa500"
},
{
"name":"orange",
"hex":"#ffa500"
},
{
"name":"gray68",
"hex":"#adadad"
},
{
"name":"grey68",
"hex":"#adadad"
},
{
"name":"burlywood3",
"hex":"#cdaa7d"
},
{
"name":"lightskyblue3",
"hex":"#8db6cd"
},
{
"name":"darkseagreen",
"hex":"#8fbc8f"
},
{
"name":"dark sea green",
"hex":"#8fbc8f"
},
{
"name":"darkgoldenrod2",
"hex":"#eead0e"
},
{
"name":"gray69",
"hex":"#b0b0b0"
},
{
"name":"grey69",
"hex":"#b0b0b0"
},
{
"name":"sandybrown",
"hex":"#f4a460"
},
{
"name":"sandy brown",
"hex":"#f4a460"
},
{
"name":"plum",
"hex":"#dda0dd"
},
{
"name":"lightsalmon1",
"hex":"#ffa07a"
},
{
"name":"lightsalmon",
"hex":"#ffa07a"
},
{
"name":"light salmon",
"hex":"#ffa07a"
},
{
"name":"tan1",
"hex":"#ffa54f"
},
{
"name":"lightsteelblue3",
"hex":"#a2b5cd"
},
{
"name":"springgreen2",
"hex":"#00ee76"
},
{
"name":"slategray3",
"hex":"#9fb6cd"
},
{
"name":"darkkhaki",
"hex":"#bdb76b"
},
{
"name":"dark khaki",
"hex":"#bdb76b"
},
{
"name":"lightpink2",
"hex":"#eea2ad"
},
{
"name":"gray70",
"hex":"#b3b3b3"
},
{
"name":"grey70",
"hex":"#b3b3b3"
},
{
"name":"peachpuff3",
"hex":"#cdaf95"
},
{
"name":"mediumturquoise",
"hex":"#48d1cc"
},
{
"name":"medium turquoise",
"hex":"#48d1cc"
},
{
"name":"mediumaquamarine",
"hex":"#66cdaa"
},
{
"name":"aquamarine3",
"hex":"#66cdaa"
},
{
"name":"medium aquamarine",
"hex":"#66cdaa"
},
{
"name":"turquoise2",
"hex":"#00e5ee"
},
{
"name":"gray71",
"hex":"#b5b5b5"
},
{
"name":"grey71",
"hex":"#b5b5b5"
},
{
"name":"skyblue2",
"hex":"#7ec0ee"
},
{
"name":"cadetblue3",
"hex":"#7ac5cd"
},
{
"name":"navajowhite3",
"hex":"#cdb38b"
},
{
"name":"goldenrod2",
"hex":"#eeb422"
},
{
"name":"palegreen3",
"hex":"#7ccd7c"
},
{
"name":"green1",
"hex":"#00ff00"
},
{
"name":"green",
"hex":"#00ff00"
},
{
"name":"olivedrab3",
"hex":"#9acd32"
},
{
"name":"yellowgreen",
"hex":"#9acd32"
},
{
"name":"yellow green",
"hex":"#9acd32"
},
{
"name":"tan",
"hex":"#d2b48c"
},
{
"name":"gray72",
"hex":"#b8b8b8"
},
{
"name":"grey72",
"hex":"#b8b8b8"
},
{
"name":"pink2",
"hex":"#eea9b8"
},
{
"name":"lightblue3",
"hex":"#9ac0cd"
},
{
"name":"bisque3",
"hex":"#cdb79e"
},
{
"name":"gray73",
"hex":"#bababa"
},
{
"name":"grey73",
"hex":"#bababa"
},
{
"name":"darkslategray3",
"hex":"#79cdcd"
},
{
"name":"cyan2",
"hex":"#00eeee"
},
{
"name":"wheat3",
"hex":"#cdba96"
},
{
"name":"mistyrose3",
"hex":"#cdb7b5"
},
{
"name":"darkolivegreen3",
"hex":"#a2cd5a"
},
{
"name":"lightgoldenrod3",
"hex":"#cdbe70"
},
{
"name":"darkgoldenrod1",
"hex":"#ffb90f"
},
{
"name":"thistle3",
"hex":"#cdb5cd"
},
{
"name":"burlywood",
"hex":"#deb887"
},
{
"name":"turquoise",
"hex":"#40e0d0"
},
{
"name":"gray74",
"hex":"#bdbdbd"
},
{
"name":"grey74",
"hex":"#bdbdbd"
},
{
"name":"mediumspringgreen",
"hex":"#00fa9a"
},
{
"name":"medium spring green",
"hex":"#00fa9a"
},
{
"name":"gray",
"hex":"#bebebe"
},
{
"name":"grey",
"hex":"#bebebe"
},
{
"name":"yellow3",
"hex":"#cdcd00"
},
{
"name":"darkseagreen3",
"hex":"#9bcd9b"
},
{
"name":"gray75",
"hex":"#bfbfbf"
},
{
"name":"grey75",
"hex":"#bfbfbf"
},
{
"name":"springgreen1",
"hex":"#00ff7f"
},
{
"name":"springgreen",
"hex":"#00ff7f"
},
{
"name":"spring green",
"hex":"#00ff7f"
},
{
"name":"lightpink1",
"hex":"#ffaeb9"
},
{
"name":"plum2",
"hex":"#eeaeee"
},
{
"name":"rosybrown2",
"hex":"#eeb4b4"
},
{
"name":"skyblue",
"hex":"#87ceeb"
},
{
"name":"sky blue",
"hex":"#87ceeb"
},
{
"name":"paleturquoise3",
"hex":"#96cdcd"
},
{
"name":"khaki3",
"hex":"#cdc673"
},
{
"name":"antiquewhite3",
"hex":"#cdc0b0"
},
{
"name":"lightsteelblue",
"hex":"#b0c4de"
},
{
"name":"light steel blue",
"hex":"#b0c4de"
},
{
"name":"turquoise1",
"hex":"#00f5ff"
},
{
"name":"gray76",
"hex":"#c2c2c2"
},
{
"name":"grey76",
"hex":"#c2c2c2"
},
{
"name":"lightskyblue",
"hex":"#87cefa"
},
{
"name":"light sky blue",
"hex":"#87cefa"
},
{
"name":"gold2",
"hex":"#eec900"
},
{
"name":"skyblue1",
"hex":"#87ceff"
},
{
"name":"goldenrod1",
"hex":"#ffc125"
},
{
"name":"chartreuse2",
"hex":"#76ee00"
},
{
"name":"lavenderblush3",
"hex":"#cdc1c5"
},
{
"name":"gray77",
"hex":"#c4c4c4"
},
{
"name":"grey77",
"hex":"#c4c4c4"
},
{
"name":"seagreen2",
"hex":"#4eee94"
},
{
"name":"pink1",
"hex":"#ffb5c5"
},
{
"name":"thistle",
"hex":"#d8bfd8"
},
{
"name":"seashell3",
"hex":"#cdc5bf"
},
{
"name":"lightpink",
"hex":"#ffb6c1"
},
{
"name":"light pink",
"hex":"#ffb6c1"
},
{
"name":"gray78",
"hex":"#c7c7c7"
},
{
"name":"grey78",
"hex":"#c7c7c7"
},
{
"name":"lemonchiffon3",
"hex":"#cdc9a5"
},
{
"name":"cornsilk3",
"hex":"#cdc8b1"
},
{
"name":"lightcyan3",
"hex":"#b4cdcd"
},
{
"name":"cyan1",
"hex":"#00ffff"
},
{
"name":"cyan",
"hex":"#00ffff"
},
{
"name":"gray79",
"hex":"#c9c9c9"
},
{
"name":"grey79",
"hex":"#c9c9c9"
},
{
"name":"honeydew3",
"hex":"#c1cdc1"
},
{
"name":"snow3",
"hex":"#cdc9c9"
},
{
"name":"burlywood2",
"hex":"#eec591"
},
{
"name":"azure3",
"hex":"#c1cdcd"
},
{
"name":"lightskyblue2",
"hex":"#a4d3ee"
},
{
"name":"lightyellow3",
"hex":"#cdcdb4"
},
{
"name":"gray80",
"hex":"#cccccc"
},
{
"name":"grey80",
"hex":"#cccccc"
},
{
"name":"ivory3",
"hex":"#cdcdc1"
},
{
"name":"rosybrown1",
"hex":"#ffc1c1"
},
{
"name":"pink",
"hex":"#ffc0cb"
},
{
"name":"plum1",
"hex":"#ffbbff"
},
{
"name":"lawngreen",
"hex":"#7cfc00"
},
{
"name":"lawn green",
"hex":"#7cfc00"
},
{
"name":"gray81",
"hex":"#cfcfcf"
},
{
"name":"grey81",
"hex":"#cfcfcf"
},
{
"name":"lightsteelblue2",
"hex":"#bcd2ee"
},
{
"name":"slategray2",
"hex":"#b9d3ee"
},
{
"name":"lightblue",
"hex":"#add8e6"
},
{
"name":"light blue",
"hex":"#add8e6"
},
{
"name":"gold1",
"hex":"#ffd700"
},
{
"name":"gold",
"hex":"#ffd700"
},
{
"name":"peachpuff2",
"hex":"#eecbad"
},
{
"name":"gray82",
"hex":"#d1d1d1"
},
{
"name":"grey82",
"hex":"#d1d1d1"
},
{
"name":"chartreuse1",
"hex":"#7fff00"
},
{
"name":"chartreuse",
"hex":"#7fff00"
},
{
"name":"aquamarine2",
"hex":"#76eec6"
},
{
"name":"navajowhite2",
"hex":"#eecfa1"
},
{
"name":"lightgray",
"hex":"#d3d3d3"
},
{
"name":"lightgrey",
"hex":"#d3d3d3"
},
{
"name":"light gray",
"hex":"#d3d3d3"
},
{
"name":"light grey",
"hex":"#d3d3d3"
},
{
"name":"cadetblue2",
"hex":"#8ee5ee"
},
{
"name":"lightgreen",
"hex":"#90ee90"
},
{
"name":"palegreen2",
"hex":"#90ee90"
},
{
"name":"light green",
"hex":"#90ee90"
},
{
"name":"seagreen1",
"hex":"#54ff9f"
},
{
"name":"gray83",
"hex":"#d4d4d4"
},
{
"name":"grey83",
"hex":"#d4d4d4"
},
{
"name":"olivedrab2",
"hex":"#b3ee3a"
},
{
"name":"gray84",
"hex":"#d6d6d6"
},
{
"name":"grey84",
"hex":"#d6d6d6"
},
{
"name":"powderblue",
"hex":"#b0e0e6"
},
{
"name":"powder blue",
"hex":"#b0e0e6"
},
{
"name":"lightblue2",
"hex":"#b2dfee"
},
{
"name":"bisque2",
"hex":"#eed5b7"
},
{
"name":"burlywood1",
"hex":"#ffd39b"
},
{
"name":"gray85",
"hex":"#d9d9d9"
},
{
"name":"grey85",
"hex":"#d9d9d9"
},
{
"name":"lightgoldenrod2",
"hex":"#eedc82"
},
{
"name":"darkslategray2",
"hex":"#8deeee"
},
{
"name":"lightskyblue1",
"hex":"#b0e2ff"
},
{
"name":"wheat2",
"hex":"#eed8ae"
},
{
"name":"darkolivegreen2",
"hex":"#bcee68"
},
{
"name":"thistle2",
"hex":"#eed2ee"
},
{
"name":"lightgoldenrod",
"hex":"#eedd82"
},
{
"name":"light goldenrod",
"hex":"#eedd82"
},
{
"name":"mistyrose2",
"hex":"#eed5d2"
},
{
"name":"gray86",
"hex":"#dbdbdb"
},
{
"name":"grey86",
"hex":"#dbdbdb"
},
{
"name":"gainsboro",
"hex":"#dcdcdc"
},
{
"name":"yellow2",
"hex":"#eeee00"
},
{
"name":"darkseagreen2",
"hex":"#b4eeb4"
},
{
"name":"gray87",
"hex":"#dedede"
},
{
"name":"grey87",
"hex":"#dedede"
},
{
"name":"slategray1",
"hex":"#c6e2ff"
},
{
"name":"lightsteelblue1",
"hex":"#cae1ff"
},
{
"name":"greenyellow",
"hex":"#adff2f"
},
{
"name":"green yellow",
"hex":"#adff2f"
},
{
"name":"palegreen",
"hex":"#98fb98"
},
{
"name":"pale green",
"hex":"#98fb98"
},
{
"name":"peachpuff1",
"hex":"#ffdab9"
},
{
"name":"peachpuff",
"hex":"#ffdab9"
},
{
"name":"peach puff",
"hex":"#ffdab9"
},
{
"name":"wheat",
"hex":"#f5deb3"
},
{
"name":"gray88",
"hex":"#e0e0e0"
},
{
"name":"grey88",
"hex":"#e0e0e0"
},
{
"name":"paleturquoise2",
"hex":"#aeeeee"
},
{
"name":"paleturquoise",
"hex":"#afeeee"
},
{
"name":"pale turquoise",
"hex":"#afeeee"
},
{
"name":"aquamarine1",
"hex":"#7fffd4"
},
{
"name":"aquamarine",
"hex":"#7fffd4"
},
{
"name":"khaki2",
"hex":"#eee685"
},
{
"name":"antiquewhite2",
"hex":"#eedfcc"
},
{
"name":"navajowhite1",
"hex":"#ffdead"
},
{
"name":"navajowhite",
"hex":"#ffdead"
},
{
"name":"navajo white",
"hex":"#ffdead"
},
{
"name":"khaki",
"hex":"#f0e68c"
},
{
"name":"cadetblue1",
"hex":"#98f5ff"
},
{
"name":"palegreen1",
"hex":"#9aff9a"
},
{
"name":"gray89",
"hex":"#e3e3e3"
},
{
"name":"grey89",
"hex":"#e3e3e3"
},
{
"name":"lavenderblush2",
"hex":"#eee0e5"
},
{
"name":"olivedrab1",
"hex":"#c0ff3e"
},
{
"name":"palegoldenrod",
"hex":"#eee8aa"
},
{
"name":"pale goldenrod",
"hex":"#eee8aa"
},
{
"name":"gray90",
"hex":"#e5e5e5"
},
{
"name":"grey90",
"hex":"#e5e5e5"
},
{
"name":"lightblue1",
"hex":"#bfefff"
},
{
"name":"moccasin",
"hex":"#ffe4b5"
},
{
"name":"seashell2",
"hex":"#eee5de"
},
{
"name":"lemonchiffon2",
"hex":"#eee9bf"
},
{
"name":"cornsilk2",
"hex":"#eee8cd"
},
{
"name":"bisque1",
"hex":"#ffe4c4"
},
{
"name":"bisque",
"hex":"#ffe4c4"
},
{
"name":"lavender",
"hex":"#e6e6fa"
},
{
"name":"lightcyan2",
"hex":"#d1eeee"
},
{
"name":"gray91",
"hex":"#e8e8e8"
},
{
"name":"grey91",
"hex":"#e8e8e8"
},
{
"name":"wheat1",
"hex":"#ffe7ba"
},
{
"name":"darkslategray1",
"hex":"#97ffff"
},
{
"name":"lightgoldenrod1",
"hex":"#ffec8b"
},
{
"name":"darkolivegreen1",
"hex":"#caff70"
},
{
"name":"mistyrose1",
"hex":"#ffe4e1"
},
{
"name":"mistyrose",
"hex":"#ffe4e1"
},
{
"name":"misty rose",
"hex":"#ffe4e1"
},
{
"name":"thistle1",
"hex":"#ffe1ff"
},
{
"name":"honeydew2",
"hex":"#e0eee0"
},
{
"name":"snow2",
"hex":"#eee9e9"
},
{
"name":"gray92",
"hex":"#ebebeb"
},
{
"name":"grey92",
"hex":"#ebebeb"
},
{
"name":"azure2",
"hex":"#e0eeee"
},
{
"name":"lightyellow2",
"hex":"#eeeed1"
},
{
"name":"yellow1",
"hex":"#ffff00"
},
{
"name":"yellow",
"hex":"#ffff00"
},
{
"name":"antiquewhite",
"hex":"#faebd7"
},
{
"name":"antique white",
"hex":"#faebd7"
},
{
"name":"ivory2",
"hex":"#eeeee0"
},
{
"name":"gray93",
"hex":"#ededed"
},
{
"name":"grey93",
"hex":"#ededed"
},
{
"name":"blanchedalmond",
"hex":"#ffebcd"
},
{
"name":"blanched almond",
"hex":"#ffebcd"
},
{
"name":"darkseagreen1",
"hex":"#c1ffc1"
},
{
"name":"gray94",
"hex":"#f0f0f0"
},
{
"name":"grey94",
"hex":"#f0f0f0"
},
{
"name":"khaki1",
"hex":"#fff68f"
},
{
"name":"papayawhip",
"hex":"#ffefd5"
},
{
"name":"papaya whip",
"hex":"#ffefd5"
},
{
"name":"paleturquoise1",
"hex":"#bbffff"
},
{
"name":"antiquewhite1",
"hex":"#ffefdb"
},
{
"name":"linen",
"hex":"#faf0e6"
},
{
"name":"gray95",
"hex":"#f2f2f2"
},
{
"name":"grey95",
"hex":"#f2f2f2"
},
{
"name":"beige",
"hex":"#f5f5dc"
},
{
"name":"lavenderblush1",
"hex":"#fff0f5"
},
{
"name":"lavenderblush",
"hex":"#fff0f5"
},
{
"name":"lavender blush",
"hex":"#fff0f5"
},
{
"name":"whitesmoke",
"hex":"#f5f5f5"
},
{
"name":"gray96",
"hex":"#f5f5f5"
},
{
"name":"grey96",
"hex":"#f5f5f5"
},
{
"name":"white smoke",
"hex":"#f5f5f5"
},
{
"name":"oldlace",
"hex":"#fdf5e6"
},
{
"name":"old lace",
"hex":"#fdf5e6"
},
{
"name":"seashell1",
"hex":"#fff5ee"
},
{
"name":"seashell",
"hex":"#fff5ee"
},
{
"name":"aliceblue",
"hex":"#f0f8ff"
},
{
"name":"alice blue",
"hex":"#f0f8ff"
},
{
"name":"gray97",
"hex":"#f7f7f7"
},
{
"name":"grey97",
"hex":"#f7f7f7"
},
{
"name":"lightgoldenrodyellow",
"hex":"#fafad2"
},
{
"name":"light goldenrod yellow",
"hex":"#fafad2"
},
{
"name":"cornsilk1",
"hex":"#fff8dc"
},
{
"name":"cornsilk",
"hex":"#fff8dc"
},
{
"name":"lemonchiffon1",
"hex":"#fffacd"
},
{
"name":"lemonchiffon",
"hex":"#fffacd"
},
{
"name":"lemon chiffon",
"hex":"#fffacd"
},
{
"name":"lightcyan1",
"hex":"#e0ffff"
},
{
"name":"lightcyan",
"hex":"#e0ffff"
},
{
"name":"light cyan",
"hex":"#e0ffff"
},
{
"name":"ghostwhite",
"hex":"#f8f8ff"
},
{
"name":"ghost white",
"hex":"#f8f8ff"
},
{
"name":"gray98",
"hex":"#fafafa"
},
{
"name":"grey98",
"hex":"#fafafa"
},
{
"name":"floralwhite",
"hex":"#fffaf0"
},
{
"name":"floral white",
"hex":"#fffaf0"
},
{
"name":"honeydew1",
"hex":"#f0fff0"
},
{
"name":"honeydew",
"hex":"#f0fff0"
},
{
"name":"snow1",
"hex":"#fffafa"
},
{
"name":"snow",
"hex":"#fffafa"
},
{
"name":"azure1",
"hex":"#f0ffff"
},
{
"name":"azure",
"hex":"#f0ffff"
},
{
"name":"gray99",
"hex":"#fcfcfc"
},
{
"name":"grey99",
"hex":"#fcfcfc"
},
{
"name":"mintcream",
"hex":"#f5fffa"
},
{
"name":"mint cream",
"hex":"#f5fffa"
},
{
"name":"lightyellow1",
"hex":"#ffffe0"
},
{
"name":"lightyellow",
"hex":"#ffffe0"
},
{
"name":"light yellow",
"hex":"#ffffe0"
},
{
"name":"ivory1",
"hex":"#fffff0"
},
{
"name":"ivory",
"hex":"#fffff0"
},
{
"name":"gray100",
"hex":"#ffffff"
},
{
"name":"grey100",
"hex":"#ffffff"
},
{
"name":"white",
"hex":"#ffffff"
}
]
|
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslotest import mockpatch
from tempest.common import credentials_factory as credentials
from tempest.common import dynamic_creds
from tempest import config
from tempest import exceptions
from tempest.lib.common import rest_client
from tempest.lib.services.identity.v2 import token_client as json_token_client
from tempest.services.identity.v2.json import identity_client as \
json_iden_client
from tempest.services.identity.v2.json import roles_client as \
json_roles_client
from tempest.services.identity.v2.json import tenants_client as \
json_tenants_client
from tempest.services.identity.v2.json import users_client as \
json_users_client
from tempest.services.network.json import routers_client
from tempest.tests import fake_config
from tempest.tests.lib import base
from tempest.tests.lib import fake_http
from tempest.tests.lib import fake_identity
class TestDynamicCredentialProvider(base.TestCase):
fixed_params = {'name': 'test class',
'identity_version': 'v2',
'admin_role': 'admin'}
def setUp(self):
super(TestDynamicCredentialProvider, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.stubs.Set(config, 'TempestConfigPrivate', fake_config.FakePrivate)
self.stubs.Set(json_token_client.TokenClient, 'raw_request',
fake_identity._fake_v2_response)
cfg.CONF.set_default('operator_role', 'FakeRole',
group='object-storage')
self._mock_list_ec2_credentials('fake_user_id', 'fake_tenant_id')
self.fixed_params.update(
admin_creds=self._get_fake_admin_creds())
def test_tempest_client(self):
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self.assertIsInstance(creds.identity_admin_client,
json_iden_client.IdentityClient)
def _get_fake_admin_creds(self):
return credentials.get_credentials(
fill_in=False,
identity_version=self.fixed_params['identity_version'],
username='fake_username', password='fake_password',
tenant_name='fake_tenant')
def _mock_user_create(self, id, name):
user_fix = self.useFixture(mockpatch.PatchObject(
json_users_client.UsersClient,
'create_user',
return_value=(rest_client.ResponseBody
(200, {'user': {'id': id, 'name': name}}))))
return user_fix
def _mock_tenant_create(self, id, name):
tenant_fix = self.useFixture(mockpatch.PatchObject(
json_tenants_client.TenantsClient,
'create_tenant',
return_value=(rest_client.ResponseBody
(200, {'tenant': {'id': id, 'name': name}}))))
return tenant_fix
def _mock_list_roles(self, id, name):
roles_fix = self.useFixture(mockpatch.PatchObject(
json_roles_client.RolesClient,
'list_roles',
return_value=(rest_client.ResponseBody
(200,
{'roles': [{'id': id, 'name': name},
{'id': '1', 'name': 'FakeRole'},
{'id': '2', 'name': 'Member'}]}))))
return roles_fix
def _mock_list_2_roles(self):
roles_fix = self.useFixture(mockpatch.PatchObject(
json_roles_client.RolesClient,
'list_roles',
return_value=(rest_client.ResponseBody
(200,
{'roles': [{'id': '1234', 'name': 'role1'},
{'id': '1', 'name': 'FakeRole'},
{'id': '12345', 'name': 'role2'}]}))))
return roles_fix
def _mock_assign_user_role(self):
tenant_fix = self.useFixture(mockpatch.PatchObject(
json_roles_client.RolesClient,
'assign_user_role',
return_value=(rest_client.ResponseBody
(200, {}))))
return tenant_fix
def _mock_list_role(self):
roles_fix = self.useFixture(mockpatch.PatchObject(
json_roles_client.RolesClient,
'list_roles',
return_value=(rest_client.ResponseBody
(200, {'roles': [{'id': '1',
'name': 'FakeRole'}]}))))
return roles_fix
def _mock_list_ec2_credentials(self, user_id, tenant_id):
ec2_creds_fix = self.useFixture(mockpatch.PatchObject(
json_users_client.UsersClient,
'list_user_ec2_credentials',
return_value=(rest_client.ResponseBody
(200, {'credentials': [{
'access': 'fake_access',
'secret': 'fake_secret',
'tenant_id': tenant_id,
'user_id': user_id,
'trust_id': None}]}))))
return ec2_creds_fix
def _mock_network_create(self, iso_creds, id, name):
net_fix = self.useFixture(mockpatch.PatchObject(
iso_creds.networks_admin_client,
'create_network',
return_value={'network': {'id': id, 'name': name}}))
return net_fix
def _mock_subnet_create(self, iso_creds, id, name):
subnet_fix = self.useFixture(mockpatch.PatchObject(
iso_creds.subnets_admin_client,
'create_subnet',
return_value={'subnet': {'id': id, 'name': name}}))
return subnet_fix
def _mock_router_create(self, id, name):
router_fix = self.useFixture(mockpatch.PatchObject(
routers_client.RoutersClient,
'create_router',
return_value={'router': {'id': id, 'name': name}}))
return router_fix
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_primary_creds(self, MockRestClient):
cfg.CONF.set_default('neutron', False, 'service_available')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_tenant_create('1234', 'fake_prim_tenant')
self._mock_user_create('1234', 'fake_prim_user')
primary_creds = creds.get_primary_creds()
self.assertEqual(primary_creds.username, 'fake_prim_user')
self.assertEqual(primary_creds.tenant_name, 'fake_prim_tenant')
# Verify IDs
self.assertEqual(primary_creds.tenant_id, '1234')
self.assertEqual(primary_creds.user_id, '1234')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_admin_creds(self, MockRestClient):
cfg.CONF.set_default('neutron', False, 'service_available')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_list_roles('1234', 'admin')
self._mock_user_create('1234', 'fake_admin_user')
self._mock_tenant_create('1234', 'fake_admin_tenant')
user_mock = mock.patch.object(json_roles_client.RolesClient,
'assign_user_role')
user_mock.start()
self.addCleanup(user_mock.stop)
with mock.patch.object(json_roles_client.RolesClient,
'assign_user_role') as user_mock:
admin_creds = creds.get_admin_creds()
user_mock.assert_has_calls([
mock.call('1234', '1234', '1234')])
self.assertEqual(admin_creds.username, 'fake_admin_user')
self.assertEqual(admin_creds.tenant_name, 'fake_admin_tenant')
# Verify IDs
self.assertEqual(admin_creds.tenant_id, '1234')
self.assertEqual(admin_creds.user_id, '1234')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_role_creds(self, MockRestClient):
cfg.CONF.set_default('neutron', False, 'service_available')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_list_2_roles()
self._mock_user_create('1234', 'fake_role_user')
self._mock_tenant_create('1234', 'fake_role_tenant')
user_mock = mock.patch.object(json_roles_client.RolesClient,
'assign_user_role')
user_mock.start()
self.addCleanup(user_mock.stop)
with mock.patch.object(json_roles_client.RolesClient,
'assign_user_role') as user_mock:
role_creds = creds.get_creds_by_roles(
roles=['role1', 'role2'])
calls = user_mock.mock_calls
# Assert that the role creation is called with the 2 specified roles
self.assertEqual(len(calls), 2)
args = map(lambda x: x[1], calls)
args = list(args)
self.assertIn(('1234', '1234', '1234'), args)
self.assertIn(('1234', '1234', '12345'), args)
self.assertEqual(role_creds.username, 'fake_role_user')
self.assertEqual(role_creds.tenant_name, 'fake_role_tenant')
# Verify IDs
self.assertEqual(role_creds.tenant_id, '1234')
self.assertEqual(role_creds.user_id, '1234')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_all_cred_cleanup(self, MockRestClient):
cfg.CONF.set_default('neutron', False, 'service_available')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_tenant_create('1234', 'fake_prim_tenant')
self._mock_user_create('1234', 'fake_prim_user')
creds.get_primary_creds()
self._mock_tenant_create('12345', 'fake_alt_tenant')
self._mock_user_create('12345', 'fake_alt_user')
creds.get_alt_creds()
self._mock_tenant_create('123456', 'fake_admin_tenant')
self._mock_user_create('123456', 'fake_admin_user')
self._mock_list_roles('123456', 'admin')
creds.get_admin_creds()
user_mock = self.patch(
'tempest.services.identity.v2.json.users_client.'
'UsersClient.delete_user')
tenant_mock = self.patch(
'tempest.services.identity.v2.json.tenants_client.'
'TenantsClient.delete_tenant')
creds.clear_creds()
# Verify user delete calls
calls = user_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('1234', args)
self.assertIn('12345', args)
self.assertIn('123456', args)
# Verify tenant delete calls
calls = tenant_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('1234', args)
self.assertIn('12345', args)
self.assertIn('123456', args)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_alt_creds(self, MockRestClient):
cfg.CONF.set_default('neutron', False, 'service_available')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_alt_user')
self._mock_tenant_create('1234', 'fake_alt_tenant')
alt_creds = creds.get_alt_creds()
self.assertEqual(alt_creds.username, 'fake_alt_user')
self.assertEqual(alt_creds.tenant_name, 'fake_alt_tenant')
# Verify IDs
self.assertEqual(alt_creds.tenant_id, '1234')
self.assertEqual(alt_creds.user_id, '1234')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_no_network_creation_with_config_set(self, MockRestClient):
cfg.CONF.set_default('create_isolated_networks', False, group='auth')
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
net = mock.patch.object(creds.networks_admin_client,
'delete_network')
net_mock = net.start()
subnet = mock.patch.object(creds.subnets_admin_client,
'delete_subnet')
subnet_mock = subnet.start()
router = mock.patch.object(creds.routers_admin_client,
'delete_router')
router_mock = router.start()
primary_creds = creds.get_primary_creds()
self.assertEqual(net_mock.mock_calls, [])
self.assertEqual(subnet_mock.mock_calls, [])
self.assertEqual(router_mock.mock_calls, [])
network = primary_creds.network
subnet = primary_creds.subnet
router = primary_creds.router
self.assertIsNone(network)
self.assertIsNone(subnet)
self.assertIsNone(router)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_network_creation(self, MockRestClient):
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
self._mock_network_create(creds, '1234', 'fake_net')
self._mock_subnet_create(creds, '1234', 'fake_subnet')
self._mock_router_create('1234', 'fake_router')
router_interface_mock = self.patch(
'tempest.services.network.json.routers_client.RoutersClient.'
'add_router_interface')
primary_creds = creds.get_primary_creds()
router_interface_mock.assert_called_once_with('1234', subnet_id='1234')
network = primary_creds.network
subnet = primary_creds.subnet
router = primary_creds.router
self.assertEqual(network['id'], '1234')
self.assertEqual(network['name'], 'fake_net')
self.assertEqual(subnet['id'], '1234')
self.assertEqual(subnet['name'], 'fake_subnet')
self.assertEqual(router['id'], '1234')
self.assertEqual(router['name'], 'fake_router')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_network_cleanup(self, MockRestClient):
def side_effect(**args):
return {"security_groups": [{"tenant_id": args['tenant_id'],
"name": args['name'],
"description": args['name'],
"security_group_rules": [],
"id": "sg-%s" % args['tenant_id']}]}
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
# Create primary tenant and network
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
self._mock_network_create(creds, '1234', 'fake_net')
self._mock_subnet_create(creds, '1234', 'fake_subnet')
self._mock_router_create('1234', 'fake_router')
router_interface_mock = self.patch(
'tempest.services.network.json.routers_client.RoutersClient.'
'add_router_interface')
creds.get_primary_creds()
router_interface_mock.assert_called_once_with('1234', subnet_id='1234')
router_interface_mock.reset_mock()
# Create alternate tenant and network
self._mock_user_create('12345', 'fake_alt_user')
self._mock_tenant_create('12345', 'fake_alt_tenant')
self._mock_network_create(creds, '12345', 'fake_alt_net')
self._mock_subnet_create(creds, '12345', 'fake_alt_subnet')
self._mock_router_create('12345', 'fake_alt_router')
creds.get_alt_creds()
router_interface_mock.assert_called_once_with('12345',
subnet_id='12345')
router_interface_mock.reset_mock()
# Create admin tenant and networks
self._mock_user_create('123456', 'fake_admin_user')
self._mock_tenant_create('123456', 'fake_admin_tenant')
self._mock_network_create(creds, '123456', 'fake_admin_net')
self._mock_subnet_create(creds, '123456', 'fake_admin_subnet')
self._mock_router_create('123456', 'fake_admin_router')
self._mock_list_roles('123456', 'admin')
creds.get_admin_creds()
self.patch('tempest.services.identity.v2.json.users_client.'
'UsersClient.delete_user')
self.patch('tempest.services.identity.v2.json.tenants_client.'
'TenantsClient.delete_tenant')
net = mock.patch.object(creds.networks_admin_client,
'delete_network')
net_mock = net.start()
subnet = mock.patch.object(creds.subnets_admin_client,
'delete_subnet')
subnet_mock = subnet.start()
router = mock.patch.object(creds.routers_admin_client,
'delete_router')
router_mock = router.start()
remove_router_interface_mock = self.patch(
'tempest.services.network.json.routers_client.RoutersClient.'
'remove_router_interface')
return_values = ({'status': 200}, {'ports': []})
port_list_mock = mock.patch.object(creds.ports_admin_client,
'list_ports',
return_value=return_values)
port_list_mock.start()
secgroup_list_mock = mock.patch.object(
creds.security_groups_admin_client,
'list_security_groups',
side_effect=side_effect)
secgroup_list_mock.start()
return_values = fake_http.fake_http_response({}, status=204), ''
remove_secgroup_mock = self.patch(
'tempest.lib.services.network.security_groups_client.'
'SecurityGroupsClient.delete', return_value=return_values)
creds.clear_creds()
# Verify default security group delete
calls = remove_secgroup_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('v2.0/security-groups/sg-1234', args)
self.assertIn('v2.0/security-groups/sg-12345', args)
self.assertIn('v2.0/security-groups/sg-123456', args)
# Verify remove router interface calls
calls = remove_router_interface_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: (x[1][0], x[2]), calls)
args = list(args)
self.assertIn(('1234', {'subnet_id': '1234'}), args)
self.assertIn(('12345', {'subnet_id': '12345'}), args)
self.assertIn(('123456', {'subnet_id': '123456'}), args)
# Verify network delete calls
calls = net_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('1234', args)
self.assertIn('12345', args)
self.assertIn('123456', args)
# Verify subnet delete calls
calls = subnet_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('1234', args)
self.assertIn('12345', args)
self.assertIn('123456', args)
# Verify router delete calls
calls = router_mock.mock_calls
self.assertEqual(len(calls), 3)
args = map(lambda x: x[1][0], calls)
args = list(args)
self.assertIn('1234', args)
self.assertIn('12345', args)
self.assertIn('123456', args)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_network_alt_creation(self, MockRestClient):
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_alt_user')
self._mock_tenant_create('1234', 'fake_alt_tenant')
self._mock_network_create(creds, '1234', 'fake_alt_net')
self._mock_subnet_create(creds, '1234', 'fake_alt_subnet')
self._mock_router_create('1234', 'fake_alt_router')
router_interface_mock = self.patch(
'tempest.services.network.json.routers_client.RoutersClient.'
'add_router_interface')
alt_creds = creds.get_alt_creds()
router_interface_mock.assert_called_once_with('1234', subnet_id='1234')
network = alt_creds.network
subnet = alt_creds.subnet
router = alt_creds.router
self.assertEqual(network['id'], '1234')
self.assertEqual(network['name'], 'fake_alt_net')
self.assertEqual(subnet['id'], '1234')
self.assertEqual(subnet['name'], 'fake_alt_subnet')
self.assertEqual(router['id'], '1234')
self.assertEqual(router['name'], 'fake_alt_router')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_network_admin_creation(self, MockRestClient):
creds = dynamic_creds.DynamicCredentialProvider(**self.fixed_params)
self._mock_assign_user_role()
self._mock_user_create('1234', 'fake_admin_user')
self._mock_tenant_create('1234', 'fake_admin_tenant')
self._mock_network_create(creds, '1234', 'fake_admin_net')
self._mock_subnet_create(creds, '1234', 'fake_admin_subnet')
self._mock_router_create('1234', 'fake_admin_router')
router_interface_mock = self.patch(
'tempest.services.network.json.routers_client.RoutersClient.'
'add_router_interface')
self._mock_list_roles('123456', 'admin')
admin_creds = creds.get_admin_creds()
router_interface_mock.assert_called_once_with('1234', subnet_id='1234')
network = admin_creds.network
subnet = admin_creds.subnet
router = admin_creds.router
self.assertEqual(network['id'], '1234')
self.assertEqual(network['name'], 'fake_admin_net')
self.assertEqual(subnet['id'], '1234')
self.assertEqual(subnet['name'], 'fake_admin_subnet')
self.assertEqual(router['id'], '1234')
self.assertEqual(router['name'], 'fake_admin_router')
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_no_network_resources(self, MockRestClient):
net_dict = {
'network': False,
'router': False,
'subnet': False,
'dhcp': False,
}
creds = dynamic_creds.DynamicCredentialProvider(
network_resources=net_dict,
**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
net = mock.patch.object(creds.networks_admin_client,
'delete_network')
net_mock = net.start()
subnet = mock.patch.object(creds.subnets_admin_client,
'delete_subnet')
subnet_mock = subnet.start()
router = mock.patch.object(creds.routers_admin_client,
'delete_router')
router_mock = router.start()
primary_creds = creds.get_primary_creds()
self.assertEqual(net_mock.mock_calls, [])
self.assertEqual(subnet_mock.mock_calls, [])
self.assertEqual(router_mock.mock_calls, [])
network = primary_creds.network
subnet = primary_creds.subnet
router = primary_creds.router
self.assertIsNone(network)
self.assertIsNone(subnet)
self.assertIsNone(router)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_router_without_network(self, MockRestClient):
net_dict = {
'network': False,
'router': True,
'subnet': False,
'dhcp': False,
}
creds = dynamic_creds.DynamicCredentialProvider(
network_resources=net_dict,
**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
self.assertRaises(exceptions.InvalidConfiguration,
creds.get_primary_creds)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_subnet_without_network(self, MockRestClient):
net_dict = {
'network': False,
'router': False,
'subnet': True,
'dhcp': False,
}
creds = dynamic_creds.DynamicCredentialProvider(
network_resources=net_dict,
**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
self.assertRaises(exceptions.InvalidConfiguration,
creds.get_primary_creds)
@mock.patch('tempest.lib.common.rest_client.RestClient')
def test_dhcp_without_subnet(self, MockRestClient):
net_dict = {
'network': False,
'router': False,
'subnet': False,
'dhcp': True,
}
creds = dynamic_creds.DynamicCredentialProvider(
network_resources=net_dict,
**self.fixed_params)
self._mock_assign_user_role()
self._mock_list_role()
self._mock_user_create('1234', 'fake_prim_user')
self._mock_tenant_create('1234', 'fake_prim_tenant')
self.assertRaises(exceptions.InvalidConfiguration,
creds.get_primary_creds)
|
|
import datetime
import logging
import re
from google.appengine.runtime.apiproxy_errors import DeadlineExceededError
class HolidayContext:
def __init__(self, calendar_service=None, start_date=None, end_date=None, email=None, name=None, message=None, search_calendars=None, dry_run=None):
self.calendar_service = calendar_service
self.start_date = start_date
self.end_date = end_date
self.email = email
self.name = name
self.message = message
self.search_calendars = search_calendars
self.dry_run = dry_run
class Event:
def __init__(self, summary, calendar_name, start, end, calendar_link, event_link='', exists=False):
self.summary = summary
self.calendar_name = calendar_name
self.start = start
self.end = end
self.calendar_link = calendar_link
self.event_link = event_link
self.exists = exists
def handle_batch_request(request_id, response, exception):
    # Batch callback: log failures, ignore successful responses.
    if exception is not None:
        logging.warn("Received exception %s" % str(exception))
def execute_requests(context, requests):
    """Execute the given requests in batches; return True if any batch failed."""
    received_error = False
    if not requests:
        return received_error
batch_count = 15
logging.info("Executing %d requests batched by %d requests" % (len(requests), batch_count))
count = 0
batch = context.calendar_service.new_batch_http_request(callback=handle_batch_request)
for rq in requests:
batch.add(rq)
count += 1
if count >= batch_count:
logging.debug("Executing %d requests in batch" % count)
try:
batch.execute()
            except DeadlineExceededError as e:
logging.warn("Received '%s' exception" % str(e))
received_error = True
batch = context.calendar_service.new_batch_http_request(callback=handle_batch_request)
count = 0
if count > 0:
logging.debug("Executing %d requests in batch" % count)
try:
batch.execute()
        except DeadlineExceededError as e:
logging.warn("Received '%s' exception" % str(e))
received_error = True
return received_error
def should_cancel(event):
    """Return True for meetings we organize where, apart from ourselves and
    resources, at most one attendee has not declined (e.g. one-on-ones).
    Events that include a group address are never cancelled."""
if 'self' in event['organizer'] and 'attendees' in event:
count = 0
for attendee in event['attendees']:
if 'self' in attendee or 'resource' in attendee:
continue
if re.match('[a-zA-Z0-9_-]+@', attendee['email']):
logging.debug("Found group email: %s, skipping event" % attendee['email'])
return False
if attendee['responseStatus'] != 'declined':
count += 1
if count <= 1:
return True
return False
def get_cancelled_events(context, events):
logging.info("Searching for unnecessary events which can be cancelled (e.g. One on one's)")
cancelled_events = []
requests = []
for event in events:
if should_cancel(event):
event['updates'] = ['cancel', 'notification']
cancelled_events.append(event)
if not context.dry_run:
requests.append(context.calendar_service.events().delete(calendarId=context.email, eventId=event['id'], sendNotifications=True))
if not cancelled_events:
        logging.info('There are no meetings that could be cancelled')
return cancelled_events, requests
def should_reject(event):
    """Return True if the event has attendees and we have not already declined it."""
if 'attendees' in event:
for attendee in event['attendees']:
if 'self' in attendee and attendee['responseStatus'] == 'declined':
return False
else:
return False
return True
def should_grant_modify_rights_to_guests(event):
    if 'self' not in event['organizer']:
        return False
    if 'guestsCanModify' not in event:
        return True
    return False
def get_rejected_events(context, events):
logging.info('Searching for events which can be rejected')
rejected_events = []
requests = []
for event in events:
if should_cancel(event):
continue
if should_reject(event):
rejected_events.append(event)
event['updates'] = ['reject']
if 'self' not in event['organizer']:
event['updates'].append('notification')
if should_grant_modify_rights_to_guests(event):
grant_modify_rights_to_guests = True
event['updates'].append('edit')
else:
grant_modify_rights_to_guests = False
if not context.dry_run:
requests.append(reject_event(context, event['id'], event['attendees'], grant_modify_rights_to_guests))
if not rejected_events:
        logging.info('There are no meetings that need to be rejected')
return rejected_events, requests
def reject_event(context, event_id, attendees, grant_modify_rights_to_guests=False):
for attendee in attendees:
if 'self' in attendee:
attendee['responseStatus'] = 'declined'
attendee['comment'] = context.message
break
event = {
'attendees': attendees,
}
if grant_modify_rights_to_guests:
event['guestsCanModify'] = True
return context.calendar_service.events().patch(calendarId=context.email, eventId=event_id, sendNotifications=True,
body=event)
def get_events(context, calendar_id, query=None):
start = context.start_date.isoformat() + 'Z'
end = (context.end_date + datetime.timedelta(days=1)).isoformat() + 'Z'
fields = 'items(attendees,end,guestsCanInviteOthers,guestsCanModify,guestsCanSeeOtherGuests,recurringEventId,htmlLink,id,organizer,start,summary)'
events_result = context.calendar_service.events().list(
calendarId=calendar_id, timeMin=start, timeMax=end, fields=fields,
maxResults=300, singleEvents=True,
orderBy='startTime', q=query).execute()
return events_result.get('items', [])
def create_event(context, summary, calendar_id, calendar_summary):
start = context.start_date.strftime("%Y-%m-%d")
end = context.end_date.strftime("%Y-%m-%d")
if context.email == calendar_id:
calendar_link = "https://calendar.google.com"
else:
calendar_link = "https://calendar.google.com/calendar/embed?src=" + calendar_id
events = get_events(context, calendar_id, summary)
for event in events:
if summary == event['summary']:
logging.debug("Event '%s' in calendar %s already exists" % (summary.encode('utf-8'), calendar_summary.encode('utf-8')))
return Event(summary, calendar_summary, start, end, calendar_link, event['htmlLink'], exists=True)
event = {
'summary': summary,
'start': {
'date': context.start_date.strftime("%Y-%m-%d"),
},
'end': {
'date': (context.end_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d"),
},
'visibility': 'public',
'reminders': {
'useDefault': False
}
}
html_link = ''
if not context.dry_run:
event = context.calendar_service.events().insert(calendarId=calendar_id, body=event).execute()
html_link = event['htmlLink']
logging.debug("Event '%s' created in calendar '%s'" % (summary.encode('utf-8'), calendar_summary.encode('utf-8')))
return Event(summary, calendar_summary, start, end, calendar_link, html_link)
def create_additional_events(context):
events = []
logging.debug("Searching subscribed calendars for '%s'" % context.search_calendars)
calendars = context.calendar_service.calendarList().list(minAccessRole='writer').execute()
for calendar in calendars['items']:
if context.search_calendars.lower() in calendar['summary'].lower():
summary = context.name + " - " + context.message
events.append(create_event(context, summary, calendar['id'], calendar['summary']))
return events
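# Hedged usage sketch (not part of the original module): it shows how the helpers
# above could be wired together for one user. The function name
# `process_out_of_office` and its argument list are assumptions; everything it
# calls (HolidayContext, get_events, get_cancelled_events, get_rejected_events,
# execute_requests, create_additional_events) is defined above.
def process_out_of_office(calendar_service, start_date, end_date, email, name,
                          message, search_calendars, dry_run=True):
    # Shared context consumed by every helper in this module.
    context = HolidayContext(calendar_service=calendar_service, start_date=start_date,
                             end_date=end_date, email=email, name=name, message=message,
                             search_calendars=search_calendars, dry_run=dry_run)
    # Fetch the events inside the holiday window, split them into cancellations
    # and rejections, and run the resulting API calls in batches.
    events = get_events(context, context.email)
    cancelled_events, cancel_requests = get_cancelled_events(context, events)
    rejected_events, reject_requests = get_rejected_events(context, events)
    execute_requests(context, cancel_requests + reject_requests)
    # Finally, drop "out of office" marker events into the matching subscribed calendars.
    marker_events = create_additional_events(context)
    return cancelled_events, rejected_events, marker_events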
|
|
from data_utils import featured_frame
from feature_extractor import *
from data_utils.featured_frame import *
import numpy as np
import scipy.stats as sts
import scipy.integrate as integr
from scipy.interpolate import interp1d
from sympy import mpmath
class TimeDomainFeatureExtractor(FeatureExtractor):
def __init__(self, derivative=True):
super(TimeDomainFeatureExtractor, self).__init__(derivative)
def extract_features(self, frame):
if not isinstance(frame, FeaturedFrame):
frame = FeaturedFrame(frame)
# add derivatives
if self.derivative:
self.add_derivative_coefficients(frame)
self.add_second_derivative_coefficients(frame)
# add peaks
if self.derivative:
self.add_peaks(frame)
# add features
self.add_mean(frame)
self.add_integral(frame)
self.add_kurtosis(frame)
self.add_abs_mean(frame)
self.add_variance(frame)
self.add_minimum(frame)
self.add_maximum(frame)
self.add_abs_minimum(frame)
self.add_abs_maximum(frame)
self.add_root_mean_square(frame)
self.add_min_max_difference(frame)
self.add_percentiles(frame)
self.add_skewness(frame)
self.add_median(frame)
self.add_std(frame)
if self.derivative:
self.add_avg_min_peak_distance(frame)
self.add_avg_max_peak_distance(frame)
self.add_mean_min_peaks(frame)
self.add_mean_max_peaks(frame)
self.add_variance_min_peaks(frame)
self.add_variance_max_peaks(frame)
if self.derivative:
# add derivative features
self.add_mean(frame, True)
self.add_integral(frame, True)
self.add_kurtosis(frame, True)
self.add_abs_mean(frame, True)
self.add_variance(frame, True)
self.add_minimum(frame, True)
self.add_maximum(frame, True)
self.add_abs_minimum(frame, True)
self.add_abs_maximum(frame, True)
self.add_root_mean_square(frame, True)
self.add_min_max_difference(frame, True)
self.add_percentiles(frame, True)
self.add_skewness(frame, True)
self.add_median(frame, True)
self.add_std(frame, True)
return frame
# ADD FEATURES
def add_mean(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_mean' + der, np.mean(x_axis))
frame.add_feature('y_mean' + der, np.mean(y_axis))
frame.add_feature('z_mean' + der, np.mean(z_axis))
def add_integral(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_integral' + der, integr.simps(x_axis))
frame.add_feature('y_integral' + der, integr.simps(y_axis))
frame.add_feature('z_integral' + der, integr.simps(z_axis))
def add_kurtosis(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_kurtosis' + der, sts.kurtosis(x_axis))
frame.add_feature('y_kurtosis' + der, sts.kurtosis(y_axis))
frame.add_feature('z_kurtosis' + der, sts.kurtosis(z_axis))
def add_abs_mean(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_abs_mean' + der, abs(np.mean(x_axis)))
frame.add_feature('y_abs_mean' + der, abs(np.mean(y_axis)))
frame.add_feature('z_abs_mean' + der, abs(np.mean(z_axis)))
def add_variance(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_variance' + der, np.var(x_axis))
frame.add_feature('y_variance' + der, np.var(y_axis))
frame.add_feature('z_variance' + der, np.var(z_axis))
def add_minimum(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_min' + der, min(x_axis))
frame.add_feature('y_min' + der, min(y_axis))
frame.add_feature('z_min' + der, min(z_axis))
def add_maximum(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_max' + der, max(x_axis))
frame.add_feature('y_max' + der, max(y_axis))
frame.add_feature('z_max' + der, max(z_axis))
def add_abs_minimum(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_abs_min' + der, abs(min(x_axis)))
frame.add_feature('y_abs_min' + der, abs(min(y_axis)))
frame.add_feature('z_abs_min' + der, abs(min(z_axis)))
def add_abs_maximum(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_abs_max' + der, abs(max(x_axis)))
frame.add_feature('y_abs_max' + der, abs(max(y_axis)))
frame.add_feature('z_abs_max' + der, abs(max(z_axis)))
def add_root_mean_square(self, frame, derivative=False):
der = ''
if derivative:
x_axis_pow = np.power(frame.get_derivative('x'), 2)
y_axis_pow = np.power(frame.get_derivative('y'), 2)
z_axis_pow = np.power(frame.get_derivative('z'), 2)
der = '_der'
else:
x_axis_pow = np.power(frame.get_x_data(), 2)
y_axis_pow = np.power(frame.get_y_data(), 2)
z_axis_pow = np.power(frame.get_z_data(), 2)
frame.add_feature('x_rms' + der, np.sqrt(np.mean(x_axis_pow)))
frame.add_feature('y_rms' + der, np.sqrt(np.mean(y_axis_pow)))
frame.add_feature('z_rms' + der, np.sqrt(np.mean(z_axis_pow)))
def add_min_max_difference(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_min_max_difference' + der, max(x_axis) - min(x_axis))
frame.add_feature('y_min_max_difference' + der, max(y_axis) - min(y_axis))
frame.add_feature('z_min_max_difference' + der, max(z_axis) - min(z_axis))
def add_percentiles(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_25_percentile' + der, np.percentile(x_axis, 25))
frame.add_feature('y_25_percentile' + der, np.percentile(y_axis, 25))
frame.add_feature('z_25_percentile' + der, np.percentile(z_axis, 25))
frame.add_feature('x_75_percentile' + der, np.percentile(x_axis, 75))
frame.add_feature('y_75_percentile' + der, np.percentile(y_axis, 75))
frame.add_feature('z_75_percentile' + der, np.percentile(z_axis, 75))
def add_skewness(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_skewness' + der, sts.skew(x_axis))
frame.add_feature('y_skewness' + der, sts.skew(y_axis))
frame.add_feature('z_skewness' + der, sts.skew(z_axis))
def add_median(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_median' + der, np.median(x_axis))
frame.add_feature('y_median' + der, np.median(y_axis))
frame.add_feature('z_median' + der, np.median(z_axis))
def add_std(self, frame, derivative=False):
der = ''
if derivative:
x_axis = frame.get_derivative('x')
y_axis = frame.get_derivative('y')
z_axis = frame.get_derivative('z')
der = '_der'
else:
x_axis = frame.get_x_data()
y_axis = frame.get_y_data()
z_axis = frame.get_z_data()
frame.add_feature('x_std' + der, np.std(x_axis))
frame.add_feature('y_std' + der, np.std(y_axis))
frame.add_feature('z_std' + der, np.std(z_axis))
def add_avg_min_peak_distance(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_min'), frame.get_peaks('y_min'), frame.get_peaks('z_min')
frame.add_feature('x_avg_min_peak_distance', self.calculate_avg_distance(x_peaks))
frame.add_feature('y_avg_min_peak_distance', self.calculate_avg_distance(y_peaks))
frame.add_feature('z_avg_min_peak_distance', self.calculate_avg_distance(z_peaks))
def add_avg_max_peak_distance(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_max'), frame.get_peaks('y_max'), frame.get_peaks('z_max')
frame.add_feature('x_avg_max_peak_distance', self.calculate_avg_distance(x_peaks))
frame.add_feature('y_avg_max_peak_distance', self.calculate_avg_distance(y_peaks))
frame.add_feature('z_avg_max_peak_distance', self.calculate_avg_distance(z_peaks))
def add_mean_min_peaks(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_min'), frame.get_peaks('y_min'), frame.get_peaks('z_min')
f_x, f_y, f_z = frame.get_function('x'), frame.get_function('y'), frame.get_function('z')
frame.add_feature('x_min_peak_mean', self.calculate_peak_mean(x_peaks, f_x))
frame.add_feature('y_min_peak_mean', self.calculate_peak_mean(y_peaks, f_y))
frame.add_feature('z_min_peak_mean', self.calculate_peak_mean(z_peaks, f_z))
def add_mean_max_peaks(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_max'), frame.get_peaks('y_max'), frame.get_peaks('z_max')
f_x, f_y, f_z = frame.get_function('x'), frame.get_function('y'), frame.get_function('z')
frame.add_feature('x_max_peak_mean', self.calculate_peak_mean(x_peaks, f_x))
frame.add_feature('y_max_peak_mean', self.calculate_peak_mean(y_peaks, f_y))
frame.add_feature('z_max_peak_mean', self.calculate_peak_mean(z_peaks, f_z))
def add_variance_min_peaks(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_min'), frame.get_peaks('y_min'), frame.get_peaks('z_min')
f_x, f_y, f_z = frame.get_function('x'), frame.get_function('y'), frame.get_function('z')
frame.add_feature('x_min_peak_var', self.calculate_peak_variance(x_peaks, f_x))
frame.add_feature('y_min_peak_var', self.calculate_peak_variance(y_peaks, f_y))
frame.add_feature('z_min_peak_var', self.calculate_peak_variance(z_peaks, f_z))
def add_variance_max_peaks(self, frame):
x_peaks, y_peaks, z_peaks = frame.get_peaks('x_max'), frame.get_peaks('y_max'), frame.get_peaks('z_max')
f_x, f_y, f_z = frame.get_function('x'), frame.get_function('y'), frame.get_function('z')
frame.add_feature('x_max_peak_var', self.calculate_peak_variance(x_peaks, f_x))
frame.add_feature('y_max_peak_var', self.calculate_peak_variance(y_peaks, f_y))
frame.add_feature('z_max_peak_var', self.calculate_peak_variance(z_peaks, f_z))
def calculate_avg_distance(self, data):
distances = list()
if len(data) > 0:
i = 0
while i < len(data) - 1:
distance = data[i + 1] - data[i]
distances.append(distance)
i = i + 1
return np.mean(distances)
def calculate_peak_mean(self, data, f):
values = f(data)
return np.mean(values)
def calculate_peak_variance(self, data, f):
values = f(data)
return np.var(values)
# ADD DERIVATIVES
def add_derivative_coefficients(self, frame):
x_axis, y_axis, z_axis = frame.get_x_data(), frame.get_y_data(), frame.get_z_data()
frame.add_derivative('x', self.calculate_derivative_coefficients(x_axis))
frame.add_derivative('y', self.calculate_derivative_coefficients(y_axis))
frame.add_derivative('z', self.calculate_derivative_coefficients(z_axis))
def add_second_derivative_coefficients(self, frame):
x_der, y_der, z_der = frame.get_derivative('x'), frame.get_derivative('y'), frame.get_derivative('z')
frame.add_derivative('x2', self.calculate_derivative_coefficients(x_der))
frame.add_derivative('y2', self.calculate_derivative_coefficients(y_der))
frame.add_derivative('z2', self.calculate_derivative_coefficients(z_der))
def calculate_derivative_coefficients(self, data):
t = np.linspace(0, len(data) - 1, len(data))
t2 = np.linspace(1, len(data) - 2, len(data))
f = interp1d(t, data)
return mpmath.diff(f, t2)
# ADD PEAKS
def add_peaks(self, frame):
x_der, y_der, z_der = frame.get_derivative('x'), frame.get_derivative('y'), frame.get_derivative('z')
x_peaks, y_peaks, z_peaks = self.calculate_peaks(x_der), self.calculate_peaks(y_der), self.calculate_peaks(
z_der)
f_x_der2, f_y_der2, f_z_der2 = frame.get_function('x_der2'), frame.get_function('y_der2'), frame.get_function(
'z_der2')
frame.add_peaks('x_max', self.calculate_max_peaks(f_x_der2, x_peaks))
frame.add_peaks('y_max', self.calculate_max_peaks(f_y_der2, y_peaks))
frame.add_peaks('z_max', self.calculate_max_peaks(f_z_der2, z_peaks))
frame.add_peaks('x_min', self.calculate_min_peaks(f_x_der2, x_peaks))
frame.add_peaks('y_min', self.calculate_min_peaks(f_y_der2, y_peaks))
frame.add_peaks('z_min', self.calculate_min_peaks(f_z_der2, z_peaks))
def calculate_peaks(self, data):
t = np.linspace(1, len(data) - 2, len(data))
f = interp1d(t, data)
        search_range = np.linspace(1, len(data) - 2, len(data) * 2)  # avoid shadowing the builtin range
        return calculate_all_roots(f, search_range)
def calculate_max_peaks(self, f, peaks):
max_peaks = list()
for x in peaks:
if f(x) < 0:
max_peaks.append(x)
return max_peaks
def calculate_min_peaks(self, f, peaks):
min_peaks = list()
for x in peaks:
if f(x) > 0:
min_peaks.append(x)
return min_peaks
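# Hedged usage sketch (not part of the original module): it only illustrates how
# the extractor above is meant to be driven. The helper name
# `extract_time_domain_features` and the `raw_frame` argument are assumptions --
# raw_frame stands for whatever FeaturedFrame (from data_utils.featured_frame)
# accepts, i.e. something exposing get_x_data()/get_y_data()/get_z_data() once wrapped.
def extract_time_domain_features(raw_frame, with_derivatives=False):
    # derivative=False skips the derivative, second-derivative and peak features,
    # so only the plain statistical features (mean, variance, percentiles, ...) are added.
    extractor = TimeDomainFeatureExtractor(derivative=with_derivatives)
    # extract_features() wraps plain frames in a FeaturedFrame and attaches each
    # feature via add_feature(); the populated frame is returned.
    return extractor.extract_features(raw_frame)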
|
|
#!/usr/bin/env python
"""Display APOGEE instrument status
History:
2011-05-06 ROwen
2011-08-31 ROwen Added support for new keyword missingFibers and new utrData fields.
2011-08-31 ROwen Modified to better handle an unknown number of missing fibers.
2011-09-09 ROwen Added a title and improved the help strings.
Modified to use DataObjects.UTRData.
2015-11-03 ROwen Replace "== None" with "is None" and "!= None" with "is not None" to modernize the code.
"""
import Tkinter
import RO.Constants
import RO.StringUtil
import RO.Wdg
import TUI.Models
import DataObjects
class ReadStatusWdg(Tkinter.Frame):
def __init__(self, master, helpURL=None):
"""Create a status widget
"""
Tkinter.Frame.__init__(self, master)
gridder = RO.Wdg.Gridder(master=self, sticky="w")
self.gridder = gridder
self.model = TUI.Models.getModel("apogeeql")
self.apogeeModel = TUI.Models.getModel("apogee")
self.titleWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
text = "Last Read\n",
helpText = "Data about the most recent read",
helpURL = helpURL,
)
gridder.gridWdg(None, self.titleWdg, colSpan=3)
helpSuffix = " for most recent read"
self.expNameWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Name of exposure" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("Exp Name", self.expNameWdg, colSpan=2)
self.expTypeWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Type of exposure" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("Exp Type", self.expTypeWdg, colSpan=2)
self.readNumWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Current and total read number" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("Read Num", self.readNumWdg, colSpan=2)
# self.predReadsWdg = RO.Wdg.StrLabel(
# master = self,
# anchor = "w",
# helpText = "Predicted total number of reads" + helpSuffix,
# helpURL = helpURL,
# )
# gridder.gridWdg("Predicted Reads", self.predReadsWdg)
self.expTimeWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
width = 20, # room for "xxx of xxx; pred xxx"
helpText = "Current and total exposure time" + helpSuffix,
helpURL = helpURL,
)
# gridder.gridWdg("Exp Time", self.expTimeWdg, "sec")
# self.predExpTimeWdg = RO.Wdg.StrLabel(
# master = self,
# anchor = "w",
# helpText = "Predicted total exposure time" + helpSuffix,
# helpURL = helpURL,
# )
# gridder.gridWdg("Pred. Exp. Time", self.predExpTimeWdg, "sec")
self.snrWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Current and target S/N" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("S/N", self.snrWdg, colSpan=2)
self.ditherWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
width = 13, # room for "Bad x.xx/x.xx"
helpText = "Measured/commanded dither position" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("Dither", self.ditherWdg, "pixels")
self.waveOffsetWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Measured wavelength offset" + helpSuffix,
helpURL = helpURL,
)
gridder.gridWdg("Wave Offset", self.waveOffsetWdg, RO.StringUtil.AngstromStr)
self.statusWdg = RO.Wdg.StrLabel(
master = self,
anchor = "w",
helpText = "Are sky and FITS headers OK for most recent read?",
helpURL = helpURL,
)
gridder.gridWdg("Status", self.statusWdg, colSpan=2)
self.missingFibersWdg = RO.Wdg.StrEntry(
master = self,
readOnly = True,
helpText = "Most recent report of missing fibers from QuickLook",
helpURL = helpURL,
)
gridder.gridWdg(False, self.missingFibersWdg, colSpan=3, sticky="ew")
self.model.utrData.addCallback(self._utrDataCallback)
self.model.missingFibers.addCallback(self._missingFibersCallback)
gridder.allGridded()
def _missingFibersCallback(self, keyVar):
"""missingFibersCallback(self, keyVar):
Key('missingFibers',
String(name='expName', help='Exposure name'),
Int(name='readNum', help='Read number counter'),
Int(name='numMissing', help='Number of missing fibers'),
Int(name='fiberId', help='List of missing fiber IDs, if any; note fiber IDs start at 1')*(0,),
),
"""
numMissing = keyVar[2]
if numMissing is None:
self.missingFibersWdg.set(
"? Missing Fibers",
severity = RO.Constants.sevWarning,
)
elif numMissing == 0:
self.missingFibersWdg.set(
"No Missing Fibers",
severity = RO.Constants.sevNormal,
)
else:
missingFiberStr = " ".join(str(f) for f in keyVar[3:])
self.missingFibersWdg.set(
"%d Missing Fibers: %s" % (numMissing, missingFiberStr),
severity = RO.Constants.sevWarning,
)
def _utrDataCallback(self, keyVar):
"""utrData keyword callback
"""
def fmt(val, fmtStr="%s"):
if val is None:
return "?"
else:
return fmtStr % (val,)
def fmt2(val1, val2, sep=" of ", fmtStr="%s"):
return "%s%s%s" % (fmt(val1, fmtStr=fmtStr), sep, fmt(val2, fmtStr=fmtStr))
def r2t(reads):
timePerRead = self.apogeeModel.utrReadTime[0]
if None in (reads, timePerRead):
return None
return reads * timePerRead
def btest(bitField, bitInd):
if bitField is None:
return None
return bitField & 1 << bitInd
isCurrent = keyVar.isCurrent
utrData = DataObjects.UTRData(keyVar)
self.expNameWdg.set(utrData.expNum, isCurrent=isCurrent)
self.expTypeWdg.set(utrData.expType, isCurrent=isCurrent)
readNumStr = "%s; pred %s" % (fmt2(utrData.readNum, utrData.nReads), fmt(utrData.numReadsToTarget))
self.readNumWdg.set(readNumStr, isCurrent=isCurrent)
# self.predReadsWdg.set(fmt(utrData.numReadsToTarget, fmtStr="%0.1f"), isCurrent=isCurrent)
expTimeStr = "%s; pred %s" % (fmt2(r2t(utrData.readNum), r2t(utrData.nReads), fmtStr="%0.0f"), fmt(r2t(utrData.numReadsToTarget), fmtStr="%0.0f"))
self.expTimeWdg.set(expTimeStr, isCurrent=isCurrent)
# self.predExpTimeWdg.set(fmt(r2t(utrData.numReadsToTarget), fmtStr="%0.0f"), isCurrent=isCurrent)
snrGoal = self.model.snrGoal[0]
snrStr = fmt2(utrData.snr, snrGoal, fmtStr="%0.1f", sep="; want ")
self.snrWdg.set(snrStr, isCurrent=isCurrent)
ditherStrList = []
ditherSev = RO.Constants.sevNormal
if btest(utrData.statusWord, 1):
ditherStrList.append("Bad")
ditherSev = RO.Constants.sevError
ditherStrList.append(fmt2(utrData.measDitherPos, utrData.cmdDitherPos, sep="/", fmtStr="%0.2f"))
ditherStr = " ".join(ditherStrList)
self.ditherWdg.set(ditherStr, isCurrent=isCurrent, severity=ditherSev)
waveStrList = []
waveSev = RO.Constants.sevNormal
if btest(utrData.statusWord, 3):
waveStrList.append("Bad")
waveSev = RO.Constants.sevError
waveStrList.append(fmt(utrData.waveOffset, fmtStr="%0.2f"))
waveStr = " ".join(waveStrList)
self.waveOffsetWdg.set(waveStr, isCurrent=isCurrent, severity=waveSev)
statusStrList = []
statusSev = RO.Constants.sevNormal
for name, ind in (("FITS Hdr", 0), ("Sky", 2)):
if btest(utrData.statusWord, ind):
statusStrList.append(name)
if statusStrList:
statusStr = "Bad %s" % (", ".join(statusStrList))
statusSev = RO.Constants.sevError
else:
statusStr = "OK"
self.statusWdg.set(statusStr, isCurrent=isCurrent, severity=statusSev)
if __name__ == '__main__':
import TUI.Base.Wdg
root = RO.Wdg.PythonTk()
import TestData
tuiModel = TestData.tuiModel
testFrame = ReadStatusWdg(tuiModel.tkRoot)
testFrame.pack(side="top", expand=True)
statusBar = TUI.Base.Wdg.StatusBar(root)
statusBar.pack(side="top", expand=True, fill="x")
Tkinter.Button(text="Demo", command=TestData.animate).pack(side="top")
TestData.start()
tuiModel.reactor.run()
|
|
import boto
import sure # noqa
from freezegun import freeze_time
from moto import mock_dynamodb
from boto.dynamodb import condition
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.exception import DynamoDBResponseError
def create_table(conn):
message_table_schema = conn.create_schema(
hash_key_name='forum_name',
hash_key_proto_value=str,
)
table = conn.create_table(
name='messages',
schema=message_table_schema,
read_units=10,
write_units=10
)
return table
@freeze_time("2012-01-14")
@mock_dynamodb
def test_create_table():
conn = boto.connect_dynamodb()
create_table(conn)
expected = {
'Table': {
'CreationDateTime': 1326499200.0,
'ItemCount': 0,
'KeySchema': {
'HashKeyElement': {
'AttributeName': 'forum_name',
'AttributeType': 'S'
},
},
'ProvisionedThroughput': {
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
},
'TableName': 'messages',
'TableSizeBytes': 0,
'TableStatus': 'ACTIVE'
}
}
conn.describe_table('messages').should.equal(expected)
@mock_dynamodb
def test_delete_table():
conn = boto.connect_dynamodb()
create_table(conn)
conn.list_tables().should.have.length_of(1)
conn.layer1.delete_table('messages')
conn.list_tables().should.have.length_of(0)
conn.layer1.delete_table.when.called_with('messages').should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_update_table_throughput():
conn = boto.connect_dynamodb()
table = create_table(conn)
table.read_units.should.equal(10)
table.write_units.should.equal(10)
table.update_throughput(5, 6)
table.refresh()
table.read_units.should.equal(5)
table.write_units.should.equal(6)
@mock_dynamodb
def test_item_add_and_describe_and_update():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='LOLCat Forum',
attrs=item_data,
)
item.put()
returned_item = table.get_item(
hash_key='LOLCat Forum',
attributes_to_get=['Body', 'SentBy']
)
dict(returned_item).should.equal({
'forum_name': 'LOLCat Forum',
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
})
item['SentBy'] = 'User B'
item.put()
returned_item = table.get_item(
hash_key='LOLCat Forum',
attributes_to_get=['Body', 'SentBy']
)
dict(returned_item).should.equal({
'forum_name': 'LOLCat Forum',
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User B',
})
@mock_dynamodb
def test_item_put_without_table():
conn = boto.connect_dynamodb()
conn.layer1.put_item.when.called_with(
table_name='undeclared-table',
item=dict(
hash_key='LOLCat Forum',
),
).should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_get_missing_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
table.get_item.when.called_with(
hash_key='tester',
).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb
def test_get_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.get_item.when.called_with(
table_name='undeclared-table',
key={
'HashKeyElement': {'S': 'tester'},
},
).should.throw(DynamoDBKeyNotFoundError)
@mock_dynamodb
def test_delete_item():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='LOLCat Forum',
attrs=item_data,
)
item.put()
table.refresh()
table.item_count.should.equal(1)
response = item.delete()
response.should.equal({u'Attributes': [], u'ConsumedCapacityUnits': 0.5})
table.refresh()
table.item_count.should.equal(0)
item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_delete_item_with_attribute_response():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='LOLCat Forum',
attrs=item_data,
)
item.put()
table.refresh()
table.item_count.should.equal(1)
response = item.delete(return_values='ALL_OLD')
response.should.equal({
u'Attributes': {
u'Body': u'http://url_to_lolcat.gif',
u'forum_name': u'LOLCat Forum',
u'ReceivedTime': u'12/9/2011 11:36:03 PM',
u'SentBy': u'User A',
},
u'ConsumedCapacityUnits': 0.5
})
table.refresh()
table.item_count.should.equal(0)
item.delete.when.called_with().should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_delete_item_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.delete_item.when.called_with(
table_name='undeclared-table',
key={
'HashKeyElement': {'S': 'tester'},
},
).should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_query():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='the-key',
attrs=item_data,
)
item.put()
results = table.query(hash_key='the-key')
results.response['Items'].should.have.length_of(1)
@mock_dynamodb
def test_query_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.query.when.called_with(
table_name='undeclared-table',
hash_key_value={'S': 'the-key'},
).should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_scan():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='the-key',
attrs=item_data,
)
item.put()
item = table.new_item(
hash_key='the-key2',
attrs=item_data,
)
item.put()
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User B',
'ReceivedTime': '12/9/2011 11:36:03 PM',
'Ids': set([1, 2, 3]),
'PK': 7,
}
item = table.new_item(
hash_key='the-key3',
attrs=item_data,
)
item.put()
results = table.scan()
results.response['Items'].should.have.length_of(3)
results = table.scan(scan_filter={'SentBy': condition.EQ('User B')})
results.response['Items'].should.have.length_of(1)
results = table.scan(scan_filter={'Body': condition.BEGINS_WITH('http')})
results.response['Items'].should.have.length_of(3)
results = table.scan(scan_filter={'Ids': condition.CONTAINS(2)})
results.response['Items'].should.have.length_of(1)
results = table.scan(scan_filter={'Ids': condition.NOT_NULL()})
results.response['Items'].should.have.length_of(1)
results = table.scan(scan_filter={'Ids': condition.NULL()})
results.response['Items'].should.have.length_of(2)
results = table.scan(scan_filter={'PK': condition.BETWEEN(8, 9)})
results.response['Items'].should.have.length_of(0)
results = table.scan(scan_filter={'PK': condition.BETWEEN(5, 8)})
results.response['Items'].should.have.length_of(1)
@mock_dynamodb
def test_scan_with_undeclared_table():
conn = boto.connect_dynamodb()
conn.layer1.scan.when.called_with(
table_name='undeclared-table',
scan_filter={
"SentBy": {
"AttributeValueList": [{
"S": "User B"}
],
"ComparisonOperator": "EQ"
}
},
).should.throw(DynamoDBResponseError)
@mock_dynamodb
def test_write_batch():
conn = boto.connect_dynamodb()
table = create_table(conn)
batch_list = conn.new_batch_write_list()
items = []
items.append(table.new_item(
hash_key='the-key',
attrs={
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
},
))
items.append(table.new_item(
hash_key='the-key2',
attrs={
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User B',
'ReceivedTime': '12/9/2011 11:36:03 PM',
'Ids': set([1, 2, 3]),
'PK': 7,
},
))
batch_list.add_batch(table, puts=items)
conn.batch_write_item(batch_list)
table.refresh()
table.item_count.should.equal(2)
batch_list = conn.new_batch_write_list()
batch_list.add_batch(table, deletes=[('the-key')])
conn.batch_write_item(batch_list)
table.refresh()
table.item_count.should.equal(1)
@mock_dynamodb
def test_batch_read():
conn = boto.connect_dynamodb()
table = create_table(conn)
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User A',
'ReceivedTime': '12/9/2011 11:36:03 PM',
}
item = table.new_item(
hash_key='the-key1',
attrs=item_data,
)
item.put()
item = table.new_item(
hash_key='the-key2',
attrs=item_data,
)
item.put()
item_data = {
'Body': 'http://url_to_lolcat.gif',
'SentBy': 'User B',
'ReceivedTime': '12/9/2011 11:36:03 PM',
'Ids': set([1, 2, 3]),
'PK': 7,
}
item = table.new_item(
hash_key='another-key',
attrs=item_data,
)
item.put()
items = table.batch_get_item([('the-key1'), ('another-key')])
# Iterate through so that batch_item gets called
count = len([x for x in items])
count.should.have.equal(2)
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2015-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
from helpers import unittest, LuigiTestCase
from luigi import six
import luigi
import luigi.worker
import luigi.execution_summary
import threading
import datetime
class ExecutionSummaryTest(LuigiTestCase):
def setUp(self):
super(ExecutionSummaryTest, self).setUp()
self.scheduler = luigi.scheduler.CentralPlannerScheduler(prune_on_get_work=False)
self.worker = luigi.worker.Worker(scheduler=self.scheduler)
def run_task(self, task):
self.worker.add(task) # schedule
self.worker.run() # run
def summary_dict(self):
return luigi.execution_summary._summary_dict(self.worker)
def summary(self):
return luigi.execution_summary.summary(self.worker)
def test_all_statuses(self):
class Bar(luigi.Task):
num = luigi.IntParameter()
def run(self):
if self.num == 0:
raise ValueError()
def complete(self):
if self.num == 1:
return True
return False
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(5):
yield Bar(i)
self.run_task(Foo())
d = self.summary_dict()
self.assertEqual({Bar(num=1)}, d['already_done'])
self.assertEqual({Bar(num=2), Bar(num=3), Bar(num=4)}, d['completed'])
self.assertEqual({Bar(num=0)}, d['failed'])
self.assertEqual({Foo()}, d['upstream_failure'])
self.assertFalse(d['upstream_missing_dependency'])
self.assertFalse(d['run_by_other_worker'])
self.assertFalse(d['still_pending_ext'])
s = self.summary()
self.assertIn('\n* 3 ran successfully:\n - 3 Bar(num=', s)
self.assertIn('\n* 1 present dependencies were encountered:\n - 1 Bar(num=1)\n', s)
self.assertIn('\n* 1 failed:\n - 1 Bar(num=0)\n* 1 were left pending, among these:\n * 1 had failed dependencies:\n - 1 Foo()\n\nThis progress looks :( because there were failed tasks', s)
self.assertNotIn('\n\n\n', s)
def test_upstream_not_running(self):
class ExternalBar(luigi.ExternalTask):
num = luigi.IntParameter()
def complete(self):
if self.num == 1:
return True
return False
class Bar(luigi.Task):
num = luigi.IntParameter()
def run(self):
if self.num == 0:
raise ValueError()
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(5):
yield ExternalBar(i)
yield Bar(i)
self.run_task(Foo())
d = self.summary_dict()
self.assertEqual({ExternalBar(num=1)}, d['already_done'])
self.assertEqual({Bar(num=1), Bar(num=2), Bar(num=3), Bar(num=4)}, d['completed'])
self.assertEqual({Bar(num=0)}, d['failed'])
self.assertEqual({Foo()}, d['upstream_failure'])
self.assertEqual({Foo()}, d['upstream_missing_dependency'])
self.assertFalse(d['run_by_other_worker'])
self.assertEqual({ExternalBar(num=0), ExternalBar(num=2), ExternalBar(num=3), ExternalBar(num=4)}, d['still_pending_ext'])
s = self.summary()
self.assertIn('\n* 1 present dependencies were encountered:\n - 1 ExternalBar(num=1)\n', s)
self.assertIn('\n* 4 ran successfully:\n - 4 Bar(num=1...4)\n', s)
self.assertIn('\n* 1 failed:\n - 1 Bar(num=0)\n', s)
self.assertIn('\n* 5 were left pending, among these:\n * 4 were missing external dependencies:\n - 4 ExternalBar(num=', s)
self.assertIn('\n * 1 had failed dependencies:\n - 1 Foo()\n * 1 had missing external dependencies:\n - 1 Foo()\n\nThis progress looks :( because there were failed tasks\n', s)
self.assertNotIn('\n\n\n', s)
def test_already_running(self):
lock1 = threading.Lock()
lock2 = threading.Lock()
class ParentTask(luigi.Task):
def __init__(self, *args, **kwargs):
super(ParentTask, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
self.comp = True
def requires(self):
yield LockTask()
class LockTask(luigi.Task):
def __init__(self, *args, **kwargs):
super(LockTask, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
lock2.release()
lock1.acquire()
self.comp = True
lock1.acquire()
lock2.acquire()
other_worker = luigi.worker.Worker(scheduler=self.scheduler, worker_id="other_worker")
other_worker.add(ParentTask())
t1 = threading.Thread(target=other_worker.run)
t1.start()
lock2.acquire()
self.run_task(ParentTask())
lock1.release()
t1.join()
d = self.summary_dict()
self.assertEqual({LockTask()}, d['run_by_other_worker'])
self.assertEqual({ParentTask()}, d['upstream_run_by_other_worker'])
s = self.summary()
self.assertIn('\nScheduled 2 tasks of which:\n* 2 were left pending, among these:\n * 1 were being run by another worker:\n - 1 LockTask()\n * 1 had dependencies that were being run by other worker:\n - 1 ParentTask()\n', s)
self.assertIn('\n\nThe other workers were:\n - other_worker ran 1 tasks\n\nDid not run any tasks\nThis progress looks :) because there were no failed tasks or missing external dependencies\n', s)
self.assertNotIn('\n\n\n', s)
def test_larger_tree(self):
class Dog(luigi.Task):
def __init__(self, *args, **kwargs):
super(Dog, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
self.comp = True
def requires(self):
yield Cat(2)
class Cat(luigi.Task):
num = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Cat, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
if self.num == 2:
raise ValueError()
self.comp = True
def complete(self):
if self.num == 1:
return True
else:
return self.comp
class Bar(luigi.Task):
num = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
self.comp = True
def requires(self):
if self.num == 0:
yield ExternalBar()
yield Cat(0)
if self.num == 1:
yield Cat(0)
yield Cat(1)
if self.num == 2:
yield Dog()
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(3):
yield Bar(i)
class ExternalBar(luigi.ExternalTask):
def complete(self):
return False
self.run_task(Foo())
d = self.summary_dict()
self.assertEqual({Cat(num=1)}, d['already_done'])
self.assertEqual({Cat(num=0), Bar(num=1)}, d['completed'])
self.assertEqual({Cat(num=2)}, d['failed'])
self.assertEqual({Dog(), Bar(num=2), Foo()}, d['upstream_failure'])
self.assertEqual({Bar(num=0), Foo()}, d['upstream_missing_dependency'])
self.assertFalse(d['run_by_other_worker'])
self.assertEqual({ExternalBar()}, d['still_pending_ext'])
s = self.summary()
self.assertNotIn('\n\n\n', s)
def test_with_dates(self):
""" Just test that it doesn't crash with date params """
start = datetime.date(1998, 3, 23)
class Bar(luigi.Task):
date = luigi.DateParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(10):
new_date = start + datetime.timedelta(days=i)
yield Bar(date=new_date)
self.run_task(Foo())
d = self.summary_dict()
exp_set = {Bar(start + datetime.timedelta(days=i)) for i in range(10)}
exp_set.add(Foo())
self.assertEqual(exp_set, d['completed'])
s = self.summary()
self.assertIn('date=1998-0', s)
self.assertIn('Scheduled 11 tasks', s)
self.assertIn('Luigi Execution Summary', s)
self.assertNotIn('00:00:00', s)
self.assertNotIn('\n\n\n', s)
def test_with_ranges_minutes(self):
start = datetime.datetime(1998, 3, 23, 1, 50)
class Bar(luigi.Task):
time = luigi.DateMinuteParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(300):
new_time = start + datetime.timedelta(minutes=i)
yield Bar(time=new_time)
self.run_task(Foo())
d = self.summary_dict()
exp_set = {Bar(start + datetime.timedelta(minutes=i)) for i in range(300)}
exp_set.add(Foo())
self.assertEqual(exp_set, d['completed'])
s = self.summary()
self.assertIn('Bar(time=1998-03-23T0150...1998-03-23T0649)', s)
self.assertNotIn('\n\n\n', s)
def test_with_ranges_one_param(self):
class Bar(luigi.Task):
num = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(11):
yield Bar(i)
self.run_task(Foo())
d = self.summary_dict()
exp_set = {Bar(i) for i in range(11)}
exp_set.add(Foo())
self.assertEqual(exp_set, d['completed'])
s = self.summary()
self.assertIn('Bar(num=0...10)', s)
self.assertNotIn('\n\n\n', s)
def test_with_ranges_multiple_params(self):
class Bar(luigi.Task):
num1 = luigi.IntParameter()
num2 = luigi.IntParameter()
num3 = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(5):
yield Bar(5, i, 25)
self.run_task(Foo())
d = self.summary_dict()
exp_set = {Bar(5, i, 25) for i in range(5)}
exp_set.add(Foo())
self.assertEqual(exp_set, d['completed'])
s = self.summary()
self.assertIn('- 5 Bar(num1=5, num2=0...4, num3=25)', s)
self.assertNotIn('\n\n\n', s)
def test_with_two_tasks(self):
class Bar(luigi.Task):
num = luigi.IntParameter()
num2 = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(2):
yield Bar(i, 2 * i)
self.run_task(Foo())
d = self.summary_dict()
self.assertEqual({Foo(), Bar(num=0, num2=0), Bar(num=1, num2=2)}, d['completed'])
s = self.summary()
self.assertIn(') and Bar(num=', s)
self.assertIn('- Bar(num=', s)
self.assertNotIn("Did not run any tasks", s)
self.assertNotIn('\n\n\n', s)
def test_really_long_param_name(self):
class Bar(luigi.Task):
This_is_a_really_long_parameter_that_we_should_not_print_out_because_people_will_get_annoyed = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
yield Bar(0)
self.run_task(Foo())
s = self.summary()
self.assertIn('Bar(...)', s)
self.assertNotIn("Did not run any tasks", s)
self.assertNotIn('\n\n\n', s)
def test_multiple_params_multiple_same_task_family(self):
class Bar(luigi.Task):
num = luigi.IntParameter()
num2 = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(4):
yield Bar(i, 2 * i)
self.run_task(Foo())
s = self.summary()
self.assertIn('- Bar(', s)
        self.assertIn(' and 3 other Bar', s)
self.assertNotIn("Did not run any tasks", s)
self.assertNotIn('\n\n\n', s)
def test_happy_smiley_face_normal(self):
class Bar(luigi.Task):
num = luigi.IntParameter()
num2 = luigi.IntParameter()
def __init__(self, *args, **kwargs):
super(Bar, self).__init__(*args, **kwargs)
self.comp = False
def run(self):
self.comp = True
def complete(self):
return self.comp
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(4):
yield Bar(i, 2 * i)
self.run_task(Foo())
s = self.summary()
self.assertIn('\nThis progress looks :) because there were no failed tasks or missing external dependencies', s)
self.assertNotIn("Did not run any tasks", s)
self.assertNotIn('\n\n\n', s)
def test_happy_smiley_face_other_workers(self):
lock1 = threading.Lock()
lock2 = threading.Lock()
class ParentTask(luigi.Task):
def __init__(self, *args, **kwargs):
super(ParentTask, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
self.comp = True
def requires(self):
yield LockTask()
class LockTask(luigi.Task):
def __init__(self, *args, **kwargs):
super(LockTask, self).__init__(*args, **kwargs)
self.comp = False
def complete(self):
return self.comp
def run(self):
lock2.release()
lock1.acquire()
self.comp = True
lock1.acquire()
lock2.acquire()
other_worker = luigi.worker.Worker(scheduler=self.scheduler, worker_id="other_worker")
other_worker.add(ParentTask())
t1 = threading.Thread(target=other_worker.run)
t1.start()
lock2.acquire()
self.run_task(ParentTask())
lock1.release()
t1.join()
s = self.summary()
self.assertIn('\nThis progress looks :) because there were no failed tasks or missing external dependencies', s)
self.assertNotIn('\n\n\n', s)
def test_sad_smiley_face(self):
class ExternalBar(luigi.ExternalTask):
def complete(self):
return False
class Bar(luigi.Task):
num = luigi.IntParameter()
def run(self):
if self.num == 0:
raise ValueError()
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(5):
yield Bar(i)
yield ExternalBar()
self.run_task(Foo())
s = self.summary()
self.assertIn('\nThis progress looks :( because there were failed tasks', s)
self.assertNotIn("Did not run any tasks", s)
self.assertNotIn('\n\n\n', s)
def test_neutral_smiley_face(self):
class ExternalBar(luigi.ExternalTask):
def complete(self):
return False
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
yield ExternalBar()
self.run_task(Foo())
s = self.summary()
self.assertIn('\nThis progress looks :| because there were missing external dependencies', s)
self.assertNotIn('\n\n\n', s)
def test_did_not_run_any_tasks(self):
class ExternalBar(luigi.ExternalTask):
num = luigi.IntParameter()
def complete(self):
if self.num == 5:
return True
return False
class Foo(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(10):
yield ExternalBar(i)
self.run_task(Foo())
d = self.summary_dict()
self.assertEqual({ExternalBar(5)}, d['already_done'])
self.assertEqual({ExternalBar(i) for i in range(10) if i != 5}, d['still_pending_ext'])
self.assertEqual({Foo()}, d['upstream_missing_dependency'])
s = self.summary()
self.assertIn('\n\nDid not run any tasks\nThis progress looks :| because there were missing external dependencies', s)
self.assertNotIn('\n\n\n', s)
def test_example(self):
class MyExternal(luigi.ExternalTask):
def complete(self):
return False
class Boom(luigi.Task):
this_is_a_really_long_I_mean_way_too_long_and_annoying_parameter = luigi.IntParameter()
def run(self):
pass
def requires(self):
for i in range(5, 200):
yield Bar(i)
class Foo(luigi.Task):
num = luigi.IntParameter()
num2 = luigi.IntParameter()
def run(self):
pass
def requires(self):
yield MyExternal()
yield Boom(0)
class Bar(luigi.Task):
num = luigi.IntParameter()
def complete(self):
return True
def run(self):
pass
class DateTask(luigi.Task):
date = luigi.DateParameter()
num = luigi.IntParameter()
def run(self):
pass
def requires(self):
yield MyExternal()
yield Boom(0)
class EntryPoint(luigi.Task):
def run(self):
pass
def requires(self):
for i in range(10):
yield Foo(100, 2 * i)
for i in range(10):
yield DateTask(datetime.date(1998, 3, 23) + datetime.timedelta(days=i), 5)
self.run_task(EntryPoint())
s = self.summary()
self.assertIn('\n\nScheduled 218 tasks of which:\n', s)
self.assertIn('\n* 195 present dependencies were encountered:\n', s)
self.assertIn('\n* 1 ran successfully:\n', s)
self.assertIn('\n - 1 Boom(...)\n', s)
self.assertIn('\n* 22 were left pending, among these:\n', s)
self.assertIn('\n * 1 were missing external dependencies:\n', s)
self.assertIn('\n - 1 MyExternal()\n', s)
self.assertIn('\n * 21 had missing external dependencies:\n', s)
self.assertIn('\n - 1 EntryPoint()\n', s)
self.assertIn('\n - Foo(num=', s)
self.assertIn(') and 9 other Foo\n', s)
self.assertIn('\n - 10 DateTask(date=1998-03-23...1998-04-01, num=5)\n', s)
self.assertIn('\nThis progress looks :| because there were missing external dependencies\n\n', s)
self.assertNotIn('\n\n\n', s)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import time
import calendar
import math
from collections import defaultdict
from . import wcwidth
from .displaying import colorme, FormattedValue, DEFAULT_VALUE_COLORS
from cassandra.cqltypes import EMPTY
unicode_controlchars_re = re.compile(r'[\x00-\x31\x7f-\xa0]')
controlchars_re = re.compile(r'[\x00-\x31\x7f-\xff]')
def _show_control_chars(match):
txt = repr(match.group(0))
if txt.startswith('u'):
txt = txt[2:-1]
else:
txt = txt[1:-1]
return txt
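# Matches the backslash escape sequences that repr() can produce: a single
# escaped character, \xXX, \uXXXX or \UXXXXXXXX.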
bits_to_turn_red_re = re.compile(r'\\([^uUx]|u[0-9a-fA-F]{4}|x[0-9a-fA-F]{2}|U[0-9a-fA-F]{8})')
def _make_turn_bits_red_f(color1, color2):
def _turn_bits_red(match):
txt = match.group(0)
if txt == '\\\\':
return '\\'
return color1 + txt + color2
return _turn_bits_red
default_null_placeholder = 'null'
default_time_format = ''
default_float_precision = 3
default_colormap = DEFAULT_VALUE_COLORS
empty_colormap = defaultdict(lambda: '')
def format_by_type(cqltype, val, encoding, colormap=None, addcolor=False,
nullval=None, time_format=None, float_precision=None):
if nullval is None:
nullval = default_null_placeholder
if val is None:
return colorme(nullval, colormap, 'error')
if addcolor is False:
colormap = empty_colormap
elif colormap is None:
colormap = default_colormap
if time_format is None:
time_format = default_time_format
if float_precision is None:
float_precision = default_float_precision
return format_value(cqltype, val, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval)
def color_text(bval, colormap, displaywidth=None):
# note that here, we render natural backslashes as just backslashes,
# in the same color as surrounding text, when using color. When not
# using color, we need to double up the backslashes so it's not
# ambiguous. This introduces the unique difficulty of having different
# display widths for the colored and non-colored versions. To avoid
# adding the smarts to handle that into FormattedValue, we just
# make an explicit check to see if a null colormap is being used or
# not.
if displaywidth is None:
displaywidth = len(bval)
tbr = _make_turn_bits_red_f(colormap['blob'], colormap['text'])
coloredval = colormap['text'] + bits_to_turn_red_re.sub(tbr, bval) + colormap['reset']
if colormap['text']:
displaywidth -= bval.count(r'\\')
return FormattedValue(bval, coloredval, displaywidth)
def format_value_default(val, colormap, **_):
val = str(val)
escapedval = val.replace('\\', '\\\\')
bval = controlchars_re.sub(_show_control_chars, escapedval)
return color_text(bval, colormap)
# Mapping cql type base names ("int", "map", etc) to formatter functions,
# making format_value a generic function
_formatters = {}
def format_value(type, val, **kwargs):
if val == EMPTY:
return format_value_default('', **kwargs)
formatter = _formatters.get(type.__name__, format_value_default)
return formatter(val, **kwargs)
def formatter_for(typname):
def registrator(f):
_formatters[typname] = f
return f
return registrator
@formatter_for('bytearray')
def format_value_blob(val, colormap, **_):
bval = '0x' + ''.join('%02x' % ord(c) for c in val)
return colorme(bval, colormap, 'blob')
formatter_for('buffer')(format_value_blob)
def format_python_formatted_type(val, colormap, color, quote=False):
bval = str(val)
if quote:
bval = "'%s'" % bval
return colorme(bval, colormap, color)
@formatter_for('Decimal')
def format_value_decimal(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'decimal')
@formatter_for('UUID')
def format_value_uuid(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'uuid')
@formatter_for('inet')
def formatter_value_inet(val, colormap, quote=False, **_):
return format_python_formatted_type(val, colormap, 'inet', quote=quote)
@formatter_for('bool')
def format_value_boolean(val, colormap, **_):
return format_python_formatted_type(val, colormap, 'boolean')
def format_floating_point_type(val, colormap, float_precision, **_):
if math.isnan(val):
bval = 'NaN'
elif math.isinf(val):
bval = 'Infinity'
else:
bval = '%.*g' % (float_precision, val)
return colorme(bval, colormap, 'float')
formatter_for('float')(format_floating_point_type)
def format_integer_type(val, colormap, **_):
# base-10 only for now; support others?
bval = str(val)
return colorme(bval, colormap, 'int')
formatter_for('long')(format_integer_type)
formatter_for('int')(format_integer_type)
@formatter_for('date')
def format_value_timestamp(val, colormap, time_format, quote=False, **_):
bval = strftime(time_format, calendar.timegm(val.utctimetuple()))
if quote:
bval = "'%s'" % bval
return colorme(bval, colormap, 'timestamp')
formatter_for('datetime')(format_value_timestamp)
def strftime(time_format, seconds):
local = time.localtime(seconds)
formatted = time.strftime(time_format, local)
if local.tm_isdst != 0:
offset = -time.altzone
else:
offset = -time.timezone
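# If %z already produced a real offset, the format does not end in %z, or the
# local offset is zero, the formatted string is already correct as-is.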
if formatted[-4:] != '0000' or time_format[-2:] != '%z' or offset == 0:
return formatted
# deal with %z on platforms where it isn't supported. see CASSANDRA-4746.
if offset < 0:
sign = '-'
else:
sign = '+'
hours, minutes = divmod(abs(offset) / 60, 60)
return formatted[:-5] + sign + '{0:0=2}{1:0=2}'.format(hours, minutes)
@formatter_for('str')
def format_value_text(val, encoding, colormap, quote=False, **_):
escapedval = val.replace(u'\\', u'\\\\')
if quote:
escapedval = escapedval.replace("'", "''")
escapedval = unicode_controlchars_re.sub(_show_control_chars, escapedval)
bval = escapedval.encode(encoding, 'backslashreplace')
if quote:
bval = "'%s'" % bval
displaywidth = wcwidth.wcswidth(bval.decode(encoding))
return color_text(bval, colormap, displaywidth)
# name alias
formatter_for('unicode')(format_value_text)
def format_simple_collection(val, lbracket, rbracket, encoding,
colormap, time_format, float_precision, nullval):
subs = [format_value(type(sval), sval, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval, quote=True)
for sval in val]
bval = lbracket + ', '.join(sval.strval for sval in subs) + rbracket
lb, sep, rb = [colormap['collection'] + s + colormap['reset']
for s in (lbracket, ', ', rbracket)]
coloredval = lb + sep.join(sval.coloredval for sval in subs) + rb
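# 2 * len(subs) covers the ', ' separators between elements plus the two
# surrounding brackets.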
displaywidth = 2 * len(subs) + sum(sval.displaywidth for sval in subs)
return FormattedValue(bval, coloredval, displaywidth)
@formatter_for('list')
def format_value_list(val, encoding, colormap, time_format, float_precision, nullval, **_):
return format_simple_collection(val, '[', ']', encoding, colormap,
time_format, float_precision, nullval)
formatter_for('tuple')(format_value_list)
@formatter_for('set')
def format_value_set(val, encoding, colormap, time_format, float_precision, nullval, **_):
return format_simple_collection(sorted(val), '{', '}', encoding, colormap,
time_format, float_precision, nullval)
formatter_for('frozenset')(format_value_set)
formatter_for('sortedset')(format_value_set)
@formatter_for('dict')
def format_value_map(val, encoding, colormap, time_format, float_precision, nullval, **_):
def subformat(v):
return format_value(type(v), v, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval, quote=True)
subs = [(subformat(k), subformat(v)) for (k, v) in sorted(val.items())]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
formatter_for('OrderedDict')(format_value_map)
def format_value_utype(val, encoding, colormap, time_format, float_precision, nullval, **_):
def format_field_value(v):
if v is None:
return colorme(nullval, colormap, 'error')
return format_value(type(v), v, encoding=encoding, colormap=colormap,
time_format=time_format, float_precision=float_precision,
nullval=nullval, quote=True)
def format_field_name(name):
return format_value_text(name, encoding=encoding, colormap=colormap, quote=False)
subs = [(format_field_name(k), format_field_value(v)) for (k, v) in val._asdict().items()]
bval = '{' + ', '.join(k.strval + ': ' + v.strval for (k, v) in subs) + '}'
lb, comma, colon, rb = [colormap['collection'] + s + colormap['reset']
for s in ('{', ', ', ': ', '}')]
coloredval = lb \
+ comma.join(k.coloredval + colon + v.coloredval for (k, v) in subs) \
+ rb
displaywidth = 4 * len(subs) + sum(k.displaywidth + v.displaywidth for (k, v) in subs)
return FormattedValue(bval, coloredval, displaywidth)
|
|
# Copyright 2012 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
S3 TOKEN MIDDLEWARE
.. warning::
This module is DEPRECATED and may be removed in the 2.0.0 release. The
s3_token middleware has been moved to the `keystonemiddleware repository
<http://docs.openstack.org/developer/keystonemiddleware/>`_.
This WSGI component:
* Get a request from the swift3 middleware with an S3 Authorization
access key.
* Validate s3 token in Keystone.
* Transform the account name to AUTH_%(tenant_name).
"""
import logging
from oslo_serialization import jsonutils
from oslo_utils import strutils
import requests
import six
from six.moves import urllib
import webob
PROTOCOL_NAME = 'S3 Token Authentication'
# TODO(kun): remove this after oslo merges it.
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
"""Validate and split the given HTTP request path.
**Examples**::
['a'] = split_path('/a')
['a', None] = split_path('/a', 1, 2)
['a', 'c'] = split_path('/a/c', 1, 2)
['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)
:param path: HTTP Request path to be split
:param minsegs: Minimum number of segments to be extracted
:param maxsegs: Maximum number of segments to be extracted
:param rest_with_last: If True, trailing data will be returned as part
of last segment. If False, and there is
trailing data, raises ValueError.
:returns: list of segments with a length of maxsegs (non-existent
segments will return as None)
:raises: ValueError if given an invalid path
"""
if not maxsegs:
maxsegs = minsegs
if minsegs > maxsegs:
raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
if rest_with_last:
segs = path.split('/', maxsegs)
minsegs += 1
maxsegs += 1
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs or
'' in segs[1:minsegs]):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
else:
minsegs += 1
maxsegs += 1
segs = path.split('/', maxsegs)
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs + 1 or
'' in segs[1:minsegs] or
(count == maxsegs + 1 and segs[maxsegs])):
raise ValueError('Invalid path: %s' % urllib.parse.quote(path))
segs = segs[1:maxsegs]
segs.extend([None] * (maxsegs - 1 - len(segs)))
return segs
class ServiceError(Exception):
pass
class S3Token(object):
"""Auth Middleware that handles S3 authenticating client calls."""
def __init__(self, app, conf):
"""Common initialization code."""
self.app = app
self.logger = logging.getLogger(conf.get('log_name', __name__))
self.logger.debug('Starting the %s component', PROTOCOL_NAME)
self.logger.warning(
'This middleware module is deprecated as of v0.11.0 in favor of '
'keystonemiddleware.s3_token - please update your WSGI pipeline '
'to reference the new middleware package.')
self.reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
# where to find the auth service (we use this to validate tokens)
auth_host = conf.get('auth_host')
auth_port = int(conf.get('auth_port', 35357))
auth_protocol = conf.get('auth_protocol', 'https')
self.request_uri = '%s://%s:%s' % (auth_protocol, auth_host, auth_port)
# SSL
insecure = strutils.bool_from_string(conf.get('insecure', False))
cert_file = conf.get('certfile')
key_file = conf.get('keyfile')
if insecure:
self.verify = False
elif cert_file and key_file:
self.verify = (cert_file, key_file)
elif cert_file:
self.verify = cert_file
else:
self.verify = None
def deny_request(self, code):
error_table = {
'AccessDenied': (401, 'Access denied'),
'InvalidURI': (400, 'Could not parse the specified URI'),
}
resp = webob.Response(content_type='text/xml')
resp.status = error_table[code][0]
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<Error>\r\n <Code>%s</Code>\r\n '
'<Message>%s</Message>\r\n</Error>\r\n' %
(code, error_table[code][1]))
if six.PY3:
error_msg = error_msg.encode()
resp.body = error_msg
return resp
def _json_request(self, creds_json):
headers = {'Content-Type': 'application/json'}
try:
response = requests.post('%s/v2.0/s3tokens' % self.request_uri,
headers=headers, data=creds_json,
verify=self.verify)
except requests.exceptions.RequestException as e:
self.logger.info('HTTP connection exception: %s', e)
resp = self.deny_request('InvalidURI')
raise ServiceError(resp)
if response.status_code < 200 or response.status_code >= 300:
self.logger.debug('Keystone reply error: status=%s reason=%s',
response.status_code, response.reason)
resp = self.deny_request('AccessDenied')
raise ServiceError(resp)
return response
def __call__(self, environ, start_response):
"""Handle incoming request. authenticate and send downstream."""
req = webob.Request(environ)
self.logger.debug('Calling S3Token middleware.')
try:
parts = split_path(req.path, 1, 4, True)
version, account, container, obj = parts
except ValueError:
msg = 'Not a path query, skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
# Read request signature and access id.
if 'Authorization' not in req.headers:
msg = 'No Authorization header. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
token = req.headers.get('X-Auth-Token',
req.headers.get('X-Storage-Token'))
if not token:
msg = 'You did not specify an auth or a storage token. skipping.'
self.logger.debug(msg)
return self.app(environ, start_response)
auth_header = req.headers['Authorization']
try:
access, signature = auth_header.split(' ')[-1].rsplit(':', 1)
except ValueError:
msg = 'You have an invalid Authorization header: %s'
self.logger.debug(msg, auth_header)
return self.deny_request('InvalidURI')(environ, start_response)
# NOTE(chmou): This handles the special case with nova when the
# s3_affix_tenant option is used. We force the request to connect
# to an account other than the authenticated one. Before people
# start getting worried about security, note that we still connect
# with the username/token specified by the user; we only direct the
# request to a different account. In a normal scenario, if that user
# does not have the reseller right the request will simply fail, but
# since the reseller account can connect to every account it is
# allowed by the swift_auth middleware.
force_tenant = None
if ':' in access:
access, force_tenant = access.split(':')
# Authenticate request.
creds = {'credentials': {'access': access,
'token': token,
'signature': signature}}
creds_json = jsonutils.dumps(creds)
self.logger.debug('Connecting to Keystone sending this JSON: %s',
creds_json)
# NOTE(vish): We could save a call to keystone by having
# keystone return token, tenant, user, and roles
# from this call.
#
# NOTE(chmou): We still have the same problem we would need to
# change token_auth to detect if we already
# identified and not doing a second query and just
# pass it through to swiftauth in this case.
try:
resp = self._json_request(creds_json)
except ServiceError as e:
resp = e.args[0]
msg = 'Received error, exiting middleware with error: %s'
self.logger.debug(msg, resp.status_code)
return resp(environ, start_response)
self.logger.debug('Keystone Reply: Status: %d, Output: %s',
resp.status_code, resp.content)
try:
identity_info = resp.json()
token_id = str(identity_info['access']['token']['id'])
tenant = identity_info['access']['token']['tenant']
except (ValueError, KeyError):
error = 'Error on keystone reply: %d %s'
self.logger.debug(error, resp.status_code, resp.content)
return self.deny_request('InvalidURI')(environ, start_response)
req.headers['X-Auth-Token'] = token_id
tenant_to_connect = force_tenant or tenant['id']
self.logger.debug('Connecting with tenant: %s', tenant_to_connect)
new_tenant_name = '%s%s' % (self.reseller_prefix, tenant_to_connect)
environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
new_tenant_name)
return self.app(environ, start_response)
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return S3Token(app, conf)
return auth_filter
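# Illustrative only (not part of this module): with paste.deploy, this filter
# is typically wired into a proxy pipeline via a config section along these
# lines. The module path and option values below are assumptions for the
# sketch, not taken from this file:
#
#   [filter:s3token]
#   paste.filter_factory = keystoneclient.middleware.s3_token:filter_factory
#   auth_host = keystone.example.com
#   auth_port = 35357
#   auth_protocol = https
#
# The auth_host, auth_port and auth_protocol options map directly to the
# conf.get() lookups in S3Token.__init__.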
|
|
from datetime import datetime
import json
import django
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db.models.signals import pre_save
from django.test.testcases import TestCase
from tastypie import fields
from tastypie.exceptions import ApiFieldError, NotFound
from core.models import Note, MediaBit
from core.tests.mocks import MockRequest
from related_resource.api.resources import CategoryResource, ForumResource,\
FreshNoteResource, JobResource, NoteResource, OrderResource,\
NoteWithUpdatableUserResource, PersonResource, TagResource, UserResource
from related_resource.api.urls import api
from related_resource.models import Category, Label, Tag, Taggable,\
TaggableTag, ExtraData, Company, Person, Dog, DogHouse, Bone, Product,\
Address, Job, Payment, Forum, Order, OrderItem, Contact, ContactGroup
from testcases import TestCaseWithFixture
class M2MResourcesTestCase(TestCaseWithFixture):
def test_same_object_added(self):
"""
From Issue #1035
"""
user = User.objects.create(username='gjcourt')
ur = UserResource()
fr = ForumResource()
resp = self.client.post(fr.get_resource_uri(), content_type='application/json', data=json.dumps({
'name': 'Test Forum',
'members': [ur.get_resource_uri(user)],
'moderators': [ur.get_resource_uri(user)],
}))
self.assertEqual(resp.status_code, 201, resp.content)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(len(data['moderators']), 1)
self.assertEqual(len(data['members']), 1)
class RelatedResourceTest(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def setUp(self):
super(RelatedResourceTest, self).setUp()
self.user = User.objects.create(username="testy_mctesterson")
def test_cannot_access_user_resource(self):
resource = api.canonical_resource_for('users')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body('{"username": "foobar"}')
resp = resource.wrap_view('dispatch_detail')(request, pk=self.user.pk)
self.assertEqual(resp.status_code, 405)
self.assertEqual(User.objects.get(id=self.user.id).username, self.user.username)
def test_related_resource_authorization(self):
resource = api.canonical_resource_for('notes')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body('{"content": "The cat is back. The dog coughed him up out back.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back", "title": "The Cat Is Back", "updated": "2010-04-03 20:05:00", "author": null}')
resp = resource.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(User.objects.get(id=self.user.id).username, 'testy_mctesterson')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body('{"content": "The cat is back. The dog coughed him up out back.", "created": "2010-04-03 20:05:00", "is_active": true, "slug": "cat-is-back-2", "title": "The Cat Is Back", "updated": "2010-04-03 20:05:00", "author": {"id": %s, "username": "foobar"}}' % self.user.id)
resp = resource.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(User.objects.get(id=self.user.id).username, 'foobar')
def test_ok_not_null_field_included(self):
"""
Posting a new detail that includes its required (not null)
related object should succeed.
"""
company = Company.objects.create()
resource = api.canonical_resource_for('product')
request = MockRequest()
body = json.dumps({
'producer': {'pk': company.pk},
})
request.set_body(body)
resp = resource.post_list(request)
self.assertEqual(resp.status_code, 201)
def test_apifielderror_missing_not_null_field(self):
"""
Posting a new detail that omits a required (not null)
related field should raise an ApiFieldError.
"""
resource = api.canonical_resource_for('product')
request = MockRequest()
body = json.dumps({})
request.set_body(body)
with self.assertRaises(ApiFieldError):
resource.post_list(request)
class CategoryResourceTest(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def setUp(self):
super(CategoryResourceTest, self).setUp()
self.parent_cat_1 = Category.objects.create(parent=None, name='Dad')
self.parent_cat_2 = Category.objects.create(parent=None, name='Mom')
self.child_cat_1 = Category.objects.create(parent=self.parent_cat_1, name='Son')
self.child_cat_2 = Category.objects.create(parent=self.parent_cat_2, name='Daughter')
def test_correct_relation(self):
resource = api.canonical_resource_for('category')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
resp = resource.wrap_view('dispatch_detail')(request, pk=self.parent_cat_1.pk)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(data['parent'], None)
self.assertEqual(data['name'], 'Dad')
# Now try a child.
resp = resource.wrap_view('dispatch_detail')(request, pk=self.child_cat_2.pk)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(data['parent'], '/v1/category/2/')
self.assertEqual(data['name'], 'Daughter')
def test_put_null(self):
resource = api.canonical_resource_for('category')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body('{"parent": null, "name": "Son"}')
# Before the PUT, there should be a parent.
self.assertEqual(Category.objects.get(pk=self.child_cat_1.pk).parent.pk, self.parent_cat_1.pk)
# After the PUT, the parent should be ``None``.
resp = resource.put_detail(request, pk=self.child_cat_1.pk)
self.assertEqual(resp.status_code, 204)
self.assertEqual(Category.objects.get(pk=self.child_cat_1.pk).name, 'Son')
self.assertEqual(Category.objects.get(pk=self.child_cat_1.pk).parent, None)
class ExplicitM2MResourceRegressionTest(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def setUp(self):
super(ExplicitM2MResourceRegressionTest, self).setUp()
self.tag_1 = Tag.objects.create(name='important')
self.taggable_1 = Taggable.objects.create(name='exam')
# Create relations between tags and taggables through the explicit m2m table
self.taggabletag_1 = TaggableTag.objects.create(tag=self.tag_1, taggable=self.taggable_1)
# Give each tag some extra data (the lookup of this data is what makes the test fail)
self.extradata_1 = ExtraData.objects.create(tag=self.tag_1, name='additional')
def test_correct_setup(self):
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'GET'
# Verify the explicit 'through' relationships has been created correctly
resource = api.canonical_resource_for('taggabletag')
resp = resource.wrap_view('dispatch_detail')(request, pk=self.taggabletag_1.pk)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(data['tag'], '/v1/tag/1/')
self.assertEqual(data['taggable'], '/v1/taggable/1/')
resource = api.canonical_resource_for('taggable')
resp = resource.wrap_view('dispatch_detail')(request, pk=self.taggable_1.pk)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(data['name'], 'exam')
resource = api.canonical_resource_for('tag')
request.path = "/v1/tag/%(pk)s/" % {'pk': self.tag_1.pk}
resp = resource.wrap_view('dispatch_detail')(request, pk=self.tag_1.pk)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(resp.status_code, 200)
self.assertEqual(data['name'], 'important')
# and check whether the extradata is present
self.assertEqual(data['extradata']['name'], u'additional')
def test_post_new_tag(self):
resource = api.canonical_resource_for('tag')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body('{"name": "school", "taggabletags": [ ]}')
# Prior to the addition of ``blank=True``, this would
# fail badly.
resp = resource.wrap_view('dispatch_list')(request)
self.assertEqual(resp.status_code, 201)
# GET the created object (through its headers.location)
self.assertTrue(resp.has_header('location'))
location = resp['Location']
resp = self.client.get(location, data={'format': 'json'})
self.assertEqual(resp.status_code, 200)
deserialized = json.loads(resp.content.decode('utf-8'))
self.assertEqual(len(deserialized), 5)
self.assertEqual(deserialized['name'], 'school')
class OneToManySetupTestCase(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def test_one_to_many(self):
# Sanity checks.
self.assertEqual(Note.objects.count(), 2)
self.assertEqual(MediaBit.objects.count(), 0)
fnr = FreshNoteResource()
data = {
'title': 'Create with related URIs',
'slug': 'create-with-related-uris',
'content': 'Some content here',
'is_active': True,
'media_bits': [
{
'title': 'Picture #1'
}
]
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = fnr.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Note.objects.count(), 3)
note = Note.objects.latest('created')
self.assertEqual(note.media_bits.count(), 1)
self.assertEqual(note.media_bits.all()[0].title, u'Picture #1')
class FullCategoryResource(CategoryResource):
parent = fields.ToOneField('self', 'parent', null=True, full=True)
class RelationshipOppositeFromModelTestCase(TestCaseWithFixture):
"""
On the model, the Job relationship is defined on the Payment.
On the resource, the PaymentResource is defined on the JobResource as well
"""
def setUp(self):
super(RelationshipOppositeFromModelTestCase, self).setUp()
# a job with a payment exists to start with
self.some_time_str = datetime.now().strftime('%Y-%m-%d %H:%M')
job = Job.objects.create(name='SomeJob')
Payment.objects.create(job=job, scheduled=self.some_time_str)
def test_create_similar(self):
# We submit a job with the related payment included.
# Note that the payment related resource is defined on the JobResource.
# On the model, the Job class does not have a payment field,
# but it has a reverse relationship defined by the Payment class.
resource = JobResource()
data = {
'name': 'OtherJob',
'payment': {
'scheduled': self.some_time_str
}
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = resource.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Job.objects.count(), 2)
self.assertEqual(Payment.objects.count(), 2)
new_job = Job.objects.all().order_by('-id')[0]
new_payment = Payment.objects.all().order_by('-id')[0]
self.assertEqual(new_job.name, 'OtherJob')
self.assertEqual(new_job, new_payment.job)
class RelatedPatchTestCase(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def test_patch_to_one(self):
resource = FullCategoryResource()
cat1 = Category.objects.create(name='Dad')
cat2 = Category.objects.create(parent=cat1, name='Child')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PATCH'
request.path = "/v1/category/%(pk)s/" % {'pk': cat2.pk}
data = {
'name': 'Kid'
}
request.set_body(json.dumps(data))
self.assertEqual(cat2.name, 'Child')
resp = resource.patch_detail(request, pk=cat2.pk)
self.assertEqual(resp.status_code, 202)
cat2 = Category.objects.get(pk=2)
self.assertEqual(cat2.name, 'Kid')
def test_patch_detail_with_missing_related_fields(self):
"""
When fields are excluded the value of the field should not be set to a
default value if updated by tastypie.
"""
resource = NoteWithUpdatableUserResource()
note = Note.objects.create(author_id=1)
user = User.objects.get(pk=1)
self.assertEqual(user.password, 'this_is_not_a_valid_password_string')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PATCH'
request.path = "/v1/noteswithupdatableuser/%(pk)s/" % {'pk': note.pk}
data = {
'author': {
'id': 1,
'username': 'johndoe',
'email': 'john@doetown.com',
}
}
request.set_body(json.dumps(data))
resp = resource.patch_detail(request, pk=note.pk)
self.assertEqual(resp.status_code, 202)
user2 = User.objects.get(pk=1)
self.assertEqual(user2.email, 'john@doetown.com')
self.assertEqual(user2.password, 'this_is_not_a_valid_password_string')
def test_patch_detail_dont_update_related_without_permission(self):
"""
When fields are excluded the value of the field should not be set to a
default value if updated by tastypie.
"""
resource = NoteResource()
note = Note.objects.create(author_id=1)
user = User.objects.get(pk=1)
self.assertEqual(user.password, 'this_is_not_a_valid_password_string')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PATCH'
request.path = "/v1/note/%(pk)s/" % {'pk': note.pk}
data = {
'author': {
'id': 1,
'username': 'johndoe',
'email': 'john@doetown.com',
}
}
request.set_body(json.dumps(data))
resp = resource.patch_detail(request, pk=note.pk)
self.assertEqual(resp.status_code, 202)
user2 = User.objects.get(pk=1)
self.assertEqual(user2.email, 'john@doetown.com')
self.assertEqual(user2.password, 'this_is_not_a_valid_password_string')
class NestedRelatedResourceTest(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def test_one_to_one(self):
"""
Test a related ToOne resource with a nested full ToOne resource
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Company.objects.count(), 0)
self.assertEqual(Address.objects.count(), 0)
pr = PersonResource()
data = {
'name': 'Joan Rivers',
'company': {
'name': 'Yum Yum Pie Factory!',
'address': {
'line': 'Somewhere, Utah'
}
}
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = pr.post_list(request)
self.assertEqual(resp.status_code, 201)
pk = Person.objects.all()[0].pk
request = MockRequest()
request.method = 'GET'
request.path = reverse('api_dispatch_detail', kwargs={
'pk': pk,
'resource_name': pr._meta.resource_name,
'api_name': pr._meta.api_name
})
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
person = json.loads(resp.content.decode('utf-8'))
self.assertEqual(person['name'], 'Joan Rivers')
company = person['company']
self.assertEqual(company['name'], 'Yum Yum Pie Factory!')
address = company['address']
self.assertEqual(address['line'], 'Somewhere, Utah')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.path = reverse('api_dispatch_detail', kwargs={
'pk': pk,
'resource_name': pr._meta.resource_name,
'api_name': pr._meta.api_name
})
request.set_body(resp.content.decode('utf-8'))
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
def test_one_to_many(self):
"""
Test a related ToOne resource with a nested full ToMany resource
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Company.objects.count(), 0)
self.assertEqual(Product.objects.count(), 0)
pr = PersonResource()
data = {
'name': 'Joan Rivers',
'company': {
'name': 'Yum Yum Pie Factory!',
'products': [
{
'name': 'Tasty Pie'
}
]
}
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = pr.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Person.objects.count(), 1)
self.assertEqual(Company.objects.count(), 1)
self.assertEqual(Product.objects.count(), 1)
pk = Person.objects.all()[0].pk
request = MockRequest()
request.method = 'GET'
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
person = json.loads(resp.content.decode('utf-8'))
self.assertEqual(person['name'], 'Joan Rivers')
company = person['company']
self.assertEqual(company['name'], 'Yum Yum Pie Factory!')
self.assertEqual(len(company['products']), 1)
product = company['products'][0]
self.assertEqual(product['name'], 'Tasty Pie')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body(json.dumps(person))
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
def test_many_to_one(self):
"""
Test a related ToMany resource with a nested full ToOne resource
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Dog.objects.count(), 0)
self.assertEqual(DogHouse.objects.count(), 0)
pr = PersonResource()
data = {
'name': 'Joan Rivers',
'dogs': [
{
'name': 'Snoopy',
'house': {
'color': 'Red'
}
}
]
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = pr.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Person.objects.count(), 1)
self.assertEqual(Dog.objects.count(), 1)
self.assertEqual(DogHouse.objects.count(), 1)
pk = Person.objects.all()[0].pk
request = MockRequest()
request.method = 'GET'
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
person = json.loads(resp.content.decode('utf-8'))
self.assertEqual(person['name'], 'Joan Rivers')
self.assertEqual(len(person['dogs']), 1)
dog = person['dogs'][0]
self.assertEqual(dog['name'], 'Snoopy')
house = dog['house']
self.assertEqual(house['color'], 'Red')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body(json.dumps(person))
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
def test_many_to_one_extra_data_ignored(self):
"""
Test a related ToMany resource with a nested full ToOne resource
FieldError would result when extra data is included on an embedded
resource for an already saved object.
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Dog.objects.count(), 0)
self.assertEqual(DogHouse.objects.count(), 0)
pr = PersonResource()
data = {
'name': 'Joan Rivers',
'dogs': [
{
'name': 'Snoopy',
'house': {
'color': 'Red'
}
}
]
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
resp = pr.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Person.objects.count(), 1)
self.assertEqual(Dog.objects.count(), 1)
self.assertEqual(DogHouse.objects.count(), 1)
pk = Person.objects.all()[0].pk
request = MockRequest()
request.method = 'GET'
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
person = json.loads(resp.content.decode('utf-8'))
self.assertEqual(person['name'], 'Joan Rivers')
self.assertEqual(len(person['dogs']), 1)
dog = person['dogs'][0]
self.assertEqual(dog['name'], 'Snoopy')
house = dog['house']
self.assertEqual(house['color'], 'Red')
# Clients may include extra data, which should be ignored. Make sure extra data is ignored on the resource and sub-resources.
person['thisfieldshouldbeignored'] = 'foobar'
person['dogs'][0]['thisfieldshouldbeignored'] = 'foobar'
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body(json.dumps(person))
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
def test_many_to_many(self):
"""
Test a related ToMany resource with a nested full ToMany resource
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Dog.objects.count(), 0)
self.assertEqual(Bone.objects.count(), 0)
pr = PersonResource()
data = {
'name': 'Joan Rivers',
'dogs': [
{
'name': 'Snoopy',
'bones': [
{
'color': 'white'
}
]
}
]
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.path = reverse('api_dispatch_list', kwargs={'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
request.set_body(json.dumps(data))
resp = pr.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Person.objects.count(), 1)
self.assertEqual(Dog.objects.count(), 1)
self.assertEqual(Bone.objects.count(), 1)
pk = Person.objects.all()[0].pk
request = MockRequest()
request.method = 'GET'
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
person = json.loads(resp.content.decode('utf-8'))
self.assertEqual(person['name'], 'Joan Rivers')
self.assertEqual(len(person['dogs']), 1)
dog = person['dogs'][0]
self.assertEqual(dog['name'], 'Snoopy')
self.assertEqual(len(dog['bones']), 1)
bone = dog['bones'][0]
self.assertEqual(bone['color'], 'white')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request.set_body(json.dumps(person))
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
def test_many_to_many_change_nested(self):
"""
Test a related ToMany resource with a nested full ToMany resource
"""
self.assertEqual(Person.objects.count(), 0)
self.assertEqual(Dog.objects.count(), 0)
self.assertEqual(Bone.objects.count(), 0)
pr = PersonResource()
person = Person.objects.create(name='Joan Rivers')
dog = person.dogs.create(name='Snoopy')
bone = dog.bones.create(color='white')
pk = person.pk
request = MockRequest()
request.method = 'GET'
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.get_detail(request, pk=pk)
self.assertEqual(resp.status_code, 200)
data = json.loads(resp.content.decode('utf-8'))
self.assertEqual(data['dogs'][0]['bones'][0]['color'], 'white')
# Change just a nested resource via PUT
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
data['dogs'][0]['bones'][0]['color'] = 'gray'
body = json.dumps(data)
request.set_body(body)
request.path = reverse('api_dispatch_detail', kwargs={'pk': pk, 'resource_name': pr._meta.resource_name, 'api_name': pr._meta.api_name})
resp = pr.put_detail(request, pk=pk)
self.assertEqual(resp.status_code, 204)
self.assertEqual(Bone.objects.count(), 1)
bone = Bone.objects.all()[0]
self.assertEqual(bone.color, 'gray')
class RelatedSaveCallsTest(TestCaseWithFixture):
urls = 'related_resource.api.urls'
def test_one_query_for_post_list(self):
"""
Posting a new detail with no related objects
should require one query to save the object
"""
resource = api.canonical_resource_for('category')
request = MockRequest()
body = json.dumps({
'name': 'Foo',
'parent': None
})
request.set_body(body)
with self.assertNumQueries(1):
resource.post_list(request)
def test_two_queries_for_post_list(self):
"""
Posting a new detail with one related object, referenced via its
``resource_uri`` should require two queries: one to save the
object, and one to lookup the related object.
"""
parent = Category.objects.create(name='Bar')
resource = api.canonical_resource_for('category')
request = MockRequest()
body = json.dumps({
'name': 'Foo',
'parent': resource.get_resource_uri(parent)
})
request.set_body(body)
with self.assertNumQueries(2):
resource.post_list(request)
def test_no_save_m2m_unchanged(self):
"""
Posting a new detail with a related m2m object shouldn't
save the m2m object unless the m2m object is provided inline.
"""
def _save_fails_test(sender, **kwargs):
self.fail("Should not have saved Label")
pre_save.connect(_save_fails_test, sender=Label)
l1 = Label.objects.get(name='coffee')
resource = api.canonical_resource_for('post')
label_resource = api.canonical_resource_for('label')
request = MockRequest()
body = json.dumps({
'name': 'test post',
'label': [label_resource.get_resource_uri(l1)],
})
request.set_body(body)
resource.post_list(request) # _save_fails_test will explode if Label is saved
def test_save_m2m_changed(self):
"""
Posting a new or updated detail object with a related m2m object
should save the m2m object if it's included inline.
"""
resource = api.canonical_resource_for('tag')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
body_dict = {
'name': 'school',
'taggabletags': [{'extra': 7}]
}
request.set_body(json.dumps(body_dict))
with self.assertNumQueries(4):
resp = resource.wrap_view('dispatch_list')(request)
self.assertEqual(resp.status_code, 201)
# 'extra' should have been set
tag = Tag.objects.all()[0]
taggable_tag = tag.taggabletags.all()[0]
self.assertEqual(taggable_tag.extra, 7)
body_dict['taggabletags'] = [{'extra': 1234}]
request.set_body(json.dumps(body_dict))
request.path = reverse('api_dispatch_detail', kwargs={
'pk': tag.pk,
'resource_name': resource._meta.resource_name,
'api_name': resource._meta.api_name
})
with self.assertNumQueries(5):
resource.put_detail(request)
# 'extra' should have changed
tag = Tag.objects.all()[0]
taggable_tag = tag.taggabletags.all()[0]
self.assertEqual(taggable_tag.extra, 1234)
def test_no_save_m2m_unchanged_existing_data_persists(self):
"""
Data should persist when posting an updated detail object with
unchanged reverse related objects.
"""
person = Person.objects.create(name='Ryan')
dog = Dog.objects.create(name='Wilfred', owner=person)
bone1 = Bone.objects.create(color='White', dog=dog)
bone2 = Bone.objects.create(color='Grey', dog=dog)
self.assertEqual(dog.bones.count(), 2)
resource = api.canonical_resource_for('dog')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request._load_post_and_files = lambda *args, **kwargs: None
body_dict = {
'id': dog.id,
'name': 'Wilfred',
'bones': [
{'id': bone1.id, 'color': bone1.color},
{'id': bone2.id, 'color': bone2.color}
]
}
request.set_body(json.dumps(body_dict))
with self.assertNumQueries(13 if django.VERSION >= (1, 9) else 14):
resp = resource.wrap_view('dispatch_detail')(request, pk=dog.pk)
self.assertEqual(resp.status_code, 204)
dog = Dog.objects.all()[0]
dog_bones = dog.bones.all()
self.assertEqual(len(dog_bones), 2)
self.assertEqual(dog_bones[0], bone1)
self.assertEqual(dog_bones[1], bone2)
def test_no_save_m2m_related(self):
"""
When saving an object with a M2M field, don't save that related object's related objects.
"""
cg1 = ContactGroup.objects.create(name='The Inebriati')
cg2 = ContactGroup.objects.create(name='The Stone Cutters')
c1 = Contact.objects.create(name='foo')
c2 = Contact.objects.create(name='bar')
c2.groups.add(cg1, cg2)
c3 = Contact.objects.create(name='baz')
c3.groups.add(cg1)
self.assertEqual(list(c1.groups.all()), [])
self.assertEqual(list(c2.groups.all()), [cg1, cg2])
self.assertEqual(list(c3.groups.all()), [cg1])
data = {
'name': c1.name,
'groups': [reverse('api_dispatch_detail', kwargs={'api_name': 'v1', 'resource_name': 'contactgroup', 'pk': cg1.pk})],
}
resource = api.canonical_resource_for('contact')
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'PUT'
request._load_post_and_files = lambda *args, **kwargs: None
request.set_body(json.dumps(data))
with self.assertNumQueries(8):
response = resource.wrap_view('dispatch_detail')(request, pk=c1.pk)
self.assertEqual(response.status_code, 204, response.content)
new_contacts = Contact.objects.all()
new_c1 = new_contacts[0]
new_c2 = new_contacts[1]
new_c3 = new_contacts[2]
self.assertEqual(new_c1.name, c1.name)
self.assertEqual(new_c1.id, c1.id)
self.assertEqual(list(new_c1.groups.all()), [cg1])
self.assertEqual(new_c2.id, c2.id)
self.assertEqual(list(new_c2.groups.all()), [cg1, cg2])
self.assertEqual(new_c3.id, c3.id)
self.assertEqual(list(new_c3.groups.all()), [cg1])
new_cg1 = ContactGroup.objects.get(id=cg1.id)
new_cg2 = ContactGroup.objects.get(id=cg2.id)
self.assertEqual(list(new_cg1.members.all()), [new_c1, new_c2, new_c3])
self.assertEqual(list(new_cg2.members.all()), [new_c2])
class CorrectUriRelationsTestCase(TestCaseWithFixture):
"""
Validate that incorrect URIs (with PKs that line up to valid data) are not
accepted.
"""
urls = 'related_resource.api.urls'
def test_incorrect_uri(self):
self.assertEqual(Note.objects.count(), 2)
nr = NoteResource()
# For this test, we need a ``User`` with the same PK as a ``Note``.
note_1 = Note.objects.latest('created')
User.objects.create(
id=note_1.pk,
username='valid',
email='valid@exmaple.com',
password='junk'
)
data = {
# This URI is flat-out wrong (wrong resource).
# This should cause the request to fail.
'author': '/v1/notes/{0}/'.format(
note_1.pk
),
'title': 'Nopenopenope',
'slug': 'invalid-request',
'content': "This shouldn't work.",
'is_active': True,
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.set_body(json.dumps(data))
with self.assertRaises(NotFound) as cm:
nr.post_list(request)
self.assertEqual(str(cm.exception), "An incorrect URL was provided '/v1/notes/2/' for the 'UserResource' resource.")
self.assertEqual(Note.objects.count(), 2)
class PrefetchRelatedTests(TestCase):
def setUp(self):
self.forum = Forum.objects.create()
self.resource = api.canonical_resource_for('forum')
self.user_data = [
{
'username': 'valid but unique',
'email': 'valid.unique@exmaple.com',
'password': 'junk',
},
{
'username': 'valid and very unique',
'email': 'valid.very.unique@exmaple.com',
'password': 'junk',
},
{
'username': 'valid again',
'email': 'valid.very.unique@exmaple.com',
'password': 'junk',
},
]
def tearDown(self):
usernames = [data['username'] for data in self.user_data]
User.objects.filter(username__in=usernames).delete()
self.forum.delete()
def make_request(self, method):
request = MockRequest()
request.GET = {'format': 'json'}
request.method = method
request.set_body(json.dumps({
'members': [
self.user_data[0],
self.user_data[1],
],
'moderators': [self.user_data[2]],
}))
request.path = reverse('api_dispatch_detail', kwargs={
'pk': self.forum.pk,
'resource_name': self.resource._meta.resource_name,
'api_name': self.resource._meta.api_name
})
return request
def test_m2m_put(self):
request = self.make_request('PUT')
response = self.resource.put_detail(request)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content.decode('utf-8'))
# Check that the query does what it's supposed to
# and only the return value is wrong
self.assertEqual(User.objects.count(), 3)
self.assertEqual(len(data['members']), 2)
self.assertEqual(len(data['moderators']), 1)
def test_m2m_patch(self):
request = self.make_request('PATCH')
response = self.resource.patch_detail(request)
self.assertEqual(response.status_code, 202)
data = json.loads(response.content.decode('utf-8'))
# Check that the query does what it's supposed to
# and only the return value is wrong
self.assertEqual(User.objects.count(), 3)
self.assertEqual(len(data['members']), 2)
self.assertEqual(len(data['moderators']), 1)
class ModelWithReverseItemsRelationshipTest(TestCase):
def test_reverse_items_relationship(self):
order_resource = OrderResource()
data = {
'name': 'order1',
'items': [
{
'name': 'car',
},
{
'name': 'yacht',
}
]
}
request = MockRequest()
request.GET = {'format': 'json'}
request.method = 'POST'
request.path = reverse('api_dispatch_list',
kwargs={'resource_name': order_resource._meta.resource_name,
'api_name': order_resource._meta.api_name})
request.set_body(json.dumps(data))
resp = order_resource.post_list(request)
self.assertEqual(resp.status_code, 201)
self.assertEqual(Order.objects.count(), 1)
self.assertEqual(OrderItem.objects.count(), 2)
class OneToOneTestCase(TestCase):
def test_reverse_one_to_one_post(self):
ed = ExtraData.objects.create(name='ed_name')
resource = TagResource()
# Post the extradata element which is attached to a "reverse" OneToOne
request = MockRequest()
request.method = "POST"
request.body = json.dumps({
"name": "tag_name",
"tagged": [],
"extradata": "/v1/extradata/%s/" % ed.pk
})
resp = resource.post_list(request)
# Assert that the status code is CREATED
self.assertEqual(resp.status_code, 201)
tag = Tag.objects.get(pk=int(resp['Location'].split("/")[-2]))
self.assertEqual(tag.extradata, ed)
|
|
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
''' IMPORTS '''
import requests
from requests.auth import HTTPBasicAuth
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
''' GLOBALS/PARAMS '''
USERNAME = demisto.params()['credentials']['identifier']
PASSWORD = demisto.params()['credentials']['password']
LIMIT = int(demisto.params().get('limit'))
USE_SSL = not demisto.params().get('unsecure', False)
# Service base URL
BASE_URL = "https://takedown.netcraft.com/"
# codes for malicious site report
MALICIOUS_REPORT_SUCCESS = "TD_OK"
MALICIOUS_REPORT_ALREADY_EXISTS = "TD_EXISTS"
MALICIOUS_REPORT_URL_IS_WILDCARD = "TD_WILDCARD"
MALICIOUS_REPORT_ACCESS_DENIED = "TD_DENIED"
MALICIOUS_REPORT_ERROR = "TD_ERROR"
# suffix endpoints
REPORT_MALICIOUS_SUFFIX = "authorise.php"
GET_TAKEDOWN_INFO_SUFFIX = "apis/get-info.php"
ACCESS_TAKEDOWN_NOTES_SUFFIX = "apis/note.php"
ESCALATE_TAKEDOWN_SUFFIX = "apis/escalate.php"
TEST_MODULE_SUFFIX = "authorise-test.php"
# Table Headers
TAKEDOWN_INFO_HEADER = ["ID", "Status", "Attack Type", "Date Submitted", "Last Updated", "Reporter", "Group ID",
"Region", "Evidence URL", "Attack URL", "IP", "Domain", "Hostname", "Country Code",
"Domain Attack", "Targeted URL", "Certificate"]
TAKEDOWN_NOTE_HEADERS = ["Takedown ID", "Note ID", "Note", "Author", "Time", "Group ID"]
# Titles for human readables
TAKEDOWN_INFO_TITLE = "Takedowns information found:"
REPORT_MALICIOUS_SUCCESS_TITLE = "New takedown successfully created"
''' HELPER FUNCTIONS '''
@logger
def http_request(method, request_suffix, params=None, data=None, should_convert_to_json=True):
# A wrapper around the requests lib that sends our requests and handles responses consistently.
# The Netcraft API takes arguments as query params for GET requests and as form data for POST requests.
res = requests.request(
method,
BASE_URL + request_suffix,
verify=USE_SSL,
params=params,
data=data,
auth=HTTPBasicAuth(USERNAME, PASSWORD)
)
if should_convert_to_json:
return res.json()
else:
return res.text.splitlines()
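# Illustrative calls (the query/form field names here are assumptions, not
# taken from the Netcraft API): arguments travel as params for GET and as
# data for POST, e.g.
#   http_request('GET', GET_TAKEDOWN_INFO_SUFFIX, params={'id': takedown_id})
#   http_request('POST', ESCALATE_TAKEDOWN_SUFFIX, data={'takedown_id': takedown_id},
#                should_convert_to_json=False)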
@logger
def filter_by_id(result_list_to_filter, filtering_id_field, desired_id):
""" Given a list of results, returns only the ones that are tied to a given ID.
Args:
result_list_to_filter (list): list of dictionaries, containing data about entries.
filtering_id_field: The name of the field containing the IDs to filter.
desired_id: The ID to keep when filtering.
Returns:
list: A copy of the input list, containing only entries with the desired ID.
"""
new_results_list = [result for result in result_list_to_filter if result[filtering_id_field] == desired_id]
return new_results_list
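# For example, filter_by_id([{'id': 1}, {'id': 2}, {'id': 1}], 'id', 1)
# returns [{'id': 1}, {'id': 1}].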
@logger
def generate_report_malicious_site_human_readable(response_lines_array):
response_status_code = response_lines_array[0]
human_readable = ""
if response_status_code == MALICIOUS_REPORT_ALREADY_EXISTS:
human_readable = "### Takedown not submitted.\n " \
"A takedown for this URL already exists.\n" \
"ID number of the existing takedown: {}.".format(response_lines_array[1])
elif response_status_code == MALICIOUS_REPORT_URL_IS_WILDCARD:
human_readable = "### Takedown not submitted\n " \
"This URL is a wildcard sub-domain variation of an existing takedown.\n"
elif response_status_code == MALICIOUS_REPORT_ACCESS_DENIED:
human_readable = "### Takedown not submitted\n Access is denied."
elif response_status_code == MALICIOUS_REPORT_ERROR:
human_readable = "### Takedown not submitted\n " \
"An error has occurred while submitting your takedown.\n" \
"Error is: {}".format(" ".join(response_lines_array))
return human_readable
@logger
def return_dict_without_none_values(dict_with_none_values):
""" Removes all keys from given dict which have None as a value.
Args:
dict_with_none_values (dict): dict which may include keys with None as their value.
Returns:
dict: A new copy of the input dictionary, from which all keys with None as a value were removed.
"""
new_dict = {key: dict_with_none_values[key] for key in dict_with_none_values if
dict_with_none_values[key] is not None}
return new_dict
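# Example (hypothetical data): {"takedown_id": 1, "note": None, "notify": True}
# becomes {"takedown_id": 1, "notify": True}.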
@logger
def generate_takedown_info_context(takedown_info):
takedown_info_context = {
"ID": takedown_info.get("id"),
"GroupID": takedown_info.get("group_id"),
"Status": takedown_info.get("status"),
"AttackType": takedown_info.get("attack_type"),
"AttackURL": takedown_info.get("attack_url"),
"Region": takedown_info.get("region"),
"DateSubmitted": takedown_info.get("date_submitted"),
"LastUpdated": takedown_info.get("last_updated"),
"EvidenceURL": takedown_info.get("evidence_url"),
"Reporter": takedown_info.get("reporter"),
"IP": takedown_info.get("ip"),
"Domain": takedown_info.get("domain"),
"Hostname": takedown_info.get("hostname"),
"CountryCode": takedown_info.get("country_code"),
"DomainAttack": takedown_info.get("domain_attack"),
"TargetedURL": takedown_info.get("targeted_url"),
"Certificate": takedown_info.get("certificate")
}
return createContext(takedown_info_context, removeNull=True)
@logger
def gen_takedown_info_human_readable(list_of_takedowns_contexts, title=TAKEDOWN_INFO_TITLE):
contexts_in_human_readable_format = []
for takedown_info_context in list_of_takedowns_contexts:
human_readable_dict = {
"ID": takedown_info_context.get("ID"),
"Status": takedown_info_context.get("Status"),
"Attack Type": takedown_info_context.get("AttackType"),
"Date Submitted": takedown_info_context.get("DateSubmitted"),
"Last Updated": takedown_info_context.get("LastUpdated"),
"Reporter": takedown_info_context.get("Reporter"),
"Group ID": takedown_info_context.get("GroupID"),
"Region": takedown_info_context.get("Region"),
"Evidence URL": takedown_info_context.get("EvidenceURL"),
"Attack URL": takedown_info_context.get("AttackURL"),
"IP": takedown_info_context.get("IP"),
"Domain": takedown_info_context.get("Domain"),
"Hostname": takedown_info_context.get("Hostname"),
"Country Code": takedown_info_context.get("CountryCode"),
"Domain Attack": takedown_info_context.get("DomainAttack"),
"Targeted URL": takedown_info_context.get("TargetedURL"),
"Certificate": takedown_info_context.get("Certificate")
}
contexts_in_human_readable_format.append(human_readable_dict)
human_readable = tableToMarkdown(title, contexts_in_human_readable_format,
headers=TAKEDOWN_INFO_HEADER, removeNull=True)
return human_readable
@logger
def generate_list_of_takedowns_context(list_of_takedowns_infos):
takedowns_contexts_list = []
for takedown_info in list_of_takedowns_infos:
takedown_context = generate_takedown_info_context(takedown_info)
takedowns_contexts_list.append(takedown_context)
return takedowns_contexts_list
@logger
def generate_takedown_note_context(takedown_note_json):
takedown_note_context = {
"TakedownID": takedown_note_json.get("takedown_id"),
"NoteID": takedown_note_json.get("note_id"),
"GroupID": takedown_note_json.get("group_id"),
"Author": takedown_note_json.get("author"),
"Note": takedown_note_json.get("note"),
"Time": takedown_note_json.get("time")
}
takedown_note_context = return_dict_without_none_values(takedown_note_context)
return takedown_note_context
@logger
def generate_list_of_takedown_notes_contexts(list_of_takedowns_notes):
takedown_notes_contexts_list = []
for takedown_note in list_of_takedowns_notes:
takedown_note_context = generate_takedown_note_context(takedown_note)
takedown_notes_contexts_list.append(takedown_note_context)
return takedown_notes_contexts_list
@logger
def gen_takedown_notes_human_readable(entry_context):
contexts_in_human_readable_format = []
for takedown_note_context in entry_context:
human_readable_dict = {
"Takedown ID": takedown_note_context.get("TakedownID"),
"Note ID": takedown_note_context.get("NoteID"),
"Group ID": takedown_note_context.get("GroupID"),
"Author": takedown_note_context.get("Author"),
"Note": takedown_note_context.get("Note"),
"Time": takedown_note_context.get("Time")
}
human_readable_dict = return_dict_without_none_values(human_readable_dict)
contexts_in_human_readable_format.append(human_readable_dict)
human_readable = tableToMarkdown(TAKEDOWN_INFO_TITLE, contexts_in_human_readable_format,
headers=TAKEDOWN_NOTE_HEADERS)
return human_readable
@logger
def generate_add_note_human_readable(response):
# if the request was successful, the response includes the id of the created note
if "note_id" in response:
human_readable = "### Note added succesfully\n" \
"ID of the note created: {0}".format(response["note_id"])
else:
human_readable = "### Failed to add note\n" \
"An error occured while trying to add the note.\n" \
"The error code is: {0}.\n" \
"The error message is: {1}.".format(response["error_code"], response["error_code"])
return human_readable
@logger
def string_to_bool(string_representing_bool):
return string_representing_bool.lower() == "true"
@logger
def generate_escalate_takedown_human_readable(response):
if "status" in response:
human_readable = "### Takedown escalated successfully"
else:
human_readable = "### Takedown escalation failed\n" \
"An error occured on the takedown escalation attempt.\n" \
"Error code is: {0}\n" \
"Error message from Netcraft is: {1}".format(response["error_code"], response["error_message"])
return human_readable
def add_or_update_note_context_in_takedown(note_context, cur_notes_in_takedown):
if isinstance(cur_notes_in_takedown, dict):
return [note_context]
else:
note_already_in_context = False
for i, cur_note_context in enumerate(cur_notes_in_takedown):
if cur_note_context["NoteID"] == note_context["NoteID"]:
note_already_in_context = True
cur_notes_in_takedown[i] = note_context
if not note_already_in_context:
cur_notes_in_takedown.append(note_context)
return cur_notes_in_takedown
def add_note_to_suitable_takedown_in_context(note_context, all_takedowns_entry_context):
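# Merges a note context into the existing Netcraft.Takedown entry context. demisto.context() may
# return a single takedown as a dict or several as a list, so both shapes are handled below; if no
# takedown with a matching ID exists yet, a new minimal entry is created to hold the note.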
note_takedown_index = -1
if isinstance(all_takedowns_entry_context, dict):
new_takedown_entry_context = {
"ID": note_context["TakedownID"],
"Note": [note_context]
}
all_takedowns_entry_context = [all_takedowns_entry_context, new_takedown_entry_context] \
if all_takedowns_entry_context else [new_takedown_entry_context]
else:
for i in range(len(all_takedowns_entry_context)):
cur_takedown_context = all_takedowns_entry_context[i]
if cur_takedown_context["ID"] == note_context["TakedownID"]:
note_takedown_index = i
if note_takedown_index == -1:
new_takedown_entry_context = {
"ID": note_context["TakedownID"],
"Note": [note_context]
}
all_takedowns_entry_context.append(new_takedown_entry_context)
else:
takedown_context_to_change = all_takedowns_entry_context[note_takedown_index]
cur_notes_in_takedown = takedown_context_to_change["Note"]
takedown_context_to_change["Note"] = add_or_update_note_context_in_takedown(note_context,
cur_notes_in_takedown)
all_takedowns_entry_context[note_takedown_index] = takedown_context_to_change
return all_takedowns_entry_context
def generate_netcraft_context_with_notes(list_of_notes_contexts):
all_takedowns_entry_context = demisto.context().get("Netcraft", {}).get("Takedown", {})
for note_context in list_of_notes_contexts:
all_takedowns_entry_context = add_note_to_suitable_takedown_in_context(note_context,
all_takedowns_entry_context)
return all_takedowns_entry_context
''' COMMANDS + REQUESTS FUNCTIONS '''
@logger
def escalate_takedown(takedown_id):
data_for_request = {
"takedown_id": takedown_id
}
request_result = http_request("POST", ESCALATE_TAKEDOWN_SUFFIX, data=data_for_request)
return request_result
def escalate_takedown_command():
args = demisto.args()
response = escalate_takedown(args["takedown_id"])
human_readable = generate_escalate_takedown_human_readable(response)
return_outputs(
readable_output=human_readable,
outputs={},
raw_response=response
)
@logger
def add_notes_to_takedown(takedown_id, note, notify):
data_for_request = {
"takedown_id": takedown_id,
"note": note,
"notify": notify
}
data_for_request = return_dict_without_none_values(data_for_request)
request_result = http_request("POST", ACCESS_TAKEDOWN_NOTES_SUFFIX, data=data_for_request)
return request_result
def add_notes_to_takedown_command():
args = demisto.args()
note = args.get("note")
notify = string_to_bool(args.get("notify")) if args.get("notify") else None
takedown_id = int(args["takedown_id"])
response = add_notes_to_takedown(takedown_id, note, notify)
human_readable = generate_add_note_human_readable(response)
return_outputs(
readable_output=human_readable,
outputs=response
)
def get_takedown_notes(takedown_id, group_id, date_from, date_to, author):
params_for_request = {
"takedown_id": takedown_id,
"group_id": group_id,
"date_to": date_to,
"date_from": date_from,
"author": author
}
params_for_request = return_dict_without_none_values(params_for_request)
request_result = http_request("GET", ACCESS_TAKEDOWN_NOTES_SUFFIX, params=params_for_request)
return request_result
def get_takedown_notes_command():
args = demisto.args()
takedown_id = int(args.get("takedown_id")) if args.get("takedown_id") else None
group_id = int(args.get("group_id")) if args.get("group_id") else None
date_from = args.get("date_from")
date_to = args.get("date_to")
author = args.get("author")
list_of_takedowns_notes = get_takedown_notes(takedown_id, group_id, date_from, date_to, author)
list_of_takedowns_notes = list_of_takedowns_notes[:LIMIT]
if takedown_id:
list_of_takedowns_notes = filter_by_id(list_of_takedowns_notes, "takedown_id", int(takedown_id))
list_of_notes_contexts = generate_list_of_takedown_notes_contexts(list_of_takedowns_notes)
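# The "(val.ID == obj.ID)" suffix in the context key below is the platform's linking syntax: new
# entries are merged into existing Netcraft.Takedown context objects with the same ID (an assumption
# based on standard Demisto/XSOAR context behaviour).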
entry_context = {
"Netcraft.Takedown(val.ID == obj.ID)": generate_netcraft_context_with_notes(list_of_notes_contexts)
}
human_readable = gen_takedown_notes_human_readable(list_of_notes_contexts)
return_outputs(
readable_output=human_readable,
outputs=entry_context,
raw_response=list_of_takedowns_notes
)
@logger
def get_takedown_info(takedown_id, ip, url, updated_since, date_from, region):
params_for_request = {
"id": takedown_id,
"ip": ip,
"url": url,
"updated_since": updated_since,
"date_from": date_from,
"region": region,
}
params_for_request = return_dict_without_none_values(params_for_request)
request_result = http_request("GET", GET_TAKEDOWN_INFO_SUFFIX, params=params_for_request)
return request_result
def get_takedown_info_command():
args = demisto.args()
takedown_id = int(args.get("id")) if args.get("id") else None
ip = args.get("ip")
url = args.get("url")
updated_since = args.get("updated_since")
date_from = args.get("date_from")
region = args.get("region")
list_of_takedowns_infos = get_takedown_info(takedown_id, ip, url, updated_since, date_from, region)
list_of_takedowns_infos = list_of_takedowns_infos[:LIMIT]
if takedown_id:
list_of_takedowns_infos = filter_by_id(list_of_takedowns_infos, "id", str(takedown_id))
list_of_takedowns_contexts = generate_list_of_takedowns_context(list_of_takedowns_infos)
human_readable = gen_takedown_info_human_readable(list_of_takedowns_contexts)
entry_context = {
'Netcraft.Takedown(val.ID == obj.ID)': list_of_takedowns_contexts
}
return_outputs(
readable_output=human_readable,
raw_response=list_of_takedowns_infos,
outputs=entry_context,
)
@logger
def report_attack(malicious_site_url, comment, is_test_request=False):
data_for_request = {
"attack": malicious_site_url,
"comment": comment
}
if is_test_request:
request_url_suffix = TEST_MODULE_SUFFIX
else:
request_url_suffix = REPORT_MALICIOUS_SUFFIX
request_result = http_request("POST", request_url_suffix, data=data_for_request, should_convert_to_json=False)
return request_result
def report_attack_command():
args = demisto.args()
entry_context: dict = {}
response_lines_array = report_attack(args["attack"], args["comment"])
result_answer = response_lines_array[0]
if result_answer == MALICIOUS_REPORT_SUCCESS:
new_takedown_id = response_lines_array[1]
# Until the API bug is fixed, this list will include info of all takedowns and not just the new one
new_takedown_infos = get_takedown_info(new_takedown_id, None, None, None, None, None)
new_takedown_infos = new_takedown_infos[:LIMIT]
new_takedown_infos = filter_by_id(new_takedown_infos, "id", new_takedown_id)
list_of_new_takedown_contexts = generate_list_of_takedowns_context(new_takedown_infos)
human_readable = gen_takedown_info_human_readable(list_of_new_takedown_contexts, REPORT_MALICIOUS_SUCCESS_TITLE)
entry_context = {
'Netcraft.Takedown(val.ID == obj.ID)': list_of_new_takedown_contexts
}
else:
human_readable = generate_report_malicious_site_human_readable(response_lines_array)
return_outputs(
readable_output=human_readable,
outputs=entry_context,
raw_response=entry_context
)
def test_module():
"""
Performs a test takedown report (POST to the authorise-test endpoint) to verify connectivity and credentials
"""
test_result = report_attack("https://www.test.com", "test", True)
if test_result[0] != MALICIOUS_REPORT_SUCCESS:
raise Exception("Test request failed.")
demisto.results("ok")
''' COMMANDS MANAGER / SWITCH PANEL '''
LOG('Command being called is %s' % (demisto.command()))
try:
# Remove proxy if not set to true in params
handle_proxy()
if demisto.command() == 'test-module':
test_module()
elif demisto.command() == 'netcraft-report-attack':
report_attack_command()
elif demisto.command() == 'netcraft-get-takedown-info':
get_takedown_info_command()
elif demisto.command() == 'netcraft-get-takedown-notes':
get_takedown_notes_command()
elif demisto.command() == 'netcraft-add-notes-to-takedown':
add_notes_to_takedown_command()
elif demisto.command() == 'netcraft-escalate-takedown':
escalate_takedown_command()
# Log exceptions
except Exception as e:
return_error(str(e))
|
|
# Copyright (C) 2015 Philipp Baumgaertel
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the BSD license. See the LICENSE.txt file for details.
from math import fabs
import unittest
from skgpuppy.Covariance import GaussianCovariance, SPGPCovariance, Covariance, Dot, PeriodicCovariance
from skgpuppy.GaussianProcess import GaussianProcess
from skgpuppy.UncertaintyPropagation import UncertaintyPropagationNumerical, UncertaintyPropagationExact, UncertaintyPropagationLinear, UncertaintyPropagationApprox, UncertaintyPropagationMC, \
UncertaintyPropagationNumericalHG
import numpy as np
import time
import pickle
import os
from skgpuppy.MLE import MLE
from skgpuppy.Utilities import norm, expected_value_monte_carlo
from skgpuppy.InverseUncertaintyPropagation import InverseUncertaintyPropagationApprox, InverseUncertaintyPropagationNumerical
from skgpuppy.PDF import Normal, Skew_Normal
from functools import wraps
import scipy
import sys
class TestPDF(unittest.TestCase):
def test_skew_pdf(self):
s = Skew_Normal()
result = s.output_pdf(1,1,0.5,1,[1,2,3])
#Numbers generated by R package sn
self.assertAlmostEqual(result[0],0.398342403205,delta=1e-10 )
self.assertAlmostEqual(result[1],0.202124583151,delta=1e-10 )
self.assertAlmostEqual(result[2],0.0602633676246,delta=1e-10 )
self.assertAlmostEqual(s.estimate_min_max(1,1,0.5,1,0.9)[0], -0.478543996503,delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0.5,1,0.9)[1], 2.79287447653,delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0.5,1,0.2)[0], 0.666406364782,delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0.5,1,0.2)[1], 1.16161779703,delta=1e-5)
def test_norm_pdf(self):
s = Skew_Normal()
n = Normal()
sresult = s.output_pdf(1,1,0,0,[1,2,3])
nresult = n.output_pdf(1,1,0,0,[1,2,3])
for i in range(3):
self.assertAlmostEqual(sresult[i],nresult[i],delta=1e-10 )
self.assertAlmostEqual(s.estimate_min_max(1,1,0,1,0.9)[0], n.estimate_min_max(1,1,0,1,0.9)[0],delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0,1,0.9)[1], n.estimate_min_max(1,1,0,1,0.9)[1],delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0,1,0.2)[0], n.estimate_min_max(1,1,0,1,0.2)[0],delta=1e-5)
self.assertAlmostEqual(s.estimate_min_max(1,1,0,1,0.2)[1], n.estimate_min_max(1,1,0,1,0.2)[1],delta=1e-5)
class TestMLE(unittest.TestCase):
def getCI(self, alpha, n, k):
from scipy.stats import beta
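# Clopper-Pearson (exact) binomial confidence interval expressed through beta-distribution
# quantiles: lower = Beta(alpha/2; k, n-k+1), upper = Beta(1-alpha/2; k+1, n-k).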
lowerci = beta.ppf(alpha / 2, k, n - k + 1)
upperci = beta.ppf(1 - alpha / 2, k + 1, n - k)
return lowerci, upperci
def mle(self, n, k):
lowerci, upperci = self.getCI(0.05, n, k)
def density(x, theta):
if 0.5 < x[0] <= 1:
return (theta[0]) * 2 # *2 to be a density -> integrate .. = 1
elif 0 <= x[0] <= 0.5:
return (1 - theta[0]) * 2
else:
return 0
samples = []
for i in range(k):
samples.append([1])
for i in range(n - k):
samples.append([0])
p = float(k) / float(n)
mle = MLE(density, [0.5],support=[(0,1)])
theta = mle.mle(samples)
sigma = mle.sigma(theta, observations=samples)
lb, ub = mle.mle_ci(samples)
fisher_sigma = mle.sigma(theta, n=len(samples))
self.assertAlmostEqual(fisher_sigma[0], sigma[0], delta=1e-5)
self.assertAlmostEqual(p, theta[0], delta=0.1 * p)
self.assertAlmostEqual(lb[0], lowerci, delta=0.15 * lowerci)
self.assertAlmostEqual(ub[0], upperci, delta=0.15 * upperci)
def test_fisher(self):
def density(x, theta):
if 0.5 < x[0] <= 1:
return (theta[0]) * 2 # *2 to be a density -> integrate .. = 1
elif 0 <= x[0] <= 0.5:
return (1 - theta[0]) * 2
else:
return 0
mle = MLE(density,[0.5],support=[(0,1)])
fisher = mle.get_fisher_function()
fisher_2nd = mle.get_fisher_function(order=2)
self.assertAlmostEqual(fisher([0.5],0),fisher_2nd([0.5],0),delta=1e-5)
self.assertAlmostEqual(fisher([0.05],0),fisher_2nd([0.05],0),delta=1e-5)
self.assertAlmostEqual(fisher([0.99],0),fisher_2nd([0.99],0),delta=1e-4)
#
# theta = np.atleast_2d(np.linspace(1e-2,1-1e-2,100)).T
# #y = [fisher(theta_i,0) for theta_i in theta]
# y = [1/np.sqrt(fisher(theta_i,0)) for theta_i in theta]
# fig = plt.figure()
# plt.plot(theta,y)
# plt.title('Fisher Information')
# plt.show()
#
# @unittest.skip("Too slow")
# def test_fisher_2d_norm(self):
# density2d = lambda x, theta : norm(x[0],theta[0],theta[1])*norm(x[1],theta[2],theta[3])
# #density2d = lambda x, theta : mvnorm(np.array(x),np.array(theta[0:2]),np.diag(theta[2:4]))
# mle = MLE([(-4,4),(-4,4)], [0.5,0.5,1,1],density=density2d)
# fisher = mle.get_fisher_function()
#
# for i in xrange(4):
# for j in xrange(4):
# print i,", ",j
# print fisher([0.5,0.5,1,1],i,j)
def test_fisher_norm_matrix(self):
density = lambda x, theta : norm(np.float128(x[0]),theta[0],theta[1])
fisher_matrix = [[lambda theta : 1/theta[1]**2, lambda theta : 0],[lambda theta : 0,lambda theta : 2/theta[1]**2]]
mle = MLE(density,[0,1],dims=1,fisher_matrix=fisher_matrix)
fisher = mle.get_fisher_function(order=1)
for i in range(2):
for j in range(2):
if i != j :
self.assertAlmostEqual(fisher([0.5,1],i,j),0,delta=0.015)
points = 100
sigma = np.linspace(1e-1,0.2,points)
theta = np.hstack((np.zeros((points,1)),np.atleast_2d(sigma).T))
f_mu = [fisher(theta_i,0) for theta_i in theta]
f_sigma = [fisher(theta_i,1) for theta_i in theta]
f_mu2 = [1/x_i**2 for x_i in sigma]
f_sigma2 = [2/(x_i**2) for x_i in sigma]
for i in range(points):
self.assertAlmostEqual(f_mu[i],f_mu2[i],delta=f_mu2[i]*1e-3)
self.assertAlmostEqual(f_sigma[i],f_sigma2[i],delta=f_sigma2[i]*1e-3)
def test_fisher_norm(self):
density = lambda x, theta : norm(np.float128(x[0]),theta[0],theta[1])
mle = MLE(density,[0,1],dims=1)#,support=[(-4,4)])
fisher = mle.get_fisher_function(order=1)
for i in range(2):
for j in range(2):
# print i,", ",j
# print fisher([0,1],i,j)
if i != j :
self.assertAlmostEqual(fisher([0.5,1],i,j),0,delta=0.015)
points = 100
#mu = np.linspace(1e-1,2,points)
#theta = np.array(np.hstack((np.atleast_2d(mu).T,np.zeros((points,1))+0.1)),dtype=np.float128)
sigma = np.linspace(1e-1,0.2,points)
theta = np.hstack((np.zeros((points,1)),np.atleast_2d(sigma).T))
f_mu = [fisher(theta_i,0) for theta_i in theta]
f_sigma = [fisher(theta_i,1) for theta_i in theta]
#print type(f_mu[0])
f_mu2 = [1/x_i**2 for x_i in sigma]
f_sigma2 = [2/(x_i**2) for x_i in sigma]
#y = [1/np.sqrt(fisher(theta_i,0)) for theta_i in theta]
# import matplotlib.pyplot as plt
#
# fig = plt.figure()
# #plt.plot(mu,f_mu,'o')
# #plt.plot(sigma,f_mu,'o',sigma,f_mu2)
# #plt.plot(sigma,f_sigma,'o',sigma,f_sigma2)
# plt.title('Fisher Information')
# plt.show()
for i in range(points):
self.assertAlmostEqual(f_mu[i],f_mu2[i],delta=f_mu2[i]*1e-3)
self.assertAlmostEqual(f_sigma[i],f_sigma2[i],delta=f_sigma2[i]*1e-3)
#
# n = mle.get_n([0.1])
# print n
def test_fisher_2d(self):
def density2d(x,theta):
if 0 <= x[0] <= 1 and 0 <= x[1] <= 1:
return theta[0]
if 0 <= x[0] <= 1 and 1 <= x[1] <= 2:
return 1-theta[0]
if 1 <= x[0] <= 2 and 0 <= x[1] <= 1:
return theta[1]
if 1 <= x[0] <= 2 and 1 <= x[1] <= 2:
return 1-theta[1]
mle = MLE(density2d,[0.25,0.25],support=[(0,2),(0,2)])
fisher = mle.get_fisher_function()
fisher_2nd = mle.get_fisher_function(order=2)
for i in range(2):
self.assertAlmostEqual(fisher([0.25,0.25],i), fisher_2nd([0.25,0.25],i), delta=1e-3)
for j in range(2):
if i != j :
self.assertAlmostEqual(fisher([0.25,0.25],i,j),0,delta=1e-5)
def test_mle(self):
self.mle(400, 20)
self.mle(400, 200)
self.mle(4000, 200)
self.mle(40000, 2000)
class TestInverseUP(unittest.TestCase):
def setUp(self):
np.random.seed(123456)
# if not os.path.exists('tests/gp_2d.pkl'):
# Fitting the GP is slow, so the commented-out code below can cache it as a pickle
x = np.array([[x1,x2] for x1 in range(10) for x2 in range(10)],dtype=np.float64) #np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04,0.04])
v = 2
vt = 0#.01
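# Hyperparameter packing convention used throughout these tests:
# theta = [log(v), log(vt), log(w_1), ..., log(w_d)] with signal variance v, noise variance vt
# and per-dimension width parameters w, as expected by GaussianCovariance.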
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(), theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
self.gp_est = GaussianProcess(x, t, cov=GaussianCovariance())
means, variances = self.gp_est.estimate_many(x)
sigma = np.sqrt(variances)
for i in range(len(x)):
self.assertAlmostEqual(means[i], y[i], delta=4 * sigma[i])
# output = open('tests/gp_2d.pkl', 'wb')
#
# pickle.dump(self.gp_est, output)
# output.close()
#
# pkl_file = open('tests/gp_2d.pkl', 'rb')
#
# self.gp_est = pickle.load(pkl_file)
# pkl_file.close()
def test_GP_2D(self):
inputmean = np.array([5.0,5.0])
inputvariance = np.diag([0.2,0.3])
upn = UncertaintyPropagationNumerical(self.gp_est)
uph = UncertaintyPropagationNumericalHG(self.gp_est)
#upn = UncertaintyPropagationMC(self.gp_est,100000)
upe = UncertaintyPropagationExact(self.gp_est)
upl = UncertaintyPropagationLinear(self.gp_est)
upa = UncertaintyPropagationApprox(self.gp_est)
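# Each propagator estimates the mean and variance of the GP output when the input is Gaussian with
# the given mean and covariance; the timings and tolerances below compare the linear, exact,
# numerical, Gauss-Hermite and approximate schemes against each other.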
start = time.time()
meanL, varianceL = upl.propagate_GA(inputmean, inputvariance)
end = time.time()
print("Linear ", end - start)
start = time.time()
meanE, varianceE = upe.propagate_GA(inputmean, inputvariance)
end = time.time()
print("Exact ", end - start)
start = time.time()
meanN, varianceN = upn.propagate_GA(inputmean, inputvariance)
end = time.time()
print("Numerical ", end - start)
start = time.time()
meanH, varianceH = uph.propagate_GA(inputmean, inputvariance)
end = time.time()
print("Numerical HG", end - start)
start = time.time()
meanA, varianceA = upa.propagate_GA(inputmean, inputvariance)
end = time.time()
print("Approx ", end - start)
print(meanH, meanN, meanE, meanL, meanA)
print((varianceH),(varianceN), (varianceE), (varianceL), (varianceA))
print(np.sqrt(varianceH),np.sqrt(varianceN), np.sqrt(varianceE), np.sqrt(varianceL), np.sqrt(varianceA))
self.assertAlmostEqual(meanE, meanA, delta=0.002)
self.assertAlmostEqual(meanH, meanE, delta=0.1)
self.assertAlmostEqual(meanN, meanH, delta=0.01)
self.assertAlmostEqual(np.sqrt(varianceE), np.sqrt(varianceA), delta=0.001)
self.assertAlmostEqual(np.sqrt(varianceH), np.sqrt(varianceE), delta=0.05)
self.assertAlmostEqual(np.sqrt(varianceN), np.sqrt(varianceH), delta=0.015)
def test_IUPOpt(self):
self.IUPOpt()
def test_IUPOpt_wo_weaving(self):
import skgpuppy.UncertaintyPropagation
weaving = skgpuppy.UncertaintyPropagation.weaving
skgpuppy.UncertaintyPropagation.weaving = False
self.IUPOpt()
skgpuppy.UncertaintyPropagation.weaving = weaving
def IUPOpt(self):
output_variances = 0.2
u = np.array([5.0,5.0])
gps = self.gp_est
c = np.array([4.0,1.0])
I = 1/c
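# Inverse uncertainty propagation (a sketch of the intent, based on how the solution and costs are
# used below): choose the input variances so that the requested output variance is met at minimal
# cost, where the cost attributed to input i is c[i] / variance_i.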
iupa = InverseUncertaintyPropagationApprox(output_variances,gps,u,c,I)
iupn = InverseUncertaintyPropagationNumerical(output_variances,gps,u,c,I,upga_class=UncertaintyPropagationApprox)
start = time.time()
sol_n = iupn.get_best_solution()
end = time.time()
print("Numerical ", end - start)
start = time.time()
sol_a = iupa.get_best_solution()
end = time.time()
print("Approx ", end - start)
up = UncertaintyPropagationExact(self.gp_est)
print("Numerical, NA, Approx Solution")
print(sol_n)
print(sol_a)
print("Numerical, NA, Approx Variances")
print(up.propagate_GA(u,np.diag(sol_n))[1])
print(up.propagate_GA(u,np.diag(sol_a))[1])
print("Numerical, NA, Approx Costs")
print(c[0] * 1/sol_n[0] + c[1] * 1/sol_n[1])
print(c[0] * 1/sol_a[0] + c[1] * 1/sol_a[1])
self.assertAlmostEqual(sol_n[0],sol_a[0],delta=1e-2)
self.assertAlmostEqual(sol_n[1],sol_a[1],delta=1e-3)
class TestMinimize(unittest.TestCase):
def test_minimize(self):
from skgpuppy.Utilities import minimize
func = lambda x: x[0]**2 + 2*x[1]**2 + 3* x[2]**2
fprime = lambda x: np.array([2*x[0], 4*x[1], 6* x[2]])
methods = ["tnc", "l_bfgs_b", "cobyla", "slsqp", "bfgs", "powell", "cg", "ncg", "simplex"]
for method in methods:
result = minimize(func,np.array([1.0,1.0,1.0]),fprime=fprime,method=method)
self.assertAlmostEqual(result[0],0,delta=1e-4)
self.assertAlmostEqual(result[1],0,delta=1e-4)
self.assertAlmostEqual(result[2],0,delta=1e-4)
class TestIntegration(unittest.TestCase):
def setUp(self):
np.random.seed(1234)
def test_integrate_nquad(self):
from skgpuppy.Utilities import _integrate_nquad
#Wolfram Alpha:
#integrate x+y+z dx dy dz from x = 0 to 1 from y = 0 to 1 from z = 0 to 1
self.assertAlmostEqual(_integrate_nquad(lambda x: x[0], [(0, 1)]), 0.5, delta=1e-5)
self.assertAlmostEqual(_integrate_nquad(lambda x: x[0] + x[1], [(0, 1), (0, 1)]), 1, delta=1e-5)
self.assertAlmostEqual(_integrate_nquad(lambda x: x[0] + x[1] + x[2], [(0, 1), (0, 1), (0, 1)]), 1.5, delta=1e-5)
#Wolfram Alpha:
#integrate x+2*y+3*z dx dy dz from x = 0 to 1 from y = 0 to 2 from z = 0 to 3
#Maxima:
#integrate(integrate(integrate(x+2*y+3*z,x,0,1),y,0,2),z,0,3);
self.assertAlmostEqual(_integrate_nquad(lambda x: x[0] + 2 * x[1] + 3 * x[2], [(0, 1), (0, 2), (0, 3)]), 42,
delta=1e-5)
def test_integrate_quad(self):
from skgpuppy.Utilities import _integrate_quad
#Wolfram Alpha:
#integrate x+y+z dx dy dz from x = 0 to 1 from y = 0 to 1 from z = 0 to 1
self.assertAlmostEqual(_integrate_quad(lambda x: x[0], [(0, 1)]), 0.5, delta=1e-5)
self.assertAlmostEqual(_integrate_quad(lambda x: x[0] + x[1], [(0, 1), (0, 1)]), 1, delta=1e-5)
self.assertAlmostEqual(_integrate_quad(lambda x: x[0] + x[1] + x[2], [(0, 1), (0, 1), (0, 1)]), 1.5, delta=1e-5)
#Wolfram Alpha:
#integrate x+2*y+3*z dx dy dz from x = 0 to 1 from y = 0 to 2 from z = 0 to 3
#Maxima:
#integrate(integrate(integrate(x+2*y+3*z,x,0,1),y,0,2),z,0,3);
self.assertAlmostEqual(_integrate_quad(lambda x: x[0] + 2 * x[1] + 3 * x[2], [(0, 1), (0, 2), (0, 3)]), 42,
delta=1e-5)
def test_integrate_romberg(self):
from skgpuppy.Utilities import _integrate_romberg
self.assertAlmostEqual(_integrate_romberg(lambda x: x[0], [(0, 1)]), 0.5, delta=1e-5)
self.assertAlmostEqual(_integrate_romberg(lambda x: x[0] + x[1], [(0, 1), (0, 1)]), 1, delta=1e-5)
self.assertAlmostEqual(_integrate_romberg(lambda x: x[0] + x[1] + x[2], [(0, 1), (0, 1), (0, 1)]), 1.5, delta=1e-5)
#Wolfram Alpha:
#integrate x+2*y+3*z dx dy dz from x = 0 to 1 from y = 0 to 2 from z = 0 to 3
#Maxima:
#integrate(integrate(integrate(x+2*y+3*z,x,0,1),y,0,2),z,0,3);
self.assertAlmostEqual(_integrate_romberg(lambda x: x[0] + 2 * x[1] + 3 * x[2], [(0, 1), (0, 2), (0, 3)]), 42,
delta=1e-5)
def test_expected_value_MC(self):
mu = np.array([1,1])
Sigma_x = np.array([[1,0],[0,1]])
func = lambda x: x[0]*2
self.assertAlmostEqual(2,expected_value_monte_carlo(func,mu,Sigma_x),delta=0.1)
class TestGaussianProcess(unittest.TestCase):
def setUp(self):
np.random.seed(1234)
def test_gaussian_cov_derivative(self):
x = np.atleast_2d(np.linspace(0, 10, 50)).T
w = np.array([0.02])
v = 2
vt = 0.01
n,d = np.shape(x)
theta_min = np.ones(2+d)
theta_min[0] = np.log(v)
theta_min[1] = np.log(vt)
theta_min[2:2+d] = np.log(w)
n,d = np.shape(x)
cov = GaussianCovariance()
for xi in x:
for xj in x:
for j in range(2+d):
self.assertAlmostEqual(Covariance._d_cov_d_theta(cov,xi,xj,theta_min,j),cov._d_cov_d_theta(xi,xj,theta_min,j),delta=1e-3)
def test_spgp_covariance(self):
spgpc = SPGPCovariance(10)
gc = GaussianCovariance()
x = np.array([[x1,x2] for x1 in range(10) for x2 in range(10)]) #np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04,0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(),theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
theta = spgpc.get_theta(x,t)
cov = spgpc.cov_matrix(x,theta)
cov_super = Covariance.cov_matrix_ij(spgpc,x,x,theta)
self.assertLessEqual(np.abs((cov-cov_super).sum()),1e-5)#-np.exp(theta[1])*np.eye(100)
invcov = np.linalg.inv(cov)
invcov2 = spgpc.inv_cov_matrix(x,theta)
self.assertLessEqual(np.abs((invcov-invcov2)).sum(),1e-5)
# theta_gc = gc.get_theta(x,t)
# gcov = gc.cov_matrix(x,theta_gc)
# invgcov = np.linalg.inv(gcov)
# print np.max(np.abs(gcov-cov)/gcov)
# print np.max(np.abs(invgcov-invcov)/invgcov)
start = time.time()
delta = 1e-5
dndt_est = []
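# Central finite-difference approximation of the gradient of the negative log-likelihood,
# d NLL / d theta_j ~= (NLL(theta + d) - NLL(theta - d)) / (2 * delta), used to cross-check the
# analytic gradient computed further down.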
for j in range(len(theta)):
d = np.zeros(len(theta))
d[j] = delta
#dndt_est.append( (spgpc.negativeloglikelihood(x,t,theta+d) - spgpc.negativeloglikelihood(x,t,theta-d))/2/delta )
dndt_est.append( (Covariance._negativeloglikelihood(spgpc,x,t,theta+d) - Covariance._negativeloglikelihood(spgpc,x,t,theta-d))/2/delta )
print("TIME numerical: ",time.time() -start)
# j = 0
# d = np.zeros(len(theta))
# d[j] = delta
# d_cov_dt = (spgpc.cov_matrix(x,theta+d)-spgpc.cov_matrix(x,theta-d))/2/delta
# print spgpc.d_cov_matrix_d_theta(x,theta,j) - d_cov_dt
start = time.time()
dndt = Covariance._d_nll_d_theta(spgpc,x,t,theta)
print("TIME classic: ",time.time() -start)
dot = Dot()
dot.reset()
start = time.time()
dndt = spgpc._d_nll_d_theta(x,t,theta)
print("TIME opt: ",time.time() -start)
print(dot)
self.assertLessEqual(np.abs((dndt_est-dndt)).sum(),1e-4)
#self.assertLessEqual(np.abs((dndt_est-dndt_m)).sum(),1e-3)
# gp_est_gc = GaussianProcess(x, t,GaussianCovariance())
# gp_est_spgp = GaussianProcess(x, t,SPGPCovariance(5))
#
#
# theta = gp_est_spgp.theta_min
# cov = spgpc.cov_matrix(x,theta)
#
#
# theta_gc = gp_est_gc.theta_min
# gcov = gc.cov_matrix(x,theta_gc)
# print np.max(np.abs(gcov-cov)/gcov)
#
# invcov = np.linalg.inv(cov)
# invgcov = np.linalg.inv(gcov)
# print np.max(np.abs(invgcov-invcov)/invgcov)
#
def test_covariance(self):
gc = GaussianCovariance()
x = np.array([[x1,x2] for x1 in range(10) for x2 in range(10)]) #np.atleast_2d(np.linspace(0, 10, 30)).T
theta = np.log(np.array([2,0.01,0.04,0.04]))
cov = gc.cov_matrix(x,theta)
cov_super = Covariance.cov_matrix(gc,x,theta)
self.assertLessEqual(np.abs((cov-cov_super-0.01*np.eye(100))).sum(),1e-10)
dcov = gc._d_cov_matrix_d_theta(x,theta,2)
dcov_super = Covariance._d_cov_matrix_d_theta(gc,x,theta,2)
self.assertLessEqual(np.abs((dcov-dcov_super)).sum(),1e-10)
#np.count_nonzero(cov-cov_super)
t = GaussianProcess.get_realisation(x, GaussianCovariance(), theta)
#t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
dndt = gc._d_nll_d_theta(x,t,theta)
print(dndt)
dndt_est = []
for j in range(len(theta)):
d = np.zeros(len(theta))
d[j] = 1e-5
#d = np.log(1+d/np.exp(theta)) # Addition of log: log(x+y) = log(x) + log(1+y/x)
dndt_est.append( (gc._negativeloglikelihood(x,t,theta+d) - gc._negativeloglikelihood(x,t,theta-d))/2e-5 )
dndt_est = np.array(dndt_est)
print(dndt_est)
print(np.abs(dndt_est - dndt))
self.assertTrue((np.abs(dndt_est - dndt) < 5e-1).all())
def test_pickle(self):
x = np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(), theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
gp_est = GaussianProcess(x, t, GaussianCovariance())
output = open('gp.pkl', 'wb')
pickle.dump(gp_est, output,protocol=0)
output.close()
pkl_file = open('gp.pkl', 'rb')
if sys.version_info.major == 3:
gp_unpickled = pickle.load(pkl_file, encoding='latin1')
else:
gp_unpickled = pickle.load(pkl_file)
pkl_file.close()
os.remove('gp.pkl')
means, variances = gp_est.estimate_many(x)
sigma = np.sqrt(variances)
meansp, variancesp = gp_unpickled.estimate_many(x)
sigmap = np.sqrt(variancesp)
for i in range(len(x)):
self.assertEqual(means[i], meansp[i])
self.assertEqual(sigma[i], sigmap[i])
def test_gp(self):
x = np.atleast_2d(np.linspace(0, 10, 100)).T
w = np.array([0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(),theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
n = 10
xt = np.atleast_2d(np.linspace(0, 10, 200)).T
gp_est = GaussianProcess(x, t,GaussianCovariance())
print("Theta:", np.exp(gp_est.theta_min[0:3]))
means_gp, variances_gp = gp_est.estimate_many(xt)
sigma_gp = np.sqrt(variances_gp)
spgpcov = SPGPCovariance(n)
gp_est = GaussianProcess(x, t,spgpcov)
print("Theta:", np.exp(gp_est.theta_min[0:3]))
means, variances = gp_est.estimate_many(xt)
theta_start = spgpcov.get_theta(x,t)
print("Thetastart:", np.exp(theta_start[0:3]))
#means = spgpcov.estimate(x,t,gp_est.theta_min,xt)
sigma = np.sqrt(variances)
# import matplotlib.pyplot as plt
# fig = plt.figure()
# plt.plot(x,y,x,t,'o',xt,means,xt,means_gp)
# plt.fill_between(xt.ravel(), means + 1.96 * sigma, means - 1.96 * sigma, facecolor='red',alpha=0.5)
# plt.fill_between(xt.ravel(), means_gp + 1.96 * sigma_gp, means_gp - 1.96 * sigma_gp, facecolor='lightblue',alpha=0.5)
# plt.title('own')
# plt.legend(['Realisation','Noisy','Estimation','EstimationGP'])
# plt.show()
for i in range(len(x)):
self.assertAlmostEqual(means_gp[i*2], y[i], delta=4 * sigma[i*2])
self.assertAlmostEqual(means[i*2], y[i], delta=4 * sigma[i*2])
def test_gp_2D(self):
x = np.array([[x1,x2] for x1 in range(10) for x2 in range(10)]) #np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04,0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(), theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
# gp_est = GaussianProcess(x, t)
# x_new = np.array([[x1/2.0,x2/2.0] for x1 in xrange(20) for x2 in xrange(20)])
# #x_new = x
# means, variances = gp_est.estimate_many(x_new)
# sigma = np.sqrt(variances)
#
# import pylab as p
# # #import matplotlib.axes3d as p3
# import mpl_toolkits.mplot3d.axes3d as p3
#
#
#
gp_est = GaussianProcess(x, t,SPGPCovariance(10))
#gp_est = GaussianProcess(x, t,GaussianCovariance())
means, variances = gp_est.estimate_many(x)
sigma = np.sqrt(variances)
for i in range(len(x)):
self.assertAlmostEqual(means[i], y[i], delta=5 * sigma[i])
# x_new = np.array([[x1/2.0,x2/2.0] for x1 in xrange(20) for x2 in xrange(20)])
# #x_new = x
# means, variances = gp_est.estimate_many(x_new)
#
# sigma = np.sqrt(variances)
# import matplotlib.pyplot as plt
# #
# from mpl_toolkits.mplot3d import Axes3D
# from matplotlib import cm
# fig = plt.figure()
# ax = fig.gca(projection='3d')
# ax.plot_trisurf(x.T[0],x.T[1],y, cmap=cm.autumn, linewidth=0.2)
# ax.plot_trisurf(x_new.T[0],x_new.T[1],means, cmap=cm.winter, linewidth=0.2)
# # #ax.plot_trisurf(x, y, z-z_est, cmap=cm.jet, linewidth=0.2)
# # #plt.zlim((-1,1))
# plt.show()
def test_spgp_nll(self):
x = np.array([[x1,x2] for x1 in range(10) for x2 in range(10)]) #np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04,0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(x, GaussianCovariance(), theta)
t = y + 0.1 * np.random.randn(len(x)) #-> vt = 0.01
spgpcov = SPGPCovariance(10)
gc = GaussianCovariance()
theta = spgpcov.get_theta(x,t)
theta_gc = gc.get_theta(x,t)
start = time.time()
res_g = gc._negativeloglikelihood(x,t,theta_gc)
print("Gaussian NLL: ",res_g)
print("Time: ",time.time()-start)
start = time.time()
res = Covariance._negativeloglikelihood(spgpcov,x,t,theta)
print("My NLL: ",res)
print("Time: ",time.time()-start)
start = time.time()
res_s = spgpcov._negativeloglikelihood(x,t,theta)
print("Snelson NLL: ", res_s)
print("Time: ", time.time()-start)
self.assertAlmostEqual(res,res_s,delta=2e-1)
class TestTaylor(unittest.TestCase):
#TODO: test estimate many
def test_Isserli(self):
from skgpuppy.TaylorPropagation import _Isserli, _fast_isserli
from itertools import product
for order in [4,6,8]:
n = 4
c = 0
c2 = 0
c3 = 0
Sigma_x = np.eye(n)
for e in product(list(range(order+1)),repeat=n):
if sum(e) == order:
i = _Isserli(np.array(e),Sigma_x,diagonal=False)
if i != 0:
c += 1
i = _Isserli(np.array(e),Sigma_x)
if i != 0:
c2 += 1
i = _fast_isserli(np.array(e),Sigma_x)
if i != 0:
c3 += 1
self.assertEqual(c,c2)
self.assertEqual(c,c3)
self.assertEqual(_Isserli(np.array([8,0,0,0]),Sigma_x),105)
self.assertEqual(_Isserli(np.array([6,0,0,0]),Sigma_x),15)
self.assertEqual(_Isserli(np.array([4,0,0,0]),Sigma_x),3)
self.assertEqual(_fast_isserli(np.array([8,0,0,0]),Sigma_x),105)
self.assertEqual(_fast_isserli(np.array([6,0,0,0]),Sigma_x),15)
self.assertEqual(_fast_isserli(np.array([4,0,0,0]),Sigma_x),3)
def test_derivative(self):
from skgpuppy.TaylorPropagation import _ndderivative
f = lambda x: np.sin(x[0]*x[1])
d10 = lambda x: x[1] * np.cos(x[0] * x[1])
d01 = lambda x: x[0] * np.cos(x[0] * x[1])
d11 = lambda x: np.cos(x[0] * x[1]) - x[0] * x[1] * np.sin(x[0] * x[1])
d20 = lambda x: -x[1]**2 * np.sin(x[0] * x[1])
nddev = _ndderivative(f)
self.assertAlmostEqual(nddev.ndderivative([1,1],[1,0]), d10([1,1]),delta=1e-5)
self.assertAlmostEqual(nddev.ndderivative([1,1],[0,1]), d01([1,1]),delta=1e-5)
self.assertAlmostEqual(nddev.ndderivative([1,1],[1,1]), d11([1,1]),delta=1e-4)
self.assertAlmostEqual(nddev.ndderivative([1,1],[2,0]), d20([1,1]),delta=1e-5)
def test_powerlists(self):
from skgpuppy.TaylorPropagation import _get_powerlists
from itertools import product
import time
order = 3
dims = 6
t = time.time()
p_lists = []
for e in product(list(range(order+1)),repeat=dims):
# Very inefficient
if sum(e) == order:
p_lists.append(e)
print(time.time()-t)
t = time.time()
p_lists_2 = _get_powerlists(order,dims)
print(time.time()-t)
self.assertEqual(len(p_lists),len(p_lists_2))
for i,p in enumerate(p_lists):
self.assertListEqual(list(p),list(p_lists_2[i]))
#t = time.time()
p_lists = []
for e in product(list(range(order+1)),repeat=dims):
# Very inefficient
if sum(e) <= order:
p_lists.append(e)
#print time.time()-t
#t = time.time()
p_lists_2 = _get_powerlists(order,dims,leq=True)
#print time.time()-t
self.assertEqual(len(p_lists),len(p_lists_2))
for i,p in enumerate(p_lists):
self.assertListEqual(list(p),list(p_lists_2[i]))
def test_Taylor_estimation(self):
from skgpuppy.TaylorPropagation import TaylorPropagation
from itertools import product
f = lambda x: np.sin(x[0])
#order 3 => dx = 1e-3
#order 6 => dx = 1e-2
#order 9 => dx = 1e-1
t = TaylorPropagation(f,[1],10,dx=1e-1)
self.assertAlmostEqual(t([1.1]),np.sin(1.1),delta=1e-3)
#print t([1.1])
#print np.sin(1.1)
# import matplotlib.pyplot as plt
# x_list = np.atleast_2d(np.linspace(-6, 6, 100)).T
# y_list = np.sin(x_list)
# y_est_list = t.estimate_many(x_list)
# #print x_list
# #print y_est_list
# fig = plt.figure()
# plt.ylim((-1,1))
# plt.plot(x_list,y_list)
# plt.plot(x_list,y_est_list)
# plt.title('Taylor Series')
# plt.show()
f = lambda x: np.sin(x[0]*x[1])
t = TaylorPropagation(f,[1,1],10,dx=1e-1)
#print t([1.1,1.1])
#print np.sin(1.1*1.1)
self.assertAlmostEqual(t([1.1,1.1]),np.sin(1.1*1.1),delta=1e-3)
# import matplotlib.pyplot as plt
# xs = np.linspace(-0.5,2.5,30)
# x_list = np.array(list(product(xs,repeat=2)))
# x = x_list.T[0]
# y = x_list.T[1]
# z = np.sin(x*y)
# z_est = np.array(t.estimate_many(x_list))
#
# from mpl_toolkits.mplot3d import Axes3D
# from matplotlib import cm
# fig = plt.figure()
# ax = fig.gca(projection='3d')
# ax.plot_trisurf(x, y, z, cmap=cm.jet, linewidth=0.2)
# ax.plot_trisurf(x, y, z_est, cmap=cm.jet, linewidth=0.2)
# #ax.plot_trisurf(x, y, z-z_est, cmap=cm.jet, linewidth=0.2)
# #plt.zlim((-1,1))
# plt.show()
def test_taylor_propagation(self):
from skgpuppy.TaylorPropagation import TaylorPropagation
f = lambda x: np.sin(x[0]*x[1])
stddev = 0.4
Sigma_x = np.diag(np.array([stddev,stddev])**2)
start = time.time()
vals = np.random.multivariate_normal(np.array([1,1]),Sigma_x,10**7).T
out = f(vals)
print(time.time()-start)
print(out.mean(), out.std())
#mean = t.propagate_mean(Sigma_x)
start = time.time()
t = TaylorPropagation(f,[1,1],10,dx=1e-1)
mean,variance = t.propagate(Sigma_x)
print(time.time()-start)
print(mean, np.sqrt(variance))
self.assertAlmostEqual(out.mean(),mean,delta=1e-3)
self.assertAlmostEqual(out.std(),np.sqrt(variance),delta=1e-3)
def test_hermgauss(self):
from numpy.polynomial.hermite import hermgauss
from scipy.integrate import quad
x,w = hermgauss(100)
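# Gauss-Hermite quadrature: the integral of f(x) * exp(-x**2) over the real line is approximated by
# sum_i w_i * f(x_i); the assertion below checks this against scipy's quad for f = cos.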
y = []
sqrt2 = np.sqrt(2)
for xi in x:
y.append(np.cos(xi))
y = np.array(y)
h = lambda x: np.cos(x)*np.exp(-x**2)
self.assertAlmostEqual( (y*w).sum(), quad(h,-10,10)[0], delta=1e-5)
from skgpuppy.FFNI import FullFactorialNumericalIntegrationNaive
from skgpuppy.Utilities import integrate_hermgauss_nd, integrate_hermgauss
f = lambda x: np.sin(x[0])
ffni = FullFactorialNumericalIntegrationNaive(f,np.array([1]))
self.assertAlmostEqual(integrate_hermgauss(f,1,2,order=100),ffni.propagate(np.array([[4]]))[0],delta=1e-4)
f = lambda x: np.cos(x[0]*x[1])
stddev = 0.3
Sigma_x = np.diag(np.array([stddev,stddev])**2)
ffni = FullFactorialNumericalIntegrationNaive(f,np.array([1.0,1.0]))
self.assertAlmostEqual(integrate_hermgauss_nd(f,[1.0,1.0],Sigma_x,10),ffni.propagate(Sigma_x)[0],delta=1e-4)
def test_skewness(self):
from skgpuppy.TaylorPropagation import TaylorPropagation
from skgpuppy.FFNI import FullFactorialNumericalIntegrationEvans, FullFactorialNumericalIntegrationHermGauss
from scipy.stats import norm, scoreatpercentile
f = lambda x: np.sin(x[0]*x[1])#*x[2]*x[3]*x[4]*x[5]*x[6]*x[7]
stddev = 0.3
Sigma_x = np.diag(np.array([stddev,stddev])**2)#,stddev,stddev
mu = [1.0,1.0]#,1.0,1.0
start = time.time()
vals = np.random.multivariate_normal(np.array(mu),Sigma_x,10**7).T
out = f(vals)
print(time.time()-start)
print("Monte Carlo")
from scipy.stats import skew, kurtosis
skewness_mc = skew(out)
kurtosis_mc = kurtosis(out,fisher=False)
print(out.mean(), out.std(), skewness_mc, kurtosis_mc)#, skewness_mc2, kurtosis_mc2
print()
print("Taylor")
start = time.time()
t = TaylorPropagation(f,mu,5,dx=1e-1)
mean,variance,skewness,kurtosis = t.propagate(Sigma_x,skew=True,kurtosis=True)
print(time.time()-start)
print(mean, np.sqrt(variance),skewness, kurtosis)
self.assertAlmostEqual(out.mean(),mean,delta=0.002)
self.assertAlmostEqual(out.std(),np.sqrt(variance),delta=0.002)
self.assertAlmostEqual(skewness,skewness_mc,delta=1e-1)
self.assertAlmostEqual(kurtosis,kurtosis_mc,delta=0.5)
print()
print("Evans")
start = time.time()
t = FullFactorialNumericalIntegrationEvans(f,np.array(mu))
mean,variance,skewness,kurtosis = t.propagate(Sigma_x,skew=True,kurtosis=True)
print(time.time()-start)
print(mean, np.sqrt(variance),skewness, kurtosis)
# The Evans method gives poor skewness/kurtosis estimates here, so those assertions are skipped
self.assertAlmostEqual(out.mean(),mean,delta=0.002)
self.assertAlmostEqual(out.std(),np.sqrt(variance),delta=0.002)
#self.assertAlmostEqual(skewness,skewness_mc,delta=1e-1)
#self.assertAlmostEqual(kurtosis,kurtosis_mc,delta=0.5)
print()
print("Gauss-Hermite")
start = time.time()
t = FullFactorialNumericalIntegrationHermGauss(f,np.array(mu),order=5)
mean,variance,skewness,kurtosis = t.propagate(Sigma_x,skew=True,kurtosis=True)
print(time.time()-start)
print(mean, np.sqrt(variance),skewness, kurtosis)
self.assertAlmostEqual(out.mean(),mean,delta=0.002)
self.assertAlmostEqual(out.std(),np.sqrt(variance),delta=0.002)
self.assertAlmostEqual(skewness,skewness_mc,delta=1e-1)
self.assertAlmostEqual(kurtosis,kurtosis_mc,delta=0.5)
# print
# print "Genz-Keister"
# start = time.time()
# t = FullFactorialNumericalIntegrationGenzKeister(f,np.array(mu),order=5)
# mean,variance,skewness,kurtosis = t.propagate(Sigma_x,skew=True,kurtosis=True)
# print time.time()-start
# print mean, np.sqrt(variance),skewness, kurtosis
#
# self.assertAlmostEqual(out.mean(),mean,delta=0.002)
# self.assertAlmostEqual(out.std(),np.sqrt(variance),delta=0.002)
# self.assertAlmostEqual(skewness,skewness_mc,delta=1e-1)
# self.assertAlmostEqual(kurtosis,kurtosis_mc,delta=0.5)
# n = Normal()
# _min,_max = n.estimate_min_max(mean,variance,skewness,kurtosis,0.95)
# print "Normal:"
# print _min, _max
#
# s = Skew_Normal()
# _min,_max = s.estimate_min_max(mean,variance,skewness,kurtosis,0.95)
# print "Skew Normal:"
# print _min, _max
# p = Pearson()
# _min,_max = p.estimate_min_max(mean,variance,skewness,kurtosis,0.95)
# print "Pearson"
# print _min, _max
# _min_sample = scoreatpercentile(out,2.5)
# _max_sample = scoreatpercentile(out,97.5)
# print "MC"
# print _min_sample,_max_sample
# self.assertAlmostEqual(_min,_min_sample,delta=2e-2)
# self.assertAlmostEqual(_max,_max_sample,delta=2e-2)
#
#plotting
# import matplotlib.pyplot as plt
# x_list = np.linspace(-1, 1.5, 100)
# # y_list = p.output_pdf(mean,variance,skewness,kurtosis,x_list)
# y_list_sn = s.output_pdf(mean,variance,skewness,kurtosis,x_list)
# y_list_n = n.output_pdf(mean,variance,skewness,kurtosis,x_list)
# #print x_list
# #print y_est_list
# fig = plt.figure()
# #plt.ylim((-1,1))
# # plt.plot(x_list,y_list, label="Pearson")
# plt.plot(x_list,y_list_n,label="Normal")
# plt.plot(x_list,y_list_sn,label="Skew Normal")
# plt.hist(out,bins=100,histtype='step',normed=True)
# plt.legend(['Pearson','Normal','Skew Normal', 'Hist'])
#
# plt.title('Output PDF')
# plt.show()
class TestUncertaintyPropagationSPGP(unittest.TestCase):
def setUp(self):
np.random.seed(1234)
self.x = np.atleast_2d(np.linspace(0, 10, 30)).T
w = np.array([0.04])
v = 2
vt = 0#.01
theta = np.zeros(2+len(w))
theta[0] = np.log(v)
theta[1] = np.log(vt)
theta[2:2+len(w)] = np.log(w)
y = GaussianProcess.get_realisation(self.x, GaussianCovariance(),theta)
self.t = y + 0.1 * np.random.randn(len(self.x)) #-> vt = 0.01
self.gp_est = GaussianProcess(self.x, self.t, cov=GaussianCovariance())
# means, variances = self.gp_est.estimate_many(self.x)
# sigma = np.sqrt(variances)
#
# import matplotlib.pyplot as plt
#
# fig = plt.figure()
# plt.plot(x,y,x,t,'o')
# plt.fill_between(x.ravel(), means + 1.96 * sigma, means - 1.96 * sigma, facecolor='lightblue')
# plt.title('own')
# plt.legend(['Realisation','Noisy','Estimation'])
# #plt.draw()
# plt.show()
def test_propagate(self):
self.propagate(5.0, 1e-3)
self.propagate(3.0, 1e-3)
self.propagate(8.0, 1e-3)
def propagate(self,inputmean,inputvariance):
upmc = UncertaintyPropagationMC(self.gp_est,10000)
upn = UncertaintyPropagationNumerical(self.gp_est)
uph = UncertaintyPropagationNumericalHG(self.gp_est)
mean, var = uph.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
print(mean, var)
#Try some values in range [mean,mean+2*std]
for i in np.linspace(mean,mean+2*np.sqrt(var),5):
start = time.time()
t_n = upn.propagate(i,np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
start = time.time()
t_mc = upmc.propagate(i,np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
start = time.time()
t_h = uph.propagate(i,np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
t_ga = scipy.stats.norm.pdf(i, loc=mean, scale=np.sqrt(var))
print(t_n)
print(t_mc)
print(t_h)
print(t_ga)
#Fails
#self.assertAlmostEqual(t_n, t_h, delta=t_h/10)
self.assertAlmostEqual(t_h, t_mc, delta=t_h*1e-2)
self.assertAlmostEqual(t_h, t_ga, delta=t_h*1e-4)
def test_propagate_ga_approx(self):
self.propagate_ga_approx(5.0, 0.3)
self.propagate_ga_approx(3.0, 0.2)
self.propagate_ga_approx(8.0, 0.1)
def test_propagate_ga_approx_wo_weaving(self):
import skgpuppy.UncertaintyPropagation
weaving = skgpuppy.UncertaintyPropagation.weaving
skgpuppy.UncertaintyPropagation.weaving = False
self.propagate_ga_approx(5.0, 0.3)
self.propagate_ga_approx(3.0, 0.2)
self.propagate_ga_approx(8.0, 0.1)
skgpuppy.UncertaintyPropagation.weaving = weaving
def propagate_ga_approx(self, inputmean, inputvariance):
upn = UncertaintyPropagationNumerical(self.gp_est)
upe = UncertaintyPropagationExact(self.gp_est)
upl = UncertaintyPropagationLinear(self.gp_est)
upa = UncertaintyPropagationApprox(self.gp_est)
start = time.time()
meanE, varianceE = upe.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Exact ", end - start)
start = time.time()
meanA, varianceA = upa.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Approx ", end - start)
print(meanE, meanA)
print((varianceE), (varianceA))
print(np.sqrt(varianceE), np.sqrt(varianceA))
self.assertAlmostEqual(meanE, meanA, delta=1e-2)
self.assertAlmostEqual(varianceE, varianceA, delta=1e-2)
def test_propagate_ga(self):
self.propagate_ga(5.0, 1.0)
self.propagate_ga(3.0, 2.0)
self.propagate_ga(8.0, 1.0)
def propagate_ga(self, inputmean, inputvariance):
upmc = UncertaintyPropagationMC(self.gp_est)
upn = UncertaintyPropagationNumerical(self.gp_est)
upe = UncertaintyPropagationExact(self.gp_est)
upl = UncertaintyPropagationLinear(self.gp_est)
upa = UncertaintyPropagationApprox(self.gp_est)
uph = UncertaintyPropagationNumericalHG(self.gp_est)
start = time.time()
meanL, varianceL = upl.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Linear ", end - start)
start = time.time()
meanE, varianceE = upe.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Exact ", end - start)
start = time.time()
meanN, varianceN = upn.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Numerical ", end - start)
start = time.time()
meanH, varianceH = uph.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Numerical Herm Gauss", end - start)
start = time.time()
meanA, varianceA = upa.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("Approx ", end - start)
start = time.time()
meanMC, varianceMC = upmc.propagate_GA(np.array([inputmean]), np.array([[inputvariance]]))
end = time.time()
print("MC ", end - start)
print(meanH, meanN, meanE, meanL, meanA, meanMC)
print((varianceH), (varianceN), (varianceE), (varianceL), (varianceA), varianceMC)
print(np.sqrt(varianceH),np.sqrt(varianceN), np.sqrt(varianceE), np.sqrt(varianceL), np.sqrt(varianceA), np.sqrt(varianceMC))
self.assertAlmostEqual(meanN, meanH, delta=1e-4)
self.assertAlmostEqual(meanN, meanL, delta=0.3)
self.assertAlmostEqual(meanN, meanE, delta=1e-5)
self.assertAlmostEqual(meanN, meanMC, delta=0.1)
self.assertAlmostEqual(meanN, meanA, delta=0.1)
self.assertAlmostEqual(varianceN, varianceH, delta=1e-3)
self.assertAlmostEqual(varianceN, varianceE, delta=1e-5)
self.assertAlmostEqual(varianceN, varianceA, delta=0.3)
self.assertAlmostEqual(varianceN, varianceMC, delta=0.1)
self.assertAlmostEqual(np.sqrt(varianceE), np.sqrt(varianceL), delta=np.sqrt(max(varianceL, varianceE)) * 1)
def test_jacobian(self):
jacobian = self.gp_est._get_Jacobian(np.array([5]),np.array([4]))
est = (self.gp_est._covariance(np.array([5]),np.array([4.1]))-self.gp_est._covariance(np.array([5]),np.array([3.9])))/0.2
self.assertAlmostEqual(jacobian[0][0],est,delta=1e-2)
def test_hessian(self):
hessian = self.gp_est._get_Hessian(np.array([5]),np.array([4]))
est = (self.gp_est._covariance(np.array([5]),np.array([4.1])) - 2 * self.gp_est._covariance(np.array([5]),np.array([4.0]))+self.gp_est._covariance(np.array([5]),np.array([3.9])))/0.01
self.assertAlmostEqual(hessian[0][0],est,delta=1e-2)
class TestUncertaintyPropagationMETIS(unittest.TestCase):
def setUp(self):
# if not os.path.exists('tests/metis_gp.pkl'):
# if os.path.exists('skgpuppy/tests/metis_data.pkl'):
# with open('skgpuppy/tests/metis_data.pkl', 'rb') as output:
# if sys.version_info.major == 3:
# (collection,x,t) = pickle.load(output, encoding='latin1')
# else:
# (collection,x,t) = pickle.load(output)
# else:
# raise RuntimeError("Test data not found!")
from skgpuppy.tests.metis_data import x,t
self.gp_est = GaussianProcess(x, t,GaussianCovariance())
# with open('tests/metis_gp.pkl', 'wb') as output:
# pickle.dump(self.gp_est, output, protocol=-1)
# else:
# with open('tests/metis_gp.pkl', 'rb') as output:
# self.gp_est = pickle.load(output)
min = np.array([0.1, 0, 0])#, 200
max = np.array([30, 10, 0.05])#, 1000
self.mean = (min+max)/2
self.Sigma = np.diag([2**2,1**2,0.005**2])
def test_propagation(self):
#import skgpuppy.UncertaintyPropagation
#skgpuppy.UncertaintyPropagation.weaving = False
#from skgpuppy.UncertaintyPropagation2 import UncertaintyPropagationApprox, UncertaintyPropagationExact
# Too large to ship the data for testing
# if os.path.exists('skgpuppy/tests/metis_data_mc.pkl'):
# with open('skgpuppy/tests/metis_data_mc.pkl', 'rb') as output:
# if sys.version_info.major == 3:
# (collection,x,t) = pickle.load(output, encoding='latin1')
# else:
# (collection,x,t) = pickle.load(output)
# n = t.size
# print(n)
# print(t.mean(), t.std())
# #With Fisher info sd:
# ci_min = t.std()-1.96*t.std()/np.sqrt(n)/np.sqrt(2)
# ci_max = t.std()+1.96*t.std()/np.sqrt(n)/np.sqrt(2)
# print(ci_min, ci_max)
#
# # With Fisher info variance
# self.assertAlmostEqual(np.sqrt(t.std()**2-1.96*np.sqrt(2)*t.std()**2/np.sqrt(n)),ci_min,delta=1e-5)
# self.assertAlmostEqual(np.sqrt(t.std()**2+1.96*np.sqrt(2)*t.std()**2/np.sqrt(n)),ci_max,delta=1e-5)
# with chi squared distribution:
#print np.sqrt((n-1)*t.std()**2 / chi2.ppf(1-0.05/2,n-1)), np.sqrt((n-1)*t.std()**2 / chi2.ppf(0.05/2,n-1))
#else:
ci_min = 0.0410788036621
ci_max = 0.0422334526251
meanG,varianceG = self.gp_est(self.mean)
print("GP")
print(meanG, np.sqrt(varianceG))
print(self.gp_est._get_vt())
code_u = varianceG - self.gp_est._get_vt()
print("Code uncertainty: ", np.sqrt(code_u))
self.assertLess(np.sqrt(code_u),0.0006)
upe = UncertaintyPropagationExact(self.gp_est)
start = time.time()
meanE, varianceE = upe.propagate_GA(self.mean, self.Sigma)
print("UPE")
print(time.time()-start)
print(meanE, np.sqrt(varianceE))
print(np.sqrt(varianceE - code_u))
self.assertLess(ci_min, np.sqrt(varianceE - code_u))
self.assertLess(np.sqrt(varianceE - code_u),ci_max)
upa = UncertaintyPropagationApprox(self.gp_est)
start = time.time()
meanA, varianceA = upa.propagate_GA(self.mean, self.Sigma)
print("UPA")
print(time.time() - start)
print(meanA, np.sqrt(varianceA))
print(np.sqrt(varianceA - code_u))
self.assertLess(ci_min, np.sqrt(varianceA - code_u))
self.assertLess(np.sqrt(varianceA - code_u),ci_max)
def test_IUPOpt(self):
meanG,varianceG = self.gp_est(self.mean)
code_u = varianceG - self.gp_est._get_vt()
output_variance = 0.005**2 + code_u #0.005**2
u = self.mean
gps = self.gp_est
c = np.ones(3) # cost/fisher and fisher = 1/rate**2
I = np.array([1/u[1]**2,2/(u[1]**2),1/(u[2]*(1-u[2]))])
iupa = InverseUncertaintyPropagationApprox(output_variance,gps,u,c,I,coestimated=[[0,1]])
iupn = InverseUncertaintyPropagationNumerical(output_variance,gps,u,c,I,upga_class=UncertaintyPropagationApprox,coestimated=[[0,1]])
iupne = InverseUncertaintyPropagationNumerical(output_variance,gps,u,c,I,upga_class=UncertaintyPropagationExact,coestimated=[[0,1]])
upa = UncertaintyPropagationApprox(self.gp_est)
upe = UncertaintyPropagationExact(self.gp_est)
#file = open("output/IUPOpt_out_var_2.txt",'a')
start = time.time()
sol_a = iupa.get_best_solution()
end = time.time()
print("Approx ", end - start)
print(c/I/sol_a)
#Testing coestimated
self.assertAlmostEqual((c/I/sol_a)[0],(c/I/sol_a)[1],delta=1e-5)
print(np.sqrt(upa.propagate_GA(u,np.diag(sol_a))[1]-code_u))
print(np.sqrt(upe.propagate_GA(u,np.diag(sol_a))[1]-code_u))
self.assertAlmostEqual(np.sqrt(upa.propagate_GA(u,np.diag(sol_a))[1]-code_u),np.sqrt(upe.propagate_GA(u,np.diag(sol_a))[1]-code_u),delta=1e-5)
a_costs = (np.array([1,0,1])*c/I * 1.0/np.array(sol_a)).sum()
print(a_costs)
start = time.time()
sol_n = iupn.get_best_solution()
end = time.time()
print("Numerical ", end - start)
print(c/I/sol_n)
self.assertAlmostEqual((c/I/sol_n)[0],(c/I/sol_n)[1],delta=1e-5)
print(np.sqrt(upa.propagate_GA(u,np.diag(sol_n))[1]-code_u))
print(np.sqrt(upe.propagate_GA(u,np.diag(sol_n))[1]-code_u))
self.assertAlmostEqual(np.sqrt(upa.propagate_GA(u,np.diag(sol_n))[1]-code_u),np.sqrt(upe.propagate_GA(u,np.diag(sol_n))[1]-code_u),delta=1e-5)
n_costs = (np.array([1,0,1])*c/I * 1.0/np.array(sol_n)).sum()
print(n_costs)
start = time.time()
sol_ne = iupne.get_best_solution()
end = time.time()
print("Numerical Exact", end - start)
print(c/I/sol_ne)
self.assertAlmostEqual((c/I/sol_ne)[0],(c/I/sol_ne)[1],delta=1e-5)
print(np.sqrt(upa.propagate_GA(u,np.diag(sol_ne))[1]-code_u))
print(np.sqrt(upe.propagate_GA(u,np.diag(sol_ne))[1]-code_u))
self.assertAlmostEqual(np.sqrt(upa.propagate_GA(u,np.diag(sol_ne))[1]-code_u),np.sqrt(upe.propagate_GA(u,np.diag(sol_ne))[1]-code_u),delta=1e-5)
ne_costs = (np.array([1,0,1])*c/I * 1.0/np.array(sol_ne)).sum()
print(ne_costs)
self.assertAlmostEqual(a_costs,n_costs,delta=1e-1)
self.assertAlmostEqual(a_costs,ne_costs,delta=5e2)
#TODO: Use the MM1 sim as a Testcase
class TestPeriodicCovariance(unittest.TestCase):
def setUp(self):
np.random.seed(1234)
self.x = np.atleast_2d(np.linspace(0, 10, 50)).T
w = np.array([0.02])
w2 = np.array([2])
v = 2
vt = 0.01
p = np.array([3])
# v = np.exp(theta[0])
# vt = np.exp(theta[1])
# w = np.exp(theta[2:2+d])
# p = np.exp(theta[2+d:2+2*d])
# w2 = np.exp(theta[2+2*d:])
n,d = np.shape(self.x)
self.assertEqual(d,1)
self.theta_min = np.ones(2+3*d)
self.theta_min[0] = np.log(v)
self.theta_min[1] = np.log(vt)
self.theta_min[2:2+d] = np.log(w)
self.theta_min[2+d:2+2*d] = np.log(p)
self.theta_min[2+2*d:2+3*d] = np.log(w2)
self.y = GaussianProcess.get_realisation(self.x, PeriodicCovariance(),self.theta_min)
def test_cov_derivative(self):
n,d = np.shape(self.x)
cov = PeriodicCovariance()
for xi in self.x:
for xj in self.x:
for j in range(2+3*d):
self.assertAlmostEqual(Covariance._d_cov_d_theta(cov,xi,xj,self.theta_min,j),cov._d_cov_d_theta(xi,xj,self.theta_min,j),delta=1e-3)
def test_cov(self):
print("Theta_min: ", self.theta_min)
x = np.atleast_2d(np.linspace(0.1, 9.9, 200)).T # upper bound 15 to show prediction
gp_est = GaussianProcess(self.x, self.y,PeriodicCovariance())
means, variances = gp_est.estimate_many(x)
sigma = np.sqrt(variances)
gp_est2 = GaussianProcess(self.x, self.y,PeriodicCovariance(),self.theta_min)
means2, variances2 = gp_est2.estimate_many(x)
sigma2 = np.sqrt(variances2)
for i,mean in enumerate(means):
self.assertAlmostEqual(mean,means2[i],delta=5e-2)
self.assertAlmostEqual(sigma[i],sigma2[i],delta=5e-2)
# import matplotlib.pyplot as plt
# fig = plt.figure()
# plt.plot(self.x,self.y,'o',x,means2)#,x,means
#
# #plt.fill_between(x.ravel(), means + 1.96 * sigma, means - 1.96 * sigma, facecolor='lightblue')
# plt.fill_between(x.ravel(), means2 + 1.96 * sigma2, means2 - 1.96 * sigma2, facecolor='red')
# plt.title('periodic')
# plt.legend(['Realisation','GP'])
# #plt.draw()
# plt.show()
# for i,xi in enumerate(self.x):
# self.assertLess(means[i]-3*sigma[i],self.y[i])
# self.assertLess(self.y[i],means[i]+3*sigma[i])
|
|
"""This template tag library is used for displaying pagination links in
paginated Django views. It exposes a template tag `{% paginationplus %}` that
will take care of iterating over the page numbers.
Usage
-----
Add `paginationplus` to your `INSTALLED_APPS` in your settings.
At the start of the template for your paginated view, use the following to load
the tag module:
{% load paginationplus %}
Then, at the position you want your pagination links to appear, use the
following block tag.
{% paginationplus page_obj url_name url_arg1=... url_arg2=... %}
...
{% endpaginationplus %}
The first argument passed to the opening tag is the `Page` object of your
paginated view. The remaining arguments are the same as the arguments passed to
the built-in `{% url %}` tag, minus the argument that takes the value for the
page number in the view, e.g. `page` in the generic view `ListView`.
The block iterates over the page numbers available from the `Paginator` object
associated with the `Page` object that is passed as the first argument to the
opening tag.
The block's content is rendered once for each iteration, and within this block,
a template variable named `paginationplus` is available.
This template variable exposes four attributes:
* `number`
The page number that is the subject of this iteration
* `url`
Contains the url of the page for the page number currently iterated over.
* `is_filler`
When this is True, the current iteration does not represent a page number,
but instead represents a filler, i.e. a hole in the sequence of page numbers.
See below for more information.
* `is_current`
When this is True, the current iteration represents the number of the page
that is currently displayed in the view.
Single tag usage
----------------
An alternative to the block tag, is the following:
{% paginationplus page_obj url_name url_arg1=... url_arg2=... ... with 'template/name.html' %}
Using `with` in the tag indicates that the iteration will not occur in a block,
but instead in the template that follows `with`. Within this template, the
parent template's full context is available, with an added `paginationplus`
variable. The template passed to the tag needn't be a string literal; any available
template variable will do.
Settings
--------
By default, paginationplus will support displaying the links for the first,
previous, current, next, and last page. For instance, if you have a paginated
view with 99 pages, and the current page is page 30, the following sequence will
be iterated over: `[1, None, 29, 30, 31, None, 99]`. If the current page is
page 3 instead, the sequence will be `[1, 2, 3, 4, None, 99]`.
In the above sequences, the `None` values represent a hole in the page number
sequence, and for these holes, the `paginationplus` template variable will have
its `is_filler` attribute set to `True`, the `number` and `url` attributes will
be set to `None`, and `is_current` will be set to `False`.
To disable this behavior, and iterate over all available page numbers, you can
set the `PAGINATIONPLUS_CONTIGUOUS` setting to `True` in your project's
settings.
To control the number of page numbers before and after the current page that
will be iterated over, you can set the `PAGINATIONPLUS_MAX_DISTANCE` setting.
For instance, when `PAGINATIONPLUS_MAX_DISTANCE` is set to 2, the following
sequence will be iterated over when the number of pages is 99 and the current
page is 30: `[1, None, 28, 29, 30, 31, 32, None, 99]`. And when the current page
is 3, the sequence will be `[1, 2, 3, 4, 5, None, 99]`.
"""
from django import template
from django.core import paginator
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured
from django.utils.safestring import mark_safe
import itertools
register = template.Library()
class PartialUrl(object):
"""An object that represents an URL to a paged view, without a value for the
'page' view argument
"""
def __init__(self, url_name, **kwargs):
self.url_name = url_name
self.kwargs = kwargs
def url_for_page(self, page):
"""Get the URL for the page
"""
kwargs = dict(self.kwargs)
kwargs['page'] = page
return reverse(self.url_name, kwargs=kwargs)
class PaginationPage(object):
def __init__(self, number, is_current, partial_url):
self.number = number
self.is_current = is_current
self.partial_url = partial_url
@property
def is_filler(self):
return not self.number
@property
def url(self):
if self.is_filler:
return None
if self.partial_url:
return self.partial_url.url_for_page(self.number)
return '?page=%d' % self.number
def __unicode__(self):
if self.is_filler:
return ''
return mark_safe(u'<a href="%s">%d</a>' % (self.url, self.number))
class PaginationPlusNode(template.Node):
def __init__(self, nodelist_or_include, page, url_name, url_kwargs):
self.nodelist_or_include = nodelist_or_include
self.page = page
self.url_name = url_name
self.url_kwargs = url_kwargs
from django.conf import settings
try:
self.max_distance = int(
getattr(settings, 'PAGINATIONPLUS_MAX_DISTANCE', 1)
)
if self.max_distance < 1:
raise ValueError
except (TypeError, ValueError):
raise ImproperlyConfigured(
'PAGINATIONPLUS_MAX_DISTANCE must be a number greater than 0'
)
self.contiguous = getattr(settings, 'PAGINATIONPLUS_CONTIGUOUS', False)
def pagination(self, partial_url):
page = self.page
paginator = page.paginator
if self.contiguous:
for p in itertools.imap(
lambda p: PaginationPage(p, p == page.number, partial_url),
paginator.page_range
):
yield p
return
yield PaginationPage(1, page.number == 1, partial_url)
last = 1
startRange = (1, 1)
currentRange = (
page.number - self.max_distance, page.number + self.max_distance
)
endRange = (paginator.num_pages, paginator.num_pages)
for r in (startRange, currentRange, endRange):
if r[0] > last + 1:
yield PaginationPage(None, False, partial_url)
rangeStart = max(min(paginator.num_pages, r[0]), last + 1)
rangeEnd = min(paginator.num_pages, r[1])
if rangeStart <= rangeEnd:
for p in xrange(rangeStart, rangeEnd + 1):
yield PaginationPage(p, p == page.number, partial_url)
last = rangeEnd
def url_kwargs_to_dict(self, context):
args = []
kwargs = {}
for index, kwd in enumerate(self.url_kwargs):
splitted = kwd.split('=')
if len(splitted) > 2:
raise template.TemplateSyntaxError(
'Could not parse argument %d: %r' % (index + 2, kwd)
)
arg_name = None
if len(splitted) == 2:
arg_name, arg_value = splitted
else:
arg_value = splitted[0]
arg_value = template.Variable(arg_value)
arg_value = arg_value.resolve(context)
if arg_name:
kwargs[arg_name] = arg_value
else:
args.append(arg_value)
return args, kwargs
def render(self, context):
page = template.Variable(self.page)
page = page.resolve(context)
self.page = page
if not isinstance(page, paginator.Page):
raise template.TemplateSyntaxError(
'%r is not a valid Page object' % self.page
)
url_args, url_kwargs = self.url_kwargs_to_dict(context)
partial_url = PartialUrl(self.url_name, *url_args, **url_kwargs)
if not isinstance(self.nodelist_or_include, template.NodeList):
# 'with' used in tag, template include is assumed
nodelist_var = template.Variable(self.nodelist_or_include)
nodelist = template.loader.get_template(
nodelist_var.resolve(context)
)
else:
nodelist = self.nodelist_or_include
result = []
for paginated in self.pagination(partial_url):
context.push()
context['paginationplus'] = paginated
result.append(nodelist.render(context))
context.pop()
return u''.join(result)
@register.assignment_tag
def paginationplus_url(url_name, **kwargs):
return PartialUrl(url_name, **kwargs)
@register.filter
def paginate(partial_pagination_url, page):
return partial_pagination_url.url_for_page(page)
@register.tag('paginationplus')
def paginationplus(parser, token):
contents = token.split_contents()
try:
tag_name, page, url_name = contents[:3]
except ValueError:
raise template.TemplateSyntaxError(
'%r tag expects at least 2 arguments' % contents[0]
)
url_kwargs = contents[3:]
if contents[-2] == 'with':
include_template = contents[-1]
url_kwargs = url_kwargs[:-2]
nodelist_or_include = include_template
else:
nodelist_or_include = parser.parse(('endpaginationplus',))
parser.delete_first_token()
return PaginationPlusNode(nodelist_or_include, page, url_name, url_kwargs)
|
|
# Copyright 2012 Anton Beloglazov
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" The main data collector module.
The data collector is deployed on every compute host and is executed
periodically to collect the CPU utilization data for each VM running
on the host and stores the data in the local file-based data store.
The data is stored as the average number of MHz consumed by a VM
during the last measurement interval. The CPU usage data are stored as
integers. This data format is portable: the stored values can be
converted to the CPU utilization for any host or VM type, supporting
heterogeneous hosts and VMs.
The actual data is obtained from Libvirt in the form of the CPU time
consumed by a VM to date. Using the CPU time collected at the previous
time frame, the CPU time for the past time interval is calculated.
According to the CPU frequency of the host and the length of the time
interval, the CPU time is converted into the required average MHz
consumed by the VM over the last time interval. The collected data are
stored both locally and submitted to the central database. The number
of the latest data values stored locally and passed to the underload /
overload detection and VM selection algorithms is defined using the
`data_collector_data_length` option in the configuration file.
At the beginning of every execution, the data collector obtains the
set of VMs currently running on the host using the Nova API and
compares them to the VMs running on the host at the previous time
step. If new VMs have been found, the data collector fetches the
historical data about them from the central database and stores the
data in the local file-based data store. If some VMs have been
removed, the data collector removes the data about these VMs from the
local data store.
The data collector stores the resource usage information locally in
files in the <local_data_directory>/vm directory, where
<local_data_directory> is defined in the configuration file using
the local_data_directory option. The data for each VM are stored in
a separate file named according to the UUID of the corresponding VM.
The format of the files is a new line separated list of integers
representing the average CPU consumption by the VMs in MHz during the
last measurement interval.
The data collector will be implemented as a Linux daemon running in
the background and collecting data on the resource usage by VMs every
data_collector_interval seconds. When the data collection phase is
invoked, the component performs the following steps:
1. Read the names of the files from the <local_data_directory>/vm
directory to determine the list of VMs running on the host at the
last data collection.
2. Call the Nova API to obtain the list of VMs that are currently
active on the host.
3. Compare the old and new lists of VMs and determine the newly added
or removed VMs.
4. Delete the files from the <local_data_directory>/vm directory
corresponding to the VMs that have been removed from the host.
5. Fetch the latest data_collector_data_length data values from the
central database for each newly added VM using the database
connection information specified in the sql_connection option and
save the data in the <local_data_directory>/vm directory.
6. Call the Libvirt API to obtain the CPU time for each VM active on
the host.
7. Transform the data obtained from the Libvirt API into the average
MHz according to the frequency of the host's CPU and time interval
from the previous data collection.
8. Store the converted data in the <local_data_directory>/vm
directory in separate files for each VM, and submit the data to the
central database.
9. Schedule the next execution after data_collector_interval
seconds.
"""
from contracts import contract
from neat.contracts_extra import *
import os
import time
from collections import deque
import libvirt
import neat.common as common
from neat.config import *
from neat.db_utils import *
import logging
log = logging.getLogger(__name__)
@contract
def start():
""" Start the data collector loop.
:return: The final state.
:rtype: dict(str: *)
"""
config = read_and_validate_config([DEFAILT_CONFIG_PATH, CONFIG_PATH],
REQUIRED_FIELDS)
common.init_logging(
config['log_directory'],
'data-collector.log',
int(config['log_level']))
vm_path = common.build_local_vm_path(config['local_data_directory'])
if not os.access(vm_path, os.F_OK):
os.makedirs(vm_path)
log.info('Created a local VM data directory: %s', vm_path)
else:
cleanup_all_local_data(config['local_data_directory'])
log.info('Cleaned up the local data directory: %s',
config['local_data_directory'])
interval = config['data_collector_interval']
log.info('Starting the data collector, ' +
'iterations every %s seconds', interval)
return common.start(
init_state,
execute,
config,
int(interval))
@contract
def init_state(config):
""" Initialize a dict for storing the state of the data collector.
:param config: A config dictionary.
:type config: dict(str: *)
:return: A dict containing the initial state of the data collector.
:rtype: dict
"""
vir_connection = libvirt.openReadOnly(None)
if vir_connection is None:
message = 'Failed to open a connection to the hypervisor'
log.critical(message)
raise OSError(message)
hostname = vir_connection.getHostname()
host_cpu_mhz, host_ram = get_host_characteristics(vir_connection)
physical_cpus = common.physical_cpu_count(vir_connection)
host_cpu_usable_by_vms = float(config['host_cpu_usable_by_vms'])
db = init_db(config['sql_connection'])
db.update_host(hostname,
int(host_cpu_mhz * host_cpu_usable_by_vms),
physical_cpus,
host_ram)
return {'previous_time': 0.,
'previous_cpu_time': dict(),
'previous_cpu_mhz': dict(),
'previous_host_cpu_time_total': 0.,
'previous_host_cpu_time_busy': 0.,
'previous_overload': -1,
'vir_connection': vir_connection,
'hostname': hostname,
'host_cpu_overload_threshold':
float(config['host_cpu_overload_threshold']) * \
host_cpu_usable_by_vms,
'physical_cpus': physical_cpus,
'physical_cpu_mhz': host_cpu_mhz,
'physical_core_mhz': host_cpu_mhz / physical_cpus,
'db': db}
def execute(config, state):
""" Execute a data collection iteration.
1. Read the names of the files from the <local_data_directory>/vm
directory to determine the list of VMs running on the host at the
last data collection.
2. Call the Nova API to obtain the list of VMs that are currently
active on the host.
3. Compare the old and new lists of VMs and determine the newly added
or removed VMs.
4. Delete the files from the <local_data_directory>/vm directory
corresponding to the VMs that have been removed from the host.
5. Fetch the latest data_collector_data_length data values from the
central database for each newly added VM using the database
connection information specified in the sql_connection option and
save the data in the <local_data_directory>/vm directory.
6. Call the Libvirt API to obtain the CPU time for each VM active on
the host.
7. Transform the data obtained from the Libvirt API into the average
MHz according to the frequency of the host's CPU and time interval
from the previous data collection.
8. Store the converted data in the <local_data_directory>/vm
directory in separate files for each VM, and submit the data to the
central database.
:param config: A config dictionary.
:type config: dict(str: *)
:param state: A state dictionary.
:type state: dict(str: *)
:return: The updated state dictionary.
:rtype: dict(str: *)
"""
log.info('Started an iteration')
vm_path = common.build_local_vm_path(config['local_data_directory'])
host_path = common.build_local_host_path(config['local_data_directory'])
data_length = int(config['data_collector_data_length'])
vms_previous = get_previous_vms(vm_path)
vms_current = get_current_vms(state['vir_connection'])
vms_added = get_added_vms(vms_previous, vms_current.keys())
added_vm_data = dict()
if vms_added:
if log.isEnabledFor(logging.DEBUG):
log.debug('Added VMs: %s', str(vms_added))
for vm in list(vms_added):
if vms_current[vm] != libvirt.VIR_DOMAIN_RUNNING:
vms_added.remove(vm)
del vms_current[vm]
if log.isEnabledFor(logging.DEBUG):
log.debug('Added VM %s skipped as migrating in', vm)
added_vm_data = fetch_remote_data(state['db'],
data_length,
vms_added)
if log.isEnabledFor(logging.DEBUG):
log.debug('Fetched remote data: %s', str(added_vm_data))
write_vm_data_locally(vm_path, added_vm_data, data_length)
vms_removed = get_removed_vms(vms_previous, vms_current.keys())
if vms_removed:
if log.isEnabledFor(logging.DEBUG):
log.debug('Removed VMs: %s', str(vms_removed))
cleanup_local_vm_data(vm_path, vms_removed)
for vm in vms_removed:
del state['previous_cpu_time'][vm]
del state['previous_cpu_mhz'][vm]
log.info('Started VM data collection')
current_time = time.time()
(cpu_time, cpu_mhz) = get_cpu_mhz(state['vir_connection'],
state['physical_core_mhz'],
state['previous_cpu_time'],
state['previous_time'],
current_time,
vms_current.keys(),
state['previous_cpu_mhz'],
added_vm_data)
log.info('Completed VM data collection')
log.info('Started host data collection')
(host_cpu_time_total,
host_cpu_time_busy,
host_cpu_mhz) = get_host_cpu_mhz(state['physical_cpu_mhz'],
state['previous_host_cpu_time_total'],
state['previous_host_cpu_time_busy'])
log.info('Completed host data collection')
if state['previous_time'] > 0:
append_vm_data_locally(vm_path, cpu_mhz, data_length)
append_vm_data_remotely(state['db'], cpu_mhz)
total_vms_cpu_mhz = sum(cpu_mhz.values())
host_cpu_mhz_hypervisor = host_cpu_mhz - total_vms_cpu_mhz
if host_cpu_mhz_hypervisor < 0:
host_cpu_mhz_hypervisor = 0
total_cpu_mhz = total_vms_cpu_mhz + host_cpu_mhz_hypervisor
append_host_data_locally(host_path, host_cpu_mhz_hypervisor, data_length)
append_host_data_remotely(state['db'],
state['hostname'],
host_cpu_mhz_hypervisor)
if log.isEnabledFor(logging.DEBUG):
log.debug('Collected VM CPU MHz: %s', str(cpu_mhz))
log.debug('Collected total VMs CPU MHz: %s', str(total_vms_cpu_mhz))
log.debug('Collected hypervisor CPU MHz: %s', str(host_cpu_mhz_hypervisor))
log.debug('Collected host CPU MHz: %s', str(host_cpu_mhz))
log.debug('Collected total CPU MHz: %s', str(total_cpu_mhz))
state['previous_overload'] = log_host_overload(
state['db'],
state['host_cpu_overload_threshold'],
state['hostname'],
state['previous_overload'],
state['physical_cpu_mhz'],
total_cpu_mhz)
state['previous_time'] = current_time
state['previous_cpu_time'] = cpu_time
state['previous_cpu_mhz'] = cpu_mhz
state['previous_host_cpu_time_total'] = host_cpu_time_total
state['previous_host_cpu_time_busy'] = host_cpu_time_busy
log.info('Completed an iteration')
return state
@contract
def get_previous_vms(path):
""" Get a list of VM UUIDs from the path.
:param path: A path to read VM UUIDs from.
:type path: str
:return: The list of VM UUIDs from the path.
:rtype: list(str)
"""
return os.listdir(path)
@contract()
def get_current_vms(vir_connection):
""" Get a dict of VM UUIDs to states from libvirt.
:param vir_connection: A libvirt connection object.
:type vir_connection: virConnect
:return: The dict of VM UUIDs to states from libvirt.
:rtype: dict(str: int)
"""
vm_uuids = {}
for vm_id in vir_connection.listDomainsID():
try:
vm = vir_connection.lookupByID(vm_id)
vm_uuids[vm.UUIDString()] = vm.state(0)[0]
except libvirt.libvirtError:
pass
return vm_uuids
@contract
def get_added_vms(previous_vms, current_vms):
""" Get a list of newly added VM UUIDs.
:param previous_vms: A list of VMs at the previous time frame.
:type previous_vms: list(str)
:param current_vms: A list of VMs at the current time frame.
:type current_vms: list(str)
:return: A list of VM UUIDs added since the last time frame.
:rtype: list(str)
"""
return substract_lists(current_vms, previous_vms)
@contract
def get_removed_vms(previous_vms, current_vms):
""" Get a list of VM UUIDs removed since the last time frame.
:param previous_vms: A list of VMs at the previous time frame.
:type previous_vms: list(str)
:param current_vms: A list of VMs at the current time frame.
:type current_vms: list(str)
:return: A list of VM UUIDs removed since the last time frame.
:rtype: list(str)
"""
return substract_lists(previous_vms, current_vms)
@contract
def substract_lists(list1, list2):
""" Return the elements of list1 that are not in list2.
:param list1: The first list.
:type list1: list
:param list2: The second list.
:type list2: list
:return: The list of elements of list1 that are not in list2.
:rtype: list
"""
return list(set(list1).difference(list2))
@contract
def cleanup_local_vm_data(path, vms):
""" Delete the local data related to the removed VMs.
:param path: A path to remove VM data from.
:type path: str
:param vms: A list of removed VM UUIDs.
:type vms: list(str)
"""
for vm in vms:
os.remove(os.path.join(path, vm))
@contract
def cleanup_all_local_data(path):
""" Delete all the local data about VMs.
:param path: A path to the local data directory.
:type path: str
"""
vm_path = common.build_local_vm_path(path)
cleanup_local_vm_data(vm_path, os.listdir(vm_path))
host_path = common.build_local_host_path(path)
if os.access(host_path, os.F_OK):
os.remove(host_path)
@contract
def fetch_remote_data(db, data_length, uuids):
""" Fetch VM data from the central DB.
:param db: The database object.
:type db: Database
:param data_length: The length of data to fetch.
:type data_length: int
:param uuids: A list of VM UUIDs to fetch data for.
:type uuids: list(str)
:return: A dictionary of VM UUIDs and the corresponding data.
:rtype: dict(str : list(int))
"""
result = dict()
for uuid in uuids:
result[uuid] = db.select_cpu_mhz_for_vm(uuid, data_length)
return result
@contract
def write_vm_data_locally(path, data, data_length):
""" Write a set of CPU MHz values for a set of VMs.
:param path: A path to write the data to.
:type path: str
:param data: A map of VM UUIDs onto the corresponding CPU MHz history.
:type data: dict(str : list(int))
:param data_length: The maximum allowed length of the data.
:type data_length: int
"""
for uuid, values in data.items():
with open(os.path.join(path, uuid), 'w') as f:
if data_length > 0:
f.write('\n'.join([str(x)
for x in values[-data_length:]]) + '\n')
@contract
def append_vm_data_locally(path, data, data_length):
""" Write a CPU MHz value for each out of a set of VMs.
:param path: A path to write the data to.
:type path: str
:param data: A map of VM UUIDs onto the corresponding CPU MHz values.
:type data: dict(str : int)
:param data_length: The maximum allowed length of the data.
:type data_length: int
"""
for uuid, value in data.items():
vm_path = os.path.join(path, uuid)
if not os.access(vm_path, os.F_OK):
with open(vm_path, 'w') as f:
f.write(str(value) + '\n')
else:
with open(vm_path, 'r+') as f:
values = deque(f.read().strip().splitlines(), data_length)
values.append(value)
f.truncate(0)
f.seek(0)
f.write('\n'.join([str(x) for x in values]) + '\n')
@contract
def append_vm_data_remotely(db, data):
""" Submit CPU MHz values to the central database.
:param db: The database object.
:type db: Database
:param data: A map of VM UUIDs onto the corresponding CPU MHz values.
:type data: dict(str : int)
"""
db.insert_vm_cpu_mhz(data)
@contract
def append_host_data_locally(path, cpu_mhz, data_length):
""" Write a CPU MHz value for the host.
:param path: A path to write the data to.
:type path: str
:param cpu_mhz: A CPU MHz value.
:type cpu_mhz: int,>=0
:param data_length: The maximum allowed length of the data.
:type data_length: int
"""
if not os.access(path, os.F_OK):
with open(path, 'w') as f:
f.write(str(cpu_mhz) + '\n')
else:
with open(path, 'r+') as f:
values = deque(f.read().strip().splitlines(), data_length)
values.append(cpu_mhz)
f.truncate(0)
f.seek(0)
f.write('\n'.join([str(x) for x in values]) + '\n')
@contract
def append_host_data_remotely(db, hostname, host_cpu_mhz):
""" Submit a host CPU MHz value to the central database.
:param db: The database object.
:type db: Database
:param hostname: The host name.
:type hostname: str
:param host_cpu_mhz: An average host CPU utilization in MHz.
:type host_cpu_mhz: int,>=0
"""
db.insert_host_cpu_mhz(hostname, host_cpu_mhz)
@contract
def get_cpu_mhz(vir_connection, physical_core_mhz, previous_cpu_time,
previous_time, current_time, current_vms,
previous_cpu_mhz, added_vm_data):
""" Get the average CPU utilization in MHz for a set of VMs.
:param vir_connection: A libvirt connection object.
:type vir_connection: virConnect
:param physical_core_mhz: The core frequency of the physical CPU in MHz.
:type physical_core_mhz: int
:param previous_cpu_time: A dict of previous CPU times for the VMs.
:type previous_cpu_time: dict(str : number)
:param previous_time: The previous timestamp.
:type previous_time: float
:param current_time: The current timestamp.
:type current_time: float
:param current_vms: A list of VM UUIDs.
:type current_vms: list(str)
:param previous_cpu_mhz: A dict of VM UUIDs and previous CPU MHz.
:type previous_cpu_mhz: dict(str : int)
:param added_vm_data: A dict of VM UUIDs and the corresponding data.
:type added_vm_data: dict(str : list(int))
:return: The updated CPU times and average CPU utilization in MHz.
:rtype: tuple(dict(str : number), dict(str : int))
"""
previous_vms = previous_cpu_time.keys()
added_vms = get_added_vms(previous_vms, current_vms)
removed_vms = get_removed_vms(previous_vms, current_vms)
cpu_mhz = {}
for uuid in removed_vms:
del previous_cpu_time[uuid]
for uuid, cpu_time in previous_cpu_time.items():
current_cpu_time = get_cpu_time(vir_connection, uuid)
if current_cpu_time < cpu_time:
if log.isEnabledFor(logging.DEBUG):
log.debug('VM %s: current_cpu_time < cpu_time: ' +
'previous CPU time %d, ' +
'current CPU time %d',
uuid, cpu_time, current_cpu_time)
log.debug('VM %s: using previous CPU MHz %d',
uuid, previous_cpu_mhz[uuid])
cpu_mhz[uuid] = previous_cpu_mhz[uuid]
else:
if log.isEnabledFor(logging.DEBUG):
log.debug('VM %s: previous CPU time %d, ' +
'current CPU time %d, ' +
'previous time %.10f, ' +
'current time %.10f',
uuid, cpu_time, current_cpu_time,
previous_time, current_time)
cpu_mhz[uuid] = calculate_cpu_mhz(physical_core_mhz, previous_time,
current_time, cpu_time,
current_cpu_time)
previous_cpu_time[uuid] = current_cpu_time
if log.isEnabledFor(logging.DEBUG):
log.debug('VM %s: CPU MHz %d', uuid, cpu_mhz[uuid])
for uuid in added_vms:
if added_vm_data[uuid]:
cpu_mhz[uuid] = added_vm_data[uuid][-1]
previous_cpu_time[uuid] = get_cpu_time(vir_connection, uuid)
return previous_cpu_time, cpu_mhz
@contract
def get_cpu_time(vir_connection, uuid):
""" Get the CPU time of a VM specified by the UUID using libvirt.
:param vir_connection: A libvirt connection object.
:type vir_connection: virConnect
:param uuid: The UUID of a VM.
:type uuid: str[36]
:return: The CPU time of the VM.
:rtype: number,>=0
"""
try:
domain = vir_connection.lookupByUUIDString(uuid)
return domain.getCPUStats(True, 0)[0]['cpu_time']
except libvirt.libvirtError:
return 0.
@contract
def calculate_cpu_mhz(cpu_mhz, previous_time, current_time,
previous_cpu_time, current_cpu_time):
""" Calculate the average CPU utilization in MHz for a period of time.
:param cpu_mhz: The frequency of a core of the physical CPU in MHz.
:type cpu_mhz: int
:param previous_time: The previous time.
:type previous_time: float
:param current_time: The current time.
:type current_time: float
:param previous_cpu_time: The previous CPU time of the domain.
:type previous_cpu_time: number
:param current_cpu_time: The current CPU time of the domain.
:type current_cpu_time: number
:return: The average CPU utilization in MHz.
:rtype: int,>=0
"""
return int(cpu_mhz * float(current_cpu_time - previous_cpu_time) / \
((current_time - previous_time) * 1000000000))
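# A quick worked example of the conversion above (illustrative numbers only):
# a VM that accumulated 5e8 ns of CPU time over a 1-second interval on a
# 2400 MHz core averages int(2400 * 5e8 / (1.0 * 1e9)) = 1200 MHz, i.e. it
# used roughly half a core during that interval.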
@contract
def get_host_cpu_mhz(cpu_mhz, previous_cpu_time_total, previous_cpu_time_busy):
""" Get the average CPU utilization in MHz for a set of VMs.
:param cpu_mhz: The total frequency of the physical CPU in MHz.
:type cpu_mhz: int
:param previous_cpu_time_total: The previous total CPU time.
:type previous_cpu_time_total: float
:param previous_cpu_time_busy: The previous busy CPU time.
:type previous_cpu_time_busy: float
:return: The current total and busy CPU time, and CPU utilization in MHz.
:rtype: tuple(float, float, int)
"""
cpu_time_total, cpu_time_busy = get_host_cpu_time()
cpu_usage = int(cpu_mhz * (cpu_time_busy - previous_cpu_time_busy) / \
(cpu_time_total - previous_cpu_time_total))
if cpu_usage < 0:
raise ValueError('The host CPU usage in MHz must be >=0, but it is: ' + str(cpu_usage) +
'; cpu_mhz=' + str(cpu_mhz) +
'; previous_cpu_time_total=' + str(previous_cpu_time_total) +
'; cpu_time_total=' + str(cpu_time_total) +
'; previous_cpu_time_busy=' + str(previous_cpu_time_busy) +
'; cpu_time_busy=' + str(cpu_time_busy))
return cpu_time_total, \
cpu_time_busy, \
cpu_usage
@contract()
def get_host_cpu_time():
""" Get the total and busy CPU time of the host.
:return: A tuple of the total and busy CPU time.
:rtype: tuple(float, float)
"""
with open('/proc/stat', 'r') as f:
values = [float(x) for x in f.readline().split()[1:8]]
return sum(values), sum(values[0:3])
@contract()
def get_host_characteristics(vir_connection):
""" Get the total CPU MHz and RAM of the host.
:param vir_connection: A libvirt connection object.
:type vir_connection: virConnect
:return: A tuple of the total CPU MHz and RAM of the host.
:rtype: tuple(int, int)
"""
info = vir_connection.getInfo()
return info[2] * info[3], info[1]
@contract()
def log_host_overload(db, overload_threshold, hostname, previous_overload,
host_total_mhz, host_utilization_mhz):
""" Log to the DB whether the host is overloaded.
:param db: The database object.
:type db: Database
:param overload_threshold: The host overload threshold.
:type overload_threshold: float
:param hostname: The host name.
:type hostname: str
:param previous_overload: Whether the host has been overloaded.
:type previous_overload: int
:param host_total_mhz: The total frequency of the CPU in MHz.
:type host_total_mhz: int
:param host_utilization_mhz: The total CPU utilization in MHz.
:type host_utilization_mhz: int
:return: Whether the host is overloaded.
:rtype: int
"""
overload = overload_threshold * host_total_mhz < host_utilization_mhz
overload_int = int(overload)
if previous_overload != -1 and previous_overload != overload_int \
or previous_overload == -1:
db.insert_host_overload(hostname, overload)
if log.isEnabledFor(logging.DEBUG):
log.debug('Overload state logged: %s', str(overload))
return overload_int
|
|
"""Landlab component that simulates overland flow.
This component simulates overland flow using the 2-D numerical model of
shallow-water flow over topography using the de Almeida et al., 2012
algorithm for storage-cell inundation modeling.
.. codeauthor:: Jordan Adams
Examples
--------
>>> import numpy as np
>>> from landlab import RasterModelGrid
>>> from landlab.components.overland_flow import OverlandFlow
Create a grid on which to calculate overland flow.
>>> grid = RasterModelGrid((4, 5))
The grid will need some data to provide the overland flow component. To
check the names of the fields that provide input to the overland flow
component use the *input_var_names* class property.
>>> OverlandFlow.input_var_names
('surface_water__depth', 'topographic__elevation')
Create fields of data for each of these input variables.
>>> grid.at_node['topographic__elevation'] = np.array([
... 0., 0., 0., 0., 0.,
... 1., 1., 1., 1., 1.,
... 2., 2., 2., 2., 2.,
... 3., 3., 3., 3., 3.])
>>> grid.at_node['surface_water__depth'] = np.array([
... 0. , 0. , 0. , 0. , 0. ,
... 0. , 0. , 0. , 0. , 0. ,
... 0. , 0. , 0. , 0. , 0. ,
... 0.1, 0.1, 0.1, 0.1, 0.1])
Instantiate the `OverlandFlow` component to work on this grid, and run it.
>>> of = OverlandFlow(grid, steep_slopes=True)
>>> of.run_one_step()
After calculating the overland flow, new fields have been added to the
grid. Use the *output_var_names* property to see the names of the fields that
have been changed.
>>> of.output_var_names
('surface_water__depth', 'surface_water__discharge', 'water_surface__gradient')
The `surface_water__depth` field is defined at nodes.
>>> of.var_loc('surface_water__depth')
'node'
>>> grid.at_node['surface_water__depth'] # doctest: +NORMALIZE_WHITESPACE
array([ 1.00000000e-05, 1.00000000e-05, 1.00000000e-05,
1.00000000e-05, 1.00000000e-05, 1.00000000e-05,
1.00000000e-05, 1.00000000e-05, 1.00000000e-05,
1.00000000e-05, 1.00000000e-05, 2.00100000e-02,
2.00100000e-02, 2.00100000e-02, 1.00000000e-05,
1.00010000e-01, 1.00010000e-01, 1.00010000e-01,
1.00010000e-01, 1.00010000e-01])
The `surface_water__discharge` field is defined at links. Because our initial
topography was a dipping plane, there is no water discharge in the horizontal
direction, only toward the bottom of the grid.
>>> of.var_loc('surface_water__discharge')
'link'
>>> q = grid.at_link['surface_water__discharge'] # doctest: +NORMALIZE_WHITESPACE
>>> np.all(q[grid.horizontal_links] == 0.)
True
>>> np.all(q[grid.vertical_links] <= 0.)
True
The *water_surface__gradient* is also defined at links.
>>> of.var_loc('water_surface__gradient')
'link'
>>> grid.at_link['water_surface__gradient'] # doctest: +NORMALIZE_WHITESPACE
array([ 0. , 0. , 0. , 0. ,
0. , 1. , 1. , 1. , 0. ,
0. , 0. , 0. , 0. ,
0. , 1. , 1. , 1. , 0. ,
0. , 0. , 0. , 0. ,
0. , 1.1, 1.1, 1.1, 0. ,
0. , 0. , 0. , 0. ])
"""
from landlab import Component, FieldError
import numpy as np
from landlab.grid.structured_quad import links
from landlab.utils.decorators import use_file_name_or_kwds
_SEVEN_OVER_THREE = 7.0 / 3.0
class OverlandFlow(Component):
"""Simulate overland flow using de Almeida approximations.
Landlab component that simulates overland flow using the de Almeida
et al., 2012 approximations of the 1D shallow water equations to be used
for 2D flood inundation modeling.
This component calculates discharge, depth and shear stress after some
precipitation event across any raster grid. Default input file is named
"overland_flow_input.txt' and is contained in the
landlab.components.overland_flow folder.
Parameters
----------
grid : RasterModelGrid
A landlab grid.
h_init : float, optional
Thickness of initial thin layer of water to prevent divide by zero
errors (m).
alpha : float, optional
Time step coefficient, described in Bates et al., 2010 and
de Almeida et al., 2012.
mannings_n : float, optional
Manning's roughness coefficient.
g : float, optional
Acceleration due to gravity (m/s^2).
theta : float, optional
Weighting factor from de Almeida et al., 2012.
rainfall_intensity : float, optional
Rainfall intensity.
The primary method of this class is :func:`run_one_step`.
Construction::
OverlandFlow(grid, default_fixed_links=False, h_init=0.00001,
alpha=0.7, mannings_n=0.03, g=9.81, theta=0.8,
rainfall_intensity=0.0, steep_slopes=False, **kwds)
"""
_name = 'OverlandFlow'
_input_var_names = (
'surface_water__depth',
'topographic__elevation',
)
_output_var_names = (
'surface_water__depth',
'surface_water__discharge',
'water_surface__gradient',
)
_var_units = {
'surface_water__depth': 'm',
'surface_water__discharge': 'm3/s',
'topographic__elevation': 'm',
'water_surface__gradient': '-',
}
_var_mapping = {
'surface_water__depth': 'node',
'topographic__elevation': 'node',
'surface_water__discharge': 'link',
'water_surface__gradient': 'link',
}
_var_doc = {
'surface_water__depth': 'The depth of water at each node.',
'topographic__elevation': 'The land surface elevation.',
'surface_water__discharge': 'The discharge of water on active links.',
'water_surface__gradient': 'Downstream gradient of the water surface.',
}
@use_file_name_or_kwds
def __init__(self, grid, default_fixed_links=False, h_init=0.00001,
alpha=0.7, mannings_n=0.03, g=9.81, theta=0.8,
rainfall_intensity=0.0, steep_slopes=False, **kwds):
"""Create a overland flow component.
Parameters
----------
grid : RasterModelGrid
A landlab grid.
h_init : float, optional
Thickness of initial thin layer of water to prevent divide by zero
errors (m).
alpha : float, optional
Time step coefficient, described in Bates et al., 2010 and
de Almeida et al., 2012.
mannings_n : float, optional
Manning's roughness coefficient.
g : float, optional
Acceleration due to gravity (m/s^2).
theta : float, optional
Weighting factor from de Almeida et al., 2012.
rainfall_intensity : float, optional
Rainfall intensity.
"""
super(OverlandFlow, self).__init__(grid, **kwds)
# First we copy our grid
self._grid = grid
self.h_init = h_init
self.alpha = alpha
if type(mannings_n) is str:
self.mannings_n = self.grid.at_link[mannings_n]
else:
self.mannings_n = mannings_n
self.g = g
self.theta = theta
self.rainfall_intensity = rainfall_intensity
self.steep_slopes = steep_slopes
# Now setting up fields at the links...
# For water discharge
try:
self.q = grid.add_zeros('surface_water__discharge', at='link',
units=self._var_units['surface_water__discharge'])
except FieldError:
# Field was already set; still, fill it with zeros
self.q = grid.at_link['surface_water__discharge']
self.q.fill(0.)
# For water depths calculated at links
try:
self.h_links = grid.add_zeros('surface_water__depth', at='link',
units=self._var_units[
'surface_water__depth'])
except FieldError:
self.h_links = grid.at_link['surface_water__depth']
self.h_links.fill(0.)
self.h_links += self.h_init
try:
self.h = grid.add_zeros('surface_water__depth', at='node',
units=self._var_units['surface_water__depth'])
except FieldError:
# Field was already set
self.h = grid.at_node['surface_water__depth']
self.h += self.h_init
# For water surface slopes at links
try:
self.water_surface_slope = grid.add_zeros('water_surface__gradient', at='link')
except FieldError:
self.water_surface_slope = grid.at_link['water_surface__gradient']
self.water_surface_slope.fill(0.)
# Start time of simulation is at 1.0 s
self.elapsed_time = 1.0
self.dt = None
self.dhdt = grid.zeros()
# When we instantiate the class we recognize that neighbors have not
# been found. After the user either calls self.set_up_neighbor_array
# or self.overland_flow this will be set to True. This is done so
# that every iteration of self.overland_flow does NOT need to
# reinitialize the neighbors and saves computation time.
self.neighbor_flag = False
# When looking for neighbors, we automatically ignore inactive links
# by default. However, what about when we want to look at fixed links
# too? By default, we ignore these, but if they are important to your
# model and will be updated in your driver loop, they can be used by
# setting the flag in the initialization of the class to 'True'
self.default_fixed_links = default_fixed_links
# Assigning a class variable to the elevation field.
self.z = self._grid.at_node['topographic__elevation']
def calc_time_step(self):
"""Calculate time step.
Adaptive time stepper from Bates et al., 2010 and de Almeida
et al., 2012
"""
self.dt = (self.alpha * self._grid.dx / np.sqrt(self.g * np.amax(
self._grid.at_node['surface_water__depth'])))
return self.dt
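# As a rough illustration of the adaptive step above: with alpha = 0.7,
# dx = 1 m and a maximum water depth of 0.1 m,
# dt = 0.7 * 1 / sqrt(9.81 * 0.1) ~= 0.71 s. Deeper water or finer grids
# therefore force smaller time steps.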
def set_up_neighbor_arrays(self):
"""Create and initialize link neighbor arrays.
Set up arrays of neighboring horizontal and vertical links that are
needed for the de Almeida solution.
"""
# First we identify all active links
self.active_ids = links.active_link_ids(self.grid.shape,
self.grid.status_at_node)
self.active_links_at_open_bdy = self.grid._active_links_at_node(
self.grid.open_boundary_nodes).transpose()
self.active_links_at_open_bdy = self.active_links_at_open_bdy[
np.where(self.active_links_at_open_bdy > -1)]
# And then find all horizontal link IDs (Active and Inactive)
self.horizontal_ids = links.horizontal_link_ids(self.grid.shape)
# And make the array 1-D
self.horizontal_ids = self.horizontal_ids.flatten()
# Find all horizontal active link ids
self.horizontal_active_link_ids = links.horizontal_active_link_ids(
self.grid.shape, self.active_ids)
# Now we repeat this process for the vertical links.
# First find the vertical link ids and reshape it into a 1-D array
self.vertical_ids = links.vertical_link_ids(self.grid.shape).flatten()
# Find the *active* vertical link ids
self.vertical_active_link_ids = links.vertical_active_link_ids(
self.grid.shape, self.active_ids)
if self.default_fixed_links is True:
fixed_link_ids = links.fixed_link_ids(
self.grid.shape, self.grid.status_at_node)
fixed_horizontal_links = links.horizontal_fixed_link_ids(
self.grid.shape, fixed_link_ids)
fixed_vertical_links = links.vertical_fixed_link_ids(
self.grid.shape, fixed_link_ids)
self.horizontal_active_link_ids = np.maximum(
self.horizontal_active_link_ids, fixed_horizontal_links)
self.vertical_active_link_ids = np.maximum(
self.vertical_active_link_ids, fixed_vertical_links)
self.active_neighbors = find_active_neighbors_for_fixed_links(
self.grid)
self.vert_bdy_ids = self.active_links_at_open_bdy[
links.is_vertical_link(self.grid.shape,
self.active_links_at_open_bdy)]
self.vert_bdy_ids = links.nth_vertical_link(self.grid.shape,
self.vert_bdy_ids)
self.horiz_bdy_ids = self.active_links_at_open_bdy[
links.is_horizontal_link(self.grid.shape,
self.active_links_at_open_bdy)]
self.horiz_bdy_ids = links.nth_horizontal_link(self.grid.shape,
self.horiz_bdy_ids)
# Using the active vertical link ids we can find the north
# and south vertical neighbors
self.north_neighbors = links.vertical_north_link_neighbor(
self.grid.shape, self.vertical_active_link_ids)
self.south_neighbors = links.vertical_south_link_neighbor(
self.grid.shape, self.vertical_active_link_ids)
# Using the horizontal active link ids, we can find the west and
# east neighbors
self.west_neighbors = links.horizontal_west_link_neighbor(
self.grid.shape, self.horizontal_active_link_ids)
self.east_neighbors = links.horizontal_east_link_neighbor(
self.grid.shape, self.horizontal_active_link_ids)
## replace bdy condition links
(ids, ) = np.where(self.west_neighbors[self.horiz_bdy_ids] == -1)
ids = self.horiz_bdy_ids[ids]
self.west_neighbors[ids] = self.horizontal_active_link_ids[ids]
(ids, ) = np.where(self.east_neighbors[self.horiz_bdy_ids] == -1)
ids = self.horiz_bdy_ids[ids]
self.east_neighbors[ids] = self.horizontal_active_link_ids[ids]
(ids, ) = np.where(self.north_neighbors[self.vert_bdy_ids] == -1)
ids = self.vert_bdy_ids[ids]
self.north_neighbors[ids] = self.vertical_active_link_ids[ids]
(ids, ) = np.where(self.south_neighbors[self.vert_bdy_ids] == -1)
ids = self.vert_bdy_ids[ids]
self.south_neighbors[ids] = self.vertical_active_link_ids[ids]
# Set up arrays for discharge in the horizontal & vertical directions.
self.q_horizontal = np.zeros(links.number_of_horizontal_links(
self.grid.shape))
self.q_vertical = np.zeros(links.number_of_vertical_links(
self.grid.shape))
# Once the neighbor arrays are set up, we change the flag to True!
self.neighbor_flag = True
def overland_flow(self, dt=None):
"""Generate overland flow across a grid.
For one time step, this generates 'overland flow' across a given grid
by calculating discharge at each node.
Using the depth slope product, shear stress is calculated at every
node.
Outputs water depth, discharge and shear stress values through time at
every point in the input grid.
"""
# DH adds a loop to enable an imposed tstep while maintaining stability
local_elapsed_time = 0.
if dt is None:
dt = np.inf # to allow the loop to begin
while local_elapsed_time < dt:
dt_local = self.calc_time_step()
# Can really get into trouble if nothing happens but we still run:
if not dt_local < np.inf:
break
if local_elapsed_time + dt_local > dt:
dt_local = dt - local_elapsed_time
self.dt = dt_local
# First, we check and see if the neighbor arrays have been
# initialized
if self.neighbor_flag is False:
self.set_up_neighbor_arrays()
# In case another component has added data to the fields, we just
# reset our water depths, topographic elevations and water
# discharge variables to the fields.
self.h = self.grid['node']['surface_water__depth']
self.z = self.grid['node']['topographic__elevation']
self.q = self.grid['link']['surface_water__discharge']
self.h_links = self.grid['link']['surface_water__depth']
# Here we identify the core nodes and active links for later use.
self.core_nodes = self.grid.core_nodes
self.active_links = self.grid.active_links
# Per Bates et al., 2010, this solution needs to find difference
# between the highest water surface in the two cells and the
# highest bed elevation
zmax = self._grid.map_max_of_link_nodes_to_link(self.z)
w = self.h + self.z
wmax = self._grid.map_max_of_link_nodes_to_link(w)
hflow = wmax[self._grid.active_links] - zmax[
self._grid.active_links]
# Insert this water depth into an array of water depths at the
# links.
self.h_links[self.active_links] = hflow
# Now we calculate the slope of the water surface elevation at
# active links
self.water_surface__gradient = (
self.grid.calc_grad_at_link(w)[self.grid.active_links])
# And insert these values into an array of all links
self.water_surface_slope[self.active_links
] = self.water_surface__gradient
# If the user chooses to set boundary links to the neighbor value,
# we set the discharge array to have the boundary links set to
# their neighbor value
if self.default_fixed_links is True:
self.q[self.grid.fixed_links] = self.q[self.active_neighbors]
# Now we can calculate discharge. To handle links with neighbors
# that do not exist, we will do a fancy indexing trick. Non-
# existent links or inactive links have an index of '-1', which in
# Python, indexes from the end of a list or array. To accommodate these
# '-1' indices, we will simply append a value of 0.0 discharge (in
# units of L^2/T) to the end of the discharge array.
self.q = np.append(self.q, [0])
horiz = self.horizontal_ids
vert = self.vertical_ids
# Now we calculate discharge in the horizontal direction
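# The update below follows the de Almeida et al., 2012 scheme: the new
# discharge on a link is a theta-weighted average of the old discharge and
# its two neighbors, minus the gravity-driven term g * h * dt * S, all
# divided by the implicit friction term
# 1 + g * dt * n**2 * |q| / h**(7/3). The same update is applied to the
# horizontal and vertical links.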
try:
self.q[horiz] = ((
self.theta * self.q[horiz] + (1. - self.theta) / 2. *
(self.q[self.west_neighbors] + self.q[self.east_neighbors]) -
self.g * self.h_links[horiz] * self.dt *
self.water_surface_slope[horiz]) / (1 + self.g * self.dt
* self.mannings_n ** 2. * abs(self.q[horiz]) /
self.h_links[horiz] ** _SEVEN_OVER_THREE))
# ... and in the vertical direction
self.q[vert] = ((
self.theta * self.q[vert] + (1 - self.theta) / 2. *
(self.q[self.north_neighbors] + self.q[self.south_neighbors]) -
self.g * self.h_links[vert] * self.dt *
self.water_surface_slope[vert]) / (1 + self.g * self.dt *
self.mannings_n ** 2. * abs(self.q[vert]) /
self.h_links[vert] ** _SEVEN_OVER_THREE))
except ValueError:
self.mannings_n = self.grid['link']['mannings_n']
# if manning's n in a field
# calc discharge in horizontal
self.q[horiz] = ((
self.theta * self.q[horiz] + (1. - self.theta) / 2. *
(self.q[self.west_neighbors] + self.q[self.east_neighbors]) -
self.g * self.h_links[horiz] * self.dt *
self.water_surface_slope[horiz]) / (1 + self.g * self.dt
* self.mannings_n[horiz]** 2. * abs(self.q[horiz]) /
self.h_links[horiz] ** _SEVEN_OVER_THREE))
# ... and in the vertical direction
self.q[vert] = ((
self.theta * self.q[vert] + (1 - self.theta) / 2. *
(self.q[self.north_neighbors] + self.q[self.south_neighbors]) -
self.g * self.h_links[vert] * self.dt *
self.water_surface_slope[self.vertical_ids]) / (1 + self.g
* self.dt * self.mannings_n[vert] ** 2. * abs(self.q[vert])
/ self.h_links[vert] ** _SEVEN_OVER_THREE))
# Now to return the array to its original length (length of number
# of all links), we delete the extra 0.0 value from the end of the
# array.
self.q = np.delete(self.q, len(self.q) - 1)
# Updating the discharge array to have the boundary links set to
# their neighbor
if self.default_fixed_links is True:
self.q[self.grid.fixed_links] = self.q[self.active_neighbors]
if self.steep_slopes is True:
# To prevent water from draining too fast for our time steps...
# Our Froude number.
Fr = 1.0
# Our two limiting factors, the froude number and courant
# number.
# The Froude number computed from q, to be compared against Fr.
calculated_q = (self.q / self.h_links) / np.sqrt(self.g *
self.h_links)
# Looking at our calculated q and comparing it to Courant no.,
q_courant = self.q * self.dt / self.grid.dx
# Water depth split equally between four links.
water_div_4 = self.h_links / 4.
# IDs where water discharge is positive...
(positive_q, ) = np.where(self.q > 0)
# ... and negative.
(negative_q, ) = np.where(self.q < 0)
# Where does our calculated q exceed the Froude number? If q
# does exceed the Froude number, we are getting supercritical
# flow and discharge needs to be reduced to maintain stability.
(Froude_logical, ) = np.where((calculated_q) > Fr)
(Froude_abs_logical, ) = np.where(abs(calculated_q) > Fr)
# Where does our calculated q exceed the Courant number and
# water depth divided amongst 4 links? If the calculated q
# exceeds the Courant number and is greater than the water
# depth divided by 4 links, we reduce discharge to maintain
# stability.
(water_logical, ) = np.where(q_courant > water_div_4)
(water_abs_logical, ) = np.where(abs(q_courant) > water_div_4)
# Where are these conditions met? For positive and negative q,
# there are specific rules to reduce q. This step finds where
# the discharge values are positive or negative and where
# discharge exceeds the Froude or Courant number.
self.if_statement_1 = np.intersect1d(positive_q,
Froude_logical)
self.if_statement_2 = np.intersect1d(negative_q,
Froude_abs_logical)
self.if_statement_3 = np.intersect1d(positive_q, water_logical)
self.if_statement_4 = np.intersect1d(negative_q,
water_abs_logical)
# Rules 1 and 2 reduce discharge by the Froude number.
self.q[self.if_statement_1] = (
self.h_links[self.if_statement_1] *
(np.sqrt(self.g * self.h_links[self.if_statement_1]) * Fr))
self.q[self.if_statement_2] = (
0. - (self.h_links[self.if_statement_2] *
np.sqrt(self.g * self.h_links[self.if_statement_2]) *
Fr))
# Rules 3 and 4 reduce discharge by the Courant number.
self.q[self.if_statement_3] = (((
self.h_links[self.if_statement_3] * self.grid.dx) / 5.) /
self.dt)
self.q[self.if_statement_4] = (0. - (self.h_links[
self.if_statement_4] * self.grid.dx / 5.) / self.dt)
# Once stability has been restored, we calculate the change in
# water depths on all core nodes by finding the difference between
# inputs (rainfall) and the inputs/outputs (flux divergence of
# discharge)
self.dhdt = (self.rainfall_intensity -
self.grid.calc_flux_div_at_node(self.q))
# Updating our water depths...
self.h[self.core_nodes] = (self.h[self.core_nodes] +
self.dhdt[self.core_nodes] * self.dt)
# To prevent divide by zero errors, a minimum threshold water depth
# must be maintained. To reduce mass imbalances, this is set to
# find locations where water depth is smaller than h_init (default
# is 0.001) and the new value is self.h_init * 10^-3. This was set
# as it showed the smallest amount of mass creation in the grid
# during testing.
if self.steep_slopes is True:
self.h[self.h < self.h_init] = self.h_init * 10.0 ** -3
# And reset our field values with the newest water depth and
# discharge.
self.grid.at_node['surface_water__depth'] = self.h
self.grid.at_link['surface_water__discharge'] = self.q
#
#
# self.helper_q = self.grid.map_upwind_node_link_max_to_node(self.q)
# self.helper_s = self.grid.map_upwind_node_link_max_to_node(
# self.water_surface_slope)
#
# self.helper_q = self.grid.map_max_of_link_nodes_to_link(self.helper_q)
# self.helper_s = self.grid.map_max_of_link_nodes_to_link(self.helper_s)
#
# self.grid['link']['surface_water__discharge'][
# self.active_links_at_open_bdy] = self.helper_q[
# self.active_links_at_open_bdy]
#
# self.grid['link']['water_surface__gradient'][
# self.active_links_at_open_bdy] = self.helper_s[
# self.active_links_at_open_bdy]
## Update nodes near boundary locations - nodes adjacent to
## boundaries may have discharge and water surface slopes
## artificially reduced due to boundary effects. This step removes
## those errors.
if dt is np.inf:
break
local_elapsed_time += self.dt
def run_one_step(self, dt=None):
"""Generate overland flow across a grid.
For one time step, this generates 'overland flow' across a given grid
by calculating discharge at each node.
Using the depth slope product, shear stress is calculated at every
node.
Outputs water depth, discharge and shear stress values through time at
every point in the input grid.
"""
self.overland_flow(dt=dt)
def discharge_mapper(self, input_discharge, convert_to_volume=False):
"""
Maps discharge value from links onto nodes.
This method takes the discharge values on links and determines the
links that are flowing INTO a given node. The fluxes moving INTO a
given node are summed.
This method ignores all flow moving OUT of a given node.
This takes values from the OverlandFlow component (by default) in
units of [L^2/T]. If the convert_to_volume flag is set to True, this
method converts discharge to units [L^3/T] - as of Aug 2016, only
operates for square RasterModelGrid instances.
The output array is of length grid.number_of_nodes and can be used
with the Landlab imshow_grid plotter.
Returns a numpy array (discharge_vals)
"""
discharge_vals = np.zeros(self.grid.number_of_links)
discharge_vals[:] = input_discharge[:]
if convert_to_volume:
discharge_vals *= self.grid.dx
discharge_vals = (discharge_vals[self.grid.links_at_node] *
self.grid.link_dirs_at_node)
discharge_vals = discharge_vals.flatten()
discharge_vals[np.where(discharge_vals < 0)] = 0.0
discharge_vals = discharge_vals.reshape(self.grid.number_of_nodes, 4)
discharge_vals = discharge_vals.sum(axis=1)
return discharge_vals
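# Illustrative usage sketch (not part of the original component; the grid and
# component setup below are assumptions for demonstration only):
#
#     from landlab import RasterModelGrid, imshow_grid
#     from landlab.components import OverlandFlow
#     grid = RasterModelGrid((10, 10), 25.)
#     grid.add_zeros('node', 'topographic__elevation')
#     of = OverlandFlow(grid)
#     of.overland_flow(dt=10.)
#     q_at_node = of.discharge_mapper(
#         grid.at_link['surface_water__discharge'], convert_to_volume=True)
#     imshow_grid(grid, q_at_node)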
def find_active_neighbors_for_fixed_links(grid):
"""Find active link neighbors for every fixed link.
Specialized link ID function used to ID the active links that neighbor
fixed links in the vertical and horizontal directions.
If the user wants to assign fixed gradients or values to the fixed
links dynamically, this function identifies the nearest active_link
neighbor.
Each fixed link can either have 0 or 1 active neighbor. This function
finds if and where that active neighbor is and stores those IDs in
an array.
Parameters
----------
grid : RasterModelGrid
A landlab grid.
Returns
-------
ndarray of int, shape `(*, )`
Flat array of links.
Examples
--------
>>> from landlab.grid.structured_quad.links import neighbors_at_link
>>> from landlab import RasterModelGrid
>>> from landlab.components.overland_flow.generate_overland_flow_deAlmeida import find_active_neighbors_for_fixed_links
>>> from landlab import RasterModelGrid, FIXED_GRADIENT_BOUNDARY
>>> grid = RasterModelGrid((4, 5))
>>> grid.status_at_node[:5] = FIXED_GRADIENT_BOUNDARY
>>> grid.status_at_node[::5] = FIXED_GRADIENT_BOUNDARY
>>> grid.status_at_node # doctest: +NORMALIZE_WHITESPACE
array([2, 2, 2, 2, 2,
2, 0, 0, 0, 1,
2, 0, 0, 0, 1,
2, 1, 1, 1, 1], dtype=int8)
>>> grid.fixed_links
array([ 5, 6, 7, 9, 18])
>>> grid.active_links
array([10, 11, 12, 14, 15, 16, 19, 20, 21, 23, 24, 25])
>>> find_active_neighbors_for_fixed_links(grid)
array([14, 15, 16, 10, 19])
>>> rmg = RasterModelGrid((4, 7))
>>> rmg.at_node['topographic__elevation'] = rmg.zeros(at='node')
>>> rmg.at_link['topographic__slope'] = rmg.zeros(at='link')
>>> rmg.set_fixed_link_boundaries_at_grid_edges(True, True, True, True)
>>> find_active_neighbors_for_fixed_links(rmg)
array([20, 21, 22, 23, 24, 14, 17, 27, 30, 20, 21, 22, 23, 24])
"""
neighbors = links.neighbors_at_link(grid.shape, grid.fixed_links).flat
return neighbors[np.in1d(neighbors, grid.active_links)]
|
|
#!/usr/bin/env python
import argparse
import common
import fileinput
import multiprocessing
import os
import os.path
import re
import subprocess
import sys
import traceback
EXCLUDED_PREFIXES = ("./generated/", "./thirdparty/", "./build", "./.git/",
"./bazel-", "./bazel/external", "./.cache",
"./tools/testdata/check_format/")
SUFFIXES = (".cc", ".h", "BUILD", ".md", ".rst", ".proto")
DOCS_SUFFIX = (".md", ".rst")
PROTO_SUFFIX = (".proto")
# Files in these paths can make reference to protobuf stuff directly
GOOGLE_PROTOBUF_WHITELIST = ('ci/prebuilt', 'source/common/protobuf', 'api/test')
CLANG_FORMAT_PATH = os.getenv("CLANG_FORMAT", "clang-format-5.0")
BUILDIFIER_PATH = os.getenv("BUILDIFIER_BIN", "$GOPATH/bin/buildifier")
ENVOY_BUILD_FIXER_PATH = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])), "envoy_build_fixer.py")
HEADER_ORDER_PATH = os.path.join(
os.path.dirname(os.path.abspath(sys.argv[0])), "header_order.py")
SUBDIR_SET = set(common.includeDirOrder())
INCLUDE_ANGLE = "#include <"
INCLUDE_ANGLE_LEN = len(INCLUDE_ANGLE)
PROTOBUF_TYPE_ERRORS = {
# Well-known types should be referenced from the ProtobufWkt namespace.
"Protobuf::Any": "ProtobufWkt::Any",
"Protobuf::Empty": "ProtobufWkt::Empty",
"Protobuf::ListValue": "ProtobufWkt:ListValue",
"Protobuf::NULL_VALUE": "ProtobufWkt::NULL_VALUE",
"Protobuf::StringValue": "ProtobufWkt::StringValue",
"Protobuf::Struct": "ProtobufWkt::Struct",
"Protobuf::Value": "ProtobufWkt::Value",
# Maps including strings should use the protobuf string types.
"Protobuf::MapPair<std::string": "Protobuf::MapPair<Envoy::ProtobufTypes::String",
# Other common mis-namespacing of protobuf types.
"ProtobufWkt::Map": "Protobuf::Map",
"ProtobufWkt::MapPair": "Protobuf::MapPair",
"ProtobufUtil::MessageDifferencer": "Protobuf::util::MessageDifferencer"
}
def checkNamespace(file_path):
with open(file_path) as f:
text = f.read()
if not re.search(r'^\s*namespace\s+Envoy\s*{', text, re.MULTILINE) and \
'NOLINT(namespace-envoy)' not in text:
return ["Unable to find Envoy namespace or NOLINT(namespace-envoy) for file: %s" % file_path]
return []
# To avoid breaking the Lyft import, we just check for path inclusion here.
def whitelistedForProtobufDeps(file_path):
return any(path_segment in file_path for path_segment in GOOGLE_PROTOBUF_WHITELIST)
def findSubstringAndReturnError(pattern, file_path, error_message):
with open(file_path) as f:
text = f.read()
if pattern in text:
error_messages = [file_path + ': ' + error_message]
for i, line in enumerate(text.splitlines()):
if pattern in line:
error_messages.append(" %s:%s" % (file_path, i + 1))
return error_messages
return []
def checkProtobufExternalDepsBuild(file_path):
if whitelistedForProtobufDeps(file_path):
return []
message = ("unexpected direct external dependency on protobuf, use "
"//source/common/protobuf instead.")
return findSubstringAndReturnError('"protobuf"', file_path, message)
def checkProtobufExternalDeps(file_path):
if whitelistedForProtobufDeps(file_path):
return []
with open(file_path) as f:
text = f.read()
if '"google/protobuf' in text or "google::protobuf" in text:
return [
"%s has unexpected direct dependency on google.protobuf, use "
"the definitions in common/protobuf/protobuf.h instead." % file_path]
return []
def isApiFile(file_path):
return file_path.startswith(args.api_prefix)
def isBuildFile(file_path):
basename = os.path.basename(file_path)
if basename in {"BUILD", "BUILD.bazel"} or basename.endswith(".BUILD"):
return True
return False
def hasInvalidAngleBracketDirectory(line):
if not line.startswith(INCLUDE_ANGLE):
return False
path = line[INCLUDE_ANGLE_LEN:]
slash = path.find("/")
if slash == -1:
return False
subdir = path[0:slash]
return subdir in SUBDIR_SET
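# Illustrative examples (assuming "common" is among the include directories
# returned by common.includeDirOrder(); the actual set is repo-specific):
#   hasInvalidAngleBracketDirectory('#include <common/common/logger.h>') -> True
#   hasInvalidAngleBracketDirectory('#include <string>') -> False (no '/')
#   hasInvalidAngleBracketDirectory('#include "common/common/logger.h"') -> False (quoted)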
def formatLineError(path, zero_based_line_number, message):
return "%s:%d: %s" % (path, zero_based_line_number + 1, message)
def checkFileContents(file_path):
error_messages = []
for line_number, line in enumerate(fileinput.input(file_path)):
if line.find(". ") != -1:
error_messages.append(formatLineError(file_path, line_number, "over-enthusiastic spaces"))
if hasInvalidAngleBracketDirectory(line):
error_messages.append(formatLineError(file_path, line_number,
"envoy includes should not have angle brackets"))
for invalid_construct, valid_construct in PROTOBUF_TYPE_ERRORS.items():
if invalid_construct in line:
error_messages.append(
formatLineError(file_path, line_number,
"incorrect protobuf type reference %s; "
"should be %s" % (invalid_construct, valid_construct)))
return error_messages
def fixFileContents(file_path):
for line in fileinput.input(file_path, inplace=True):
# Strip double space after '.'. This may prove overenthusiastic and need to
# be restricted to comments and metadata files, but works for now.
line = line.replace('.  ', '. ')
if hasInvalidAngleBracketDirectory(line):
line = line.replace('<', '"').replace(">", '"')
# Fix incorrect protobuf namespace references.
for invalid_construct, valid_construct in PROTOBUF_TYPE_ERRORS.items():
line = line.replace(invalid_construct, valid_construct)
sys.stdout.write(str(line))
def checkFilePath(file_path):
error_messages = []
if isBuildFile(file_path):
# TODO(htuch): Add API specific BUILD fixer script.
if not isApiFile(file_path):
command = "%s %s | diff %s -" % (ENVOY_BUILD_FIXER_PATH, file_path,
file_path)
error_messages += executeCommand(
command, "envoy_build_fixer check failed", file_path)
command = "cat %s | %s -mode=fix | diff %s -" % (file_path, BUILDIFIER_PATH, file_path)
error_messages += executeCommand(command, "buildifier check failed", file_path)
error_messages += checkProtobufExternalDepsBuild(file_path)
return error_messages
error_messages += checkFileContents(file_path)
if file_path.endswith(DOCS_SUFFIX):
return error_messages
if not file_path.endswith(PROTO_SUFFIX):
error_messages += checkNamespace(file_path)
error_messages += checkProtobufExternalDeps(file_path)
command = ("%s %s | diff %s -" % (HEADER_ORDER_PATH, file_path, file_path))
error_messages += executeCommand(command, "header_order.py check failed", file_path)
command = ("%s %s | diff %s -" % (CLANG_FORMAT_PATH, file_path, file_path))
error_messages += executeCommand(command, "clang-format check failed", file_path)
return error_messages
# Example target outputs are:
# - "26,27c26"
# - "12,13d13"
# - "7a8,9"
def executeCommand(command, error_message, file_path,
regex=re.compile(r"^(\d+)[acd]?\d*(?:,\d+[acd]?\d*)?$")):
try:
output = subprocess.check_output(command, shell=True, stderr=subprocess.STDOUT).strip()
if output:
return output.split("\n")
return []
except subprocess.CalledProcessError as e:
if (e.returncode != 0 and e.returncode != 1):
return ["ERROR: something went wrong while executing: %s" % e.cmd]
# In case we can't find any line numbers, record an error message first.
error_messages = ["%s for file: %s" % (error_message, file_path)]
for line in e.output.splitlines():
for num in regex.findall(line):
error_messages.append(" %s:%s" % (file_path, num))
return error_messages
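# Illustrative note (not part of the original script): the regex above pulls
# the leading line number out of each diff hunk header, e.g.
#   regex.findall("26,27c26") -> ["26"]
#   regex.findall("7a8,9")    -> ["7"]
# so the error messages can point at "<file_path>:<line>".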
def fixHeaderOrder(file_path):
command = "%s --rewrite %s" % (HEADER_ORDER_PATH, file_path)
if os.system(command) != 0:
return ["header_order.py rewrite error: %s" % (file_path)]
return []
def clangFormat(file_path):
command = "%s -i %s" % (CLANG_FORMAT_PATH, file_path)
if os.system(command) != 0:
return ["clang-format rewrite error: %s" % (file_path)]
return []
def fixFilePath(file_path):
if isBuildFile(file_path):
# TODO(htuch): Add API specific BUILD fixer script.
if not isApiFile(file_path):
if os.system(
"%s %s %s" % (ENVOY_BUILD_FIXER_PATH, file_path, file_path)) != 0:
return ["envoy_build_fixer rewrite failed for file: %s" % file_path]
if os.system("%s -mode=fix %s" % (BUILDIFIER_PATH, file_path)) != 0:
return ["buildifier rewrite failed for file: %s" % file_path]
return []
fixFileContents(file_path)
if file_path.endswith(DOCS_SUFFIX):
return []
error_messages = []
if not file_path.endswith(PROTO_SUFFIX):
error_messages = checkNamespace(file_path)
if error_messages == []:
error_messages = checkProtobufExternalDepsBuild(file_path)
if error_messages == []:
error_messages = checkProtobufExternalDeps(file_path)
if error_messages:
return error_messages + ["This cannot be automatically corrected. Please fix by hand."]
error_messages = []
error_messages += fixHeaderOrder(file_path)
error_messages += clangFormat(file_path)
return error_messages
def checkFormat(file_path):
if file_path.startswith(EXCLUDED_PREFIXES):
return []
if not file_path.endswith(SUFFIXES):
return []
error_messages = []
if operation_type == "check":
error_messages += checkFilePath(file_path)
if operation_type == "fix":
error_messages += fixFilePath(file_path)
if error_messages:
return ["From %s" % file_path] + error_messages
return error_messages
def checkFormatReturnTraceOnError(file_path):
"""Run checkFormat and return the traceback of any exception."""
try:
return checkFormat(file_path)
except:
return traceback.format_exc().split("\n")
def checkFormatVisitor(arg, dir_name, names):
"""Run checkFormat in parallel for the given files.
Args:
arg: a tuple (pool, result_list) for starting tasks asynchronously.
dir_name: the parent directory of the given files.
names: a list of file names.
"""
# Unpack the multiprocessing.Pool process pool and list of results. Since
# python lists are passed as references, this is used to collect the list of
# async results (futures) from running checkFormat and passing them back to
# the caller.
pool, result_list = arg
for file_name in names:
result = pool.apply_async(checkFormatReturnTraceOnError, args=(dir_name + "/" + file_name,))
result_list.append(result)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Check or fix file format.')
parser.add_argument('operation_type', type=str, choices=['check', 'fix'],
help="specify if the run should 'check' or 'fix' format.")
parser.add_argument('target_path', type=str, nargs="?", default=".", help="specify the root directory"
" for the script to recurse over. Default '.'.")
parser.add_argument('--add-excluded-prefixes', type=str, nargs="+", help="exclude additional prefixes.")
parser.add_argument('-j', '--num-workers', type=int, default=multiprocessing.cpu_count(),
help="number of worker processes to use; defaults to one per core.")
parser.add_argument('--api-prefix', type=str, default='./api/', help="path of the API tree")
args = parser.parse_args()
operation_type = args.operation_type
target_path = args.target_path
if args.add_excluded_prefixes:
EXCLUDED_PREFIXES += tuple(args.add_excluded_prefixes)
if os.path.isfile(target_path):
error_messages = checkFormat("./" + target_path)
else:
pool = multiprocessing.Pool(processes=args.num_workers)
results = []
# For each file in target_path, start a new task in the pool and collect the
# results (results is passed by reference, and is used as an output).
os.path.walk(target_path, checkFormatVisitor, (pool, results))
# Close the pool to new tasks, wait for all of the running tasks to finish,
# then collect the error messages.
pool.close()
pool.join()
error_messages = sum((r.get() for r in results), [])
if error_messages:
for e in error_messages:
print "ERROR: %s" % e
print "ERROR: check format failed. run 'tools/check_format.py fix'"
sys.exit(1)
|
|
"""
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: SOAPBuilder.py 1298 2006-11-07 00:54:15Z sanxiyn $'
from version import __version__
import cgi
from wstools.XMLname import toXMLname, fromXMLname
import fpconst
# SOAPpy modules
from Config import Config
from NS import NS
from Types import *
# Test whether this Python version has Types.BooleanType
# If it doesn't have it, then False and True are serialized as integers
try:
BooleanType
pythonHasBooleanType = 1
except NameError:
pythonHasBooleanType = 0
################################################################################
# SOAP Builder
################################################################################
class SOAPBuilder:
_xml_top = '<?xml version="1.0"?>\n'
_xml_enc_top = '<?xml version="1.0" encoding="%s"?>\n'
_env_top = ( '%(ENV_T)s:Envelope\n' + \
' %(ENV_T)s:encodingStyle="%(ENC)s"\n' ) % \
NS.__dict__
_env_bot = '</%(ENV_T)s:Envelope>\n' % NS.__dict__
# Namespaces potentially defined in the Envelope tag.
_env_ns = {NS.ENC: NS.ENC_T, NS.ENV: NS.ENV_T,
NS.XSD: NS.XSD_T, NS.XSD2: NS.XSD2_T, NS.XSD3: NS.XSD3_T,
NS.XSI: NS.XSI_T, NS.XSI2: NS.XSI2_T, NS.XSI3: NS.XSI3_T}
def __init__(self, args = (), kw = {}, method = None, namespace = None,
header = None, methodattrs = None, envelope = 1, encoding = 'UTF-8',
use_refs = 0, config = Config, noroot = 0):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.args = args
self.kw = kw
self.envelope = envelope
self.encoding = encoding
self.method = method
self.namespace = namespace
self.header = header
self.methodattrs= methodattrs
self.use_refs = use_refs
self.config = config
self.out = []
self.tcounter = 0
self.ncounter = 1
self.icounter = 1
self.envns = {}
self.ids = {}
self.depth = 0
self.multirefs = []
self.multis = 0
self.body = not isinstance(args, bodyType)
self.noroot = noroot
def build(self):
if Config.debug: print "In build."
ns_map = {}
# Cache whether typing is on or not
typed = self.config.typed
if self.header:
# Create a header.
self.dump(self.header, "Header", typed = typed)
#self.header = None # Wipe it out so no one is using it.
if self.body:
# Call genns to record that we've used SOAP-ENV.
self.depth += 1
body_ns = self.genns(ns_map, NS.ENV)[0]
self.out.append("<%sBody>\n" % body_ns)
if self.method:
# Save the NS map so that it can be restored when we
# fall out of the scope of the method definition
#print 'method:',self.method
save_ns_map = ns_map.copy()
self.depth += 1
a = ''
if self.methodattrs:
for (k, v) in self.methodattrs.items():
a += ' %s="%s"' % (k, v)
if self.namespace: # Use the namespace info handed to us
methodns, n = self.genns(ns_map, self.namespace)
else:
methodns, n = '', ''
self.out.append('<%s%s%s%s%s>\n' % (
methodns, self.method, n, a, self.genroot(ns_map)))
try:
if type(self.args) != TupleType:
args = (self.args,)
else:
args = self.args
for i in args:
self.dump(i, typed = typed, ns_map = ns_map)
if hasattr(self.config, "argsOrdering") and self.config.argsOrdering.has_key(self.method):
for k in self.config.argsOrdering.get(self.method):
self.dump(self.kw.get(k), k, typed = typed, ns_map = ns_map)
else:
for (k, v) in self.kw.items():
self.dump(v, k, typed = typed, ns_map = ns_map)
except RecursionError:
if self.use_refs == 0:
# restart
b = SOAPBuilder(args = self.args, kw = self.kw,
method = self.method, namespace = self.namespace,
header = self.header, methodattrs = self.methodattrs,
envelope = self.envelope, encoding = self.encoding,
use_refs = 1, config = self.config)
return b.build()
raise
if self.method:
self.out.append("</%s%s>\n" % (methodns, self.method))
# End of the method definition; drop any local namespaces
ns_map = save_ns_map
self.depth -= 1
if self.body:
# dump may add to self.multirefs, but the for loop will keep
# going until it has used all of self.multirefs, even those
# entries added while in the loop.
self.multis = 1
for obj, tag in self.multirefs:
self.dump(obj, tag, typed = typed, ns_map = ns_map)
self.out.append("</%sBody>\n" % body_ns)
self.depth -= 1
if self.envelope:
e = map (lambda ns: ' xmlns:%s="%s"\n' % (ns[1], ns[0]),
self.envns.items())
self.out = ['<', self._env_top] + e + ['>\n'] + \
self.out + \
[self._env_bot]
if self.encoding != None:
self.out.insert(0, self._xml_enc_top % self.encoding)
return ''.join(self.out).encode(self.encoding)
self.out.insert(0, self._xml_top)
return ''.join(self.out)
def gentag(self):
if Config.debug: print "In gentag."
self.tcounter += 1
return "v%d" % self.tcounter
def genns(self, ns_map, nsURI):
if nsURI == None:
return ('', '')
if type(nsURI) == TupleType: # already a tuple
if len(nsURI) == 2:
ns, nsURI = nsURI
else:
ns, nsURI = None, nsURI[0]
else:
ns = None
if ns_map.has_key(nsURI):
return (ns_map[nsURI] + ':', '')
if self._env_ns.has_key(nsURI):
ns = self.envns[nsURI] = ns_map[nsURI] = self._env_ns[nsURI]
return (ns + ':', '')
if not ns:
ns = "ns%d" % self.ncounter
self.ncounter += 1
ns_map[nsURI] = ns
if self.config.buildWithNamespacePrefix:
return (ns + ':', ' xmlns:%s="%s"' % (ns, nsURI))
else:
return ('', ' xmlns="%s"' % (nsURI))
def genroot(self, ns_map):
if self.noroot:
return ''
if self.depth != 2:
return ''
ns, n = self.genns(ns_map, NS.ENC)
return ' %sroot="%d"%s' % (ns, not self.multis, n)
# checkref checks an element to see if it needs to be encoded as a
# multi-reference element or not. If it returns None, the element has
# been handled and the caller can continue with subsequent elements.
# If it returns a string, the string should be included in the opening
# tag of the marshaled element.
def checkref(self, obj, tag, ns_map):
if self.depth < 2:
return ''
if not self.ids.has_key(id(obj)):
n = self.ids[id(obj)] = self.icounter
self.icounter = n + 1
if self.use_refs == 0:
return ''
if self.depth == 2:
return ' id="i%d"' % n
self.multirefs.append((obj, tag))
else:
if self.use_refs == 0:
raise RecursionError, "Cannot serialize recursive object"
n = self.ids[id(obj)]
if self.multis and self.depth == 2:
return ' id="i%d"' % n
self.out.append('<%s href="#i%d"%s/>\n' %
(tag, n, self.genroot(ns_map)))
return None
# dumpers
def dump(self, obj, tag = None, typed = 1, ns_map = {}):
if Config.debug: print "In dump.", "obj=", obj
ns_map = ns_map.copy()
self.depth += 1
if type(tag) not in (NoneType, StringType, UnicodeType):
raise KeyError, "tag must be a string or None"
try:
meth = getattr(self, "dump_" + type(obj).__name__)
except AttributeError:
if type(obj) == LongType:
obj_type = "integer"
elif pythonHasBooleanType and type(obj) == BooleanType:
obj_type = "boolean"
else:
obj_type = type(obj).__name__
self.out.append(self.dumper(None, obj_type, obj, tag, typed,
ns_map, self.genroot(ns_map)))
else:
meth(obj, tag, typed, ns_map)
self.depth -= 1
# generic dumper
def dumper(self, nsURI, obj_type, obj, tag, typed = 1, ns_map = {},
rootattr = '', id = '',
xml = '<%(tag)s%(type)s%(id)s%(attrs)s%(root)s>%(data)s</%(tag)s>\n'):
if Config.debug: print "In dumper."
if nsURI == None:
nsURI = self.config.typesNamespaceURI
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
a = n = t = ''
if typed and obj_type:
ns, n = self.genns(ns_map, nsURI)
ins = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
t = ' %stype="%s%s"%s' % (ins, ns, obj_type, n)
try: a = obj._marshalAttrs(ns_map, self)
except: pass
#t = ' xmlns=""'
try: data = obj._marshalData()
except:
if (obj_type != "string"): # strings are already encoded
data = cgi.escape(str(obj))
else:
data = obj
return xml % {"tag": tag, "type": t, "data": data, "root": rootattr,
"id": id, "attrs": a}
def dump_float(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_float."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if Config.strict_range:
doubleType(obj)
if fpconst.isPosInf(obj):
obj = "INF"
elif fpconst.isNegInf(obj):
obj = "-INF"
elif fpconst.isNaN(obj):
obj = "NaN"
else:
obj = repr(obj)
# Note: python 'float' is actually a SOAP 'double'.
self.out.append(self.dumper(None, "double", obj, tag, typed, ns_map,
self.genroot(ns_map)))
def dump_string(self, obj, tag, typed = 0, ns_map = {}):
if Config.debug: print "In dump_string."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try: data = obj._marshalData()
except: data = obj
self.out.append(self.dumper(None, "string", cgi.escape(data), tag,
typed, ns_map, self.genroot(ns_map), id))
dump_str = dump_string # For Python 2.2+
dump_unicode = dump_string
def dump_None(self, obj, tag, typed = 0, ns_map = {}):
if Config.debug: print "In dump_None."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
ns = self.genns(ns_map, self.config.schemaNamespaceURI)[0]
self.out.append('<%s %snull="1"%s/>\n' %
(tag, ns, self.genroot(ns_map)))
dump_NoneType = dump_None # For Python 2.2+
def dump_list(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_list.", "obj=", obj
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if type(obj) == InstanceType:
data = obj.data
else:
data = obj
if typed:
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try:
sample = data[0]
empty = 0
except:
# preserve type if present
if getattr(obj,"_typed",None) and getattr(obj,"_type",None):
if getattr(obj, "_complexType", None):
sample = typedArrayType(typed=obj._type,
complexType = obj._complexType)
sample._typename = obj._type
if not getattr(obj,"_ns",None): obj._ns = NS.URN
else:
sample = typedArrayType(typed=obj._type)
else:
sample = structType()
empty = 1
# First scan list to see if all are the same type
same_type = 1
if not empty:
for i in data[1:]:
if type(sample) != type(i) or \
(type(sample) == InstanceType and \
sample.__class__ != i.__class__):
same_type = 0
break
ndecl = ''
if same_type:
if (isinstance(sample, structType)) or \
type(sample) == DictType or \
(isinstance(sample, anyType) and \
(getattr(sample, "_complexType", None) and \
sample._complexType)): # force to urn struct
try:
tns = obj._ns or NS.URN
except:
tns = NS.URN
ns, ndecl = self.genns(ns_map, tns)
try:
typename = sample._typename
except:
typename = "SOAPStruct"
t = ns + typename
elif isinstance(sample, anyType):
ns = sample._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ns, ndecl = self.genns(ns_map, ns)
t = ns + str(sample._type)
else:
t = 'ur-type'
else:
typename = type(sample).__name__
# For Python 2.2+
if type(sample) == StringType: typename = 'string'
# HACK: unicode is a SOAP string
if type(sample) == UnicodeType: typename = 'string'
# HACK: python 'float' is actually a SOAP 'double'.
if typename=="float": typename="double"
t = self.genns(ns_map, self.config.typesNamespaceURI)[0] + \
typename
else:
t = self.genns(ns_map, self.config.typesNamespaceURI)[0] + \
"ur-type"
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
ens, edecl = self.genns(ns_map, NS.ENC)
ins, idecl = self.genns(ns_map, self.config.schemaNamespaceURI)
if typed:
self.out.append(
'<%s %sarrayType="%s[%d]" %stype="%sArray"%s%s%s%s%s%s>\n' %
(tag, ens, t, len(data), ins, ens, ndecl, edecl, idecl,
self.genroot(ns_map), id, a))
if typed:
try: elemsname = obj._elemsname
except: elemsname = "item"
else:
elemsname = tag
for i in data:
self.dump(i, elemsname, not same_type, ns_map)
if typed: self.out.append('</%s>\n' % tag)
dump_tuple = dump_list
def dump_dictionary(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_dictionary."
tag = tag or self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
id = self.checkref(obj, tag, ns_map)
if id == None:
return
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
self.out.append('<%s%s%s%s>\n' %
(tag, id, a, self.genroot(ns_map)))
for (k, v) in obj.items():
if k[0] != "_":
self.dump(v, k, 1, ns_map)
self.out.append('</%s>\n' % tag)
dump_dict = dump_dictionary # For Python 2.2+
def dump_instance(self, obj, tag, typed = 1, ns_map = {}):
if Config.debug: print "In dump_instance.", "obj=", obj, "tag=", tag
if not tag:
# If it has a name use it.
if isinstance(obj, anyType) and obj._name:
tag = obj._name
else:
tag = self.gentag()
tag = toXMLname(tag) # convert from SOAP 1.2 XML name encoding
if isinstance(obj, arrayType): # Array
self.dump_list(obj, tag, typed, ns_map)
return
if isinstance(obj, faultType): # Fault
cns, cdecl = self.genns(ns_map, NS.ENC)
vns, vdecl = self.genns(ns_map, NS.ENV)
self.out.append('''<%sFault %sroot="1"%s%s>
<faultcode>%s</faultcode>
<faultstring>%s</faultstring>
''' % (vns, cns, vdecl, cdecl, obj.faultcode, obj.faultstring))
if hasattr(obj, "detail"):
self.dump(obj.detail, "detail", typed, ns_map)
self.out.append("</%sFault>\n" % vns)
return
r = self.genroot(ns_map)
try: a = obj._marshalAttrs(ns_map, self)
except: a = ''
if isinstance(obj, voidType): # void
self.out.append("<%s%s%s></%s>\n" % (tag, a, r, tag))
return
id = self.checkref(obj, tag, ns_map)
if id == None:
return
if isinstance(obj, structType):
# Check for namespace
ndecl = ''
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ns, ndecl = self.genns(ns_map, ns)
tag = ns + tag
if 'Body' not in tag:
self.out.append("<%s%s%s%s%s>\n" % (tag, ndecl, id, a, r))
keylist = obj.__dict__.keys()
# first write out items with order information
if hasattr(obj, '_keyord'):
for i in range(len(obj._keyord)):
self.dump(obj._aslist(i), obj._keyord[i], 1, ns_map)
keylist.remove(obj._keyord[i])
# now write out the rest
for k in keylist:
if (k[0] != "_"):
self.dump(getattr(obj,k), k, 1, ns_map)
if isinstance(obj, bodyType):
self.multis = 1
for v, k in self.multirefs:
self.dump(v, k, typed = typed, ns_map = ns_map)
if 'Body' not in tag:
self.out.append('</%s>\n' % tag)
elif isinstance(obj, anyType):
t = ''
if typed:
ns = obj._validNamespaceURI(self.config.typesNamespaceURI,
self.config.strictNamespaces)
if ns:
ons, ondecl = self.genns(ns_map, ns)
ins, indecl = self.genns(ns_map,
self.config.schemaNamespaceURI)
t = ' %stype="%s%s"%s%s' % \
(ins, ons, obj._type, ondecl, indecl)
self.out.append('<%s%s%s%s%s>%s</%s>\n' %
(tag, t, id, a, r, obj._marshalData(), tag))
else: # Some Class
self.out.append('<%s%s%s>\n' % (tag, id, r))
for (k, v) in obj.__dict__.items():
if k[0] != "_":
self.dump(v, k, 1, ns_map)
self.out.append('</%s>\n' % tag)
################################################################################
# SOAPBuilder's more public interface
################################################################################
def buildSOAP(args=(), kw={}, method=None, namespace=None,
header=None, methodattrs=None, envelope=1, encoding='UTF-8',
config=Config, noroot = 0):
t = SOAPBuilder(args=args, kw=kw, method=method, namespace=namespace,
header=header, methodattrs=methodattrs,envelope=envelope,
encoding=encoding, config=config,noroot=noroot)
return t.build()
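# Illustrative usage sketch (method name, namespace and arguments are
# hypothetical, for demonstration only):
#
#     envelope = buildSOAP(kw={'symbol': 'IBM'},
#                          method='getQuote',
#                          namespace='urn:example-quotes')
#     # 'envelope' is the serialized SOAP envelope, encoded per 'encoding'.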
|
|
"""
The following :class:`Filter` and :class:`Wheel` classes can be used to simulate
coronagraph observations in imaging mode.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import gridspec
import os
class Filter(object):
"""Filter for telescope imaging mode.
Parameters
----------
name : string
Name of filter
bandcenter : float
Wavelength at bandcenter (um)
FWHM : float
Full-width at half-maximum (um)
wl : array
Wavelength grid for response function (um)
response : array
Filter response function
notes : string
Notes about the filter, e.g. 'Johnson-Cousins'
"""
def __init__(self, name=None, bandcenter=None, FWHM=None, wl=None, response=None, notes=''):
self.name=name
self.bandcenter=bandcenter
self.FWHM=FWHM
self.wl=wl
self.response=response
self.notes=notes
def __str__(self):
string = 'Filter: \n------------\n'+\
'- Name : '+"%s" % (self.name)+'\n'+\
'- Bandcenter (um) : '+"%s" % (self.bandcenter)+'\n'+\
'- Full width at half max (um) : '+"%s" % (self.FWHM)+'\n'+\
'- Wavelength array (um) : '+"%s" % (self.wl)+'\n'+\
'- Response : '+"%s" % (self.response)+'\n'+\
'- Notes : '+"%s" % (self.notes)+'\n'
return string
class Wheel(object):
"""Filter Wheel. Contains different filters as attributes.
"""
def __init__(self):
pass
def add_new_filter(self, filt, name='new_filter'):
"""Adds new filter to wheel
Parameters
----------
filt : Filter
New Filter object to be added to wheel
name : string (optional)
Name to give new filter attribute
"""
setattr(self, name, filt)
def plot(self, ax=None, ylim = None):
"""
Plot the filter response functions
Parameters
----------
ax : matplotlib.axis (optional)
Axis instance to plot on
Returns
-------
ax : ``matplotlib.axis``
Axis with plot
Note
----
Only returns an axis if an axis was not provided.
"""
if ax is None:
fig = plt.figure(figsize=(14,10))
gs = gridspec.GridSpec(1,1)
ax1 = plt.subplot(gs[0])
ax1.set_ylabel(r"Filter Response")
ax1.set_xlabel(r"Wavelength [$\mu$m]")
#ax1.set_ylim([0.0,1.0])
else:
ax1 = ax.twinx()
#ax1.set_ylim([0.0,10.0])
ax1.axes.get_yaxis().set_visible(False)
Nfilt = len(self.__dict__)
#colors,scalarMap,cNorm = scalarmap(np.arange(Nfilt),cmap='Dark2')
i = 0
fmax = 1.0
for attr, value in self.__dict__.items():
if np.max(value.response) > fmax: fmax = np.max(value.response)
wl, response = value.wl, value.response
#ax1.plot(wl,response, lw=3.0, label=value.name, c=colors[i])
ax1.fill_between(wl,response, color='purple', alpha=0.3)
i += 1
if ylim is None:
ax1.set_ylim([0.0,fmax*10.])
else:
ax1.set_ylim(ylim)
if ax is None:
return ax1
def __str__(self):
string = []
tdict = sorted(self.__dict__.items(), key=lambda x: x[1].bandcenter)
for attr, value in tdict:
string.append(attr)
print(string)
return ''#str(string)
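# Illustrative sketch of building a custom wheel (hypothetical filter values):
#
#     import numpy as np
#     wl = np.linspace(0.4, 0.7, 100)                # wavelength grid [um]
#     resp = np.exp(-((wl - 0.55) / 0.05) ** 2)      # toy Gaussian response
#     filt = Filter(name='toy', bandcenter=0.55, FWHM=0.1, wl=wl, response=resp)
#     wheel = Wheel()
#     wheel.add_new_filter(filt, name='toy')
#     wheel.plot()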
def read_jc():
"""
Read and parse the Johnson-Cousins filter files.
Returns
-------
filters : `numpy.ndarray`
Array of filter response functions
filter_names : list
List of string names for the filters
bandcenters : `numpy.array`
Wavelength bandcenters for the filters [microns]
FWHM : `numpy.array`
Full-width at half max for the filters
"""
path = 'filters/UBVRI/'
# set file path relative to this file
path = os.path.join(os.path.dirname(__file__), path)
U = np.genfromtxt(path+'bess-u.pass')
U[:,0] = U[:,0]/1e4
B = np.genfromtxt(path+'bess-b.pass')
B[:,0] = B[:,0]/1e4
V = np.genfromtxt(path+'bess-v.pass')
V[:,0] = V[:,0]/1e4
R = np.genfromtxt(path+'bess-r.pass')
R[:,0] = R[:,0]/1e4
I = np.genfromtxt(path+'bess-i.pass')
I[:,0] = I[:,0]/1e4
filters = np.array([U,B,V,R,I])
filter_names = ['U','B','V','R','I']
bandcenters = np.array([365.6, 435.3, 547.7, 634.9, 800.0]) / 1e3
FWHM = np.array([34.0, 78.1, 99.1, 106.56, 289.2]) / 1e3
return filters, filter_names, bandcenters, FWHM
class johnson_cousins(Wheel):
"""
Instantiate a filter :class:`Wheel` with the Johnson-Cousins filters
(U, B, V, R, I).
Example
-------
>>> jc = cg.imager.johnson_cousins()
>>> jc.plot(ylim = (0.0, 1.2))
.. plot::
:align: center
import coronagraph as cg
jc = cg.imager.johnson_cousins()
import matplotlib.pyplot as plt
cg.plot_setup.setup()
jc.plot(ylim = (0.0, 1.2))
plt.show()
"""
def __init__(self):
filters, filter_names, bandcenters, FWHM = read_jc()
self.U=Filter(name='U', bandcenter=bandcenters[0], FWHM=FWHM[0], wl=filters[0][:,0], response=filters[0][:,1], notes='Johnson-Cousins')
self.B=Filter(name='B', bandcenter=bandcenters[1], FWHM=FWHM[1], wl=filters[1][:,0], response=filters[1][:,1], notes='Johnson-Cousins')
self.V=Filter(name='V', bandcenter=bandcenters[2], FWHM=FWHM[2], wl=filters[2][:,0], response=filters[2][:,1], notes='Johnson-Cousins')
self.R=Filter(name='R', bandcenter=bandcenters[3], FWHM=FWHM[3], wl=filters[3][:,0], response=filters[3][:,1], notes='Johnson-Cousins')
self.I=Filter(name='I', bandcenter=bandcenters[4], FWHM=FWHM[4], wl=filters[4][:,0], response=filters[4][:,1], notes='Johnson-Cousins')
def read_landsat():
"""
Read and parse the LANDSAT filter files.
Returns
-------
wl : list
List of wavelength grids for each filter [microns]
response : list
Filter responses for each filter
LANDSAT_names : list
Names of each LANDSAT filter
FWHM : `numpy.array`
Full-width at half max for the filters
bandcenters : `numpy.array`
Wavelength bandcenters for the filters [microns]
"""
path = 'filters/LANDSAT/'
# set file path relative to this file
path = os.path.join(os.path.dirname(__file__), path)
blue = np.loadtxt(os.path.join(path,'Blue.txt'), skiprows=1)
green = np.loadtxt(os.path.join(path,'Green.txt'), skiprows=1)
red = np.loadtxt(os.path.join(path,'Red.txt'), skiprows=1)
coastal = np.loadtxt(os.path.join(path,'CostalAerosol.txt'), skiprows=1)
cirrus = np.loadtxt(os.path.join(path,'Cirrus.txt'), skiprows=1)
nir = np.loadtxt(os.path.join(path,'NIR.txt'), skiprows=1)
pan = np.loadtxt(os.path.join(path,'Pan.txt'), skiprows=1)
swir1 = np.loadtxt(os.path.join(path,'SWIR1.txt'), skiprows=1)
swir2 = np.loadtxt(os.path.join(path,'SWIR2.txt'), skiprows=1)
LANDSAT_names = ['Coastal Aerosols','Blue','Green','Red',
'NIR','SWIR1','SWIR2','Pan','Cirrus']
titles = ['wl','response','std','bandwidth',
'FWHM_low','FWHM_high','bandcenter']
wl = [coastal[:,0]/1e3, blue[:,0]/1e3, green[:,0]/1e3, red[:,0]/1e3, nir[:,0]/1e3, swir1[:,0]/1e3, swir2[:,0]/1e3, pan[:,0]/1e3, cirrus[:,0]/1e3]
response = [coastal[:,1], blue[:,1], green[:,1], red[:,1], nir[:,1], swir1[:,1], swir2[:,1], pan[:,1], cirrus[:,1]]
FWHM = np.array([15.98, 60.04, 57.33, 37.47, 28.25, 84.72, 186.66, 172.40, 20.39]) / 1e3
bandcenters = np.array([442.96, 482.04, 561.41, 654.59, 864.67, 1608.86, 2200.73, 589.50, 1373.43]) / 1e3
return wl, response, LANDSAT_names, FWHM, bandcenters
class landsat(Wheel):
"""
Instantiate a filter :class:`Wheel` with the LANDSAT filters.
"""
def __init__(self):
wl, response, LANDSAT_names, FWHM, bandcenters = read_landsat()
self.CA=Filter(name=LANDSAT_names[0], bandcenter=bandcenters[0], FWHM=FWHM[0], wl=wl[0], response=response[0], notes='LANDSAT')
self.B=Filter(name=LANDSAT_names[1], bandcenter=bandcenters[1], FWHM=FWHM[1], wl=wl[1], response=response[1], notes='LANDSAT')
self.G=Filter(name=LANDSAT_names[2], bandcenter=bandcenters[2], FWHM=FWHM[2], wl=wl[2], response=response[2], notes='LANDSAT')
self.R=Filter(name=LANDSAT_names[3], bandcenter=bandcenters[3], FWHM=FWHM[3], wl=wl[3], response=response[3], notes='LANDSAT')
self.NIR=Filter(name=LANDSAT_names[4], bandcenter=bandcenters[4], FWHM=FWHM[4], wl=wl[4], response=response[4], notes='LANDSAT')
self.SWIR1=Filter(name=LANDSAT_names[5], bandcenter=bandcenters[5], FWHM=FWHM[5], wl=wl[5], response=response[5], notes='LANDSAT')
self.SWIR2=Filter(name=LANDSAT_names[6], bandcenter=bandcenters[6], FWHM=FWHM[6], wl=wl[6], response=response[6], notes='LANDSAT')
self.Pan=Filter(name=LANDSAT_names[7], bandcenter=bandcenters[7], FWHM=FWHM[7], wl=wl[7], response=response[7], notes='LANDSAT')
self.Cirrus=Filter(name=LANDSAT_names[8], bandcenter=bandcenters[8], FWHM=FWHM[8], wl=wl[8], response=response[8], notes='LANDSAT')
def read_jc2():
"""
Read and parse the Johnson-Cousins Bessel filter files.
Returns
-------
filters : `numpy.ndarray`
Array of filter response functions
filter_names : list
List of string names for the filters
bandcenters : `numpy.array`
Wavelength bandcenters for the filters [microns]
FWHM : `numpy.array`
Full-width at half max for the filters
"""
path = 'filters/UBVRI2/'
# set file path relative to this file
path = os.path.join(os.path.dirname(__file__), path)
U = np.genfromtxt(path+'Bessel_U-1.txt', skip_header=1)
U[:,0] = U[:,0]/1e3
B = np.genfromtxt(path+'Bessel_B-1.txt', skip_header=1)
B[:,0] = B[:,0]/1e3
V = np.genfromtxt(path+'Bessel_V-1.txt', skip_header=1)
V[:,0] = V[:,0]/1e3
R = np.genfromtxt(path+'Bessel_R-1.txt', skip_header=1)
R[:,0] = R[:,0]/1e3
I = np.genfromtxt(path+'Bessel_I-1.txt', skip_header=1)
I[:,0] = I[:,0]/1e3
filters = np.array([U[::-1,:],B[::-1,:],V[::-1,:],R[::-1,:],I[::-1,:]])
filter_names = ['U','B','V','R','I']
bandcenters = np.array([365.6, 435.3, 547.7, 634.9, 879.7]) / 1e3
FWHM = np.array([34.0, 78.1, 99.1, 106.56, 289.2]) / 1e3
return filters, filter_names, bandcenters, FWHM
class johnson_cousins2(Wheel):
"""
Instantiate a filter :class:`Wheel` with the Johnson-Cousins Bessel filters
(U, B, V, R, I).
"""
def __init__(self):
filters, filter_names, bandcenters, FWHM = read_jc2()
self.U=Filter(name='U', bandcenter=bandcenters[0], FWHM=FWHM[0], wl=filters[0][:,0], response=filters[0][:,1], notes='Johnson-Cousins')
self.B=Filter(name='B', bandcenter=bandcenters[1], FWHM=FWHM[1], wl=filters[1][:,0], response=filters[1][:,1], notes='Johnson-Cousins')
self.V=Filter(name='V', bandcenter=bandcenters[2], FWHM=FWHM[2], wl=filters[2][:,0], response=filters[2][:,1], notes='Johnson-Cousins')
self.R=Filter(name='R', bandcenter=bandcenters[3], FWHM=FWHM[3], wl=filters[3][:,0], response=filters[3][:,1], notes='Johnson-Cousins')
self.I=Filter(name='I', bandcenter=bandcenters[4], FWHM=FWHM[4], wl=filters[4][:,0], response=filters[4][:,1], notes='Johnson-Cousins')
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Converts checkpoint variables into Const ops in a standalone GraphDef file.
This script is designed to take a GraphDef proto, a SaverDef proto, and a set of
variable values stored in a checkpoint file, and output a GraphDef with all of
the variable ops converted into const ops containing the values of the
variables.
It's useful to do this when we need to load a single file in C++, especially in
environments like mobile or embedded where we may not have access to the
RestoreTensor ops and file loading calls that they rely on.
An example of command-line usage is:
bazel build tensorflow/python/tools:freeze_graph && \
bazel-bin/tensorflow/python/tools/freeze_graph \
--input_graph=some_graph_def.pb \
--input_checkpoint=model.ckpt-8361242 \
--output_graph=/tmp/frozen_graph.pb --output_node_names=softmax
You can also look at freeze_graph_test.py for an example of how to use it.
"""
import argparse
import re
import sys
from absl import app
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.core.protobuf.meta_graph_pb2 import MetaGraphDef
from tensorflow.python.client import session
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.platform import gfile
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import saved_model_utils
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import py_checkpoint_reader
from tensorflow.python.training import saver as saver_lib
def _has_no_variables(sess):
"""Determines if the graph has any variables.
Args:
sess: TensorFlow Session.
Returns:
Bool.
"""
for op in sess.graph.get_operations():
if op.type.startswith("Variable") or op.type.endswith("VariableOp"):
return False
return True
def freeze_graph_with_def_protos(input_graph_def,
input_saver_def,
input_checkpoint,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph,
clear_devices,
initializer_nodes,
variable_names_whitelist="",
variable_names_denylist="",
input_meta_graph_def=None,
input_saved_model_dir=None,
saved_model_tags=None,
checkpoint_version=saver_pb2.SaverDef.V2):
"""Converts all variables in a graph and checkpoint into constants.
Args:
input_graph_def: A `GraphDef`.
input_saver_def: A `SaverDef` (optional).
input_checkpoint: The prefix of a V1 or V2 checkpoint, with V2 taking
priority. Typically the result of `Saver.save()` or that of
`tf.train.latest_checkpoint()`, regardless of sharded/non-sharded or
V1/V2.
output_node_names: The name(s) of the output nodes, comma separated.
restore_op_name: Unused.
filename_tensor_name: Unused.
output_graph: String where to write the frozen `GraphDef`.
clear_devices: A Bool whether to remove device specifications.
initializer_nodes: Comma separated string of initializer nodes to run before
freezing.
variable_names_whitelist: The set of variable names to convert (optional, by
default, all variables are converted).
variable_names_denylist: The set of variable names to omit converting
to constants (optional).
input_meta_graph_def: A `MetaGraphDef` (optional).
input_saved_model_dir: Path to the dir with TensorFlow 'SavedModel' file
and variables (optional).
saved_model_tags: Group of comma separated tag(s) of the MetaGraphDef to
load, in string format (optional).
checkpoint_version: Tensorflow variable file format (saver_pb2.SaverDef.V1
or saver_pb2.SaverDef.V2)
Returns:
Location of the output_graph_def.
"""
del restore_op_name, filename_tensor_name # Unused by updated loading code.
# 'input_checkpoint' may be a prefix if we're using Saver V2 format
if (not input_saved_model_dir and
not checkpoint_management.checkpoint_exists(input_checkpoint)):
raise ValueError("Input checkpoint '" + input_checkpoint +
"' doesn't exist!")
if not output_node_names:
raise ValueError(
"You need to supply the name of a node to --output_node_names.")
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:
if input_meta_graph_def:
for node in input_meta_graph_def.graph_def.node:
node.device = ""
elif input_graph_def:
for node in input_graph_def.node:
node.device = ""
if input_graph_def:
_ = importer.import_graph_def(input_graph_def, name="")
with session.Session() as sess:
if input_saver_def:
saver = saver_lib.Saver(
saver_def=input_saver_def, write_version=checkpoint_version)
saver.restore(sess, input_checkpoint)
elif input_meta_graph_def:
restorer = saver_lib.import_meta_graph(
input_meta_graph_def, clear_devices=True)
restorer.restore(sess, input_checkpoint)
if initializer_nodes:
sess.run(initializer_nodes.replace(" ", "").split(","))
elif input_saved_model_dir:
if saved_model_tags is None:
saved_model_tags = []
loader.load(sess, saved_model_tags, input_saved_model_dir)
else:
var_list = {}
reader = py_checkpoint_reader.NewCheckpointReader(input_checkpoint)
var_to_shape_map = reader.get_variable_to_shape_map()
# List of all partition variables. Because the condition is heuristic
# based, the list could include false positives.
all_partition_variable_names = [
tensor.name.split(":")[0]
for op in sess.graph.get_operations()
for tensor in op.values()
if re.search(r"/part_\d+/", tensor.name)
]
has_partition_var = False
for key in var_to_shape_map:
try:
tensor = sess.graph.get_tensor_by_name(key + ":0")
if any(key in name for name in all_partition_variable_names):
has_partition_var = True
except KeyError:
# This tensor doesn't exist in the graph (for example it's
# 'global_step' or a similar housekeeping element) so skip it.
continue
var_list[key] = tensor
try:
saver = saver_lib.Saver(
var_list=var_list, write_version=checkpoint_version)
except TypeError as e:
# `var_list` is required to be a map of variable names to Variable
# tensors. Partition variables are Identity tensors that cannot be
# handled by Saver.
if has_partition_var:
raise ValueError(
"Models containing partition variables cannot be converted "
"from checkpoint files. Please pass in a SavedModel using "
"the flag --input_saved_model_dir.")
# Models that have been frozen previously do not contain Variables.
elif _has_no_variables(sess):
raise ValueError(
"No variables were found in this model. It is likely the model "
"was frozen previously. You cannot freeze a graph twice.")
return 0
else:
raise e
saver.restore(sess, input_checkpoint)
if initializer_nodes:
sess.run(initializer_nodes.replace(" ", "").split(","))
variable_names_whitelist = (
variable_names_whitelist.replace(" ", "").split(",")
if variable_names_whitelist else None)
variable_names_denylist = (
variable_names_denylist.replace(" ", "").split(",")
if variable_names_denylist else None)
if input_meta_graph_def:
output_graph_def = graph_util.convert_variables_to_constants(
sess,
input_meta_graph_def.graph_def,
output_node_names.replace(" ", "").split(","),
variable_names_whitelist=variable_names_whitelist,
variable_names_blacklist=variable_names_denylist)
else:
output_graph_def = graph_util.convert_variables_to_constants(
sess,
input_graph_def,
output_node_names.replace(" ", "").split(","),
variable_names_whitelist=variable_names_whitelist,
variable_names_blacklist=variable_names_denylist)
# Write GraphDef to file if output path has been given.
if output_graph:
with gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
return output_graph_def
def _parse_input_graph_proto(input_graph, input_binary):
"""Parses input tensorflow graph into GraphDef proto."""
if not gfile.Exists(input_graph):
raise IOError("Input graph file '" + input_graph + "' does not exist!")
input_graph_def = graph_pb2.GraphDef()
mode = "rb" if input_binary else "r"
with gfile.GFile(input_graph, mode) as f:
if input_binary:
input_graph_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), input_graph_def)
return input_graph_def
def _parse_input_meta_graph_proto(input_graph, input_binary):
"""Parses input tensorflow graph into MetaGraphDef proto."""
if not gfile.Exists(input_graph):
raise IOError("Input meta graph file '" + input_graph + "' does not exist!")
input_meta_graph_def = MetaGraphDef()
mode = "rb" if input_binary else "r"
with gfile.GFile(input_graph, mode) as f:
if input_binary:
input_meta_graph_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), input_meta_graph_def)
print("Loaded meta graph file '" + input_graph)
return input_meta_graph_def
def _parse_input_saver_proto(input_saver, input_binary):
"""Parses input tensorflow Saver into SaverDef proto."""
if not gfile.Exists(input_saver):
raise IOError("Input saver file '" + input_saver + "' does not exist!")
mode = "rb" if input_binary else "r"
with gfile.GFile(input_saver, mode) as f:
saver_def = saver_pb2.SaverDef()
if input_binary:
saver_def.ParseFromString(f.read())
else:
text_format.Merge(f.read(), saver_def)
return saver_def
def freeze_graph(input_graph,
input_saver,
input_binary,
input_checkpoint,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph,
clear_devices,
initializer_nodes,
variable_names_whitelist="",
variable_names_denylist="",
input_meta_graph=None,
input_saved_model_dir=None,
saved_model_tags=tag_constants.SERVING,
checkpoint_version=saver_pb2.SaverDef.V2):
"""Converts all variables in a graph and checkpoint into constants.
Args:
input_graph: A `GraphDef` file to load.
input_saver: A TensorFlow Saver file.
input_binary: A Bool. True means input_graph is .pb, False indicates .pbtxt.
input_checkpoint: The prefix of a V1 or V2 checkpoint, with V2 taking
priority. Typically the result of `Saver.save()` or that of
`tf.train.latest_checkpoint()`, regardless of sharded/non-sharded or
V1/V2.
output_node_names: The name(s) of the output nodes, comma separated.
restore_op_name: Unused.
filename_tensor_name: Unused.
output_graph: String where to write the frozen `GraphDef`.
clear_devices: A Bool whether to remove device specifications.
initializer_nodes: Comma separated list of initializer nodes to run before
freezing.
variable_names_whitelist: The set of variable names to convert (optional, by
default, all variables are converted).
variable_names_denylist: The set of variable names to omit converting
to constants (optional).
input_meta_graph: A `MetaGraphDef` file to load (optional).
input_saved_model_dir: Path to the dir with TensorFlow 'SavedModel' file and
variables (optional).
saved_model_tags: Group of comma separated tag(s) of the MetaGraphDef to
load, in string format.
checkpoint_version: Tensorflow variable file format (saver_pb2.SaverDef.V1
or saver_pb2.SaverDef.V2).
Returns:
String that is the location of frozen GraphDef.
"""
input_graph_def = None
if input_saved_model_dir:
input_graph_def = saved_model_utils.get_meta_graph_def(
input_saved_model_dir, saved_model_tags).graph_def
elif input_graph:
input_graph_def = _parse_input_graph_proto(input_graph, input_binary)
input_meta_graph_def = None
if input_meta_graph:
input_meta_graph_def = _parse_input_meta_graph_proto(
input_meta_graph, input_binary)
input_saver_def = None
if input_saver:
input_saver_def = _parse_input_saver_proto(input_saver, input_binary)
return freeze_graph_with_def_protos(
input_graph_def,
input_saver_def,
input_checkpoint,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph,
clear_devices,
initializer_nodes,
variable_names_whitelist,
variable_names_denylist,
input_meta_graph_def,
input_saved_model_dir,
[tag for tag in saved_model_tags.replace(" ", "").split(",") if tag],
checkpoint_version=checkpoint_version)
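# Illustrative programmatic sketch, mirroring the command-line example in the
# module docstring (paths and node names are placeholders):
#
#     freeze_graph(input_graph="some_graph_def.pb",
#                  input_saver="",
#                  input_binary=True,
#                  input_checkpoint="model.ckpt-8361242",
#                  output_node_names="softmax",
#                  restore_op_name="",       # unused by updated loading code
#                  filename_tensor_name="",  # unused by updated loading code
#                  output_graph="/tmp/frozen_graph.pb",
#                  clear_devices=True,
#                  initializer_nodes="")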
def main(unused_args, flags):
if flags.checkpoint_version == 1:
checkpoint_version = saver_pb2.SaverDef.V1
elif flags.checkpoint_version == 2:
checkpoint_version = saver_pb2.SaverDef.V2
else:
raise ValueError("Invalid checkpoint version (must be '1' or '2'): %d" %
flags.checkpoint_version)
freeze_graph(flags.input_graph, flags.input_saver, flags.input_binary,
flags.input_checkpoint, flags.output_node_names,
flags.restore_op_name, flags.filename_tensor_name,
flags.output_graph, flags.clear_devices, flags.initializer_nodes,
flags.variable_names_whitelist, flags.variable_names_denylist,
flags.input_meta_graph, flags.input_saved_model_dir,
flags.saved_model_tags, checkpoint_version)
def run_main():
"""Main function of freeze_graph."""
parser = argparse.ArgumentParser()
parser.register("type", "bool", lambda v: v.lower() == "true")
parser.add_argument(
"--input_graph",
type=str,
default="",
help="TensorFlow \'GraphDef\' file to load.")
parser.add_argument(
"--input_saver",
type=str,
default="",
help="TensorFlow saver file to load.")
parser.add_argument(
"--input_checkpoint",
type=str,
default="",
help="TensorFlow variables file to load.")
parser.add_argument(
"--checkpoint_version",
type=int,
default=2,
help="Tensorflow variable file format")
parser.add_argument(
"--output_graph",
type=str,
default="",
help="Output \'GraphDef\' file name.")
parser.add_argument(
"--input_binary",
nargs="?",
const=True,
type="bool",
default=False,
help="Whether the input files are in binary format.")
parser.add_argument(
"--output_node_names",
type=str,
default="",
help="The name of the output nodes, comma separated.")
parser.add_argument(
"--restore_op_name",
type=str,
default="save/restore_all",
help="""\
The name of the master restore operator. Deprecated, unused by updated \
loading code.
""")
parser.add_argument(
"--filename_tensor_name",
type=str,
default="save/Const:0",
help="""\
The name of the tensor holding the save path. Deprecated, unused by \
updated loading code.
""")
parser.add_argument(
"--clear_devices",
nargs="?",
const=True,
type="bool",
default=True,
help="Whether to remove device specifications.")
parser.add_argument(
"--initializer_nodes",
type=str,
default="",
help="Comma separated list of initializer nodes to run before freezing.")
parser.add_argument(
"--variable_names_whitelist",
type=str,
default="",
help="""\
Comma separated list of variables to convert to constants. If specified, \
only those variables will be converted to constants.\
""")
parser.add_argument(
"--variable_names_denylist",
type=str,
default="",
help="""\
Comma separated list of variables to skip converting to constants.\
""")
parser.add_argument(
"--input_meta_graph",
type=str,
default="",
help="TensorFlow \'MetaGraphDef\' file to load.")
parser.add_argument(
"--input_saved_model_dir",
type=str,
default="",
help="Path to the dir with TensorFlow \'SavedModel\' file and variables.")
parser.add_argument(
"--saved_model_tags",
type=str,
default="serve",
help="""\
Group of tag(s) of the MetaGraphDef to load, in string format,\
separated by \',\'. For tag-set contains multiple tags, all tags \
must be passed in.\
""")
flags, unparsed = parser.parse_known_args()
my_main = lambda unused_args: main(unused_args, flags)
app.run(main=my_main, argv=[sys.argv[0]] + unparsed)
if __name__ == "__main__":
run_main()
|
|
#!/usr/bin/env python
# coding: utf-8
import pytest
import os
import warnings
from pandas import DataFrame, Series
from pandas.compat import zip, iteritems
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.api import is_list_like
import pandas.util.testing as tm
from pandas.util.testing import (ensure_clean,
assert_is_valid_plot_return_object)
import pandas.util._test_decorators as td
import numpy as np
from numpy import random
import pandas.plotting as plotting
from pandas.plotting._tools import _flatten
"""
This is a common base class used for various plotting tests
"""
def _skip_if_no_scipy_gaussian_kde():
try:
from scipy.stats import gaussian_kde # noqa
except ImportError:
pytest.skip("scipy version doesn't support gaussian_kde")
def _ok_for_gaussian_kde(kind):
if kind in ['kde', 'density']:
try:
from scipy.stats import gaussian_kde # noqa
except ImportError:
return False
return True
@td.skip_if_no_mpl
class TestPlotBase(object):
def setup_method(self, method):
import matplotlib as mpl
mpl.rcdefaults()
self.mpl_ge_2_0_1 = plotting._compat._mpl_ge_2_0_1()
self.mpl_ge_2_1_0 = plotting._compat._mpl_ge_2_1_0()
self.mpl_ge_2_2_0 = plotting._compat._mpl_ge_2_2_0()
self.mpl_ge_2_2_2 = plotting._compat._mpl_ge_2_2_2()
self.mpl_ge_3_0_0 = plotting._compat._mpl_ge_3_0_0()
self.bp_n_objects = 7
self.polycollection_factor = 2
self.default_figsize = (6.4, 4.8)
self.default_tick_position = 'left'
n = 100
with tm.RNGContext(42):
gender = np.random.choice(['Male', 'Female'], size=n)
classroom = np.random.choice(['A', 'B', 'C'], size=n)
self.hist_df = DataFrame({'gender': gender,
'classroom': classroom,
'height': random.normal(66, 4, size=n),
'weight': random.normal(161, 32, size=n),
'category': random.randint(4, size=n)})
self.tdf = tm.makeTimeDataFrame()
self.hexbin_df = DataFrame({"A": np.random.uniform(size=20),
"B": np.random.uniform(size=20),
"C": np.arange(20) + np.random.uniform(
size=20)})
def teardown_method(self, method):
tm.close()
@cache_readonly
def plt(self):
import matplotlib.pyplot as plt
return plt
@cache_readonly
def colorconverter(self):
import matplotlib.colors as colors
return colors.colorConverter
def _check_legend_labels(self, axes, labels=None, visible=True):
"""
Check each axes has expected legend labels
Parameters
----------
axes : matplotlib Axes object, or its list-like
labels : list-like
expected legend labels
visible : bool
expected legend visibility. labels are checked only when visible is
True
"""
if visible and (labels is None):
raise ValueError('labels must be specified when visible is True')
axes = self._flatten_visible(axes)
for ax in axes:
if visible:
assert ax.get_legend() is not None
self._check_text_labels(ax.get_legend().get_texts(), labels)
else:
assert ax.get_legend() is None
def _check_data(self, xp, rs):
"""
Check each axes has identical lines
Parameters
----------
xp : matplotlib Axes object
rs : matplotlib Axes object
"""
xp_lines = xp.get_lines()
rs_lines = rs.get_lines()
def check_line(xpl, rsl):
xpdata = xpl.get_xydata()
rsdata = rsl.get_xydata()
tm.assert_almost_equal(xpdata, rsdata)
assert len(xp_lines) == len(rs_lines)
[check_line(xpl, rsl) for xpl, rsl in zip(xp_lines, rs_lines)]
tm.close()
def _check_visible(self, collections, visible=True):
"""
Check each artist is visible or not
Parameters
----------
collections : matplotlib Artist or its list-like
target Artist or its list or collection
visible : bool
expected visibility
"""
from matplotlib.collections import Collection
if not isinstance(collections,
Collection) and not is_list_like(collections):
collections = [collections]
for patch in collections:
assert patch.get_visible() == visible
def _get_colors_mapped(self, series, colors):
unique = series.unique()
        # the lengths of unique and colors can differ
        # depending on the slice value
mapped = dict(zip(unique, colors))
return [mapped[v] for v in series.values]
def _check_colors(self, collections, linecolors=None, facecolors=None,
mapping=None):
"""
Check each artist has expected line colors and face colors
Parameters
----------
collections : list-like
list or collection of target artist
linecolors : list-like which has the same length as collections
list of expected line colors
facecolors : list-like which has the same length as collections
list of expected face colors
mapping : Series
Series used for color grouping key
used for andrew_curves, parallel_coordinates, radviz test
"""
from matplotlib.lines import Line2D
from matplotlib.collections import (
Collection, PolyCollection, LineCollection
)
conv = self.colorconverter
if linecolors is not None:
if mapping is not None:
linecolors = self._get_colors_mapped(mapping, linecolors)
linecolors = linecolors[:len(collections)]
assert len(collections) == len(linecolors)
for patch, color in zip(collections, linecolors):
if isinstance(patch, Line2D):
result = patch.get_color()
                    # Line2D may contain a string color expression
result = conv.to_rgba(result)
elif isinstance(patch, (PolyCollection, LineCollection)):
result = tuple(patch.get_edgecolor()[0])
else:
result = patch.get_edgecolor()
expected = conv.to_rgba(color)
assert result == expected
if facecolors is not None:
if mapping is not None:
facecolors = self._get_colors_mapped(mapping, facecolors)
facecolors = facecolors[:len(collections)]
assert len(collections) == len(facecolors)
for patch, color in zip(collections, facecolors):
if isinstance(patch, Collection):
# returned as list of np.array
result = patch.get_facecolor()[0]
else:
result = patch.get_facecolor()
if isinstance(result, np.ndarray):
result = tuple(result)
expected = conv.to_rgba(color)
assert result == expected
def _check_text_labels(self, texts, expected):
"""
Check each text has expected labels
Parameters
----------
texts : matplotlib Text object, or its list-like
target text, or its list
expected : str or list-like which has the same length as texts
expected text label, or its list
"""
if not is_list_like(texts):
assert texts.get_text() == expected
else:
labels = [t.get_text() for t in texts]
assert len(labels) == len(expected)
for label, e in zip(labels, expected):
assert label == e
def _check_ticks_props(self, axes, xlabelsize=None, xrot=None,
ylabelsize=None, yrot=None):
"""
Check each axes has expected tick properties
Parameters
----------
axes : matplotlib Axes object, or its list-like
xlabelsize : number
expected xticks font size
xrot : number
expected xticks rotation
ylabelsize : number
expected yticks font size
yrot : number
expected yticks rotation
"""
from matplotlib.ticker import NullFormatter
axes = self._flatten_visible(axes)
for ax in axes:
if xlabelsize or xrot:
if isinstance(ax.xaxis.get_minor_formatter(), NullFormatter):
                    # If the minor ticks use a NullFormatter, rot / fontsize
                    # are not retained
labels = ax.get_xticklabels()
else:
labels = ax.get_xticklabels() + ax.get_xticklabels(
minor=True)
for label in labels:
if xlabelsize is not None:
tm.assert_almost_equal(label.get_fontsize(),
xlabelsize)
if xrot is not None:
tm.assert_almost_equal(label.get_rotation(), xrot)
if ylabelsize or yrot:
if isinstance(ax.yaxis.get_minor_formatter(), NullFormatter):
labels = ax.get_yticklabels()
else:
labels = ax.get_yticklabels() + ax.get_yticklabels(
minor=True)
for label in labels:
if ylabelsize is not None:
tm.assert_almost_equal(label.get_fontsize(),
ylabelsize)
if yrot is not None:
tm.assert_almost_equal(label.get_rotation(), yrot)
def _check_ax_scales(self, axes, xaxis='linear', yaxis='linear'):
"""
Check each axes has expected scales
Parameters
----------
axes : matplotlib Axes object, or its list-like
xaxis : {'linear', 'log'}
expected xaxis scale
yaxis : {'linear', 'log'}
expected yaxis scale
"""
axes = self._flatten_visible(axes)
for ax in axes:
assert ax.xaxis.get_scale() == xaxis
assert ax.yaxis.get_scale() == yaxis
def _check_axes_shape(self, axes, axes_num=None, layout=None,
figsize=None):
"""
Check expected number of axes is drawn in expected layout
Parameters
----------
axes : matplotlib Axes object, or its list-like
axes_num : number
expected number of axes. Unnecessary axes should be set to
invisible.
layout : tuple
            expected layout, (expected number of rows, columns)
figsize : tuple
            expected figsize. Defaults to the matplotlib default.
"""
if figsize is None:
figsize = self.default_figsize
visible_axes = self._flatten_visible(axes)
if axes_num is not None:
assert len(visible_axes) == axes_num
for ax in visible_axes:
# check something drawn on visible axes
assert len(ax.get_children()) > 0
if layout is not None:
result = self._get_axes_layout(_flatten(axes))
assert result == layout
tm.assert_numpy_array_equal(
visible_axes[0].figure.get_size_inches(),
np.array(figsize, dtype=np.float64))
def _get_axes_layout(self, axes):
x_set = set()
y_set = set()
for ax in axes:
# check axes coordinates to estimate layout
points = ax.get_position().get_points()
x_set.add(points[0][0])
y_set.add(points[0][1])
return (len(y_set), len(x_set))
def _flatten_visible(self, axes):
"""
Flatten axes, and filter only visible
Parameters
----------
axes : matplotlib Axes object, or its list-like
"""
axes = _flatten(axes)
axes = [ax for ax in axes if ax.get_visible()]
return axes
def _check_has_errorbars(self, axes, xerr=0, yerr=0):
"""
Check axes has expected number of errorbars
Parameters
----------
axes : matplotlib Axes object, or its list-like
xerr : number
expected number of x errorbar
yerr : number
expected number of y errorbar
"""
axes = self._flatten_visible(axes)
for ax in axes:
containers = ax.containers
xerr_count = 0
yerr_count = 0
for c in containers:
has_xerr = getattr(c, 'has_xerr', False)
has_yerr = getattr(c, 'has_yerr', False)
if has_xerr:
xerr_count += 1
if has_yerr:
yerr_count += 1
assert xerr == xerr_count
assert yerr == yerr_count
def _check_box_return_type(self, returned, return_type, expected_keys=None,
check_ax_title=True):
"""
Check box returned type is correct
Parameters
----------
returned : object to be tested, returned from boxplot
return_type : str
return_type passed to boxplot
expected_keys : list-like, optional
group labels in subplot case. If not passed,
the function checks assuming boxplot uses single ax
check_ax_title : bool
Whether to check the ax.title is the same as expected_key
Intended to be checked by calling from ``boxplot``.
Normal ``plot`` doesn't attach ``ax.title``, it must be disabled.
"""
from matplotlib.axes import Axes
types = {'dict': dict, 'axes': Axes, 'both': tuple}
if expected_keys is None:
# should be fixed when the returning default is changed
if return_type is None:
return_type = 'dict'
assert isinstance(returned, types[return_type])
if return_type == 'both':
assert isinstance(returned.ax, Axes)
assert isinstance(returned.lines, dict)
else:
# should be fixed when the returning default is changed
if return_type is None:
for r in self._flatten_visible(returned):
assert isinstance(r, Axes)
return
assert isinstance(returned, Series)
assert sorted(returned.keys()) == sorted(expected_keys)
for key, value in iteritems(returned):
assert isinstance(value, types[return_type])
# check returned dict has correct mapping
if return_type == 'axes':
if check_ax_title:
assert value.get_title() == key
elif return_type == 'both':
if check_ax_title:
assert value.ax.get_title() == key
assert isinstance(value.ax, Axes)
assert isinstance(value.lines, dict)
elif return_type == 'dict':
line = value['medians'][0]
axes = line.axes
if check_ax_title:
assert axes.get_title() == key
else:
raise AssertionError
def _check_grid_settings(self, obj, kinds, kws={}):
# Make sure plot defaults to rcParams['axes.grid'] setting, GH 9792
import matplotlib as mpl
def is_grid_on():
xoff = all(not g.gridOn
for g in self.plt.gca().xaxis.get_major_ticks())
yoff = all(not g.gridOn
for g in self.plt.gca().yaxis.get_major_ticks())
return not (xoff and yoff)
spndx = 1
for kind in kinds:
if not _ok_for_gaussian_kde(kind):
continue
self.plt.subplot(1, 4 * len(kinds), spndx)
spndx += 1
mpl.rc('axes', grid=False)
obj.plot(kind=kind, **kws)
assert not is_grid_on()
self.plt.subplot(1, 4 * len(kinds), spndx)
spndx += 1
mpl.rc('axes', grid=True)
obj.plot(kind=kind, grid=False, **kws)
assert not is_grid_on()
if kind != 'pie':
self.plt.subplot(1, 4 * len(kinds), spndx)
spndx += 1
mpl.rc('axes', grid=True)
obj.plot(kind=kind, **kws)
assert is_grid_on()
self.plt.subplot(1, 4 * len(kinds), spndx)
spndx += 1
mpl.rc('axes', grid=False)
obj.plot(kind=kind, grid=True, **kws)
assert is_grid_on()
def _unpack_cycler(self, rcParams, field='color'):
"""
Auxiliary function for correctly unpacking cycler after MPL >= 1.5
"""
return [v[field] for v in rcParams['axes.prop_cycle']]
def _check_plot_works(f, filterwarnings='always', **kwargs):
import matplotlib.pyplot as plt
ret = None
with warnings.catch_warnings():
warnings.simplefilter(filterwarnings)
try:
try:
fig = kwargs['figure']
except KeyError:
fig = plt.gcf()
plt.clf()
ax = kwargs.get('ax', fig.add_subplot(211)) # noqa
ret = f(**kwargs)
assert_is_valid_plot_return_object(ret)
try:
kwargs['ax'] = fig.add_subplot(212)
ret = f(**kwargs)
except Exception:
pass
else:
assert_is_valid_plot_return_object(ret)
with ensure_clean(return_filelike=True) as path:
plt.savefig(path)
finally:
tm.close(fig)
return ret
def curpath():
pth, _ = os.path.split(os.path.abspath(__file__))
return pth
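# A minimal illustrative subclass (not part of the original suite) showing how
# the TestPlotBase helpers and _check_plot_works are typically combined in a
# concrete test. The DataFrame contents and the plot kind are arbitrary
# examples.
@td.skip_if_no_mpl
class TestPlotBaseUsageExample(TestPlotBase):

    def test_line_plot_helpers(self):
        df = DataFrame({'x': np.arange(10), 'y': np.arange(10) * 2.0})
        # _check_plot_works draws the plot twice (once on a fresh figure,
        # once on a provided subplot) and returns the resulting axes.
        ax = _check_plot_works(df.plot, kind='line')
        self._check_legend_labels(ax, labels=['x', 'y'])
        self._check_ax_scales(ax, xaxis='linear', yaxis='linear')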
|
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""An example functional test
The module-level docstring should include a high-level description of
what the test is doing. It's the first thing people see when they open
the file and should give the reader information about *what* the test
is testing and *how* it's being tested
"""
# Imports should be in PEP8 ordering (std library first, then third party
# libraries then local imports).
from collections import defaultdict
# Avoid wildcard * imports if possible
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
NetworkThread,
NodeConn,
NodeConnCB,
mininode_lock,
msg_block,
msg_getdata,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
p2p_port,
wait_until,
)
# NodeConnCB is a class containing callbacks to be executed when a P2P
# message is received from the node-under-test. Subclass NodeConnCB and
# override the on_*() methods if you need custom behaviour.
class BaseNode(NodeConnCB):
def __init__(self):
"""Initialize the NodeConnCB
        Used to initialize custom properties for the Node that aren't
included by default in the base class. Be aware that the NodeConnCB
base class already stores a counter for each P2P message type and the
last received message of each type, which should be sufficient for the
needs of most tests.
Call super().__init__() first for standard initialization and then
initialize custom properties."""
super().__init__()
# Stores a dictionary of all blocks received
self.block_receive_map = defaultdict(int)
def on_block(self, conn, message):
"""Override the standard on_block callback
Store the hash of a received block in the dictionary."""
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
def on_inv(self, conn, message):
"""Override the standard on_inv callback"""
pass
def custom_function():
"""Do some custom behaviour
If this function is more generally useful for other tests, consider
moving it to a module in test_framework."""
# self.log.info("running custom_function") # Oops! Can't run self.log outside the BitcoinTestFramework
pass
class ExampleTest(BitcoinTestFramework):
# Each functional test is a subclass of the BitcoinTestFramework class.
# Override the __init__(), add_options(), setup_chain(), setup_network()
# and setup_nodes() methods to customize the test setup as required.
def __init__(self):
"""Initialize the test
Call super().__init__() first, and then override any test parameters
for your individual test."""
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
# Use self.extra_args to change command-line arguments for the nodes
self.extra_args = [[], ["-logips"], []]
# self.log.info("I've finished __init__") # Oops! Can't run self.log before run_test()
# Use add_options() to add specific command-line options for your test.
# In practice this is not used very much, since the tests are mostly written
# to be run in automated environments without command-line options.
# def add_options()
# pass
# Use setup_chain() to customize the node data directories. In practice
# this is not used very much since the default behaviour is almost always
# fine
# def setup_chain():
# pass
def setup_network(self):
"""Setup the test network topology
Often you won't need to override this, since the standard network topology
(linear: node0 <-> node1 <-> node2 <-> ...) is fine for most tests.
If you do override this method, remember to start the nodes, assign
them to self.nodes, connect them and then sync."""
self.setup_nodes()
# In this test, we're not connecting node2 to node0 or node1. Calls to
# sync_all() should not include node2, since we're not expecting it to
# sync.
connect_nodes(self.nodes[0], 1)
self.sync_all([self.nodes[0:1]])
# Use setup_nodes() to customize the node start behaviour (for example if
# you don't want to start all nodes at the start of the test).
# def setup_nodes():
# pass
def custom_method(self):
"""Do some custom behaviour for this test
Define it in a method here because you're going to use it repeatedly.
If you think it's useful in general, consider moving it to the base
BitcoinTestFramework class so other tests can use it."""
self.log.info("Running custom_method")
def run_test(self):
"""Main test logic"""
# Create a P2P connection to one of the nodes
node0 = BaseNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
node0.add_connection(connections[0])
# Start up network handling in another thread. This needs to be called
# after the P2P connections have been created.
NetworkThread().start()
# wait_for_verack ensures that the P2P connection is fully up.
node0.wait_for_verack()
# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
self.sync_all([self.nodes[0:1]])
# Notice above how we called an RPC by calling a method with the same
# name on the node object. Notice also how we used a keyword argument
# to specify a named RPC argument. Neither of those are defined on the
# node object. Instead there's some __getattr__() magic going on under
# the covers to dispatch unrecognised attribute calls to the RPC
# interface.
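        # For illustration (hypothetical equivalent forms of the call above),
        # both of these dispatch to the same "generate" RPC:
        #     self.nodes[0].generate(1)            # positional RPC argument
        #     self.nodes[0].generate(nblocks=1)    # named RPC argument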
# Logs are nice. Do plenty of them. They can be used in place of comments for
# breaking the test into sub-sections.
self.log.info("Starting test!")
self.log.info("Calling a custom function")
custom_function()
self.log.info("Calling a custom method")
self.custom_method()
self.log.info("Create some blocks")
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
height = 1
for i in range(10):
# Use the mininode and blocktools functionality to manually build a block
# Calling the generate() rpc is easier, but this allows us to exactly
# control the blocks and transactions.
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
block_message = msg_block(block)
# Send message is used to send a P2P message to the node over our NodeConn connection
node0.send_message(block_message)
self.tip = block.sha256
blocks.append(self.tip)
self.block_time += 1
height += 1
self.log.info("Wait for node1 to reach current tip (height 11) using RPC")
self.nodes[1].waitforblockheight(11)
self.log.info("Connect node2 and node1")
connect_nodes(self.nodes[1], 2)
self.log.info("Add P2P connection to node2")
node2 = BaseNode()
connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
node2.add_connection(connections[1])
node2.wait_for_verack()
self.log.info("Wait for node2 reach current tip. Test that it has propagated all the blocks to us")
getdata_request = msg_getdata()
for block in blocks:
getdata_request.inv.append(CInv(2, block))
node2.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
# NodeConnCB objects.
wait_until(lambda: sorted(blocks) == sorted(list(node2.block_receive_map.keys())), timeout=5, lock=mininode_lock)
self.log.info("Check that each block was received only once")
# The network thread uses a global lock on data access to the NodeConn objects when sending and receiving
# messages. The test thread should acquire the global lock before accessing any NodeConn data to avoid locking
# and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
with mininode_lock:
for block in node2.block_receive_map.values():
assert_equal(block, 1)
if __name__ == '__main__':
ExampleTest().main()
|
|
#!/usr/bin/env python2.4
"""static - A stupidly simple WSGI way to serve static (or mixed) content.
(See the docstrings of the various functions and classes.)
Copyright (C) 2006 Luke Arno - http://lukearno.com/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to:
The Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
Luke Arno can be found at http://lukearno.com/
"""
import mimetypes
import rfc822
import time
import string
import os
from wsgiref import util
class StatusApp:
"""A WSGI app that just returns the given status."""
def __init__(self, status, message=None):
self.status = status
if message is None:
self.message = status
else:
self.message = message
def __call__(self, environ, start_response, headers=[]):
start_response(self.status, headers)
if environ['REQUEST_METHOD'] == 'GET':
return [self.message]
else:
return [""]
def generate_xhtml(path, dirs, files):
"""Return a XHTML document listing the directories and files."""
# Prepare the path to display.
if path != '/':
dirs.insert(0, '..')
if not path.endswith('/'):
path += '/'
def itemize(item):
return '<a href="%s">%s</a>' % (item, path+item)
dirs = [d + '/' for d in dirs]
return """
<html>
<body>
<h1>%s</h1>
<pre>%s\n%s</pre>
</body>
</html>
""" % (path, '\n'.join(itemize(dir) for dir in dirs), '\n'.join(itemize(file) for file in files))
def get_entries(path):
"""Return sorted lists of directories and files in the given path."""
dirs, files = [], []
for entry in os.listdir(path):
# Categorize entry as directory or file.
if os.path.isdir(os.path.join(path, entry)):
dirs.append(entry)
else:
files.append(entry)
dirs.sort()
files.sort()
return dirs, files
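# Usage sketch (paths are illustrative): render a listing of the current
# directory for the request path '/'.
#
#   dirs, files = get_entries('.')
#   html = generate_xhtml('/', dirs, files)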
class Static(object):
"""A stupidly simple way to serve static content via WSGI.
    Serve the file of the same path as PATH_INFO beneath self.root.
    Guess the Content-Type from the file extension via the mimetypes
    module, or use 'text/plain' if the type cannot be guessed.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
for k, v in kw.iteritems():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
return self.method_not_allowed(environ, start_response)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
# guard against arbitrary file retrieval
if not (os.path.abspath(full_path+'/'))\
.startswith(os.path.abspath(self.root+'/')):
return self.not_found(environ, start_response)
if os.path.isdir(full_path):
            if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
headers = [('Date', rfc822.formatdate(time.time()))]
headers.append(('Content-Type', 'text/html' ))
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return [generate_xhtml(path_info, *get_entries(full_path))]
content_type = self._guess_type(full_path)
try:
etag, last_modified, length = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag),
('Content-Length', str(length))]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return ['']
except (IOError, OSError), e:
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return a tuple of etag, last_modified by mtime from stat."""
mtime = os.stat(full_path).st_mtime
size = os.stat(full_path).st_size
return str(mtime), rfc822.formatdate(mtime), size
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
def iter_and_close(file_like, block_size):
"""Yield file contents by block then close the file."""
while 1:
try:
block = file_like.read(block_size)
if block: yield block
else: raise StopIteration
except StopIteration, si:
file_like.close()
return
from cogen.web import wsgi
from cogen.common import *
wsgi.server_factory({}, '0.0.0.0', 9000)(Static('.'))
#~ from wsgiref.simple_server import make_server
#make_server('localhost', 9000, debug).serve_forever()
#~ make_server('localhost', 9000, app).serve_forever()
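# Alternative entry point (a sketch, not part of the original module): serve
# the current directory with the stdlib wsgiref server instead of cogen. The
# host and port are illustrative.
#
#   from wsgiref.simple_server import make_server
#   make_server('localhost', 9000, Static('.')).serve_forever()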
|
|
"""Assets represent Financial Assets, these are claims based on a contract.
These are often referred to as Securities or Products in other libraries.
"""
from __future__ import absolute_import, division, print_function
import pandas as pd
from pandas import DataFrame
from numbers import Number
from pandas.tseries.offsets import DateOffset, CustomBusinessDay
from pennies.time import daycounter
RATETYPES = ['FIXED', 'IBOR']
class Asset(object):
"""Base class of all Financial Assets"""
def __init__(self):
self.frame = pd.DataFrame()
def accept(self, visitor, *args, **kwargs):
"""Accepts visitors that calculate various measures on the Asset.
Stub if we wish to use visitor pattern.
Currently using multipledispatch
"""
return visitor.visit(Asset, *args, **kwargs)
def __eq__(self, other):
return self.frame.equals(other.frame)
def all_assets():
"""Provides a list of all available Assets"""
return Asset.__subclasses__()
class ZeroCouponBond(Asset):
"""A single payment of an amount of currency on a given date.
This has a number of aliases: ZCB, Zero, DiscountBond, Bullet
By default, the amount is $1 received.
Attributes
----------
dt_payment: datetime
Date (and time) on which amount is received
currency : str
Currency code of amount received
notional: float
Notional in given currency. Received if positive, else paid.
"""
def __init__(self, dt_payment, currency='USD', notional=1.0, bday=None):
"""
Parameters
----------
dt_payment: datetime
Date (and time) on which notional is received
currency : str, optional
Currency code of notional received
notional: float, optional
Currency Amount. Received if positive, else paid.
bday: str, optional
Rule to adjust dates that fall on weekends and holidays.
"""
super(ZeroCouponBond, self).__init__()
self.dt_payment = dt_payment
self.currency = currency
self.notional = notional
self.frame = pd.DataFrame({
'pay': dt_payment,
'notional': notional,
'currency': currency},
index=[0])
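# Usage sketch (date and amount are illustrative): a single payment of 100 USD.
#
#   import datetime
#   zcb = ZeroCouponBond(datetime.datetime(2025, 6, 30), notional=100.0)
#   print(zcb.frame)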
class Annuity(Asset):
"""Fixed Rate Annuity.
This is used as the fixed leg of a Swap, as the core of fixed rate Bonds,
and the natural Numeraire when pricing Swaptions.
The primary representation of the asset is a dataframe where each
row is a single cashflow.
"""
# TODO Capture additional cases outlined below
# TODO Stubs: Short and Long, Front and Back
# TODO Daycount conventions: add more
# TODO Business day adjustments conventions: add more
# TODO Holiday calendars: add
def __init__(self, df, notl_exchange=True):
"""Create Annuity from DataFrame.
Not meant to be the primary constructor.
Instead, calls like Annuity.from_tenor will be more common.
        This is here because classmethods must return a call to the
        constructor so that the return type is known.
Parameters
----------
df: DataFrame
Required columns = ['start','end', 'pay', 'fixing',
'period', 'frequency', 'notional', 'dcc','lag_pay', 'bday_adj', 'stub']
notl_exchange: bool
If true, notional is paid at the final pay date
"""
super(Annuity, self).__init__()
# Primary representation
self.frame = df
# Scalar Metadata
self.notl_exchange = notl_exchange
try:
vals = set(df.currency)
assert len(vals) == 1, ('currency column should have just one '
'value: Found {}'.format(vals))
self.currency = vals.pop()
except KeyError:
print('Required key, currency, not contained in frame')
raise
try:
vals = set(df.frequency)
assert len(vals) == 1, ('frequency column should have just one '
'value: Found {}'.format(vals))
self.frequency = vals.pop()
except KeyError:
print('Required key, frequency, not contained in frame')
raise
try:
vals = set(df.type)
assert len(vals) == 1, ('type column should have just one '
'value: Found {}'.format(vals))
self.type = vals.pop()
except KeyError:
print('Optional key, type, not contained in frame. Set to None')
self.type = None
@classmethod
def from_tenor(cls, dt_settlement, tenor, frequency, rate=1.0, dcc=None,
notional=1.0, currency='USD', receive=True, payment_lag=0,
bday=None, stub='front', notl_exchange=True,
rate_type='FIXED'):
"""Construct a fixed rate Annuity from start date, length and frequency.
Parameters
----------
dt_settlement: datetime
Date (and time) on which leg begins to accrue interest
tenor: int
Length of the entire leg, as number of months
frequency: int
Number of months between cash flows
dcc: str, optional
Daycount Convention for computing accrual of interest
rate: float, optional
Rate of interest accrual. Simple accrual, no compounding in period.
notional: float, optional
Notional amount. Received if positive, else paid.
currency : str, optional
Currency code of amount received
receive: bool, optional
Alternative method of specifying sign of notional.
Multiplies given notional by -1 if False
payment_lag: int, optional
Number of days after accrual end dates that payments are made.
bday: str, optional
Rule to adjust dates that fall on weekends and holidays.
stub: str, optional
If schedule building leads to one period of different length,
this decides if it is the first ('front') or last period ('back').
notl_exchange: bool
If true, notional is paid at the final pay date
rate_type: str, optional
Defines whether the rate being paid is fixed, or of some floating
index such as an IBOR.
"""
# TODO: Check behavior when stubs implied
dt_maturity = dt_settlement + DateOffset(months=tenor)
period = DateOffset(months=frequency)
sched_end = pd.date_range(dt_settlement, dt_maturity,
freq=period, closed='right')
sched_start = sched_end - period # TODO Test stub cases. start[i] should be end[i-1]
if bday or payment_lag:
sched_pay = sched_end + CustomBusinessDay(payment_lag, holidays=None)
else:
sched_pay = sched_end
# Primary representation of leg as Pandas DataFrame
assert rate_type in RATETYPES
frame = pd.DataFrame({
'start': sched_start,
'end': sched_end,
'pay': sched_pay,
'rate': rate,
'notional': notional,
'frequency': frequency,
'currency': currency,
'dcc': dcc,
'pay_lag': payment_lag,
'bday_adj': bday,
'stub': stub,
'type': rate_type})
year_frac = daycounter(dcc)(frame.start, frame.end)
frame['period'] = year_frac
return Annuity(frame, notl_exchange=notl_exchange)
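    # Usage sketch (dates and terms are illustrative): a 2-year annuity paying
    # 5% semi-annually on a notional of one million.
    #
    #   import datetime
    #   ann = Annuity.from_tenor(datetime.datetime(2020, 1, 2), tenor=24,
    #                            frequency=6, rate=0.05, dcc='30360',
    #                            notional=1e6)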
@classmethod
def from_frame(cls, df, notl_exchange=True):
return Annuity(df, notl_exchange=notl_exchange)
def __str__(self):
return str(self.frame)
def __eq__(self, other):
return (isinstance(other, Annuity) and
super(Annuity, self).__eq__(other))
class FixedLeg(Annuity):
def __init__(self, df, fixed_rate=None, notl_exchange=True):
super(FixedLeg, self).__init__(df, notl_exchange=notl_exchange)
self.type = 'FIXED'
self.frame['type'] = self.type
if fixed_rate:
self.frame['rate'] = fixed_rate
def __eq__(self, other):
return (isinstance(other, FixedLeg) and
super(FixedLeg, self).__eq__(other))
@classmethod
def from_tenor(cls, dt_settlement, tenor, frequency, rate=1.0, dcc='30360',
notional=1.0, currency='USD', receive=True, payment_lag=0,
bday=None, stub='front', notl_exchange=True):
annuity = Annuity.from_tenor(dt_settlement, tenor, frequency, rate,
dcc, notional, currency, receive,
payment_lag, bday, stub, notl_exchange,
rate_type='FIXED')
if isinstance(rate, Number):
return FixedLeg(annuity.frame, fixed_rate=rate)
else:
raise NotImplementedError("FixedLeg requires scalar rate.")
@classmethod
def from_frame(cls, df, fixed_rate=1.0, notl_exchange=True):
return FixedLeg(df, fixed_rate=fixed_rate, notl_exchange=notl_exchange)
class IborLeg(Annuity):
"""Series of coupons based on fixings of an IBOR.
IBOR = Inter-Bank Offered Rate, eg 3M USD LIBOR (3-month dollar Libor)
Used as Floating Leg of a Swap or Floating Rate Note.
"""
def __init__(self, df, notl_exchange=True):
"""Compute from DataFrame.
        This is unlikely to be the primary constructor, but classmethods must
        return a call to the constructor so that the type is known.
Parameters
----------
df: DataFrame
Required columns = ['start','end', 'pay', 'fixing',
'period', 'frequency', 'notional', 'dcc','lag_pay', 'bday_adj', 'stub']
"""
# Primary representation
super(IborLeg, self).__init__(df, notl_exchange=notl_exchange)
self.type = 'IBOR'
self.frame['rate_type'] = self.type
@classmethod
def from_tenor(cls, dt_settlement, tenor, frequency, rate=None, dcc=None,
notional=1.0, currency='USD', receive=True, payment_lag=0,
fixing_lag=0, bday=None, stub='front', notl_exchange=True):
annuity = Annuity.from_tenor(dt_settlement, tenor, frequency, rate,
dcc, notional, currency, receive,
payment_lag, bday, stub, notl_exchange,
rate_type='IBOR')
df = annuity.frame
if bday or fixing_lag:
df['fixing'] = df['start'] + CustomBusinessDay(fixing_lag, holidays=None)
else:
df['fixing'] = df['start']
return IborLeg(df, notl_exchange=notl_exchange)
@classmethod
def from_frame(cls, df, notl_exchange=True):
return IborLeg(df, notl_exchange=notl_exchange)
def __eq__(self, other):
return (isinstance(other, IborLeg) and
super(IborLeg, self).__eq__(other))
class CompoundAsset(Asset):
"""This Asset is composed of a list of Assets.
This is a convenient way to structure a bespoke trade that contains
numerous parts, like embedded options, or different first coupons.
Attributes
----------
underlying_contracts: list
List of instances of Assets
"""
def __init__(self, underlying_contracts):
"""
Parameters
----------
underlying_contracts: list of Asset's
"""
super(CompoundAsset, self).__init__()
self.underlying_contracts = underlying_contracts
class Swap(CompoundAsset):
def __init__(self, receive_leg, pay_leg):
""" This takes two frames"""
self.underlying_contracts = [receive_leg, pay_leg]
self.leg_receive = receive_leg
self.leg_pay = pay_leg
def __eq__(self, other):
return (isinstance(other, Swap) and
self.leg_pay == other.leg_pay and
self.leg_receive == other.leg_receive)
def __str__(self):
return ('\nPay Leg:\n' + str(self.leg_pay) +
'\nReceive Leg:\n' + str(self.leg_receive))
class VanillaSwap(Swap):
def __init__(self, fixed_leg, floating_leg):
assert isinstance(fixed_leg, FixedLeg)
assert isinstance(floating_leg, IborLeg)
assert fixed_leg.currency == floating_leg.currency, \
'Currencies differ in legs of VanillaSwap'
assert fixed_leg.type == 'FIXED'
self.leg_fixed = fixed_leg
assert floating_leg.type == 'IBOR'
self.leg_float = floating_leg
initial_notl_fixed = fixed_leg.frame.notional.iloc[0]
initial_notl_float = floating_leg.frame.notional.iloc[0]
if initial_notl_fixed * initial_notl_float > 0.0:
raise ValueError("Notional values of both legs have same sign")
elif initial_notl_fixed >= 0.0:
super(VanillaSwap, self).__init__(receive_leg=fixed_leg,
pay_leg=floating_leg)
else:
super(VanillaSwap, self).__init__(receive_leg=floating_leg,
pay_leg=fixed_leg)
def __eq__(self, other):
return (isinstance(other, VanillaSwap) and
self.leg_fixed == other.leg_fixed and
self.leg_float == other.leg_float)
class FRA(Asset):
"""Forward Rate Agreement"""
def __init__(self, fixed_rate, dt_fixing, dt_payment,
dt_accrual_start=None, dt_accrual_end=None,
daycount=None, notional=1.0, pay_upfront=True):
raise NotImplementedError
class StirFuture(Asset):
"""Short term interest rate Future"""
def __init__(self):
raise NotImplementedError
class Deposit(Asset):
"""Short term cash deposit paying simple (not compounded) interest"""
def __init__(self):
raise NotImplementedError
class IborFixing(Asset):
"""Current Fixing of an Inter-Bank Offered Rate
Used to calibrate yield curves. Not an asset per-se.
"""
def __init__(self):
raise NotImplementedError
class TenorSwap(Swap):
"""Swap with two floating legs, each of different rate tenors"""
    def __init__(self):
raise NotImplementedError
class CurrencySwap(Swap):
"""Swap with two floating legs, each of different currencies
and often rate and payment frequency"""
def __init__(self):
raise NotImplementedError
# TODO: Check whether this sort of aliasing is a good idea
Zero = ZeroCouponBond
"""Alias for a ZeroCouponBond"""
ZCB = ZeroCouponBond
"""Alias for a ZeroCouponBond"""
DiscountBond = ZeroCouponBond
"""Alias for a ZeroCouponBond"""
BulletPayment = ZeroCouponBond
"""Alias for a ZeroCouponBond"""
SettlementPayment = ZeroCouponBond
"""BulletPayment used to settle trades"""
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pprint
import socket
import string
import sys
import types
import uuid
import eventlet
from eventlet.green import zmq
import greenlet
from cinder.openstack.common import cfg
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import importutils
from cinder.openstack.common import jsonutils
from cinder.openstack.common.rpc import common as rpc_common
# The names below are aliased here for convenience and are not modified.
pformat = pprint.pformat
Timeout = eventlet.timeout.Timeout
LOG = rpc_common.LOG
RemoteError = rpc_common.RemoteError
RPCException = rpc_common.RPCException
zmq_opts = [
cfg.StrOpt('rpc_zmq_bind_address', default='*',
help='ZeroMQ bind address. Should be a wildcard (*), '
'an ethernet interface, or IP. '
'The "host" option should point or resolve to this '
'address.'),
# The module.Class to use for matchmaking.
cfg.StrOpt(
'rpc_zmq_matchmaker',
default=('cinder.openstack.common.rpc.'
'matchmaker.MatchMakerLocalhost'),
help='MatchMaker driver',
),
# The following port is unassigned by IANA as of 2012-05-21
cfg.IntOpt('rpc_zmq_port', default=9501,
help='ZeroMQ receiver listening port'),
cfg.IntOpt('rpc_zmq_port_pub', default=9502,
help='ZeroMQ fanout publisher port'),
cfg.IntOpt('rpc_zmq_contexts', default=1,
help='Number of ZeroMQ contexts, defaults to 1'),
cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack',
help='Directory for holding IPC sockets'),
cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(),
help='Name of this node. Must be a valid hostname, FQDN, or '
'IP address. Must match "host" option, if running Nova.')
]
# These globals are defined in register_opts(conf),
# a mandatory initialization call
CONF = None
ZMQ_CTX = None # ZeroMQ Context, must be global.
matchmaker = None # memoized matchmaker object
def _serialize(data):
"""
Serialization wrapper
We prefer using JSON, but it cannot encode all types.
Error if a developer passes us bad data.
"""
try:
return str(jsonutils.dumps(data, ensure_ascii=True))
except TypeError:
LOG.error(_("JSON serialization failed."))
raise
def _deserialize(data):
"""
Deserialization wrapper
"""
LOG.debug(_("Deserializing: %s"), data)
return jsonutils.loads(data)
class ZmqSocket(object):
"""
A tiny wrapper around ZeroMQ to simplify the send/recv protocol
and connection management.
Can be used as a Context (supports the 'with' statement).
"""
def __init__(self, addr, zmq_type, bind=True, subscribe=None):
self.sock = ZMQ_CTX.socket(zmq_type)
self.addr = addr
self.type = zmq_type
self.subscriptions = []
# Support failures on sending/receiving on wrong socket type.
self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
self.can_sub = zmq_type in (zmq.SUB, )
# Support list, str, & None for subscribe arg (cast to list)
do_sub = {
list: subscribe,
str: [subscribe],
type(None): []
}[type(subscribe)]
for f in do_sub:
self.subscribe(f)
str_data = {'addr': addr, 'type': self.socket_s(),
'subscribe': subscribe, 'bind': bind}
LOG.debug(_("Connecting to %(addr)s with %(type)s"), str_data)
LOG.debug(_("-> Subscribed to %(subscribe)s"), str_data)
LOG.debug(_("-> bind: %(bind)s"), str_data)
try:
if bind:
self.sock.bind(addr)
else:
self.sock.connect(addr)
except Exception:
raise RPCException(_("Could not open socket."))
def socket_s(self):
"""Get socket type as string."""
t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER',
'DEALER')
return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type]
def subscribe(self, msg_filter):
"""Subscribe."""
if not self.can_sub:
raise RPCException("Cannot subscribe on this socket.")
LOG.debug(_("Subscribing to %s"), msg_filter)
try:
self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter)
except Exception:
return
self.subscriptions.append(msg_filter)
def unsubscribe(self, msg_filter):
"""Unsubscribe."""
if msg_filter not in self.subscriptions:
return
self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter)
self.subscriptions.remove(msg_filter)
def close(self):
if self.sock is None or self.sock.closed:
return
# We must unsubscribe, or we'll leak descriptors.
if len(self.subscriptions) > 0:
for f in self.subscriptions:
try:
self.sock.setsockopt(zmq.UNSUBSCRIBE, f)
except Exception:
pass
self.subscriptions = []
# Linger -1 prevents lost/dropped messages
try:
self.sock.close(linger=-1)
except Exception:
pass
self.sock = None
def recv(self):
if not self.can_recv:
raise RPCException(_("You cannot recv on this socket."))
return self.sock.recv_multipart()
def send(self, data):
if not self.can_send:
raise RPCException(_("You cannot send on this socket."))
self.sock.send_multipart(data)
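# Usage sketch (the address is illustrative; assumes register_opts(cfg.CONF)
# has already run so that ZMQ_CTX is initialized): a PUSH/PULL pair over IPC.
#
#   receiver = ZmqSocket("ipc:///tmp/example_sock", zmq.PULL, bind=True)
#   sender = ZmqSocket("ipc:///tmp/example_sock", zmq.PUSH, bind=False)
#   sender.send(["hello"])
#   print(receiver.recv())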
class ZmqClient(object):
"""Client for ZMQ sockets."""
def __init__(self, addr, socket_type=zmq.PUSH, bind=False):
self.outq = ZmqSocket(addr, socket_type, bind=bind)
def cast(self, msg_id, topic, data):
self.outq.send([str(topic), str(msg_id), str('cast'),
_serialize(data)])
def close(self):
self.outq.close()
class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call."""
def __init__(self, **kwargs):
self.replies = []
super(RpcContext, self).__init__(**kwargs)
def deepcopy(self):
values = self.to_dict()
values['replies'] = self.replies
return self.__class__(**values)
def reply(self, reply=None, failure=None, ending=False):
if ending:
return
self.replies.append(reply)
@classmethod
def marshal(self, ctx):
ctx_data = ctx.to_dict()
return _serialize(ctx_data)
@classmethod
def unmarshal(self, data):
return RpcContext.from_dict(_deserialize(data))
class InternalContext(object):
"""Used by ConsumerBase as a private context for - methods."""
def __init__(self, proxy):
self.proxy = proxy
self.msg_waiter = None
def _get_response(self, ctx, proxy, topic, data):
"""Process a curried message and cast the result to topic."""
LOG.debug(_("Running func with context: %s"), ctx.to_dict())
data.setdefault('version', None)
data.setdefault('args', [])
try:
result = proxy.dispatch(
ctx, data['version'], data['method'], **data['args'])
return ConsumerBase.normalize_reply(result, ctx.replies)
except greenlet.GreenletExit:
# ignore these since they are just from shutdowns
pass
except Exception:
return {'exc':
rpc_common.serialize_remote_exception(sys.exc_info())}
def reply(self, ctx, proxy,
msg_id=None, context=None, topic=None, msg=None):
"""Reply to a casted call."""
# Our real method is curried into msg['args']
child_ctx = RpcContext.unmarshal(msg[0])
response = ConsumerBase.normalize_reply(
self._get_response(child_ctx, proxy, topic, msg[1]),
ctx.replies)
LOG.debug(_("Sending reply"))
cast(CONF, ctx, topic, {
'method': '-process_reply',
'args': {
'msg_id': msg_id,
'response': response
}
})
class ConsumerBase(object):
"""Base Consumer."""
def __init__(self):
self.private_ctx = InternalContext(None)
@classmethod
def normalize_reply(self, result, replies):
#TODO(ewindisch): re-evaluate and document this method.
if isinstance(result, types.GeneratorType):
return list(result)
elif replies:
return replies
else:
return [result]
def consume(self, sock):
raise NotImplementedError()
def process(self, style, target, proxy, ctx, data):
        # Methods whose names start with '-' are processed
        # internally (such names are not valid public method names).
method = data['method']
# Internal method
# uses internal context for safety.
if data['method'][0] == '-':
# For reply / process_reply
method = method[1:]
if method == 'reply':
self.private_ctx.reply(ctx, proxy, **data['args'])
return
data.setdefault('version', None)
data.setdefault('args', [])
proxy.dispatch(ctx, data['version'],
data['method'], **data['args'])
class ZmqBaseReactor(ConsumerBase):
"""
A consumer class implementing a
centralized casting broker (PULL-PUSH)
for RoundRobin requests.
"""
def __init__(self, conf):
super(ZmqBaseReactor, self).__init__()
self.mapping = {}
self.proxies = {}
self.threads = []
self.sockets = []
self.subscribe = {}
self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size)
def register(self, proxy, in_addr, zmq_type_in, out_addr=None,
zmq_type_out=None, in_bind=True, out_bind=True,
subscribe=None):
LOG.info(_("Registering reactor"))
if zmq_type_in not in (zmq.PULL, zmq.SUB):
raise RPCException("Bad input socktype")
# Items push in.
inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind,
subscribe=subscribe)
self.proxies[inq] = proxy
self.sockets.append(inq)
LOG.info(_("In reactor registered"))
if not out_addr:
return
if zmq_type_out not in (zmq.PUSH, zmq.PUB):
raise RPCException("Bad output socktype")
# Items push out.
outq = ZmqSocket(out_addr, zmq_type_out, bind=out_bind)
self.mapping[inq] = outq
self.mapping[outq] = inq
self.sockets.append(outq)
LOG.info(_("Out reactor registered"))
def consume_in_thread(self):
def _consume(sock):
LOG.info(_("Consuming socket"))
while True:
self.consume(sock)
for k in self.proxies.keys():
self.threads.append(
self.pool.spawn(_consume, k)
)
def wait(self):
for t in self.threads:
t.wait()
def close(self):
for s in self.sockets:
s.close()
for t in self.threads:
t.kill()
class ZmqProxy(ZmqBaseReactor):
"""
A consumer class implementing a
topic-based proxy, forwarding to
IPC sockets.
"""
def __init__(self, conf):
super(ZmqProxy, self).__init__(conf)
self.topic_proxy = {}
ipc_dir = CONF.rpc_zmq_ipc_dir
self.topic_proxy['zmq_replies'] = \
ZmqSocket("ipc://%s/zmq_topic_zmq_replies" % (ipc_dir, ),
zmq.PUB, bind=True)
self.sockets.append(self.topic_proxy['zmq_replies'])
self.topic_proxy['fanout~'] = \
ZmqSocket("tcp://%s:%s" % (CONF.rpc_zmq_bind_address,
CONF.rpc_zmq_port_pub), zmq.PUB, bind=True)
self.sockets.append(self.topic_proxy['fanout~'])
def consume(self, sock):
ipc_dir = CONF.rpc_zmq_ipc_dir
#TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv()
topic, msg_id, style, in_msg = data
topic = topic.split('.', 1)[0]
LOG.debug(_("CONSUMER GOT %s"), ' '.join(map(pformat, data)))
# Handle zmq_replies magic
if topic.startswith('fanout~'):
sock_type = zmq.PUB
# This doesn't change what is in the message,
# it only specifies that these messages go to
# the generic fanout topic.
topic = 'fanout~'
elif topic.startswith('zmq_replies'):
sock_type = zmq.PUB
inside = _deserialize(in_msg)
msg_id = inside[-1]['args']['msg_id']
response = inside[-1]['args']['response']
LOG.debug(_("->response->%s"), response)
data = [str(msg_id), _serialize(response)]
else:
sock_type = zmq.PUSH
        if topic not in self.topic_proxy:
outq = ZmqSocket("ipc://%s/zmq_topic_%s" % (ipc_dir, topic),
sock_type, bind=True)
self.topic_proxy[topic] = outq
self.sockets.append(outq)
LOG.info(_("Created topic proxy: %s"), topic)
LOG.debug(_("ROUTER RELAY-OUT START %(data)s") % {'data': data})
self.topic_proxy[topic].send(data)
LOG.debug(_("ROUTER RELAY-OUT SUCCEEDED %(data)s") % {'data': data})
class CallbackReactor(ZmqBaseReactor):
"""
A consumer class passing messages to a callback
"""
def __init__(self, conf, callback):
self._cb = callback
super(CallbackReactor, self).__init__(conf)
def consume(self, sock):
data = sock.recv()
self._cb(data[3])
class ZmqReactor(ZmqBaseReactor):
"""
A consumer class implementing a
consumer for messages. Can also be
used as a 1:1 proxy
"""
def __init__(self, conf):
super(ZmqReactor, self).__init__(conf)
def consume(self, sock):
#TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv()
LOG.debug(_("CONSUMER RECEIVED DATA: %s"), data)
if sock in self.mapping:
LOG.debug(_("ROUTER RELAY-OUT %(data)s") % {
'data': data})
self.mapping[sock].send(data)
return
topic, msg_id, style, in_msg = data
ctx, request = _deserialize(in_msg)
ctx = RpcContext.unmarshal(ctx)
proxy = self.proxies[sock]
self.pool.spawn_n(self.process, style, topic,
proxy, ctx, request)
class Connection(rpc_common.Connection):
"""Manages connections and threads."""
def __init__(self, conf):
self.reactor = ZmqReactor(conf)
def _consume_fanout(self, reactor, topic, proxy, bind=False):
for topic, host in matchmaker.queues("publishers~%s" % (topic, )):
inaddr = "tcp://%s:%s" % (host, CONF.rpc_zmq_port)
reactor.register(proxy, inaddr, zmq.SUB, in_bind=bind)
def declare_topic_consumer(self, topic, callback=None,
queue_name=None):
"""declare_topic_consumer is a private method, but
it is being used by Quantum (Folsom).
        This has been added for compatibility.
"""
# Only consume on the base topic name.
topic = topic.split('.', 1)[0]
if CONF.rpc_zmq_host in matchmaker.queues("fanout~%s" % (topic, )):
return
reactor = CallbackReactor(CONF, callback)
self._consume_fanout(reactor, topic, None, bind=False)
def create_consumer(self, topic, proxy, fanout=False):
# Only consume on the base topic name.
topic = topic.split('.', 1)[0]
LOG.info(_("Create Consumer for topic (%(topic)s)") %
{'topic': topic})
# Consume direct-push fanout messages (relay to local consumers)
if fanout:
# If we're not in here, we can't receive direct fanout messages
if CONF.rpc_zmq_host in matchmaker.queues(topic):
# Consume from all remote publishers.
self._consume_fanout(self.reactor, topic, proxy)
else:
LOG.warn("This service cannot receive direct PUSH fanout "
"messages without being known by the matchmaker.")
return
# Configure consumer for direct pushes.
subscribe = (topic, fanout)[type(fanout) == str]
sock_type = zmq.SUB
topic = 'fanout~' + topic
inaddr = "tcp://127.0.0.1:%s" % (CONF.rpc_zmq_port_pub, )
else:
sock_type = zmq.PULL
subscribe = None
# Receive messages from (local) proxy
inaddr = "ipc://%s/zmq_topic_%s" % \
(CONF.rpc_zmq_ipc_dir, topic)
LOG.debug(_("Consumer is a zmq.%s"),
['PULL', 'SUB'][sock_type == zmq.SUB])
# Consume messages from local rpc-zmq-receiver daemon.
self.reactor.register(proxy, inaddr, sock_type,
subscribe=subscribe, in_bind=False)
def close(self):
self.reactor.close()
def wait(self):
self.reactor.wait()
def consume_in_thread(self):
self.reactor.consume_in_thread()
def _cast(addr, context, msg_id, topic, msg, timeout=None):
timeout_cast = timeout or CONF.rpc_cast_timeout
payload = [RpcContext.marshal(context), msg]
with Timeout(timeout_cast, exception=rpc_common.Timeout):
try:
conn = ZmqClient(addr)
# assumes cast can't return an exception
conn.cast(msg_id, topic, payload)
except zmq.ZMQError:
raise RPCException("Cast failed. ZMQ Socket Exception")
finally:
if 'conn' in vars():
conn.close()
def _call(addr, context, msg_id, topic, msg, timeout=None):
# timeout_response is how long we wait for a response
timeout = timeout or CONF.rpc_response_timeout
# The msg_id is used to track replies.
msg_id = str(uuid.uuid4().hex)
# Replies always come into the reply service.
reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host
LOG.debug(_("Creating payload"))
# Curry the original request into a reply method.
mcontext = RpcContext.marshal(context)
payload = {
'method': '-reply',
'args': {
'msg_id': msg_id,
'context': mcontext,
'topic': reply_topic,
'msg': [mcontext, msg]
}
}
LOG.debug(_("Creating queue socket for reply waiter"))
# Messages arriving async.
# TODO(ewindisch): have reply consumer with dynamic subscription mgmt
with Timeout(timeout, exception=rpc_common.Timeout):
try:
msg_waiter = ZmqSocket(
"ipc://%s/zmq_topic_zmq_replies" % CONF.rpc_zmq_ipc_dir,
zmq.SUB, subscribe=msg_id, bind=False
)
LOG.debug(_("Sending cast"))
_cast(addr, context, msg_id, topic, payload)
LOG.debug(_("Cast sent; Waiting reply"))
# Blocks until receives reply
msg = msg_waiter.recv()
LOG.debug(_("Received message: %s"), msg)
LOG.debug(_("Unpacking response"))
responses = _deserialize(msg[-1])
# ZMQError trumps the Timeout error.
except zmq.ZMQError:
raise RPCException("ZMQ Socket Error")
finally:
if 'msg_waiter' in vars():
msg_waiter.close()
# It seems we don't need to do all of the following,
# but perhaps it would be useful for multicall?
# One effect of this is that we're checking all
# responses for Exceptions.
for resp in responses:
if isinstance(resp, types.DictType) and 'exc' in resp:
raise rpc_common.deserialize_remote_exception(CONF, resp['exc'])
return responses[-1]
def _multi_send(method, context, topic, msg, timeout=None):
"""
Wraps the sending of messages,
dispatches to the matchmaker and sends
message to all relevant hosts.
"""
conf = CONF
LOG.debug(_("%(msg)s") % {'msg': ' '.join(map(pformat, (topic, msg)))})
queues = matchmaker.queues(topic)
LOG.debug(_("Sending message(s) to: %s"), queues)
# Don't stack if we have no matchmaker results
if len(queues) == 0:
LOG.warn(_("No matchmaker results. Not casting."))
# While not strictly a timeout, callers know how to handle
# this exception and a timeout isn't too big a lie.
raise rpc_common.Timeout, "No match from matchmaker."
# This supports brokerless fanout (addresses > 1)
for queue in queues:
(_topic, ip_addr) = queue
_addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)
if method.__name__ == '_cast':
eventlet.spawn_n(method, _addr, context,
_topic, _topic, msg, timeout)
return
return method(_addr, context, _topic, _topic, msg, timeout)
def create_connection(conf, new=True):
return Connection(conf)
def multicall(conf, *args, **kwargs):
"""Multiple calls."""
return _multi_send(_call, *args, **kwargs)
def call(conf, *args, **kwargs):
"""Send a message, expect a response."""
data = _multi_send(_call, *args, **kwargs)
return data[-1]
def cast(conf, *args, **kwargs):
"""Send a message expecting no reply."""
_multi_send(_cast, *args, **kwargs)
def fanout_cast(conf, context, topic, msg, **kwargs):
"""Send a message to all listening and expect no reply."""
    # NOTE(ewindisch): fanout~ is used because it avoids splitting on '.'
# and acts as a non-subtle hint to the matchmaker and ZmqProxy.
_multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs)
def notify(conf, context, topic, msg, **kwargs):
"""
Send notification event.
Notifications are sent to topic-priority.
This differs from the AMQP drivers which send to topic.priority.
"""
# NOTE(ewindisch): dot-priority in rpc notifier does not
# work with our assumptions.
    topic = topic.replace('.', '-')
cast(conf, context, topic, msg, **kwargs)
def cleanup():
"""Clean up resources in use by implementation."""
global ZMQ_CTX
global matchmaker
matchmaker = None
ZMQ_CTX.term()
ZMQ_CTX = None
def register_opts(conf):
"""Registration of options for this driver."""
#NOTE(ewindisch): ZMQ_CTX and matchmaker
# are initialized here as this is as good
# an initialization method as any.
# We memoize through these globals
global ZMQ_CTX
global matchmaker
global CONF
if not CONF:
conf.register_opts(zmq_opts)
CONF = conf
# Don't re-set, if this method is called twice.
if not ZMQ_CTX:
ZMQ_CTX = zmq.Context(conf.rpc_zmq_contexts)
if not matchmaker:
# rpc_zmq_matchmaker should be set to a 'module.Class'
mm_path = conf.rpc_zmq_matchmaker.split('.')
mm_module = '.'.join(mm_path[:-1])
mm_class = mm_path[-1]
# Only initialize a class.
if mm_path[-1][0] not in string.ascii_uppercase:
LOG.error(_("Matchmaker could not be loaded.\n"
"rpc_zmq_matchmaker is not a class."))
raise RPCException(_("Error loading Matchmaker."))
mm_impl = importutils.import_module(mm_module)
mm_constructor = getattr(mm_impl, mm_class)
matchmaker = mm_constructor()
register_opts(cfg.CONF)
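# Illustrative usage sketch (added for documentation; not part of the original
# driver). It assumes the options above have been registered and a matchmaker
# can resolve the topics; the context argument and the topic names are
# hypothetical placeholders.
def _example_rpc_usage(ctxt):  # pragma: no cover
    """Hedged sketch of the public entry points defined above."""
    register_opts(cfg.CONF)
    # Fire-and-forget to every host subscribed to the fanout topic.
    fanout_cast(cfg.CONF, ctxt, 'compute', {'method': 'ping', 'args': {}})
    # Round-trip call: _multi_send resolves the topic via the matchmaker and
    # returns the last response received for the generated msg_id.
    return call(cfg.CONF, ctxt, 'compute.host-1', {'method': 'ping', 'args': {}})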
|
|
#!/usr/bin/python
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic proxy to access any DFA web service."""
__author__ = 'api.jdilallo@gmail.com (Joseph DiLallo)'
import warnings
from adspygoogle import SOAPpy
from adspygoogle.common import Utils
from adspygoogle.common.Errors import Error
from adspygoogle.common.GenericApiService import GenericApiService
from adspygoogle.common.GenericApiService import MethodInfoKeys
from adspygoogle.dfa import DfaUtils
from adspygoogle.dfa import LIB_SIG
from adspygoogle.dfa import LIB_URL
from adspygoogle.dfa import WSSE_NS
from adspygoogle.dfa.DfaErrors import DfaApiError
from adspygoogle.dfa.DfaErrors import DfaAuthenticationError
from adspygoogle.dfa.DfaSoapBuffer import DfaSoapBuffer
_DEPRECATION_WARNING = ('Legacy DFA passwords are deprecated. Please use '
'OAuth 2.0')
warnings.filterwarnings('always', _DEPRECATION_WARNING, DeprecationWarning)
class GenericDfaService(GenericApiService):
"""Wrapper for any DFA web service."""
# The _WRAP_LISTS constant indicates that all DFA services need to wrap lists
# in an extra layer of XML element tags.
_WRAP_LISTS = True
# The _BUFFER_CLASS is the subclass of SoapBuffer that should be used to track
# all SOAP interactions.
_BUFFER_CLASS = DfaSoapBuffer
# The _TOKEN_EXPIRED_ERROR_MESSAGE is returned by the DFA API when a DFA token
# needs to be refreshed.
_TOKEN_EXPIRED_ERROR_MESSAGE = 'Authentication token has expired.'
def __init__(self, headers, config, op_config, lock, logger, service_name):
"""Inits GenericDfaService.
Args:
headers: dict Dictionary object with populated authentication
credentials.
config: dict Dictionary object with populated configuration values.
op_config: dict Dictionary object with additional configuration values for
this operation.
lock: threading.RLock Thread lock to use to synchronize requests.
logger: Logger Instance of Logger to use for logging.
service_name: string The name of this service.
"""
service_url = '/'.join([op_config['server'], op_config['version'],
'api/dfa-api', service_name])
namespace = '/'.join(['http://www.doubleclick.net/dfa-api',
op_config['version']])
namespace_extractor = _DetermineNamespacePrefix
super(GenericDfaService, self).__init__(
headers, config, op_config, lock, logger, service_name, service_url,
GenericDfaService._WRAP_LISTS, GenericDfaService._BUFFER_CLASS,
namespace, namespace_extractor)
# DFA-specific changes to the SOAPpy.WSDL.Proxy
methodattrs = {
'xmlns:dfa': self._namespace
}
self._soappyservice.soapproxy.methodattrs = methodattrs
def _WrapSoapCall(self, soap_call_function):
"""Gives the service a chance to wrap a call in a product-specific function.
DFA uses this function to listen for expired DFA tokens and refresh them.
Calls which fail due to expired tokens will be retried.
Args:
soap_call_function: function The function to make a SOAP call.
Returns:
function A new function wrapping the input function which listens for
token expired errors and retries the failed call.
"""
def RefreshTokenIfExpired(*args, **kargs):
try:
return soap_call_function(*args, **kargs)
except DfaAuthenticationError, e:
if e.message == self._TOKEN_EXPIRED_ERROR_MESSAGE:
self._GenerateToken()
return soap_call_function(*args, **kargs)
else:
raise e
return RefreshTokenIfExpired
def _SetHeaders(self):
"""Sets the SOAP headers for this service's requests."""
soap_headers = SOAPpy.Types.headerType()
if self._service_name != 'login':
if 'AuthToken' not in self._headers or not self._headers['AuthToken']:
self._GenerateToken()
wsse_header = SOAPpy.Types.structType(
data={
'UsernameToken': {
'Username': self._headers['Username'],
'Password': self._headers['AuthToken']
}
},
name='Security', typed=0, attrs={'xmlns': WSSE_NS})
soap_headers.Security = wsse_header
request_header = SOAPpy.Types.structType(
data={'applicationName': ''.join([self._headers['appName'], LIB_SIG])},
name='RequestHeader', typed=0)
soap_headers.RequestHeader = request_header
self._soappyservice.soapproxy.header = soap_headers
def _ReadyOAuth(self):
"""If OAuth is on, sets the transport handler to add OAuth2 HTTP header.
DFA overrides the default implementation because only the login service
should have this header.
"""
if self._service_name == 'login':
super(GenericDfaService, self)._ReadyOAuth()
def _GetMethodInfo(self, method_name):
"""Pulls all of the relevant data about a method from a SOAPpy service.
The return dictionary has two keys, MethodInfoKeys.INPUTS and
MethodInfoKeys.OUTPUTS. Each of these keys has a list value. The list
value contains a dictionary of information on each input/output parameter,
in order.
Args:
method_name: string The name of the method to pull information for.
Returns:
dict A dictionary containing information about a SOAP method.
"""
rval = {}
rval[MethodInfoKeys.INPUTS] = []
for i in range(len(self._soappyservice.methods[method_name].inparams)):
param_attributes = self._soappyservice.methods[method_name].inparams[i]
if hasattr(param_attributes, 'maxOccurs'):
max_occurs = param_attributes.maxOccurs
else:
max_occurs = '1'
inparam = {
MethodInfoKeys.ELEMENT_NAME: param_attributes.name,
MethodInfoKeys.NS: param_attributes.type[0],
MethodInfoKeys.TYPE: param_attributes.type[1],
MethodInfoKeys.MAX_OCCURS: max_occurs
}
rval[MethodInfoKeys.INPUTS].append(inparam)
rval[MethodInfoKeys.OUTPUTS] = []
for i in range(len(self._soappyservice.methods[method_name].outparams)):
param_attributes = self._soappyservice.methods[method_name].outparams[i]
if hasattr(param_attributes, 'maxOccurs'):
max_occurs = param_attributes.maxOccurs
else:
max_occurs = '1'
outparam = {
MethodInfoKeys.ELEMENT_NAME: param_attributes.name,
MethodInfoKeys.NS: param_attributes.type[0],
MethodInfoKeys.TYPE: param_attributes.type[1],
MethodInfoKeys.MAX_OCCURS: max_occurs
}
rval[MethodInfoKeys.OUTPUTS].append(outparam)
return rval
def _TakeActionOnSoapCall(self, method_name, args):
"""Gives the service a chance to take product-specific action on raw inputs.
DFA will try to determine xsi_types for saveAd and saveCreative calls.
Args:
method_name: string The name of the SOAP operation being called.
args: tuple The arguments passed into the SOAP operation.
Returns:
tuple The method arguments, possibly modified.
"""
if method_name.lower() == 'savecreative':
DfaUtils.AssignCreativeXsi(args[0])
elif method_name.lower() == 'savead':
DfaUtils.AssignAdXsi(args[0])
return args
def _ReadyCompression(self):
"""Sets whether the HTTP transport layer should use compression.
Overloaded for DFA because the DFA servers do not accept compressed
messages. They do support returning compressed messages.
"""
compress = Utils.BoolTypeConvert(self._config['compress'])
self._soappyservice.soapproxy.config.send_compressed = False
self._soappyservice.soapproxy.config.accept_compressed = compress
def _HandleLogsAndErrors(self, buf, start_time, stop_time, error=None):
"""Manage SOAP XML message.
Args:
buf: SoapBuffer SOAP buffer.
start_time: str Time before service call was invoked.
stop_time: str Time after service call was invoked.
[optional]
error: dict Error, if any.
Raises:
DfaApiError: if the API calls returns a SOAP error message.
Error: if the call returns a non-SOAP error message, such as an HTTP 502.
"""
if error is None:
error = {}
try:
handlers = self.__GetLogHandlers(buf)
fault = super(GenericDfaService, self)._ManageSoap(
buf, handlers, LIB_URL, start_time, stop_time, error)
if fault:
# Raise a specific error, subclass of DfaApiError.
if fault['detail'] is None: del fault['detail']
if 'detail' in fault:
if ('google' in fault['detail'] and
'doubleclick' not in fault['detail']):
fault['detail']['doubleclick'] = fault['detail']['google']
if ('doubleclick' in fault['detail'] and
'errorCode' in fault['detail']['doubleclick']):
code = int(fault['detail']['doubleclick']['errorCode'])
if code == 4:
raise DfaAuthenticationError(fault)
else:
raise DfaApiError(fault)
if isinstance(fault, (str, dict)):
raise DfaApiError(fault)
except DfaApiError, e:
raise e
except Error, e:
if error: e = error
raise Error(e)
def _GenerateToken(self):
"""Attempts to generate a token for the WSSE security header.
Raises:
DfaAuthenticationError: if there are not enough credentials to generate a
token or if the given credentials are invalid.
"""
if ('Username' in self._headers and
('Password' in self._headers or 'oauth2credentials' in self._headers)):
if not self._headers.get('oauth2credentials'):
warnings.warn(_DEPRECATION_WARNING, DeprecationWarning, stacklevel=5)
# Ensure the 'raw_response' config value is off while generating tokens.
old_raw_response = self._config['raw_response']
self._config['raw_response'] = 'n'
try:
login_service = GenericDfaService(
self._headers, self._config, self._op_config, self._lock,
self._logger, 'login')
self._headers['AuthToken'] = login_service.authenticate(
self._headers['Username'],
self._headers.get('Password'))[0]['token']
finally:
self._config['raw_response'] = old_raw_response
else:
fault = {
'faultstring': ('Authentication data, username/password or username/'
'oauth2credentials, is missing.')
}
raise DfaAuthenticationError(fault)
def __GetLogHandlers(self, buf):
"""Gets a list of log handlers for the DFA library.
Args:
buf: SoapBuffer SOAP buffer from which calls are retrieved for logging.
Returns:
list Log handlers for the DFA library.
"""
return [
{
'tag': 'xml_log',
'name': 'soap_xml',
'data': ''
},
{
'tag': 'request_log',
'name': 'request_info',
'data': str('host=%s service=%s method=%s responseTime=%s '
'requestID=%s'
% (Utils.GetNetLocFromUrl(self._service_url),
self._service_name, buf.GetCallName(),
buf.GetCallResponseTime(), buf.GetCallRequestId()))
},
{
'tag': '',
'name': 'dfa_api_lib',
'data': ''
}
]
def _DetermineNamespacePrefix(unused_url):
"""Returns the SOAP prefix to use for definitions within the given namespace.
Args:
unused_url: string The URL of the namespace. The DFA library doesn't
actually check this value.
Returns:
string The SOAP namespace prefix to use for the given namespace. The DFA
library always returns 'dfa:'.
"""
return 'dfa:'
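# Illustrative sketch (added for documentation; not part of the original
# module). It restates the retry pattern implemented by _WrapSoapCall in a
# self-contained form; `call_service` and `refresh_token` are hypothetical
# callables standing in for a bound SOAP method and _GenerateToken.
def _ExampleRetryOnExpiredToken(call_service, refresh_token):  # pragma: no cover
    """Hedged sketch of the expired-token retry used by _WrapSoapCall above."""
    try:
        return call_service()
    except DfaAuthenticationError, e:
        if e.message == GenericDfaService._TOKEN_EXPIRED_ERROR_MESSAGE:
            # Generate a fresh token and retry the call exactly once.
            refresh_token()
            return call_service()
        raise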
|
|
from __future__ import absolute_import
import cgi
import email.utils
import hashlib
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile
from pip._vendor.six.moves.urllib import parse as urllib_parse
import pip
from pip.exceptions import InstallationError, HashMismatch
from pip.utils import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file)
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
distro = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], platform.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], platform.libc_ver()),
))
if libc:
distro["libc"] = libc
if distro:
data["distro"] = distro
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
def __call__(self, req):
parsed = urllib_parse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.rsplit("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user, so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username = six.moves.input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)
return userinfo, None
return None, None
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class SafeFileCache(FileCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
"""
def get(self, *args, **kwargs):
try:
return super(SafeFileCache, self).get(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def set(self, *args, **kwargs):
try:
return super(SafeFileCache, self).set(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def delete(self, *args, **kwargs):
try:
return super(SafeFileCache, self).delete(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
retries = kwargs.pop("retries", 0)
cache = kwargs.pop("cache", None)
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
if cache:
http_adapter = CacheControlAdapter(
cache=SafeFileCache(cache),
max_retries=retries,
)
else:
http_adapter = HTTPAdapter(max_retries=retries)
self.mount("http://", http_adapter)
self.mount("https://", http_adapter)
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
def request(self, method, url, *args, **kwargs):
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
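# Illustrative sketch (added for documentation; not part of the original
# module): building a session roughly the way pip's commands do. The cache
# directory, retry count and URL are placeholders.
def _example_session():  # pragma: no cover
    """Hedged sketch of PipSession usage."""
    session = PipSession(cache='/tmp/pip-http-cache', retries=2)
    session.timeout = 15  # class-level default is None (no timeout)
    resp = session.get('https://pypi.python.org/simple/', stream=True)
    resp.raise_for_status()
    return resp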
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib_parse.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
# FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
with open(url) as f:
content = f.read()
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
path = url[len('file:'):].lstrip('/')
path = urllib_parse.unquote(path)
if _url_drive_re.match(path):
path = path[0] + ':' + path[2:]
else:
path = '/' + path
return path
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
drive, path = os.path.splitdrive(path)
filepath = path.split(os.path.sep)
url = '/'.join([urllib_parse.quote(part) for part in filepath])
if not drive:
url = url.lstrip('/')
return 'file:///' + drive + url
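# Illustrative sketch (added for documentation; not part of the original
# module): the round trip between the two helpers above for a POSIX-style
# path. The concrete path is a placeholder.
def _example_url_path_roundtrip():  # pragma: no cover
    """Hedged sketch: path_to_url() and url_to_path() invert each other."""
    url = path_to_url('/tmp/example pkg')   # -> 'file:///tmp/example%20pkg'
    return url_to_path(url)                 # -> '/tmp/example pkg'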
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
archives = (
'.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.whl'
)
ext = splitext(name)[1].lower()
if ext in archives:
return True
return False
def unpack_vcs_link(link, location, only_download=False):
vcs_backend = _get_used_vcs_backend(link)
if only_download:
vcs_backend.export(location)
else:
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.critical(
"Hash digest size of the package %d (%s) doesn't match the "
"expected hash name %s!",
download_hash.digest_size, link, link.hash_name,
)
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.critical(
"Hash of the package %s (%s) doesn't match the expected hash %s!",
link, download_hash.hexdigest(), link.hash,
)
raise HashMismatch(
'Bad %s hash for package %s' % (link.hash_name, link)
)
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warning(
"Unsupported hash name %s for package %s", link.hash_name, link,
)
return None
with open(target_file, 'rb') as fp:
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
return download_hash
def _download_url(resp, link, content_file):
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warning(
"Unsupported hash name %s for package %s",
link.hash_name, link,
)
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
cached_resp = getattr(resp, "from_cache", False)
if cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
try:
def resp_read(chunk_size):
try:
# Special case for urllib3.
for chunk in resp.raw.stream(
chunk_size,
# We use decode_content=False here because we do
# not want urllib3 to mess with the raw bytes we get
# from the server. If we decompress inside of
# urllib3 then we cannot verify the checksum
# because the checksum will be of the compressed
# file. This breakage will only occur if the
# server adds a Content-Encoding header, which
# depends on how the server was configured:
# - Some servers will notice that the file isn't a
# compressible file and will leave the file alone
# and with an empty Content-Encoding
# - Some servers will notice that the file is
# already compressed and will leave the file
# alone and will add a Content-Encoding: gzip
# header
# - Some servers won't notice anything at all and
# will take a file that's already been compressed
# and compress it again and set the
# Content-Encoding: gzip header
#
# By setting this not to decode automatically we
# hope to eliminate problems with the second case.
decode_content=False):
yield chunk
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
progress_indicator = lambda x, *a, **k: x
if show_progress: # We don't show progress on cached responses
if total_length:
logger.info(
"Downloading %s (%s)", show_url, format_size(total_length),
)
progress_indicator = DownloadProgressBar(
max=total_length,
).iter
else:
logger.info("Downloading %s", show_url)
progress_indicator = DownloadProgressSpinner().iter
elif cached_resp:
logger.info("Using cached %s", show_url)
else:
logger.info("Downloading %s", show_url)
logger.debug('Downloading from URL %s', link)
for chunk in progress_indicator(resp_read(4096), 4096):
if download_hash is not None:
download_hash.update(chunk)
content_file.write(chunk)
finally:
if link.hash and link.hash_name:
_check_hash(download_hash, link)
return download_hash
def _copy_file(filename, location, content_type, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(download_location), ('i', 'w', 'b'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None, session=None):
if session is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(link, session, temp_dir)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
if not already_downloaded_path:
os.unlink(from_path)
os.rmdir(temp_dir)
def unpack_file_url(link, location, download_dir=None):
"""Unpack link into location.
If download_dir is provided and link points to a file, make a copy
of the link file inside download_dir."""
link_path = url_to_path(link.url_without_fragment)
# If it's a url to a local directory
if os.path.isdir(link_path):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
if download_dir:
logger.info('Link is a directory, ignoring download_dir')
return
# if link has a hash, let's confirm it matches
if link.hash:
link_path_hash = _get_hash_from_file(link_path, link)
_check_hash(link_path_hash, link)
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
class PipXmlrpcTransport(xmlrpc_client.Transport):
"""Provide a `xmlrpclib.Transport` implementation via a `PipSession`
object.
"""
def __init__(self, index_url, session, use_datetime=False):
xmlrpc_client.Transport.__init__(self, use_datetime)
index_parts = urllib_parse.urlparse(index_url)
self._scheme = index_parts.scheme
self._session = session
def request(self, host, handler, request_body, verbose=False):
parts = (self._scheme, host, handler, None, None, None)
url = urllib_parse.urlunparse(parts)
try:
headers = {'Content-Type': 'text/xml'}
response = self._session.post(url, data=request_body,
headers=headers, stream=True)
response.raise_for_status()
self.verbose = verbose
return self.parse_response(response.raw)
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s",
exc.response.status_code, url,
)
raise
def unpack_url(link, location, download_dir=None,
only_download=False, session=None):
"""Unpack link.
If link is a VCS link:
if only_download, export into download_dir and ignore location
else unpack into location
for other types of link:
- unpack into location
- if download_dir, copy the file into download_dir
- if only_download, mark location for deletion
"""
# non-editable vcs urls
if is_vcs_url(link):
unpack_vcs_link(link, location, only_download)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir)
if only_download:
write_delete_marker_file(location)
# http urls
else:
if session is None:
session = PipSession()
unpack_http_url(
link,
location,
download_dir,
session,
)
if only_download:
write_delete_marker_file(location)
def _download_http_url(link, session, temp_dir):
"""Download link url into temp_dir using provided session"""
target_url = link.url.split('#', 1)[0]
try:
resp = session.get(
target_url,
# We use Accept-Encoding: identity here because requests
# defaults to accepting compressed responses. This breaks in
# a variety of ways depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible
# file and will leave the file alone and with an empty
# Content-Encoding
# - Some servers will notice that the file is already
# compressed and will leave the file alone and will add a
# Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take
# a file that's already been compressed and compress it again
# and set the Content-Encoding: gzip header
# By setting this to request only the identity encoding, we're
# hoping to eliminate the third case. Hopefully there does not
# exist a server which when given a file will notice it is
# already compressed and that you're not asking for a
# compressed file and will then decompress it before sending
# because if that's the case I don't think it'll ever be
# possible to make this work.
headers={"Accept-Encoding": "identity"},
stream=True,
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
file_path = os.path.join(temp_dir, filename)
with open(file_path, 'wb') as content_file:
_download_url(resp, link, content_file)
return file_path, content_type
def _check_download_dir(link, download_dir):
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash, '
're-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
return None
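# Illustrative sketch (added for documentation; not part of the original
# module): fetching and unpacking an sdist with the helpers above. The URL
# and target directory are placeholders, and Link is assumed to come from
# pip.index, which is how callers of unpack_url normally construct links.
def _example_unpack_url(target_dir):  # pragma: no cover
    """Hedged sketch of unpack_url() with an HTTP link."""
    from pip.index import Link  # assumed import; callers usually pass a Link in
    session = PipSession(retries=3)
    link = Link('https://example.com/packages/example-1.0.tar.gz')
    unpack_url(link, target_dir, download_dir=None,
               only_download=False, session=session)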
|
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""SQL backends for the various services.
Before using this module, call initialize(). This has to be done before
CONF() because it sets up configuration options.
"""
import contextlib
import functools
from oslo.config import cfg
from oslo.db import exception as db_exception
from oslo.db import options as db_options
from oslo.db.sqlalchemy import models
from oslo.db.sqlalchemy import session as db_session
from oslo.serialization import jsonutils
import six
import sqlalchemy as sql
from sqlalchemy.ext import declarative
from sqlalchemy.orm.attributes import flag_modified, InstrumentedAttribute
from sqlalchemy import types as sql_types
from keystone.common import utils
from keystone import exception
from keystone.i18n import _
from keystone.openstack.common import log
CONF = cfg.CONF
LOG = log.getLogger(__name__)
ModelBase = declarative.declarative_base()
# For exporting to other modules
Column = sql.Column
Index = sql.Index
String = sql.String
Integer = sql.Integer
Enum = sql.Enum
ForeignKey = sql.ForeignKey
DateTime = sql.DateTime
IntegrityError = sql.exc.IntegrityError
DBDuplicateEntry = db_exception.DBDuplicateEntry
OperationalError = sql.exc.OperationalError
NotFound = sql.orm.exc.NoResultFound
Boolean = sql.Boolean
Text = sql.Text
UniqueConstraint = sql.UniqueConstraint
PrimaryKeyConstraint = sql.PrimaryKeyConstraint
joinedload = sql.orm.joinedload
# Suppress flake8's unused import warning for flag_modified:
flag_modified = flag_modified
def initialize():
"""Initialize the module."""
db_options.set_defaults(
CONF,
connection="sqlite:///keystone.db")
def initialize_decorator(init):
"""Ensure that the length of string field do not exceed the limit.
This decorator check the initialize arguments, to make sure the
length of string field do not exceed the length limit, or raise a
'StringLengthExceeded' exception.
Use decorator instead of inheritance, because the metaclass will
check the __tablename__, primary key columns, etc. at the class
definition.
"""
def initialize(self, *args, **kwargs):
cls = type(self)
for k, v in kwargs.items():
if hasattr(cls, k):
attr = getattr(cls, k)
if isinstance(attr, InstrumentedAttribute):
column = attr.property.columns[0]
if isinstance(column.type, String):
if not isinstance(v, six.text_type):
v = six.text_type(v)
if column.type.length and column.type.length < len(v):
raise exception.StringLengthExceeded(
string=v, type=k, length=column.type.length)
init(self, *args, **kwargs)
return initialize
ModelBase.__init__ = initialize_decorator(ModelBase.__init__)
# Special Fields
class JsonBlob(sql_types.TypeDecorator):
impl = sql.Text
def process_bind_param(self, value, dialect):
return jsonutils.dumps(value)
def process_result_value(self, value, dialect):
return jsonutils.loads(value)
class DictBase(models.ModelBase):
attributes = []
@classmethod
def from_dict(cls, d):
new_d = d.copy()
new_d['extra'] = dict((k, new_d.pop(k)) for k in six.iterkeys(d)
if k not in cls.attributes and k != 'extra')
return cls(**new_d)
def to_dict(self, include_extra_dict=False):
"""Returns the model's attributes as a dictionary.
If include_extra_dict is True, 'extra' attributes are literally
included in the resulting dictionary twice, for backwards-compatibility
with a broken implementation.
"""
d = self.extra.copy()
for attr in self.__class__.attributes:
d[attr] = getattr(self, attr)
if include_extra_dict:
d['extra'] = self.extra.copy()
return d
def __getitem__(self, key):
if key in self.extra:
return self.extra[key]
return getattr(self, key)
class ModelDictMixin(object):
@classmethod
def from_dict(cls, d):
"""Returns a model instance from a dictionary."""
return cls(**d)
def to_dict(self):
"""Returns the model's attributes as a dictionary."""
names = (column.name for column in self.__table__.columns)
return dict((name, getattr(self, name)) for name in names)
_engine_facade = None
def _get_engine_facade():
global _engine_facade
if not _engine_facade:
_engine_facade = db_session.EngineFacade.from_config(CONF)
return _engine_facade
def cleanup():
global _engine_facade
_engine_facade = None
def get_engine():
return _get_engine_facade().get_engine()
def get_session(expire_on_commit=False):
return _get_engine_facade().get_session(expire_on_commit=expire_on_commit)
@contextlib.contextmanager
def transaction(expire_on_commit=False):
"""Return a SQLAlchemy session in a scoped transaction."""
session = get_session(expire_on_commit=expire_on_commit)
with session.begin():
yield session
def truncated(f):
"""Ensure list truncation is detected in Driver list entity methods.
This is designed to wrap any sql Driver list_{entity} methods in order to
calculate if the resultant list has been truncated. Provided a limit dict
is found in the hints list, we increment the limit by one so as to ask the
wrapped function for one more entity than the limit, and then once the list
has been generated, we check to see if the original limit has been
exceeded, in which case we truncate back to that limit and set the
'truncated' boolean to 'true' in the hints limit dict.
"""
@functools.wraps(f)
def wrapper(self, hints, *args, **kwargs):
if not hasattr(hints, 'limit'):
raise exception.UnexpectedError(
_('Cannot truncate a driver call without hints list as '
'first parameter after self '))
if hints.limit is None:
return f(self, hints, *args, **kwargs)
# A limit is set, so ask for one more entry than we need
list_limit = hints.limit['limit']
hints.set_limit(list_limit + 1)
ref_list = f(self, hints, *args, **kwargs)
# If we got more than the original limit then trim back the list and
# mark it truncated. In both cases, make sure we set the limit back
# to its original value.
if len(ref_list) > list_limit:
hints.set_limit(list_limit, truncated=True)
return ref_list[:list_limit]
else:
hints.set_limit(list_limit)
return ref_list
return wrapper
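# Illustrative sketch (added for documentation; not part of the original
# module): how a backend list method opts in to truncation. _FakeHints is a
# hypothetical stand-in for keystone.common.driver_hints.Hints; with no limit
# set, the wrapper simply calls through to the decorated method.
def _example_truncated_usage():  # pragma: no cover
    """Hedged sketch of the truncated() decorator defined above."""
    class _FakeHints(object):
        limit = None
        filters = []
    class _FakeDriver(object):
        @truncated
        def list_widgets(self, hints):
            return ['widget-%d' % i for i in range(3)]
    return _FakeDriver().list_widgets(_FakeHints())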
def _filter(model, query, hints):
"""Applies filtering to a query.
:param model: the table model in question
:param query: query to apply filters to
:param hints: contains the list of filters yet to be satisfied.
Any filters satisfied here will be removed so that
the caller will know if any filters remain.
:returns query: query, updated with any filters satisfied
"""
def inexact_filter(model, query, filter_, hints):
"""Applies an inexact filter to a query.
:param model: the table model in question
:param query: query to apply filters to
:param filter_: the dict that describes this filter
:param hints: contains the list of filters yet to be satisfied.
Any filters satisfied here will be removed so that
the caller will know if any filters remain.
:returns query: query updated to add any inexact filters we could
satisfy
"""
column_attr = getattr(model, filter_['name'])
# TODO(henry-nash): Sqlalchemy 0.7 defaults to case insensitivity
# so once we find a way of changing that (maybe on a call-by-call
# basis), we can add support for the case sensitive versions of
# the filters below. For now, these case sensitive versions will
# be handled at the controller level.
if filter_['case_sensitive']:
return query
if filter_['comparator'] == 'contains':
query_term = column_attr.ilike('%%%s%%' % filter_['value'])
elif filter_['comparator'] == 'startswith':
query_term = column_attr.ilike('%s%%' % filter_['value'])
elif filter_['comparator'] == 'endswith':
query_term = column_attr.ilike('%%%s' % filter_['value'])
else:
# It's a filter we don't understand, so let the caller
# work out if they need to do something with it.
return query
hints.filters.remove(filter_)
return query.filter(query_term)
def exact_filter(model, filter_, cumulative_filter_dict, hints):
"""Applies an exact filter to a query.
:param model: the table model in question
:param filter_: the dict that describes this filter
:param cumulative_filter_dict: a dict that describes the set of
exact filters built up so far
:param hints: contains the list of filters yet to be satisfied.
Any filters satisfied here will be removed so that
the caller will know if any filters remain.
:returns: updated cumulative dict
"""
key = filter_['name']
if isinstance(getattr(model, key).property.columns[0].type,
sql.types.Boolean):
cumulative_filter_dict[key] = (
utils.attr_as_boolean(filter_['value']))
else:
cumulative_filter_dict[key] = filter_['value']
hints.filters.remove(filter_)
return cumulative_filter_dict
filter_dict = {}
for filter_ in hints.filters:
if filter_['name'] not in model.attributes:
continue
if filter_['comparator'] == 'equals':
filter_dict = exact_filter(model, filter_, filter_dict, hints)
else:
query = inexact_filter(model, query, filter_, hints)
# Apply any exact filters we built up
if filter_dict:
query = query.filter_by(**filter_dict)
return query
def _limit(query, hints):
"""Applies a limit to a query.
:param query: query to apply filters to
:param hints: contains the list of filters and limit details.
:returns: updated query
"""
# NOTE(henry-nash): If we were to implement pagination, then
# we would expand this method to support pagination and limiting.
# If we satisfied all the filters, set an upper limit if supplied
if hints.limit:
query = query.limit(hints.limit['limit'])
return query
def filter_limit_query(model, query, hints):
"""Applies filtering and limit to a query.
:param model: table model
:param query: query to apply filters to
:param hints: contains the list of filters and limit details. This may
be None, indicating that there are no filters or limits
to be applied. If it's not None, then any filters
satisfied here will be removed so that the caller will
know if any filters remain.
:returns: updated query
"""
if hints is None:
return query
# First try and satisfy any filters
query = _filter(model, query, hints)
# NOTE(henry-nash): Any unsatisfied filters will have been left in
# the hints list for the controller to handle. We can only try and
# limit here if all the filters are already satisfied since, if not,
# doing so might mess up the final results. If there are still
# unsatisfied filters, we have to leave any limiting to the controller
# as well.
if not hints.filters:
return _limit(query, hints)
else:
return query
def handle_conflicts(conflict_type='object'):
"""Converts select sqlalchemy exceptions into HTTP 409 Conflict."""
_conflict_msg = 'Conflict %(conflict_type)s: %(details)s'
def decorator(method):
@functools.wraps(method)
def wrapper(*args, **kwargs):
try:
return method(*args, **kwargs)
except db_exception.DBDuplicateEntry as e:
# LOG the exception for debug purposes, do not send the
# exception details out with the raised Conflict exception
# as it can contain raw SQL.
LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
'details': six.text_type(e)})
raise exception.Conflict(type=conflict_type,
details=_('Duplicate Entry'))
except db_exception.DBError as e:
# TODO(blk-u): inspecting inner_exception breaks encapsulation;
# oslo.db should provide exception we need.
if isinstance(e.inner_exception, IntegrityError):
# LOG the exception for debug purposes, do not send the
# exception details out with the raised Conflict exception
# as it can contain raw SQL.
LOG.debug(_conflict_msg, {'conflict_type': conflict_type,
'details': six.text_type(e)})
# NOTE(morganfainberg): This is really a case where the SQL
# failed to store the data. This is not something that the
# user has done wrong. Example would be a ForeignKey is
# missing; the code that is executed before reaching the
# SQL writing to the DB should catch the issue.
raise exception.UnexpectedError(
_('An unexpected error occurred when trying to '
'store %s') % conflict_type)
raise
return wrapper
return decorator
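# Illustrative sketch (added for documentation; not part of the original
# module): reading a row inside the scoped transaction defined above. The
# model class and attribute names are placeholders for any mapped model
# built on ModelBase/DictBase.
def _example_transaction(model_cls, entity_id):  # pragma: no cover
    """Hedged sketch of transaction() usage."""
    with transaction() as session:
        ref = session.query(model_cls).filter_by(id=entity_id).first()
        return ref.to_dict() if ref is not None else None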
|
|
"""Object-oriented interface to SWF wrapping boto.swf.layer1.Layer1"""
import time
from functools import wraps
from boto.swf.layer1 import Layer1
from boto.swf.layer1_decisions import Layer1Decisions
DEFAULT_CREDENTIALS = {
'aws_access_key_id': None,
'aws_secret_access_key': None
}
def set_default_credentials(aws_access_key_id, aws_secret_access_key):
"""Set default credentials."""
DEFAULT_CREDENTIALS.update({
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
})
class SWFBase(object):
name = None
domain = None
aws_access_key_id = None
aws_secret_access_key = None
def __init__(self, **kwargs):
# Set default credentials.
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if DEFAULT_CREDENTIALS.get(credkey):
setattr(self, credkey, DEFAULT_CREDENTIALS[credkey])
# Override attributes with keyword args.
for kwarg in kwargs:
setattr(self, kwarg, kwargs[kwarg])
self._swf = Layer1(self.aws_access_key_id,
self.aws_secret_access_key)
def __repr__(self):
rep_str = str(self.name)
if hasattr(self, 'version'):
rep_str += '-' + str(getattr(self, 'version'))
return '<%s %r at 0x%x>' % (self.__class__.__name__, rep_str, id(self))
class Domain(SWFBase):
"""Simple Workflow Domain."""
description = None
retention = 30
@wraps(Layer1.describe_domain)
def describe(self):
"""DescribeDomain."""
return self._swf.describe_domain(self.name)
@wraps(Layer1.deprecate_domain)
def deprecate(self):
"""DeprecateDomain"""
self._swf.deprecate_domain(self.name)
@wraps(Layer1.register_domain)
def register(self):
"""RegisterDomain."""
self._swf.register_domain(self.name, str(self.retention),
self.description)
@wraps(Layer1.list_activity_types)
def activities(self, status='REGISTERED', **kwargs):
"""ListActivityTypes."""
act_types = self._swf.list_activity_types(self.name, status, **kwargs)
act_objects = []
for act_args in act_types['typeInfos']:
act_ident = act_args['activityType']
del act_args['activityType']
act_args.update(act_ident)
act_args.update({
'aws_access_key_id': self.aws_access_key_id,
'aws_secret_access_key': self.aws_secret_access_key,
'domain': self.name,
})
act_objects.append(ActivityType(**act_args))
return act_objects
@wraps(Layer1.list_workflow_types)
def workflows(self, status='REGISTERED', **kwargs):
"""ListWorkflowTypes."""
wf_types = self._swf.list_workflow_types(self.name, status, **kwargs)
wf_objects = []
for wf_args in wf_types['typeInfos']:
wf_ident = wf_args['workflowType']
del wf_args['workflowType']
wf_args.update(wf_ident)
wf_args.update({
'aws_access_key_id': self.aws_access_key_id,
'aws_secret_access_key': self.aws_secret_access_key,
'domain': self.name,
})
wf_objects.append(WorkflowType(**wf_args))
return wf_objects
def executions(self, closed=False, **kwargs):
"""List list open/closed executions.
For a full list of available parameters refer to
:py:func:`boto.swf.layer1.Layer1.list_closed_workflow_executions` and
:py:func:`boto.swf.layer1.Layer1.list_open_workflow_executions`
"""
if closed:
executions = self._swf.list_closed_workflow_executions(self.name,
**kwargs)
else:
if 'oldest_date' not in kwargs:
# Last 24 hours.
kwargs['oldest_date'] = time.time() - (3600 * 24)
executions = self._swf.list_open_workflow_executions(self.name,
**kwargs)
exe_objects = []
for exe_args in executions['executionInfos']:
for nested_key in ('execution', 'workflowType'):
nested_dict = exe_args[nested_key]
del exe_args[nested_key]
exe_args.update(nested_dict)
exe_args.update({
'aws_access_key_id': self.aws_access_key_id,
'aws_secret_access_key': self.aws_secret_access_key,
'domain': self.name,
})
exe_objects.append(WorkflowExecution(**exe_args))
return exe_objects
@wraps(Layer1.count_pending_activity_tasks)
def count_pending_activity_tasks(self, task_list):
"""CountPendingActivityTasks."""
return self._swf.count_pending_activity_tasks(self.name, task_list)
@wraps(Layer1.count_pending_decision_tasks)
def count_pending_decision_tasks(self, task_list):
"""CountPendingDecisionTasks."""
return self._swf.count_pending_decision_tasks(self.name, task_list)
class Actor(SWFBase):
task_list = None
last_tasktoken = None
domain = None
def run(self):
"""To be overloaded by subclasses."""
raise NotImplementedError()
class ActivityWorker(Actor):
"""Base class for SimpleWorkflow activity workers."""
@wraps(Layer1.respond_activity_task_canceled)
def cancel(self, task_token=None, details=None):
"""RespondActivityTaskCanceled."""
if task_token is None:
task_token = self.last_tasktoken
return self._swf.respond_activity_task_canceled(task_token, details)
@wraps(Layer1.respond_activity_task_completed)
def complete(self, task_token=None, result=None):
"""RespondActivityTaskCompleted."""
if task_token is None:
task_token = self.last_tasktoken
return self._swf.respond_activity_task_completed(task_token, result)
@wraps(Layer1.respond_activity_task_failed)
def fail(self, task_token=None, details=None, reason=None):
"""RespondActivityTaskFailed."""
if task_token is None:
task_token = self.last_tasktoken
return self._swf.respond_activity_task_failed(task_token, details,
reason)
@wraps(Layer1.record_activity_task_heartbeat)
def heartbeat(self, task_token=None, details=None):
"""RecordActivityTaskHeartbeat."""
if task_token is None:
task_token = self.last_tasktoken
return self._swf.record_activity_task_heartbeat(task_token, details)
@wraps(Layer1.poll_for_activity_task)
def poll(self, **kwargs):
"""PollForActivityTask."""
task_list = self.task_list
if 'task_list' in kwargs:
task_list = kwargs.get('task_list')
del kwargs['task_list']
task = self._swf.poll_for_activity_task(self.domain, task_list,
**kwargs)
self.last_tasktoken = task.get('taskToken')
return task
class Decider(Actor):
"""Base class for SimpleWorkflow deciders."""
@wraps(Layer1.respond_decision_task_completed)
def complete(self, task_token=None, decisions=None, **kwargs):
"""RespondDecisionTaskCompleted."""
if isinstance(decisions, Layer1Decisions):
# Extract decision list from a Layer1Decisions instance.
decisions = decisions._data
if task_token is None:
task_token = self.last_tasktoken
return self._swf.respond_decision_task_completed(task_token, decisions,
**kwargs)
@wraps(Layer1.poll_for_decision_task)
def poll(self, **kwargs):
"""PollForDecisionTask."""
task_list = self.task_list
if 'task_list' in kwargs:
task_list = kwargs.get('task_list')
del kwargs['task_list']
decision_task = self._swf.poll_for_decision_task(self.domain, task_list,
**kwargs)
self.last_tasktoken = decision_task.get('taskToken')
return decision_task
class WorkflowType(SWFBase):
"""A versioned workflow type."""
version = None
task_list = None
child_policy = 'TERMINATE'
@wraps(Layer1.describe_workflow_type)
def describe(self):
"""DescribeWorkflowType."""
return self._swf.describe_workflow_type(self.domain, self.name,
self.version)
@wraps(Layer1.register_workflow_type)
def register(self, **kwargs):
"""RegisterWorkflowType."""
args = {
'default_execution_start_to_close_timeout': '3600',
'default_task_start_to_close_timeout': '300',
'default_child_policy': 'TERMINATE',
}
args.update(kwargs)
self._swf.register_workflow_type(self.domain, self.name, self.version,
**args)
@wraps(Layer1.deprecate_workflow_type)
def deprecate(self):
"""DeprecateWorkflowType."""
self._swf.deprecate_workflow_type(self.domain, self.name, self.version)
@wraps(Layer1.start_workflow_execution)
def start(self, **kwargs):
"""StartWorkflowExecution."""
if 'workflow_id' in kwargs:
workflow_id = kwargs['workflow_id']
del kwargs['workflow_id']
else:
workflow_id = '%s-%s-%i' % (self.name, self.version, time.time())
for def_attr in ('task_list', 'child_policy'):
kwargs[def_attr] = kwargs.get(def_attr, getattr(self, def_attr))
run_id = self._swf.start_workflow_execution(self.domain, workflow_id,
self.name, self.version, **kwargs)['runId']
return WorkflowExecution(name=self.name, version=self.version,
runId=run_id, domain=self.domain, workflowId=workflow_id,
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key)
class WorkflowExecution(SWFBase):
"""An instance of a workflow."""
workflowId = None
runId = None
@wraps(Layer1.signal_workflow_execution)
def signal(self, signame, **kwargs):
"""SignalWorkflowExecution."""
self._swf.signal_workflow_execution(self.domain, signame,
self.workflowId, **kwargs)
@wraps(Layer1.terminate_workflow_execution)
def terminate(self, **kwargs):
"""TerminateWorkflowExecution (p. 103)."""
return self._swf.terminate_workflow_execution(self.domain,
self.workflowId, **kwargs)
@wraps(Layer1.get_workflow_execution_history)
def history(self, **kwargs):
"""GetWorkflowExecutionHistory."""
return self._swf.get_workflow_execution_history(self.domain, self.runId,
self.workflowId, **kwargs)['events']
@wraps(Layer1.describe_workflow_execution)
def describe(self):
"""DescribeWorkflowExecution."""
return self._swf.describe_workflow_execution(self.domain, self.runId,
self.workflowId)
@wraps(Layer1.request_cancel_workflow_execution)
def request_cancel(self):
"""RequestCancelWorkflowExecution."""
return self._swf.request_cancel_workflow_execution(self.domain,
self.workflowId, self.runId)
class ActivityType(SWFBase):
"""A versioned activity type."""
version = None
@wraps(Layer1.deprecate_activity_type)
def deprecate(self):
"""DeprecateActivityType."""
return self._swf.deprecate_activity_type(self.domain, self.name,
self.version)
@wraps(Layer1.describe_activity_type)
def describe(self):
"""DescribeActivityType."""
return self._swf.describe_activity_type(self.domain, self.name,
self.version)
@wraps(Layer1.register_activity_type)
def register(self, **kwargs):
"""RegisterActivityType."""
args = {
'default_task_heartbeat_timeout': '600',
'default_task_schedule_to_close_timeout': '3900',
'default_task_schedule_to_start_timeout': '300',
'default_task_start_to_close_timeout': '3600',
}
args.update(kwargs)
self._swf.register_activity_type(self.domain, self.name, self.version,
**args)
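# Illustrative usage sketch (added for documentation; not part of the original
# module). The credentials, domain name, workflow name and task list are
# placeholders; the register() calls will raise if the types already exist.
def _example_start_workflow():  # pragma: no cover
    """Hedged sketch: registering a domain/workflow type and starting a run."""
    set_default_credentials('AKID-PLACEHOLDER', 'SECRET-PLACEHOLDER')
    dom = Domain(name='example-domain', description='demo domain', retention=7)
    dom.register()
    wf = WorkflowType(domain=dom.name, name='ExampleWorkflow', version='1.0',
                      task_list='example-tasks')
    wf.register()
    return wf.start(input='{}')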
|
|
#
# Evy - a concurrent networking library for Python
#
# Unless otherwise noted, the files in Evy are under the following MIT license:
#
# Copyright (c) 2012, Alvaro Saurin
# Copyright (c) 2008-2010, Eventlet Contributors (see AUTHORS)
# Copyright (c) 2007-2010, Linden Research, Inc.
# Copyright (c) 2005-2006, Bob Ippolito
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
"""
Unit tests for socket timeout feature.
"""
import unittest
from test import test_support
# This requires the 'network' resource as given on the regrtest command line.
skip_expected = not test_support.is_resource_enabled('network')
import time
from evy.patched import socket
class TestGreenSocketTimeouts(unittest.TestCase):
"""
Test case for socket.gettimeout() and socket.settimeout()
"""
def setUp (self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def tearDown (self):
self.sock.close()
def test_object_creation (self):
# Test Socket creation
self.assertEqual(self.sock.gettimeout(), None,
"timeout not disabled by default")
def test_float_return_value (self):
# Test return value of gettimeout()
self.sock.settimeout(7.345)
self.assertEqual(self.sock.gettimeout(), 7.345)
self.sock.settimeout(3)
self.assertEqual(self.sock.gettimeout(), 3)
self.sock.settimeout(None)
self.assertEqual(self.sock.gettimeout(), None)
def test_return_type (self):
# Test return type of gettimeout()
self.sock.settimeout(1)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
self.sock.settimeout(3.9)
self.assertEqual(type(self.sock.gettimeout()), type(1.0))
def test_type_check (self):
# Test type checking by settimeout()
self.sock.settimeout(0)
self.sock.settimeout(0L)
self.sock.settimeout(0.0)
self.sock.settimeout(None)
self.assertRaises(TypeError, self.sock.settimeout, "")
self.assertRaises(TypeError, self.sock.settimeout, u"")
self.assertRaises(TypeError, self.sock.settimeout, ())
self.assertRaises(TypeError, self.sock.settimeout, [])
self.assertRaises(TypeError, self.sock.settimeout, {})
self.assertRaises(TypeError, self.sock.settimeout, 0j)
def test_range_check (self):
# Test range checking by settimeout()
self.assertRaises(ValueError, self.sock.settimeout, -1)
self.assertRaises(ValueError, self.sock.settimeout, -1L)
self.assertRaises(ValueError, self.sock.settimeout, -1.0)
def test_timeout_then_blocking (self):
# Test settimeout() followed by setblocking()
self.sock.settimeout(10)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.settimeout(10)
self.sock.setblocking(0)
self.assertEqual(self.sock.gettimeout(), 0.0)
self.sock.setblocking(1)
self.assertEqual(self.sock.gettimeout(), None)
def test_blocking_then_timeout (self):
# Test setblocking() followed by settimeout()
self.sock.setblocking(0)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
self.sock.setblocking(1)
self.sock.settimeout(1)
self.assertEqual(self.sock.gettimeout(), 1)
class TimeoutTestCase(unittest.TestCase):
"""Test case for socket.socket() timeout functions"""
# There are a number of tests here trying to make sure that an operation
# doesn't take too much longer than expected. But competing machine
# activity makes it inevitable that such tests will fail at times.
# When fuzz was at 1.0, I (tim) routinely saw bogus failures on Win2K
# and Win98SE. Boosting it to 2.0 helped a lot, but isn't a real
# solution.
fuzz = 2.0
def setUp (self):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.addr_remote = ('www.python.org.', 80)
self.localhost = '127.0.0.1'
def tearDown (self):
self.sock.close()
def test_connect_timeout (self):
# Choose a private address that is unlikely to exist to prevent
# failures due to the connect succeeding before the timeout.
# Use a dotted IP address to avoid including the DNS lookup time
# with the connect time. This avoids failing the assertion that
# the timeout occurred fast enough.
addr = ('10.0.0.0', 12345)
# Test connect() timeout
_timeout = 0.001
self.sock.settimeout(_timeout)
_t1 = time.time()
self.assertRaises(socket.error, self.sock.connect, addr)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assertTrue(_delta < _timeout + self.fuzz,
"timeout (%g) is more than %g seconds more than expected (%g)"
% (_delta, self.fuzz, _timeout))
def test_recv_timeout (self):
# Test recv() timeout
_timeout = 0.02
with test_support.transient_internet(self.addr_remote[0]):
self.sock.connect(self.addr_remote)
self.sock.settimeout(_timeout)
_t1 = time.time()
self.assertRaises(socket.timeout, self.sock.recv, 1024)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assertTrue(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
% (_delta, self.fuzz, _timeout))
def test_accept_timeout (self):
# Test accept() timeout
_timeout = 2
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
self.sock.listen(5)
_t1 = time.time()
self.assertRaises(socket.error, self.sock.accept)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assertTrue(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
% (_delta, self.fuzz, _timeout))
def test_recvfrom_timeout (self):
# Test recvfrom() timeout
_timeout = 2
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.settimeout(_timeout)
# Prevent "Address already in use" socket exceptions
test_support.bind_port(self.sock, self.localhost)
_t1 = time.time()
self.assertRaises(socket.error, self.sock.recvfrom, 8192)
_t2 = time.time()
_delta = abs(_t1 - _t2)
self.assertTrue(_delta < _timeout + self.fuzz,
"timeout (%g) is %g seconds more than expected (%g)"
% (_delta, self.fuzz, _timeout))
|
|
# Copyright (c) 2020 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test_pyu4v_system.py."""
import testtools
import time
from pathlib import Path
from unittest import mock
from unittest.mock import MagicMock
from PyU4V import common
from PyU4V import rest_requests
from PyU4V.tests.unit_tests import pyu4v_common_data as pcd
from PyU4V.tests.unit_tests import pyu4v_fakes as pf
from PyU4V import univmax_conn
from PyU4V.utils import constants
from PyU4V.utils import exception
from PyU4V.utils import file_handler
ARRAY_ID = constants.ARRAY_ID
ARRAY_NUM = constants.ARRAY_NUM
HEALTH = constants.HEALTH
HEALTH_CHECK = constants.HEALTH_CHECK
SG_ID = constants.SG_ID
SG_NUM = constants.SG_NUM
SYMMETRIX = constants.SYMMETRIX
SYSTEM = constants.SYSTEM
LOCAL_USER = constants.LOCAL_USER
TAG = constants.TAG
TAG_NAME = constants.TAG_NAME
ALERT_ID = pcd.CommonData.alert_id
ALERT = constants.ALERT
AUDIT_LOG_RECORD = constants.AUDIT_LOG_RECORD
EXPORT_FILE = constants.EXPORT_FILE
AUDIT_LOG_FILENAME = constants.AUDIT_LOG_FILENAME
SUCCESS = constants.SUCCESS
BINARY_DATA = constants.BINARY_DATA
AUDIT_RECORD_PATH = constants.AUDIT_RECORD_PATH
class PyU4VSystemTest(testtools.TestCase):
"""Test System."""
def setUp(self):
"""Setup."""
super(PyU4VSystemTest, self).setUp()
self.data = pcd.CommonData()
self.conf_file, self.conf_dir = (
pf.FakeConfigFile.create_fake_config_file())
univmax_conn.file_path = self.conf_file
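# Patch REST session establishment so that U4VConn talks to a
# FakeRequestsSession instead of opening a real connection to Unisphere.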
with mock.patch.object(
rest_requests.RestRequests, 'establish_rest_session',
return_value=pf.FakeRequestsSession()):
self.conn = univmax_conn.U4VConn(array_id=self.data.array)
self.common = self.conn.common
self.system = self.conn.system
def test_get_system_health(self):
"""Test get_system_health."""
health_check_result = self.system.get_system_health()
self.assertEqual(self.data.array_health, health_check_result)
def test_list_system_health_check(self):
"""Test list_system_health_checks."""
health_check_list = self.system.list_system_health_check()
self.assertEqual(self.data.array_health_check_list, health_check_list)
def test_get_health_check_details(self):
"""Test get_health_check_details."""
health_check = self.system.get_health_check_details(health_check_id=1)
self.assertEqual(self.data.health_check_response, health_check)
def test_perform_health_check(self):
"""Test perform_health_check."""
run_test = self.system.perform_health_check()
self.assertEqual(run_test, self.data.perform_health_check_response)
def test_delete_health_check(self):
"""Test delete_health_check."""
common.CommonFunctions.delete_resource = MagicMock(
side_effect=self.common.delete_resource)
self.system.delete_health_check(health_check_id=1)
common.CommonFunctions.delete_resource.assert_called_once_with(
category=SYSTEM, resource_level=SYMMETRIX,
resource_level_id=self.conn.array_id, resource_type=HEALTH,
resource_type_id=HEALTH_CHECK, object_type=1)
def test_get_disk_id_list(self):
"""Test get_disk_id_list."""
disk_list = self.system.get_disk_id_list(failed=True)
self.assertEqual(self.data.disk_list, disk_list)
def test_get_disk_details(self):
"""Test get_disk_details."""
disk_info = self.system.get_disk_details(disk_id='1')
self.assertEqual(self.data.disk_info, disk_info)
def test_get_tags(self):
"""Test get_tags."""
common.CommonFunctions.get_resource = MagicMock(
side_effect=self.common.get_resource)
tag_list = self.system.get_tags(
array_id=self.conn.array_id, tag_name='UNIT-TEST',
storage_group_id='TEST-SG', num_of_storage_groups=1,
num_of_arrays=3)
common.CommonFunctions.get_resource.assert_called_once_with(
category=SYSTEM, resource_level=TAG, params={
ARRAY_ID: self.conn.array_id, TAG_NAME: 'UNIT-TEST',
SG_ID: 'TEST-SG', SG_NUM: '1', ARRAY_NUM: '3'})
self.assertEqual(self.data.tag_list, tag_list)
def test_get_tagged_objects(self):
"""Test get_tagged_objects."""
tagged_objects = self.system.get_tagged_objects(tag_name='UNIT-TEST')
self.assertEqual(self.data.tagged_objects, tagged_objects)
def test_get_alert_summary(self):
"""Test get_alert_summary."""
alert_summary = self.system.get_alert_summary()
self.assertEqual(self.data.alert_summary, alert_summary)
def test_get_alert_ids(self):
"""Test get_alert_ids."""
alert_ids = self.system.get_alert_ids(
array='123456789', _type='Server', severity='Warning', state='New',
created_date='1234455', _object='BE', object_type='Director',
acknowledged=True, description='Alert')
self.assertEqual(self.data.alert_list['alertId'], alert_ids)
def test_get_alert_details(self):
"""Test get_alert_details."""
alert_details = self.system.get_alert_details(
alert_id=ALERT_ID)
self.assertEqual(self.data.alert_details, alert_details)
@mock.patch.object(common.CommonFunctions, 'modify_resource')
def test_acknowledge_alert(self, mck_modify):
"""Test acknowledge_alert."""
ref_payload = {'editAlertActionParam': 'ACKNOWLEDGE'}
self.system.acknowledge_alert(alert_id=ALERT_ID)
mck_modify.assert_called_once_with(
category=SYSTEM, resource_level=ALERT,
resource_level_id=ALERT_ID, payload=ref_payload)
@mock.patch.object(common.CommonFunctions, 'delete_resource')
def test_delete_alert(self, mck_delete):
"""Test delete_alert."""
self.system.delete_alert(alert_id=ALERT_ID)
mck_delete.assert_called_once_with(
category=SYSTEM, resource_level=ALERT, resource_level_id=ALERT_ID)
@mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=pf.FakeResponse(200, dict(), content=b'test_binary_data'))
def test_download_settings_success_return_binary(self, mck_dl):
"""Test _download_settings success binary data returned."""
response = self.system._download_settings(request_body=dict(),
return_binary=True)
mck_dl.assert_called_once_with(
category=constants.SYSTEM, resource_level=constants.SETTINGS,
resource_type=constants.EXPORT_FILE, payload=dict())
self.assertTrue(response['success'])
self.assertIn('binary_data', response.keys())
self.assertEqual(b'test_binary_data', response['binary_data'])
@mock.patch('builtins.open', new_callable=mock.mock_open)
@mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=pf.FakeResponse(200, dict(), content=b'test_binary_data'))
def test_download_settings_success_write_file(self, mck_dl, mck_open):
"""Test _download_settings success"""
response = self.system._download_settings(request_body=dict())
mck_dl.assert_called_once_with(
category=constants.SYSTEM, resource_level=constants.SETTINGS,
resource_type=constants.EXPORT_FILE, payload=dict())
mck_open.assert_called_once()
self.assertTrue(response['success'])
self.assertIn(str(Path.cwd()), str(response['settings_path']))
self.assertIn(constants.SETTINGS_FILENAME_TEMPLATE,
str(response['settings_path']))
@mock.patch('builtins.open', new_callable=mock.mock_open)
@mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=pf.FakeResponse(200, dict(), content=b'test_binary_data'))
def test_download_settings_success_write_file_custom_path(self, mck_dl,
mck_open):
"""Test _download_settings success"""
response = self.system._download_settings(
request_body=dict(), file_name='test', dir_path=Path.home())
mck_dl.assert_called_once_with(
category=constants.SYSTEM, resource_level=constants.SETTINGS,
resource_type=constants.EXPORT_FILE, payload=dict())
mck_open.assert_called_once()
self.assertTrue(response['success'])
self.assertIn(str(Path.home()), str(response['settings_path']))
self.assertIn('test.zip', str(response['settings_path']))
@mock.patch.object(common.CommonFunctions, 'download_file',
return_value=None)
def test_download_settings_fail_no_response(self, mck_dl):
"""Test _download_settings fail no response from API."""
response = self.system._download_settings(request_body=dict())
mck_dl.assert_called_once_with(
category=constants.SYSTEM, resource_level=constants.SETTINGS,
resource_type=constants.EXPORT_FILE, payload=dict())
self.assertEqual({'success': False}, response)
@mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=pf.FakeResponse(200, dict(), content=b'test_binary_data'))
def test_download_settings_dir_path_exception(self, mck_dl):
"""Test _download_settings directory doesn't exist exception."""
self.assertRaises(
exception.InvalidInputException,
self.system._download_settings,
request_body=dict(), dir_path='fake')
mck_dl.assert_called_once_with(
category=constants.SYSTEM, resource_level=constants.SETTINGS,
resource_type=constants.EXPORT_FILE, payload=dict())
def test_download_all_settings(self):
"""Test download_all_settings."""
ref_request_body = {
constants.FILE_PASSWORD: 'test_password',
constants.SRC_ARRAY: self.data.remote_array}
with mock.patch.object(self.system, '_download_settings') as mck_dl:
self.system.download_all_settings(
file_password='test_password', dir_path='/test/file/path',
file_name='test_filename', array_id=self.data.remote_array,
return_binary=True)
mck_dl.assert_called_once_with(
request_body=ref_request_body, dir_path='/test/file/path',
file_name='test_filename', return_binary=True)
def test_download_unisphere_settings_1_2_params(self):
"""Test download_unisphere_settings success 1 & 2 output params."""
ref_request_body = {
constants.FILE_PASSWORD: 'test_password',
constants.SRC_ARRAY: self.conn.array_id,
constants.EXCLUDE_SYS_SETTINGS: [constants.ALL_SETTINGS],
constants.EXCLUDE_UNI_SETTINGS: [constants.UNI_ALERT_SETTINGS,
constants.UNI_PERF_PREF_SETTINGS]}
with mock.patch.object(self.system, '_download_settings') as mck_dl:
self.system.download_unisphere_settings(
file_password='test_password', dir_path='/test/file/path',
file_name='test_filename', return_binary=True,
exclude_alert_notification_settings=True,
exclude_performance_preference_settings=True)
mck_dl.assert_called_once_with(
request_body=ref_request_body, dir_path='/test/file/path',
file_name='test_filename', return_binary=True)
def test_download_unisphere_settings_3_4_params(self):
"""Test download_unisphere_settings success 3 & 4 output params."""
ref_request_body = {
constants.FILE_PASSWORD: 'test_password',
constants.SRC_ARRAY: self.conn.array_id,
constants.EXCLUDE_SYS_SETTINGS: [constants.ALL_SETTINGS],
constants.EXCLUDE_UNI_SETTINGS: [
constants.UNI_PERF_USER_SETTINGS,
constants.UNI_PERF_METRIC_SETTINGS]}
with mock.patch.object(self.system, '_download_settings') as mck_dl:
self.system.download_unisphere_settings(
file_password='test_password', dir_path='/test/file/path',
file_name='test_filename', return_binary=True,
exclude_performance_user_templates=True,
exclude_performance_metric_settings=True)
mck_dl.assert_called_once_with(
request_body=ref_request_body, dir_path='/test/file/path',
file_name='test_filename', return_binary=True)
def test_download_unisphere_settings_all_excluded_exception(self):
"""Test download_unisphere_settings all settings excluded exception."""
self.assertRaises(
exception.InvalidInputException,
self.system.download_unisphere_settings,
file_password='test', exclude_alert_notification_settings=True,
exclude_performance_preference_settings=True,
exclude_performance_user_templates=True,
exclude_performance_metric_settings=True)
def test_download_system_settings_1_2_params(self):
"""Test download_system_settings success 1 & 2 output params."""
ref_request_body = {
constants.FILE_PASSWORD: 'test_password',
constants.SRC_ARRAY: self.data.remote_array,
constants.EXCLUDE_SYS_SETTINGS: [
constants.SYS_ALERT_SETTINGS,
constants.SYS_ALERT_NOTIFI_SETTINGS],
constants.EXCLUDE_UNI_SETTINGS: [constants.ALL_SETTINGS]}
with mock.patch.object(self.system, '_download_settings') as mck_dl:
self.system.download_system_settings(
file_password='test_password', dir_path='/test/file/path',
file_name='test_filename', array_id=self.data.remote_array,
return_binary=True, exclude_alert_policy_settings=True,
alert_level_notification_settings=True)
mck_dl.assert_called_once_with(
request_body=ref_request_body, dir_path='/test/file/path',
file_name='test_filename', return_binary=True)
def test_download_system_settings_3_4_params(self):
"""Test download_system_settings success 3 & 4 output params."""
ref_request_body = {
constants.FILE_PASSWORD: 'test_password',
constants.SRC_ARRAY: self.data.remote_array,
constants.EXCLUDE_SYS_SETTINGS: [
constants.SYS_THRESH_SETTINGS,
constants.SYS_PERF_THRESH_SETTINGS],
constants.EXCLUDE_UNI_SETTINGS: [constants.ALL_SETTINGS]}
with mock.patch.object(self.system, '_download_settings') as mck_dl:
self.system.download_system_settings(
file_password='test_password', dir_path='/test/file/path',
file_name='test_filename', array_id=self.data.remote_array,
return_binary=True, exclude_system_threshold_settings=True,
exclude_performance_threshold_settings=True)
mck_dl.assert_called_once_with(
request_body=ref_request_body, dir_path='/test/file/path',
file_name='test_filename', return_binary=True)
def test_download_system_settings_all_excluded_exception(self):
"""Test download_system_settings all settings excluded exception."""
self.assertRaises(
exception.InvalidInputException,
self.system.download_system_settings,
file_password='test', exclude_alert_policy_settings=True,
alert_level_notification_settings=True,
exclude_system_threshold_settings=True,
exclude_performance_threshold_settings=True)
@mock.patch.object(common.CommonFunctions, 'upload_file')
@mock.patch('builtins.open', return_value=__file__)
def test_upload_settings(self, mck_open, mck_up):
"""Test upload_settings success."""
ref_form_data = {
constants.ZIP_FILE: __file__,
constants.TGT_ARRAYS: self.data.remote_array,
constants.FILE_PASSWORD: 'test_password'}
self.system.upload_settings(file_password='test_password',
file_path=__file__,
array_id=self.data.remote_array)
mck_up.assert_called_once_with(
category=constants.SYSTEM,
resource_level=constants.SETTINGS,
resource_type=constants.IMPORT_FILE,
form_data=ref_form_data)
@mock.patch.object(common.CommonFunctions, 'upload_file')
def test_upload_settings_binary_data(self, mck_up):
"""Test upload_settings binary data success."""
ref_form_data = {
constants.ZIP_FILE: b'test_binary_data',
constants.TGT_ARRAYS: self.conn.array_id,
constants.FILE_PASSWORD: 'test_password'}
self.system.upload_settings(file_password='test_password',
binary_data=b'test_binary_data')
mck_up.assert_called_once_with(
category=constants.SYSTEM,
resource_level=constants.SETTINGS,
resource_type=constants.IMPORT_FILE,
form_data=ref_form_data)
def test_upload_settings_path_exception(self):
"""Test upload_settings path doesn't exist exception."""
self.assertRaises(
exception.InvalidInputException,
self.system.upload_settings,
file_password='test', file_path='/fake')
def test_upload_settings_invalid_data_type(self):
"""Test upload_settings invalid data type"""
self.assertRaises(
exception.InvalidInputException,
self.system.upload_settings,
file_password='test', binary_data='/fake')
def test_get_audit_log_list(self):
"""Test get_audit_log_list success."""
end = int(time.time())
# Set start time 24hrs + 1 second to trigger LOG warning
start = end - (60 * 60 * 24) - 1
response = self.system.get_audit_log_list(
start_time=start, end_time=end, array_id='test', user_name='test',
host_name='test', client_host='test', message='test',
record_id='test', activity_id='test', application_id='test',
application_version='test', task_id='test', process_id='test',
vendor_id='test', os_type='test', os_revision='test',
api_library='test', api_version='test', audit_class='test',
action_code='test', function_class='test')
self.assertTrue(response)
self.assertIsInstance(response, list)
@mock.patch.object(common.CommonFunctions, 'get_request',
return_value={'count': 0})
def test_get_audit_log_list_no_content(self, mck_get):
"""Test upload_settings binary data success."""
end = int(time.time())
start = end
response = self.system.get_audit_log_list(
start_time=start, end_time=end)
self.assertFalse(response)
self.assertIsInstance(response, list)
def test_get_audit_log_record(self):
"""Test get_audit_log_record."""
response = self.system.get_audit_log_record(record_id='test')
self.assertTrue(response)
self.assertIsInstance(response, dict)
@mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=pf.FakeResponse(200, dict(), content=b'test_binary_data'))
def test_download_audit_log_record_return_binary(self, mck_dl):
"""Test download_audit_log_record return binary."""
ref_req_body = {AUDIT_LOG_FILENAME: 'test'}
response = self.system.download_audit_log_record(
file_name='test', return_binary=True)
mck_dl.assert_called_once_with(
category=SYSTEM, resource_level=SYMMETRIX,
resource_level_id=self.system.array_id,
resource_type=AUDIT_LOG_RECORD, resource=EXPORT_FILE,
payload=ref_req_body)
self.assertTrue(response[SUCCESS])
self.assertIn(BINARY_DATA, response.keys())
self.assertEqual(b'test_binary_data', response[BINARY_DATA])
@mock.patch.object(file_handler, 'write_binary_data_to_file',
return_value='/test/test.pdf')
def test_download_audit_log_record_write_file(self, mck_write):
"""Test download_audit_log_record write to file."""
ref_response = pf.FakeResponse(200, dict(),
content=b'test_binary_data')
with mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=ref_response) as mck_dl:
ref_req_body = {AUDIT_LOG_FILENAME: 'test'}
response = self.system.download_audit_log_record(
file_name='test', dir_path='test')
mck_dl.assert_called_once_with(
category=SYSTEM, resource_level=SYMMETRIX,
resource_level_id=self.system.array_id,
resource_type=AUDIT_LOG_RECORD, resource=EXPORT_FILE,
payload=ref_req_body)
mck_write.assert_called_once_with(
data=ref_response, file_extension=constants.PDF_SUFFIX,
file_name='test', dir_path='test')
self.assertTrue(response[SUCCESS])
self.assertIn('/test/test.pdf', str(response[AUDIT_RECORD_PATH]))
@mock.patch.object(file_handler, 'write_binary_data_to_file',
return_value='/test/test.pdf')
def test_download_audit_log_record_write_file_no_name(self, mck_write):
"""Test download_audit_log_record no file name provided."""
ref_response = pf.FakeResponse(200, dict(),
content=b'test_binary_data')
with mock.patch.object(
common.CommonFunctions, 'download_file',
return_value=ref_response) as mck_dl:
response = self.system.download_audit_log_record()
mck_dl.assert_called_once()
mck_write.assert_called_once()
self.assertTrue(response[SUCCESS])
self.assertIn('/test/test.pdf', str(response[AUDIT_RECORD_PATH]))
def test_get_director_list(self):
"""Test get_director_list."""
array_id = self.data.array
dir_list = self.system.get_director_list(array_id=array_id)
self.assertTrue(dir_list)
self.assertIsInstance(dir_list, list)
self.assertEqual([self.data.director_id1, self.data.director_id2],
dir_list)
def test_get_iscsi_director_list(self):
"""Test get_director_list iscsi_only set to True."""
array_id = self.data.array
iscsi_dir_list = self.system.get_director_list(
array_id=array_id, iscsi_only=True)
self.assertTrue(iscsi_dir_list)
self.assertIsInstance(iscsi_dir_list, list)
self.assertEqual([self.data.director_id2], iscsi_dir_list)
def test_get_director_port_list(self):
"""Test get_director_port_list."""
director_id = self.data.director_id1
dir_port_list = self.system.get_director_port_list(
director_id=director_id, iscsi_target=False)
self.assertTrue(dir_port_list)
self.assertIsInstance(dir_port_list, list)
self.assertEqual(self.data.port_key_list.get('symmetrixPortKey'),
dir_port_list)
def test_get_ip_interface_list(self):
"""Test get_ip_interface_list"""
director_id = self.data.director_id2
port_id = 0
ip_int_list = self.system.get_ip_interface_list(
director_id=director_id, port_id=port_id)
self.assertTrue(ip_int_list)
self.assertIsInstance(ip_int_list, list)
self.assertEqual(self.data.ip_interface_list.get('ipInterfaceId'),
ip_int_list)
def test_get_ip_interface(self):
"""Test get_ip_interface."""
director_id = self.data.director_id2
port_id = 0
interface_id = self.data.ip_interface_address_network
ip_int_info = self.system.get_ip_interface(
director_id=director_id, port_id=port_id,
interface_id=interface_id)
self.assertTrue(ip_int_info)
self.assertIsInstance(ip_int_info, dict)
self.assertEqual(self.data.ip_interface_details, ip_int_info)
@mock.patch.object(common.CommonFunctions, 'modify_resource')
def test_change_local_user_password(self, mck_modify):
"""Test change_local_user_password."""
self.system.change_local_user_password(
username='testchange', current_password='oldpass',
new_password='newpassword')
payload = {
'username': 'testchange',
'action': "SetPassword",
'set_password': {
'current_password': 'oldpass',
'new_password': 'newpassword'
}
}
mck_modify.assert_called_once_with(
category=SYSTEM, resource_level=LOCAL_USER, payload=payload)
|
|
#!/usr/bin/env python
#
# This library is free software, distributed under the terms of
# the GNU Lesser General Public License Version 3, or any later version.
# See the COPYING file included in this archive
#
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
import hashlib
from twisted.internet import defer
import kademlia.node
from kademlia.node import rpcmethod
class EntangledNode(kademlia.node.Node):
""" Entangled DHT node
This is basically a Kademlia node, but with a few more (non-standard, but
useful) RPCs defined.
"""
def __init__(self, id=None, udpPort=4000, dataStore=None, routingTable=None, networkProtocol=None):
kademlia.node.Node.__init__(self, id, udpPort, dataStore, routingTable, networkProtocol)
self.invalidKeywords = []
self.keywordSplitters = ['_', '.', '/']
def searchForKeywords(self, keywords):
""" The Entangled search operation (keyword-based)
Call this to find keys in the DHT which contain the specified
keyword(s).
"""
if type(keywords) == str:
for splitter in self.keywordSplitters:
keywords = keywords.replace(splitter, ' ')
keywords = keywords.lower().split()
keyword = None
for testWord in keywords:
if testWord not in self.invalidKeywords:
keyword = testWord
break
if keyword is None:
df = defer.Deferred()
df.callback([])
return df
keywords.remove(keyword)
h = hashlib.sha1()
h.update(keyword)
key = h.digest()
def checkResult(result):
if type(result) == dict:
# Value was found; this should be a list of "real names" (not keys, in this implementation)
index = result[key]
filteredResults = list(index)
# We found values containing our first keyword; Now filter for the rest
for name in index:
for kw in keywords:
if name.lower().find(kw) == -1:
filteredResults.remove(name)
index = filteredResults
else:
# Value wasn't found
index = []
return index
df = self.iterativeFindValue(key)
df.addCallback(checkResult)
return df
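# Illustrative usage (a sketch; assumes a joined node, a running Twisted
# reactor, and a caller-defined 'printResults' callback):
#
#   df = node.searchForKeywords('my_file.txt')
#   df.addCallback(printResults)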
def publishData(self, name, data):
""" The Entangled high-level data publishing operation
Call this to store data in the Entangled DHT.
@note: This will automatically create a hash of the specified C{name}
parameter, and add the published data to the appropriate inverted
indexes, to enable keyword-based searching. If this behaviour is not
wanted/needed, rather call the Kademlia base node's
C{iterativeStore()} method directly.
"""
h = hashlib.sha1()
h.update(name)
mainKey = h.digest()
outerDf = defer.Deferred()
def publishKeywords(deferredResult=None):
# Create hashes for the keywords in the name
keywordKeys = self._keywordHashesFromString(name)
# Update the appropriate inverted indexes
df = self._addToInvertedIndexes(keywordKeys, name)
df.addCallback(lambda _: outerDf.callback(None))
# Store the main key, with its value...
df = self.iterativeStore(mainKey, data)
df.addCallback(publishKeywords)
return outerDf
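# Illustrative usage (a sketch; 'fileContents' stands for arbitrary caller
# data to be stored under the hashed name):
#
#   df = node.publishData('my_file.txt', fileContents)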
def _addToInvertedIndexes(self, keywordKeys, indexLink):
# Prepare a deferred result for this operation
outerDf = defer.Deferred()
kwIndex = [-1] # using a list for this counter because Python doesn't allow binding a new value to a name in an enclosing (non-global) scope
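# (In Python 3, a 'nonlocal' declaration on a plain integer counter would be
# a simpler way to express the same thing; kept as a list for Python 2 here.)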
# ...and now update the inverted indexes (or add them, if they don't exist yet)
def addToInvertedIndex(results):
kwKey = keywordKeys[kwIndex[0]]
if type(results) == dict:
# An index already exists; add our value to it
index = results[kwKey]
#TODO: this might not actually be an index, but a value... do some name-mangling to avoid this
index.append(indexLink)
else:
# An index does not yet exist for this keyword; create one
index = [indexLink]
df = self.iterativeStore(kwKey, index)
df.addCallback(storeNextKeyword)
def storeNextKeyword(results=None):
kwIndex[0] += 1
if kwIndex[0] < len(keywordKeys):
kwKey = keywordKeys[kwIndex[0]]
# We use the find algorithm directly so that kademlia does not replicate the un-updated inverted index
if kwKey in self._dataStore:
df = defer.Deferred()
df.callback({kwKey: self._dataStore[kwKey]})
else:
df = self._iterativeFind(kwKey, rpc='findValue')
df.addCallback(addToInvertedIndex)
else:
# We're done. Let the caller of the parent method know
outerDf.callback(None)
if len(keywordKeys) > 0:
# Start the "keyword store"-cycle
storeNextKeyword()
else:
outerDf.callback(None)
return outerDf
def removeData(self, name):
""" The Entangled high-level data removal (delete) operation
Call this to remove data from the Entangled DHT.
@note: This will automatically create a hash of the specified C{name}
parameter. It will also remove the published data from the appropriate
inverted indexes, so as to maintain reliability of keyword-based
searching. If this behaviour is not wanted/needed, rather call this
node's C{iterativeDelete()} method directly.
"""
h = hashlib.sha1()
h.update(name)
mainKey = h.digest()
# Remove the main key
self.iterativeDelete(mainKey)
# Create hashes for the keywords in the name
keywordKeys = self._keywordHashesFromString(name)
# Update the appropriate inverted indexes
df = self._removeFromInvertedIndexes(keywordKeys, name)
return df
def _removeFromInvertedIndexes(self, keywordKeys, indexLink):
# Prepare a deferred result for this operation
outerDf = defer.Deferred()
kwIndex = [-1] # using a list for this counter because Python doesn't allow binding a new value to a name in an enclosing (non-global) scope
# ...and now update the inverted indexes (or ignore them, if they don't exist yet)
def removeFromInvertedIndex(results):
kwKey = keywordKeys[kwIndex[0]]
if type(results) == dict:
# An index for this keyword exists; remove our value from it
index = results[kwKey]
#TODO: this might not actually be an index, but a value... do some name-mangling to avoid this
try:
index.remove(indexLink)
except ValueError:
df = defer.Deferred()
df.callback(None)
else:
# Remove the index completely if it is empty, otherwise put it back
if len(index) > 0:
df = self.iterativeStore(kwKey, index)
else:
df = self.iterativeDelete(kwKey)
df.addCallback(findNextKeyword)
else:
# No index exists for this keyword; skip it
findNextKeyword()
def findNextKeyword(results=None):
kwIndex[0] += 1
if kwIndex[0] < len(keywordKeys):
kwKey = keywordKeys[kwIndex[0]]
# We use the find algorithm directly so that kademlia does not replicate the un-updated inverted index
if kwKey in self._dataStore:
df = defer.Deferred()
df.callback({kwKey: self._dataStore[kwKey]})
else:
df = self._iterativeFind(kwKey, rpc='findValue')
df.addCallback(removeFromInvertedIndex)
else:
# We're done. Let the caller of the parent method know
outerDf.callback(None)
if len(keywordKeys) > 0:
# Start the "keyword store"-cycle
findNextKeyword()
return outerDf
def iterativeDelete(self, key):
""" The Entangled delete operation
Call this to remove data from the DHT.
The Entangled delete operation uses the basic Kademlia node lookup
algorithm (same as Kademlia's search/retrieve). The algorithm behaves
the same as when issuing the FIND_NODE RPC - the only difference is
that the DELETE RPC (defined in C{delete()}) is used instead of
FIND_NODE.
@param key: The hashtable key of the data
@type key: str
"""
# Delete our own copy of the data
if key in self._dataStore:
del self._dataStore[key]
df = self._iterativeFind(key, rpc='delete')
return df
@rpcmethod
def delete(self, key, **kwargs):
""" Deletes the the specified key (and it's value) if present in
this node's data, and executes FIND_NODE for the key
@param key: The hashtable key of the data to delete
@type key: str
@return: A list of contact triples closest to the specified key.
This method will return C{k} (or C{count}, if specified)
contacts if at all possible; it will only return fewer if the
node is returning all of the contacts that it knows of.
@rtype: list
"""
# Delete our own copy of the data (if we have one)...
if key in self._dataStore:
del self._dataStore[key]
# ...and make this RPC propagate through the network (like a FIND_VALUE for a non-existent value)
return self.findNode(key, **kwargs)
def _keywordHashesFromString(self, text):
""" Create hash keys for the keywords contained in the specified text string """
keywordKeys = []
splitText = text.lower()
for splitter in self.keywordSplitters:
splitText = splitText.replace(splitter, ' ')
for keyword in splitText.split():
# Only consider keywords with 3 or more letters
if len(keyword) >= 3 and keyword != text and keyword not in self.invalidKeywords:
h = hashlib.sha1()
h.update(keyword)
key = h.digest()
keywordKeys.append(key)
return keywordKeys
if __name__ == '__main__':
import twisted.internet.reactor
from kademlia.datastore import SQLiteDataStore
import sys, os
if len(sys.argv) < 2:
print 'Usage:\n%s UDP_PORT [KNOWN_NODE_IP KNOWN_NODE_PORT]' % sys.argv[0]
print 'or:\n%s UDP_PORT [FILE_WITH_KNOWN_NODES]' % sys.argv[0]
print '\nIf a file is specified, it should contain one IP address and UDP port\nper line, separated by a space.'
sys.exit(1)
try:
int(sys.argv[1])
except ValueError:
print '\nUDP_PORT must be an integer value.\n'
print 'Usage:\n%s UDP_PORT [KNOWN_NODE_IP KNOWN_NODE_PORT]' % sys.argv[0]
print 'or:\n%s UDP_PORT [FILE_WITH_KNOWN_NODES]' % sys.argv[0]
print '\nIf a file is specified, it should contain one IP address and UDP port\nper line, separated by a space.'
sys.exit(1)
if len(sys.argv) == 4:
knownNodes = [(sys.argv[2], int(sys.argv[3]))]
elif len(sys.argv) == 3:
knownNodes = []
f = open(sys.argv[2], 'r')
lines = f.readlines()
f.close()
for line in lines:
ipAddress, udpPort = line.split()
knownNodes.append((ipAddress, int(udpPort)))
else:
knownNodes = None
if os.path.isfile('/tmp/dbFile%s.db' % sys.argv[1]):
os.remove('/tmp/dbFile%s.db' % sys.argv[1])
dataStore = SQLiteDataStore(dbFile = '/tmp/dbFile%s.db' % sys.argv[1])
node = EntangledNode( udpPort=int(sys.argv[1]), dataStore=dataStore )
#node = EntangledNode( udpPort=int(sys.argv[1]) )
node.joinNetwork(knownNodes)
twisted.internet.reactor.run()
|
|
# (C) Copyright 2021 Inova Development Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Click definitions for the pywbemlistener commands.
NOTE: Commands are ordered in help display by their order in this file.
"""
from __future__ import absolute_import, print_function
import sys
import os
import io
import subprocess
import signal
import atexit
import threading
import argparse
import importlib
from time import sleep
from datetime import datetime
import click
import psutil
import six
from pywbem import WBEMListener, ListenerError, CIMInstance, CIMProperty, \
Uint16, WBEMConnection, Error
from .._click_extensions import PywbemtoolsCommand, CMD_OPTS_TXT
from .._options import add_options, help_option, validate_required_arg
from .._output_formatting import format_table
from . import _config
from .pywbemlistener import cli
# Signals used for having the 'run' command signal startup completion
# back to its parent 'start' process.
# The default handlers for these signals are replaced.
# The success signal can be any signal that can be handled.
# In order to handle keyboard interrupts during password prompt correctly,
# the failure signal must be SIGINT.
try:
# Unix/Linux/macOS
# pylint: disable=no-member
SIGNAL_RUN_STARTUP_SUCCESS = signal.SIGUSR1
except AttributeError:
# native Windows
# pylint: disable=no-member
SIGNAL_RUN_STARTUP_SUCCESS = signal.SIGBREAK
SIGNAL_RUN_STARTUP_FAILURE = signal.SIGINT
# Status and condition used to communicate the startup completion status of the
# 'run' process between the signal handlers and other functions in the
# 'start' process.
RUN_STARTUP_STATUS = None
RUN_STARTUP_COND = threading.Condition()
# Timeout in seconds for the 'run' command starting up. This timeout
# also ends a possible prompt for the password of the private key file.
RUN_STARTUP_TIMEOUT = 60
DEFAULT_LISTENER_PORT = 25989
DEFAULT_LISTENER_SCHEME = 'https'
DEFAULT_INDI_FORMAT = '{dt} {h} {i_mof}'
LISTEN_OPTIONS = [
click.option('-p', '--port', type=int, metavar='PORT',
required=False, default=DEFAULT_LISTENER_PORT,
help=u'The port number the listener will open to receive '
'indications. This can be any available port. '
'Default: {}'.format(DEFAULT_LISTENER_PORT)),
click.option('-s', '--scheme', type=click.Choice(['http', 'https']),
metavar='SCHEME',
required=False, default=DEFAULT_LISTENER_SCHEME,
help=u'The scheme used by the listener (http, https). '
'Default: {}'.format(DEFAULT_LISTENER_SCHEME)),
click.option('-c', '--certfile', type=str, metavar='FILE',
required=False, default=None,
envvar=_config.PYWBEMLISTENER_CERTFILE_ENVVAR,
help=u'Path name of a PEM file containing the certificate '
'that will be presented as a server certificate during '
'SSL/TLS handshake. Required when using https. '
'The file may in addition contain the private key of the '
'certificate. '
'Default: EnvVar {ev}, or no certificate file.'.
format(ev=_config.PYWBEMLISTENER_CERTFILE_ENVVAR)),
click.option('-k', '--keyfile', type=str, metavar='FILE',
required=False, default=None,
envvar=_config.PYWBEMLISTENER_KEYFILE_ENVVAR,
help=u'Path name of a PEM file containing the private key '
'of the server certificate. '
'Required when using https and when the certificate file '
'does not contain the private key. '
'Default: EnvVar {ev}, or no key file.'.
format(ev=_config.PYWBEMLISTENER_KEYFILE_ENVVAR)),
click.option('--indi-call', type=str, metavar='MODULE.FUNCTION',
required=False, default=None,
help=u'Call a Python function for each received indication. '
'Invoke with --help-call for details on the function '
'interface. '
'Default: No function is called.'),
click.option('--indi-file', type=str, metavar='FILE',
required=False, default=None,
help=u'Append received indications to a file. '
'The format can be modified using the --indi-format option. '
'Default: Not appended.'),
click.option('--indi-format', type=str, metavar='FORMAT',
required=False, default=DEFAULT_INDI_FORMAT,
help=u'Sets the format to be used when displaying received '
'indications. '
'Invoke with --help-format for details on the format '
'specification. '
'Default: "{dif}".'.format(dif=DEFAULT_INDI_FORMAT)),
click.option('--help-format', is_flag=True,
required=False, default=False, is_eager=True,
help=u'Show help message for the format specification used '
'with the --indi-format option and exit.'),
click.option('--help-call', is_flag=True,
required=False, default=False, is_eager=True,
help=u'Show help message for calling a Python function for '
'each received indication when using the --indi-call option '
'and exit.'),
]
def print_out(line):
"""
Print a line to stdout, and flush stdout.
"""
print(line)
sys.stdout.flush()
class ListenerProperties(object):
"""
The properties of a running named listener.
"""
def __init__(self, name, port, scheme, certfile, keyfile,
indi_call, indi_file, indi_format,
logfile, pid, start_pid, created):
self._name = name
self._port = port
self._scheme = scheme
self._certfile = certfile
self._keyfile = keyfile
self._indi_call = indi_call
self._indi_file = indi_file
self._indi_format = indi_format
self._logfile = logfile
self._pid = pid
self._start_pid = start_pid
self._created = created
def show_row(self):
"""Return a tuple of the properties for 'show' command"""
return (
self.name,
str(self.port),
self.scheme,
self.certfile,
self.keyfile,
self.indi_call,
self.indi_file,
self.logfile,
str(self.pid),
str(self.start_pid),
self.created.strftime("%Y-%m-%d %H:%M:%S"),
)
@staticmethod
def show_headers():
"""Return a tuple of the header labels for 'show' command"""
return (
'Name',
'Port',
'Scheme',
'Certificate file',
'Key file',
'Indication call',
'Indication file',
'Log file',
'PID',
'Start PID',
'Created',
)
def list_row(self):
"""Return a tuple of the properties for 'list' command"""
return (
self.name,
str(self.port),
self.scheme,
str(self.pid),
self.created.strftime("%Y-%m-%d %H:%M:%S"),
)
@staticmethod
def list_headers():
"""Return a tuple of the header labels for 'list' command"""
return (
'Name',
'Port',
'Scheme',
'PID',
'Created',
)
@property
def name(self):
"""string: Name of the listener"""
return self._name
@property
def port(self):
"""int: Port number of the listener"""
return self._port
@property
def scheme(self):
"""string: Scheme of the listener"""
return self._scheme
@property
def certfile(self):
"""string: Path name of certificate file of the listener"""
return self._certfile
@property
def keyfile(self):
"""string: Path name of key file of the listener"""
return self._keyfile
@property
def indi_call(self):
"""string: Call function MODULE.FUNCTION for each received indication"""
return self._indi_call
@property
def indi_file(self):
"""string: Append each received indication to a file in a format"""
return self._indi_file
@property
def indi_format(self):
"""string: Set format of indication"""
return self._indi_format
@property
def logfile(self):
"""string: Path name of log file"""
return self._logfile
@property
def pid(self):
"""int: Process ID of the listener"""
return self._pid
@property
def start_pid(self):
"""int: Process ID of the start command of the listener"""
return self._start_pid
@property
def created(self):
"""datetime: Point in time when the listener process was created"""
return self._created
@cli.command('run', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@click.argument('name', type=str, metavar='NAME', required=False)
@click.option('--start-pid', type=str, metavar='PID',
required=False, default=None,
help=u'PID of the "pywbemlistener start" process to be '
'notified about the startup of the run command. '
'Default: No such notification will happen.')
@add_options(LISTEN_OPTIONS)
@add_options(help_option)
@click.pass_obj
def listener_run(context, name, **options):
"""
Run as a named WBEM indication listener.
Run this command as a named WBEM indication listener until it gets
terminated, e.g. by a keyboard interrupt, break signal (e.g. kill), or the
`pywbemlistener stop` command.
A listener with that name must not be running, otherwise the command fails.
Note: The `pywbemlistener start` command should be used to start listeners,
and it starts a `pywbemlistener run` command as a background process.
Use the `pywbemlistener run` command only when you need to have control
over how exactly the process runs in the background.
Note: The --start-pid option is needed because on Windows, the
`pywbemlistener run` command is not the direct child process of the
`pywbemlistener start` command starting it.
Examples:
pywbemlistener run lis1
"""
if show_help_options(options):
return
validate_required_arg(name, 'NAME')
context.execute_cmd(lambda: cmd_listener_run(context, name, options))
@cli.command('start', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@click.argument('name', type=str, metavar='NAME', required=False)
@add_options(LISTEN_OPTIONS)
@add_options(help_option)
@click.pass_obj
def listener_start(context, name, **options):
"""
Start a named WBEM indication listener in the background.
A listener with that name must not be running, otherwise the command fails.
A listener is identified by its hostname or IP address and a port number.
It can be started with any free port.
Examples:
pywbemlistener start lis1
"""
if show_help_options(options):
return
validate_required_arg(name, 'NAME')
context.execute_cmd(lambda: cmd_listener_start(context, name, options))
@cli.command('stop', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@click.argument('name', type=str, metavar='NAME', required=True)
@add_options(help_option)
@click.pass_obj
def listener_stop(context, name):
"""
Stop a named WBEM indication listener.
The listener will shut down gracefully.
A listener with that name must be running, otherwise the command fails.
Examples:
pywbemlistener stop lis1
"""
context.execute_cmd(lambda: cmd_listener_stop(context, name))
@cli.command('show', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@click.argument('name', type=str, metavar='NAME', required=True)
@add_options(help_option)
@click.pass_obj
def listener_show(context, name):
"""
Show a named WBEM indication listener.
A listener with that name must be running, otherwise the command fails.
Examples:
pywbemlistener show lis1
"""
context.execute_cmd(lambda: cmd_listener_show(context, name))
@cli.command('list', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@add_options(help_option)
@click.pass_obj
def listener_list(context):
"""
List the currently running named WBEM indication listeners.
This is done by listing the currently running `pywbemlistener run`
commands.
"""
context.execute_cmd(lambda: cmd_listener_list(context))
@cli.command('test', cls=PywbemtoolsCommand, options_metavar=CMD_OPTS_TXT)
@click.argument('name', type=str, metavar='NAME', required=False)
@click.option('-c', '--count', type=int, metavar='INT',
required=False, default=1,
help=u'Count of test indications to send. '
'Default: 1')
@add_options(help_option)
@click.pass_obj
def listener_test(context, name, **options):
"""
Send a test indication to a named WBEM indication listener.
The indication is an alert indication with fixed properties. This allows
testing the listener and what it does with the indication.
Examples:
pywbemlistener test lis1
"""
validate_required_arg(name, 'NAME')
context.execute_cmd(lambda: cmd_listener_test(context, name, options))
################################################################
#
# Common methods for the action functions of the listener click group
#
###############################################################
def get_logfile(logdir, name):
"""
Return path name of run log file, or None if no log directory is specified.
Parameters:
logdir (string): Path name of the log directory, or None.
name (string): Listener name.
"""
if logdir is None:
return None
return os.path.join(logdir, 'pywbemlistener_{}.log'.format(name))
def get_listeners(name=None):
"""
List the running listener processes, or the running listener process(es)
with the specified name.
Note that in case of the 'run' command, it is possible that this
function is called with a name and finds two listener processes with that
name: a previously started one, and the one that is about to run now.
Both will be returned so this situation can be handled by the caller.
Returns:
list of ListenerProperties
"""
if sys.platform == 'win32':
cmdname = 'pywbemlistener-script.py'
else:
cmdname = 'pywbemlistener'
ret = []
for p in psutil.process_iter():
try:
cmdline = p.cmdline()
except (psutil.AccessDenied, psutil.ZombieProcess,
psutil.NoSuchProcess):
# Ignore processes we cannot access or that ended meanwhile
continue
for i, item in enumerate(cmdline):
if item.endswith(cmdname):
listener_index = i
break
else:
# Ignore processes that are not 'pywbemlistener'
continue
listener_args = cmdline[listener_index + 1:] # After 'pywbemlistener'
args = parse_listener_args(listener_args)
if args:
if name is None or args.name == name:
# pylint: disable=no-member
# Note: This is a workaround for Pylint raising no-member on
# Python 3.9 (see issue #1001)
logfile = get_logfile(args.logdir, args.name)
lis = ListenerProperties(
name=args.name, port=args.port, scheme=args.scheme,
certfile=args.certfile, keyfile=args.keyfile,
indi_call=args.indi_call, indi_file=args.indi_file,
indi_format=args.indi_format,
logfile=logfile, pid=p.pid, start_pid=args.start_pid,
created=datetime.fromtimestamp(p.create_time()))
# pylint: enable=no-member
# Note: End of workaround
ret.append(lis)
return ret
def prepare_startup_completion():
"""
In the 'start' command, prepare for a later use of
wait_startup_completion() by setting up the necessary signal handlers.
"""
signal.signal(SIGNAL_RUN_STARTUP_SUCCESS, success_signal_handler)
signal.signal(SIGNAL_RUN_STARTUP_FAILURE, failure_signal_handler)
def success_signal_handler(sig, frame):
# pylint: disable=unused-argument
"""
Signal handler in 'start' process for the signal indicating
success of startup completion of the 'run' child process.
"""
# pylint: disable=global-statement,global-variable-not-assigned
global RUN_STARTUP_STATUS, RUN_STARTUP_COND
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Handling success signal ({}) from run "
"process".format(sig))
RUN_STARTUP_STATUS = 'success'
with RUN_STARTUP_COND:
RUN_STARTUP_COND.notify()
def failure_signal_handler(sig, frame):
# pylint: disable=unused-argument
"""
Signal handler in 'start' process for the signal indicating
failure of startup completion of the 'run' child process.
"""
# pylint: disable=global-statement,global-variable-not-assigned
global RUN_STARTUP_STATUS, RUN_STARTUP_COND
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Handling failure signal ({}) from run "
"process".format(sig))
RUN_STARTUP_STATUS = 'failure'
with RUN_STARTUP_COND:
RUN_STARTUP_COND.notify()
def wait_startup_completion(child_pid):
"""
In the 'start' command, wait for the 'run' child process
to either successfully complete its startup or to fail its startup.
Returns:
int: Return code indicating whether the child started up successfully (0)
or failed its startup (1).
"""
# pylint: disable=global-statement,global-variable-not-assigned
global RUN_STARTUP_STATUS, RUN_STARTUP_COND
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Waiting for run process {} to complete "
"startup".format(child_pid))
RUN_STARTUP_STATUS = 'failure'
with RUN_STARTUP_COND:
rc = RUN_STARTUP_COND.wait(RUN_STARTUP_TIMEOUT)
# Before Python 3.2, wait() always returns None. Since 3.2, it returns
# a boolean indicating whether the timeout expired (False) or the
# condition was triggered (True).
if rc is None or rc is True:
status = RUN_STARTUP_STATUS
else:
# Only since Python 3.2
status = 'timeout'
if status == 'success':
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Startup of run process {} succeeded".
format(child_pid))
return 0
if status == 'timeout':
click.echo("Timeout waiting for signal handler in start process to "
"trigger wait condition")
# The 'run' child process may still be running, or already a
# zombie, or no longer exist. If it still is running, the likely cause is
# that it was in a password prompt for the keyfile password that was not
# entered.
sleep(0.5) # Give it some time to finish by itself before we clean it up
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Startup of run process {} failed".
format(child_pid))
child_exists = False
try:
child_ps = psutil.Process(child_pid)
child_status = child_ps.status()
if child_status != psutil.STATUS_ZOMBIE:
child_exists = True
except (psutil.NoSuchProcess, psutil.ZombieProcess):
# No need to clean up anything in these cases.
pass
if child_exists:
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Cleaning up run process {} and status {}".
format(child_pid, child_status))
try:
child_ps.terminate()
child_ps.wait()
except (IOError, OSError) as exc:
raise click.ClickException(
"Cannot clean up 'run' child process with PID {}: {}: {}".
format(child_pid, type(exc), exc))
else:
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process: Run process {} does not exist anymore".
format(child_pid))
return 1
def run_exit_handler(start_pid, log_fp):
"""
Exit handler that gets established for the 'run' command.
This exit handler signals a failed startup of the 'run' command
to the 'start' process, if it still exists. If the 'start'
process no longer exists, this means that the startup of the 'run'
command succeeded earlier, and it is now terminated by some means.
In addition, it closes the log_fp log file.
"""
try:
start_p = psutil.Process(start_pid)
except psutil.NoSuchProcess:
start_p = None
if start_p:
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Run process exit handler: Sending failure signal ({}) "
"to start process {}".
format(SIGNAL_RUN_STARTUP_FAILURE, start_pid))
try:
os.kill(start_pid, SIGNAL_RUN_STARTUP_FAILURE) # Sends the signal
except OSError:
# Note: ProcessLookupError is a subclass of OSError but was
# introduced only in Python 3.3.
# The original start parent no longer exists.
# This can only happen if the process goes away in the short time
window between checking for it at the beginning of this function,
# and here.
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Run process exit handler: Start process {} does "
"not exist anymore".format(start_pid))
if log_fp:
print_out("Closing 'run' output log file at {}".format(datetime.now()))
log_fp.close()
def format_indication(indication, host, indi_format=None):
"""
Return a string that contains the indication formatted according to the
given format.
"""
dt = datetime.now()
try:
dt = dt.astimezone()
except TypeError:
# Below Python 3.6, it cannot determine the local timezone, and its
# tzinfo argument is required.
pass
tz = dt.tzname() or ''
i_mof = indication.tomof().replace('\n', ' ')
format_kwargs = dict(
dt=dt,
tz=tz,
h=host,
i_mof=i_mof,
i=indication,
)
if indi_format is None:
indi_format = DEFAULT_INDI_FORMAT
indi_str = indi_format.format(**format_kwargs)
return indi_str
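# Illustrative call (a sketch; 'indi' stands for a pywbem.CIMInstance that was
# received as an indication):
#
#   line = format_indication(indi, '127.0.0.1', '{dt} {h} {i.classname}')
#
# would produce something like:
#
#   2021-05-13 17:51:05.831117+02:00 127.0.0.1 CIM_AlertIndication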
def show_help_format():
"""
Display help for the format specification used with the --indi-format
option.
"""
# pylint: disable=line-too-long
print("""
Help for the format specification with option: --indi-format FORMAT
FORMAT is a Python new-style format string that can use the following keyword
arguments:
* 'dt' - Python datetime object for the point in time the listener received the
indication. If used directly in a format specifier, it is shown in a standard
date & time format using local time and UTC offset of the local timezone.
This keyword argument can also be used for accessing its Python object
attributes in the format specifier (e.g. '{dt.hour}').
* 'tz' - Timezone name of the local timezone. On Python versions before 3.6,
the empty string.
* 'h' - Host name or IP address of the host that sent the indication.
* 'i_mof' - Indication instance in single-line MOF representation.
* 'i' - pywbem.CIMInstance object with the indication instance. This keyword
argument can be used for accessing its Python object attributes in the format
specifier (e.g. '{i.classname}'), or its CIM property values
(e.g. '{i[PropName]}'). For more complex cases, attributes of the CIMProperty
objects can also be accessed (e.g. '{i.properties[PropName].type}').
The default format is: '""" + DEFAULT_INDI_FORMAT + """'
Examples:
--indi-format '{dt} {h} {i_mof}'
2021-05-13 17:51:05.831117+02:00 instance of CIM_AlertIndication { Message = "test"; ... }
--indi-format 'At {dt.hour}:{dt.minute} from {h}: {i.classname}: {i[Message]}'
At 17:51 from 127.0.0.1: CIM_AlertIndication: test
""") # noqa: E501
def show_help_call():
"""
Display help for calling a Python function for each received indication
when using the --indi-call option.
"""
print("""
Help for calling a Python function with option: --indi-call MODULE.FUNCTION
MODULE must be a module name or a dotted package name in the module search
path, e.g. 'mymodule' or 'mypackage.mymodule'.
The current directory is added to the front of the Python module search path,
if needed. Thus, the module can be a single module file in the current
directory, for example:
./mymodule.py
or a module in a package in the current directory, for example:
./mypackage/__init__.py
./mypackage/mymodule.py
FUNCTION must be a function in that module with the following interface:
def func(indication, host)
Parameters:
* 'indication' is a 'pywbem.CIMInstance' object representing the CIM indication
that has been received. Its 'path' attribute is None.
* 'host' is a string with the host name or IP address of the indication sender
(typically a WBEM server).
The return value of the function will be ignored.
Exceptions raised when importing the module cause the 'pywbemlistener run'
command to terminate with an error. Exceptions raised by the function when
it is called cause an error message to be displayed.
""")
def show_help_options(options):
"""
Show the help messages for the --help-... options, if specified.
Returns:
bool: Indicates whether help was shown.
"""
ret = False
if options['help_call']:
show_help_call()
ret = True
if options['help_format']:
show_help_format()
ret = True
return ret
class SilentArgumentParser(argparse.ArgumentParser):
"""
    argparse.ArgumentParser subclass that suppresses the default error output
    and process exit, and instead raises SystemExit.
"""
def error(self, message=None):
"""Called for usage errors detected by the parser"""
raise SystemExit(2)
def exit(self, status=0, message=None):
"""Not sure when this is called"""
raise SystemExit(status)
def parse_listener_args(listener_args):
"""
Parse the command line arguments of a process. If it is a listener process
return its parsed arguments (after the 'pywbemlistener' command); otherwise
return None.
"""
parser = SilentArgumentParser()
    # Note: The following options must be in sync with the Click general options
parser.add_argument('--output-format', '-o', type=str, default=None)
parser.add_argument('--logdir', '-l', type=str, default=None)
parser.add_argument('--verbose', '-v', action='count', default=0)
parser.add_argument('--pdb', action='store_true', default=False)
parser.add_argument('--warn', default=False, action='store_true')
parser.add_argument('run', type=str, default=None)
parser.add_argument('name', type=str, default=None)
    # Note: The following options must be in sync with the Click command options
parser.add_argument('--start-pid', type=int, default=None)
parser.add_argument('--port', '-p', type=int,
default=DEFAULT_LISTENER_PORT)
parser.add_argument('--scheme', '-s', type=str,
default=DEFAULT_LISTENER_SCHEME)
parser.add_argument('--certfile', type=str, default=None)
parser.add_argument('--keyfile', type=str, default=None)
parser.add_argument('--indi-call', type=str, default=None)
parser.add_argument('--indi-file', type=str, default=None)
parser.add_argument('--indi-format', type=str, default=None)
try:
parsed_args = parser.parse_args(listener_args)
except SystemExit:
# Invalid arguments
return None
if parsed_args.run != 'run':
return None
return parsed_args
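# A minimal illustration (not part of the original module): parse_listener_args()
# returns a namespace only for 'pywbemlistener ... run NAME ...' command lines
# (the argument values are hypothetical):
#
#     parse_listener_args(['run', 'lis1', '--port', '25989'])
#     # -> Namespace(run='run', name='lis1', port=25989, ...)
#     parse_listener_args(['show', 'lis1'])
#     # -> None (not a 'run' command line)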
def run_term_signal_handler(sig, frame):
# pylint: disable=unused-argument
"""
Signal handler for the 'run' command that gets called for the
SIGTERM signal, i.e. when the 'run' process gets terminated by
some means.
    This handler ensures that the main loop of the 'run' command
gets control and can gracefully stop the listener.
"""
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Run process: Received termination signal ({})".format(sig))
# This triggers the registered exit handler run_exit_handler()
raise SystemExit(1)
def transpose(headers, rows):
"""
Return transposed headers and rows (i.e. switch columns and rows).
"""
ret_headers = ['Attribute', 'Value']
ret_rows = []
for header in headers:
ret_row = [header]
ret_rows.append(ret_row)
for row in rows:
assert len(headers) == len(row)
for i, col in enumerate(row):
ret_rows[i].append(col)
return ret_headers, ret_rows
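# A minimal illustration (not part of the original module): transpose() turns
# one row per listener into one row per attribute (the values are hypothetical):
#
#     transpose(['Name', 'Port'], [['lis1', 25989]])
#     # -> (['Attribute', 'Value'], [['Name', 'lis1'], ['Port', 25989]])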
def display_show_listener(listener, table_format):
"""
Display a listener for the 'show' command.
"""
headers = ListenerProperties.show_headers()
rows = [listener.show_row()]
headers, rows = transpose(headers, rows)
table = format_table(
rows, headers, table_format=table_format,
sort_columns=None, hide_empty_cols=None, float_fmt=None)
click.echo(table)
def display_list_listeners(listeners, table_format):
"""
Display listeners for the 'list' command.
"""
headers = ListenerProperties.list_headers()
rows = []
for lis in listeners:
rows.append(lis.list_row())
table = format_table(
rows, headers, table_format=table_format,
sort_columns=None, hide_empty_cols=None, float_fmt=None)
click.echo(table)
################################################################
#
#  Common methods and action functions for the listener click group
#
###############################################################
def cmd_listener_run(context, name, options):
"""
Run as a listener.
"""
port = options['port']
scheme = options['scheme']
host = 'localhost'
start_pid = options['start_pid']
if start_pid is not None:
start_pid = int(start_pid)
# If the stdout of the run process is a pipe (e.g. when capturing
# the output of the run command or its parent start command during
# tests, or when a user pipes the output of the run or start command),
# the parent process will not terminate because its child run process
# has the same pipe open. This is addressed by setting the file descriptor
# of stdout to the file descriptor of the opened log file (when logging)
# or the file descriptor of the opened null device (when not logging),
# using os.dup2().
# Note that this needs to be done at the OS file descriptor level. Setting
# sys.stdout is not sufficient, because its prior file descriptor would
# still be the open pipe.
pid = os.getpid()
logfile = get_logfile(context.logdir, name)
if logfile:
# This message goes to the original stdout of the run process (wherever
# that is directed to)
print_out("Run process {}: Output is logged to: {}".
format(pid, logfile))
# pylint: disable=consider-using-with
log_fp = io.open(logfile, 'a', encoding='utf-8')
if sys.platform == 'win32':
            # On Windows, the standard file descriptors are not inherited
            # by the run process (probably due to the additional process
# in between), so the recommended way of redirecting stdout
# does not prevent the start process from terminating.
sys.stdout = log_fp
else:
            # On UNIX, see the comment at the beginning of this function.
# The null device will be closed in run_exit_handler()
os.dup2(log_fp.fileno(), sys.stdout.fileno())
# This message is the first one of this run in the log file (appended)
print_out("Opening 'run' output log file at {}".format(datetime.now()))
else:
# pylint: disable=consider-using-with
log_fp = io.open(os.devnull, 'w', encoding='utf-8')
if sys.platform == 'win32':
            # On Windows, the standard file descriptors are not inherited
            # by the run process (probably due to the additional process
# in between), so the recommended way of redirecting stdout
# does not prevent the start process from terminating.
sys.stdout = log_fp
else:
            # On UNIX, see the comment at the beginning of this function.
# The null device will be closed in run_exit_handler()
os.dup2(log_fp.fileno(), sys.stdout.fileno())
print_out("Run process {}: Assertion: This message should not appear")
# Register a termination signal handler that causes the loop further down
# to get control via SystemExit.
signal.signal(signal.SIGTERM, run_term_signal_handler)
# If this run process is started from a start process, register a Python
# atexit handler to make sure we get control when Click exceptions terminate
# the process. The exit handler signals a failed startup to the start
# process.
if start_pid:
atexit.register(run_exit_handler, start_pid, log_fp)
listeners = get_listeners(name)
if len(listeners) > 1: # This upcoming listener and a previous one
lis = listeners[0]
url = '{}://{}:{}'.format(lis.scheme, host, lis.port)
raise click.ClickException(
"Listener {} already running at {}".format(name, url))
if scheme == 'http':
http_port = port
https_port = None
certfile = None
keyfile = None
else:
assert scheme == 'https'
https_port = port
http_port = None
certfile = options['certfile']
keyfile = options['keyfile'] or certfile
url = '{}://{}:{}'.format(scheme, host, port)
context.spinner_stop()
try:
listener = WBEMListener(
host=host, http_port=http_port, https_port=https_port,
certfile=certfile, keyfile=keyfile)
except ValueError as exc:
raise click.ClickException(
"Cannot create listener {}: {}".format(name, exc))
try:
listener.start()
except (IOError, OSError, ListenerError) as exc:
raise click.ClickException(
"Cannot start listener {}: {}".format(name, exc))
indi_call = options['indi_call']
indi_file = options['indi_file']
indi_format = options['indi_format'] or DEFAULT_INDI_FORMAT
def file_func(indication, host):
"""
Indication callback function that appends the indication to a file
using the specified format.
"""
try:
display_str = format_indication(indication, host, indi_format)
except Exception as exc: # pylint: disable=broad-except
display_str = ("Error: Cannot format indication using format "
"\"{}\": {}: {}".
format(indi_format, exc.__class__.__name__, exc))
with io.open(indi_file, 'a', encoding='utf-8') as fp:
fp.write(display_str)
fp.write(u'\n')
if indi_call:
mod_func = indi_call.rsplit('.', 1)
if len(mod_func) < 2:
raise click.ClickException(
"The --indi-call option does not specify MODULE.FUNCTION: {}".
format(indi_call))
mod_name = mod_func[0]
func_name = mod_func[1]
curdir = os.getcwd()
if sys.path[0] != curdir:
if context.verbose >= _config.VERBOSE_SETTINGS:
click.echo("Inserting current directory into front of Python "
"module search path: {}".format(curdir))
sys.path.insert(0, curdir)
try:
module = importlib.import_module(mod_name)
except ImportError as exc:
raise click.ClickException(
"Cannot import module {}: {}".
format(mod_name, exc))
except SyntaxError as exc:
raise click.ClickException(
"Cannot import module {}: SyntaxError: {}".
format(mod_name, exc))
try:
func = getattr(module, func_name)
except AttributeError:
raise click.ClickException(
"Function {}() not found in module {}".
format(func_name, mod_name))
listener.add_callback(func)
if context.verbose >= _config.VERBOSE_SETTINGS:
click.echo("Added indication handler for calling function {}() "
"in module {}".format(func_name, mod_name))
if indi_file:
listener.add_callback(file_func)
if context.verbose >= _config.VERBOSE_SETTINGS:
click.echo("Added indication handler for appending to file {} "
"with format \"{}\"".format(indi_file, indi_format))
click.echo("Running listener {} at {}".format(name, url))
# Signal successful startup completion to the parent 'start' process.
if start_pid:
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Run process: Sending success signal ({}) to "
"start process {}".
format(SIGNAL_RUN_STARTUP_SUCCESS, start_pid))
os.kill(start_pid, SIGNAL_RUN_STARTUP_SUCCESS) # Sends the signal
try:
while True:
sleep(60)
except (KeyboardInterrupt, SystemExit) as exc:
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Run process: Caught exception {}: {}".
format(type(exc), exc))
# Note: SystemExit occurs only due to being raised in the signal handler
# that was registered.
listener.stop()
click.echo("Shut down listener {} running at {}".format(name, url))
def cmd_listener_start(context, name, options):
"""
Start a named listener.
"""
port = options['port']
scheme = options['scheme']
certfile = options['certfile']
keyfile = options['keyfile']
indi_call = options['indi_call']
indi_file = options['indi_file']
indi_format = options['indi_format']
host = 'localhost'
listeners = get_listeners(name)
if listeners:
lis = listeners[0]
url = '{}://{}:{}'.format(lis.scheme, host, lis.port)
raise click.ClickException(
"Listener {} already running at {}".format(name, url))
pid = os.getpid()
run_args = [
'pywbemlistener',
]
if context.verbose:
run_args.append('-{}'.format('v' * context.verbose))
if context.logdir:
run_args.extend(['--logdir', context.logdir])
run_args.extend([
'run', name,
'--port', str(port),
'--scheme', scheme,
'--start-pid', str(pid),
])
if certfile:
run_args.extend(['--certfile', certfile])
if keyfile:
run_args.extend(['--keyfile', keyfile])
if indi_call:
run_args.extend(['--indi-call', indi_call])
if indi_file:
run_args.extend(['--indi-file', indi_file])
if indi_format:
run_args.extend(['--indi-format', indi_format])
# While we stop the spinner of this 'start' command, the spinner of the
# invoked 'run' command will still be spinning until its startup/exit
    # completion is detected. When the output of the 'start' command is
# redirected, the spinner of the child process will also be suppressed,
# so this behavior is consistent and should be fine.
context.spinner_stop()
prepare_startup_completion()
popen_kwargs = dict(shell=False)
if six.PY3:
popen_kwargs['start_new_session'] = True
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Start process {}: Starting run process as: {}".
format(pid, run_args))
# pylint: disable=consider-using-with
p = subprocess.Popen(run_args, **popen_kwargs)
# Wait for startup completion or for error exit
try:
rc = wait_startup_completion(p.pid)
except KeyboardInterrupt:
raise click.ClickException(
"Keyboard interrupt while waiting for listener to start up")
if rc != 0:
# Error has already been displayed
raise SystemExit(rc)
# A message about the successful startup has already been displayed by
# the child process.
def cmd_listener_stop(context, name):
"""
Stop a named listener.
"""
listeners = get_listeners(name)
if not listeners:
raise click.ClickException(
"No running listener found with name {}".format(name))
listener = listeners[0]
context.spinner_stop()
p = psutil.Process(listener.pid)
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Terminating run process {}".format(listener.pid))
p.terminate()
if _config.VERBOSE_PROCESSES_ENABLED:
print_out("Waiting for run process {} to complete".format(listener.pid))
p.wait()
# A message about the successful shutdown has already been displayed by
# the child process.
def cmd_listener_show(context, name):
"""
Show a named listener.
"""
listeners = get_listeners(name)
if not listeners:
raise click.ClickException(
"No running listener found with name {}".format(name))
context.spinner_stop()
display_show_listener(listeners[0], table_format=context.output_format)
def cmd_listener_list(context):
"""
List all named listeners.
"""
listeners = get_listeners()
context.spinner_stop()
if not listeners:
click.echo("No running listeners")
else:
display_list_listeners(listeners, table_format=context.output_format)
def cmd_listener_test(context, name, options):
"""
Send a test indication to a named listener.
"""
listeners = get_listeners(name)
if not listeners:
raise click.ClickException(
"No running listener found with name {}".format(name))
listener = listeners[0]
count = options['count'] # optional but defaulted
if count < 1:
raise click.ClickException(
"Invalid count specified: {}".format(count))
# Construct an alert indication
indication = CIMInstance("CIM_AlertIndication")
indication['IndicationIdentifier'] = \
CIMProperty('IndicationIdentifier', value=None, type='string')
indication['AlertingElementFormat'] = Uint16(2) # CIMObjectPath
indication['AlertingManagedElement'] = \
CIMProperty('AlertingManagedElement', value=None, type='string')
indication['AlertType'] = Uint16(2) # Communications Alert
indication['Message'] = "Test message"
indication['OwningEntity'] = 'TEST'
indication['PerceivedSeverity'] = Uint16(2) # Information
indication['ProbableCause'] = Uint16(0) # Unknown
indication['SystemName'] = \
CIMProperty('SystemName', value=None, type='string')
indication['MessageArguments'] = \
CIMProperty('MessageArguments', value=[], type='string', is_array=True)
indication['IndicationTime'] = datetime.now()
indication['MessageID'] = 'TESTnnnn'
context.spinner_stop()
click.echo("Sending the following test indication:\n{}".
format(indication.tomof()))
for i in range(1, count + 1):
indication['MessageID'] = 'TEST{:04d}'.format(i)
conn_kwargs = {}
conn_kwargs['creds'] = None
if listener.scheme == 'https':
url = 'https://localhost:{}'.format(listener.port)
conn_kwargs['x509'] = None
conn_kwargs['no_verification'] = True
else: # http
url = 'http://localhost:{}'.format(listener.port)
conn = WBEMConnection(url, **conn_kwargs)
try:
conn.ExportIndication(indication)
except Error as exc:
raise click.ClickException(str(exc))
click.echo("Sent test indication #{} to listener {} at {}".
format(i, name, url))
|
|
# -*- coding: utf-8 -*-
"""
CMS
Simple Content Management System
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
from datetime import timedelta
# =============================================================================
def index():
""" Module homepage """
return s3db.cms_index(module, alt_function="index_alt")
# -----------------------------------------------------------------------------
def index_alt():
"""
Module homepage for non-Admin users when no CMS content found
"""
# Just redirect to the list of Posts
s3_redirect_default(URL(f="post"))
# -----------------------------------------------------------------------------
def series():
""" RESTful CRUD controller """
# Pre-process
def prep(r):
if r.component:
# Settings are defined at the series level
table = s3db.cms_post
_avatar = table.avatar
_avatar.readable = _avatar.writable = False
_avatar.default = r.record.avatar
_location = table.location_id
if not r.record.location:
_location.readable = _location.writable = False
_replies = table.replies
_replies.readable = _replies.writable = False
_replies.default = r.record.replies
_roles_permitted = table.roles_permitted
_roles_permitted.readable = _roles_permitted.writable = False
_roles_permitted.default = r.record.roles_permitted
if r.record.richtext:
table.body.represent = lambda body: XML(body)
table.body.widget = s3_richtext_widget
else:
table.body.represent = lambda body: XML(s3_URLise(body))
table.body.widget = None
# Titles do show up
table.name.comment = ""
return True
s3.prep = prep
return s3_rest_controller(rheader=s3db.cms_rheader)
# -----------------------------------------------------------------------------
def tag():
""" RESTful CRUD controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def blog():
"""
        RESTful CRUD controller for display of a series of posts as a full-page,
        read-only view showing the last 5 items in reverse time order
@ToDo: Convert to dataList
"""
# Pre-process
def prep(r):
s3db.configure(r.tablename, listadd=False)
return True
s3.prep = prep
# Post-process
def postp(r, output):
if r.record:
response.view = s3base.S3CRUD._view(r, "cms/blog.html")
return output
s3.postp = postp
output = s3_rest_controller("cms", "series")
return output
# -----------------------------------------------------------------------------
def post():
""" RESTful CRUD controller """
tablename = "cms_post"
# Filter out those posts which are part of a series
#table = s3db[tablename]
#s3.filter = (table.series_id == None)
# Custom Method to add Comments
s3db.set_method(module, resourcename,
method = "discuss",
action = discuss)
def prep(r):
if r.interactive:
if r.method in ("create", "update"):
table = r.table
                # Filter from a Profile page?
series = get_vars.get("~.series_id$name", None)
if series:
# Lookup ID
stable = db.cms_series
row = db(stable.name == series).select(stable.id,
limitby=(0, 1)
).first()
if row:
field = table.series_id
field.default = row.id
field.readable = field.writable = False
                # Context from a Profile page?
location_id = get_vars.get("(location)", None)
if location_id:
field = table.location_id
field.default = location_id
field.readable = field.writable = False
page = get_vars.get("page", None)
url = get_vars.get("url") # custom redirect?
if page:
table.name.default = page
table.name.readable = table.name.writable = False
_crud = s3.crud_strings[tablename]
_crud.label_create = T("New Page")
_crud.title_update = T("Edit Page")
if not url:
url = URL(c="default", f="index", vars={"page": page})
s3db.configure(tablename,
create_next = url,
update_next = url,
)
_module = get_vars.get("module", None)
if _module:
table.avatar.readable = table.avatar.writable = False
table.location_id.readable = table.location_id.writable = False
table.date.readable = table.date.writable = False
table.expired.readable = table.expired.writable = False
# We always want the Rich Text widget here
table.body.widget = s3base.s3_richtext_widget
resource = get_vars.get("resource", None)
if resource in ("about", "contact", "help", "index"):
if resource == "about":
# We're creating/updating text for the About page
table.name.default = "About Page"
elif resource == "contact":
# We're creating/updating text for a Contact page
table.name.default = "Contact Page"
elif resource == "help":
# We're creating/updating text for the Help page
table.name.default = "Help Page"
else:
# We're creating/updating text for the Home page
table.name.default = "Home Page"
#table.title.readable = table.title.writable = False
table.replies.readable = table.replies.writable = False
if not url:
url = URL(c=_module, f=resource)
else:
record = get_vars.get("record", None)
if record:
# We're creating/updating text for a Profile page
table.name.default = "%s %s Profile Page" % (resource, record)
table.title.readable = table.title.writable = False
table.replies.readable = table.replies.writable = False
if not url:
url = URL(c=_module, f=resource, args=[record, "profile"])
elif resource:
# We're creating/updating text for a Resource Summary page
table.name.default = "%s Summary Page Header" % resource
table.title.readable = table.title.writable = False
table.replies.readable = table.replies.writable = False
if not url:
url = URL(c=_module, f=resource, args="summary")
else:
# We're creating/updating a Module home page
table.name.default = "%s Home Page" % _module
_crud = s3.crud_strings[tablename]
_crud.label_create = T("New Page")
_crud.title_update = T("Edit Page")
if not url:
url = URL(c=_module, f="index")
s3db.configure(tablename,
create_next = url,
update_next = url,
)
layer_id = get_vars.get("layer_id", None)
if layer_id:
# Editing cms_post_layer
table.name.default = "Metadata Page for Layer %s" % layer_id
table.name.readable = table.name.writable = False
table.avatar.readable = table.avatar.writable = False
table.location_id.readable = table.location_id.writable = False
table.title.readable = table.title.writable = False
table.replies.readable = table.replies.writable = False
table.date.readable = table.date.writable = False
table.expired.readable = table.expired.writable = False
_crud = s3.crud_strings[tablename]
_crud.label_create = T("Add Metadata")
_crud.title_update = T("Edit Metadata")
if r.component_name == "module":
modules = {}
_modules = current.deployment_settings.modules
for module in _modules:
if module in ("appadmin", "errors", "ocr"):
continue
modules[module] = _modules[module].name_nice
s3db.cms_post_module.field.requires = \
IS_IN_SET_LAZY(lambda: sort_dict_by_values(modules))
return True
s3.prep = prep
output = s3_rest_controller(rheader=s3db.cms_rheader)
return output
# -----------------------------------------------------------------------------
def page():
"""
        RESTful CRUD controller for display of a post as a full-page, read-only
        view - with optional Comments
"""
found = True
get_vars = request.get_vars
if "name" in get_vars:
table = s3db.cms_post
query = (table.name == get_vars.name) & \
(table.deleted != True)
row = db(query).select(table.id, limitby=(0, 1)).first()
if row:
request.args.append(str(row.id))
else:
found = False
# Pre-process
def prep(r):
if not found:
r.error(404, T("Page not found"), next=auth.permission.homepage)
s3db.configure(r.tablename, listadd=False)
return True
s3.prep = prep
# Post-process
def postp(r, output):
if r.record and not r.transformable():
output = {"item": s3base.S3XMLContents(r.record.body).xml()}
current.menu.options = None
response.view = s3base.S3CRUD._view(r, "cms/page.html")
if r.record.replies:
ckeditor = URL(c="static", f="ckeditor", args="ckeditor.js")
s3.scripts.append(ckeditor)
adapter = URL(c="static", f="ckeditor", args=["adapters",
"jquery.js"])
s3.scripts.append(adapter)
# Toolbar options: http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
js = "".join((
'''i18n.reply="''', str(T("Reply")), '''"
var img_path=S3.Ap.concat('/static/img/jCollapsible/')
var ck_config={toolbar:[['Bold','Italic','-','NumberedList','BulletedList','-','Link','Unlink','-','Smiley','-','Source','Maximize']],toolbarCanCollapse:false,removePlugins:'elementspath'}
function comment_reply(id){
$('#cms_comment_post_id__row').hide()
$('#cms_comment_post_id__row1').hide()
$('#comment-title').html(i18n.reply)
$('#cms_comment_body').ckeditorGet().destroy()
$('#cms_comment_body').ckeditor(ck_config)
$('#comment-form').insertAfter($('#comment-'+id))
$('#cms_comment_parent').val(id)
var post_id = $('#comment-'+id).attr('post_id')
$('#cms_comment_post_id').val(post_id)
}'''))
s3.js_global.append(js)
return output
s3.postp = postp
output = s3_rest_controller("cms", "post")
return output
# -----------------------------------------------------------------------------
def filter_formstyle(row_id, label, widget, comment, hidden=False):
"""
Custom Formstyle for FilterForm
@param row_id: HTML id for the row
@param label: the label
@param widget: the form widget
@param comment: the comment
@param hidden: whether the row should initially be hidden or not
"""
if hidden:
_class = "advanced hide"
else:
_class= ""
if label:
return DIV(label, widget, _id=row_id, _class=_class)
else:
return DIV(widget, _id=row_id, _class=_class)
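# A minimal illustration (not part of the original controller) of what the
# formstyle above produces; LABEL/INPUT are web2py HTML helpers and the values
# are hypothetical:
#
#     filter_formstyle("row1", LABEL("Search"), INPUT(), None)
#     # -> DIV(LABEL("Search"), INPUT(), _id="row1", _class="")
#     filter_formstyle("row2", None, INPUT(), None, hidden=True)
#     # -> DIV(INPUT(), _id="row2", _class="advanced hide")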
# -----------------------------------------------------------------------------
def cms_post_age(row):
"""
The age of the post
- used for colour-coding markers of Alerts & Incidents
"""
if hasattr(row, "cms_post"):
row = row.cms_post
try:
date = row.date
except:
# not available
return messages["NONE"]
now = request.utcnow
age = now - date
if age < timedelta(days=2):
return 1
elif age < timedelta(days=7):
return 2
else:
return 3
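# A minimal illustration (not part of the original controller): the age buckets
# returned above, relative to request.utcnow:
#
#     age < 2 days   -> 1
#     2 to < 7 days  -> 2
#     7 days or more -> 3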
# -----------------------------------------------------------------------------
def newsfeed():
"""
RESTful CRUD controller for display of posts as a filterable dataList
(use with /datalist method)
"""
# Load Model
table = s3db.cms_post
stable = db.cms_series
title_list = T("Latest Information")
# Hide Posts linked to Modules and Maps & Expired Posts
s3.filter = (FS("post_module.module") == None) & \
(FS("post_layer.layer_id") == None) & \
(FS("expired") != True)
# Ensure that filtered views translate into options which update the Widget
if "~.series_id$name" in get_vars:
series_name = get_vars["~.series_id$name"]
# Disabled as can change filters dynamically
# @ToDo: Better Mechanism: Another field in cms_series?
#if series_name == "Request":
# title_list = T("Latest Requests")
#elif series_name == "Offer":
# title_list = T("Latest Offers")
series = db(stable.name == series_name).select(stable.id,
cache=s3db.cache,
limitby=(0, 1)).first()
if series:
series_id = str(series.id)
get_vars.pop("~.series_id$name")
get_vars["~.series_id__belongs"] = series_id
s3.crud_strings["cms_post"].title_list = title_list
contact_field = settings.get_cms_person()
org_field = settings.get_cms_organisation()
org_group_field = settings.get_cms_organisation_group()
show_events = settings.get_cms_show_events()
hidden = not settings.get_cms_filter_open()
from s3.s3filter import S3TextFilter, S3OptionsFilter, S3LocationFilter, S3DateFilter
filter_widgets = [S3TextFilter(["body"],
label = T("Search"),
_class = "filter-search",
#_placeholder = T("Search").upper(),
),
S3LocationFilter("location_id",
label = T("Filter by Location"),
hidden = hidden,
),
]
fappend = filter_widgets.append
finsert = filter_widgets.insert
if show_events:
fappend(S3OptionsFilter("event_post.event_id",
label = T("Filter by Disaster"),
hidden = hidden,
))
if org_field:
fappend(S3OptionsFilter(org_field,
label = T("Filter by Organization"),
# Can't use this for created_by as integer, use field.represent instead
#represent = "%(name)s",
hidden = hidden,
))
if org_group_field:
group_label = settings.get_org_groups()
if group_label:
fappend(S3OptionsFilter(org_group_field,
label = T("Filter by %(type)s") % dict(type=T(group_label)),
# Can't use this for created_by as integer, use field.represent instead
#represent = "%(name)s",
hidden = hidden,
))
fappend(S3DateFilter("date",
label = T("Filter by Date"),
hide_time = True,
hidden = hidden,
))
if settings.get_cms_show_tags():
finsert(1, S3OptionsFilter("tag_post.tag_id",
label = T("Filter by Tag"),
represent = "%(name)s",
hidden = hidden,
))
if settings.get_cms_bookmarks() and auth.user:
finsert(1, S3OptionsFilter("bookmark.user_id",
label = T("Filter by Bookmark"),
# Can't just use "" as this is then omitted from rendering
options = {"*": T("All"),
auth.user.id: T("My Bookmarks"),
},
cols = 2,
multiple = False,
hidden = hidden,
))
notify_fields = [(T("Date"), "date"),
(T("Location"), "location_id"),
]
len_series = db(stable.deleted == False).count()
if len_series > 3:
notify_fields.insert(0, (T("Type"), "series_id"))
# Multiselect widget
finsert(1, S3OptionsFilter("series_id",
label = T("Filter by Type"),
# We want translations
#represent = "%(name)s",
hidden = hidden,
))
elif len_series > 1:
notify_fields.insert(0, (T("Type"), "series_id"))
# Checkboxes
finsert(1, S3OptionsFilter("series_id",
label = T("Filter by Type"),
# We want translations
#represent = "%(name)s",
cols = 2,
hidden = hidden,
))
else:
# No Widget or notify_field
pass
nappend = notify_fields.append
if org_field:
nappend((T("Organization"), org_field))
if org_group_field:
if isinstance(group_label, bool):
group_label = T("Organisation Group")
nappend((T(group_label), org_group_field))
if contact_field:
nappend((T("Contact"), contact_field))
nappend((T("Description"), "body"))
# @todo: allow configuration (?)
filter_formstyle = settings.get_ui_formstyle()
s3db.configure("cms_post",
# We could use a custom Advanced widget
#filter_advanced = False,
filter_formstyle = filter_formstyle,
# No Submit button (done automatically)
#filter_submit = (T("SEARCH"), "btn btn-primary"),
filter_widgets = filter_widgets,
# Default anyway now:
#list_layout = s3db.cms_post_list_layout,
# Create form comes via AJAX in a Modal
#insertable = False,
notify_fields = notify_fields,
notify_template = "notify_post",
)
s3.dl_pagelength = 6 # 5 forces an AJAX call
def prep(r):
if r.interactive or r.representation == "aadata":
s3db.cms_customise_post_fields()
if r.interactive:
if len_series > 1:
refresh = get_vars.get("refresh", None)
if refresh == "datalist":
                        # We must be coming from the News Feed page so we can change the type on the fly
field = table.series_id
field.label = T("Type")
field.readable = field.writable = True
else:
field = table.series_id
row = db(stable.deleted == False).select(stable.id,
limitby=(0, 1)
).first()
try:
field.default = row.id
except:
# Prepop not done: expose field to show error
field.label = T("Type")
field.readable = field.writable = True
else:
field.readable = field.writable = False
if r.method == "read":
# Restore the label for the Location
table.location_id.label = T("Location")
elif r.method == "create":
pass
# @ToDo: deployment_setting
#ADMIN = session.s3.system_roles.ADMIN
#if (not auth.s3_has_role(ADMIN)):
# represent = S3Represent(lookup="cms_series",
# translate=settings.get_L10n_translate_cms_series())
# field.requires = IS_ONE_OF(db,
# "cms_series.id",
# represent,
# not_filterby="name",
# not_filter_opts = ("Alert",),
# )
#field = table.name
#field.readable = field.writable = False
#field = table.title
#field.readable = field.writable = False
field = table.avatar
field.default = True
#field.readable = field.writable = False
field = table.replies
field.default = False
#field.readable = field.writable = False
field = table.body
field.label = T("Description")
# Plain text not Rich
from s3.s3widgets import s3_comments_widget
field.widget = s3_comments_widget
#table.comments.readable = table.comments.writable = False
#if request.controller == "default":
# # Don't override card layout for News Feed/Homepage
# return True
from s3.s3forms import S3SQLCustomForm, S3SQLInlineComponent
# Filter from a Profile page?
# If so, then default the fields we know
location_id = get_vars.get("~.(location)", None)
if location_id:
table.location_id.default = location_id
event_id = get_vars.get("~.(event)", None)
if event_id:
def create_onaccept(form):
table = current.s3db.event_post
table.insert(event_id=event_id,
post_id=form.vars.id)
s3db.configure("cms_post",
create_onaccept = create_onaccept,
)
crud_fields = ["date",
"series_id",
]
cappend = crud_fields.append
if settings.get_cms_show_tags():
cappend("title")
crud_fields.extend(("body",
"location_id",
))
if not event_id and show_events:
cappend(S3SQLInlineComponent("event_post",
# @ToDo: deployment_setting (use same one used to activate?)
#label = T("Disaster(s)"),
label = T("Disaster"),
multiple = False,
fields = [("", "event_id")],
orderby = "event_id$name",
))
if org_field == "post_organisation.organisation_id":
cappend(S3SQLInlineComponent("post_organisation",
label = T("Organization"),
fields = [("", "organisation_id")],
# @ToDo: deployment_setting
multiple = False,
))
if org_group_field == "post_organisation_group.group_id":
cappend(S3SQLInlineComponent("post_organisation_group",
label = T(group_label),
fields = [("", "group_id")],
# @ToDo: deployment_setting
multiple = False,
))
if contact_field == "person_id":
cappend("person_id")
if settings.get_cms_show_attachments():
cappend(S3SQLInlineComponent("document",
name = "file",
label = T("Files"),
fields = [("", "file"),
#"comments",
],
))
if settings.get_cms_show_links():
cappend(S3SQLInlineComponent("document",
name = "url",
label = T("Links"),
fields = [("", "url"),
#"comments",
],
))
crud_form = S3SQLCustomForm(*crud_fields)
# Return to List view after create/update/delete
# We now do all this in Popups
#url_next = URL(c="default", f="index", args="newsfeed")
s3db.configure("cms_post",
#create_next = url_next,
#delete_next = url_next,
#update_next = url_next,
crud_form = crud_form,
# Don't include a Create form in 'More' popups
listadd = False,
)
elif r.representation == "xls":
table.body.represent = None
table.created_by.represent = s3base.s3_auth_user_represent_name
#table.created_on.represent = datetime_represent
utable = auth.settings.table_user
utable.organisation_id.represent = s3db.org_organisation_represent
list_fields = [(T("Date"), "date"),
#(T("Disaster"), "event_post.event_id"),
(T("Type"), "series_id"),
(T("Details"), "body"),
]
lappend = list_fields.append
# Which levels of Hierarchy are we using?
gis = current.gis
levels = gis.get_relevant_hierarchy_levels()
hierarchy = gis.get_location_hierarchy()
for level in levels:
lappend((hierarchy[level], "location_id$%s" % level))
if contact_field:
lappend((T("Contact"), contact_field))
if org_field:
lappend((T("Organization"), org_field))
if org_group_field:
lappend((T(group_label), org_group_field))
s3db.configure("cms_post",
list_fields = list_fields,
)
elif r.representation == "plain":
# Map Popups
table.location_id.represent = s3db.gis_LocationRepresent(sep=" | ")
table.created_by.represent = s3base.s3_auth_user_represent_name
# Used by default popups
series = table.series_id.represent(r.record.series_id)
s3.crud_strings["cms_post"].title_display = "%(series)s Details" % dict(series=series)
s3db.configure("cms_post",
popup_url = "",
)
table.avatar.readable = False
table.body.label = ""
table.expired.readable = False
table.replies.readable = False
table.created_by.readable = True
table.created_by.label = T("Author")
# Used by cms_post_popup
#table.created_on.represent = datetime_represent
elif r.representation == "geojson":
r.table.age = Field.Method("age", cms_post_age)
return True
s3.prep = prep
def postp(r, output):
if r.interactive:
if r.method == "datalist" and r.representation != "dl":
# Hide side menu
current.menu.options = None
response.view = s3base.S3CRUD._view(r, "cms/newsfeed.html")
return output
s3.postp = postp
output = s3_rest_controller("cms", "post")
return output
# =============================================================================
# Comments
# =============================================================================
def discuss(r, **attr):
""" Custom Method to manage the discussion of a Post """
id = r.id
# Add the RHeader to maintain consistency with the other pages
rheader = s3db.cms_rheader(r)
ckeditor = URL(c="static", f="ckeditor", args="ckeditor.js")
s3.scripts.append(ckeditor)
adapter = URL(c="static", f="ckeditor", args=["adapters",
"jquery.js"])
s3.scripts.append(adapter)
# Toolbar options: http://docs.cksource.com/CKEditor_3.x/Developers_Guide/Toolbar
js = "".join((
'''i18n.reply="''', str(T("Reply")), '''"
var img_path=S3.Ap.concat('/static/img/jCollapsible/')
var ck_config={toolbar:[['Bold','Italic','-','NumberedList','BulletedList','-','Link','Unlink','-','Smiley','-','Source','Maximize']],toolbarCanCollapse:false,removePlugins:'elementspath'}
function comment_reply(id){
$('#cms_comment_post_id__row').hide()
$('#cms_comment_post_id__row1').hide()
$('#comment-title').html(i18n.reply)
$('#cms_comment_body').ckeditorGet().destroy()
$('#cms_comment_body').ckeditor(ck_config)
$('#comment-form').insertAfter($('#comment-'+id))
$('#cms_comment_parent').val(id)
var post_id=$('#comment-'+id).attr('post_id')
$('#cms_comment_post_id').val(post_id)
}'''))
s3.js_global.append(js)
response.view = "cms/discuss.html"
return dict(rheader=rheader,
id=id)
# -----------------------------------------------------------------------------
def comment_parse(comment, comments, post_id=None):
"""
Parse a Comment
@param: comment - a gluon.sql.Row: the current comment
@param: comments - a gluon.sql.Rows: full list of comments
@param: post_id - a reference ID: optional post commented on
"""
author = B(T("Anonymous"))
if comment.created_by:
utable = s3db.auth_user
ptable = s3db.pr_person
ltable = s3db.pr_person_user
query = (utable.id == comment.created_by)
left = [ltable.on(ltable.user_id == utable.id),
ptable.on(ptable.pe_id == ltable.pe_id)]
row = db(query).select(utable.email,
ptable.first_name,
ptable.middle_name,
ptable.last_name,
left=left, limitby=(0, 1)).first()
if row:
person = row.pr_person
user = row[utable._tablename]
username = s3_fullname(person)
email = user.email.strip().lower()
import hashlib
hash = hashlib.md5(email).hexdigest()
url = "http://www.gravatar.com/%s" % hash
author = B(A(username, _href=url, _target="top"))
if not post_id and comment.post_id:
post = "re: %s" % s3db.cms_post[comment.post_id].name
header = DIV(author, " ", post)
post_id = comment.post_id
else:
header = author
thread = LI(DIV(s3base.s3_avatar_represent(comment.created_by),
DIV(DIV(header,
_class="comment-header"),
DIV(XML(comment.body)),
_class="comment-text"),
DIV(DIV(comment.created_on,
_class="comment-date"),
DIV(A(T("Reply"),
_class="action-btn"),
_onclick="comment_reply(%i);" % comment.id,
_class="comment-reply"),
_class="fright"),
_id="comment-%i" % comment.id,
_post_id=post_id,
_class="comment-box"))
# Add the children of this thread
children = UL(_class="children")
id = comment.id
count = 0
for comment in comments:
if comment.parent == id:
count = 1
child = comment_parse(comment, comments, post_id=post_id)
children.append(child)
if count == 1:
thread.append(children)
return thread
# -----------------------------------------------------------------------------
def comments():
"""
Function accessed by AJAX to handle Comments
        - for discuss() & page()
"""
try:
post_id = request.args[0]
except:
raise HTTP(400)
table = s3db.cms_comment
# Form to add a new Comment
table.post_id.default = post_id
table.post_id.writable = table.post_id.readable = False
form = crud.create(table)
# List of existing Comments
comments = db(table.post_id == post_id).select(table.id,
table.parent,
table.body,
table.created_by,
table.created_on)
output = UL(_id="comments")
for comment in comments:
if not comment.parent:
# Show top-level threads at top-level
thread = comment_parse(comment, comments, post_id=post_id)
output.append(thread)
# Also see the outer discuss()
script = \
'''$('#comments').collapsible({xoffset:'-5',yoffset:'50',imagehide:img_path+'arrow-down.png',imageshow:img_path+'arrow-right.png',defaulthide:false})
$('#cms_comment_parent__row1').hide()
$('#cms_comment_parent__row').hide()
$('#cms_comment_body').ckeditor(ck_config)
$('#submit_record__row input').click(function(){
$('#comment-form').hide()
$('#cms_comment_body').ckeditorGet().destroy()
return true
})'''
# No layout in this output!
#s3.jquery_ready.append(script)
output = DIV(output,
DIV(H4(T("New Post"),
_id="comment-title"),
form,
_id="comment-form",
_class="clear"),
SCRIPT(script))
return XML(output)
# -----------------------------------------------------------------------------
def posts():
"""
Function accessed by AJAX to handle a Series of Posts
"""
try:
series_id = request.args[0]
except:
raise HTTP(400)
    try:
        recent = int(request.args[1])
    except:
        recent = 5
table = s3db.cms_post
# List of Posts in this Series
query = (table.series_id == series_id)
posts = db(query).select(table.name,
table.body,
table.avatar,
table.created_by,
table.created_on,
limitby=(0, recent))
output = UL(_id="comments")
import hashlib
for post in posts:
author = B(T("Anonymous"))
if post.created_by:
utable = s3db.auth_user
ptable = s3db.pr_person
ltable = s3db.pr_person_user
query = (utable.id == post.created_by)
left = [ltable.on(ltable.user_id == utable.id),
ptable.on(ptable.pe_id == ltable.pe_id)]
row = db(query).select(utable.email,
ptable.first_name,
ptable.middle_name,
ptable.last_name,
left=left, limitby=(0, 1)).first()
if row:
person = row.pr_person
user = row[utable._tablename]
username = s3_fullname(person)
email = user.email.strip().lower()
hash = hashlib.md5(email).hexdigest()
url = "http://www.gravatar.com/%s" % hash
author = B(A(username, _href=url, _target="top"))
header = H4(post.name)
if post.avatar:
avatar = s3base.s3_avatar_represent(post.created_by)
else:
avatar = ""
row = LI(DIV(avatar,
DIV(DIV(header,
_class="comment-header"),
DIV(XML(post.body),
_class="comment-body"),
_class="comment-text"),
DIV(DIV(post.created_on,
_class="comment-date"),
_class="fright"),
DIV(author,
_class="comment-footer"),
_class="comment-box"))
output.append(row)
return XML(output)
# END =========================================================================
|
|
import pytest
from jinja2 import TemplateNotFound
from werkzeug.http import parse_cache_control_header
import flask
def test_blueprint_specific_error_handling(app, client):
frontend = flask.Blueprint("frontend", __name__)
backend = flask.Blueprint("backend", __name__)
sideend = flask.Blueprint("sideend", __name__)
@frontend.errorhandler(403)
def frontend_forbidden(e):
return "frontend says no", 403
@frontend.route("/frontend-no")
def frontend_no():
flask.abort(403)
@backend.errorhandler(403)
def backend_forbidden(e):
return "backend says no", 403
@backend.route("/backend-no")
def backend_no():
flask.abort(403)
@sideend.route("/what-is-a-sideend")
def sideend_no():
flask.abort(403)
app.register_blueprint(frontend)
app.register_blueprint(backend)
app.register_blueprint(sideend)
@app.errorhandler(403)
def app_forbidden(e):
return "application itself says no", 403
assert client.get("/frontend-no").data == b"frontend says no"
assert client.get("/backend-no").data == b"backend says no"
assert client.get("/what-is-a-sideend").data == b"application itself says no"
def test_blueprint_specific_user_error_handling(app, client):
class MyDecoratorException(Exception):
pass
class MyFunctionException(Exception):
pass
blue = flask.Blueprint("blue", __name__)
@blue.errorhandler(MyDecoratorException)
def my_decorator_exception_handler(e):
assert isinstance(e, MyDecoratorException)
return "boom"
def my_function_exception_handler(e):
assert isinstance(e, MyFunctionException)
return "bam"
blue.register_error_handler(MyFunctionException, my_function_exception_handler)
@blue.route("/decorator")
def blue_deco_test():
raise MyDecoratorException()
@blue.route("/function")
def blue_func_test():
raise MyFunctionException()
app.register_blueprint(blue)
assert client.get("/decorator").data == b"boom"
assert client.get("/function").data == b"bam"
def test_blueprint_app_error_handling(app, client):
errors = flask.Blueprint("errors", __name__)
@errors.app_errorhandler(403)
def forbidden_handler(e):
return "you shall not pass", 403
@app.route("/forbidden")
def app_forbidden():
flask.abort(403)
forbidden_bp = flask.Blueprint("forbidden_bp", __name__)
@forbidden_bp.route("/nope")
def bp_forbidden():
flask.abort(403)
app.register_blueprint(errors)
app.register_blueprint(forbidden_bp)
assert client.get("/forbidden").data == b"you shall not pass"
assert client.get("/nope").data == b"you shall not pass"
@pytest.mark.parametrize(
("prefix", "rule", "url"),
(
("", "/", "/"),
("/", "", "/"),
("/", "/", "/"),
("/foo", "", "/foo"),
("/foo/", "", "/foo/"),
("", "/bar", "/bar"),
("/foo/", "/bar", "/foo/bar"),
("/foo/", "bar", "/foo/bar"),
("/foo", "/bar", "/foo/bar"),
("/foo/", "//bar", "/foo/bar"),
("/foo//", "/bar", "/foo/bar"),
),
)
def test_blueprint_prefix_slash(app, client, prefix, rule, url):
bp = flask.Blueprint("test", __name__, url_prefix=prefix)
@bp.route(rule)
def index():
return "", 204
app.register_blueprint(bp)
assert client.get(url).status_code == 204
def test_blueprint_url_defaults(app, client):
bp = flask.Blueprint("test", __name__)
@bp.route("/foo", defaults={"baz": 42})
def foo(bar, baz):
return f"{bar}/{baz:d}"
@bp.route("/bar")
def bar(bar):
return str(bar)
app.register_blueprint(bp, url_prefix="/1", url_defaults={"bar": 23})
app.register_blueprint(bp, name="test2", url_prefix="/2", url_defaults={"bar": 19})
assert client.get("/1/foo").data == b"23/42"
assert client.get("/2/foo").data == b"19/42"
assert client.get("/1/bar").data == b"23"
assert client.get("/2/bar").data == b"19"
def test_blueprint_url_processors(app, client):
bp = flask.Blueprint("frontend", __name__, url_prefix="/<lang_code>")
@bp.url_defaults
def add_language_code(endpoint, values):
values.setdefault("lang_code", flask.g.lang_code)
@bp.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop("lang_code")
@bp.route("/")
def index():
return flask.url_for(".about")
@bp.route("/about")
def about():
return flask.url_for(".index")
app.register_blueprint(bp)
assert client.get("/de/").data == b"/de/about"
assert client.get("/de/about").data == b"/de/"
def test_templates_and_static(test_apps):
from blueprintapp import app
client = app.test_client()
rv = client.get("/")
assert rv.data == b"Hello from the Frontend"
rv = client.get("/admin/")
assert rv.data == b"Hello from the Admin"
rv = client.get("/admin/index2")
assert rv.data == b"Hello from the Admin"
rv = client.get("/admin/static/test.txt")
assert rv.data.strip() == b"Admin File"
rv.close()
rv = client.get("/admin/static/css/test.css")
assert rv.data.strip() == b"/* nested file */"
rv.close()
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config["SEND_FILE_MAX_AGE_DEFAULT"]
try:
expected_max_age = 3600
if app.config["SEND_FILE_MAX_AGE_DEFAULT"] == expected_max_age:
expected_max_age = 7200
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = expected_max_age
rv = client.get("/admin/static/css/test.css")
cc = parse_cache_control_header(rv.headers["Cache-Control"])
assert cc.max_age == expected_max_age
rv.close()
finally:
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = max_age_default
with app.test_request_context():
assert (
flask.url_for("admin.static", filename="test.txt")
== "/admin/static/test.txt"
)
with app.test_request_context():
with pytest.raises(TemplateNotFound) as e:
flask.render_template("missing.html")
assert e.value.name == "missing.html"
with flask.Flask(__name__).test_request_context():
assert flask.render_template("nested/nested.txt") == "I'm nested"
def test_default_static_max_age(app):
class MyBlueprint(flask.Blueprint):
def get_send_file_max_age(self, filename):
return 100
blueprint = MyBlueprint("blueprint", __name__, static_folder="static")
app.register_blueprint(blueprint)
# try/finally, in case other tests use this app for Blueprint tests.
max_age_default = app.config["SEND_FILE_MAX_AGE_DEFAULT"]
try:
with app.test_request_context():
unexpected_max_age = 3600
if app.config["SEND_FILE_MAX_AGE_DEFAULT"] == unexpected_max_age:
unexpected_max_age = 7200
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = unexpected_max_age
rv = blueprint.send_static_file("index.html")
cc = parse_cache_control_header(rv.headers["Cache-Control"])
assert cc.max_age == 100
rv.close()
finally:
app.config["SEND_FILE_MAX_AGE_DEFAULT"] = max_age_default
def test_templates_list(test_apps):
from blueprintapp import app
templates = sorted(app.jinja_env.list_templates())
assert templates == ["admin/index.html", "frontend/index.html"]
def test_dotted_name_not_allowed(app, client):
with pytest.raises(ValueError):
flask.Blueprint("app.ui", __name__)
def test_dotted_names_from_app(app, client):
test = flask.Blueprint("test", __name__)
@app.route("/")
def app_index():
return flask.url_for("test.index")
@test.route("/test/")
def index():
return flask.url_for("app_index")
app.register_blueprint(test)
rv = client.get("/")
assert rv.data == b"/test/"
def test_empty_url_defaults(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.route("/", defaults={"page": 1})
@bp.route("/page/<int:page>")
def something(page):
return str(page)
app.register_blueprint(bp)
assert client.get("/").data == b"1"
assert client.get("/page/2").data == b"2"
def test_route_decorator_custom_endpoint(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.route("/foo")
def foo():
return flask.request.endpoint
@bp.route("/bar", endpoint="bar")
def foo_bar():
return flask.request.endpoint
@bp.route("/bar/123", endpoint="123")
def foo_bar_foo():
return flask.request.endpoint
@bp.route("/bar/foo")
def bar_foo():
return flask.request.endpoint
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.request.endpoint
assert client.get("/").data == b"index"
assert client.get("/py/foo").data == b"bp.foo"
assert client.get("/py/bar").data == b"bp.bar"
assert client.get("/py/bar/123").data == b"bp.123"
assert client.get("/py/bar/foo").data == b"bp.bar_foo"
def test_route_decorator_custom_endpoint_with_dots(app, client):
bp = flask.Blueprint("bp", __name__)
with pytest.raises(ValueError):
bp.route("/", endpoint="a.b")(lambda: "")
with pytest.raises(ValueError):
bp.add_url_rule("/", endpoint="a.b")
def view():
return ""
view.__name__ = "a.b"
with pytest.raises(ValueError):
bp.add_url_rule("/", view_func=view)
def test_endpoint_decorator(app, client):
from werkzeug.routing import Rule
app.url_map.add(Rule("/foo", endpoint="bar"))
bp = flask.Blueprint("bp", __name__)
@bp.endpoint("bar")
def foobar():
return flask.request.endpoint
app.register_blueprint(bp, url_prefix="/bp_prefix")
assert client.get("/foo").data == b"bar"
assert client.get("/bp_prefix/bar").status_code == 404
def test_template_filter(app):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_filter()
def my_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix="/py")
assert "my_reverse" in app.jinja_env.filters.keys()
assert app.jinja_env.filters["my_reverse"] == my_reverse
assert app.jinja_env.filters["my_reverse"]("abcd") == "dcba"
def test_add_template_filter(app):
bp = flask.Blueprint("bp", __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse)
app.register_blueprint(bp, url_prefix="/py")
assert "my_reverse" in app.jinja_env.filters.keys()
assert app.jinja_env.filters["my_reverse"] == my_reverse
assert app.jinja_env.filters["my_reverse"]("abcd") == "dcba"
def test_template_filter_with_name(app):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_filter("strrev")
def my_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix="/py")
assert "strrev" in app.jinja_env.filters.keys()
assert app.jinja_env.filters["strrev"] == my_reverse
assert app.jinja_env.filters["strrev"]("abcd") == "dcba"
def test_add_template_filter_with_name(app):
bp = flask.Blueprint("bp", __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, "strrev")
app.register_blueprint(bp, url_prefix="/py")
assert "strrev" in app.jinja_env.filters.keys()
assert app.jinja_env.filters["strrev"] == my_reverse
assert app.jinja_env.filters["strrev"]("abcd") == "dcba"
def test_template_filter_with_template(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_filter.html", value="abcd")
rv = client.get("/")
assert rv.data == b"dcba"
def test_template_filter_after_route_with_template(app, client):
@app.route("/")
def index():
return flask.render_template("template_filter.html", value="abcd")
bp = flask.Blueprint("bp", __name__)
@bp.app_template_filter()
def super_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix="/py")
rv = client.get("/")
assert rv.data == b"dcba"
def test_add_template_filter_with_template(app, client):
bp = flask.Blueprint("bp", __name__)
def super_reverse(s):
return s[::-1]
bp.add_app_template_filter(super_reverse)
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_filter.html", value="abcd")
rv = client.get("/")
assert rv.data == b"dcba"
def test_template_filter_with_name_and_template(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_filter("super_reverse")
def my_reverse(s):
return s[::-1]
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_filter.html", value="abcd")
rv = client.get("/")
assert rv.data == b"dcba"
def test_add_template_filter_with_name_and_template(app, client):
bp = flask.Blueprint("bp", __name__)
def my_reverse(s):
return s[::-1]
bp.add_app_template_filter(my_reverse, "super_reverse")
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_filter.html", value="abcd")
rv = client.get("/")
assert rv.data == b"dcba"
def test_template_test(app):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_test()
def is_boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix="/py")
assert "is_boolean" in app.jinja_env.tests.keys()
assert app.jinja_env.tests["is_boolean"] == is_boolean
assert app.jinja_env.tests["is_boolean"](False)
def test_add_template_test(app):
bp = flask.Blueprint("bp", __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean)
app.register_blueprint(bp, url_prefix="/py")
assert "is_boolean" in app.jinja_env.tests.keys()
assert app.jinja_env.tests["is_boolean"] == is_boolean
assert app.jinja_env.tests["is_boolean"](False)
def test_template_test_with_name(app):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_test("boolean")
def is_boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix="/py")
assert "boolean" in app.jinja_env.tests.keys()
assert app.jinja_env.tests["boolean"] == is_boolean
assert app.jinja_env.tests["boolean"](False)
def test_add_template_test_with_name(app):
bp = flask.Blueprint("bp", __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, "boolean")
app.register_blueprint(bp, url_prefix="/py")
assert "boolean" in app.jinja_env.tests.keys()
assert app.jinja_env.tests["boolean"] == is_boolean
assert app.jinja_env.tests["boolean"](False)
def test_template_test_with_template(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_test.html", value=False)
rv = client.get("/")
assert b"Success!" in rv.data
def test_template_test_after_route_with_template(app, client):
@app.route("/")
def index():
return flask.render_template("template_test.html", value=False)
bp = flask.Blueprint("bp", __name__)
@bp.app_template_test()
def boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix="/py")
rv = client.get("/")
assert b"Success!" in rv.data
def test_add_template_test_with_template(app, client):
bp = flask.Blueprint("bp", __name__)
def boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(boolean)
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_test.html", value=False)
rv = client.get("/")
assert b"Success!" in rv.data
def test_template_test_with_name_and_template(app, client):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_test("boolean")
def is_boolean(value):
return isinstance(value, bool)
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_test.html", value=False)
rv = client.get("/")
assert b"Success!" in rv.data
def test_add_template_test_with_name_and_template(app, client):
bp = flask.Blueprint("bp", __name__)
def is_boolean(value):
return isinstance(value, bool)
bp.add_app_template_test(is_boolean, "boolean")
app.register_blueprint(bp, url_prefix="/py")
@app.route("/")
def index():
return flask.render_template("template_test.html", value=False)
rv = client.get("/")
assert b"Success!" in rv.data
def test_context_processing(app, client):
answer_bp = flask.Blueprint("answer_bp", __name__)
template_string = lambda: flask.render_template_string( # noqa: E731
"{% if notanswer %}{{ notanswer }} is not the answer. {% endif %}"
"{% if answer %}{{ answer }} is the answer.{% endif %}"
)
# App global context processor
@answer_bp.app_context_processor
def not_answer_context_processor():
return {"notanswer": 43}
# Blueprint local context processor
@answer_bp.context_processor
def answer_context_processor():
return {"answer": 42}
# Setup endpoints for testing
@answer_bp.route("/bp")
def bp_page():
return template_string()
@app.route("/")
def app_page():
return template_string()
# Register the blueprint
app.register_blueprint(answer_bp)
app_page_bytes = client.get("/").data
answer_page_bytes = client.get("/bp").data
assert b"43" in app_page_bytes
assert b"42" not in app_page_bytes
assert b"42" in answer_page_bytes
assert b"43" in answer_page_bytes
def test_template_global(app):
bp = flask.Blueprint("bp", __name__)
@bp.app_template_global()
def get_answer():
return 42
# Make sure the function is not in the jinja_env already
assert "get_answer" not in app.jinja_env.globals.keys()
app.register_blueprint(bp)
# Tests
assert "get_answer" in app.jinja_env.globals.keys()
assert app.jinja_env.globals["get_answer"] is get_answer
assert app.jinja_env.globals["get_answer"]() == 42
with app.app_context():
rv = flask.render_template_string("{{ get_answer() }}")
assert rv == "42"
def test_request_processing(app, client):
bp = flask.Blueprint("bp", __name__)
evts = []
@bp.before_request
def before_bp():
evts.append("before")
@bp.after_request
def after_bp(response):
response.data += b"|after"
evts.append("after")
return response
@bp.teardown_request
def teardown_bp(exc):
evts.append("teardown")
# Setup routes for testing
@bp.route("/bp")
def bp_endpoint():
return "request"
app.register_blueprint(bp)
assert evts == []
rv = client.get("/bp")
assert rv.data == b"request|after"
assert evts == ["before", "after", "teardown"]
def test_app_request_processing(app, client):
bp = flask.Blueprint("bp", __name__)
evts = []
@bp.before_app_first_request
def before_first_request():
evts.append("first")
@bp.before_app_request
def before_app():
evts.append("before")
@bp.after_app_request
def after_app(response):
response.data += b"|after"
evts.append("after")
return response
@bp.teardown_app_request
def teardown_app(exc):
evts.append("teardown")
app.register_blueprint(bp)
# Setup routes for testing
@app.route("/")
def bp_endpoint():
return "request"
# before first request
assert evts == []
# first request
resp = client.get("/").data
assert resp == b"request|after"
assert evts == ["first", "before", "after", "teardown"]
# second request
resp = client.get("/").data
assert resp == b"request|after"
assert evts == ["first"] + ["before", "after", "teardown"] * 2
def test_app_url_processors(app, client):
bp = flask.Blueprint("bp", __name__)
# Register app-wide url defaults and preprocessor on blueprint
@bp.app_url_defaults
def add_language_code(endpoint, values):
values.setdefault("lang_code", flask.g.lang_code)
@bp.app_url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop("lang_code")
# Register route rules at the app level
@app.route("/<lang_code>/")
def index():
return flask.url_for("about")
@app.route("/<lang_code>/about")
def about():
return flask.url_for("index")
app.register_blueprint(bp)
assert client.get("/de/").data == b"/de/about"
assert client.get("/de/about").data == b"/de/"
def test_nested_blueprint(app, client):
parent = flask.Blueprint("parent", __name__)
child = flask.Blueprint("child", __name__)
grandchild = flask.Blueprint("grandchild", __name__)
@parent.errorhandler(403)
def forbidden(e):
return "Parent no", 403
@parent.route("/")
def parent_index():
return "Parent yes"
@parent.route("/no")
def parent_no():
flask.abort(403)
@child.route("/")
def child_index():
return "Child yes"
@child.route("/no")
def child_no():
flask.abort(403)
@grandchild.errorhandler(403)
def grandchild_forbidden(e):
return "Grandchild no", 403
@grandchild.route("/")
def grandchild_index():
return "Grandchild yes"
@grandchild.route("/no")
def grandchild_no():
flask.abort(403)
child.register_blueprint(grandchild, url_prefix="/grandchild")
parent.register_blueprint(child, url_prefix="/child")
app.register_blueprint(parent, url_prefix="/parent")
assert client.get("/parent/").data == b"Parent yes"
assert client.get("/parent/child/").data == b"Child yes"
assert client.get("/parent/child/grandchild/").data == b"Grandchild yes"
assert client.get("/parent/no").data == b"Parent no"
assert client.get("/parent/child/no").data == b"Parent no"
assert client.get("/parent/child/grandchild/no").data == b"Grandchild no"
def test_nested_callback_order(app, client):
parent = flask.Blueprint("parent", __name__)
child = flask.Blueprint("child", __name__)
@app.before_request
def app_before1():
flask.g.setdefault("seen", []).append("app_1")
@app.teardown_request
def app_teardown1(e=None):
assert flask.g.seen.pop() == "app_1"
@app.before_request
def app_before2():
flask.g.setdefault("seen", []).append("app_2")
@app.teardown_request
def app_teardown2(e=None):
assert flask.g.seen.pop() == "app_2"
@app.context_processor
def app_ctx():
return dict(key="app")
@parent.before_request
def parent_before1():
flask.g.setdefault("seen", []).append("parent_1")
@parent.teardown_request
def parent_teardown1(e=None):
assert flask.g.seen.pop() == "parent_1"
@parent.before_request
def parent_before2():
flask.g.setdefault("seen", []).append("parent_2")
@parent.teardown_request
def parent_teardown2(e=None):
assert flask.g.seen.pop() == "parent_2"
@parent.context_processor
def parent_ctx():
return dict(key="parent")
@child.before_request
def child_before1():
flask.g.setdefault("seen", []).append("child_1")
@child.teardown_request
def child_teardown1(e=None):
assert flask.g.seen.pop() == "child_1"
@child.before_request
def child_before2():
flask.g.setdefault("seen", []).append("child_2")
@child.teardown_request
def child_teardown2(e=None):
assert flask.g.seen.pop() == "child_2"
@child.context_processor
def child_ctx():
return dict(key="child")
@child.route("/a")
def a():
return ", ".join(flask.g.seen)
@child.route("/b")
def b():
return flask.render_template_string("{{ key }}")
parent.register_blueprint(child)
app.register_blueprint(parent)
assert (
client.get("/a").data == b"app_1, app_2, parent_1, parent_2, child_1, child_2"
)
assert client.get("/b").data == b"child"
@pytest.mark.parametrize(
"parent_init, child_init, parent_registration, child_registration",
[
("/parent", "/child", None, None),
("/parent", None, None, "/child"),
(None, None, "/parent", "/child"),
("/other", "/something", "/parent", "/child"),
],
)
def test_nesting_url_prefixes(
parent_init,
child_init,
parent_registration,
child_registration,
app,
client,
) -> None:
parent = flask.Blueprint("parent", __name__, url_prefix=parent_init)
child = flask.Blueprint("child", __name__, url_prefix=child_init)
@child.route("/")
def index():
return "index"
parent.register_blueprint(child, url_prefix=child_registration)
app.register_blueprint(parent, url_prefix=parent_registration)
response = client.get("/parent/child/")
assert response.status_code == 200
def test_unique_blueprint_names(app, client) -> None:
bp = flask.Blueprint("bp", __name__)
bp2 = flask.Blueprint("bp", __name__)
app.register_blueprint(bp)
with pytest.raises(ValueError):
app.register_blueprint(bp) # same bp, same name, error
app.register_blueprint(bp, name="again") # same bp, different name, ok
with pytest.raises(ValueError):
app.register_blueprint(bp2) # different bp, same name, error
app.register_blueprint(bp2, name="alt") # different bp, different name, ok
def test_self_registration(app, client) -> None:
bp = flask.Blueprint("bp", __name__)
with pytest.raises(ValueError):
bp.register_blueprint(bp)
def test_blueprint_renaming(app, client) -> None:
bp = flask.Blueprint("bp", __name__)
bp2 = flask.Blueprint("bp2", __name__)
@bp.get("/")
def index():
return flask.request.endpoint
@bp.get("/error")
def error():
flask.abort(403)
@bp.errorhandler(403)
def forbidden(_: Exception):
return "Error", 403
@bp2.get("/")
def index2():
return flask.request.endpoint
bp.register_blueprint(bp2, url_prefix="/a", name="sub")
app.register_blueprint(bp, url_prefix="/a")
app.register_blueprint(bp, url_prefix="/b", name="alt")
assert client.get("/a/").data == b"bp.index"
assert client.get("/b/").data == b"alt.index"
assert client.get("/a/a/").data == b"bp.sub.index2"
assert client.get("/b/a/").data == b"alt.sub.index2"
assert client.get("/a/error").data == b"Error"
assert client.get("/b/error").data == b"Error"
|
|
"""
Usage:
cfgen FILE [list] [OPTIONS] [--overwrite]
Commands:
list list variables and their values
Options:
--overwrite overwrite existing file
Files:
    metaconfig cache (.cfgen.cache):
        <variable_name> = <value>
    metaconfig file (cfgen.metaconfig):
        <variable_name> = <shell_command>
    target template (*.template):
        jinja template
"""
# terminology
#
# metaconfig - file where variables are defined
# metaconfig cache - file where values for variables are cached
# target template - file where template of the target config is defined
# target config - file to be generated from target template
#
# file path: /home/joe/Documents/file1.txt
# directory path: /home/joe/Documents/
# directory name: Documents
# file name: file1.txt
# stem: file1
# extension: .txt
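#
# illustrative example (the file names and shell commands below are
# assumptions for the sake of the example, not taken from a real project):
#
#   cfgen.metaconfig        user_name = git config user.name
#                           host_name = hostname
#   .cfgen.cache            user_name = Joe Example
#   app.conf.template       name = {{ user_name }}
#                           host = {{ host_name }}
#
#   `cfgen app.conf` would then evaluate the uncached variables, render the
#   template with the merged values and write the result to app.conf.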
from __future__ import print_function
from collections import OrderedDict
import os
import subprocess
import sys
import docopt
import future.utils
import jinja2
from jinja2.loaders import FileSystemLoader
_prg_name = "cfgen"
_metaconfig_file_name = _prg_name + ".metaconfig"
_target_template_file_extension = ".template"
_metaconfig_cache_file_name = "." + _prg_name + ".cache"
_metaconfig_caching_file_name = _prg_name + ".caching"
def main():
args = docopt.docopt(__doc__)
target_file = args["FILE"]
if args["list"]:
cmd_list(target_file)
else:
cmd_write(target_file, args.get("--overwrite"))
def cmd_list(target_file_name):
    expressions, values = load_all(target_file_name)
    values = evaluate_expressions(expressions, values)
    for name, value in values.items():
        print(name, value)
def cmd_write(target_file_name, overwrite_target):
if os.path.exists(target_file_name) and not overwrite_target:
raise ValueError(target_file_name + " already exists")
expressions, values = load_all(target_file_name)
values = evaluate_expressions(expressions, values)
rendered = render_template(target_file_name, values)
with open(target_file_name, "w") as f:
print(rendered, end="", file=f)
# cache at the end of successful write
cache_values(values, _metaconfig_cache_file_name, _metaconfig_caching_file_name)
def render_template(target_file_name, values):
env = jinja2.Environment(loader=FileSystemLoader("/"),
undefined=jinja2.runtime.StrictUndefined)
template_file_name = get_target_template_file_name(target_file_name)
return env.get_template(lookup_template_file(template_file_name)).render(values)
def lookup_template_file(template_file_name):
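    """ Searches each directory from the root down to the cwd; the deepest match wins """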
template_file_path = None
directory_path = ""
for directory_name in get_current_path_elements():
directory_path += os.sep + directory_name
template_file_path_candidate = directory_path + os.sep + template_file_name
if os.path.isfile(template_file_path_candidate):
# last one will win
template_file_path = template_file_path_candidate
if not template_file_path:
raise ValueError("Template file not found " + template_file_name)
return template_file_path
def load_all(target_file_name):
expressions = load_metaconfigs()
values = load_metaconfig(_metaconfig_cache_file_name)
return expressions, values
def cache_values(definitions, cache_file_name, caching_file_name):
    # read which variable names need to be cached
    caching_var_names = []
    if os.path.exists(caching_file_name):
        with open(caching_file_name) as f:
            caching_var_names = f.read().splitlines()
    # cache
    with open(cache_file_name, "w") as f:
        for name, value in definitions.items():
            if name in caching_var_names:
                print(name + " = " + value, file=f)
def load_metaconfigs():
""" Loads all metaconfig files and returns merged variable to expression dictionary """
definitions = OrderedDict()
directory_path = ""
for directory_name in get_current_path_elements():
directory_path += os.sep + directory_name
metaconfig_file_path = directory_path + os.sep + _metaconfig_file_name
definitions.update(load_metaconfig(metaconfig_file_path))
return definitions
def load_metaconfig(file_path):
""" Loads a single metaconfig file and returns variable to expression dictionary """
definitions = OrderedDict()
if os.path.isfile(file_path):
with open(file_path) as f:
for line in [l.strip(os.linesep).strip() for l in f.readlines()]:
# skip comments
if line.startswith("#"):
continue
# skip empty lines
if not line:
continue
# parse line
try:
var_name, var_expression = parse_metaconfig_line(line)
definitions[var_name] = var_expression
except ValueError as e:
future.utils.raise_from(
ValueError("Cannot parse metaconfig; file=" + file_path), e)
return definitions
def evaluate_expressions(expressions, values):
for var_name, var_expression in expressions.items():
        if var_name not in values:
values[var_name] = evaluate_expression(var_expression, values)
return values
def evaluate_expression(var_expression, eval_env):
    """ Evaluates the expression as a shell command, with earlier values as its environment """
    try:
        completed_process = subprocess.check_output(var_expression,
                                                    env=eval_env,
                                                    shell=True,
                                                    universal_newlines=True)
        return get_string(completed_process).rstrip()
    except Exception:
        e = sys.exc_info()[1]
        future.utils.raise_from(
            ValueError("Cannot evaluate expression " + var_expression), e)
def parse_metaconfig_line(line):
""" Parses line and return key=value tuple """
delimiter_pos = line.find("=")
if delimiter_pos == -1:
raise ValueError("'=' not found in metaconfig line " + line)
var_name = line[:delimiter_pos].strip()
if len(var_name) == 0:
raise ValueError("variable name not found in metaconfig line " + line)
var_expr = line[delimiter_pos + 1:].strip()
return var_name, var_expr
def get_current_path_elements():
# first element is empty, remove it
return os.getcwd().split(os.sep)[1:]
def get_target_template_file_name(target_file_name):
return target_file_name + _target_template_file_extension
def get_string(value):
""" Returns string representation of the value or empty string if None """
if value:
return str(value)
else:
return ""
if __name__ == '__main__':
main()
|
|
import unittest
from test import test_support
import os
import socket
import StringIO
import urllib2
from urllib2 import Request, OpenerDirector
# XXX
# Request
# CacheFTPHandler (hard to write)
# parse_keqv_list, parse_http_list, HTTPDigestAuthHandler
class TrivialTests(unittest.TestCase):
def test_trivial(self):
# A couple trivial tests
self.assertRaises(ValueError, urllib2.urlopen, 'bogus url')
# XXX Name hacking to get this to work on Windows.
fname = os.path.abspath(urllib2.__file__).replace('\\', '/')
# And more hacking to get it to work on MacOS. This assumes
# urllib.pathname2url works, unfortunately...
if os.name == 'riscos':
import string
fname = os.expand(fname)
fname = fname.translate(string.maketrans("/.", "./"))
if os.name == 'nt':
file_url = "file:///%s" % fname
else:
file_url = "file://%s" % fname
f = urllib2.urlopen(file_url)
buf = f.read()
f.close()
def test_parse_http_list(self):
tests = [('a,b,c', ['a', 'b', 'c']),
('path"o,l"og"i"cal, example', ['path"o,l"og"i"cal', 'example']),
('a, b, "c", "d", "e,f", g, h', ['a', 'b', '"c"', '"d"', '"e,f"', 'g', 'h']),
('a="b\\"c", d="e\\,f", g="h\\\\i"', ['a="b"c"', 'd="e,f"', 'g="h\\i"'])]
for string, list in tests:
self.assertEqual(urllib2.parse_http_list(string), list)
def test_request_headers_dict():
"""
The Request.headers dictionary is not a documented interface. It should
stay that way, because the complete set of headers are only accessible
through the .get_header(), .has_header(), .header_items() interface.
However, .headers pre-dates those methods, and so real code will be using
the dictionary.
The introduction in 2.4 of those methods was a mistake for the same reason:
code that previously saw all (urllib2 user)-provided headers in .headers
now sees only a subset (and the function interface is ugly and incomplete).
A better change would have been to replace .headers dict with a dict
subclass (or UserDict.DictMixin instance?) that preserved the .headers
interface and also provided access to the "unredirected" headers. It's
probably too late to fix that, though.
Check .capitalize() case normalization:
>>> url = "http://example.com"
>>> Request(url, headers={"Spam-eggs": "blah"}).headers["Spam-eggs"]
'blah'
>>> Request(url, headers={"spam-EggS": "blah"}).headers["Spam-eggs"]
'blah'
Currently, Request(url, "Spam-eggs").headers["Spam-Eggs"] raises KeyError,
but that could be changed in future.
"""
def test_request_headers_methods():
"""
Note the case normalization of header names here, to .capitalize()-case.
This should be preserved for backwards-compatibility. (In the HTTP case,
normalization to .title()-case is done by urllib2 before sending headers to
httplib).
>>> url = "http://example.com"
>>> r = Request(url, headers={"Spam-eggs": "blah"})
>>> r.has_header("Spam-eggs")
True
>>> r.header_items()
[('Spam-eggs', 'blah')]
>>> r.add_header("Foo-Bar", "baz")
>>> items = r.header_items()
>>> items.sort()
>>> items
[('Foo-bar', 'baz'), ('Spam-eggs', 'blah')]
Note that e.g. r.has_header("spam-EggS") is currently False, and
r.get_header("spam-EggS") returns None, but that could be changed in
future.
>>> r.has_header("Not-there")
False
>>> print r.get_header("Not-there")
None
>>> r.get_header("Not-there", "default")
'default'
"""
def test_password_manager(self):
"""
>>> mgr = urllib2.HTTPPasswordMgr()
>>> add = mgr.add_password
>>> add("Some Realm", "http://example.com/", "joe", "password")
>>> add("Some Realm", "http://example.com/ni", "ni", "ni")
>>> add("c", "http://example.com/foo", "foo", "ni")
>>> add("c", "http://example.com/bar", "bar", "nini")
>>> add("b", "http://example.com/", "first", "blah")
>>> add("b", "http://example.com/", "second", "spam")
>>> add("a", "http://example.com", "1", "a")
>>> add("Some Realm", "http://c.example.com:3128", "3", "c")
>>> add("Some Realm", "d.example.com", "4", "d")
>>> add("Some Realm", "e.example.com:3128", "5", "e")
>>> mgr.find_user_password("Some Realm", "example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam")
('joe', 'password')
>>> mgr.find_user_password("Some Realm", "http://example.com/spam/spam")
('joe', 'password')
>>> mgr.find_user_password("c", "http://example.com/foo")
('foo', 'ni')
>>> mgr.find_user_password("c", "http://example.com/bar")
('bar', 'nini')
Actually, this is really undefined ATM
## Currently, we use the highest-level path where more than one match:
## >>> mgr.find_user_password("Some Realm", "http://example.com/ni")
## ('joe', 'password')
Use latest add_password() in case of conflict:
>>> mgr.find_user_password("b", "http://example.com/")
('second', 'spam')
No special relationship between a.example.com and example.com:
>>> mgr.find_user_password("a", "http://example.com/")
('1', 'a')
>>> mgr.find_user_password("a", "http://a.example.com/")
(None, None)
Ports:
>>> mgr.find_user_password("Some Realm", "c.example.com")
(None, None)
>>> mgr.find_user_password("Some Realm", "c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "http://c.example.com:3128")
('3', 'c')
>>> mgr.find_user_password("Some Realm", "d.example.com")
('4', 'd')
>>> mgr.find_user_password("Some Realm", "e.example.com:3128")
('5', 'e')
"""
pass
def test_password_manager_default_port(self):
"""
>>> mgr = urllib2.HTTPPasswordMgr()
>>> add = mgr.add_password
The point to note here is that we can't guess the default port if there's
no scheme. This applies to both add_password and find_user_password.
>>> add("f", "http://g.example.com:80", "10", "j")
>>> add("g", "http://h.example.com", "11", "k")
>>> add("h", "i.example.com:80", "12", "l")
>>> add("i", "j.example.com", "13", "m")
>>> mgr.find_user_password("f", "g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "g.example.com")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:100")
(None, None)
>>> mgr.find_user_password("f", "http://g.example.com:80")
('10', 'j')
>>> mgr.find_user_password("f", "http://g.example.com")
('10', 'j')
>>> mgr.find_user_password("g", "h.example.com")
('11', 'k')
>>> mgr.find_user_password("g", "h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("g", "http://h.example.com:80")
('11', 'k')
>>> mgr.find_user_password("h", "i.example.com")
(None, None)
>>> mgr.find_user_password("h", "i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("h", "http://i.example.com:80")
('12', 'l')
>>> mgr.find_user_password("i", "j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "j.example.com:80")
(None, None)
>>> mgr.find_user_password("i", "http://j.example.com")
('13', 'm')
>>> mgr.find_user_password("i", "http://j.example.com:80")
(None, None)
"""
class MockOpener:
addheaders = []
def open(self, req, data=None,timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.req, self.data, self.timeout = req, data, timeout
def error(self, proto, *args):
self.proto, self.args = proto, args
class MockFile:
def read(self, count=None): pass
def readline(self, count=None): pass
def close(self): pass
class MockHeaders(dict):
def getheaders(self, name):
return self.values()
class MockResponse(StringIO.StringIO):
def __init__(self, code, msg, headers, data, url=None):
StringIO.StringIO.__init__(self, data)
self.code, self.msg, self.headers, self.url = code, msg, headers, url
def info(self):
return self.headers
def geturl(self):
return self.url
class MockCookieJar:
def add_cookie_header(self, request):
self.ach_req = request
def extract_cookies(self, response, request):
self.ec_req, self.ec_r = request, response
class FakeMethod:
def __init__(self, meth_name, action, handle):
self.meth_name = meth_name
self.handle = handle
self.action = action
def __call__(self, *args):
return self.handle(self.meth_name, self.action, *args)
class MockHTTPResponse:
def __init__(self, fp, msg, status, reason):
self.fp = fp
self.msg = msg
self.status = status
self.reason = reason
def read(self):
return ''
class MockHTTPClass:
def __init__(self):
self.req_headers = []
self.data = None
self.raise_on_endheaders = False
self._tunnel_headers = {}
def __call__(self, host, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.host = host
self.timeout = timeout
return self
def set_debuglevel(self, level):
self.level = level
def set_tunnel(self, host, port=None, headers=None):
self._tunnel_host = host
self._tunnel_port = port
if headers:
self._tunnel_headers = headers
else:
self._tunnel_headers.clear()
def request(self, method, url, body=None, headers=None):
self.method = method
self.selector = url
if headers is not None:
self.req_headers += headers.items()
self.req_headers.sort()
if body:
self.data = body
if self.raise_on_endheaders:
import socket
raise socket.error()
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
def close(self):
pass
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
handler_order = 500
def __init__(self, methods):
self._define_methods(methods)
def _define_methods(self, methods):
for spec in methods:
if len(spec) == 2: name, action = spec
else: name, action = spec, None
meth = FakeMethod(name, action, self.handle)
setattr(self.__class__, name, meth)
def handle(self, fn_name, action, *args, **kwds):
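        # 'action' is a small protocol for canned behaviour (see the
        # add_ordered_mock_handlers() docstring): None just records the call,
        # "return self" / "return response" / "return request" return the
        # corresponding object, "error <code>" routes the response through
        # OpenerDirector.error(), and "raise" raises URLError.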
self.parent.calls.append((self, fn_name, args, kwds))
if action is None:
return None
elif action == "return self":
return self
elif action == "return response":
res = MockResponse(200, "OK", {}, "")
return res
elif action == "return request":
return Request("http://blah/")
elif action.startswith("error"):
code = action[action.rfind(" ")+1:]
try:
code = int(code)
except ValueError:
pass
res = MockResponse(200, "OK", {}, "")
return self.parent.error("http", args[0], res, code, "", {})
elif action == "raise":
raise urllib2.URLError("blah")
assert False
def close(self): pass
def add_parent(self, parent):
self.parent = parent
self.parent.calls = []
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# No handler_order, leave in original order. Yuck.
return True
return self.handler_order < other.handler_order
def add_ordered_mock_handlers(opener, meth_spec):
"""Create MockHandlers and add them to an OpenerDirector.
meth_spec: list of lists of tuples and strings defining methods to define
on handlers. eg:
[["http_error", "ftp_open"], ["http_open"]]
defines methods .http_error() and .ftp_open() on one handler, and
.http_open() on another. These methods just record their arguments and
return None. Using a tuple instead of a string causes the method to
perform some action (see MockHandler.handle()), eg:
[["http_error"], [("http_open", "return request")]]
defines .http_error() on one handler (which simply returns None), and
.http_open() on another handler, which returns a Request object.
"""
handlers = []
count = 0
for meths in meth_spec:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order += count
h.add_parent(opener)
count = count + 1
handlers.append(h)
opener.add_handler(h)
return handlers
def build_test_opener(*handler_instances):
opener = OpenerDirector()
for h in handler_instances:
opener.add_handler(h)
return opener
class MockHTTPHandler(urllib2.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
def __init__(self, code, headers):
self.code = code
self.headers = headers
self.reset()
def reset(self):
self._count = 0
self.requests = []
def http_open(self, req):
import mimetools, httplib, copy
from StringIO import StringIO
self.requests.append(copy.deepcopy(req))
if self._count == 0:
self._count = self._count + 1
name = httplib.responses[self.code]
msg = mimetools.Message(StringIO(self.headers))
return self.parent.error(
"http", req, MockFile(), self.code, name, msg)
else:
self.req = req
msg = mimetools.Message(StringIO("\r\n\r\n"))
return MockResponse(200, "OK", msg, "", req.get_full_url())
class MockHTTPSHandler(urllib2.AbstractHTTPHandler):
# Useful for testing the Proxy-Authorization request by verifying the
    # properties of httpconn
def __init__(self):
urllib2.AbstractHTTPHandler.__init__(self)
self.httpconn = MockHTTPClass()
def https_open(self, req):
return self.do_open(self.httpconn, req)
class MockPasswordManager:
def add_password(self, realm, uri, user, password):
self.realm = realm
self.url = uri
self.user = user
self.password = password
def find_user_password(self, realm, authuri):
self.target_realm = realm
self.target_url = authuri
return self.user, self.password
class OpenerDirectorTests(unittest.TestCase):
def test_add_non_handler(self):
class NonHandler(object):
pass
self.assertRaises(TypeError,
OpenerDirector().add_handler, NonHandler())
def test_badly_named_methods(self):
# test work-around for three methods that accidentally follow the
# naming conventions for handler methods
# (*_open() / *_request() / *_response())
# These used to call the accidentally-named methods, causing a
# TypeError in real code; here, returning self from these mock
# methods would either cause no exception, or AttributeError.
from urllib2 import URLError
o = OpenerDirector()
meth_spec = [
[("do_open", "return self"), ("proxy_open", "return self")],
[("redirect_request", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
o.add_handler(urllib2.UnknownHandler())
for scheme in "do", "proxy", "redirect":
self.assertRaises(URLError, o.open, scheme+"://example.com/")
def test_handled(self):
# handler returning non-None means no more handlers will be called
o = OpenerDirector()
meth_spec = [
["http_open", "ftp_open", "http_error_302"],
["ftp_open"],
[("http_open", "return self")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# Second .http_open() gets called, third doesn't, since second returned
# non-None. Handlers without .http_open() never get any methods called
# on them.
# In fact, second mock handler defining .http_open() returns self
# (instead of response), which becomes the OpenerDirector's return
# value.
self.assertEqual(r, handlers[2])
calls = [(handlers[0], "http_open"), (handlers[2], "http_open")]
for expected, got in zip(calls, o.calls):
handler, name, args, kwds = got
self.assertEqual((handler, name), expected)
self.assertEqual(args, (req,))
def test_handler_order(self):
o = OpenerDirector()
handlers = []
for meths, handler_order in [
([("http_open", "return self")], 500),
(["http_open"], 0),
]:
class MockHandlerSubclass(MockHandler): pass
h = MockHandlerSubclass(meths)
h.handler_order = handler_order
handlers.append(h)
o.add_handler(h)
r = o.open("http://example.com/")
# handlers called in reverse order, thanks to their sort order
self.assertEqual(o.calls[0][0], handlers[1])
self.assertEqual(o.calls[1][0], handlers[0])
def test_raise(self):
# raising URLError stops processing of request
o = OpenerDirector()
meth_spec = [
[("http_open", "raise")],
[("http_open", "return self")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
self.assertRaises(urllib2.URLError, o.open, req)
self.assertEqual(o.calls, [(handlers[0], "http_open", (req,), {})])
## def test_error(self):
## # XXX this doesn't actually seem to be used in standard library,
## # but should really be tested anyway...
def test_http_error(self):
# XXX http_error_default
# http errors are a special case
o = OpenerDirector()
meth_spec = [
[("http_open", "error 302")],
[("http_error_400", "raise"), "http_open"],
[("http_error_302", "return response"), "http_error_303",
"http_error"],
[("http_error_302")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
class Unknown:
def __eq__(self, other): return True
req = Request("http://example.com/")
r = o.open(req)
assert len(o.calls) == 2
calls = [(handlers[0], "http_open", (req,)),
(handlers[2], "http_error_302",
(req, Unknown(), 302, "", {}))]
for expected, got in zip(calls, o.calls):
handler, method_name, args = expected
self.assertEqual((handler, method_name), got[:2])
self.assertEqual(args, got[2])
def test_processors(self):
# *_request / *_response methods get called appropriately
o = OpenerDirector()
meth_spec = [
[("http_request", "return request"),
("http_response", "return response")],
[("http_request", "return request"),
("http_response", "return response")],
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://example.com/")
r = o.open(req)
# processor methods are called on *all* handlers that define them,
# not just the first handler that handles the request
calls = [
(handlers[0], "http_request"), (handlers[1], "http_request"),
(handlers[0], "http_response"), (handlers[1], "http_response")]
for i, (handler, name, args, kwds) in enumerate(o.calls):
if i < 2:
# *_request
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 1)
self.assertIsInstance(args[0], Request)
else:
# *_response
self.assertEqual((handler, name), calls[i])
self.assertEqual(len(args), 2)
self.assertIsInstance(args[0], Request)
# response from opener.open is None, because there's no
# handler that defines http_open to handle it
self.assertTrue(args[1] is None or
isinstance(args[1], MockResponse))
def sanepathname2url(path):
import urllib
urlpath = urllib.pathname2url(path)
if os.name == "nt" and urlpath.startswith("///"):
urlpath = urlpath[2:]
# XXX don't ask me about the mac...
return urlpath
class HandlerTests(unittest.TestCase):
def test_ftp(self):
class MockFTPWrapper:
def __init__(self, data): self.data = data
def retrfile(self, filename, filetype):
self.filename, self.filetype = filename, filetype
return StringIO.StringIO(self.data), len(self.data)
class NullFTPHandler(urllib2.FTPHandler):
def __init__(self, data): self.data = data
def connect_ftp(self, user, passwd, host, port, dirs,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
self.user, self.passwd = user, passwd
self.host, self.port = host, port
self.dirs = dirs
self.ftpwrapper = MockFTPWrapper(self.data)
return self.ftpwrapper
import ftplib
data = "rheum rhaponicum"
h = NullFTPHandler(data)
o = h.parent = MockOpener()
for url, host, port, user, passwd, type_, dirs, filename, mimetype in [
("ftp://localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%25parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://%2542parrot@localhost/foo/bar/baz.html",
"localhost", ftplib.FTP_PORT, "%42parrot", "", "I",
["foo", "bar"], "baz.html", "text/html"),
("ftp://localhost:80/foo/bar/",
"localhost", 80, "", "", "D",
["foo", "bar"], "", None),
("ftp://localhost/baz.gif;type=a",
"localhost", ftplib.FTP_PORT, "", "", "A",
[], "baz.gif", None), # XXX really this should guess image/gif
]:
req = Request(url)
req.timeout = None
r = h.ftp_open(req)
# ftp authentication not yet implemented by FTPHandler
self.assertEqual(h.user, user)
self.assertEqual(h.passwd, passwd)
self.assertEqual(h.host, socket.gethostbyname(host))
self.assertEqual(h.port, port)
self.assertEqual(h.dirs, dirs)
self.assertEqual(h.ftpwrapper.filename, filename)
self.assertEqual(h.ftpwrapper.filetype, type_)
headers = r.info()
self.assertEqual(headers.get("Content-type"), mimetype)
self.assertEqual(int(headers["Content-length"]), len(data))
def test_file(self):
import rfc822, socket
h = urllib2.FileHandler()
o = h.parent = MockOpener()
TESTFN = test_support.TESTFN
urlpath = sanepathname2url(os.path.abspath(TESTFN))
towrite = "hello, world\n"
urls = [
"file://localhost%s" % urlpath,
"file://%s" % urlpath,
"file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
]
try:
localaddr = socket.gethostbyname(socket.gethostname())
except socket.gaierror:
localaddr = ''
if localaddr:
urls.append("file://%s%s" % (localaddr, urlpath))
for url in urls:
f = open(TESTFN, "wb")
try:
try:
f.write(towrite)
finally:
f.close()
r = h.file_open(Request(url))
try:
data = r.read()
headers = r.info()
respurl = r.geturl()
finally:
r.close()
stats = os.stat(TESTFN)
modified = rfc822.formatdate(stats.st_mtime)
finally:
os.remove(TESTFN)
self.assertEqual(data, towrite)
self.assertEqual(headers["Content-type"], "text/plain")
self.assertEqual(headers["Content-length"], "13")
self.assertEqual(headers["Last-modified"], modified)
self.assertEqual(respurl, url)
for url in [
"file://localhost:80%s" % urlpath,
"file:///file_does_not_exist.txt",
"file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
os.getcwd(), TESTFN),
"file://somerandomhost.ontheinternet.com%s/%s" %
(os.getcwd(), TESTFN),
]:
try:
f = open(TESTFN, "wb")
try:
f.write(towrite)
finally:
f.close()
self.assertRaises(urllib2.URLError,
h.file_open, Request(url))
finally:
os.remove(TESTFN)
h = urllib2.FileHandler()
o = h.parent = MockOpener()
# XXXX why does // mean ftp (and /// mean not ftp!), and where
# is file: scheme specified? I think this is really a bug, and
# what was intended was to distinguish between URLs like:
# file:/blah.txt (a file)
# file://localhost/blah.txt (a file)
# file:///blah.txt (a file)
# file://ftp.example.com/blah.txt (an ftp URL)
for url, ftp in [
("file://ftp.example.com//foo.txt", True),
("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
("file://ftp.example.com/foo.txt", False),
("file://somehost//foo/something.txt", True),
("file://localhost//foo/something.txt", False),
]:
req = Request(url)
try:
h.file_open(req)
# XXXX remove OSError when bug fixed
except (urllib2.URLError, OSError):
self.assertTrue(not ftp)
else:
self.assertTrue(o.req is req)
self.assertEqual(req.type, "ftp")
self.assertEqual(req.type == "ftp", ftp)
def test_http(self):
h = urllib2.AbstractHTTPHandler()
o = h.parent = MockOpener()
url = "http://example.com/"
for method, data in [("GET", None), ("POST", "blah")]:
req = Request(url, data, {"Foo": "bar"})
req.timeout = None
req.add_unredirected_header("Spam", "eggs")
http = MockHTTPClass()
r = h.do_open(http, req)
# result attributes
r.read; r.readline # wrapped MockFile methods
r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.has_key # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")
self.assertEqual(http.level, 0)
self.assertEqual(http.method, method)
self.assertEqual(http.selector, "/")
self.assertEqual(http.req_headers,
[("Connection", "close"),
("Foo", "bar"), ("Spam", "eggs")])
self.assertEqual(http.data, data)
# check socket.error converted to URLError
http.raise_on_endheaders = True
self.assertRaises(urllib2.URLError, h.do_open, http, req)
# check adding of standard headers
o.addheaders = [("Spam", "eggs")]
for data in "", None: # POST, GET
req = Request("http://example.com/", data)
r = MockResponse(200, "OK", {}, "")
newreq = h.do_request_(req)
if data is None: # GET
self.assertNotIn("Content-length", req.unredirected_hdrs)
self.assertNotIn("Content-type", req.unredirected_hdrs)
else: # POST
self.assertEqual(req.unredirected_hdrs["Content-length"], "0")
self.assertEqual(req.unredirected_hdrs["Content-type"],
"application/x-www-form-urlencoded")
# XXX the details of Host could be better tested
self.assertEqual(req.unredirected_hdrs["Host"], "example.com")
self.assertEqual(req.unredirected_hdrs["Spam"], "eggs")
# don't clobber existing headers
req.add_unredirected_header("Content-length", "foo")
req.add_unredirected_header("Content-type", "bar")
req.add_unredirected_header("Host", "baz")
req.add_unredirected_header("Spam", "foo")
newreq = h.do_request_(req)
self.assertEqual(req.unredirected_hdrs["Content-length"], "foo")
self.assertEqual(req.unredirected_hdrs["Content-type"], "bar")
self.assertEqual(req.unredirected_hdrs["Host"], "baz")
self.assertEqual(req.unredirected_hdrs["Spam"], "foo")
def test_http_doubleslash(self):
# Checks that the presence of an unnecessary double slash in a url doesn't break anything
# Previously, a double slash directly after the host could cause incorrect parsing of the url
h = urllib2.AbstractHTTPHandler()
o = h.parent = MockOpener()
data = ""
ds_urls = [
"http://example.com/foo/bar/baz.html",
"http://example.com//foo/bar/baz.html",
"http://example.com/foo//bar/baz.html",
"http://example.com/foo/bar//baz.html",
]
for ds_url in ds_urls:
ds_req = Request(ds_url, data)
# Check whether host is determined correctly if there is no proxy
np_ds_req = h.do_request_(ds_req)
self.assertEqual(np_ds_req.unredirected_hdrs["Host"],"example.com")
# Check whether host is determined correctly if there is a proxy
ds_req.set_proxy("someproxy:3128",None)
p_ds_req = h.do_request_(ds_req)
self.assertEqual(p_ds_req.unredirected_hdrs["Host"],"example.com")
def test_fixpath_in_weirdurls(self):
        # Issue 4493: urllib2 should supply '/' for URLs whose path does not
        # start with '/'
h = urllib2.AbstractHTTPHandler()
o = h.parent = MockOpener()
weird_url = 'http://www.python.org?getspam'
req = Request(weird_url)
newreq = h.do_request_(req)
self.assertEqual(newreq.get_host(),'www.python.org')
self.assertEqual(newreq.get_selector(),'/?getspam')
url_without_path = 'http://www.python.org'
req = Request(url_without_path)
newreq = h.do_request_(req)
self.assertEqual(newreq.get_host(),'www.python.org')
self.assertEqual(newreq.get_selector(),'')
def test_errors(self):
h = urllib2.HTTPErrorProcessor()
o = h.parent = MockOpener()
url = "http://example.com/"
req = Request(url)
# all 2xx are passed through
r = MockResponse(200, "OK", {}, "", url)
newr = h.http_response(req, r)
self.assertTrue(r is newr)
self.assertTrue(not hasattr(o, "proto")) # o.error not called
r = MockResponse(202, "Accepted", {}, "", url)
newr = h.http_response(req, r)
self.assertTrue(r is newr)
self.assertTrue(not hasattr(o, "proto")) # o.error not called
r = MockResponse(206, "Partial content", {}, "", url)
newr = h.http_response(req, r)
self.assertTrue(r is newr)
self.assertTrue(not hasattr(o, "proto")) # o.error not called
# anything else calls o.error (and MockOpener returns None, here)
r = MockResponse(502, "Bad gateway", {}, "", url)
self.assertTrue(h.http_response(req, r) is None)
self.assertEqual(o.proto, "http") # o.error called
self.assertEqual(o.args, (req, r, 502, "Bad gateway", {}))
def test_cookies(self):
cj = MockCookieJar()
h = urllib2.HTTPCookieProcessor(cj)
o = h.parent = MockOpener()
req = Request("http://example.com/")
r = MockResponse(200, "OK", {}, "")
newreq = h.http_request(req)
self.assertTrue(cj.ach_req is req is newreq)
self.assertEqual(req.get_origin_req_host(), "example.com")
self.assertTrue(not req.is_unverifiable())
newr = h.http_response(req, r)
self.assertTrue(cj.ec_req is req)
self.assertTrue(cj.ec_r is r is newr)
def test_redirect(self):
from_url = "http://example.com/a.html"
to_url = "http://example.com/b.html"
h = urllib2.HTTPRedirectHandler()
o = h.parent = MockOpener()
# ordinary redirect behaviour
for code in 301, 302, 303, 307:
for data in None, "blah\nblah\n":
method = getattr(h, "http_error_%s" % code)
req = Request(from_url, data)
req.add_header("Nonsense", "viking=withhold")
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
if data is not None:
req.add_header("Content-Length", str(len(data)))
req.add_unredirected_header("Spam", "spam")
try:
method(req, MockFile(), code, "Blah",
MockHeaders({"location": to_url}))
except urllib2.HTTPError:
# 307 in response to POST requires user OK
self.assertTrue(code == 307 and data is not None)
self.assertEqual(o.req.get_full_url(), to_url)
try:
self.assertEqual(o.req.get_method(), "GET")
except AttributeError:
self.assertTrue(not o.req.has_data())
# now it's a GET, there should not be headers regarding content
# (possibly dragged from before being a POST)
headers = [x.lower() for x in o.req.headers]
self.assertNotIn("content-length", headers)
self.assertNotIn("content-type", headers)
self.assertEqual(o.req.headers["Nonsense"],
"viking=withhold")
self.assertNotIn("Spam", o.req.headers)
self.assertNotIn("Spam", o.req.unredirected_hdrs)
# loop detection
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
def redirect(h, req, url=to_url):
h.http_error_302(req, MockFile(), 302, "Blah",
MockHeaders({"location": url}))
# Note that the *original* request shares the same record of
# redirections with the sub-requests caused by the redirections.
# detect infinite loop redirect of a URL to itself
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/")
count = count + 1
except urllib2.HTTPError:
# don't stop until max_repeats, because cookies may introduce state
self.assertEqual(count, urllib2.HTTPRedirectHandler.max_repeats)
# detect endless non-repeating chain of redirects
req = Request(from_url, origin_req_host="example.com")
count = 0
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
try:
while 1:
redirect(h, req, "http://example.com/%d" % count)
count = count + 1
except urllib2.HTTPError:
self.assertEqual(count,
urllib2.HTTPRedirectHandler.max_redirections)
def test_invalid_redirect(self):
from_url = "http://example.com/a.html"
valid_schemes = ['http', 'https', 'ftp']
invalid_schemes = ['file', 'imap', 'ldap']
schemeless_url = "example.com/b.html"
h = urllib2.HTTPRedirectHandler()
o = h.parent = MockOpener()
req = Request(from_url)
req.timeout = socket._GLOBAL_DEFAULT_TIMEOUT
for scheme in invalid_schemes:
invalid_url = scheme + '://' + schemeless_url
self.assertRaises(urllib2.HTTPError, h.http_error_302,
req, MockFile(), 302, "Security Loophole",
MockHeaders({"location": invalid_url}))
for scheme in valid_schemes:
valid_url = scheme + '://' + schemeless_url
h.http_error_302(req, MockFile(), 302, "That's fine",
MockHeaders({"location": valid_url}))
self.assertEqual(o.req.get_full_url(), valid_url)
def test_cookie_redirect(self):
# cookies shouldn't leak into redirected requests
from cookielib import CookieJar
from test.test_cookielib import interact_netscape
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib2.HTTPDefaultErrorHandler()
hrh = urllib2.HTTPRedirectHandler()
cp = urllib2.HTTPCookieProcessor(cj)
o = build_test_opener(hh, hdeh, hrh, cp)
o.open("http://www.example.com/")
self.assertTrue(not hh.req.has_header("Cookie"))
def test_redirect_fragment(self):
redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n'
hh = MockHTTPHandler(302, 'Location: ' + redirected_url)
hdeh = urllib2.HTTPDefaultErrorHandler()
hrh = urllib2.HTTPRedirectHandler()
o = build_test_opener(hh, hdeh, hrh)
fp = o.open('http://www.example.com')
self.assertEqual(fp.geturl(), redirected_url.strip())
def test_proxy(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
o.add_handler(ph)
meth_spec = [
[("http_open", "return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("http://acme.example.com/")
self.assertEqual(req.get_host(), "acme.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "http_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_no_proxy(self):
os.environ['no_proxy'] = 'python.org'
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com"))
o.add_handler(ph)
req = Request("http://www.perl.org/")
self.assertEqual(req.get_host(), "www.perl.org")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com")
req = Request("http://www.python.org")
self.assertEqual(req.get_host(), "www.python.org")
r = o.open(req)
self.assertEqual(req.get_host(), "www.python.org")
del os.environ['no_proxy']
def test_proxy_https(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
meth_spec = [
[("https_open","return response")]
]
handlers = add_ordered_mock_handlers(o, meth_spec)
req = Request("https://www.example.com/")
self.assertEqual(req.get_host(), "www.example.com")
r = o.open(req)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual([(handlers[0], "https_open")],
[tup[0:2] for tup in o.calls])
def test_proxy_https_proxy_authorization(self):
o = OpenerDirector()
ph = urllib2.ProxyHandler(dict(https='proxy.example.com:3128'))
o.add_handler(ph)
https_handler = MockHTTPSHandler()
o.add_handler(https_handler)
req = Request("https://www.example.com/")
req.add_header("Proxy-Authorization","FooBar")
req.add_header("User-Agent","Grail")
self.assertEqual(req.get_host(), "www.example.com")
self.assertIsNone(req._tunnel_host)
r = o.open(req)
# Verify Proxy-Authorization gets tunneled to request.
# httpsconn req_headers do not have the Proxy-Authorization header but
# the req will have.
self.assertNotIn(("Proxy-Authorization","FooBar"),
https_handler.httpconn.req_headers)
self.assertIn(("User-Agent","Grail"),
https_handler.httpconn.req_headers)
self.assertIsNotNone(req._tunnel_host)
self.assertEqual(req.get_host(), "proxy.example.com:3128")
self.assertEqual(req.get_header("Proxy-authorization"),"FooBar")
def test_basic_auth(self, quote_char='"'):
opener = OpenerDirector()
password_manager = MockPasswordManager()
auth_handler = urllib2.HTTPBasicAuthHandler(password_manager)
realm = "ACME Widget Store"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' %
(quote_char, realm, quote_char) )
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
def test_basic_auth_with_single_quoted_realm(self):
self.test_basic_auth(quote_char="'")
def test_proxy_basic_auth(self):
opener = OpenerDirector()
ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
opener.add_handler(ph)
password_manager = MockPasswordManager()
auth_handler = urllib2.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Proxy-authorization",
realm, http_handler, password_manager,
"http://acme.example.com:3128/protected",
"proxy.example.com:3128",
)
def test_basic_and_digest_auth_handlers(self):
# HTTPDigestAuthHandler threw an exception if it couldn't handle a 40*
# response (http://python.org/sf/1479302), where it should instead
# return None to allow another handler (especially
# HTTPBasicAuthHandler) to handle the response.
# Also (http://python.org/sf/14797027, RFC 2617 section 1.2), we must
# try digest first (since it's the strongest auth scheme), so we record
# order of calls here to check digest comes first:
class RecordingOpenerDirector(OpenerDirector):
def __init__(self):
OpenerDirector.__init__(self)
self.recorded = []
def record(self, info):
self.recorded.append(info)
class TestDigestAuthHandler(urllib2.HTTPDigestAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("digest")
urllib2.HTTPDigestAuthHandler.http_error_401(self,
*args, **kwds)
class TestBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
def http_error_401(self, *args, **kwds):
self.parent.record("basic")
urllib2.HTTPBasicAuthHandler.http_error_401(self,
*args, **kwds)
opener = RecordingOpenerDirector()
password_manager = MockPasswordManager()
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
http_handler = MockHTTPHandler(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
opener.add_handler(http_handler)
# check basic auth isn't blocked by digest handler failing
self._test_basic_auth(opener, basic_handler, "Authorization",
realm, http_handler, password_manager,
"http://acme.example.com/protected",
"http://acme.example.com/protected",
)
# check digest was tried before basic (twice, because
# _test_basic_auth called .open() twice)
self.assertEqual(opener.recorded, ["digest", "basic"]*2)
def _test_basic_auth(self, opener, auth_handler, auth_header,
realm, http_handler, password_manager,
request_url, protected_url):
import base64
user, password = "wile", "coyote"
# .add_password() fed through to password manager
auth_handler.add_password(realm, request_url, user, password)
self.assertEqual(realm, password_manager.realm)
self.assertEqual(request_url, password_manager.url)
self.assertEqual(user, password_manager.user)
self.assertEqual(password, password_manager.password)
r = opener.open(request_url)
# should have asked the password manager for the username/password
self.assertEqual(password_manager.target_realm, realm)
self.assertEqual(password_manager.target_url, protected_url)
# expect one request without authorization, then one with
self.assertEqual(len(http_handler.requests), 2)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
userpass = '%s:%s' % (user, password)
auth_hdr_value = 'Basic '+base64.encodestring(userpass).strip()
self.assertEqual(http_handler.requests[1].get_header(auth_header),
auth_hdr_value)
self.assertEqual(http_handler.requests[1].unredirected_hdrs[auth_header],
auth_hdr_value)
# if the password manager can't find a password, the handler won't
# handle the HTTP auth error
password_manager.user = password_manager.password = None
http_handler.reset()
r = opener.open(request_url)
self.assertEqual(len(http_handler.requests), 1)
self.assertFalse(http_handler.requests[0].has_header(auth_header))
class MiscTests(unittest.TestCase):
def test_build_opener(self):
class MyHTTPHandler(urllib2.HTTPHandler): pass
class FooHandler(urllib2.BaseHandler):
def foo_open(self): pass
class BarHandler(urllib2.BaseHandler):
def bar_open(self): pass
build_opener = urllib2.build_opener
o = build_opener(FooHandler, BarHandler)
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# can take a mix of classes and instances
o = build_opener(FooHandler, BarHandler())
self.opener_has_handler(o, FooHandler)
self.opener_has_handler(o, BarHandler)
# subclasses of default handlers override default handlers
o = build_opener(MyHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
# a particular case of overriding: default handlers can be passed
# in explicitly
o = build_opener()
self.opener_has_handler(o, urllib2.HTTPHandler)
o = build_opener(urllib2.HTTPHandler)
self.opener_has_handler(o, urllib2.HTTPHandler)
o = build_opener(urllib2.HTTPHandler())
self.opener_has_handler(o, urllib2.HTTPHandler)
# Issue2670: multiple handlers sharing the same base class
class MyOtherHTTPHandler(urllib2.HTTPHandler): pass
o = build_opener(MyHTTPHandler, MyOtherHTTPHandler)
self.opener_has_handler(o, MyHTTPHandler)
self.opener_has_handler(o, MyOtherHTTPHandler)
def opener_has_handler(self, opener, handler_class):
for h in opener.handlers:
if h.__class__ == handler_class:
break
else:
self.assertTrue(False)
class RequestTests(unittest.TestCase):
def setUp(self):
self.get = urllib2.Request("http://www.python.org/~jeremy/")
self.post = urllib2.Request("http://www.python.org/~jeremy/",
"data",
headers={"X-Test": "test"})
def test_method(self):
self.assertEqual("POST", self.post.get_method())
self.assertEqual("GET", self.get.get_method())
def test_add_data(self):
self.assertTrue(not self.get.has_data())
self.assertEqual("GET", self.get.get_method())
self.get.add_data("spam")
self.assertTrue(self.get.has_data())
self.assertEqual("POST", self.get.get_method())
def test_get_full_url(self):
self.assertEqual("http://www.python.org/~jeremy/",
self.get.get_full_url())
def test_selector(self):
self.assertEqual("/~jeremy/", self.get.get_selector())
req = urllib2.Request("http://www.python.org/")
self.assertEqual("/", req.get_selector())
def test_get_type(self):
self.assertEqual("http", self.get.get_type())
def test_get_host(self):
self.assertEqual("www.python.org", self.get.get_host())
def test_get_host_unquote(self):
req = urllib2.Request("http://www.%70ython.org/")
self.assertEqual("www.python.org", req.get_host())
def test_proxy(self):
self.assertTrue(not self.get.has_proxy())
self.get.set_proxy("www.perl.org", "http")
self.assertTrue(self.get.has_proxy())
self.assertEqual("www.python.org", self.get.get_origin_req_host())
self.assertEqual("www.perl.org", self.get.get_host())
def test_wrapped_url(self):
req = Request("<URL:http://www.python.org>")
self.assertEqual("www.python.org", req.get_host())
def test_url_fragment(self):
req = Request("http://www.python.org/?qs=query#fragment=true")
self.assertEqual("/?qs=query", req.get_selector())
req = Request("http://www.python.org/#fun=true")
self.assertEqual("/", req.get_selector())
# Issue 11703: geturl() omits fragment in the original URL.
url = 'http://docs.python.org/library/urllib2.html#OK'
req = Request(url)
self.assertEqual(req.get_full_url(), url)
def test_main(verbose=None):
from test import test_urllib2
test_support.run_doctest(test_urllib2, verbose)
test_support.run_doctest(urllib2, verbose)
tests = (TrivialTests,
OpenerDirectorTests,
HandlerTests,
MiscTests,
RequestTests)
test_support.run_unittest(*tests)
if __name__ == "__main__":
test_main(verbose=True)
|
|
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An AgentFlow Subtask to Environment adaptor."""
import re
import traceback
from typing import Callable, List, Sequence
from absl import logging
from dm_control import composer
from dm_control import mjcf
import dm_env
from dm_robotics import agentflow as af
from dm_robotics.agentflow import spec_utils
from dm_robotics.moma import base_task
from dm_robotics.moma import effector
from dm_robotics.moma import moma_option
import numpy as np
# Internal profiling
def _fixed_timestep(timestep: dm_env.TimeStep) -> dm_env.TimeStep:
if timestep.reward is None:
timestep = timestep._replace(reward=0.0)
if timestep.discount is None:
timestep = timestep._replace(discount=1.0)
return timestep
class SubTaskEnvironment(dm_env.Environment):
"""SubTask to `dm_env.Environment` adapter.
  One important note is that users need to either call close() on the
  environment when they are done using it, or use it within a context
  manager.
Example:
with subtask_env() as env:
...
"""
def __init__(self,
env: composer.Environment,
effectors: Sequence[effector.Effector],
subtask: af.SubTask,
reset_option: moma_option.MomaOption):
if env is None:
raise ValueError('env is None')
if subtask is None:
raise ValueError('subtask is None')
if reset_option is None:
raise ValueError('reset_option is None')
if not effectors:
raise ValueError('no effectors specified')
self._env = env
self._effectors = effectors
self._subtask = subtask
self._reset_option = reset_option
self._reset_required = True
self._last_internal_timestep = None # type: dm_env.TimeStep
self._last_external_timestep = None # type: dm_env.TimeStep
# Stub action for the moma task step. Actual actuation is done directly
# through effectors.
self._stub_env_action = np.zeros_like(
self._env.action_spec().generate_value())
self._effectors_action_spec = None
    self._observers = []  # type: List[af.SubTaskObserver]
    self._teardown_callables = []  # type: List[Callable[[], None]]
self._is_closed = False # type: bool
# If the user does not close the environment we issue an error and
# print out where the environment was created
self._traceback = traceback.format_stack()
def __del__(self):
if not self._is_closed:
logging.error(
'You must call .close() on the environment created at:\n %s',
''.join(self._traceback))
# Profiling for .wrap()
def reset(self) -> dm_env.TimeStep:
if self._is_closed:
raise RuntimeError(
'The environment has been closed, it can no longer be used.')
env_timestep = _fixed_timestep(self._env.reset())
self._reset_option.on_selected(timestep=env_timestep)
# Run the reset option to completion.
pterm = 0 # An option is stepped before we ask for pterm.
while pterm < np.random.random():
# The reset_option is a MomaOption which handles actuating the effectors
# internally.
self._reset_option.step(env_timestep)
env_timestep = self._env.step(self._stub_env_action)
pterm = self._reset_option.pterm(env_timestep)
# Send LAST timestep to reset option's delegate. This means we want to
# step the option without actuating the effectors. Ignore the action it
# returns.
self._reset_option.step_delegate(
env_timestep._replace(step_type=dm_env.StepType.LAST))
# send env_timestep through the subtask.
self._last_internal_timestep = env_timestep._replace(
step_type=dm_env.StepType.FIRST)
self._subtask.reset(env_timestep)
timestep = self._subtask.parent_to_agent_timestep(
self._last_internal_timestep,
own_arg_key=self._subtask.get_arg_key(None))
self._reset_required = False
if not timestep.first():
raise ValueError(f'SubTask returned non FIRST timestep: {timestep}')
timestep = timestep._replace(reward=None)
timestep = timestep._replace(discount=None)
self._last_external_timestep = timestep
return timestep
# Profiling for .wrap()
def step(self, action: np.ndarray) -> dm_env.TimeStep:
if self._is_closed:
raise RuntimeError(
'The environment has been closed, it can no longer be used.')
if self._reset_required:
return self.reset() # `action` is deliberately ignored.
# subtask_env does not use Option-argument mechanism.
dummy_arg_key = self._subtask.get_arg_key(None)
pterm = self._subtask.pterm(self._last_internal_timestep, dummy_arg_key)
action = action.astype(self._subtask.action_spec().dtype)
external_action = np.clip(action,
self._subtask.action_spec().minimum,
self._subtask.action_spec().maximum)
internal_action = self._subtask.agent_to_parent_action(external_action)
for obs in self._observers:
obs.step(
parent_timestep=self._last_internal_timestep,
parent_action=internal_action,
agent_timestep=self._last_external_timestep,
agent_action=external_action)
self._actuate_effectors(internal_action)
internal_timestep = self._env.step(self._stub_env_action)
self._last_internal_timestep = internal_timestep
external_timestep = self._subtask.parent_to_agent_timestep(
internal_timestep, dummy_arg_key)
# If subtask wants to stop, this is the last timestep.
if pterm > np.random.random():
external_timestep = external_timestep._replace(
step_type=dm_env.StepType.LAST)
# If subtask or base env emit a LAST timestep, we need to reset next.
if external_timestep.last():
self._reset_required = True
# For a LAST timestep, step the observers with a None action. This ensures
# the observers will see every timestep of the task.
for obs in self._observers:
obs.step(
parent_timestep=internal_timestep,
parent_action=None,
agent_timestep=external_timestep,
agent_action=None)
# This shouldn't happen, but just in case.
if external_timestep.first():
external_timestep = external_timestep._replace(reward=None)
external_timestep = external_timestep._replace(discount=None)
self._last_external_timestep = external_timestep
return external_timestep
def _actuate_effectors(self, action):
if self._effectors_action_spec is None:
aspecs = [a.action_spec(self.physics) for a in self._effectors]
self._effectors_action_spec = spec_utils.merge_specs(aspecs)
spec_utils.validate(self._effectors_action_spec, action)
for ef in self._effectors:
e_cmd = action[self._find_effector_indices(ef)]
ef.set_control(self.physics, e_cmd)
def _find_effector_indices(self, ef: effector.Effector) -> List[bool]:
actuator_names = self._effectors_action_spec.name.split('\t')
prefix_expr = re.compile(ef.prefix)
return [re.match(prefix_expr, name) is not None for name in actuator_names]
def observation_spec(self):
return self._subtask.observation_spec()
def action_spec(self):
return self._subtask.action_spec()
def reward_spec(self):
return self._subtask.reward_spec()
def discount_spec(self):
return self._subtask.discount_spec()
@property
def base_env(self) -> composer.Environment:
return self._env
@property
def physics(self) -> mjcf.Physics:
return self._env.physics
@property
def task(self) -> composer.Task:
"""Returns the underlying composer.Task, defining the world."""
return self._env.task
@property
def subtask(self) -> af.SubTask:
"""Returns the underlying agentflow.SubTask, defining the task."""
return self._subtask
@property
def reset_option(self) -> moma_option.MomaOption:
return self._reset_option
@reset_option.setter
def reset_option(self, reset_option: moma_option.MomaOption) -> None:
"""Changes the reset option.
Sometimes the correct reset option is not constructible when the
SubTaskEnvironment is initialized, so this property allows the caller to
overwrite the original reset option.
Args:
reset_option: New reset option for this environment.
"""
self._reset_option = reset_option
def add_observer(self, observer: af.SubTaskObserver) -> None:
"""Adds a subtask observer to the environment."""
self._observers.append(observer)
def add_teardown_callable(self, teardown_fn: Callable[[], None]):
"""Adds a function to be called when the environment is closed.
When running our environment we might need to start processes that need
to be closed when we are done using the environment.
Args:
teardown_fn: Function to run when we close the environment.
"""
self._teardown_callables.append(teardown_fn)
def close(self):
"""Cleanup when we are done using the environment."""
if self._is_closed:
logging.warning('The environment has already been closed.')
return
    # A MoMa base task holds all the effectors and sensors. When running a
# real environment we need to make sure that we close all the sensors and
# effectors used with the real robot. The `close` method of the task ensures
# this.
if isinstance(self.task, base_task.BaseTask):
self.task.close()
# Call all the provided teardowns when closing the environment.
for teardown_callable in self._teardown_callables:
teardown_callable()
# Close the base class
super().close()
self._is_closed = True
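

# A minimal usage sketch for this adapter, assuming a `composer.Environment`,
# a sequence of effectors, an `af.SubTask` and a reset `MomaOption` have
# already been built elsewhere. The names below (including `policy`) are
# placeholders, not part of this module's API.
def _example_run_episode(env, effectors, subtask, reset_option, policy):
  """Runs one episode through a SubTaskEnvironment with a given policy."""
  subtask_env = SubTaskEnvironment(
      env=env,
      effectors=effectors,
      subtask=subtask,
      reset_option=reset_option)
  # Using the environment as a context manager guarantees close() is called.
  with subtask_env as adapted_env:
    timestep = adapted_env.reset()
    while not timestep.last():
      action = policy(timestep)
      timestep = adapted_env.step(action)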
|
|
#!/usr/bin/env python
import copy
import datetime
import email
from email.utils import parseaddr
import logging
import logging.handlers
from pprint import pprint, pformat
import re
import smtplib
import string
import sys
import time
import traceback
from configobj import ConfigObj
from email_reply_parser import EmailReplyParser
import json
import pytz
from tzlocal import get_localzone
from validate import Validator
# For JiraCommandProcessor
import requests
import __main__
class BufferingSMTPHandler(logging.handlers.BufferingHandler):
"""Replacement for SMTPHandler that buffers log messages.
Add log messages to a buffer and send them as one single SMTP
message when the buffer is flushed, rather than sending each one
individually like the SMTPHandler in Python's standard library.
"""
def __init__(self, mailhost, fromaddr, toaddrs, subject,
capacity = 5000):
self.mailhost = mailhost
self.mailport = None
self.fromaddr = fromaddr
self.toaddrs = toaddrs
self.subject = subject
# BufferingHandler is an old-style object
logging.handlers.BufferingHandler.__init__(self, capacity)
def flush(self):
"""Flush buffers and send all buffered messages via SMTP.
"""
if len(self.buffer) > 0:
try:
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port)
fmt = "From: {fromaddr}\r\nTo: {toaddrs}\r\nSubject: {subject}\r\n\r\n{content}"
msg = fmt.format(
fromaddr=self.fromaddr,
toaddrs=self._join_addrs(self.toaddrs),
subject=self.subject,
content="\r\n".join(self.format(r) for r in self.buffer))
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except:
self.handleError(None) # no particular record
self.buffer = []
def _join_addrs(self, addrs):
"""Join a list of addresses using the RFC822 address separator.
If provided a list of email addresses, return a string
containing those email addresses separated by commas. If
provided a string, return that string only.
"""
if type(addrs) == list:
return ','.join(addrs)
else:
return addrs
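

# A minimal sketch of attaching the handler to a logger, assuming a local
# SMTP server is reachable; the addresses and subject below are placeholder
# values, not ones used elsewhere in this script.
def _example_smtp_logging_setup():
    """Illustrates wiring a logger to a BufferingSMTPHandler."""
    example_log = logging.getLogger('example')
    handler = BufferingSMTPHandler(mailhost='localhost',
                                   fromaddr='monitor@example.org',
                                   toaddrs=['ops@example.org'],
                                   subject='Buffered log output')
    handler.setLevel(logging.INFO)
    example_log.addHandler(handler)
    example_log.info('First buffered message')
    example_log.info('Second buffered message')
    # All buffered records are sent as a single email when the handler
    # flushes, rather than one email per record.
    handler.flush()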
class ConfigError(Exception):
"""Raised when a parsed configuration is invalid.
"""
pass
class EventDataError(Exception):
"""Raised when event data cannot be parsed from a message.
"""
pass
class CommandProcessor(object):
"""Base class for command processors.
Provides a list of commands that the processor can respond to,
and facilities to dispatch commands to handler methods inside the
processor object.
"""
def __init__(self, config, event_data):
self.config = config
self.event_data = copy.copy(event_data)
def get_handled_commands(self):
"""Return a list of registered commands that this responds to.
"""
return self.handled_commands.keys()
def handle_command(self, command, data):
"""Dispatch a command to the appropriate handler method.
"""
(opts, data) = self._parse_options(data)
log.debug('Handling command: {command}'.format(
command=command))
self.handled_commands[command](opts, data)
def _parse_options(self, data):
"""Parse inline option flags from a command string.
Parse data in any of the following formats: key, key:value,
key:"quoted value". Returns a tuple (opts, data) containing
the parsed options and the unparsed remainder of the line.
"""
if data is None:
return ({}, '')
# Match tags of formats: #option, #option:arg,
# #option:"quoted arg"
expr = re.compile("#(\w+)(?::(\w+|\"[^\"]\")?)?")
matches = expr.findall(data)
data = expr.sub('', data).strip()
opts = dict(matches)
return (opts, data)
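

# A sketch of the inline option syntax handled by _parse_options(), assuming
# a processor instance built from any valid config/event_data pair; the
# expected values in the comments follow from the regular expression above.
def _example_parse_options(processor):
    """Illustrates the #option and #option:arg forms in a reply line."""
    # '#notify:false #sticky please ignore' parses to roughly
    # ({'notify': 'false', 'sticky': ''}, 'please ignore').
    # Quoted arguments (#comment:"quoted value") keep their surrounding
    # quotes in the parsed option value.
    return processor._parse_options('#notify:false #sticky please ignore')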
class EchoCommandProcessor(CommandProcessor):
"""Processor that echoes back a received message."
Used for basic testing only.
"""
def __init__(self, config, event_data):
self.handled_commands = {'echo': self._echo_handler}
super(EchoCommandProcessor, self).__init__(config, event_data)
    def _echo_handler(self, opts, data):
log.info(self.event_data['message'])
class JiraCommandProcessor(CommandProcessor):
def __init__(self, config, event_data):
self.handled_commands = {'create-issue': self._create_issue_handler}
super(JiraCommandProcessor, self).__init__(config, event_data)
self.event_data['fqdn'] = '{hostname}.{domain}'.format(
hostname=self.event_data['hostname'],
domain=self.config['domain'])
def _create_issue_handler(self, opts, data):
"""Handle the #create-issue command.
Check to see if an issue in JIRA has already been created for
this event. If no duplicate is found, create an issue.
"""
rest_endpoint = self.config['rest_endpoint']
url = '/'.join([rest_endpoint, 'issue'])
username = self.config['username']
password = self.config['password']
issue_type = self.config['issue_type']
project_key = self.config['project_key']
headers = {'Content-Type': 'application/json'}
# Create JSON skeleton
payload = {
'fields': {
'description': self._get_issue_description(opts, data),
'issuetype': {
'name': issue_type
},
'project': {
'key': project_key
},
'summary': self._get_issue_summary(opts, data)
}
}
# Then merge in custom fields as defined in configuration file
fields = self.config.get('fields', {})
for (name, tmpl) in fields.items():
payload['fields'][name] = self._format(tmpl,
**self.event_data)
if self._duplicate_alert_exists(payload):
log.error('Duplicate alert exists; not creating.')
return
log.debug("Posting to {url}: {payload}".format(url=url,
payload=json.dumps(payload)))
result = requests.post(url, auth=(username, password),
data=json.dumps(payload),
headers=headers)
if len(result.json.get('errors', [])) > 0:
log.error('Failed to create JIRA issue: ' + result.text)
else:
key = result.json.get('key', '')
log.info('Created JIRA issue: {key}'.format(key=key))
def _duplicate_alert_exists(self, payload):
"""Check whether a duplicate alert already exists in JIRA.
Currently unimplemented. Returns False always.
"""
return False
def _format(self, value, **kwargs):
"""Format a list of arguments.
If value is a list, return every item in value formatted with
variables substituted from **kwargs. If value is a string,
return the result of value.format(**kwargs).
"""
if type(value) == list:
return [s.format(**kwargs) for s in value]
else:
return value.format(**kwargs)
def _get_issue_description(self, opts, data):
parts = [data, self.event_data['alert']]
parts = [part for part in parts if part != '']
return "\n\n---\n\n".join(parts)
def _get_issue_summary(self, opts, data):
if self.event_data['type'] == 'service':
tmpl = self.config['service_summary']
else:
tmpl = self.config['host_summary']
return tmpl.format(**self.event_data)
def _query(self, jql):
"""
Run a JQL query against JIRA and return the JSON result.
Currently unimplemented.
"""
return None
class NagiosCommandProcessor(CommandProcessor):
"""Translates email commands into Nagios commands.
Maintains a set of Nagios commands, their type signatures, and
sensible default arguments. Processes email commands and writes
the appropriate Nagios commands into the Nagios command file.
"""
def __init__(self, config, event_data):
self.handled_commands = {'acknowledge': self._acknowledge_handler,
'comment': self._comment_handler,
'disable-notifications':
self._disable_notifications_handler,
'enable-notifications':
self._enable_notifications_handler}
self.cf_type_map = {'host': 'HOST', 'service': 'SVC'}
self.fh = None
# Type map of Nagios commands and their arguments/types, in
# order, so an ordered command can be produced.
self.nagios_commands = {
'ACKNOWLEDGE_HOST_PROBLEM':
[('host_name', str, ''),
('sticky', bool, True),
('notify', bool, True),
('persistent', bool, True),
('author', str, 'Unknown'),
('comment', str, '')],
'ACKNOWLEDGE_SVC_PROBLEM':
[('host_name', str, ''),
('service_description', str, ''),
('sticky', bool, True),
('notify', bool, True),
('persistent', bool, True),
('author', str, 'Unknown'),
('comment', str, '')],
'ADD_HOST_COMMENT':
[('host_name', str, ''),
('persistent', bool, True),
('author', str, ''),
('comment', str, '')],
'ADD_SVC_COMMENT':
[('host_name', str, ''),
('service_description', str, ''),
('persistent', bool, True),
('author', str, ''),
('comment', str, '')],
'DISABLE_HOST_NOTIFICATIONS':
[('host_name', str, '')],
'DISABLE_SVC_NOTIFICATIONS':
[('host_name', str, ''),
('service_description', str, '')],
'ENABLE_HOST_NOTIFICATIONS':
[('host_name', str, '')],
'ENABLE_SVC_NOTIFICATIONS':
[('host_name', str, ''),
('service_description', str, '')],
}
super(NagiosCommandProcessor, self).__init__(config, event_data)
def handle_command(self, command, data):
"""Dispatch a #command to the appropriate handler method.
Before dispatching any command, we want to ensure that the
command file is open.
"""
self._open_command_file()
super(NagiosCommandProcessor, self).handle_command(
command, data)
def _acknowledge_handler(self, opts, data):
"""Handle #acknowledge command.
Acknowledge an alert on a host or service.
"""
opts = dict(opts.items() + [('comment', data)])
log.debug("Acknowledging alert")
self._submit_command('ACKNOWLEDGE_{type}_PROBLEM', opts)
def _comment_handler(self, opts, data):
"""Handle #comment command.
Add a comment to a host or service definition.
"""
opts = dict(opts.items() + [('comment', data)])
log.debug("Adding comment")
self._submit_command('ADD_{type}_COMMENT', opts)
def _disable_notifications_handler(self, opts, data):
"""Handle #disable-notifications command.
Disable notifications on a host or service.
"""
log.debug("Disabling notifications")
self._submit_command('DISABLE_{type}_NOTIFICATIONS', opts)
def _enable_notifications_handler(self, opts, data):
"""Handle #enable-notifications command.
Enable notifications on a host or service.
"""
log.debug("Enabling notifications")
self._submit_command('ENABLE_{type}_NOTIFICATIONS', opts)
def _as_str(self, value):
"""Convert arguments to strings in the Nagios command format.
The Nagios command format expects boolean values to be
expressed as integers, rather than the string literals True
or False. If value is a bool, return the string representation
of its integer value. For all other types, return the string
representation of value.
"""
if type(value) == bool:
return str(int(value))
else:
return str(value)
def _build_command_line(self, command, opts):
"""Build a command line for the Nagios command file.
Given a command and a list of options, cross-reference the
command spec for the argument order and default values,
then build a string suitable for insertion into the Nagios
command file.
"""
opt_spec = self.nagios_commands.get(command, [])
keys = [key for (key, type_, default) in opt_spec]
default_opts = dict([(key, default) for (key, type_, default)
in opt_spec])
        opts = dict(default_opts.items() + opts.items())
arg_list = [command]
for (key, type_, default) in opt_spec:
arg_list.append(self._cast_arg(opts[key], type_))
arg_list = [self._as_str(arg) for arg in arg_list]
cmdline = ';'.join(arg_list)
timestamp = int(time.time())
return '[{timestamp}] {cmdline}'.format(cmdline=cmdline,
timestamp=timestamp)
def _cast_arg(self, value, type_):
"""Cast an input value to the type specified in the optspec.
Boolean values can take many forms: 1, True, On, etc.
Convert all of these strings to
"""
s = str(value)
if type_ == bool:
if re.match('(on|true|yes|1)$', s, re.IGNORECASE):
return True
            elif re.match('(off|false|no|0)$', s, re.IGNORECASE):
return False
else:
fmt = 'Could not convert {value} to {type}'
msg = fmt.format(type=type_, value=value)
raise ValueError(msg)
elif type_ == int:
return int(s)
elif type_ == str:
return s
else:
# Fixme: Should throw exception
return None
def _cf_type(self):
"""Returns the event type in a Nagios-friendly format.
The command file has two forms of many commands, such as
ADD_HOST_COMMENT and ADD_SVC_COMMENT. These, rather predictably,
function on hosts and services, respectively. This method
converts the internal event data type name into one suitable
for disambiguating Nagios commands.
"""
if self.event_data['type'] == 'host':
return 'HOST'
elif self.event_data['type'] == 'service':
return 'SVC'
def _map_options(self, opts):
"""Remap program option names to Nagios API canonical names.
Convert internal option keys to the key names used in the Nagios
External Commands API:
http://old.nagios.org/developerinfo/externalcommands/commandlist.php
"""
        key_map = {'hostname': 'host_name',
                   'service': 'service_description'}
        new_opts = copy.copy(opts)
        for (old_key, new_key) in key_map.items():
if old_key in new_opts:
new_opts[new_key] = new_opts[old_key]
del new_opts[old_key]
return new_opts
def _open_command_file(self):
"""Open the command file if it is not already open.
"""
log.debug("Opening command file")
# Todo: Handle IOError properly
if self.fh is None:
self.fh = open(self.config['command_file'], 'a')
def _submit_command(self, command, opts):
"""Submit a command for processing.
Takes a command and a dict of keyword arguments and converts
them into a valid set of positional parameters, then writes
it to the command file.
"""
command = command.format(type=self._cf_type())
args = dict(opts.items() + self.event_data.items())
args = self._map_options(args)
cmd_line = self._build_command_line(command, args)
self._write_command_file(cmd_line)
def _write_command_file(self, s):
log.info("Writing to command file: " + s)
self.fh.write(s + '\n')
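

# A sketch of the external-command line this processor writes, assuming a
# host acknowledgement; the host name, author and comment below are
# placeholder values. The Nagios command file expects one line per command
# in the form "[timestamp] COMMAND;arg1;arg2;...".
def _example_nagios_command_line(processor):
    """Illustrates the line built for an ACKNOWLEDGE_HOST_PROBLEM command."""
    opts = {'host_name': 'web01',
            'author': 'Jane Operator',
            'comment': 'Rebooting the server'}
    # Produces something like:
    # [1234567890] ACKNOWLEDGE_HOST_PROBLEM;web01;1;1;1;Jane Operator;Rebooting the server
    return processor._build_command_line('ACKNOWLEDGE_HOST_PROBLEM', opts)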
def create_processors(config, event_data):
"""Create instances of Processors defined in configuration.
"""
processors = []
for class_name, class_config in config.items():
try:
if not class_config.as_bool('enable'):
continue
except (KeyError, ValueError) as ex:
continue
class_ = getattr(__main__, class_name)
instance = class_(config=class_config, event_data=event_data)
processors.append(instance)
return processors
def extract_author_name(s):
"""Return the name portion of an RFC822 email address.
Given a string parsed from an email's From: field, e.g.
"Author <email@domain>", return only the author's name.
Return the mailbox portion of the email address if no author
name is supplied. Return the whole address if the email is not in
username@domain format.
"""
(real_name, address) = parseaddr(s)
if real_name != '':
return real_name
    elif address == '':
return 'Unknown Author'
else:
matches = re.match('([^@]+)@(.+)', address)
if matches:
return matches.group(1)
else:
return address
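

# A short sketch of the three cases handled above; the addresses are
# placeholder values and the expected results follow from parseaddr().
def _example_extract_author_name():
    """Illustrates name extraction from typical From: header values."""
    # 'Jane Operator <jane@example.org>'  -> 'Jane Operator' (real name wins)
    # 'jane@example.org'                  -> 'jane' (mailbox portion only)
    # ''                                  -> 'Unknown Author'
    return [extract_author_name(s) for s in
            ('Jane Operator <jane@example.org>', 'jane@example.org', '')]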
def extract_event_data(msg, config):
"""Parse event data from a Message object.
"""
author = extract_author_name(msg.get('From', ''))
subject = msg.get('Subject', '')
alert = extract_alert(msg)
reply = extract_reply(msg)
parsed_fields = parse_alert_fields(alert, config['fields'])
if parsed_fields['service'] is not None:
type_ = 'service'
else:
type_ = 'host'
    if parsed_fields['timestamp'] is None:
parsed_fields['timestamp'] = ''
timestamp = parse_timestamp(parsed_fields['timestamp'],
config['date_format'])
fields = {'alert': alert,
'author': author,
'message': msg.as_string(),
'reply': reply,
'timestamp': timestamp,
'type': type_}
return dict(parsed_fields.items() + fields.items())
def extract_alert(msg):
"""Extract the original alert from an email thread.
Walk through all replies comprising the message, locate the
original alert email, strip off all pseudo-headers, remove quote
markers, and return the result.
"""
for part in msg.walk():
if part.get_content_type() == 'text/plain':
content = EmailReplyParser.read(
part.get_payload(decode=True))
for fragment in content.fragments:
content = fragment._content
if content != extract_reply(msg):
return sanitize_email_fragment(content)
return ''
def extract_reply(msg):
"""Extracts the portion of an email that should contain commands.
"""
for part in msg.walk():
if part.get_content_type() == 'text/plain':
content = part.get_payload(decode=True)
return EmailReplyParser.parse_reply(content)
def parse_alert_fields(s, config):
"""Parse named fields from an alert message.
Given a list of key names and regular expressions in the config,
attempt to extract all identified fields from the alert message
and return them as a dict.
"""
fields = {}
flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
for (field, expr) in config.items():
matches = re.search(expr, s, flags)
if matches is not None:
fields[field] = matches.group(1)
else:
fields[field] = None
return fields
def parse_command(s):
"""Parse a command string from a reply email.
Given a single-line command string, e.g.:
        #acknowledge Server owner notified of problem.
return a tuple (command, data) consisting of the command name
(acknowledge) and the remainder of the line. Argument parsing is
delegated to Handler classes so that Handler authors can override
option syntax if they so choose.
"""
matches = re.match('#(\S+)(?:(?::\s*|\s+)(.+))?', s)
if matches is not None:
return (matches.group(1), matches.group(2))
else:
return (None, None)
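

# A sketch of what parse_command() returns for a typical reply line; the
# command text is a placeholder.
def _example_parse_command():
    """Illustrates splitting a reply line into a (command, data) tuple."""
    # '#acknowledge Server owner notified of problem.' parses to
    # ('acknowledge', 'Server owner notified of problem.').
    # Lines that do not start with a #command parse to (None, None).
    return parse_command('#acknowledge Server owner notified of problem.')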
def parse_timestamp(ts, format_name):
"""Attempt to parse the time from an alert email.
Nagios has four configurable time formats, and uses none of them if
the $LONGDATETIME$ macro is used in an alert (as it does by default)
instead of the $DATETIME$ macro.
Parsing is attempted in the following order:
- $LONGDATETIME$ format
- Nagios configured date format (format_name)
If the timestamp is unparsable, give up and return the current
datetime instead. It's better than nothing.
"""
long_date_formats = {'us': '%m/%d/%Y %H:%M:%S',
'euro': '%d/%m/%Y %H:%M:%S',
'iso8601': '%Y-%m-%d %H:%M:%S',
                         'strict-iso8601': '%Y-%m-%dT%H:%M:%S'}
# Try parsing the date using the default Nagios $LONGDATETIME$
# format. If that fails, attempt parsing using the $DATETIME$ format
# specified in the configuration file. If that fails too, use
# the current date and time (it's better than nothing).
try:
format_str = '%a %b %d %H:%M:%S %Z %Y'
dt = datetime.datetime.strptime(ts, format_str)
except ValueError, ex:
try:
format_str = long_date_formats[format_name]
dt = datetime.datetime.strptime(ts, format_str)
except ValueError, ex:
dt = datetime.datetime.now()
# Nagios and Icinga alert using the local timezone rather than UTC,
# so we need to embed that information into our datetime.
try:
tz = get_localzone()
return tz.localize(dt)
except pytz.UnknownTimeZoneError, ex:
return dt
def sanitize_email_fragment(s):
"""Remove pseudo-headers from message fragments in an email thread.
EmailReplyParser returns fragments undisturbed, with inline heading
(From:, To:, etc.) and quote markers (>) intact. This method removes
these things and attempts to return the fragment to a pristine state
(as much as possible).
"""
expr = re.compile('^(--|On .+ at .+ wrote:|(?:\w+: .+)?$)')
lines = s.split("\n")
# Find the first line that doesn't look like a reply demarcation or
# an inline heading, then return it and every line after it.
for i in range(0, len(lines)):
line = lines[i]
if not expr.match(line):
return "\n".join(map(strip_quote_marker, lines[i:-1]))
return ''
def strip_quote_marker(s):
"""Strip out '>' quote markers in a quoted message fragment.
"""
return re.sub('^(>\s*)+', '', s)
def validate_config(config):
"""Ensure that the application configuration is valid.
"""
validator = Validator()
result = config.validate(validator)
    if result is not True:
raise ConfigError(result.items())
def main():
"""Do the needful.
"""
config = ConfigObj('/etc/koboli/koboli.ini',
configspec='/etc/koboli/kobolispec.ini',
list_values=True)
try:
validate_config(config)
except ConfigError, ex:
# Fixme: better error reporting needed here
log.critical("Config file validation failed; exiting.")
sys.exit(1)
msg = email.message_from_file(sys.stdin)
# Set up the logging handler to respond
from_addr = msg.get('To')
to_addr = msg.get('From')
subject = msg.get('Subject')
if to_addr is not None:
handler = BufferingSMTPHandler(mailhost='localhost',
fromaddr=from_addr,
toaddrs=to_addr,
subject=subject)
handler.setLevel(logging.INFO)
log.addHandler(handler)
try:
event_data = extract_event_data(msg, config['global'])
log.debug("Parsed event data: " + pformat(event_data))
except EventDataError, ex:
log.critical("Could not parse event data from email; exiting.")
sys.exit(1)
processors = create_processors(config, event_data)
command_processors = {}
for processor in processors:
for command in processor.get_handled_commands():
command_processors[command] = processor
input_lines = event_data['reply'].split("\n")
input_lines = [line for line in input_lines if line.strip() != '']
commands = [parse_command(command) for command in input_lines]
commands = [(command, data) for (command, data) in commands
if command in command_processors]
log.info("Processing commands...")
for (command, data) in commands:
command_processors[command].handle_command(command, data)
log.info("Finished processing successfully.")
if __name__ == '__main__':
log = logging.getLogger('log')
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
log.addHandler(handler)
log.setLevel(logging.DEBUG)
log.debug('Testing')
try:
main()
except:
log.error("Unhandled exception:\n{traceback}" \
.format(traceback=traceback.format_exc()))
finally:
for handler in log.handlers:
handler.flush()
|
|
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import literal
from sqlalchemy import literal_column
from sqlalchemy import null
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.orm import aliased
from sqlalchemy.orm import Bundle
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import create_session
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import subqueryload
from sqlalchemy.orm import with_polymorphic
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
class SingleInheritanceTest(testing.AssertsCompiledSQL, fixtures.MappedTest):
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table(
"employees",
metadata,
Column(
"employee_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
Column("manager_data", String(50)),
Column("engineer_info", String(50)),
Column("type", String(20)),
)
Table(
"reports",
metadata,
Column(
"report_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("employee_id", ForeignKey("employees.employee_id")),
Column("name", String(50)),
)
@classmethod
def setup_classes(cls):
global Employee, Manager, Engineer, JuniorEngineer
class Employee(cls.Comparable):
pass
class Manager(Employee):
pass
class Engineer(Employee):
pass
class JuniorEngineer(Engineer):
pass
@classmethod
def setup_mappers(cls):
Employee, Manager, JuniorEngineer, employees, Engineer = (
cls.classes.Employee,
cls.classes.Manager,
cls.classes.JuniorEngineer,
cls.tables.employees,
cls.classes.Engineer,
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Manager, inherits=Employee, polymorphic_identity="manager")
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
mapper(
JuniorEngineer,
inherits=Engineer,
polymorphic_identity="juniorengineer",
)
def _fixture_one(self):
JuniorEngineer, Manager, Engineer = (
self.classes.JuniorEngineer,
self.classes.Manager,
self.classes.Engineer,
)
session = create_session()
m1 = Manager(name="Tom", manager_data="knows how to manage things")
e1 = Engineer(name="Kurt", engineer_info="knows how to hack")
e2 = JuniorEngineer(name="Ed", engineer_info="oh that ed")
session.add_all([m1, e1, e2])
session.flush()
return session, m1, e1, e2
def test_single_inheritance(self):
Employee, JuniorEngineer, Manager, Engineer = (
self.classes.Employee,
self.classes.JuniorEngineer,
self.classes.Manager,
self.classes.Engineer,
)
session, m1, e1, e2 = self._fixture_one()
assert session.query(Employee).all() == [m1, e1, e2]
assert session.query(Engineer).all() == [e1, e2]
assert session.query(Manager).all() == [m1]
assert session.query(JuniorEngineer).all() == [e2]
m1 = session.query(Manager).one()
session.expire(m1, ["manager_data"])
eq_(m1.manager_data, "knows how to manage things")
row = (
session.query(Engineer.name, Engineer.employee_id)
.filter(Engineer.name == "Kurt")
.first()
)
assert row.name == "Kurt"
assert row.employee_id == e1.employee_id
def test_multi_qualification(self):
Manager, Engineer = (self.classes.Manager, self.classes.Engineer)
session, m1, e1, e2 = self._fixture_one()
ealias = aliased(Engineer)
eq_(session.query(Manager, ealias).all(), [(m1, e1), (m1, e2)])
eq_(session.query(Manager.name).all(), [("Tom",)])
eq_(
session.query(Manager.name, ealias.name).all(),
[("Tom", "Kurt"), ("Tom", "Ed")],
)
eq_(
session.query(
func.upper(Manager.name), func.upper(ealias.name)
).all(),
[("TOM", "KURT"), ("TOM", "ED")],
)
eq_(
session.query(Manager).add_entity(ealias).all(),
[(m1, e1), (m1, e2)],
)
eq_(
session.query(Manager.name).add_column(ealias.name).all(),
[("Tom", "Kurt"), ("Tom", "Ed")],
)
# TODO: I think raise error on this for now
# self.assertEquals(
# session.query(Employee.name, Manager.manager_data,
# Engineer.engineer_info).all(),
# []
# )
def test_column_qualification(self):
Employee, JuniorEngineer, Manager, Engineer = (
self.classes.Employee,
self.classes.JuniorEngineer,
self.classes.Manager,
self.classes.Engineer,
)
session, m1, e1, e2 = self._fixture_one()
m1id, e1id, e2id = m1.employee_id, e1.employee_id, e2.employee_id
def scalar(q):
return [x for x, in q]
eq_(scalar(session.query(Employee.employee_id)), [m1id, e1id, e2id])
eq_(scalar(session.query(Engineer.employee_id)), [e1id, e2id])
eq_(scalar(session.query(Manager.employee_id)), [m1id])
# this currently emits "WHERE type IN (?, ?) AND type IN (?, ?)",
# so no result.
eq_(session.query(Manager.employee_id, Engineer.employee_id).all(), [])
eq_(scalar(session.query(JuniorEngineer.employee_id)), [e2id])
def test_bundle_qualification(self):
Employee, JuniorEngineer, Manager, Engineer = (
self.classes.Employee,
self.classes.JuniorEngineer,
self.classes.Manager,
self.classes.Engineer,
)
session, m1, e1, e2 = self._fixture_one()
m1id, e1id, e2id = m1.employee_id, e1.employee_id, e2.employee_id
def scalar(q):
return [x[0] for x, in q]
eq_(
scalar(session.query(Bundle("name", Employee.employee_id))),
[m1id, e1id, e2id],
)
eq_(
scalar(session.query(Bundle("name", Engineer.employee_id))),
[e1id, e2id],
)
eq_(scalar(session.query(Bundle("name", Manager.employee_id))), [m1id])
# this currently emits "WHERE type IN (?, ?) AND type IN (?, ?)",
# so no result.
eq_(
session.query(
Bundle("name", Manager.employee_id, Engineer.employee_id)
).all(),
[],
)
eq_(
scalar(session.query(Bundle("name", JuniorEngineer.employee_id))),
[e2id],
)
def test_from_self(self):
Engineer = self.classes.Engineer
sess = create_session()
self.assert_compile(
sess.query(Engineer).from_self(),
"SELECT anon_1.employees_employee_id AS "
"anon_1_employees_employee_id, "
"anon_1.employees_name AS "
"anon_1_employees_name, "
"anon_1.employees_manager_data AS "
"anon_1_employees_manager_data, "
"anon_1.employees_engineer_info AS "
"anon_1_employees_engineer_info, "
"anon_1.employees_type AS "
"anon_1_employees_type FROM (SELECT "
"employees.employee_id AS "
"employees_employee_id, employees.name AS "
"employees_name, employees.manager_data AS "
"employees_manager_data, "
"employees.engineer_info AS "
"employees_engineer_info, employees.type "
"AS employees_type FROM employees WHERE "
"employees.type IN (:type_1, :type_2)) AS "
"anon_1",
use_default_dialect=True,
)
def test_select_from_aliased_w_subclass(self):
Engineer = self.classes.Engineer
sess = create_session()
a1 = aliased(Engineer)
self.assert_compile(
sess.query(a1.employee_id).select_from(a1),
"SELECT employees_1.employee_id AS employees_1_employee_id "
"FROM employees AS employees_1 WHERE employees_1.type "
"IN (:type_1, :type_2)",
)
self.assert_compile(
sess.query(literal("1")).select_from(a1),
"SELECT :param_1 AS param_1 FROM employees AS employees_1 "
"WHERE employees_1.type IN (:type_1, :type_2)",
)
def test_union_modifiers(self):
Engineer, Manager = self.classes("Engineer", "Manager")
sess = create_session()
q1 = sess.query(Engineer).filter(Engineer.engineer_info == "foo")
q2 = sess.query(Manager).filter(Manager.manager_data == "bar")
assert_sql = (
"SELECT anon_1.employees_employee_id AS "
"anon_1_employees_employee_id, "
"anon_1.employees_name AS anon_1_employees_name, "
"anon_1.employees_manager_data AS anon_1_employees_manager_data, "
"anon_1.employees_engineer_info AS anon_1_employees_engineer_info, " # noqa
"anon_1.employees_type AS anon_1_employees_type "
"FROM (SELECT employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.manager_data AS employees_manager_data, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type FROM employees "
"WHERE employees.engineer_info = :engineer_info_1 "
"AND employees.type IN (:type_1, :type_2) "
"%(token)s "
"SELECT employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.manager_data AS employees_manager_data, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type FROM employees "
"WHERE employees.manager_data = :manager_data_1 "
"AND employees.type IN (:type_3)) AS anon_1"
)
for meth, token in [
(q1.union, "UNION"),
(q1.union_all, "UNION ALL"),
(q1.except_, "EXCEPT"),
(q1.except_all, "EXCEPT ALL"),
(q1.intersect, "INTERSECT"),
(q1.intersect_all, "INTERSECT ALL"),
]:
self.assert_compile(
meth(q2),
assert_sql % {"token": token},
checkparams={
"manager_data_1": "bar",
"type_2": "juniorengineer",
"type_3": "manager",
"engineer_info_1": "foo",
"type_1": "engineer",
},
)
def test_from_self_count(self):
Engineer = self.classes.Engineer
sess = create_session()
col = func.count(literal_column("*"))
self.assert_compile(
sess.query(Engineer.employee_id).from_self(col),
"SELECT count(*) AS count_1 "
"FROM (SELECT employees.employee_id AS employees_employee_id "
"FROM employees "
"WHERE employees.type IN (:type_1, :type_2)) AS anon_1",
use_default_dialect=True,
)
def test_select_from_count(self):
Manager, Engineer = (self.classes.Manager, self.classes.Engineer)
sess = create_session()
m1 = Manager(name="Tom", manager_data="data1")
e1 = Engineer(name="Kurt", engineer_info="knows how to hack")
sess.add_all([m1, e1])
sess.flush()
eq_(sess.query(func.count(1)).select_from(Manager).all(), [(1,)])
def test_select_from_subquery(self):
Manager, JuniorEngineer, employees, Engineer = (
self.classes.Manager,
self.classes.JuniorEngineer,
self.tables.employees,
self.classes.Engineer,
)
sess = create_session()
m1 = Manager(name="Tom", manager_data="data1")
m2 = Manager(name="Tom2", manager_data="data2")
e1 = Engineer(name="Kurt", engineer_info="knows how to hack")
e2 = JuniorEngineer(name="Ed", engineer_info="oh that ed")
sess.add_all([m1, m2, e1, e2])
sess.flush()
eq_(
sess.query(Manager)
.select_entity_from(employees.select().limit(10).subquery())
.all(),
[m1, m2],
)
def test_count(self):
Employee = self.classes.Employee
JuniorEngineer = self.classes.JuniorEngineer
Manager = self.classes.Manager
Engineer = self.classes.Engineer
sess = create_session()
m1 = Manager(name="Tom", manager_data="data1")
m2 = Manager(name="Tom2", manager_data="data2")
e1 = Engineer(name="Kurt", engineer_info="data3")
e2 = JuniorEngineer(name="marvin", engineer_info="data4")
sess.add_all([m1, m2, e1, e2])
sess.flush()
eq_(sess.query(Manager).count(), 2)
eq_(sess.query(Engineer).count(), 2)
eq_(sess.query(Employee).count(), 4)
eq_(sess.query(Manager).filter(Manager.name.like("%m%")).count(), 2)
eq_(sess.query(Employee).filter(Employee.name.like("%m%")).count(), 3)
def test_exists_standalone(self):
Engineer = self.classes.Engineer
sess = create_session()
self.assert_compile(
sess.query(
sess.query(Engineer).filter(Engineer.name == "foo").exists()
),
"SELECT EXISTS (SELECT 1 FROM employees WHERE "
"employees.name = :name_1 AND employees.type "
"IN (:type_1, :type_2)) AS anon_1",
)
def test_type_filtering(self):
Employee, Manager, reports, Engineer = (
self.classes.Employee,
self.classes.Manager,
self.tables.reports,
self.classes.Engineer,
)
class Report(fixtures.ComparableEntity):
pass
mapper(
Report,
reports,
properties={"employee": relationship(Employee, backref="reports")},
)
sess = create_session()
m1 = Manager(name="Tom", manager_data="data1")
r1 = Report(employee=m1)
sess.add_all([m1, r1])
sess.flush()
rq = sess.query(Report)
assert (
len(rq.filter(Report.employee.of_type(Manager).has()).all()) == 1
)
assert (
len(rq.filter(Report.employee.of_type(Engineer).has()).all()) == 0
)
def test_type_joins(self):
Employee, Manager, reports, Engineer = (
self.classes.Employee,
self.classes.Manager,
self.tables.reports,
self.classes.Engineer,
)
class Report(fixtures.ComparableEntity):
pass
mapper(
Report,
reports,
properties={"employee": relationship(Employee, backref="reports")},
)
sess = create_session()
m1 = Manager(name="Tom", manager_data="data1")
r1 = Report(employee=m1)
sess.add_all([m1, r1])
sess.flush()
rq = sess.query(Report)
assert len(rq.join(Report.employee.of_type(Manager)).all()) == 1
assert len(rq.join(Report.employee.of_type(Engineer)).all()) == 0
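

# A minimal sketch of the single-table-inheritance behaviour exercised by the
# tests above: all subclasses share the parent's table, and querying a
# subclass implicitly adds a discriminator criterion such as
# "employees.type IN ('engineer', 'juniorengineer')". The session and mapped
# Engineer class are assumed to come from a configuration like the one in
# SingleInheritanceTest.setup_mappers().
def _example_single_table_query(session, Engineer):
    """Illustrates the implicit type filter applied to subclass queries."""
    # Emits roughly:
    #   SELECT ... FROM employees
    #   WHERE employees.type IN (:type_1, :type_2)
    return session.query(Engineer).all()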
class RelationshipFromSingleTest(
testing.AssertsCompiledSQL, fixtures.MappedTest
):
@classmethod
def define_tables(cls, metadata):
Table(
"employee",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("name", String(50)),
Column("type", String(20)),
)
Table(
"employee_stuff",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("employee_id", Integer, ForeignKey("employee.id")),
Column("name", String(50)),
)
@classmethod
def setup_classes(cls):
class Employee(cls.Comparable):
pass
class Manager(Employee):
pass
class Stuff(cls.Comparable):
pass
def test_subquery_load(self):
employee, employee_stuff, Employee, Stuff, Manager = (
self.tables.employee,
self.tables.employee_stuff,
self.classes.Employee,
self.classes.Stuff,
self.classes.Manager,
)
mapper(
Employee,
employee,
polymorphic_on=employee.c.type,
polymorphic_identity="employee",
)
mapper(
Manager,
inherits=Employee,
polymorphic_identity="manager",
properties={"stuff": relationship(Stuff)},
)
mapper(Stuff, employee_stuff)
sess = create_session()
context = (
sess.query(Manager)
.options(subqueryload("stuff"))
._compile_context()
)
subq = context.attributes[
(
"subquery",
(class_mapper(Manager), class_mapper(Manager).attrs.stuff),
)
]
self.assert_compile(
subq,
"SELECT employee_stuff.id AS "
"employee_stuff_id, employee_stuff.employee"
"_id AS employee_stuff_employee_id, "
"employee_stuff.name AS "
"employee_stuff_name, anon_1.employee_id "
"AS anon_1_employee_id FROM (SELECT "
"employee.id AS employee_id FROM employee "
"WHERE employee.type IN (:type_1)) AS anon_1 "
"JOIN employee_stuff ON anon_1.employee_id "
"= employee_stuff.employee_id ORDER BY "
"anon_1.employee_id",
use_default_dialect=True,
)
class RelationshipToSingleTest(
testing.AssertsCompiledSQL, fixtures.MappedTest
):
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table(
"employees",
metadata,
Column(
"employee_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
Column("manager_data", String(50)),
Column("engineer_info", String(50)),
Column("type", String(20)),
Column("company_id", Integer, ForeignKey("companies.company_id")),
)
Table(
"companies",
metadata,
Column(
"company_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
)
@classmethod
def setup_classes(cls):
class Company(cls.Comparable):
pass
class Employee(cls.Comparable):
pass
class Manager(Employee):
pass
class Engineer(Employee):
pass
class JuniorEngineer(Engineer):
pass
def test_of_type(self):
(
JuniorEngineer,
Company,
companies,
Manager,
Employee,
employees,
Engineer,
) = (
self.classes.JuniorEngineer,
self.classes.Company,
self.tables.companies,
self.classes.Manager,
self.classes.Employee,
self.tables.employees,
self.classes.Engineer,
)
mapper(
Company,
companies,
properties={
"employees": relationship(Employee, backref="company")
},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Manager, inherits=Employee, polymorphic_identity="manager")
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
mapper(
JuniorEngineer,
inherits=Engineer,
polymorphic_identity="juniorengineer",
)
sess = sessionmaker()()
c1 = Company(name="c1")
c2 = Company(name="c2")
m1 = Manager(name="Tom", manager_data="data1", company=c1)
m2 = Manager(name="Tom2", manager_data="data2", company=c2)
e1 = Engineer(
name="Kurt", engineer_info="knows how to hack", company=c2
)
e2 = JuniorEngineer(name="Ed", engineer_info="oh that ed", company=c1)
sess.add_all([c1, c2, m1, m2, e1, e2])
sess.commit()
sess.expunge_all()
eq_(
sess.query(Company)
.filter(Company.employees.of_type(JuniorEngineer).any())
.all(),
[Company(name="c1")],
)
eq_(
sess.query(Company)
.join(Company.employees.of_type(JuniorEngineer))
.all(),
[Company(name="c1")],
)
def test_of_type_aliased_fromjoinpoint(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company, companies, properties={"employee": relationship(Employee)}
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
sess = create_session()
self.assert_compile(
sess.query(Company).outerjoin(
Company.employee.of_type(Engineer),
aliased=True,
from_joinpoint=True,
),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name FROM companies "
"LEFT OUTER JOIN employees AS employees_1 ON "
"companies.company_id = employees_1.company_id "
"AND employees_1.type IN (:type_1)",
)
def test_join_explicit_onclause_no_discriminator(self):
# test issue #3462
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"employees": relationship(Employee)},
)
mapper(Employee, employees)
mapper(Engineer, inherits=Employee)
sess = create_session()
self.assert_compile(
sess.query(Company, Engineer.name).join(
Engineer, Company.company_id == Engineer.company_id
),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees.name AS employees_name "
"FROM companies JOIN "
"employees ON companies.company_id = employees.company_id",
)
def test_outer_join_prop(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
sess = create_session()
self.assert_compile(
sess.query(Company, Engineer.name).outerjoin("engineers"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees.name AS employees_name "
"FROM companies LEFT OUTER JOIN employees ON companies.company_id "
"= employees.company_id AND employees.type IN (:type_1)",
)
def test_outer_join_prop_alias(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
eng_alias = aliased(Engineer)
sess = create_session()
self.assert_compile(
sess.query(Company, eng_alias.name).outerjoin(
eng_alias, Company.engineers
),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, employees_1.name AS "
"employees_1_name FROM companies LEFT OUTER "
"JOIN employees AS employees_1 ON companies.company_id "
"= employees_1.company_id AND employees_1.type IN (:type_1)",
)
def test_outer_join_literal_onclause(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
sess = create_session()
self.assert_compile(
sess.query(Company, Engineer).outerjoin(
Engineer, Company.company_id == Engineer.company_id
),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.manager_data AS employees_manager_data, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type, "
"employees.company_id AS employees_company_id FROM companies "
"LEFT OUTER JOIN employees ON "
"companies.company_id = employees.company_id "
"AND employees.type IN (:type_1)",
)
def test_outer_join_literal_onclause_alias(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
eng_alias = aliased(Engineer)
sess = create_session()
self.assert_compile(
sess.query(Company, eng_alias).outerjoin(
eng_alias, Company.company_id == eng_alias.company_id
),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees_1.employee_id AS employees_1_employee_id, "
"employees_1.name AS employees_1_name, "
"employees_1.manager_data AS employees_1_manager_data, "
"employees_1.engineer_info AS employees_1_engineer_info, "
"employees_1.type AS employees_1_type, "
"employees_1.company_id AS employees_1_company_id "
"FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
"companies.company_id = employees_1.company_id "
"AND employees_1.type IN (:type_1)",
)
def test_outer_join_no_onclause(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
sess = create_session()
self.assert_compile(
sess.query(Company, Engineer).outerjoin(Engineer),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.manager_data AS employees_manager_data, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type, "
"employees.company_id AS employees_company_id "
"FROM companies LEFT OUTER JOIN employees ON "
"companies.company_id = employees.company_id "
"AND employees.type IN (:type_1)",
)
def test_outer_join_no_onclause_alias(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
eng_alias = aliased(Engineer)
sess = create_session()
self.assert_compile(
sess.query(Company, eng_alias).outerjoin(eng_alias),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name, "
"employees_1.employee_id AS employees_1_employee_id, "
"employees_1.name AS employees_1_name, "
"employees_1.manager_data AS employees_1_manager_data, "
"employees_1.engineer_info AS employees_1_engineer_info, "
"employees_1.type AS employees_1_type, "
"employees_1.company_id AS employees_1_company_id "
"FROM companies LEFT OUTER JOIN employees AS employees_1 ON "
"companies.company_id = employees_1.company_id "
"AND employees_1.type IN (:type_1)",
)
def test_correlated_column_select(self):
Company, Employee, Engineer = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(Company, companies)
mapper(
Employee,
employees,
polymorphic_on=employees.c.type,
properties={"company": relationship(Company)},
)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
sess = create_session()
engineer_count = (
sess.query(func.count(Engineer.employee_id))
.select_from(Engineer)
.filter(Engineer.company_id == Company.company_id)
.correlate(Company)
.scalar_subquery()
)
self.assert_compile(
sess.query(Company.company_id, engineer_count),
"SELECT companies.company_id AS companies_company_id, "
"(SELECT count(employees.employee_id) AS count_1 "
"FROM employees WHERE employees.company_id = "
"companies.company_id AND employees.type IN (:type_1)) AS anon_1 "
"FROM companies",
)
def test_no_aliasing_from_overlap(self):
# test [ticket:3233]
Company, Employee, Engineer, Manager = (
self.classes.Company,
self.classes.Employee,
self.classes.Engineer,
self.classes.Manager,
)
companies, employees = self.tables.companies, self.tables.employees
mapper(
Company,
companies,
properties={
"employees": relationship(Employee, backref="company")
},
)
mapper(Employee, employees, polymorphic_on=employees.c.type)
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
mapper(Manager, inherits=Employee, polymorphic_identity="manager")
s = create_session()
q1 = (
s.query(Engineer)
.join(Engineer.company)
.join(Manager, Company.employees)
)
q2 = (
s.query(Engineer)
.join(Engineer.company)
.join(Manager, Company.company_id == Manager.company_id)
)
q3 = (
s.query(Engineer)
.join(Engineer.company)
.join(Manager, Company.employees.of_type(Manager))
)
q4 = (
s.query(Engineer)
.join(Company, Company.company_id == Engineer.company_id)
.join(Manager, Company.employees.of_type(Manager))
)
q5 = (
s.query(Engineer)
.join(Company, Company.company_id == Engineer.company_id)
.join(Manager, Company.company_id == Manager.company_id)
)
# note that the query is incorrect SQL; we JOIN to
# employees twice. However, this is what's expected so we seek
# to be consistent; previously, aliasing would sneak in due to the
# nature of the "left" side.
for q in [q1, q2, q3, q4, q5]:
self.assert_compile(
q,
"SELECT employees.employee_id AS employees_employee_id, "
"employees.name AS employees_name, "
"employees.manager_data AS employees_manager_data, "
"employees.engineer_info AS employees_engineer_info, "
"employees.type AS employees_type, "
"employees.company_id AS employees_company_id "
"FROM employees JOIN companies "
"ON companies.company_id = employees.company_id "
"JOIN employees "
"ON companies.company_id = employees.company_id "
"AND employees.type IN (:type_1) "
"WHERE employees.type IN (:type_2)",
)
def test_relationship_to_subclass(self):
(
JuniorEngineer,
Company,
companies,
Manager,
Employee,
employees,
Engineer,
) = (
self.classes.JuniorEngineer,
self.classes.Company,
self.tables.companies,
self.classes.Manager,
self.classes.Employee,
self.tables.employees,
self.classes.Engineer,
)
mapper(
Company,
companies,
properties={"engineers": relationship(Engineer)},
)
mapper(
Employee,
employees,
polymorphic_on=employees.c.type,
properties={"company": relationship(Company)},
)
mapper(Manager, inherits=Employee, polymorphic_identity="manager")
mapper(Engineer, inherits=Employee, polymorphic_identity="engineer")
mapper(
JuniorEngineer,
inherits=Engineer,
polymorphic_identity="juniorengineer",
)
sess = sessionmaker()()
c1 = Company(name="c1")
c2 = Company(name="c2")
m1 = Manager(name="Tom", manager_data="data1", company=c1)
m2 = Manager(name="Tom2", manager_data="data2", company=c2)
e1 = Engineer(
name="Kurt", engineer_info="knows how to hack", company=c2
)
e2 = JuniorEngineer(name="Ed", engineer_info="oh that ed", company=c1)
sess.add_all([c1, c2, m1, m2, e1, e2])
sess.commit()
eq_(c1.engineers, [e2])
eq_(c2.engineers, [e1])
sess.expunge_all()
eq_(
sess.query(Company).order_by(Company.name).all(),
[
Company(name="c1", engineers=[JuniorEngineer(name="Ed")]),
Company(name="c2", engineers=[Engineer(name="Kurt")]),
],
)
# eager load join should limit to only "Engineer"
sess.expunge_all()
eq_(
sess.query(Company)
.options(joinedload("engineers"))
.order_by(Company.name)
.all(),
[
Company(name="c1", engineers=[JuniorEngineer(name="Ed")]),
Company(name="c2", engineers=[Engineer(name="Kurt")]),
],
)
# join() to Company.engineers, Employee as the requested entity
sess.expunge_all()
eq_(
sess.query(Company, Employee)
.join(Company.engineers)
.order_by(Company.name)
.all(),
[
(Company(name="c1"), JuniorEngineer(name="Ed")),
(Company(name="c2"), Engineer(name="Kurt")),
],
)
# join() to Company.engineers, Engineer as the requested entity.
# this actually applies the IN criterion twice which is less than
# ideal.
sess.expunge_all()
eq_(
sess.query(Company, Engineer)
.join(Company.engineers)
.order_by(Company.name)
.all(),
[
(Company(name="c1"), JuniorEngineer(name="Ed")),
(Company(name="c2"), Engineer(name="Kurt")),
],
)
# join() to Company.engineers without any Employee/Engineer entity
sess.expunge_all()
eq_(
sess.query(Company)
.join(Company.engineers)
.filter(Engineer.name.in_(["Tom", "Kurt"]))
.all(),
[Company(name="c2")],
)
# this however fails as it does not limit the subtypes to just
# "Engineer". with joins constructed by filter(), we seem to be
# following a policy where we don't try to make decisions on how to
# join to the target class, whereas when using join() we seem to have
# a lot more capabilities. we might want to document
# "advantages of join() vs. straight filtering", or add a large
# section to "inheritance" laying out all the various behaviors Query
# has.
@testing.fails_on_everything_except()
def go():
sess.expunge_all()
eq_(
sess.query(Company)
.filter(Company.company_id == Engineer.company_id)
.filter(Engineer.name.in_(["Tom", "Kurt"]))
.all(),
[Company(name="c2")],
)
go()
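        # An illustrative sketch (not part of the original test): the missing
        # subtype restriction can be stated by hand when joining via filter(),
        # e.g. by adding the discriminator criterion explicitly:
        #
        #   sess.query(Company).\
        #       filter(Company.company_id == Engineer.company_id).\
        #       filter(Engineer.type.in_(["engineer", "juniorengineer"])).\
        #       filter(Engineer.name.in_(["Tom", "Kurt"])).all()
        #
        # which, under the mappings above, would be expected to return
        # [Company(name="c2")].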
class ManyToManyToSingleTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table(
"parent",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
)
Table(
"m2m",
metadata,
Column(
"parent_id", Integer, ForeignKey("parent.id"), primary_key=True
),
Column(
"child_id", Integer, ForeignKey("child.id"), primary_key=True
),
)
Table(
"child",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("discriminator", String(20)),
Column("name", String(20)),
)
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
class SubChild1(Child):
pass
class SubChild2(Child):
pass
@classmethod
def setup_mappers(cls):
mapper(
cls.classes.Parent,
cls.tables.parent,
properties={
"s1": relationship(
cls.classes.SubChild1,
secondary=cls.tables.m2m,
uselist=False,
),
"s2": relationship(
cls.classes.SubChild2, secondary=cls.tables.m2m
),
},
)
mapper(
cls.classes.Child,
cls.tables.child,
polymorphic_on=cls.tables.child.c.discriminator,
)
mapper(
cls.classes.SubChild1,
inherits=cls.classes.Child,
polymorphic_identity="sub1",
)
mapper(
cls.classes.SubChild2,
inherits=cls.classes.Child,
polymorphic_identity="sub2",
)
@classmethod
def insert_data(cls):
Parent = cls.classes.Parent
SubChild1 = cls.classes.SubChild1
SubChild2 = cls.classes.SubChild2
s = Session()
s.add_all(
[
Parent(
s1=SubChild1(name="sc1_1"),
s2=[SubChild2(name="sc2_1"), SubChild2(name="sc2_2")],
)
]
)
s.commit()
def test_eager_join(self):
Parent = self.classes.Parent
SubChild1 = self.classes.SubChild1
s = Session()
p1 = s.query(Parent).options(joinedload(Parent.s1)).all()[0]
eq_(p1.__dict__["s1"], SubChild1(name="sc1_1"))
def test_manual_join(self):
Parent = self.classes.Parent
Child = self.classes.Child
SubChild1 = self.classes.SubChild1
s = Session()
p1, c1 = s.query(Parent, Child).outerjoin(Parent.s1).all()[0]
eq_(c1, SubChild1(name="sc1_1"))
def test_assert_join_sql(self):
Parent = self.classes.Parent
Child = self.classes.Child
s = Session()
self.assert_compile(
s.query(Parent, Child).outerjoin(Parent.s1),
"SELECT parent.id AS parent_id, child.id AS child_id, "
"child.discriminator AS child_discriminator, "
"child.name AS child_name "
"FROM parent LEFT OUTER JOIN (m2m AS m2m_1 "
"JOIN child ON child.id = m2m_1.child_id "
"AND child.discriminator IN (:discriminator_1)) "
"ON parent.id = m2m_1.parent_id",
)
def test_assert_joinedload_sql(self):
Parent = self.classes.Parent
s = Session()
self.assert_compile(
s.query(Parent).options(joinedload(Parent.s1)),
"SELECT parent.id AS parent_id, child_1.id AS child_1_id, "
"child_1.discriminator AS child_1_discriminator, "
"child_1.name AS child_1_name "
"FROM parent LEFT OUTER JOIN "
"(m2m AS m2m_1 JOIN child AS child_1 "
"ON child_1.id = m2m_1.child_id AND child_1.discriminator "
"IN (:discriminator_1)) ON parent.id = m2m_1.parent_id",
)
class SingleOnJoinedTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global persons_table, employees_table
persons_table = Table(
"persons",
metadata,
Column(
"person_id",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("name", String(50)),
Column("type", String(20), nullable=False),
)
employees_table = Table(
"employees",
metadata,
Column(
"person_id",
Integer,
ForeignKey("persons.person_id"),
primary_key=True,
),
Column("employee_data", String(50)),
Column("manager_data", String(50)),
)
def test_single_on_joined(self):
class Person(fixtures.ComparableEntity):
pass
class Employee(Person):
pass
class Manager(Employee):
pass
mapper(
Person,
persons_table,
polymorphic_on=persons_table.c.type,
polymorphic_identity="person",
)
mapper(
Employee,
employees_table,
inherits=Person,
polymorphic_identity="engineer",
)
mapper(Manager, inherits=Employee, polymorphic_identity="manager")
sess = create_session()
sess.add(Person(name="p1"))
sess.add(Employee(name="e1", employee_data="ed1"))
sess.add(Manager(name="m1", employee_data="ed2", manager_data="md1"))
sess.flush()
sess.expunge_all()
eq_(
sess.query(Person).order_by(Person.person_id).all(),
[
Person(name="p1"),
Employee(name="e1", employee_data="ed1"),
Manager(name="m1", employee_data="ed2", manager_data="md1"),
],
)
sess.expunge_all()
eq_(
sess.query(Employee).order_by(Person.person_id).all(),
[
Employee(name="e1", employee_data="ed1"),
Manager(name="m1", employee_data="ed2", manager_data="md1"),
],
)
sess.expunge_all()
eq_(
sess.query(Manager).order_by(Person.person_id).all(),
[Manager(name="m1", employee_data="ed2", manager_data="md1")],
)
sess.expunge_all()
def go():
eq_(
sess.query(Person)
.with_polymorphic("*")
.order_by(Person.person_id)
.all(),
[
Person(name="p1"),
Employee(name="e1", employee_data="ed1"),
Manager(
name="m1", employee_data="ed2", manager_data="md1"
),
],
)
self.assert_sql_count(testing.db, go, 1)
class SingleFromPolySelectableTest(
fixtures.DeclarativeMappedTest, AssertsCompiledSQL
):
__dialect__ = "default"
@classmethod
def setup_classes(cls, with_polymorphic=None, include_sub_defaults=False):
Base = cls.DeclarativeBasic
class Employee(Base):
__tablename__ = "employee"
id = Column(Integer, primary_key=True)
name = Column(String(50))
type = Column(String(50))
__mapper_args__ = {
"polymorphic_identity": "employee",
"polymorphic_on": type,
}
class Engineer(Employee):
__tablename__ = "engineer"
id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
engineer_info = Column(String(50))
manager_id = Column(ForeignKey("manager.id"))
__mapper_args__ = {"polymorphic_identity": "engineer"}
class Manager(Employee):
__tablename__ = "manager"
id = Column(Integer, ForeignKey("employee.id"), primary_key=True)
manager_data = Column(String(50))
__mapper_args__ = {"polymorphic_identity": "manager"}
class Boss(Manager):
__mapper_args__ = {"polymorphic_identity": "boss"}
def _with_poly_fixture(self):
employee = self.classes.Employee.__table__
engineer = self.classes.Engineer.__table__
manager = self.classes.Manager.__table__
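        # Build a two-branch UNION ALL (employee JOIN manager, then
        # employee JOIN engineer), labeled and aliased so it can act as the
        # custom "with_polymorphic" selectable used by the tests below.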
poly = (
select(
[
employee.c.id,
employee.c.type,
employee.c.name,
manager.c.manager_data,
null().label("engineer_info"),
null().label("manager_id"),
]
)
.select_from(employee.join(manager))
.apply_labels()
.union_all(
select(
[
employee.c.id,
employee.c.type,
employee.c.name,
null().label("manager_data"),
engineer.c.engineer_info,
engineer.c.manager_id,
]
)
.select_from(employee.join(engineer))
.apply_labels()
)
.alias()
)
return poly
def test_wpoly_single_inh_subclass(self):
poly = with_polymorphic(
self.classes.Employee,
[self.classes.Boss, self.classes.Manager, self.classes.Engineer],
self._with_poly_fixture(),
)
s = Session()
q = s.query(poly.Boss)
self.assert_compile(
q,
"SELECT "
"anon_1.employee_id AS anon_1_employee_id, "
"anon_1.employee_name AS anon_1_employee_name, "
"anon_1.employee_type AS anon_1_employee_type, "
"anon_1.manager_manager_data AS anon_1_manager_manager_data "
"FROM "
"(SELECT "
"employee.id AS employee_id, employee.type AS employee_type, "
"employee.name AS employee_name, "
"manager.manager_data AS manager_manager_data, "
"NULL AS engineer_info, NULL AS manager_id FROM employee "
"JOIN manager ON employee.id = manager.id "
"UNION ALL "
"SELECT employee.id AS employee_id, "
"employee.type AS employee_type, "
"employee.name AS employee_name, NULL AS manager_data, "
"engineer.engineer_info AS engineer_engineer_info, "
"engineer.manager_id AS engineer_manager_id "
"FROM employee JOIN engineer ON employee.id = engineer.id) "
"AS anon_1 "
"WHERE anon_1.employee_type IN (:type_1)",
)
def test_query_wpoly_single_inh_subclass(self):
Boss = self.classes.Boss
poly = self._with_poly_fixture()
s = Session()
q = s.query(Boss).with_polymorphic(Boss, poly)
self.assert_compile(
q,
"SELECT anon_1.employee_id AS anon_1_employee_id, "
"anon_1.employee_name AS anon_1_employee_name, "
"anon_1.employee_type AS anon_1_employee_type, "
"anon_1.manager_manager_data AS anon_1_manager_manager_data "
"FROM (SELECT employee.id AS employee_id, employee.type "
"AS employee_type, employee.name AS employee_name, "
"manager.manager_data AS manager_manager_data, "
"NULL AS engineer_info, NULL AS manager_id FROM employee "
"JOIN manager ON employee.id = manager.id "
"UNION ALL SELECT employee.id AS employee_id, "
"employee.type AS employee_type, employee.name AS employee_name, "
"NULL AS manager_data, "
"engineer.engineer_info AS engineer_engineer_info, "
"engineer.manager_id AS engineer_manager_id "
"FROM employee JOIN engineer ON employee.id = engineer.id) "
"AS anon_1 WHERE anon_1.employee_type IN (:type_1)",
)
def test_single_inh_subclass_join_joined_inh_subclass(self):
Boss, Engineer = self.classes("Boss", "Engineer")
s = Session()
q = s.query(Boss).join(Engineer, Engineer.manager_id == Boss.id)
self.assert_compile(
q,
"SELECT manager.id AS manager_id, employee.id AS employee_id, "
"employee.name AS employee_name, "
"employee.type AS employee_type, "
"manager.manager_data AS manager_manager_data "
"FROM employee JOIN manager ON employee.id = manager.id "
"JOIN (employee AS employee_1 JOIN engineer AS engineer_1 "
"ON employee_1.id = engineer_1.id) "
"ON engineer_1.manager_id = manager.id "
"WHERE employee.type IN (:type_1)",
)
def test_single_inh_subclass_join_wpoly_joined_inh_subclass(self):
Boss = self.classes.Boss
poly = with_polymorphic(
self.classes.Employee,
[self.classes.Boss, self.classes.Manager, self.classes.Engineer],
self._with_poly_fixture(),
)
s = Session()
q = s.query(Boss).join(
poly.Engineer, poly.Engineer.manager_id == Boss.id
)
self.assert_compile(
q,
"SELECT manager.id AS manager_id, employee.id AS employee_id, "
"employee.name AS employee_name, employee.type AS employee_type, "
"manager.manager_data AS manager_manager_data "
"FROM employee JOIN manager ON employee.id = manager.id "
"JOIN (SELECT employee.id AS employee_id, "
"employee.type AS employee_type, employee.name AS employee_name, "
"manager.manager_data AS manager_manager_data, "
"NULL AS engineer_info, NULL AS manager_id "
"FROM employee JOIN manager ON employee.id = manager.id "
"UNION ALL "
"SELECT employee.id AS employee_id, "
"employee.type AS employee_type, employee.name AS employee_name, "
"NULL AS manager_data, "
"engineer.engineer_info AS engineer_engineer_info, "
"engineer.manager_id AS engineer_manager_id "
"FROM employee "
"JOIN engineer ON employee.id = engineer.id) AS anon_1 "
"ON anon_1.manager_id = manager.id "
"WHERE employee.type IN (:type_1)",
)
def test_joined_inh_subclass_join_single_inh_subclass(self):
Engineer = self.classes.Engineer
Boss = self.classes.Boss
s = Session()
q = s.query(Engineer).join(Boss, Engineer.manager_id == Boss.id)
self.assert_compile(
q,
"SELECT engineer.id AS engineer_id, employee.id AS employee_id, "
"employee.name AS employee_name, employee.type AS employee_type, "
"engineer.engineer_info AS engineer_engineer_info, "
"engineer.manager_id AS engineer_manager_id "
"FROM employee JOIN engineer ON employee.id = engineer.id "
"JOIN (employee AS employee_1 JOIN manager AS manager_1 "
"ON employee_1.id = manager_1.id) "
"ON engineer.manager_id = manager_1.id "
"AND employee_1.type IN (:type_1)",
)
class EagerDefaultEvalTest(fixtures.DeclarativeMappedTest):
@classmethod
def setup_classes(cls, with_polymorphic=None, include_sub_defaults=False):
Base = cls.DeclarativeBasic
class Foo(Base):
__tablename__ = "foo"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
type = Column(String(50))
created_at = Column(Integer, server_default="5")
__mapper_args__ = {
"polymorphic_on": type,
"polymorphic_identity": "foo",
"eager_defaults": True,
"with_polymorphic": with_polymorphic,
}
class Bar(Foo):
bar = Column(String(50))
if include_sub_defaults:
bat = Column(Integer, server_default="10")
__mapper_args__ = {"polymorphic_identity": "bar"}
def test_persist_foo(self):
Foo = self.classes.Foo
foo = Foo()
session = Session()
session.add(foo)
session.flush()
eq_(foo.__dict__["created_at"], 5)
assert "bat" not in foo.__dict__
session.close()
def test_persist_bar(self):
Bar = self.classes.Bar
bar = Bar()
session = Session()
session.add(bar)
session.flush()
eq_(bar.__dict__["created_at"], 5)
if "bat" in inspect(Bar).attrs:
eq_(bar.__dict__["bat"], 10)
session.close()
class EagerDefaultEvalTestSubDefaults(EagerDefaultEvalTest):
@classmethod
def setup_classes(cls):
super(EagerDefaultEvalTestSubDefaults, cls).setup_classes(
include_sub_defaults=True
)
class EagerDefaultEvalTestPolymorphic(EagerDefaultEvalTest):
@classmethod
def setup_classes(cls):
super(EagerDefaultEvalTestPolymorphic, cls).setup_classes(
with_polymorphic="*"
)
|
|
"""
pygments.lexers._tsql_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These are manually translated lists from https://msdn.microsoft.com.
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# See https://msdn.microsoft.com/en-us/library/ms174986.aspx.
OPERATORS = (
'!<',
'!=',
'!>',
'<',
'<=',
'<>',
'=',
'>',
'>=',
'+',
'+=',
'-',
'-=',
'*',
'*=',
'/',
'/=',
'%',
'%=',
'&',
'&=',
'|',
'|=',
'^',
'^=',
'~',
'::',
)
OPERATOR_WORDS = (
'all',
'and',
'any',
'between',
'except',
'exists',
'in',
'intersect',
'like',
'not',
'or',
'some',
'union',
)
_KEYWORDS_SERVER = (
'add',
'all',
'alter',
'and',
'any',
'as',
'asc',
'authorization',
'backup',
'begin',
'between',
'break',
'browse',
'bulk',
'by',
'cascade',
'case',
'catch',
'check',
'checkpoint',
'close',
'clustered',
'coalesce',
'collate',
'column',
'commit',
'compute',
'constraint',
'contains',
'containstable',
'continue',
'convert',
'create',
'cross',
'current',
'current_date',
'current_time',
'current_timestamp',
'current_user',
'cursor',
'database',
'dbcc',
'deallocate',
'declare',
'default',
'delete',
'deny',
'desc',
'disk',
'distinct',
'distributed',
'double',
'drop',
'dump',
'else',
'end',
'errlvl',
'escape',
'except',
'exec',
'execute',
'exists',
'exit',
'external',
'fetch',
'file',
'fillfactor',
'for',
'foreign',
'freetext',
'freetexttable',
'from',
'full',
'function',
'goto',
'grant',
'group',
'having',
'holdlock',
'identity',
'identity_insert',
'identitycol',
'if',
'in',
'index',
'inner',
'insert',
'intersect',
'into',
'is',
'join',
'key',
'kill',
'left',
'like',
'lineno',
'load',
'merge',
'national',
'nocheck',
'nonclustered',
'not',
'null',
'nullif',
'of',
'off',
'offsets',
'on',
'open',
'opendatasource',
'openquery',
'openrowset',
'openxml',
'option',
'or',
'order',
'outer',
'over',
'percent',
'pivot',
'plan',
'precision',
'primary',
'print',
'proc',
'procedure',
'public',
'raiserror',
'read',
'readtext',
'reconfigure',
'references',
'replication',
'restore',
'restrict',
'return',
'revert',
'revoke',
'right',
'rollback',
'rowcount',
'rowguidcol',
'rule',
'save',
'schema',
'securityaudit',
'select',
'semantickeyphrasetable',
'semanticsimilaritydetailstable',
'semanticsimilaritytable',
'session_user',
'set',
'setuser',
'shutdown',
'some',
'statistics',
'system_user',
'table',
'tablesample',
'textsize',
'then',
'throw',
'to',
'top',
'tran',
'transaction',
'trigger',
'truncate',
'try',
'try_convert',
'tsequal',
'union',
'unique',
'unpivot',
'update',
'updatetext',
'use',
'user',
'values',
'varying',
'view',
'waitfor',
'when',
'where',
'while',
'with',
'within',
'writetext',
)
_KEYWORDS_FUTURE = (
'absolute',
'action',
'admin',
'after',
'aggregate',
'alias',
'allocate',
'are',
'array',
'asensitive',
'assertion',
'asymmetric',
'at',
'atomic',
'before',
'binary',
'bit',
'blob',
'boolean',
'both',
'breadth',
'call',
'called',
'cardinality',
'cascaded',
'cast',
'catalog',
'char',
'character',
'class',
'clob',
'collation',
'collect',
'completion',
'condition',
'connect',
'connection',
'constraints',
'constructor',
'corr',
'corresponding',
'covar_pop',
'covar_samp',
'cube',
'cume_dist',
'current_catalog',
'current_default_transform_group',
'current_path',
'current_role',
'current_schema',
'current_transform_group_for_type',
'cycle',
'data',
'date',
'day',
'dec',
'decimal',
'deferrable',
'deferred',
'depth',
'deref',
'describe',
'descriptor',
'destroy',
'destructor',
'deterministic',
'diagnostics',
'dictionary',
'disconnect',
'domain',
'dynamic',
'each',
'element',
'end-exec',
'equals',
'every',
'exception',
'false',
'filter',
'first',
'float',
'found',
'free',
'fulltexttable',
'fusion',
'general',
'get',
'global',
'go',
'grouping',
'hold',
'host',
'hour',
'ignore',
'immediate',
'indicator',
'initialize',
'initially',
'inout',
'input',
'int',
'integer',
'intersection',
'interval',
'isolation',
'iterate',
'language',
'large',
'last',
'lateral',
'leading',
'less',
'level',
'like_regex',
'limit',
'ln',
'local',
'localtime',
'localtimestamp',
'locator',
'map',
'match',
'member',
'method',
'minute',
'mod',
'modifies',
'modify',
'module',
'month',
'multiset',
'names',
'natural',
'nchar',
'nclob',
'new',
'next',
'no',
'none',
'normalize',
'numeric',
'object',
'occurrences_regex',
'old',
'only',
'operation',
'ordinality',
'out',
'output',
'overlay',
'pad',
'parameter',
'parameters',
'partial',
'partition',
'path',
'percent_rank',
'percentile_cont',
'percentile_disc',
'position_regex',
'postfix',
'prefix',
'preorder',
'prepare',
'preserve',
'prior',
'privileges',
'range',
'reads',
'real',
'recursive',
'ref',
'referencing',
'regr_avgx',
'regr_avgy',
'regr_count',
'regr_intercept',
'regr_r2',
'regr_slope',
'regr_sxx',
'regr_sxy',
'regr_syy',
'relative',
'release',
'result',
'returns',
'role',
'rollup',
'routine',
'row',
'rows',
'savepoint',
'scope',
'scroll',
'search',
'second',
'section',
'sensitive',
'sequence',
'session',
'sets',
'similar',
'size',
'smallint',
'space',
'specific',
'specifictype',
'sql',
'sqlexception',
'sqlstate',
'sqlwarning',
'start',
'state',
'statement',
'static',
'stddev_pop',
'stddev_samp',
'structure',
'submultiset',
'substring_regex',
'symmetric',
'system',
'temporary',
'terminate',
'than',
'time',
'timestamp',
'timezone_hour',
'timezone_minute',
'trailing',
'translate_regex',
'translation',
'treat',
'true',
'uescape',
'under',
'unknown',
'unnest',
'usage',
'using',
'value',
'var_pop',
'var_samp',
'varchar',
'variable',
'whenever',
'width_bucket',
'window',
'within',
'without',
'work',
'write',
'xmlagg',
'xmlattributes',
'xmlbinary',
'xmlcast',
'xmlcomment',
'xmlconcat',
'xmldocument',
'xmlelement',
'xmlexists',
'xmlforest',
'xmliterate',
'xmlnamespaces',
'xmlparse',
'xmlpi',
'xmlquery',
'xmlserialize',
'xmltable',
'xmltext',
'xmlvalidate',
'year',
'zone',
)
_KEYWORDS_ODBC = (
'absolute',
'action',
'ada',
'add',
'all',
'allocate',
'alter',
'and',
'any',
'are',
'as',
'asc',
'assertion',
'at',
'authorization',
'avg',
'begin',
'between',
'bit',
'bit_length',
'both',
'by',
'cascade',
'cascaded',
'case',
'cast',
'catalog',
'char',
'char_length',
'character',
'character_length',
'check',
'close',
'coalesce',
'collate',
'collation',
'column',
'commit',
'connect',
'connection',
'constraint',
'constraints',
'continue',
'convert',
'corresponding',
'count',
'create',
'cross',
'current',
'current_date',
'current_time',
'current_timestamp',
'current_user',
'cursor',
'date',
'day',
'deallocate',
'dec',
'decimal',
'declare',
'default',
'deferrable',
'deferred',
'delete',
'desc',
'describe',
'descriptor',
'diagnostics',
'disconnect',
'distinct',
'domain',
'double',
'drop',
'else',
'end',
'end-exec',
'escape',
'except',
'exception',
'exec',
'execute',
'exists',
'external',
'extract',
'false',
'fetch',
'first',
'float',
'for',
'foreign',
'fortran',
'found',
'from',
'full',
'get',
'global',
'go',
'goto',
'grant',
'group',
'having',
'hour',
'identity',
'immediate',
'in',
'include',
'index',
'indicator',
'initially',
'inner',
'input',
'insensitive',
'insert',
'int',
'integer',
'intersect',
'interval',
'into',
'is',
'isolation',
'join',
'key',
'language',
'last',
'leading',
'left',
'level',
'like',
'local',
'lower',
'match',
'max',
'min',
'minute',
'module',
'month',
'names',
'national',
'natural',
'nchar',
'next',
'no',
'none',
'not',
'null',
'nullif',
'numeric',
'octet_length',
'of',
'on',
'only',
'open',
'option',
'or',
'order',
'outer',
'output',
'overlaps',
'pad',
'partial',
'pascal',
'position',
'precision',
'prepare',
'preserve',
'primary',
'prior',
'privileges',
'procedure',
'public',
'read',
'real',
'references',
'relative',
'restrict',
'revoke',
'right',
'rollback',
'rows',
'schema',
'scroll',
'second',
'section',
'select',
'session',
'session_user',
'set',
'size',
'smallint',
'some',
'space',
'sql',
'sqlca',
'sqlcode',
'sqlerror',
'sqlstate',
'sqlwarning',
'substring',
'sum',
'system_user',
'table',
'temporary',
'then',
'time',
'timestamp',
'timezone_hour',
'timezone_minute',
'to',
'trailing',
'transaction',
'translate',
'translation',
'trim',
'true',
'union',
'unique',
'unknown',
'update',
'upper',
'usage',
'user',
'using',
'value',
'values',
'varchar',
'varying',
'view',
'when',
'whenever',
'where',
'with',
'work',
'write',
'year',
'zone',
)
# See https://msdn.microsoft.com/en-us/library/ms189822.aspx.
KEYWORDS = sorted(set(_KEYWORDS_FUTURE + _KEYWORDS_ODBC + _KEYWORDS_SERVER))
# See https://msdn.microsoft.com/en-us/library/ms187752.aspx.
TYPES = (
'bigint',
'binary',
'bit',
'char',
'cursor',
'date',
'datetime',
'datetime2',
'datetimeoffset',
'decimal',
'float',
'hierarchyid',
'image',
'int',
'money',
'nchar',
'ntext',
'numeric',
'nvarchar',
'real',
'smalldatetime',
'smallint',
'smallmoney',
'sql_variant',
'table',
'text',
'time',
'timestamp',
'tinyint',
'uniqueidentifier',
'varbinary',
'varchar',
'xml',
)
# See https://msdn.microsoft.com/en-us/library/ms174318.aspx.
FUNCTIONS = (
'$partition',
'abs',
'acos',
'app_name',
'applock_mode',
'applock_test',
'ascii',
'asin',
'assemblyproperty',
'atan',
'atn2',
'avg',
'binary_checksum',
'cast',
'ceiling',
'certencoded',
'certprivatekey',
'char',
'charindex',
'checksum',
'checksum_agg',
'choose',
'col_length',
'col_name',
'columnproperty',
'compress',
'concat',
'connectionproperty',
'context_info',
'convert',
'cos',
'cot',
'count',
'count_big',
'current_request_id',
'current_timestamp',
'current_transaction_id',
'current_user',
'cursor_status',
'database_principal_id',
'databasepropertyex',
'dateadd',
'datediff',
'datediff_big',
'datefromparts',
'datename',
'datepart',
'datetime2fromparts',
'datetimefromparts',
'datetimeoffsetfromparts',
'day',
'db_id',
'db_name',
'decompress',
'degrees',
'dense_rank',
'difference',
'eomonth',
'error_line',
'error_message',
'error_number',
'error_procedure',
'error_severity',
'error_state',
'exp',
'file_id',
'file_idex',
'file_name',
'filegroup_id',
'filegroup_name',
'filegroupproperty',
'fileproperty',
'floor',
'format',
'formatmessage',
'fulltextcatalogproperty',
'fulltextserviceproperty',
'get_filestream_transaction_context',
'getansinull',
'getdate',
'getutcdate',
'grouping',
'grouping_id',
'has_perms_by_name',
'host_id',
'host_name',
'iif',
'index_col',
'indexkey_property',
'indexproperty',
'is_member',
'is_rolemember',
'is_srvrolemember',
'isdate',
'isjson',
'isnull',
'isnumeric',
'json_modify',
'json_query',
'json_value',
'left',
'len',
'log',
'log10',
'lower',
'ltrim',
'max',
'min',
'min_active_rowversion',
'month',
'nchar',
'newid',
'newsequentialid',
'ntile',
'object_definition',
'object_id',
'object_name',
'object_schema_name',
'objectproperty',
'objectpropertyex',
'opendatasource',
'openjson',
'openquery',
'openrowset',
'openxml',
'original_db_name',
'original_login',
'parse',
'parsename',
'patindex',
'permissions',
'pi',
'power',
'pwdcompare',
'pwdencrypt',
'quotename',
'radians',
'rand',
'rank',
'replace',
'replicate',
'reverse',
'right',
'round',
'row_number',
'rowcount_big',
'rtrim',
'schema_id',
'schema_name',
'scope_identity',
'serverproperty',
'session_context',
'session_user',
'sign',
'sin',
'smalldatetimefromparts',
'soundex',
'sp_helplanguage',
'space',
'sqrt',
'square',
'stats_date',
'stdev',
'stdevp',
'str',
'string_escape',
'string_split',
'stuff',
'substring',
'sum',
'suser_id',
'suser_name',
'suser_sid',
'suser_sname',
'switchoffset',
'sysdatetime',
'sysdatetimeoffset',
'system_user',
'sysutcdatetime',
'tan',
'textptr',
'textvalid',
'timefromparts',
'todatetimeoffset',
'try_cast',
'try_convert',
'try_parse',
'type_id',
'type_name',
'typeproperty',
'unicode',
'upper',
'user_id',
'user_name',
'var',
'varp',
'xact_state',
'year',
)
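# A minimal sketch (not part of this module) of how a Pygments lexer would
# typically consume these word lists, assuming the standard helpers in
# pygments.lexer and pygments.token; MyTsqlLexer is a hypothetical name:
#
#   from pygments.lexer import RegexLexer, words
#   from pygments.token import Keyword, Name, Operator
#
#   class MyTsqlLexer(RegexLexer):
#       tokens = {
#           'root': [
#               (words(KEYWORDS, suffix=r'\b'), Keyword),
#               (words(TYPES, suffix=r'\b'), Name.Class),
#               (words(FUNCTIONS, suffix=r'\b'), Name.Function),
#               (words(OPERATOR_WORDS, suffix=r'\b'), Operator.Word),
#           ],
#       }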
|
|
# Copyright (c) 2012 NetApp, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the NFS driver module."""
import errno
import os
import mock
import mox as mox_lib
from mox import IgnoreArg
from mox import IsA
from mox import stubout
from oslo.config import cfg
from cinder import context
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common import units
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers import nfs
from cinder.volume.drivers import remotefs
class DumbVolume(object):
fields = {}
def __setitem__(self, key, value):
self.fields[key] = value
def __getitem__(self, item):
return self.fields[item]
class RemoteFsDriverTestCase(test.TestCase):
TEST_FILE_NAME = 'test.txt'
TEST_EXPORT = 'nas-host1:/export'
TEST_MNT_POINT = '/mnt/nas'
def setUp(self):
super(RemoteFsDriverTestCase, self).setUp()
self._driver = remotefs.RemoteFSDriver()
self._mox = mox_lib.Mox()
self.configuration = mox_lib.MockObject(conf.Configuration)
self.configuration.append_config_values(mox_lib.IgnoreArg())
self.configuration.nas_secure_file_permissions = 'false'
self.configuration.nas_secure_file_operations = 'false'
self._driver = remotefs.RemoteFSDriver(
configuration=self.configuration)
self.addCleanup(self._mox.UnsetStubs)
def test_create_sparsed_file(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('truncate', '-s', '1G', '/path', run_as_root=True).\
AndReturn("")
mox.ReplayAll()
drv._create_sparsed_file('/path', 1)
mox.VerifyAll()
def test_create_regular_file(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('dd', 'if=/dev/zero', 'of=/path', 'bs=1M', 'count=1024',
run_as_root=True)
mox.ReplayAll()
drv._create_regular_file('/path', 1)
mox.VerifyAll()
def test_create_qcow2_file(self):
(mox, drv) = self._mox, self._driver
file_size = 1
mox.StubOutWithMock(drv, '_execute')
drv._execute('qemu-img', 'create', '-f', 'qcow2',
'-o', 'preallocation=metadata', '/path',
'%s' % str(file_size * units.Gi), run_as_root=True)
mox.ReplayAll()
drv._create_qcow2_file('/path', file_size)
mox.VerifyAll()
def test_set_rw_permissions_for_all(self):
(mox, drv) = self._mox, self._driver
mox.StubOutWithMock(drv, '_execute')
drv._execute('chmod', 'ugo+rw', '/path', run_as_root=True)
mox.ReplayAll()
drv._set_rw_permissions_for_all('/path')
mox.VerifyAll()
@mock.patch.object(remotefs, 'LOG')
def test_set_rw_permissions_with_secure_file_permissions(self, LOG):
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
drv.configuration.nas_secure_file_permissions = 'true'
self.stubs.Set(drv, '_execute', mock.Mock())
drv._set_rw_permissions(self.TEST_FILE_NAME)
self.assertFalse(LOG.warn.called)
@mock.patch.object(remotefs, 'LOG')
def test_set_rw_permissions_without_secure_file_permissions(self, LOG):
drv = self._driver
self.configuration.nas_secure_file_permissions = 'false'
self.stubs.Set(drv, '_execute', mock.Mock())
drv._set_rw_permissions(self.TEST_FILE_NAME)
self.assertTrue(LOG.warn.called)
warn_msg = "%s is being set with open permissions: ugo+rw" % \
self.TEST_FILE_NAME
LOG.warn.assert_called_once_with(warn_msg)
@mock.patch('os.path.join')
@mock.patch('os.path.isfile', return_value=False)
def test_determine_nas_security_options_when_auto_and_new_install(
self,
mock_isfile,
mock_join):
"""Test the setting of the NAS Security Option
        In this test case, we will create the marker file. No pre-existing
        Cinder volumes are found during bootup.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
file_path = '%s/.cinderSecureEnvIndicator' % self.TEST_MNT_POINT
is_new_install = True
drv._ensure_shares_mounted = mock.Mock()
nas_mount = drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
mock_join.return_value = file_path
secure_file_permissions = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_permissions,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
secure_file_operations = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_operations,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
@mock.patch('os.path.join')
@mock.patch('os.path.isfile')
def test_determine_nas_security_options_when_auto_and_new_install_exists(
self,
isfile,
join):
"""Test the setting of the NAS Security Option
        In this test case, the marker file already exists and Cinder volumes
        are found during bootup.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
file_path = '%s/.cinderSecureEnvIndicator' % self.TEST_MNT_POINT
is_new_install = False
drv._ensure_shares_mounted = mock.Mock()
nas_mount = drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
join.return_value = file_path
isfile.return_value = True
secure_file_permissions = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_permissions,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
secure_file_operations = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_operations,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
@mock.patch('os.path.join')
@mock.patch('os.path.isfile')
def test_determine_nas_security_options_when_auto_and_old_install(self,
isfile,
join):
"""Test the setting of the NAS Security Option
In this test case, the marker file does not exist. There are also
pre-existing Cinder volumes.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
file_path = '%s/.cinderSecureEnvIndicator' % self.TEST_MNT_POINT
is_new_install = False
drv._ensure_shares_mounted = mock.Mock()
nas_mount = drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
join.return_value = file_path
isfile.return_value = False
secure_file_permissions = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_permissions,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'false')
secure_file_operations = 'auto'
nas_option = drv._determine_nas_security_option_setting(
secure_file_operations,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'false')
def test_determine_nas_security_options_when_admin_set_true(self):
"""Test the setting of the NAS Security Option
In this test case, the Admin set the flag to 'true'.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
is_new_install = False
drv._ensure_shares_mounted = mock.Mock()
nas_mount = drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
secure_file_permissions = 'true'
nas_option = drv._determine_nas_security_option_setting(
secure_file_permissions,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
secure_file_operations = 'true'
nas_option = drv._determine_nas_security_option_setting(
secure_file_operations,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'true')
def test_determine_nas_security_options_when_admin_set_false(self):
"""Test the setting of the NAS Security Option
        In this test case, the Admin set the flag to 'false'.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_EXPORT]
is_new_install = False
drv._ensure_shares_mounted = mock.Mock()
nas_mount = drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
secure_file_permissions = 'false'
nas_option = drv._determine_nas_security_option_setting(
secure_file_permissions,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'false')
secure_file_operations = 'false'
nas_option = drv._determine_nas_security_option_setting(
secure_file_operations,
nas_mount, is_new_install)
self.assertEqual(nas_option, 'false')
@mock.patch.object(remotefs, 'LOG')
def test_set_nas_security_options(self, LOG):
"""Test setting of NAS Security options.
The RemoteFS driver will force set options to false. The derived
objects will provide an inherited interface to properly set options.
"""
drv = self._driver
is_new_install = False
drv.set_nas_security_options(is_new_install)
self.assertEqual(drv.configuration.nas_secure_file_operations, 'false')
self.assertEqual(drv.configuration.nas_secure_file_permissions,
'false')
self.assertTrue(LOG.warn.called)
def test_secure_file_operations_enabled_true(self):
"""Test nas_secure_file_operations = 'true'
Networked file system based drivers may support secure file
operations. This test verifies the settings when secure.
"""
drv = self._driver
self.configuration.nas_secure_file_operations = 'true'
ret_flag = drv.secure_file_operations_enabled()
self.assertTrue(ret_flag)
def test_secure_file_operations_enabled_false(self):
"""Test nas_secure_file_operations = 'false'
Networked file system based drivers may support secure file
operations. This test verifies the settings when not secure.
"""
drv = self._driver
self.configuration.nas_secure_file_operations = 'false'
ret_flag = drv.secure_file_operations_enabled()
self.assertFalse(ret_flag)
class NfsDriverTestCase(test.TestCase):
"""Test case for NFS driver."""
TEST_NFS_EXPORT1 = 'nfs-host1:/export'
TEST_NFS_EXPORT2 = 'nfs-host2:/export'
TEST_NFS_EXPORT2_OPTIONS = '-o intr'
TEST_SIZE_IN_GB = 1
TEST_MNT_POINT = '/mnt/nfs'
TEST_MNT_POINT_BASE = '/mnt/test'
TEST_LOCAL_PATH = '/mnt/nfs/volume-123'
TEST_FILE_NAME = 'test.txt'
TEST_SHARES_CONFIG_FILE = '/etc/cinder/test-shares.conf'
TEST_NFS_EXPORT_SPACES = 'nfs-host3:/export this'
TEST_MNT_POINT_SPACES = '/ 0 0 0 /foo'
def setUp(self):
super(NfsDriverTestCase, self).setUp()
self._mox = mox_lib.Mox()
self.stubs = stubout.StubOutForTesting()
self.configuration = mox_lib.MockObject(conf.Configuration)
self.configuration.append_config_values(mox_lib.IgnoreArg())
self.configuration.nfs_shares_config = None
self.configuration.nfs_sparsed_volumes = True
self.configuration.nfs_used_ratio = 0.95
self.configuration.nfs_oversub_ratio = 1.0
self.configuration.nfs_mount_point_base = self.TEST_MNT_POINT_BASE
self.configuration.nfs_mount_options = None
self.configuration.nas_secure_file_permissions = 'false'
self.configuration.nas_secure_file_operations = 'false'
self.configuration.volume_dd_blocksize = '1M'
self._driver = nfs.NfsDriver(configuration=self.configuration)
self._driver.shares = {}
self.addCleanup(self.stubs.UnsetAll)
self.addCleanup(self._mox.UnsetStubs)
def stub_out_not_replaying(self, obj, attr_name):
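        # Undocumented helper in the original: swaps obj.attr_name for a
        # standalone mox MockObject that is never put into replay mode or
        # registered with self._mox, so calls made to it during the test are
        # tolerated and never verified.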
attr_to_replace = getattr(obj, attr_name)
stub = mox_lib.MockObject(attr_to_replace)
self.stubs.Set(obj, attr_name, stub)
def test_local_path(self):
"""local_path common use case."""
self.configuration.nfs_mount_point_base = self.TEST_MNT_POINT_BASE
drv = self._driver
volume = DumbVolume()
volume['provider_location'] = self.TEST_NFS_EXPORT1
volume['name'] = 'volume-123'
self.assertEqual(
'/mnt/test/2f4f60214cf43c595666dd815f0360a4/volume-123',
drv.local_path(volume))
def test_copy_image_to_volume(self):
"""resize_image common case usage."""
mox = self._mox
drv = self._driver
TEST_IMG_SOURCE = 'foo.img'
volume = {'size': self.TEST_SIZE_IN_GB, 'name': TEST_IMG_SOURCE}
def fake_local_path(volume):
return volume['name']
self.stubs.Set(drv, 'local_path', fake_local_path)
mox.StubOutWithMock(image_utils, 'fetch_to_raw')
image_utils.fetch_to_raw(None, None, None, TEST_IMG_SOURCE,
mox_lib.IgnoreArg(),
size=self.TEST_SIZE_IN_GB,
run_as_root=True)
mox.StubOutWithMock(image_utils, 'resize_image')
image_utils.resize_image(TEST_IMG_SOURCE, self.TEST_SIZE_IN_GB,
run_as_root=True)
mox.StubOutWithMock(image_utils, 'qemu_img_info')
data = mox_lib.MockAnything()
data.virtual_size = 1 * units.Gi
image_utils.qemu_img_info(TEST_IMG_SOURCE,
run_as_root=True).AndReturn(data)
mox.ReplayAll()
drv.copy_image_to_volume(None, volume, None, None)
mox.VerifyAll()
def test_get_mount_point_for_share(self):
"""_get_mount_point_for_share should calculate correct value."""
drv = self._driver
self.configuration.nfs_mount_point_base = self.TEST_MNT_POINT_BASE
self.assertEqual('/mnt/test/2f4f60214cf43c595666dd815f0360a4',
drv._get_mount_point_for_share(self.TEST_NFS_EXPORT1))
def test_get_capacity_info(self):
"""_get_capacity_info should calculate correct value."""
mox = self._mox
drv = self._driver
stat_total_size = 2620544
stat_avail = 2129984
stat_output = '1 %d %d' % (stat_total_size, stat_avail)
du_used = 490560
du_output = '%d /mnt' % du_used
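        # Editorial note: 'stat -f -c "%S %b %a"' is stubbed to return
        # "<block size> <total blocks> <available blocks>"; with a block size
        # of 1 the expected totals equal the raw counts, and 'du -sb' is
        # stubbed to report the apparent bytes allocated on the share.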
mox.StubOutWithMock(drv, '_get_mount_point_for_share')
drv._get_mount_point_for_share(self.TEST_NFS_EXPORT1).\
AndReturn(self.TEST_MNT_POINT)
mox.StubOutWithMock(drv, '_execute')
drv._execute('stat', '-f', '-c', '%S %b %a',
self.TEST_MNT_POINT,
run_as_root=True).AndReturn((stat_output, None))
drv._execute('du', '-sb', '--apparent-size',
'--exclude', '*snapshot*',
self.TEST_MNT_POINT,
run_as_root=True).AndReturn((du_output, None))
mox.ReplayAll()
self.assertEqual((stat_total_size, stat_avail, du_used),
drv._get_capacity_info(self.TEST_NFS_EXPORT1))
mox.VerifyAll()
def test_get_capacity_info_for_share_and_mount_point_with_spaces(self):
"""_get_capacity_info should calculate correct value."""
mox = self._mox
drv = self._driver
stat_total_size = 2620544
stat_avail = 2129984
stat_output = '1 %d %d' % (stat_total_size, stat_avail)
du_used = 490560
du_output = '%d /mnt' % du_used
mox.StubOutWithMock(drv, '_get_mount_point_for_share')
drv._get_mount_point_for_share(self.TEST_NFS_EXPORT_SPACES).\
AndReturn(self.TEST_MNT_POINT_SPACES)
mox.StubOutWithMock(drv, '_execute')
drv._execute('stat', '-f', '-c', '%S %b %a',
self.TEST_MNT_POINT_SPACES,
run_as_root=True).AndReturn((stat_output, None))
drv._execute('du', '-sb', '--apparent-size',
'--exclude', '*snapshot*',
self.TEST_MNT_POINT_SPACES,
run_as_root=True).AndReturn((du_output, None))
mox.ReplayAll()
self.assertEqual((stat_total_size, stat_avail, du_used),
drv._get_capacity_info(self.TEST_NFS_EXPORT_SPACES))
mox.VerifyAll()
def test_load_shares_config(self):
mox = self._mox
drv = self._driver
drv.configuration.nfs_shares_config = self.TEST_SHARES_CONFIG_FILE
mox.StubOutWithMock(drv, '_read_config_file')
config_data = []
config_data.append(self.TEST_NFS_EXPORT1)
config_data.append('#' + self.TEST_NFS_EXPORT2)
config_data.append('')
config_data.append(self.TEST_NFS_EXPORT2 + ' ' +
self.TEST_NFS_EXPORT2_OPTIONS)
config_data.append('broken:share_format')
drv._read_config_file(self.TEST_SHARES_CONFIG_FILE).\
AndReturn(config_data)
mox.ReplayAll()
drv._load_shares_config(drv.configuration.nfs_shares_config)
self.assertIn(self.TEST_NFS_EXPORT1, drv.shares)
self.assertIn(self.TEST_NFS_EXPORT2, drv.shares)
self.assertEqual(len(drv.shares), 2)
self.assertEqual(drv.shares[self.TEST_NFS_EXPORT2],
self.TEST_NFS_EXPORT2_OPTIONS)
mox.VerifyAll()
def test_ensure_shares_mounted_should_save_mounting_successfully(self):
"""_ensure_shares_mounted should save share if mounted with success."""
mox = self._mox
drv = self._driver
mox.StubOutWithMock(drv, '_read_config_file')
config_data = []
config_data.append(self.TEST_NFS_EXPORT1)
drv._read_config_file(self.TEST_SHARES_CONFIG_FILE).\
AndReturn(config_data)
mox.StubOutWithMock(drv, '_ensure_share_mounted')
drv.configuration.nfs_shares_config = self.TEST_SHARES_CONFIG_FILE
drv._ensure_share_mounted(self.TEST_NFS_EXPORT1)
mox.ReplayAll()
drv._ensure_shares_mounted()
self.assertEqual(1, len(drv._mounted_shares))
self.assertEqual(self.TEST_NFS_EXPORT1, drv._mounted_shares[0])
mox.VerifyAll()
def test_ensure_shares_mounted_should_not_save_mounting_with_error(self):
"""_ensure_shares_mounted should not save share if failed to mount."""
config_data = []
config_data.append(self.TEST_NFS_EXPORT1)
self._driver.configuration.nfs_shares_config =\
self.TEST_SHARES_CONFIG_FILE
self.mock_object(self._driver, '_read_config_file',
mock.Mock(return_value=config_data))
self.mock_object(self._driver, '_ensure_share_mounted',
mock.Mock(side_effect=Exception()))
self.mock_object(remotefs, 'LOG')
self._driver._ensure_shares_mounted()
self.assertEqual(0, len(self._driver._mounted_shares))
self._driver._read_config_file.assert_called_once_with(
self.TEST_SHARES_CONFIG_FILE)
self._driver._ensure_share_mounted.assert_called_once_with(
self.TEST_NFS_EXPORT1)
self.assertEqual(1, remotefs.LOG.error.call_count)
def test_setup_should_throw_error_if_shares_config_not_configured(self):
"""do_setup should throw error if shares config is not configured."""
drv = self._driver
self.configuration.nfs_shares_config = self.TEST_SHARES_CONFIG_FILE
self.assertRaises(exception.NfsException,
drv.do_setup, IsA(context.RequestContext))
def test_setup_should_throw_error_if_oversub_ratio_less_than_zero(self):
"""do_setup should throw error if nfs_oversub_ratio is less than 0."""
drv = self._driver
self.configuration.nfs_oversub_ratio = -1
self.assertRaises(exception.NfsException,
drv.do_setup,
IsA(context.RequestContext))
def test_setup_should_throw_error_if_used_ratio_less_than_zero(self):
"""do_setup should throw error if nfs_used_ratio is less than 0."""
drv = self._driver
self.configuration.nfs_used_ratio = -1
self.assertRaises(exception.NfsException,
drv.do_setup,
IsA(context.RequestContext))
def test_setup_should_throw_error_if_used_ratio_greater_than_one(self):
"""do_setup should throw error if nfs_used_ratio is greater than 1."""
drv = self._driver
self.configuration.nfs_used_ratio = 2
self.assertRaises(exception.NfsException,
drv.do_setup,
IsA(context.RequestContext))
def test_setup_should_throw_exception_if_nfs_client_is_not_installed(self):
"""do_setup should throw error if nfs client is not installed."""
mox = self._mox
drv = self._driver
self.configuration.nfs_shares_config = self.TEST_SHARES_CONFIG_FILE
mox.StubOutWithMock(os.path, 'exists')
os.path.exists(self.TEST_SHARES_CONFIG_FILE).AndReturn(True)
mox.StubOutWithMock(drv, '_execute')
drv._execute('mount.nfs', check_exit_code=False, run_as_root=False).\
AndRaise(OSError(errno.ENOENT, 'No such file or directory'))
mox.ReplayAll()
self.assertRaises(exception.NfsException,
drv.do_setup, IsA(context.RequestContext))
mox.VerifyAll()
def test_find_share_should_throw_error_if_there_is_no_mounted_shares(self):
"""_find_share should throw error if there is no mounted shares."""
drv = self._driver
drv._mounted_shares = []
self.assertRaises(exception.NfsNoSharesMounted, drv._find_share,
self.TEST_SIZE_IN_GB)
def test_find_share(self):
"""_find_share simple use case."""
mox = self._mox
drv = self._driver
drv._mounted_shares = [self.TEST_NFS_EXPORT1, self.TEST_NFS_EXPORT2]
mox.StubOutWithMock(drv, '_get_capacity_info')
drv._get_capacity_info(self.TEST_NFS_EXPORT1).\
AndReturn((5 * units.Gi, 2 * units.Gi,
2 * units.Gi))
drv._get_capacity_info(self.TEST_NFS_EXPORT1).\
AndReturn((5 * units.Gi, 2 * units.Gi,
2 * units.Gi))
drv._get_capacity_info(self.TEST_NFS_EXPORT2).\
AndReturn((10 * units.Gi, 3 * units.Gi,
1 * units.Gi))
drv._get_capacity_info(self.TEST_NFS_EXPORT2).\
AndReturn((10 * units.Gi, 3 * units.Gi,
1 * units.Gi))
mox.ReplayAll()
self.assertEqual(self.TEST_NFS_EXPORT2,
drv._find_share(self.TEST_SIZE_IN_GB))
mox.VerifyAll()
def test_find_share_should_throw_error_if_there_is_no_enough_place(self):
"""_find_share should throw error if there is no share to host vol."""
mox = self._mox
drv = self._driver
drv._mounted_shares = [self.TEST_NFS_EXPORT1, self.TEST_NFS_EXPORT2]
mox.StubOutWithMock(drv, '_get_capacity_info')
drv._get_capacity_info(self.TEST_NFS_EXPORT1).\
AndReturn((5 * units.Gi, 0, 5 * units.Gi))
drv._get_capacity_info(self.TEST_NFS_EXPORT2).\
AndReturn((10 * units.Gi, 0,
10 * units.Gi))
mox.ReplayAll()
self.assertRaises(exception.NfsNoSuitableShareFound, drv._find_share,
self.TEST_SIZE_IN_GB)
mox.VerifyAll()
def _simple_volume(self):
volume = DumbVolume()
volume['provider_location'] = '127.0.0.1:/mnt'
volume['name'] = 'volume_name'
volume['size'] = 10
return volume
def test_create_sparsed_volume(self):
mox = self._mox
drv = self._driver
volume = self._simple_volume()
cfg.CONF.set_override('nfs_sparsed_volumes', True)
mox.StubOutWithMock(drv, '_create_sparsed_file')
mox.StubOutWithMock(drv, '_set_rw_permissions')
drv._create_sparsed_file(IgnoreArg(), IgnoreArg())
drv._set_rw_permissions(IgnoreArg())
mox.ReplayAll()
drv._do_create_volume(volume)
mox.VerifyAll()
def test_create_nonsparsed_volume(self):
mox = self._mox
drv = self._driver
self.configuration.nfs_sparsed_volumes = False
volume = self._simple_volume()
cfg.CONF.set_override('nfs_sparsed_volumes', False)
mox.StubOutWithMock(drv, '_create_regular_file')
mox.StubOutWithMock(drv, '_set_rw_permissions')
drv._create_regular_file(IgnoreArg(), IgnoreArg())
drv._set_rw_permissions(IgnoreArg())
mox.ReplayAll()
drv._do_create_volume(volume)
mox.VerifyAll()
def test_create_volume_should_ensure_nfs_mounted(self):
"""create_volume ensures shares provided in config are mounted."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(nfs, 'LOG')
self.stub_out_not_replaying(drv, '_find_share')
self.stub_out_not_replaying(drv, '_do_create_volume')
mox.StubOutWithMock(drv, '_ensure_shares_mounted')
drv._ensure_shares_mounted()
mox.ReplayAll()
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
drv.create_volume(volume)
mox.VerifyAll()
def test_create_volume_should_return_provider_location(self):
"""create_volume should return provider_location with found share."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(nfs, 'LOG')
self.stub_out_not_replaying(drv, '_ensure_shares_mounted')
self.stub_out_not_replaying(drv, '_do_create_volume')
mox.StubOutWithMock(drv, '_find_share')
drv._find_share(self.TEST_SIZE_IN_GB).AndReturn(self.TEST_NFS_EXPORT1)
mox.ReplayAll()
volume = DumbVolume()
volume['size'] = self.TEST_SIZE_IN_GB
result = drv.create_volume(volume)
self.assertEqual(self.TEST_NFS_EXPORT1, result['provider_location'])
mox.VerifyAll()
def test_delete_volume(self):
"""delete_volume simple test case."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(drv, '_ensure_share_mounted')
volume = DumbVolume()
volume['name'] = 'volume-123'
volume['provider_location'] = self.TEST_NFS_EXPORT1
mox.StubOutWithMock(drv, 'local_path')
drv.local_path(volume).AndReturn(self.TEST_LOCAL_PATH)
mox.StubOutWithMock(drv, '_execute')
drv._execute('rm', '-f', self.TEST_LOCAL_PATH, run_as_root=True)
mox.ReplayAll()
drv.delete_volume(volume)
mox.VerifyAll()
def test_delete_should_ensure_share_mounted(self):
"""delete_volume should ensure that corresponding share is mounted."""
mox = self._mox
drv = self._driver
self.stub_out_not_replaying(drv, '_execute')
volume = DumbVolume()
volume['name'] = 'volume-123'
volume['provider_location'] = self.TEST_NFS_EXPORT1
mox.StubOutWithMock(drv, '_ensure_share_mounted')
drv._ensure_share_mounted(self.TEST_NFS_EXPORT1)
mox.ReplayAll()
drv.delete_volume(volume)
mox.VerifyAll()
def test_delete_should_not_delete_if_provider_location_not_provided(self):
"""delete_volume shouldn't delete if provider_location missed."""
drv = self._driver
self.stubs.Set(drv, '_ensure_share_mounted', mock.Mock())
self.stubs.Set(drv, 'local_path', mock.Mock())
volume = DumbVolume()
volume['name'] = 'volume-123'
volume['provider_location'] = None
with mock.patch.object(drv, '_execute') as mock_execute:
drv.delete_volume(volume)
self.assertEqual(mock_execute.call_count, 0)
def test_get_volume_stats(self):
"""get_volume_stats must fill the correct values."""
mox = self._mox
drv = self._driver
drv._mounted_shares = [self.TEST_NFS_EXPORT1, self.TEST_NFS_EXPORT2]
mox.StubOutWithMock(drv, '_ensure_shares_mounted')
mox.StubOutWithMock(drv, '_get_capacity_info')
drv._ensure_shares_mounted()
drv._get_capacity_info(self.TEST_NFS_EXPORT1).\
AndReturn((10 * units.Gi, 2 * units.Gi,
2 * units.Gi))
drv._get_capacity_info(self.TEST_NFS_EXPORT2).\
AndReturn((20 * units.Gi, 3 * units.Gi,
3 * units.Gi))
mox.ReplayAll()
drv.get_volume_stats()
self.assertEqual(drv._stats['total_capacity_gb'], 30.0)
self.assertEqual(drv._stats['free_capacity_gb'], 5.0)
mox.VerifyAll()
def _check_is_share_eligible(self, total_size, total_available,
total_allocated, requested_volume_size):
with mock.patch.object(self._driver, '_get_capacity_info')\
as mock_get_capacity_info:
mock_get_capacity_info.return_value = (total_size,
total_available,
total_allocated)
return self._driver._is_share_eligible('fake_share',
requested_volume_size)
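    # Editorial summary of the ineligibility conditions exercised below (per
    # the comments in the individual tests): used space exceeding
    # nfs_used_ratio, apparent available space not exceeding the requested
    # size, and total_allocated / total_size reaching nfs_oversub_ratio.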
def test_is_share_eligible(self):
total_size = 100.0 * units.Gi
total_available = 90.0 * units.Gi
total_allocated = 10.0 * units.Gi
requested_volume_size = 1 # GiB
self.assertTrue(self._check_is_share_eligible(total_size,
total_available,
total_allocated,
requested_volume_size))
def test_is_share_eligible_above_used_ratio(self):
total_size = 100.0 * units.Gi
total_available = 4.0 * units.Gi
total_allocated = 96.0 * units.Gi
requested_volume_size = 1 # GiB
# Check used > used_ratio statement entered
self.assertFalse(self._check_is_share_eligible(total_size,
total_available,
total_allocated,
requested_volume_size))
def test_is_share_eligible_above_oversub_ratio(self):
total_size = 100.0 * units.Gi
total_available = 10.0 * units.Gi
total_allocated = 90.0 * units.Gi
requested_volume_size = 10 # GiB
# Check apparent_available <= requested_volume_size statement entered
self.assertFalse(self._check_is_share_eligible(total_size,
total_available,
total_allocated,
requested_volume_size))
def test_is_share_eligible_reserved_space_above_oversub_ratio(self):
total_size = 100.0 * units.Gi
total_available = 10.0 * units.Gi
total_allocated = 100.0 * units.Gi
requested_volume_size = 1 # GiB
# Check total_allocated / total_size >= oversub_ratio
# statement entered
self.assertFalse(self._check_is_share_eligible(total_size,
total_available,
total_allocated,
requested_volume_size))
def test_extend_volume(self):
"""Extend a volume by 1."""
drv = self._driver
volume = {'id': '80ee16b6-75d2-4d54-9539-ffc1b4b0fb10', 'size': 1,
'provider_location': 'nfs_share'}
path = 'path'
newSize = volume['size'] + 1
with mock.patch.object(image_utils, 'resize_image') as resize:
with mock.patch.object(drv, 'local_path', return_value=path):
with mock.patch.object(drv, '_is_share_eligible',
return_value=True):
with mock.patch.object(drv, '_is_file_size_equal',
return_value=True):
drv.extend_volume(volume, newSize)
resize.assert_called_once_with(path, newSize,
run_as_root=True)
def test_extend_volume_failure(self):
"""Error during extend operation."""
drv = self._driver
volume = {'id': '80ee16b6-75d2-4d54-9539-ffc1b4b0fb10', 'size': 1,
'provider_location': 'nfs_share'}
with mock.patch.object(image_utils, 'resize_image'):
with mock.patch.object(drv, 'local_path', return_value='path'):
with mock.patch.object(drv, '_is_share_eligible',
return_value=True):
with mock.patch.object(drv, '_is_file_size_equal',
return_value=False):
self.assertRaises(exception.ExtendVolumeError,
drv.extend_volume, volume, 2)
def test_extend_volume_insufficient_space(self):
"""Insufficient space on nfs_share during extend operation."""
drv = self._driver
volume = {'id': '80ee16b6-75d2-4d54-9539-ffc1b4b0fb10', 'size': 1,
'provider_location': 'nfs_share'}
with mock.patch.object(image_utils, 'resize_image'):
with mock.patch.object(drv, 'local_path', return_value='path'):
with mock.patch.object(drv, '_is_share_eligible',
return_value=False):
with mock.patch.object(drv, '_is_file_size_equal',
return_value=False):
self.assertRaises(exception.ExtendVolumeError,
drv.extend_volume, volume, 2)
def test_is_file_size_equal(self):
"""File sizes are equal."""
drv = self._driver
path = 'fake/path'
size = 2
data = mock.MagicMock()
data.virtual_size = size * units.Gi
with mock.patch.object(image_utils, 'qemu_img_info',
return_value=data):
self.assertTrue(drv._is_file_size_equal(path, size))
def test_is_file_size_equal_false(self):
"""File sizes are not equal."""
drv = self._driver
path = 'fake/path'
size = 2
data = mock.MagicMock()
data.virtual_size = (size + 1) * units.Gi
with mock.patch.object(image_utils, 'qemu_img_info',
return_value=data):
self.assertFalse(drv._is_file_size_equal(path, size))
@mock.patch.object(nfs, 'LOG')
def test_set_nas_security_options_when_true(self, LOG):
"""Test higher level setting of NAS Security options.
The NFS driver overrides the base method with a driver specific
version.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_NFS_EXPORT1]
is_new_install = True
drv._ensure_shares_mounted = mock.Mock()
drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
drv._determine_nas_security_option_setting = mock.Mock(
return_value='true')
drv.set_nas_security_options(is_new_install)
self.assertEqual(drv.configuration.nas_secure_file_operations, 'true')
self.assertEqual(drv.configuration.nas_secure_file_permissions, 'true')
self.assertFalse(LOG.warn.called)
@mock.patch.object(nfs, 'LOG')
def test_set_nas_security_options_when_false(self, LOG):
"""Test higher level setting of NAS Security options.
The NFS driver overrides the base method with a driver specific
version.
"""
drv = self._driver
drv._mounted_shares = [self.TEST_NFS_EXPORT1]
is_new_install = False
drv._ensure_shares_mounted = mock.Mock()
drv._get_mount_point_for_share = mock.Mock(
return_value=self.TEST_MNT_POINT)
drv._determine_nas_security_option_setting = mock.Mock(
return_value='false')
drv.set_nas_security_options(is_new_install)
self.assertEqual(drv.configuration.nas_secure_file_operations, 'false')
self.assertEqual(drv.configuration.nas_secure_file_permissions,
'false')
self.assertTrue(LOG.warn.called)